Add inline and bit manipulation in CpuId.h

Branch: avx_docs
Author: liaogang (8 years ago)
Commit: 880aa220be

.gitignore

@@ -9,3 +9,6 @@ build/
 .pydevproject
 Makefile
 .test_env/
+*~
+bazel-*

@ -6,7 +6,8 @@
- repo: https://github.com/reyoung/mirrors-yapf.git - repo: https://github.com/reyoung/mirrors-yapf.git
sha: v0.13.2 sha: v0.13.2
hooks: hooks:
- id: yapf - id: yapf
files: (.*\.(py|bzl)|BUILD|.*\.BUILD|WORKSPACE)$ # Bazel BUILD files follow Python syntax.
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
sha: 7539d8bd1a00a3c1bfd34cdb606d3a6372e83469 sha: 7539d8bd1a00a3c1bfd34cdb606d3a6372e83469
hooks: hooks:
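The new files: pattern above routes Bazel files (BUILD, *.bzl, *.BUILD, WORKSPACE) through the yapf hook alongside ordinary Python sources, since Bazel's build language is Python-like. As a minimal sketch of how a contributor would exercise these hooks locally (standard pre-commit commands, not part of this commit):

    # Install the pre-commit framework and register the hooks from
    # .pre-commit-config.yaml as a git pre-commit hook.
    pip install pre-commit
    pre-commit install

    # Run every configured hook over all tracked files once, which is
    # roughly what the new PRE_COMMIT CI job automates.
    pre-commit run --all-files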

@@ -8,10 +8,13 @@ os:
 env:
   - JOB=DOCS
   - JOB=BUILD_AND_TEST
+  - JOB=PRE_COMMIT
 matrix:
   exclude:
     - os: osx
-      env: JOB=DOCS # Only generate documentation in linux
+      env: JOB=DOCS # Only generate documentation in linux.
+    - os: osx
+      env: JOB=PRE_COMMIT # Only check pre-commit hook in linux
 addons:
   apt:
@@ -39,18 +42,23 @@ addons:
       - lcov
       - graphviz
       - swig
+      - clang-format-3.8
 before_install:
   - |
     if [ ${JOB} == "BUILD_AND_TEST" ]; then
-      if ! git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '(\.md$)|(\.rst$)|(\.jpg$)|(\.png$)'
-      then
-        echo "Only markdown docs were updated, stopping build process."
-        exit
+      local change_list=`git diff --name-only $TRAVIS_COMMIT_RANGE`
+      if [ $? -eq 0 ]; then  # if git diff return no zero, then rerun unit test.
+        if ! echo ${change_list} | grep -qvE '(\.md$)|(\.rst$)|(\.jpg$)|(\.png$)'
+        then
+          echo "Only markdown docs were updated, stopping build process."
+          exit
+        fi
       fi
     fi
   - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo paddle/scripts/travis/before_install.linux.sh; fi
   - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then paddle/scripts/travis/before_install.osx.sh; fi
-  - pip install wheel protobuf sphinx breathe recommonmark virtualenv numpy sphinx_rtd_theme
+  - if [[ "$JOB" == "PRE_COMMIT" ]]; then sudo ln -s /usr/bin/clang-format-3.8 /usr/bin/clang-format; fi
+  - pip install wheel protobuf sphinx recommonmark virtualenv numpy sphinx_rtd_theme pre-commit
 script:
   - paddle/scripts/travis/main.sh
 notifications:
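The reworked before_install step above first captures the changed-file list, and only when git diff succeeds does it apply the docs-only test: grep -qvE exits 0 when at least one changed file is not a .md/.rst/.jpg/.png, so the negated check means "nothing but docs changed" and the build stops early. A minimal standalone sketch of that logic, using a hypothetical change_list value (not taken from the commit):

    #!/bin/bash
    # Decide whether a change list touches only documentation/images.
    # grep -qvE succeeds if any line does NOT match the doc patterns;
    # negating it therefore detects a docs-only change set.
    change_list=$(printf '%s\n' "README.md" "doc/getting_started.rst")

    if ! echo "${change_list}" | grep -qvE '(\.md$)|(\.rst$)|(\.jpg$)|(\.png$)'
    then
        echo "Only docs were updated, skipping build and test."
    else
        echo "Source files changed, running the full build and test."
    fi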

@ -69,7 +69,7 @@ include(coveralls)
find_package(Git REQUIRED) find_package(Git REQUIRED)
# version.cmake will get the current PADDLE_VERSION # version.cmake will get the current PADDLE_VERSION
include(version) include(version)
add_definitions(-DPADDLE_VERSION=\"${PADDLE_VERSION}\") add_definitions(-DPADDLE_VERSION=${PADDLE_VERSION})
if(NOT WITH_GPU) if(NOT WITH_GPU)
add_definitions(-DPADDLE_ONLY_CPU) add_definitions(-DPADDLE_ONLY_CPU)

@@ -1,4 +1,4 @@
-Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 
                                  Apache License
                            Version 2.0, January 2004
@@ -188,7 +188,7 @@ Copyright (c) 2016 Baidu, Inc. All Rights Reserved
       same "printed page" as the copyright notice for easier
       identification within third-party archives.
 
-   Copyright (c) 2016 Baidu, Inc. All Rights Reserve.
+   Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.

@@ -0,0 +1,15 @@
+# External dependency to Google protobuf.
+http_archive(
+    name="protobuf",
+    url="http://github.com/google/protobuf/archive/v3.1.0.tar.gz",
+    sha256="0a0ae63cbffc274efb573bdde9a253e3f32e458c41261df51c5dbc5ad541e8f7",
+    strip_prefix="protobuf-3.1.0", )
+
+# External dependency to gtest 1.7.0. This method comes from
+# https://www.bazel.io/versions/master/docs/tutorial/cpp.html.
+new_http_archive(
+    name="gtest",
+    url="https://github.com/google/googletest/archive/release-1.7.0.zip",
+    sha256="b58cb7547a28b2c718d1e38aee18a3659c9e3ff52440297e965f5edffe34b6d0",
+    build_file="third_party/gtest.BUILD",
+    strip_prefix="googletest-release-1.7.0", )

@@ -25,4 +25,3 @@ test 4 2 256 512
 test 4 2 512 128
 test 4 2 512 256
 test 4 2 512 512
-

@ -30,7 +30,6 @@ if(WITH_DOC)
find_package(Sphinx REQUIRED) find_package(Sphinx REQUIRED)
find_package(Doxygen REQUIRED) find_package(Doxygen REQUIRED)
find_python_module(recommonmark REQUIRED) find_python_module(recommonmark REQUIRED)
find_python_module(breathe REQUIRED)
endif() endif()
if(WITH_SWIG_PY) if(WITH_SWIG_PY)

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,4 +15,3 @@ set -e
 wget https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
 tar zxf cifar-10-python.tar.gz
 rm cifar-10-python.tar.gz
-

@@ -15,5 +15,3 @@ do
     gunzip ${fname}.gz
   fi
 done
-
-

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,10 +14,9 @@
 from paddle.trainer_config_helpers import *
 
 mode = get_config_arg("mode", str, "generator")
-assert mode in set(["generator",
-                    "discriminator",
-                    "generator_training",
-                    "discriminator_training"])
+assert mode in set([
+    "generator", "discriminator", "generator_training", "discriminator_training"
+])
 
 is_generator_training = mode == "generator_training"
 is_discriminator_training = mode == "discriminator_training"
@@ -38,8 +37,8 @@ sample_dim = 2
 settings(
     batch_size=128,
     learning_rate=1e-4,
-    learning_method=AdamOptimizer(beta1=0.5)
-)
+    learning_method=AdamOptimizer(beta1=0.5))
+
 
 def discriminator(sample):
     """
@@ -50,70 +49,87 @@ def discriminator(sample):
     of the sample is from real data.
     """
     param_attr = ParamAttr(is_static=is_generator_training)
-    bias_attr = ParamAttr(is_static=is_generator_training,
-                          initial_mean=1.0,
-                          initial_std=0)
+    bias_attr = ParamAttr(
+        is_static=is_generator_training, initial_mean=1.0, initial_std=0)
 
-    hidden = fc_layer(input=sample, name="dis_hidden", size=hidden_dim,
-                      bias_attr=bias_attr,
-                      param_attr=param_attr,
-                      act=ReluActivation())
+    hidden = fc_layer(
+        input=sample,
+        name="dis_hidden",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=ReluActivation())
 
-    hidden2 = fc_layer(input=hidden, name="dis_hidden2", size=hidden_dim,
-                       bias_attr=bias_attr,
-                       param_attr=param_attr,
-                       act=LinearActivation())
+    hidden2 = fc_layer(
+        input=hidden,
+        name="dis_hidden2",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=LinearActivation())
 
-    hidden_bn = batch_norm_layer(hidden2,
-                     act=ReluActivation(),
-                     name="dis_hidden_bn",
-                     bias_attr=bias_attr,
-                     param_attr=ParamAttr(is_static=is_generator_training,
-                                          initial_mean=1.0,
-                                          initial_std=0.02),
-                     use_global_stats=False)
+    hidden_bn = batch_norm_layer(
+        hidden2,
+        act=ReluActivation(),
+        name="dis_hidden_bn",
+        bias_attr=bias_attr,
+        param_attr=ParamAttr(
+            is_static=is_generator_training, initial_mean=1.0,
+            initial_std=0.02),
+        use_global_stats=False)
 
-    return fc_layer(input=hidden_bn, name="dis_prob", size=2,
-                    bias_attr=bias_attr,
-                    param_attr=param_attr,
-                    act=SoftmaxActivation())
+    return fc_layer(
+        input=hidden_bn,
+        name="dis_prob",
+        size=2,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=SoftmaxActivation())
+
 
 def generator(noise):
     """
     generator generates a sample given noise
     """
     param_attr = ParamAttr(is_static=is_discriminator_training)
-    bias_attr = ParamAttr(is_static=is_discriminator_training,
-                          initial_mean=1.0,
-                          initial_std=0)
+    bias_attr = ParamAttr(
+        is_static=is_discriminator_training, initial_mean=1.0, initial_std=0)
 
-    hidden = fc_layer(input=noise,
-                      name="gen_layer_hidden",
-                      size=hidden_dim,
-                      bias_attr=bias_attr,
-                      param_attr=param_attr,
-                      act=ReluActivation())
+    hidden = fc_layer(
+        input=noise,
+        name="gen_layer_hidden",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=ReluActivation())
 
-    hidden2 = fc_layer(input=hidden, name="gen_hidden2", size=hidden_dim,
-                       bias_attr=bias_attr,
-                       param_attr=param_attr,
-                       act=LinearActivation())
+    hidden2 = fc_layer(
+        input=hidden,
+        name="gen_hidden2",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=LinearActivation())
 
-    hidden_bn = batch_norm_layer(hidden2,
-                     act=ReluActivation(),
-                     name="gen_layer_hidden_bn",
-                     bias_attr=bias_attr,
-                     param_attr=ParamAttr(is_static=is_discriminator_training,
-                                          initial_mean=1.0,
-                                          initial_std=0.02),
-                     use_global_stats=False)
+    hidden_bn = batch_norm_layer(
+        hidden2,
+        act=ReluActivation(),
+        name="gen_layer_hidden_bn",
+        bias_attr=bias_attr,
+        param_attr=ParamAttr(
+            is_static=is_discriminator_training,
+            initial_mean=1.0,
+            initial_std=0.02),
+        use_global_stats=False)
 
-    return fc_layer(input=hidden_bn,
-                    name="gen_layer1",
-                    size=sample_dim,
-                    bias_attr=bias_attr,
-                    param_attr=param_attr,
-                    act=LinearActivation())
+    return fc_layer(
+        input=hidden_bn,
+        name="gen_layer1",
+        size=sample_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=LinearActivation())
+
 
 if is_generator_training:
     noise = data_layer(name="noise", size=noise_dim)
@@ -126,7 +142,8 @@ if is_generator_training or is_discriminator_training:
     label = data_layer(name="label", size=1)
     prob = discriminator(sample)
     cost = cross_entropy(input=prob, label=label)
-    classification_error_evaluator(input=prob, label=label, name=mode+'_error')
+    classification_error_evaluator(
+        input=prob, label=label, name=mode + '_error')
     outputs(cost)
 
 if is_generator:

File diff suppressed because it is too large.

File diff suppressed because it is too large.

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 -*-
-# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

Some files were not shown because too many files have changed in this diff.
