You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
91 lines
2.6 KiB
91 lines
2.6 KiB
7 years ago
|
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
|
||
|
|
||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||
|
you may not use this file except in compliance with the License.
|
||
|
You may obtain a copy of the License at
|
||
|
|
||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||
|
|
||
|
Unless required by applicable law or agreed to in writing, software
|
||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||
|
See the License for the specific language governing permissions and
|
||
|
limitations under the License. */
|
||
|
|
||
7 years ago
|
#ifdef __AVX__
|
||
|
|
||
7 years ago
|
#include <immintrin.h>
|
||
7 years ago
|
#include "paddle/operators/math/detail/activation_functions.h"
|
||
7 years ago
|
// TODO(qingqing) refine this dependence
|
||
|
#include "paddle/cuda/src/avx_mathfun.h"
|
||
7 years ago
|
|
||
7 years ago
|
namespace paddle {
|
||
|
namespace operators {
|
||
|
namespace math {
|
||
|
namespace detail {
|
||
7 years ago
|
|
||
7 years ago
|
// Lane-wise exponential e^x for 8 packed floats; thin wrapper over the
// AVX implementation exp256_ps from avx_mathfun.h (included above).
__m256 Exp(__m256 a) { return exp256_ps(a); }
|
||
7 years ago
|
|
||
7 years ago
|
namespace forward {
|
||
|
namespace avx {
|
||
7 years ago
|
// ReLU forward: max(a, 0) applied lane-wise to 8 packed floats.
__m256 Relu(const __m256 a) {
  return _mm256_max_ps(a, _mm256_setzero_ps());
}
|
||
|
|
||
7 years ago
|
__m256 Sigmoid(const __m256 a) {
|
||
7 years ago
|
__m256 max = _mm256_set1_ps(SIGMOID_THRESHOLD_MAX);
|
||
|
__m256 min = _mm256_set1_ps(SIGMOID_THRESHOLD_MIN);
|
||
|
__m256 tmp = _mm256_max_ps(a, min);
|
||
|
tmp = _mm256_min_ps(tmp, max);
|
||
|
tmp = _mm256_sub_ps(_mm256_set1_ps(0.0f), tmp);
|
||
7 years ago
|
tmp = Exp(tmp);
|
||
7 years ago
|
tmp = _mm256_add_ps(_mm256_set1_ps(1.0f), tmp);
|
||
|
tmp = _mm256_div_ps(_mm256_set1_ps(1.0f), tmp);
|
||
|
return tmp;
|
||
|
}
|
||
|
|
||
7 years ago
|
__m256 Tanh(const __m256 a) {
|
||
7 years ago
|
__m256 max = _mm256_set1_ps(EXP_MAX_INPUT);
|
||
|
__m256 tmp = _mm256_mul_ps(_mm256_set1_ps(-2.0f), a);
|
||
|
tmp = _mm256_min_ps(tmp, max);
|
||
7 years ago
|
tmp = Exp(tmp);
|
||
7 years ago
|
return _mm256_sub_ps(_mm256_div_ps(_mm256_set1_ps(2.0f),
|
||
|
_mm256_add_ps(_mm256_set1_ps(1.0f), tmp)),
|
||
|
_mm256_set1_ps(1.0f));
|
||
|
}
|
||
|
|
||
7 years ago
|
// Identity (linear) activation forward: returns the input unchanged.
__m256 Identity(const __m256 a) { return a; }
|
||
7 years ago
|
|
||
7 years ago
|
} // namespace avx
|
||
|
} // namespace forward
|
||
|
|
||
|
namespace backward {
|
||
|
namespace avx {
|
||
7 years ago
|
// ReLU backward: grad_in = grad_out * (out > 0 ? 1 : 0), per lane.
// a is the incoming gradient, b is the forward output.
__m256 Relu(const __m256 a, const __m256 b) {
  // cmp produces an all-ones bit mask where b > 0; AND-ing with 1.0f
  // converts the mask into a 0.0f/1.0f multiplier.
  const __m256 positive = _mm256_cmp_ps(b, _mm256_setzero_ps(), _CMP_GT_OS);
  const __m256 gate = _mm256_and_ps(positive, _mm256_set1_ps(1.0f));
  return _mm256_mul_ps(a, gate);
}
|
||
|
|
||
7 years ago
|
// Sigmoid backward: grad_in = grad_out * out * (1 - out), per lane.
// a is the incoming gradient, b is the forward output.
__m256 Sigmoid(const __m256 a, const __m256 b) {
  const __m256 one_minus_b = _mm256_sub_ps(_mm256_set1_ps(1.0f), b);
  return _mm256_mul_ps(_mm256_mul_ps(a, b), one_minus_b);
}
|
||
|
|
||
7 years ago
|
// Tanh backward: grad_in = grad_out * (1 - out^2), per lane.
// a is the incoming gradient, b is the forward output.
__m256 Tanh(const __m256 a, const __m256 b) {
  const __m256 b_squared = _mm256_mul_ps(b, b);
  return _mm256_mul_ps(a, _mm256_sub_ps(_mm256_set1_ps(1.0f), b_squared));
}
|
||
|
|
||
7 years ago
|
// Identity backward: the gradient a passes through unchanged; the forward
// output b is intentionally unused (kept for a uniform backward signature).
__m256 Identity(const __m256 a, const __m256 b) { return a; }
|
||
7 years ago
|
} // namespace avx
|
||
|
} // namespace backward
|
||
|
|
||
|
} // namespace detail
|
||
|
} // namespace math
|
||
|
} // namespace operators
|
||
|
} // namespace paddle
|
||
7 years ago
|
|
||
|
#endif
|