commit f46961e223
@@ -1,15 +0,0 @@
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/enforce.h"
@@ -1,75 +0,0 @@
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once
#include <glog/logging.h>
#include <paddle/string/printf.h>
#include <exception>
#include <sstream>

namespace paddle {
namespace framework {

/**
 * @brief Enforce exception. Inherits std::exception
 *
 * Any enforce condition that is not met throws an EnforceNotMet exception.
 */
class EnforceNotMet : public std::exception {
 public:
  EnforceNotMet(const std::string& msg, const char* file, int fileline) {
    std::ostringstream sout;
    sout << msg << " at [" << file << ":" << fileline << "];";
    all_msg_ = sout.str();
  }

  const char* what() const noexcept override { return all_msg_.c_str(); }

 private:
  std::string all_msg_;
};

// From https://stackoverflow.com/questions/30130930/
// __builtin_expect is a compiler builtin (GCC/Clang). Since an enforced
// condition is true in most situations, the `UNLIKELY` macro lets the
// compiler generate faster code for the common path.
#define UNLIKELY(condition) __builtin_expect(static_cast<bool>(condition), 0)

/**
 * @brief Throw an EnforceNotMet exception, automatically filling in __FILE__
 * and __LINE__
 *
 * This macro takes __VA_ARGS__; users can pass arguments of any type that can
 * be serialized to a std::ostream
 */
#define PADDLE_THROW(...)                                            \
  do {                                                               \
    throw ::paddle::framework::EnforceNotMet(                        \
        ::paddle::string::Sprintf(__VA_ARGS__), __FILE__, __LINE__); \
  } while (0)

/**
 * @brief Enforce a condition; otherwise, throw an EnforceNotMet
 */
#ifdef NDEBUG
#define PADDLE_ENFORCE(condition, ...) \
  do {                                 \
    if (UNLIKELY(!(condition))) {      \
      PADDLE_THROW(__VA_ARGS__);       \
    }                                  \
  } while (0)
#else
#define PADDLE_ENFORCE(condition, ...) \
  CHECK(condition) << ::paddle::string::Sprintf(__VA_ARGS__);
#endif

}  // namespace framework
}  // namespace paddle
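The deleted header above defines the enforce pattern: build a message, attach __FILE__ and __LINE__, and throw when a condition fails. The sketch below is a minimal, self-contained approximation with no Paddle dependencies; names such as DemoNotMet and DEMO_ENFORCE are illustrative only, and std::ostringstream stands in for paddle::string::Sprintf.

// Minimal stand-alone sketch of the enforce pattern (illustrative only).
#include <exception>
#include <iostream>
#include <sstream>
#include <string>

class DemoNotMet : public std::exception {
 public:
  DemoNotMet(const std::string& msg, const char* file, int line) {
    std::ostringstream sout;
    sout << msg << " at [" << file << ":" << line << "];";
    all_msg_ = sout.str();
  }
  const char* what() const noexcept override { return all_msg_.c_str(); }

 private:
  std::string all_msg_;
};

// Throw with the call site attached.
#define DEMO_THROW(msg) throw DemoNotMet((msg), __FILE__, __LINE__)

// Throw only when the condition fails; the common (true) path stays cheap.
#define DEMO_ENFORCE(condition, msg) \
  do {                               \
    if (!(condition)) {              \
      DEMO_THROW(msg);               \
    }                                \
  } while (0)

int main() {
  try {
    DEMO_ENFORCE(1 + 1 == 3, "arithmetic is broken");
  } catch (const std::exception& e) {
    std::cout << e.what() << std::endl;  // prints the message with file:line
  }
  return 0;
}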
@@ -0,0 +1,61 @@
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/operators/sgd_op.h"
#include "paddle/framework/op_registry.h"
#include "paddle/framework/tensor.h"

namespace paddle {
namespace operators {

class SGDOp : public framework::OperatorWithKernel {
 protected:
  void InferShape(
      const std::vector<const framework::Tensor *> &inputs,
      const std::vector<framework::Tensor *> &outputs) const override {
    PADDLE_ENFORCE(inputs.size() == 2, "Input size of SGDOp must be two");
    PADDLE_ENFORCE(outputs.size() == 1, "Output size of SGDOp must be one");
    PADDLE_ENFORCE(inputs[0] != nullptr, "inputs[0] must be set");
    PADDLE_ENFORCE(inputs[1] != nullptr, "inputs[1] must be set");
    PADDLE_ENFORCE(outputs[0] != nullptr, "outputs[0] must be set");
    PADDLE_ENFORCE(inputs[0]->dims() == inputs[1]->dims(),
                   "The two inputs of SGDOp must have the same dimension.");
    outputs[0]->set_dims(inputs[0]->dims());
  }
};

class SGDOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SGDOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("param", "input parameter");
    AddInput("grad", "input gradient");
    AddOutput("param_out", "output parameter");
    AddAttr<float>("learning_rate", "learning rate of sgd");
    AddComment(R"DOC(

Simplest SGD algorithm.

param_out = param - learning_rate * grad;

)DOC");
  }
};
}  // namespace operators
}  // namespace paddle

REGISTER_OP(sgd, paddle::operators::SGDOp, paddle::operators::SGDOpMaker);
typedef paddle::operators::SGDOpKernel<::paddle::platform::CPUPlace, float>
    SGDOpKernel_CPU_float;
REGISTER_OP_CPU_KERNEL(sgd, SGDOpKernel_CPU_float);
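The InferShape checks above require exactly two inputs and one output, with both inputs sharing the same dimensions, and the DOC block states the update rule. As a hedged illustration of that rule only, here is a plain C++ equivalent; the function name SgdUpdate is hypothetical and nothing below uses Paddle's Tensor or OpKernel machinery.

// Stand-alone sketch of the documented update rule (not Paddle code):
// param_out[i] = param[i] - learning_rate * grad[i]
#include <iostream>
#include <vector>

std::vector<float> SgdUpdate(const std::vector<float>& param,
                             const std::vector<float>& grad,
                             float learning_rate) {
  std::vector<float> param_out(param.size());
  for (size_t i = 0; i < param.size(); ++i) {
    param_out[i] = param[i] - learning_rate * grad[i];
  }
  return param_out;
}

int main() {
  // With param = {1.0, 2.0}, grad = {0.5, -1.0} and learning_rate = 0.1,
  // the result is {0.95, 2.1}.
  auto out = SgdUpdate({1.0f, 2.0f}, {0.5f, -1.0f}, 0.1f);
  for (float v : out) std::cout << v << " ";
  std::cout << std::endl;
  return 0;
}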
@@ -0,0 +1,5 @@
#include "paddle/operators/sgd_op.h"
#include "paddle/framework/op_registry.h"

typedef paddle::operators::SGDOpKernel<::paddle::platform::GPUPlace, float> SGDOpKernel_GPU_float;
REGISTER_OP_GPU_KERNEL(sgd, SGDOpKernel_GPU_float);
@@ -0,0 +1,42 @@
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once
#include "glog/logging.h"
#include "paddle/framework/eigen.h"
#include "paddle/framework/operator.h"

namespace paddle {
namespace operators {

template <typename Place, typename T>
class SGDOpKernel : public framework::OpKernel {
 public:
  void Compute(const framework::KernelContext& ctx) const override {
    auto param = ctx.Input("param")->Get<framework::Tensor>();
    auto grad = ctx.Input("grad")->Get<framework::Tensor>();
    auto* param_out = ctx.Output(0)->GetMutable<framework::Tensor>();
    float lr = ctx.op_.GetAttr<float>("learning_rate");

    param_out->mutable_data<T>(ctx.GetPlace());

    framework::EigenVector<T>::Flatten(*param_out)
        .device(*(ctx.GetEigenDevice<Place>())) =
        framework::EigenVector<T>::Flatten(param) -
        lr * framework::EigenVector<T>::Flatten(grad);
  }
};

}  // namespace operators
}  // namespace paddle
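The kernel above writes the update as a single Eigen expression over flattened tensors, evaluated on the device selected by the Place template argument. The stand-alone sketch below approximates the same idea on the CPU with plain Eigen (assuming Eigen is installed; Paddle's EigenVector<T>::Flatten wraps the tensor's buffer in an Eigen map, so this is only an analogy, not the framework's actual types):

// Sketch: the SGD update as one Eigen expression (CPU only, no Paddle).
#include <Eigen/Dense>
#include <iostream>

int main() {
  float param[4] = {1.f, 2.f, 3.f, 4.f};
  float grad[4] = {0.5f, 0.5f, -1.f, 0.f};
  float param_out[4];
  float lr = 0.1f;

  // Map the raw buffers as flat vectors, mirroring EigenVector<T>::Flatten.
  Eigen::Map<Eigen::VectorXf> p(param, 4), g(grad, 4), out(param_out, 4);

  // One vectorized expression: param_out = param - lr * grad.
  out = p - lr * g;

  std::cout << out.transpose() << std::endl;  // 0.95 1.95 3.1 4
  return 0;
}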
@@ -0,0 +1,22 @@
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <gtest/gtest.h>
#include <paddle/framework/op_registry.h>
USE_OP(sgd);
TEST(SGDOp, GetOpProto) {
  auto& protos = paddle::framework::OpRegistry::protos();
  auto it = protos.find("sgd");
  ASSERT_NE(it, protos.end());
}
@@ -0,0 +1,141 @@
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <paddle/string/printf.h>
#include <sstream>
#include <stdexcept>
#include <string>

#ifndef PADDLE_ONLY_CPU

#include "paddle/platform/dynload/cublas.h"
#include "paddle/platform/dynload/cudnn.h"
#include "paddle/platform/dynload/curand.h"

#include <cublas_v2.h>
#include <cudnn.h>
#include <curand.h>
#include <thrust/system/cuda/error.h>
#include <thrust/system_error.h>

#endif  // PADDLE_ONLY_CPU

namespace paddle {
namespace platform {

// Because most enforce conditions evaluate to true, we can use
// __builtin_expect to hint the compiler that the condition is expected to
// hold, so it lays out faster code for the common path. __builtin_expect is
// a GCC/Clang builtin.
// For more details, please check https://stackoverflow.com/a/43870188/724872.
#define UNLIKELY(condition) __builtin_expect(static_cast<bool>(condition), 0)

#ifndef PADDLE_ONLY_CPU

template <typename... Args>
inline void throw_on_error(cudaError_t e, const Args&... args) {
  if (UNLIKELY(e)) {
    // clang-format off
    throw thrust::system_error(
        e, thrust::cuda_category(),
        string::Sprintf(args...) +
        string::Sprintf(" at [%s:%s];", __FILE__, __LINE__));
    // clang-format on
  }
}

template <typename... Args>
inline void throw_on_error(curandStatus_t stat, const Args&... args) {
  if (stat != CURAND_STATUS_SUCCESS) {
    // clang-format off
    throw thrust::system_error(
        cudaErrorLaunchFailure, thrust::cuda_category(),
        string::Sprintf(args...) +
        string::Sprintf(" at [%s:%s];", __FILE__, __LINE__));
    // clang-format on
  }
}

template <typename... Args>
inline void throw_on_error(cudnnStatus_t stat, const Args&... args) {
  if (stat == CUDNN_STATUS_SUCCESS) {
    return;
  } else {
    // clang-format off
    throw std::runtime_error(
        platform::dynload::cudnnGetErrorString(stat) +
        string::Sprintf(args...) +
        string::Sprintf(" at [%s:%s];", __FILE__, __LINE__));
    // clang-format on
  }
}

template <typename... Args>
inline void throw_on_error(cublasStatus_t stat, const Args&... args) {
  std::string err;
  if (stat == CUBLAS_STATUS_SUCCESS) {
    return;
  } else if (stat == CUBLAS_STATUS_NOT_INITIALIZED) {
    err = "CUBLAS: not initialized, ";
  } else if (stat == CUBLAS_STATUS_ALLOC_FAILED) {
    err = "CUBLAS: alloc failed, ";
  } else if (stat == CUBLAS_STATUS_INVALID_VALUE) {
    err = "CUBLAS: invalid value, ";
  } else if (stat == CUBLAS_STATUS_ARCH_MISMATCH) {
    err = "CUBLAS: arch mismatch, ";
  } else if (stat == CUBLAS_STATUS_MAPPING_ERROR) {
    err = "CUBLAS: mapping error, ";
  } else if (stat == CUBLAS_STATUS_EXECUTION_FAILED) {
    err = "CUBLAS: execution failed, ";
  } else if (stat == CUBLAS_STATUS_INTERNAL_ERROR) {
    err = "CUBLAS: internal error, ";
  } else if (stat == CUBLAS_STATUS_NOT_SUPPORTED) {
    err = "CUBLAS: not supported, ";
  } else if (stat == CUBLAS_STATUS_LICENSE_ERROR) {
    err = "CUBLAS: license error, ";
  }
  throw std::runtime_error(err + string::Sprintf(args...) +
                           string::Sprintf(" at [%s:%s];", __FILE__, __LINE__));
}

#endif  // PADDLE_ONLY_CPU

template <typename... Args>
inline void throw_on_error(int stat, const Args&... args) {
  if (UNLIKELY(!(stat))) {
    throw std::runtime_error(
        string::Sprintf(args...) +
        string::Sprintf(" at [%s:%s];", __FILE__, __LINE__));
  }
}

#define PADDLE_THROW(...)                                     \
  do {                                                        \
    throw std::runtime_error(                                 \
        string::Sprintf(__VA_ARGS__) +                        \
        string::Sprintf(" at [%s:%s];", __FILE__, __LINE__)); \
  } while (0)

/**
 * @brief Enforce a condition; if it does not hold, an exception is thrown via
 * throw_on_error with the formatted message.
 */
#define PADDLE_ENFORCE(condition, ...)                          \
  do {                                                          \
    ::paddle::platform::throw_on_error(condition, __VA_ARGS__); \
  } while (0)

}  // namespace platform
}  // namespace paddle
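The header above relies on overload resolution: PADDLE_ENFORCE passes its first argument to throw_on_error, and the overload that matches the argument's static type (cudaError_t, curandStatus_t, cudnnStatus_t, cublasStatus_t, or plain int) decides how to translate it into an exception. The following is a minimal sketch of that dispatch pattern with a made-up status type so it compiles without CUDA or Paddle; DEMO_ENFORCE and FakeStatus are hypothetical names.

// Stand-alone sketch of the overload-dispatch idea behind PADDLE_ENFORCE
// (FakeStatus and the names below are illustrative, not Paddle or CUDA).
#include <iostream>
#include <stdexcept>
#include <string>

enum class FakeStatus { kSuccess = 0, kAllocFailed = 1 };

// One overload per status type turns a library-specific code into an exception.
inline void throw_on_error(FakeStatus stat, const std::string& msg) {
  if (stat != FakeStatus::kSuccess) {
    throw std::runtime_error("fake library error: " + msg);
  }
}

// The generic overload treats a plain int/bool as a condition.
inline void throw_on_error(int stat, const std::string& msg) {
  if (!stat) {
    throw std::runtime_error(msg);
  }
}

// The macro just forwards; the compiler picks the overload from the static
// type of `condition`, the same mechanism the header above uses.
#define DEMO_ENFORCE(condition, msg) throw_on_error((condition), (msg))

int main() {
  try {
    DEMO_ENFORCE(FakeStatus::kAllocFailed, "allocating workspace");
  } catch (const std::exception& e) {
    std::cout << e.what() << std::endl;
  }
  DEMO_ENFORCE(2 > 1, "never thrown");
  return 0;
}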
@@ -1,87 +0,0 @@
#pragma once

#include <sstream>
#include <stdexcept>
#include <string>

#ifndef PADDLE_ONLY_CPU

#include <cublas_v2.h>
#include <cudnn.h>
#include <curand.h>
#include <thrust/system/cuda/error.h>
#include <thrust/system_error.h>

#endif  // PADDLE_ONLY_CPU

namespace paddle {
namespace platform {

#ifndef PADDLE_ONLY_CPU

inline void throw_on_error(cudaError_t e, const char* message) {
  if (e) {
    throw thrust::system_error(e, thrust::cuda_category(), message);
  }
}

inline void throw_on_error(curandStatus_t stat, const char* message) {
  if (stat != CURAND_STATUS_SUCCESS) {
    throw thrust::system_error(cudaErrorLaunchFailure, thrust::cuda_category(),
                               message);
  }
}

inline void throw_on_error(cudnnStatus_t stat, const char* message) {
  std::stringstream ss;
  if (stat == CUDNN_STATUS_SUCCESS) {
    return;
  } else {
    ss << cudnnGetErrorString(stat);
    ss << ", " << message;
    throw std::runtime_error(ss.str());
  }
}

inline void throw_on_error(cublasStatus_t stat, const char* message) {
  std::stringstream ss;
  if (stat == CUBLAS_STATUS_SUCCESS) {
    return;
  } else if (stat == CUBLAS_STATUS_NOT_INITIALIZED) {
    ss << "CUBLAS: not initialized";
  } else if (stat == CUBLAS_STATUS_ALLOC_FAILED) {
    ss << "CUBLAS: alloc failed";
  } else if (stat == CUBLAS_STATUS_INVALID_VALUE) {
    ss << "CUBLAS: invalid value";
  } else if (stat == CUBLAS_STATUS_ARCH_MISMATCH) {
    ss << "CUBLAS: arch mismatch";
  } else if (stat == CUBLAS_STATUS_MAPPING_ERROR) {
    ss << "CUBLAS: mapping error";
  } else if (stat == CUBLAS_STATUS_EXECUTION_FAILED) {
    ss << "CUBLAS: execution failed";
  } else if (stat == CUBLAS_STATUS_INTERNAL_ERROR) {
    ss << "CUBLAS: internal error";
  } else if (stat == CUBLAS_STATUS_NOT_SUPPORTED) {
    ss << "CUBLAS: not supported";
  } else if (stat == CUBLAS_STATUS_LICENSE_ERROR) {
    ss << "CUBLAS: license error";
  }
  ss << ", " << message;
  throw std::runtime_error(ss.str());
}

inline void throw_on_error(cublasStatus_t stat) {
  const char* message = "";
  throw_on_error(stat, message);
}

#endif  // PADDLE_ONLY_CPU

inline void throw_on_error(int stat, const char* message) {
  if (stat) {
    throw std::runtime_error(message + (", stat = " + std::to_string(stat)));
  }
}

}  // namespace platform
}  // namespace paddle