parent
a468bce0ac
commit
9a592ec3aa
@ -1,76 +0,0 @@
|
||||
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License. */
|
||||
|
||||
#include "paddle/operators/net_op.h"
|
||||
|
||||
#include "paddle/framework/eigen.h"
|
||||
#include "paddle/framework/op_registry.h"
|
||||
|
||||
namespace paddle {
|
||||
namespace operators {
|
||||
|
||||
using OpRegistry = framework::OpRegistry;
|
||||
|
||||
class FullyConnectedOp : public NetOp {
|
||||
public:
|
||||
void Init() override {
|
||||
AddOp(OpRegistry::CreateOp("mul",
|
||||
{
|
||||
Input("X"), Input("W"),
|
||||
},
|
||||
{Output("before_act")}, {}));
|
||||
auto b = Input("b");
|
||||
if (b != framework::kEmptyVarName) {
|
||||
AddOp(OpRegistry::CreateOp("rowwise_add",
|
||||
{Output("before_act"), Input("b")},
|
||||
{Output("before_act")}, {}));
|
||||
}
|
||||
|
||||
auto activation = GetAttr<std::string>("activation");
|
||||
AddOp(OpRegistry::CreateOp(activation, {Output("before_act")},
|
||||
{Output("Y")}, {}));
|
||||
CompleteAddOp(false);
|
||||
}
|
||||
};
|
||||
|
||||
// Declares the public interface (proto) of the "fc" operator: its inputs,
// outputs, attributes, and attribute constraints.
class FullyConnectedOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  FullyConnectedOpMaker(framework::OpProto *proto,
                        framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    // NOTE(review): the declaration order of inputs/outputs below likely
    // defines their indices in the generated proto — do not reorder without
    // confirming against OpProtoAndCheckerMaker.
    AddInput("X", "the input of fc operator");
    AddInput("W", "the weight of fc operator");
    AddInput("b", "the bias of fc operator");

    AddOutput("Y", "the output of fc operator");
    // before_act holds the pre-activation product X*W(+b); marked temporary
    // so it is treated as an intermediate rather than a user-facing output.
    AddOutput("before_act", "the before activation output of fc operator")
        .SetTemporary();
    // Which activation op to append after the linear part; constrained to
    // the two ops pulled in via USE_OP below.
    AddAttr<std::string>("activation", "The activation key for fc layer")
        .SetDefault("sigmoid")
        .InEnum({"sigmoid", "softmax"});

    //! TODO(yuyang18): Complete comment;
    AddComment("FullyConnected Operator");
  }
};
|
||||
} // namespace operators
|
||||
} // namespace paddle
|
||||
|
||||
// NOTE(review): USE_OP presumably forces the registrations of the primitive
// ops that FullyConnectedOp composes to be linked into this translation
// unit — confirm against the op registry macros.
USE_OP(mul);
USE_OP(rowwise_add);
USE_OP(sigmoid);
USE_OP(softmax);

namespace ops = paddle::operators;
// Register the composed network op under the type name "fc".
REGISTER_OP(fc, ops::FullyConnectedOp, ops::FullyConnectedOpMaker);
|
@ -1,45 +0,0 @@
|
||||
import paddle.v2.framework.core as core
|
||||
import unittest
|
||||
import numpy
|
||||
from paddle.v2.framework.op import Operator
|
||||
|
||||
|
||||
class TestFc(unittest.TestCase):
    """Smoke test for the composed "fc" operator: Y = activation(X * W [+ b]).

    Builds the scope/tensors by hand, runs shape inference, checks the
    output shape, then executes the op on CPU.
    """

    def test_fc(self):
        scope = core.Scope()
        place = core.CPUPlace()

        # Input X: 1000 samples x 784 features. Fill it with real data so
        # op.run() does not read uninitialized memory (the original left X
        # unset behind a "# Set a real numpy array here." TODO).
        x = scope.new_var("X")
        x_tensor = x.get_tensor()
        x_tensor.set_dims([1000, 784])
        x_tensor.alloc_float(place)
        x_tensor.set(numpy.random.random((1000, 784)).astype("float32"), place)

        # Weight W: 784 x 100, initialized with random float32 data.
        w = scope.new_var("W")
        w_tensor = w.get_tensor()
        w_tensor.set_dims([784, 100])
        w_tensor.alloc_float(place)
        w_tensor.set(numpy.random.random((784, 100)).astype("float32"), place)

        # The bias input "b" is optional and deliberately omitted here.
        op = Operator("fc", X="X", Y="Y", W="W")

        # Create any output variables the op declares (e.g. Y and the
        # temporary before_act) that the scope does not yet hold.
        for out in op.outputs():
            if scope.find_var(out) is None:
                scope.new_var(out).get_tensor()

        tensor = scope.find_var("Y").get_tensor()
        op.infer_shape(scope)
        # [1000, 784] x [784, 100] -> [1000, 100]
        self.assertEqual([1000, 100], tensor.shape())

        ctx = core.DeviceContext.create(place)
        op.run(scope, ctx)

        # TODO(review): numerically compare Y against sigmoid(X.dot(W)) once
        # a tensor -> numpy conversion is available here.
|
||||
|
||||
|
||||
# Allow running this test file directly (python <this_file>.py) in addition
# to discovery by a test runner.
if __name__ == '__main__':
    unittest.main()
|
Loading…
Reference in new issue