commit 73f4779808
@@ -0,0 +1,76 @@
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/net.h"
#include "paddle/framework/op_registry.h"
#include "paddle/framework/operator.h"

namespace paddle {
namespace operators {

class FullyConnectedOp : public framework::PlainNet {
 public:
  void Init() override {
    AddOp(framework::OpRegistry::CreateOp("mul",
                                          {
                                              Input("X"), Input("W"),
                                          },
                                          {Output("before_act")},
                                          {}));
    auto b = Input("b");
    if (b != framework::OperatorBase::EMPTY_VAR_NAME()) {
      AddOp(framework::OpRegistry::CreateOp("rowwise_add",
                                            {Output("before_act"), Input("b")},
                                            {Output("before_act")},
                                            {}));
    }

    auto activation = GetAttr<std::string>("activation");
    AddOp(framework::OpRegistry::CreateOp(
        activation, {Output("before_act")}, {Output("Y")}, {}));
    CompleteAddOp(false);
  }
};

class FullyConnectedOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  FullyConnectedOpMaker(framework::OpProto *proto,
                        framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "the input of fc operator");
    AddInput("W", "the weight of fc operator");
    AddInput("b", "the bias of fc operator");

    AddOutput("Y", "the output of fc operator");
    AddOutput(
        "before_act", "the before activation output of fc operator", true);
    AddAttr<std::string>("activation", "The activation key for fc layer")
        .SetDefault("sigmoid")
        .InEnum({"sigmoid", "softmax"});

    //! TODO(yuyang18): Complete comment;
    AddComment("FullyConnected Operator");
  }
};
}  // namespace operators
}  // namespace paddle

USE_OP(mul);
USE_OP(rowwise_add);
USE_OP(sigmoid);
USE_OP(softmax);

REGISTER_OP(fc,
            paddle::operators::FullyConnectedOp,
            paddle::operators::FullyConnectedOpMaker);
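
Editor's note: the fc op above is a composite net rather than a single kernel; it chains "mul", an optional "rowwise_add", and the chosen activation. For reference, a minimal NumPy sketch of the computation this composition should express (not part of the commit; it assumes "mul" is a plain matrix product, "rowwise_add" broadcasts the bias over rows, and "sigmoid"/"softmax" are the conventional functions):

import numpy as np

def fc_reference(X, W, b=None, activation="sigmoid"):
    # mul: matrix product X * W
    before_act = X @ W
    # rowwise_add: broadcast the bias over every row (skipped when b is absent)
    if b is not None:
        before_act = before_act + b
    # activation: default "sigmoid", or "softmax", matching the attr checker above
    if activation == "sigmoid":
        return 1.0 / (1.0 + np.exp(-before_act))
    if activation == "softmax":
        e = np.exp(before_act - before_act.max(axis=1, keepdims=True))
        return e / e.sum(axis=1, keepdims=True)
    raise ValueError("unsupported activation: " + activation)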
@@ -1,2 +1,2 @@
-cc_library(paddle_pybind SHARED SRCS pybind.cc DEPS pybind python
-    add_op mul_op rowwise_add_op sigmoid_op softmax_op)
+cc_library(paddle_pybind SHARED SRCS pybind.cc DEPS pybind python
+    add_op fc_op)
@@ -0,0 +1,42 @@
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle.v2.dataset.voc2012
import unittest


class TestVOC(unittest.TestCase):
    def check_reader(self, reader):
        count = 0
        for l in reader():
            self.assertEqual(l[0].size, 3 * l[1].size)
            count += 1
        return count

    def test_train(self):
        count = self.check_reader(paddle.v2.dataset.voc2012.train())
        self.assertEqual(count, 2913)

    def test_test(self):
        count = self.check_reader(paddle.v2.dataset.voc2012.test())
        self.assertEqual(count, 1464)

    def test_val(self):
        count = self.check_reader(paddle.v2.dataset.voc2012.val())
        self.assertEqual(count, 1449)


if __name__ == '__main__':
    unittest.main()
@@ -0,0 +1,85 @@
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Image dataset for segmentation.
The 2012 dataset contains images from 2008-2011 for which additional
segmentations have been prepared. As in previous years the assignment
to training/test sets has been maintained. The total number of images
with segmentation has been increased from 7,062 to 9,993.
"""

import tarfile
import io
import numpy as np
from paddle.v2.dataset.common import download
from paddle.v2.image import *
from PIL import Image

__all__ = ['train', 'test', 'val']

VOC_URL = 'http://host.robots.ox.ac.uk/pascal/VOC/voc2012/\
VOCtrainval_11-May-2012.tar'

VOC_MD5 = '6cd6e144f989b92b3379bac3b3de84fd'
SET_FILE = 'VOCdevkit/VOC2012/ImageSets/Segmentation/{}.txt'
DATA_FILE = 'VOCdevkit/VOC2012/JPEGImages/{}.jpg'
LABEL_FILE = 'VOCdevkit/VOC2012/SegmentationClass/{}.png'

CACHE_DIR = 'voc2012'


def reader_creator(filename, sub_name):

    tarobject = tarfile.open(filename)
    name2mem = {}
    for ele in tarobject.getmembers():
        name2mem[ele.name] = ele

    def reader():
        set_file = SET_FILE.format(sub_name)
        sets = tarobject.extractfile(name2mem[set_file])
        for line in sets:
            line = line.strip()
            data_file = DATA_FILE.format(line)
            label_file = LABEL_FILE.format(line)
            data = tarobject.extractfile(name2mem[data_file]).read()
            label = tarobject.extractfile(name2mem[label_file]).read()
            data = Image.open(io.BytesIO(data))
            label = Image.open(io.BytesIO(label))
            data = np.array(data)
            label = np.array(label)
            yield data, label

    return reader


def train():
    """
    Create a train dataset reader containing 2913 images in HWC order.
    """
    return reader_creator(download(VOC_URL, CACHE_DIR, VOC_MD5), 'trainval')


def test():
    """
    Create a test dataset reader containing 1464 images in HWC order.
    """
    return reader_creator(download(VOC_URL, CACHE_DIR, VOC_MD5), 'train')


def val():
    """
    Create a val dataset reader containing 1449 images in HWC order.
    """
    return reader_creator(download(VOC_URL, CACHE_DIR, VOC_MD5), 'val')
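
Editor's note: a hypothetical usage sketch for the reader added above (not part of the commit). It assumes the VOC tarball can be fetched by download() or is already cached, and simply inspects the first few samples, which the reader yields as (image, label) NumPy arrays in HWC order:

import paddle.v2.dataset.voc2012 as voc2012

# val() returns a reader creator; calling it yields (image, label) pairs.
reader = voc2012.val()
for i, (image, label) in enumerate(reader()):
    # image: H x W x 3 uint8 array; label: H x W class-index map
    print(image.shape, label.shape)
    if i >= 2:
        break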
@@ -1,3 +1,3 @@
-add_python_test(test_framework test_protobuf.py test_scope.py
-    test_default_scope_funcs.py test_op_creation_methods.py
-    test_tensor.py)
+add_python_test(test_framework test_protobuf.py test_scope.py
+    test_default_scope_funcs.py test_op_creation_methods.py
+    test_tensor.py test_fc_op.py)
@@ -0,0 +1,43 @@
import paddle.v2.framework.core as core
import unittest
import numpy
import paddle.v2.framework.create_op_creation_methods as creation


class TestFc(unittest.TestCase):
    def test_fc(self):
        scope = core.Scope(None)
        x = scope.create_var("X")
        x_tensor = x.get_tensor()
        x_tensor.set_dims([1000, 784])
        x_tensor.alloc_float()

        w = scope.create_var("W")
        w_tensor = w.get_tensor()
        w_tensor.set_dims([784, 100])
        w_tensor.alloc_float()

        w_tensor.set(numpy.random.random((784, 100)).astype("float32"))

        # Set a real numpy array here.
        # x_tensor.set(numpy.array([]))

        op = creation.op_creations.fc(X="X", Y="Y", W="W")

        for out in op.outputs():
            if scope.get_var(out) is None:
                scope.create_var(out).get_tensor()

        tensor = scope.get_var("Y").get_tensor()
        op.infer_shape(scope)
        self.assertEqual([1000, 100], tensor.shape())

        ctx = core.DeviceContext.cpu_context()

        op.run(scope, ctx)

        # After all ops have run, check whether Y matches the expected result.


if __name__ == '__main__':
    unittest.main()