commit 7293c82154

@@ -1,82 +1,61 @@
 """
-CIFAR Dataset.
-
-URL: https://www.cs.toronto.edu/~kriz/cifar.html
-
-the default train_creator, test_creator used for CIFAR-10 dataset.
+CIFAR dataset: https://www.cs.toronto.edu/~kriz/cifar.html
 """
 import cPickle
 import itertools
-import tarfile
-
 import numpy
+import paddle.v2.dataset.common
+import tarfile
 
-from common import download
-
-__all__ = [
-    'cifar_100_train_creator', 'cifar_100_test_creator', 'train_creator',
-    'test_creator'
-]
+__all__ = ['train100', 'test100', 'train10', 'test10']
 
-CIFAR10_URL = 'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
+URL_PREFIX = 'https://www.cs.toronto.edu/~kriz/'
+CIFAR10_URL = URL_PREFIX + 'cifar-10-python.tar.gz'
 CIFAR10_MD5 = 'c58f30108f718f92721af3b95e74349a'
-CIFAR100_URL = 'https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz'
+CIFAR100_URL = URL_PREFIX + 'cifar-100-python.tar.gz'
 CIFAR100_MD5 = 'eb9058c3a382ffc7106e4002c42a8d85'
 
 
-def __read_batch__(filename, sub_name):
-    def reader():
-        def __read_one_batch_impl__(batch):
-            data = batch['data']
-            labels = batch.get('labels', batch.get('fine_labels', None))
-            assert labels is not None
-            for sample, label in itertools.izip(data, labels):
-                yield (sample / 255.0).astype(numpy.float32), int(label)
+def reader_creator(filename, sub_name):
+    def read_batch(batch):
+        data = batch['data']
+        labels = batch.get('labels', batch.get('fine_labels', None))
+        assert labels is not None
+        for sample, label in itertools.izip(data, labels):
+            yield (sample / 255.0).astype(numpy.float32), int(label)
 
+    def reader():
         with tarfile.open(filename, mode='r') as f:
             names = (each_item.name for each_item in f
                      if sub_name in each_item.name)
 
             for name in names:
                 batch = cPickle.load(f.extractfile(name))
-                for item in __read_one_batch_impl__(batch):
+                for item in read_batch(batch):
                     yield item
 
     return reader
 
 
-def cifar_100_train_creator():
-    fn = download(url=CIFAR100_URL, md5=CIFAR100_MD5)
-    return __read_batch__(fn, 'train')
-
-
-def cifar_100_test_creator():
-    fn = download(url=CIFAR100_URL, md5=CIFAR100_MD5)
-    return __read_batch__(fn, 'test')
-
-
-def train_creator():
-    """
-    Default train reader creator. Use CIFAR-10 dataset.
-    """
-    fn = download(url=CIFAR10_URL, md5=CIFAR10_MD5)
-    return __read_batch__(fn, 'data_batch')
+def train100():
+    return reader_creator(
+        paddle.v2.dataset.common.download(CIFAR100_URL, 'cifar', CIFAR100_MD5),
+        'train')
 
 
-def test_creator():
-    """
-    Default test reader creator. Use CIFAR-10 dataset.
-    """
-    fn = download(url=CIFAR10_URL, md5=CIFAR10_MD5)
-    return __read_batch__(fn, 'test_batch')
+def test100():
+    return reader_creator(
+        paddle.v2.dataset.common.download(CIFAR100_URL, 'cifar', CIFAR100_MD5),
+        'test')
 
 
-def unittest():
-    for _ in train_creator()():
-        pass
-    for _ in test_creator()():
-        pass
+def train10():
+    return reader_creator(
+        paddle.v2.dataset.common.download(CIFAR10_URL, 'cifar', CIFAR10_MD5),
+        'data_batch')
 
 
-if __name__ == '__main__':
-    unittest()
+def test10():
+    return reader_creator(
+        paddle.v2.dataset.common.download(CIFAR10_URL, 'cifar', CIFAR10_MD5),
+        'test_batch')
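
For context, a minimal sketch (not part of the commit) of how the readers
defined above might be consumed; Python 2, matching the cPickle/izip usage
in the diff, and assuming the archives are fetched by
paddle.v2.dataset.common.download:

    # Hypothetical usage sketch: train10()/test10()/train100()/test100()
    # each return a reader, i.e. a zero-argument callable that yields
    # (image, label) tuples.
    import paddle.v2.dataset.cifar as cifar

    reader = cifar.train10()  # fetches the CIFAR-10 archive on first use
    for image, label in reader():
        # image: numpy.float32 vector of 3072 values (3 x 32 x 32), in [0, 1]
        # label: int in [0, 9] for CIFAR-10 (or [0, 99] for CIFAR-100)
        pass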

@@ -0,0 +1,42 @@
+import paddle.v2.dataset.cifar
+import unittest
+
+
+class TestCIFAR(unittest.TestCase):
+    def check_reader(self, reader):
+        sum = 0
+        label = 0
+        for l in reader():
+            self.assertEqual(l[0].size, 3072)
+            if l[1] > label:
+                label = l[1]
+            sum += 1
+        return sum, label
+
+    def test_test10(self):
+        instances, max_label_value = self.check_reader(
+            paddle.v2.dataset.cifar.test10())
+        self.assertEqual(instances, 10000)
+        self.assertEqual(max_label_value, 9)
+
+    def test_train10(self):
+        instances, max_label_value = self.check_reader(
+            paddle.v2.dataset.cifar.train10())
+        self.assertEqual(instances, 50000)
+        self.assertEqual(max_label_value, 9)
+
+    def test_test100(self):
+        instances, max_label_value = self.check_reader(
+            paddle.v2.dataset.cifar.test100())
+        self.assertEqual(instances, 10000)
+        self.assertEqual(max_label_value, 99)
+
+    def test_train100(self):
+        instances, max_label_value = self.check_reader(
+            paddle.v2.dataset.cifar.train100())
+        self.assertEqual(instances, 50000)
+        self.assertEqual(max_label_value, 99)
+
+
+if __name__ == '__main__':
+    unittest.main()

@@ -0,0 +1,83 @@
+# Copyright PaddlePaddle contributors. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+import paddle.v2.layer as layer
+import paddle.v2.topology as topology
+import paddle.v2.data_type as data_type
+import paddle.trainer_config_helpers as conf_helps
+
+
+class TestTopology(unittest.TestCase):
+    def test_data_type(self):
+        pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
+        label = layer.data(name='label', type=data_type.integer_value(10))
+        hidden = layer.fc(input=pixel,
+                          size=100,
+                          act=conf_helps.SigmoidActivation())
+        inference = layer.fc(input=hidden,
+                             size=10,
+                             act=conf_helps.SoftmaxActivation())
+        cost = layer.classification_cost(input=inference, label=label)
+        topo = topology.Topology(cost)
+        data_types = topo.data_type()
+        self.assertEqual(len(data_types), 2)
+        pixel_data_type = filter(lambda type: type[0] == "pixel", data_types)
+        self.assertEqual(len(pixel_data_type), 1)
+        pixel_data_type = pixel_data_type[0]
+        self.assertEqual(pixel_data_type[1].type, data_type.DataType.Dense)
+        self.assertEqual(pixel_data_type[1].dim, 784)
+
+        label_data_type = filter(lambda type: type[0] == "label", data_types)
+        self.assertEqual(len(label_data_type), 1)
+        label_data_type = label_data_type[0]
+        self.assertEqual(label_data_type[1].type, data_type.DataType.Index)
+        self.assertEqual(label_data_type[1].dim, 10)
+
+    def test_get_layer(self):
+        pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
+        label = layer.data(name='label', type=data_type.integer_value(10))
+        hidden = layer.fc(input=pixel,
+                          size=100,
+                          act=conf_helps.SigmoidActivation())
+        inference = layer.fc(input=hidden,
+                             size=10,
+                             act=conf_helps.SoftmaxActivation())
+        cost = layer.classification_cost(input=inference, label=label)
+        topo = topology.Topology(cost)
+        pixel_layer = topo.get_layer("pixel")
+        label_layer = topo.get_layer("label")
+        self.assertEqual(pixel_layer, pixel)
+        self.assertEqual(label_layer, label)
+
+    def test_parse(self):
+        pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
+        label = layer.data(name='label', type=data_type.integer_value(10))
+        hidden = layer.fc(input=pixel,
+                          size=100,
+                          act=conf_helps.SigmoidActivation())
+        inference = layer.fc(input=hidden,
+                             size=10,
+                             act=conf_helps.SoftmaxActivation())
+        maxid = layer.max_id(input=inference)
+        cost1 = layer.classification_cost(input=inference, label=label)
+        cost2 = layer.cross_entropy_cost(input=inference, label=label)
+
+        topology.Topology(cost2).proto()
+        topology.Topology([cost1]).proto()
+        topology.Topology([cost1, cost2]).proto()
+        topology.Topology([inference, maxid]).proto()
+
+
+if __name__ == '__main__':
+    unittest.main()

@@ -0,0 +1,96 @@
+# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+
+from paddle.proto.ModelConfig_pb2 import ModelConfig
+
+import layer as v2_layer
+
+__all__ = ['Topology']
+
+
+def __bfs_travel__(callback, *layers):
+    for each_layer in layers:
+        __break__ = callback(each_layer)
+        if __break__:
+            return
+        __bfs_travel__(callback, *each_layer.__parent_layers__.values())
+
+
+class Topology(object):
+    """
+    Topology is used to store the information about all layers
+    and network configs.
+    """
+
+    def __init__(self, layers):
+        if not isinstance(layers, collections.Sequence):
+            __check_layer_type__(layers)
+            layers = [layers]
+        for layer in layers:
+            __check_layer_type__(layer)
+        self.layers = layers
+        self.__model_config__ = v2_layer.parse_network(*layers)
+        assert isinstance(self.__model_config__, ModelConfig)
+
+    def proto(self):
+        return self.__model_config__
+
+    def get_layer(self, name):
+        """
+        get v2.Layer Class instance by layer name
+        :param name:
+        :return:
+        """
+        result_layer = [None]
+
+        def __impl__(l):
+            if l.name == name:
+                result_layer[0] = l
+                return True  # break
+            return False
+
+        __bfs_travel__(__impl__, *self.layers)
+        if result_layer[0] is None:
+            raise ValueError("No such layer %s" % name)
+        return result_layer[0]
+
+    def data_layers(self):
+        """
+        get all data layer
+        :return:
+        """
+        data_layers = dict()
+
+        def __impl__(l):
+            if isinstance(l, v2_layer.DataLayerV2):
+                data_layers[l.name] = l
+
+        __bfs_travel__(__impl__, *self.layers)
+        return data_layers
+
+    def data_type(self):
+        """
+        get data_type from proto, such as:
+        [('image', dense_vector(768)), ('label', integer_value(10))]
+        """
+        data_layers = self.data_layers()
+        return [(nm, data_layers[nm].type)
+                for nm in self.proto().input_layer_names]
+
+
+def __check_layer_type__(layer):
+    if not isinstance(layer, v2_layer.LayerV2):
+        raise ValueError('layer should have type paddle.layer.Layer')
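
For context, a minimal sketch (not part of the commit) of how Topology might
be used, mirroring the imports and layer construction in the tests above:

    # Hypothetical usage sketch.
    import paddle.v2.layer as layer
    import paddle.v2.data_type as data_type
    import paddle.v2.topology as topology
    import paddle.trainer_config_helpers as conf_helps

    pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
    label = layer.data(name='label', type=data_type.integer_value(10))
    inference = layer.fc(input=pixel,
                         size=10,
                         act=conf_helps.SoftmaxActivation())
    cost = layer.classification_cost(input=inference, label=label)

    topo = topology.Topology(cost)
    topo.proto()             # the parsed ModelConfig protobuf
    topo.get_layer('pixel')  # finds a layer by name via __bfs_travel__
    topo.data_type()         # [('pixel', dense_vector(784)),
                             #  ('label', integer_value(10))]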