parent 229c2e7883
commit 635a69ba4b

(Binary image file added in this commit: 170 KiB.)
@@ -0,0 +1,5 @@
- [Compile the PaddlePaddle Link Libraries](compile_paddle_lib.md)
- [A Simple C-API Usage Example](a_simple_example.md)
- [Organizing Input Data](organize_input_data.md)
- [Introduction to Core Concepts](core_concepts.md)
- [FAQ]()
@@ -0,0 +1,8 @@
from paddle.utils.merge_model import merge_v2_model

from mnist_v2 import network

# Rebuild the inference topology and merge it with the parameters trained by
# mnist_v2.py into a single binary file that the PaddlePaddle C-API can load.
net = network(is_infer=True)
param_file = "models/params_pass_4.tar"
output_file = "output.paddle.model"
merge_v2_model(net, param_file, output_file)
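A hedged variant (not part of this commit), assuming training has already written params_pass_*.tar archives under models/: the hypothetical helper below picks the archive from the highest-numbered pass instead of hard-coding pass 4.

import os

from paddle.utils.merge_model import merge_v2_model
from mnist_v2 import network


def merge_latest(model_dir="models", output_file="output.paddle.model"):
    # Collect the parameter archives written by the trainer and keep the one
    # from the last completed pass.
    tars = sorted(
        (name for name in os.listdir(model_dir)
         if name.startswith("params_pass_") and name.endswith(".tar")),
        key=lambda name: int(name[len("params_pass_"):-len(".tar")]))
    if not tars:
        raise RuntimeError("No parameter archives found; run training first.")
    net = network(is_infer=True)
    merge_v2_model(net, os.path.join(model_dir, tars[-1]), output_file)


if __name__ == "__main__":
    merge_latest()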
@@ -0,0 +1,117 @@
import os
import sys
import gzip
import logging
import argparse
from PIL import Image
import numpy as np

import paddle.v2 as paddle
from paddle.utils.dump_v2_config import dump_v2_config

logger = logging.getLogger("paddle")
logger.setLevel(logging.INFO)


def multilayer_perceptron(img, layer_size, lbl_dim):
    # A stack of fully-connected ReLU layers followed by a softmax output.
    for idx, size in enumerate(layer_size):
        hidden = paddle.layer.fc(input=(img if not idx else hidden),
                                 size=size,
                                 act=paddle.activation.Relu())
    return paddle.layer.fc(input=hidden,
                           size=lbl_dim,
                           act=paddle.activation.Softmax())


def network(input_dim=784, lbl_dim=10, is_infer=False):
    images = paddle.layer.data(
        name='pixel', type=paddle.data_type.dense_vector(input_dim))

    predict = multilayer_perceptron(
        images, layer_size=[128, 64], lbl_dim=lbl_dim)

    if is_infer:
        # For inference (and for dumping the topology) only the prediction
        # layer is needed.
        return predict
    else:
        label = paddle.layer.data(
            name='label', type=paddle.data_type.integer_value(lbl_dim))
        return paddle.layer.classification_cost(input=predict, label=label)


def main(task="train", use_gpu=False, trainer_count=1, save_dir="models"):
    if task == "train":
        if not os.path.exists(save_dir):
            os.mkdir(save_dir)

        paddle.init(use_gpu=use_gpu, trainer_count=trainer_count)
        cost = network()
        parameters = paddle.parameters.create(cost)
        optimizer = paddle.optimizer.Momentum(
            learning_rate=0.1 / 128.0,
            momentum=0.9,
            regularization=paddle.optimizer.L2Regularization(rate=0.0005 * 128))

        trainer = paddle.trainer.SGD(cost=cost,
                                     parameters=parameters,
                                     update_equation=optimizer)

        def event_handler(event):
            if isinstance(event, paddle.event.EndIteration):
                if event.batch_id % 100 == 0:
                    logger.info("Pass %d, Batch %d, Cost %f, %s" %
                                (event.pass_id, event.batch_id, event.cost,
                                 event.metrics))
            if isinstance(event, paddle.event.EndPass):
                # Save the parameters of each pass as a gzip-compressed tar.
                with gzip.open(
                        os.path.join(save_dir, "params_pass_%d.tar" %
                                     event.pass_id), "w") as f:
                    trainer.save_parameter_to_tar(f)

        trainer.train(
            reader=paddle.batch(
                paddle.reader.shuffle(
                    paddle.dataset.mnist.train(), buf_size=8192),
                batch_size=128),
            event_handler=event_handler,
            num_passes=5)
    elif task == "dump_config":
        # Serialize the inference topology for use with the C-API.
        predict = network(is_infer=True)
        dump_v2_config(predict, "trainer_config.bin", True)
    else:
        raise RuntimeError(("Invalid value for the parameter task. "
                            "Available options are: train and dump_config."))


def parse_cmd():
    parser = argparse.ArgumentParser(
        description="PaddlePaddle MNIST demo for CAPI.")
    parser.add_argument(
        "--task",
        type=str,
        required=False,
        help=("A string indicating the task type. "
              "Available options are: \"train\", \"dump_config\"."),
        default="train")
    parser.add_argument(
        "--use_gpu",
        type=bool,
        help=("A bool flag indicating whether to use the GPU device or not."),
        default=False)
    parser.add_argument(
        "--trainer_count",
        type=int,
        help=("This parameter is only used in the training task. It indicates "
              "how many computing threads are created during training."),
        default=1)
    parser.add_argument(
        "--save_dir",
        type=str,
        help=("This parameter is only used in the training task. It indicates "
              "the path of the directory in which the trained models are saved."),
        default="models")
    return parser.parse_args()


if __name__ == "__main__":
    args = parse_cmd()
    main(args.task, args.use_gpu, args.trainer_count, args.save_dir)
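A hedged follow-up sketch (not part of this commit): once training has produced a parameter archive such as models/params_pass_4.tar, the same network() definition can be reused for pure-Python inference with the v2 API, which is a convenient sanity check before dumping or merging the model for the C-API. The archive name and sample count below are assumptions.

import gzip

import paddle.v2 as paddle
from mnist_v2 import network

paddle.init(use_gpu=False, trainer_count=1)

# Rebuild the inference topology and load the parameters saved by the trainer.
predict = network(is_infer=True)
with gzip.open("models/params_pass_4.tar") as f:  # assumed to exist after training
    parameters = paddle.parameters.Parameters.from_tar(f)

# Feed a few MNIST test images; each sample is a tuple holding only the
# 'pixel' input, since the inference network has no label layer.
test_data = [(item[0], ) for item in list(paddle.dataset.mnist.test())[:5]]
probs = paddle.infer(output_layer=predict, parameters=parameters, input=test_data)
print(probs.argmax(axis=1))  # the predicted digit for each sample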
@@ -0,0 +1,62 @@
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections

from paddle.trainer_config_helpers.layers import LayerOutput
from paddle.v2.layer import parse_network
from paddle.proto import TrainerConfig_pb2

__all__ = ["dump_v2_config"]


def dump_v2_config(topology, save_path, binary=False):
    """Dump the network topology to a specified file.

    This function only dumps a network defined with the PaddlePaddle V2 APIs.
    It will NOT dump configurations related to the PaddlePaddle optimizer.

    :param topology: The output layers (more than one layer can be given in a
                     Python list or tuple) of the entire network. Using the
                     specified layers (if more than one layer is given) as
                     roots and traversing back to the data layer(s), all the
                     layers connected to the specified output layers will be
                     dumped. Layers not connected to the specified output
                     layers will not be dumped.
    :type topology: LayerOutput|List|Tuple
    :param save_path: The path to save the dumped network topology.
    :type save_path: str
    :param binary: Whether to dump the serialized network topology. The
                   default value is False. NOTE that if you call this function
                   to generate a network topology for the PaddlePaddle C-API,
                   a serialized version of the network topology is required,
                   so this flag MUST be set to True in that case.
    :type binary: bool
    """

    if isinstance(topology, LayerOutput):
        topology = [topology]
    elif isinstance(topology, collections.Sequence):
        for out_layer in topology:
            assert isinstance(out_layer, LayerOutput), (
                "The type of each element in the parameter topology "
                "should be LayerOutput.")
    else:
        raise RuntimeError("Invalid input type for the parameter topology.")

    model_str = parse_network(topology)
    with open(save_path, "w") as fout:
        if binary:
            fout.write(model_str.SerializeToString())
        else:
            fout.write(str(model_str))
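A hedged usage sketch (not part of this commit): as the docstring above notes, `topology` may also be a list or tuple of LayerOutput objects, in which case every layer reachable from any of the given outputs ends up in one dumped topology. The two-headed network and file name below are illustrative assumptions only.

import paddle.v2 as paddle
from paddle.utils.dump_v2_config import dump_v2_config

paddle.init(use_gpu=False, trainer_count=1)

# An illustrative network with two output layers sharing one hidden layer.
image = paddle.layer.data(
    name="image", type=paddle.data_type.dense_vector(784))
hidden = paddle.layer.fc(input=image, size=64, act=paddle.activation.Relu())
digit = paddle.layer.fc(input=hidden, size=10, act=paddle.activation.Softmax())
parity = paddle.layer.fc(input=hidden, size=2, act=paddle.activation.Softmax())

# Human-readable protobuf text for inspection; pass binary=True when the
# topology is meant to be consumed by the PaddlePaddle C-API.
dump_v2_config([digit, parity], "two_head_topology.txt", binary=False)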