@@ -12,32 +12,29 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#TODO(luotao02): This config is only used for unit tests. It is out of date and will be updated later.
from paddle.trainer_config_helpers import *
################################### Data Configuration ###################################
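# The ProtoData provider reads the binary data files enumerated in the .list file given below.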
TrainData(ProtoData(files = "train.list"))
TrainData(ProtoData(files = "trainer/tests/mnist .list"))
################################### Algorithm Configuration ###################################
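# Old format: plain SGD with the 'sparse_momentum' update rule. The per-batch
# send/get settings control how often a distributed trainer exchanges parameters,
# and default_momentum(0.5) supplies the global momentum.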
Settings(
    learning_rate_decay_a = 0.0,
    learning_rate_decay_b = 0.0,
    learning_rate = 1e-03,
    batch_size = 1000,
    algorithm = 'sgd',
    num_batches_per_send_parameter = 1,
    num_batches_per_get_parameter = 1,
    learning_method = 'sparse_momentum',
)
default_momentum(0.5)
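# New format: settings() from trainer_config_helpers replaces the Settings()
# block above; MomentumOptimizer(momentum=0.5) takes over from default_momentum(0.5),
# and sparse=False selects the dense rather than the sparse momentum update.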
settings(batch_size = 1000,
         learning_method = MomentumOptimizer(momentum=0.5, sparse=False))
################################### Network Configuration ###################################
Layer(type = "data", name = "input", size = 784)
Layer(inputs = [Input("input", parameter_name = "_layer1.w")], name = "layer1", bias = Bias(parameter_name = "_layer1.bias"), active_type = "sigmoid", type = "fc", size = 800)
Layer(inputs = [Input("layer1", parameter_name = "_layer2.w")], name = "layer2", bias = Bias(parameter_name = "_layer2.bias"), active_type = "sigmoid", type = "fc", size = 800)
#Layer(inputs = [Input("layer2", parameter_name = "_layer_output.w", decay_rate = 0.02)], name = "output", bias = Bias(parameter_name = "_layer_output.bias"), active_type = "margin", type = "fc", size = 10)
#Layer(inputs = [Input("layer2", parameter_name = "_layer_output.w", decay_rate = 0.02)], name = "output", bias = Bias(parameter_name = "_layer_output.bias"), type = "fc", size = 10)
Layer(inputs = [Input("layer2", parameter_name = "_layer_output.w")], name = "output", bias = Bias(parameter_name = "_layer_output.bias"), active_type = "softmax", type = "fc", size = 10)
Layer(type = "data", name = "label", size = 1)
Layer(inputs = [Input("output"), Input("label")], type = "multi-class-cross-entropy", name = "cost")
#Layer(inputs = [Input("output"), Input("label")], type = "huber", name = "cost")
Evaluator(inputs=["output", "label"], type = "classification_error", name = "classification_error")
Inputs("input", "label")
Outputs("cost")
data = data_layer(name ="input", size=784)
fc1 = fc_layer(input=data, size=800,
bias_attr=True,
act=SigmoidActivation())
fc2 = fc_layer(input=fc1, size=800,
bias_attr=True,
act=SigmoidActivation())
output = fc_layer(input=[fc1, fc2], size=10,
bias_attr=True,
act=SoftmaxActivation())
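# output mixes both hidden layers (a skip connection from fc1) into a 10-way softmax.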
lbl = data_layer(name ="label", size=1)
cost = classification_cost(input=output, label=lbl)
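# classification_cost pairs the softmax output with the integer label and, by
# default in these helpers, also attaches the classification-error evaluator that
# the old format declared explicitly.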
outputs(cost)