commit
16a39d24f3
@ -0,0 +1,86 @@
|
||||
package pserver
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// testDir is the on-disk directory where checkpoint files are written
// by the tests in this file.
const testDir = "./test_data"
|
||||
|
||||
// myKV is a minimal in-memory KVStore implementation used as a test
// double for the etcd-backed store. It is not safe for concurrent use.
type myKV struct {
	m map[string][]byte
}

// GetKey returns the value stored under key, or nil when the key is
// absent. The timeout argument is accepted for interface compatibility
// and ignored.
func (m *myKV) GetKey(key string, timeout time.Duration) ([]byte, error) {
	if m.m == nil {
		m.m = map[string][]byte{}
	}
	value := m.m[key]
	return value, nil
}

// PutKey stores value under key. The timeout and withLease arguments
// are accepted for interface compatibility and ignored.
func (m *myKV) PutKey(key string, value []byte, timeout time.Duration, withLease bool) error {
	if m.m == nil {
		m.m = map[string][]byte{}
	}
	m.m[key] = value
	return nil
}
|
||||
|
||||
func TestCheckpoint(t *testing.T) {
|
||||
kv := &myKV{}
|
||||
s, err := NewService(0, time.Hour, testDir, kv, nil)
|
||||
assert.Nil(t, err)
|
||||
err = s.checkpoint()
|
||||
assert.Nil(t, err)
|
||||
_, err = LoadCheckpoint(kv, 0)
|
||||
assert.Nil(t, err)
|
||||
}
|
||||
|
||||
// float32ToByte returns the little-endian IEEE-754 encoding of f as a
// 4-byte slice.
func float32ToByte(f float32) []byte {
	var buf bytes.Buffer
	if err := binary.Write(&buf, binary.LittleEndian, f); err != nil {
		// binary.Write on an in-memory buffer with a fixed-size value cannot
		// realistically fail; the original code printed the error and silently
		// returned a truncated slice, corrupting test data. In a test helper,
		// treat this as a programmer error and panic instead.
		panic(fmt.Sprintf("binary.Write failed: %v", err))
	}
	return buf.Bytes()
}
|
||||
|
||||
func TestCheckpointWithData(t *testing.T) {
|
||||
kv := &myKV{}
|
||||
s, err := NewService(0, time.Hour, testDir, kv, nil)
|
||||
assert.Nil(t, err)
|
||||
|
||||
var content []byte
|
||||
for i := 0; i < 50000; i++ {
|
||||
content = append(content, float32ToByte(float32(i))...)
|
||||
}
|
||||
|
||||
p1 := Parameter{Name: "p1", ElementType: 1, Content: content}
|
||||
err = s.InitParam(ParameterWithConfig{Param: p1}, nil)
|
||||
assert.Nil(t, err)
|
||||
|
||||
err = s.FinishInitParams(0, nil)
|
||||
assert.Nil(t, err)
|
||||
|
||||
var p2 Parameter
|
||||
err = s.GetParam(p1.Name, &p2)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, p1, p2)
|
||||
|
||||
err = s.checkpoint()
|
||||
assert.Nil(t, err)
|
||||
cp, err := LoadCheckpoint(kv, 0)
|
||||
assert.Nil(t, err)
|
||||
s1, err := NewService(0, time.Hour, testDir, kv, cp)
|
||||
assert.Nil(t, err)
|
||||
|
||||
var p3 Parameter
|
||||
err = s1.GetParam(p1.Name, &p3)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, p1, p3)
|
||||
}
|
@ -1,39 +0,0 @@
|
||||
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License. */
|
||||
|
||||
syntax = "proto2";
|
||||
option optimize_for = LITE_RUNTIME;
|
||||
package paddle.framework;
|
||||
|
||||
import "framework.proto";
|
||||
|
||||
/**
 * This file contains the information necessary for model and checkpoint
 * serialization, etc.
 */
|
||||
|
||||
// LoDInfo holds one level of LoD (level-of-detail / sequence) offsets.
message LoDInfo { repeated int64 level = 1; }
|
||||
|
||||
/**
 * Save the LoDTensorDesc information through LoDTensorProto; its data memory
 * is copied to a C buffer immediately. See model_format.md for details.
 */
|
||||
|
||||
// LoDTensorProto serializes a LoDTensor: its element type, shape,
// LoD offset levels, and the serialization format version.
message LoDTensorProto {
  optional DataType data_type = 1;
  repeated int64 dims = 2; // [UNK, 640, 480] is saved as [-1, 640, 480]
  repeated LoDInfo levels = 3;
  optional int32 lod_level = 4 [ default = 0 ];
  optional int32 version = 5;
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,138 @@
|
||||
/* Copyright (c) 2017 PaddlePaddle Authors. All Rights Reserve.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License. */
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "MKLDNNLayer.h"
|
||||
#include "mkldnn.hpp"
|
||||
|
||||
namespace paddle {
typedef mkldnn::batch_normalization_forward bn_fwd;
typedef mkldnn::batch_normalization_backward bn_bwd;

/**
 * @brief A subclass of MKLDNNLayer implementing the BatchNorm layer.
 *
 * The config file api is mkldnn_batch_norm
 */
class MKLDNNBatchNormLayer : public MKLDNNLayer {
protected:
  // Forward primitive descriptor, saved so the backward pass can reuse it.
  std::shared_ptr<bn_fwd::primitive_desc> fwdPD_;

  // Epsilon value used in the batch normalization formula.
  static const real EPS;
  // Weight and bias in Paddle terms (scale and shift in MKL-DNN terms).
  std::unique_ptr<Weight> weight_;
  std::unique_ptr<Weight> biases_;
  // MKL-DNN stores scale and shift together in one large buffer;
  // they correspond to weight and bias in Paddle respectively.
  MatrixPtr valueScaleShift_;
  MatrixPtr gradScaleShift_;
  // Moving average of mean.
  std::unique_ptr<Weight> movingMean_;
  // Moving average of variance.
  std::unique_ptr<Weight> movingVar_;

  // If useGlobalStats_ is true, will use the loaded mean and variance.
  // Otherwise, calculate mean and variance in every mini-batch.
  bool useGlobalStats_;
  // Flags passed to the MKL-DNN primitive descriptor.
  unsigned flags_;
  // Fraction used when updating the moving mean and variance
  // (see calMovingMeanAndVar below).
  real movingAvgFraction_;
  // Whether the weight has been initialized.
  bool hasInitedWgt_;

  // Local mean and variance.
  // When useGlobalStats_ is true they are loaded from the moving mean and
  // variance; otherwise they are calculated from the current mini-batch.
  MKLDNNMatrixPtr mean_;
  MKLDNNMatrixPtr var_;

public:
  explicit MKLDNNBatchNormLayer(const LayerConfig& config)
      : MKLDNNLayer(config), useGlobalStats_(true), hasInitedWgt_(false) {}

  ~MKLDNNBatchNormLayer() {}

  bool init(const LayerMap& layerMap,
            const ParameterMap& parameterMap) override;

  void forward(PassType passType) override;

  void reshape(
      int& bs, int& ic, int& ih, int& iw, int oc, int& oh, int& ow) override;

  void resetFwd(std::vector<mkldnn::primitive>& pipeline,
                MKLDNNMatrixPtr& in,
                MKLDNNMatrixPtr& wgt,
                MKLDNNMatrixPtr& bias,
                MKLDNNMatrixPtr& out) override;

  void resetBwd(std::vector<mkldnn::primitive>& pipeline,
                MKLDNNMatrixPtr& in,
                MKLDNNMatrixPtr& wgt,
                MKLDNNMatrixPtr& bias,
                MKLDNNMatrixPtr& out) override;

  void updateWeights(const UpdateCallback& callback) override;

  void convertWeightsFromPaddle() override;

protected:
  void initWeight();
  /**
   * Calculate the moving mean and variance:
   * moving = moving * AvgFraction + local * (1 - AvgFraction)
   */
  void calMovingMeanAndVar();
  /**
   * Forward functions: reset buffers(input, weight, output),
   *                    reset primitive descriptor,
   *                    reset pipeline.
   */
  void resetFwdBuffers(MKLDNNMatrixPtr& in,
                       MKLDNNMatrixPtr& wgt,
                       MKLDNNMatrixPtr& out);
  void resetFwdPD(std::shared_ptr<bn_fwd::primitive_desc>& pd,
                  MKLDNNMatrixPtr in,
                  MKLDNNMatrixPtr wgt,
                  MKLDNNMatrixPtr out);
  void resetFwdPipeline(std::vector<mkldnn::primitive>& pipeline,
                        std::shared_ptr<bn_fwd::primitive_desc>& pd,
                        MKLDNNMatrixPtr& in,
                        MKLDNNMatrixPtr& wgt,
                        MKLDNNMatrixPtr& out);

  /**
   * Backward functions: reset buffers(input, weight, output),
   *                     reset primitive descriptor,
   *                     reset pipeline.
   */
  void resetBwdBuffers(MKLDNNMatrixPtr& in,
                       MKLDNNMatrixPtr& wgt,
                       MKLDNNMatrixPtr& out);
  void resetBwdPD(std::shared_ptr<bn_bwd::primitive_desc>& pd,
                  MKLDNNMatrixPtr& in,
                  MKLDNNMatrixPtr& wgt,
                  MKLDNNMatrixPtr& out);
  void resetBwdPipeline(std::vector<mkldnn::primitive>& pipeline,
                        std::shared_ptr<bn_bwd::primitive_desc>& pd,
                        MKLDNNMatrixPtr& in,
                        MKLDNNMatrixPtr& wgt,
                        MKLDNNMatrixPtr& out);
};

}  // namespace paddle
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue