Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into argmin_argmax

commit 5c9dcc4d7d

File diff suppressed because it is too large
@@ -1,25 +0,0 @@
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

if(APPLE)
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=pessimizing-move")
endif(APPLE)

cc_library(tape_variable SRCS variable.cc DEPS ${FLUID_CORE_MODULES})
cc_library(tape SRCS tape.cc DEPS ${FLUID_CORE_MODULES} ${GLOB_OP_LIB} tape_variable)

cc_test(test_tape
        SRCS test_tape.cc
        DEPS tape tape_variable)

File diff suppressed because it is too large

Binary file not shown
@@ -1,131 +0,0 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <string>

#include "paddle/contrib/tape/tape.h"
#include "paddle/contrib/tape/variable.h"
#include "paddle/fluid/framework/type_defs.h"

namespace paddle {
namespace tape {

class Function {};

class Fill {
 public:
  Fill(const std::string &initializer, const framework::AttributeMap &attrs)
      : initializer_(initializer), attrs_(attrs) {}

  void operator()(VariableHandle var) {
    get_global_tape().AddOp(initializer_, {}, {{"Out", {var}}}, attrs_);
  }

 private:
  const std::string initializer_;
  const framework::AttributeMap attrs_;
};

class Mean {
 public:
  VariableHandle operator()(VariableHandle var) {
    VariableHandle out(new Variable("mean"));
    get_global_tape().AddOp("mean", {{"X", {var}}}, {{"Out", {out}}}, {});
    return out;
  }
};

class Linear {
 public:
  Linear(int in_dim, int out_dim, const std::string &act)
      : w_(new Variable("LinearWeight")),
        b_(new Variable("LinearBias")),
        act_(act) {
    Tape init_tape;

    std::string initializer = "fill_constant";
    framework::AttributeMap attrs;
    attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
    attrs["shape"] = std::vector<int>{in_dim, out_dim};
    attrs["value"] = 1.0f;
    init_tape.AddOp(initializer, {}, {{"Out", {w_}}}, attrs);

    attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
    attrs["shape"] = std::vector<int>{out_dim};
    attrs["value"] = 1.0f;
    init_tape.AddOp(initializer, {}, {{"Out", {b_}}}, attrs);

    init_tape.Forward();
  }

  VariableHandle operator()(VariableHandle input) {
    VariableHandle pre_bias(new Variable("linear"));
    get_global_tape().AddOp("mul",
                            {{"X", {input}}, {"Y", {w_}}},
                            {{"Out", {pre_bias}}},
                            {{"x_num_col_dims", 1}, {"y_num_col_dims", 1}});
    VariableHandle pre_act(new Variable("linear"));
    get_global_tape().AddOp("elementwise_add",
                            {{"X", {pre_bias}}, {"Y", {b_}}},
                            {{"Out", {pre_act}}},
                            {{"axis", 1}});
    VariableHandle post_act(new Variable("linear"));
    get_global_tape().AddOp(
        act_, {{"X", {pre_act}}}, {{"Out", {post_act}}}, {});
    return post_act;
  }

  std::vector<VariableHandle> Params() { return {w_, b_}; }

 private:
  VariableHandle w_;
  VariableHandle b_;
  std::string act_;
};

class SGD {
 public:
  SGD(float learning_rate) : learning_rate_(new Variable("sgd")) {
    Tape init_tape;

    std::string initializer = "fill_constant";
    framework::AttributeMap attrs;
    attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
    attrs["shape"] = std::vector<int>{1};
    attrs["value"] = learning_rate;
    init_tape.AddOp(initializer, {}, {{"Out", {learning_rate_}}}, attrs);

    init_tape.Forward();
  }

  void operator()(VariableHandle input) {
    PADDLE_ENFORCE(get_global_tape().HasBeenBackwarded(),
                   "optimization must happen after the backward");
    Tape temp_tape;
    temp_tape.AddOp("sgd",
                    {{"Param", {input}},
                     {"LearningRate", {learning_rate_}},
                     {"Grad", {input->Grad()}}},
                    {{"ParamOut", {input}}},
                    {});
    temp_tape.Forward();
  }

 private:
  VariableHandle learning_rate_;
};
}
}
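For context: the functors above do not execute anything themselves; each call only records an operator on the tape returned by get_global_tape(). A minimal training step with these classes looks roughly like the sketch below. It mirrors the TestMLP test later in this diff; the helper name TrainOneStep and the single-layer setup are illustrative, not part of the removed code.

// Illustrative sketch, not part of the removed files: one training step built
// from the functors declared in function.h above.
#include "paddle/contrib/tape/function.h"

using namespace paddle::tape;

void TrainOneStep(Fill &filler, Linear &fc, Mean &mean, SGD &sgd) {
  reset_global_tape();                   // start this step with an empty tape
  VariableHandle input(new Variable("input"));
  filler(input);                         // records the fill_constant op
  auto loss = mean(fc(input));           // records mul, elementwise_add, act, mean
  get_global_tape().Backward(loss);      // computes gradients; HasBeenBackwarded() becomes true
  for (auto w : fc.Params()) {
    sgd(w);                              // SGD enforces that Backward() ran first
  }
}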
											
												
File diff suppressed because it is too large
@@ -1,64 +0,0 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#include <map>
#include <memory>
#include <string>
#include <vector>

#include "paddle/contrib/tape/variable.h"

namespace paddle {
namespace tape {

using VariableHandleMap = std::map<std::string, std::vector<VariableHandle>>;

struct OpHandle {
  OpHandle(const std::string &type,
           const VariableHandleMap &in_vars,
           const VariableHandleMap &out_vars,
           const framework::AttributeMap &attrs)
      : type_(type), inputs_(in_vars), outputs_(out_vars), attrs_(attrs) {}

  std::string type_;
  VariableHandleMap inputs_;
  VariableHandleMap outputs_;
  framework::AttributeMap attrs_;
};

class Tape {
 public:
  void AddOp(const std::string &type,
             const VariableHandleMap &in_vars,
             VariableHandleMap out_vars,
             const framework::AttributeMap &attrs);
  void Forward();
  void Backward(VariableHandle target);

  bool HasBeenBackwarded() { return has_been_backwarded_; }

 private:
  bool has_been_backwarded_ = false;
  size_t current_position_ = 0;

  std::vector<OpHandle> tape_;
  std::shared_ptr<Tape> backward_tape_;
};

Tape &get_global_tape();

void reset_global_tape();
}
}
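Two usage patterns sit on top of this interface in the surrounding files: a short-lived local Tape for one-off work such as parameter initialization, and the process-wide tape from get_global_tape() for the trainable graph. Below is a hedged sketch of the local-tape pattern, modeled on Linear's constructor in function.h; the helper name MakeOnesParameter is made up for illustration, and the includes are assumed to bring in framework::AttributeMap and proto::VarType the same way function.h does.

// Illustrative sketch, not part of the removed files: initialize a variable
// with a throwaway local Tape, as Linear's constructor does.
#include <string>
#include <vector>

#include "paddle/contrib/tape/tape.h"
#include "paddle/contrib/tape/variable.h"

paddle::tape::VariableHandle MakeOnesParameter(const std::vector<int> &shape) {
  using paddle::tape::Tape;
  using paddle::tape::Variable;
  using paddle::tape::VariableHandle;

  VariableHandle w(new Variable("param"));

  paddle::framework::AttributeMap attrs;
  attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
  attrs["shape"] = shape;
  attrs["value"] = 1.0f;

  Tape init_tape;                       // local tape, not get_global_tape()
  init_tape.AddOp("fill_constant", {}, {{"Out", {w}}}, attrs);
  init_tape.Forward();                  // run the recorded op immediately
  return w;
}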
@@ -1,61 +0,0 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "gtest/gtest.h"
#include "paddle/contrib/tape/function.h"

using namespace paddle::tape;

TEST(Tape, TestMLP) {
  LOG(INFO) << "TestMLP";
  Linear linear1(3, 3, "relu");
  Linear linear2(3, 3, "relu");
  Mean mean;

  SGD sgd(0.001);

  std::string initializer = "fill_constant";
  paddle::framework::AttributeMap attrs;
  attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
  attrs["shape"] = std::vector<int>{3, 3};
  attrs["value"] = 1.0f;
  Fill filler(initializer, attrs);

  for (int i = 0; i < 2; ++i) {
    reset_global_tape();

    VariableHandle input(new Variable("input"));
    filler(input);

    auto loss = mean(linear2(linear1(input)));

    get_global_tape().Backward(loss);

    for (auto w : linear1.Params()) {
      sgd(w);
    }
    for (auto w : linear2.Params()) {
      sgd(w);
    }
  }
}

int main(int argc, char** argv) {
  std::vector<paddle::platform::Place> places;
  places.emplace_back(paddle::platform::CPUPlace());
  paddle::platform::DeviceContextPool::Init(places);

  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
@@ -1,85 +0,0 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#include <memory>

#include "paddle/fluid/framework/operator.h"  // framework::kGradVarSuffix
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/variable.h"

namespace paddle {
namespace tape {

class Variable;
using VariableHandle = std::shared_ptr<Variable>;

/*
 * Combination of
 *     framework::VarDesc desc_;
 *     framework::Variable var_;
 */
class Variable {
 public:
  Variable(const std::string pre_fix)
      : desc_(pre_fix + std::to_string(count())) {}

  Variable(const std::string pre_fix, bool is_grad)
      : desc_(pre_fix + (is_grad ? framework::kGradVarSuffix
                                 : std::to_string(count()))) {}

  ~Variable() { LOG(INFO) << "Deleting " << Name(); }

  // Instantiate LoDTensor/SelectedRow
  void InitializeVariable();

  VariableHandle Grad() {
    if (grad_.expired()) {
      VariableHandle new_grad(new Variable(desc_.Name(), true));
      grad_ = new_grad;
      return new_grad;
    } else {
      return VariableHandle(grad_);
    }
  }

  // Stochastic Gradient Descent with Momentum
  //  VariableHandle Momentum ();

  //  void init(const std::string& initializer,
  //            const framework::AttributeMap& attrs);

  // void value() {};

  const framework::VarDesc& Desc() const { return desc_; }
  framework::VarDesc* MutableDesc() { return &desc_; }

  // TODO(tonyyang-svail): No need to expose name
  std::string Name() const { return desc_.Name(); }

  framework::Variable* Var() { return &var_; }

 private:
  int count() {
    static int counter = 0;
    return counter++;
  }

  framework::VarDesc desc_;
  framework::Variable var_;

  std::weak_ptr<Variable> grad_;
};
}
}
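One detail worth spelling out: Grad() above creates the gradient variable lazily and the owning Variable keeps only a weak_ptr to it, so the gradient lives exactly as long as someone holds a handle. A small hedged sketch of that behaviour follows; the function name GradHandleDemo is illustrative, not part of the removed code.

// Illustrative sketch, not part of the removed files: lazy gradient creation
// through Variable::Grad().
#include "paddle/contrib/tape/variable.h"

void GradHandleDemo() {
  paddle::tape::VariableHandle w(new paddle::tape::Variable("w"));

  auto g1 = w->Grad();   // grad_ was expired: a new Variable named
                         // Name() + framework::kGradVarSuffix is created
  auto g2 = w->Grad();   // grad_ is still alive, so g2 refers to the same Variable as g1
  // Once g1 and g2 go out of scope the weak_ptr expires, and the next Grad()
  // call will allocate a fresh gradient variable.
}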
Some files were not shown because too many files have changed in this diff.