polish codes, test=develop (#20672)

Zeng Jinle 6 years ago committed by GitHub
parent dd3d8997cf
commit 10505faf4e

@@ -43,13 +43,11 @@ class SingleOpInplaceInToOut : public InplaceOpInference {
  public:
   std::unordered_map<std::string, std::string> operator()(
       const OpDesc& op_desc, bool use_cuda) const override {
-    PADDLE_ENFORCE_EQ(op_desc.InputNames().size(), 1,
-                      "Op inputs must be unique");
-    PADDLE_ENFORCE_EQ(op_desc.OutputNames().size(), 1,
-                      "Op outputs must be unique");
-    auto x_name = op_desc.InputNames().at(0);
-    auto out_name = op_desc.OutputNames().at(0);
-    return std::unordered_map<std::string, std::string>{{x_name, out_name}};
+    auto inputs = op_desc.InputNames();
+    auto outputs = op_desc.OutputNames();
+    PADDLE_ENFORCE_EQ(inputs.size(), 1, "Op inputs must be unique");
+    PADDLE_ENFORCE_EQ(outputs.size(), 1, "Op outputs must be unique");
+    return {{inputs[0], outputs[0]}};
   }
 };
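
The hunk above fetches InputNames() / OutputNames() once each, moves the size checks onto the local copies, and returns through a braced initializer instead of spelling out the map type. A minimal, self-contained sketch of the same single-input-to-single-output mapping, using a hypothetical FakeOpDesc stand-in rather than Paddle's real OpDesc or PADDLE_ENFORCE_EQ macro:

// Illustrative only: FakeOpDesc and SingleInToOut are made-up names that mimic
// the polished operator() above outside the framework.
#include <cassert>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

struct FakeOpDesc {
  std::vector<std::string> inputs;
  std::vector<std::string> outputs;
  const std::vector<std::string>& InputNames() const { return inputs; }
  const std::vector<std::string>& OutputNames() const { return outputs; }
};

// Fetch the name lists once, require exactly one input and one output,
// then map the input variable name to the output variable name.
std::unordered_map<std::string, std::string> SingleInToOut(
    const FakeOpDesc& op_desc) {
  auto inputs = op_desc.InputNames();
  auto outputs = op_desc.OutputNames();
  assert(inputs.size() == 1 && "Op inputs must be unique");
  assert(outputs.size() == 1 && "Op outputs must be unique");
  return {{inputs[0], outputs[0]}};
}

int main() {
  FakeOpDesc sigmoid{{"X"}, {"Out"}};
  for (const auto& pair : SingleInToOut(sigmoid)) {
    std::cout << pair.first << " -> " << pair.second << "\n";  // prints: X -> Out
  }
  return 0;
}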

@@ -210,14 +210,6 @@ struct BaseActivationFunctor {
   using AttrPair = std::vector<std::pair<const char*, float*>>;
   AttrPair GetAttrs() { return AttrPair(); }
-  /* NOTE(*): Out can reuse X's memory if X is not needed by the gradient.
-     For example, sigmoid op's gradient does not involve x, so its output can
-     reuse the input memory. But abs op's gradient does use x, so it cannot
-     be computed in place.
-  */
-  bool Inplace() const { return false; }
 };
 
 // sigmoid(x) = 1 / (1 + exp(-x))
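
The removed NOTE rests on a standard fact about these activations: sigmoid's gradient can be written purely in terms of the forward output, out * (1 - out), while abs's gradient sign(x) needs the original input, so only the former allows the forward pass to overwrite X with Out. A small stand-alone sketch (hypothetical helper names, not Paddle code) contrasting the two gradient formulas:

// SigmoidGrad reads only the forward output, so X's buffer may be reused for
// Out; AbsGrad reads the forward input, so overwriting X would break backward.
#include <cmath>
#include <cstdio>

// Backward of sigmoid: d/dx sigmoid(x) = out * (1 - out), out = sigmoid(x).
float SigmoidGrad(float out, float dout) { return dout * out * (1.0f - out); }

// Backward of abs: d/dx |x| = sign(x), which requires the original input x.
float AbsGrad(float x, float dout) { return dout * (x >= 0.0f ? 1.0f : -1.0f); }

int main() {
  float x = -0.5f;
  float out = 1.0f / (1.0f + std::exp(-x));                   // sigmoid forward
  std::printf("sigmoid grad: %f\n", SigmoidGrad(out, 1.0f));  // no x needed
  std::printf("abs grad:     %f\n", AbsGrad(x, 1.0f));        // x required
  return 0;
}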
