Remove Data Sharing between input and output in scatter_op (#12672)

* Remove Data Sharing between input and output in scatter_op

* Removed data sharing in backward op
revert-12469-sum_op_dim_fix
Qingsheng Li authored 7 years ago · committed by GitHub
parent c44fb00371
commit 317e18abd2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@@ -35,7 +35,7 @@ class ScatterOpKernel : public framework::OpKernel<T> {
     auto *Out = ctx.Output<Tensor>("Out");
     // In place output: Out = X, Out[Ids] += Updates
-    Out->ShareDataWith(*X);
+    framework::TensorCopySync(*X, ctx.GetPlace(), Out);
     // Apply ScatterUpdate: Out[index] += Updates[:]
     ScatterAssign<T>(ctx.device_context(), *Updates, *Ids, Out);
   }
@@ -53,7 +53,7 @@ class ScatterGradientOpKernel : public framework::OpKernel<T> {
     auto *dOut = ctx.Input<Tensor>(framework::GradVarName("Out"));
     // In place gradient: dX = dO
-    dX->ShareDataWith(*dOut);
+    framework::TensorCopySync(*dOut, ctx.GetPlace(), dX);
     dUpdates->mutable_data<T>(ctx.GetPlace());
     // Gradient by Gather: dUpdates += dO[Ids]
     CPUGather<T>(ctx.device_context(), *dOut, *Ids, dUpdates);

Loading…
Cancel
Save