@@ -104,7 +104,7 @@ void ShareTensorBufferFunctor::operator()(Scope *exec_scope) {
       // If in_var is inplaced in the previous batch and we want to fetch
       // in_var in the current batch, we have to reset memory of out_var
       // to avoid wrong calculation result.
-      if (in_tensor.Holder() == out_tensor->Holder()) {
+      if (out_tensor->IsSharedBufferWith(in_tensor)) {
         VLOG(1) << "Clear " << out_var_names_[i]
                 << " because you may want to fetch an inplaced variable "
                 << in_var_info->Name()
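
For context, the rewritten condition asks whether the output tensor still shares its underlying allocation with the input tensor from the previous (inplaced) batch. Below is a minimal, self-contained sketch of what such a shared-buffer check amounts to, assuming a simplified Tensor/Allocation pair; the names here are illustrative and are not PaddlePaddle's real classes.

// sketch only: a tensor "shares a buffer" with another when both point
// at the same underlying allocation holder, which is what the new
// IsSharedBufferWith-style condition expresses.
#include <iostream>
#include <memory>
#include <vector>

struct Allocation {
  std::vector<float> data;  // hypothetical backing storage
};

class Tensor {
 public:
  explicit Tensor(std::shared_ptr<Allocation> holder)
      : holder_(std::move(holder)) {}

  const std::shared_ptr<Allocation>& Holder() const { return holder_; }

  // Same meaning as the raw holder comparison the diff replaces:
  // true when both tensors are backed by the same allocation.
  bool IsSharedBufferWith(const Tensor& other) const {
    return holder_ != nullptr && holder_ == other.holder_;
  }

 private:
  std::shared_ptr<Allocation> holder_;
};

int main() {
  auto buf = std::make_shared<Allocation>();
  Tensor in(buf), out(buf), other(std::make_shared<Allocation>());

  std::cout << std::boolalpha
            << out.IsSharedBufferWith(in) << "\n"      // true: same holder
            << out.IsSharedBufferWith(other) << "\n";  // false: different holder
  return 0;
}

Wrapping the comparison in a named predicate keeps the intent ("these two tensors alias the same memory") in one place, rather than comparing holder pointers directly at every call site.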