fix fast pe to run highest priority ops first, test=develop (#19575)

Zeng Jinle authored 6 years ago, committed by GitHub
parent 84c728013c
commit 19474019c2

paddle/fluid/framework/details/fast_threaded_ssa_graph_executor.cc
@@ -12,8 +12,8 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 #include "paddle/fluid/framework/details/fast_threaded_ssa_graph_executor.h"
+#include <deque>
 #include <memory>
-#include <queue>
 #include <string>
 #include <unordered_map>
 #include <unordered_set>
@@ -191,13 +191,13 @@ void FastThreadedSSAGraphExecutor::RunOpAsync(
     const std::shared_ptr<BlockingQueue<size_t>> &complete_q) {
   ++remaining_;
   this->pool_.enqueue([=] {
-    std::queue<OpHandleBase *> op_queue;
-    op_queue.push(op);
+    std::deque<OpHandleBase *> op_queue;
+    op_queue.push_front(op);
     size_t complete = 0;
     while (!op_queue.empty()) {
-      OpHandleBase *op_to_run = op_queue.front();
-      op_queue.pop();
+      OpHandleBase *op_to_run = op_queue.back();
+      op_queue.pop_back();
       if (!RunOp(op_to_run, complete_q, &complete)) {
         return;
@@ -213,7 +213,7 @@ void FastThreadedSSAGraphExecutor::RunOpAsync(
           // NOTE(zjl): op with highest priority should run
           // first without switching to another thread.
           if (pending_op->GetPriority() == OpHandleBase::Priority::kHighest) {
-            op_queue.push(pending_op);
+            op_queue.push_back(pending_op);
           } else {
             if (op_to_run == nullptr) {
               op_to_run = pending_op;
@@ -224,7 +224,9 @@ void FastThreadedSSAGraphExecutor::RunOpAsync(
           }
         }
-      if (op_to_run != nullptr) op_queue.push(op_to_run);
+      if (op_to_run != nullptr) {
+        op_queue.push_front(op_to_run);
+      }
     }
     --remaining_;
     complete_q->Push(complete);

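The hunks above turn the per-thread work list into a std::deque that is drained from the back: ops flagged kHighest are pushed to the back and therefore run next on the current thread, while the single regular follow-up op is parked at the front with push_front() and only runs once the high-priority work has drained. Below is a minimal, self-contained sketch of that scheduling pattern; the op names are hypothetical and the real classes (OpHandleBase, BlockingQueue, the thread pool) are omitted.

// Standalone sketch of the deque-based scheduling above (hypothetical op
// names, not Paddle's API): pop from the back, push_back() for
// highest-priority ops so they run next on this thread, push_front() for
// the regular follow-up op so it runs after the high-priority work.
#include <deque>
#include <iostream>
#include <string>

int main() {
  std::deque<std::string> op_queue;
  op_queue.push_front("initial_op");
  while (!op_queue.empty()) {
    std::string op_to_run = op_queue.back();
    op_queue.pop_back();
    std::cout << "run " << op_to_run << std::endl;
    if (op_to_run == "initial_op") {
      // Pretend running initial_op unblocked two pending ops.
      op_queue.push_back("highest_priority_op");  // back of deque: runs next
      op_queue.push_front("regular_op");          // front: runs after high-priority work
    }
  }
  return 0;
}
// Prints: run initial_op, run highest_priority_op, run regular_op.
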
paddle/fluid/framework/executor.cc
@@ -409,8 +409,6 @@ void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
   int64_t max_memory_size = GetEagerDeletionThreshold();
   std::unique_ptr<GarbageCollector> gc;
-  // FIXME(zjl): recurrent_op is rather complex, we would
-  // disable gc forcely in recurrent_op
   if (!ctx->force_disable_gc_ && max_memory_size >= 0) {
 #ifdef PADDLE_WITH_CUDA
     if (platform::is_gpu_place(place_)) {

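In the executor.cc hunk, the surrounding code only sets up an eager-deletion garbage collector when GC is not force-disabled and the configured memory threshold is non-negative. The sketch below illustrates that guard only; the type and function names are stand-ins, not Paddle's real GarbageCollector API.

#include <cstdint>
#include <iostream>
#include <memory>

// Stand-in for Paddle's GarbageCollector hierarchy (illustrative only).
struct FakeGarbageCollector {};

// Mirrors the guard shown in the diff: a collector is created only when GC
// is not force-disabled and the eager-deletion threshold is non-negative.
std::unique_ptr<FakeGarbageCollector> MaybeCreateGC(bool force_disable_gc,
                                                    int64_t max_memory_size) {
  std::unique_ptr<FakeGarbageCollector> gc;
  if (!force_disable_gc && max_memory_size >= 0) {
    gc.reset(new FakeGarbageCollector());  // real code picks a CPU or GPU GC here
  }
  return gc;
}

int main() {
  std::cout << std::boolalpha;
  std::cout << (MaybeCreateGC(false, 0) != nullptr) << std::endl;   // true: GC enabled
  std::cout << (MaybeCreateGC(true, 0) != nullptr) << std::endl;    // false: forced off
  std::cout << (MaybeCreateGC(false, -1) != nullptr) << std::endl;  // false: threshold < 0
  return 0;
}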