panyx0718-patch-1
Yu Yang 6 years ago
parent d231e55065
commit 02631965c8

@@ -34,6 +34,8 @@ AllocatorStrategy GetAllocatorStrategy() {
   static AllocatorStrategy strategy = GetStrategyFromFlag();
   return strategy;
 }
+
+void UseAllocatorStrategyGFlag() {}
 } // namespace allocation
 } // namespace memory
 } // namespace paddle
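The empty UseAllocatorStrategyGFlag() added above is a link-time anchor. This translation unit (evidently allocator_strategy.cc, given the include path used later) registers the allocator-strategy gflag through static initialization; if nothing in the final binary references a symbol from its object file, a static linker may drop the file from the archive, and the flag registration silently disappears. Calling the no-op from a translation unit that is guaranteed to be linked creates the reference that keeps the file alive, which is exactly what the header comment in the next hunk says.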

@@ -22,6 +22,9 @@ enum class AllocatorStrategy { kLegacy, kNaiveBestFit };
 extern AllocatorStrategy GetAllocatorStrategy();
+
+// Do nothing; just make sure the linker does not prune this file.
+extern void UseAllocatorStrategyGFlag();
 } // namespace allocation
 } // namespace memory
 } // namespace paddle
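A minimal sketch of the pattern, collapsed into one file so it builds standalone; in the commit the flag definition and the anchor live in allocator_strategy.cc, with the declaration in the header above. The flag name, default, and help text here are illustrative assumptions rather than the commit's exact definition, and note that pruning only bites when the pieces sit in separate object files inside a static library:

// anchor_demo.cc -- hypothetical single-file demo of the anchor pattern.
#include <iostream>

#include "gflags/gflags.h"

// Static registration: DEFINE_string registers the flag with gflags when
// this object file is part of the binary. The name and default are assumed.
DEFINE_string(allocator_strategy, "legacy",
              "Sketch of the allocator-strategy flag (assumed definition).");

// Do nothing; an odr-use of this symbol from another translation unit
// forces the linker to keep this object file, and with it the flag above.
void UseAllocatorStrategyGFlag() {}

int main(int argc, char** argv) {
  UseAllocatorStrategyGFlag();  // the reference that defeats pruning
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  std::cout << "allocator_strategy = " << FLAGS_allocator_strategy << "\n";
  return 0;
}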

@@ -34,6 +34,7 @@ limitations under the License. */
 #include "paddle/fluid/framework/reader.h"
 #include "paddle/fluid/framework/selected_rows.h"
 #include "paddle/fluid/framework/version.h"
+#include "paddle/fluid/memory/allocation/allocator_strategy.h"
 #include "paddle/fluid/operators/activation_op.h"
 #include "paddle/fluid/operators/reader/lod_tensor_blocking_queue.h"
 #include "paddle/fluid/platform/enforce.h"
@@ -83,6 +84,7 @@ bool IsCompiledWithDIST() {
 }
 PYBIND11_PLUGIN(core) {
+  paddle::memory::allocation::UseAllocatorStrategyGFlag();
   py::module m("core", "C++ core of PaddlePaddle");
   // using framework in this function. Since it is inside a function, it will
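Calling the anchor first thing in PYBIND11_PLUGIN(core) is the cheapest guaranteed reference: this file (evidently pybind.cc) is always linked into the Python extension, and the module-init body runs on every import of core, so the object file holding the flag registration can no longer be discarded at link time.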

@@ -16,10 +16,12 @@ limitations under the License. */
 #include "gflags/gflags.h"
 #include "gtest/gtest.h"
+#include "paddle/fluid/memory/allocation/allocator_strategy.h"
 #include "paddle/fluid/memory/memory.h"
 #include "paddle/fluid/platform/init.h"

 int main(int argc, char** argv) {
+  paddle::memory::allocation::UseAllocatorStrategyGFlag();
   testing::InitGoogleTest(&argc, argv);
   std::vector<char*> new_argv;
   std::string gflags_env;
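This gtest main() is truncated just where new_argv and gflags_env are declared; in this era of the codebase they are used to append gflags' built-in --tryfromenv switch before parsing. A hedged sketch of that idea follows, with an assumed flag list (the real file forwards several flags, chosen per build configuration) and without the Paddle-specific initialization calls:

// gtest_main_sketch.cc -- a sketch, not the real test main; the flag
// list passed to --tryfromenv is an assumption.
#include <string>
#include <vector>

#include "gflags/gflags.h"
#include "gtest/gtest.h"

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);

  // Copy the original argv, then append gflags' built-in --tryfromenv
  // switch so that a flag such as allocator_strategy can be set through
  // the FLAGS_allocator_strategy environment variable when it is present.
  std::vector<char*> new_argv(argv, argv + argc);
  std::string gflags_env = "--tryfromenv=allocator_strategy";
  new_argv.push_back(const_cast<char*>(gflags_env.c_str()));

  int new_argc = static_cast<int>(new_argv.size());
  char** new_argv_address = new_argv.data();
  gflags::ParseCommandLineFlags(&new_argc, &new_argv_address, false);

  return RUN_ALL_TESTS();
}

Running the test binary with FLAGS_allocator_strategy set in the environment then flips the flag without editing the test command line.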

@@ -116,7 +116,7 @@ class TestDataBalance(unittest.TestCase):
             print("WARNING: Unittest TestDataBalance skipped, \
                 because the result is not correct when the device count \
                 is larger than the batch size.")
-            exit(0)
+            return
         fetch_list = [image.name, label.name]
         data_appeared = [False] * self.total_ins_num
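The one-line replacement above is the fix in the Python test: exit(0) terminates the whole interpreter, so under a test runner it silently ends the entire session with a success status instead of skipping just this case, whereas return only ends the current test method and lets the rest of the suite run. unittest's self.skipTest(...) would report the skip explicitly, but plain return is the minimal change.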
