@@ -21,6 +21,7 @@ from ..layer_helper import LayerHelper
 from ..executor import global_scope
 from layer_function_generator import generate_layer_fn, templatedoc
 import sys
+import multiprocessing
 
 __all__ = [
     'data', 'BlockGuardServ', 'ListenAndServ', 'Send', 'Recv',
@@ -549,10 +550,9 @@ def open_files(filenames,
         shapes(list): List of tuples which declaring data shapes.
         lod_levels(list): List of ints which declaring data lod_level.
         dtypes(list): List of strs which declaring data type.
-        thread_num(None): Deprecated argument. It will be set by open_files
-            automatically.
-        buffer_size(None): Deprecated argument. It will be set by open_files
-            automatically.
+        thread_num(None): The number of thread to read files.
+            Default: min(len(filenames), cpu_number).
+        buffer_size(None): The buffer size of reader. Default: 3 * thread_num
         pass_num(int): Number of passes to run.
         is_test(bool|None): Whether `open_files` used for testing or not. If it
             is used for testing, the order of data generated is same as the file
@@ -574,14 +574,15 @@ def open_files(filenames,
         # Via the reader, we can use 'read_file' layer to get data:
         image, label = fluid.layers.io.read_file(reader)
     """
-    if thread_num is not None:
-        print >> sys.stderr, "thread_num parameter of open_files is " \
-                             "deprecated. It will be ignored and set " \
-                             "automatically by open_files "
-    if buffer_size is not None:
-        print >> sys.stderr, "buffer_size parameter of open_files is " \
-                             "deprecated. It will be ignored and set " \
-                             "automatically by open_files "
+    if thread_num is None:
+        thread_num = min(len(filenames), multiprocessing.cpu_count())
+    else:
+        thread_num = int(thread_num)
+
+    if buffer_size is None:
+        buffer_size = 3 * thread_num
+    else:
+        buffer_size = int(buffer_size)
 
     if isinstance(filenames, basestring):
         filenames = [filenames]
@@ -600,7 +601,9 @@ def open_files(filenames,
         'shape_concat': shape_concat,
         'lod_levels': lod_levels,
         'ranks': ranks,
-        'file_names': filenames
+        'file_names': filenames,
+        'thread_num': thread_num,
+        'buffer_size': buffer_size
     }
     if is_test is not None:
         attrs['is_test'] = is_test
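
For illustration only: a minimal usage sketch of `open_files` after this change, following the style of the docstring example above (the file paths, shapes, and dtypes here are placeholders, not taken from the patch). The first call relies on the restored defaults (thread_num = min(len(filenames), cpu_number), buffer_size = 3 * thread_num); the second passes both arguments explicitly, which are now forwarded to the underlying op as the 'thread_num' and 'buffer_size' attributes.

    import paddle.fluid as fluid

    # Rely on the new defaults: thread_num is derived from the number of
    # files and the CPU count; buffer_size defaults to 3 * thread_num.
    reader = fluid.layers.io.open_files(
        filenames=['./data1.recordio', './data2.recordio'],  # placeholder paths
        shapes=[(-1, 3, 224, 224), (-1, 1)],
        lod_levels=[0, 0],
        dtypes=['float32', 'int64'])

    # Or set the restored arguments explicitly.
    reader = fluid.layers.io.open_files(
        filenames=['./data1.recordio', './data2.recordio'],  # placeholder paths
        shapes=[(-1, 3, 224, 224), (-1, 1)],
        lod_levels=[0, 0],
        dtypes=['float32', 'int64'],
        thread_num=2,
        buffer_size=6)

    # Via the reader, data can be fetched as in the docstring example.
    image, label = fluid.layers.io.read_file(reader)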