fix API spec about infer_from_dataset

test=develop
revert-16555-model_data_cryption_link_all_lib
dongdaxiang 6 years ago
parent 87027a2eef
commit 3829eac27b

File diff suppressed because one or more lines are too long

@@ -220,9 +220,11 @@ class InMemoryDataset(DatasetBase):
def global_shuffle(self, fleet=None):
"""
Global shuffle.
If you run distributed, you should pass fleet instead of None.
Global shuffle can only be used in distributed mode, i.e. multiple
processes on a single machine or multiple machines training together.
If you run in distributed mode, you should pass fleet instead of None.
Example:
Examples:
>>> import paddle.fluid as fluid
>>> import paddle.fluid.incubate.fleet.parameter_server as fleet
>>> dataset = fluid.DatasetFactory.create_dataset("InMemoryDataset")
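
The docstring example above stops right after creating the dataset. A hedged continuation of that example, assuming the fleet import path shown in the docstring and placeholder file names, could look like this (in a local single-process run fleet stays None; in distributed mode every trainer passes the fleet handle):

>>> import paddle.fluid as fluid
>>> import paddle.fluid.incubate.fleet.parameter_server as fleet
>>> dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
>>> dataset.set_filelist(["a.txt", "b.txt"])  # placeholder files
>>> dataset.load_into_memory()
>>> dataset.global_shuffle(fleet)  # pass fleet instead of None in distributed mode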

@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
__all__ = ['DeviceWorker', 'Hogwild', 'DownpourSGD']
@@ -117,7 +116,7 @@ class DownpourSGD(DeviceWorker):
program_id = str(id(self.program_))
if self.program_ == None:
print("program of current device worker is not configured")
sys.exit(-1)
exit(-1)
opt_info = self.program_._fleet_opt
program_configs = opt_info["program_configs"]
downpour = trainer_desc.downpour_param
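
For readers unfamiliar with the lookup this hunk guards: the device worker keys into the fleet optimizer's program_configs dict by the Python id() of its attached program, and bails out when no program was configured. A minimal, hypothetical sketch of that pattern (names are illustrative, not the pslib API; it raises instead of calling exit(-1)):

def _lookup_program_config(program):
    # Guard mirrors the hunk above: without a program there is nothing to describe.
    if program is None:
        raise RuntimeError("program of current device worker is not configured")
    opt_info = program._fleet_opt                  # dict attached by the fleet optimizer
    program_configs = opt_info["program_configs"]  # per-program table settings
    return program_configs[str(id(program))]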

@@ -112,30 +112,6 @@ class DownpourServer(Server):
fea_dim += reduce(lambda x, y: x * y, param.shape, 1)
table.accessor.fea_dim = fea_dim
def add_data_norm_table(self, table_id, learning_rate, param_var, grad_var):
"""
Args:
table_id(int): id of sparse params table
learning_rate(float): the learning rate used to update parameters. \
Can be a float value
param_var(list): all dense param. it is a list.
grad_var(list): all dense grad parm it is a list.
Returns:
return None
"""
table = self.server_.downpour_server_param.downpour_table_param.add()
table.table_id = table_id
table.table_class = "DownpourDenseTable"
table.type = pslib.PS_DENSE_TABLE
table.accessor.accessor_class = "DownpourDenseValueAccessor"
table.accessor.dense_sgd_param.name = "summary"
table.accessor.dense_sgd_param.summary.summary_decay_rate = 0.999999
fea_dim = 0
for param in filter(lambda x: x.name.find("embedding") == -1,
param_var):
fea_dim += reduce(lambda x, y: x * y, param.shape, 1)
table.accessor.fea_dim = fea_dim
def get_desc(self):
"""
Return downpour server program_desc
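
The table setup removed here (like its surviving counterpart above) derives fea_dim by multiplying out each non-embedding parameter's shape and summing the results. A self-contained sketch of that arithmetic, with made-up parameter names and shapes:

from functools import reduce

# (name, shape) pairs standing in for dense parameter variables
params = [("fc_0.w_0", [128, 64]),
          ("fc_0.b_0", [64]),
          ("embedding_0.w_0", [10000, 16])]

fea_dim = 0
for name, shape in params:
    if name.find("embedding") != -1:  # embedding params are filtered out, as above
        continue
    fea_dim += reduce(lambda x, y: x * y, shape, 1)  # product of the dims

print(fea_dim)  # 128 * 64 + 64 = 8256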

@@ -676,16 +676,16 @@ class Executor(object):
if not provided, then default_main_program (not compiled) will be used.
dataset(paddle.fluid.Dataset): dataset created outside this function,
a user should provide a well-defined dataset before calling this function.
Please check the document of Dataset if needed.
Please check the document of Dataset if needed. default is None
scope(Scope): the scope used to run this program, you can switch it to different scope
for each run. default is global_scope
thread(int): number of thread a user wants to run in this function. The actual number
of thread will be min(Dataset.thread_num, thread)
debug(bool): whether a user wants to run infer_from_dataset
of thread will be min(Dataset.thread_num, thread) if thread > 0, default is 0
debug(bool): whether a user wants to run infer_from_dataset, default is False
fetch_list(Variable List): fetch variable list, each variable
will be printed during training
fetch_info(String List): print information for each variable
print_period(int): the number of mini-batches for each print
will be printed during training, default is None
fetch_info(String List): print information for each variable, default is None
print_period(int): the number of mini-batches for each print, default is 100
Returns:
None
@@ -693,6 +693,7 @@ class Executor(object):
Examples:
.. code-block:: python
import paddle.fluid as fluid
place = fluid.CPUPlace()
exe = fluid.Executor(place)
@@ -707,6 +708,9 @@ class Executor(object):
dataset=dataset)
"""
if dataset == None:
raise RuntimeError("dataset is needed and should be initialized")
if self.place == paddle.fluid.CUDAPlace():
raise RuntimeError("infer_from_dataset is verified on CPUPlace"
"We will open CUDAPlace in the future")
@@ -788,6 +792,9 @@ class Executor(object):
dataset=dataset)
"""
if dataset == None:
raise RuntimeError("dataset is need and should be initialized")
if self.place == paddle.fluid.CUDAPlace():
raise RuntimeError("train_from_dataset is verified on CPUPlace"
"We will open CUDAPlace in the future")
