【paddle.fleet】paddle.fleet -> paddle.distributed.fleet. (#26186)

* move paddle.fleet to paddle.distributed.fleet
Dong Daxiang 5 years ago committed by GitHub
parent ffe52b4452
commit 50a5bcfc9d
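For downstream code the move is purely a rename of the import path; a minimal before/after sketch (names taken from the hunks below, not a new API):

    # before this commit
    # import paddle.fleet as fleet

    # after this commit
    import paddle.distributed.fleet as fleet

    strategy = fleet.DistributedStrategy()  # same class, new package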

@@ -164,23 +164,23 @@ if(WITH_PYTHON)
   if (NOT WIN32)
     add_custom_command(TARGET framework_py_proto POST_BUILD
       COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/fluid/proto
-      COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/fleet/proto
-      COMMAND ${CMAKE_COMMAND} -E touch ${PADDLE_BINARY_DIR}/python/paddle/fleet/proto/__init__.py
+      COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto
+      COMMAND ${CMAKE_COMMAND} -E touch ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto/__init__.py
       COMMAND cp *.py ${PADDLE_BINARY_DIR}/python/paddle/fluid/proto/
-      COMMAND cp distributed_strategy_*.py ${PADDLE_BINARY_DIR}/python/paddle/fleet/proto
+      COMMAND cp distributed_strategy_*.py ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto
       COMMENT "Copy generated python proto into directory paddle/fluid/proto."
       WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
   else(NOT WIN32)
     string(REPLACE "/" "\\" proto_dstpath "${PADDLE_BINARY_DIR}/python/paddle/fluid/proto/")
-    string(REPLACE "/" "\\" fleet_proto_dstpath "${PADDLE_BINARY_DIR}/python/paddle/fleet/proto/")
+    string(REPLACE "/" "\\" fleet_proto_dstpath "${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto/")
     add_custom_command(TARGET framework_py_proto POST_BUILD
       COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/fluid/proto
-      COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/fleet/proto
-      COMMAND ${CMAKE_COMMAND} -E touch ${PADDLE_BINARY_DIR}/python/paddle/fleet/proto/__init__.py
+      COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto
+      COMMAND ${CMAKE_COMMAND} -E touch ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto/__init__.py
       COMMAND copy /Y *.py ${proto_dstpath}
       COMMAND copy /Y distributed_strategy_*.py ${fleet_proto_dstpath}
       COMMENT "Copy generated python proto into directory paddle/fluid/proto."
-      COMMENT "Copy generated python proto into directory paddle/fleet/proto."
+      COMMENT "Copy generated python proto into directory paddle/distributed/fleet/proto."
       WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
   endif(NOT WIN32)
 endif()
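The CMake hunk above only relocates the generated protobuf bindings from python/paddle/fleet/proto to python/paddle/distributed/fleet/proto; once distributed_strategy_*.py is copied there, it is imported as shown later in this diff. A minimal sketch, assuming the generated module exposes the usual protobuf message API:

    from paddle.distributed.fleet.proto import distributed_strategy_pb2

    # hypothetical smoke test of the relocated bindings
    msg = distributed_strategy_pb2.DistributedStrategy()
    payload = msg.SerializeToString()  # standard protobuf serialization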

@@ -36,7 +36,7 @@ import paddle.distributed
 import paddle.sysconfig
 import paddle.tensor
 import paddle.nn
-import paddle.fleet
+import paddle.distributed.fleet
 import paddle.framework
 import paddle.optimizer
 import paddle.metric

@@ -13,7 +13,7 @@
 # limitations under the License.

 import paddle
-from paddle.fleet.proto import distributed_strategy_pb2
+from paddle.distributed.fleet.proto import distributed_strategy_pb2
 from paddle.fluid.framework import Variable
 import google.protobuf.text_format
@@ -103,7 +103,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             strategy = fleet.DistributedStrategy()
             strategy.dgc = True
             strategy.recompute = True
@@ -120,7 +120,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             strategy = fleet.DistributedStrategy()
             strategy.load_from_prototxt("dist_strategy.prototxt")
        """
@@ -141,7 +141,7 @@ class DistributedStrategy(object):
            exe_strategy.num_iteration_per_drop_scope = 10
            exe_strategy.num_iteration_per_run = 10

-            strategy = paddle.fleet.DistributedStrategy()
+            strategy = paddle.distributed.fleet.DistributedStrategy()
            strategy.execution_strategy = exe_strategy
        """
        execution_strategy = paddle.fluid.ExecutionStrategy()
@@ -178,7 +178,7 @@ class DistributedStrategy(object):
            build_strategy.fuse_all_optimizer_ops = True
            build_strategy.enable_inplace = True

-            strategy = paddle.fleet.DistributedStrategy()
+            strategy = paddle.distributed.fleet.DistributedStrategy()
            strategy.build_strategy = build_strategy
        """
@@ -211,7 +211,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             role_maker = fleet.PaddleCloudRoleMaker()
             fleet.init(role_maker)
@@ -253,7 +253,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             role_maker = fleet.PaddleCloudRoleMaker()
             fleet.init(role_maker)
@@ -282,7 +282,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             strategy = fleet.DistributedStrategy()
             strategy.amp = True # by default this is false
@@ -314,7 +314,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             strategy = fleet.DistributedStrategy()
             strategy.recompute = True
             # suppose x and y are names of checkpoint tensors for recomputation
@@ -432,7 +432,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             strategy = fleet.DistributedStrategy()
             strategy.recompute = True
             strategy.recompute_configs = {"checkpoints": ["x", "y"]}
@@ -457,7 +457,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             strategy = fleet.DistributedStrategy()
             strategy.pipeline = True
@@ -490,7 +490,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             strategy = fleet.DistributedStrategy()
             strategy.pipeline = True
             strategy.pipeline_configs = {"micro_batch": 12}
@@ -560,7 +560,7 @@ class DistributedStrategy(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             strategy = fleet.DistributedStrategy()
             strategy.gradient_merge = True
             strategy.gradient_merge_configs = {"k_steps": 4, "avg": True}
@@ -583,7 +583,7 @@ class DistributedStrategy(object):
         avg (bool): whether to average the gradients of each mini-batch,
           the default value is `True`

         Example:
-            import paddle.fleet as fleet
+            import paddle.distributed.fleet as fleet
             strategy = fleet.DistributedStrategy()
             strategy.gradient_merge = True
             strategy.gradient_merge_configs = {"k_steps": 4, "avg": True}
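The flag/config pairs updated above all follow one pattern: a boolean toggle plus a dict of knobs. A consolidated sketch under the new import path, using only flags and keys that appear in this diff:

    import paddle.distributed.fleet as fleet

    strategy = fleet.DistributedStrategy()
    strategy.amp = True  # off by default
    strategy.recompute = True
    strategy.recompute_configs = {"checkpoints": ["x", "y"]}  # x, y: checkpoint tensor names
    strategy.pipeline = True
    strategy.pipeline_configs = {"micro_batch": 12}
    strategy.gradient_merge = True
    strategy.gradient_merge_configs = {"k_steps": 4, "avg": True}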

@@ -34,9 +34,8 @@ class Fleet(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
-            import paddle.fluid.incubate.fleet.base.role_maker as role_maker
-            role = role_maker.PaddleCloudRoleMaker(is_collective=True)
+            import paddle.distributed.fleet as fleet
+            role = fleet.role_maker.PaddleCloudRoleMaker(is_collective=True)
             fleet.init(role)
             strategy = fleet.DistributedStrategy()
             optimizer = paddle.optimizer.SGD(learning_rate=0.001)
@@ -218,9 +217,8 @@ class Fleet(object):
         Examples:
           .. code-block:: python

-            import paddle.fleet as fleet
-            import paddle.fluid.incubate.fleet.base.role_maker as role_maker
-            role = role_maker.PaddleCloudRoleMaker(is_collective=True)
+            import paddle.distributed.fleet as fleet
+            role = fleet.role_maker.PaddleCloudRoleMaker(is_collective=True)
             fleet.init(role)
             strategy = fleet.DistributedStrategy()
             optimizer = paddle.optimizer.SGD(learning_rate=0.001)
@@ -260,8 +258,7 @@ class Fleet(object):
         Examples:
             import paddle
-            import paddle.fleet as fleet
-            import paddle.fluid.incubate.fleet.base.role_maker as role_maker
+            import paddle.distributed.fleet as fleet

             fc_1 = paddle.layers.fc(input=input_x, size=hid_dim, act='tanh')
             fc_2 = paddle.layers.fc(input=fc_1, size=hid_dim, act='tanh')
@@ -269,7 +266,7 @@ class Fleet(object):
             cost = paddle.layers.cross_entropy(input=prediction, label=input_y)
             avg_cost = paddle.layers.mean(x=cost)

-            role = role_maker.PaddleCloudRoleMaker(is_collective=True)
+            role = fleet.role_maker.PaddleCloudRoleMaker(is_collective=True)
             fleet.init(role)
             strategy = fleet.DistributedStrategy()
             optimizer = paddle.optimizer.SGD(learning_rate=0.001)
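The minimize example above is cut off by the diff view; a hedged end-to-end sketch of the new-style setup (fleet.distributed_optimizer and optimizer.minimize are assumed to keep their pre-move behavior, and avg_cost is built as in the docstring):

    import paddle
    import paddle.distributed.fleet as fleet

    role = fleet.role_maker.PaddleCloudRoleMaker(is_collective=True)
    fleet.init(role)

    strategy = fleet.DistributedStrategy()
    optimizer = paddle.optimizer.SGD(learning_rate=0.001)
    optimizer = fleet.distributed_optimizer(optimizer, strategy=strategy)
    optimizer.minimize(avg_cost)  # avg_cost from the forward pass shown above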

@@ -481,7 +481,7 @@ class PaddleCloudRoleMaker(RoleMakerBase):
         return "lo"

     def __start_kv_server(self, http_server_d, size_d):
-        from paddle.fleet.utils import KVServer
+        from paddle.distributed.fleet.utils import KVServer
         http_server = KVServer(int(self._http_ip_port[1]), size_d)
         http_server.start()
         wait_seconds = 5
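For context, __start_kv_server simply points the role maker at the relocated KVServer; a standalone sketch mirroring the call pattern above (the port and size_d values are hypothetical):

    from paddle.distributed.fleet.utils import KVServer

    size_d = {"worker": 2}                # hypothetical dict, shape inferred from the call above
    http_server = KVServer(6170, size_d)  # constructor args mirror KVServer(int(port), size_d)
    http_server.start()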

@@ -55,8 +55,8 @@ class UtilBase(object):

     def set_file_system(self, fs_client):
         assert isinstance(
-            fs_client,
-            FS), "fs_client must be the instance of paddle.fleet.utils.FS"
+            fs_client, FS
+        ), "fs_client must be the instance of paddle.distributed.fleet.utils.FS"
         self.fs_client = fs_client

     def __check_comm_world(self, comm_world="worker"):
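The reworked assertion above now names the moved interface in its message; a minimal sketch of the guarded call (util stands in for a UtilBase instance, and construction of a concrete fs_client is elided since this hunk shows only the check):

    from paddle.distributed.fleet.utils import FS  # interface cited in the assertion message

    def attach_fs(util, fs_client):
        # same contract as UtilBase.set_file_system: only FS instances are accepted
        assert isinstance(fs_client, FS), \
            "fs_client must be the instance of paddle.distributed.fleet.utils.FS"
        util.set_file_system(fs_client)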

@@ -14,7 +14,7 @@
 import os

 import paddle
-from paddle.fleet.launch_utils import get_cluster, logger
+from paddle.distributed.fleet.launch_utils import get_cluster, logger

 def get_cloud_cluster(args_node_ips, selected_gpus, args_port=6170):
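get_cloud_cluster builds on the relocated launch_utils.get_cluster; a hedged usage sketch (the IPs and GPU ids are placeholders, and the return shape is not shown in this hunk):

    import paddle.distributed.fleet.cloud_utils as cloud_utils

    # args_port defaults to 6170 per the signature above
    cluster_info = cloud_utils.get_cloud_cluster("10.0.0.1,10.0.0.2", selected_gpus=[0, 1])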

@@ -66,8 +66,8 @@ from argparse import ArgumentParser, REMAINDER

 import paddle
 import paddle.fluid as fluid
-from paddle.fleet.launch_utils import *
-import paddle.fleet.cloud_utils as cloud_utils
+from paddle.distributed.fleet.launch_utils import *
+import paddle.distributed.fleet.cloud_utils as cloud_utils

 def _print_arguments(args):

Some files were not shown because too many files have changed in this diff.
