!12793 [auto-monad] Refactor ascend_auto_monad

From: @hwhewei
Reviewed-by: 
Signed-off-by:
pull/12793/MERGE
Authored by mindspore-ci-bot 4 years ago; committed by Gitee
commit 750d7e6e2a

File diff suppressed because it is too large.

@@ -398,6 +398,8 @@ constexpr auto kAttrNeedCseAfterRecompute = "need_cse_after_recompute";
 constexpr auto kAttrParallelDimInfo = "parallel_dim_info";
 constexpr auto kAttrStitch = "stitch";
 constexpr auto kAttrTopoSortRhsFirst = "topo_sort_rhs_first";
+constexpr auto kAttrSwitchLayer = "switch_layer";
+constexpr auto kAttrReturn = "return";
 // attr value
 constexpr auto kValueTargetSwitch = "target_switch";

@@ -86,6 +86,7 @@ const char FUNC_GRAPH_FLAG_SPECIALIZE_PARAMETER[] = "spec_param";
 const char kFuncGraphFlagUndetermined[] = "Undeterminate";
 const char kFuncGraphFlagBackPropEntry[] = "BackPropEntry";
 const char kFuncGraphFlagReAutoMonad[] = "ReAutoMonad";
+const char kFuncGraphFlagRecursive[] = "Recursive";
 namespace abstract {
 class AbstractKeywordArg;

@@ -24,11 +24,12 @@ from mindspore.common import dtype as mstype
 class CaseNet(nn.Cell):
     def __init__(self):
         super(CaseNet, self).__init__()
-        self.conv = nn.Conv2d(1, 3, 3)
+        self.conv = nn.Conv2d(1, 1, 3)
         self.relu = nn.ReLU()
+        self.relu1 = nn.ReLU()
         self.softmax = nn.Softmax()
         self.layers1 = (self.relu, self.softmax)
-        self.layers2 = (self.conv, self.relu)
+        self.layers2 = (self.conv, self.relu1)

     def construct(self, x, index1, index2):
         x = self.layers1[index1](x)
@@ -50,7 +51,3 @@ def test_switch_layer():
     true_value = relu(data)
     ret = np.allclose(value.asnumpy(), true_value.asnumpy())
     assert ret
-    idx3 = Tensor(3, mstype.int32)
-    with pytest.raises(IndexError):
-        value = net(data, idx3, idx2)
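
Taken together, the two Python hunks show how the refactor touches the switch_layer test: the conv layer now keeps the channel count at 1, layers2 gets its own ReLU instance (relu1), and the expectation that an out-of-range index raises IndexError is dropped. The following is a minimal runnable sketch of the reconstructed test, assuming MindSpore in graph mode; the body of construct beyond the lines shown, the input shape, and the index values are assumptions, since the hunks only show fragments.

import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype

# Graph mode, where indexing a tuple of Cells by a Tensor compiles to switch_layer.
context.set_context(mode=context.GRAPH_MODE)


class CaseNet(nn.Cell):
    def __init__(self):
        super(CaseNet, self).__init__()
        self.conv = nn.Conv2d(1, 1, 3)
        self.relu = nn.ReLU()
        self.relu1 = nn.ReLU()
        self.softmax = nn.Softmax()
        self.layers1 = (self.relu, self.softmax)
        self.layers2 = (self.conv, self.relu1)

    def construct(self, x, index1, index2):
        x = self.layers1[index1](x)
        x = self.layers2[index2](x)  # assumed continuation; not shown in the hunk
        return x


net = CaseNet()
data = Tensor(np.ones((1, 1, 224, 224)), mstype.float32)  # hypothetical input shape
idx1 = Tensor(0, mstype.int32)  # selects self.relu from layers1
idx2 = Tensor(1, mstype.int32)  # selects self.relu1 from layers2
value = net(data, idx1, idx2)
# ReLU composed with ReLU is just ReLU, so the result should match the
# plain-ReLU reference used in the visible part of test_switch_layer().
relu = nn.ReLU()
true_value = relu(data)
assert np.allclose(value.asnumpy(), true_value.asnumpy())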
