@@ -55,10 +55,10 @@ config_ascend_quant = ed({
 dataset_path = "/home/workspace/mindspore_dataset/cifar-10-batches-bin/"
 
 
-@pytest.mark.level1
+@pytest.mark.level0
 @pytest.mark.platform_arm_ascend_training
 @pytest.mark.platform_x86_ascend_training
-@pytest.mark.env_onecard
+@pytest.mark.env_single
 def test_mobilenetv2_quant():
     set_seed(1)
     context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -111,9 +111,12 @@ def test_mobilenetv2_quant():
                 dataset_sink_mode=False)
     print("============== End Training ==============")
 
+    export_time_used = 700
+    train_time = monitor.step_mseconds
+    print('train_time_used:{}'.format(train_time))
+    assert train_time < export_time_used
     expect_avg_step_loss = 2.32
     avg_step_loss = np.mean(np.array(monitor.losses))
     print("average step loss:{}".format(avg_step_loss))
     assert avg_step_loss < expect_avg_step_loss
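Note: the first hunk only retags the test (level1 -> level0, env_onecard -> env_single), so pytest collects it under a different marker set; the test body is unchanged there. The second hunk adds a per-step training-time budget next to the existing loss check: monitor.step_mseconds must stay below export_time_used (700 ms) and the mean of monitor.losses below 2.32. The Monitor callback itself is not part of this diff; below is a minimal sketch, assuming only MindSpore's standard Callback interface, of how step_mseconds and losses fields like the ones asserted on above could be collected. The class name StepMonitorSketch and the "slowest step" policy are illustrative assumptions, not the repo's implementation.

    # Minimal sketch (not the repo's Monitor): collect per-step time and loss.
    import time

    import numpy as np
    from mindspore.train.callback import Callback


    class StepMonitorSketch(Callback):
        """Records per-step wall time (ms) and per-step loss values."""

        def __init__(self):
            super(StepMonitorSketch, self).__init__()
            self.step_mseconds = 0.0  # assumption: slowest step seen so far
            self.losses = []

        def step_begin(self, run_context):
            self._step_start = time.time()

        def step_end(self, run_context):
            step_ms = (time.time() - self._step_start) * 1000.0
            self.step_mseconds = max(self.step_mseconds, step_ms)
            cb_params = run_context.original_args()
            # net_outputs is the loss Tensor when Model is built with loss_fn
            self.losses.append(float(np.mean(cb_params.net_outputs.asnumpy())))

A callback of this shape would be passed to model.train(..., callbacks=[monitor]), after which the test can read monitor.step_mseconds and monitor.losses exactly as the added assertions do.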