!12984 Fix summary ut
From: @ouwenchang Reviewed-by: @yelihua, @lixiaohui33 Signed-off-by: @lixiaohui33
commit 480fda8654
@@ -1,125 +0,0 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test_graph_summary """
import logging
import os
import numpy as np

import mindspore.nn as nn
from mindspore import Model, context
from mindspore.nn.optim import Momentum
from mindspore.train.summary import SummaryRecord
from mindspore.train.callback import SummaryCollector
from .....dataset_mock import MindData

CUR_DIR = os.getcwd()
SUMMARY_DIR = CUR_DIR + "/test_temp_summary_event_file/"
GRAPH_TEMP = CUR_DIR + "/ms_output-resnet50.pb"

log = logging.getLogger("test")
log.setLevel(level=logging.ERROR)


class Net(nn.Cell):
    """ Net definition """

    def __init__(self):
        super(Net, self).__init__()
        self.conv = nn.Conv2d(3, 64, 3, has_bias=False, weight_init='normal', pad_mode='valid')
        self.bn = nn.BatchNorm2d(64)
        self.relu = nn.ReLU()
        self.flatten = nn.Flatten()
        self.fc = nn.Dense(64 * 222 * 222, 3)  # padding=0

    def construct(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.relu(x)
        x = self.flatten(x)
        out = self.fc(x)
        return out


class LossNet(nn.Cell):
    """ LossNet definition """

    def __init__(self):
        super(LossNet, self).__init__()
        self.conv = nn.Conv2d(3, 64, 3, has_bias=False, weight_init='normal', pad_mode='valid')
        self.bn = nn.BatchNorm2d(64)
        self.relu = nn.ReLU()
        self.flatten = nn.Flatten()
        self.fc = nn.Dense(64 * 222 * 222, 3)  # padding=0
        self.loss = nn.SoftmaxCrossEntropyWithLogits()

    def construct(self, x, y):
        x = self.conv(x)
        x = self.bn(x)
        x = self.relu(x)
        x = self.flatten(x)
        x = self.fc(x)
        out = self.loss(x, y)
        return out


def get_model():
    """ get_model """
    net = Net()
    loss = nn.SoftmaxCrossEntropyWithLogits()
    optim = Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
    model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
    return model


def get_dataset():
    """ get_dataset """
    dataset_types = (np.float32, np.float32)
    dataset_shapes = ((2, 3, 224, 224), (2, 3))

    dataset = MindData(size=2, batch_size=2,
                       np_types=dataset_types,
                       output_shapes=dataset_shapes,
                       input_indexs=(0, 1))
    return dataset


# Test 1: summary sample of graph
def test_graph_summary_sample():
    """ test_graph_summary_sample """
    log.debug("begin test_graph_summary_sample")
    dataset = get_dataset()
    net = Net()
    loss = nn.SoftmaxCrossEntropyWithLogits()
    optim = Momentum(net.trainable_params(), 0.1, 0.9)
    context.set_context(mode=context.GRAPH_MODE)
    model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_GRAPH", network=model._train_network) as test_writer:
        model.train(2, dataset)
        for i in range(1, 5):
            test_writer.record(i)


def test_graph_summary_callback():
    """ test_graph_summary_callback """
    dataset = get_dataset()
    net = Net()
    loss = nn.SoftmaxCrossEntropyWithLogits()
    optim = Momentum(net.trainable_params(), 0.1, 0.9)
    context.set_context(mode=context.GRAPH_MODE)
    model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
    summary_collector = SummaryCollector(SUMMARY_DIR,
                                         collect_freq=1,
                                         keep_default_action=False,
                                         collect_specified_data={'collect_graph': True})
    model.train(1, dataset, callbacks=[summary_collector])
@@ -1,140 +0,0 @@
# Copyright 2020-2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Test summary."""
import os
import random

import numpy as np

import mindspore.nn as nn
from mindspore.common.tensor import Tensor
from mindspore.ops import operations as P
from mindspore.train.summary.summary_record import SummaryRecord, _cache_summary_tensor_data

CUR_DIR = os.getcwd()
SUMMARY_DIR = CUR_DIR + "/test_temp_summary_event_file/"


def get_test_data(step):
    """ get_test_data """
    test_data_list = []
    tag1 = "x1[:Scalar]"
    tag2 = "x2[:Scalar]"
    np1 = np.array(step + 1).astype(np.float32)
    np2 = np.array(step + 2).astype(np.float32)

    dict1 = {}
    dict1["name"] = tag1
    dict1["data"] = Tensor(np1)

    dict2 = {}
    dict2["name"] = tag2
    dict2["data"] = Tensor(np2)

    test_data_list.append(dict1)
    test_data_list.append(dict2)

    return test_data_list


def test_scalar_summary_sample():
    """ test_scalar_summary_sample """
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_SCALAR") as test_writer:
        for i in range(1, 5):
            test_data = get_test_data(i)
            _cache_summary_tensor_data(test_data)
            test_writer.record(i)


def get_test_data_shape_1(step):
    """ get_test_data_shape_1 """
    test_data_list = []
    tag1 = "x1[:Scalar]"
    tag2 = "x2[:Scalar]"
    np1 = np.array([step + 1]).astype(np.float32)
    np2 = np.array([step + 2]).astype(np.float32)

    dict1 = {}
    dict1["name"] = tag1
    dict1["data"] = Tensor(np1)

    dict2 = {}
    dict2["name"] = tag2
    dict2["data"] = Tensor(np2)

    test_data_list.append(dict1)
    test_data_list.append(dict2)

    return test_data_list


# Test: shape = (1,)
def test_scalar_summary_sample_with_shape_1():
    """ test_scalar_summary_sample_with_shape_1 """
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_SCALAR") as test_writer:
        for i in range(1, 100):
            test_data = get_test_data_shape_1(i)
            _cache_summary_tensor_data(test_data)
            test_writer.record(i)


# Test: test with ge
class SummaryDemo(nn.Cell):
    """ SummaryDemo definition """

    def __init__(self,):
        super(SummaryDemo, self).__init__()
        self.s = P.ScalarSummary()
        self.histogram_summary = P.HistogramSummary()
        self.add = P.Add()

    def construct(self, x, y):
        self.s("x1", x)
        z = self.add(x, y)
        self.s("z1", z)
        self.s("y1", y)
        self.histogram_summary("histogram", z)
        return z


def test_scalar_summary_with_ge():
    """ test_scalar_summary_with_ge """
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_SCALAR") as test_writer:
        net = SummaryDemo()
        net.set_train()

        # step 2: create the Event
        steps = 100
        for i in range(1, steps):
            x = Tensor(np.array([1.1 + random.uniform(1, 10)]).astype(np.float32))
            y = Tensor(np.array([1.2 + random.uniform(1, 10)]).astype(np.float32))
            net(x, y)
            test_writer.record(i)


# Test that two consecutive test cases do not interfere with each other
def test_scalar_summary_with_ge_2():
    """ test_scalar_summary_with_ge_2 """
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_SCALAR") as test_writer:
        net = SummaryDemo()
        net.set_train()

        steps = 100
        for i in range(1, steps):
            x = Tensor(np.array([1.1]).astype(np.float32))
            y = Tensor(np.array([1.2]).astype(np.float32))
            net(x, y)
            test_writer.record(i)
@@ -1,145 +0,0 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
@File  : test_tensor_summary.py
@Author:
@Date  : 2019-07-4
@Desc  : test summary function
"""
import logging
import os
import numpy as np

import mindspore.nn as nn
from mindspore.common.tensor import Tensor
from mindspore.ops import operations as P
from mindspore.train.summary.summary_record import SummaryRecord, _cache_summary_tensor_data

CUR_DIR = os.getcwd()
SUMMARY_DIR = CUR_DIR + "/test_temp_summary_event_file/"

log = logging.getLogger("test")
log.setLevel(level=logging.ERROR)


def get_test_data(step):
    """ get_test_data """
    test_data_list = []

    dict_x1 = {}
    dict_x1["name"] = "x1[:Tensor]"
    dict_x1["data"] = Tensor(np.array([[1, 2, step + 1], [2, 3, 4]]).astype(np.int8))
    test_data_list.append(dict_x1)
    dict_x2 = {}
    dict_x2["name"] = "x2[:Tensor]"
    dict_x2["data"] = Tensor(np.array([[1, 2, step + 2], [2, 3, 4]]).astype(np.int16))
    test_data_list.append(dict_x2)
    dict_x3 = {}
    dict_x3["name"] = "x3[:Tensor]"
    dict_x3["data"] = Tensor(np.array([[1, 2, step + 1], [2, 3, 4]]).astype(np.int32))
    test_data_list.append(dict_x3)
    dict_x4 = {}
    dict_x4["name"] = "x4[:Tensor]"
    dict_x4["data"] = Tensor(np.array([[1, 2, step + 1], [2, 3, 4]]).astype(np.int64))
    test_data_list.append(dict_x4)
    dict_x5 = {}
    dict_x5["name"] = "x5[:Tensor]"
    dict_x5["data"] = Tensor(np.array([[1, 2, step + 1], [2, 3, 4]]).astype(np.float))
    test_data_list.append(dict_x5)
    dict_x6 = {}
    dict_x6["name"] = "x6[:Tensor]"
    dict_x6["data"] = Tensor(np.array([[1, 2, step + 1], [2, 3, 4]]).astype(np.float16))
    test_data_list.append(dict_x6)
    dict_x7 = {}
    dict_x7["name"] = "x7[:Tensor]"
    dict_x7["data"] = Tensor(np.array([[1, 2, step + 1], [2, 3, 4]]).astype(np.float32))
    test_data_list.append(dict_x7)
    dict_x8 = {}
    dict_x8["name"] = "x8[:Tensor]"
    dict_x8["data"] = Tensor(np.array([[1, 2, step + 1], [2, 3, 4]]).astype(np.float64))
    test_data_list.append(dict_x8)

    return test_data_list


# Test: call method on parse graph code
def test_tensor_summary_sample():
    """ test_tensor_summary_sample """
    log.debug("begin test_tensor_summary_sample")
    # step 0: create the thread
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_TENSOR") as test_writer:
        # step 1: create the Event
        for i in range(1, 100):
            test_data = get_test_data(i)

            _cache_summary_tensor_data(test_data)
            test_writer.record(i)

        # step 2: accept the event and write the file

    log.debug("finished test_tensor_summary_sample")


def get_test_data_check(step):
    """ get_test_data_check """
    test_data_list = []
    tag1 = "x1[:Tensor]"
    np1 = np.array([[step, step, step], [2, 3, 4]]).astype(np.float32)

    dict1 = {}
    dict1["name"] = tag1
    dict1["data"] = Tensor(np1)
    test_data_list.append(dict1)

    return test_data_list


# Test: test with ge
class SummaryDemo(nn.Cell):
    """ SummaryDemo definition """

    def __init__(self,):
        super(SummaryDemo, self).__init__()
        self.s = P.TensorSummary()
        self.add = P.Add()

    def construct(self, x, y):
        self.s("x1", x)
        z = self.add(x, y)
        self.s("z1", z)
        self.s("y1", y)
        return z


def test_tensor_summary_with_ge():
    """ test_tensor_summary_with_ge """
    log.debug("begin test_tensor_summary_with_ge")

    # step 0: create the thread
    with SummaryRecord(SUMMARY_DIR) as test_writer:
        # step 1: create the network for summary
        x = Tensor(np.array([1.1]).astype(np.float32))
        y = Tensor(np.array([1.2]).astype(np.float32))
        net = SummaryDemo()
        net.set_train()

        # step 2: create the Event
        steps = 100
        for i in range(1, steps):
            x = Tensor(np.array([[i], [i]]).astype(np.float32))
            y = Tensor(np.array([[i + 1], [i + 1]]).astype(np.float32))
            net(x, y)
            test_writer.record(i)

    log.debug("finished test_tensor_summary_with_ge")