Synchronize deeplabv3 in r1.0 with master.

Branch: pull/6936/head
Author: jzg (5 years ago)
Parent: 6be7aae403
Commit: ec2f958a2c

@@ -15,6 +15,7 @@
- [Model Description](#model-description)
- [Performance](#performance)
- [Evaluation Performance](#evaluation-performance)
- [Description of Random Situation](#description-of-random-situation)
- [ModelZoo Homepage](#modelzoo-homepage)
@@ -141,7 +142,7 @@ run_eval_s8_multiscale_flip.sh
├── run_standalone_train.sh              # launch ascend standalone training (1 pc)
├── src
│   ├── data
│   │   ├── data_generator.py            # mindrecord data generator
│   │   ├── dataset.py                   # mindrecord data generator
│   │   ├── build_seg_data.py            # data preprocessing
│   ├── loss
│   │   ├── loss.py                      # loss definition for deeplabv3
@@ -412,10 +413,9 @@ Note: Here OS is output stride, and MS is multiscale.
| Checkpoint for Fine tuning | 443M (.ckpt file) |
| Scripts | [Link](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/cv/deeplabv3) |
# [Description of Random Situation](#contents)
In dataset.py, we set the seed inside the "create_dataset" function. A random seed is also set in train.py.
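As a minimal sketch of what this amounts to in this commit (the seed value 1 is taken from the train.py change further down), the global seed is fixed once at start-up:

```python
from mindspore.common import set_seed

set_seed(1)  # fix MindSpore's global random seed so repeated runs are reproducible
```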
# [ModelZoo Homepage](#contents)
Please check the official [homepage](https://gitee.com/mindspore/mindspore/tree/master/model_zoo).

@@ -0,0 +1,45 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""export AIR file."""
import argparse
import numpy as np
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindspore.train.serialization import export
from src.nets import net_factory
context.set_context(mode=context.GRAPH_MODE, save_graphs=False)

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='checkpoint export')
    parser.add_argument('--checkpoint', type=str, default='', help='checkpoint of deeplabv3 (Default: None)')
    parser.add_argument('--model', type=str.lower, default='deeplab_v3_s8', choices=['deeplab_v3_s16', 'deeplab_v3_s8'],
                        help='Select model structure (Default: deeplab_v3_s8)')
    parser.add_argument('--num_classes', type=int, default=21, help='the number of classes (Default: 21)')
    args = parser.parse_args()

    # build the eval network with the requested output stride (16 or 8) and frozen batch norm
    if args.model == 'deeplab_v3_s16':
        network = net_factory.nets_map['deeplab_v3_s16']('eval', args.num_classes, 16, True)
    else:
        network = net_factory.nets_map['deeplab_v3_s8']('eval', args.num_classes, 8, True)

    # load the checkpoint parameters into the net
    param_dict = load_checkpoint(args.checkpoint)
    load_param_into_net(network, param_dict)

    # dummy NCHW input (batch 32, 3 x 513 x 513) used to trace the graph and export it as AIR
    input_data = np.random.uniform(0.0, 1.0, size=[32, 3, 513, 513]).astype(np.float32)
    export(network, Tensor(input_data), file_name=args.model + '-300_11.air', file_format='AIR')
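The script hard-codes a batch of 32 random 513 x 513 inputs and the '-300_11.air' name suffix. As a sketch only, reusing the imports and the `network` built above (the batch size 1 and output name below are illustrative, not part of this script), the same export call accepts any input shape and file name:

```python
# illustrative: export the already-loaded network with batch size 1 instead of 32
single_input = np.zeros([1, 3, 513, 513], dtype=np.float32)
export(network, Tensor(single_input), file_name='deeplab_v3_s8.air', file_format='AIR')
```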

@@ -0,0 +1,27 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""hub config."""
from src.nets import net_factory

def create_network(name, *args, **kwargs):
    """Return a DeepLabV3 eval network for MindSpore Hub, selected by name."""
    freeze_bn = True
    num_classes = kwargs["num_classes"]
    if name == 'deeplab_v3_s16':
        deeplab_v3_s16_network = net_factory.nets_map["deeplab_v3_s16"]('eval', num_classes, 16, freeze_bn)
        return deeplab_v3_s16_network
    if name == 'deeplab_v3_s8':
        deeplab_v3_s8_network = net_factory.nets_map["deeplab_v3_s8"]('eval', num_classes, 8, freeze_bn)
        return deeplab_v3_s8_network
    raise NotImplementedError(f"{name} is not implemented in the repo")
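For reference, a minimal usage sketch with the factory above in scope; num_classes=21 mirrors the default in export.py and should be adjusted for other datasets:

```python
# illustrative: build the output-stride-8 eval network for 21 classes
network = create_network('deeplab_v3_s8', num_classes=21)
```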

@@ -24,10 +24,13 @@ from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindspore.communication.management import init, get_rank, get_group_size
from mindspore.train.callback import LossMonitor, TimeMonitor
from mindspore.train.loss_scale_manager import FixedLossScaleManager
from src.data import data_generator
from mindspore.common import set_seed
from src.data import dataset as data_generator
from src.loss import loss
from src.nets import net_factory
from src.utils import learning_rates
set_seed(1)
context.set_context(mode=context.GRAPH_MODE, enable_auto_mixed_precision=True, save_graphs=False,
device_target="Ascend", device_id=int(os.getenv('DEVICE_ID')))
