add hub for densenet121 and inceptionv3

pull/6620/head
zhouyaqiang 5 years ago
parent d60033c8db
commit 0497092cf8

@@ -0,0 +1,21 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""hub config."""
from src.network import DenseNet121
def create_network(name, *args, **kwargs):
    if name == 'densenet121':
        return DenseNet121(*args, **kwargs)
    raise NotImplementedError(f"{name} is not implemented in the repo")
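
The file above is the hub entry point for this model; by MindSpore Hub convention it would live as mindspore_hub_conf.py in the model's root directory (the filename is not shown in this diff, so treat it as an assumption). A minimal sketch of calling it, with an illustrative class count:

# Hypothetical usage of the hub entry point added above; the module name
# mindspore_hub_conf follows the usual convention and is an assumption here.
from mindspore_hub_conf import create_network

net = create_network('densenet121', num_classes=1000)                           # full classifier
backbone = create_network('densenet121', num_classes=1000, include_top=False)   # backbone only, see the change below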

@@ -205,11 +205,13 @@ class DenseNet121(nn.Cell):
     """
     the densenet121 architectur
     """
-    def __init__(self, num_classes):
+    def __init__(self, num_classes, include_top=True):
         super(DenseNet121, self).__init__()
         self.backbone = _densenet121()
         out_channels = self.backbone.get_out_channels()
-        self.head = CommonHead(num_classes, out_channels)
+        self.include_top = include_top
+        if self.include_top:
+            self.head = CommonHead(num_classes, out_channels)
         default_recurisive_init(self)
         for _, cell in self.cells_and_names():
@@ -226,5 +228,7 @@ class DenseNet121(nn.Cell):
     def construct(self, x):
         x = self.backbone(x)
+        if not self.include_top:
+            return x
         x = self.head(x)
         return x
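
A minimal usage sketch of the new include_top switch, assuming DenseNet121 is importable from src.network as in the hub config above; the input shape and class count are illustrative only:

import numpy as np
import mindspore as ms
from src.network import DenseNet121

# Per the change above, include_top=False makes construct() return the
# backbone features and skip CommonHead entirely.
net = DenseNet121(num_classes=1000, include_top=False)
x = ms.Tensor(np.random.randn(1, 3, 224, 224).astype(np.float32))
features = net(x)  # backbone output, no classification head applied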

@@ -0,0 +1,21 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""hub config."""
from src.inception_v3 import InceptionV3
def create_network(name, *args, **kwargs):
    if name == 'inceptionv3':
        return InceptionV3(*args, **kwargs)
    raise NotImplementedError(f"{name} is not implemented in the repo")
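
As with DenseNet121, a sketch of how this entry point might be called, again assuming the conventional mindspore_hub_conf.py module name; keyword arguments are forwarded unchanged to the InceptionV3 constructor:

# Hypothetical call into the inceptionv3 hub entry point defined above.
from mindspore_hub_conf import create_network

net = create_network('inceptionv3', num_classes=1000, is_training=False)
feature_net = create_network('inceptionv3', num_classes=1000, is_training=False, include_top=False)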

@@ -203,7 +203,7 @@ class AuxLogits(nn.Cell):
 class InceptionV3(nn.Cell):
-    def __init__(self, num_classes=10, is_training=True, has_bias=False, dropout_keep_prob=0.8):
+    def __init__(self, num_classes=10, is_training=True, has_bias=False, dropout_keep_prob=0.8, include_top=True):
         super(InceptionV3, self).__init__()
         self.is_training = is_training
         self.Conv2d_1a = BasicConv2d(3, 32, kernel_size=3, stride=2, pad_mode='valid', has_bias=has_bias)
@@ -226,7 +226,9 @@ class InceptionV3(nn.Cell):
         self.Mixed_7c = Inception_E(2048, has_bias=has_bias)
         if is_training:
             self.aux_logits = AuxLogits(768, num_classes)
-        self.logits = Logits(num_classes, dropout_keep_prob)
+        self.include_top = include_top
+        if self.include_top:
+            self.logits = Logits(num_classes, dropout_keep_prob)

     def construct(self, x):
         x = self.Conv2d_1a(x)
@@ -251,6 +253,8 @@ class InceptionV3(nn.Cell):
         x = self.Mixed_7a(x)
         x = self.Mixed_7b(x)
         x = self.Mixed_7c(x)
+        if not self.include_top:
+            return x
         logits = self.logits(x)
         if self.is_training:
             return logits, aux_logits
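
A sketch of the new include_top behaviour in eval mode, assuming src.inception_v3 is importable and using the customary 299x299 InceptionV3 input size; with include_top=False, construct() returns the Mixed_7c feature map and never reaches self.logits:

import numpy as np
import mindspore as ms
from src.inception_v3 import InceptionV3

# is_training=False avoids the auxiliary head; include_top=False returns
# features before the Logits layer, per the change above.
net = InceptionV3(num_classes=1000, is_training=False, include_top=False)
x = ms.Tensor(np.random.randn(1, 3, 299, 299).astype(np.float32))
features = net(x)  # Mixed_7c output, logits skipped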
