@@ -19,6 +19,7 @@ from __future__ import print_function
 
 import paddle
 from paddle import ParamAttr
 import paddle.nn as nn
+import paddle.nn.functional as F
 
 __all__ = ["ResNet"]
@@ -37,9 +38,9 @@ class ConvBNLayer(nn.Layer):
         super(ConvBNLayer, self).__init__()
 
         self.is_vd_mode = is_vd_mode
-        self._pool2d_avg = nn.AvgPool2d(
+        self._pool2d_avg = nn.AvgPool2D(
             kernel_size=stride, stride=stride, padding=0, ceil_mode=True)
-        self._conv = nn.Conv2d(
+        self._conv = nn.Conv2D(
             in_channels=in_channels,
             out_channels=out_channels,
             kernel_size=kernel_size,
@@ -119,7 +120,8 @@ class BottleneckBlock(nn.Layer):
             short = inputs
         else:
             short = self.short(inputs)
-        y = paddle.elementwise_add(x=short, y=conv2, act='relu')
+        y = paddle.add(x=short, y=conv2)
+        y = F.relu(y)
         return y
 
 
@@ -166,7 +168,8 @@ class BasicBlock(nn.Layer):
             short = inputs
         else:
             short = self.short(inputs)
-        y = paddle.elementwise_add(x=short, y=conv1, act='relu')
+        y = paddle.add(x=short, y=conv1)
+        y = F.relu(y)
         return y
 
 
@@ -215,7 +218,7 @@ class ResNet(nn.Layer):
             stride=1,
             act='relu',
             name="conv1_3")
-        self.pool2d_max = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
+        self.pool2d_max = nn.MaxPool2D(kernel_size=3, stride=2, padding=1)
 
         self.block_list = []
         if layers >= 50:
@@ -270,7 +273,7 @@ class ResNet(nn.Layer):
                     shortcut = True
                     self.block_list.append(basic_block)
                 self.out_channels = num_filters[block]
-        self.out_pool = nn.MaxPool2d(kernel_size=2, stride=2, padding=0)
+        self.out_pool = nn.MaxPool2D(kernel_size=2, stride=2, padding=0)
 
     def forward(self, inputs):
         y = self.conv1_1(inputs)
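For reference outside the patch: a minimal, standalone sketch of the API migration these hunks apply, assuming Paddle 2.0 or later. The lowercase-d layer names (nn.Conv2d, nn.AvgPool2d, nn.MaxPool2d) are renamed to their capital-D counterparts, and paddle.elementwise_add(..., act='relu') is split into paddle.add followed by F.relu. The tensor shape and layer parameters below are arbitrary placeholders, not values taken from this model.

import paddle
import paddle.nn as nn
import paddle.nn.functional as F

# Arbitrary input, only to exercise the renamed APIs.
x = paddle.rand([1, 64, 32, 32])

conv = nn.Conv2D(in_channels=64, out_channels=64, kernel_size=3, padding=1)  # was nn.Conv2d
pool = nn.MaxPool2D(kernel_size=2, stride=2, padding=0)                      # was nn.MaxPool2d

short = x
y = conv(x)
# Old API: y = paddle.elementwise_add(x=short, y=y, act='relu')
y = paddle.add(x=short, y=y)  # the fused activation argument is gone
y = F.relu(y)
y = pool(y)
print(y.shape)  # [1, 64, 16, 16]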