Add relu layer for lenet (#24874)

* add relu for lenet, test=develop

* fix test model, test=develop
fix-sync_batch_norm-hang-in-fleet
LielinJiang 5 years ago committed by GitHub
parent 3b28629ee8
commit a01113c338
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@@ -23,8 +23,7 @@ import shutil
import tempfile
from paddle import fluid
from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear
from paddle.fluid.dygraph.container import Sequential
from paddle.nn import Conv2D, Pool2D, Linear, ReLU, Sequential
from paddle.fluid.dygraph.base import to_variable
from paddle.incubate.hapi.model import Model, Input, set_device
@@ -42,9 +41,11 @@ class LeNetDygraph(fluid.dygraph.Layer):
self.features = Sequential(
Conv2D(
1, 6, 3, stride=1, padding=1),
ReLU(),
Pool2D(2, 'max', 2),
Conv2D(
6, 16, 5, stride=1, padding=0),
ReLU(),
Pool2D(2, 'max', 2))
if num_classes > 0:

@@ -13,8 +13,7 @@
#limitations under the License.
import paddle.fluid as fluid
from paddle.fluid.dygraph.nn import Conv2D, BatchNorm, Pool2D, Linear
from paddle.fluid.dygraph.container import Sequential
from paddle.nn import Conv2D, Pool2D, Linear, ReLU, Sequential
from ...model import Model
@@ -44,9 +43,11 @@ class LeNet(Model):
self.features = Sequential(
Conv2D(
1, 6, 3, stride=1, padding=1),
ReLU(),
Pool2D(2, 'max', 2),
Conv2D(
6, 16, 5, stride=1, padding=0),
ReLU(),
Pool2D(2, 'max', 2))
if num_classes > 0:

Loading…
Cancel
Save