import chainer
import chainer.functions as F
import chainer.links as L


class AlexNetBatchNormalization(chainer.Chain):

    """AlexNet with batch normalization after the first two convolutions."""

    def __init__(self, n_class=1000):
        super(AlexNetBatchNormalization, self).__init__(
            conv1=L.Convolution2D(3, 96, 11, stride=4),
            bn1=L.BatchNormalization(96),
            conv2=L.Convolution2D(96, 256, 5, pad=2),
            bn2=L.BatchNormalization(256),
            conv3=L.Convolution2D(256, 384, 3, pad=1),
            conv4=L.Convolution2D(384, 384, 3, pad=1),
            conv5=L.Convolution2D(384, 256, 3, pad=1),
            fc6=L.Linear(9216, 4096),
            fc7=L.Linear(4096, 4096),
            fc8=L.Linear(4096, n_class))

    def __call__(self, x, t=None):
        # Convolution -> batch normalization -> ReLU -> max pooling for the
        # first two blocks; plain ReLU convolutions for the remaining three.
        h = self.bn1(self.conv1(x))
        h = F.max_pooling_2d(F.relu(h), 3, stride=2)
        h = self.bn2(self.conv2(h))
        h = F.max_pooling_2d(F.relu(h), 3, stride=2)
        h = F.relu(self.conv3(h))
        h = F.relu(self.conv4(h))
        h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
        # Fully connected classifier head with dropout.
        h = F.dropout(F.relu(self.fc6(h)))
        h = F.dropout(F.relu(self.fc7(h)))
        h = self.fc8(h)

        if t is None:
            # No labels given: this only makes sense in test mode, so return
            # the raw class scores.
            assert not chainer.config.train
            return h

        self.loss = F.softmax_cross_entropy(h, t)
        return self.loss
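
# A minimal usage sketch (not part of the original listing). It assumes the
# standard 227x227 AlexNet input size, which is what makes the flattened
# conv5 output match the 9216 units expected by fc6. Without labels the model
# returns the raw fc8 scores; with labels it returns the softmax cross-entropy
# loss.
if __name__ == '__main__':
    import numpy as np

    model = AlexNetBatchNormalization(n_class=1000)
    x = np.zeros((1, 3, 227, 227), dtype=np.float32)  # dummy batch of one image

    # Test mode: dropout is disabled and batch normalization uses its
    # accumulated statistics.
    with chainer.using_config('train', False):
        y = model(x)
    print(y.shape)  # (1, 1000) class scores

    # Training mode: pass labels to get the loss instead of the scores.
    t = np.zeros((1,), dtype=np.int32)
    with chainer.using_config('train', True):
        loss = model(x, t)
    print(loss.data)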