alexnet_batch_normalization.py
Go to the documentation of this file.
1 import chainer
2 import chainer.functions as F
3 import chainer.links as L
4 
5 
class AlexNetBatchNormalization(chainer.Chain):
    """AlexNet classifier with batch normalization after conv1 and conv2.

    Standard AlexNet topology (5 conv layers + 3 fully-connected layers),
    except that local response normalization is replaced by
    ``BatchNormalization`` on the first two convolution outputs.
    Expects 3-channel input images; fc6's input size of 9216
    (= 256 * 6 * 6) implies the usual 227x227 input resolution —
    TODO confirm against the training pipeline.
    """

    def __init__(self, n_class=1000):
        """Build the network.

        Args:
            n_class (int): number of output classes for the final
                fully-connected layer (default 1000, i.e. ImageNet).
        """
        super(AlexNetBatchNormalization, self).__init__(
            conv1=L.Convolution2D(3, 96, 11, stride=4),
            bn1=L.BatchNormalization(96),
            conv2=L.Convolution2D(96, 256, 5, pad=2),
            bn2=L.BatchNormalization(256),
            conv3=L.Convolution2D(256, 384, 3, pad=1),
            conv4=L.Convolution2D(384, 384, 3, pad=1),
            conv5=L.Convolution2D(384, 256, 3, pad=1),
            fc6=L.Linear(9216, 4096),
            fc7=L.Linear(4096, 4096),
            fc8=L.Linear(4096, n_class))

    def __call__(self, x, t=None):
        """Forward pass.

        Args:
            x: input image batch variable/array, shape (N, 3, H, W).
            t: optional integer label batch. When ``None`` the model runs
               in inference mode (asserts ``chainer.config.train`` is
               False), stores softmax probabilities in ``self.pred`` and
               returns ``None``.

        Returns:
            Softmax cross-entropy loss variable when ``t`` is given,
            otherwise ``None``. Side effects: sets ``self.pred`` always,
            and ``self.loss`` / ``self.accuracy`` when ``t`` is given.
        """
        h = self.bn1(self.conv1(x))
        h = F.max_pooling_2d(F.relu(h), 3, stride=2)
        h = self.bn2(self.conv2(h))
        h = F.max_pooling_2d(F.relu(h), 3, stride=2)
        h = F.relu(self.conv3(h))
        h = F.relu(self.conv4(h))
        h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
        h = F.dropout(F.relu(self.fc6(h)))
        h = F.dropout(F.relu(self.fc7(h)))
        h = self.fc8(h)

        # Fix: original called F.softrmax, which does not exist in
        # chainer.functions and raised AttributeError on every forward.
        self.pred = F.softmax(h)
        if t is None:
            # Inference path: batch-norm/dropout must be in test mode.
            assert not chainer.config.train
            return

        self.loss = F.softmax_cross_entropy(h, t)
        self.accuracy = F.accuracy(h, t)
        return self.loss


jsk_recognition_utils
Author(s):
autogenerated on Mon May 3 2021 03:03:03