alexnet_batch_normalization.py
Documentation for this file (autogenerated listing).
import chainer
import chainer.functions as F
import chainer.links as L
class AlexNetBatchNormalization(chainer.Chain):
    """AlexNet-style CNN with batch normalization after conv1 and conv2.

    Expects 3-channel input images; with the classic 227x227 AlexNet input
    size the flattened conv5 output is 9216 features, matching fc6.
    (Input size is not checked here — TODO confirm against callers.)

    Args:
        n_class (int): Number of output classes for the final fc8 layer.
            Defaults to 1000 (ImageNet).
    """

    def __init__(self, n_class=1000):
        super(AlexNetBatchNormalization, self).__init__(
            conv1=L.Convolution2D(3, 96, 11, stride=4),
            bn1=L.BatchNormalization(96),
            conv2=L.Convolution2D(96, 256, 5, pad=2),
            bn2=L.BatchNormalization(256),
            conv3=L.Convolution2D(256, 384, 3, pad=1),
            conv4=L.Convolution2D(384, 384, 3, pad=1),
            conv5=L.Convolution2D(384, 256, 3, pad=1),
            fc6=L.Linear(9216, 4096),
            fc7=L.Linear(4096, 4096),
            fc8=L.Linear(4096, n_class))

    def __call__(self, x, t=None):
        """Run a forward pass.

        Args:
            x: Input image batch variable/array, shape (N, 3, H, W).
            t: Ground-truth label batch. If None, only prediction is
                computed (inference mode) and None is returned.

        Returns:
            The softmax cross-entropy loss when ``t`` is given, else None.

        Side effects:
            Sets ``self.pred`` (softmax probabilities) always, and
            ``self.loss`` / ``self.accuracy`` when ``t`` is given.
        """
        h = self.bn1(self.conv1(x))
        h = F.max_pooling_2d(F.relu(h), 3, stride=2)
        h = self.bn2(self.conv2(h))
        h = F.max_pooling_2d(F.relu(h), 3, stride=2)
        h = F.relu(self.conv3(h))
        h = F.relu(self.conv4(h))
        h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
        h = F.dropout(F.relu(self.fc6(h)))
        h = F.dropout(F.relu(self.fc7(h)))
        h = self.fc8(h)

        # BUG FIX: original called F.softrmax (typo); the correct chainer
        # function is F.softmax, consistent with softmax_cross_entropy below.
        self.pred = F.softmax(h)
        if t is None:
            # Inference path: labels absent should only happen outside training.
            assert not chainer.config.train
            return

        self.loss = F.softmax_cross_entropy(h, t)
        self.accuracy = F.accuracy(h, t)
        return self.loss


jsk_recognition_utils
Author(s):
autogenerated on Sun Oct 8 2017 02:42:48