7 from __future__
import print_function
10 import itertools, pkg_resources, sys
11 from distutils.version
import LooseVersion
12 if LooseVersion(pkg_resources.get_distribution(
"chainer").version) >= LooseVersion(
'7.0.0')
and \
13 sys.version_info.major == 2:
14 print(
'''Please install chainer < 7.0.0: 16 sudo pip install chainer==6.7.0 18 c.f https://github.com/jsk-ros-pkg/jsk_recognition/pull/2485 21 if [p
for p
in list(itertools.chain(*[pkg_resources.find_distributions(_)
for _
in sys.path]))
if "cupy-" in p.project_name ] == []:
22 print(
'''Please install CuPy 24 sudo pip install cupy-cuda[your cuda version] 26 sudo pip install cupy-cuda91 31 import chainer.functions
as F
32 import chainer.links
as L
35 base_url =
'http://posefs1.perception.cs.cmu.edu/OpenPose/models/hand/' 37 'auto':
'pose_iter_102000.chainermodel',
# NOTE(review): the span below is the interior of HandNet.__init__ (its
# `def __init__(self, pretrained_model='auto')` signature appears displaced
# at the end of this chunk, and the enclosing class header is not in view).
# Every `self.<layer> = L.Convolution2D(` opener line is MISSING from this
# chunk -- only the keyword-argument lines survive -- so the layer names
# cannot be recovered here.  Restore them from the upstream file before
# editing.  The stray leading integers are original line numbers fused into
# the text by the same mangling.
45 with self.init_scope():
# VGG-style backbone, judging by the channel widths: 3 -> 64 -> 128 -> 256
# -> 512 with 3x3 kernels, narrowing back to 128 then a 1x1 head emitting
# 22 channels (presumably 21 hand keypoint heatmaps + background --
# TODO confirm against upstream).
47 in_channels=3, out_channels=64, ksize=3, stride=1, pad=1)
49 in_channels=64, out_channels=64, ksize=3, stride=1, pad=1)
51 in_channels=64, out_channels=128, ksize=3, stride=1, pad=1)
53 in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)
55 in_channels=128, out_channels=256, ksize=3, stride=1, pad=1)
57 in_channels=256, out_channels=256, ksize=3, stride=1, pad=1)
59 in_channels=256, out_channels=256, ksize=3, stride=1, pad=1)
61 in_channels=256, out_channels=256, ksize=3, stride=1, pad=1)
63 in_channels=256, out_channels=512, ksize=3, stride=1, pad=1)
65 in_channels=512, out_channels=512, ksize=3, stride=1, pad=1)
67 in_channels=512, out_channels=512, ksize=3, stride=1, pad=1)
69 in_channels=512, out_channels=512, ksize=3, stride=1, pad=1)
71 in_channels=512, out_channels=512, ksize=3, stride=1, pad=1)
73 in_channels=512, out_channels=512, ksize=3, stride=1, pad=1)
75 in_channels=512, out_channels=128, ksize=3, stride=1, pad=1)
78 in_channels=128, out_channels=512, ksize=1, stride=1, pad=0)
80 in_channels=512, out_channels=22, ksize=1, stride=1, pad=0)
# Five refinement stages of seven convolutions each.  Each stage's first
# layer takes 150 channels -- presumably the 128-channel feature map
# concatenated with the previous stage's 22-channel output (consistent with
# the F.concat calls visible later in this chunk) -- TODO confirm.
83 in_channels=150, out_channels=128, ksize=7, stride=1, pad=3)
85 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
87 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
89 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
91 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
93 in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
95 in_channels=128, out_channels=22, ksize=1, stride=1, pad=0)
98 in_channels=150, out_channels=128, ksize=7, stride=1, pad=3)
100 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
102 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
104 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
106 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
108 in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
110 in_channels=128, out_channels=22, ksize=1, stride=1, pad=0)
113 in_channels=150, out_channels=128, ksize=7, stride=1, pad=3)
115 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
117 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
119 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
121 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
123 in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
125 in_channels=128, out_channels=22, ksize=1, stride=1, pad=0)
128 in_channels=150, out_channels=128, ksize=7, stride=1, pad=3)
130 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
132 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
134 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
136 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
138 in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
140 in_channels=128, out_channels=22, ksize=1, stride=1, pad=0)
143 in_channels=150, out_channels=128, ksize=7, stride=1, pad=3)
145 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
147 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
149 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
151 in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
153 in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
155 in_channels=128, out_channels=22, ksize=1, stride=1, pad=0)
# Weight loading: a name in `models` resolves to a cached dataset path and
# is loaded via load_npz; any other non-None value is treated as a local
# file path.  None skips loading (used by _download_pretrained_model to get
# an uninitialized net).
157 if pretrained_model
in models.keys():
158 data_dir = chainer.dataset.get_dataset_directory(
'openpose/hand')
159 model_path = os.path.join(data_dir, models[pretrained_model])
# NOTE(review): a line is missing before makedirs -- presumably an
# `if not os.path.exists(...)` guard; confirm against upstream.
161 os.makedirs(os.path.dirname(model_path))
# NOTE(review): the arguments of cache_or_load_file are missing from
# this chunk (original lines 165-167).
164 chainer.dataset.cache_or_load_file(
168 chainer.serializers.load_npz(model_path, self)
169 elif pretrained_model
is not None:
170 if not os.path.exists(pretrained_model):
171 raise OSError(
'model does not exists: "%s"' % pretrained_model)
172 chainer.serializers.load_npz(pretrained_model, self)
# NOTE(review): fragments of what appears to be HandNet's forward pass
# (__call__); the method signature and most of its body are missing from
# this chunk.  Three 2x2 max-pools downsample the backbone features; each
# refinement stage concatenates the saved feature map with the previous
# stage's output along the channel axis (axis=1), consistent with the
# 150-channel stage inputs declared in __init__.  The stray leading
# integers are original line numbers fused in by the mangling.
179 h = F.max_pooling_2d(h, ksize=2, stride=2)
182 h = F.max_pooling_2d(h, ksize=2, stride=2)
187 h = F.max_pooling_2d(h, ksize=2, stride=2)
203 h = F.concat((h, feature_map), axis=1)
214 h = F.concat((h, feature_map), axis=1)
225 h = F.concat((h, feature_map), axis=1)
236 h = F.concat((h, feature_map), axis=1)
247 h = F.concat((h, feature_map), axis=1)
def _download_pretrained_model(model_type, dest_path):
    """Download the OpenPose hand caffemodel and convert it to chainer npz.

    Fetches ``<base_url>/<basename>.caffemodel`` for *model_type* (a key of
    the module-level ``models`` dict), copies every layer whose W/b shapes
    match into a fresh :class:`HandNet`, and saves the result with
    ``chainer.serializers.save_npz`` at *dest_path*.

    Raises:
        OSError: if *dest_path* already exists, or the cached caffemodel
            cannot be found after download.
    """
    # Imported lazily: CaffeFunction pulls in protobuf machinery that is
    # only needed for this one-off conversion.
    from chainer.links import caffe

    if os.path.exists(dest_path):
        raise OSError('destination already exists: %s' % dest_path)
    basename = os.path.splitext(models[model_type])[0]
    url = base_url + basename + '.caffemodel'
    caffe_model_path = chainer.dataset.cached_download(url)
    if not os.path.exists(caffe_model_path):
        raise OSError('caffe model does not exist: %s' % caffe_model_path)
    print('Converting to chainer model')
    caffe_model = caffe.CaffeFunction(caffe_model_path)
    chainer_model = HandNet(pretrained_model=None)
    for link in chainer_model.links():
        # links() also yields unnamed links / the chain itself; skip those.
        # NOTE(review): the loop-body line after this guard was missing in
        # the reviewed chunk; `continue` is the only reading that makes the
        # guard meaningful.
        if not isinstance(link, chainer.Link) or not link.name:
            continue
        # Original code built `chainer_model.<name>` / `caffe_model["<name>"]`
        # expressions via eval/exec; getattr/indexing is equivalent and
        # avoids dynamic code execution.
        dst = getattr(chainer_model, link.name)
        src = caffe_model[link.name]
        if dst.b.shape == src.b.shape and dst.W.shape == src.W.shape:
            dst.W.data = src.W.data
            dst.b.data = src.b.data
            print('Copied layer {0}'.format(link.name))
        else:
            # NOTE(review): the `else:` line was missing in the reviewed
            # chunk but is forced by the 'Failed to copy' branch.
            print('Failed to copy layer {0}'.format(link.name))
    chainer.serializers.save_npz(dest_path, chainer_model)
# NOTE(review): displaced signature line belonging to HandNet.__init__
# (its body appears earlier in this chunk, starting at the
# `with self.init_scope():` line); the trailing colon was lost in the
# mangling.  Rejoin with its body when restoring the file.
def __init__(self, pretrained_model='auto')