1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33 from __future__ import print_function
34
35 import collections
36 import copy
37 try:
38 from cStringIO import StringIO
39 except ImportError:
40 from io import BytesIO as StringIO
41 import inspect
42 import itertools
43 import os
44 import string
45 import sys
46 import traceback
47
48 import genmsg.msgs
49 import genpy
50 import genpy.dynamic
51
52 import rospkg
53
54 import rosbag
55
56
57
58
def checkbag(migrator, inbag):
    """
    Check whether a bag file can be played in the current system.
    @param migrator: message migrator to use
    @param inbag name of the bag to be checked.
    @returns A list of tuples for each type in the bag file.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    checked = set()
    migrations = []

    bag = rosbag.Bag(inbag, 'r')
    try:
        for topic, msg, t in bag.read_messages(raw=True):
            # msg is a raw tuple; msg[4] is the message class.
            key = get_message_key(msg[4])
            if key in checked:
                continue
            target = migrator.find_target(msg[4])
            # Empty path means the type needs no migration; otherwise
            # record the path together with its invalid rules.
            path = migrator.find_path(msg[4], target)
            if path:
                migrations.append((path, [r for r in migrator.expand_rules([sn.rule for sn in path]) if not r.valid]))
            checked.add(key)
    finally:
        # Close the bag even if reading or path lookup raises (was leaked).
        bag.close()

    return migrations
92
94 """
95 Check whether a bag file can be played in the current system.
96 @param migrator The message migrator to use
97 @param message_list A list of message classes.
98 @returns A list of tuples for each type in the bag file. The first
99 element of each tuple is the full migration path for the type. The
100 second element of the tuple is the expanded list of invalid rules
101 for that particular path.
102 """
103
104 checked = set()
105 migrations = []
106
107 for msg in messages:
108 key = get_message_key(msg)
109 if key not in checked:
110 target = migrator.find_target(msg)
111
112
113 path = migrator.find_path(msg, target)
114 if len(path) > 0:
115 migrations.append((path, [r for r in migrator.expand_rules([sn.rule for sn in path]) if r.valid == False]))
116
117 checked.add(key)
118
119 return migrations
120
def _migrate_connection_header(conn_header, new_msg_type):
    """Rewrite a connection header in place to describe new_msg_type.

    Overwrites the type name, md5sum and full message definition, and
    returns the same (mutated) header dict.
    """
    conn_header.update(
        type=new_msg_type._type,
        md5sum=new_msg_type._md5sum,
        message_definition=new_msg_type._full_text,
    )
    return conn_header
127
128
129
130
131
132
133
def fixbag(migrator, inbag, outbag):
    """Fix a bag so that it can be played in the current system.

    @param migrator The message migrator to use
    @param inbag Name of the bag to be fixed.
    @param outbag Name of the bag to be saved.
    @returns True if all migrations are valid and the bag was rewritten,
    False otherwise (outbag is not written in that case).
    """
    # First check: only proceed when every migration path is fully valid.
    res = checkbag(migrator, inbag)

    if not all(m[1] == [] for m in res):
        return False

    bag = rosbag.Bag(inbag, 'r')
    rebag = rosbag.Bag(outbag, 'w', options=bag.options)
    try:
        for topic, msg, t, conn_header in bag.read_messages(raw=True, return_connection_header=True):
            new_msg_type = migrator.find_target(msg[4])
            mig_msg = migrator.migrate_raw(msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
            new_conn_header = _migrate_connection_header(conn_header, new_msg_type)
            rebag.write(topic, mig_msg, t, connection_header=new_conn_header, raw=True)
    finally:
        # Close both bags even when a migration raises (was leaked on error).
        rebag.close()
        bag.close()
    return True
152
153
154
155
156
157
158
def fixbag2(migrator, inbag, outbag, force=False):
    """Fix a bag so that it can be played in the current system.

    @param migrator The message migrator to use
    @param inbag Name of the bag to be fixed.
    @param outbag Name of the bag to be saved.
    @param force Rewrite the bag even when some migrations are invalid.
    @returns [] on success (or when forced); otherwise the list of
    migrations that could not be performed (and the bag is not written).
    """
    res = checkbag(migrator, inbag)

    # Migration paths that still contain invalid rules.
    migrations = [m for m in res if len(m[1]) > 0]

    if not migrations or force:
        bag = rosbag.Bag(inbag, 'r')
        rebag = rosbag.Bag(outbag, 'w', options=bag.options)
        try:
            for topic, msg, t, conn_header in bag.read_messages(raw=True, return_connection_header=True):
                new_msg_type = migrator.find_target(msg[4])
                if new_msg_type is not None:
                    mig_msg = migrator.migrate_raw(msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
                    new_conn_header = _migrate_connection_header(conn_header, new_msg_type)
                    rebag.write(topic, mig_msg, t, connection_header=new_conn_header, raw=True)
                else:
                    # No known target: copy the message through unchanged.
                    rebag.write(topic, msg, t, connection_header=conn_header, raw=True)
        finally:
            # Close both bags even when a migration raises (was leaked on error).
            rebag.close()
            bag.close()

    if force:
        return []
    return migrations
184
185
186
187
188
189
190
191
192
193
194
def clean_name(name, top_name):
    """Strip 'std_msgs' and top_name's package from a slash-separated type name.

    e.g. clean_name('std_msgs/Header', 'foo/Bar') -> 'Header',
         clean_name('foo/Baz', 'foo/Bar') -> 'Baz'.
    """
    parts = name.split('/')
    # Drop at most one occurrence of each unwanted package, mirroring
    # list.remove's first-occurrence semantics.
    for unwanted in ('std_msgs', top_name.split('/')[0]):
        if unwanted in parts:
            parts.remove(unwanted)
    return '/'.join(parts)
207
208
209
210
211
212
213
214
215
216
218
219 tmp_name = clean_name(name, top_name)
220
221 if len(tmp_name.split('/')) == 2 or (genmsg.msgs.is_builtin(tmp_name)):
222 return tmp_name
223 elif tmp_name == 'Header':
224 return 'std_msgs/Header'
225 else:
226 return top_name.split('/')[0] + '/' + tmp_name
227
228
229
230
231
232
233
234
235
def get_message_key(c):
    """Return the unique identifying key (type, md5sum) for a message class.

    Returns None when c does not look like a message class (e.g. None or
    an object without _type/_md5sum).
    """
    try:
        return (c._type, c._md5sum)
    except AttributeError:
        # Was a bare 'except:'; only a missing attribute is expected here,
        # and a bare except would also swallow KeyboardInterrupt/SystemExit.
        return None
241
242
243
244
245
246
247
248
249
250
251
257
258
260 old_type = ''
261 old_full_text = ''
262 new_type = ''
263 new_full_text = ''
264 migrated_types = []
265
266 order = -1
267
268 valid = False
269
272
273
274 - def __init__(self, migrator, location):
275
276 self.migrator = migrator
277 self.location = location
278
279 if (self.old_type != self.new_type):
280 self.rename_rule = True
281 else:
282 self.rename_rule = False
283
284
285 try:
286 if self.old_type == "":
287 raise self.EmptyType
288 self.old_types = genpy.dynamic.generate_dynamic(self.old_type, self.old_full_text)
289 self.old_class = self.old_types[self.old_type]
290 self.old_md5sum = self.old_class._md5sum
291 except Exception as e:
292 if not isinstance(e, self.EmptyType):
293 traceback.print_exc(file=sys.stderr)
294 self.old_types = {}
295 self.old_class = None
296 self.old_md5sum = ""
297 try:
298 if self.new_type == "":
299 raise self.EmptyType
300 self.new_types = genpy.dynamic.generate_dynamic(self.new_type, self.new_full_text)
301 self.new_class = self.new_types[self.new_type]
302 self.new_md5sum = self.new_class._md5sum
303 except Exception as e:
304 if not isinstance(e, self.EmptyType):
305 traceback.print_exc(file=sys.stderr)
306 self.new_types = {}
307 self.new_class = None
308 self.new_md5sum = ""
309
310
311
312 self.sub_rules_done = False
313 self.sub_rules_valid = False
314 self.sub_rules = []
315
316
317
318
319
320
321
322
323
324
326 self.sub_rules_valid = True
327 for (t1, t2) in self.migrated_types:
328 try:
329 tmp_old_class = self.get_old_class(t1)
330 except KeyError:
331 print("WARNING: Within rule [%s], specified migrated type [%s] not found in old message types" % (self.location, t1), file=sys.stderr)
332 self.sub_rules_valid = False
333 continue
334 try:
335 tmp_new_class = self.get_new_class(t2)
336 except KeyError:
337 print("WARNING: Within rule [%s], specified migrated type [%s] not found in new message types" % (self.location, t2), file=sys.stderr)
338 self.sub_rules_valid = False
339 continue
340
341
342
343
344 if (get_message_key(tmp_old_class) != get_message_key(self.old_class)) or (get_message_key(tmp_new_class) != get_message_key(self.new_class)):
345 path = self.migrator.find_path(tmp_old_class, tmp_new_class)
346 rules = [sn.rule for sn in path]
347 self.sub_rules.extend(rules)
348
349 if False in [r.valid for r in self.sub_rules]:
350 print("WARNING: Within rule [%s] cannot migrate from subtype [%s] to [%s].." % (self.location, t1, t2), file=sys.stderr)
351 self.sub_rules_valid = False
352 continue
353 self.sub_rules = self.migrator.filter_rules_unique(self.sub_rules)
354 self.sub_rules_done = True
355
356
357
358
359
360
361
363 try:
364 try:
365 return self.new_types[t]
366 except KeyError:
367 return self.new_types['std_msgs/' + t]
368 except KeyError:
369 return self.new_types[self.new_type.split('/')[0] + '/' + t]
370
371
372
373
374
375
376
378 try:
379 try:
380 return self.old_types[t]
381 except KeyError:
382 return self.old_types['std_msgs/' + t]
383 except KeyError:
384 return self.old_types[self.old_type.split('/')[0] + '/' + t]
385
386
387
388
389
390
391
392 - def migrate(self, msg_from, msg_to):
393 tmp_msg_from = clean_name(msg_from._type, self.old_type)
394 tmp_msg_to = clean_name(msg_to._type, self.new_type)
395 if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
396 raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))
397 self.migrator.migrate(msg_from, msg_to)
398
399
400
401
402
403
404
405
406 - def migrate_array(self, msg_from_array, msg_to_array, msg_to_name):
407 msg_to_class = self.get_new_class(msg_to_name)
408
409 while len(msg_to_array) > 0:
410 msg_to_array.pop()
411
412 if (len(msg_from_array) == 0):
413 return
414
415 tmp_msg_from = clean_name(msg_from_array[0]._type, self.old_type)
416 tmp_msg_to = clean_name(msg_to_class._type, self.new_type)
417 if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
418 raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))
419
420 msg_to_array.extend( [msg_to_class() for i in range(len(msg_from_array))] )
421
422 self.migrator.migrate_array(msg_from_array, msg_to_array)
423
424
427
428
429
430
431
432 - def apply(self, old_msg):
447
448
449
450
451
452 - def update(self, old_msg, new_msg):
454
455
456
457
458
459
460
461
462
465 self.chain = []
466 self.order_keys = set()
467 self.rename = None
468
469
470
471
472
473
474
475
476
478 - def __init__(self, old_class, new_class, rule):
479 self.old_class = old_class
480 self.new_class = new_class
481 self.rule = rule
482 self.next = None
483
484
485
486
487
489 - def __init__(self, input_rule_files=[], plugins=True):
490
491
492
493
494
495
496
497 self.rulechains = collections.defaultdict(RuleChain)
498
499
500
501 self.base_nodes = []
502
503
504
505
506 self.extra_nodes = []
507
508
509 self.first_type = {}
510
511
512
513
514 self.rename_map = {}
515
516
517
518 self.found_paths = {}
519 self.found_targets = {}
520
521
522 terminal_nodes = []
523
524
525 rule_dicts = []
526
527 self.false_rule_loaded = False
528
529
530
531 for r in input_rule_files:
532 try:
533 scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
534 with open(r, 'r') as f:
535 exec(f.read(), scratch_locals)
536 rule_dicts.append((scratch_locals, r))
537 except:
538 print("Cannot load rule file [%s] in local package" % r, file=sys.stderr)
539
540
541
542 if plugins:
543 rospack = rospkg.RosPack()
544 for dep,export in [('rosbagmigration','rule_file'),('rosbag','migration_rule_file'),('rosbag_migration_rule','rule_file')]:
545 for pkg in rospack.get_depends_on(dep, implicit=False):
546 m = rospack.get_manifest(pkg)
547 p_rules = m.get_export(dep,export)
548 pkg_dir = rospack.get_path(pkg)
549 for r in p_rules:
550 if dep == 'rosbagmigration':
551 print("""WARNING: The package: [%s] is using a deprecated rosbagmigration export.
552 The export in the manifest should be changed to:
553 <rosbag migration_rule_file="%s"/>
554 """ % (pkg, r), file=sys.stderr)
555 try:
556 scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
557 exec(open(pkg_dir + "/" + r).read(), scratch_locals)
558 rule_dicts.append((scratch_locals, r))
559 except ImportError:
560 print("Cannot load rule file [%s] in package [%s]" % (r, pkg), file=sys.stderr)
561
562
563 for (rule_dict, location_base) in rule_dicts:
564 for (n,c) in rule_dict.items():
565 if inspect.isclass(c):
566 if (not c == MessageUpdateRule) and issubclass(c, MessageUpdateRule):
567 self.add_update_rule(c(self, location_base + ':' + n))
568
569 if self.false_rule_loaded:
570 raise BagMigrationException("Cannot instantiate MessageMigrator with invalid rules")
571
572
573
574
575
576
577
578
579
580
581
582 for (type,rulechain) in self.rulechains.items():
583 first = True
584 sn = None
585 prev_sn = None
586
587
588 rename_set = set([type])
589 tmp = rulechain.rename
590 while tmp:
591 rename_set.add(tmp.new_type)
592 if tmp.new_type in self.rulechains:
593 tmp = self.rulechains[tmp.new_type].rename
594 else:
595 break
596
597 self.rename_map[type] = rename_set
598
599
600 for r in rulechain.chain:
601
602 sn = ScaffoldNode(r.old_class, r.new_class, r)
603 self.base_nodes.append(sn)
604
605 if first:
606 self.first_type[type] = sn
607 first = False
608
609
610 if prev_sn:
611 if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
612 prev_sn.next = sn
613 else:
614 implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
615 self.base_nodes.append(implicit_sn)
616 prev_sn.next = implicit_sn
617 implicit_sn.next = sn
618
619 prev_sn = sn
620
621
622 if rulechain.rename:
623
624 sn = ScaffoldNode(rulechain.rename.old_class, rulechain.rename.new_class, rulechain.rename)
625 self.base_nodes.append(sn)
626
627
628
629
630 if first:
631 self.first_type[type] = sn
632 first = False
633 if prev_sn:
634 if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
635 prev_sn.next = sn
636 else:
637 implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
638 self.base_nodes.append(implicit_sn)
639 prev_sn.next = implicit_sn
640 implicit_sn.next = sn
641 prev_sn = sn
642 terminal_nodes.append(sn)
643
644 else:
645 if prev_sn:
646 terminal_nodes.append(prev_sn)
647
648
649
650
651
652 for sn in terminal_nodes:
653 key = get_message_key(sn.new_class)
654
655 renamed = (sn.old_class._type != sn.new_class._type)
656
657 sys_class = genpy.message.get_message_class(sn.new_class._type)
658
659
660 if sys_class:
661 new_rule = self.make_update_rule(sn.new_class, sys_class)
662 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
663 if R.valid:
664 sn.next = ScaffoldNode(sn.new_class, sys_class, R)
665 self.base_nodes.append(sn.next)
666
667 if renamed:
668 tmp_sns = self.scaffold_range(sn.new_class._type, sn.new_class._type)
669
670
671 if tmp_sns == []:
672 if sys_class is not None:
673 sn.next = ScaffoldNode(sn.new_class, sys_class, None)
674 self.base_nodes.append(sn.next)
675 continue
676
677
678 for tmp_sn in reversed(tmp_sns):
679 tmp_key = get_message_key(tmp_sn.old_class)
680 if (key == tmp_key):
681 sn.next = tmp_sn
682 break
683
684
685
686
687
688
689
690 if (sn.next is None):
691 for tmp_sn in reversed(tmp_sns):
692 new_rule = self.make_update_rule(sn.new_class, tmp_sn.old_class)
693 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
694 if R.valid:
695 sn.next = ScaffoldNode(sn.new_class, tmp_sn.old_class, R)
696 self.base_nodes.append(sn.next)
697 break
698
699
700
701 if (sn.next is None):
702 if sys_class:
703 new_rule = self.make_update_rule(sn.new_class, sys_class)
704 else:
705 new_rule = self.make_old_half_rule(sn.new_class)
706 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
707 sn.next = ScaffoldNode(sn.new_class, None, R)
708 self.base_nodes.append(sn.next)
709
710
711
712
713
714 for sn in self.base_nodes:
715 if (sn.rule is None):
716 new_rule = self.make_update_rule(sn.old_class, sn.new_class)
717 sn.rule = new_rule(self, 'GENERATED.' + new_rule.__name__)
718
719
720
721 for sn in self.base_nodes:
722 sn.rule.find_sub_paths()
723
724
725
726
727 self.class_dict = {}
728
729 for sn in self.base_nodes + self.extra_nodes:
730 self.class_dict[get_message_key(sn.old_class)] = sn.old_class
731 self.class_dict[get_message_key(sn.new_class)] = sn.new_class
732
733
735 if key in self.class_dict:
736 return self.class_dict[key]
737 else:
738 return None
739
740
742 if r.valid == False:
743 print("ERROR: Update rule [%s] has valid set to False." % (r.location), file=sys.stderr)
744 self.false_rule_loaded = True
745 return
746
747 rulechain = self.rulechains[r.old_type]
748
749 if r.rename_rule:
750 if (rulechain.rename != None):
751 print("WARNING: Update rules [%s] and [%s] both attempting to rename type [%s]. Ignoring [%s]" % (rulechain.rename.location, r.location, r.old_type, r.location), file=sys.stderr)
752 return
753
754
755 cycle = []
756 tmp = r
757 while tmp:
758 cycle.append(tmp)
759 if (tmp.new_type == r.old_type):
760 print("WARNING: Update rules %s introduce a renaming cycle. Ignoring [%s]" % ([x.location for x in cycle], r.location), file=sys.stderr)
761 return
762 if tmp.new_type in self.rulechains:
763 tmp = self.rulechains[tmp.new_type].rename
764 else:
765 break
766
767
768 if rulechain.chain and (r.order <= rulechain.chain[-1].order):
769 print("WARNING: Update rule [%s] which performs rename does not have largest order number. Ignoring" % r.location, file=sys.stderr)
770 return
771
772 rulechain.rename = r
773
774 else:
775 if r.order in rulechain.order_keys:
776 otherind = [x.order for x in rulechain.chain].index(r.order)
777 print("WARNING: Update rules [%s] and [%s] for type [%s] have the same order number. Ignoring [%s]" % (rulechain.chain[otherind].location, r.location, r.old_type, r.location), file=sys.stderr)
778 return
779 else:
780 if rulechain.rename and (r.order >= rulechain.rename.order):
781 print("WARNING: Update rule [%s] has order number larger than rename rule [%s]. Ignoring" % (r.location, rulechain.rename.location), file=sys.stderr)
782 return
783
784 rulechain.order_keys.add(r.order)
785 rulechain.chain.append(r)
786 rulechain.chain.sort(key=lambda x: x.order)
787
788
790 base_valid = not False in [sn.rule.valid for sn in self.base_nodes]
791 extra_valid = not False in [sn.rule.valid for sn in self.extra_nodes]
792 return base_valid and extra_valid
793
794
796 invalid_rules = []
797 invalid_rule_cache = []
798 for sn in self.base_nodes:
799 if not sn.rule.valid:
800 path_key = get_path_key(sn.old_class, sn.new_class)
801 if (path_key not in invalid_rule_cache):
802 invalid_rules.append(sn.rule)
803 invalid_rule_cache.append(path_key)
804 for sn in self.extra_nodes:
805 if not sn.rule.valid:
806 path_key = get_path_key(sn.old_class, sn.new_class)
807 if (path_key not in invalid_rule_cache):
808 invalid_rules.append(sn.rule)
809 invalid_rule_cache.append(path_key)
810 return invalid_rules
811
812
814 rule_cache = []
815 new_rules = []
816 for r in rules:
817 path_key = get_path_key(r.old_class, r.new_class)
818 if (path_key not in rule_cache):
819 new_rules.append(r)
820 return new_rules
821
822
832
834 try:
835 first_sn = self.first_type[old_type]
836
837 sn_range = [first_sn]
838
839 found_new_type = False
840
841 tmp_sn = first_sn
842
843 while (tmp_sn.next is not None and tmp_sn.next.new_class is not None):
844
845 tmp_sn = tmp_sn.next
846 if (tmp_sn != first_sn):
847 sn_range.append(tmp_sn)
848 if (tmp_sn.new_class._type == new_type):
849 found_new_type = True
850 if (found_new_type and tmp_sn.new_class._type != new_type):
851 break
852
853 return sn_range
854
855 except KeyError:
856 return []
857
858
860 key = get_message_key(old_class)
861
862 last_class = old_class
863
864 try:
865 return self.found_targets[key]
866 except KeyError:
867
868 sys_class = genpy.message.get_message_class(old_class._type)
869
870 if sys_class is not None:
871 self.found_targets[key] = sys_class
872 return sys_class
873
874 try:
875 tmp_sn = self.first_type[old_class._type]
876
877 if tmp_sn.new_class is not None:
878 last_class = tmp_sn.new_class
879
880 while tmp_sn.next is not None:
881 tmp_sn = tmp_sn.next
882
883 if tmp_sn.new_class is not None:
884 last_class = tmp_sn.new_class
885 sys_class = genpy.message.get_message_class(tmp_sn.new_class._type)
886 else:
887 sys_class = None
888
889 if sys_class is not None:
890 self.found_targets[key] = sys_class
891 return sys_class
892 except KeyError:
893 pass
894
895 self.found_targets[key] = None
896 return None
897
898
899
901 key = get_path_key(old_class, new_class)
902
903
904 try:
905 return self.found_paths[key]
906 except KeyError:
907 pass
908
909
910
911
912
913 if new_class is None:
914 sn_range = self.scaffold_range(old_class._type, "")
915
916 found_start = False
917
918 for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
919
920 if (tmp_sn.old_class._type != old_class._type):
921 continue
922 if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
923 sn_range = sn_range[ind:]
924 found_start = True
925 break
926
927
928 if not found_start:
929 for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
930 if (tmp_sn.old_class._type != old_class._type):
931 continue
932 new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
933 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
934 if R.valid:
935 R.find_sub_paths()
936 sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
937 self.extra_nodes.append(sn)
938 sn_range = sn_range[ind:]
939 sn_range.insert(0,sn)
940 found_start = True
941 break
942
943 if sn_range == []:
944 tmp_class = old_class
945 else:
946 tmp_class = sn_range[-1].new_class
947
948 new_rule = self.make_old_half_rule(tmp_class)
949 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
950 sn = ScaffoldNode(tmp_class, None, R)
951 sn_range.append(sn)
952 self.extra_nodes.append(sn)
953 self.found_paths[key] = sn_range
954 return sn_range
955
956
957 if (old_class._type == new_class._type and old_class._full_text.strip() == new_class._full_text.strip()):
958 self.found_paths[key] = []
959 return []
960
961 sn_range = self.scaffold_range(old_class._type, new_class._type)
962
963
964 if sn_range == []:
965 new_rule = self.make_update_rule(old_class, new_class)
966 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
967 R.find_sub_paths()
968 sn = ScaffoldNode(old_class, new_class, R)
969 self.extra_nodes.append(sn)
970 self.found_paths[key] = [sn]
971 return [sn]
972
973
974
975 found_stop = False
976
977
978 for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
979
980 if (tmp_sn.new_class._type != new_class._type):
981 break
982 if get_message_key(tmp_sn.new_class) == get_message_key(new_class):
983 sn_range = sn_range[:ind+1]
984 found_stop = True
985 break
986
987
988 if not found_stop:
989 for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
990 if (tmp_sn.new_class._type != new_class._type):
991 break
992 new_rule = self.make_update_rule(tmp_sn.new_class, new_class)
993 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
994 if R.valid:
995 R.find_sub_paths()
996 if R.sub_rules_valid:
997 sn = ScaffoldNode(tmp_sn.new_class, new_class, R)
998 self.extra_nodes.append(sn)
999 sn_range = sn_range[:ind+1]
1000 sn_range.append(sn)
1001 found_stop = True
1002 break
1003
1004
1005 if not found_stop:
1006 new_rule = self.make_update_rule(sn_range[-1].new_class, new_class)
1007 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
1008 R.find_sub_paths()
1009 sn = ScaffoldNode(sn_range[-1].new_class, new_class, R)
1010 self.extra_nodes.append(sn)
1011 sn_range.append(sn)
1012
1013
1014 found_start = False
1015
1016
1017 for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
1018
1019 if (tmp_sn.old_class._type != old_class._type):
1020 continue
1021 if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
1022 sn_range = sn_range[ind:]
1023 found_start = True
1024 break
1025
1026
1027 if not found_start:
1028 new_rule = self.make_update_rule(old_class, new_class)
1029 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
1030 if R.valid:
1031 R.find_sub_paths()
1032 if R.sub_rules_valid:
1033 sn = ScaffoldNode(old_class, new_class, R)
1034 self.extra_nodes.append(sn)
1035 self.found_paths[key] = [sn]
1036 return [sn]
1037
1038
1039 if not found_start:
1040 for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
1041 if (tmp_sn.old_class._type != old_class._type):
1042 continue
1043 new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
1044 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
1045 if R.valid:
1046 R.find_sub_paths()
1047 if R.sub_rules_valid:
1048 sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
1049 self.extra_nodes.append(sn)
1050 sn_range = sn_range[ind:]
1051 sn_range.insert(0,sn)
1052 found_start = True
1053 break
1054
1055
1056 if not found_start:
1057 new_rule = self.make_update_rule(old_class, sn_range[0].old_class)
1058 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
1059 R.find_sub_paths()
1060 sn = ScaffoldNode(old_class, sn_range[0].old_class, R)
1061 self.extra_nodes.append(sn)
1062 sn_range.insert(0,sn)
1063
1064 self.found_paths[key] = sn_range
1065 return sn_range
1066
1067
1069 path = self.find_path(msg_from[4], msg_to[4])
1070
1071 if False in [sn.rule.valid for sn in path]:
1072 raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from[0], msg_to[0]))
1073
1074
1075 if path == [] or msg_from[2] == msg_to[2]:
1076 return (msg_to[0], msg_from[1], msg_to[2], msg_to[3], msg_to[4])
1077
1078 tmp_msg = path[0].old_class()
1079 tmp_msg.deserialize(msg_from[1])
1080
1081 for sn in path:
1082 tmp_msg = sn.rule.apply(tmp_msg)
1083
1084 buff = StringIO()
1085 tmp_msg.serialize(buff)
1086
1087 return (msg_to[0], buff.getvalue(), msg_to[2], msg_to[3], msg_to[4])
1088
1089
1090
1091 - def migrate(self, msg_from, msg_to):
1092 path = self.find_path(msg_from.__class__, msg_to.__class__)
1093
1094 if False in [sn.rule.valid for sn in path]:
1095 raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type))
1096
1097
1098 if path == [] or msg_from._md5sum == msg_to._md5sum:
1099 buff = StringIO()
1100 msg_from.serialize(buff)
1101 msg_to.deserialize(buff.getvalue())
1102 return
1103
1104 if len(path) > 0:
1105 buff = StringIO()
1106 msg_from.serialize(buff)
1107
1108 tmp_msg = path[0].old_class()
1109
1110 tmp_msg.deserialize(buff.getvalue())
1111
1112 for sn in path:
1113 tmp_msg = sn.rule.apply(tmp_msg)
1114 else:
1115 tmp_msg = msg_from
1116
1117 buff = StringIO()
1118 tmp_msg.serialize(buff)
1119 msg_to.deserialize(buff.getvalue())
1120
1122 if len(msg_from_array) != len(msg_to_array):
1123 raise BagMigrationException("Migrate array called on on arrays of unequal length.")
1124
1125 if len(msg_from_array) == 0:
1126 return
1127
1128 path = self.find_path(msg_from_array[0].__class__, msg_to_array[0].__class__)
1129
1130 if path is None:
1131 raise BagMigrationException("Migrate called, but no migration path from [%s] to [%s]"%(msg_from._type, msg_to._type))
1132
1133
1134 if path == []:
1135 for i in range(len(msg_from_array)):
1136 buff = StringIO()
1137 msg_from_array[i].serialize(buff)
1138 msg_to_array[i].deserialize(buff.getvalue())
1139 return
1140
1141 for i in range(len(msg_from_array)):
1142 buff = StringIO()
1143 tmp_msg = path[0].old_class()
1144 msg_from_array[i].serialize(buff)
1145 tmp_msg.deserialize(buff.getvalue())
1146 for sn in path:
1147 tmp_msg = sn.rule.apply(tmp_msg)
1148
1149 buff = StringIO()
1150 tmp_msg.serialize(buff)
1151 msg_to_array[i].deserialize(buff.getvalue())
1152
1154 name = "update_%s_%s"%(old_class._type.replace("/","_"), old_class._md5sum)
1155
1156
1157
1158 classdef = "class %s(MessageUpdateRule):\n"%name
1159 classdef += "\told_type = \"%s\"\n"%old_class._type
1160 classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip()
1161 classdef += "\tnew_type = \"%s\"\n"%new_class._type
1162 classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip()
1163 classdef += "\n"
1164 classdef += "\torder = 0"
1165 classdef += "\n"
1166
1167 validdef = "\tvalid = True\n"
1168
1169 migratedefs = "\tmigrated_types = ["
1170
1171 updatedef = "\tdef update(self, old_msg, new_msg):\n"
1172
1173 old_consts = constants_from_def(old_class._type, old_class._full_text)
1174 new_consts = constants_from_def(new_class._type, new_class._full_text)
1175
1176 if (not new_consts >= old_consts):
1177 validdef = "\tvalid = False\n"
1178 for c in (old_consts - new_consts):
1179 updatedef += "\t\t#Constant '%s' has changed\n"%(c[0],)
1180
1181 old_slots = []
1182 old_slots.extend(old_class.__slots__)
1183
1184 migrations_seen = []
1185
1186
1187 for (s,t) in zip(new_class.__slots__, new_class._slot_types):
1188 warn_msg = None
1189 new_base_type, new_is_array, new_array_len = genmsg.msgs.parse_type(t)
1190 try:
1191 ind = old_class.__slots__.index(s)
1192 old_slots.remove(s)
1193 old_base_type, old_is_array, old_array_len = genmsg.msgs.parse_type(old_class._slot_types[ind])
1194
1195 if new_is_array != old_is_array:
1196 warn_msg = "Could not match array with nonarray"
1197
1198 elif new_array_len != old_array_len:
1199 if old_array_len is None:
1200 warn_msg = "Converted from variable length array to fixed array of length %d"%(new_array_len)
1201 elif new_array_len is None:
1202 warn_msg = "Converted from fixed array of length %d to variable length"%(old_array_len)
1203 else:
1204 warn_msg = "Fixed length array converted from %d to %d"%(old_array_len,new_array_len)
1205
1206 elif genmsg.msgs.is_builtin(new_base_type):
1207 if new_base_type != old_base_type:
1208 warn_msg = "Primitive type changed"
1209 else:
1210 updatedef += "\t\tnew_msg.%s = old_msg.%s\n"%(s,s)
1211
1212 else:
1213 tmp_old_type = clean_name(old_base_type, old_class._type)
1214 tmp_new_type = clean_name(new_base_type, new_class._type)
1215
1216 tmp_qualified_old_type = qualified_name(old_base_type, old_class._type)
1217 tmp_qualified_new_type = qualified_name(new_base_type, new_class._type)
1218
1219
1220 if (tmp_qualified_old_type == tmp_qualified_new_type) or \
1221 (tmp_qualified_old_type in self.rename_map and
1222 tmp_qualified_new_type in self.rename_map[tmp_qualified_old_type]):
1223
1224 if (tmp_old_type, tmp_new_type) not in migrations_seen:
1225 migratedefs += "\n\t\t(\"%s\",\"%s\"),"%(tmp_old_type, tmp_new_type)
1226 migrations_seen.append((tmp_old_type, tmp_new_type))
1227
1228 if not new_is_array:
1229 updatedef += "\t\tself.migrate(old_msg.%s, new_msg.%s)\n"%(s,s)
1230 else:
1231 updatedef += "\t\tself.migrate_array(old_msg.%s, new_msg.%s, \"%s\")\n"%(s,s,new_base_type)
1232 else:
1233 warn_msg = "No migration path between [%s] and [%s]"%(tmp_old_type, tmp_new_type)
1234 except ValueError:
1235 warn_msg = "No matching field name in old message"
1236
1237 if warn_msg is not None:
1238 validdef = "\tvalid = False\n"
1239 updatedef += "\t\t#%s\n"%warn_msg
1240 updatedef += "\t\tnew_msg.%s = %s\n"%(s,migration_default_value(t))
1241
1242 migratedefs += "]\n"
1243
1244 if old_slots:
1245 validdef = "\tvalid = False\n"
1246 for s in old_slots:
1247 updatedef += "\t\t#No field to match field %s from old message\n"%(s)
1248
1249 classdef += migratedefs + '\n' + validdef + '\n' + updatedef
1250
1251 printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef
1252
1253
1254 exec(printclassdef)
1255 return locals()[name]
1256
1258 name = "update__%s__%s"%(old_class._type.replace("/","_"), old_class._md5sum)
1259
1260
1261
1262 classdef = "class %s(MessageUpdateRule):\n"%name
1263 classdef += "\told_type = \"%s\"\n"%old_class._type
1264 classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip()
1265 classdef += "\tnew_type = \"\"\n"
1266 classdef += "\tnew_full_text = \"\"\"\n\n\"\"\"\n"
1267 classdef += "\n"
1268 classdef += "\torder = 0"
1269 classdef += "\n"
1270
1271 validdef = "\tvalid = False\n"
1272
1273 migratedefs = "\tmigrated_types = []\n"
1274
1275 updatedef = "\tdef update(self, old_msg, new_msg):\n"
1276 updatedef += "\t\tpass\n"
1277
1278 classdef += migratedefs + '\n' + validdef + '\n' + updatedef
1279
1280 printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef
1281
1282
1283 exec(printclassdef)
1284 return locals()[name]
1285
1287 name = "update_to_%s_%s"%(new_class._type.replace("/","_"), new_class._md5sum)
1288
1289
1290
1291 classdef = "class %s(MessageUpdateRule):\n"%name
1292 classdef += "\told_type = \"\"\n"
1293 classdef += "\told_full_text = \"\"\"\n\n\"\"\"\n\n"
1294 classdef += "\tnew_type = \"%s\"\n"%new_class._type
1295 classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip()
1296 classdef += "\n"
1297 classdef += "\torder = 0"
1298 classdef += "\n"
1299
1300 validdef = "\tvalid = False\n"
1301
1302 migratedefs = "\tmigrated_types = []\n"
1303
1304 updatedef = "\tdef update(self, old_msg, new_msg):\n"
1305 updatedef += "\t\tpass\n"
1306
1307 classdef += migratedefs + '\n' + validdef + '\n' + updatedef
1308
1309 printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef
1310
1311
1312 exec(printclassdef)
1313 return locals()[name]
1314
def migration_default_value(field_type):
    """Return Python source text for a sensible default of a msg field type."""
    if field_type in ('bool', 'byte', 'int8', 'int16', 'int32', 'int64',
                      'char', 'uint8', 'uint16', 'uint32', 'uint64'):
        return '0'
    if field_type in ('float32', 'float64'):
        return '0.'
    if field_type == 'string':
        # String constants are always stripped, so no quoting issues here.
        return "''"
    if field_type.endswith(']'):
        base_type, is_array, array_len = genmsg.msgs.parse_type(field_type)
        if base_type in ('byte', 'uint8'):
            # Byte/uint8 arrays are serialized as byte strings.
            if array_len is not None:
                return "chr(0)*%s"%array_len
            return "''"
        if array_len is None:
            return '[]'
        element_default = migration_default_value(base_type)
        return '[' + ','.join(itertools.repeat(element_default, array_len)) + ']'
    # Embedded message type: instantiate its (new) class at update time.
    return "self.get_new_class('%s')()"%field_type
1339
def constants_from_def(core_type, msg_def):
    """Extract the (name, value, type) constants declared by a message.

    msg_def is a full message definition: the core definition plus any
    dependent definitions separated by 80-character '=' divider lines;
    only the core definition's constants are returned, as a set.
    """
    core_pkg, core_base_type = genmsg.package_resource_name(core_type)

    # The first segment before the divider is the core definition itself.
    core_msg = msg_def.split('\n' + '=' * 80 + '\n')[0]

    from genmsg import MsgContext
    context = MsgContext.create_default()
    spec = genmsg.msg_loader.load_msg_from_string(context, core_msg, core_pkg)

    return set((c.name, c.val, c.type) for c in spec.constants)
1359