Package rosbag :: Module migration
[frames] | [no frames]

Source Code for Module rosbag.migration

   1  # Software License Agreement (BSD License) 
   2  # 
   3  # Copyright (c) 2009, Willow Garage, Inc. 
   4  # All rights reserved. 
   5  # 
   6  # Redistribution and use in source and binary forms, with or without 
   7  # modification, are permitted provided that the following conditions 
   8  # are met: 
   9  # 
  10  #  * Redistributions of source code must retain the above copyright 
  11  #    notice, this list of conditions and the following disclaimer. 
  12  #  * Redistributions in binary form must reproduce the above 
  13  #    copyright notice, this list of conditions and the following 
  14  #    disclaimer in the documentation and/or other materials provided 
  15  #    with the distribution. 
  16  #  * Neither the name of Willow Garage, Inc. nor the names of its 
  17  #    contributors may be used to endorse or promote products derived 
  18  #    from this software without specific prior written permission. 
  19  # 
  20  # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 
  21  # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 
  22  # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 
  23  # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 
  24  # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 
  25  # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
  26  # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
  27  # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
  28  # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 
  29  # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 
  30  # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
  31  # POSSIBILITY OF SUCH DAMAGE. 
  32   
  33  from __future__ import print_function 
  34   
  35  import collections 
  36  import copy 
  37  try: 
  38      from cStringIO import StringIO  # Python 2.x 
  39  except ImportError: 
  40      from io import BytesIO as StringIO  # Python 3.x 
  41  import inspect 
  42  import itertools 
  43  import os 
  44  import string 
  45  import sys 
  46   
  47  import genmsg.msgs 
  48  import genpy 
  49  import genpy.dynamic 
  50   
  51  import rospkg 
  52   
  53  import rosbag 
  54   
  55  # Anything outside the scope of these primitives is a submessage 
  56  #_PRIMITIVES = ['bool', 'byte','int8','int16','int32','int64','char','uint8','uint16','uint32','uint64','float32','float64','string','time'] 
  57   
class BagMigrationException(Exception):
    """Raised when a bag migration rule is invalid or misapplied."""
60
def checkbag(migrator, inbag):
    """
    Check whether a bag file can be played in the current system.
    @param migrator: message migrator to use
    @param inbag: name of the bag to be checked.
    @returns A list of tuples for each type in the bag file.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    checked = set()
    migrations = []

    bag = rosbag.Bag(inbag, 'r')
    try:
        for topic, msg, t in bag.read_messages(raw=True):
            # msg is a raw tuple; msg[4] is the message class.
            key = get_message_key(msg[4])
            if key not in checked:
                target = migrator.find_target(msg[4])
                # Even in the case of a zero-length path (matching md5sums), we still want
                # to migrate in the event of a type change (message move).
                path = migrator.find_path(msg[4], target)
                if len(path) > 0:
                    migrations.append((path, [r for r in migrator.expand_rules([sn.rule for sn in path]) if not r.valid]))

                checked.add(key)
    finally:
        # Close the bag even if reading or rule expansion raises,
        # so the file handle is not leaked.
        bag.close()

    return migrations
91
def checkmessages(migrator, messages):
    """
    Check whether a list of message classes can be played in the current system.
    @param migrator: the message migrator to use
    @param messages: a list of message classes.
    @returns A list of tuples for each type in the list.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    seen = set()
    results = []

    for msg_class in messages:
        key = get_message_key(msg_class)
        if key in seen:
            continue
        seen.add(key)

        target = migrator.find_target(msg_class)
        # Even for a zero-length path (matching md5sums) we still migrate
        # in the event of a type change (message move).
        path = migrator.find_path(msg_class, target)
        if path:
            invalid = [r for r in migrator.expand_rules([sn.rule for sn in path]) if not r.valid]
            results.append((path, invalid))

    return results
## Fix a bag so that it can be played in the current system
#
# @param migrator The message migrator to use
# @param inbag Name of the bag to be fixed.
# @param outbag Name of the bag to be saved.
# @returns True if migration was successful.
def fixbag(migrator, inbag, outbag):
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    # Bail out if any required migration path still has invalid rules.
    if not all(m[1] == [] for m in res):
        return False

    # Deserializing all messages is inefficient, but we can speed this up later
    bag = rosbag.Bag(inbag, 'r')
    try:
        rebag = rosbag.Bag(outbag, 'w', options=bag.options)
        try:
            for topic, msg, t in bag.read_messages(raw=True):
                new_msg_type = migrator.find_target(msg[4])
                mig_msg = migrator.migrate_raw(msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
                rebag.write(topic, mig_msg, t, raw=True)
        finally:
            # Close both bags even if migration raises, so partial output
            # is flushed and file handles are not leaked.
            rebag.close()
    finally:
        bag.close()
    return True
## Fix a bag so that it can be played in the current system
#
# @param migrator The message migrator to use
# @param inbag Name of the bag to be fixed.
# @param outbag Name of the bag to be saved.
# @param force Migrate the bag even if some migration paths have invalid rules.
# @returns [] if bag could be migrated, otherwise, it returns the list of necessary migration paths
def fixbag2(migrator, inbag, outbag, force=False):
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    migrations = [m for m in res if len(m[1]) > 0]

    # Deserializing all messages is inefficient, but we can speed this up later
    if len(migrations) == 0 or force:
        bag = rosbag.Bag(inbag, 'r')
        try:
            rebag = rosbag.Bag(outbag, 'w', options=bag.options)
            try:
                for topic, msg, t in bag.read_messages(raw=True):
                    new_msg_type = migrator.find_target(msg[4])
                    if new_msg_type is not None:
                        mig_msg = migrator.migrate_raw(msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
                        rebag.write(topic, mig_msg, t, raw=True)
                    else:
                        # No known target: copy the raw message through unchanged.
                        rebag.write(topic, msg, t, raw=True)
            finally:
                # Close both bags even on failure so handles are not leaked.
                rebag.close()
        finally:
            bag.close()

    if force:
        return []
    else:
        return migrations
## Helper function to strip out roslib and package name from name usages.
#
# There is some inconsistency in whether a fully-qualified path is
# used for sub-messages within a given message.  This function strips
# the 'std_msgs' package and the top-level type's package out of a
# possibly-qualified sub-message name.
#
# @param name The name to clean.
# @param top_name The name of the top-level type
# @returns The cleaned version of the name.
def clean_name(name, top_name):
    parts = name.split('/')
    top_pkg = top_name.split('/')[0]
    # Remove at most one occurrence of each package prefix, if present.
    for pkg in ('std_msgs', top_pkg):
        if pkg in parts:
            parts.remove(pkg)
    return '/'.join(parts)
## Helper function to ensure we end up with a qualified name
#
# There is some inconsistency in whether a fully-qualified path is
# used for sub-messages within a given message.  This function is
# useful for ensuring that a name is fully qualified correctly.
#
# @param name The name to qualify
# @param top_name The name of the top-level type
# @returns The qualified version of the name.
def qualified_name(name, top_name):
    # Clean the name first, to make everything else more deterministic.
    cleaned = clean_name(name, top_name)

    # Already package-qualified, or a builtin type: leave as-is.
    if len(cleaned.split('/')) == 2 or genmsg.msgs.is_builtin(cleaned):
        return cleaned
    # Header is special-cased into std_msgs.
    if cleaned == 'Header':
        return 'std_msgs/Header'
    # Otherwise qualify with the top-level type's package.
    return top_name.split('/')[0] + '/' + cleaned
## Helper function to return a key from a given class
#
# For now, we choose the tuple (type,md5sum) as a unique key for the
# class.  However, this is subject to change and assumptions about keys
# should not be made other than their uniqueness.
#
# @param c The message class or instance to get a key for
# @returns The unique key, or None if c has no type/md5sum attributes
def get_message_key(c):
    try:
        return (c._type, c._md5sum)
    except AttributeError:
        # Was a bare `except:` which also swallowed KeyboardInterrupt and
        # SystemExit; only a missing attribute means "no key available".
        return None
## Helper function to return a key for a given path
#
# For now, we choose the tuple ((type1,md5sum1),(type2,md5sum2)) as a
# unique key for the path.  However, this is subject to change and
# assumptions about keys should not be made other than their
# uniqueness.
#
# @param c1 The start point of the path
# @param c2 The stop point of the path
# @returns The unique key, or None on failure
def get_path_key(c1, c2):
    try:
        return (get_message_key(c1), get_message_key(c2))
    except AttributeError:
        # Was a bare `except:`; get_message_key already returns None for
        # unusable inputs, so only attribute errors are plausible here.
        return None
## Base class for all message update rules
class MessageUpdateRule(object):
    """Base class for a single message update rule.

    Subclasses override the class attributes below (old/new type names and
    full message definitions, the list of migrated sub-types, and an order
    number) and implement update() to convert one old message instance into
    a new one.
    """
    # Subclasses override these to describe the migration endpoints.
    old_type = ''
    old_full_text = ''
    new_type = ''
    new_full_text = ''
    # Pairs (old_subtype, new_subtype) this rule is allowed to migrate.
    migrated_types = []

    # Position of this rule within its type's rule chain.
    order = -1

    # Rule authors set this True once the rule is complete/verified.
    valid = False

    ## Initialize class
    #
    # @param migrator The migrator that owns this rule (used for sub-migrations)
    # @param location Human-readable origin of the rule, used in diagnostics
    def __init__(self, migrator, location):
        # Every rule needs to hang onto the migrator so we can potentially use it
        self.migrator = migrator
        self.location = location

        # A rule whose type name changes is a rename rule.
        if (self.old_type != self.new_type):
            self.rename_rule = True
        else:
            self.rename_rule = False

        # Instantiate types dynamically based on definition.  A bare except
        # is used deliberately: any failure (empty type, bad definition,
        # unavailable generator) leaves the placeholders below in place.
        try:
            if self.old_type == "":
                raise Exception
            self.old_types = genpy.dynamic.generate_dynamic(self.old_type, self.old_full_text)
            self.old_class = self.old_types[self.old_type]
            self.old_md5sum = self.old_class._md5sum
        except:
            self.old_types = []
            self.old_class = None
            self.old_md5sum = ""

        try:
            if self.new_type == "":
                raise Exception
            self.new_types = genpy.dynamic.generate_dynamic(self.new_type, self.new_full_text)
            self.new_class = self.new_types[self.new_type]
            self.new_md5sum = self.new_class._md5sum
        except:
            self.new_types = []
            self.new_class = None
            self.new_md5sum = ""

        # We have not populated our sub rules (and ideally should
        # wait until the full scaffold exists before doing this)
        self.sub_rules_done = False
        self.sub_rules_valid = False
        self.sub_rules = []

    ## Find all of the sub paths
    #
    # For any migrated type the user might want to use, we must make
    # sure the migrator had found a path for it.  To facilitate this
    # check we require that all migrated types must be listed as pairs
    # in the migrated_types field.
    #
    # It would be nice not to need these through performing some kind
    # of other inspection of the update rule itself.
    def find_sub_paths(self):
        self.sub_rules_valid = True
        for (t1, t2) in self.migrated_types:
            try:
                tmp_old_class = self.get_old_class(t1)
            except KeyError:
                print("WARNING: Within rule [%s], specified migrated type [%s] not found in old message types" % (self.location, t1), file=sys.stderr)
                self.sub_rules_valid = False
                continue
            try:
                tmp_new_class = self.get_new_class(t2)
            except KeyError:
                print("WARNING: Within rule [%s], specified migrated type [%s] not found in new message types" % (self.location, t2), file=sys.stderr)
                self.sub_rules_valid = False
                continue

            # If a rule instantiates itself as a subrule (because the
            # author knows the md5sums match), we don't want to end up
            # with an infinite recursion.
            if (get_message_key(tmp_old_class) != get_message_key(self.old_class)) or (get_message_key(tmp_new_class) != get_message_key(self.new_class)):
                path = self.migrator.find_path(tmp_old_class, tmp_new_class)
                rules = [sn.rule for sn in path]
                self.sub_rules.extend(rules)

            if False in [r.valid for r in self.sub_rules]:
                print("WARNING: Within rule [%s] cannot migrate from subtype [%s] to [%s].." % (self.location, t1, t2), file=sys.stderr)
                self.sub_rules_valid = False
                continue
        # De-duplicate accumulated sub-rules once all pairs are processed.
        self.sub_rules = self.migrator.filter_rules_unique(self.sub_rules)
        self.sub_rules_done = True

    ## Helper function to get the class of a submsg for the new type
    #
    # This function should be used inside of update to access new classes.
    #
    # @param t The subtype to return the class of
    # @returns The class of the new sub type
    def get_new_class(self,t):
        # Try the name as given, then std_msgs-qualified, then qualified
        # with this rule's own package.
        try:
            try:
                return self.new_types[t]
            except KeyError:
                return self.new_types['std_msgs/' + t]
        except KeyError:
            return self.new_types[self.new_type.split('/')[0] + '/' + t]

    ## Helper function to get the class of a submsg for the old type
    #
    # This function should be used inside of update to access old classes.
    #
    # @param t The subtype to return the class of
    # @returns The class of the old sub type
    def get_old_class(self,t):
        # Same lookup order as get_new_class, against the old type set.
        try:
            try:
                return self.old_types[t]
            except KeyError:
                return self.old_types['std_msgs/' + t]
        except KeyError:
            return self.old_types[self.old_type.split('/')[0] + '/' + t]

    ## Actually migrate one sub_type to another
    #
    # This function should be used inside of update to migrate sub msgs.
    #
    # @param msg_from A message instance of the old message type
    # @param msg_to A message instance of a new message type to be populated
    # @raises BagMigrationException if the pair is not in migrated_types
    def migrate(self, msg_from, msg_to):
        tmp_msg_from = clean_name(msg_from._type, self.old_type)
        tmp_msg_to = clean_name(msg_to._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))
        self.migrator.migrate(msg_from, msg_to)

    ## Helper function to migrate a whole array of messages
    #
    # This function should be used inside of update to migrate arrays of sub msgs.
    #
    # @param msg_from_array An array of messages of the old message type
    # @param msg_to_array An array of messages of the new message type (this will be emptied if not already)
    # @param msg_to_name The name of the new message type since msg_to_array may be an empty array.
    def migrate_array(self, msg_from_array, msg_to_array, msg_to_name):
        msg_to_class = self.get_new_class(msg_to_name)

        # Empty the target array in place (callers may hold a reference).
        while len(msg_to_array) > 0:
            msg_to_array.pop()

        if (len(msg_from_array) == 0):
            return

        tmp_msg_from = clean_name(msg_from_array[0]._type, self.old_type)
        tmp_msg_to = clean_name(msg_to_class._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))

        # Pre-populate the target with default-constructed instances.
        msg_to_array.extend( [msg_to_class() for i in range(len(msg_from_array))] )

        self.migrator.migrate_array(msg_from_array, msg_to_array)

    ## A helper function to print out the definition of autogenerated messages.
    #
    # Intentionally a no-op here; generated rule subclasses override it.
    def get_class_def(self):
        pass

    ## The function actually called by the message migrator
    #
    # @param old_msg An instance of the old message type.
    # @returns An instance of a new message type
    # @raises BagMigrationException if the rule is invalid, unprepared, or
    #         applied to the wrong class
    def apply(self, old_msg):
        if not self.valid:
            raise BagMigrationException("Attempted to apply an invalid rule")
        if not self.sub_rules_done:
            raise BagMigrationException("Attempted to apply a rule without building up its sub rules")
        if not self.sub_rules_valid:
            raise BagMigrationException("Attempted to apply a rule without valid sub-rules")
        if (get_message_key(old_msg) != get_message_key(self.old_class)):
            raise BagMigrationException("Attempted to apply rule to incorrect class %s %s."%(get_message_key(old_msg),get_message_key(self.old_class)))

        # Apply update rule
        new_msg = self.new_class()
        self.update(old_msg, new_msg)

        return new_msg

    ## The function which a user overrides to actually perform the message update
    #
    # @param old_msg A message instance of the old message type
    # @param new_msg A message instance of a new message type to be populated
    def update(self, old_msg, new_msg):
        raise BagMigrationException("Tried to use rule without update overidden")
## A class for book-keeping about rule-chains.
#
# Rule chains define the ordered set of update rules, indexed by
# typename, terminated by a rename rule.  This class is only used
# temporarily to help us get the ordering right, until all explicit
# rules have been loaded (possibly out of order) and the proper
# scaffold can be built.
class RuleChain(object):
    def __init__(self):
        # chain: ordered update rules; order_keys: order numbers already
        # used; rename: the optional terminal rename rule.
        self.chain, self.order_keys, self.rename = [], set(), None
## A class for arranging the ordered rules
#
# They provide a scaffolding (essentially a linked list) over which we
# assume we can migrate messages forward.  This allows us to verify a
# path exists before actually creating all of the necessary implicit
# rules (mostly migration of sub-messages) that such a path
# necessitates.
class ScaffoldNode(object):
    def __init__(self, old_class, new_class, rule):
        # One link in the migration chain: old -> new via rule.
        self.old_class, self.new_class, self.rule = old_class, new_class, rule
        # Filled in later when the scaffold is linked together.
        self.next = None
467 468 ## A class to actually migrate messages 469 # 470 # This is the big class that actually handles all of the fancy 471 # migration work. Better documentation to come later.
472 -class MessageMigrator(object):
    ## Build the migrator: load rules, scaffold them, and generate implicit rules.
    #
    # @param input_rule_files Optional list of local rule files (for debugging).
    #        NOTE(review): mutable default [] — never mutated here, but a
    #        None default would be safer; confirm before changing callers.
    # @param plugins If True, also load rules exported by packages via rospkg.
    # @raises BagMigrationException if any loaded rule has valid == False.
    def __init__(self, input_rule_files=[], plugins=True):
        # We use the rulechains to scaffold our initial creation of
        # implicit rules. Each RuleChain is keyed off of a type and
        # consists of an ordered set of update rules followed by an
        # optional rename rule.  For the system rule definitions to be
        # valid, all members of a rulechains must be connectable via
        # implicit rules and all rulechains must terminate in a known
        # system type which is also reachable by an implicit rule.
        self.rulechains = collections.defaultdict(RuleChain)

        # The list of all nodes that we can iterate through in the
        # future when making sure all rules have been constructed.
        self.base_nodes = []

        # The list of extra (non-scaffolded) nodes that we can use
        # when determining if all rules are valid and printing invalid
        # rules.
        self.extra_nodes = []

        # A map from typename to the first node of a particular type
        self.first_type = {}

        # A map from a typename to all other typenames for which
        # rename rules exist.  This is necessary to determine whether
        # an appropriate implicit rule can actually be constructed.
        self.rename_map = {}

        # The cached set of all found paths, keyed by:
        # ((old_type, old_md5), (new_type, new_md5))
        self.found_paths = {}
        self.found_targets = {}

        # Temporary list of the terminal nodes
        terminal_nodes = []

        # Temporary list of rule modules we are loading
        rule_dicts = []

        self.false_rule_loaded = False

        # To make debugging easy we can pass in a list of local
        # rulefiles.
        for r in input_rule_files:
            try:
                scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                with open(r, 'r') as f:
                    exec(f.read(), scratch_locals)
                rule_dicts.append((scratch_locals, r))
            except:
                print("Cannot load rule file [%s] in local package" % r, file=sys.stderr)

        # Alternatively the preferred method is to load definitions
        # from the migration ruleset export flag.
        if plugins:
            rospack = rospkg.RosPack()
            for dep,export in [('rosbagmigration','rule_file'),('rosbag','migration_rule_file'),('rosbag_migration_rule','rule_file')]:
                for pkg in rospack.get_depends_on(dep, implicit=False):
                    m = rospack.get_manifest(pkg)
                    p_rules = m.get_export(dep,export)
                    pkg_dir = rospack.get_path(pkg)
                    for r in p_rules:
                        if dep == 'rosbagmigration':
                            print("""WARNING: The package: [%s] is using a deprecated rosbagmigration export.
    The export in the manifest should be changed to:
    <rosbag migration_rule_file="%s"/>
""" % (pkg, r), file=sys.stderr)
                        try:
                            scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                            exec(open(pkg_dir + "/" + r).read(), scratch_locals)
                            rule_dicts.append((scratch_locals, r))
                        except ImportError:
                            print("Cannot load rule file [%s] in package [%s]" % (r, pkg), file=sys.stderr)

        # Instantiate and register every MessageUpdateRule subclass found
        # in the loaded rule modules.
        for (rule_dict, location_base) in rule_dicts:
            for (n,c) in rule_dict.items():
                if inspect.isclass(c):
                    if (not c == MessageUpdateRule) and issubclass(c, MessageUpdateRule):
                        self.add_update_rule(c(self, location_base + ':' + n))

        if self.false_rule_loaded:
            raise BagMigrationException("Cannot instantiate MessageMigrator with invalid rules")

        # Now, go through and build up a better scaffolded
        # representation, deferring implicit rule generation until
        # complete, since the implicit rule generation and sub-rule
        # population makes use of the scaffold.

        # First we walk each particular type chain (now including implicit
        # rules).  Additionally, we build up our name remapping lists.

        # For each rulechain
        for (type,rulechain) in self.rulechains.items():
            first = True
            sn = None
            prev_sn = None

            # Find name remapping list by following rename rules forward.
            rename_set = set([type])
            tmp = rulechain.rename
            while tmp:
                rename_set.add(tmp.new_type)
                if tmp.new_type in self.rulechains:
                    tmp = self.rulechains[tmp.new_type].rename
                else:
                    break

            self.rename_map[type] = rename_set

            # For each element in the rulechain chain,
            for r in rulechain.chain:
                # Create a scaffoldnode
                sn = ScaffoldNode(r.old_class, r.new_class, r)
                self.base_nodes.append(sn)
                # If it's the first one, stick it in our first_type map
                if first:
                    self.first_type[type] = sn
                    first = False
                # If there was a previous node, link them if keys
                # match, or else create an implicit SN
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                # The just-created node now becomes the previous
                prev_sn = sn

            # If there is a rename rule
            if rulechain.rename:
                # Create a scaffoldnode
                sn = ScaffoldNode(rulechain.rename.old_class, rulechain.rename.new_class, rulechain.rename)
                self.base_nodes.append(sn)

                # Same rules apply here as when we created each node
                # from chain.  Link if possible, otherwise create
                # implicit
                if first:
                    self.first_type[type] = sn
                    first = False
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                prev_sn = sn
                terminal_nodes.append(sn)
            # If there was not a rename rule, this must be a terminal node
            else:
                if prev_sn:
                    terminal_nodes.append(prev_sn)

        # Between our partial scaffold and name remapping list, we can
        # now generate rules, though we cannot yet populate the
        # subrules.

        for sn in terminal_nodes:
            key = get_message_key(sn.new_class)

            renamed = (sn.old_class._type != sn.new_class._type)

            sys_class = genpy.message.get_message_class(sn.new_class._type)

            # If we map directly to a system-defined class we're done
            if sys_class:
                new_rule = self.make_update_rule(sn.new_class, sys_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    sn.next = ScaffoldNode(sn.new_class, sys_class, R)
                    self.base_nodes.append(sn.next)

            if renamed:
                tmp_sns = self.scaffold_range(sn.new_class._type, sn.new_class._type)

                # If we don't map to a scaffold range, we appear to be done
                if tmp_sns == []:
                    if sys_class is not None:
                        sn.next = ScaffoldNode(sn.new_class, sys_class, None)
                        self.base_nodes.append(sn.next)
                        continue

                # Otherwise look for trivial bridges
                for tmp_sn in reversed(tmp_sns):
                    tmp_key = get_message_key(tmp_sn.old_class)
                    if (key == tmp_key):
                        sn.next = tmp_sn
                        break

                # If we did not find a trivial bridge, we instead need
                # to create the right implicit rule ourselves.  This
                # is based on the ability to create a valid implicit
                # rule as LATE in the chain as possible.  We do this
                # to avoid extra conversions in some boundary
                # circumstances.
                if (sn.next is None):
                    for tmp_sn in reversed(tmp_sns):
                        new_rule = self.make_update_rule(sn.new_class, tmp_sn.old_class)
                        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                        if R.valid:
                            sn.next = ScaffoldNode(sn.new_class, tmp_sn.old_class, R)
                            self.base_nodes.append(sn.next)
                            break

                # If we have still failed we need to create a placeholder.
                if (sn.next is None):
                    if sys_class:
                        new_rule = self.make_update_rule(sn.new_class, sys_class)
                    else:
                        new_rule = self.make_old_half_rule(sn.new_class)
                    R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                    sn.next = ScaffoldNode(sn.new_class, None, R)
                    self.base_nodes.append(sn.next)

        # Now that our scaffolding is actually complete, we iterate
        # through all of our rules and generate the rules for which we
        # have scaffoldnodes, but no rule yet
        for sn in self.base_nodes:
            if (sn.rule is None):
                new_rule = self.make_update_rule(sn.old_class, sn.new_class)
                sn.rule = new_rule(self, 'GENERATED.' + new_rule.__name__)

        # Finally, we go through and try to find sub_paths for every
        # rule in the system so far
        for sn in self.base_nodes:
            sn.rule.find_sub_paths()

        # Construction should be done, we can now use the system in
        # the event that we don't have invalid update rules.

        self.class_dict = {}

        for sn in self.base_nodes + self.extra_nodes:
            self.class_dict[get_message_key(sn.old_class)] = sn.old_class
            self.class_dict[get_message_key(sn.new_class)] = sn.new_class
716 717
718 - def lookup_type(self, key):
719 if key in self.class_dict: 720 return self.class_dict[key] 721 else: 722 return None
723 724 # Add an update rule to our set of rule chains
    # Add an update rule to our set of rule chains
    #
    # @param r A MessageUpdateRule instance.  Invalid rules set
    #          false_rule_loaded; conflicting rules are warned about and
    #          silently dropped.
    def add_update_rule(self, r):
        if r.valid == False:
            print("ERROR: Update rule [%s] has valid set to False." % (r.location), file=sys.stderr)
            self.false_rule_loaded = True
            return

        rulechain = self.rulechains[r.old_type]

        if r.rename_rule:
            # Only one rename rule per type is allowed.
            if (rulechain.rename != None):
                print("WARNING: Update rules [%s] and [%s] both attempting to rename type [%s]. Ignoring [%s]" % (rulechain.rename.location, r.location, r.old_type, r.location), file=sys.stderr)
                return

            # Search forward to make sure we haven't created a cycle
            cycle = []
            tmp = r
            while tmp:
                cycle.append(tmp)
                if (tmp.new_type == r.old_type):
                    print("WARNING: Update rules %s introduce a renaming cycle. Ignoring [%s]" % ([x.location for x in cycle], r.location), file=sys.stderr)
                    return
                if tmp.new_type in self.rulechains:
                    tmp = self.rulechains[tmp.new_type].rename
                else:
                    break

            # The rename rule must come after every regular rule in the chain.
            if rulechain.chain and (r.order <= rulechain.chain[-1].order):
                print("WARNING: Update rule [%s] which performs rename does not have largest order number. Ignoring" % r.location, file=sys.stderr)
                return

            rulechain.rename = r

        else:
            # Regular update rules must have unique order numbers ...
            if r.order in rulechain.order_keys:
                otherind = [x.order for x in rulechain.chain].index(r.order)
                print("WARNING: Update rules [%s] and [%s] for type [%s] have the same order number. Ignoring [%s]" % (rulechain.chain[otherind].location, r.location, r.old_type, r.location), file=sys.stderr)
                return
            else:
                # ... and must come before any already-registered rename rule.
                if rulechain.rename and (r.order >= rulechain.rename.order):
                    print("WARNING: Update rule [%s] has order number larger than rename rule [%s]. Ignoring" % (r.location, rulechain.rename.location), file=sys.stderr)
                    return
                # Insert the rule into a rule chain
                rulechain.order_keys.add(r.order)
                rulechain.chain.append(r)
                rulechain.chain.sort(key=lambda x: x.order)
771 772 # Helper function to determine if all rules are valid
773 - def all_rules_valid(self):
774 base_valid = not False in [sn.rule.valid for sn in self.base_nodes] 775 extra_valid = not False in [sn.rule.valid for sn in self.extra_nodes] 776 return base_valid and extra_valid
777 778 # Helper function to print out the definitions for all invalid rules (which include definitions)
779 - def get_invalid_rules(self):
780 invalid_rules = [] 781 invalid_rule_cache = [] 782 for sn in self.base_nodes: 783 if not sn.rule.valid: 784 path_key = get_path_key(sn.old_class, sn.new_class) 785 if (path_key not in invalid_rule_cache): 786 invalid_rules.append(sn.rule) 787 invalid_rule_cache.append(path_key) 788 for sn in self.extra_nodes: 789 if not sn.rule.valid: 790 path_key = get_path_key(sn.old_class, sn.new_class) 791 if (path_key not in invalid_rule_cache): 792 invalid_rules.append(sn.rule) 793 invalid_rule_cache.append(path_key) 794 return invalid_rules
795 796 # Helper function to remove non-unique rules
797 - def filter_rules_unique(self, rules):
798 rule_cache = [] 799 new_rules = [] 800 for r in rules: 801 path_key = get_path_key(r.old_class, r.new_class) 802 if (path_key not in rule_cache): 803 new_rules.append(r) 804 return new_rules
805 806 # Helper function to expand a list of rules to include subrules
807 - def expand_rules(self, rules):
808 filtered = self.filter_rules_unique(rules) 809 expanded = [] 810 for r in filtered: 811 expanded.append(r) 812 #print "For rule %s --> %s"%(r.old_class._type, r.new_class._type) 813 expanded.extend(self.expand_rules(r.sub_rules)) 814 filtered = self.filter_rules_unique(expanded) 815 return filtered
816
817 - def scaffold_range(self, old_type, new_type):
818 try: 819 first_sn = self.first_type[old_type] 820 821 sn_range = [first_sn] 822 823 found_new_type = False 824 825 tmp_sn = first_sn 826 827 while (tmp_sn.next is not None and tmp_sn.next.new_class is not None): 828 # print sn_range 829 tmp_sn = tmp_sn.next 830 if (tmp_sn != first_sn): 831 sn_range.append(tmp_sn) 832 if (tmp_sn.new_class._type == new_type): 833 found_new_type == True 834 if (found_new_type and tmp_sn.new_class._type != new_type): 835 break 836 837 return sn_range 838 839 except KeyError: 840 return []
841 842
843 - def find_target(self, old_class):
844 key = get_message_key(old_class) 845 846 last_class = old_class 847 848 try: 849 return self.found_targets[key] 850 except KeyError: 851 852 sys_class = genpy.message.get_message_class(old_class._type) 853 854 if sys_class is not None: 855 self.found_targets[key] = sys_class 856 return sys_class 857 858 try: 859 tmp_sn = self.first_type[old_class._type] 860 861 if tmp_sn.new_class is not None: 862 last_class = tmp_sn.new_class 863 864 while tmp_sn.next is not None: 865 tmp_sn = tmp_sn.next 866 867 if tmp_sn.new_class is not None: 868 last_class = tmp_sn.new_class 869 sys_class = genpy.message.get_message_class(tmp_sn.new_class._type) 870 else: 871 sys_class = None 872 873 if sys_class is not None: 874 self.found_targets[key] = sys_class 875 return sys_class 876 except KeyError: 877 pass 878 879 self.found_targets[key] = None 880 return None
881 882 # This function determines the set of rules which must be created 883 # to get from the old type to the new type.
def find_path(self, old_class, new_class):
    """Determine the sequence of ScaffoldNodes (rules) needed to migrate
    *old_class* to *new_class*.

    Results are cached in self.found_paths keyed by the class pair.  Any
    rules that have to be invented on the fly are wrapped in new
    ScaffoldNodes and also recorded in self.extra_nodes as a side effect.
    Returns a (possibly empty) list of ScaffoldNodes; nodes whose rule is
    not valid indicate an incomplete path the caller must check for.
    """
    key = get_path_key(old_class, new_class)

    # Return any path already found in the cache
    try:
        return self.found_paths[key]
    except KeyError:
        pass

    # If the new_class is None, e.g., a message has been moved and
    # we are lacking a proper rename rule, such that find_target
    # failed, the best we can do is create a half-rule from the
    # end-point
    if new_class is None:
        sn_range = self.scaffold_range(old_class._type, "")

        found_start = False

        # Search backwards for an exact (same full-text) match of old_class.
        for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
            # Skip until we find the class we're trying to match
            if (tmp_sn.old_class._type != old_class._type):
                continue
            if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
                sn_range = sn_range[ind:]
                found_start = True
                break

        # Next see if we can create a valid rule into the scaffold
        if not found_start:
            for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
                if (tmp_sn.old_class._type != old_class._type):
                    continue
                new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    R.find_sub_paths()
                    sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                    self.extra_nodes.append(sn)
                    sn_range = sn_range[ind:]
                    sn_range.insert(0,sn)
                    found_start = True
                    break

        if sn_range == []:
            tmp_class = old_class
        else:
            tmp_class = sn_range[-1].new_class

        # Terminate the path with an always-invalid half rule so the
        # caller can see migration is impossible past this point.
        new_rule = self.make_old_half_rule(tmp_class)
        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
        sn = ScaffoldNode(tmp_class, None, R)
        sn_range.append(sn)
        self.extra_nodes.append(sn)
        self.found_paths[key] = sn_range
        return sn_range

    # If the messages are the same, there is no actual path
    if (old_class._type == new_class._type and old_class._full_text.strip() == new_class._full_text.strip()):
        self.found_paths[key] = []
        return []

    sn_range = self.scaffold_range(old_class._type, new_class._type)

    # If we have no scaffolding, we just try to create the one path
    if sn_range == []:
        new_rule = self.make_update_rule(old_class, new_class)
        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
        R.find_sub_paths()
        sn = ScaffoldNode(old_class, new_class, R)
        self.extra_nodes.append(sn)
        self.found_paths[key] = [sn]
        return [sn]


    # Search for the stop point in the scaffold
    found_stop = False

    # First look for a trivial (exact full-text) match
    for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
        # Stop looking early if the classes don't match
        if (tmp_sn.new_class._type != new_class._type):
            break
        if get_message_key(tmp_sn.new_class) == get_message_key(new_class):
            sn_range = sn_range[:ind+1]
            found_stop = True
            break

    # Next see if we can create a valid rule, including the sub rules
    if not found_stop:
        for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
            if (tmp_sn.new_class._type != new_class._type):
                break
            new_rule = self.make_update_rule(tmp_sn.new_class, new_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            if R.valid:
                R.find_sub_paths()
                if R.sub_rules_valid:
                    sn = ScaffoldNode(tmp_sn.new_class, new_class, R)
                    self.extra_nodes.append(sn)
                    sn_range = sn_range[:ind+1]
                    sn_range.append(sn)
                    found_stop = True
                    break

    # If there were no valid implicit rules, we suggest a new one to the end
    if not found_stop:
        new_rule = self.make_update_rule(sn_range[-1].new_class, new_class)
        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
        R.find_sub_paths()
        sn = ScaffoldNode(sn_range[-1].new_class, new_class, R)
        self.extra_nodes.append(sn)
        sn_range.append(sn)

    # Search for the start point in the scaffold
    found_start = False

    # First look for a trivial (exact full-text) match
    for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
        # Skip until we find the class we're trying to match
        if (tmp_sn.old_class._type != old_class._type):
            continue
        if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
            sn_range = sn_range[ind:]
            found_start = True
            break

    # Next see if we can create a valid rule directly to the end, including the sub rules
    if not found_start:
        new_rule = self.make_update_rule(old_class, new_class)
        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
        if R.valid:
            R.find_sub_paths()
            if R.sub_rules_valid:
                sn = ScaffoldNode(old_class, new_class, R)
                self.extra_nodes.append(sn)
                self.found_paths[key] = [sn]
                return [sn]

    # Next see if we can create a valid rule into the scaffold, including the sub rules
    if not found_start:
        for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
            if (tmp_sn.old_class._type != old_class._type):
                continue
            new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            if R.valid:
                R.find_sub_paths()
                if R.sub_rules_valid:
                    sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                    self.extra_nodes.append(sn)
                    sn_range = sn_range[ind:]
                    sn_range.insert(0,sn)
                    found_start = True
                    break

    # If there were no valid implicit rules, we suggest a new one from the beginning
    # (the resulting rule may be invalid; callers check sn.rule.valid).
    if not found_start:
        new_rule = self.make_update_rule(old_class, sn_range[0].old_class)
        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
        R.find_sub_paths()
        sn = ScaffoldNode(old_class, sn_range[0].old_class, R)
        self.extra_nodes.append(sn)
        sn_range.insert(0,sn)

    self.found_paths[key] = sn_range
    return sn_range
1050 1051
def migrate_raw(self, msg_from, msg_to):
    """Migrate a raw message tuple *msg_from* into the format of *msg_to*.

    Each tuple is (datatype, serialized bytes, md5sum, position-ish
    field, message class); the class lives at index 4 and the md5sum at
    index 2.  Returns a new raw tuple carrying the migrated bytes.
    Raises BagMigrationException when no valid path exists.
    """
    path = self.find_path(msg_from[4], msg_to[4])

    if not all(sn.rule.valid for sn in path):
        raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from[0], msg_to[0]))

    # Matching md5sums (or an empty path) mean the serialized bytes are
    # already in the target format -- reuse them directly.
    if path == [] or msg_from[2] == msg_to[2]:
        return (msg_to[0], msg_from[1], msg_to[2], msg_to[3], msg_to[4])

    working = path[0].old_class()
    working.deserialize(msg_from[1])

    for sn in path:
        working = sn.rule.apply(working)

    out = StringIO()
    working.serialize(out)

    return (msg_to[0], out.getvalue(), msg_to[2], msg_to[3], msg_to[4])
1072 1073 1074
def migrate(self, msg_from, msg_to):
    """Migrate the deserialized message *msg_from* into *msg_to* in place.

    The result is delivered by serializing along the rule path and
    deserializing the final bytes into *msg_to*; nothing is returned.
    Raises BagMigrationException when no valid path exists.
    """
    path = self.find_path(msg_from.__class__, msg_to.__class__)

    if not all(sn.rule.valid for sn in path):
        raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type))

    # Matching md5sums (or an empty path): a straight serialize/deserialize
    # round trip is sufficient.
    if path == [] or msg_from._md5sum == msg_to._md5sum:
        scratch = StringIO()
        msg_from.serialize(scratch)
        msg_to.deserialize(scratch.getvalue())
        return

    if len(path) > 0:
        scratch = StringIO()
        msg_from.serialize(scratch)

        working = path[0].old_class()

        working.deserialize(scratch.getvalue())

        for sn in path:
            working = sn.rule.apply(working)
    else:
        working = msg_from

    scratch = StringIO()
    working.serialize(scratch)
    msg_to.deserialize(scratch.getvalue())
1104
def migrate_array(self, msg_from_array, msg_to_array):
    """Migrate a parallel array of messages element-wise, in place.

    msg_from_array and msg_to_array must have equal length; each
    msg_to_array[i] receives the migrated content of msg_from_array[i].
    Raises BagMigrationException on length mismatch or a missing path.
    """
    if len(msg_from_array) != len(msg_to_array):
        # BUG FIX: message previously read "called on on arrays".
        raise BagMigrationException("Migrate array called on arrays of unequal length.")

    if len(msg_from_array) == 0:
        return

    path = self.find_path(msg_from_array[0].__class__, msg_to_array[0].__class__)

    if path is None:
        # BUG FIX: this message previously referenced the undefined names
        # msg_from/msg_to, so the error path raised NameError instead of
        # the intended BagMigrationException.
        raise BagMigrationException("Migrate called, but no migration path from [%s] to [%s]"%(msg_from_array[0]._type, msg_to_array[0]._type))

    # Short cut to speed up case of matching md5sum: straight
    # serialize/deserialize round trip per element.
    if path == []:
        for i in range(len(msg_from_array)):
            buff = StringIO()
            msg_from_array[i].serialize(buff)
            msg_to_array[i].deserialize(buff.getvalue())
        return

    for i in range(len(msg_from_array)):
        buff = StringIO()
        tmp_msg = path[0].old_class()
        msg_from_array[i].serialize(buff)
        tmp_msg.deserialize(buff.getvalue())
        for sn in path:
            tmp_msg = sn.rule.apply(tmp_msg)

        buff = StringIO()
        tmp_msg.serialize(buff)
        msg_to_array[i].deserialize(buff.getvalue())
1136
def make_update_rule(self, old_class, new_class):
    """Generate a MessageUpdateRule subclass that converts *old_class*
    messages to *new_class* messages.

    The class is assembled as Python source text, exec'd, and returned;
    the generated class can reproduce its own source via get_class_def.
    Fields that cannot be matched mark the rule invalid and are given
    default values so the emitted rule text is still a usable template.
    """
    name = "update_%s_%s"%(old_class._type.replace("/","_"), old_class._md5sum)

    # We assemble the class as a string and then exec it to end up with a class
    # that can essentially print its own definition.
    classdef = "class %s(MessageUpdateRule):\n"%name
    classdef += "\told_type = \"%s\"\n"%old_class._type
    classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip()
    classdef += "\tnew_type = \"%s\"\n"%new_class._type
    classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip()
    classdef += "\n"
    classdef += "\torder = 0"
    classdef += "\n"

    validdef = "\tvalid = True\n"

    migratedefs = "\tmigrated_types = ["

    updatedef = "\tdef update(self, old_msg, new_msg):\n"

    old_consts = constants_from_def(old_class._type, old_class._full_text)
    new_consts = constants_from_def(new_class._type, new_class._full_text)

    # Constants may only be added, never changed or removed (set superset test).
    if (not new_consts >= old_consts):
        validdef = "\tvalid = False\n"
        for c in (old_consts - new_consts):
            updatedef += "\t\t#Constant '%s' has changed\n"%(c[0],)

    # Track old fields not yet matched; anything left over invalidates the rule.
    old_slots = []
    old_slots.extend(old_class.__slots__)

    migrations_seen = []

    # Assign across primitives, self.migrate or self.migrate_array non-primitives
    for (s,t) in zip(new_class.__slots__, new_class._slot_types):
        warn_msg = None
        new_base_type, new_is_array, new_array_len = genmsg.msgs.parse_type(t)
        try:
            ind = old_class.__slots__.index(s)
            old_slots.remove(s)
            old_base_type, old_is_array, old_array_len = genmsg.msgs.parse_type(old_class._slot_types[ind])

            if new_is_array != old_is_array:
                warn_msg = "Could not match array with nonarray"

            elif new_array_len != old_array_len:
                if old_array_len is None:
                    warn_msg = "Converted from variable length array to fixed array of length %d"%(new_array_len)
                elif new_array_len is None:
                    warn_msg = "Converted from fixed array of length %d to variable length"%(old_array_len)
                else:
                    warn_msg = "Fixed length array converted from %d to %d"%(old_array_len,new_array_len)

            elif genmsg.msgs.is_builtin(new_base_type):
                if new_base_type != old_base_type:
                    warn_msg = "Primitive type changed"
                else:
                    updatedef += "\t\tnew_msg.%s = old_msg.%s\n"%(s,s)

            else:
                tmp_old_type = clean_name(old_base_type, old_class._type)
                tmp_new_type = clean_name(new_base_type, new_class._type)

                tmp_qualified_old_type = qualified_name(old_base_type, old_class._type)
                tmp_qualified_new_type = qualified_name(new_base_type, new_class._type)

                # Verify the type can theoretically be migrated
                if (tmp_qualified_old_type == tmp_qualified_new_type) or \
                        (tmp_qualified_old_type in self.rename_map and
                         tmp_qualified_new_type in self.rename_map[tmp_qualified_old_type]):

                    if (tmp_old_type, tmp_new_type) not in migrations_seen:
                        migratedefs += "\n\t\t(\"%s\",\"%s\"),"%(tmp_old_type, tmp_new_type)
                        migrations_seen.append((tmp_old_type, tmp_new_type))

                    if not new_is_array:
                        updatedef += "\t\tself.migrate(old_msg.%s, new_msg.%s)\n"%(s,s)
                    else:
                        updatedef += "\t\tself.migrate_array(old_msg.%s, new_msg.%s, \"%s\")\n"%(s,s,new_base_type)
                else:
                    warn_msg = "No migration path between [%s] and [%s]"%(tmp_old_type, tmp_new_type)
        except ValueError:
            # .index(s) failed: the new field has no counterpart in the old message.
            warn_msg = "No matching field name in old message"

        if warn_msg is not None:
            # Any unmatched field invalidates the rule but still emits a
            # default assignment so the generated template is editable.
            validdef = "\tvalid = False\n"
            updatedef += "\t\t#%s\n"%warn_msg
            updatedef += "\t\tnew_msg.%s = %s\n"%(s,migration_default_value(t))

    migratedefs += "]\n"

    if old_slots:
        validdef = "\tvalid = False\n"
        for s in old_slots:
            updatedef += "\t\t#No field to match field %s from old message\n"%(s)

    classdef += migratedefs + '\n' + validdef + '\n' + updatedef

    # Embed the class's own source so instances can print it back out.
    printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef

    # This is probably a TERRIBLE idea?
    # NOTE(review): relies on CPython's behavior of exec() writing into the
    # locals() snapshot dict so the class is retrievable by name below.
    exec(printclassdef)
    return locals()[name]
1240
def make_old_half_rule(self, old_class):
    """Generate an always-invalid MessageUpdateRule mapping *old_class*
    to nothing.

    Used when the migration endpoint is unknown; the generated rule's
    update() is a no-op and valid is False, serving purely as a template
    for a human to fill in.
    """
    name = "update__%s__%s"%(old_class._type.replace("/","_"), old_class._md5sum)

    # Assemble the class body as source text so the generated rule can
    # print its own definition later.
    parts = [
        "class %s(MessageUpdateRule):\n"%name,
        "\told_type = \"%s\"\n"%old_class._type,
        "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip(),
        "\tnew_type = \"\"\n",
        "\tnew_full_text = \"\"\"\n\n\"\"\"\n",
        "\n",
        "\torder = 0",
        "\n",
    ]
    classdef = "".join(parts)

    migratedefs = "\tmigrated_types = []\n"
    validdef = "\tvalid = False\n"
    updatedef = "\tdef update(self, old_msg, new_msg):\n\t\tpass\n"

    classdef += migratedefs + '\n' + validdef + '\n' + updatedef

    printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef

    # This is probably a TERRIBLE idea?  exec() into the local namespace,
    # then pull the freshly created class back out by name.
    exec(printclassdef)
    return locals()[name]
1269
def make_new_half_rule(self, new_class):
    """Generate an always-invalid MessageUpdateRule mapping nothing to
    *new_class*.

    Mirror image of make_old_half_rule: a no-op, valid=False template
    for the case where the migration origin is unknown.
    """
    name = "update_to_%s_%s"%(new_class._type.replace("/","_"), new_class._md5sum)

    # Assemble the class body as source text so the generated rule can
    # print its own definition later.
    parts = [
        "class %s(MessageUpdateRule):\n"%name,
        "\told_type = \"\"\n",
        "\told_full_text = \"\"\"\n\n\"\"\"\n\n",
        "\tnew_type = \"%s\"\n"%new_class._type,
        "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip(),
        "\n",
        "\torder = 0",
        "\n",
    ]
    classdef = "".join(parts)

    migratedefs = "\tmigrated_types = []\n"
    validdef = "\tvalid = False\n"
    updatedef = "\tdef update(self, old_msg, new_msg):\n\t\tpass\n"

    classdef += migratedefs + '\n' + validdef + '\n' + updatedef

    printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef

    # This is probably a TERRIBLE idea?  exec() into the local namespace,
    # then pull the freshly created class back out by name.
    exec(printclassdef)
    return locals()[name]
1298
def migration_default_value(field_type):
    """Return Python source text for a safe default value of *field_type*.

    The returned string is spliced into generated rule code, so defaults
    are expressed as source (e.g. "0", "[]", "''"), not as values.
    """
    integral_types = ('bool', 'byte', 'int8', 'int16', 'int32', 'int64',
                      'char', 'uint8', 'uint16', 'uint32', 'uint64')
    if field_type in integral_types:
        return '0'
    if field_type in ('float32', 'float64'):
        return '0.'
    if field_type == 'string':
        # strings, byte[], and uint8s are all optimized to be strings
        return "''"
    if field_type.endswith(']'):  # array type
        base_type, is_array, array_len = genmsg.msgs.parse_type(field_type)
        if base_type in ('byte', 'uint8'):
            # strings, byte[], and uint8s are all optimized to be strings
            if array_len is None:
                return "''"
            return "chr(0)*%s"%array_len
        if array_len is None:  # var-length
            return '[]'
        # fixed-length: fill with the element type's default
        element_default = migration_default_value(base_type)
        return '[' + ','.join([element_default] * array_len) + ']'
    # Embedded message: instantiate the (possibly renamed) new class.
    return "self.get_new_class('%s')()"%field_type
1323
def constants_from_def(core_type, msg_def):
    """Extract the constants declared by *core_type* from its full text.

    Returns a set of (name, value, type) tuples for the core message
    only; dependent message definitions embedded in the text are split
    off and currently ignored.
    """
    core_pkg, core_base_type = genmsg.package_resource_name(core_type)

    # A message's full text embeds its dependencies, separated by a line
    # of 80 '=' characters; only the first section is the core message.
    sections = msg_def.split('\n' + '=' * 80 + '\n')

    core_msg = sections[0]
    deps_msgs = sections[1:]

    # create a MsgSpec representation of the .msg text
    from genmsg import MsgContext
    context = MsgContext.create_default()
    spec = genmsg.msg_loader.load_msg_from_string(context, core_msg, core_pkg)
    # NOTE(review): dependency specs were never parsed here in the
    # original either; constants come from the core message only.

    return set((c.name, c.val, c.type) for c in spec.constants)
1343