Package rosbag :: Module migration
[frames] | [no frames]

Source Code for Module rosbag.migration

   1  # Software License Agreement (BSD License) 
   2  # 
   3  # Copyright (c) 2009, Willow Garage, Inc. 
   4  # All rights reserved. 
   5  # 
   6  # Redistribution and use in source and binary forms, with or without 
   7  # modification, are permitted provided that the following conditions 
   8  # are met: 
   9  # 
  10  #  * Redistributions of source code must retain the above copyright 
  11  #    notice, this list of conditions and the following disclaimer. 
  12  #  * Redistributions in binary form must reproduce the above 
  13  #    copyright notice, this list of conditions and the following 
  14  #    disclaimer in the documentation and/or other materials provided 
  15  #    with the distribution. 
  16  #  * Neither the name of Willow Garage, Inc. nor the names of its 
  17  #    contributors may be used to endorse or promote products derived 
  18  #    from this software without specific prior written permission. 
  19  # 
  20  # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 
  21  # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 
  22  # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 
  23  # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 
  24  # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 
  25  # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
  26  # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
  27  # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
  28  # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 
  29  # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 
  30  # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
  31  # POSSIBILITY OF SUCH DAMAGE. 
  32   
  33  from __future__ import print_function 
  34   
  35  import collections 
  36  import copy 
  37  try: 
  38      from cStringIO import StringIO  # Python 2.x 
  39  except ImportError: 
  40      from io import BytesIO as StringIO  # Python 3.x 
  41  import inspect 
  42  import itertools 
  43  import os 
  44  import string 
  45  import sys 
  46  import traceback 
  47   
  48  import genmsg.msgs 
  49  import genpy 
  50  import genpy.dynamic 
  51   
  52  import rospkg 
  53   
  54  import rosbag 
  55   
  56  # Anything outside the scope of these primitives is a submessage 
  57  #_PRIMITIVES = ['bool', 'byte','int8','int16','int32','int64','char','uint8','uint16','uint32','uint64','float32','float64','string','time'] 
  58   
class BagMigrationException(Exception):
    """Raised when a bag migration fails or a rule is used incorrectly."""
    pass
def checkbag(migrator, inbag):
    """
    Check whether a bag file can be played in the current system.

    @param migrator: message migrator to use
    @param inbag: name of the bag to be checked.
    @returns A list of tuples for each type in the bag file.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    seen_keys = set()
    migrations = []

    bag = rosbag.Bag(inbag, 'r')

    for topic, raw_msg, stamp in bag.read_messages(raw=True):
        msg_class = raw_msg[4]
        key = get_message_key(msg_class)
        if key in seen_keys:
            continue
        seen_keys.add(key)
        # Even in the case of a zero-length path (matching md5sums), we
        # still want to migrate in the event of a type change (message
        # move), so the path lookup always happens.
        target = migrator.find_target(msg_class)
        path = migrator.find_path(msg_class, target)
        if len(path) > 0:
            invalid = [r for r in migrator.expand_rules([sn.rule for sn in path]) if r.valid == False]
            migrations.append((path, invalid))

    bag.close()

    return migrations
def checkmessages(migrator, messages):
    """
    Check whether a list of message classes can be migrated in the
    current system.

    @param migrator: the message migrator to use
    @param messages: a list of message classes.
    @returns A list of tuples for each type in the list.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    seen_keys = set()
    migrations = []

    for msg_class in messages:
        key = get_message_key(msg_class)
        if key in seen_keys:
            continue
        seen_keys.add(key)
        # Even in the case of a zero-length path (matching md5sums), we
        # still want to migrate in the event of a type change (message
        # move), so the path lookup always happens.
        target = migrator.find_target(msg_class)
        path = migrator.find_path(msg_class, target)
        if len(path) > 0:
            invalid = [r for r in migrator.expand_rules([sn.rule for sn in path]) if r.valid == False]
            migrations.append((path, invalid))

    return migrations
120 121 ## Fix a bag so that it can be played in the current system 122 # 123 # @param migrator The message migrator to use 124 # @param inbag Name of the bag to be fixed. 125 # @param outbag Name of the bag to be saved. 126 # @returns True if migration was successful.
def fixbag(migrator, inbag, outbag):
    """
    Fix a bag so that it can be played in the current system.

    @param migrator: the message migrator to use
    @param inbag: name of the bag to be fixed.
    @param outbag: name of the bag to be saved.
    @returns True if migration was successful.
    """
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    # Refuse to migrate if any discovered path contains invalid rules.
    if any(m[1] != [] for m in res):
        return False

    # Deserializing all messages is inefficient, but we can speed this up later
    bag = rosbag.Bag(inbag, 'r')
    rebag = rosbag.Bag(outbag, 'w', options=bag.options)
    for topic, raw_msg, stamp in bag.read_messages(raw=True):
        new_msg_type = migrator.find_target(raw_msg[4])
        mig_msg = migrator.migrate_raw(raw_msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
        rebag.write(topic, mig_msg, stamp, raw=True)
    rebag.close()
    bag.close()
    return True
144 145 ## Fix a bag so that it can be played in the current system 146 # 147 # @param migrator The message migrator to use 148 # @param inbag Name of the bag to be fixed. 149 # @param outbag Name of the bag to be saved. 150 # @returns [] if bag could be migrated, otherwise, it returns the list of necessary migration paths
def fixbag2(migrator, inbag, outbag, force=False):
    """
    Fix a bag so that it can be played in the current system.

    @param migrator: the message migrator to use
    @param inbag: name of the bag to be fixed.
    @param outbag: name of the bag to be saved.
    @param force: migrate even when some paths contain invalid rules.
    @returns [] if the bag could be migrated, otherwise the list of
    necessary migration paths.
    """
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    migrations = [m for m in res if len(m[1]) > 0]

    # Deserializing all messages is inefficient, but we can speed this up later
    if not migrations or force:
        bag = rosbag.Bag(inbag, 'r')
        rebag = rosbag.Bag(outbag, 'w', options=bag.options)
        for topic, raw_msg, stamp in bag.read_messages(raw=True):
            new_msg_type = migrator.find_target(raw_msg[4])
            if new_msg_type is None:
                # No known target: copy the message through untouched.
                rebag.write(topic, raw_msg, stamp, raw=True)
            else:
                mig_msg = migrator.migrate_raw(raw_msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
                rebag.write(topic, mig_msg, stamp, raw=True)
        rebag.close()
        bag.close()

    return [] if force else migrations
175 176 ## Helper function to strip out roslib and package name from name usages. 177 # 178 # There is some inconsistency in whether a fully-qualified path is 179 # used for sub-messages within a given message. This function is 180 # useful for stripping out the package name in a fully qualified 181 # sub-message. 182 # 183 # @param name The name to clean. 184 # @param top_name The name of the top-level type 185 # @returns The cleaned version of the name.
def clean_name(name, top_name):
    """
    Strip roslib and the top-level package name out of a type name.

    There is some inconsistency in whether a fully-qualified path is
    used for sub-messages within a given message; this removes the
    'std_msgs' prefix and the package of top_name when present.

    @param name: the name to clean.
    @param top_name: the name of the top-level type.
    @returns The cleaned version of the name.
    """
    parts = name.split('/')
    # Remove at most one occurrence of each prefix, mirroring
    # list.remove semantics.
    for pkg in ('std_msgs', top_name.split('/')[0]):
        if pkg in parts:
            parts.remove(pkg)
    return '/'.join(parts)
198 199 ## Helper function to ensure we end up with a qualified name 200 # 201 # There is some inconsistency in whether a fully-qualified path is 202 # used for sub-messages within a given message. This function is 203 # useful for ensuring that a name is fully qualified correctly. 204 # 205 # @param name The name to quailfy 206 # @param top_name The name of the top-level type 207 # @returns The qualified version of the name.
def qualified_name(name, top_name):
    """
    Ensure we end up with a fully-qualified type name.

    There is some inconsistency in whether a fully-qualified path is
    used for sub-messages within a given message; this qualifies a name
    correctly relative to top_name.

    @param name: the name to qualify.
    @param top_name: the name of the top-level type.
    @returns The qualified version of the name.
    """
    # First clean the name, to make everything else more deterministic
    cleaned = clean_name(name, top_name)

    if genmsg.msgs.is_builtin(cleaned) or len(cleaned.split('/')) == 2:
        return cleaned
    if cleaned == 'Header':
        return 'std_msgs/Header'
    return top_name.split('/')[0] + '/' + cleaned
218 219 ## Helper function to return a key from a given class 220 # 221 # For now, we choose the tuple (type,md5sum) as a unique key for the 222 # class. However, this is subject to change and assumptions about keys 223 # should not be made other than their uniqueness. 224 # 225 # @param c The message class or instance to get a key for 226 # @returns The unique key
def get_message_key(c):
    """
    Return a unique key for a given message class or instance.

    For now, we choose the tuple (type, md5sum) as a unique key for the
    class.  However, this is subject to change and assumptions about
    keys should not be made other than their uniqueness.

    @param c: the message class or instance to get a key for.
    @returns The unique (type, md5sum) key, or None if c does not carry
        the message attributes.
    """
    # Narrowed from a bare except: only missing _type/_md5sum
    # attributes are an expected failure here; anything else (e.g.
    # KeyboardInterrupt) should propagate.
    try:
        return (c._type, c._md5sum)
    except AttributeError:
        return None
232 233 ## Helper function to return a key for a given path 234 # 235 # For now, we choose the tuple ((type1,md5sum1),(type2,md5sum2)) as a 236 # unique key for the path. However, this is subject to change and 237 # assumptions about keys should not be made other than their 238 # uniqueness. 239 # 240 # @param c1 The start point of the path 241 # @param c1 The stop point of the path 242 # @returns The unique key
def get_path_key(c1, c2):
    """
    Return a unique key for a migration path between two classes.

    For now, we choose the tuple ((type1, md5sum1), (type2, md5sum2))
    as a unique key for the path.  However, this is subject to change
    and assumptions about keys should not be made other than their
    uniqueness.

    @param c1: the start point of the path.
    @param c2: the stop point of the path.
    @returns The unique key, or None on failure.
    """
    try:
        start_key = get_message_key(c1)
        stop_key = get_message_key(c2)
        return (start_key, stop_key)
    except:
        return None
248 249 ## Base class for all message update rules
class MessageUpdateRule(object):
    """Base class for all message update rules.

    A rule describes how to migrate one message type from an old
    definition to a new one.  Subclasses override the class attributes
    below and implement update(); the migrator instantiates the rule
    and drives it through apply().
    """

    old_type = ''        # full typename of the old message
    old_full_text = ''   # complete .msg definition text of the old message
    new_type = ''        # full typename of the new message
    new_full_text = ''   # complete .msg definition text of the new message
    migrated_types = []  # (old_subtype, new_subtype) pairs used by update()

    order = -1           # position of this rule within its type's rule chain

    valid = False        # rule authors must set this True for the rule to be usable

    # Raised internally when old_type/new_type has been left empty
    class EmptyType(Exception):
        pass

    ## Initialize class
    #
    # Dynamically generates message classes from the old and new
    # definitions; a failure (other than an empty definition) is
    # printed to stderr and leaves the corresponding class as None.
    def __init__(self, migrator, location):
        # Every rule needs to hang onto the migrator so we can potentially use it
        self.migrator = migrator
        self.location = location

        # A differing typename means this rule performs a rename
        if (self.old_type != self.new_type):
            self.rename_rule = True
        else:
            self.rename_rule = False

        # Instantiate types dynamically based on definition
        try:
            if self.old_type == "":
                raise self.EmptyType
            self.old_types = genpy.dynamic.generate_dynamic(self.old_type, self.old_full_text)
            self.old_class = self.old_types[self.old_type]
            self.old_md5sum = self.old_class._md5sum
        except Exception as e:
            if not isinstance(e, self.EmptyType):
                traceback.print_exc(file=sys.stderr)
            self.old_types = {}
            self.old_class = None
            self.old_md5sum = ""
        try:
            if self.new_type == "":
                raise self.EmptyType
            self.new_types = genpy.dynamic.generate_dynamic(self.new_type, self.new_full_text)
            self.new_class = self.new_types[self.new_type]
            self.new_md5sum = self.new_class._md5sum
        except Exception as e:
            if not isinstance(e, self.EmptyType):
                traceback.print_exc(file=sys.stderr)
            self.new_types = {}
            self.new_class = None
            self.new_md5sum = ""

        # We have not populated our sub rules (and ideally should
        # wait until the full scaffold exists before doing this)
        self.sub_rules_done = False
        self.sub_rules_valid = False
        self.sub_rules = []

    ## Find all of the sub paths
    #
    # For any migrated type the user might want to use, we must make
    # sure the migrator has found a path for it.  To facilitate this
    # check we require that all migrated types must be listed as pairs
    # in the migrated_types field.
    #
    # It would be nice not to need these through performing some kind
    # of other inspection of the update rule itself.
    def find_sub_paths(self):
        self.sub_rules_valid = True
        for (t1, t2) in self.migrated_types:
            try:
                tmp_old_class = self.get_old_class(t1)
            except KeyError:
                print("WARNING: Within rule [%s], specified migrated type [%s] not found in old message types" % (self.location, t1), file=sys.stderr)
                self.sub_rules_valid = False
                continue
            try:
                tmp_new_class = self.get_new_class(t2)
            except KeyError:
                print("WARNING: Within rule [%s], specified migrated type [%s] not found in new message types" % (self.location, t2), file=sys.stderr)
                self.sub_rules_valid = False
                continue

            # If a rule instantiates itself as a subrule (because the
            # author knows the md5sums match), we don't want to end up
            # with an infinite recursion.
            if (get_message_key(tmp_old_class) != get_message_key(self.old_class)) or (get_message_key(tmp_new_class) != get_message_key(self.new_class)):
                path = self.migrator.find_path(tmp_old_class, tmp_new_class)
                rules = [sn.rule for sn in path]
                self.sub_rules.extend(rules)

            if False in [r.valid for r in self.sub_rules]:
                print("WARNING: Within rule [%s] cannot migrate from subtype [%s] to [%s].." % (self.location, t1, t2), file=sys.stderr)
                self.sub_rules_valid = False
                continue
        self.sub_rules = self.migrator.filter_rules_unique(self.sub_rules)
        self.sub_rules_done = True

    ## Helper function to get the class of a submsg for the new type
    #
    # This function should be used inside of update to access new classes.
    # Lookup falls back from the bare name to 'std_msgs/<t>' and then to
    # the new type's own package.
    #
    # @param t The subtype to return the class of
    # @returns The class of the new sub type
    def get_new_class(self,t):
        try:
            try:
                return self.new_types[t]
            except KeyError:
                return self.new_types['std_msgs/' + t]
        except KeyError:
            return self.new_types[self.new_type.split('/')[0] + '/' + t]

    ## Helper function to get the class of a submsg for the old type
    #
    # This function should be used inside of update to access old classes.
    # Lookup falls back from the bare name to 'std_msgs/<t>' and then to
    # the old type's own package.
    #
    # @param t The subtype to return the class of
    # @returns The class of the old sub type
    def get_old_class(self,t):
        try:
            try:
                return self.old_types[t]
            except KeyError:
                return self.old_types['std_msgs/' + t]
        except KeyError:
            return self.old_types[self.old_type.split('/')[0] + '/' + t]

    ## Actually migrate one sub_type to another
    #
    # This function should be used inside of update to migrate sub msgs.
    # The (old, new) pair must have been declared in migrated_types.
    #
    # @param msg_from A message instance of the old message type
    # @param msg_to A message instance of a new message type to be populated
    def migrate(self, msg_from, msg_to):
        tmp_msg_from = clean_name(msg_from._type, self.old_type)
        tmp_msg_to = clean_name(msg_to._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))
        self.migrator.migrate(msg_from, msg_to)

    ## Helper function to migrate a whole array of messages
    #
    # This function should be used inside of update to migrate arrays of sub msgs.
    # msg_to_array is emptied first and then refilled with one new
    # instance per source element.
    #
    # @param msg_from_array An array of messages of the old message type
    # @param msg_to_array An array of messages of the new message type (this will be emptied if not already)
    # @param msg_to_name The name of the new message type since msg_to_array may be an empty array.
    def migrate_array(self, msg_from_array, msg_to_array, msg_to_name):
        msg_to_class = self.get_new_class(msg_to_name)

        while len(msg_to_array) > 0:
            msg_to_array.pop()

        if (len(msg_from_array) == 0):
            return

        tmp_msg_from = clean_name(msg_from_array[0]._type, self.old_type)
        tmp_msg_to = clean_name(msg_to_class._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))

        msg_to_array.extend( [msg_to_class() for i in range(len(msg_from_array))] )

        self.migrator.migrate_array(msg_from_array, msg_to_array)

    ## A helper function to print out the definition of autogenerated messages.
    def get_class_def(self):
        pass

    ## The function actually called by the message migrator
    #
    # Validates that the rule is usable and that old_msg really is an
    # instance of the rule's old class before delegating to update().
    #
    # @param old_msg An instance of the old message type.
    # @returns An instance of a new message type
    def apply(self, old_msg):
        if not self.valid:
            raise BagMigrationException("Attempted to apply an invalid rule")
        if not self.sub_rules_done:
            raise BagMigrationException("Attempted to apply a rule without building up its sub rules")
        if not self.sub_rules_valid:
            raise BagMigrationException("Attempted to apply a rule without valid sub-rules")
        if (get_message_key(old_msg) != get_message_key(self.old_class)):
            raise BagMigrationException("Attempted to apply rule to incorrect class %s %s."%(get_message_key(old_msg),get_message_key(self.old_class)))

        # Apply update rule
        new_msg = self.new_class()
        self.update(old_msg, new_msg)

        return new_msg

    ## The function which a user overrides to actually perform the message update
    #
    # @param old_msg A message instance of the old message type
    # @param new_msg A message instance of a new message type to be populated
    def update(self, old_msg, new_msg):
        raise BagMigrationException("Tried to use rule without update overidden")
445 446 447 ## A class for book-keeping about rule-chains. 448 # 449 # Rule chains define the ordered set of update rules, indexed by 450 # typename, terminated by a rename rule. This class is only used 451 # temporarily to help us get the ordering right, until all explicit 452 # rules have been loaded (possibly out of order) and the proper 453 # scaffold can be built.
class RuleChain(object):
    """
    Book-keeping for one type's ordered set of update rules.

    A rule chain is the ordered list of update rules for a typename,
    optionally terminated by a rename rule.  It is only used
    temporarily, while the proper scaffold is being built.
    """
    def __init__(self):
        # Ordered update rules for this type
        self.chain = []
        # Order numbers already claimed by rules in the chain
        self.order_keys = set()
        # Optional terminating rename rule
        self.rename = None
459 460 461 ## A class for arranging the ordered rules 462 # 463 # They provide a scaffolding (essentially a linked list) over which we 464 # assume we can migrate messages forward. This allows us to verify a 465 # path exists before actually creating all of the necessary implicit 466 # rules (mostly migration of sub-messages) that such a path 467 # necessitates.
class ScaffoldNode(object):
    """
    One link in the migration scaffold.

    Scaffold nodes form a linked list over which messages are assumed
    to migrate forward, letting us verify a path exists before creating
    all of the implicit rules such a path requires.
    """
    def __init__(self, old_class, new_class, rule):
        self.old_class = old_class
        self.new_class = new_class
        self.rule = rule
        # Filled in later when the node is linked into a chain
        self.next = None
474 475 ## A class to actually migrate messages 476 # 477 # This is the big class that actually handles all of the fancy 478 # migration work. Better documentation to come later.
479 -class MessageMigrator(object):
    def __init__(self, input_rule_files=[], plugins=True):
        """Build the full migration scaffold.

        @param input_rule_files: local rule files to load (mostly for
            debugging).
        @param plugins: if True, also load rule files exported by
            packages via the migration ruleset export flags.
        """
        # We use the rulechains to scaffold our initial creation of
        # implicit rules.  Each RuleChain is keyed off of a type and
        # consists of an ordered set of update rules followed by an
        # optional rename rule.  For the system rule definitions to be
        # valid, all members of a rulechains must be connectable via
        # implicit rules and all rulechains must terminate in a known
        # system type which is also reachable by an implicit rule.
        self.rulechains = collections.defaultdict(RuleChain)

        # The list of all nodes that we can iterate through in the
        # future when making sure all rules have been constructed.
        self.base_nodes = []

        # The list of extra (non-scaffolded) nodes that we can use
        # when determining if all rules are valid and printing invalid
        # rules.
        self.extra_nodes = []

        # A map from typename to the first node of a particular type
        self.first_type = {}

        # A map from a typename to all other typenames for which
        # rename rules exist.  This is necessary to determine whether
        # an appropriate implicit rule can actually be constructed.
        self.rename_map = {}

        # The cached set of all found paths, keyed by:
        # ((old_type, old_md5), (new_type, new_md5))
        self.found_paths = {}
        self.found_targets = {}

        # Temporary list of the terminal nodes
        terminal_nodes = []

        # Temporary list of rule modules we are loading
        rule_dicts = []

        self.false_rule_loaded = False

        # To make debugging easy we can pass in a list of local
        # rulefiles.
        for r in input_rule_files:
            try:
                scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                with open(r, 'r') as f:
                    exec(f.read(), scratch_locals)
                rule_dicts.append((scratch_locals, r))
            except:
                print("Cannot load rule file [%s] in local package" % r, file=sys.stderr)

        # Alternatively the preferred method is to load definitions
        # from the migration ruleset export flag.
        if plugins:
            rospack = rospkg.RosPack()
            for dep,export in [('rosbagmigration','rule_file'),('rosbag','migration_rule_file'),('rosbag_migration_rule','rule_file')]:
                for pkg in rospack.get_depends_on(dep, implicit=False):
                    m = rospack.get_manifest(pkg)
                    p_rules = m.get_export(dep,export)
                    pkg_dir = rospack.get_path(pkg)
                    for r in p_rules:
                        if dep == 'rosbagmigration':
                            print("""WARNING: The package: [%s] is using a deprecated rosbagmigration export.
    The export in the manifest should be changed to:
    <rosbag migration_rule_file="%s"/>
""" % (pkg, r), file=sys.stderr)
                        try:
                            scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                            exec(open(pkg_dir + "/" + r).read(), scratch_locals)
                            rule_dicts.append((scratch_locals, r))
                        except ImportError:
                            print("Cannot load rule file [%s] in package [%s]" % (r, pkg), file=sys.stderr)

        # Instantiate and register every MessageUpdateRule subclass
        # found in the loaded rule modules.
        for (rule_dict, location_base) in rule_dicts:
            for (n,c) in rule_dict.items():
                if inspect.isclass(c):
                    if (not c == MessageUpdateRule) and issubclass(c, MessageUpdateRule):
                        self.add_update_rule(c(self, location_base + ':' + n))

        if self.false_rule_loaded:
            raise BagMigrationException("Cannot instantiate MessageMigrator with invalid rules")

        # Now, go through and build up a better scaffolded
        # representation, deferring implicit rule generation until
        # complete, since the implicit rule generation and sub-rule
        # population makes use of the scaffold.

        # First we process each particular type chain (now including
        # implicit rules).  Additionally, we build up our name
        # remapping lists.

        # For each rulechain
        for (type,rulechain) in self.rulechains.items():
            first = True
            sn = None
            prev_sn = None

            # Find name remapping list by following rename rules
            # forward until the chain ends or leaves known types.
            rename_set = set([type])
            tmp = rulechain.rename
            while tmp:
                rename_set.add(tmp.new_type)
                if tmp.new_type in self.rulechains:
                    tmp = self.rulechains[tmp.new_type].rename
                else:
                    break

            self.rename_map[type] = rename_set

            # For each element in the rulechain chain,
            for r in rulechain.chain:
                # Create a scaffoldnode
                sn = ScaffoldNode(r.old_class, r.new_class, r)
                self.base_nodes.append(sn)
                # If it's the first one, stick it in our first_type map
                if first:
                    self.first_type[type] = sn
                    first = False
                # If there was a previous node, link them if keys
                # match, or else create an implicit SN
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                # The just-created node now becomes the previous
                prev_sn = sn

            # If there is a rename rule
            if rulechain.rename:
                # Create a scaffoldnode
                sn = ScaffoldNode(rulechain.rename.old_class, rulechain.rename.new_class, rulechain.rename)
                self.base_nodes.append(sn)

                # Same rules apply here as when we created each node
                # from chain.  Link if possible, otherwise create
                # implicit
                if first:
                    self.first_type[type] = sn
                    first = False
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                prev_sn = sn
                terminal_nodes.append(sn)
            # If there was not a rename rule, this must be a terminal node
            else:
                if prev_sn:
                    terminal_nodes.append(prev_sn)

        # Between our partial scaffold and name remapping list, we can
        # now GENERATE rules, though we cannot yet populate the
        # subrules.
        for sn in terminal_nodes:
            key = get_message_key(sn.new_class)

            renamed = (sn.old_class._type != sn.new_class._type)

            sys_class = genpy.message.get_message_class(sn.new_class._type)

            # If we map directly to a system-defined class we're done
            if sys_class:
                new_rule = self.make_update_rule(sn.new_class, sys_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    sn.next = ScaffoldNode(sn.new_class, sys_class, R)
                    self.base_nodes.append(sn.next)

            if renamed:
                tmp_sns = self.scaffold_range(sn.new_class._type, sn.new_class._type)

                # If we don't map to a scaffold range, we appear to be done
                if tmp_sns == []:
                    if sys_class is not None:
                        sn.next = ScaffoldNode(sn.new_class, sys_class, None)
                        self.base_nodes.append(sn.next)
                    continue

                # Otherwise look for trivial bridges
                for tmp_sn in reversed(tmp_sns):
                    tmp_key = get_message_key(tmp_sn.old_class)
                    if (key == tmp_key):
                        sn.next = tmp_sn
                        break

                # If we did not find a trivial bridge, we instead need
                # to create the right implicit rule ourselves.  This
                # is based on the ability to create a valid implicit
                # rule as LATE in the chain as possible.  We do this
                # to avoid extra conversions in some boundary
                # circumstances.
                if (sn.next is None):
                    for tmp_sn in reversed(tmp_sns):
                        new_rule = self.make_update_rule(sn.new_class, tmp_sn.old_class)
                        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                        if R.valid:
                            sn.next = ScaffoldNode(sn.new_class, tmp_sn.old_class, R)
                            self.base_nodes.append(sn.next)
                            break

            # If we have still failed we need to create a placeholder.
            if (sn.next is None):
                if sys_class:
                    new_rule = self.make_update_rule(sn.new_class, sys_class)
                else:
                    new_rule = self.make_old_half_rule(sn.new_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                sn.next = ScaffoldNode(sn.new_class, None, R)
                self.base_nodes.append(sn.next)

        # Now that our scaffolding is actually complete, we iterate
        # through all of our rules and generate the rules for which we
        # have scaffoldnodes, but no rule yet
        for sn in self.base_nodes:
            if (sn.rule is None):
                new_rule = self.make_update_rule(sn.old_class, sn.new_class)
                sn.rule = new_rule(self, 'GENERATED.' + new_rule.__name__)

        # Finally, we go through and try to find sub_paths for every
        # rule in the system so far
        for sn in self.base_nodes:
            sn.rule.find_sub_paths()

        # Construction should be done, we can now use the system in
        # the event that we don't have invalid update rules.

        self.class_dict = {}

        for sn in self.base_nodes + self.extra_nodes:
            self.class_dict[get_message_key(sn.old_class)] = sn.old_class
            self.class_dict[get_message_key(sn.new_class)] = sn.new_class
723 724
725 - def lookup_type(self, key):
726 if key in self.class_dict: 727 return self.class_dict[key] 728 else: 729 return None
730 731 # Add an update rule to our set of rule chains
    def add_update_rule(self, r):
        """Add an update rule to our set of rule chains.

        Invalid rules set false_rule_loaded; rename rules must not
        create cycles and must carry the largest order number in their
        chain; non-rename rules must have unique order numbers smaller
        than the rename rule's.
        """
        if r.valid == False:
            print("ERROR: Update rule [%s] has valid set to False." % (r.location), file=sys.stderr)
            self.false_rule_loaded = True
            return

        rulechain = self.rulechains[r.old_type]

        if r.rename_rule:
            # Only one rename rule per type is allowed.
            if (rulechain.rename != None):
                print("WARNING: Update rules [%s] and [%s] both attempting to rename type [%s]. Ignoring [%s]" % (rulechain.rename.location, r.location, r.old_type, r.location), file=sys.stderr)
                return

            # Search forward to make sure we haven't created a cycle
            cycle = []
            tmp = r
            while tmp:
                cycle.append(tmp)
                if (tmp.new_type == r.old_type):
                    print("WARNING: Update rules %s introduce a renaming cycle. Ignoring [%s]" % ([x.location for x in cycle], r.location), file=sys.stderr)
                    return
                if tmp.new_type in self.rulechains:
                    tmp = self.rulechains[tmp.new_type].rename
                else:
                    break

            # The rename rule terminates the chain, so it must come last.
            if rulechain.chain and (r.order <= rulechain.chain[-1].order):
                print("WARNING: Update rule [%s] which performs rename does not have largest order number. Ignoring" % r.location, file=sys.stderr)
                return

            rulechain.rename = r

        else:
            if r.order in rulechain.order_keys:
                otherind = [x.order for x in rulechain.chain].index(r.order)
                print("WARNING: Update rules [%s] and [%s] for type [%s] have the same order number. Ignoring [%s]" % (rulechain.chain[otherind].location, r.location, r.old_type, r.location), file=sys.stderr)
                return
            else:
                if rulechain.rename and (r.order >= rulechain.rename.order):
                    print("WARNING: Update rule [%s] has order number larger than rename rule [%s]. Ignoring" % (r.location, rulechain.rename.location), file=sys.stderr)
                    return
                # Insert the rule into a rule chain
                rulechain.order_keys.add(r.order)
                rulechain.chain.append(r)
                rulechain.chain.sort(key=lambda x: x.order)
778 779 # Helper function to determine if all rules are valid
780 - def all_rules_valid(self):
781 base_valid = not False in [sn.rule.valid for sn in self.base_nodes] 782 extra_valid = not False in [sn.rule.valid for sn in self.extra_nodes] 783 return base_valid and extra_valid
784 785 # Helper function to print out the definitions for all invalid rules (which include definitions)
786 - def get_invalid_rules(self):
787 invalid_rules = [] 788 invalid_rule_cache = [] 789 for sn in self.base_nodes: 790 if not sn.rule.valid: 791 path_key = get_path_key(sn.old_class, sn.new_class) 792 if (path_key not in invalid_rule_cache): 793 invalid_rules.append(sn.rule) 794 invalid_rule_cache.append(path_key) 795 for sn in self.extra_nodes: 796 if not sn.rule.valid: 797 path_key = get_path_key(sn.old_class, sn.new_class) 798 if (path_key not in invalid_rule_cache): 799 invalid_rules.append(sn.rule) 800 invalid_rule_cache.append(path_key) 801 return invalid_rules
802 803 # Helper function to remove non-unique rules
804 - def filter_rules_unique(self, rules):
805 rule_cache = [] 806 new_rules = [] 807 for r in rules: 808 path_key = get_path_key(r.old_class, r.new_class) 809 if (path_key not in rule_cache): 810 new_rules.append(r) 811 return new_rules
812 813 # Helper function to expand a list of rules to include subrules
814 - def expand_rules(self, rules):
815 filtered = self.filter_rules_unique(rules) 816 expanded = [] 817 for r in filtered: 818 expanded.append(r) 819 #print "For rule %s --> %s"%(r.old_class._type, r.new_class._type) 820 expanded.extend(self.expand_rules(r.sub_rules)) 821 filtered = self.filter_rules_unique(expanded) 822 return filtered
823
    def scaffold_range(self, old_type, new_type):
        """Return the chain of scaffold nodes starting at old_type.

        Walks the scaffold linked list from the first node registered
        for old_type, collecting nodes until the chain ends or until
        the walk has moved past the last node producing new_type.

        @param old_type: typename whose chain to walk.
        @param new_type: typename at which the walk may stop.
        @returns: the list of ScaffoldNodes in chain order, or [] if
            old_type has no chain.
        """
        try:
            first_sn = self.first_type[old_type]

            sn_range = [first_sn]

            # Becomes True once we have passed a node producing new_type
            found_new_type = False

            tmp_sn = first_sn

            while (tmp_sn.next is not None and tmp_sn.next.new_class is not None):
                tmp_sn = tmp_sn.next
                if (tmp_sn != first_sn):
                    sn_range.append(tmp_sn)
                if (tmp_sn.new_class._type == new_type):
                    found_new_type = True
                # Stop once the chain has moved beyond new_type
                if (found_new_type and tmp_sn.new_class._type != new_type):
                    break

            return sn_range

        except KeyError:
            # old_type has no scaffold chain at all
            return []
848 849
    def find_target(self, old_class):
        """Find the class that old_class should be migrated to.

        The target is either the system class registered under the same
        type name, or — when the type has been renamed — the system
        class at the end of its scaffold chain.  Results (including
        failures) are cached in found_targets.

        @param old_class: the (possibly outdated) message class.
        @returns: the system message class to migrate to, or None if no
            such class can be found.
        """
        key = get_message_key(old_class)

        # NOTE(review): last_class is tracked but never used below —
        # candidate for removal.
        last_class = old_class

        # Return a cached answer if we have resolved this class before
        try:
            return self.found_targets[key]
        except KeyError:

            # If the system already knows this exact type name, that
            # is the target.
            sys_class = genpy.message.get_message_class(old_class._type)

            if sys_class is not None:
                self.found_targets[key] = sys_class
                return sys_class

            # Otherwise walk this type's scaffold chain to its end and
            # look up the final type name in the system.
            try:
                tmp_sn = self.first_type[old_class._type]

                if tmp_sn.new_class is not None:
                    last_class = tmp_sn.new_class

                while tmp_sn.next is not None:
                    tmp_sn = tmp_sn.next

                if tmp_sn.new_class is not None:
                    last_class = tmp_sn.new_class
                    sys_class = genpy.message.get_message_class(tmp_sn.new_class._type)
                else:
                    sys_class = None

                if sys_class is not None:
                    self.found_targets[key] = sys_class
                    return sys_class
            except KeyError:
                pass

            # No target could be found; cache the failure as well.
            self.found_targets[key] = None
            return None
888 889 # This function determines the set of rules which must be created 890 # to get from the old type to the new type.
def find_path(self, old_class, new_class):
    """Determine the set of rules needed to migrate old_class to new_class.

    Returns a list of ScaffoldNodes (possibly empty when the classes are
    already identical); results are cached in self.found_paths.  When
    new_class is None only a "half path" ending in an invalid placeholder
    rule is produced.  Generated rules are recorded in self.extra_nodes so
    they can later be suggested to the user.
    """
    key = get_path_key(old_class, new_class)

    # Return any path already found in the cache
    try:
        return self.found_paths[key]
    except KeyError:
        pass

    # If the new_class is none, e.g., a message has been moved and
    # we are lacking a proper rename rule, such that find-target
    # failed, the best we can do is create a half-rule from the
    # end-point
    if new_class is None:
        sn_range = self.scaffold_range(old_class._type, "")

        found_start = False

        # First look for a trivial (exact md5) match for the start point.
        for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
            # Skip until we find the class we're trying to match
            if (tmp_sn.old_class._type != old_class._type):
                continue
            if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
                sn_range = sn_range[ind:]
                found_start = True
                break

        # Next see if we can create a valid rule
        if not found_start:
            for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
                if (tmp_sn.old_class._type != old_class._type):
                    continue
                new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    R.find_sub_paths()
                    sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                    self.extra_nodes.append(sn)
                    sn_range = sn_range[ind:]
                    sn_range.insert(0,sn)
                    found_start = True
                    break

        # Terminate the path with an always-invalid "old half" rule so the
        # user is prompted to complete it.
        if sn_range == []:
            tmp_class = old_class
        else:
            tmp_class = sn_range[-1].new_class

        new_rule = self.make_old_half_rule(tmp_class)
        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
        sn = ScaffoldNode(tmp_class, None, R)
        sn_range.append(sn)
        self.extra_nodes.append(sn)
        self.found_paths[key] = sn_range
        return sn_range

    # If the messages are the same, there is no actual path
    if (old_class._type == new_class._type and old_class._full_text.strip() == new_class._full_text.strip()):
        self.found_paths[key] = []
        return []

    sn_range = self.scaffold_range(old_class._type, new_class._type)

    # If we have no scaffolding, we just try to create the one path
    if sn_range == []:
        new_rule = self.make_update_rule(old_class, new_class)
        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
        R.find_sub_paths()
        sn = ScaffoldNode(old_class, new_class, R)
        self.extra_nodes.append(sn)
        self.found_paths[key] = [sn]
        return [sn]

    # Search for the stop point in the scaffold
    found_stop = False

    # First look for a trivial match
    for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
        # Stop looking early if the classes don't match
        if (tmp_sn.new_class._type != new_class._type):
            break
        if get_message_key(tmp_sn.new_class) == get_message_key(new_class):
            sn_range = sn_range[:ind+1]
            found_stop = True
            break

    # Next see if we can create a valid rule
    if not found_stop:
        for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
            if (tmp_sn.new_class._type != new_class._type):
                break
            new_rule = self.make_update_rule(tmp_sn.new_class, new_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            if R.valid:
                R.find_sub_paths()
                sn = ScaffoldNode(tmp_sn.new_class, new_class, R)
                self.extra_nodes.append(sn)
                sn_range = sn_range[:ind+1]
                sn_range.append(sn)
                found_stop = True
                break

    # If there were no valid implicit rules, we suggest a new one to the end
    if not found_stop:
        new_rule = self.make_update_rule(sn_range[-1].new_class, new_class)
        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
        R.find_sub_paths()
        sn = ScaffoldNode(sn_range[-1].new_class, new_class, R)
        self.extra_nodes.append(sn)
        sn_range.append(sn)

    # Search for the start point in the scaffold
    found_start = False

    # First look for a trivial match
    for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
        # Skip until we find the class we're trying to match
        if (tmp_sn.old_class._type != old_class._type):
            continue
        if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
            sn_range = sn_range[ind:]
            found_start = True
            break

    # Next see if we can create a valid rule
    if not found_start:
        for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
            if (tmp_sn.old_class._type != old_class._type):
                continue
            new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            if R.valid:
                R.find_sub_paths()
                sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                self.extra_nodes.append(sn)
                sn_range = sn_range[ind:]
                sn_range.insert(0,sn)
                found_start = True
                break

    # If there were no valid implicit rules, we suggest a new one from the beginning
    if not found_start:
        new_rule = self.make_update_rule(old_class, sn_range[0].old_class)
        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
        R.find_sub_paths()
        sn = ScaffoldNode(old_class, sn_range[0].old_class, R)
        self.extra_nodes.append(sn)
        sn_range.insert(0,sn)

    self.found_paths[key] = sn_range
    return sn_range
1043 1044
def migrate_raw(self, msg_from, msg_to):
    """Migrate a raw message tuple from the old to the new form.

    ``msg_from`` / ``msg_to`` are 5-tuples of
    (datatype, data, md5sum, position, class).  Returns a new tuple built
    from msg_to's metadata carrying the migrated serialized bytes.
    Raises BagMigrationException when no valid migration path exists.
    """
    path = self.find_path(msg_from[4], msg_to[4])

    if not all(sn.rule.valid for sn in path):
        raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from[0], msg_to[0]))

    # Matching md5sums (or an empty path) mean the serialized bytes can be
    # reused unchanged.
    if not path or msg_from[2] == msg_to[2]:
        return (msg_to[0], msg_from[1], msg_to[2], msg_to[3], msg_to[4])

    # Deserialize into the first rule's old class, then apply each rule in
    # sequence.
    tmp_msg = path[0].old_class()
    tmp_msg.deserialize(msg_from[1])
    for sn in path:
        tmp_msg = sn.rule.apply(tmp_msg)

    buff = StringIO()
    tmp_msg.serialize(buff)

    return (msg_to[0], buff.getvalue(), msg_to[2], msg_to[3], msg_to[4])
1065 1066 1067
def migrate(self, msg_from, msg_to):
    """Migrate ``msg_from`` into ``msg_to`` in place.

    Serializes msg_from, runs it through each rule on the migration path,
    and deserializes the result into msg_to.  Raises BagMigrationException
    when no valid migration path exists.
    """
    path = self.find_path(msg_from.__class__, msg_to.__class__)

    if not all(sn.rule.valid for sn in path):
        raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type))

    # Identical md5sums (or an empty path) mean a straight serialize/
    # deserialize copy suffices.
    if not path or msg_from._md5sum == msg_to._md5sum:
        buff = StringIO()
        msg_from.serialize(buff)
        msg_to.deserialize(buff.getvalue())
        return

    # Re-hydrate the bytes as the first rule's old class, then apply each
    # rule in sequence.
    serialized = StringIO()
    msg_from.serialize(serialized)
    tmp_msg = path[0].old_class()
    tmp_msg.deserialize(serialized.getvalue())
    for sn in path:
        tmp_msg = sn.rule.apply(tmp_msg)

    out = StringIO()
    tmp_msg.serialize(out)
    msg_to.deserialize(out.getvalue())
1097
def migrate_array(self, msg_from_array, msg_to_array):
    """Migrate each message of msg_from_array into msg_to_array in place.

    Both arrays must be the same length.  Raises BagMigrationException on
    length mismatch or when no migration path exists between the element
    classes.

    Fixes: the no-path error message previously referenced the undefined
    names ``msg_from``/``msg_to`` (raising NameError instead of the
    intended exception), and the length-mismatch message contained a
    duplicated word ("on on").
    """
    if len(msg_from_array) != len(msg_to_array):
        raise BagMigrationException("Migrate array called on arrays of unequal length.")

    if len(msg_from_array) == 0:
        return

    path = self.find_path(msg_from_array[0].__class__, msg_to_array[0].__class__)

    if path is None:
        raise BagMigrationException("Migrate called, but no migration path from [%s] to [%s]"%(msg_from_array[0]._type, msg_to_array[0]._type))

    # Short cut to speed up case of matching md5sum: straight copy when no
    # migration is needed.
    if path == []:
        for i in range(len(msg_from_array)):
            buff = StringIO()
            msg_from_array[i].serialize(buff)
            msg_to_array[i].deserialize(buff.getvalue())
        return

    for i in range(len(msg_from_array)):
        # Re-hydrate each element as the first rule's old class, then apply
        # every rule on the path.
        buff = StringIO()
        tmp_msg = path[0].old_class()
        msg_from_array[i].serialize(buff)
        tmp_msg.deserialize(buff.getvalue())
        for sn in path:
            tmp_msg = sn.rule.apply(tmp_msg)

        buff = StringIO()
        tmp_msg.serialize(buff)
        msg_to_array[i].deserialize(buff.getvalue())
1129
def make_update_rule(self, old_class, new_class):
    """Generate a MessageUpdateRule subclass migrating old_class to new_class.

    The rule class source is assembled as a string and exec'd; the class
    can print its own definition (get_class_def) so it can be suggested to
    the user as a starting point.  The generated rule is marked
    ``valid = False`` whenever any field cannot be migrated automatically,
    with an explanatory comment emitted into the generated update() body.
    """
    name = "update_%s_%s"%(old_class._type.replace("/","_"), old_class._md5sum)

    # We assemble the class as a string and then exec it to end up with a class
    # that can essentially print its own definition.
    classdef = "class %s(MessageUpdateRule):\n"%name
    classdef += "\told_type = \"%s\"\n"%old_class._type
    classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip()
    classdef += "\tnew_type = \"%s\"\n"%new_class._type
    classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip()
    classdef += "\n"
    classdef += "\torder = 0"
    classdef += "\n"

    validdef = "\tvalid = True\n"

    migratedefs = "\tmigrated_types = ["

    updatedef = "\tdef update(self, old_msg, new_msg):\n"

    old_consts = constants_from_def(old_class._type, old_class._full_text)
    new_consts = constants_from_def(new_class._type, new_class._full_text)

    # The rule is only automatically valid if the new constants are a
    # superset of the old ones; dropped/changed constants need human review.
    if (not new_consts >= old_consts):
        validdef = "\tvalid = False\n"
        for c in (old_consts - new_consts):
            updatedef += "\t\t#Constant '%s' has changed\n"%(c[0],)

    # Track old slots not consumed by a matching new slot.
    old_slots = []
    old_slots.extend(old_class.__slots__)

    migrations_seen = []

    # Assign across primitives, self.migrate or self.migrate_array non-primitives
    for (s,t) in zip(new_class.__slots__, new_class._slot_types):
        warn_msg = None
        new_base_type, new_is_array, new_array_len = genmsg.msgs.parse_type(t)
        try:
            ind = old_class.__slots__.index(s)
            old_slots.remove(s)
            old_base_type, old_is_array, old_array_len = genmsg.msgs.parse_type(old_class._slot_types[ind])

            if new_is_array != old_is_array:
                warn_msg = "Could not match array with nonarray"

            elif new_array_len != old_array_len:
                if old_array_len is None:
                    warn_msg = "Converted from variable length array to fixed array of length %d"%(new_array_len)
                elif new_array_len is None:
                    warn_msg = "Converted from fixed array of length %d to variable length"%(old_array_len)
                else:
                    warn_msg = "Fixed length array converted from %d to %d"%(old_array_len,new_array_len)

            elif genmsg.msgs.is_builtin(new_base_type):
                if new_base_type != old_base_type:
                    warn_msg = "Primitive type changed"
                else:
                    # Same builtin type: generate a direct assignment.
                    updatedef += "\t\tnew_msg.%s = old_msg.%s\n"%(s,s)

            else:
                # Embedded message type: generate a recursive migrate call.
                tmp_old_type = clean_name(old_base_type, old_class._type)
                tmp_new_type = clean_name(new_base_type, new_class._type)

                tmp_qualified_old_type = qualified_name(old_base_type, old_class._type)
                tmp_qualified_new_type = qualified_name(new_base_type, new_class._type)

                # Verify the type can theoretically be migrated
                if (tmp_qualified_old_type == tmp_qualified_new_type) or \
                       (tmp_qualified_old_type in self.rename_map and
                        tmp_qualified_new_type in self.rename_map[tmp_qualified_old_type]):

                    if (tmp_old_type, tmp_new_type) not in migrations_seen:
                        migratedefs += "\n\t\t(\"%s\",\"%s\"),"%(tmp_old_type, tmp_new_type)
                        migrations_seen.append((tmp_old_type, tmp_new_type))

                    if not new_is_array:
                        updatedef += "\t\tself.migrate(old_msg.%s, new_msg.%s)\n"%(s,s)
                    else:
                        updatedef += "\t\tself.migrate_array(old_msg.%s, new_msg.%s, \"%s\")\n"%(s,s,new_base_type)
                else:
                    warn_msg = "No migration path between [%s] and [%s]"%(tmp_old_type, tmp_new_type)
        except ValueError:
            # .index(s) failed: the new field has no counterpart in the old
            # message.
            warn_msg = "No matching field name in old message"

        if warn_msg is not None:
            # Any unmigratable field invalidates the rule; fill the new
            # field with a sensible default and leave a comment for the user.
            validdef = "\tvalid = False\n"
            updatedef += "\t\t#%s\n"%warn_msg
            updatedef += "\t\tnew_msg.%s = %s\n"%(s,migration_default_value(t))

    migratedefs += "]\n"

    # Old fields that nothing consumed also invalidate the rule.
    if old_slots:
        validdef = "\tvalid = False\n"
        for s in old_slots:
            updatedef += "\t\t#No field to match field %s from old message\n"%(s)

    classdef += migratedefs + '\n' + validdef + '\n' + updatedef

    printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef

    # This is probably a TERRIBLE idea?
    exec(printclassdef)
    return locals()[name]
1233
def make_old_half_rule(self, old_class):
    """Generate a placeholder rule migrating old_class to nothing.

    Used when no migration target is known; the generated rule is always
    ``valid = False`` so the user is prompted to complete it.
    """
    name = "update__%s__%s"%(old_class._type.replace("/","_"), old_class._md5sum)

    # Assemble the rule class source as a string; exec'ing it yields a
    # class that can print its own definition (get_class_def).
    pieces = [
        "class %s(MessageUpdateRule):\n" % name,
        "\told_type = \"%s\"\n" % old_class._type,
        "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n" % old_class._full_text.strip(),
        "\tnew_type = \"\"\n",
        "\tnew_full_text = \"\"\"\n\n\"\"\"\n",
        "\n",
        "\torder = 0",
        "\n",
        "\tmigrated_types = []\n",
        "\n",
        "\tvalid = False\n",
        "\n",
        "\tdef update(self, old_msg, new_msg):\n",
        "\t\tpass\n",
    ]
    classdef = "".join(pieces)

    printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn '''%s'''\n" % classdef

    # This is probably a TERRIBLE idea?
    exec(printclassdef)
    return locals()[name]
1262
def make_new_half_rule(self, new_class):
    """Generate a placeholder rule migrating nothing to new_class.

    Used when no migration source is known; the generated rule is always
    ``valid = False`` so the user is prompted to complete it.
    """
    name = "update_to_%s_%s"%(new_class._type.replace("/","_"), new_class._md5sum)

    # Assemble the rule class source as a string; exec'ing it yields a
    # class that can print its own definition (get_class_def).
    pieces = [
        "class %s(MessageUpdateRule):\n" % name,
        "\told_type = \"\"\n",
        "\told_full_text = \"\"\"\n\n\"\"\"\n\n",
        "\tnew_type = \"%s\"\n" % new_class._type,
        "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n" % new_class._full_text.strip(),
        "\n",
        "\torder = 0",
        "\n",
        "\tmigrated_types = []\n",
        "\n",
        "\tvalid = False\n",
        "\n",
        "\tdef update(self, old_msg, new_msg):\n",
        "\t\tpass\n",
    ]
    classdef = "".join(pieces)

    printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn '''%s'''\n" % classdef

    # This is probably a TERRIBLE idea?
    exec(printclassdef)
    return locals()[name]
1291
def migration_default_value(field_type):
    """Return Python source text for the default value of ``field_type``.

    The returned string is spliced into a generated update() body, so it is
    source code, not a value (e.g. "0", "[]", "self.get_new_class(...)()").
    """
    int_types = ('bool', 'byte', 'int8', 'int16', 'int32', 'int64',
                 'char', 'uint8', 'uint16', 'uint32', 'uint64')
    if field_type in int_types:
        return '0'
    if field_type in ('float32', 'float64'):
        return '0.'
    if field_type == 'string':
        # strings, byte[], and uint8s are all optimized to be strings
        return "''"
    if not field_type.endswith(']'):
        # Embedded message type: instantiate its current class at runtime.
        return "self.get_new_class('%s')()"%field_type

    # Array type.
    base_type, _, array_len = genmsg.msgs.parse_type(field_type)
    if base_type in ('byte', 'uint8'):
        # strings, byte[], and uint8s are all optimized to be strings
        if array_len is not None:
            return "chr(0)*%s"%array_len
        return "''"
    if array_len is None:
        # Variable-length array.
        return '[]'
    # Fixed-length array: fill with the element default.
    fill = migration_default_value(base_type)
    return '[' + ','.join([fill] * array_len) + ']'
1316
def constants_from_def(core_type, msg_def):
    """Return the set of (name, value, type) constants declared by a message.

    ``msg_def`` is a full message definition: the core .msg text followed by
    its dependencies, separated by lines of 80 '=' characters.  Only the
    core message's constants are collected.
    """
    core_pkg, _ = genmsg.package_resource_name(core_type)

    # The first segment before the 80-'=' separator is the core definition;
    # dependency definitions after it are currently not processed.
    separator = '\n' + '=' * 80 + '\n'
    core_msg = msg_def.split(separator)[0]

    # create a MsgSpec representation of the .msg text
    from genmsg import MsgContext
    context = MsgContext.create_default()
    spec = genmsg.msg_loader.load_msg_from_string(context, core_msg, core_pkg)

    return {(c.name, c.val, c.type) for c in spec.constants}
1336