Package rosbag :: Module migration
[frames] | [no frames]

Source Code for Module rosbag.migration

   1  # Software License Agreement (BSD License) 
   2  # 
   3  # Copyright (c) 2009, Willow Garage, Inc. 
   4  # All rights reserved. 
   5  # 
   6  # Redistribution and use in source and binary forms, with or without 
   7  # modification, are permitted provided that the following conditions 
   8  # are met: 
   9  # 
  10  #  * Redistributions of source code must retain the above copyright 
  11  #    notice, this list of conditions and the following disclaimer. 
  12  #  * Redistributions in binary form must reproduce the above 
  13  #    copyright notice, this list of conditions and the following 
  14  #    disclaimer in the documentation and/or other materials provided 
  15  #    with the distribution. 
  16  #  * Neither the name of Willow Garage, Inc. nor the names of its 
  17  #    contributors may be used to endorse or promote products derived 
  18  #    from this software without specific prior written permission. 
  19  # 
  20  # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 
  21  # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 
  22  # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 
  23  # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 
  24  # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 
  25  # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
  26  # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
  27  # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
  28  # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 
  29  # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 
  30  # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
  31  # POSSIBILITY OF SUCH DAMAGE. 
  32   
  33  from __future__ import print_function 
  34   
  35  import collections 
  36  import copy 
  37  try: 
  38      from cStringIO import StringIO  # Python 2.x 
  39  except ImportError: 
  40      from io import BytesIO as StringIO  # Python 3.x 
  41  import inspect 
  42  import itertools 
  43  import os 
  44  import string 
  45  import sys 
  46  import traceback 
  47   
  48  import genmsg.msgs 
  49  import genpy 
  50  import genpy.dynamic 
  51   
  52  import rospkg 
  53   
  54  import rosbag 
  55   
  56  # Anything outside the scope of these primitives is a submessage 
  57  #_PRIMITIVES = ['bool', 'byte','int8','int16','int32','int64','char','uint8','uint16','uint32','uint64','float32','float64','string','time'] 
  58   
class BagMigrationException(Exception):
    """Raised when a bag migration operation cannot be performed.

    Used throughout this module for invalid rules, missing sub-rules,
    and attempts to migrate between types not listed in migrated_types.
    """
    pass
61
def checkbag(migrator, inbag):
    """
    Check whether a bag file can be played in the current system.
    @param migrator: message migrator to use
    @param inbag: name of the bag to be checked.
    @returns A list of tuples for each type in the bag file.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    checked = set()
    migrations = []

    bag = rosbag.Bag(inbag, 'r')
    try:
        for topic, msg, t in bag.read_messages(raw=True):
            # msg is a raw tuple; msg[4] is the message class.
            key = get_message_key(msg[4])
            if key not in checked:
                target = migrator.find_target(msg[4])
                # Even in the case of a zero-length path (matching md5sums), we still want
                # to migrate in the event of a type change (message move).
                path = migrator.find_path(msg[4], target)
                if len(path) > 0:
                    migrations.append((path, [r for r in migrator.expand_rules([sn.rule for sn in path]) if r.valid == False]))

                checked.add(key)
    finally:
        # Ensure the bag is closed even if reading or rule expansion raises.
        bag.close()

    return migrations
92
def checkmessages(migrator, messages):
    """
    Check whether a list of message classes can be handled by the current system.
    @param migrator: the message migrator to use
    @param messages: a list of message classes.
    @returns A list of tuples for each type in the list.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    seen_keys = set()
    results = []

    for message in messages:
        msg_key = get_message_key(message)
        if msg_key in seen_keys:
            continue

        target = migrator.find_target(message)
        # Even in the case of a zero-length path (matching md5sums), we still want
        # to migrate in the event of a type change (message move).
        migration_path = migrator.find_path(message, target)
        if migration_path:
            bad_rules = [r for r in migrator.expand_rules([node.rule for node in migration_path]) if r.valid == False]
            results.append((migration_path, bad_rules))

        seen_keys.add(msg_key)

    return results
120
121 -def _migrate_connection_header(conn_header, new_msg_type):
122 conn_header['type'] = new_msg_type._type 123 conn_header['md5sum'] = new_msg_type._md5sum 124 conn_header['message_definition'] = new_msg_type._full_text 125 126 return conn_header
127 128 ## Fix a bag so that it can be played in the current system 129 # 130 # @param migrator The message migrator to use 131 # @param inbag Name of the bag to be fixed. 132 # @param outbag Name of the bag to be saved. 133 # @returns True if migration was successful.
def fixbag(migrator, inbag, outbag):
    """Fix a bag so that it can be played in the current system.

    @param migrator: the message migrator to use
    @param inbag: name of the bag to be fixed.
    @param outbag: name of the bag to be saved.
    @returns True if migration was successful, False if any rule was invalid.
    """
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    # Bail out early if any migration path still has invalid rules.
    if not all(m[1] == [] for m in res):
        return False

    # Deserializing all messages is inefficient, but we can speed this up later
    bag = rosbag.Bag(inbag, 'r')
    try:
        rebag = rosbag.Bag(outbag, 'w', options=bag.options)
        try:
            for topic, msg, t, conn_header in bag.read_messages(raw=True, return_connection_header=True):
                new_msg_type = migrator.find_target(msg[4])
                mig_msg = migrator.migrate_raw(msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
                new_conn_header = _migrate_connection_header(conn_header, new_msg_type)
                rebag.write(topic, mig_msg, t, connection_header=new_conn_header, raw=True)
        finally:
            # Close output first (flushes writes), then input, even on error.
            rebag.close()
    finally:
        bag.close()
    return True
152 153 ## Fix a bag so that it can be played in the current system 154 # 155 # @param migrator The message migrator to use 156 # @param inbag Name of the bag to be fixed. 157 # @param outbag Name of the bag to be saved. 158 # @returns [] if bag could be migrated, otherwise, it returns the list of necessary migration paths
def fixbag2(migrator, inbag, outbag, force=False):
    """Fix a bag so that it can be played in the current system.

    @param migrator: the message migrator to use
    @param inbag: name of the bag to be fixed.
    @param outbag: name of the bag to be saved.
    @param force: if True, migrate what we can and ignore invalid rules.
    @returns [] if the bag could be migrated; otherwise the list of
    migration paths that still contain invalid rules.
    """
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    migrations = [m for m in res if len(m[1]) > 0]

    # Deserializing all messages is inefficient, but we can speed this up later
    if len(migrations) == 0 or force:
        bag = rosbag.Bag(inbag, 'r')
        try:
            rebag = rosbag.Bag(outbag, 'w', options=bag.options)
            try:
                for topic, msg, t, conn_header in bag.read_messages(raw=True, return_connection_header=True):
                    new_msg_type = migrator.find_target(msg[4])
                    if new_msg_type is not None:
                        mig_msg = migrator.migrate_raw(msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
                        new_conn_header = _migrate_connection_header(conn_header, new_msg_type)
                        rebag.write(topic, mig_msg, t, connection_header=new_conn_header, raw=True)
                    else:
                        # No target known: copy the message through unchanged.
                        rebag.write(topic, msg, t, connection_header=conn_header, raw=True)
            finally:
                # Close output first (flushes writes), then input, even on error.
                rebag.close()
        finally:
            bag.close()

    return [] if force else migrations
184 185 ## Helper function to strip out roslib and package name from name usages. 186 # 187 # There is some inconsistency in whether a fully-qualified path is 188 # used for sub-messages within a given message. This function is 189 # useful for stripping out the package name in a fully qualified 190 # sub-message. 191 # 192 # @param name The name to clean. 193 # @param top_name The name of the top-level type 194 # @returns The cleaned version of the name.
def clean_name(name, top_name):
    """Strip std_msgs and the top-level package name out of a type name.

    There is some inconsistency in whether a fully-qualified path is
    used for sub-messages within a given message; this helper removes
    the package qualifier so names compare consistently.

    @param name: the name to clean.
    @param top_name: the name of the top-level type.
    @returns the cleaned version of the name.
    """
    parts = name.split('/')
    top_pkg = top_name.split('/')[0]
    # Drop at most one occurrence of each package qualifier, mirroring
    # list.remove() semantics.
    for pkg in ('std_msgs', top_pkg):
        if pkg in parts:
            parts.remove(pkg)
    return '/'.join(parts)
207 208 ## Helper function to ensure we end up with a qualified name 209 # 210 # There is some inconsistency in whether a fully-qualified path is 211 # used for sub-messages within a given message. This function is 212 # useful for ensuring that a name is fully qualified correctly. 213 # 214 # @param name The name to qualify 215 # @param top_name The name of the top-level type 216 # @returns The qualified version of the name.
def qualified_name(name, top_name):
    """Return a fully-qualified version of a (possibly bare) type name.

    There is some inconsistency in whether a fully-qualified path is
    used for sub-messages within a given message; this helper ensures a
    name is qualified correctly.

    @param name: the name to qualify.
    @param top_name: the name of the top-level type.
    @returns the qualified version of the name.
    """
    # Clean first so the checks below are deterministic.
    stripped = clean_name(name, top_name)

    if len(stripped.split('/')) == 2 or genmsg.msgs.is_builtin(stripped):
        # Already package-qualified, or a builtin primitive type.
        return stripped
    if stripped == 'Header':
        # Header is special-cased to live in std_msgs.
        return 'std_msgs/Header'
    # Otherwise qualify with the top-level type's package.
    return top_name.split('/')[0] + '/' + stripped
227 228 ## Helper function to return a key from a given class 229 # 230 # For now, we choose the tuple (type,md5sum) as a unique key for the 231 # class. However, this is subject to change and assumptions about keys 232 # should not be made other than their uniqueness. 233 # 234 # @param c The message class or instance to get a key for 235 # @returns The unique key
def get_message_key(c):
    """Return a unique key for a message class or instance.

    For now, we choose the tuple (type, md5sum) as a unique key for the
    class.  However, this is subject to change and assumptions about keys
    should not be made other than their uniqueness.

    @param c: the message class or instance to get a key for.
    @returns the unique key, or None if c has no type/md5sum attributes.
    """
    try:
        return (c._type, c._md5sum)
    except AttributeError:
        # Narrowed from a bare except: only a missing attribute means
        # "not a message"; anything else should propagate.
        return None
241 242 ## Helper function to return a key for a given path 243 # 244 # For now, we choose the tuple ((type1,md5sum1),(type2,md5sum2)) as a 245 # unique key for the path. However, this is subject to change and 246 # assumptions about keys should not be made other than their 247 # uniqueness. 248 # 249 # @param c1 The start point of the path 250 # @param c1 The stop point of the path 251 # @returns The unique key
def get_path_key(c1, c2):
    """Return a unique key for a migration path.

    For now, we choose the tuple ((type1,md5sum1),(type2,md5sum2)) as a
    unique key for the path.  However, this is subject to change and
    assumptions about keys should not be made other than their
    uniqueness.

    @param c1: the start point of the path.
    @param c2: the stop point of the path.
    @returns the unique key.
    """
    try:
        return (get_message_key(c1), get_message_key(c2))
    except AttributeError:
        # Narrowed from a bare except; get_message_key already returns
        # None for non-messages, so this is purely defensive.
        return None
257 258 ## Base class for all message update rules
class MessageUpdateRule(object):
    """Base class for all message update rules.

    A subclass describes how to convert one version of a message type
    (old_type / old_full_text) into another (new_type / new_full_text)
    by overriding update().  Instances are created by the MessageMigrator,
    which also builds the sub-rule paths needed for any nested message
    types listed in migrated_types.
    """

    # Full typename and message definition of the rule's source version.
    old_type = ''
    old_full_text = ''
    # Full typename and message definition of the rule's target version.
    new_type = ''
    new_full_text = ''
    # Pairs (old_subtype, new_subtype) that update() migrates via migrate()
    # or migrate_array(); the migrator must find a path for each pair.
    migrated_types = []

    # Position of this rule within its type's rule chain (-1 = unset).
    order = -1

    # Subclasses must explicitly set this to True for the rule to be usable.
    valid = False

    class EmptyType(Exception):
        # Internal marker raised when old_type/new_type is left blank
        # (used by generated "half rules" that have only one endpoint).
        pass

    def __init__(self, migrator, location):
        """Instantiate the rule and dynamically generate its message classes.

        @param migrator: the owning MessageMigrator (kept for sub-rule lookups).
        @param location: human-readable origin of the rule (file:classname),
        used in warning/error messages.
        """
        # Every rule needs to hang onto the migrator so we can potentially use it
        self.migrator = migrator
        self.location = location

        # A rule whose typename changes is a rename rule; it terminates
        # a rule chain.
        if (self.old_type != self.new_type):
            self.rename_rule = True
        else:
            self.rename_rule = False

        # Instantiate types dynamically based on definition.  A failure
        # (other than a deliberately empty type) is printed but leaves the
        # rule with empty/None old-side types.
        try:
            if self.old_type == "":
                raise self.EmptyType
            self.old_types = genpy.dynamic.generate_dynamic(self.old_type, self.old_full_text)
            self.old_class = self.old_types[self.old_type]
            self.old_md5sum = self.old_class._md5sum
        except Exception as e:
            if not isinstance(e, self.EmptyType):
                traceback.print_exc(file=sys.stderr)
            self.old_types = {}
            self.old_class = None
            self.old_md5sum = ""
        # Same treatment for the new side of the rule.
        try:
            if self.new_type == "":
                raise self.EmptyType
            self.new_types = genpy.dynamic.generate_dynamic(self.new_type, self.new_full_text)
            self.new_class = self.new_types[self.new_type]
            self.new_md5sum = self.new_class._md5sum
        except Exception as e:
            if not isinstance(e, self.EmptyType):
                traceback.print_exc(file=sys.stderr)
            self.new_types = {}
            self.new_class = None
            self.new_md5sum = ""

        # We have not populated our sub rules (and ideally should
        # wait until the full scaffold exists before doing this)
        self.sub_rules_done = False
        self.sub_rules_valid = False
        self.sub_rules = []

    def find_sub_paths(self):
        """Find migration paths for all pairs listed in migrated_types.

        For any migrated type the user might want to use, we must make
        sure the migrator has found a path for it.  To facilitate this
        check we require that all migrated types be listed as pairs in
        the migrated_types field.  Sets sub_rules_valid False (with a
        warning) on any failure, and sub_rules_done True on completion.
        """
        self.sub_rules_valid = True
        for (t1, t2) in self.migrated_types:
            try:
                tmp_old_class = self.get_old_class(t1)
            except KeyError:
                print("WARNING: Within rule [%s], specified migrated type [%s] not found in old message types" % (self.location, t1), file=sys.stderr)
                self.sub_rules_valid = False
                continue
            try:
                tmp_new_class = self.get_new_class(t2)
            except KeyError:
                print("WARNING: Within rule [%s], specified migrated type [%s] not found in new message types" % (self.location, t2), file=sys.stderr)
                self.sub_rules_valid = False
                continue

            # If a rule instantiates itself as a subrule (because the
            # author knows the md5sums match), we don't want to end up
            # with an infinite recursion.
            if (get_message_key(tmp_old_class) != get_message_key(self.old_class)) or (get_message_key(tmp_new_class) != get_message_key(self.new_class)):
                path = self.migrator.find_path(tmp_old_class, tmp_new_class)
                rules = [sn.rule for sn in path]
                self.sub_rules.extend(rules)

            if False in [r.valid for r in self.sub_rules]:
                print("WARNING: Within rule [%s] cannot migrate from subtype [%s] to [%s].." % (self.location, t1, t2), file=sys.stderr)
                self.sub_rules_valid = False
                continue
        self.sub_rules = self.migrator.filter_rules_unique(self.sub_rules)
        self.sub_rules_done = True

    def get_new_class(self, t):
        """Return the class of a sub-message for the new type.

        This function should be used inside of update() to access new
        classes.  Tries the bare name, then std_msgs/, then the rule's
        own package.

        @param t: the subtype to return the class of.
        @returns the class of the new sub type.
        @raises KeyError: if the subtype is unknown under any qualification.
        """
        try:
            try:
                return self.new_types[t]
            except KeyError:
                return self.new_types['std_msgs/' + t]
        except KeyError:
            return self.new_types[self.new_type.split('/')[0] + '/' + t]

    def get_old_class(self, t):
        """Return the class of a sub-message for the old type.

        This function should be used inside of update() to access old
        classes.  Tries the bare name, then std_msgs/, then the rule's
        own package.

        @param t: the subtype to return the class of.
        @returns the class of the old sub type.
        @raises KeyError: if the subtype is unknown under any qualification.
        """
        try:
            try:
                return self.old_types[t]
            except KeyError:
                return self.old_types['std_msgs/' + t]
        except KeyError:
            return self.old_types[self.old_type.split('/')[0] + '/' + t]

    def migrate(self, msg_from, msg_to):
        """Migrate one sub-message instance to another.

        This function should be used inside of update() to migrate sub msgs.

        @param msg_from: a message instance of the old message type.
        @param msg_to: a message instance of the new message type to populate.
        @raises BagMigrationException: if the pair is not in migrated_types.
        """
        tmp_msg_from = clean_name(msg_from._type, self.old_type)
        tmp_msg_to = clean_name(msg_to._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))
        self.migrator.migrate(msg_from, msg_to)

    def migrate_array(self, msg_from_array, msg_to_array, msg_to_name):
        """Migrate a whole array of sub-messages.

        This function should be used inside of update() to migrate arrays
        of sub msgs.  msg_to_array is emptied and refilled in place.

        @param msg_from_array: an array of messages of the old message type.
        @param msg_to_array: an array to receive new-type messages (cleared first).
        @param msg_to_name: name of the new message type, since msg_to_array
        may be an empty array.
        @raises BagMigrationException: if the pair is not in migrated_types.
        """
        msg_to_class = self.get_new_class(msg_to_name)

        # Empty the destination in place so the caller's list object is reused.
        while len(msg_to_array) > 0:
            msg_to_array.pop()

        if (len(msg_from_array) == 0):
            return

        tmp_msg_from = clean_name(msg_from_array[0]._type, self.old_type)
        tmp_msg_to = clean_name(msg_to_class._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))

        # Pre-create one destination instance per source message.
        msg_to_array.extend( [msg_to_class() for i in range(len(msg_from_array))] )

        self.migrator.migrate_array(msg_from_array, msg_to_array)

    def get_class_def(self):
        """Print the definition of autogenerated messages (no-op in base class)."""
        pass

    def apply(self, old_msg):
        """Apply this rule to a message; called by the message migrator.

        @param old_msg: an instance of the old message type.
        @returns an instance of the new message type.
        @raises BagMigrationException: if the rule is invalid, its sub-rules
        are missing/invalid, or old_msg is of the wrong class.
        """
        if not self.valid:
            raise BagMigrationException("Attempted to apply an invalid rule")
        if not self.sub_rules_done:
            raise BagMigrationException("Attempted to apply a rule without building up its sub rules")
        if not self.sub_rules_valid:
            raise BagMigrationException("Attempted to apply a rule without valid sub-rules")
        if (get_message_key(old_msg) != get_message_key(self.old_class)):
            raise BagMigrationException("Attempted to apply rule to incorrect class %s %s."%(get_message_key(old_msg),get_message_key(self.old_class)))

        # Apply update rule
        new_msg = self.new_class()
        self.update(old_msg, new_msg)

        return new_msg

    def update(self, old_msg, new_msg):
        """Perform the message update; overridden by user-defined rules.

        @param old_msg: a message instance of the old message type.
        @param new_msg: a message instance of the new message type to populate.
        @raises BagMigrationException: always, in this base implementation.
        """
        raise BagMigrationException("Tried to use rule without update overidden")
454 455 456 ## A class for book-keeping about rule-chains. 457 # 458 # Rule chains define the ordered set of update rules, indexed by 459 # typename, terminated by a rename rule. This class is only used 460 # temporarily to help us get the ordering right, until all explicit 461 # rules have been loaded (possibly out of order) and the proper 462 # scaffold can be built.
class RuleChain(object):
    """Book-keeping container for one type's chain of update rules.

    A rule chain is the ordered set of update rules for a typename,
    optionally terminated by a rename rule.  It is only used while the
    migrator builds its scaffold, before implicit rules are generated.
    """
    def __init__(self):
        # Ordered update rules, and the optional terminating rename rule.
        self.chain, self.rename = [], None
        # 'order' values already claimed, used to detect collisions.
        self.order_keys = set()
468 469 470 ## A class for arranging the ordered rules 471 # 472 # They provide a scaffolding (essentially a linked list) over which we 473 # assume we can migrate messages forward. This allows us to verify a 474 # path exists before actually creating all of the necessary implicit 475 # rules (mostly migration of sub-messages) that such a path 476 # necessitates.
class ScaffoldNode(object):
    """One link in the migration scaffold (a singly-linked list node).

    Each node records a single hop from an old message class to a new
    message class, together with the rule performing that hop (None for
    an implicit rule to be generated later).  Nodes are chained via
    'next' so a migration path can be verified before all implicit
    rules are actually created.
    """
    def __init__(self, old_class, new_class, rule):
        # Endpoints of this hop plus the rule connecting them.
        self.old_class, self.new_class, self.rule = old_class, new_class, rule
        # Following node in the chain; linked up as the scaffold is built.
        self.next = None
483 484 ## A class to actually migrate messages 485 # 486 # This is the big class that actually handles all of the fancy 487 # migration work. Better documentation to come later.
488 -class MessageMigrator(object):
    def __init__(self, input_rule_files=[], plugins=True):
        """Build the full migration scaffold.

        Loads rule files (explicit list and/or package exports), chains the
        rules per type, generates implicit rules where md5sums differ, and
        finally populates sub-rule paths.

        @param input_rule_files: local rule files to load (mainly for debugging).
        NOTE(review): mutable default argument; it is only iterated, never
        mutated, so this is safe, but kept as-is for interface compatibility.
        @param plugins: if True, also load rules exported by packages via
        the migration ruleset export flags.
        @raises BagMigrationException: if any loaded rule has valid == False.
        """
        # We use the rulechains to scaffold our initial creation of
        # implicit rules. Each RuleChain is keyed off of a type and
        # consists of an ordered set of update rules followed by an
        # optional rename rule.  For the system rule definitions to be
        # valid, all members of a rulechains must be connectable via
        # implicit rules and all rulechains must terminate in a known
        # system type which is also reachable by an implicit rule.
        self.rulechains = collections.defaultdict(RuleChain)

        # The list of all nodes that we can iterate through in the
        # future when making sure all rules have been constructed.
        self.base_nodes = []

        # The list of extra (non-scaffolded) nodes that we can use
        # when determining if all rules are valid and printing invalid
        # rules.
        self.extra_nodes = []

        # A map from typename to the first node of a particular type
        self.first_type = {}

        # A map from a typename to all other typenames for which
        # rename rules exist.  This is necessary to determine whether
        # an appropriate implicit rule can actually be constructed.
        self.rename_map = {}

        # The cached set of all found paths, keyed by:
        # ((old_type, old_md5), (new_type, new_md5))
        self.found_paths = {}
        self.found_targets = {}

        # Temporary list of the terminal nodes
        terminal_nodes = []

        # Temporary list of rule modules we are loading
        rule_dicts = []

        self.false_rule_loaded = False

        # To make debugging easy we can pass in a list of local
        # rulefiles.
        for r in input_rule_files:
            try:
                scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                with open(r, 'r') as f:
                    exec(f.read(), scratch_locals)
                rule_dicts.append((scratch_locals, r))
            except:
                print("Cannot load rule file [%s] in local package" % r, file=sys.stderr)

        # Alternatively the preferred method is to load definitions
        # from the migration ruleset export flag.
        if plugins:
            rospack = rospkg.RosPack()
            for dep,export in [('rosbagmigration','rule_file'),('rosbag','migration_rule_file'),('rosbag_migration_rule','rule_file')]:
                for pkg in rospack.get_depends_on(dep, implicit=False):
                    m = rospack.get_manifest(pkg)
                    p_rules = m.get_export(dep,export)
                    pkg_dir = rospack.get_path(pkg)
                    for r in p_rules:
                        if dep == 'rosbagmigration':
                            print("""WARNING: The package: [%s] is using a deprecated rosbagmigration export.
    The export in the manifest should be changed to:
    <rosbag migration_rule_file="%s"/>
""" % (pkg, r), file=sys.stderr)
                        try:
                            scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                            exec(open(pkg_dir + "/" + r).read(), scratch_locals)
                            rule_dicts.append((scratch_locals, r))
                        except ImportError:
                            print("Cannot load rule file [%s] in package [%s]" % (r, pkg), file=sys.stderr)

        # Instantiate every MessageUpdateRule subclass found in the loaded
        # rule modules and register it in the appropriate rule chain.
        for (rule_dict, location_base) in rule_dicts:
            for (n,c) in rule_dict.items():
                if inspect.isclass(c):
                    if (not c == MessageUpdateRule) and issubclass(c, MessageUpdateRule):
                        self.add_update_rule(c(self, location_base + ':' + n))

        if self.false_rule_loaded:
            raise BagMigrationException("Cannot instantiate MessageMigrator with invalid rules")

        # Now, go through and build up a better scaffolded
        # representation, deferring implicit rule generation until
        # complete, since the implicit rule generation and sub-rule
        # population makes use of the scaffold.

        # First we link up each particular type chain (now including
        # implicit rules).  Additionally, we build up our name remapping
        # lists.

        # For each rulechain
        for (type,rulechain) in self.rulechains.items():
            first = True
            sn = None
            prev_sn = None

            # Find name remapping list: follow the rename rules forward,
            # collecting every typename this type can become.
            rename_set = set([type])
            tmp = rulechain.rename
            while tmp:
                rename_set.add(tmp.new_type)
                if tmp.new_type in self.rulechains:
                    tmp = self.rulechains[tmp.new_type].rename
                else:
                    break

            self.rename_map[type] = rename_set

            # For each element in the rulechain chain,
            for r in rulechain.chain:
                # Create a scaffoldnode
                sn = ScaffoldNode(r.old_class, r.new_class, r)
                self.base_nodes.append(sn)
                # If it's the first one, stick it in our first_type map
                if first:
                    self.first_type[type] = sn
                    first = False
                # If there was a previous node, link them if keys
                # match, or else create an implicit SN
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                # The just-created node now becomes the previous
                prev_sn = sn

            # If there is a rename rule
            if rulechain.rename:
                # Create a scaffoldnode
                sn = ScaffoldNode(rulechain.rename.old_class, rulechain.rename.new_class, rulechain.rename)
                self.base_nodes.append(sn)

                # Same rules apply here as when we created each node
                # from chain.  Link if possible, otherwise create
                # implicit
                if first:
                    self.first_type[type] = sn
                    first = False
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                prev_sn = sn
                terminal_nodes.append(sn)
            # If there was not a rename rule, this must be a terminal node
            else:
                if prev_sn:
                    terminal_nodes.append(prev_sn)

        # Between our partial scaffold and name remapping list, we can
        # now GENERATE rules, though we cannot yet populate the
        # subrules.

        for sn in terminal_nodes:
            key = get_message_key(sn.new_class)

            renamed = (sn.old_class._type != sn.new_class._type)

            sys_class = genpy.message.get_message_class(sn.new_class._type)

            # If we map directly to a system-defined class we're done
            if sys_class:
                new_rule = self.make_update_rule(sn.new_class, sys_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    sn.next = ScaffoldNode(sn.new_class, sys_class, R)
                    self.base_nodes.append(sn.next)

            if renamed:
                tmp_sns = self.scaffold_range(sn.new_class._type, sn.new_class._type)

                # If we don't map to a scaffold range, we appear to be done
                if tmp_sns == []:
                    if sys_class is not None:
                        sn.next = ScaffoldNode(sn.new_class, sys_class, None)
                        self.base_nodes.append(sn.next)
                        continue

                # Otherwise look for trivial bridges
                for tmp_sn in reversed(tmp_sns):
                    tmp_key = get_message_key(tmp_sn.old_class)
                    if (key == tmp_key):
                        sn.next = tmp_sn
                        break

                # If we did not find a trivial bridge, we instead need
                # to create the right implicit rule ourselves.  This
                # is based on the ability to create a valid implicit
                # rule as LATE in the chain as possible.  We do this
                # to avoid extra conversions in some boundary
                # circumstances.
                if (sn.next is None):
                    for tmp_sn in reversed(tmp_sns):
                        new_rule = self.make_update_rule(sn.new_class, tmp_sn.old_class)
                        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                        if R.valid:
                            sn.next = ScaffoldNode(sn.new_class, tmp_sn.old_class, R)
                            self.base_nodes.append(sn.next)
                            break

            # If we have still failed we need to create a placeholder.
            if (sn.next is None):
                if sys_class:
                    new_rule = self.make_update_rule(sn.new_class, sys_class)
                else:
                    new_rule = self.make_old_half_rule(sn.new_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                sn.next = ScaffoldNode(sn.new_class, None, R)
                self.base_nodes.append(sn.next)

        # Now that our scaffolding is actually complete, we iterate
        # through all of our rules and generate the rules for which we
        # have scaffoldnodes, but no rule yet
        for sn in self.base_nodes:
            if (sn.rule is None):
                new_rule = self.make_update_rule(sn.old_class, sn.new_class)
                sn.rule = new_rule(self, 'GENERATED.' + new_rule.__name__)

        # Finally, we go through and try to find sub_paths for every
        # rule in the system so far
        for sn in self.base_nodes:
            sn.rule.find_sub_paths()

        # Construction should be done, we can now use the system in
        # the event that we don't have invalid update rules.

        # Lookup table from (type, md5sum) key to message class.
        self.class_dict = {}

        for sn in self.base_nodes + self.extra_nodes:
            self.class_dict[get_message_key(sn.old_class)] = sn.old_class
            self.class_dict[get_message_key(sn.new_class)] = sn.new_class
732 733
734 - def lookup_type(self, key):
735 if key in self.class_dict: 736 return self.class_dict[key] 737 else: 738 return None
739 740 # Add an update rule to our set of rule chains
    def add_update_rule(self, r):
        """Add an update rule to our set of rule chains.

        Rename rules terminate a chain; ordinary rules are inserted in
        'order' position.  Conflicting or cycle-introducing rules are
        ignored with a warning; an invalid rule sets false_rule_loaded.

        @param r: a MessageUpdateRule instance to register.
        """
        if r.valid == False:
            print("ERROR: Update rule [%s] has valid set to False." % (r.location), file=sys.stderr)
            self.false_rule_loaded = True
            return

        # defaultdict: creates an empty RuleChain on first access.
        rulechain = self.rulechains[r.old_type]

        if r.rename_rule:
            # Only one rename rule may terminate a chain.
            if (rulechain.rename != None):
                print("WARNING: Update rules [%s] and [%s] both attempting to rename type [%s]. Ignoring [%s]" % (rulechain.rename.location, r.location, r.old_type, r.location), file=sys.stderr)
                return

            # Search forward to make sure we haven't created a cycle
            cycle = []
            tmp = r
            while tmp:
                cycle.append(tmp)
                if (tmp.new_type == r.old_type):
                    print("WARNING: Update rules %s introduce a renaming cycle. Ignoring [%s]" % ([x.location for x in cycle], r.location), file=sys.stderr)
                    return
                if tmp.new_type in self.rulechains:
                    tmp = self.rulechains[tmp.new_type].rename
                else:
                    break

            # The rename rule must come after every ordinary rule in the chain.
            if rulechain.chain and (r.order <= rulechain.chain[-1].order):
                print("WARNING: Update rule [%s] which performs rename does not have largest order number. Ignoring" % r.location, file=sys.stderr)
                return

            rulechain.rename = r

        else:
            # Ordinary rules must have unique order numbers within a chain.
            if r.order in rulechain.order_keys:
                otherind = [x.order for x in rulechain.chain].index(r.order)
                print("WARNING: Update rules [%s] and [%s] for type [%s] have the same order number. Ignoring [%s]" % (rulechain.chain[otherind].location, r.location, r.old_type, r.location), file=sys.stderr)
                return
            else:
                # ...and must come before the rename rule, if one exists.
                if rulechain.rename and (r.order >= rulechain.rename.order):
                    print("WARNING: Update rule [%s] has order number larger than rename rule [%s]. Ignoring" % (r.location, rulechain.rename.location), file=sys.stderr)
                    return
                # Insert the rule into a rule chain
                rulechain.order_keys.add(r.order)
                rulechain.chain.append(r)
                rulechain.chain.sort(key=lambda x: x.order)
787 788 # Helper function to determine if all rules are valid
789 - def all_rules_valid(self):
790 base_valid = not False in [sn.rule.valid for sn in self.base_nodes] 791 extra_valid = not False in [sn.rule.valid for sn in self.extra_nodes] 792 return base_valid and extra_valid
793 794 # Helper function to print out the definitions for all invalid rules (which include definitions)
795 - def get_invalid_rules(self):
796 invalid_rules = [] 797 invalid_rule_cache = [] 798 for sn in self.base_nodes: 799 if not sn.rule.valid: 800 path_key = get_path_key(sn.old_class, sn.new_class) 801 if (path_key not in invalid_rule_cache): 802 invalid_rules.append(sn.rule) 803 invalid_rule_cache.append(path_key) 804 for sn in self.extra_nodes: 805 if not sn.rule.valid: 806 path_key = get_path_key(sn.old_class, sn.new_class) 807 if (path_key not in invalid_rule_cache): 808 invalid_rules.append(sn.rule) 809 invalid_rule_cache.append(path_key) 810 return invalid_rules
811 812 # Helper function to remove non-unique rules
813 - def filter_rules_unique(self, rules):
814 rule_cache = [] 815 new_rules = [] 816 for r in rules: 817 path_key = get_path_key(r.old_class, r.new_class) 818 if (path_key not in rule_cache): 819 new_rules.append(r) 820 return new_rules
821 822 # Helper function to expand a list of rules to include subrules
823 - def expand_rules(self, rules):
824 filtered = self.filter_rules_unique(rules) 825 expanded = [] 826 for r in filtered: 827 expanded.append(r) 828 #print "For rule %s --> %s"%(r.old_class._type, r.new_class._type) 829 expanded.extend(self.expand_rules(r.sub_rules)) 830 filtered = self.filter_rules_unique(expanded) 831 return filtered
832
    def scaffold_range(self, old_type, new_type):
        """Return the scaffold nodes reachable from old_type's chain.

        Walks the linked scaffold starting at the first node for old_type,
        collecting nodes until the chain ends, hits a node with no new
        class, or passes beyond the last node whose new class matches
        new_type.

        @param old_type: typename whose chain start is looked up.
        @param new_type: typename at which the walk may stop.
        @returns the list of ScaffoldNodes, or [] if old_type is unknown.
        """
        try:
            first_sn = self.first_type[old_type]

            sn_range = [first_sn]

            # Becomes True once we have seen a node ending in new_type;
            # the walk stops at the first node after that run.
            found_new_type = False

            tmp_sn = first_sn

            while (tmp_sn.next is not None and tmp_sn.next.new_class is not None):
                tmp_sn = tmp_sn.next
                if (tmp_sn != first_sn):
                    sn_range.append(tmp_sn)
                if (tmp_sn.new_class._type == new_type):
                    found_new_type = True
                if (found_new_type and tmp_sn.new_class._type != new_type):
                    break

            return sn_range

        except KeyError:
            # No chain starts at old_type.
            return []
857 858
859 - def find_target(self, old_class):
860 key = get_message_key(old_class) 861 862 last_class = old_class 863 864 try: 865 return self.found_targets[key] 866 except KeyError: 867 868 sys_class = genpy.message.get_message_class(old_class._type) 869 870 if sys_class is not None: 871 self.found_targets[key] = sys_class 872 return sys_class 873 874 try: 875 tmp_sn = self.first_type[old_class._type] 876 877 if tmp_sn.new_class is not None: 878 last_class = tmp_sn.new_class 879 880 while tmp_sn.next is not None: 881 tmp_sn = tmp_sn.next 882 883 if tmp_sn.new_class is not None: 884 last_class = tmp_sn.new_class 885 sys_class = genpy.message.get_message_class(tmp_sn.new_class._type) 886 else: 887 sys_class = None 888 889 if sys_class is not None: 890 self.found_targets[key] = sys_class 891 return sys_class 892 except KeyError: 893 pass 894 895 self.found_targets[key] = None 896 return None
897 898 # This function determines the set of rules which must be created 899 # to get from the old type to the new type.
    def find_path(self, old_class, new_class):
        """Determine the sequence of scaffold nodes (rules) needed to get
        from old_class to new_class.

        Results are cached in self.found_paths.  When new_class is None a
        terminal "old half rule" is appended so the user can be shown a
        definition to complete.  Nodes invented here are appended to
        self.extra_nodes; rules in the returned list may be invalid, and
        callers are expected to check sn.rule.valid.
        """
        key = get_path_key(old_class, new_class)

        # Return any path already found in the cache
        try:
            return self.found_paths[key]
        except KeyError:
            pass

        # If the new_class is none, e.g., a message has been moved and
        # we are lacking a proper rename rule, such that find-target
        # failed, the best we can do is create a half-rule from the
        # end-point
        if new_class is None:
            sn_range = self.scaffold_range(old_class._type, "")

            found_start = False

            # First look for a trivial match of old_class in the scaffold
            for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
                # Skip until we find the class we're trying to match
                if (tmp_sn.old_class._type != old_class._type):
                    continue
                if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
                    sn_range = sn_range[ind:]
                    found_start = True
                    break

            # Next see if we can create a valid rule
            if not found_start:
                for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
                    if (tmp_sn.old_class._type != old_class._type):
                        continue
                    new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
                    R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                    if R.valid:
                        R.find_sub_paths()
                        sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                        self.extra_nodes.append(sn)
                        sn_range = sn_range[ind:]
                        sn_range.insert(0,sn)
                        found_start = True
                        break

            # Terminate the path with an always-invalid half rule whose
            # definition the user can fill in.
            if sn_range == []:
                tmp_class = old_class
            else:
                tmp_class = sn_range[-1].new_class

            new_rule = self.make_old_half_rule(tmp_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            sn = ScaffoldNode(tmp_class, None, R)
            sn_range.append(sn)
            self.extra_nodes.append(sn)
            self.found_paths[key] = sn_range
            return sn_range

        # If the messages are the same, there is no actual path
        if (old_class._type == new_class._type and old_class._full_text.strip() == new_class._full_text.strip()):
            self.found_paths[key] = []
            return []

        sn_range = self.scaffold_range(old_class._type, new_class._type)

        # If we have no scaffolding, we just try to create the one path
        if sn_range == []:
            new_rule = self.make_update_rule(old_class, new_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            R.find_sub_paths()
            sn = ScaffoldNode(old_class, new_class, R)
            self.extra_nodes.append(sn)
            self.found_paths[key] = [sn]
            return [sn]


        # Search for the stop point in the scaffold
        found_stop = False

        # First look for a trivial match
        for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
            # Stop looking early if the classes don't match
            if (tmp_sn.new_class._type != new_class._type):
                break
            if get_message_key(tmp_sn.new_class) == get_message_key(new_class):
                sn_range = sn_range[:ind+1]
                found_stop = True
                break

        # Next see if we can create a valid rule, including the sub rules
        if not found_stop:
            for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
                if (tmp_sn.new_class._type != new_class._type):
                    break
                new_rule = self.make_update_rule(tmp_sn.new_class, new_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    R.find_sub_paths()
                    if R.sub_rules_valid:
                        sn = ScaffoldNode(tmp_sn.new_class, new_class, R)
                        self.extra_nodes.append(sn)
                        sn_range = sn_range[:ind+1]
                        sn_range.append(sn)
                        found_stop = True
                        break

        # If there were no valid implicit rules, we suggest a new one from to the end
        if not found_stop:
            new_rule = self.make_update_rule(sn_range[-1].new_class, new_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            R.find_sub_paths()
            sn = ScaffoldNode(sn_range[-1].new_class, new_class, R)
            self.extra_nodes.append(sn)
            sn_range.append(sn)

        # Search for the start point in the scaffold
        found_start = False

        # First look for a trivial match
        for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
            # Skip until we find the class we're trying to match
            if (tmp_sn.old_class._type != old_class._type):
                continue
            if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
                sn_range = sn_range[ind:]
                found_start = True
                break

        # Next see if we can create a valid rule directly to the end, including the sub rules
        if not found_start:
            new_rule = self.make_update_rule(old_class, new_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            if R.valid:
                R.find_sub_paths()
                if R.sub_rules_valid:
                    sn = ScaffoldNode(old_class, new_class, R)
                    self.extra_nodes.append(sn)
                    self.found_paths[key] = [sn]
                    return [sn]

        # Next see if we can create a valid rule, including the sub rules
        if not found_start:
            for (ind, tmp_sn) in reversed(list(zip(range(len(sn_range)), sn_range))):
                if (tmp_sn.old_class._type != old_class._type):
                    continue
                new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    R.find_sub_paths()
                    if R.sub_rules_valid:
                        sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                        self.extra_nodes.append(sn)
                        sn_range = sn_range[ind:]
                        sn_range.insert(0,sn)
                        found_start = True
                        break

        # If there were no valid implicit rules, we suggest a new one from the beginning
        if not found_start:
            new_rule = self.make_update_rule(old_class, sn_range[0].old_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            R.find_sub_paths()
            sn = ScaffoldNode(old_class, sn_range[0].old_class, R)
            self.extra_nodes.append(sn)
            sn_range.insert(0,sn)

        self.found_paths[key] = sn_range
        return sn_range
1066 1067
1068 - def migrate_raw(self, msg_from, msg_to):
1069 path = self.find_path(msg_from[4], msg_to[4]) 1070 1071 if False in [sn.rule.valid for sn in path]: 1072 raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from[0], msg_to[0])) 1073 1074 # Short cut to speed up case of matching md5sum: 1075 if path == [] or msg_from[2] == msg_to[2]: 1076 return (msg_to[0], msg_from[1], msg_to[2], msg_to[3], msg_to[4]) 1077 1078 tmp_msg = path[0].old_class() 1079 tmp_msg.deserialize(msg_from[1]) 1080 1081 for sn in path: 1082 tmp_msg = sn.rule.apply(tmp_msg) 1083 1084 buff = StringIO() 1085 tmp_msg.serialize(buff) 1086 1087 return (msg_to[0], buff.getvalue(), msg_to[2], msg_to[3], msg_to[4])
1088 1089 1090
1091 - def migrate(self, msg_from, msg_to):
1092 path = self.find_path(msg_from.__class__, msg_to.__class__) 1093 1094 if False in [sn.rule.valid for sn in path]: 1095 raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type)) 1096 1097 # Short cut to speed up case of matching md5sum: 1098 if path == [] or msg_from._md5sum == msg_to._md5sum: 1099 buff = StringIO() 1100 msg_from.serialize(buff) 1101 msg_to.deserialize(buff.getvalue()) 1102 return 1103 1104 if len(path) > 0: 1105 buff = StringIO() 1106 msg_from.serialize(buff) 1107 1108 tmp_msg = path[0].old_class() 1109 1110 tmp_msg.deserialize(buff.getvalue()) 1111 1112 for sn in path: 1113 tmp_msg = sn.rule.apply(tmp_msg) 1114 else: 1115 tmp_msg = msg_from 1116 1117 buff = StringIO() 1118 tmp_msg.serialize(buff) 1119 msg_to.deserialize(buff.getvalue())
1120
1121 - def migrate_array(self, msg_from_array, msg_to_array):
1122 if len(msg_from_array) != len(msg_to_array): 1123 raise BagMigrationException("Migrate array called on on arrays of unequal length.") 1124 1125 if len(msg_from_array) == 0: 1126 return 1127 1128 path = self.find_path(msg_from_array[0].__class__, msg_to_array[0].__class__) 1129 1130 if path is None: 1131 raise BagMigrationException("Migrate called, but no migration path from [%s] to [%s]"%(msg_from._type, msg_to._type)) 1132 1133 # Short cut to speed up case of matching md5sum: 1134 if path == []: 1135 for i in range(len(msg_from_array)): 1136 buff = StringIO() 1137 msg_from_array[i].serialize(buff) 1138 msg_to_array[i].deserialize(buff.getvalue()) 1139 return 1140 1141 for i in range(len(msg_from_array)): 1142 buff = StringIO() 1143 tmp_msg = path[0].old_class() 1144 msg_from_array[i].serialize(buff) 1145 tmp_msg.deserialize(buff.getvalue()) 1146 for sn in path: 1147 tmp_msg = sn.rule.apply(tmp_msg) 1148 1149 buff = StringIO() 1150 tmp_msg.serialize(buff) 1151 msg_to_array[i].deserialize(buff.getvalue())
1152
    def make_update_rule(self, old_class, new_class):
        """Generate a MessageUpdateRule subclass migrating old_class to
        new_class.

        The class body is assembled as a source string and exec'd so the
        resulting rule can print its own definition (get_class_def) for
        the user to review or complete.  The rule is marked invalid when
        constants were removed/changed, fields cannot be matched, array
        shapes differ, or a nested type has no migration path.
        """
        name = "update_%s_%s"%(old_class._type.replace("/","_"), old_class._md5sum)

        # We assemble the class as a string and then exec it to end up with a class
        # that can essentially print its own definition.
        classdef = "class %s(MessageUpdateRule):\n"%name
        classdef += "\told_type = \"%s\"\n"%old_class._type
        classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip()
        classdef += "\tnew_type = \"%s\"\n"%new_class._type
        classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip()
        classdef += "\n"
        classdef += "\torder = 0"
        classdef += "\n"

        validdef = "\tvalid = True\n"

        migratedefs = "\tmigrated_types = ["

        updatedef = "\tdef update(self, old_msg, new_msg):\n"

        old_consts = constants_from_def(old_class._type, old_class._full_text)
        new_consts = constants_from_def(new_class._type, new_class._full_text)

        # Constants must be a superset of the old ones, otherwise the
        # generated rule needs human attention.
        if (not new_consts >= old_consts):
            validdef = "\tvalid = False\n"
            for c in (old_consts - new_consts):
                updatedef += "\t\t#Constant '%s' has changed\n"%(c[0],)

        old_slots = []
        old_slots.extend(old_class.__slots__)

        migrations_seen = []

        # Assign across primitives, self.migrate or self.migrate_array non-primitives
        for (s,t) in zip(new_class.__slots__, new_class._slot_types):
            warn_msg = None
            new_base_type, new_is_array, new_array_len = genmsg.msgs.parse_type(t)
            try:
                ind = old_class.__slots__.index(s)
                old_slots.remove(s)
                old_base_type, old_is_array, old_array_len = genmsg.msgs.parse_type(old_class._slot_types[ind])

                if new_is_array != old_is_array:
                    warn_msg = "Could not match array with nonarray"

                elif new_array_len != old_array_len:
                    if old_array_len is None:
                        warn_msg = "Converted from variable length array to fixed array of length %d"%(new_array_len)
                    elif new_array_len is None:
                        warn_msg = "Converted from fixed array of length %d to variable length"%(old_array_len)
                    else:
                        warn_msg = "Fixed length array converted from %d to %d"%(old_array_len,new_array_len)

                elif genmsg.msgs.is_builtin(new_base_type):
                    if new_base_type != old_base_type:
                        warn_msg = "Primitive type changed"
                    else:
                        updatedef += "\t\tnew_msg.%s = old_msg.%s\n"%(s,s)

                else:
                    tmp_old_type = clean_name(old_base_type, old_class._type)
                    tmp_new_type = clean_name(new_base_type, new_class._type)

                    tmp_qualified_old_type = qualified_name(old_base_type, old_class._type)
                    tmp_qualified_new_type = qualified_name(new_base_type, new_class._type)

                    # Verify the type can theoretically be migrated
                    if (tmp_qualified_old_type == tmp_qualified_new_type) or \
                           (tmp_qualified_old_type in self.rename_map and
                            tmp_qualified_new_type in self.rename_map[tmp_qualified_old_type]):

                        if (tmp_old_type, tmp_new_type) not in migrations_seen:
                            migratedefs += "\n\t\t(\"%s\",\"%s\"),"%(tmp_old_type, tmp_new_type)
                            migrations_seen.append((tmp_old_type, tmp_new_type))

                        if not new_is_array:
                            updatedef += "\t\tself.migrate(old_msg.%s, new_msg.%s)\n"%(s,s)
                        else:
                            updatedef += "\t\tself.migrate_array(old_msg.%s, new_msg.%s, \"%s\")\n"%(s,s,new_base_type)
                    else:
                        warn_msg = "No migration path between [%s] and [%s]"%(tmp_old_type, tmp_new_type)
            except ValueError:
                # .index(s) failed: the new field has no counterpart.
                warn_msg = "No matching field name in old message"

            if warn_msg is not None:
                # Any unmatched field invalidates the rule and gets a
                # commented placeholder plus a default assignment.
                validdef = "\tvalid = False\n"
                updatedef += "\t\t#%s\n"%warn_msg
                updatedef += "\t\tnew_msg.%s = %s\n"%(s,migration_default_value(t))

        migratedefs += "]\n"

        # Leftover old fields also invalidate the rule.
        if old_slots:
            validdef = "\tvalid = False\n"
            for s in old_slots:
                updatedef += "\t\t#No field to match field %s from old message\n"%(s)

        classdef += migratedefs + '\n' + validdef + '\n' + updatedef

        printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef

        # This is probably a TERRIBLE idea?
        # NOTE(review): exec() into function locals followed by
        # locals()[name] relies on CPython frame behavior and breaks
        # under PEP 667 (Python 3.13+); an explicit namespace dict would
        # be safer — confirm target Python versions.
        exec(printclassdef)
        return locals()[name]
1256
1257 - def make_old_half_rule(self, old_class):
1258 name = "update__%s__%s"%(old_class._type.replace("/","_"), old_class._md5sum) 1259 1260 # We assemble the class as a string and then exec it to end up with a class 1261 # that can essentially print its own definition. 1262 classdef = "class %s(MessageUpdateRule):\n"%name 1263 classdef += "\told_type = \"%s\"\n"%old_class._type 1264 classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip() 1265 classdef += "\tnew_type = \"\"\n" 1266 classdef += "\tnew_full_text = \"\"\"\n\n\"\"\"\n" 1267 classdef += "\n" 1268 classdef += "\torder = 0" 1269 classdef += "\n" 1270 1271 validdef = "\tvalid = False\n" 1272 1273 migratedefs = "\tmigrated_types = []\n" 1274 1275 updatedef = "\tdef update(self, old_msg, new_msg):\n" 1276 updatedef += "\t\tpass\n" 1277 1278 classdef += migratedefs + '\n' + validdef + '\n' + updatedef 1279 1280 printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef 1281 1282 # This is probably a TERRIBLE idea? 1283 exec(printclassdef) 1284 return locals()[name]
1285
1286 - def make_new_half_rule(self, new_class):
1287 name = "update_to_%s_%s"%(new_class._type.replace("/","_"), new_class._md5sum) 1288 1289 # We assemble the class as a string and then exec it to end up with a class 1290 # that can essentially print its own definition. 1291 classdef = "class %s(MessageUpdateRule):\n"%name 1292 classdef += "\told_type = \"\"\n" 1293 classdef += "\told_full_text = \"\"\"\n\n\"\"\"\n\n" 1294 classdef += "\tnew_type = \"%s\"\n"%new_class._type 1295 classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip() 1296 classdef += "\n" 1297 classdef += "\torder = 0" 1298 classdef += "\n" 1299 1300 validdef = "\tvalid = False\n" 1301 1302 migratedefs = "\tmigrated_types = []\n" 1303 1304 updatedef = "\tdef update(self, old_msg, new_msg):\n" 1305 updatedef += "\t\tpass\n" 1306 1307 classdef += migratedefs + '\n' + validdef + '\n' + updatedef 1308 1309 printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef 1310 1311 # This is probably a TERRIBLE idea? 1312 exec(printclassdef) 1313 return locals()[name]
1314
def migration_default_value(field_type):
    """Return Python source text for a sensible default value of field_type.

    The returned string is code to be embedded in a generated update()
    body, not a value itself.
    """
    integer_types = ['bool', 'byte', 'int8', 'int16', 'int32', 'int64',
                     'char', 'uint8', 'uint16', 'uint32', 'uint64']
    if field_type in integer_types:
        return '0'
    if field_type in ['float32', 'float64']:
        return '0.'
    if field_type == 'string':
        # strings, byte[], and uint8s are all optimized to be strings
        return "''"
    if field_type.endswith(']'):  # array type
        base_type, is_array, array_len = genmsg.msgs.parse_type(field_type)
        if base_type in ['byte', 'uint8']:
            # strings, byte[], and uint8s are all optimized to be strings
            if array_len is not None:
                return "chr(0)*%s"%array_len
            return "''"
        if array_len is None:  # var-length
            return '[]'
        # fixed-length: fill with per-element defaults
        element_default = migration_default_value(base_type)
        return '[' + ','.join([element_default] * array_len) + ']'
    # message type: instantiate via the rule's class loader
    return "self.get_new_class('%s')()"%field_type
1339
def constants_from_def(core_type, msg_def):
    """Extract the constants declared by a message definition.

    Only the core message is parsed; dependency definitions appended
    after the '=' separator lines are ignored (they would require extra
    handling to determine their type names).  Returns a set of
    (name, value, type) tuples.
    """
    pkg_name, _ = genmsg.package_resource_name(core_type)

    # The full text concatenates the core .msg with its dependencies,
    # separated by a line of 80 '=' characters.
    separator = '\n' + '=' * 80 + '\n'
    core_msg = msg_def.split(separator)[0]

    # Build a MsgSpec for the core .msg text.
    from genmsg import MsgContext
    context = MsgContext.create_default()
    spec = genmsg.msg_loader.load_msg_from_string(context, core_msg, pkg_name)

    return {(const.name, const.val, const.type) for const in spec.constants}
1359