Package rosbag :: Module migration
[frames] | [no frames]

Source Code for Module rosbag.migration

   1  # Software License Agreement (BSD License) 
   2  # 
   3  # Copyright (c) 2009, Willow Garage, Inc. 
   4  # All rights reserved. 
   5  # 
   6  # Redistribution and use in source and binary forms, with or without 
   7  # modification, are permitted provided that the following conditions 
   8  # are met: 
   9  # 
  10  #  * Redistributions of source code must retain the above copyright 
  11  #    notice, this list of conditions and the following disclaimer. 
  12  #  * Redistributions in binary form must reproduce the above 
  13  #    copyright notice, this list of conditions and the following 
  14  #    disclaimer in the documentation and/or other materials provided 
  15  #    with the distribution. 
  16  #  * Neither the name of Willow Garage, Inc. nor the names of its 
  17  #    contributors may be used to endorse or promote products derived 
  18  #    from this software without specific prior written permission. 
  19  # 
  20  # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 
  21  # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 
  22  # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 
  23  # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 
  24  # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 
  25  # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
  26  # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
  27  # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
  28  # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 
  29  # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 
  30  # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
  31  # POSSIBILITY OF SUCH DAMAGE. 
  32   
  33  PKG = 'rosbag' 
  34  import roslib; roslib.load_manifest(PKG) 
  35   
  36  import collections 
  37  import copy 
  38  from cStringIO import StringIO 
  39  import inspect 
  40  import itertools 
  41  import os 
  42  import string 
  43  import sys 
  44   
  45  import roslib.rospack 
  46  import roslib.message 
  47  import roslib.msgs 
  48   
  49  import rosbag 
  50   
  51  # Anything outside the scope of these primitives is a submessage 
  52  #_PRIMITIVES = ['bool', 'byte','int8','int16','int32','int64','char','uint8','uint16','uint32','uint64','float32','float64','string','time'] 
  53   
class BagMigrationException(Exception):
    """Raised when a bag migration operation cannot be carried out."""
56
def checkbag(migrator, inbag):
    """
    Check whether a bag file can be played in the current system.
    @param migrator: message migrator to use
    @param inbag: name of the bag to be checked.
    @returns A list of tuples for each type in the bag file.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    seen_keys = set()
    migrations = []

    bag = rosbag.Bag(inbag, 'r')

    for topic, msg, t in bag.read_messages(raw=True):
        # msg is a raw tuple; msg[4] is the message class.
        key = get_message_key(msg[4])
        if key in seen_keys:
            continue
        target = migrator.find_target(msg[4])
        # Even in the case of a zero-length path (matching md5sums), we still
        # want to migrate in the event of a type change (message move).
        path = migrator.find_path(msg[4], target)
        if len(path) > 0:
            bad_rules = [r for r in migrator.expand_rules([sn.rule for sn in path]) if r.valid == False]
            migrations.append((path, bad_rules))
        seen_keys.add(key)

    bag.close()

    return migrations
87
def checkmessages(migrator, messages):
    """
    Check whether a list of message classes can be played in the current system.
    @param migrator: the message migrator to use
    @param messages: a list of message classes.
    @returns A list of tuples for each type in the list.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    seen_keys = set()
    migrations = []

    for msg in messages:
        key = get_message_key(msg)
        if key in seen_keys:
            continue
        target = migrator.find_target(msg)
        # Even in the case of a zero-length path (matching md5sums), we still
        # want to migrate in the event of a type change (message move).
        path = migrator.find_path(msg, target)
        if len(path) > 0:
            bad_rules = [r for r in migrator.expand_rules([sn.rule for sn in path]) if r.valid == False]
            migrations.append((path, bad_rules))
        seen_keys.add(key)

    return migrations
115 116 ## Fix a bag so that it can be played in the current system 117 # 118 # @param migrator The message migrator to use 119 # @param inbag Name of the bag to be fixed. 120 # @param outbag Name of the bag to be saved. 121 # @returns True if migration was successful.
def fixbag(migrator, inbag, outbag):
    """Fix a bag so that it can be played in the current system.

    @param migrator: the message migrator to use
    @param inbag: name of the bag to be fixed.
    @param outbag: name of the bag to be saved.
    @returns True if migration was successful.
    """
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    # Bail out early if any discovered path still has invalid rules.
    if any(m[1] != [] for m in res):
        return False

    # Deserializing all messages is inefficient, but we can speed this up later
    bag = rosbag.Bag(inbag, 'r')
    rebag = rosbag.Bag(outbag, 'w', options=bag.options)
    for topic, msg, t in bag.read_messages(raw=True):
        target_class = migrator.find_target(msg[4])
        migrated = migrator.migrate_raw(msg, (target_class._type, None, target_class._md5sum, None, target_class))
        rebag.write(topic, migrated, t, raw=True)
    rebag.close()
    bag.close()
    return True
139 140 ## Fix a bag so that it can be played in the current system 141 # 142 # @param migrator The message migrator to use 143 # @param inbag Name of the bag to be fixed. 144 # @param outbag Name of the bag to be saved. 145 # @returns [] if bag could be migrated, otherwise, it returns the list of necessary migration paths
def fixbag2(migrator, inbag, outbag, force=False):
    """Fix a bag so that it can be played in the current system.

    @param migrator: the message migrator to use
    @param inbag: name of the bag to be fixed.
    @param outbag: name of the bag to be saved.
    @param force: rewrite the bag even if some paths have invalid rules.
    @returns [] if the bag could be migrated; otherwise the list of
    necessary migration paths.
    """
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    migrations = [m for m in res if len(m[1]) > 0]

    # Deserializing all messages is inefficient, but we can speed this up later
    if not migrations or force:
        bag = rosbag.Bag(inbag, 'r')
        rebag = rosbag.Bag(outbag, 'w', options=bag.options)
        for topic, msg, t in bag.read_messages(raw=True):
            target_class = migrator.find_target(msg[4])
            if target_class is not None:
                migrated = migrator.migrate_raw(msg, (target_class._type, None, target_class._md5sum, None, target_class))
                rebag.write(topic, migrated, t, raw=True)
            else:
                # No known target: copy the record through unchanged.
                rebag.write(topic, msg, t, raw=True)
        rebag.close()
        bag.close()

    return [] if force else migrations
170 171 ## Helper function to strip out roslib and package name from name usages. 172 # 173 # There is some inconsistency in whether a fully-qualified path is 174 # used for sub-messages within a given message. This function is 175 # useful for stripping out the package name in a fully qualified 176 # sub-message. 177 # 178 # @param name The name to clean. 179 # @param top_name The name of the top-level type 180 # @returns The cleaned version of the name.
def clean_name(name, top_name):
    """Strip std_msgs and the top-level package name from a type name.

    There is some inconsistency in whether a fully-qualified path is
    used for sub-messages within a given message.  This function strips
    out the package name of a fully-qualified sub-message.

    @param name: the name to clean.
    @param top_name: the name of the top-level type (``pkg/Type``).
    @returns The cleaned version of the name.
    """
    name_split = name.split('/')
    # Remove (at most one occurrence of) each implicit package prefix,
    # in the same order the original code did: std_msgs first, then the
    # top-level type's package.
    for pkg in ('std_msgs', top_name.split('/')[0]):
        try:
            name_split.remove(pkg)
        except ValueError:
            pass
    # str.join replaces the deprecated string.join (removed in Python 3).
    return '/'.join(name_split)
193 194 ## Helper function to ensure we end up with a qualified name 195 # 196 # There is some inconsistency in whether a fully-qualified path is 197 # used for sub-messages within a given message. This function is 198 # useful for ensuring that a name is fully qualified correctly. 199 # 200 # @param name The name to quailfy 201 # @param top_name The name of the top-level type 202 # @returns The qualified version of the name.
def qualified_name(name, top_name):
    """Ensure we end up with a fully-qualified message type name.

    @param name: the name to qualify
    @param top_name: the name of the top-level type
    @returns The qualified version of the name.
    """
    # First clean the name, to make everything else more deterministic.
    cleaned = clean_name(name, top_name)

    # Already package-qualified, or a builtin type: leave it alone.
    if len(cleaned.split('/')) == 2 or roslib.msgs.is_builtin(cleaned):
        return cleaned
    # Bare Header always belongs to std_msgs.
    if cleaned == 'Header':
        return 'std_msgs/Header'
    # Otherwise qualify with the top-level type's package.
    return top_name.split('/')[0] + '/' + cleaned
213 214 ## Helper function to return a key from a given class 215 # 216 # For now, we choose the tuple (type,md5sum) as a unique key for the 217 # class. However, this is subject to change and assumptions about keys 218 # should not be made other than their uniqueness. 219 # 220 # @param c The message class or instance to get a key for 221 # @returns The unique key
def get_message_key(c):
    """Return a unique key for a message class or instance.

    For now the key is the tuple (type, md5sum); callers must not rely
    on anything beyond its uniqueness.

    @param c: the message class or instance to get a key for
    @returns The unique key, or None if c has no type/md5sum attributes.
    """
    try:
        return (c._type, c._md5sum)
    except AttributeError:
        # Narrowed from a bare except: only a missing attribute means
        # "not a message"; any other error should propagate.
        return None
227 228 ## Helper function to return a key for a given path 229 # 230 # For now, we choose the tuple ((type1,md5sum1),(type2,md5sum2)) as a 231 # unique key for the path. However, this is subject to change and 232 # assumptions about keys should not be made other than their 233 # uniqueness. 234 # 235 # @param c1 The start point of the path 236 # @param c1 The stop point of the path 237 # @returns The unique key
def get_path_key(c1, c2):
    # Unique key for a migration path: the pair of message keys
    # ((type1, md5sum1), (type2, md5sum2)).  Callers must treat this as
    # opaque apart from its uniqueness.
    #
    # c1: start point of the path; c2: stop point of the path.
    try:
        return (get_message_key(c1), get_message_key(c2))
    except:
        # Defensive only: get_message_key already returns None instead of
        # raising, so this branch is not expected to trigger.
        return None
243 244 ## Base class for all message update rules
class MessageUpdateRule(object):
    """Base class for all message update rules.

    A subclass declares the old/new type names and full-text message
    definitions, the (old, new) sub-type pairs it migrates, an order
    number within its rule chain, and overrides update() to perform the
    actual field-level conversion.
    """
    # Overridden by subclasses: type names and full message definitions.
    old_type = ''
    old_full_text = ''
    new_type = ''
    new_full_text = ''
    # Pairs (old_subtype, new_subtype) converted via migrate()/migrate_array().
    migrated_types = []

    # Position of this rule within the rule chain for old_type.
    order = -1

    # Must be set True by the rule author; invalid rules are never applied.
    valid = False

    ## Initialize class
    #
    # @param migrator The MessageMigrator that owns this rule.
    # @param location Human-readable origin (rule file : class name) for diagnostics.
    def __init__(self, migrator, location):
        # Every rule needs to hang onto the migrator so we can potentially use it
        self.migrator = migrator
        self.location = location

        # A rule whose type name changes is a rename rule; rename rules
        # terminate a rule chain.
        if (self.old_type != self.new_type):
            self.rename_rule = True
        else:
            self.rename_rule = False

        # Instantiate types dynamically based on definition.  An empty or
        # unparsable definition leaves the corresponding class as None
        # (generated half-rules rely on this).
        try:
            if self.old_type == "":
                raise Exception
            self.old_types = roslib.genpy.generate_dynamic(self.old_type, self.old_full_text)
            self.old_class = self.old_types[self.old_type]
            self.old_md5sum = self.old_class._md5sum
        except:
            self.old_types = []
            self.old_class = None
            self.old_md5sum = ""

        try:
            if self.new_type == "":
                raise Exception
            self.new_types = roslib.genpy.generate_dynamic(self.new_type, self.new_full_text)
            self.new_class = self.new_types[self.new_type]
            self.new_md5sum = self.new_class._md5sum
        except:
            self.new_types = []
            self.new_class = None
            self.new_md5sum = ""

        # We have not populated our sub rules (and ideally should
        # wait until the full scaffold exists before doing this)
        self.sub_rules_done = False
        self.sub_rules_valid = False
        self.sub_rules = []

    ## Find all of the sub paths
    #
    # For any migrated type the user might want to use, we must make
    # sure the migrator has found a path for it.  To facilitate this
    # check we require that all migrated types be listed as pairs in
    # the migrated_types field.
    #
    # It would be nice not to need these through performing some kind
    # of other inspection of the update rule itself.
    def find_sub_paths(self):
        self.sub_rules_valid = True
        for (t1, t2) in self.migrated_types:
            try:
                tmp_old_class = self.get_old_class(t1)
            except KeyError:
                print >> sys.stderr, "WARNING: Within rule [%s], specified migrated type [%s] not found in old message types"%(self.location,t1)
                self.sub_rules_valid = False
                continue
            try:
                tmp_new_class = self.get_new_class(t2)
            except KeyError:
                print >> sys.stderr, "WARNING: Within rule [%s], specified migrated type [%s] not found in new message types"%(self.location,t2)
                self.sub_rules_valid = False
                continue

            # If a rule instantiates itself as a subrule (because the
            # author knows the md5sums match), we don't want to end up
            # with an infinite recursion.
            if (get_message_key(tmp_old_class) != get_message_key(self.old_class)) or (get_message_key(tmp_new_class) != get_message_key(self.new_class)):
                path = self.migrator.find_path(tmp_old_class, tmp_new_class)
                rules = [sn.rule for sn in path]
                self.sub_rules.extend(rules)

            if False in [r.valid for r in self.sub_rules]:
                print >> sys.stderr, "WARNING: Within rule [%s] cannot migrate from subtype [%s] to [%s].."%(
                    self.location, t1, t2)
                self.sub_rules_valid = False
                continue
        self.sub_rules = self.migrator.filter_rules_unique(self.sub_rules)
        self.sub_rules_done = True

    ## Helper function to get the class of a submsg for the new type
    #
    # This function should be used inside of update to access new classes.
    # Lookup is tried unqualified, then under std_msgs, then under the
    # rule's own package.
    #
    # @param t The subtype to return the class of
    # @returns The class of the new sub type
    def get_new_class(self,t):
        try:
            try:
                return self.new_types[t]
            except KeyError:
                return self.new_types['std_msgs/' + t]
        except KeyError:
            return self.new_types[self.new_type.split('/')[0] + '/' + t]

    ## Helper function to get the class of a submsg for the old type
    #
    # This function should be used inside of update to access old classes.
    # Lookup is tried unqualified, then under std_msgs, then under the
    # rule's own package.
    #
    # @param t The subtype to return the class of
    # @returns The class of the old sub type
    def get_old_class(self,t):
        try:
            try:
                return self.old_types[t]
            except KeyError:
                return self.old_types['std_msgs/' + t]
        except KeyError:
            return self.old_types[self.old_type.split('/')[0] + '/' + t]

    ## Actually migrate one sub_type to another
    #
    # This function should be used inside of update to migrate sub msgs.
    # The (old, new) pair must have been declared in migrated_types.
    #
    # @param msg_from A message instance of the old message type
    # @param msg_to A message instance of a new message type to be populated
    def migrate(self, msg_from, msg_to):
        tmp_msg_from = clean_name(msg_from._type, self.old_type)
        tmp_msg_to = clean_name(msg_to._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))
        self.migrator.migrate(msg_from, msg_to)

    ## Helper function to migrate a whole array of messages
    #
    # This function should be used inside of update to migrate arrays of sub msgs.
    #
    # @param msg_from_array An array of messages of the old message type
    # @param msg_to_array An array of messages of the new message type (this will be emptied if not already)
    # @param msg_to_name The name of the new message type since msg_to_array may be an empty array.
    def migrate_array(self, msg_from_array, msg_to_array, msg_to_name):
        msg_to_class = self.get_new_class(msg_to_name)

        # Empty the destination in place so the caller's list object is reused.
        while len(msg_to_array) > 0:
            msg_to_array.pop()

        if (len(msg_from_array) == 0):
            return

        tmp_msg_from = clean_name(msg_from_array[0]._type, self.old_type)
        tmp_msg_to = clean_name(msg_to_class._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))

        # Pre-populate with default-constructed targets, then migrate pairwise.
        msg_to_array.extend( [msg_to_class() for i in xrange(len(msg_from_array))] )

        self.migrator.migrate_array(msg_from_array, msg_to_array)

    ## A helper function to print out the definition of autogenerated messages.
    #
    # No-op in the base class; subclasses may override.
    def get_class_def(self):
        pass

    ## The function actually called by the message migrator
    #
    # Validates that the rule is applicable before delegating to update().
    #
    # @param old_msg An instance of the old message type.
    # @returns An instance of a new message type
    def apply(self, old_msg):
        if not self.valid:
            raise BagMigrationException("Attempted to apply an invalid rule")
        if not self.sub_rules_done:
            raise BagMigrationException("Attempted to apply a rule without building up its sub rules")
        if not self.sub_rules_valid:
            raise BagMigrationException("Attempted to apply a rule without valid sub-rules")
        if (get_message_key(old_msg) != get_message_key(self.old_class)):
            raise BagMigrationException("Attempted to apply rule to incorrect class %s %s."%(get_message_key(old_msg),get_message_key(self.old_class)))

        # Apply update rule
        new_msg = self.new_class()
        self.update(old_msg, new_msg)

        return new_msg

    ## The function which a user overrides to actually perform the message update
    #
    # @param old_msg A message instance of the old message type
    # @param new_msg A message instance of a new message type to be populated
    def update(self, old_msg, new_msg):
        raise BagMigrationException("Tried to use rule without update overidden")
435 436 437 ## A class for book-keeping about rule-chains. 438 # 439 # Rule chains define the ordered set of update rules, indexed by 440 # typename, terminated by a rename rule. This class is only used 441 # temporarily to help us get the ordering right, until all explicit 442 # rules have been loaded (possibly out of order) and the proper 443 # scaffold can be built.
class RuleChain(object):
    """Bookkeeping for one type's ordered update rules.

    Holds the ordered list of update rules for a type, the set of order
    numbers already used, and the optional terminating rename rule.
    """
    def __init__(self):
        # Ordered update rules for this type.
        self.chain = []
        # Order numbers already claimed by rules in the chain.
        self.order_keys = set()
        # Optional rename rule terminating the chain.
        self.rename = None
449 450 451 ## A class for arranging the ordered rules 452 # 453 # They provide a scaffolding (essentially a linked list) over which we 454 # assume we can migrate messages forward. This allows us to verify a 455 # path exists before actually creating all of the necessary implicit 456 # rules (mostly migration of sub-messages) that such a path 457 # necessitates.
class ScaffoldNode(object):
    """One link in the migration scaffold (essentially a linked list).

    Records the old/new classes a rule converts between, the rule itself
    (None for implicit rules generated later), and the next node.
    """
    def __init__(self, old_class, new_class, rule):
        self.old_class, self.new_class, self.rule = old_class, new_class, rule
        self.next = None
464 465 ## A class to actually migrate messages 466 # 467 # This is the big class that actually handles all of the fancy 468 # migration work. Better documentation to come later.
469 -class MessageMigrator(object):
    # NOTE(review): input_rule_files uses a mutable default ([]); it is only
    # iterated, never mutated, so this is harmless here — but worth cleaning up.
    def __init__(self, input_rule_files=[], plugins=True):
        # We use the rulechains to scaffold our initial creation of
        # implicit rules.  Each RuleChain is keyed off of a type and
        # consists of an ordered set of update rules followed by an
        # optional rename rule.  For the system rule definitions to be
        # valid, all members of a rulechain must be connectable via
        # implicit rules and all rulechains must terminate in a known
        # system type which is also reachable by an implicit rule.
        self.rulechains = collections.defaultdict(RuleChain)

        # The list of all nodes that we can iterate through in the
        # future when making sure all rules have been constructed.
        self.base_nodes = []

        # The list of extra (non-scaffolded) nodes that we can use
        # when determining if all rules are valid and printing invalid
        # rules.
        self.extra_nodes = []

        # A map from typename to the first node of a particular type
        self.first_type = {}

        # A map from a typename to all other typenames for which
        # rename rules exist.  This is necessary to determine whether
        # an appropriate implicit rule can actually be constructed.
        self.rename_map = {}

        # The cached set of all found paths, keyed by:
        # ((old_type, old_md5), (new_type, new_md5))
        self.found_paths = {}
        self.found_targets = {}

        # Temporary list of the terminal nodes
        terminal_nodes = []

        # Temporary list of rule modules we are loading
        rule_dicts = []

        self.false_rule_loaded = False

        # To make debugging easy we can pass in a list of local
        # rulefiles.
        for r in input_rule_files:
            try:
                scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                execfile(r,scratch_locals)
                rule_dicts.append((scratch_locals, r))
            except:
                print >> sys.stderr, "Cannot load rule file [%s] in local package"%r

        # Alternatively the preferred method is to load definitions
        # from the migration ruleset export flag.
        if plugins:
            for dep,export in [('rosbagmigration','rule_file'),('rosbag','migration_rule_file')]:
                for pkg in roslib.rospack.rospack_depends_on_1(dep):
                    m_file = roslib.manifest.manifest_file(pkg, True)
                    m = roslib.manifest.parse_file(m_file)
                    p_rules = m.get_export(dep,export)
                    pkg_dir = roslib.packages.get_pkg_dir(pkg)
                    for r in p_rules:
                        if dep == 'rosbagmigration':
                            print >> sys.stderr, """WARNING: The package: [%s] is using a deprecated rosbagmigration export.
    The export in the manifest should be changed to:
    <rosbag migration_rule_file="%s"/>
"""%(pkg, r)
                        try:
                            scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                            execfile(pkg_dir + "/" + r,scratch_locals)
                            rule_dicts.append((scratch_locals, r))
                        except ImportError:
                            print >> sys.stderr, "Cannot load rule file [%s] in package [%s]"%(r, pkg)

        # Instantiate every MessageUpdateRule subclass found in the loaded
        # rule files and register it.
        for (rule_dict, location_base) in rule_dicts:
            for (n,c) in rule_dict.iteritems():
                if inspect.isclass(c):
                    if (not c == MessageUpdateRule) and issubclass(c, MessageUpdateRule):
                        self.add_update_rule(c(self, location_base + ':' + n))

        if self.false_rule_loaded:
            raise BagMigrationException("Cannot instantiate MessageMigrator with invalid rules")

        # Now, go through and build up a better scaffolded
        # representation, deferring implicit rule generation until
        # complete, since the implicit rule generation and sub-rule
        # population makes use of the scaffold.

        # First, process each particular type chain (now including implicit
        # rules).  Additionally, build up our name remapping lists.

        # For each rulechain
        for (type,rulechain) in self.rulechains.iteritems():
            first = True
            sn = None
            prev_sn = None

            # Find name remapping list by following rename rules forward.
            rename_set = set([type])
            tmp = rulechain.rename
            while tmp:
                rename_set.add(tmp.new_type)
                if (self.rulechains.has_key(tmp.new_type)):
                    tmp = self.rulechains[tmp.new_type].rename
                else:
                    break

            self.rename_map[type] = rename_set

            # For each element in the rulechain chain,
            for r in rulechain.chain:
                # Create a scaffoldnode
                sn = ScaffoldNode(r.old_class, r.new_class, r)
                self.base_nodes.append(sn)
                # If it's the first one, stick it in our first_type map
                if first:
                    self.first_type[type] = sn
                    first = False
                # If there was a previous node, link them if keys
                # match, or else create an implicit SN
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                # The just-created node now becomes the previous
                prev_sn = sn

            # If there is a rename rule
            if rulechain.rename:
                # Create a scaffoldnode
                sn = ScaffoldNode(rulechain.rename.old_class, rulechain.rename.new_class, rulechain.rename)
                self.base_nodes.append(sn)

                # Same rules apply here as when we created each node
                # from chain.  Link if possible, otherwise create
                # implicit
                if first:
                    self.first_type[type] = sn
                    first = False
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                prev_sn = sn
                terminal_nodes.append(sn)
            # If there was not a rename rule, this must be a terminal node
            else:
                if prev_sn:
                    terminal_nodes.append(prev_sn)

        # Between our partial scaffold and name remapping list, we can
        # now generate rules, though we cannot yet populate the
        # subrules.
        for sn in terminal_nodes:
            key = get_message_key(sn.new_class)

            renamed = (sn.old_class._type != sn.new_class._type)

            sys_class = roslib.message.get_message_class(sn.new_class._type)

            # If we map directly to a system-defined class we're done
            if sys_class:
                new_rule = self.make_update_rule(sn.new_class, sys_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    sn.next = ScaffoldNode(sn.new_class, sys_class, R)
                    self.base_nodes.append(sn.next)

            if renamed:
                tmp_sns = self.scaffold_range(sn.new_class._type, sn.new_class._type)

                # If we don't map to a scaffold range, we appear to be done
                if tmp_sns == []:
                    if sys_class is not None:
                        sn.next = ScaffoldNode(sn.new_class, sys_class, None)
                        self.base_nodes.append(sn.next)
                        continue

                # Otherwise look for trivial bridges
                for tmp_sn in reversed(tmp_sns):
                    tmp_key = get_message_key(tmp_sn.old_class)
                    if (key == tmp_key):
                        sn.next = tmp_sn
                        break

                # If we did not find a trivial bridge, we instead need
                # to create the right implicit rule ourselves.  This
                # is based on the ability to create a valid implicit
                # rule as LATE in the chain as possible.  We do this
                # to avoid extra conversions in some boundary
                # circumstances.
                if (sn.next is None):
                    for tmp_sn in reversed(tmp_sns):
                        new_rule = self.make_update_rule(sn.new_class, tmp_sn.old_class)
                        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                        if R.valid:
                            sn.next = ScaffoldNode(sn.new_class, tmp_sn.old_class, R)
                            self.base_nodes.append(sn.next)
                            break

            # If we have still failed we need to create a placeholder.
            if (sn.next is None):
                if sys_class:
                    new_rule = self.make_update_rule(sn.new_class, sys_class)
                else:
                    new_rule = self.make_old_half_rule(sn.new_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                sn.next = ScaffoldNode(sn.new_class, None, R)
                self.base_nodes.append(sn.next)

        # Now that our scaffolding is actually complete, we iterate
        # through all of our rules and generate the rules for which we
        # have scaffoldnodes, but no rule yet
        for sn in self.base_nodes:
            if (sn.rule is None):
                new_rule = self.make_update_rule(sn.old_class, sn.new_class)
                sn.rule = new_rule(self, 'GENERATED.' + new_rule.__name__)

        # Finally, we go through and try to find sub_paths for every
        # rule in the system so far
        for sn in self.base_nodes:
            sn.rule.find_sub_paths()

        # Construction should be done, we can now use the system in
        # the event that we don't have invalid update rules.

        self.class_dict = {}

        for sn in self.base_nodes + self.extra_nodes:
            self.class_dict[get_message_key(sn.old_class)] = sn.old_class
            self.class_dict[get_message_key(sn.new_class)] = sn.new_class
712 713
714 - def lookup_type(self, key):
715 if key in self.class_dict: 716 return self.class_dict[key] 717 else: 718 return None
719 720 # Add an update rule to our set of rule chains
    # Add an update rule to our set of rule chains.
    #
    # Invalid rules set false_rule_loaded (fatal at construction time).
    # Rename rules terminate a chain and must carry the largest order
    # number; non-rename rules are inserted in order, rejecting duplicate
    # order numbers.
    def add_update_rule(self, r):
        if r.valid == False:
            print >> sys.stderr, "ERROR: Update rule [%s] has valid set to False."%(r.location)
            self.false_rule_loaded = True
            return

        rulechain = self.rulechains[r.old_type]

        if r.rename_rule:
            # Only one rename rule may terminate a chain.
            if (rulechain.rename != None):
                print >> sys.stderr, "WARNING: Update rules [%s] and [%s] both attempting to rename type [%s]. Ignoring [%s]"%(
                    rulechain.rename.location, r.location, r.old_type, r.location)
                return

            # Search forward to make sure we haven't created a cycle
            cycle = []
            tmp = r
            while tmp:
                cycle.append(tmp)
                if (tmp.new_type == r.old_type):
                    print >> sys.stderr, "WARNING: Update rules %s introduce a renaming cycle. Ignoring [%s]"%(
                        [x.location for x in cycle],r.location)
                    return
                if (self.rulechains.has_key(tmp.new_type)):
                    tmp = self.rulechains[tmp.new_type].rename
                else:
                    break

            # The rename rule must come after every non-rename rule.
            if rulechain.chain and (r.order <= rulechain.chain[-1].order):
                print >> sys.stderr, "WARNING: Update rule [%s] which performs rename does not have largest order number. Ignoring"%(
                    r.location)
                return

            rulechain.rename = r

        else:
            if r.order in rulechain.order_keys:
                otherind = [x.order for x in rulechain.chain].index(r.order)
                print >> sys.stderr, "WARNING: Update rules [%s] and [%s] for type [%s] have the same order number. Ignoring [%s]"%(
                    rulechain.chain[otherind].location, r.location, r.old_type, r.location)
                return
            else:
                # NOTE(review): this compares an order number against a rule
                # OBJECT (rulechain.chain[-1]); it was presumably meant to be
                # rulechain.rename.order — verify before changing.
                if rulechain.rename and (r.order >= rulechain.chain[-1]):
                    print >> sys.stderr, "WARNING: Update rule [%s] has order number larger than rename rule [%s]. Ignoring"%(
                        r.location, rulechain.rename.location)
                    return
                # Insert the rule into a rule chain
                rulechain.order_keys.add(r.order)
                rulechain.chain.append(r)
                rulechain.chain.sort(key=lambda x: x.order)
772 773 # Helper function to determine if all rules are valid
774 - def all_rules_valid(self):
775 base_valid = not False in [sn.rule.valid for sn in self.base_nodes] 776 extra_valid = not False in [sn.rule.valid for sn in self.extra_nodes] 777 return base_valid and extra_valid
778 779 # Helper function to print out the definitions for all invalid rules (which include definitions)
780 - def get_invalid_rules(self):
781 invalid_rules = [] 782 invalid_rule_cache = [] 783 for sn in self.base_nodes: 784 if not sn.rule.valid: 785 path_key = get_path_key(sn.old_class, sn.new_class) 786 if (path_key not in invalid_rule_cache): 787 invalid_rules.append(sn.rule) 788 invalid_rule_cache.append(path_key) 789 for sn in self.extra_nodes: 790 if not sn.rule.valid: 791 path_key = get_path_key(sn.old_class, sn.new_class) 792 if (path_key not in invalid_rule_cache): 793 invalid_rules.append(sn.rule) 794 invalid_rule_cache.append(path_key) 795 return invalid_rules
796 797 # Helper function to remove non-unique rules
798 - def filter_rules_unique(self, rules):
799 rule_cache = [] 800 new_rules = [] 801 for r in rules: 802 path_key = get_path_key(r.old_class, r.new_class) 803 if (path_key not in rule_cache): 804 new_rules.append(r) 805 return new_rules
806 807 # Helper function to expand a list of rules to include subrules
808 - def expand_rules(self, rules):
809 filtered = self.filter_rules_unique(rules) 810 expanded = [] 811 for r in filtered: 812 expanded.append(r) 813 #print "For rule %s --> %s"%(r.old_class._type, r.new_class._type) 814 expanded.extend(self.expand_rules(r.sub_rules)) 815 filtered = self.filter_rules_unique(expanded) 816 return filtered
817
818 - def scaffold_range(self, old_type, new_type):
819 try: 820 first_sn = self.first_type[old_type] 821 822 sn_range = [first_sn] 823 824 found_new_type = False 825 826 tmp_sn = first_sn 827 828 while (tmp_sn.next is not None and tmp_sn.next.new_class is not None): 829 # print sn_range 830 tmp_sn = tmp_sn.next 831 if (tmp_sn != first_sn): 832 sn_range.append(tmp_sn) 833 if (tmp_sn.new_class._type == new_type): 834 found_new_type == True 835 if (found_new_type and tmp_sn.new_class._type != new_type): 836 break 837 838 return sn_range 839 840 except KeyError: 841 return []
842 843
    def find_target(self, old_class):
        # Resolve the class a message of old_class should be migrated to:
        # the system-defined class of the same type if it exists, otherwise
        # the system class at the end of old_class's scaffold chain, or
        # None if no target can be found.  All results (including None)
        # are memoized in self.found_targets.
        key = get_message_key(old_class)

        # Tracks the most-evolved class we reached; used for diagnostics
        # when no target exists.
        last_class = old_class

        try:
            return self.found_targets[key]
        except KeyError:

            # If the current system already knows this type, it is its own target.
            sys_class = roslib.message.get_message_class(old_class._type)

            if sys_class is not None:
                self.found_targets[key] = sys_class
                return sys_class

            # Otherwise walk this type's scaffold chain to its terminus and
            # see whether the final (possibly renamed) type is a system type.
            try:
                tmp_sn = self.first_type[old_class._type]

                if tmp_sn.new_class is not None:
                    last_class = tmp_sn.new_class

                while tmp_sn.next is not None:
                    tmp_sn = tmp_sn.next

                if tmp_sn.new_class is not None:
                    last_class = tmp_sn.new_class
                    sys_class = roslib.message.get_message_class(tmp_sn.new_class._type)
                else:
                    sys_class = None

                if sys_class is not None:
                    self.found_targets[key] = sys_class
                    return sys_class
            except KeyError:
                # No scaffold chain for this type.
                pass

            # No target exists; cache the negative result before diagnosing.
            self.found_targets[key] = None

            # Best-effort diagnostic: warn when the package still declares
            # the message but its Python code has not been built.
            (pkg, msg) = last_class._type.split('/')
            try:
                pkg_dir = roslib.packages.get_pkg_dir(pkg)
            except roslib.packages.InvalidROSPkgException:
                return None
            mtypes = roslib.msgs.list_msg_types(pkg, False)
            if msg in mtypes:
                if not os.path.isfile(os.path.join(pkg_dir, os.path.join('src', pkg, 'msg', '_%s.py'%msg))):
                    print >> sys.stderr, "WARNING: Package \'%s\' contains message '%s' but is not built."%(pkg,msg)
            return None
    # This function determines the set of rules which must be created
    # to get from the old type to the new type.
    def find_path(self, old_class, new_class):
        """Return the list of ScaffoldNodes whose rules migrate old_class to
        new_class, generating and registering placeholder rules for any gaps.

        Results are cached in self.found_paths.  A new_class of None yields a
        half-path ending in an always-invalid "old half" rule.
        """
        key = get_path_key(old_class, new_class)

        # Return any path already found in the cache
        try:
            return self.found_paths[key]
        except KeyError:
            pass

        # If the new_class is none, e.g., a message has been moved and
        # we are lacking a proper rename rule, such that find-target
        # failed, the best we can do is create a half-rule from the
        # end-point
        if new_class is None:
            sn_range = self.scaffold_range(old_class._type, "")

            found_start = False

            # Trim the scaffold to start at the node matching old_class.
            for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
                # Skip until we find the class we're trying to match
                if (tmp_sn.old_class._type != old_class._type):
                    continue
                if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
                    sn_range = sn_range[ind:]
                    found_start = True
                    break

            # Next see if we can create a valid rule
            if not found_start:
                for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
                    if (tmp_sn.old_class._type != old_class._type):
                        continue
                    new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
                    R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                    if R.valid:
                        R.find_sub_paths()
                        sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                        self.extra_nodes.append(sn)
                        sn_range = sn_range[ind:]
                        sn_range.insert(0,sn)
                        found_start = True
                        break

            # Terminate the path with an always-invalid "old half" rule so
            # the user is shown what remains to be written.
            if sn_range == []:
                tmp_class = old_class
            else:
                tmp_class = sn_range[-1].new_class

            new_rule = self.make_old_half_rule(tmp_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            sn = ScaffoldNode(tmp_class, None, R)
            sn_range.append(sn)
            self.extra_nodes.append(sn)
            self.found_paths[key] = sn_range
            return sn_range

        # If the messages are the same, there is no actually path
        if (old_class._type == new_class._type and old_class._full_text.strip() == new_class._full_text.strip()):
            self.found_paths[key] = []
            return []

        sn_range = self.scaffold_range(old_class._type, new_class._type)

        # If we have no scaffolding, we just try to create the one path
        if sn_range == []:
            new_rule = self.make_update_rule(old_class, new_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            R.find_sub_paths()
            sn = ScaffoldNode(old_class, new_class, R)
            self.extra_nodes.append(sn)
            self.found_paths[key] = [sn]
            return [sn]


        # Search for the stop point in the scaffold
        found_stop = False

        # First look for a trivial match
        for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
            # Stop looking early if the classes don't match
            if (tmp_sn.new_class._type != new_class._type):
                break
            if get_message_key(tmp_sn.new_class) == get_message_key(new_class):
                sn_range = sn_range[:ind+1]
                found_stop = True
                break

        # Next see if we can create a valid rule
        if not found_stop:
            for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
                if (tmp_sn.new_class._type != new_class._type):
                    break
                new_rule = self.make_update_rule(tmp_sn.new_class, new_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    R.find_sub_paths()
                    sn = ScaffoldNode(tmp_sn.new_class, new_class, R)
                    self.extra_nodes.append(sn)
                    sn_range = sn_range[:ind+1]
                    sn_range.append(sn)
                    found_stop = True
                    break

        # If there were no valid implicit rules, we suggest a new one from to the end
        if not found_stop:
            new_rule = self.make_update_rule(sn_range[-1].new_class, new_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            R.find_sub_paths()
            sn = ScaffoldNode(sn_range[-1].new_class, new_class, R)
            self.extra_nodes.append(sn)
            sn_range.append(sn)

        # Search for the start point in the scaffold
        found_start = False

        # First look for a trivial match
        for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
            # Skip until we find the class we're trying to match
            if (tmp_sn.old_class._type != old_class._type):
                continue
            if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
                sn_range = sn_range[ind:]
                found_start = True
                break

        # Next see if we can create a valid rule
        if not found_start:
            for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
                if (tmp_sn.old_class._type != old_class._type):
                    continue
                new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    R.find_sub_paths()
                    sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                    self.extra_nodes.append(sn)
                    sn_range = sn_range[ind:]
                    sn_range.insert(0,sn)
                    found_start = True
                    break

        # If there were no valid implicit rules, we suggest a new one from the beginning
        if not found_start:
            new_rule = self.make_update_rule(old_class, sn_range[0].old_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            R.find_sub_paths()
            sn = ScaffoldNode(old_class, sn_range[0].old_class, R)
            self.extra_nodes.append(sn)
            sn_range.insert(0,sn)

        self.found_paths[key] = sn_range
        return sn_range
1047 1048
1049 - def migrate_raw(self, msg_from, msg_to):
1050 path = self.find_path(msg_from[4], msg_to[4]) 1051 1052 if False in [sn.rule.valid for sn in path]: 1053 raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type)) 1054 1055 # Short cut to speed up case of matching md5sum: 1056 if path == [] or msg_from[2] == msg_to[2]: 1057 return (msg_to[0], msg_from[1], msg_to[2], msg_to[3], msg_to[4]) 1058 1059 tmp_msg = path[0].old_class() 1060 tmp_msg.deserialize(msg_from[1]) 1061 1062 for sn in path: 1063 tmp_msg = sn.rule.apply(tmp_msg) 1064 1065 buff = StringIO() 1066 tmp_msg.serialize(buff) 1067 1068 return (msg_to[0], buff.getvalue(), msg_to[2], msg_to[3], msg_to[4])
1069 1070 1071
1072 - def migrate(self, msg_from, msg_to):
1073 path = self.find_path(msg_from.__class__, msg_to.__class__) 1074 1075 if False in [sn.rule.valid for sn in path]: 1076 raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type)) 1077 1078 # Short cut to speed up case of matching md5sum: 1079 if path == [] or msg_from._md5sum == msg_to._md5sum: 1080 buff = StringIO() 1081 msg_from.serialize(buff) 1082 msg_to.deserialize(buff.getvalue()) 1083 return 1084 1085 if len(path) > 0: 1086 buff = StringIO() 1087 msg_from.serialize(buff) 1088 1089 tmp_msg = path[0].old_class() 1090 1091 tmp_msg.deserialize(buff.getvalue()) 1092 1093 for sn in path: 1094 tmp_msg = sn.rule.apply(tmp_msg) 1095 else: 1096 tmp_msg = msg_from 1097 1098 buff = StringIO() 1099 tmp_msg.serialize(buff) 1100 msg_to.deserialize(buff.getvalue())
1101
1102 - def migrate_array(self, msg_from_array, msg_to_array):
1103 if len(msg_from_array) != len(msg_to_array): 1104 raise BagMigrationException("Migrate array called on on arrays of unequal length.") 1105 1106 if len(msg_from_array) == 0: 1107 return 1108 1109 path = self.find_path(msg_from_array[0].__class__, msg_to_array[0].__class__) 1110 1111 if path is None: 1112 raise BagMigrationException("Migrate called, but no migration path from [%s] to [%s]"%(msg_from._type, msg_to._type)) 1113 1114 # Short cut to speed up case of matching md5sum: 1115 if path == []: 1116 for i in xrange(len(msg_from_array)): 1117 buff = StringIO() 1118 msg_from_array[i].serialize(buff) 1119 msg_to_array[i].deserialize(buff.getvalue()) 1120 return 1121 1122 for i in xrange(len(msg_from_array)): 1123 buff = StringIO() 1124 tmp_msg = path[0].old_class() 1125 msg_from_array[i].serialize(buff) 1126 tmp_msg.deserialize(buff.getvalue()) 1127 for sn in path: 1128 tmp_msg = sn.rule.apply(tmp_msg) 1129 1130 buff = StringIO() 1131 tmp_msg.serialize(buff) 1132 msg_to_array[i].deserialize(buff.getvalue())
1133
    def make_update_rule(self, old_class, new_class):
        """Generate a MessageUpdateRule subclass migrating old_class to
        new_class, by assembling the class source as a string and exec'ing
        it; the generated class can reproduce its own definition via
        get_class_def().  The rule is marked invalid whenever any field
        cannot be mapped automatically.
        """
        name = "update_%s_%s"%(old_class._type.replace("/","_"), old_class._md5sum)

        # We assemble the class as a string and then exec it to end up with a class
        # that can essentially print its own definition.
        classdef = "class %s(MessageUpdateRule):\n"%name
        classdef += "\told_type = \"%s\"\n"%old_class._type
        classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip()
        classdef += "\tnew_type = \"%s\"\n"%new_class._type
        classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip()
        classdef += "\n"
        classdef += "\torder = 0"
        classdef += "\n"

        validdef = "\tvalid = True\n"

        migratedefs = "\tmigrated_types = ["

        updatedef = "\tdef update(self, old_msg, new_msg):\n"

        old_consts = constants_from_def(old_class._type, old_class._full_text)
        new_consts = constants_from_def(new_class._type, new_class._full_text)

        # Any constant removed or changed invalidates automatic migration.
        if (not new_consts >= old_consts):
            validdef = "\tvalid = False\n"
            for c in (old_consts - new_consts):
                updatedef += "\t\t#Constant '%s' has changed\n"%(c[0],)

        # old_slots tracks fields of old_class not yet matched to a field of
        # new_class; anything left over at the end invalidates the rule.
        old_slots = []
        old_slots.extend(old_class.__slots__)

        migrations_seen = []

        # Assign across primitives, self.migrate or self.migrate_array non-primitives
        for (s,t) in zip(new_class.__slots__, new_class._slot_types):
            warn_msg = None
            new_base_type, new_is_array, new_array_len = roslib.msgs.parse_type(t)
            try:
                ind = old_class.__slots__.index(s)
                old_slots.remove(s)
                old_base_type, old_is_array, old_array_len = roslib.msgs.parse_type(old_class._slot_types[ind])

                if new_is_array != old_is_array:
                    warn_msg = "Could not match array with nonarray"

                elif new_array_len != old_array_len:
                    if old_array_len is None:
                        warn_msg = "Converted from variable length array to fixed array of length %d"%(new_array_len)
                    elif new_array_len is None:
                        warn_msg = "Converted from fixed array of length %d to variable length"%(old_array_len)
                    else:
                        warn_msg = "Fixed length array converted from %d to %d"%(old_array_len,new_array_len)

                elif roslib.msgs.is_builtin(new_base_type):
                    # Same-name builtin fields copy straight across.
                    if new_base_type != old_base_type:
                        warn_msg = "Primitive type changed"
                    else:
                        updatedef += "\t\tnew_msg.%s = old_msg.%s\n"%(s,s)

                else:
                    # Complex (message-typed) field: emit a recursive
                    # migrate / migrate_array call in the generated rule.
                    tmp_old_type = clean_name(old_base_type, old_class._type)
                    tmp_new_type = clean_name(new_base_type, new_class._type)

                    tmp_qualified_old_type = qualified_name(old_base_type, old_class._type)
                    tmp_qualified_new_type = qualified_name(new_base_type, new_class._type)

                    # Verify the type can theoretically be migrated
                    if (tmp_qualified_old_type == tmp_qualified_new_type) or \
                           (self.rename_map.has_key(tmp_qualified_old_type) and
                            tmp_qualified_new_type in self.rename_map[tmp_qualified_old_type]):

                        if (tmp_old_type, tmp_new_type) not in migrations_seen:
                            migratedefs += "\n\t\t(\"%s\",\"%s\"),"%(tmp_old_type, tmp_new_type)
                            migrations_seen.append((tmp_old_type, tmp_new_type))

                        if not new_is_array:
                            updatedef += "\t\tself.migrate(old_msg.%s, new_msg.%s)\n"%(s,s)
                        else:
                            updatedef += "\t\tself.migrate_array(old_msg.%s, new_msg.%s, \"%s\")\n"%(s,s,new_base_type)
                    else:
                        warn_msg = "No migration path between [%s] and [%s]"%(tmp_old_type, tmp_new_type)
            except ValueError:
                # .index(s) failed: new field has no counterpart in old_class.
                warn_msg = "No matching field name in old message"

            if warn_msg is not None:
                # Field could not be mapped: mark the rule invalid and emit a
                # commented explanation plus a placeholder default assignment.
                validdef = "\tvalid = False\n"
                updatedef += "\t\t#%s\n"%warn_msg
                updatedef += "\t\tnew_msg.%s = %s\n"%(s,migration_default_value(t))

        migratedefs += "]\n"

        if old_slots:
            validdef = "\tvalid = False\n"
            for s in old_slots:
                updatedef += "\t\t#No field to match field %s from old message\n"%(s)

        classdef += migratedefs + '\n' + validdef + '\n' + updatedef

        printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef

        # This is probably a TERRIBLE idea?
        exec(printclassdef)
        return locals()[name]
1237
1238 - def make_old_half_rule(self, old_class):
1239 name = "update__%s__%s"%(old_class._type.replace("/","_"), old_class._md5sum) 1240 1241 # We assemble the class as a string and then exec it to end up with a class 1242 # that can essentially print its own definition. 1243 classdef = "class %s(MessageUpdateRule):\n"%name 1244 classdef += "\told_type = \"%s\"\n"%old_class._type 1245 classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip() 1246 classdef += "\tnew_type = \"\"\n" 1247 classdef += "\tnew_full_text = \"\"\"\n\n\"\"\"\n" 1248 classdef += "\n" 1249 classdef += "\torder = 0" 1250 classdef += "\n" 1251 1252 validdef = "\tvalid = False\n" 1253 1254 migratedefs = "\tmigrated_types = []\n" 1255 1256 updatedef = "\tdef update(self, old_msg, new_msg):\n" 1257 updatedef += "\t\tpass\n" 1258 1259 classdef += migratedefs + '\n' + validdef + '\n' + updatedef 1260 1261 printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef 1262 1263 # This is probably a TERRIBLE idea? 1264 exec(printclassdef) 1265 return locals()[name]
1266
1267 - def make_new_half_rule(self, new_class):
1268 name = "update_to_%s_%s"%(new_class._type.replace("/","_"), new_class._md5sum) 1269 1270 # We assemble the class as a string and then exec it to end up with a class 1271 # that can essentially print its own definition. 1272 classdef = "class %s(MessageUpdateRule):\n"%name 1273 classdef += "\told_type = \"\"\n" 1274 classdef += "\told_full_text = \"\"\"\n\n\"\"\"\n\n" 1275 classdef += "\tnew_type = \"%s\"\n"%new_class._type 1276 classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip() 1277 classdef += "\n" 1278 classdef += "\torder = 0" 1279 classdef += "\n" 1280 1281 validdef = "\tvalid = False\n" 1282 1283 migratedefs = "\tmigrated_types = []\n" 1284 1285 updatedef = "\tdef update(self, old_msg, new_msg):\n" 1286 updatedef += "\t\tpass\n" 1287 1288 classdef += migratedefs + '\n' + validdef + '\n' + updatedef 1289 1290 printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef 1291 1292 # This is probably a TERRIBLE idea? 1293 exec(printclassdef) 1294 return locals()[name]
1295
def migration_default_value(field_type):
    """Return Python source text for a sensible default value of a message
    field of the given type, for use inside generated update rules."""
    if field_type in ['bool', 'byte', 'int8', 'int16', 'int32', 'int64',\
                      'char', 'uint8', 'uint16', 'uint32', 'uint64']:
        return '0'
    if field_type in ['float32', 'float64']:
        return '0.'
    if field_type == 'string':
        # strings, byte[], and uint8s are all optimized to be strings
        return "''"
    if field_type.endswith(']'):  # array type
        base_type, is_array, array_len = roslib.msgs.parse_type(field_type)
        if base_type in ['byte', 'uint8']:
            # strings, byte[], and uint8s are all optimized to be strings
            return "''" if array_len is None else "chr(0)*%s"%array_len
        if array_len is None:  # var-length
            return '[]'
        # fixed-length: repeat the element default array_len times
        elem_default = migration_default_value(base_type)
        return '[' + ','.join([elem_default] * array_len) + ']'
    # Message-typed field: instantiate the (possibly migrated) class.
    return "self.get_new_class('%s')()"%field_type
1320
def constants_from_def(core_type, msg_def):
    """Extract the set of (name, value, type) constant tuples declared in
    the core .msg text of core_type; dependency sections are ignored."""
    pkg_name, base_name = roslib.names.package_resource_name(core_type)

    # The full message text separates the core definition from its
    # dependencies with 80-character '=' divider lines.
    sections = msg_def.split('\n' + '=' * 80 + '\n')
    core_text = sections[0]
    # sections[1:] hold dependency definitions; resolving their type names
    # would need extra handling, so they are deliberately not parsed here.

    spec = roslib.msgs.load_from_string(core_text, pkg_name)
    return set((c.name, c.val, c.type) for c in spec.constants)
1338