Package rosbag :: Module migration
[frames] | [no frames]

Source Code for Module rosbag.migration

   1  # Software License Agreement (BSD License) 
   2  # 
   3  # Copyright (c) 2009, Willow Garage, Inc. 
   4  # All rights reserved. 
   5  # 
   6  # Redistribution and use in source and binary forms, with or without 
   7  # modification, are permitted provided that the following conditions 
   8  # are met: 
   9  # 
  10  #  * Redistributions of source code must retain the above copyright 
  11  #    notice, this list of conditions and the following disclaimer. 
  12  #  * Redistributions in binary form must reproduce the above 
  13  #    copyright notice, this list of conditions and the following 
  14  #    disclaimer in the documentation and/or other materials provided 
  15  #    with the distribution. 
  16  #  * Neither the name of Willow Garage, Inc. nor the names of its 
  17  #    contributors may be used to endorse or promote products derived 
  18  #    from this software without specific prior written permission. 
  19  # 
  20  # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 
  21  # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 
  22  # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 
  23  # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 
  24  # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 
  25  # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
  26  # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
  27  # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
  28  # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 
  29  # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 
  30  # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
  31  # POSSIBILITY OF SUCH DAMAGE. 
  32   
  33  import collections 
  34  import copy 
  35  try: 
  36      from cStringIO import StringIO  # Python 2.x 
  37  except ImportError: 
  38      from io import StringIO  # Python 3.x 
  39  import inspect 
  40  import itertools 
  41  import os 
  42  import string 
  43  import sys 
  44   
  45  import genmsg.msgs 
  46  import genpy 
  47  import genpy.dynamic 
  48   
  49  import rospkg 
  50   
  51  import rosbag 
  52   
  53  # Anything outside the scope of these primitives is a submessage 
  54  #_PRIMITIVES = ['bool', 'byte','int8','int16','int32','int64','char','uint8','uint16','uint32','uint64','float32','float64','string','time'] 
  55   
class BagMigrationException(Exception):
    """Raised when an error occurs while migrating bag messages."""
58
def checkbag(migrator, inbag):
    """
    Check whether a bag file can be played in the current system.
    @param migrator: message migrator to use
    @param inbag name of the bag to be checked.
    @returns A list of tuples for each type in the bag file.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    seen_keys = set()
    results = []

    bag = rosbag.Bag(inbag, 'r')

    for topic, msg, t in bag.read_messages(raw=True):
        msg_class = msg[4]
        key = get_message_key(msg_class)
        if key in seen_keys:
            continue
        seen_keys.add(key)

        target = migrator.find_target(msg_class)
        # Even in the case of a zero-length path (matching md5sums), we
        # still want to migrate in the event of a type change (message move).
        path = migrator.find_path(msg_class, target)
        if path:
            invalid = [r for r in migrator.expand_rules([sn.rule for sn in path]) if r.valid == False]
            results.append((path, invalid))

    bag.close()

    return results
89
def checkmessages(migrator, messages):
    """
    Check whether a list of message classes can be migrated in the current system.
    @param migrator The message migrator to use
    @param messages A list of message classes.
    @returns A list of tuples for each type in the list.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    seen_keys = set()
    results = []

    for msg_class in messages:
        key = get_message_key(msg_class)
        if key in seen_keys:
            continue
        seen_keys.add(key)

        target = migrator.find_target(msg_class)
        # Even in the case of a zero-length path (matching md5sums), we
        # still want to migrate in the event of a type change (message move).
        path = migrator.find_path(msg_class, target)
        if path:
            invalid = [r for r in migrator.expand_rules([sn.rule for sn in path]) if r.valid == False]
            results.append((path, invalid))

    return results
## Fix a bag so that it can be played in the current system
#
# @param migrator The message migrator to use
# @param inbag Name of the bag to be fixed.
# @param outbag Name of the bag to be saved.
# @returns True if migration was successful.
def fixbag(migrator, inbag, outbag):
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    # Refuse to write the output bag if any migration path has invalid rules.
    if any(m[1] != [] for m in res):
        return False

    # Deserializing all messages is inefficient, but we can speed this up later
    bag = rosbag.Bag(inbag, 'r')
    rebag = rosbag.Bag(outbag, 'w', options=bag.options)
    for topic, msg, t in bag.read_messages(raw=True):
        target_class = migrator.find_target(msg[4])
        migrated = migrator.migrate_raw(msg, (target_class._type, None, target_class._md5sum, None, target_class))
        rebag.write(topic, migrated, t, raw=True)
    rebag.close()
    bag.close()
    return True
## Fix a bag so that it can be played in the current system
#
# @param migrator The message migrator to use
# @param inbag Name of the bag to be fixed.
# @param outbag Name of the bag to be saved.
# @returns [] if bag could be migrated, otherwise, it returns the list of necessary migration paths
def fixbag2(migrator, inbag, outbag, force=False):
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    blocked = [m for m in res if len(m[1]) > 0]

    # Deserializing all messages is inefficient, but we can speed this up later
    if force or not blocked:
        bag = rosbag.Bag(inbag, 'r')
        rebag = rosbag.Bag(outbag, 'w', options=bag.options)
        for topic, msg, t in bag.read_messages(raw=True):
            target_class = migrator.find_target(msg[4])
            if target_class is None:
                # No known target: copy the raw message through untouched.
                rebag.write(topic, msg, t, raw=True)
            else:
                migrated = migrator.migrate_raw(msg, (target_class._type, None, target_class._md5sum, None, target_class))
                rebag.write(topic, migrated, t, raw=True)
        rebag.close()
        bag.close()

    if force:
        return []
    return blocked
## Helper function to strip out roslib and package name from name usages.
#
# There is some inconsistency in whether a fully-qualified path is
# used for sub-messages within a given message.  This function is
# useful for stripping out the package name in a fully qualified
# sub-message.
#
# @param name The name to clean.
# @param top_name The name of the top-level type
# @returns The cleaned version of the name.
def clean_name(name, top_name):
    name_split = name.split('/')
    # Drop the 'std_msgs' package prefix if present.
    try:
        name_split.remove('std_msgs')
    except ValueError:
        pass
    # Drop the package of the enclosing top-level type if present.
    try:
        name_split.remove(top_name.split('/')[0])
    except ValueError:
        pass
    # BUGFIX: string.join() was removed in Python 3; '/'.join() is the
    # equivalent and works on both Python 2 and 3.
    new_name = '/'.join(name_split)
    return new_name
## Helper function to ensure we end up with a qualified name
#
# There is some inconsistency in whether a fully-qualified path is
# used for sub-messages within a given message.  This function is
# useful for ensuring that a name is fully qualified correctly.
#
# @param name The name to quailfy
# @param top_name The name of the top-level type
# @returns The qualified version of the name.
def qualified_name(name, top_name):
    # First clean the name, to make everything else more deterministic
    cleaned = clean_name(name, top_name)

    if len(cleaned.split('/')) == 2 or (genmsg.msgs.is_builtin(cleaned)):
        return cleaned
    if cleaned == 'Header':
        return 'std_msgs/Header'
    return top_name.split('/')[0] + '/' + cleaned
## Helper function to return a key from a given class
#
# For now, we choose the tuple (type,md5sum) as a unique key for the
# class.  However, this is subject to change and assumptions about keys
# should not be made other than their uniqueness.
#
# @param c The message class or instance to get a key for
# @returns The unique key, or None if the class has no type/md5sum
def get_message_key(c):
    try:
        return (c._type, c._md5sum)
    # BUGFIX: a bare 'except:' also swallowed KeyboardInterrupt and
    # SystemExit; catching Exception preserves the intended best-effort
    # behavior (return None for objects lacking the attributes).
    except Exception:
        return None
## Helper function to return a key for a given path
#
# For now, we choose the tuple ((type1,md5sum1),(type2,md5sum2)) as a
# unique key for the path.  However, this is subject to change and
# assumptions about keys should not be made other than their
# uniqueness.
#
# @param c1 The start point of the path
# @param c2 The stop point of the path
# @returns The unique key
def get_path_key(c1, c2):
    try:
        start_key = get_message_key(c1)
        stop_key = get_message_key(c2)
        return (start_key, stop_key)
    except:
        return None
245 246 ## Base class for all message update rules
class MessageUpdateRule(object):
    """
    Base class for all message update rules.

    Concrete rules (either written by users in rule files or generated by
    MessageMigrator) subclass this, set the class attributes below, and
    override update() to copy data from an old message instance into a
    new one.
    """
    # Defaults overridden by concrete rule subclasses.
    old_type = ''
    old_full_text = ''
    new_type = ''
    new_full_text = ''
    # NOTE(review): class-level mutable list; subclasses are expected to
    # shadow it with their own value rather than mutate it.
    migrated_types = []

    # Position of this rule within its type's rule chain.
    order = -1

    valid = False

    ## Initialize class
    def __init__(self, migrator, location):
        """
        @param migrator: the owning MessageMigrator (used to migrate sub-messages)
        @param location: human-readable origin of this rule (file:classname),
          used in warning/error messages.
        """
        # Every rule needs to hang onto the migrator so we can potentially use it
        self.migrator = migrator
        self.location = location

        # A rule whose type name changes is a rename rule.
        if (self.old_type != self.new_type):
            self.rename_rule = True
        else:
            self.rename_rule = False

        # Instantiate types dynamically based on definition
        try:
            if self.old_type == "":
                raise Exception
            self.old_types = genpy.dynamic.generate_dynamic(self.old_type, self.old_full_text)
            self.old_class = self.old_types[self.old_type]
            self.old_md5sum = self.old_class._md5sum
        except:
            # Any failure (including an empty old_type) leaves the old side unset.
            self.old_types = []
            self.old_class = None
            self.old_md5sum = ""

        try:
            if self.new_type == "":
                raise Exception
            self.new_types = genpy.dynamic.generate_dynamic(self.new_type, self.new_full_text)
            self.new_class = self.new_types[self.new_type]
            self.new_md5sum = self.new_class._md5sum
        except:
            # Same best-effort handling for the new side.
            self.new_types = []
            self.new_class = None
            self.new_md5sum = ""

        # We have not populated our sub rules (and ideally should
        # wait until the full scaffold exists before doing this)
        self.sub_rules_done = False
        self.sub_rules_valid = False
        self.sub_rules = []

    ## Find all of the sub paths
    #
    # For any migrated type the user might want to use, we must make
    # sure the migrator had found a path for it.  To facilitate this
    # check we require that all migrated types must be listed as pairs
    # in the migrated_types field.
    #
    # It would be nice not to need these through performing some kind
    # of other inspection of the update rule itself.
    def find_sub_paths(self):
        """Resolve migration paths for every (old, new) pair in migrated_types,
        accumulating their rules into self.sub_rules and recording validity."""
        self.sub_rules_valid = True
        for (t1, t2) in self.migrated_types:
            try:
                tmp_old_class = self.get_old_class(t1)
            except KeyError:
                print >> sys.stderr, "WARNING: Within rule [%s], specified migrated type [%s] not found in old message types"%(self.location,t1)
                self.sub_rules_valid = False
                continue
            try:
                tmp_new_class = self.get_new_class(t2)
            except KeyError:
                print >> sys.stderr, "WARNING: Within rule [%s], specified migrated type [%s] not found in new message types"%(self.location,t2)
                self.sub_rules_valid = False
                continue

            # If a rule instantiates itself as a subrule (because the
            # author knows the md5sums match), we don't want to end up
            # with an infinite recursion.
            if (get_message_key(tmp_old_class) != get_message_key(self.old_class)) or (get_message_key(tmp_new_class) != get_message_key(self.new_class)):
                path = self.migrator.find_path(tmp_old_class, tmp_new_class)
                rules = [sn.rule for sn in path]
                self.sub_rules.extend(rules)

            if False in [r.valid for r in self.sub_rules]:
                print >> sys.stderr, "WARNING: Within rule [%s] cannot migrate from subtype [%s] to [%s].."%(
                    self.location, t1, t2)
                self.sub_rules_valid = False
                continue
        # De-duplicate accumulated sub-rules once all pairs are processed.
        self.sub_rules = self.migrator.filter_rules_unique(self.sub_rules)
        self.sub_rules_done = True

    ## Helper function to get the class of a submsg for the new type
    #
    # This function should be used inside of update to access new classes.
    #
    # @param t The subtype to return the class of
    # @returns The class of the new sub type
    def get_new_class(self,t):
        # Try the name as given, then 'std_msgs/'-qualified, then qualified
        # with this rule's own package; the last lookup's KeyError propagates.
        try:
            try:
                return self.new_types[t]
            except KeyError:
                return self.new_types['std_msgs/' + t]
        except KeyError:
            return self.new_types[self.new_type.split('/')[0] + '/' + t]

    ## Helper function to get the class of a submsg for the old type
    #
    # This function should be used inside of update to access old classes.
    #
    # @param t The subtype to return the class of
    # @returns The class of the old sub type
    def get_old_class(self,t):
        # Same three-step lookup as get_new_class, against the old types.
        try:
            try:
                return self.old_types[t]
            except KeyError:
                return self.old_types['std_msgs/' + t]
        except KeyError:
            return self.old_types[self.old_type.split('/')[0] + '/' + t]

    ## Actually migrate one sub_type to another
    #
    # This function should be used inside of update to migrate sub msgs.
    #
    # @param msg_from A message instance of the old message type
    # @param msg_to A message instance of a new message type to be populated
    def migrate(self, msg_from, msg_to):
        tmp_msg_from = clean_name(msg_from._type, self.old_type)
        tmp_msg_to = clean_name(msg_to._type, self.new_type)
        # Only pairs declared in migrated_types may be migrated through here.
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))
        self.migrator.migrate(msg_from, msg_to)

    ## Helper function to migrate a whole array of messages
    #
    # This function should be used inside of update to migrate arrays of sub msgs.
    #
    # @param msg_from_array An array of messages of the old message type
    # @param msg_to_array An array of messages of the new message type (this will be emptied if not already)
    # @param msg_to_name The name of the new message type since msg_to_array may be an empty array.
    def migrate_array(self, msg_from_array, msg_to_array, msg_to_name):
        msg_to_class = self.get_new_class(msg_to_name)

        # Empty the destination array in place (callers may hold a reference).
        while len(msg_to_array) > 0:
            msg_to_array.pop()

        if (len(msg_from_array) == 0):
            return

        tmp_msg_from = clean_name(msg_from_array[0]._type, self.old_type)
        tmp_msg_to = clean_name(msg_to_class._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))

        # Pre-populate the destination with default-constructed messages.
        msg_to_array.extend( [msg_to_class() for i in xrange(len(msg_from_array))] )

        self.migrator.migrate_array(msg_from_array, msg_to_array)

    ## A helper function to print out the definiton of autogenerated messages.
    def get_class_def(self):
        # No-op in the base class; generated rules are expected to provide one.
        pass

    ## The function actually called by the message migrator
    #
    # @param old_msg An instance of the old message type.
    # @returns An instance of a new message type
    def apply(self, old_msg):
        # Refuse to run unless the rule and its sub-rules are fully set up.
        if not self.valid:
            raise BagMigrationException("Attempted to apply an invalid rule")
        if not self.sub_rules_done:
            raise BagMigrationException("Attempted to apply a rule without building up its sub rules")
        if not self.sub_rules_valid:
            raise BagMigrationException("Attempted to apply a rule without valid sub-rules")
        if (get_message_key(old_msg) != get_message_key(self.old_class)):
            raise BagMigrationException("Attempted to apply rule to incorrect class %s %s."%(get_message_key(old_msg),get_message_key(self.old_class)))

        # Apply update rule
        new_msg = self.new_class()
        self.update(old_msg, new_msg)

        return new_msg

    ## The function which a user overrides to actually perform the message update
    #
    # @param old_msg A message instance of the old message type
    # @param new_msg A message instance of a new message type to be populated
    def update(self, old_msg, new_msg):
        raise BagMigrationException("Tried to use rule without update overidden")
437 438 439 ## A class for book-keeping about rule-chains. 440 # 441 # Rule chains define the ordered set of update rules, indexed by 442 # typename, terminated by a rename rule. This class is only used 443 # temporarily to help us get the ordering right, until all explicit 444 # rules have been loaded (possibly out of order) and the proper 445 # scaffold can be built.
class RuleChain(object):
    """Ordered set of update rules for one type, optionally ending in a rename rule."""
    def __init__(self):
        self.chain = []          # update rules, kept sorted by order number
        self.order_keys = set()  # order numbers already present in the chain
        self.rename = None       # terminal rename rule, if any
451 452 453 ## A class for arranging the ordered rules 454 # 455 # They provide a scaffolding (essentially a linked list) over which we 456 # assume we can migrate messages forward. This allows us to verify a 457 # path exists before actually creating all of the necessary implicit 458 # rules (mostly migration of sub-messages) that such a path 459 # necessitates.
class ScaffoldNode(object):
    """One link in the migration scaffold (a singly-linked chain of classes)."""
    def __init__(self, old_class, new_class, rule):
        # Endpoints of this migration step and the rule that performs it.
        self.old_class = old_class
        self.new_class = new_class
        self.rule = rule
        # Next node in the chain; linked up while scaffolding.
        self.next = None
466 467 ## A class to actually migrate messages 468 # 469 # This is the big class that actually handles all of the fancy 470 # migration work. Better documentation to come later.
471 -class MessageMigrator(object):
    def __init__(self, input_rule_files=[], plugins=True):
        """
        Load all migration rules and build the migration scaffold.

        @param input_rule_files: optional list of local rule-file paths to load
          (useful for debugging).  NOTE(review): mutable default argument -- it
          is only iterated, never mutated, so this is safe but unidiomatic.
        @param plugins: if True, also load rules exported by packages through
          the rosbag/rosbagmigration manifest export flags.
        @raises BagMigrationException: if any loaded rule has valid set to False.
        """
        # We use the rulechains to scaffold our initial creation of
        # implicit rules. Each RuleChain is keyed off of a type and
        # consists of an ordered set of update rules followed by an
        # optional rename rule. For the system rule definitions to be
        # valid, all members of a rulechains must be connectable via
        # implicit rules and all rulechains must terminate in a known
        # system type which is also reachable by an implicit rule.
        self.rulechains = collections.defaultdict(RuleChain)

        # The list of all nodes that we can iterate through in the
        # future when making sure all rules have been constructed.
        self.base_nodes = []

        # The list of extra (non-scaffolded) nodes that we can use
        # when determining if all rules are valid and printing invalid
        # rules.
        self.extra_nodes = []

        # A map from typename to the first node of a particular type
        self.first_type = {}

        # A map from a typename to all other typenames for which
        # rename rules exist. This is necessary to determine whether
        # an appropriate implicit rule can actually be constructed.
        self.rename_map = {}

        # The cached set of all found paths, keyed by:
        # ((old_type, old_md5), (new_type, new_md5))
        self.found_paths = {}
        self.found_targets = {}

        # Temporary list of the terminal nodes
        terminal_nodes = []

        # Temporary list of rule modules we are loading
        rule_dicts = []

        self.false_rule_loaded = False

        # To make debugging easy we can pass in a list of local
        # rulefiles.
        for r in input_rule_files:
            try:
                scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                execfile(r,scratch_locals)
                rule_dicts.append((scratch_locals, r))
            except:
                print >> sys.stderr, "Cannot load rule file [%s] in local package"%r

        # Alternatively the preferred method is to load definitions
        # from the migration ruleset export flag.
        if plugins:
            rospack = rospkg.RosPack()
            for dep,export in [('rosbagmigration','rule_file'),('rosbag','migration_rule_file')]:
                for pkg in rospack.get_depends_on(dep, implicit=False):
                    m = rospack.get_manifest(pkg)
                    p_rules = m.get_export(dep,export)
                    pkg_dir = rospack.get_path(pkg)
                    for r in p_rules:
                        if dep == 'rosbagmigration':
                            print >> sys.stderr, """WARNING: The package: [%s] is using a deprecated rosbagmigration export.
    The export in the manifest should be changed to:
    <rosbag migration_rule_file="%s"/>
"""%(pkg, r)
                        try:
                            scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
                            execfile(pkg_dir + "/" + r,scratch_locals)
                            rule_dicts.append((scratch_locals, r))
                        except ImportError:
                            print >> sys.stderr, "Cannot load rule file [%s] in package [%s]"%(r, pkg)

        # Every MessageUpdateRule subclass found in a loaded rule file is
        # instantiated and registered into its rule chain.
        for (rule_dict, location_base) in rule_dicts:
            for (n,c) in rule_dict.iteritems():
                if inspect.isclass(c):
                    if (not c == MessageUpdateRule) and issubclass(c, MessageUpdateRule):
                        self.add_update_rule(c(self, location_base + ':' + n))

        if self.false_rule_loaded:
            raise BagMigrationException("Cannot instantiate MessageMigrator with invalid rules")

        # Now, go through and build up a better scaffolded
        # representation, deferring implicit rule generation until
        # complete, since the implicit rule generation and sub-rule
        # population makes use of the scaffold.

        # First we each particular type chain (now including implicit
        # rules). Additionally, we build up our name remapping lists.

        # For Each rulechain
        for (type,rulechain) in self.rulechains.iteritems():
            first = True
            sn = None
            prev_sn = None

            # Find name remapping list: follow the rename rules forward to
            # collect every typename this type can become.
            rename_set = set([type])
            tmp = rulechain.rename
            while tmp:
                rename_set.add(tmp.new_type)
                if (self.rulechains.has_key(tmp.new_type)):
                    tmp = self.rulechains[tmp.new_type].rename
                else:
                    break

            self.rename_map[type] = rename_set

            # For each element in the rulechain chain,
            for r in rulechain.chain:
                # Create a scaffoldnode
                sn = ScaffoldNode(r.old_class, r.new_class, r)
                self.base_nodes.append(sn)
                # If it's the first one, stick it in our first_type map
                if first:
                    self.first_type[type] = sn
                    first = False
                # If there was a previous node, link them if keys
                # match, or else create an implicit SN
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                # The just-created node now becomes the previous
                prev_sn = sn

            # If there is a rename rule
            if rulechain.rename:
                # Create a scaffoldnode
                sn = ScaffoldNode(rulechain.rename.old_class, rulechain.rename.new_class, rulechain.rename)
                self.base_nodes.append(sn)

                # Same rules apply here as when we created each node
                # from chain.  Link if possible, otherwise create
                # implicit
                if first:
                    self.first_type[type] = sn
                    first = False
                if prev_sn:
                    if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
                        prev_sn.next = sn
                    else:
                        implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
                        self.base_nodes.append(implicit_sn)
                        prev_sn.next = implicit_sn
                        implicit_sn.next = sn
                prev_sn = sn
                terminal_nodes.append(sn)
            # If there was not a rename rule, this must be a terminal node
            else:
                if prev_sn:
                    terminal_nodes.append(prev_sn)

        # Between our partial scaffold and name remapping list, we can
        # now GENERATE rules, though we cannot yet populate the
        # subrules.

        for sn in terminal_nodes:
            key = get_message_key(sn.new_class)

            renamed = (sn.old_class._type != sn.new_class._type)

            sys_class = genpy.message.get_message_class(sn.new_class._type)

            # If we map directly to a system-defined class we're done
            if sys_class:
                new_rule = self.make_update_rule(sn.new_class, sys_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    sn.next = ScaffoldNode(sn.new_class, sys_class, R)
                    self.base_nodes.append(sn.next)

            if renamed:
                tmp_sns = self.scaffold_range(sn.new_class._type, sn.new_class._type)

                # If we don't map to a scaffold range, we appear to be done
                if tmp_sns == []:
                    if sys_class is not None:
                        sn.next = ScaffoldNode(sn.new_class, sys_class, None)
                        self.base_nodes.append(sn.next)
                    continue

                # Otherwise look for trivial bridges
                for tmp_sn in reversed(tmp_sns):
                    tmp_key = get_message_key(tmp_sn.old_class)
                    if (key == tmp_key):
                        sn.next = tmp_sn
                        break

                # If we did not find a trivial bridge, we instead need
                # to create the right implicit rule ourselves.  This
                # is based on the ability to create a valid implicit
                # rule as LATE in the chain as possible.  We do this
                # to avoid extra conversions in some boundary
                # circumstances.
                if (sn.next is None):
                    for tmp_sn in reversed(tmp_sns):
                        new_rule = self.make_update_rule(sn.new_class, tmp_sn.old_class)
                        R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                        if R.valid:
                            sn.next = ScaffoldNode(sn.new_class, tmp_sn.old_class, R)
                            self.base_nodes.append(sn.next)
                            break

            # If we have still failed we need to create a placeholder.
            if (sn.next is None):
                if sys_class:
                    new_rule = self.make_update_rule(sn.new_class, sys_class)
                else:
                    new_rule = self.make_old_half_rule(sn.new_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                sn.next = ScaffoldNode(sn.new_class, None, R)
                self.base_nodes.append(sn.next)

        # Now that our scaffolding is actually complete, we iterate
        # through all of our rules and generate the rules for which we
        # have scaffoldnodes, but no rule yet
        for sn in self.base_nodes:
            if (sn.rule is None):
                new_rule = self.make_update_rule(sn.old_class, sn.new_class)
                sn.rule = new_rule(self, 'GENERATED.' + new_rule.__name__)

        # Finally, we go through and try to find sub_paths for every
        # rule in the system so far
        for sn in self.base_nodes:
            sn.rule.find_sub_paths()

        # Construction should be done, we can now use the system in
        # the event that we don't have invalid update rules.

        self.class_dict = {}

        for sn in self.base_nodes + self.extra_nodes:
            self.class_dict[get_message_key(sn.old_class)] = sn.old_class
            self.class_dict[get_message_key(sn.new_class)] = sn.new_class
714 715
716 - def lookup_type(self, key):
717 if key in self.class_dict: 718 return self.class_dict[key] 719 else: 720 return None
721 722 # Add an update rule to our set of rule chains
723 - def add_update_rule(self, r):
724 if r.valid == False: 725 print >> sys.stderr, "ERROR: Update rule [%s] has valid set to False."%(r.location) 726 self.false_rule_loaded = True 727 return 728 729 rulechain = self.rulechains[r.old_type] 730 731 if r.rename_rule: 732 if (rulechain.rename != None): 733 print >> sys.stderr, "WARNING: Update rules [%s] and [%s] both attempting to rename type [%s]. Ignoring [%s]"%( 734 rulechain.rename.location, r.location, r.old_type, r.location) 735 return 736 737 # Search forward to make sure we havn't created a cycle 738 cycle = [] 739 tmp = r 740 while tmp: 741 cycle.append(tmp) 742 if (tmp.new_type == r.old_type): 743 print >> sys.stderr, "WARNING: Update rules %s introduce a renaming cycle. Ignoring [%s]"%( 744 [x.location for x in cycle],r.location) 745 return 746 if (self.rulechains.has_key(tmp.new_type)): 747 tmp = self.rulechains[tmp.new_type].rename 748 else: 749 break 750 751 752 if rulechain.chain and (r.order <= rulechain.chain[-1].order): 753 print >> sys.stderr, "WARNING: Update rule [%s] which performs rename does not have largest order number. Ignoring"%( 754 r.location) 755 return 756 757 rulechain.rename = r 758 759 else: 760 if r.order in rulechain.order_keys: 761 otherind = [x.order for x in rulechain.chain].index(r.order) 762 print >> sys.stderr, "WARNING: Update rules [%s] and [%s] for type [%s] have the same order number. Ignoring [%s]"%( 763 rulechain.chain[otherind].location, r.location, r.old_type, r.location) 764 return 765 else: 766 if rulechain.rename and (r.order >= rulechain.chain[-1]): 767 print >> sys.stderr, "WARNING: Update rule [%s] has order number larger than rename rule [%s]. Ignoring"%( 768 r.location, rulechain.rename.location) 769 return 770 # Insert the rule into a rule chain 771 rulechain.order_keys.add(r.order) 772 rulechain.chain.append(r) 773 rulechain.chain.sort(key=lambda x: x.order)
774 775 # Helper function to determine if all rules are valid
776 - def all_rules_valid(self):
777 base_valid = not False in [sn.rule.valid for sn in self.base_nodes] 778 extra_valid = not False in [sn.rule.valid for sn in self.extra_nodes] 779 return base_valid and extra_valid
780 781 # Helper function to print out the definitions for all invalid rules (which include definitions)
782 - def get_invalid_rules(self):
783 invalid_rules = [] 784 invalid_rule_cache = [] 785 for sn in self.base_nodes: 786 if not sn.rule.valid: 787 path_key = get_path_key(sn.old_class, sn.new_class) 788 if (path_key not in invalid_rule_cache): 789 invalid_rules.append(sn.rule) 790 invalid_rule_cache.append(path_key) 791 for sn in self.extra_nodes: 792 if not sn.rule.valid: 793 path_key = get_path_key(sn.old_class, sn.new_class) 794 if (path_key not in invalid_rule_cache): 795 invalid_rules.append(sn.rule) 796 invalid_rule_cache.append(path_key) 797 return invalid_rules
798 799 # Helper function to remove non-unique rules
800 - def filter_rules_unique(self, rules):
801 rule_cache = [] 802 new_rules = [] 803 for r in rules: 804 path_key = get_path_key(r.old_class, r.new_class) 805 if (path_key not in rule_cache): 806 new_rules.append(r) 807 return new_rules
808 809 # Helper function to expand a list of rules to include subrules
810 - def expand_rules(self, rules):
811 filtered = self.filter_rules_unique(rules) 812 expanded = [] 813 for r in filtered: 814 expanded.append(r) 815 #print "For rule %s --> %s"%(r.old_class._type, r.new_class._type) 816 expanded.extend(self.expand_rules(r.sub_rules)) 817 filtered = self.filter_rules_unique(expanded) 818 return filtered
819
820 - def scaffold_range(self, old_type, new_type):
821 try: 822 first_sn = self.first_type[old_type] 823 824 sn_range = [first_sn] 825 826 found_new_type = False 827 828 tmp_sn = first_sn 829 830 while (tmp_sn.next is not None and tmp_sn.next.new_class is not None): 831 # print sn_range 832 tmp_sn = tmp_sn.next 833 if (tmp_sn != first_sn): 834 sn_range.append(tmp_sn) 835 if (tmp_sn.new_class._type == new_type): 836 found_new_type == True 837 if (found_new_type and tmp_sn.new_class._type != new_type): 838 break 839 840 return sn_range 841 842 except KeyError: 843 return []
844 845
    def find_target(self, old_class):
        """
        Determine the current-system message class that old_class should be
        migrated to, caching both positive and negative results in
        self.found_targets.

        @param old_class: the (possibly outdated) message class to resolve.
        @returns the system message class to target, or None if unresolvable.
        """
        key = get_message_key(old_class)

        last_class = old_class

        try:
            # Cache hit (including cached None for unresolvable types).
            return self.found_targets[key]
        except KeyError:

            # If the type name is already known to the current system, that
            # class is the target.
            sys_class = genpy.message.get_message_class(old_class._type)

            if sys_class is not None:
                self.found_targets[key] = sys_class
                return sys_class

            # Otherwise walk this type's scaffold chain to its end and look
            # up the final type in the current system.
            try:
                tmp_sn = self.first_type[old_class._type]

                if tmp_sn.new_class is not None:
                    last_class = tmp_sn.new_class

                while tmp_sn.next is not None:
                    tmp_sn = tmp_sn.next

                if tmp_sn.new_class is not None:
                    last_class = tmp_sn.new_class
                    sys_class = genpy.message.get_message_class(tmp_sn.new_class._type)
                else:
                    sys_class = None

                if sys_class is not None:
                    self.found_targets[key] = sys_class
                    return sys_class
            except KeyError:
                # No scaffold chain exists for this type name.
                pass

            # Unresolvable: cache the negative result so we do not re-walk.
            self.found_targets[key] = None
            return None
884 885 # This function determines the set of rules which must be created 886 # to get from the old type to the new type.
    def find_path(self, old_class, new_class):
        """Determine the list of ScaffoldNodes (rules) migrating old_class to new_class.

        Paths are memoized in self.found_paths.  When new_class is None a
        half-rule path is built to the newest known endpoint.  Any generated
        rules are recorded in self.extra_nodes so they can be saved out later.

        NOTE(review): the 'reversed(zip(...))' pattern below relies on zip()
        returning a list (Python 2); under Python 3 reversed() would raise
        TypeError on a zip iterator — confirm before porting.
        """
        key = get_path_key(old_class, new_class)

        # Return any path already found in the cache
        try:
            return self.found_paths[key]
        except KeyError:
            pass

        # If the new_class is none, e.g., a message has been moved and
        # we are lacking a proper rename rule, such that find-target
        # failed, the best we can do is create a half-rule from the
        # end-point
        if new_class is None:
            sn_range = self.scaffold_range(old_class._type, "")

            found_start = False

            # First look for an exact (md5-level) match for old_class in the scaffold.
            for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
                # Skip until we find the class we're trying to match
                if (tmp_sn.old_class._type != old_class._type):
                    continue
                if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
                    sn_range = sn_range[ind:]
                    found_start = True
                    break

            # Next see if we can create a valid rule
            if not found_start:
                for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
                    if (tmp_sn.old_class._type != old_class._type):
                        continue
                    new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
                    R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                    if R.valid:
                        R.find_sub_paths()
                        sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                        self.extra_nodes.append(sn)
                        sn_range = sn_range[ind:]
                        sn_range.insert(0,sn)
                        found_start = True
                        break

            # Terminate the path with an old-half rule from whatever endpoint we reached.
            if sn_range == []:
                tmp_class = old_class
            else:
                tmp_class = sn_range[-1].new_class

            new_rule = self.make_old_half_rule(tmp_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            sn = ScaffoldNode(tmp_class, None, R)
            sn_range.append(sn)
            self.extra_nodes.append(sn)
            self.found_paths[key] = sn_range
            return sn_range

        # If the messages are the same, there is actually no path
        if (old_class._type == new_class._type and old_class._full_text.strip() == new_class._full_text.strip()):
            self.found_paths[key] = []
            return []

        sn_range = self.scaffold_range(old_class._type, new_class._type)

        # If we have no scaffolding, we just try to create the one path
        if sn_range == []:
            new_rule = self.make_update_rule(old_class, new_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            R.find_sub_paths()
            sn = ScaffoldNode(old_class, new_class, R)
            self.extra_nodes.append(sn)
            self.found_paths[key] = [sn]
            return [sn]


        # Search for the stop point in the scaffold
        found_stop = False

        # First look for a trivial match
        for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
            # Stop looking early if the classes don't match
            if (tmp_sn.new_class._type != new_class._type):
                break
            if get_message_key(tmp_sn.new_class) == get_message_key(new_class):
                sn_range = sn_range[:ind+1]
                found_stop = True
                break

        # Next see if we can create a valid rule
        if not found_stop:
            for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
                if (tmp_sn.new_class._type != new_class._type):
                    break
                new_rule = self.make_update_rule(tmp_sn.new_class, new_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    R.find_sub_paths()
                    sn = ScaffoldNode(tmp_sn.new_class, new_class, R)
                    self.extra_nodes.append(sn)
                    sn_range = sn_range[:ind+1]
                    sn_range.append(sn)
                    found_stop = True
                    break

        # If there were no valid implicit rules, we suggest a new one from to the end
        if not found_stop:
            new_rule = self.make_update_rule(sn_range[-1].new_class, new_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            R.find_sub_paths()
            sn = ScaffoldNode(sn_range[-1].new_class, new_class, R)
            self.extra_nodes.append(sn)
            sn_range.append(sn)

        # Search for the start point in the scaffold
        found_start = False

        # First look for a trivial match
        for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
            # Skip until we find the class we're trying to match
            if (tmp_sn.old_class._type != old_class._type):
                continue
            if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
                sn_range = sn_range[ind:]
                found_start = True
                break

        # Next see if we can create a valid rule
        if not found_start:
            for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
                if (tmp_sn.old_class._type != old_class._type):
                    continue
                new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
                R = new_rule(self, 'GENERATED.' + new_rule.__name__)
                if R.valid:
                    R.find_sub_paths()
                    sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
                    self.extra_nodes.append(sn)
                    sn_range = sn_range[ind:]
                    sn_range.insert(0,sn)
                    found_start = True
                    break

        # If there were no valid implicit rules, we suggest a new one from the beginning
        if not found_start:
            new_rule = self.make_update_rule(old_class, sn_range[0].old_class)
            R = new_rule(self, 'GENERATED.' + new_rule.__name__)
            R.find_sub_paths()
            sn = ScaffoldNode(old_class, sn_range[0].old_class, R)
            self.extra_nodes.append(sn)
            sn_range.insert(0,sn)

        self.found_paths[key] = sn_range
        return sn_range
1039 1040
1041 - def migrate_raw(self, msg_from, msg_to):
1042 path = self.find_path(msg_from[4], msg_to[4]) 1043 1044 if False in [sn.rule.valid for sn in path]: 1045 raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type)) 1046 1047 # Short cut to speed up case of matching md5sum: 1048 if path == [] or msg_from[2] == msg_to[2]: 1049 return (msg_to[0], msg_from[1], msg_to[2], msg_to[3], msg_to[4]) 1050 1051 tmp_msg = path[0].old_class() 1052 tmp_msg.deserialize(msg_from[1]) 1053 1054 for sn in path: 1055 tmp_msg = sn.rule.apply(tmp_msg) 1056 1057 buff = StringIO() 1058 tmp_msg.serialize(buff) 1059 1060 return (msg_to[0], buff.getvalue(), msg_to[2], msg_to[3], msg_to[4])
1061 1062 1063
1064 - def migrate(self, msg_from, msg_to):
1065 path = self.find_path(msg_from.__class__, msg_to.__class__) 1066 1067 if False in [sn.rule.valid for sn in path]: 1068 raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type)) 1069 1070 # Short cut to speed up case of matching md5sum: 1071 if path == [] or msg_from._md5sum == msg_to._md5sum: 1072 buff = StringIO() 1073 msg_from.serialize(buff) 1074 msg_to.deserialize(buff.getvalue()) 1075 return 1076 1077 if len(path) > 0: 1078 buff = StringIO() 1079 msg_from.serialize(buff) 1080 1081 tmp_msg = path[0].old_class() 1082 1083 tmp_msg.deserialize(buff.getvalue()) 1084 1085 for sn in path: 1086 tmp_msg = sn.rule.apply(tmp_msg) 1087 else: 1088 tmp_msg = msg_from 1089 1090 buff = StringIO() 1091 tmp_msg.serialize(buff) 1092 msg_to.deserialize(buff.getvalue())
1093
1094 - def migrate_array(self, msg_from_array, msg_to_array):
1095 if len(msg_from_array) != len(msg_to_array): 1096 raise BagMigrationException("Migrate array called on on arrays of unequal length.") 1097 1098 if len(msg_from_array) == 0: 1099 return 1100 1101 path = self.find_path(msg_from_array[0].__class__, msg_to_array[0].__class__) 1102 1103 if path is None: 1104 raise BagMigrationException("Migrate called, but no migration path from [%s] to [%s]"%(msg_from._type, msg_to._type)) 1105 1106 # Short cut to speed up case of matching md5sum: 1107 if path == []: 1108 for i in xrange(len(msg_from_array)): 1109 buff = StringIO() 1110 msg_from_array[i].serialize(buff) 1111 msg_to_array[i].deserialize(buff.getvalue()) 1112 return 1113 1114 for i in xrange(len(msg_from_array)): 1115 buff = StringIO() 1116 tmp_msg = path[0].old_class() 1117 msg_from_array[i].serialize(buff) 1118 tmp_msg.deserialize(buff.getvalue()) 1119 for sn in path: 1120 tmp_msg = sn.rule.apply(tmp_msg) 1121 1122 buff = StringIO() 1123 tmp_msg.serialize(buff) 1124 msg_to_array[i].deserialize(buff.getvalue())
1125
    def make_update_rule(self, old_class, new_class):
        """Generate a MessageUpdateRule subclass migrating old_class to new_class.

        The rule class is assembled as source text (so it can print its own
        definition via get_class_def) and exec'd.  Fields are copied across by
        name: primitives are assigned directly, message-typed fields are
        migrated recursively.  Any unmatchable field marks the rule invalid
        and emits a commented placeholder plus a default value.
        """
        name = "update_%s_%s"%(old_class._type.replace("/","_"), old_class._md5sum)

        # We assemble the class as a string and then exec it to end up with a class
        # that can essentially print its own definition.
        classdef = "class %s(MessageUpdateRule):\n"%name
        classdef += "\told_type = \"%s\"\n"%old_class._type
        classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip()
        classdef += "\tnew_type = \"%s\"\n"%new_class._type
        classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip()
        classdef += "\n"
        classdef += "\torder = 0"
        classdef += "\n"

        validdef = "\tvalid = True\n"

        migratedefs = "\tmigrated_types = ["

        updatedef = "\tdef update(self, old_msg, new_msg):\n"

        # A rule is only valid if every constant of the old message survives
        # unchanged into the new one.
        old_consts = constants_from_def(old_class._type, old_class._full_text)
        new_consts = constants_from_def(new_class._type, new_class._full_text)

        if (not new_consts >= old_consts):
            validdef = "\tvalid = False\n"
            for c in (old_consts - new_consts):
                updatedef += "\t\t#Constant '%s' has changed\n"%(c[0],)

        # Track old fields not yet matched; leftovers invalidate the rule below.
        old_slots = []
        old_slots.extend(old_class.__slots__)

        migrations_seen = []

        # Assign across primitives, self.migrate or self.migrate_array non-primitives
        for (s,t) in zip(new_class.__slots__, new_class._slot_types):
            warn_msg = None
            new_base_type, new_is_array, new_array_len = genmsg.msgs.parse_type(t)
            try:
                ind = old_class.__slots__.index(s)
                old_slots.remove(s)
                old_base_type, old_is_array, old_array_len = genmsg.msgs.parse_type(old_class._slot_types[ind])

                if new_is_array != old_is_array:
                    warn_msg = "Could not match array with nonarray"

                elif new_array_len != old_array_len:
                    if old_array_len is None:
                        warn_msg = "Converted from variable length array to fixed array of length %d"%(new_array_len)
                    elif new_array_len is None:
                        warn_msg = "Converted from fixed array of length %d to variable length"%(old_array_len)
                    else:
                        warn_msg = "Fixed length array converted from %d to %d"%(old_array_len,new_array_len)

                elif genmsg.msgs.is_builtin(new_base_type):
                    if new_base_type != old_base_type:
                        warn_msg = "Primitive type changed"
                    else:
                        updatedef += "\t\tnew_msg.%s = old_msg.%s\n"%(s,s)

                else:
                    tmp_old_type = clean_name(old_base_type, old_class._type)
                    tmp_new_type = clean_name(new_base_type, new_class._type)

                    tmp_qualified_old_type = qualified_name(old_base_type, old_class._type)
                    tmp_qualified_new_type = qualified_name(new_base_type, new_class._type)

                    # Verify the type can theoretically be migrated
                    # NOTE(review): dict.has_key is Python-2-only; a port to
                    # Python 3 needs 'tmp_qualified_old_type in self.rename_map'.
                    if (tmp_qualified_old_type == tmp_qualified_new_type) or \
                           (self.rename_map.has_key(tmp_qualified_old_type) and
                            tmp_qualified_new_type in self.rename_map[tmp_qualified_old_type]):

                        if (tmp_old_type, tmp_new_type) not in migrations_seen:
                            migratedefs += "\n\t\t(\"%s\",\"%s\"),"%(tmp_old_type, tmp_new_type)
                            migrations_seen.append((tmp_old_type, tmp_new_type))

                        if not new_is_array:
                            updatedef += "\t\tself.migrate(old_msg.%s, new_msg.%s)\n"%(s,s)
                        else:
                            updatedef += "\t\tself.migrate_array(old_msg.%s, new_msg.%s, \"%s\")\n"%(s,s,new_base_type)
                    else:
                        warn_msg = "No migration path between [%s] and [%s]"%(tmp_old_type, tmp_new_type)
            except ValueError:
                warn_msg = "No matching field name in old message"

            if warn_msg is not None:
                validdef = "\tvalid = False\n"
                updatedef += "\t\t#%s\n"%warn_msg
                updatedef += "\t\tnew_msg.%s = %s\n"%(s,migration_default_value(t))

        migratedefs += "]\n"

        if old_slots:
            validdef = "\tvalid = False\n"
            for s in old_slots:
                updatedef += "\t\t#No field to match field %s from old message\n"%(s)

        classdef += migratedefs + '\n' + validdef + '\n' + updatedef

        printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef

        # This is probably a TERRIBLE idea?
        exec(printclassdef)
        return locals()[name]
1229
1230 - def make_old_half_rule(self, old_class):
1231 name = "update__%s__%s"%(old_class._type.replace("/","_"), old_class._md5sum) 1232 1233 # We assemble the class as a string and then exec it to end up with a class 1234 # that can essentially print its own definition. 1235 classdef = "class %s(MessageUpdateRule):\n"%name 1236 classdef += "\told_type = \"%s\"\n"%old_class._type 1237 classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip() 1238 classdef += "\tnew_type = \"\"\n" 1239 classdef += "\tnew_full_text = \"\"\"\n\n\"\"\"\n" 1240 classdef += "\n" 1241 classdef += "\torder = 0" 1242 classdef += "\n" 1243 1244 validdef = "\tvalid = False\n" 1245 1246 migratedefs = "\tmigrated_types = []\n" 1247 1248 updatedef = "\tdef update(self, old_msg, new_msg):\n" 1249 updatedef += "\t\tpass\n" 1250 1251 classdef += migratedefs + '\n' + validdef + '\n' + updatedef 1252 1253 printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef 1254 1255 # This is probably a TERRIBLE idea? 1256 exec(printclassdef) 1257 return locals()[name]
1258
1259 - def make_new_half_rule(self, new_class):
1260 name = "update_to_%s_%s"%(new_class._type.replace("/","_"), new_class._md5sum) 1261 1262 # We assemble the class as a string and then exec it to end up with a class 1263 # that can essentially print its own definition. 1264 classdef = "class %s(MessageUpdateRule):\n"%name 1265 classdef += "\told_type = \"\"\n" 1266 classdef += "\told_full_text = \"\"\"\n\n\"\"\"\n\n" 1267 classdef += "\tnew_type = \"%s\"\n"%new_class._type 1268 classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip() 1269 classdef += "\n" 1270 classdef += "\torder = 0" 1271 classdef += "\n" 1272 1273 validdef = "\tvalid = False\n" 1274 1275 migratedefs = "\tmigrated_types = []\n" 1276 1277 updatedef = "\tdef update(self, old_msg, new_msg):\n" 1278 updatedef += "\t\tpass\n" 1279 1280 classdef += migratedefs + '\n' + validdef + '\n' + updatedef 1281 1282 printclassdef = classdef + "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef 1283 1284 # This is probably a TERRIBLE idea? 1285 exec(printclassdef) 1286 return locals()[name]
1287
def migration_default_value(field_type):
    """Return Python source text for the default value of a message field.

    The returned string is spliced into generated update() bodies, so for
    message-typed fields it is a self.get_new_class(...) call rather than a
    constructed value.
    """
    integral_types = ('bool', 'byte', 'int8', 'int16', 'int32', 'int64',
                      'char', 'uint8', 'uint16', 'uint32', 'uint64')
    if field_type in integral_types:
        return '0'
    if field_type in ('float32', 'float64'):
        return '0.'
    if field_type == 'string':
        # strings, byte[], and uint8s are all optimized to be strings
        return "''"
    if field_type.endswith(']'):  # array type
        base_type, is_array, array_len = genmsg.msgs.parse_type(field_type)
        if base_type in ('byte', 'uint8'):
            # strings, byte[], and uint8s are all optimized to be strings
            return "''" if array_len is None else "chr(0)*%s" % array_len
        if array_len is None:  # var-length
            return '[]'
        # fixed-length: fill with one default per element
        element_default = migration_default_value(base_type)
        return '[' + ','.join([element_default] * array_len) + ']'
    return "self.get_new_class('%s')()" % field_type
1312
def constants_from_def(core_type, msg_def):
    """Return the set of (name, value, type) constants declared by core_type.

    msg_def is the concatenated full text; dependency definitions follow the
    core message separated by 80-character '=' lines and are intentionally
    not parsed here — only the core message's constants are extracted.
    """
    core_pkg, core_base_type = genmsg.package_resource_name(core_type)

    separator = '\n' + '=' * 80 + '\n'
    core_msg = msg_def.split(separator)[0]

    # create a MsgSpec representation of the core .msg text
    from genmsg import MsgContext
    context = MsgContext.create_default()
    core_spec = genmsg.msg_loader.load_msg_from_string(context, core_msg, core_pkg)

    return {(const.name, const.val, const.type) for const in core_spec.constants}
1332