migration.py
Go to the documentation of this file.
00001 # Software License Agreement (BSD License)
00002 #
00003 # Copyright (c) 2009, Willow Garage, Inc.
00004 # All rights reserved.
00005 #
00006 # Redistribution and use in source and binary forms, with or without
00007 # modification, are permitted provided that the following conditions
00008 # are met:
00009 #
00010 #  * Redistributions of source code must retain the above copyright
00011 #    notice, this list of conditions and the following disclaimer.
00012 #  * Redistributions in binary form must reproduce the above
00013 #    copyright notice, this list of conditions and the following
00014 #    disclaimer in the documentation and/or other materials provided
00015 #    with the distribution.
00016 #  * Neither the name of Willow Garage, Inc. nor the names of its
00017 #    contributors may be used to endorse or promote products derived
00018 #    from this software without specific prior written permission.
00019 #
00020 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
00021 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
00022 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
00023 # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
00024 # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
00025 # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
00026 # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
00027 # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
00028 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
00029 # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
00030 # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
00031 # POSSIBILITY OF SUCH DAMAGE.
00032 
00033 import collections
00034 import copy
00035 try:
00036     from cStringIO import StringIO  # Python 2.x
00037 except ImportError:
00038     from io import StringIO  # Python 3.x
00039 import inspect
00040 import itertools
00041 import os
00042 import string
00043 import sys
00044 
00045 import genmsg.msgs
00046 import genpy
00047 import genpy.dynamic
00048 
00049 import rospkg
00050 
00051 import rosbag
00052 
00053 # Anything outside the scope of these primitives is a submessage
00054 #_PRIMITIVES = ['bool', 'byte','int8','int16','int32','int64','char','uint8','uint16','uint32','uint64','float32','float64','string','time']
00055 
class BagMigrationException(Exception):
    """Raised when a migration rule is invalid, misapplied, or missing."""
    pass
00058 
def checkbag(migrator, inbag):
    """
    Check whether a bag file can be played in the current system.
    @param migrator: message migrator to use
    @param inbag: name of the bag to be checked.
    @returns A list of tuples for each type in the bag file.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    checked = set()
    migrations = []

    bag = rosbag.Bag(inbag, 'r')
    try:
        for topic, msg, t in bag.read_messages(raw=True):
            # msg is a raw tuple; msg[4] is the message class.
            key = get_message_key(msg[4])
            if key not in checked:
                target = migrator.find_target(msg[4])
                # Even in the case of a zero-length path (matching md5sums), we still want
                # to migrate in the event of a type change (message move).
                path = migrator.find_path(msg[4], target)
                if len(path) > 0:
                    migrations.append((path, [r for r in migrator.expand_rules([sn.rule for sn in path]) if not r.valid]))

                checked.add(key)
    finally:
        # Make sure the bag is released even if path lookup fails.
        bag.close()

    return migrations
00089 
def checkmessages(migrator, messages):
    """
    Check whether a list of message classes can be migrated to the current system.
    @param migrator: the message migrator to use
    @param messages: a list of message classes.
    @returns A list of tuples for each type in the list.  The first
    element of each tuple is the full migration path for the type.  The
    second element of the tuple is the expanded list of invalid rules
    for that particular path.
    """
    checked = set()
    migrations = []

    for msg in messages:
        key = get_message_key(msg)
        if key in checked:
            continue
        target = migrator.find_target(msg)
        # Even in the case of a zero-length path (matching md5sums), we still want
        # to migrate in the event of a type change (message move).
        path = migrator.find_path(msg, target)
        if len(path) > 0:
            migrations.append((path, [r for r in migrator.expand_rules([sn.rule for sn in path]) if not r.valid]))

        checked.add(key)

    return migrations
00117 
## Fix a bag so that it can be played in the current system
#
# @param migrator The message migrator to use
# @param inbag Name of the bag to be fixed.
# @param outbag Name of the bag to be saved.
# @returns True if migration was successful.
def fixbag(migrator, inbag, outbag):
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    # Deserializing all messages is inefficient, but we can speed this up later.
    # Only proceed when every found path has no invalid rules.
    if all(m[1] == [] for m in res):
        bag = rosbag.Bag(inbag, 'r')
        rebag = rosbag.Bag(outbag, 'w', options=bag.options)
        try:
            for topic, msg, t in bag.read_messages(raw=True):
                new_msg_type = migrator.find_target(msg[4])
                mig_msg = migrator.migrate_raw(msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
                rebag.write(topic, mig_msg, t, raw=True)
        finally:
            # Close both bags even if a migration step raises.
            rebag.close()
            bag.close()
        return True
    else:
        return False
00141 
## Fix a bag so that it can be played in the current system
#
# @param migrator The message migrator to use
# @param inbag Name of the bag to be fixed.
# @param outbag Name of the bag to be saved.
# @param force  If True, write the output bag even when some paths cannot be migrated.
# @returns [] if bag could be migrated, otherwise, it returns the list of necessary migration paths
def fixbag2(migrator, inbag, outbag, force=False):
    # This checks/builds up rules for the given migrator
    res = checkbag(migrator, inbag)

    # Paths that still contain invalid rules and therefore block migration.
    migrations = [m for m in res if len(m[1]) > 0]

    # Deserializing all messages is inefficient, but we can speed this up later
    if len(migrations) == 0 or force:
        bag = rosbag.Bag(inbag, 'r')
        rebag = rosbag.Bag(outbag, 'w', options=bag.options)
        try:
            for topic, msg, t in bag.read_messages(raw=True):
                new_msg_type = migrator.find_target(msg[4])
                if new_msg_type is not None:
                    mig_msg = migrator.migrate_raw(msg, (new_msg_type._type, None, new_msg_type._md5sum, None, new_msg_type))
                    rebag.write(topic, mig_msg, t, raw=True)
                else:
                    # No known target type: copy the record through unchanged.
                    rebag.write(topic, msg, t, raw=True)
        finally:
            # Close both bags even if a migration step raises.
            rebag.close()
            bag.close()

    if force:
        return []
    else:
        return migrations
00172 
## Helper function to strip out roslib and package name from name usages.
# 
# There is some inconsistency in whether a fully-qualified path is
# used for sub-messages within a given message.  This function is
# useful for stripping out the package name in a fully qualified
# sub-message.
#
# @param name      The name to clean.
# @param top_name  The name of the top-level type
# @returns         The cleaned version of the name.
def clean_name(name, top_name):
    name_split = name.split('/')
    try:
        name_split.remove('std_msgs')
    except ValueError:
        pass
    try:
        name_split.remove(top_name.split('/')[0])
    except ValueError:
        pass
    # string.join() was removed in Python 3; str.join works on both 2 and 3.
    new_name = '/'.join(name_split)
    return new_name
00195 
## Helper function to ensure we end up with a qualified name
# 
# There is some inconsistency in whether a fully-qualified path is
# used for sub-messages within a given message.  This function
# guarantees that a name comes back fully qualified.
#
# @param name      The name to qualify
# @param top_name  The name of the top-level type
# @returns         The qualified version of the name.
def qualified_name(name, top_name):
    # Normalize first so that the checks below behave deterministically.
    cleaned = clean_name(name, top_name)

    # Already package-qualified, or a builtin type which needs no package.
    if len(cleaned.split('/')) == 2 or genmsg.msgs.is_builtin(cleaned):
        return cleaned

    # A bare Header always lives in std_msgs.
    if cleaned == 'Header':
        return 'std_msgs/Header'

    # Otherwise qualify with the top-level type's package.
    return top_name.split('/')[0] + '/' + cleaned
00215 
## Helper function to return a key from a given class
#
# For now, we choose the tuple (type,md5sum) as a unique key for the
# class.  However, this is subject to change and assumptions about keys
# should not be made other than their uniqueness.
#
# @param c  The message class or instance to get a key for
# @returns The unique key, or None if c lacks the message attributes
def get_message_key(c):
    try:
        return (c._type, c._md5sum)
    except AttributeError:
        # Not a message class/instance; signal failure with None rather
        # than letting the error propagate (callers compare keys directly).
        return None
00229 
## Helper function to return a key for a given path
#
# For now, we choose the tuple ((type1,md5sum1),(type2,md5sum2)) as a
# unique key for the path.  However, this is subject to change and
# assumptions about keys should not be made other than their
# uniqueness.
#
# @param c1  The start point of the path
# @param c2  The stop point of the path
# @returns The unique key
def get_path_key(c1, c2):
    try:
        return (get_message_key(c1), get_message_key(c2))
    except AttributeError:
        # Defensive: get_message_key already returns None on failure, so
        # this should not trigger, but mirror its narrow error handling.
        return None
00245 
## Base class for all message update rules
class MessageUpdateRule(object):
    # Subclasses override these to declare the rule's endpoints.
    old_type = ''
    old_full_text = ''
    new_type = ''
    new_full_text = ''
    # Pairs (old_subtype, new_subtype) this rule is allowed to migrate.
    migrated_types = []

    order = -1

    valid = False

    ## Initialize class
    #
    # @param migrator  The message migrator that owns this rule.
    # @param location  Human-readable origin of the rule (for warnings).
    def __init__(self, migrator, location):
        # Every rule needs to hang onto the migrator so we can potentially use it
        self.migrator = migrator
        self.location = location

        # A rule whose old and new type names differ is a rename rule.
        if (self.old_type != self.new_type):
            self.rename_rule = True
        else:
            self.rename_rule = False

        # Instantiate types dynamically based on definition.  Any failure
        # (including an empty type name) leaves the old side unset.
        try:
            if self.old_type == "":
                raise Exception
            self.old_types = genpy.dynamic.generate_dynamic(self.old_type, self.old_full_text)
            self.old_class = self.old_types[self.old_type]
            self.old_md5sum = self.old_class._md5sum
        except Exception:
            self.old_types = []
            self.old_class = None
            self.old_md5sum = ""

        try:
            if self.new_type == "":
                raise Exception
            self.new_types = genpy.dynamic.generate_dynamic(self.new_type, self.new_full_text)
            self.new_class = self.new_types[self.new_type]
            self.new_md5sum = self.new_class._md5sum
        except Exception:
            self.new_types = []
            self.new_class = None
            self.new_md5sum = ""

        # We have not populated our sub rules (and ideally should
        # wait until the full scaffold exists before doing this)
        self.sub_rules_done = False
        self.sub_rules_valid = False
        self.sub_rules = []

    ## Find all of the sub paths
    # 
    # For any migrated type the user might want to use, we must make
    # sure the migrator had found a path for it.  To facilitate this
    # check we require that all migrated types must be listed as pairs
    # in the migrated_types field.
    #
    # It would be nice not to need these through performing some kind
    # of other inspection of the update rule itself.
    def find_sub_paths(self):
        self.sub_rules_valid = True
        for (t1, t2) in self.migrated_types:
            try:
                tmp_old_class = self.get_old_class(t1)
            except KeyError:
                sys.stderr.write("WARNING: Within rule [%s], specified migrated type [%s] not found in old message types\n" % (self.location, t1))
                self.sub_rules_valid = False
                continue
            try:
                tmp_new_class = self.get_new_class(t2)
            except KeyError:
                sys.stderr.write("WARNING: Within rule [%s], specified migrated type [%s] not found in new message types\n" % (self.location, t2))
                self.sub_rules_valid = False
                continue

            # If a rule instantiates itself as a subrule (because the
            # author knows the md5sums match), we don't want to end up
            # with an infinite recursion.
            if (get_message_key(tmp_old_class) != get_message_key(self.old_class)) or (get_message_key(tmp_new_class) != get_message_key(self.new_class)):
                path = self.migrator.find_path(tmp_old_class, tmp_new_class)
                rules = [sn.rule for sn in path]
                self.sub_rules.extend(rules)

            if False in [r.valid for r in self.sub_rules]:
                sys.stderr.write("WARNING: Within rule [%s] cannot migrate from subtype [%s] to [%s]..\n" % (
                    self.location, t1, t2))
                self.sub_rules_valid = False
                continue
        self.sub_rules = self.migrator.filter_rules_unique(self.sub_rules)
        self.sub_rules_done = True

    ## Helper function to get the class of a submsg for the new type
    #
    # This function should be used inside of update to access new classes.
    #
    # @param t The subtype to return the class of
    # @returns The class of the new sub type
    def get_new_class(self, t):
        try:
            try:
                return self.new_types[t]
            except KeyError:
                return self.new_types['std_msgs/' + t]
        except KeyError:
            return self.new_types[self.new_type.split('/')[0] + '/' + t]

    ## Helper function to get the class of a submsg for the old type
    #
    # This function should be used inside of update to access old classes.
    #
    # @param t The subtype to return the class of
    # @returns The class of the old sub type
    def get_old_class(self, t):
        try:
            try:
                return self.old_types[t]
            except KeyError:
                return self.old_types['std_msgs/' + t]
        except KeyError:
            return self.old_types[self.old_type.split('/')[0] + '/' + t]

    ## Actually migrate one sub_type to another
    #
    # This function should be used inside of update to migrate sub msgs.
    #
    # @param msg_from A message instance of the old message type
    # @param msg_to   A message instance of a new message type to be populated
    def migrate(self, msg_from, msg_to):
        tmp_msg_from = clean_name(msg_from._type, self.old_type)
        tmp_msg_to = clean_name(msg_to._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))
        self.migrator.migrate(msg_from, msg_to)

    ## Helper function to migrate a whole array of messages
    #
    # This function should be used inside of update to migrate arrays of sub msgs.
    #
    # @param msg_from_array An array of messages of the old message type
    # @param msg_to_array An array of messages of the new message type (this will be emptied if not already)
    # @param msg_to_name  The name of the new message type since msg_to_array may be an empty array.
    def migrate_array(self, msg_from_array, msg_to_array, msg_to_name):
        msg_to_class = self.get_new_class(msg_to_name)

        # Empty the destination in place so callers keep their reference.
        while len(msg_to_array) > 0:
            msg_to_array.pop()

        if (len(msg_from_array) == 0):
            return

        tmp_msg_from = clean_name(msg_from_array[0]._type, self.old_type)
        tmp_msg_to = clean_name(msg_to_class._type, self.new_type)
        if (tmp_msg_from, tmp_msg_to) not in self.migrated_types:
            raise BagMigrationException("Rule [%s] tried to perform a migration from old [%s] to new [%s] not listed in migrated_types"%(self.location, tmp_msg_from, tmp_msg_to))

        # xrange is Python-2-only; range works on both 2 and 3.
        msg_to_array.extend([msg_to_class() for _ in range(len(msg_from_array))])

        self.migrator.migrate_array(msg_from_array, msg_to_array)

    ## A helper function to print out the definiton of autogenerated messages.
    def get_class_def(self):
        pass

    ## The function actually called by the message migrator 
    #
    # @param old_msg An instance of the old message type.
    # @returns An instance of a new message type
    def apply(self, old_msg):
        if not self.valid:
            raise BagMigrationException("Attempted to apply an invalid rule")
        if not self.sub_rules_done:
            raise BagMigrationException("Attempted to apply a rule without building up its sub rules")
        if not self.sub_rules_valid:
            raise BagMigrationException("Attempted to apply a rule without valid sub-rules")
        if (get_message_key(old_msg) != get_message_key(self.old_class)):
            raise BagMigrationException("Attempted to apply rule to incorrect class %s %s."%(get_message_key(old_msg),get_message_key(self.old_class)))

        # Apply update rule
        new_msg = self.new_class()
        self.update(old_msg, new_msg)

        return new_msg

    ## The function which a user overrides to actually perform the message update
    #
    # @param old_msg A message instance of the old message type
    # @param new_msg A message instance of a new message type to be populated
    def update(self, old_msg, new_msg):
        raise BagMigrationException("Tried to use rule without update overidden")
00437 
00438 
## A class for book-keeping about rule-chains.
#
# Rule chains define the ordered set of update rules, indexed by
# typename, terminated by a rename rule.  This class is only used
# temporarily to help us get the ordering right, until all explicit
# rules have been loaded (possibly out of order) and the proper
# scaffold can be built.
class RuleChain(object):
    def __init__(self):
        # Terminating rename rule; stays None until one is registered.
        self.rename = None
        # Ordered list of update rules for this type.
        self.chain = []
        # 'order' values recorded for this chain (populated elsewhere).
        self.order_keys = set()
00451  
00452 
## A class for arranging the ordered rules
#
# They provide a scaffolding (essentially a linked list) over which we
# assume we can migrate messages forward.  This allows us to verify a
# path exists before actually creating all of the necessary implicit
# rules (mostly migration of sub-messages) that such a path
# necessitates.
class ScaffoldNode(object):
    def __init__(self, old_class, new_class, rule):
        # Successor node in the chain; linked up after construction.
        self.next = None
        # Source and destination message classes for this step.
        self.old_class = old_class
        self.new_class = new_class
        # Rule performing the step; None marks an implicit rule that
        # will be generated later.
        self.rule = rule
00466 
00467 ## A class to actually migrate messages
00468 #
00469 # This is the big class that actually handles all of the fancy
00470 # migration work.  Better documentation to come later.
00471 class MessageMigrator(object):
00472     def __init__(self, input_rule_files=[], plugins=True):
00473         # We use the rulechains to scaffold our initial creation of
00474         # implicit rules.  Each RuleChain is keyed off of a type and
00475         # consists of an ordered set of update rules followed by an
00476         # optional rename rule.  For the system rule definitions to be
00477         # valid, all members of a rulechains must be connectable via
00478         # implicit rules and all rulechains must terminate in a known
00479         # system type which is also reachable by an implicit rule.
00480         self.rulechains = collections.defaultdict(RuleChain)
00481         
00482         # The list of all nodes that we can iterate through in the
00483         # future when making sure all rules have been constructed.
00484         self.base_nodes = []
00485 
00486         # The list of extra (non-scaffolded) nodes that we can use
00487         # when determining if all rules are valid and printing invalid
00488         # rules.
00489         self.extra_nodes = []
00490 
00491         # A map from typename to the first node of a particular type
00492         self.first_type = {}
00493                 
00494         # A map from a typename to all other typenames for which
00495         # rename rules exist.  This is necessary to determine whether
00496         # an appropriate implicit rule can actually be constructed.
00497         self.rename_map = {}
00498 
00499         # The cached set of all found paths, keyed by:
00500         # ((old_type, old_md5), (new_type, new_md5))
00501         self.found_paths = {}
00502         self.found_targets = {}
00503 
00504         # Temporary list of the terminal nodes
00505         terminal_nodes = []
00506 
00507         # Temporary list of rule modules we are loading
00508         rule_dicts = []
00509 
00510         self.false_rule_loaded = False
00511         
00512         # To make debugging easy we can pass in a list of local
00513         # rulefiles.
00514         for r in input_rule_files:
00515             try:
00516                 scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
00517                 execfile(r,scratch_locals)
00518                 rule_dicts.append((scratch_locals, r))
00519             except:
00520                 print >> sys.stderr, "Cannot load rule file [%s] in local package"%r
00521 
00522         # Alternatively the preferred method is to load definitions
00523         # from the migration ruleset export flag.
00524         if plugins:
00525             rospack = rospkg.RosPack()
00526             for dep,export in [('rosbagmigration','rule_file'),('rosbag','migration_rule_file'),('rosbag_migration_rule','rule_file')]:
00527                 for pkg in rospack.get_depends_on(dep, implicit=False):
00528                     m = rospack.get_manifest(pkg)
00529                     p_rules = m.get_export(dep,export)
00530                     pkg_dir = rospack.get_path(pkg)
00531                     for r in p_rules:
00532                         if dep == 'rosbagmigration':
00533                             print >> sys.stderr, """WARNING: The package: [%s] is using a deprecated rosbagmigration export.
00534     The export in the manifest should be changed to:
00535     <rosbag migration_rule_file="%s"/>
00536 """%(pkg, r)
00537                         try:
00538                             scratch_locals = {'MessageUpdateRule':MessageUpdateRule}
00539                             execfile(pkg_dir + "/" + r,scratch_locals)
00540                             rule_dicts.append((scratch_locals, r))
00541                         except ImportError:
00542                             print >> sys.stderr, "Cannot load rule file [%s] in package [%s]"%(r, pkg)
00543 
00544 
00545         for (rule_dict, location_base) in rule_dicts:
00546             for (n,c) in rule_dict.iteritems():
00547                 if inspect.isclass(c):
00548                     if (not c == MessageUpdateRule) and issubclass(c, MessageUpdateRule):
00549                         self.add_update_rule(c(self, location_base + ':' + n))
00550                 
00551         if self.false_rule_loaded:
00552             raise BagMigrationException("Cannot instantiate MessageMigrator with invalid rules")
00553 
00554         # Now, go through and build up a better scaffolded
00555         # representation, deferring implicit rule generation until
00556         # complete, since the implicit rule generation and sub-rule
00557         # population makes use of the scaffold.
00558 
00559         # First we each particular type chain (now including implicit
00560         # rules).  Additionally, we build up our name remapping lists.
00561 
00562 
00563         # For Each rulechain
00564         for (type,rulechain) in self.rulechains.iteritems():
00565             first = True
00566             sn = None
00567             prev_sn = None
00568 
00569             # Find name remapping list
00570             rename_set = set([type])
00571             tmp = rulechain.rename
00572             while tmp:
00573                 rename_set.add(tmp.new_type)
00574                 if (self.rulechains.has_key(tmp.new_type)):
00575                     tmp = self.rulechains[tmp.new_type].rename
00576                 else:
00577                     break
00578                     
00579             self.rename_map[type] = rename_set
00580 
00581             # For each element in the rulechain chain, 
00582             for r in rulechain.chain:
00583                 # Create a scaffoldnode
00584                 sn = ScaffoldNode(r.old_class, r.new_class, r)
00585                 self.base_nodes.append(sn)
00586                 # If it's the first one, stick it in our first_type map
00587                 if first:
00588                     self.first_type[type] = sn
00589                     first = False
00590                 # If there was a previous node, link them if keys
00591                 # match, or else create an implicit SN
00592                 if prev_sn:
00593                     if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
00594                         prev_sn.next = sn
00595                     else:
00596                         implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
00597                         self.base_nodes.append(implicit_sn)
00598                         prev_sn.next = implicit_sn
00599                         implicit_sn.next = sn
00600                 # The just-created node now becomes the previous
00601                 prev_sn = sn
00602 
00603             # If there is a rename rule
00604             if rulechain.rename:
00605                 # Create a scaffoldnode
00606                 sn = ScaffoldNode(rulechain.rename.old_class, rulechain.rename.new_class, rulechain.rename)
00607                 self.base_nodes.append(sn)
00608 
00609                 # Same rules apply here as when we created each node
00610                 # from chain.  Link if possible, otherwise create
00611                 # implicit
00612                 if first:
00613                     self.first_type[type] = sn
00614                     first = False
00615                 if prev_sn:
00616                     if get_message_key(prev_sn.new_class) == get_message_key(sn.old_class):
00617                         prev_sn.next = sn
00618                     else:
00619                         implicit_sn = ScaffoldNode(prev_sn.new_class, sn.old_class, None)
00620                         self.base_nodes.append(implicit_sn)
00621                         prev_sn.next = implicit_sn
00622                         implicit_sn.next = sn                        
00623                 prev_sn = sn
00624                 terminal_nodes.append(sn)
00625             # If there was not a rename rule, this must be a terminal node
00626             else:
00627                 if prev_sn:
00628                     terminal_nodes.append(prev_sn)
00629         
00630         # Between our partial scaffold and name remapping list, we can
00631         # now GENERATE rules, though we cannot yet populate the
00632         # subrules.
00633 
00634         for sn in terminal_nodes:
00635             key = get_message_key(sn.new_class)
00636 
00637             renamed = (sn.old_class._type != sn.new_class._type)
00638 
00639             sys_class = genpy.message.get_message_class(sn.new_class._type)
00640 
00641             # If we map directly to a system-defined class we're done
00642             if sys_class:
00643                 new_rule = self.make_update_rule(sn.new_class, sys_class)
00644                 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
00645                 if R.valid:
00646                     sn.next = ScaffoldNode(sn.new_class, sys_class, R)
00647                     self.base_nodes.append(sn.next)
00648 
00649             if renamed:
00650                 tmp_sns = self.scaffold_range(sn.new_class._type, sn.new_class._type)
00651 
00652                 # If we don't map to a scaffold range, we appear to be done
00653                 if tmp_sns == []:
00654                     if sys_class is not None:
00655                         sn.next = ScaffoldNode(sn.new_class, sys_class, None)
00656                         self.base_nodes.append(sn.next)
00657                         continue
00658 
00659                 # Otherwise look for trivial bridges
00660                 for tmp_sn in reversed(tmp_sns):
00661                     tmp_key = get_message_key(tmp_sn.old_class)
00662                     if (key == tmp_key):
00663                         sn.next = tmp_sn
00664                         break
00665 
00666                 # If we did not find a trivial bridge, we instead need
00667                 # to create the right implicit rule ourselves.  This
00668                 # is based on the ability to create a valid implicit
00669                 # rule as LATE in the chain as possible.  We do this
00670                 # to avoid extra conversions in some boundary
00671                 # circumstances.
00672                 if (sn.next is None):
00673                     for tmp_sn in reversed(tmp_sns):
00674                         new_rule = self.make_update_rule(sn.new_class, tmp_sn.old_class)
00675                         R = new_rule(self, 'GENERATED.' + new_rule.__name__)
00676                         if R.valid:
00677                             sn.next = ScaffoldNode(sn.new_class, tmp_sn.old_class, R)
00678                             self.base_nodes.append(sn.next)
00679                             break
00680 
00681             
00682             # If we have still failed we need to create a placeholder.  
00683             if (sn.next is None):
00684                 if sys_class:
00685                     new_rule = self.make_update_rule(sn.new_class, sys_class)
00686                 else:
00687                     new_rule = self.make_old_half_rule(sn.new_class)
00688                 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
00689                 sn.next = ScaffoldNode(sn.new_class, None, R)
00690                 self.base_nodes.append(sn.next)
00691                     
00692 
00693         # Now that our scaffolding is actually complete, we iterate
00694         # through all of our rules and generate the rules for which we
00695         # have scaffoldnodes, but no rule yet
00696         for sn in self.base_nodes:
00697             if (sn.rule is None):
00698                 new_rule = self.make_update_rule(sn.old_class, sn.new_class)
00699                 sn.rule = new_rule(self, 'GENERATED.' + new_rule.__name__)
00700 
00701         # Finally, we go through and try to find sub_paths for every
00702         # rule in the system so far
00703         for sn in self.base_nodes:
00704             sn.rule.find_sub_paths()
00705 
00706         # Construction should be done, we can now use the system in
00707         # the event that we don't have invalid update rules.
00708 
00709         self.class_dict = {}
00710 
00711         for sn in self.base_nodes + self.extra_nodes:
00712             self.class_dict[get_message_key(sn.old_class)] = sn.old_class
00713             self.class_dict[get_message_key(sn.new_class)] = sn.new_class
00714 
00715 
00716     def lookup_type(self, key):
00717         if key in self.class_dict:
00718             return self.class_dict[key]
00719         else:
00720             return None
00721 
00722     # Add an update rule to our set of rule chains
00723     def add_update_rule(self, r):
00724         if r.valid == False:
00725             print >> sys.stderr, "ERROR: Update rule [%s] has valid set to False."%(r.location)
00726             self.false_rule_loaded = True
00727             return
00728 
00729         rulechain = self.rulechains[r.old_type]
00730 
00731         if r.rename_rule:
00732             if (rulechain.rename != None):
00733                 print >> sys.stderr, "WARNING: Update rules [%s] and [%s] both attempting to rename type [%s]. Ignoring [%s]"%(
00734                     rulechain.rename.location, r.location, r.old_type, r.location)
00735                 return
00736 
00737             # Search forward to make sure we havn't created a cycle
00738             cycle = []
00739             tmp = r
00740             while tmp:
00741                 cycle.append(tmp)
00742                 if (tmp.new_type == r.old_type):
00743                     print >> sys.stderr, "WARNING: Update rules %s introduce a renaming cycle. Ignoring [%s]"%(
00744                         [x.location for x in cycle],r.location)
00745                     return
00746                 if (self.rulechains.has_key(tmp.new_type)):
00747                     tmp = self.rulechains[tmp.new_type].rename
00748                 else:
00749                     break
00750 
00751 
00752             if rulechain.chain and (r.order <= rulechain.chain[-1].order):
00753                 print >> sys.stderr, "WARNING: Update rule [%s] which performs rename does not have largest order number. Ignoring"%(
00754                     r.location)
00755                 return
00756 
00757             rulechain.rename = r
00758 
00759         else:
00760             if r.order in rulechain.order_keys:
00761                 otherind = [x.order for x in rulechain.chain].index(r.order)
00762                 print >> sys.stderr, "WARNING: Update rules [%s] and [%s] for type [%s] have the same order number. Ignoring [%s]"%(
00763                     rulechain.chain[otherind].location, r.location, r.old_type, r.location)
00764                 return
00765             else:
00766                 if rulechain.rename and (r.order >= rulechain.rename.order):
00767                     print >> sys.stderr, "WARNING: Update rule [%s] has order number larger than rename rule [%s]. Ignoring"%(
00768                         r.location, rulechain.rename.location)
00769                     return
00770                 # Insert the rule into a rule chain
00771                 rulechain.order_keys.add(r.order)
00772                 rulechain.chain.append(r)
00773                 rulechain.chain.sort(key=lambda x: x.order)
00774                 
00775     # Helper function to determine if all rules are valid
00776     def all_rules_valid(self):
00777         base_valid  = not False in [sn.rule.valid for sn in self.base_nodes]
00778         extra_valid = not False in [sn.rule.valid for sn in self.extra_nodes]
00779         return base_valid and extra_valid
00780 
00781     # Helper function to print out the definitions for all invalid rules (which include definitions)
00782     def get_invalid_rules(self):
00783         invalid_rules = []
00784         invalid_rule_cache = []
00785         for sn in self.base_nodes:
00786             if not sn.rule.valid:
00787                 path_key = get_path_key(sn.old_class, sn.new_class)
00788                 if (path_key not in invalid_rule_cache):
00789                     invalid_rules.append(sn.rule)
00790                     invalid_rule_cache.append(path_key)
00791         for sn in self.extra_nodes:
00792             if not sn.rule.valid:
00793                 path_key = get_path_key(sn.old_class, sn.new_class)
00794                 if (path_key not in invalid_rule_cache):
00795                     invalid_rules.append(sn.rule)
00796                     invalid_rule_cache.append(path_key)
00797         return invalid_rules
00798 
00799     # Helper function to remove non-unique rules
00800     def filter_rules_unique(self, rules):
00801         rule_cache = []
00802         new_rules = []
00803         for r in rules:
00804             path_key = get_path_key(r.old_class, r.new_class)
00805             if (path_key not in rule_cache):
00806                 new_rules.append(r)
00807         return new_rules
00808 
00809     # Helper function to expand a list of rules to include subrules
00810     def expand_rules(self, rules):
00811         filtered = self.filter_rules_unique(rules)
00812         expanded = []
00813         for r in filtered:
00814             expanded.append(r)
00815             #print "For rule %s --> %s"%(r.old_class._type, r.new_class._type)
00816             expanded.extend(self.expand_rules(r.sub_rules))
00817         filtered = self.filter_rules_unique(expanded)
00818         return filtered
00819 
00820     def scaffold_range(self, old_type, new_type):
00821         try:
00822             first_sn = self.first_type[old_type]
00823             
00824             sn_range = [first_sn]
00825 
00826             found_new_type = False
00827 
00828             tmp_sn = first_sn
00829 
00830             while (tmp_sn.next is not None and tmp_sn.next.new_class is not None):
00831 #                print sn_range
00832                 tmp_sn = tmp_sn.next
00833                 if (tmp_sn != first_sn):
00834                     sn_range.append(tmp_sn)
00835                 if (tmp_sn.new_class._type == new_type):
00836                     found_new_type == True
00837                 if (found_new_type and tmp_sn.new_class._type != new_type):
00838                     break
00839 
00840             return sn_range
00841 
00842         except KeyError:
00843             return []
00844 
00845 
00846     def find_target(self, old_class):
00847         key = get_message_key(old_class)
00848 
00849         last_class = old_class
00850 
00851         try:
00852             return self.found_targets[key]
00853         except KeyError:
00854 
00855             sys_class = genpy.message.get_message_class(old_class._type)
00856 
00857             if sys_class is not None:
00858                 self.found_targets[key] = sys_class
00859                 return sys_class
00860 
00861             try:
00862                 tmp_sn = self.first_type[old_class._type]
00863 
00864                 if tmp_sn.new_class is not None:
00865                     last_class = tmp_sn.new_class
00866 
00867                 while tmp_sn.next is not None:
00868                     tmp_sn = tmp_sn.next
00869 
00870                 if tmp_sn.new_class is not None:
00871                     last_class = tmp_sn.new_class
00872                     sys_class = genpy.message.get_message_class(tmp_sn.new_class._type)
00873                 else:
00874                     sys_class = None
00875 
00876                 if sys_class is not None:
00877                     self.found_targets[key] = sys_class
00878                     return sys_class
00879             except KeyError:
00880                 pass
00881 
00882         self.found_targets[key] = None
00883         return None
00884             
00885     # This function determines the set of rules which must be created
00886     # to get from the old type to the new type.
00887     def find_path(self, old_class, new_class):
00888         key = get_path_key(old_class, new_class)
00889 
00890         # Return any path already found in the cache
00891         try:
00892             return self.found_paths[key]
00893         except KeyError:
00894             pass
00895 
00896         # If the new_class is none, e.g., a message has been moved and
00897         # we are lacking a proper rename rule, such that find-target
00898         # failed, the best we can do is create a half-rule from the
00899         # end-point
00900         if new_class is None:
00901             sn_range = self.scaffold_range(old_class._type, "")
00902 
00903             found_start = False
00904 
00905             for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
00906                 # Skip until we find the class we're trying to match
00907                 if (tmp_sn.old_class._type != old_class._type):
00908                     continue
00909                 if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
00910                     sn_range = sn_range[ind:]
00911                     found_start = True
00912                     break
00913 
00914             # Next see if we can create a valid rule
00915             if not found_start:
00916                 for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
00917                     if (tmp_sn.old_class._type != old_class._type):
00918                         continue
00919                     new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
00920                     R = new_rule(self, 'GENERATED.' + new_rule.__name__)
00921                     if R.valid:
00922                         R.find_sub_paths()
00923                         sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
00924                         self.extra_nodes.append(sn)
00925                         sn_range = sn_range[ind:]
00926                         sn_range.insert(0,sn)
00927                         found_start = True
00928                         break
00929 
00930             if sn_range == []:
00931                 tmp_class = old_class
00932             else:
00933                 tmp_class = sn_range[-1].new_class
00934 
00935             new_rule = self.make_old_half_rule(tmp_class)
00936             R = new_rule(self, 'GENERATED.' + new_rule.__name__)
00937             sn = ScaffoldNode(tmp_class, None, R)
00938             sn_range.append(sn)
00939             self.extra_nodes.append(sn)
00940             self.found_paths[key] = sn_range
00941             return sn_range
00942 
00943         # If the messages are the same, there is no actually path
00944         if (old_class._type == new_class._type and old_class._full_text.strip() == new_class._full_text.strip()):
00945             self.found_paths[key] = []
00946             return []
00947 
00948         sn_range = self.scaffold_range(old_class._type, new_class._type)
00949 
00950         # If we have no scaffolding, we just try to create the one path
00951         if sn_range == []:
00952             new_rule = self.make_update_rule(old_class, new_class)
00953             R = new_rule(self, 'GENERATED.' + new_rule.__name__)
00954             R.find_sub_paths()
00955             sn = ScaffoldNode(old_class, new_class, R)
00956             self.extra_nodes.append(sn)
00957             self.found_paths[key] = [sn]
00958             return [sn]
00959 
00960 
00961         # Search for the stop point in the scaffold
00962         found_stop = False
00963 
00964         # First look for a trivial match
00965         for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
00966             # Stop looking early if the classes don't match
00967             if (tmp_sn.new_class._type != new_class._type):
00968                 break
00969             if get_message_key(tmp_sn.new_class) == get_message_key(new_class):
00970                 sn_range = sn_range[:ind+1]
00971                 found_stop = True
00972                 break
00973 
00974         # Next see if we can create a valid rule
00975         if not found_stop:
00976             for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
00977                 if (tmp_sn.new_class._type != new_class._type):
00978                     break
00979                 new_rule = self.make_update_rule(tmp_sn.new_class, new_class)
00980                 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
00981                 if R.valid:
00982                     R.find_sub_paths()
00983                     sn = ScaffoldNode(tmp_sn.new_class, new_class, R)
00984                     self.extra_nodes.append(sn)
00985                     sn_range = sn_range[:ind+1]
00986                     sn_range.append(sn)
00987                     found_stop = True
00988                     break
00989 
00990         # If there were no valid implicit rules, we suggest a new one from to the end
00991         if not found_stop:
00992             new_rule = self.make_update_rule(sn_range[-1].new_class, new_class)
00993             R = new_rule(self, 'GENERATED.' + new_rule.__name__)
00994             R.find_sub_paths()
00995             sn = ScaffoldNode(sn_range[-1].new_class, new_class, R)
00996             self.extra_nodes.append(sn)
00997             sn_range.append(sn)
00998 
00999         # Search for the start point in the scaffold
01000         found_start = False
01001 
01002         # First look for a trivial match
01003         for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
01004             # Skip until we find the class we're trying to match
01005             if (tmp_sn.old_class._type != old_class._type):
01006                 continue
01007             if get_message_key(tmp_sn.old_class) == get_message_key(old_class):
01008                 sn_range = sn_range[ind:]
01009                 found_start = True
01010                 break
01011 
01012         # Next see if we can create a valid rule
01013         if not found_start:
01014             for (ind, tmp_sn) in reversed(zip(range(len(sn_range)), sn_range)):
01015                 if (tmp_sn.old_class._type != old_class._type):
01016                     continue
01017                 new_rule = self.make_update_rule(old_class, tmp_sn.old_class)
01018                 R = new_rule(self, 'GENERATED.' + new_rule.__name__)
01019                 if R.valid:
01020                     R.find_sub_paths()
01021                     sn = ScaffoldNode(old_class, tmp_sn.old_class, R)
01022                     self.extra_nodes.append(sn)
01023                     sn_range = sn_range[ind:]
01024                     sn_range.insert(0,sn)
01025                     found_start = True
01026                     break
01027 
01028         # If there were no valid implicit rules, we suggest a new one from the beginning
01029         if not found_start:
01030             new_rule = self.make_update_rule(old_class, sn_range[0].old_class)
01031             R = new_rule(self, 'GENERATED.' + new_rule.__name__)
01032             R.find_sub_paths()
01033             sn = ScaffoldNode(old_class, sn_range[0].old_class, R)
01034             self.extra_nodes.append(sn)
01035             sn_range.insert(0,sn)
01036 
01037         self.found_paths[key] = sn_range
01038         return sn_range
01039 
01040 
01041     def migrate_raw(self, msg_from, msg_to):
01042         path = self.find_path(msg_from[4], msg_to[4])
01043 
01044         if False in [sn.rule.valid for sn in path]:
01045             raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type))
01046 
01047         # Short cut to speed up case of matching md5sum:
01048         if path == [] or msg_from[2] == msg_to[2]:
01049             return (msg_to[0], msg_from[1], msg_to[2], msg_to[3], msg_to[4])
01050 
01051         tmp_msg = path[0].old_class()
01052         tmp_msg.deserialize(msg_from[1])
01053 
01054         for sn in path:
01055             tmp_msg = sn.rule.apply(tmp_msg)
01056 
01057         buff = StringIO()
01058         tmp_msg.serialize(buff)
01059 
01060         return (msg_to[0], buff.getvalue(), msg_to[2], msg_to[3], msg_to[4])
01061 
01062 
01063 
01064     def migrate(self, msg_from, msg_to):
01065         path = self.find_path(msg_from.__class__, msg_to.__class__)
01066 
01067         if False in [sn.rule.valid for sn in path]:
01068             raise BagMigrationException("Migrate called, but no valid migration path from [%s] to [%s]"%(msg_from._type, msg_to._type))
01069 
01070         # Short cut to speed up case of matching md5sum:
01071         if path == [] or msg_from._md5sum == msg_to._md5sum:
01072             buff = StringIO()
01073             msg_from.serialize(buff)
01074             msg_to.deserialize(buff.getvalue())
01075             return
01076 
01077         if len(path) > 0:
01078             buff = StringIO()
01079             msg_from.serialize(buff)
01080 
01081             tmp_msg = path[0].old_class()
01082 
01083             tmp_msg.deserialize(buff.getvalue())
01084 
01085             for sn in path:
01086                 tmp_msg = sn.rule.apply(tmp_msg)
01087         else:
01088             tmp_msg = msg_from
01089 
01090         buff = StringIO()
01091         tmp_msg.serialize(buff)
01092         msg_to.deserialize(buff.getvalue())
01093 
01094     def migrate_array(self, msg_from_array, msg_to_array):
01095         if len(msg_from_array) != len(msg_to_array):
01096             raise BagMigrationException("Migrate array called on on arrays of unequal length.")
01097 
01098         if len(msg_from_array) == 0:
01099             return
01100 
01101         path = self.find_path(msg_from_array[0].__class__, msg_to_array[0].__class__)
01102 
01103         if path is None:
01104             raise BagMigrationException("Migrate called, but no migration path from [%s] to [%s]"%(msg_from._type, msg_to._type))
01105 
01106         # Short cut to speed up case of matching md5sum:
01107         if path == []:
01108             for i in xrange(len(msg_from_array)):
01109                 buff = StringIO()
01110                 msg_from_array[i].serialize(buff)
01111                 msg_to_array[i].deserialize(buff.getvalue())
01112             return
01113 
01114         for i in xrange(len(msg_from_array)):
01115             buff = StringIO()
01116             tmp_msg = path[0].old_class()
01117             msg_from_array[i].serialize(buff)
01118             tmp_msg.deserialize(buff.getvalue())
01119             for sn in path:
01120                 tmp_msg = sn.rule.apply(tmp_msg)
01121 
01122             buff = StringIO()
01123             tmp_msg.serialize(buff)
01124             msg_to_array[i].deserialize(buff.getvalue())
01125 
01126     def make_update_rule(self, old_class, new_class):
01127         name = "update_%s_%s"%(old_class._type.replace("/","_"), old_class._md5sum)
01128 
01129         # We assemble the class as a string and then exec it to end up with a class
01130         # that can essentially print its own definition.
01131         classdef = "class %s(MessageUpdateRule):\n"%name
01132         classdef += "\told_type = \"%s\"\n"%old_class._type
01133         classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip()
01134         classdef += "\tnew_type = \"%s\"\n"%new_class._type
01135         classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip()
01136         classdef += "\n"
01137         classdef += "\torder = 0"
01138         classdef += "\n"
01139 
01140         validdef = "\tvalid = True\n"
01141 
01142         migratedefs = "\tmigrated_types = ["
01143 
01144         updatedef = "\tdef update(self, old_msg, new_msg):\n"
01145 
01146         old_consts = constants_from_def(old_class._type, old_class._full_text)
01147         new_consts = constants_from_def(new_class._type, new_class._full_text)
01148 
01149         if (not new_consts >= old_consts):
01150             validdef = "\tvalid = False\n"
01151             for c in (old_consts - new_consts):
01152                 updatedef += "\t\t#Constant '%s' has changed\n"%(c[0],)
01153         
01154         old_slots = []
01155         old_slots.extend(old_class.__slots__)
01156 
01157         migrations_seen = []
01158 
01159         # Assign across primitives, self.migrate or self.migrate_array non-primitives
01160         for (s,t) in zip(new_class.__slots__, new_class._slot_types):
01161             warn_msg = None
01162             new_base_type, new_is_array, new_array_len = genmsg.msgs.parse_type(t)
01163             try:
01164                 ind = old_class.__slots__.index(s)
01165                 old_slots.remove(s)
01166                 old_base_type, old_is_array, old_array_len = genmsg.msgs.parse_type(old_class._slot_types[ind])
01167 
01168                 if new_is_array != old_is_array:
01169                     warn_msg = "Could not match array with nonarray"
01170 
01171                 elif new_array_len != old_array_len:
01172                     if old_array_len is None:
01173                         warn_msg = "Converted from variable length array to fixed array of length %d"%(new_array_len)
01174                     elif new_array_len is None:
01175                         warn_msg = "Converted from fixed array of length %d to variable length"%(old_array_len)
01176                     else:
01177                         warn_msg = "Fixed length array converted from %d to %d"%(old_array_len,new_array_len)
01178 
01179                 elif genmsg.msgs.is_builtin(new_base_type):
01180                     if new_base_type != old_base_type:
01181                         warn_msg = "Primitive type changed"
01182                     else:
01183                         updatedef += "\t\tnew_msg.%s = old_msg.%s\n"%(s,s)
01184 
01185                 else:
01186                     tmp_old_type = clean_name(old_base_type, old_class._type)
01187                     tmp_new_type = clean_name(new_base_type, new_class._type)
01188 
01189                     tmp_qualified_old_type = qualified_name(old_base_type, old_class._type)
01190                     tmp_qualified_new_type = qualified_name(new_base_type, new_class._type)
01191 
01192                     # Verify the type can theoretically be migrated
01193                     if (tmp_qualified_old_type == tmp_qualified_new_type) or \
01194                             (self.rename_map.has_key(tmp_qualified_old_type) and 
01195                              tmp_qualified_new_type in self.rename_map[tmp_qualified_old_type]):
01196 
01197                         if (tmp_old_type, tmp_new_type) not in migrations_seen:
01198                             migratedefs += "\n\t\t(\"%s\",\"%s\"),"%(tmp_old_type, tmp_new_type)
01199                             migrations_seen.append((tmp_old_type, tmp_new_type))
01200 
01201                         if not new_is_array:
01202                             updatedef += "\t\tself.migrate(old_msg.%s, new_msg.%s)\n"%(s,s)
01203                         else:
01204                             updatedef += "\t\tself.migrate_array(old_msg.%s, new_msg.%s, \"%s\")\n"%(s,s,new_base_type)
01205                     else:
01206                         warn_msg = "No migration path between [%s] and [%s]"%(tmp_old_type, tmp_new_type)
01207             except ValueError:
01208                 warn_msg = "No matching field name in old message"
01209 
01210             if warn_msg is not None:
01211                 validdef = "\tvalid = False\n"
01212                 updatedef += "\t\t#%s\n"%warn_msg
01213                 updatedef += "\t\tnew_msg.%s = %s\n"%(s,migration_default_value(t))
01214                 
01215         migratedefs += "]\n"
01216 
01217         if old_slots:
01218             validdef = "\tvalid = False\n"
01219             for s in old_slots:
01220                 updatedef += "\t\t#No field to match field %s from old message\n"%(s)
01221 
01222         classdef += migratedefs + '\n' + validdef + '\n' + updatedef
01223 
01224         printclassdef = classdef +  "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef
01225         
01226         # This is probably a TERRIBLE idea?
01227         exec(printclassdef)
01228         return locals()[name]
01229 
01230     def make_old_half_rule(self, old_class):
01231         name = "update__%s__%s"%(old_class._type.replace("/","_"), old_class._md5sum)
01232 
01233         # We assemble the class as a string and then exec it to end up with a class
01234         # that can essentially print its own definition.
01235         classdef = "class %s(MessageUpdateRule):\n"%name
01236         classdef += "\told_type = \"%s\"\n"%old_class._type
01237         classdef += "\told_full_text = \"\"\"\n%s\n\"\"\"\n\n"%old_class._full_text.strip()
01238         classdef += "\tnew_type = \"\"\n"
01239         classdef += "\tnew_full_text = \"\"\"\n\n\"\"\"\n"
01240         classdef += "\n"
01241         classdef += "\torder = 0"
01242         classdef += "\n"
01243     
01244         validdef = "\tvalid = False\n"
01245 
01246         migratedefs = "\tmigrated_types = []\n"
01247 
01248         updatedef = "\tdef update(self, old_msg, new_msg):\n"
01249         updatedef += "\t\tpass\n"
01250         
01251         classdef += migratedefs + '\n' + validdef + '\n' + updatedef
01252 
01253         printclassdef = classdef +  "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef
01254         
01255         # This is probably a TERRIBLE idea?
01256         exec(printclassdef)
01257         return locals()[name]
01258 
01259     def make_new_half_rule(self, new_class):
01260         name = "update_to_%s_%s"%(new_class._type.replace("/","_"), new_class._md5sum)
01261 
01262         # We assemble the class as a string and then exec it to end up with a class
01263         # that can essentially print its own definition.
01264         classdef = "class %s(MessageUpdateRule):\n"%name
01265         classdef += "\told_type = \"\"\n"
01266         classdef += "\told_full_text = \"\"\"\n\n\"\"\"\n\n"
01267         classdef += "\tnew_type = \"%s\"\n"%new_class._type
01268         classdef += "\tnew_full_text = \"\"\"\n%s\n\"\"\"\n"%new_class._full_text.strip()
01269         classdef += "\n"
01270         classdef += "\torder = 0"
01271         classdef += "\n"
01272     
01273         validdef = "\tvalid = False\n"
01274 
01275         migratedefs = "\tmigrated_types = []\n"
01276 
01277         updatedef = "\tdef update(self, old_msg, new_msg):\n"
01278         updatedef += "\t\tpass\n"
01279         
01280         classdef += migratedefs + '\n' + validdef + '\n' + updatedef
01281 
01282         printclassdef = classdef +  "\tdef get_class_def(self):\n\t\treturn \'\'\'%s\'\'\'\n"%classdef
01283         
01284         # This is probably a TERRIBLE idea?
01285         exec(printclassdef)
01286         return locals()[name]
01287 
def migration_default_value(field_type):
    """Return source text expressing a sensible default for ``field_type``."""
    integer_types = ('bool', 'byte', 'int8', 'int16', 'int32', 'int64',
                     'char', 'uint8', 'uint16', 'uint32', 'uint64')
    if field_type in integer_types:
        return '0'
    if field_type in ('float32', 'float64'):
        return '0.'
    if field_type == 'string':
        # strings, byte[], and uint8s are all optimized to be strings
        return "''"
    if field_type.endswith(']'):  # array type
        base_type, is_array, array_len = genmsg.msgs.parse_type(field_type)
        if base_type in ('byte', 'uint8'):
            # strings, byte[], and uint8s are all optimized to be strings
            if array_len is None:
                return "''"
            return "chr(0)*%s"%array_len
        if array_len is None:  # var-length
            return '[]'
        # fixed-length: fill with repeated element defaults
        element_default = migration_default_value(base_type)
        return '[' + ','.join([element_default] * array_len) + ']'
    return "self.get_new_class('%s')()"%field_type
01312 
def constants_from_def(core_type, msg_def):
    """Parse ``msg_def`` and return the set of (name, value, type) tuples
    for the constants declared by the core message type ``core_type``."""
    core_pkg, core_base_type = genmsg.package_resource_name(core_type)

    # The full text is the core .msg definition followed by dependency
    # definitions, separated by 80-character '=' divider lines; only the
    # core section is needed here.
    separator = '\n' + '=' * 80 + '\n'
    core_msg = msg_def.split(separator)[0]

    # create a MsgSpec representation of the .msg text
    from genmsg import MsgContext
    context = MsgContext.create_default()
    spec = genmsg.msg_loader.load_msg_from_string(context, core_msg, core_pkg)

    return set((c.name, c.val, c.type) for c in spec.constants)


rosbag
Author(s): Tim Field, Jeremy Leibs, James Bowman
autogenerated on Fri Aug 28 2015 12:33:52