00001 """autogenerated by genpy from tabletop_collision_map_processing/TabletopCollisionMapProcessingRequest.msg. Do not edit."""
00002 import sys
00003 python3 = True if sys.hexversion > 0x03000000 else False
00004 import genpy
00005 import struct
00006
00007 import tabletop_object_detector.msg
00008 import geometry_msgs.msg
00009 import shape_msgs.msg
00010 import object_recognition_msgs.msg
00011 import sensor_msgs.msg
00012 import std_msgs.msg
00013 import household_objects_database_msgs.msg
00014
00015 class TabletopCollisionMapProcessingRequest(genpy.Message):
00016 _md5sum = "18a3708a4404b659def754ff5dc4dbb3"
00017 _type = "tabletop_collision_map_processing/TabletopCollisionMapProcessingRequest"
00018 _has_header = False #flag to mark the presence of a Header object
00019 _full_text = """
00020
00021
00022
00023
00024
00025
00026 tabletop_object_detector/TabletopDetectionResult detection_result
00027
00028
00029 bool reset_collision_models
00030
00031
00032 bool reset_attached_models
00033
00034
00035
00036 string desired_frame
00037
00038
00039 ================================================================================
00040 MSG: tabletop_object_detector/TabletopDetectionResult
00041 # Contains all the information from one run of the tabletop detection node
00042
00043 # The information for the plane that has been detected
00044 Table table
00045
00046 # The raw clusters detected in the scan
00047 sensor_msgs/PointCloud[] clusters
00048
00049 # The list of potential models that have been detected for each cluster
00050 # An empty list will be returned for a cluster that has no recognition results at all
00051 household_objects_database_msgs/DatabaseModelPoseList[] models
00052
00053 # For each cluster, the index of the list of models that was fit to that cluster
00054 # keep in mind that multiple raw clusters can correspond to a single fit
00055 int32[] cluster_model_indices
00056
00057 # Whether the detection has succeeded or failed
00058 int32 NO_CLOUD_RECEIVED = 1
00059 int32 NO_TABLE = 2
00060 int32 OTHER_ERROR = 3
00061 int32 SUCCESS = 4
00062 int32 result
00063
00064 ================================================================================
00065 MSG: tabletop_object_detector/Table
00066 # Informs that a planar table has been detected at a given location
00067
00068 # The pose gives you the transform that takes you to the coordinate system
00069 # of the table, with the origin somewhere in the table plane and the
00070 # z axis normal to the plane
00071 geometry_msgs/PoseStamped pose
00072
00073 # These values give you the observed extents of the table, along x and y,
00074 # in the table's own coordinate system (above)
00075 # there is no guarantee that the origin of the table coordinate system is
00076 # inside the boundary defined by these values.
00077 float32 x_min
00078 float32 x_max
00079 float32 y_min
00080 float32 y_max
00081
00082 # There is no guarantee that the table does NOT extend further than these
00083 # values; this is just as far as we've observed it.
00084
00085
00086 # Newer table definition as triangle mesh of convex hull (relative to pose)
00087 shape_msgs/Mesh convex_hull
00088
00089 ================================================================================
00090 MSG: geometry_msgs/PoseStamped
00091 # A Pose with reference coordinate frame and timestamp
00092 Header header
00093 Pose pose
00094
00095 ================================================================================
00096 MSG: std_msgs/Header
00097 # Standard metadata for higher-level stamped data types.
00098 # This is generally used to communicate timestamped data
00099 # in a particular coordinate frame.
00100 #
00101 # sequence ID: consecutively increasing ID
00102 uint32 seq
00103 #Two-integer timestamp that is expressed as:
00104 # * stamp.secs: seconds (stamp_secs) since epoch
00105 # * stamp.nsecs: nanoseconds since stamp_secs
00106 # time-handling sugar is provided by the client library
00107 time stamp
00108 #Frame this data is associated with
00109 # 0: no frame
00110 # 1: global frame
00111 string frame_id
00112
00113 ================================================================================
00114 MSG: geometry_msgs/Pose
00115 # A representation of pose in free space, composed of position and orientation.
00116 Point position
00117 Quaternion orientation
00118
00119 ================================================================================
00120 MSG: geometry_msgs/Point
00121 # This contains the position of a point in free space
00122 float64 x
00123 float64 y
00124 float64 z
00125
00126 ================================================================================
00127 MSG: geometry_msgs/Quaternion
00128 # This represents an orientation in free space in quaternion form.
00129
00130 float64 x
00131 float64 y
00132 float64 z
00133 float64 w
00134
00135 ================================================================================
00136 MSG: shape_msgs/Mesh
00137 # Definition of a mesh
00138
00139 # list of triangles; the index values refer to positions in vertices[]
00140 MeshTriangle[] triangles
00141
00142 # the actual vertices that make up the mesh
00143 geometry_msgs/Point[] vertices
00144
00145 ================================================================================
00146 MSG: shape_msgs/MeshTriangle
00147 # Definition of a triangle's vertices
00148 uint32[3] vertex_indices
00149
00150 ================================================================================
00151 MSG: sensor_msgs/PointCloud
00152 # This message holds a collection of 3d points, plus optional additional
00153 # information about each point.
00154
00155 # Time of sensor data acquisition, coordinate frame ID.
00156 Header header
00157
00158 # Array of 3d points. Each Point32 should be interpreted as a 3d point
00159 # in the frame given in the header.
00160 geometry_msgs/Point32[] points
00161
00162 # Each channel should have the same number of elements as points array,
00163 # and the data in each channel should correspond 1:1 with each point.
00164 # Channel names in common practice are listed in ChannelFloat32.msg.
00165 ChannelFloat32[] channels
00166
00167 ================================================================================
00168 MSG: geometry_msgs/Point32
00169 # This contains the position of a point in free space (with 32 bits of precision).
00170 # It is recommended to use Point wherever possible instead of Point32.
00171 #
00172 # This recommendation is to promote interoperability.
00173 #
00174 # This message is designed to take up less space when sending
00175 # lots of points at once, as in the case of a PointCloud.
00176
00177 float32 x
00178 float32 y
00179 float32 z
00180 ================================================================================
00181 MSG: sensor_msgs/ChannelFloat32
00182 # This message is used by the PointCloud message to hold optional data
00183 # associated with each point in the cloud. The length of the values
00184 # array should be the same as the length of the points array in the
00185 # PointCloud, and each value should be associated with the corresponding
00186 # point.
00187
00188 # Channel names in existing practice include:
00189 # "u", "v" - row and column (respectively) in the left stereo image.
00190 # This is opposite to usual conventions but remains for
00191 # historical reasons. The newer PointCloud2 message has no
00192 # such problem.
00193 # "rgb" - For point clouds produced by color stereo cameras. uint8
00194 # (R,G,B) values packed into the least significant 24 bits,
00195 # in order.
00196 # "intensity" - laser or pixel intensity.
00197 # "distance"
00198
00199 # The channel name should give semantics of the channel (e.g.
00200 # "intensity" instead of "value").
00201 string name
00202
00203 # The values array should be 1-1 with the elements of the associated
00204 # PointCloud.
00205 float32[] values
00206
00207 ================================================================================
00208 MSG: household_objects_database_msgs/DatabaseModelPoseList
00209 # stores a list of possible database models recognition results
00210 DatabaseModelPose[] model_list
00211 ================================================================================
00212 MSG: household_objects_database_msgs/DatabaseModelPose
00213 # Informs that a specific model from the Model Database has been
00214 # identified at a certain location
00215
00216 # the database id of the model
00217 int32 model_id
00218
00219 # if the object was recognized by the ORK pipeline, its type will be in here
00220 # if this is not empty, then the string in here will be converted to a household_objects_database id
00221 # leave this empty if providing an id in the model_id field
00222 object_recognition_msgs/ObjectType type
00223
00224 # the pose that it can be found in
00225 geometry_msgs/PoseStamped pose
00226
00227 # a measure of the confidence level in this detection result
00228 float32 confidence
00229
00230 # the name of the object detector that generated this detection result
00231 string detector_name
00232
00233 ================================================================================
00234 MSG: object_recognition_msgs/ObjectType
00235 ################################################## OBJECT ID #########################################################
00236
00237 # Contains information about the type of a found object. Those two sets of parameters together uniquely define an
00238 # object
00239
00240 # The key of the found object: the unique identifier in the given db
00241 string key
00242
00243 # The db parameters stored as a JSON/compressed YAML string. An object id does not make sense without the corresponding
00244 # database. E.g., in object_recognition, it can look like: "{'type':'CouchDB', 'root':'http://localhost'}"
00245 # There is no conventional format for those parameters and it's nice to keep that flexibility.
00246 # The object_recognition_core has a generic DB type that can read those fields
00247 # Current examples:
00248 # For CouchDB:
00249 # type: 'CouchDB'
00250 # root: 'http://localhost:5984'
00251 # collection: 'object_recognition'
00252 # For SQL household database:
00253 # type: 'SqlHousehold'
00254 # host: 'wgs36'
00255 # port: 5432
00256 # user: 'willow'
00257 # password: 'willow'
00258 # name: 'household_objects'
00259 # module: 'tabletop'
00260 string db
00261
00262 """
00263 __slots__ = ['detection_result','reset_collision_models','reset_attached_models','desired_frame']
00264 _slot_types = ['tabletop_object_detector/TabletopDetectionResult','bool','bool','string']
00265
00266 def __init__(self, *args, **kwds):
00267 """
00268 Constructor. Any message fields that are implicitly/explicitly
00269 set to None will be assigned a default value. The recommended
00270 use is keyword arguments as this is more robust to future message
00271 changes. You cannot mix in-order arguments and keyword arguments.
00272
00273 The available fields are:
00274 detection_result,reset_collision_models,reset_attached_models,desired_frame
00275
00276 :param args: complete set of field values, in .msg order
00277 :param kwds: use keyword arguments corresponding to message field names
00278 to set specific fields.
00279 """
00280 if args or kwds:
00281 super(TabletopCollisionMapProcessingRequest, self).__init__(*args, **kwds)
00282 #message fields cannot be None, assign default values for those that are
00283 if self.detection_result is None:
00284 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00285 if self.reset_collision_models is None:
00286 self.reset_collision_models = False
00287 if self.reset_attached_models is None:
00288 self.reset_attached_models = False
00289 if self.desired_frame is None:
00290 self.desired_frame = ''
00291 else:
00292 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00293 self.reset_collision_models = False
00294 self.reset_attached_models = False
00295 self.desired_frame = ''
00296
00297 def _get_types(self):
00298 """
00299 internal API method
00300 """
00301 return self._slot_types
00302
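# --- Usage note (not part of the generated code) ------------------------------
# The constructor docstring above recommends keyword arguments. A minimal
# construction sketch, assuming a built workspace where the generated srv
# module is importable and using placeholder field values:
#
#   from tabletop_collision_map_processing.srv import TabletopCollisionMapProcessingRequest
#   import tabletop_object_detector.msg
#
#   req = TabletopCollisionMapProcessingRequest(
#       detection_result=tabletop_object_detector.msg.TabletopDetectionResult(),
#       reset_collision_models=True,
#       reset_attached_models=True,
#       desired_frame='base_link')
# -------------------------------------------------------------------------------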
00303 def serialize(self, buff):
00304 """
00305 serialize message into buffer
00306 :param buff: buffer, ``StringIO``
00307 """
00308 try:
00309 _x = self
00310 buff.write(_struct_3I.pack(_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs))
00311 _x = self.detection_result.table.pose.header.frame_id
00312 length = len(_x)
00313 if python3 or type(_x) == unicode:
00314 _x = _x.encode('utf-8')
00315 length = len(_x)
00316 buff.write(struct.pack('<I%ss'%length, length, _x))
00317 _x = self
00318 buff.write(_struct_7d4f.pack(_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max))
00319 length = len(self.detection_result.table.convex_hull.triangles)
00320 buff.write(_struct_I.pack(length))
00321 for val1 in self.detection_result.table.convex_hull.triangles:
00322 buff.write(_struct_3I.pack(*val1.vertex_indices))
00323 length = len(self.detection_result.table.convex_hull.vertices)
00324 buff.write(_struct_I.pack(length))
00325 for val1 in self.detection_result.table.convex_hull.vertices:
00326 _x = val1
00327 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00328 length = len(self.detection_result.clusters)
00329 buff.write(_struct_I.pack(length))
00330 for val1 in self.detection_result.clusters:
00331 _v1 = val1.header
00332 buff.write(_struct_I.pack(_v1.seq))
00333 _v2 = _v1.stamp
00334 _x = _v2
00335 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00336 _x = _v1.frame_id
00337 length = len(_x)
00338 if python3 or type(_x) == unicode:
00339 _x = _x.encode('utf-8')
00340 length = len(_x)
00341 buff.write(struct.pack('<I%ss'%length, length, _x))
00342 length = len(val1.points)
00343 buff.write(_struct_I.pack(length))
00344 for val2 in val1.points:
00345 _x = val2
00346 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
00347 length = len(val1.channels)
00348 buff.write(_struct_I.pack(length))
00349 for val2 in val1.channels:
00350 _x = val2.name
00351 length = len(_x)
00352 if python3 or type(_x) == unicode:
00353 _x = _x.encode('utf-8')
00354 length = len(_x)
00355 buff.write(struct.pack('<I%ss'%length, length, _x))
00356 length = len(val2.values)
00357 buff.write(_struct_I.pack(length))
00358 pattern = '<%sf'%length
00359 buff.write(struct.pack(pattern, *val2.values))
00360 length = len(self.detection_result.models)
00361 buff.write(_struct_I.pack(length))
00362 for val1 in self.detection_result.models:
00363 length = len(val1.model_list)
00364 buff.write(_struct_I.pack(length))
00365 for val2 in val1.model_list:
00366 buff.write(_struct_i.pack(val2.model_id))
00367 _v3 = val2.type
00368 _x = _v3.key
00369 length = len(_x)
00370 if python3 or type(_x) == unicode:
00371 _x = _x.encode('utf-8')
00372 length = len(_x)
00373 buff.write(struct.pack('<I%ss'%length, length, _x))
00374 _x = _v3.db
00375 length = len(_x)
00376 if python3 or type(_x) == unicode:
00377 _x = _x.encode('utf-8')
00378 length = len(_x)
00379 buff.write(struct.pack('<I%ss'%length, length, _x))
00380 _v4 = val2.pose
00381 _v5 = _v4.header
00382 buff.write(_struct_I.pack(_v5.seq))
00383 _v6 = _v5.stamp
00384 _x = _v6
00385 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00386 _x = _v5.frame_id
00387 length = len(_x)
00388 if python3 or type(_x) == unicode:
00389 _x = _x.encode('utf-8')
00390 length = len(_x)
00391 buff.write(struct.pack('<I%ss'%length, length, _x))
00392 _v7 = _v4.pose
00393 _v8 = _v7.position
00394 _x = _v8
00395 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00396 _v9 = _v7.orientation
00397 _x = _v9
00398 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
00399 buff.write(_struct_f.pack(val2.confidence))
00400 _x = val2.detector_name
00401 length = len(_x)
00402 if python3 or type(_x) == unicode:
00403 _x = _x.encode('utf-8')
00404 length = len(_x)
00405 buff.write(struct.pack('<I%ss'%length, length, _x))
00406 length = len(self.detection_result.cluster_model_indices)
00407 buff.write(_struct_I.pack(length))
00408 pattern = '<%si'%length
00409 buff.write(struct.pack(pattern, *self.detection_result.cluster_model_indices))
00410 _x = self
00411 buff.write(_struct_i2B.pack(_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models))
00412 _x = self.desired_frame
00413 length = len(_x)
00414 if python3 or type(_x) == unicode:
00415 _x = _x.encode('utf-8')
00416 length = len(_x)
00417 buff.write(struct.pack('<I%ss'%length, length, _x))
00418 except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
00419 except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
00420
00421 def deserialize(self, str):
00422 """
00423 unpack serialized message in str into this message instance
00424 :param str: byte array of serialized message, ``str``
00425 """
00426 try:
00427 if self.detection_result is None:
00428 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00429 end = 0
00430 _x = self
00431 start = end
00432 end += 12
00433 (_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
00434 start = end
00435 end += 4
00436 (length,) = _struct_I.unpack(str[start:end])
00437 start = end
00438 end += length
00439 if python3:
00440 self.detection_result.table.pose.header.frame_id = str[start:end].decode('utf-8')
00441 else:
00442 self.detection_result.table.pose.header.frame_id = str[start:end]
00443 _x = self
00444 start = end
00445 end += 72
00446 (_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max,) = _struct_7d4f.unpack(str[start:end])
00447 start = end
00448 end += 4
00449 (length,) = _struct_I.unpack(str[start:end])
00450 self.detection_result.table.convex_hull.triangles = []
00451 for i in range(0, length):
00452 val1 = shape_msgs.msg.MeshTriangle()
00453 start = end
00454 end += 12
00455 val1.vertex_indices = _struct_3I.unpack(str[start:end])
00456 self.detection_result.table.convex_hull.triangles.append(val1)
00457 start = end
00458 end += 4
00459 (length,) = _struct_I.unpack(str[start:end])
00460 self.detection_result.table.convex_hull.vertices = []
00461 for i in range(0, length):
00462 val1 = geometry_msgs.msg.Point()
00463 _x = val1
00464 start = end
00465 end += 24
00466 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00467 self.detection_result.table.convex_hull.vertices.append(val1)
00468 start = end
00469 end += 4
00470 (length,) = _struct_I.unpack(str[start:end])
00471 self.detection_result.clusters = []
00472 for i in range(0, length):
00473 val1 = sensor_msgs.msg.PointCloud()
00474 _v10 = val1.header
00475 start = end
00476 end += 4
00477 (_v10.seq,) = _struct_I.unpack(str[start:end])
00478 _v11 = _v10.stamp
00479 _x = _v11
00480 start = end
00481 end += 8
00482 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00483 start = end
00484 end += 4
00485 (length,) = _struct_I.unpack(str[start:end])
00486 start = end
00487 end += length
00488 if python3:
00489 _v10.frame_id = str[start:end].decode('utf-8')
00490 else:
00491 _v10.frame_id = str[start:end]
00492 start = end
00493 end += 4
00494 (length,) = _struct_I.unpack(str[start:end])
00495 val1.points = []
00496 for i in range(0, length):
00497 val2 = geometry_msgs.msg.Point32()
00498 _x = val2
00499 start = end
00500 end += 12
00501 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
00502 val1.points.append(val2)
00503 start = end
00504 end += 4
00505 (length,) = _struct_I.unpack(str[start:end])
00506 val1.channels = []
00507 for i in range(0, length):
00508 val2 = sensor_msgs.msg.ChannelFloat32()
00509 start = end
00510 end += 4
00511 (length,) = _struct_I.unpack(str[start:end])
00512 start = end
00513 end += length
00514 if python3:
00515 val2.name = str[start:end].decode('utf-8')
00516 else:
00517 val2.name = str[start:end]
00518 start = end
00519 end += 4
00520 (length,) = _struct_I.unpack(str[start:end])
00521 pattern = '<%sf'%length
00522 start = end
00523 end += struct.calcsize(pattern)
00524 val2.values = struct.unpack(pattern, str[start:end])
00525 val1.channels.append(val2)
00526 self.detection_result.clusters.append(val1)
00527 start = end
00528 end += 4
00529 (length,) = _struct_I.unpack(str[start:end])
00530 self.detection_result.models = []
00531 for i in range(0, length):
00532 val1 = household_objects_database_msgs.msg.DatabaseModelPoseList()
00533 start = end
00534 end += 4
00535 (length,) = _struct_I.unpack(str[start:end])
00536 val1.model_list = []
00537 for i in range(0, length):
00538 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
00539 start = end
00540 end += 4
00541 (val2.model_id,) = _struct_i.unpack(str[start:end])
00542 _v12 = val2.type
00543 start = end
00544 end += 4
00545 (length,) = _struct_I.unpack(str[start:end])
00546 start = end
00547 end += length
00548 if python3:
00549 _v12.key = str[start:end].decode('utf-8')
00550 else:
00551 _v12.key = str[start:end]
00552 start = end
00553 end += 4
00554 (length,) = _struct_I.unpack(str[start:end])
00555 start = end
00556 end += length
00557 if python3:
00558 _v12.db = str[start:end].decode('utf-8')
00559 else:
00560 _v12.db = str[start:end]
00561 _v13 = val2.pose
00562 _v14 = _v13.header
00563 start = end
00564 end += 4
00565 (_v14.seq,) = _struct_I.unpack(str[start:end])
00566 _v15 = _v14.stamp
00567 _x = _v15
00568 start = end
00569 end += 8
00570 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00571 start = end
00572 end += 4
00573 (length,) = _struct_I.unpack(str[start:end])
00574 start = end
00575 end += length
00576 if python3:
00577 _v14.frame_id = str[start:end].decode('utf-8')
00578 else:
00579 _v14.frame_id = str[start:end]
00580 _v16 = _v13.pose
00581 _v17 = _v16.position
00582 _x = _v17
00583 start = end
00584 end += 24
00585 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00586 _v18 = _v16.orientation
00587 _x = _v18
00588 start = end
00589 end += 32
00590 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
00591 start = end
00592 end += 4
00593 (val2.confidence,) = _struct_f.unpack(str[start:end])
00594 start = end
00595 end += 4
00596 (length,) = _struct_I.unpack(str[start:end])
00597 start = end
00598 end += length
00599 if python3:
00600 val2.detector_name = str[start:end].decode('utf-8')
00601 else:
00602 val2.detector_name = str[start:end]
00603 val1.model_list.append(val2)
00604 self.detection_result.models.append(val1)
00605 start = end
00606 end += 4
00607 (length,) = _struct_I.unpack(str[start:end])
00608 pattern = '<%si'%length
00609 start = end
00610 end += struct.calcsize(pattern)
00611 self.detection_result.cluster_model_indices = struct.unpack(pattern, str[start:end])
00612 _x = self
00613 start = end
00614 end += 6
00615 (_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models,) = _struct_i2B.unpack(str[start:end])
00616 self.reset_collision_models = bool(self.reset_collision_models)
00617 self.reset_attached_models = bool(self.reset_attached_models)
00618 start = end
00619 end += 4
00620 (length,) = _struct_I.unpack(str[start:end])
00621 start = end
00622 end += length
00623 if python3:
00624 self.desired_frame = str[start:end].decode('utf-8')
00625 else:
00626 self.desired_frame = str[start:end]
00627 return self
00628 except struct.error as e:
00629 raise genpy.DeserializationError(e) #most likely buffer underfill
00630
00631
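# --- Round-trip sketch (not part of the generated code) -----------------------
# serialize() writes the little-endian ROS wire format into any object with a
# write() method, and deserialize() parses such bytes back into the message.
# A minimal round trip, assuming `req` is an instance of this class:
#
#   from io import BytesIO
#   buf = BytesIO()
#   req.serialize(buf)
#   copy = TabletopCollisionMapProcessingRequest().deserialize(buf.getvalue())
# -------------------------------------------------------------------------------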
00632 def serialize_numpy(self, buff, numpy):
00633 """
00634 serialize message with numpy array types into buffer
00635 :param buff: buffer, ``StringIO``
00636 :param numpy: numpy python module
00637 """
00638 try:
00639 _x = self
00640 buff.write(_struct_3I.pack(_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs))
00641 _x = self.detection_result.table.pose.header.frame_id
00642 length = len(_x)
00643 if python3 or type(_x) == unicode:
00644 _x = _x.encode('utf-8')
00645 length = len(_x)
00646 buff.write(struct.pack('<I%ss'%length, length, _x))
00647 _x = self
00648 buff.write(_struct_7d4f.pack(_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max))
00649 length = len(self.detection_result.table.convex_hull.triangles)
00650 buff.write(_struct_I.pack(length))
00651 for val1 in self.detection_result.table.convex_hull.triangles:
00652 buff.write(val1.vertex_indices.tostring())
00653 length = len(self.detection_result.table.convex_hull.vertices)
00654 buff.write(_struct_I.pack(length))
00655 for val1 in self.detection_result.table.convex_hull.vertices:
00656 _x = val1
00657 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00658 length = len(self.detection_result.clusters)
00659 buff.write(_struct_I.pack(length))
00660 for val1 in self.detection_result.clusters:
00661 _v19 = val1.header
00662 buff.write(_struct_I.pack(_v19.seq))
00663 _v20 = _v19.stamp
00664 _x = _v20
00665 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00666 _x = _v19.frame_id
00667 length = len(_x)
00668 if python3 or type(_x) == unicode:
00669 _x = _x.encode('utf-8')
00670 length = len(_x)
00671 buff.write(struct.pack('<I%ss'%length, length, _x))
00672 length = len(val1.points)
00673 buff.write(_struct_I.pack(length))
00674 for val2 in val1.points:
00675 _x = val2
00676 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
00677 length = len(val1.channels)
00678 buff.write(_struct_I.pack(length))
00679 for val2 in val1.channels:
00680 _x = val2.name
00681 length = len(_x)
00682 if python3 or type(_x) == unicode:
00683 _x = _x.encode('utf-8')
00684 length = len(_x)
00685 buff.write(struct.pack('<I%ss'%length, length, _x))
00686 length = len(val2.values)
00687 buff.write(_struct_I.pack(length))
00688 pattern = '<%sf'%length
00689 buff.write(val2.values.tostring())
00690 length = len(self.detection_result.models)
00691 buff.write(_struct_I.pack(length))
00692 for val1 in self.detection_result.models:
00693 length = len(val1.model_list)
00694 buff.write(_struct_I.pack(length))
00695 for val2 in val1.model_list:
00696 buff.write(_struct_i.pack(val2.model_id))
00697 _v21 = val2.type
00698 _x = _v21.key
00699 length = len(_x)
00700 if python3 or type(_x) == unicode:
00701 _x = _x.encode('utf-8')
00702 length = len(_x)
00703 buff.write(struct.pack('<I%ss'%length, length, _x))
00704 _x = _v21.db
00705 length = len(_x)
00706 if python3 or type(_x) == unicode:
00707 _x = _x.encode('utf-8')
00708 length = len(_x)
00709 buff.write(struct.pack('<I%ss'%length, length, _x))
00710 _v22 = val2.pose
00711 _v23 = _v22.header
00712 buff.write(_struct_I.pack(_v23.seq))
00713 _v24 = _v23.stamp
00714 _x = _v24
00715 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00716 _x = _v23.frame_id
00717 length = len(_x)
00718 if python3 or type(_x) == unicode:
00719 _x = _x.encode('utf-8')
00720 length = len(_x)
00721 buff.write(struct.pack('<I%ss'%length, length, _x))
00722 _v25 = _v22.pose
00723 _v26 = _v25.position
00724 _x = _v26
00725 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00726 _v27 = _v25.orientation
00727 _x = _v27
00728 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
00729 buff.write(_struct_f.pack(val2.confidence))
00730 _x = val2.detector_name
00731 length = len(_x)
00732 if python3 or type(_x) == unicode:
00733 _x = _x.encode('utf-8')
00734 length = len(_x)
00735 buff.write(struct.pack('<I%ss'%length, length, _x))
00736 length = len(self.detection_result.cluster_model_indices)
00737 buff.write(_struct_I.pack(length))
00738 pattern = '<%si'%length
00739 buff.write(self.detection_result.cluster_model_indices.tostring())
00740 _x = self
00741 buff.write(_struct_i2B.pack(_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models))
00742 _x = self.desired_frame
00743 length = len(_x)
00744 if python3 or type(_x) == unicode:
00745 _x = _x.encode('utf-8')
00746 length = len(_x)
00747 buff.write(struct.pack('<I%ss'%length, length, _x))
00748 except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
00749 except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
00750
00751 def deserialize_numpy(self, str, numpy):
00752 """
00753 unpack serialized message in str into this message instance using numpy for array types
00754 :param str: byte array of serialized message, ``str``
00755 :param numpy: numpy python module
00756 """
00757 try:
00758 if self.detection_result is None:
00759 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00760 end = 0
00761 _x = self
00762 start = end
00763 end += 12
00764 (_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
00765 start = end
00766 end += 4
00767 (length,) = _struct_I.unpack(str[start:end])
00768 start = end
00769 end += length
00770 if python3:
00771 self.detection_result.table.pose.header.frame_id = str[start:end].decode('utf-8')
00772 else:
00773 self.detection_result.table.pose.header.frame_id = str[start:end]
00774 _x = self
00775 start = end
00776 end += 72
00777 (_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max,) = _struct_7d4f.unpack(str[start:end])
00778 start = end
00779 end += 4
00780 (length,) = _struct_I.unpack(str[start:end])
00781 self.detection_result.table.convex_hull.triangles = []
00782 for i in range(0, length):
00783 val1 = shape_msgs.msg.MeshTriangle()
00784 start = end
00785 end += 12
00786 val1.vertex_indices = numpy.frombuffer(str[start:end], dtype=numpy.uint32, count=3)
00787 self.detection_result.table.convex_hull.triangles.append(val1)
00788 start = end
00789 end += 4
00790 (length,) = _struct_I.unpack(str[start:end])
00791 self.detection_result.table.convex_hull.vertices = []
00792 for i in range(0, length):
00793 val1 = geometry_msgs.msg.Point()
00794 _x = val1
00795 start = end
00796 end += 24
00797 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00798 self.detection_result.table.convex_hull.vertices.append(val1)
00799 start = end
00800 end += 4
00801 (length,) = _struct_I.unpack(str[start:end])
00802 self.detection_result.clusters = []
00803 for i in range(0, length):
00804 val1 = sensor_msgs.msg.PointCloud()
00805 _v28 = val1.header
00806 start = end
00807 end += 4
00808 (_v28.seq,) = _struct_I.unpack(str[start:end])
00809 _v29 = _v28.stamp
00810 _x = _v29
00811 start = end
00812 end += 8
00813 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00814 start = end
00815 end += 4
00816 (length,) = _struct_I.unpack(str[start:end])
00817 start = end
00818 end += length
00819 if python3:
00820 _v28.frame_id = str[start:end].decode('utf-8')
00821 else:
00822 _v28.frame_id = str[start:end]
00823 start = end
00824 end += 4
00825 (length,) = _struct_I.unpack(str[start:end])
00826 val1.points = []
00827 for i in range(0, length):
00828 val2 = geometry_msgs.msg.Point32()
00829 _x = val2
00830 start = end
00831 end += 12
00832 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
00833 val1.points.append(val2)
00834 start = end
00835 end += 4
00836 (length,) = _struct_I.unpack(str[start:end])
00837 val1.channels = []
00838 for i in range(0, length):
00839 val2 = sensor_msgs.msg.ChannelFloat32()
00840 start = end
00841 end += 4
00842 (length,) = _struct_I.unpack(str[start:end])
00843 start = end
00844 end += length
00845 if python3:
00846 val2.name = str[start:end].decode('utf-8')
00847 else:
00848 val2.name = str[start:end]
00849 start = end
00850 end += 4
00851 (length,) = _struct_I.unpack(str[start:end])
00852 pattern = '<%sf'%length
00853 start = end
00854 end += struct.calcsize(pattern)
00855 val2.values = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=length)
00856 val1.channels.append(val2)
00857 self.detection_result.clusters.append(val1)
00858 start = end
00859 end += 4
00860 (length,) = _struct_I.unpack(str[start:end])
00861 self.detection_result.models = []
00862 for i in range(0, length):
00863 val1 = household_objects_database_msgs.msg.DatabaseModelPoseList()
00864 start = end
00865 end += 4
00866 (length,) = _struct_I.unpack(str[start:end])
00867 val1.model_list = []
00868 for i in range(0, length):
00869 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
00870 start = end
00871 end += 4
00872 (val2.model_id,) = _struct_i.unpack(str[start:end])
00873 _v30 = val2.type
00874 start = end
00875 end += 4
00876 (length,) = _struct_I.unpack(str[start:end])
00877 start = end
00878 end += length
00879 if python3:
00880 _v30.key = str[start:end].decode('utf-8')
00881 else:
00882 _v30.key = str[start:end]
00883 start = end
00884 end += 4
00885 (length,) = _struct_I.unpack(str[start:end])
00886 start = end
00887 end += length
00888 if python3:
00889 _v30.db = str[start:end].decode('utf-8')
00890 else:
00891 _v30.db = str[start:end]
00892 _v31 = val2.pose
00893 _v32 = _v31.header
00894 start = end
00895 end += 4
00896 (_v32.seq,) = _struct_I.unpack(str[start:end])
00897 _v33 = _v32.stamp
00898 _x = _v33
00899 start = end
00900 end += 8
00901 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00902 start = end
00903 end += 4
00904 (length,) = _struct_I.unpack(str[start:end])
00905 start = end
00906 end += length
00907 if python3:
00908 _v32.frame_id = str[start:end].decode('utf-8')
00909 else:
00910 _v32.frame_id = str[start:end]
00911 _v34 = _v31.pose
00912 _v35 = _v34.position
00913 _x = _v35
00914 start = end
00915 end += 24
00916 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00917 _v36 = _v34.orientation
00918 _x = _v36
00919 start = end
00920 end += 32
00921 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
00922 start = end
00923 end += 4
00924 (val2.confidence,) = _struct_f.unpack(str[start:end])
00925 start = end
00926 end += 4
00927 (length,) = _struct_I.unpack(str[start:end])
00928 start = end
00929 end += length
00930 if python3:
00931 val2.detector_name = str[start:end].decode('utf-8')
00932 else:
00933 val2.detector_name = str[start:end]
00934 val1.model_list.append(val2)
00935 self.detection_result.models.append(val1)
00936 start = end
00937 end += 4
00938 (length,) = _struct_I.unpack(str[start:end])
00939 pattern = '<%si'%length
00940 start = end
00941 end += struct.calcsize(pattern)
00942 self.detection_result.cluster_model_indices = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
00943 _x = self
00944 start = end
00945 end += 6
00946 (_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models,) = _struct_i2B.unpack(str[start:end])
00947 self.reset_collision_models = bool(self.reset_collision_models)
00948 self.reset_attached_models = bool(self.reset_attached_models)
00949 start = end
00950 end += 4
00951 (length,) = _struct_I.unpack(str[start:end])
00952 start = end
00953 end += length
00954 if python3:
00955 self.desired_frame = str[start:end].decode('utf-8')
00956 else:
00957 self.desired_frame = str[start:end]
00958 return self
00959 except struct.error as e:
00960 raise genpy.DeserializationError(e) #most likely buffer underfill
00961
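# The precompiled Struct objects below are shared by the (de)serialization
# methods above. Every format string is little-endian ('<'); for example,
# '<3I' packs three uint32 values (the Header seq/stamp fields) and '<7d4f'
# packs the seven float64 pose components followed by the four float32 table
# extents.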
00962 _struct_I = genpy.struct_I
00963 _struct_i2B = struct.Struct("<i2B")
00964 _struct_7d4f = struct.Struct("<7d4f")
00965 _struct_f = struct.Struct("<f")
00966 _struct_2I = struct.Struct("<2I")
00967 _struct_i = struct.Struct("<i")
00968 _struct_3I = struct.Struct("<3I")
00969 _struct_4d = struct.Struct("<4d")
00970 _struct_3f = struct.Struct("<3f")
00971 _struct_3d = struct.Struct("<3d")
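# --- Service usage sketch (not part of the generated code) --------------------
# The Request class above and the Response class below are normally exercised
# together through the generated service type via rospy; the service name used
# here is an assumption, not something defined in this file:
#
#   import rospy
#   from tabletop_collision_map_processing.srv import TabletopCollisionMapProcessing
#
#   rospy.wait_for_service('/tabletop_collision_map_processing')
#   proc = rospy.ServiceProxy('/tabletop_collision_map_processing',
#                             TabletopCollisionMapProcessing)
#   resp = proc(req)  # req built as in the construction sketch above
# -------------------------------------------------------------------------------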
00972 """autogenerated by genpy from tabletop_collision_map_processing/TabletopCollisionMapProcessingResponse.msg. Do not edit."""
00973 import sys
00974 python3 = True if sys.hexversion > 0x03000000 else False
00975 import genpy
00976 import struct
00977
00978 import manipulation_msgs.msg
00979 import geometry_msgs.msg
00980 import object_recognition_msgs.msg
00981 import sensor_msgs.msg
00982 import household_objects_database_msgs.msg
00983 import std_msgs.msg
00984
00985 class TabletopCollisionMapProcessingResponse(genpy.Message):
00986 _md5sum = "4ec7913db348a0083551a1e3d51778b8"
00987 _type = "tabletop_collision_map_processing/TabletopCollisionMapProcessingResponse"
00988 _has_header = False #flag to mark the presence of a Header object
00989 _full_text = """
00990
00991 manipulation_msgs/GraspableObject[] graspable_objects
00992
00993
00994
00995 string[] collision_object_names
00996
00997
00998 string collision_support_surface_name
00999
01000
01001 ================================================================================
01002 MSG: manipulation_msgs/GraspableObject
01003 # an object that the object_manipulator can work on
01004
01005 # a graspable object can be represented in multiple ways. This message
01006 # can contain all of them. Which one is actually used is up to the receiver
01007 # of this message. When adding new representations, one must be careful that
01008 # they have reasonable lightweight defaults indicating that that particular
01009 # representation is not available.
01010
01011 # the tf frame to be used as a reference frame when combining information from
01012 # the different representations below
01013 string reference_frame_id
01014
01015 # potential recognition results from a database of models
01016 # all poses are relative to the object reference pose
01017 household_objects_database_msgs/DatabaseModelPose[] potential_models
01018
01019 # the point cloud itself
01020 sensor_msgs/PointCloud cluster
01021
01022 # a region of a PointCloud2 of interest
01023 SceneRegion region
01024
01025 # the name that this object has in the collision environment
01026 string collision_name
01027 ================================================================================
01028 MSG: household_objects_database_msgs/DatabaseModelPose
01029 # Informs that a specific model from the Model Database has been
01030 # identified at a certain location
01031
01032 # the database id of the model
01033 int32 model_id
01034
01035 # if the object was recognized by the ORK pipeline, its type will be in here
01036 # if this is not empty, then the string in here will be converted to a household_objects_database id
01037 # leave this empty if providing an id in the model_id field
01038 object_recognition_msgs/ObjectType type
01039
01040 # the pose that it can be found in
01041 geometry_msgs/PoseStamped pose
01042
01043 # a measure of the confidence level in this detection result
01044 float32 confidence
01045
01046 # the name of the object detector that generated this detection result
01047 string detector_name
01048
01049 ================================================================================
01050 MSG: object_recognition_msgs/ObjectType
01051 ################################################## OBJECT ID #########################################################
01052
01053 # Contains information about the type of a found object. Those two sets of parameters together uniquely define an
01054 # object
01055
01056 # The key of the found object: the unique identifier in the given db
01057 string key
01058
01059 # The db parameters stored as a JSON/compressed YAML string. An object id does not make sense without the corresponding
01060 # database. E.g., in object_recognition, it can look like: "{'type':'CouchDB', 'root':'http://localhost'}"
01061 # There is no conventional format for those parameters and it's nice to keep that flexibility.
01062 # The object_recognition_core has a generic DB type that can read those fields
01063 # Current examples:
01064 # For CouchDB:
01065 # type: 'CouchDB'
01066 # root: 'http://localhost:5984'
01067 # collection: 'object_recognition'
01068 # For SQL household database:
01069 # type: 'SqlHousehold'
01070 # host: 'wgs36'
01071 # port: 5432
01072 # user: 'willow'
01073 # password: 'willow'
01074 # name: 'household_objects'
01075 # module: 'tabletop'
01076 string db
01077
01078 ================================================================================
01079 MSG: geometry_msgs/PoseStamped
01080 # A Pose with reference coordinate frame and timestamp
01081 Header header
01082 Pose pose
01083
01084 ================================================================================
01085 MSG: std_msgs/Header
01086 # Standard metadata for higher-level stamped data types.
01087 # This is generally used to communicate timestamped data
01088 # in a particular coordinate frame.
01089 #
01090 # sequence ID: consecutively increasing ID
01091 uint32 seq
01092 #Two-integer timestamp that is expressed as:
01093 # * stamp.secs: seconds (stamp_secs) since epoch
01094 # * stamp.nsecs: nanoseconds since stamp_secs
01095 # time-handling sugar is provided by the client library
01096 time stamp
01097 #Frame this data is associated with
01098 # 0: no frame
01099 # 1: global frame
01100 string frame_id
01101
01102 ================================================================================
01103 MSG: geometry_msgs/Pose
01104 # A representation of pose in free space, composed of position and orientation.
01105 Point position
01106 Quaternion orientation
01107
01108 ================================================================================
01109 MSG: geometry_msgs/Point
01110 # This contains the position of a point in free space
01111 float64 x
01112 float64 y
01113 float64 z
01114
01115 ================================================================================
01116 MSG: geometry_msgs/Quaternion
01117 # This represents an orientation in free space in quaternion form.
01118
01119 float64 x
01120 float64 y
01121 float64 z
01122 float64 w
01123
01124 ================================================================================
01125 MSG: sensor_msgs/PointCloud
01126 # This message holds a collection of 3d points, plus optional additional
01127 # information about each point.
01128
01129 # Time of sensor data acquisition, coordinate frame ID.
01130 Header header
01131
01132 # Array of 3d points. Each Point32 should be interpreted as a 3d point
01133 # in the frame given in the header.
01134 geometry_msgs/Point32[] points
01135
01136 # Each channel should have the same number of elements as points array,
01137 # and the data in each channel should correspond 1:1 with each point.
01138 # Channel names in common practice are listed in ChannelFloat32.msg.
01139 ChannelFloat32[] channels
01140
01141 ================================================================================
01142 MSG: geometry_msgs/Point32
01143 # This contains the position of a point in free space (with 32 bits of precision).
01144 # It is recommended to use Point wherever possible instead of Point32.
01145 #
01146 # This recommendation is to promote interoperability.
01147 #
01148 # This message is designed to take up less space when sending
01149 # lots of points at once, as in the case of a PointCloud.
01150
01151 float32 x
01152 float32 y
01153 float32 z
01154 ================================================================================
01155 MSG: sensor_msgs/ChannelFloat32
01156 # This message is used by the PointCloud message to hold optional data
01157 # associated with each point in the cloud. The length of the values
01158 # array should be the same as the length of the points array in the
01159 # PointCloud, and each value should be associated with the corresponding
01160 # point.
01161
01162 # Channel names in existing practice include:
01163 # "u", "v" - row and column (respectively) in the left stereo image.
01164 # This is opposite to usual conventions but remains for
01165 # historical reasons. The newer PointCloud2 message has no
01166 # such problem.
01167 # "rgb" - For point clouds produced by color stereo cameras. uint8
01168 # (R,G,B) values packed into the least significant 24 bits,
01169 # in order.
01170 # "intensity" - laser or pixel intensity.
01171 # "distance"
01172
01173 # The channel name should give semantics of the channel (e.g.
01174 # "intensity" instead of "value").
01175 string name
01176
01177 # The values array should be 1-1 with the elements of the associated
01178 # PointCloud.
01179 float32[] values
01180
01181 ================================================================================
01182 MSG: manipulation_msgs/SceneRegion
01183 # Point cloud
01184 sensor_msgs/PointCloud2 cloud
01185
01186 # Indices for the region of interest
01187 int32[] mask
01188
01189 # One of the corresponding 2D images, if applicable
01190 sensor_msgs/Image image
01191
01192 # The disparity image, if applicable
01193 sensor_msgs/Image disparity_image
01194
01195 # Camera info for the camera that took the image
01196 sensor_msgs/CameraInfo cam_info
01197
01198 # a 3D region of interest for grasp planning
01199 geometry_msgs/PoseStamped roi_box_pose
01200 geometry_msgs/Vector3 roi_box_dims
01201
01202 ================================================================================
01203 MSG: sensor_msgs/PointCloud2
01204 # This message holds a collection of N-dimensional points, which may
01205 # contain additional information such as normals, intensity, etc. The
01206 # point data is stored as a binary blob, its layout described by the
01207 # contents of the "fields" array.
01208
01209 # The point cloud data may be organized 2d (image-like) or 1d
01210 # (unordered). Point clouds organized as 2d images may be produced by
01211 # camera depth sensors such as stereo or time-of-flight.
01212
01213 # Time of sensor data acquisition, and the coordinate frame ID (for 3d
01214 # points).
01215 Header header
01216
01217 # 2D structure of the point cloud. If the cloud is unordered, height is
01218 # 1 and width is the length of the point cloud.
01219 uint32 height
01220 uint32 width
01221
01222 # Describes the channels and their layout in the binary data blob.
01223 PointField[] fields
01224
01225 bool is_bigendian # Is this data bigendian?
01226 uint32 point_step # Length of a point in bytes
01227 uint32 row_step # Length of a row in bytes
01228 uint8[] data # Actual point data, size is (row_step*height)
01229
01230 bool is_dense # True if there are no invalid points
01231
01232 ================================================================================
01233 MSG: sensor_msgs/PointField
01234 # This message holds the description of one point entry in the
01235 # PointCloud2 message format.
01236 uint8 INT8 = 1
01237 uint8 UINT8 = 2
01238 uint8 INT16 = 3
01239 uint8 UINT16 = 4
01240 uint8 INT32 = 5
01241 uint8 UINT32 = 6
01242 uint8 FLOAT32 = 7
01243 uint8 FLOAT64 = 8
01244
01245 string name # Name of field
01246 uint32 offset # Offset from start of point struct
01247 uint8 datatype # Datatype enumeration, see above
01248 uint32 count # How many elements in the field
01249
01250 ================================================================================
01251 MSG: sensor_msgs/Image
01252 # This message contains an uncompressed image
01253 # (0, 0) is at top-left corner of image
01254 #
01255
01256 Header header # Header timestamp should be acquisition time of image
01257 # Header frame_id should be optical frame of camera
01258 # origin of frame should be optical center of camera
01259 # +x should point to the right in the image
01260 # +y should point down in the image
01261 # +z should point into the plane of the image
01262 # If the frame_id here and the frame_id of the CameraInfo
01263 # message associated with the image conflict
01264 # the behavior is undefined
01265
01266 uint32 height # image height, that is, number of rows
01267 uint32 width # image width, that is, number of columns
01268
01269 # The legal values for encoding are in file src/image_encodings.cpp
01270 # If you want to standardize a new string format, join
01271 # ros-users@lists.sourceforge.net and send an email proposing a new encoding.
01272
01273 string encoding # Encoding of pixels -- channel meaning, ordering, size
01274 # taken from the list of strings in include/sensor_msgs/image_encodings.h
01275
01276 uint8 is_bigendian # is this data bigendian?
01277 uint32 step # Full row length in bytes
01278 uint8[] data # actual matrix data, size is (step * rows)
01279
01280 ================================================================================
01281 MSG: sensor_msgs/CameraInfo
01282 # This message defines meta information for a camera. It should be in a
01283 # camera namespace on topic "camera_info" and accompanied by up to five
01284 # image topics named:
01285 #
01286 # image_raw - raw data from the camera driver, possibly Bayer encoded
01287 # image - monochrome, distorted
01288 # image_color - color, distorted
01289 # image_rect - monochrome, rectified
01290 # image_rect_color - color, rectified
01291 #
01292 # The image_pipeline contains packages (image_proc, stereo_image_proc)
01293 # for producing the four processed image topics from image_raw and
01294 # camera_info. The meaning of the camera parameters are described in
01295 # detail at http://www.ros.org/wiki/image_pipeline/CameraInfo.
01296 #
01297 # The image_geometry package provides a user-friendly interface to
01298 # common operations using this meta information. If you want to, e.g.,
01299 # project a 3d point into image coordinates, we strongly recommend
01300 # using image_geometry.
01301 #
01302 # If the camera is uncalibrated, the matrices D, K, R, P should be left
01303 # zeroed out. In particular, clients may assume that K[0] == 0.0
01304 # indicates an uncalibrated camera.
01305
01306 #######################################################################
01307 # Image acquisition info #
01308 #######################################################################
01309
01310 # Time of image acquisition, camera coordinate frame ID
01311 Header header # Header timestamp should be acquisition time of image
01312 # Header frame_id should be optical frame of camera
01313 # origin of frame should be optical center of camera
01314 # +x should point to the right in the image
01315 # +y should point down in the image
01316 # +z should point into the plane of the image
01317
01318
01319 #######################################################################
01320 # Calibration Parameters #
01321 #######################################################################
01322 # These are fixed during camera calibration. Their values will be the #
01323 # same in all messages until the camera is recalibrated. Note that #
01324 # self-calibrating systems may "recalibrate" frequently. #
01325 # #
01326 # The internal parameters can be used to warp a raw (distorted) image #
01327 # to: #
01328 # 1. An undistorted image (requires D and K) #
01329 # 2. A rectified image (requires D, K, R) #
01330 # The projection matrix P projects 3D points into the rectified image.#
01331 #######################################################################
01332
01333 # The image dimensions with which the camera was calibrated. Normally
01334 # this will be the full camera resolution in pixels.
01335 uint32 height
01336 uint32 width
01337
01338 # The distortion model used. Supported models are listed in
01339 # sensor_msgs/distortion_models.h. For most cameras, "plumb_bob" - a
01340 # simple model of radial and tangential distortion - is sufficient.
01341 string distortion_model
01342
01343 # The distortion parameters, size depending on the distortion model.
01344 # For "plumb_bob", the 5 parameters are: (k1, k2, t1, t2, k3).
01345 float64[] D
01346
01347 # Intrinsic camera matrix for the raw (distorted) images.
01348 # [fx 0 cx]
01349 # K = [ 0 fy cy]
01350 # [ 0 0 1]
01351 # Projects 3D points in the camera coordinate frame to 2D pixel
01352 # coordinates using the focal lengths (fx, fy) and principal point
01353 # (cx, cy).
01354 float64[9] K # 3x3 row-major matrix
01355
01356 # Rectification matrix (stereo cameras only)
01357 # A rotation matrix aligning the camera coordinate system to the ideal
01358 # stereo image plane so that epipolar lines in both stereo images are
01359 # parallel.
01360 float64[9] R # 3x3 row-major matrix
01361
01362 # Projection/camera matrix
01363 # [fx' 0 cx' Tx]
01364 # P = [ 0 fy' cy' Ty]
01365 # [ 0 0 1 0]
01366 # By convention, this matrix specifies the intrinsic (camera) matrix
01367 # of the processed (rectified) image. That is, the left 3x3 portion
01368 # is the normal camera intrinsic matrix for the rectified image.
01369 # It projects 3D points in the camera coordinate frame to 2D pixel
01370 # coordinates using the focal lengths (fx', fy') and principal point
01371 # (cx', cy') - these may differ from the values in K.
01372 # For monocular cameras, Tx = Ty = 0. Normally, monocular cameras will
01373 # also have R = the identity and P[1:3,1:3] = K.
01374 # For a stereo pair, the fourth column [Tx Ty 0]' is related to the
01375 # position of the optical center of the second camera in the first
01376 # camera's frame. We assume Tz = 0 so both cameras are in the same
01377 # stereo image plane. The first camera always has Tx = Ty = 0. For
01378 # the right (second) camera of a horizontal stereo pair, Ty = 0 and
01379 # Tx = -fx' * B, where B is the baseline between the cameras.
01380 # Given a 3D point [X Y Z]', the projection (x, y) of the point onto
01381 # the rectified image is given by:
01382 # [u v w]' = P * [X Y Z 1]'
01383 # x = u / w
01384 # y = v / w
01385 # This holds for both images of a stereo pair.
01386 float64[12] P # 3x4 row-major matrix
01387
01388
01389 #######################################################################
01390 # Operational Parameters #
01391 #######################################################################
01392 # These define the image region actually captured by the camera #
01393 # driver. Although they affect the geometry of the output image, they #
01394 # may be changed freely without recalibrating the camera. #
01395 #######################################################################
01396
01397 # Binning refers here to any camera setting which combines rectangular
01398 # neighborhoods of pixels into larger "super-pixels." It reduces the
01399 # resolution of the output image to
01400 # (width / binning_x) x (height / binning_y).
01401 # The default values binning_x = binning_y = 0 is considered the same
01402 # as binning_x = binning_y = 1 (no subsampling).
01403 uint32 binning_x
01404 uint32 binning_y
01405
01406 # Region of interest (subwindow of full camera resolution), given in
01407 # full resolution (unbinned) image coordinates. A particular ROI
01408 # always denotes the same window of pixels on the camera sensor,
01409 # regardless of binning settings.
01410 # The default setting of roi (all values 0) is considered the same as
01411 # full resolution (roi.width = width, roi.height = height).
01412 RegionOfInterest roi
01413
01414 ================================================================================
01415 MSG: sensor_msgs/RegionOfInterest
01416 # This message is used to specify a region of interest within an image.
01417 #
01418 # When used to specify the ROI setting of the camera when the image was
01419 # taken, the height and width fields should either match the height and
01420 # width fields of the associated image, or be set to height = width = 0
01421 # to indicate that the full resolution image was captured.
01422
01423 uint32 x_offset # Leftmost pixel of the ROI
01424 # (0 if the ROI includes the left edge of the image)
01425 uint32 y_offset # Topmost pixel of the ROI
01426 # (0 if the ROI includes the top edge of the image)
01427 uint32 height # Height of ROI
01428 uint32 width # Width of ROI
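# For example, a 100-pixel-wide, 200-pixel-tall window anchored at the top-left
# corner of the image has x_offset = 0, y_offset = 0, height = 200, width = 100.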
01429
01430 # True if a distinct rectified ROI should be calculated from the "raw"
01431 # ROI in this message. Typically this should be False if the full image
01432 # is captured (ROI not used), and True if a subwindow is captured (ROI
01433 # used).
01434 bool do_rectify
01435
01436 ================================================================================
01437 MSG: geometry_msgs/Vector3
01438 # This represents a vector in free space.
01439
01440 float64 x
01441 float64 y
01442 float64 z
01443 """
01444 __slots__ = ['graspable_objects','collision_object_names','collision_support_surface_name']
01445 _slot_types = ['manipulation_msgs/GraspableObject[]','string[]','string']
01446
01447 def __init__(self, *args, **kwds):
01448 """
01449 Constructor. Any message fields that are implicitly/explicitly
01450 set to None will be assigned a default value. The recommended
01451 use is keyword arguments, as this is more robust to future message
01452 changes. You cannot mix in-order arguments and keyword arguments.
01453
01454 The available fields are:
01455 graspable_objects, collision_object_names, collision_support_surface_name
01456
01457 :param args: complete set of field values, in .msg order
01458 :param kwds: use keyword arguments corresponding to message field names
01459 to set specific fields.
01460 """
01461 if args or kwds:
01462 super(TabletopCollisionMapProcessingResponse, self).__init__(*args, **kwds)
01463 #message fields cannot be None, assign default values for those that are
01464 if self.graspable_objects is None:
01465 self.graspable_objects = []
01466 if self.collision_object_names is None:
01467 self.collision_object_names = []
01468 if self.collision_support_surface_name is None:
01469 self.collision_support_surface_name = ''
01470 else:
01471 self.graspable_objects = []
01472 self.collision_object_names = []
01473 self.collision_support_surface_name = ''
01474
01475 def _get_types(self):
01476 """
01477 internal API method
01478 """
01479 return self._slot_types
01480
01481 def serialize(self, buff):
01482 """
01483 serialize message into buffer
01484 :param buff: buffer, ``StringIO``
01485 """
01486 try:
01487 length = len(self.graspable_objects)
01488 buff.write(_struct_I.pack(length))
01489 for val1 in self.graspable_objects:
01490 _x = val1.reference_frame_id
01491 length = len(_x)
01492 if python3 or type(_x) == unicode:
01493 _x = _x.encode('utf-8')
01494 length = len(_x)
01495 buff.write(struct.pack('<I%ss'%length, length, _x))
01496 length = len(val1.potential_models)
01497 buff.write(_struct_I.pack(length))
01498 for val2 in val1.potential_models:
01499 buff.write(_struct_i.pack(val2.model_id))
01500 _v37 = val2.type
01501 _x = _v37.key
01502 length = len(_x)
01503 if python3 or type(_x) == unicode:
01504 _x = _x.encode('utf-8')
01505 length = len(_x)
01506 buff.write(struct.pack('<I%ss'%length, length, _x))
01507 _x = _v37.db
01508 length = len(_x)
01509 if python3 or type(_x) == unicode:
01510 _x = _x.encode('utf-8')
01511 length = len(_x)
01512 buff.write(struct.pack('<I%ss'%length, length, _x))
01513 _v38 = val2.pose
01514 _v39 = _v38.header
01515 buff.write(_struct_I.pack(_v39.seq))
01516 _v40 = _v39.stamp
01517 _x = _v40
01518 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01519 _x = _v39.frame_id
01520 length = len(_x)
01521 if python3 or type(_x) == unicode:
01522 _x = _x.encode('utf-8')
01523 length = len(_x)
01524 buff.write(struct.pack('<I%ss'%length, length, _x))
01525 _v41 = _v38.pose
01526 _v42 = _v41.position
01527 _x = _v42
01528 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01529 _v43 = _v41.orientation
01530 _x = _v43
01531 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
01532 buff.write(_struct_f.pack(val2.confidence))
01533 _x = val2.detector_name
01534 length = len(_x)
01535 if python3 or type(_x) == unicode:
01536 _x = _x.encode('utf-8')
01537 length = len(_x)
01538 buff.write(struct.pack('<I%ss'%length, length, _x))
01539 _v44 = val1.cluster
01540 _v45 = _v44.header
01541 buff.write(_struct_I.pack(_v45.seq))
01542 _v46 = _v45.stamp
01543 _x = _v46
01544 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01545 _x = _v45.frame_id
01546 length = len(_x)
01547 if python3 or type(_x) == unicode:
01548 _x = _x.encode('utf-8')
01549 length = len(_x)
01550 buff.write(struct.pack('<I%ss'%length, length, _x))
01551 length = len(_v44.points)
01552 buff.write(_struct_I.pack(length))
01553 for val3 in _v44.points:
01554 _x = val3
01555 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
01556 length = len(_v44.channels)
01557 buff.write(_struct_I.pack(length))
01558 for val3 in _v44.channels:
01559 _x = val3.name
01560 length = len(_x)
01561 if python3 or type(_x) == unicode:
01562 _x = _x.encode('utf-8')
01563 length = len(_x)
01564 buff.write(struct.pack('<I%ss'%length, length, _x))
01565 length = len(val3.values)
01566 buff.write(_struct_I.pack(length))
01567 pattern = '<%sf'%length
01568 buff.write(struct.pack(pattern, *val3.values))
01569 _v47 = val1.region
01570 _v48 = _v47.cloud
01571 _v49 = _v48.header
01572 buff.write(_struct_I.pack(_v49.seq))
01573 _v50 = _v49.stamp
01574 _x = _v50
01575 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01576 _x = _v49.frame_id
01577 length = len(_x)
01578 if python3 or type(_x) == unicode:
01579 _x = _x.encode('utf-8')
01580 length = len(_x)
01581 buff.write(struct.pack('<I%ss'%length, length, _x))
01582 _x = _v48
01583 buff.write(_struct_2I.pack(_x.height, _x.width))
01584 length = len(_v48.fields)
01585 buff.write(_struct_I.pack(length))
01586 for val4 in _v48.fields:
01587 _x = val4.name
01588 length = len(_x)
01589 if python3 or type(_x) == unicode:
01590 _x = _x.encode('utf-8')
01591 length = len(_x)
01592 buff.write(struct.pack('<I%ss'%length, length, _x))
01593 _x = val4
01594 buff.write(_struct_IBI.pack(_x.offset, _x.datatype, _x.count))
01595 _x = _v48
01596 buff.write(_struct_B2I.pack(_x.is_bigendian, _x.point_step, _x.row_step))
01597 _x = _v48.data
01598 length = len(_x)
01599 # if the data is stored as a list, serialize it as bytes rather than as a string
01600 if type(_x) in [list, tuple]:
01601 buff.write(struct.pack('<I%sB'%length, length, *_x))
01602 else:
01603 buff.write(struct.pack('<I%ss'%length, length, _x))
01604 buff.write(_struct_B.pack(_v48.is_dense))
01605 length = len(_v47.mask)
01606 buff.write(_struct_I.pack(length))
01607 pattern = '<%si'%length
01608 buff.write(struct.pack(pattern, *_v47.mask))
01609 _v51 = _v47.image
01610 _v52 = _v51.header
01611 buff.write(_struct_I.pack(_v52.seq))
01612 _v53 = _v52.stamp
01613 _x = _v53
01614 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01615 _x = _v52.frame_id
01616 length = len(_x)
01617 if python3 or type(_x) == unicode:
01618 _x = _x.encode('utf-8')
01619 length = len(_x)
01620 buff.write(struct.pack('<I%ss'%length, length, _x))
01621 _x = _v51
01622 buff.write(_struct_2I.pack(_x.height, _x.width))
01623 _x = _v51.encoding
01624 length = len(_x)
01625 if python3 or type(_x) == unicode:
01626 _x = _x.encode('utf-8')
01627 length = len(_x)
01628 buff.write(struct.pack('<I%ss'%length, length, _x))
01629 _x = _v51
01630 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01631 _x = _v51.data
01632 length = len(_x)
01633 # if the data is stored as a list, serialize it as bytes rather than as a string
01634 if type(_x) in [list, tuple]:
01635 buff.write(struct.pack('<I%sB'%length, length, *_x))
01636 else:
01637 buff.write(struct.pack('<I%ss'%length, length, _x))
01638 _v54 = _v47.disparity_image
01639 _v55 = _v54.header
01640 buff.write(_struct_I.pack(_v55.seq))
01641 _v56 = _v55.stamp
01642 _x = _v56
01643 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01644 _x = _v55.frame_id
01645 length = len(_x)
01646 if python3 or type(_x) == unicode:
01647 _x = _x.encode('utf-8')
01648 length = len(_x)
01649 buff.write(struct.pack('<I%ss'%length, length, _x))
01650 _x = _v54
01651 buff.write(_struct_2I.pack(_x.height, _x.width))
01652 _x = _v54.encoding
01653 length = len(_x)
01654 if python3 or type(_x) == unicode:
01655 _x = _x.encode('utf-8')
01656 length = len(_x)
01657 buff.write(struct.pack('<I%ss'%length, length, _x))
01658 _x = _v54
01659 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01660 _x = _v54.data
01661 length = len(_x)
01662 # if the data is stored as a list, serialize it as bytes rather than as a string
01663 if type(_x) in [list, tuple]:
01664 buff.write(struct.pack('<I%sB'%length, length, *_x))
01665 else:
01666 buff.write(struct.pack('<I%ss'%length, length, _x))
01667 _v57 = _v47.cam_info
01668 _v58 = _v57.header
01669 buff.write(_struct_I.pack(_v58.seq))
01670 _v59 = _v58.stamp
01671 _x = _v59
01672 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01673 _x = _v58.frame_id
01674 length = len(_x)
01675 if python3 or type(_x) == unicode:
01676 _x = _x.encode('utf-8')
01677 length = len(_x)
01678 buff.write(struct.pack('<I%ss'%length, length, _x))
01679 _x = _v57
01680 buff.write(_struct_2I.pack(_x.height, _x.width))
01681 _x = _v57.distortion_model
01682 length = len(_x)
01683 if python3 or type(_x) == unicode:
01684 _x = _x.encode('utf-8')
01685 length = len(_x)
01686 buff.write(struct.pack('<I%ss'%length, length, _x))
01687 length = len(_v57.D)
01688 buff.write(_struct_I.pack(length))
01689 pattern = '<%sd'%length
01690 buff.write(struct.pack(pattern, *_v57.D))
01691 buff.write(_struct_9d.pack(*_v57.K))
01692 buff.write(_struct_9d.pack(*_v57.R))
01693 buff.write(_struct_12d.pack(*_v57.P))
01694 _x = _v57
01695 buff.write(_struct_2I.pack(_x.binning_x, _x.binning_y))
01696 _v60 = _v57.roi
01697 _x = _v60
01698 buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
01699 _v61 = _v47.roi_box_pose
01700 _v62 = _v61.header
01701 buff.write(_struct_I.pack(_v62.seq))
01702 _v63 = _v62.stamp
01703 _x = _v63
01704 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01705 _x = _v62.frame_id
01706 length = len(_x)
01707 if python3 or type(_x) == unicode:
01708 _x = _x.encode('utf-8')
01709 length = len(_x)
01710 buff.write(struct.pack('<I%ss'%length, length, _x))
01711 _v64 = _v61.pose
01712 _v65 = _v64.position
01713 _x = _v65
01714 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01715 _v66 = _v64.orientation
01716 _x = _v66
01717 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
01718 _v67 = _v47.roi_box_dims
01719 _x = _v67
01720 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01721 _x = val1.collision_name
01722 length = len(_x)
01723 if python3 or type(_x) == unicode:
01724 _x = _x.encode('utf-8')
01725 length = len(_x)
01726 buff.write(struct.pack('<I%ss'%length, length, _x))
01727 length = len(self.collision_object_names)
01728 buff.write(_struct_I.pack(length))
01729 for val1 in self.collision_object_names:
01730 length = len(val1)
01731 if python3 or type(val1) == unicode:
01732 val1 = val1.encode('utf-8')
01733 length = len(val1)
01734 buff.write(struct.pack('<I%ss'%length, length, val1))
01735 _x = self.collision_support_surface_name
01736 length = len(_x)
01737 if python3 or type(_x) == unicode:
01738 _x = _x.encode('utf-8')
01739 length = len(_x)
01740 buff.write(struct.pack('<I%ss'%length, length, _x))
01741 except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
01742 except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
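  # A note on the wire format produced above (illustrative value shown): every string
  # field is written as a little-endian uint32 byte count followed by the raw UTF-8
  # bytes, so a hypothetical frame_id of 'base_link' is emitted as
  #   struct.pack('<I9s', 9, b'base_link')
  # Variable-length numeric arrays use the same count-then-payload layout.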
01743
01744 def deserialize(self, str):
01745 """
01746 unpack serialized message in str into this message instance
01747 :param str: byte array of serialized message, ``str``
01748 """
01749 try:
01750 if self.graspable_objects is None:
01751 self.graspable_objects = None
01752 end = 0
01753 start = end
01754 end += 4
01755 (length,) = _struct_I.unpack(str[start:end])
01756 self.graspable_objects = []
01757 for i in range(0, length):
01758 val1 = manipulation_msgs.msg.GraspableObject()
01759 start = end
01760 end += 4
01761 (length,) = _struct_I.unpack(str[start:end])
01762 start = end
01763 end += length
01764 if python3:
01765 val1.reference_frame_id = str[start:end].decode('utf-8')
01766 else:
01767 val1.reference_frame_id = str[start:end]
01768 start = end
01769 end += 4
01770 (length,) = _struct_I.unpack(str[start:end])
01771 val1.potential_models = []
01772 for i in range(0, length):
01773 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
01774 start = end
01775 end += 4
01776 (val2.model_id,) = _struct_i.unpack(str[start:end])
01777 _v68 = val2.type
01778 start = end
01779 end += 4
01780 (length,) = _struct_I.unpack(str[start:end])
01781 start = end
01782 end += length
01783 if python3:
01784 _v68.key = str[start:end].decode('utf-8')
01785 else:
01786 _v68.key = str[start:end]
01787 start = end
01788 end += 4
01789 (length,) = _struct_I.unpack(str[start:end])
01790 start = end
01791 end += length
01792 if python3:
01793 _v68.db = str[start:end].decode('utf-8')
01794 else:
01795 _v68.db = str[start:end]
01796 _v69 = val2.pose
01797 _v70 = _v69.header
01798 start = end
01799 end += 4
01800 (_v70.seq,) = _struct_I.unpack(str[start:end])
01801 _v71 = _v70.stamp
01802 _x = _v71
01803 start = end
01804 end += 8
01805 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01806 start = end
01807 end += 4
01808 (length,) = _struct_I.unpack(str[start:end])
01809 start = end
01810 end += length
01811 if python3:
01812 _v70.frame_id = str[start:end].decode('utf-8')
01813 else:
01814 _v70.frame_id = str[start:end]
01815 _v72 = _v69.pose
01816 _v73 = _v72.position
01817 _x = _v73
01818 start = end
01819 end += 24
01820 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
01821 _v74 = _v72.orientation
01822 _x = _v74
01823 start = end
01824 end += 32
01825 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
01826 start = end
01827 end += 4
01828 (val2.confidence,) = _struct_f.unpack(str[start:end])
01829 start = end
01830 end += 4
01831 (length,) = _struct_I.unpack(str[start:end])
01832 start = end
01833 end += length
01834 if python3:
01835 val2.detector_name = str[start:end].decode('utf-8')
01836 else:
01837 val2.detector_name = str[start:end]
01838 val1.potential_models.append(val2)
01839 _v75 = val1.cluster
01840 _v76 = _v75.header
01841 start = end
01842 end += 4
01843 (_v76.seq,) = _struct_I.unpack(str[start:end])
01844 _v77 = _v76.stamp
01845 _x = _v77
01846 start = end
01847 end += 8
01848 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01849 start = end
01850 end += 4
01851 (length,) = _struct_I.unpack(str[start:end])
01852 start = end
01853 end += length
01854 if python3:
01855 _v76.frame_id = str[start:end].decode('utf-8')
01856 else:
01857 _v76.frame_id = str[start:end]
01858 start = end
01859 end += 4
01860 (length,) = _struct_I.unpack(str[start:end])
01861 _v75.points = []
01862 for i in range(0, length):
01863 val3 = geometry_msgs.msg.Point32()
01864 _x = val3
01865 start = end
01866 end += 12
01867 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
01868 _v75.points.append(val3)
01869 start = end
01870 end += 4
01871 (length,) = _struct_I.unpack(str[start:end])
01872 _v75.channels = []
01873 for i in range(0, length):
01874 val3 = sensor_msgs.msg.ChannelFloat32()
01875 start = end
01876 end += 4
01877 (length,) = _struct_I.unpack(str[start:end])
01878 start = end
01879 end += length
01880 if python3:
01881 val3.name = str[start:end].decode('utf-8')
01882 else:
01883 val3.name = str[start:end]
01884 start = end
01885 end += 4
01886 (length,) = _struct_I.unpack(str[start:end])
01887 pattern = '<%sf'%length
01888 start = end
01889 end += struct.calcsize(pattern)
01890 val3.values = struct.unpack(pattern, str[start:end])
01891 _v75.channels.append(val3)
01892 _v78 = val1.region
01893 _v79 = _v78.cloud
01894 _v80 = _v79.header
01895 start = end
01896 end += 4
01897 (_v80.seq,) = _struct_I.unpack(str[start:end])
01898 _v81 = _v80.stamp
01899 _x = _v81
01900 start = end
01901 end += 8
01902 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01903 start = end
01904 end += 4
01905 (length,) = _struct_I.unpack(str[start:end])
01906 start = end
01907 end += length
01908 if python3:
01909 _v80.frame_id = str[start:end].decode('utf-8')
01910 else:
01911 _v80.frame_id = str[start:end]
01912 _x = _v79
01913 start = end
01914 end += 8
01915 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01916 start = end
01917 end += 4
01918 (length,) = _struct_I.unpack(str[start:end])
01919 _v79.fields = []
01920 for i in range(0, length):
01921 val4 = sensor_msgs.msg.PointField()
01922 start = end
01923 end += 4
01924 (length,) = _struct_I.unpack(str[start:end])
01925 start = end
01926 end += length
01927 if python3:
01928 val4.name = str[start:end].decode('utf-8')
01929 else:
01930 val4.name = str[start:end]
01931 _x = val4
01932 start = end
01933 end += 9
01934 (_x.offset, _x.datatype, _x.count,) = _struct_IBI.unpack(str[start:end])
01935 _v79.fields.append(val4)
01936 _x = _v79
01937 start = end
01938 end += 9
01939 (_x.is_bigendian, _x.point_step, _x.row_step,) = _struct_B2I.unpack(str[start:end])
01940 _v79.is_bigendian = bool(_v79.is_bigendian)
01941 start = end
01942 end += 4
01943 (length,) = _struct_I.unpack(str[start:end])
01944 start = end
01945 end += length
01946 _v79.data = str[start:end]
01947 start = end
01948 end += 1
01949 (_v79.is_dense,) = _struct_B.unpack(str[start:end])
01950 _v79.is_dense = bool(_v79.is_dense)
01951 start = end
01952 end += 4
01953 (length,) = _struct_I.unpack(str[start:end])
01954 pattern = '<%si'%length
01955 start = end
01956 end += struct.calcsize(pattern)
01957 _v78.mask = struct.unpack(pattern, str[start:end])
01958 _v82 = _v78.image
01959 _v83 = _v82.header
01960 start = end
01961 end += 4
01962 (_v83.seq,) = _struct_I.unpack(str[start:end])
01963 _v84 = _v83.stamp
01964 _x = _v84
01965 start = end
01966 end += 8
01967 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01968 start = end
01969 end += 4
01970 (length,) = _struct_I.unpack(str[start:end])
01971 start = end
01972 end += length
01973 if python3:
01974 _v83.frame_id = str[start:end].decode('utf-8')
01975 else:
01976 _v83.frame_id = str[start:end]
01977 _x = _v82
01978 start = end
01979 end += 8
01980 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01981 start = end
01982 end += 4
01983 (length,) = _struct_I.unpack(str[start:end])
01984 start = end
01985 end += length
01986 if python3:
01987 _v82.encoding = str[start:end].decode('utf-8')
01988 else:
01989 _v82.encoding = str[start:end]
01990 _x = _v82
01991 start = end
01992 end += 5
01993 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
01994 start = end
01995 end += 4
01996 (length,) = _struct_I.unpack(str[start:end])
01997 start = end
01998 end += length
01999 _v82.data = str[start:end]
02000 _v85 = _v78.disparity_image
02001 _v86 = _v85.header
02002 start = end
02003 end += 4
02004 (_v86.seq,) = _struct_I.unpack(str[start:end])
02005 _v87 = _v86.stamp
02006 _x = _v87
02007 start = end
02008 end += 8
02009 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02010 start = end
02011 end += 4
02012 (length,) = _struct_I.unpack(str[start:end])
02013 start = end
02014 end += length
02015 if python3:
02016 _v86.frame_id = str[start:end].decode('utf-8')
02017 else:
02018 _v86.frame_id = str[start:end]
02019 _x = _v85
02020 start = end
02021 end += 8
02022 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02023 start = end
02024 end += 4
02025 (length,) = _struct_I.unpack(str[start:end])
02026 start = end
02027 end += length
02028 if python3:
02029 _v85.encoding = str[start:end].decode('utf-8')
02030 else:
02031 _v85.encoding = str[start:end]
02032 _x = _v85
02033 start = end
02034 end += 5
02035 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
02036 start = end
02037 end += 4
02038 (length,) = _struct_I.unpack(str[start:end])
02039 start = end
02040 end += length
02041 _v85.data = str[start:end]
02042 _v88 = _v78.cam_info
02043 _v89 = _v88.header
02044 start = end
02045 end += 4
02046 (_v89.seq,) = _struct_I.unpack(str[start:end])
02047 _v90 = _v89.stamp
02048 _x = _v90
02049 start = end
02050 end += 8
02051 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02052 start = end
02053 end += 4
02054 (length,) = _struct_I.unpack(str[start:end])
02055 start = end
02056 end += length
02057 if python3:
02058 _v89.frame_id = str[start:end].decode('utf-8')
02059 else:
02060 _v89.frame_id = str[start:end]
02061 _x = _v88
02062 start = end
02063 end += 8
02064 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02065 start = end
02066 end += 4
02067 (length,) = _struct_I.unpack(str[start:end])
02068 start = end
02069 end += length
02070 if python3:
02071 _v88.distortion_model = str[start:end].decode('utf-8')
02072 else:
02073 _v88.distortion_model = str[start:end]
02074 start = end
02075 end += 4
02076 (length,) = _struct_I.unpack(str[start:end])
02077 pattern = '<%sd'%length
02078 start = end
02079 end += struct.calcsize(pattern)
02080 _v88.D = struct.unpack(pattern, str[start:end])
02081 start = end
02082 end += 72
02083 _v88.K = _struct_9d.unpack(str[start:end])
02084 start = end
02085 end += 72
02086 _v88.R = _struct_9d.unpack(str[start:end])
02087 start = end
02088 end += 96
02089 _v88.P = _struct_12d.unpack(str[start:end])
02090 _x = _v88
02091 start = end
02092 end += 8
02093 (_x.binning_x, _x.binning_y,) = _struct_2I.unpack(str[start:end])
02094 _v91 = _v88.roi
02095 _x = _v91
02096 start = end
02097 end += 17
02098 (_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
02099 _v91.do_rectify = bool(_v91.do_rectify)
02100 _v92 = _v78.roi_box_pose
02101 _v93 = _v92.header
02102 start = end
02103 end += 4
02104 (_v93.seq,) = _struct_I.unpack(str[start:end])
02105 _v94 = _v93.stamp
02106 _x = _v94
02107 start = end
02108 end += 8
02109 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02110 start = end
02111 end += 4
02112 (length,) = _struct_I.unpack(str[start:end])
02113 start = end
02114 end += length
02115 if python3:
02116 _v93.frame_id = str[start:end].decode('utf-8')
02117 else:
02118 _v93.frame_id = str[start:end]
02119 _v95 = _v92.pose
02120 _v96 = _v95.position
02121 _x = _v96
02122 start = end
02123 end += 24
02124 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02125 _v97 = _v95.orientation
02126 _x = _v97
02127 start = end
02128 end += 32
02129 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
02130 _v98 = _v78.roi_box_dims
02131 _x = _v98
02132 start = end
02133 end += 24
02134 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02135 start = end
02136 end += 4
02137 (length,) = _struct_I.unpack(str[start:end])
02138 start = end
02139 end += length
02140 if python3:
02141 val1.collision_name = str[start:end].decode('utf-8')
02142 else:
02143 val1.collision_name = str[start:end]
02144 self.graspable_objects.append(val1)
02145 start = end
02146 end += 4
02147 (length,) = _struct_I.unpack(str[start:end])
02148 self.collision_object_names = []
02149 for i in range(0, length):
02150 start = end
02151 end += 4
02152 (length,) = _struct_I.unpack(str[start:end])
02153 start = end
02154 end += length
02155 if python3:
02156 val1 = str[start:end].decode('utf-8')
02157 else:
02158 val1 = str[start:end]
02159 self.collision_object_names.append(val1)
02160 start = end
02161 end += 4
02162 (length,) = _struct_I.unpack(str[start:end])
02163 start = end
02164 end += length
02165 if python3:
02166 self.collision_support_surface_name = str[start:end].decode('utf-8')
02167 else:
02168 self.collision_support_surface_name = str[start:end]
02169 return self
02170 except struct.error as e:
02171 raise genpy.DeserializationError(e) # most likely buffer underflow (truncated input)
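  # The reader mirrors the writer's layout: for each string it first unpacks the
  # little-endian uint32 length and then slices that many bytes, e.g. with
  # illustrative names:
  #   (length,) = struct.unpack('<I', data[0:4]); text = data[4:4 + length]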
02172
02173
02174 def serialize_numpy(self, buff, numpy):
02175 """
02176 serialize message with numpy array types into buffer
02177 :param buff: buffer, ``StringIO``
02178 :param numpy: numpy python module
02179 """
02180 try:
02181 length = len(self.graspable_objects)
02182 buff.write(_struct_I.pack(length))
02183 for val1 in self.graspable_objects:
02184 _x = val1.reference_frame_id
02185 length = len(_x)
02186 if python3 or type(_x) == unicode:
02187 _x = _x.encode('utf-8')
02188 length = len(_x)
02189 buff.write(struct.pack('<I%ss'%length, length, _x))
02190 length = len(val1.potential_models)
02191 buff.write(_struct_I.pack(length))
02192 for val2 in val1.potential_models:
02193 buff.write(_struct_i.pack(val2.model_id))
02194 _v99 = val2.type
02195 _x = _v99.key
02196 length = len(_x)
02197 if python3 or type(_x) == unicode:
02198 _x = _x.encode('utf-8')
02199 length = len(_x)
02200 buff.write(struct.pack('<I%ss'%length, length, _x))
02201 _x = _v99.db
02202 length = len(_x)
02203 if python3 or type(_x) == unicode:
02204 _x = _x.encode('utf-8')
02205 length = len(_x)
02206 buff.write(struct.pack('<I%ss'%length, length, _x))
02207 _v100 = val2.pose
02208 _v101 = _v100.header
02209 buff.write(_struct_I.pack(_v101.seq))
02210 _v102 = _v101.stamp
02211 _x = _v102
02212 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02213 _x = _v101.frame_id
02214 length = len(_x)
02215 if python3 or type(_x) == unicode:
02216 _x = _x.encode('utf-8')
02217 length = len(_x)
02218 buff.write(struct.pack('<I%ss'%length, length, _x))
02219 _v103 = _v100.pose
02220 _v104 = _v103.position
02221 _x = _v104
02222 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
02223 _v105 = _v103.orientation
02224 _x = _v105
02225 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
02226 buff.write(_struct_f.pack(val2.confidence))
02227 _x = val2.detector_name
02228 length = len(_x)
02229 if python3 or type(_x) == unicode:
02230 _x = _x.encode('utf-8')
02231 length = len(_x)
02232 buff.write(struct.pack('<I%ss'%length, length, _x))
02233 _v106 = val1.cluster
02234 _v107 = _v106.header
02235 buff.write(_struct_I.pack(_v107.seq))
02236 _v108 = _v107.stamp
02237 _x = _v108
02238 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02239 _x = _v107.frame_id
02240 length = len(_x)
02241 if python3 or type(_x) == unicode:
02242 _x = _x.encode('utf-8')
02243 length = len(_x)
02244 buff.write(struct.pack('<I%ss'%length, length, _x))
02245 length = len(_v106.points)
02246 buff.write(_struct_I.pack(length))
02247 for val3 in _v106.points:
02248 _x = val3
02249 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
02250 length = len(_v106.channels)
02251 buff.write(_struct_I.pack(length))
02252 for val3 in _v106.channels:
02253 _x = val3.name
02254 length = len(_x)
02255 if python3 or type(_x) == unicode:
02256 _x = _x.encode('utf-8')
02257 length = len(_x)
02258 buff.write(struct.pack('<I%ss'%length, length, _x))
02259 length = len(val3.values)
02260 buff.write(_struct_I.pack(length))
02261 pattern = '<%sf'%length
02262 buff.write(val3.values.tostring())
02263 _v109 = val1.region
02264 _v110 = _v109.cloud
02265 _v111 = _v110.header
02266 buff.write(_struct_I.pack(_v111.seq))
02267 _v112 = _v111.stamp
02268 _x = _v112
02269 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02270 _x = _v111.frame_id
02271 length = len(_x)
02272 if python3 or type(_x) == unicode:
02273 _x = _x.encode('utf-8')
02274 length = len(_x)
02275 buff.write(struct.pack('<I%ss'%length, length, _x))
02276 _x = _v110
02277 buff.write(_struct_2I.pack(_x.height, _x.width))
02278 length = len(_v110.fields)
02279 buff.write(_struct_I.pack(length))
02280 for val4 in _v110.fields:
02281 _x = val4.name
02282 length = len(_x)
02283 if python3 or type(_x) == unicode:
02284 _x = _x.encode('utf-8')
02285 length = len(_x)
02286 buff.write(struct.pack('<I%ss'%length, length, _x))
02287 _x = val4
02288 buff.write(_struct_IBI.pack(_x.offset, _x.datatype, _x.count))
02289 _x = _v110
02290 buff.write(_struct_B2I.pack(_x.is_bigendian, _x.point_step, _x.row_step))
02291 _x = _v110.data
02292 length = len(_x)
02293 # if the data is stored as a list, serialize it as bytes rather than as a string
02294 if type(_x) in [list, tuple]:
02295 buff.write(struct.pack('<I%sB'%length, length, *_x))
02296 else:
02297 buff.write(struct.pack('<I%ss'%length, length, _x))
02298 buff.write(_struct_B.pack(_v110.is_dense))
02299 length = len(_v109.mask)
02300 buff.write(_struct_I.pack(length))
02301 pattern = '<%si'%length
02302 buff.write(_v109.mask.tostring())
02303 _v113 = _v109.image
02304 _v114 = _v113.header
02305 buff.write(_struct_I.pack(_v114.seq))
02306 _v115 = _v114.stamp
02307 _x = _v115
02308 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02309 _x = _v114.frame_id
02310 length = len(_x)
02311 if python3 or type(_x) == unicode:
02312 _x = _x.encode('utf-8')
02313 length = len(_x)
02314 buff.write(struct.pack('<I%ss'%length, length, _x))
02315 _x = _v113
02316 buff.write(_struct_2I.pack(_x.height, _x.width))
02317 _x = _v113.encoding
02318 length = len(_x)
02319 if python3 or type(_x) == unicode:
02320 _x = _x.encode('utf-8')
02321 length = len(_x)
02322 buff.write(struct.pack('<I%ss'%length, length, _x))
02323 _x = _v113
02324 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
02325 _x = _v113.data
02326 length = len(_x)
02327 # if the data is stored as a list, serialize it as bytes rather than as a string
02328 if type(_x) in [list, tuple]:
02329 buff.write(struct.pack('<I%sB'%length, length, *_x))
02330 else:
02331 buff.write(struct.pack('<I%ss'%length, length, _x))
02332 _v116 = _v109.disparity_image
02333 _v117 = _v116.header
02334 buff.write(_struct_I.pack(_v117.seq))
02335 _v118 = _v117.stamp
02336 _x = _v118
02337 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02338 _x = _v117.frame_id
02339 length = len(_x)
02340 if python3 or type(_x) == unicode:
02341 _x = _x.encode('utf-8')
02342 length = len(_x)
02343 buff.write(struct.pack('<I%ss'%length, length, _x))
02344 _x = _v116
02345 buff.write(_struct_2I.pack(_x.height, _x.width))
02346 _x = _v116.encoding
02347 length = len(_x)
02348 if python3 or type(_x) == unicode:
02349 _x = _x.encode('utf-8')
02350 length = len(_x)
02351 buff.write(struct.pack('<I%ss'%length, length, _x))
02352 _x = _v116
02353 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
02354 _x = _v116.data
02355 length = len(_x)
02356 # if the data is stored as a list, serialize it as bytes rather than as a string
02357 if type(_x) in [list, tuple]:
02358 buff.write(struct.pack('<I%sB'%length, length, *_x))
02359 else:
02360 buff.write(struct.pack('<I%ss'%length, length, _x))
02361 _v119 = _v109.cam_info
02362 _v120 = _v119.header
02363 buff.write(_struct_I.pack(_v120.seq))
02364 _v121 = _v120.stamp
02365 _x = _v121
02366 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02367 _x = _v120.frame_id
02368 length = len(_x)
02369 if python3 or type(_x) == unicode:
02370 _x = _x.encode('utf-8')
02371 length = len(_x)
02372 buff.write(struct.pack('<I%ss'%length, length, _x))
02373 _x = _v119
02374 buff.write(_struct_2I.pack(_x.height, _x.width))
02375 _x = _v119.distortion_model
02376 length = len(_x)
02377 if python3 or type(_x) == unicode:
02378 _x = _x.encode('utf-8')
02379 length = len(_x)
02380 buff.write(struct.pack('<I%ss'%length, length, _x))
02381 length = len(_v119.D)
02382 buff.write(_struct_I.pack(length))
02383 pattern = '<%sd'%length
02384 buff.write(_v119.D.tostring())
02385 buff.write(_v119.K.tostring())
02386 buff.write(_v119.R.tostring())
02387 buff.write(_v119.P.tostring())
02388 _x = _v119
02389 buff.write(_struct_2I.pack(_x.binning_x, _x.binning_y))
02390 _v122 = _v119.roi
02391 _x = _v122
02392 buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
02393 _v123 = _v109.roi_box_pose
02394 _v124 = _v123.header
02395 buff.write(_struct_I.pack(_v124.seq))
02396 _v125 = _v124.stamp
02397 _x = _v125
02398 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02399 _x = _v124.frame_id
02400 length = len(_x)
02401 if python3 or type(_x) == unicode:
02402 _x = _x.encode('utf-8')
02403 length = len(_x)
02404 buff.write(struct.pack('<I%ss'%length, length, _x))
02405 _v126 = _v123.pose
02406 _v127 = _v126.position
02407 _x = _v127
02408 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
02409 _v128 = _v126.orientation
02410 _x = _v128
02411 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
02412 _v129 = _v109.roi_box_dims
02413 _x = _v129
02414 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
02415 _x = val1.collision_name
02416 length = len(_x)
02417 if python3 or type(_x) == unicode:
02418 _x = _x.encode('utf-8')
02419 length = len(_x)
02420 buff.write(struct.pack('<I%ss'%length, length, _x))
02421 length = len(self.collision_object_names)
02422 buff.write(_struct_I.pack(length))
02423 for val1 in self.collision_object_names:
02424 length = len(val1)
02425 if python3 or type(val1) == unicode:
02426 val1 = val1.encode('utf-8')
02427 length = len(val1)
02428 buff.write(struct.pack('<I%ss'%length, length, val1))
02429 _x = self.collision_support_surface_name
02430 length = len(_x)
02431 if python3 or type(_x) == unicode:
02432 _x = _x.encode('utf-8')
02433 length = len(_x)
02434 buff.write(struct.pack('<I%ss'%length, length, _x))
02435 except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
02436 except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
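  # The numpy variants above write array-backed fields directly: channel values
  # prepared (illustratively) as numpy.array([0.0, 1.0, 2.0], dtype=numpy.float32)
  # yield, via .tostring(), the same bytes as struct.pack('<3f', 0.0, 1.0, 2.0) on a
  # little-endian host (.tostring() is spelled .tobytes() in newer numpy releases).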
02437
02438 def deserialize_numpy(self, str, numpy):
02439 """
02440 unpack serialized message in str into this message instance using numpy for array types
02441 :param str: byte array of serialized message, ``str``
02442 :param numpy: numpy python module
02443 """
02444 try:
02445 if self.graspable_objects is None:
02446 self.graspable_objects = None
02447 end = 0
02448 start = end
02449 end += 4
02450 (length,) = _struct_I.unpack(str[start:end])
02451 self.graspable_objects = []
02452 for i in range(0, length):
02453 val1 = manipulation_msgs.msg.GraspableObject()
02454 start = end
02455 end += 4
02456 (length,) = _struct_I.unpack(str[start:end])
02457 start = end
02458 end += length
02459 if python3:
02460 val1.reference_frame_id = str[start:end].decode('utf-8')
02461 else:
02462 val1.reference_frame_id = str[start:end]
02463 start = end
02464 end += 4
02465 (length,) = _struct_I.unpack(str[start:end])
02466 val1.potential_models = []
02467 for i in range(0, length):
02468 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
02469 start = end
02470 end += 4
02471 (val2.model_id,) = _struct_i.unpack(str[start:end])
02472 _v130 = val2.type
02473 start = end
02474 end += 4
02475 (length,) = _struct_I.unpack(str[start:end])
02476 start = end
02477 end += length
02478 if python3:
02479 _v130.key = str[start:end].decode('utf-8')
02480 else:
02481 _v130.key = str[start:end]
02482 start = end
02483 end += 4
02484 (length,) = _struct_I.unpack(str[start:end])
02485 start = end
02486 end += length
02487 if python3:
02488 _v130.db = str[start:end].decode('utf-8')
02489 else:
02490 _v130.db = str[start:end]
02491 _v131 = val2.pose
02492 _v132 = _v131.header
02493 start = end
02494 end += 4
02495 (_v132.seq,) = _struct_I.unpack(str[start:end])
02496 _v133 = _v132.stamp
02497 _x = _v133
02498 start = end
02499 end += 8
02500 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02501 start = end
02502 end += 4
02503 (length,) = _struct_I.unpack(str[start:end])
02504 start = end
02505 end += length
02506 if python3:
02507 _v132.frame_id = str[start:end].decode('utf-8')
02508 else:
02509 _v132.frame_id = str[start:end]
02510 _v134 = _v131.pose
02511 _v135 = _v134.position
02512 _x = _v135
02513 start = end
02514 end += 24
02515 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02516 _v136 = _v134.orientation
02517 _x = _v136
02518 start = end
02519 end += 32
02520 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
02521 start = end
02522 end += 4
02523 (val2.confidence,) = _struct_f.unpack(str[start:end])
02524 start = end
02525 end += 4
02526 (length,) = _struct_I.unpack(str[start:end])
02527 start = end
02528 end += length
02529 if python3:
02530 val2.detector_name = str[start:end].decode('utf-8')
02531 else:
02532 val2.detector_name = str[start:end]
02533 val1.potential_models.append(val2)
02534 _v137 = val1.cluster
02535 _v138 = _v137.header
02536 start = end
02537 end += 4
02538 (_v138.seq,) = _struct_I.unpack(str[start:end])
02539 _v139 = _v138.stamp
02540 _x = _v139
02541 start = end
02542 end += 8
02543 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02544 start = end
02545 end += 4
02546 (length,) = _struct_I.unpack(str[start:end])
02547 start = end
02548 end += length
02549 if python3:
02550 _v138.frame_id = str[start:end].decode('utf-8')
02551 else:
02552 _v138.frame_id = str[start:end]
02553 start = end
02554 end += 4
02555 (length,) = _struct_I.unpack(str[start:end])
02556 _v137.points = []
02557 for i in range(0, length):
02558 val3 = geometry_msgs.msg.Point32()
02559 _x = val3
02560 start = end
02561 end += 12
02562 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
02563 _v137.points.append(val3)
02564 start = end
02565 end += 4
02566 (length,) = _struct_I.unpack(str[start:end])
02567 _v137.channels = []
02568 for i in range(0, length):
02569 val3 = sensor_msgs.msg.ChannelFloat32()
02570 start = end
02571 end += 4
02572 (length,) = _struct_I.unpack(str[start:end])
02573 start = end
02574 end += length
02575 if python3:
02576 val3.name = str[start:end].decode('utf-8')
02577 else:
02578 val3.name = str[start:end]
02579 start = end
02580 end += 4
02581 (length,) = _struct_I.unpack(str[start:end])
02582 pattern = '<%sf'%length
02583 start = end
02584 end += struct.calcsize(pattern)
02585 val3.values = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=length)
02586 _v137.channels.append(val3)
02587 _v140 = val1.region
02588 _v141 = _v140.cloud
02589 _v142 = _v141.header
02590 start = end
02591 end += 4
02592 (_v142.seq,) = _struct_I.unpack(str[start:end])
02593 _v143 = _v142.stamp
02594 _x = _v143
02595 start = end
02596 end += 8
02597 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02598 start = end
02599 end += 4
02600 (length,) = _struct_I.unpack(str[start:end])
02601 start = end
02602 end += length
02603 if python3:
02604 _v142.frame_id = str[start:end].decode('utf-8')
02605 else:
02606 _v142.frame_id = str[start:end]
02607 _x = _v141
02608 start = end
02609 end += 8
02610 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02611 start = end
02612 end += 4
02613 (length,) = _struct_I.unpack(str[start:end])
02614 _v141.fields = []
02615 for i in range(0, length):
02616 val4 = sensor_msgs.msg.PointField()
02617 start = end
02618 end += 4
02619 (length,) = _struct_I.unpack(str[start:end])
02620 start = end
02621 end += length
02622 if python3:
02623 val4.name = str[start:end].decode('utf-8')
02624 else:
02625 val4.name = str[start:end]
02626 _x = val4
02627 start = end
02628 end += 9
02629 (_x.offset, _x.datatype, _x.count,) = _struct_IBI.unpack(str[start:end])
02630 _v141.fields.append(val4)
02631 _x = _v141
02632 start = end
02633 end += 9
02634 (_x.is_bigendian, _x.point_step, _x.row_step,) = _struct_B2I.unpack(str[start:end])
02635 _v141.is_bigendian = bool(_v141.is_bigendian)
02636 start = end
02637 end += 4
02638 (length,) = _struct_I.unpack(str[start:end])
02639 start = end
02640 end += length
02641 _v141.data = str[start:end]
02642 start = end
02643 end += 1
02644 (_v141.is_dense,) = _struct_B.unpack(str[start:end])
02645 _v141.is_dense = bool(_v141.is_dense)
02646 start = end
02647 end += 4
02648 (length,) = _struct_I.unpack(str[start:end])
02649 pattern = '<%si'%length
02650 start = end
02651 end += struct.calcsize(pattern)
02652 _v140.mask = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
02653 _v144 = _v140.image
02654 _v145 = _v144.header
02655 start = end
02656 end += 4
02657 (_v145.seq,) = _struct_I.unpack(str[start:end])
02658 _v146 = _v145.stamp
02659 _x = _v146
02660 start = end
02661 end += 8
02662 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02663 start = end
02664 end += 4
02665 (length,) = _struct_I.unpack(str[start:end])
02666 start = end
02667 end += length
02668 if python3:
02669 _v145.frame_id = str[start:end].decode('utf-8')
02670 else:
02671 _v145.frame_id = str[start:end]
02672 _x = _v144
02673 start = end
02674 end += 8
02675 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02676 start = end
02677 end += 4
02678 (length,) = _struct_I.unpack(str[start:end])
02679 start = end
02680 end += length
02681 if python3:
02682 _v144.encoding = str[start:end].decode('utf-8')
02683 else:
02684 _v144.encoding = str[start:end]
02685 _x = _v144
02686 start = end
02687 end += 5
02688 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
02689 start = end
02690 end += 4
02691 (length,) = _struct_I.unpack(str[start:end])
02692 start = end
02693 end += length
02694 _v144.data = str[start:end]
02695 _v147 = _v140.disparity_image
02696 _v148 = _v147.header
02697 start = end
02698 end += 4
02699 (_v148.seq,) = _struct_I.unpack(str[start:end])
02700 _v149 = _v148.stamp
02701 _x = _v149
02702 start = end
02703 end += 8
02704 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02705 start = end
02706 end += 4
02707 (length,) = _struct_I.unpack(str[start:end])
02708 start = end
02709 end += length
02710 if python3:
02711 _v148.frame_id = str[start:end].decode('utf-8')
02712 else:
02713 _v148.frame_id = str[start:end]
02714 _x = _v147
02715 start = end
02716 end += 8
02717 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02718 start = end
02719 end += 4
02720 (length,) = _struct_I.unpack(str[start:end])
02721 start = end
02722 end += length
02723 if python3:
02724 _v147.encoding = str[start:end].decode('utf-8')
02725 else:
02726 _v147.encoding = str[start:end]
02727 _x = _v147
02728 start = end
02729 end += 5
02730 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
02731 start = end
02732 end += 4
02733 (length,) = _struct_I.unpack(str[start:end])
02734 start = end
02735 end += length
02736 _v147.data = str[start:end]
02737 _v150 = _v140.cam_info
02738 _v151 = _v150.header
02739 start = end
02740 end += 4
02741 (_v151.seq,) = _struct_I.unpack(str[start:end])
02742 _v152 = _v151.stamp
02743 _x = _v152
02744 start = end
02745 end += 8
02746 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02747 start = end
02748 end += 4
02749 (length,) = _struct_I.unpack(str[start:end])
02750 start = end
02751 end += length
02752 if python3:
02753 _v151.frame_id = str[start:end].decode('utf-8')
02754 else:
02755 _v151.frame_id = str[start:end]
02756 _x = _v150
02757 start = end
02758 end += 8
02759 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02760 start = end
02761 end += 4
02762 (length,) = _struct_I.unpack(str[start:end])
02763 start = end
02764 end += length
02765 if python3:
02766 _v150.distortion_model = str[start:end].decode('utf-8')
02767 else:
02768 _v150.distortion_model = str[start:end]
02769 start = end
02770 end += 4
02771 (length,) = _struct_I.unpack(str[start:end])
02772 pattern = '<%sd'%length
02773 start = end
02774 end += struct.calcsize(pattern)
02775 _v150.D = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
02776 start = end
02777 end += 72
02778 _v150.K = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
02779 start = end
02780 end += 72
02781 _v150.R = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
02782 start = end
02783 end += 96
02784 _v150.P = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=12)
02785 _x = _v150
02786 start = end
02787 end += 8
02788 (_x.binning_x, _x.binning_y,) = _struct_2I.unpack(str[start:end])
02789 _v153 = _v150.roi
02790 _x = _v153
02791 start = end
02792 end += 17
02793 (_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
02794 _v153.do_rectify = bool(_v153.do_rectify)
02795 _v154 = _v140.roi_box_pose
02796 _v155 = _v154.header
02797 start = end
02798 end += 4
02799 (_v155.seq,) = _struct_I.unpack(str[start:end])
02800 _v156 = _v155.stamp
02801 _x = _v156
02802 start = end
02803 end += 8
02804 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02805 start = end
02806 end += 4
02807 (length,) = _struct_I.unpack(str[start:end])
02808 start = end
02809 end += length
02810 if python3:
02811 _v155.frame_id = str[start:end].decode('utf-8')
02812 else:
02813 _v155.frame_id = str[start:end]
02814 _v157 = _v154.pose
02815 _v158 = _v157.position
02816 _x = _v158
02817 start = end
02818 end += 24
02819 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02820 _v159 = _v157.orientation
02821 _x = _v159
02822 start = end
02823 end += 32
02824 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
02825 _v160 = _v140.roi_box_dims
02826 _x = _v160
02827 start = end
02828 end += 24
02829 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02830 start = end
02831 end += 4
02832 (length,) = _struct_I.unpack(str[start:end])
02833 start = end
02834 end += length
02835 if python3:
02836 val1.collision_name = str[start:end].decode('utf-8')
02837 else:
02838 val1.collision_name = str[start:end]
02839 self.graspable_objects.append(val1)
02840 start = end
02841 end += 4
02842 (length,) = _struct_I.unpack(str[start:end])
02843 self.collision_object_names = []
02844 for i in range(0, length):
02845 start = end
02846 end += 4
02847 (length,) = _struct_I.unpack(str[start:end])
02848 start = end
02849 end += length
02850 if python3:
02851 val1 = str[start:end].decode('utf-8')
02852 else:
02853 val1 = str[start:end]
02854 self.collision_object_names.append(val1)
02855 start = end
02856 end += 4
02857 (length,) = _struct_I.unpack(str[start:end])
02858 start = end
02859 end += length
02860 if python3:
02861 self.collision_support_surface_name = str[start:end].decode('utf-8')
02862 else:
02863 self.collision_support_surface_name = str[start:end]
02864 return self
02865 except struct.error as e:
02866 raise genpy.DeserializationError(e) # most likely buffer underflow (truncated input)
02867
02868 _struct_I = genpy.struct_I
02869 _struct_IBI = struct.Struct("<IBI")
02870 _struct_B = struct.Struct("<B")
02871 _struct_12d = struct.Struct("<12d")
02872 _struct_f = struct.Struct("<f")
02873 _struct_i = struct.Struct("<i")
02874 _struct_BI = struct.Struct("<BI")
02875 _struct_3f = struct.Struct("<3f")
02876 _struct_9d = struct.Struct("<9d")
02877 _struct_B2I = struct.Struct("<B2I")
02878 _struct_4d = struct.Struct("<4d")
02879 _struct_2I = struct.Struct("<2I")
02880 _struct_4IB = struct.Struct("<4IB")
02881 _struct_3d = struct.Struct("<3d")
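# The precompiled Struct objects above are shared by the (de)serializers; for example,
# _struct_3d packs three little-endian float64 values into 24 bytes:
#   _struct_3d.pack(1.0, 2.0, 3.0)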
02882 class TabletopCollisionMapProcessing(object):
02883 _type = 'tabletop_collision_map_processing/TabletopCollisionMapProcessing'
02884 _md5sum = '9bb0e67d3827378f877a06e70c6e4b13'
02885 _request_class = TabletopCollisionMapProcessingRequest
02886 _response_class = TabletopCollisionMapProcessingResponse
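
# Minimal round-trip sketch (illustrative values; assumes the message packages imported
# by this module are available): build a response, serialize it into an in-memory
# buffer, and parse it back with a fresh instance.
if __name__ == '__main__':
  from io import BytesIO
  _resp = TabletopCollisionMapProcessingResponse(
    collision_object_names=['graspable_object_0'],
    collision_support_surface_name='table')
  _buff = BytesIO()
  _resp.serialize(_buff)
  _copy = TabletopCollisionMapProcessingResponse()
  _copy.deserialize(_buff.getvalue())
  assert _copy.collision_object_names == ['graspable_object_0']
  assert _copy.collision_support_surface_name == 'table'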