00001 """autogenerated by genpy from tabletop_collision_map_processing/TabletopCollisionMapProcessingRequest.msg. Do not edit."""
00002 import sys
00003 python3 = sys.hexversion > 0x03000000
00004 import genpy
00005 import struct
00006
00007 import arm_navigation_msgs.msg
00008 import tabletop_object_detector.msg
00009 import geometry_msgs.msg
00010 import sensor_msgs.msg
00011 import std_msgs.msg
00012 import household_objects_database_msgs.msg
00013
00014 class TabletopCollisionMapProcessingRequest(genpy.Message):
00015 _md5sum = "fe986e22908c7a2936a0cdf9f240d4a3"
00016 _type = "tabletop_collision_map_processing/TabletopCollisionMapProcessingRequest"
00017 _has_header = False
00018 _full_text = """
00019
00020
00021
00022
00023
00024
00025 tabletop_object_detector/TabletopDetectionResult detection_result
00026
00027
00028 bool reset_collision_models
00029
00030
00031 bool reset_attached_models
00032
00033
00034
00035 string desired_frame
00036
00037
00038 ================================================================================
00039 MSG: tabletop_object_detector/TabletopDetectionResult
00040 # Contains all the information from one run of the tabletop detection node
00041
00042 # The information for the plane that has been detected
00043 Table table
00044
00045 # The raw clusters detected in the scan
00046 sensor_msgs/PointCloud[] clusters
00047
00048 # The list of potential models that have been detected for each cluster
00049 # An empty list will be returned for a cluster that has no recognition results at all
00050 household_objects_database_msgs/DatabaseModelPoseList[] models
00051
00052 # For each cluster, the index of the list of models that was fit to that cluster
00053 # keep in mind that multiple raw clusters can correspond to a single fit
00054 int32[] cluster_model_indices
00055
00056 # Whether the detection has succeeded or failed
00057 int32 NO_CLOUD_RECEIVED = 1
00058 int32 NO_TABLE = 2
00059 int32 OTHER_ERROR = 3
00060 int32 SUCCESS = 4
00061 int32 result
00062
00063 ================================================================================
00064 MSG: tabletop_object_detector/Table
00065 # Informs that a planar table has been detected at a given location
00066
00067 # The pose gives you the transform that takes you to the coordinate system
00068 # of the table, with the origin somewhere in the table plane and the
00069 # z axis normal to the plane
00070 geometry_msgs/PoseStamped pose
00071
00072 # These values give you the observed extents of the table, along x and y,
00073 # in the table's own coordinate system (above)
00074 # there is no guarantee that the origin of the table coordinate system is
00075 # inside the boundary defined by these values.
00076 float32 x_min
00077 float32 x_max
00078 float32 y_min
00079 float32 y_max
00080
00081 # There is no guarantee that the table does NOT extend further than these
00082 # values; this is just as far as we've observed it.
00083
00084
00085 # Newer table definition as triangle mesh of convex hull (relative to pose)
00086 arm_navigation_msgs/Shape convex_hull
00087
00088 ================================================================================
00089 MSG: geometry_msgs/PoseStamped
00090 # A Pose with reference coordinate frame and timestamp
00091 Header header
00092 Pose pose
00093
00094 ================================================================================
00095 MSG: std_msgs/Header
00096 # Standard metadata for higher-level stamped data types.
00097 # This is generally used to communicate timestamped data
00098 # in a particular coordinate frame.
00099 #
00100 # sequence ID: consecutively increasing ID
00101 uint32 seq
00102 #Two-integer timestamp that is expressed as:
00103 # * stamp.secs: seconds (stamp_secs) since epoch
00104 # * stamp.nsecs: nanoseconds since stamp_secs
00105 # time-handling sugar is provided by the client library
00106 time stamp
00107 #Frame this data is associated with
00108 # 0: no frame
00109 # 1: global frame
00110 string frame_id
00111
00112 ================================================================================
00113 MSG: geometry_msgs/Pose
00114 # A representation of pose in free space, composed of position and orientation.
00115 Point position
00116 Quaternion orientation
00117
00118 ================================================================================
00119 MSG: geometry_msgs/Point
00120 # This contains the position of a point in free space
00121 float64 x
00122 float64 y
00123 float64 z
00124
00125 ================================================================================
00126 MSG: geometry_msgs/Quaternion
00127 # This represents an orientation in free space in quaternion form.
00128
00129 float64 x
00130 float64 y
00131 float64 z
00132 float64 w
00133
00134 ================================================================================
00135 MSG: arm_navigation_msgs/Shape
00136 byte SPHERE=0
00137 byte BOX=1
00138 byte CYLINDER=2
00139 byte MESH=3
00140
00141 byte type
00142
00143
00144 #### define sphere, box, cylinder ####
00145 # the origin of each shape is considered at the shape's center
00146
00147 # for sphere
00148 # radius := dimensions[0]
00149
00150 # for cylinder
00151 # radius := dimensions[0]
00152 # length := dimensions[1]
00153 # the length is along the Z axis
00154
00155 # for box
00156 # size_x := dimensions[0]
00157 # size_y := dimensions[1]
00158 # size_z := dimensions[2]
00159 float64[] dimensions
00160
00161
00162 #### define mesh ####
00163
00164 # list of triangles; triangle k is defined by three vertices located
00165 # at indices triangles[3k], triangles[3k+1], triangles[3k+2]
00166 int32[] triangles
00167 geometry_msgs/Point[] vertices
00168
00169 ================================================================================
00170 MSG: sensor_msgs/PointCloud
00171 # This message holds a collection of 3d points, plus optional additional
00172 # information about each point.
00173
00174 # Time of sensor data acquisition, coordinate frame ID.
00175 Header header
00176
00177 # Array of 3d points. Each Point32 should be interpreted as a 3d point
00178 # in the frame given in the header.
00179 geometry_msgs/Point32[] points
00180
00181 # Each channel should have the same number of elements as points array,
00182 # and the data in each channel should correspond 1:1 with each point.
00183 # Channel names in common practice are listed in ChannelFloat32.msg.
00184 ChannelFloat32[] channels
00185
00186 ================================================================================
00187 MSG: geometry_msgs/Point32
00188 # This contains the position of a point in free space (with 32 bits of precision).
00189 # It is recommended to use Point wherever possible instead of Point32.
00190 #
00191 # This recommendation is to promote interoperability.
00192 #
00193 # This message is designed to take up less space when sending
00194 # lots of points at once, as in the case of a PointCloud.
00195
00196 float32 x
00197 float32 y
00198 float32 z
00199 ================================================================================
00200 MSG: sensor_msgs/ChannelFloat32
00201 # This message is used by the PointCloud message to hold optional data
00202 # associated with each point in the cloud. The length of the values
00203 # array should be the same as the length of the points array in the
00204 # PointCloud, and each value should be associated with the corresponding
00205 # point.
00206
00207 # Channel names in existing practice include:
00208 # "u", "v" - row and column (respectively) in the left stereo image.
00209 # This is opposite to usual conventions but remains for
00210 # historical reasons. The newer PointCloud2 message has no
00211 # such problem.
00212 # "rgb" - For point clouds produced by color stereo cameras. uint8
00213 # (R,G,B) values packed into the least significant 24 bits,
00214 # in order.
00215 # "intensity" - laser or pixel intensity.
00216 # "distance"
00217
00218 # The channel name should give semantics of the channel (e.g.
00219 # "intensity" instead of "value").
00220 string name
00221
00222 # The values array should be 1-1 with the elements of the associated
00223 # PointCloud.
00224 float32[] values
00225
00226 ================================================================================
00227 MSG: household_objects_database_msgs/DatabaseModelPoseList
00228 # stores a list of possible database models recognition results
00229 DatabaseModelPose[] model_list
00230 ================================================================================
00231 MSG: household_objects_database_msgs/DatabaseModelPose
00232 # Informs that a specific model from the Model Database has been
00233 # identified at a certain location
00234
00235 # the database id of the model
00236 int32 model_id
00237
00238 # the pose that it can be found in
00239 geometry_msgs/PoseStamped pose
00240
00241 # a measure of the confidence level in this detection result
00242 float32 confidence
00243
00244 # the name of the object detector that generated this detection result
00245 string detector_name
00246
00247 """
00248 __slots__ = ['detection_result','reset_collision_models','reset_attached_models','desired_frame']
00249 _slot_types = ['tabletop_object_detector/TabletopDetectionResult','bool','bool','string']
00250
00251 def __init__(self, *args, **kwds):
00252 """
00253 Constructor. Any message fields that are implicitly/explicitly
00254 set to None will be assigned a default value. The recommended
00255 use is keyword arguments as this is more robust to future message
00256 changes. You cannot mix in-order arguments and keyword arguments.
00257
00258 The available fields are:
00259 detection_result,reset_collision_models,reset_attached_models,desired_frame
00260
00261 :param args: complete set of field values, in .msg order
00262 :param kwds: use keyword arguments corresponding to message field names
00263 to set specific fields.
00264 """
00265 if args or kwds:
00266 super(TabletopCollisionMapProcessingRequest, self).__init__(*args, **kwds)
00267 #message fields cannot be None, assign default values for those that are
00268 if self.detection_result is None:
00269 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00270 if self.reset_collision_models is None:
00271 self.reset_collision_models = False
00272 if self.reset_attached_models is None:
00273 self.reset_attached_models = False
00274 if self.desired_frame is None:
00275 self.desired_frame = ''
00276 else:
00277 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00278 self.reset_collision_models = False
00279 self.reset_attached_models = False
00280 self.desired_frame = ''
00281
00282 def _get_types(self):
00283 """
00284 internal API method
00285 """
00286 return self._slot_types
00287
00288 def serialize(self, buff):
00289 """
00290 serialize message into buffer
00291 :param buff: buffer, ``StringIO``
00292 """
00293 try:
00294 _x = self
00295 buff.write(_struct_3I.pack(_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs))
00296 _x = self.detection_result.table.pose.header.frame_id
00297 length = len(_x)
00298 if python3 or type(_x) == unicode:
00299 _x = _x.encode('utf-8')
00300 length = len(_x)
00301 buff.write(struct.pack('<I%ss'%length, length, _x))
00302 _x = self
00303 buff.write(_struct_7d4fb.pack(_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max, _x.detection_result.table.convex_hull.type))
00304 length = len(self.detection_result.table.convex_hull.dimensions)
00305 buff.write(_struct_I.pack(length))
00306 pattern = '<%sd'%length
00307 buff.write(struct.pack(pattern, *self.detection_result.table.convex_hull.dimensions))
00308 length = len(self.detection_result.table.convex_hull.triangles)
00309 buff.write(_struct_I.pack(length))
00310 pattern = '<%si'%length
00311 buff.write(struct.pack(pattern, *self.detection_result.table.convex_hull.triangles))
00312 length = len(self.detection_result.table.convex_hull.vertices)
00313 buff.write(_struct_I.pack(length))
00314 for val1 in self.detection_result.table.convex_hull.vertices:
00315 _x = val1
00316 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00317 length = len(self.detection_result.clusters)
00318 buff.write(_struct_I.pack(length))
00319 for val1 in self.detection_result.clusters:
00320 _v1 = val1.header
00321 buff.write(_struct_I.pack(_v1.seq))
00322 _v2 = _v1.stamp
00323 _x = _v2
00324 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00325 _x = _v1.frame_id
00326 length = len(_x)
00327 if python3 or type(_x) == unicode:
00328 _x = _x.encode('utf-8')
00329 length = len(_x)
00330 buff.write(struct.pack('<I%ss'%length, length, _x))
00331 length = len(val1.points)
00332 buff.write(_struct_I.pack(length))
00333 for val2 in val1.points:
00334 _x = val2
00335 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
00336 length = len(val1.channels)
00337 buff.write(_struct_I.pack(length))
00338 for val2 in val1.channels:
00339 _x = val2.name
00340 length = len(_x)
00341 if python3 or type(_x) == unicode:
00342 _x = _x.encode('utf-8')
00343 length = len(_x)
00344 buff.write(struct.pack('<I%ss'%length, length, _x))
00345 length = len(val2.values)
00346 buff.write(_struct_I.pack(length))
00347 pattern = '<%sf'%length
00348 buff.write(struct.pack(pattern, *val2.values))
00349 length = len(self.detection_result.models)
00350 buff.write(_struct_I.pack(length))
00351 for val1 in self.detection_result.models:
00352 length = len(val1.model_list)
00353 buff.write(_struct_I.pack(length))
00354 for val2 in val1.model_list:
00355 buff.write(_struct_i.pack(val2.model_id))
00356 _v3 = val2.pose
00357 _v4 = _v3.header
00358 buff.write(_struct_I.pack(_v4.seq))
00359 _v5 = _v4.stamp
00360 _x = _v5
00361 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00362 _x = _v4.frame_id
00363 length = len(_x)
00364 if python3 or type(_x) == unicode:
00365 _x = _x.encode('utf-8')
00366 length = len(_x)
00367 buff.write(struct.pack('<I%ss'%length, length, _x))
00368 _v6 = _v3.pose
00369 _v7 = _v6.position
00370 _x = _v7
00371 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00372 _v8 = _v6.orientation
00373 _x = _v8
00374 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
00375 buff.write(_struct_f.pack(val2.confidence))
00376 _x = val2.detector_name
00377 length = len(_x)
00378 if python3 or type(_x) == unicode:
00379 _x = _x.encode('utf-8')
00380 length = len(_x)
00381 buff.write(struct.pack('<I%ss'%length, length, _x))
00382 length = len(self.detection_result.cluster_model_indices)
00383 buff.write(_struct_I.pack(length))
00384 pattern = '<%si'%length
00385 buff.write(struct.pack(pattern, *self.detection_result.cluster_model_indices))
00386 _x = self
00387 buff.write(_struct_i2B.pack(_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models))
00388 _x = self.desired_frame
00389 length = len(_x)
00390 if python3 or type(_x) == unicode:
00391 _x = _x.encode('utf-8')
00392 length = len(_x)
00393 buff.write(struct.pack('<I%ss'%length, length, _x))
00394 except struct.error as se: self._check_types(se)
00395 except TypeError as te: self._check_types(te)
00396
00397 def deserialize(self, str):
00398 """
00399 unpack serialized message in str into this message instance
00400 :param str: byte array of serialized message, ``str``
00401 """
00402 try:
00403 if self.detection_result is None:
00404 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00405 end = 0
00406 _x = self
00407 start = end
00408 end += 12
00409 (_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
00410 start = end
00411 end += 4
00412 (length,) = _struct_I.unpack(str[start:end])
00413 start = end
00414 end += length
00415 if python3:
00416 self.detection_result.table.pose.header.frame_id = str[start:end].decode('utf-8')
00417 else:
00418 self.detection_result.table.pose.header.frame_id = str[start:end]
00419 _x = self
00420 start = end
00421 end += 73
00422 (_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max, _x.detection_result.table.convex_hull.type,) = _struct_7d4fb.unpack(str[start:end])
00423 start = end
00424 end += 4
00425 (length,) = _struct_I.unpack(str[start:end])
00426 pattern = '<%sd'%length
00427 start = end
00428 end += struct.calcsize(pattern)
00429 self.detection_result.table.convex_hull.dimensions = struct.unpack(pattern, str[start:end])
00430 start = end
00431 end += 4
00432 (length,) = _struct_I.unpack(str[start:end])
00433 pattern = '<%si'%length
00434 start = end
00435 end += struct.calcsize(pattern)
00436 self.detection_result.table.convex_hull.triangles = struct.unpack(pattern, str[start:end])
00437 start = end
00438 end += 4
00439 (length,) = _struct_I.unpack(str[start:end])
00440 self.detection_result.table.convex_hull.vertices = []
00441 for i in range(0, length):
00442 val1 = geometry_msgs.msg.Point()
00443 _x = val1
00444 start = end
00445 end += 24
00446 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00447 self.detection_result.table.convex_hull.vertices.append(val1)
00448 start = end
00449 end += 4
00450 (length,) = _struct_I.unpack(str[start:end])
00451 self.detection_result.clusters = []
00452 for i in range(0, length):
00453 val1 = sensor_msgs.msg.PointCloud()
00454 _v9 = val1.header
00455 start = end
00456 end += 4
00457 (_v9.seq,) = _struct_I.unpack(str[start:end])
00458 _v10 = _v9.stamp
00459 _x = _v10
00460 start = end
00461 end += 8
00462 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00463 start = end
00464 end += 4
00465 (length,) = _struct_I.unpack(str[start:end])
00466 start = end
00467 end += length
00468 if python3:
00469 _v9.frame_id = str[start:end].decode('utf-8')
00470 else:
00471 _v9.frame_id = str[start:end]
00472 start = end
00473 end += 4
00474 (length,) = _struct_I.unpack(str[start:end])
00475 val1.points = []
00476 for i in range(0, length):
00477 val2 = geometry_msgs.msg.Point32()
00478 _x = val2
00479 start = end
00480 end += 12
00481 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
00482 val1.points.append(val2)
00483 start = end
00484 end += 4
00485 (length,) = _struct_I.unpack(str[start:end])
00486 val1.channels = []
00487 for i in range(0, length):
00488 val2 = sensor_msgs.msg.ChannelFloat32()
00489 start = end
00490 end += 4
00491 (length,) = _struct_I.unpack(str[start:end])
00492 start = end
00493 end += length
00494 if python3:
00495 val2.name = str[start:end].decode('utf-8')
00496 else:
00497 val2.name = str[start:end]
00498 start = end
00499 end += 4
00500 (length,) = _struct_I.unpack(str[start:end])
00501 pattern = '<%sf'%length
00502 start = end
00503 end += struct.calcsize(pattern)
00504 val2.values = struct.unpack(pattern, str[start:end])
00505 val1.channels.append(val2)
00506 self.detection_result.clusters.append(val1)
00507 start = end
00508 end += 4
00509 (length,) = _struct_I.unpack(str[start:end])
00510 self.detection_result.models = []
00511 for i in range(0, length):
00512 val1 = household_objects_database_msgs.msg.DatabaseModelPoseList()
00513 start = end
00514 end += 4
00515 (length,) = _struct_I.unpack(str[start:end])
00516 val1.model_list = []
00517 for i in range(0, length):
00518 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
00519 start = end
00520 end += 4
00521 (val2.model_id,) = _struct_i.unpack(str[start:end])
00522 _v11 = val2.pose
00523 _v12 = _v11.header
00524 start = end
00525 end += 4
00526 (_v12.seq,) = _struct_I.unpack(str[start:end])
00527 _v13 = _v12.stamp
00528 _x = _v13
00529 start = end
00530 end += 8
00531 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00532 start = end
00533 end += 4
00534 (length,) = _struct_I.unpack(str[start:end])
00535 start = end
00536 end += length
00537 if python3:
00538 _v12.frame_id = str[start:end].decode('utf-8')
00539 else:
00540 _v12.frame_id = str[start:end]
00541 _v14 = _v11.pose
00542 _v15 = _v14.position
00543 _x = _v15
00544 start = end
00545 end += 24
00546 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00547 _v16 = _v14.orientation
00548 _x = _v16
00549 start = end
00550 end += 32
00551 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
00552 start = end
00553 end += 4
00554 (val2.confidence,) = _struct_f.unpack(str[start:end])
00555 start = end
00556 end += 4
00557 (length,) = _struct_I.unpack(str[start:end])
00558 start = end
00559 end += length
00560 if python3:
00561 val2.detector_name = str[start:end].decode('utf-8')
00562 else:
00563 val2.detector_name = str[start:end]
00564 val1.model_list.append(val2)
00565 self.detection_result.models.append(val1)
00566 start = end
00567 end += 4
00568 (length,) = _struct_I.unpack(str[start:end])
00569 pattern = '<%si'%length
00570 start = end
00571 end += struct.calcsize(pattern)
00572 self.detection_result.cluster_model_indices = struct.unpack(pattern, str[start:end])
00573 _x = self
00574 start = end
00575 end += 6
00576 (_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models,) = _struct_i2B.unpack(str[start:end])
00577 self.reset_collision_models = bool(self.reset_collision_models)
00578 self.reset_attached_models = bool(self.reset_attached_models)
00579 start = end
00580 end += 4
00581 (length,) = _struct_I.unpack(str[start:end])
00582 start = end
00583 end += length
00584 if python3:
00585 self.desired_frame = str[start:end].decode('utf-8')
00586 else:
00587 self.desired_frame = str[start:end]
00588 return self
00589 except struct.error as e:
00590 raise genpy.DeserializationError(e) #most likely buffer underfill
00591
00592
00593 def serialize_numpy(self, buff, numpy):
00594 """
00595 serialize message with numpy array types into buffer
00596 :param buff: buffer, ``StringIO``
00597 :param numpy: numpy python module
00598 """
00599 try:
00600 _x = self
00601 buff.write(_struct_3I.pack(_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs))
00602 _x = self.detection_result.table.pose.header.frame_id
00603 length = len(_x)
00604 if python3 or type(_x) == unicode:
00605 _x = _x.encode('utf-8')
00606 length = len(_x)
00607 buff.write(struct.pack('<I%ss'%length, length, _x))
00608 _x = self
00609 buff.write(_struct_7d4fb.pack(_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max, _x.detection_result.table.convex_hull.type))
00610 length = len(self.detection_result.table.convex_hull.dimensions)
00611 buff.write(_struct_I.pack(length))
00612 pattern = '<%sd'%length
00613 buff.write(self.detection_result.table.convex_hull.dimensions.tostring())
00614 length = len(self.detection_result.table.convex_hull.triangles)
00615 buff.write(_struct_I.pack(length))
00616 pattern = '<%si'%length
00617 buff.write(self.detection_result.table.convex_hull.triangles.tostring())
00618 length = len(self.detection_result.table.convex_hull.vertices)
00619 buff.write(_struct_I.pack(length))
00620 for val1 in self.detection_result.table.convex_hull.vertices:
00621 _x = val1
00622 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00623 length = len(self.detection_result.clusters)
00624 buff.write(_struct_I.pack(length))
00625 for val1 in self.detection_result.clusters:
00626 _v17 = val1.header
00627 buff.write(_struct_I.pack(_v17.seq))
00628 _v18 = _v17.stamp
00629 _x = _v18
00630 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00631 _x = _v17.frame_id
00632 length = len(_x)
00633 if python3 or type(_x) == unicode:
00634 _x = _x.encode('utf-8')
00635 length = len(_x)
00636 buff.write(struct.pack('<I%ss'%length, length, _x))
00637 length = len(val1.points)
00638 buff.write(_struct_I.pack(length))
00639 for val2 in val1.points:
00640 _x = val2
00641 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
00642 length = len(val1.channels)
00643 buff.write(_struct_I.pack(length))
00644 for val2 in val1.channels:
00645 _x = val2.name
00646 length = len(_x)
00647 if python3 or type(_x) == unicode:
00648 _x = _x.encode('utf-8')
00649 length = len(_x)
00650 buff.write(struct.pack('<I%ss'%length, length, _x))
00651 length = len(val2.values)
00652 buff.write(_struct_I.pack(length))
00653 pattern = '<%sf'%length
00654 buff.write(val2.values.tostring())
00655 length = len(self.detection_result.models)
00656 buff.write(_struct_I.pack(length))
00657 for val1 in self.detection_result.models:
00658 length = len(val1.model_list)
00659 buff.write(_struct_I.pack(length))
00660 for val2 in val1.model_list:
00661 buff.write(_struct_i.pack(val2.model_id))
00662 _v19 = val2.pose
00663 _v20 = _v19.header
00664 buff.write(_struct_I.pack(_v20.seq))
00665 _v21 = _v20.stamp
00666 _x = _v21
00667 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00668 _x = _v20.frame_id
00669 length = len(_x)
00670 if python3 or type(_x) == unicode:
00671 _x = _x.encode('utf-8')
00672 length = len(_x)
00673 buff.write(struct.pack('<I%ss'%length, length, _x))
00674 _v22 = _v19.pose
00675 _v23 = _v22.position
00676 _x = _v23
00677 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00678 _v24 = _v22.orientation
00679 _x = _v24
00680 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
00681 buff.write(_struct_f.pack(val2.confidence))
00682 _x = val2.detector_name
00683 length = len(_x)
00684 if python3 or type(_x) == unicode:
00685 _x = _x.encode('utf-8')
00686 length = len(_x)
00687 buff.write(struct.pack('<I%ss'%length, length, _x))
00688 length = len(self.detection_result.cluster_model_indices)
00689 buff.write(_struct_I.pack(length))
00690 pattern = '<%si'%length
00691 buff.write(self.detection_result.cluster_model_indices.tostring())
00692 _x = self
00693 buff.write(_struct_i2B.pack(_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models))
00694 _x = self.desired_frame
00695 length = len(_x)
00696 if python3 or type(_x) == unicode:
00697 _x = _x.encode('utf-8')
00698 length = len(_x)
00699 buff.write(struct.pack('<I%ss'%length, length, _x))
00700 except struct.error as se: self._check_types(se)
00701 except TypeError as te: self._check_types(te)
00702
00703 def deserialize_numpy(self, str, numpy):
00704 """
00705 unpack serialized message in str into this message instance using numpy for array types
00706 :param str: byte array of serialized message, ``str``
00707 :param numpy: numpy python module
00708 """
00709 try:
00710 if self.detection_result is None:
00711 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00712 end = 0
00713 _x = self
00714 start = end
00715 end += 12
00716 (_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
00717 start = end
00718 end += 4
00719 (length,) = _struct_I.unpack(str[start:end])
00720 start = end
00721 end += length
00722 if python3:
00723 self.detection_result.table.pose.header.frame_id = str[start:end].decode('utf-8')
00724 else:
00725 self.detection_result.table.pose.header.frame_id = str[start:end]
00726 _x = self
00727 start = end
00728 end += 73
00729 (_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max, _x.detection_result.table.convex_hull.type,) = _struct_7d4fb.unpack(str[start:end])
00730 start = end
00731 end += 4
00732 (length,) = _struct_I.unpack(str[start:end])
00733 pattern = '<%sd'%length
00734 start = end
00735 end += struct.calcsize(pattern)
00736 self.detection_result.table.convex_hull.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
00737 start = end
00738 end += 4
00739 (length,) = _struct_I.unpack(str[start:end])
00740 pattern = '<%si'%length
00741 start = end
00742 end += struct.calcsize(pattern)
00743 self.detection_result.table.convex_hull.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
00744 start = end
00745 end += 4
00746 (length,) = _struct_I.unpack(str[start:end])
00747 self.detection_result.table.convex_hull.vertices = []
00748 for i in range(0, length):
00749 val1 = geometry_msgs.msg.Point()
00750 _x = val1
00751 start = end
00752 end += 24
00753 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00754 self.detection_result.table.convex_hull.vertices.append(val1)
00755 start = end
00756 end += 4
00757 (length,) = _struct_I.unpack(str[start:end])
00758 self.detection_result.clusters = []
00759 for i in range(0, length):
00760 val1 = sensor_msgs.msg.PointCloud()
00761 _v25 = val1.header
00762 start = end
00763 end += 4
00764 (_v25.seq,) = _struct_I.unpack(str[start:end])
00765 _v26 = _v25.stamp
00766 _x = _v26
00767 start = end
00768 end += 8
00769 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00770 start = end
00771 end += 4
00772 (length,) = _struct_I.unpack(str[start:end])
00773 start = end
00774 end += length
00775 if python3:
00776 _v25.frame_id = str[start:end].decode('utf-8')
00777 else:
00778 _v25.frame_id = str[start:end]
00779 start = end
00780 end += 4
00781 (length,) = _struct_I.unpack(str[start:end])
00782 val1.points = []
00783 for i in range(0, length):
00784 val2 = geometry_msgs.msg.Point32()
00785 _x = val2
00786 start = end
00787 end += 12
00788 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
00789 val1.points.append(val2)
00790 start = end
00791 end += 4
00792 (length,) = _struct_I.unpack(str[start:end])
00793 val1.channels = []
00794 for i in range(0, length):
00795 val2 = sensor_msgs.msg.ChannelFloat32()
00796 start = end
00797 end += 4
00798 (length,) = _struct_I.unpack(str[start:end])
00799 start = end
00800 end += length
00801 if python3:
00802 val2.name = str[start:end].decode('utf-8')
00803 else:
00804 val2.name = str[start:end]
00805 start = end
00806 end += 4
00807 (length,) = _struct_I.unpack(str[start:end])
00808 pattern = '<%sf'%length
00809 start = end
00810 end += struct.calcsize(pattern)
00811 val2.values = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=length)
00812 val1.channels.append(val2)
00813 self.detection_result.clusters.append(val1)
00814 start = end
00815 end += 4
00816 (length,) = _struct_I.unpack(str[start:end])
00817 self.detection_result.models = []
00818 for i in range(0, length):
00819 val1 = household_objects_database_msgs.msg.DatabaseModelPoseList()
00820 start = end
00821 end += 4
00822 (length,) = _struct_I.unpack(str[start:end])
00823 val1.model_list = []
00824 for i in range(0, length):
00825 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
00826 start = end
00827 end += 4
00828 (val2.model_id,) = _struct_i.unpack(str[start:end])
00829 _v27 = val2.pose
00830 _v28 = _v27.header
00831 start = end
00832 end += 4
00833 (_v28.seq,) = _struct_I.unpack(str[start:end])
00834 _v29 = _v28.stamp
00835 _x = _v29
00836 start = end
00837 end += 8
00838 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00839 start = end
00840 end += 4
00841 (length,) = _struct_I.unpack(str[start:end])
00842 start = end
00843 end += length
00844 if python3:
00845 _v28.frame_id = str[start:end].decode('utf-8')
00846 else:
00847 _v28.frame_id = str[start:end]
00848 _v30 = _v27.pose
00849 _v31 = _v30.position
00850 _x = _v31
00851 start = end
00852 end += 24
00853 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00854 _v32 = _v30.orientation
00855 _x = _v32
00856 start = end
00857 end += 32
00858 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
00859 start = end
00860 end += 4
00861 (val2.confidence,) = _struct_f.unpack(str[start:end])
00862 start = end
00863 end += 4
00864 (length,) = _struct_I.unpack(str[start:end])
00865 start = end
00866 end += length
00867 if python3:
00868 val2.detector_name = str[start:end].decode('utf-8')
00869 else:
00870 val2.detector_name = str[start:end]
00871 val1.model_list.append(val2)
00872 self.detection_result.models.append(val1)
00873 start = end
00874 end += 4
00875 (length,) = _struct_I.unpack(str[start:end])
00876 pattern = '<%si'%length
00877 start = end
00878 end += struct.calcsize(pattern)
00879 self.detection_result.cluster_model_indices = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
00880 _x = self
00881 start = end
00882 end += 6
00883 (_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models,) = _struct_i2B.unpack(str[start:end])
00884 self.reset_collision_models = bool(self.reset_collision_models)
00885 self.reset_attached_models = bool(self.reset_attached_models)
00886 start = end
00887 end += 4
00888 (length,) = _struct_I.unpack(str[start:end])
00889 start = end
00890 end += length
00891 if python3:
00892 self.desired_frame = str[start:end].decode('utf-8')
00893 else:
00894 self.desired_frame = str[start:end]
00895 return self
00896 except struct.error as e:
00897 raise genpy.DeserializationError(e) #most likely buffer underfill
00898
00899 _struct_I = genpy.struct_I
00900 _struct_i2B = struct.Struct("<i2B")
00901 _struct_f = struct.Struct("<f")
00902 _struct_7d4fb = struct.Struct("<7d4fb")
00903 _struct_2I = struct.Struct("<2I")
00904 _struct_i = struct.Struct("<i")
00905 _struct_3I = struct.Struct("<3I")
00906 _struct_4d = struct.Struct("<4d")
00907 _struct_3f = struct.Struct("<3f")
00908 _struct_3d = struct.Struct("<3d")
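# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the generated API): how a request built with
# keyword arguments, as recommended in the constructor docstring, round-trips
# through serialize()/deserialize(). Assumes this package and its message
# dependencies are importable; the field values are arbitrary placeholders.
def _example_request_round_trip():
    from io import BytesIO
    req = TabletopCollisionMapProcessingRequest(
        reset_collision_models=True,
        reset_attached_models=False,
        desired_frame='base_link')
    buff = BytesIO()
    req.serialize(buff)                          # write wire-format bytes
    parsed = TabletopCollisionMapProcessingRequest()
    parsed.deserialize(buff.getvalue())          # and parse them back
    return parsed.desired_frame == req.desired_frame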
00909 """autogenerated by genpy from tabletop_collision_map_processing/TabletopCollisionMapProcessingResponse.msg. Do not edit."""
00910 import sys
00911 python3 = sys.hexversion > 0x03000000
00912 import genpy
00913 import struct
00914
00915 import geometry_msgs.msg
00916 import sensor_msgs.msg
00917 import std_msgs.msg
00918 import object_manipulation_msgs.msg
00919 import household_objects_database_msgs.msg
00920
00921 class TabletopCollisionMapProcessingResponse(genpy.Message):
00922 _md5sum = "684fb6e369cb671a9c4149f683c75e88"
00923 _type = "tabletop_collision_map_processing/TabletopCollisionMapProcessingResponse"
00924 _has_header = False #flag to mark the presence of a Header object
00925 _full_text = """
00926
00927 object_manipulation_msgs/GraspableObject[] graspable_objects
00928
00929
00930
00931 string[] collision_object_names
00932
00933
00934 string collision_support_surface_name
00935
00936
00937 ================================================================================
00938 MSG: object_manipulation_msgs/GraspableObject
00939 # an object that the object_manipulator can work on
00940
00941 # a graspable object can be represented in multiple ways. This message
00942 # can contain all of them. Which one is actually used is up to the receiver
00943 # of this message. When adding new representations, one must be careful that
00944 # they have reasonable lightweight defaults indicating that that particular
00945 # representation is not available.
00946
00947 # the tf frame to be used as a reference frame when combining information from
00948 # the different representations below
00949 string reference_frame_id
00950
00951 # potential recognition results from a database of models
00952 # all poses are relative to the object reference pose
00953 household_objects_database_msgs/DatabaseModelPose[] potential_models
00954
00955 # the point cloud itself
00956 sensor_msgs/PointCloud cluster
00957
00958 # a region of a PointCloud2 of interest
00959 object_manipulation_msgs/SceneRegion region
00960
00961 # the name that this object has in the collision environment
00962 string collision_name
00963 ================================================================================
00964 MSG: household_objects_database_msgs/DatabaseModelPose
00965 # Informs that a specific model from the Model Database has been
00966 # identified at a certain location
00967
00968 # the database id of the model
00969 int32 model_id
00970
00971 # the pose that it can be found in
00972 geometry_msgs/PoseStamped pose
00973
00974 # a measure of the confidence level in this detection result
00975 float32 confidence
00976
00977 # the name of the object detector that generated this detection result
00978 string detector_name
00979
00980 ================================================================================
00981 MSG: geometry_msgs/PoseStamped
00982 # A Pose with reference coordinate frame and timestamp
00983 Header header
00984 Pose pose
00985
00986 ================================================================================
00987 MSG: std_msgs/Header
00988 # Standard metadata for higher-level stamped data types.
00989 # This is generally used to communicate timestamped data
00990 # in a particular coordinate frame.
00991 #
00992 # sequence ID: consecutively increasing ID
00993 uint32 seq
00994 #Two-integer timestamp that is expressed as:
00995 # * stamp.secs: seconds (stamp_secs) since epoch
00996 # * stamp.nsecs: nanoseconds since stamp_secs
00997 # time-handling sugar is provided by the client library
00998 time stamp
00999 #Frame this data is associated with
01000 # 0: no frame
01001 # 1: global frame
01002 string frame_id
01003
01004 ================================================================================
01005 MSG: geometry_msgs/Pose
01006 # A representation of pose in free space, composed of position and orientation.
01007 Point position
01008 Quaternion orientation
01009
01010 ================================================================================
01011 MSG: geometry_msgs/Point
01012 # This contains the position of a point in free space
01013 float64 x
01014 float64 y
01015 float64 z
01016
01017 ================================================================================
01018 MSG: geometry_msgs/Quaternion
01019 # This represents an orientation in free space in quaternion form.
01020
01021 float64 x
01022 float64 y
01023 float64 z
01024 float64 w
01025
01026 ================================================================================
01027 MSG: sensor_msgs/PointCloud
01028 # This message holds a collection of 3d points, plus optional additional
01029 # information about each point.
01030
01031 # Time of sensor data acquisition, coordinate frame ID.
01032 Header header
01033
01034 # Array of 3d points. Each Point32 should be interpreted as a 3d point
01035 # in the frame given in the header.
01036 geometry_msgs/Point32[] points
01037
01038 # Each channel should have the same number of elements as points array,
01039 # and the data in each channel should correspond 1:1 with each point.
01040 # Channel names in common practice are listed in ChannelFloat32.msg.
01041 ChannelFloat32[] channels
01042
01043 ================================================================================
01044 MSG: geometry_msgs/Point32
01045 # This contains the position of a point in free space (with 32 bits of precision).
01046 # It is recommended to use Point wherever possible instead of Point32.
01047 #
01048 # This recommendation is to promote interoperability.
01049 #
01050 # This message is designed to take up less space when sending
01051 # lots of points at once, as in the case of a PointCloud.
01052
01053 float32 x
01054 float32 y
01055 float32 z
01056 ================================================================================
01057 MSG: sensor_msgs/ChannelFloat32
01058 # This message is used by the PointCloud message to hold optional data
01059 # associated with each point in the cloud. The length of the values
01060 # array should be the same as the length of the points array in the
01061 # PointCloud, and each value should be associated with the corresponding
01062 # point.
01063
01064 # Channel names in existing practice include:
01065 # "u", "v" - row and column (respectively) in the left stereo image.
01066 # This is opposite to usual conventions but remains for
01067 # historical reasons. The newer PointCloud2 message has no
01068 # such problem.
01069 # "rgb" - For point clouds produced by color stereo cameras. uint8
01070 # (R,G,B) values packed into the least significant 24 bits,
01071 # in order.
01072 # "intensity" - laser or pixel intensity.
01073 # "distance"
01074
01075 # The channel name should give semantics of the channel (e.g.
01076 # "intensity" instead of "value").
01077 string name
01078
01079 # The values array should be 1-1 with the elements of the associated
01080 # PointCloud.
01081 float32[] values
01082
01083 ================================================================================
01084 MSG: object_manipulation_msgs/SceneRegion
01085 # Point cloud
01086 sensor_msgs/PointCloud2 cloud
01087
01088 # Indices for the region of interest
01089 int32[] mask
01090
01091 # One of the corresponding 2D images, if applicable
01092 sensor_msgs/Image image
01093
01094 # The disparity image, if applicable
01095 sensor_msgs/Image disparity_image
01096
01097 # Camera info for the camera that took the image
01098 sensor_msgs/CameraInfo cam_info
01099
01100 # a 3D region of interest for grasp planning
01101 geometry_msgs/PoseStamped roi_box_pose
01102 geometry_msgs/Vector3 roi_box_dims
01103
01104 ================================================================================
01105 MSG: sensor_msgs/PointCloud2
01106 # This message holds a collection of N-dimensional points, which may
01107 # contain additional information such as normals, intensity, etc. The
01108 # point data is stored as a binary blob, its layout described by the
01109 # contents of the "fields" array.
01110
01111 # The point cloud data may be organized 2d (image-like) or 1d
01112 # (unordered). Point clouds organized as 2d images may be produced by
01113 # camera depth sensors such as stereo or time-of-flight.
01114
01115 # Time of sensor data acquisition, and the coordinate frame ID (for 3d
01116 # points).
01117 Header header
01118
01119 # 2D structure of the point cloud. If the cloud is unordered, height is
01120 # 1 and width is the length of the point cloud.
01121 uint32 height
01122 uint32 width
01123
01124 # Describes the channels and their layout in the binary data blob.
01125 PointField[] fields
01126
01127 bool is_bigendian # Is this data bigendian?
01128 uint32 point_step # Length of a point in bytes
01129 uint32 row_step # Length of a row in bytes
01130 uint8[] data # Actual point data, size is (row_step*height)
01131
01132 bool is_dense # True if there are no invalid points
01133
01134 ================================================================================
01135 MSG: sensor_msgs/PointField
01136 # This message holds the description of one point entry in the
01137 # PointCloud2 message format.
01138 uint8 INT8 = 1
01139 uint8 UINT8 = 2
01140 uint8 INT16 = 3
01141 uint8 UINT16 = 4
01142 uint8 INT32 = 5
01143 uint8 UINT32 = 6
01144 uint8 FLOAT32 = 7
01145 uint8 FLOAT64 = 8
01146
01147 string name # Name of field
01148 uint32 offset # Offset from start of point struct
01149 uint8 datatype # Datatype enumeration, see above
01150 uint32 count # How many elements in the field
01151
01152 ================================================================================
01153 MSG: sensor_msgs/Image
01154 # This message contains an uncompressed image
01155 # (0, 0) is at top-left corner of image
01156 #
01157
01158 Header header # Header timestamp should be acquisition time of image
01159 # Header frame_id should be optical frame of camera
01160 # origin of frame should be optical center of camera
01161 # +x should point to the right in the image
01162 # +y should point down in the image
01163 # +z should point into the plane of the image
01164 # If the frame_id here and the frame_id of the CameraInfo
01165 # message associated with the image conflict
01166 # the behavior is undefined
01167
01168 uint32 height # image height, that is, number of rows
01169 uint32 width # image width, that is, number of columns
01170
01171 # The legal values for encoding are in file src/image_encodings.cpp
01172 # If you want to standardize a new string format, join
01173 # ros-users@lists.sourceforge.net and send an email proposing a new encoding.
01174
01175 string encoding # Encoding of pixels -- channel meaning, ordering, size
01176 # taken from the list of strings in src/image_encodings.cpp
01177
01178 uint8 is_bigendian # is this data bigendian?
01179 uint32 step # Full row length in bytes
01180 uint8[] data # actual matrix data, size is (step * rows)
01181
01182 ================================================================================
01183 MSG: sensor_msgs/CameraInfo
01184 # This message defines meta information for a camera. It should be in a
01185 # camera namespace on topic "camera_info" and accompanied by up to five
01186 # image topics named:
01187 #
01188 # image_raw - raw data from the camera driver, possibly Bayer encoded
01189 # image - monochrome, distorted
01190 # image_color - color, distorted
01191 # image_rect - monochrome, rectified
01192 # image_rect_color - color, rectified
01193 #
01194 # The image_pipeline contains packages (image_proc, stereo_image_proc)
01195 # for producing the four processed image topics from image_raw and
01196 # camera_info. The meaning of the camera parameters are described in
01197 # detail at http://www.ros.org/wiki/image_pipeline/CameraInfo.
01198 #
01199 # The image_geometry package provides a user-friendly interface to
01200 # common operations using this meta information. If you want to, e.g.,
01201 # project a 3d point into image coordinates, we strongly recommend
01202 # using image_geometry.
01203 #
01204 # If the camera is uncalibrated, the matrices D, K, R, P should be left
01205 # zeroed out. In particular, clients may assume that K[0] == 0.0
01206 # indicates an uncalibrated camera.
01207
01208 #######################################################################
01209 # Image acquisition info #
01210 #######################################################################
01211
01212 # Time of image acquisition, camera coordinate frame ID
01213 Header header # Header timestamp should be acquisition time of image
01214 # Header frame_id should be optical frame of camera
01215 # origin of frame should be optical center of camera
01216 # +x should point to the right in the image
01217 # +y should point down in the image
01218 # +z should point into the plane of the image
01219
01220
01221 #######################################################################
01222 # Calibration Parameters #
01223 #######################################################################
01224 # These are fixed during camera calibration. Their values will be the #
01225 # same in all messages until the camera is recalibrated. Note that #
01226 # self-calibrating systems may "recalibrate" frequently. #
01227 # #
01228 # The internal parameters can be used to warp a raw (distorted) image #
01229 # to: #
01230 # 1. An undistorted image (requires D and K) #
01231 # 2. A rectified image (requires D, K, R) #
01232 # The projection matrix P projects 3D points into the rectified image.#
01233 #######################################################################
01234
01235 # The image dimensions with which the camera was calibrated. Normally
01236 # this will be the full camera resolution in pixels.
01237 uint32 height
01238 uint32 width
01239
01240 # The distortion model used. Supported models are listed in
01241 # sensor_msgs/distortion_models.h. For most cameras, "plumb_bob" - a
01242 # simple model of radial and tangential distortion - is sufficient.
01243 string distortion_model
01244
01245 # The distortion parameters, size depending on the distortion model.
01246 # For "plumb_bob", the 5 parameters are: (k1, k2, t1, t2, k3).
01247 float64[] D
01248
01249 # Intrinsic camera matrix for the raw (distorted) images.
01250 # [fx 0 cx]
01251 # K = [ 0 fy cy]
01252 # [ 0 0 1]
01253 # Projects 3D points in the camera coordinate frame to 2D pixel
01254 # coordinates using the focal lengths (fx, fy) and principal point
01255 # (cx, cy).
01256 float64[9] K # 3x3 row-major matrix
01257
01258 # Rectification matrix (stereo cameras only)
01259 # A rotation matrix aligning the camera coordinate system to the ideal
01260 # stereo image plane so that epipolar lines in both stereo images are
01261 # parallel.
01262 float64[9] R # 3x3 row-major matrix
01263
01264 # Projection/camera matrix
01265 # [fx' 0 cx' Tx]
01266 # P = [ 0 fy' cy' Ty]
01267 # [ 0 0 1 0]
01268 # By convention, this matrix specifies the intrinsic (camera) matrix
01269 # of the processed (rectified) image. That is, the left 3x3 portion
01270 # is the normal camera intrinsic matrix for the rectified image.
01271 # It projects 3D points in the camera coordinate frame to 2D pixel
01272 # coordinates using the focal lengths (fx', fy') and principal point
01273 # (cx', cy') - these may differ from the values in K.
01274 # For monocular cameras, Tx = Ty = 0. Normally, monocular cameras will
01275 # also have R = the identity and P[1:3,1:3] = K.
01276 # For a stereo pair, the fourth column [Tx Ty 0]' is related to the
01277 # position of the optical center of the second camera in the first
01278 # camera's frame. We assume Tz = 0 so both cameras are in the same
01279 # stereo image plane. The first camera always has Tx = Ty = 0. For
01280 # the right (second) camera of a horizontal stereo pair, Ty = 0 and
01281 # Tx = -fx' * B, where B is the baseline between the cameras.
01282 # Given a 3D point [X Y Z]', the projection (x, y) of the point onto
01283 # the rectified image is given by:
01284 # [u v w]' = P * [X Y Z 1]'
01285 # x = u / w
01286 # y = v / w
01287 # This holds for both images of a stereo pair.
01288 float64[12] P # 3x4 row-major matrix
01289
01290
01291 #######################################################################
01292 # Operational Parameters #
01293 #######################################################################
01294 # These define the image region actually captured by the camera #
01295 # driver. Although they affect the geometry of the output image, they #
01296 # may be changed freely without recalibrating the camera. #
01297 #######################################################################
01298
01299 # Binning refers here to any camera setting which combines rectangular
01300 # neighborhoods of pixels into larger "super-pixels." It reduces the
01301 # resolution of the output image to
01302 # (width / binning_x) x (height / binning_y).
01303 # The default values binning_x = binning_y = 0 are considered the same
01304 # as binning_x = binning_y = 1 (no subsampling).
01305 uint32 binning_x
01306 uint32 binning_y
01307
01308 # Region of interest (subwindow of full camera resolution), given in
01309 # full resolution (unbinned) image coordinates. A particular ROI
01310 # always denotes the same window of pixels on the camera sensor,
01311 # regardless of binning settings.
01312 # The default setting of roi (all values 0) is considered the same as
01313 # full resolution (roi.width = width, roi.height = height).
01314 RegionOfInterest roi
01315
01316 ================================================================================
01317 MSG: sensor_msgs/RegionOfInterest
01318 # This message is used to specify a region of interest within an image.
01319 #
01320 # When used to specify the ROI setting of the camera when the image was
01321 # taken, the height and width fields should either match the height and
01322 # width fields for the associated image; or height = width = 0
01323 # indicates that the full resolution image was captured.
01324
01325 uint32 x_offset # Leftmost pixel of the ROI
01326 # (0 if the ROI includes the left edge of the image)
01327 uint32 y_offset # Topmost pixel of the ROI
01328 # (0 if the ROI includes the top edge of the image)
01329 uint32 height # Height of ROI
01330 uint32 width # Width of ROI
01331
01332 # True if a distinct rectified ROI should be calculated from the "raw"
01333 # ROI in this message. Typically this should be False if the full image
01334 # is captured (ROI not used), and True if a subwindow is captured (ROI
01335 # used).
01336 bool do_rectify
01337
01338 ================================================================================
01339 MSG: geometry_msgs/Vector3
01340 # This represents a vector in free space.
01341
01342 float64 x
01343 float64 y
01344 float64 z
01345 """
01346 __slots__ = ['graspable_objects','collision_object_names','collision_support_surface_name']
01347 _slot_types = ['object_manipulation_msgs/GraspableObject[]','string[]','string']
01348
01349 def __init__(self, *args, **kwds):
01350 """
01351 Constructor. Any message fields that are implicitly/explicitly
01352 set to None will be assigned a default value. The recommended
01353 use is keyword arguments as this is more robust to future message
01354 changes. You cannot mix in-order arguments and keyword arguments.
01355
01356 The available fields are:
01357 graspable_objects,collision_object_names,collision_support_surface_name
01358
01359 :param args: complete set of field values, in .msg order
01360 :param kwds: use keyword arguments corresponding to message field names
01361 to set specific fields.
01362 """
01363 if args or kwds:
01364 super(TabletopCollisionMapProcessingResponse, self).__init__(*args, **kwds)
01365 #message fields cannot be None, assign default values for those that are
01366 if self.graspable_objects is None:
01367 self.graspable_objects = []
01368 if self.collision_object_names is None:
01369 self.collision_object_names = []
01370 if self.collision_support_surface_name is None:
01371 self.collision_support_surface_name = ''
01372 else:
01373 self.graspable_objects = []
01374 self.collision_object_names = []
01375 self.collision_support_surface_name = ''
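# Illustrative note (assumption, not generated code): in practice this response
# is populated by the service call rather than constructed by hand, but it
# accepts the same keyword-argument style as the request, e.g.
#   resp = TabletopCollisionMapProcessingResponse(
#       collision_object_names=['graspable_object_0'],
#       collision_support_surface_name='table')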
01376
01377 def _get_types(self):
01378 """
01379 internal API method
01380 """
01381 return self._slot_types
01382
01383 def serialize(self, buff):
01384 """
01385 serialize message into buffer
01386 :param buff: buffer, ``StringIO``
01387 """
01388 try:
01389 length = len(self.graspable_objects)
01390 buff.write(_struct_I.pack(length))
01391 for val1 in self.graspable_objects:
01392 _x = val1.reference_frame_id
01393 length = len(_x)
01394 if python3 or type(_x) == unicode:
01395 _x = _x.encode('utf-8')
01396 length = len(_x)
01397 buff.write(struct.pack('<I%ss'%length, length, _x))
01398 length = len(val1.potential_models)
01399 buff.write(_struct_I.pack(length))
01400 for val2 in val1.potential_models:
01401 buff.write(_struct_i.pack(val2.model_id))
01402 _v33 = val2.pose
01403 _v34 = _v33.header
01404 buff.write(_struct_I.pack(_v34.seq))
01405 _v35 = _v34.stamp
01406 _x = _v35
01407 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01408 _x = _v34.frame_id
01409 length = len(_x)
01410 if python3 or type(_x) == unicode:
01411 _x = _x.encode('utf-8')
01412 length = len(_x)
01413 buff.write(struct.pack('<I%ss'%length, length, _x))
01414 _v36 = _v33.pose
01415 _v37 = _v36.position
01416 _x = _v37
01417 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01418 _v38 = _v36.orientation
01419 _x = _v38
01420 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
01421 buff.write(_struct_f.pack(val2.confidence))
01422 _x = val2.detector_name
01423 length = len(_x)
01424 if python3 or type(_x) == unicode:
01425 _x = _x.encode('utf-8')
01426 length = len(_x)
01427 buff.write(struct.pack('<I%ss'%length, length, _x))
01428 _v39 = val1.cluster
01429 _v40 = _v39.header
01430 buff.write(_struct_I.pack(_v40.seq))
01431 _v41 = _v40.stamp
01432 _x = _v41
01433 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01434 _x = _v40.frame_id
01435 length = len(_x)
01436 if python3 or type(_x) == unicode:
01437 _x = _x.encode('utf-8')
01438 length = len(_x)
01439 buff.write(struct.pack('<I%ss'%length, length, _x))
01440 length = len(_v39.points)
01441 buff.write(_struct_I.pack(length))
01442 for val3 in _v39.points:
01443 _x = val3
01444 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
01445 length = len(_v39.channels)
01446 buff.write(_struct_I.pack(length))
01447 for val3 in _v39.channels:
01448 _x = val3.name
01449 length = len(_x)
01450 if python3 or type(_x) == unicode:
01451 _x = _x.encode('utf-8')
01452 length = len(_x)
01453 buff.write(struct.pack('<I%ss'%length, length, _x))
01454 length = len(val3.values)
01455 buff.write(_struct_I.pack(length))
01456 pattern = '<%sf'%length
01457 buff.write(struct.pack(pattern, *val3.values))
01458 _v42 = val1.region
01459 _v43 = _v42.cloud
01460 _v44 = _v43.header
01461 buff.write(_struct_I.pack(_v44.seq))
01462 _v45 = _v44.stamp
01463 _x = _v45
01464 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01465 _x = _v44.frame_id
01466 length = len(_x)
01467 if python3 or type(_x) == unicode:
01468 _x = _x.encode('utf-8')
01469 length = len(_x)
01470 buff.write(struct.pack('<I%ss'%length, length, _x))
01471 _x = _v43
01472 buff.write(_struct_2I.pack(_x.height, _x.width))
01473 length = len(_v43.fields)
01474 buff.write(_struct_I.pack(length))
01475 for val4 in _v43.fields:
01476 _x = val4.name
01477 length = len(_x)
01478 if python3 or type(_x) == unicode:
01479 _x = _x.encode('utf-8')
01480 length = len(_x)
01481 buff.write(struct.pack('<I%ss'%length, length, _x))
01482 _x = val4
01483 buff.write(_struct_IBI.pack(_x.offset, _x.datatype, _x.count))
01484 _x = _v43
01485 buff.write(_struct_B2I.pack(_x.is_bigendian, _x.point_step, _x.row_step))
01486 _x = _v43.data
01487 length = len(_x)
01488 # - if the data field is held as a list/tuple, serialize it as bytes; otherwise write it as a string
01489 if type(_x) in [list, tuple]:
01490 buff.write(struct.pack('<I%sB'%length, length, *_x))
01491 else:
01492 buff.write(struct.pack('<I%ss'%length, length, _x))
01493 buff.write(_struct_B.pack(_v43.is_dense))
01494 length = len(_v42.mask)
01495 buff.write(_struct_I.pack(length))
01496 pattern = '<%si'%length
01497 buff.write(struct.pack(pattern, *_v42.mask))
01498 _v46 = _v42.image
01499 _v47 = _v46.header
01500 buff.write(_struct_I.pack(_v47.seq))
01501 _v48 = _v47.stamp
01502 _x = _v48
01503 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01504 _x = _v47.frame_id
01505 length = len(_x)
01506 if python3 or type(_x) == unicode:
01507 _x = _x.encode('utf-8')
01508 length = len(_x)
01509 buff.write(struct.pack('<I%ss'%length, length, _x))
01510 _x = _v46
01511 buff.write(_struct_2I.pack(_x.height, _x.width))
01512 _x = _v46.encoding
01513 length = len(_x)
01514 if python3 or type(_x) == unicode:
01515 _x = _x.encode('utf-8')
01516 length = len(_x)
01517 buff.write(struct.pack('<I%ss'%length, length, _x))
01518 _x = _v46
01519 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01520 _x = _v46.data
01521 length = len(_x)
01522 # - if the data field is held as a list/tuple, serialize it as bytes; otherwise write it as a string
01523 if type(_x) in [list, tuple]:
01524 buff.write(struct.pack('<I%sB'%length, length, *_x))
01525 else:
01526 buff.write(struct.pack('<I%ss'%length, length, _x))
01527 _v49 = _v42.disparity_image
01528 _v50 = _v49.header
01529 buff.write(_struct_I.pack(_v50.seq))
01530 _v51 = _v50.stamp
01531 _x = _v51
01532 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01533 _x = _v50.frame_id
01534 length = len(_x)
01535 if python3 or type(_x) == unicode:
01536 _x = _x.encode('utf-8')
01537 length = len(_x)
01538 buff.write(struct.pack('<I%ss'%length, length, _x))
01539 _x = _v49
01540 buff.write(_struct_2I.pack(_x.height, _x.width))
01541 _x = _v49.encoding
01542 length = len(_x)
01543 if python3 or type(_x) == unicode:
01544 _x = _x.encode('utf-8')
01545 length = len(_x)
01546 buff.write(struct.pack('<I%ss'%length, length, _x))
01547 _x = _v49
01548 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01549 _x = _v49.data
01550 length = len(_x)
01551 # - if the data field is held as a list/tuple, serialize it as bytes; otherwise write it as a string
01552 if type(_x) in [list, tuple]:
01553 buff.write(struct.pack('<I%sB'%length, length, *_x))
01554 else:
01555 buff.write(struct.pack('<I%ss'%length, length, _x))
01556 _v52 = _v42.cam_info
01557 _v53 = _v52.header
01558 buff.write(_struct_I.pack(_v53.seq))
01559 _v54 = _v53.stamp
01560 _x = _v54
01561 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01562 _x = _v53.frame_id
01563 length = len(_x)
01564 if python3 or type(_x) == unicode:
01565 _x = _x.encode('utf-8')
01566 length = len(_x)
01567 buff.write(struct.pack('<I%ss'%length, length, _x))
01568 _x = _v52
01569 buff.write(_struct_2I.pack(_x.height, _x.width))
01570 _x = _v52.distortion_model
01571 length = len(_x)
01572 if python3 or type(_x) == unicode:
01573 _x = _x.encode('utf-8')
01574 length = len(_x)
01575 buff.write(struct.pack('<I%ss'%length, length, _x))
01576 length = len(_v52.D)
01577 buff.write(_struct_I.pack(length))
01578 pattern = '<%sd'%length
01579 buff.write(struct.pack(pattern, *_v52.D))
01580 buff.write(_struct_9d.pack(*_v52.K))
01581 buff.write(_struct_9d.pack(*_v52.R))
01582 buff.write(_struct_12d.pack(*_v52.P))
01583 _x = _v52
01584 buff.write(_struct_2I.pack(_x.binning_x, _x.binning_y))
01585 _v55 = _v52.roi
01586 _x = _v55
01587 buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
01588 _v56 = _v42.roi_box_pose
01589 _v57 = _v56.header
01590 buff.write(_struct_I.pack(_v57.seq))
01591 _v58 = _v57.stamp
01592 _x = _v58
01593 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01594 _x = _v57.frame_id
01595 length = len(_x)
01596 if python3 or type(_x) == unicode:
01597 _x = _x.encode('utf-8')
01598 length = len(_x)
01599 buff.write(struct.pack('<I%ss'%length, length, _x))
01600 _v59 = _v56.pose
01601 _v60 = _v59.position
01602 _x = _v60
01603 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01604 _v61 = _v59.orientation
01605 _x = _v61
01606 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
01607 _v62 = _v42.roi_box_dims
01608 _x = _v62
01609 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01610 _x = val1.collision_name
01611 length = len(_x)
01612 if python3 or type(_x) == unicode:
01613 _x = _x.encode('utf-8')
01614 length = len(_x)
01615 buff.write(struct.pack('<I%ss'%length, length, _x))
01616 length = len(self.collision_object_names)
01617 buff.write(_struct_I.pack(length))
01618 for val1 in self.collision_object_names:
01619 length = len(val1)
01620 if python3 or type(val1) == unicode:
01621 val1 = val1.encode('utf-8')
01622 length = len(val1)
01623 buff.write(struct.pack('<I%ss'%length, length, val1))
01624 _x = self.collision_support_surface_name
01625 length = len(_x)
01626 if python3 or type(_x) == unicode:
01627 _x = _x.encode('utf-8')
01628 length = len(_x)
01629 buff.write(struct.pack('<I%ss'%length, length, _x))
01630 except struct.error as se: self._check_types(se)
01631 except TypeError as te: self._check_types(te)
01632
01633 def deserialize(self, str):
01634 """
01635 unpack serialized message in str into this message instance
01636 :param str: byte array of serialized message, ``str``
01637 """
01638 try:
01639 if self.graspable_objects is None:
01640 self.graspable_objects = None
01641 end = 0
01642 start = end
01643 end += 4
01644 (length,) = _struct_I.unpack(str[start:end])
01645 self.graspable_objects = []
01646 for i in range(0, length):
01647 val1 = object_manipulation_msgs.msg.GraspableObject()
01648 start = end
01649 end += 4
01650 (length,) = _struct_I.unpack(str[start:end])
01651 start = end
01652 end += length
01653 if python3:
01654 val1.reference_frame_id = str[start:end].decode('utf-8')
01655 else:
01656 val1.reference_frame_id = str[start:end]
01657 start = end
01658 end += 4
01659 (length,) = _struct_I.unpack(str[start:end])
01660 val1.potential_models = []
01661 for i in range(0, length):
01662 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
01663 start = end
01664 end += 4
01665 (val2.model_id,) = _struct_i.unpack(str[start:end])
01666 _v63 = val2.pose
01667 _v64 = _v63.header
01668 start = end
01669 end += 4
01670 (_v64.seq,) = _struct_I.unpack(str[start:end])
01671 _v65 = _v64.stamp
01672 _x = _v65
01673 start = end
01674 end += 8
01675 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01676 start = end
01677 end += 4
01678 (length,) = _struct_I.unpack(str[start:end])
01679 start = end
01680 end += length
01681 if python3:
01682 _v64.frame_id = str[start:end].decode('utf-8')
01683 else:
01684 _v64.frame_id = str[start:end]
01685 _v66 = _v63.pose
01686 _v67 = _v66.position
01687 _x = _v67
01688 start = end
01689 end += 24
01690 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
01691 _v68 = _v66.orientation
01692 _x = _v68
01693 start = end
01694 end += 32
01695 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
01696 start = end
01697 end += 4
01698 (val2.confidence,) = _struct_f.unpack(str[start:end])
01699 start = end
01700 end += 4
01701 (length,) = _struct_I.unpack(str[start:end])
01702 start = end
01703 end += length
01704 if python3:
01705 val2.detector_name = str[start:end].decode('utf-8')
01706 else:
01707 val2.detector_name = str[start:end]
01708 val1.potential_models.append(val2)
01709 _v69 = val1.cluster
01710 _v70 = _v69.header
01711 start = end
01712 end += 4
01713 (_v70.seq,) = _struct_I.unpack(str[start:end])
01714 _v71 = _v70.stamp
01715 _x = _v71
01716 start = end
01717 end += 8
01718 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01719 start = end
01720 end += 4
01721 (length,) = _struct_I.unpack(str[start:end])
01722 start = end
01723 end += length
01724 if python3:
01725 _v70.frame_id = str[start:end].decode('utf-8')
01726 else:
01727 _v70.frame_id = str[start:end]
01728 start = end
01729 end += 4
01730 (length,) = _struct_I.unpack(str[start:end])
01731 _v69.points = []
01732 for i in range(0, length):
01733 val3 = geometry_msgs.msg.Point32()
01734 _x = val3
01735 start = end
01736 end += 12
01737 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
01738 _v69.points.append(val3)
01739 start = end
01740 end += 4
01741 (length,) = _struct_I.unpack(str[start:end])
01742 _v69.channels = []
01743 for i in range(0, length):
01744 val3 = sensor_msgs.msg.ChannelFloat32()
01745 start = end
01746 end += 4
01747 (length,) = _struct_I.unpack(str[start:end])
01748 start = end
01749 end += length
01750 if python3:
01751 val3.name = str[start:end].decode('utf-8')
01752 else:
01753 val3.name = str[start:end]
01754 start = end
01755 end += 4
01756 (length,) = _struct_I.unpack(str[start:end])
01757 pattern = '<%sf'%length
01758 start = end
01759 end += struct.calcsize(pattern)
01760 val3.values = struct.unpack(pattern, str[start:end])
01761 _v69.channels.append(val3)
01762 _v72 = val1.region
01763 _v73 = _v72.cloud
01764 _v74 = _v73.header
01765 start = end
01766 end += 4
01767 (_v74.seq,) = _struct_I.unpack(str[start:end])
01768 _v75 = _v74.stamp
01769 _x = _v75
01770 start = end
01771 end += 8
01772 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01773 start = end
01774 end += 4
01775 (length,) = _struct_I.unpack(str[start:end])
01776 start = end
01777 end += length
01778 if python3:
01779 _v74.frame_id = str[start:end].decode('utf-8')
01780 else:
01781 _v74.frame_id = str[start:end]
01782 _x = _v73
01783 start = end
01784 end += 8
01785 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01786 start = end
01787 end += 4
01788 (length,) = _struct_I.unpack(str[start:end])
01789 _v73.fields = []
01790 for i in range(0, length):
01791 val4 = sensor_msgs.msg.PointField()
01792 start = end
01793 end += 4
01794 (length,) = _struct_I.unpack(str[start:end])
01795 start = end
01796 end += length
01797 if python3:
01798 val4.name = str[start:end].decode('utf-8')
01799 else:
01800 val4.name = str[start:end]
01801 _x = val4
01802 start = end
01803 end += 9
01804 (_x.offset, _x.datatype, _x.count,) = _struct_IBI.unpack(str[start:end])
01805 _v73.fields.append(val4)
01806 _x = _v73
01807 start = end
01808 end += 9
01809 (_x.is_bigendian, _x.point_step, _x.row_step,) = _struct_B2I.unpack(str[start:end])
01810 _v73.is_bigendian = bool(_v73.is_bigendian)
01811 start = end
01812 end += 4
01813 (length,) = _struct_I.unpack(str[start:end])
01814 start = end
01815 end += length
01816 if python3:
01817 _v73.data = str[start:end].decode('utf-8')
01818 else:
01819 _v73.data = str[start:end]
01820 start = end
01821 end += 1
01822 (_v73.is_dense,) = _struct_B.unpack(str[start:end])
01823 _v73.is_dense = bool(_v73.is_dense)
01824 start = end
01825 end += 4
01826 (length,) = _struct_I.unpack(str[start:end])
01827 pattern = '<%si'%length
01828 start = end
01829 end += struct.calcsize(pattern)
01830 _v72.mask = struct.unpack(pattern, str[start:end])
01831 _v76 = _v72.image
01832 _v77 = _v76.header
01833 start = end
01834 end += 4
01835 (_v77.seq,) = _struct_I.unpack(str[start:end])
01836 _v78 = _v77.stamp
01837 _x = _v78
01838 start = end
01839 end += 8
01840 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01841 start = end
01842 end += 4
01843 (length,) = _struct_I.unpack(str[start:end])
01844 start = end
01845 end += length
01846 if python3:
01847 _v77.frame_id = str[start:end].decode('utf-8')
01848 else:
01849 _v77.frame_id = str[start:end]
01850 _x = _v76
01851 start = end
01852 end += 8
01853 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01854 start = end
01855 end += 4
01856 (length,) = _struct_I.unpack(str[start:end])
01857 start = end
01858 end += length
01859 if python3:
01860 _v76.encoding = str[start:end].decode('utf-8')
01861 else:
01862 _v76.encoding = str[start:end]
01863 _x = _v76
01864 start = end
01865 end += 5
01866 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
01867 start = end
01868 end += 4
01869 (length,) = _struct_I.unpack(str[start:end])
01870 start = end
01871 end += length
01872 if python3:
01873 _v76.data = str[start:end].decode('utf-8')
01874 else:
01875 _v76.data = str[start:end]
01876 _v79 = _v72.disparity_image
01877 _v80 = _v79.header
01878 start = end
01879 end += 4
01880 (_v80.seq,) = _struct_I.unpack(str[start:end])
01881 _v81 = _v80.stamp
01882 _x = _v81
01883 start = end
01884 end += 8
01885 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01886 start = end
01887 end += 4
01888 (length,) = _struct_I.unpack(str[start:end])
01889 start = end
01890 end += length
01891 if python3:
01892 _v80.frame_id = str[start:end].decode('utf-8')
01893 else:
01894 _v80.frame_id = str[start:end]
01895 _x = _v79
01896 start = end
01897 end += 8
01898 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01899 start = end
01900 end += 4
01901 (length,) = _struct_I.unpack(str[start:end])
01902 start = end
01903 end += length
01904 if python3:
01905 _v79.encoding = str[start:end].decode('utf-8')
01906 else:
01907 _v79.encoding = str[start:end]
01908 _x = _v79
01909 start = end
01910 end += 5
01911 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
01912 start = end
01913 end += 4
01914 (length,) = _struct_I.unpack(str[start:end])
01915 start = end
01916 end += length
01917 if python3:
01918 _v79.data = str[start:end].decode('utf-8')
01919 else:
01920 _v79.data = str[start:end]
01921 _v82 = _v72.cam_info
01922 _v83 = _v82.header
01923 start = end
01924 end += 4
01925 (_v83.seq,) = _struct_I.unpack(str[start:end])
01926 _v84 = _v83.stamp
01927 _x = _v84
01928 start = end
01929 end += 8
01930 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01931 start = end
01932 end += 4
01933 (length,) = _struct_I.unpack(str[start:end])
01934 start = end
01935 end += length
01936 if python3:
01937 _v83.frame_id = str[start:end].decode('utf-8')
01938 else:
01939 _v83.frame_id = str[start:end]
01940 _x = _v82
01941 start = end
01942 end += 8
01943 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01944 start = end
01945 end += 4
01946 (length,) = _struct_I.unpack(str[start:end])
01947 start = end
01948 end += length
01949 if python3:
01950 _v82.distortion_model = str[start:end].decode('utf-8')
01951 else:
01952 _v82.distortion_model = str[start:end]
01953 start = end
01954 end += 4
01955 (length,) = _struct_I.unpack(str[start:end])
01956 pattern = '<%sd'%length
01957 start = end
01958 end += struct.calcsize(pattern)
01959 _v82.D = struct.unpack(pattern, str[start:end])
01960 start = end
01961 end += 72
01962 _v82.K = _struct_9d.unpack(str[start:end])
01963 start = end
01964 end += 72
01965 _v82.R = _struct_9d.unpack(str[start:end])
01966 start = end
01967 end += 96
01968 _v82.P = _struct_12d.unpack(str[start:end])
01969 _x = _v82
01970 start = end
01971 end += 8
01972 (_x.binning_x, _x.binning_y,) = _struct_2I.unpack(str[start:end])
01973 _v85 = _v82.roi
01974 _x = _v85
01975 start = end
01976 end += 17
01977 (_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
01978 _v85.do_rectify = bool(_v85.do_rectify)
01979 _v86 = _v72.roi_box_pose
01980 _v87 = _v86.header
01981 start = end
01982 end += 4
01983 (_v87.seq,) = _struct_I.unpack(str[start:end])
01984 _v88 = _v87.stamp
01985 _x = _v88
01986 start = end
01987 end += 8
01988 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01989 start = end
01990 end += 4
01991 (length,) = _struct_I.unpack(str[start:end])
01992 start = end
01993 end += length
01994 if python3:
01995 _v87.frame_id = str[start:end].decode('utf-8')
01996 else:
01997 _v87.frame_id = str[start:end]
01998 _v89 = _v86.pose
01999 _v90 = _v89.position
02000 _x = _v90
02001 start = end
02002 end += 24
02003 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02004 _v91 = _v89.orientation
02005 _x = _v91
02006 start = end
02007 end += 32
02008 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
02009 _v92 = _v72.roi_box_dims
02010 _x = _v92
02011 start = end
02012 end += 24
02013 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02014 start = end
02015 end += 4
02016 (length,) = _struct_I.unpack(str[start:end])
02017 start = end
02018 end += length
02019 if python3:
02020 val1.collision_name = str[start:end].decode('utf-8')
02021 else:
02022 val1.collision_name = str[start:end]
02023 self.graspable_objects.append(val1)
02024 start = end
02025 end += 4
02026 (length,) = _struct_I.unpack(str[start:end])
02027 self.collision_object_names = []
02028 for i in range(0, length):
02029 start = end
02030 end += 4
02031 (length,) = _struct_I.unpack(str[start:end])
02032 start = end
02033 end += length
02034 if python3:
02035 val1 = str[start:end].decode('utf-8')
02036 else:
02037 val1 = str[start:end]
02038 self.collision_object_names.append(val1)
02039 start = end
02040 end += 4
02041 (length,) = _struct_I.unpack(str[start:end])
02042 start = end
02043 end += length
02044 if python3:
02045 self.collision_support_surface_name = str[start:end].decode('utf-8')
02046 else:
02047 self.collision_support_surface_name = str[start:end]
02048 return self
02049 except struct.error as e:
02050 raise genpy.DeserializationError(e) # most likely a buffer underfill (serialized data shorter than the message expects)
02051
02052
02053 def serialize_numpy(self, buff, numpy):
02054 """
02055 serialize message with numpy array types into buffer
02056 :param buff: buffer, ``StringIO``
02057 :param numpy: numpy python module
02058 """
02059 try:
02060 length = len(self.graspable_objects)
02061 buff.write(_struct_I.pack(length))
02062 for val1 in self.graspable_objects:
02063 _x = val1.reference_frame_id
02064 length = len(_x)
02065 if python3 or type(_x) == unicode:
02066 _x = _x.encode('utf-8')
02067 length = len(_x)
02068 buff.write(struct.pack('<I%ss'%length, length, _x))
02069 length = len(val1.potential_models)
02070 buff.write(_struct_I.pack(length))
02071 for val2 in val1.potential_models:
02072 buff.write(_struct_i.pack(val2.model_id))
02073 _v93 = val2.pose
02074 _v94 = _v93.header
02075 buff.write(_struct_I.pack(_v94.seq))
02076 _v95 = _v94.stamp
02077 _x = _v95
02078 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02079 _x = _v94.frame_id
02080 length = len(_x)
02081 if python3 or type(_x) == unicode:
02082 _x = _x.encode('utf-8')
02083 length = len(_x)
02084 buff.write(struct.pack('<I%ss'%length, length, _x))
02085 _v96 = _v93.pose
02086 _v97 = _v96.position
02087 _x = _v97
02088 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
02089 _v98 = _v96.orientation
02090 _x = _v98
02091 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
02092 buff.write(_struct_f.pack(val2.confidence))
02093 _x = val2.detector_name
02094 length = len(_x)
02095 if python3 or type(_x) == unicode:
02096 _x = _x.encode('utf-8')
02097 length = len(_x)
02098 buff.write(struct.pack('<I%ss'%length, length, _x))
02099 _v99 = val1.cluster
02100 _v100 = _v99.header
02101 buff.write(_struct_I.pack(_v100.seq))
02102 _v101 = _v100.stamp
02103 _x = _v101
02104 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02105 _x = _v100.frame_id
02106 length = len(_x)
02107 if python3 or type(_x) == unicode:
02108 _x = _x.encode('utf-8')
02109 length = len(_x)
02110 buff.write(struct.pack('<I%ss'%length, length, _x))
02111 length = len(_v99.points)
02112 buff.write(_struct_I.pack(length))
02113 for val3 in _v99.points:
02114 _x = val3
02115 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
02116 length = len(_v99.channels)
02117 buff.write(_struct_I.pack(length))
02118 for val3 in _v99.channels:
02119 _x = val3.name
02120 length = len(_x)
02121 if python3 or type(_x) == unicode:
02122 _x = _x.encode('utf-8')
02123 length = len(_x)
02124 buff.write(struct.pack('<I%ss'%length, length, _x))
02125 length = len(val3.values)
02126 buff.write(_struct_I.pack(length))
02127 pattern = '<%sf'%length
02128 buff.write(val3.values.tostring())
02129 _v102 = val1.region
02130 _v103 = _v102.cloud
02131 _v104 = _v103.header
02132 buff.write(_struct_I.pack(_v104.seq))
02133 _v105 = _v104.stamp
02134 _x = _v105
02135 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02136 _x = _v104.frame_id
02137 length = len(_x)
02138 if python3 or type(_x) == unicode:
02139 _x = _x.encode('utf-8')
02140 length = len(_x)
02141 buff.write(struct.pack('<I%ss'%length, length, _x))
02142 _x = _v103
02143 buff.write(_struct_2I.pack(_x.height, _x.width))
02144 length = len(_v103.fields)
02145 buff.write(_struct_I.pack(length))
02146 for val4 in _v103.fields:
02147 _x = val4.name
02148 length = len(_x)
02149 if python3 or type(_x) == unicode:
02150 _x = _x.encode('utf-8')
02151 length = len(_x)
02152 buff.write(struct.pack('<I%ss'%length, length, _x))
02153 _x = val4
02154 buff.write(_struct_IBI.pack(_x.offset, _x.datatype, _x.count))
02155 _x = _v103
02156 buff.write(_struct_B2I.pack(_x.is_bigendian, _x.point_step, _x.row_step))
02157 _x = _v103.data
02158 length = len(_x)
02159 # - if the data field is held as a list/tuple, serialize it as bytes; otherwise write it as a string
02160 if type(_x) in [list, tuple]:
02161 buff.write(struct.pack('<I%sB'%length, length, *_x))
02162 else:
02163 buff.write(struct.pack('<I%ss'%length, length, _x))
02164 buff.write(_struct_B.pack(_v103.is_dense))
02165 length = len(_v102.mask)
02166 buff.write(_struct_I.pack(length))
02167 pattern = '<%si'%length
02168 buff.write(_v102.mask.tostring())
02169 _v106 = _v102.image
02170 _v107 = _v106.header
02171 buff.write(_struct_I.pack(_v107.seq))
02172 _v108 = _v107.stamp
02173 _x = _v108
02174 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02175 _x = _v107.frame_id
02176 length = len(_x)
02177 if python3 or type(_x) == unicode:
02178 _x = _x.encode('utf-8')
02179 length = len(_x)
02180 buff.write(struct.pack('<I%ss'%length, length, _x))
02181 _x = _v106
02182 buff.write(_struct_2I.pack(_x.height, _x.width))
02183 _x = _v106.encoding
02184 length = len(_x)
02185 if python3 or type(_x) == unicode:
02186 _x = _x.encode('utf-8')
02187 length = len(_x)
02188 buff.write(struct.pack('<I%ss'%length, length, _x))
02189 _x = _v106
02190 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
02191 _x = _v106.data
02192 length = len(_x)
02193 # - if the data field is held as a list/tuple, serialize it as bytes; otherwise write it as a string
02194 if type(_x) in [list, tuple]:
02195 buff.write(struct.pack('<I%sB'%length, length, *_x))
02196 else:
02197 buff.write(struct.pack('<I%ss'%length, length, _x))
02198 _v109 = _v102.disparity_image
02199 _v110 = _v109.header
02200 buff.write(_struct_I.pack(_v110.seq))
02201 _v111 = _v110.stamp
02202 _x = _v111
02203 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02204 _x = _v110.frame_id
02205 length = len(_x)
02206 if python3 or type(_x) == unicode:
02207 _x = _x.encode('utf-8')
02208 length = len(_x)
02209 buff.write(struct.pack('<I%ss'%length, length, _x))
02210 _x = _v109
02211 buff.write(_struct_2I.pack(_x.height, _x.width))
02212 _x = _v109.encoding
02213 length = len(_x)
02214 if python3 or type(_x) == unicode:
02215 _x = _x.encode('utf-8')
02216 length = len(_x)
02217 buff.write(struct.pack('<I%ss'%length, length, _x))
02218 _x = _v109
02219 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
02220 _x = _v109.data
02221 length = len(_x)
02222 # - if the data field is held as a list/tuple, serialize it as bytes; otherwise write it as a string
02223 if type(_x) in [list, tuple]:
02224 buff.write(struct.pack('<I%sB'%length, length, *_x))
02225 else:
02226 buff.write(struct.pack('<I%ss'%length, length, _x))
02227 _v112 = _v102.cam_info
02228 _v113 = _v112.header
02229 buff.write(_struct_I.pack(_v113.seq))
02230 _v114 = _v113.stamp
02231 _x = _v114
02232 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02233 _x = _v113.frame_id
02234 length = len(_x)
02235 if python3 or type(_x) == unicode:
02236 _x = _x.encode('utf-8')
02237 length = len(_x)
02238 buff.write(struct.pack('<I%ss'%length, length, _x))
02239 _x = _v112
02240 buff.write(_struct_2I.pack(_x.height, _x.width))
02241 _x = _v112.distortion_model
02242 length = len(_x)
02243 if python3 or type(_x) == unicode:
02244 _x = _x.encode('utf-8')
02245 length = len(_x)
02246 buff.write(struct.pack('<I%ss'%length, length, _x))
02247 length = len(_v112.D)
02248 buff.write(_struct_I.pack(length))
02249 pattern = '<%sd'%length
02250 buff.write(_v112.D.tostring())
02251 buff.write(_v112.K.tostring())
02252 buff.write(_v112.R.tostring())
02253 buff.write(_v112.P.tostring())
02254 _x = _v112
02255 buff.write(_struct_2I.pack(_x.binning_x, _x.binning_y))
02256 _v115 = _v112.roi
02257 _x = _v115
02258 buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
02259 _v116 = _v102.roi_box_pose
02260 _v117 = _v116.header
02261 buff.write(_struct_I.pack(_v117.seq))
02262 _v118 = _v117.stamp
02263 _x = _v118
02264 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02265 _x = _v117.frame_id
02266 length = len(_x)
02267 if python3 or type(_x) == unicode:
02268 _x = _x.encode('utf-8')
02269 length = len(_x)
02270 buff.write(struct.pack('<I%ss'%length, length, _x))
02271 _v119 = _v116.pose
02272 _v120 = _v119.position
02273 _x = _v120
02274 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
02275 _v121 = _v119.orientation
02276 _x = _v121
02277 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
02278 _v122 = _v102.roi_box_dims
02279 _x = _v122
02280 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
02281 _x = val1.collision_name
02282 length = len(_x)
02283 if python3 or type(_x) == unicode:
02284 _x = _x.encode('utf-8')
02285 length = len(_x)
02286 buff.write(struct.pack('<I%ss'%length, length, _x))
02287 length = len(self.collision_object_names)
02288 buff.write(_struct_I.pack(length))
02289 for val1 in self.collision_object_names:
02290 length = len(val1)
02291 if python3 or type(val1) == unicode:
02292 val1 = val1.encode('utf-8')
02293 length = len(val1)
02294 buff.write(struct.pack('<I%ss'%length, length, val1))
02295 _x = self.collision_support_surface_name
02296 length = len(_x)
02297 if python3 or type(_x) == unicode:
02298 _x = _x.encode('utf-8')
02299 length = len(_x)
02300 buff.write(struct.pack('<I%ss'%length, length, _x))
02301 except struct.error as se: self._check_types(se)
02302 except TypeError as te: self._check_types(te)
02303
02304 def deserialize_numpy(self, str, numpy):
02305 """
02306 unpack serialized message in str into this message instance using numpy for array types
02307 :param str: byte array of serialized message, ``str``
02308 :param numpy: numpy python module
02309 """
02310 try:
02311 if self.graspable_objects is None:
02312 self.graspable_objects = None
02313 end = 0
02314 start = end
02315 end += 4
02316 (length,) = _struct_I.unpack(str[start:end])
02317 self.graspable_objects = []
02318 for i in range(0, length):
02319 val1 = object_manipulation_msgs.msg.GraspableObject()
02320 start = end
02321 end += 4
02322 (length,) = _struct_I.unpack(str[start:end])
02323 start = end
02324 end += length
02325 if python3:
02326 val1.reference_frame_id = str[start:end].decode('utf-8')
02327 else:
02328 val1.reference_frame_id = str[start:end]
02329 start = end
02330 end += 4
02331 (length,) = _struct_I.unpack(str[start:end])
02332 val1.potential_models = []
02333 for i in range(0, length):
02334 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
02335 start = end
02336 end += 4
02337 (val2.model_id,) = _struct_i.unpack(str[start:end])
02338 _v123 = val2.pose
02339 _v124 = _v123.header
02340 start = end
02341 end += 4
02342 (_v124.seq,) = _struct_I.unpack(str[start:end])
02343 _v125 = _v124.stamp
02344 _x = _v125
02345 start = end
02346 end += 8
02347 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02348 start = end
02349 end += 4
02350 (length,) = _struct_I.unpack(str[start:end])
02351 start = end
02352 end += length
02353 if python3:
02354 _v124.frame_id = str[start:end].decode('utf-8')
02355 else:
02356 _v124.frame_id = str[start:end]
02357 _v126 = _v123.pose
02358 _v127 = _v126.position
02359 _x = _v127
02360 start = end
02361 end += 24
02362 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02363 _v128 = _v126.orientation
02364 _x = _v128
02365 start = end
02366 end += 32
02367 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
02368 start = end
02369 end += 4
02370 (val2.confidence,) = _struct_f.unpack(str[start:end])
02371 start = end
02372 end += 4
02373 (length,) = _struct_I.unpack(str[start:end])
02374 start = end
02375 end += length
02376 if python3:
02377 val2.detector_name = str[start:end].decode('utf-8')
02378 else:
02379 val2.detector_name = str[start:end]
02380 val1.potential_models.append(val2)
02381 _v129 = val1.cluster
02382 _v130 = _v129.header
02383 start = end
02384 end += 4
02385 (_v130.seq,) = _struct_I.unpack(str[start:end])
02386 _v131 = _v130.stamp
02387 _x = _v131
02388 start = end
02389 end += 8
02390 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02391 start = end
02392 end += 4
02393 (length,) = _struct_I.unpack(str[start:end])
02394 start = end
02395 end += length
02396 if python3:
02397 _v130.frame_id = str[start:end].decode('utf-8')
02398 else:
02399 _v130.frame_id = str[start:end]
02400 start = end
02401 end += 4
02402 (length,) = _struct_I.unpack(str[start:end])
02403 _v129.points = []
02404 for i in range(0, length):
02405 val3 = geometry_msgs.msg.Point32()
02406 _x = val3
02407 start = end
02408 end += 12
02409 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
02410 _v129.points.append(val3)
02411 start = end
02412 end += 4
02413 (length,) = _struct_I.unpack(str[start:end])
02414 _v129.channels = []
02415 for i in range(0, length):
02416 val3 = sensor_msgs.msg.ChannelFloat32()
02417 start = end
02418 end += 4
02419 (length,) = _struct_I.unpack(str[start:end])
02420 start = end
02421 end += length
02422 if python3:
02423 val3.name = str[start:end].decode('utf-8')
02424 else:
02425 val3.name = str[start:end]
02426 start = end
02427 end += 4
02428 (length,) = _struct_I.unpack(str[start:end])
02429 pattern = '<%sf'%length
02430 start = end
02431 end += struct.calcsize(pattern)
02432 val3.values = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=length)
02433 _v129.channels.append(val3)
02434 _v132 = val1.region
02435 _v133 = _v132.cloud
02436 _v134 = _v133.header
02437 start = end
02438 end += 4
02439 (_v134.seq,) = _struct_I.unpack(str[start:end])
02440 _v135 = _v134.stamp
02441 _x = _v135
02442 start = end
02443 end += 8
02444 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02445 start = end
02446 end += 4
02447 (length,) = _struct_I.unpack(str[start:end])
02448 start = end
02449 end += length
02450 if python3:
02451 _v134.frame_id = str[start:end].decode('utf-8')
02452 else:
02453 _v134.frame_id = str[start:end]
02454 _x = _v133
02455 start = end
02456 end += 8
02457 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02458 start = end
02459 end += 4
02460 (length,) = _struct_I.unpack(str[start:end])
02461 _v133.fields = []
02462 for i in range(0, length):
02463 val4 = sensor_msgs.msg.PointField()
02464 start = end
02465 end += 4
02466 (length,) = _struct_I.unpack(str[start:end])
02467 start = end
02468 end += length
02469 if python3:
02470 val4.name = str[start:end].decode('utf-8')
02471 else:
02472 val4.name = str[start:end]
02473 _x = val4
02474 start = end
02475 end += 9
02476 (_x.offset, _x.datatype, _x.count,) = _struct_IBI.unpack(str[start:end])
02477 _v133.fields.append(val4)
02478 _x = _v133
02479 start = end
02480 end += 9
02481 (_x.is_bigendian, _x.point_step, _x.row_step,) = _struct_B2I.unpack(str[start:end])
02482 _v133.is_bigendian = bool(_v133.is_bigendian)
02483 start = end
02484 end += 4
02485 (length,) = _struct_I.unpack(str[start:end])
02486 start = end
02487 end += length
02488 if python3:
02489 _v133.data = str[start:end].decode('utf-8')
02490 else:
02491 _v133.data = str[start:end]
02492 start = end
02493 end += 1
02494 (_v133.is_dense,) = _struct_B.unpack(str[start:end])
02495 _v133.is_dense = bool(_v133.is_dense)
02496 start = end
02497 end += 4
02498 (length,) = _struct_I.unpack(str[start:end])
02499 pattern = '<%si'%length
02500 start = end
02501 end += struct.calcsize(pattern)
02502 _v132.mask = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
02503 _v136 = _v132.image
02504 _v137 = _v136.header
02505 start = end
02506 end += 4
02507 (_v137.seq,) = _struct_I.unpack(str[start:end])
02508 _v138 = _v137.stamp
02509 _x = _v138
02510 start = end
02511 end += 8
02512 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02513 start = end
02514 end += 4
02515 (length,) = _struct_I.unpack(str[start:end])
02516 start = end
02517 end += length
02518 if python3:
02519 _v137.frame_id = str[start:end].decode('utf-8')
02520 else:
02521 _v137.frame_id = str[start:end]
02522 _x = _v136
02523 start = end
02524 end += 8
02525 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02526 start = end
02527 end += 4
02528 (length,) = _struct_I.unpack(str[start:end])
02529 start = end
02530 end += length
02531 if python3:
02532 _v136.encoding = str[start:end].decode('utf-8')
02533 else:
02534 _v136.encoding = str[start:end]
02535 _x = _v136
02536 start = end
02537 end += 5
02538 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
02539 start = end
02540 end += 4
02541 (length,) = _struct_I.unpack(str[start:end])
02542 start = end
02543 end += length
02544 if python3:
02545 _v136.data = str[start:end].decode('utf-8')
02546 else:
02547 _v136.data = str[start:end]
02548 _v139 = _v132.disparity_image
02549 _v140 = _v139.header
02550 start = end
02551 end += 4
02552 (_v140.seq,) = _struct_I.unpack(str[start:end])
02553 _v141 = _v140.stamp
02554 _x = _v141
02555 start = end
02556 end += 8
02557 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02558 start = end
02559 end += 4
02560 (length,) = _struct_I.unpack(str[start:end])
02561 start = end
02562 end += length
02563 if python3:
02564 _v140.frame_id = str[start:end].decode('utf-8')
02565 else:
02566 _v140.frame_id = str[start:end]
02567 _x = _v139
02568 start = end
02569 end += 8
02570 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02571 start = end
02572 end += 4
02573 (length,) = _struct_I.unpack(str[start:end])
02574 start = end
02575 end += length
02576 if python3:
02577 _v139.encoding = str[start:end].decode('utf-8')
02578 else:
02579 _v139.encoding = str[start:end]
02580 _x = _v139
02581 start = end
02582 end += 5
02583 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
02584 start = end
02585 end += 4
02586 (length,) = _struct_I.unpack(str[start:end])
02587 start = end
02588 end += length
02589 if python3:
02590 _v139.data = str[start:end].decode('utf-8')
02591 else:
02592 _v139.data = str[start:end]
02593 _v142 = _v132.cam_info
02594 _v143 = _v142.header
02595 start = end
02596 end += 4
02597 (_v143.seq,) = _struct_I.unpack(str[start:end])
02598 _v144 = _v143.stamp
02599 _x = _v144
02600 start = end
02601 end += 8
02602 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02603 start = end
02604 end += 4
02605 (length,) = _struct_I.unpack(str[start:end])
02606 start = end
02607 end += length
02608 if python3:
02609 _v143.frame_id = str[start:end].decode('utf-8')
02610 else:
02611 _v143.frame_id = str[start:end]
02612 _x = _v142
02613 start = end
02614 end += 8
02615 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02616 start = end
02617 end += 4
02618 (length,) = _struct_I.unpack(str[start:end])
02619 start = end
02620 end += length
02621 if python3:
02622 _v142.distortion_model = str[start:end].decode('utf-8')
02623 else:
02624 _v142.distortion_model = str[start:end]
02625 start = end
02626 end += 4
02627 (length,) = _struct_I.unpack(str[start:end])
02628 pattern = '<%sd'%length
02629 start = end
02630 end += struct.calcsize(pattern)
02631 _v142.D = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
02632 start = end
02633 end += 72
02634 _v142.K = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
02635 start = end
02636 end += 72
02637 _v142.R = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
02638 start = end
02639 end += 96
02640 _v142.P = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=12)
02641 _x = _v142
02642 start = end
02643 end += 8
02644 (_x.binning_x, _x.binning_y,) = _struct_2I.unpack(str[start:end])
02645 _v145 = _v142.roi
02646 _x = _v145
02647 start = end
02648 end += 17
02649 (_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
02650 _v145.do_rectify = bool(_v145.do_rectify)
02651 _v146 = _v132.roi_box_pose
02652 _v147 = _v146.header
02653 start = end
02654 end += 4
02655 (_v147.seq,) = _struct_I.unpack(str[start:end])
02656 _v148 = _v147.stamp
02657 _x = _v148
02658 start = end
02659 end += 8
02660 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02661 start = end
02662 end += 4
02663 (length,) = _struct_I.unpack(str[start:end])
02664 start = end
02665 end += length
02666 if python3:
02667 _v147.frame_id = str[start:end].decode('utf-8')
02668 else:
02669 _v147.frame_id = str[start:end]
02670 _v149 = _v146.pose
02671 _v150 = _v149.position
02672 _x = _v150
02673 start = end
02674 end += 24
02675 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02676 _v151 = _v149.orientation
02677 _x = _v151
02678 start = end
02679 end += 32
02680 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
02681 _v152 = _v132.roi_box_dims
02682 _x = _v152
02683 start = end
02684 end += 24
02685 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02686 start = end
02687 end += 4
02688 (length,) = _struct_I.unpack(str[start:end])
02689 start = end
02690 end += length
02691 if python3:
02692 val1.collision_name = str[start:end].decode('utf-8')
02693 else:
02694 val1.collision_name = str[start:end]
02695 self.graspable_objects.append(val1)
02696 start = end
02697 end += 4
02698 (length,) = _struct_I.unpack(str[start:end])
02699 self.collision_object_names = []
02700 for i in range(0, length):
02701 start = end
02702 end += 4
02703 (length,) = _struct_I.unpack(str[start:end])
02704 start = end
02705 end += length
02706 if python3:
02707 val1 = str[start:end].decode('utf-8')
02708 else:
02709 val1 = str[start:end]
02710 self.collision_object_names.append(val1)
02711 start = end
02712 end += 4
02713 (length,) = _struct_I.unpack(str[start:end])
02714 start = end
02715 end += length
02716 if python3:
02717 self.collision_support_surface_name = str[start:end].decode('utf-8')
02718 else:
02719 self.collision_support_surface_name = str[start:end]
02720 return self
02721 except struct.error as e:
02722 raise genpy.DeserializationError(e) # most likely a buffer underfill (serialized data shorter than the message expects)
02723
02724 _struct_I = genpy.struct_I
02725 _struct_IBI = struct.Struct("<IBI")
02726 _struct_B = struct.Struct("<B")
02727 _struct_12d = struct.Struct("<12d")
02728 _struct_f = struct.Struct("<f")
02729 _struct_i = struct.Struct("<i")
02730 _struct_BI = struct.Struct("<BI")
02731 _struct_3f = struct.Struct("<3f")
02732 _struct_9d = struct.Struct("<9d")
02733 _struct_B2I = struct.Struct("<B2I")
02734 _struct_4d = struct.Struct("<4d")
02735 _struct_2I = struct.Struct("<2I")
02736 _struct_4IB = struct.Struct("<4IB")
02737 _struct_3d = struct.Struct("<3d")
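# Precompiled struct.Struct objects shared by the (de)serializers above; each
# format string is little-endian ('<') followed by the packed layout, e.g.
# '<3d' is three float64 values and '<4IB' is four uint32 values plus one uint8.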
02738 class TabletopCollisionMapProcessing(object):
02739 _type = 'tabletop_collision_map_processing/TabletopCollisionMapProcessing'
02740 _md5sum = '58e439dda25eed20079051e6af1b5eaa'
02741 _request_class = TabletopCollisionMapProcessingRequest
02742 _response_class = TabletopCollisionMapProcessingResponse
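# Example (sketch): calling the service from a rospy node. The service name and
# the 'detection' / frame values below are assumptions for illustration -- use
# whatever your collision map processing node actually advertises.
#
#   import rospy
#   from tabletop_collision_map_processing.srv import TabletopCollisionMapProcessing
#   service_name = '/tabletop_collision_map_processing/tabletop_collision_map_processing'
#   rospy.wait_for_service(service_name)
#   proxy = rospy.ServiceProxy(service_name, TabletopCollisionMapProcessing)
#   resp = proxy(detection_result=detection,        # a TabletopDetectionResult
#                reset_collision_models=True,
#                reset_attached_models=True,
#                desired_frame='base_link')
#   print(resp.collision_object_names, resp.collision_support_surface_name)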