00001 """autogenerated by genmsg_py from TabletopCollisionMapProcessingRequest.msg. Do not edit."""
00002 import roslib.message
00003 import struct
00004
00005 import arm_navigation_msgs.msg
00006 import tabletop_object_detector.msg
00007 import geometry_msgs.msg
00008 import sensor_msgs.msg
00009 import std_msgs.msg
00010 import household_objects_database_msgs.msg
00011
00012 class TabletopCollisionMapProcessingRequest(roslib.message.Message):
00013 _md5sum = "fe986e22908c7a2936a0cdf9f240d4a3"
00014 _type = "tabletop_collision_map_processing/TabletopCollisionMapProcessingRequest"
00015 _has_header = False #flag to mark the presence of a Header object
00016 _full_text = """
00017
00018
00019
00020
00021
00022
00023 tabletop_object_detector/TabletopDetectionResult detection_result
00024
00025
00026 bool reset_collision_models
00027
00028
00029 bool reset_attached_models
00030
00031
00032
00033 string desired_frame
00034
00035
00036 ================================================================================
00037 MSG: tabletop_object_detector/TabletopDetectionResult
00038 # Contains all the information from one run of the tabletop detection node
00039
00040 # The information for the plane that has been detected
00041 Table table
00042
00043 # The raw clusters detected in the scan
00044 sensor_msgs/PointCloud[] clusters
00045
00046 # The list of potential models that have been detected for each cluster
00047 # An empty list will be returned for a cluster that has no recognition results at all
00048 household_objects_database_msgs/DatabaseModelPoseList[] models
00049
00050 # For each cluster, the index of the list of models that was fit to that cluster
00051 # keep in mind that multiple raw clusters can correspond to a single fit
00052 int32[] cluster_model_indices
00053
00054 # Whether the detection has succeeded or failed
00055 int32 NO_CLOUD_RECEIVED = 1
00056 int32 NO_TABLE = 2
00057 int32 OTHER_ERROR = 3
00058 int32 SUCCESS = 4
00059 int32 result
00060
00061 ================================================================================
00062 MSG: tabletop_object_detector/Table
00063 # Informs that a planar table has been detected at a given location
00064
00065 # The pose gives you the transform that takes you to the coordinate system
00066 # of the table, with the origin somewhere in the table plane and the
00067 # z axis normal to the plane
00068 geometry_msgs/PoseStamped pose
00069
00070 # These values give you the observed extents of the table, along x and y,
00071 # in the table's own coordinate system (above)
00072 # there is no guarantee that the origin of the table coordinate system is
00073 # inside the boundary defined by these values.
00074 float32 x_min
00075 float32 x_max
00076 float32 y_min
00077 float32 y_max
00078
00079 # There is no guarantee that the table does NOT extend further than these
00080 # values; this is just as far as we've observed it.
00081
00082
00083 # Newer table definition as triangle mesh of convex hull (relative to pose)
00084 arm_navigation_msgs/Shape convex_hull
00085
00086 ================================================================================
00087 MSG: geometry_msgs/PoseStamped
00088 # A Pose with reference coordinate frame and timestamp
00089 Header header
00090 Pose pose
00091
00092 ================================================================================
00093 MSG: std_msgs/Header
00094 # Standard metadata for higher-level stamped data types.
00095 # This is generally used to communicate timestamped data
00096 # in a particular coordinate frame.
00097 #
00098 # sequence ID: consecutively increasing ID
00099 uint32 seq
00100 #Two-integer timestamp that is expressed as:
00101 # * stamp.secs: seconds (stamp_secs) since epoch
00102 # * stamp.nsecs: nanoseconds since stamp_secs
00103 # time-handling sugar is provided by the client library
00104 time stamp
00105 #Frame this data is associated with
00106 # 0: no frame
00107 # 1: global frame
00108 string frame_id
00109
00110 ================================================================================
00111 MSG: geometry_msgs/Pose
00112 # A representation of pose in free space, composed of position and orientation.
00113 Point position
00114 Quaternion orientation
00115
00116 ================================================================================
00117 MSG: geometry_msgs/Point
00118 # This contains the position of a point in free space
00119 float64 x
00120 float64 y
00121 float64 z
00122
00123 ================================================================================
00124 MSG: geometry_msgs/Quaternion
00125 # This represents an orientation in free space in quaternion form.
00126
00127 float64 x
00128 float64 y
00129 float64 z
00130 float64 w
00131
00132 ================================================================================
00133 MSG: arm_navigation_msgs/Shape
00134 byte SPHERE=0
00135 byte BOX=1
00136 byte CYLINDER=2
00137 byte MESH=3
00138
00139 byte type
00140
00141
00142 #### define sphere, box, cylinder ####
00143 # the origin of each shape is considered at the shape's center
00144
00145 # for sphere
00146 # radius := dimensions[0]
00147
00148 # for cylinder
00149 # radius := dimensions[0]
00150 # length := dimensions[1]
00151 # the length is along the Z axis
00152
00153 # for box
00154 # size_x := dimensions[0]
00155 # size_y := dimensions[1]
00156 # size_z := dimensions[2]
00157 float64[] dimensions
00158
00159
00160 #### define mesh ####
00161
00162 # list of triangles; triangle k is defined by three vertices located
00163 # at indices triangles[3k], triangles[3k+1], triangles[3k+2]
00164 int32[] triangles
00165 geometry_msgs/Point[] vertices
00166
00167 ================================================================================
00168 MSG: sensor_msgs/PointCloud
00169 # This message holds a collection of 3d points, plus optional additional
00170 # information about each point.
00171
00172 # Time of sensor data acquisition, coordinate frame ID.
00173 Header header
00174
00175 # Array of 3d points. Each Point32 should be interpreted as a 3d point
00176 # in the frame given in the header.
00177 geometry_msgs/Point32[] points
00178
00179 # Each channel should have the same number of elements as points array,
00180 # and the data in each channel should correspond 1:1 with each point.
00181 # Channel names in common practice are listed in ChannelFloat32.msg.
00182 ChannelFloat32[] channels
00183
00184 ================================================================================
00185 MSG: geometry_msgs/Point32
00186 # This contains the position of a point in free space (with 32 bits of precision).
00187 # It is recommended to use Point wherever possible instead of Point32.
00188 #
00189 # This recommendation is to promote interoperability.
00190 #
00191 # This message is designed to take up less space when sending
00192 # lots of points at once, as in the case of a PointCloud.
00193
00194 float32 x
00195 float32 y
00196 float32 z
00197 ================================================================================
00198 MSG: sensor_msgs/ChannelFloat32
00199 # This message is used by the PointCloud message to hold optional data
00200 # associated with each point in the cloud. The length of the values
00201 # array should be the same as the length of the points array in the
00202 # PointCloud, and each value should be associated with the corresponding
00203 # point.
00204
00205 # Channel names in existing practice include:
00206 # "u", "v" - row and column (respectively) in the left stereo image.
00207 # This is opposite to usual conventions but remains for
00208 # historical reasons. The newer PointCloud2 message has no
00209 # such problem.
00210 # "rgb" - For point clouds produced by color stereo cameras. uint8
00211 # (R,G,B) values packed into the least significant 24 bits,
00212 # in order.
00213 # "intensity" - laser or pixel intensity.
00214 # "distance"
00215
00216 # The channel name should give semantics of the channel (e.g.
00217 # "intensity" instead of "value").
00218 string name
00219
00220 # The values array should be 1-1 with the elements of the associated
00221 # PointCloud.
00222 float32[] values
00223
00224 ================================================================================
00225 MSG: household_objects_database_msgs/DatabaseModelPoseList
00226 # stores a list of possible database models recognition results
00227 DatabaseModelPose[] model_list
00228 ================================================================================
00229 MSG: household_objects_database_msgs/DatabaseModelPose
00230 # Informs that a specific model from the Model Database has been
00231 # identified at a certain location
00232
00233 # the database id of the model
00234 int32 model_id
00235
00236 # the pose that it can be found in
00237 geometry_msgs/PoseStamped pose
00238
00239 # a measure of the confidence level in this detection result
00240 float32 confidence
00241
00242 # the name of the object detector that generated this detection result
00243 string detector_name
00244
00245 """
00246 __slots__ = ['detection_result','reset_collision_models','reset_attached_models','desired_frame']
00247 _slot_types = ['tabletop_object_detector/TabletopDetectionResult','bool','bool','string']
00248
00249 def __init__(self, *args, **kwds):
00250 """
00251 Constructor. Any message fields that are implicitly/explicitly
00252 set to None will be assigned a default value. The recommended
00253 use is keyword arguments, as this is more robust to future message
00254 changes. You cannot mix in-order arguments and keyword arguments.
00255
00256 The available fields are:
00257 detection_result,reset_collision_models,reset_attached_models,desired_frame
00258
00259 @param args: complete set of field values, in .msg order
00260 @param kwds: use keyword arguments corresponding to message field names
00261 to set specific fields.
00262 """
00263 if args or kwds:
00264 super(TabletopCollisionMapProcessingRequest, self).__init__(*args, **kwds)
00265 #message fields cannot be None, assign default values for those that are
00266 if self.detection_result is None:
00267 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00268 if self.reset_collision_models is None:
00269 self.reset_collision_models = False
00270 if self.reset_attached_models is None:
00271 self.reset_attached_models = False
00272 if self.desired_frame is None:
00273 self.desired_frame = ''
00274 else:
00275 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00276 self.reset_collision_models = False
00277 self.reset_attached_models = False
00278 self.desired_frame = ''
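# Usage sketch (editor's note, not generated code): the constructor accepts any
# subset of the fields listed in __slots__ as keyword arguments; fields left out
# receive the defaults assigned above. The frame name below is only illustrative.
#
#   req = TabletopCollisionMapProcessingRequest(
#       reset_collision_models=True,
#       reset_attached_models=True,
#       desired_frame='base_link')   # hypothetical TF frame name
#   req.detection_result             # defaults to an empty TabletopDetectionResult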
00279
00280 def _get_types(self):
00281 """
00282 internal API method
00283 """
00284 return self._slot_types
00285
00286 def serialize(self, buff):
00287 """
00288 serialize message into buffer
00289 @param buff: buffer
00290 @type buff: StringIO
00291 """
00292 try:
00293 _x = self
00294 buff.write(_struct_3I.pack(_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs))
00295 _x = self.detection_result.table.pose.header.frame_id
00296 length = len(_x)
00297 buff.write(struct.pack('<I%ss'%length, length, _x))
00298 _x = self
00299 buff.write(_struct_7d4fb.pack(_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max, _x.detection_result.table.convex_hull.type))
00300 length = len(self.detection_result.table.convex_hull.dimensions)
00301 buff.write(_struct_I.pack(length))
00302 pattern = '<%sd'%length
00303 buff.write(struct.pack(pattern, *self.detection_result.table.convex_hull.dimensions))
00304 length = len(self.detection_result.table.convex_hull.triangles)
00305 buff.write(_struct_I.pack(length))
00306 pattern = '<%si'%length
00307 buff.write(struct.pack(pattern, *self.detection_result.table.convex_hull.triangles))
00308 length = len(self.detection_result.table.convex_hull.vertices)
00309 buff.write(_struct_I.pack(length))
00310 for val1 in self.detection_result.table.convex_hull.vertices:
00311 _x = val1
00312 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00313 length = len(self.detection_result.clusters)
00314 buff.write(_struct_I.pack(length))
00315 for val1 in self.detection_result.clusters:
00316 _v1 = val1.header
00317 buff.write(_struct_I.pack(_v1.seq))
00318 _v2 = _v1.stamp
00319 _x = _v2
00320 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00321 _x = _v1.frame_id
00322 length = len(_x)
00323 buff.write(struct.pack('<I%ss'%length, length, _x))
00324 length = len(val1.points)
00325 buff.write(_struct_I.pack(length))
00326 for val2 in val1.points:
00327 _x = val2
00328 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
00329 length = len(val1.channels)
00330 buff.write(_struct_I.pack(length))
00331 for val2 in val1.channels:
00332 _x = val2.name
00333 length = len(_x)
00334 buff.write(struct.pack('<I%ss'%length, length, _x))
00335 length = len(val2.values)
00336 buff.write(_struct_I.pack(length))
00337 pattern = '<%sf'%length
00338 buff.write(struct.pack(pattern, *val2.values))
00339 length = len(self.detection_result.models)
00340 buff.write(_struct_I.pack(length))
00341 for val1 in self.detection_result.models:
00342 length = len(val1.model_list)
00343 buff.write(_struct_I.pack(length))
00344 for val2 in val1.model_list:
00345 buff.write(_struct_i.pack(val2.model_id))
00346 _v3 = val2.pose
00347 _v4 = _v3.header
00348 buff.write(_struct_I.pack(_v4.seq))
00349 _v5 = _v4.stamp
00350 _x = _v5
00351 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00352 _x = _v4.frame_id
00353 length = len(_x)
00354 buff.write(struct.pack('<I%ss'%length, length, _x))
00355 _v6 = _v3.pose
00356 _v7 = _v6.position
00357 _x = _v7
00358 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00359 _v8 = _v6.orientation
00360 _x = _v8
00361 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
00362 buff.write(_struct_f.pack(val2.confidence))
00363 _x = val2.detector_name
00364 length = len(_x)
00365 buff.write(struct.pack('<I%ss'%length, length, _x))
00366 length = len(self.detection_result.cluster_model_indices)
00367 buff.write(_struct_I.pack(length))
00368 pattern = '<%si'%length
00369 buff.write(struct.pack(pattern, *self.detection_result.cluster_model_indices))
00370 _x = self
00371 buff.write(_struct_i2B.pack(_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models))
00372 _x = self.desired_frame
00373 length = len(_x)
00374 buff.write(struct.pack('<I%ss'%length, length, _x))
00375 except struct.error as se: self._check_types(se)
00376 except TypeError as te: self._check_types(te)
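# Serialization sketch (editor's note): serialize() only writes into a file-like
# buffer; it does not return the bytes. For this Python 2 era generated code the
# usual buffer is a StringIO, as the docstring above indicates.
#
#   from StringIO import StringIO    # io.BytesIO on Python 3 ports
#   buff = StringIO()
#   req.serialize(buff)
#   wire_data = buff.getvalue()      # raw serialized bytes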
00377
00378 def deserialize(self, str):
00379 """
00380 unpack serialized message in str into this message instance
00381 @param str: byte array of serialized message
00382 @type str: str
00383 """
00384 try:
00385 if self.detection_result is None:
00386 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00387 end = 0
00388 _x = self
00389 start = end
00390 end += 12
00391 (_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
00392 start = end
00393 end += 4
00394 (length,) = _struct_I.unpack(str[start:end])
00395 start = end
00396 end += length
00397 self.detection_result.table.pose.header.frame_id = str[start:end]
00398 _x = self
00399 start = end
00400 end += 73
00401 (_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max, _x.detection_result.table.convex_hull.type,) = _struct_7d4fb.unpack(str[start:end])
00402 start = end
00403 end += 4
00404 (length,) = _struct_I.unpack(str[start:end])
00405 pattern = '<%sd'%length
00406 start = end
00407 end += struct.calcsize(pattern)
00408 self.detection_result.table.convex_hull.dimensions = struct.unpack(pattern, str[start:end])
00409 start = end
00410 end += 4
00411 (length,) = _struct_I.unpack(str[start:end])
00412 pattern = '<%si'%length
00413 start = end
00414 end += struct.calcsize(pattern)
00415 self.detection_result.table.convex_hull.triangles = struct.unpack(pattern, str[start:end])
00416 start = end
00417 end += 4
00418 (length,) = _struct_I.unpack(str[start:end])
00419 self.detection_result.table.convex_hull.vertices = []
00420 for i in range(0, length):
00421 val1 = geometry_msgs.msg.Point()
00422 _x = val1
00423 start = end
00424 end += 24
00425 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00426 self.detection_result.table.convex_hull.vertices.append(val1)
00427 start = end
00428 end += 4
00429 (length,) = _struct_I.unpack(str[start:end])
00430 self.detection_result.clusters = []
00431 for i in range(0, length):
00432 val1 = sensor_msgs.msg.PointCloud()
00433 _v9 = val1.header
00434 start = end
00435 end += 4
00436 (_v9.seq,) = _struct_I.unpack(str[start:end])
00437 _v10 = _v9.stamp
00438 _x = _v10
00439 start = end
00440 end += 8
00441 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00442 start = end
00443 end += 4
00444 (length,) = _struct_I.unpack(str[start:end])
00445 start = end
00446 end += length
00447 _v9.frame_id = str[start:end]
00448 start = end
00449 end += 4
00450 (length,) = _struct_I.unpack(str[start:end])
00451 val1.points = []
00452 for i in range(0, length):
00453 val2 = geometry_msgs.msg.Point32()
00454 _x = val2
00455 start = end
00456 end += 12
00457 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
00458 val1.points.append(val2)
00459 start = end
00460 end += 4
00461 (length,) = _struct_I.unpack(str[start:end])
00462 val1.channels = []
00463 for i in range(0, length):
00464 val2 = sensor_msgs.msg.ChannelFloat32()
00465 start = end
00466 end += 4
00467 (length,) = _struct_I.unpack(str[start:end])
00468 start = end
00469 end += length
00470 val2.name = str[start:end]
00471 start = end
00472 end += 4
00473 (length,) = _struct_I.unpack(str[start:end])
00474 pattern = '<%sf'%length
00475 start = end
00476 end += struct.calcsize(pattern)
00477 val2.values = struct.unpack(pattern, str[start:end])
00478 val1.channels.append(val2)
00479 self.detection_result.clusters.append(val1)
00480 start = end
00481 end += 4
00482 (length,) = _struct_I.unpack(str[start:end])
00483 self.detection_result.models = []
00484 for i in range(0, length):
00485 val1 = household_objects_database_msgs.msg.DatabaseModelPoseList()
00486 start = end
00487 end += 4
00488 (length,) = _struct_I.unpack(str[start:end])
00489 val1.model_list = []
00490 for i in range(0, length):
00491 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
00492 start = end
00493 end += 4
00494 (val2.model_id,) = _struct_i.unpack(str[start:end])
00495 _v11 = val2.pose
00496 _v12 = _v11.header
00497 start = end
00498 end += 4
00499 (_v12.seq,) = _struct_I.unpack(str[start:end])
00500 _v13 = _v12.stamp
00501 _x = _v13
00502 start = end
00503 end += 8
00504 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00505 start = end
00506 end += 4
00507 (length,) = _struct_I.unpack(str[start:end])
00508 start = end
00509 end += length
00510 _v12.frame_id = str[start:end]
00511 _v14 = _v11.pose
00512 _v15 = _v14.position
00513 _x = _v15
00514 start = end
00515 end += 24
00516 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00517 _v16 = _v14.orientation
00518 _x = _v16
00519 start = end
00520 end += 32
00521 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
00522 start = end
00523 end += 4
00524 (val2.confidence,) = _struct_f.unpack(str[start:end])
00525 start = end
00526 end += 4
00527 (length,) = _struct_I.unpack(str[start:end])
00528 start = end
00529 end += length
00530 val2.detector_name = str[start:end]
00531 val1.model_list.append(val2)
00532 self.detection_result.models.append(val1)
00533 start = end
00534 end += 4
00535 (length,) = _struct_I.unpack(str[start:end])
00536 pattern = '<%si'%length
00537 start = end
00538 end += struct.calcsize(pattern)
00539 self.detection_result.cluster_model_indices = struct.unpack(pattern, str[start:end])
00540 _x = self
00541 start = end
00542 end += 6
00543 (_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models,) = _struct_i2B.unpack(str[start:end])
00544 self.reset_collision_models = bool(self.reset_collision_models)
00545 self.reset_attached_models = bool(self.reset_attached_models)
00546 start = end
00547 end += 4
00548 (length,) = _struct_I.unpack(str[start:end])
00549 start = end
00550 end += length
00551 self.desired_frame = str[start:end]
00552 return self
00553 except struct.error as e:
00554 raise roslib.message.DeserializationError(e) #most likely buffer underfill
00555
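# Deserialization sketch (editor's note): deserialize() fills in an existing
# instance from the raw byte string and returns self, so it can be chained.
#
#   req = TabletopCollisionMapProcessingRequest().deserialize(wire_data)
#   req.desired_frame               # string recovered from the buffer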
00556
00557 def serialize_numpy(self, buff, numpy):
00558 """
00559 serialize message with numpy array types into buffer
00560 @param buff: buffer
00561 @type buff: StringIO
00562 @param numpy: numpy python module
00563 @type numpy: module
00564 """
00565 try:
00566 _x = self
00567 buff.write(_struct_3I.pack(_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs))
00568 _x = self.detection_result.table.pose.header.frame_id
00569 length = len(_x)
00570 buff.write(struct.pack('<I%ss'%length, length, _x))
00571 _x = self
00572 buff.write(_struct_7d4fb.pack(_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max, _x.detection_result.table.convex_hull.type))
00573 length = len(self.detection_result.table.convex_hull.dimensions)
00574 buff.write(_struct_I.pack(length))
00575 pattern = '<%sd'%length
00576 buff.write(self.detection_result.table.convex_hull.dimensions.tostring())
00577 length = len(self.detection_result.table.convex_hull.triangles)
00578 buff.write(_struct_I.pack(length))
00579 pattern = '<%si'%length
00580 buff.write(self.detection_result.table.convex_hull.triangles.tostring())
00581 length = len(self.detection_result.table.convex_hull.vertices)
00582 buff.write(_struct_I.pack(length))
00583 for val1 in self.detection_result.table.convex_hull.vertices:
00584 _x = val1
00585 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00586 length = len(self.detection_result.clusters)
00587 buff.write(_struct_I.pack(length))
00588 for val1 in self.detection_result.clusters:
00589 _v17 = val1.header
00590 buff.write(_struct_I.pack(_v17.seq))
00591 _v18 = _v17.stamp
00592 _x = _v18
00593 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00594 _x = _v17.frame_id
00595 length = len(_x)
00596 buff.write(struct.pack('<I%ss'%length, length, _x))
00597 length = len(val1.points)
00598 buff.write(_struct_I.pack(length))
00599 for val2 in val1.points:
00600 _x = val2
00601 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
00602 length = len(val1.channels)
00603 buff.write(_struct_I.pack(length))
00604 for val2 in val1.channels:
00605 _x = val2.name
00606 length = len(_x)
00607 buff.write(struct.pack('<I%ss'%length, length, _x))
00608 length = len(val2.values)
00609 buff.write(_struct_I.pack(length))
00610 pattern = '<%sf'%length
00611 buff.write(val2.values.tostring())
00612 length = len(self.detection_result.models)
00613 buff.write(_struct_I.pack(length))
00614 for val1 in self.detection_result.models:
00615 length = len(val1.model_list)
00616 buff.write(_struct_I.pack(length))
00617 for val2 in val1.model_list:
00618 buff.write(_struct_i.pack(val2.model_id))
00619 _v19 = val2.pose
00620 _v20 = _v19.header
00621 buff.write(_struct_I.pack(_v20.seq))
00622 _v21 = _v20.stamp
00623 _x = _v21
00624 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00625 _x = _v20.frame_id
00626 length = len(_x)
00627 buff.write(struct.pack('<I%ss'%length, length, _x))
00628 _v22 = _v19.pose
00629 _v23 = _v22.position
00630 _x = _v23
00631 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00632 _v24 = _v22.orientation
00633 _x = _v24
00634 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
00635 buff.write(_struct_f.pack(val2.confidence))
00636 _x = val2.detector_name
00637 length = len(_x)
00638 buff.write(struct.pack('<I%ss'%length, length, _x))
00639 length = len(self.detection_result.cluster_model_indices)
00640 buff.write(_struct_I.pack(length))
00641 pattern = '<%si'%length
00642 buff.write(self.detection_result.cluster_model_indices.tostring())
00643 _x = self
00644 buff.write(_struct_i2B.pack(_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models))
00645 _x = self.desired_frame
00646 length = len(_x)
00647 buff.write(struct.pack('<I%ss'%length, length, _x))
00648 except struct.error as se: self._check_types(se)
00649 except TypeError as te: self._check_types(te)
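# Numpy-variant sketch (editor's note): serialize_numpy() calls .tostring() on
# the primitive arrays, so those fields are expected to be numpy arrays rather
# than plain Python lists when this code path is used, e.g.:
#
#   import numpy
#   req.detection_result.cluster_model_indices = numpy.array([0, 1], dtype=numpy.int32)
#   req.serialize_numpy(buff, numpy)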
00650
00651 def deserialize_numpy(self, str, numpy):
00652 """
00653 unpack serialized message in str into this message instance using numpy for array types
00654 @param str: byte array of serialized message
00655 @type str: str
00656 @param numpy: numpy python module
00657 @type numpy: module
00658 """
00659 try:
00660 if self.detection_result is None:
00661 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00662 end = 0
00663 _x = self
00664 start = end
00665 end += 12
00666 (_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
00667 start = end
00668 end += 4
00669 (length,) = _struct_I.unpack(str[start:end])
00670 start = end
00671 end += length
00672 self.detection_result.table.pose.header.frame_id = str[start:end]
00673 _x = self
00674 start = end
00675 end += 73
00676 (_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max, _x.detection_result.table.convex_hull.type,) = _struct_7d4fb.unpack(str[start:end])
00677 start = end
00678 end += 4
00679 (length,) = _struct_I.unpack(str[start:end])
00680 pattern = '<%sd'%length
00681 start = end
00682 end += struct.calcsize(pattern)
00683 self.detection_result.table.convex_hull.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
00684 start = end
00685 end += 4
00686 (length,) = _struct_I.unpack(str[start:end])
00687 pattern = '<%si'%length
00688 start = end
00689 end += struct.calcsize(pattern)
00690 self.detection_result.table.convex_hull.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
00691 start = end
00692 end += 4
00693 (length,) = _struct_I.unpack(str[start:end])
00694 self.detection_result.table.convex_hull.vertices = []
00695 for i in range(0, length):
00696 val1 = geometry_msgs.msg.Point()
00697 _x = val1
00698 start = end
00699 end += 24
00700 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00701 self.detection_result.table.convex_hull.vertices.append(val1)
00702 start = end
00703 end += 4
00704 (length,) = _struct_I.unpack(str[start:end])
00705 self.detection_result.clusters = []
00706 for i in range(0, length):
00707 val1 = sensor_msgs.msg.PointCloud()
00708 _v25 = val1.header
00709 start = end
00710 end += 4
00711 (_v25.seq,) = _struct_I.unpack(str[start:end])
00712 _v26 = _v25.stamp
00713 _x = _v26
00714 start = end
00715 end += 8
00716 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00717 start = end
00718 end += 4
00719 (length,) = _struct_I.unpack(str[start:end])
00720 start = end
00721 end += length
00722 _v25.frame_id = str[start:end]
00723 start = end
00724 end += 4
00725 (length,) = _struct_I.unpack(str[start:end])
00726 val1.points = []
00727 for i in range(0, length):
00728 val2 = geometry_msgs.msg.Point32()
00729 _x = val2
00730 start = end
00731 end += 12
00732 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
00733 val1.points.append(val2)
00734 start = end
00735 end += 4
00736 (length,) = _struct_I.unpack(str[start:end])
00737 val1.channels = []
00738 for i in range(0, length):
00739 val2 = sensor_msgs.msg.ChannelFloat32()
00740 start = end
00741 end += 4
00742 (length,) = _struct_I.unpack(str[start:end])
00743 start = end
00744 end += length
00745 val2.name = str[start:end]
00746 start = end
00747 end += 4
00748 (length,) = _struct_I.unpack(str[start:end])
00749 pattern = '<%sf'%length
00750 start = end
00751 end += struct.calcsize(pattern)
00752 val2.values = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=length)
00753 val1.channels.append(val2)
00754 self.detection_result.clusters.append(val1)
00755 start = end
00756 end += 4
00757 (length,) = _struct_I.unpack(str[start:end])
00758 self.detection_result.models = []
00759 for i in range(0, length):
00760 val1 = household_objects_database_msgs.msg.DatabaseModelPoseList()
00761 start = end
00762 end += 4
00763 (length,) = _struct_I.unpack(str[start:end])
00764 val1.model_list = []
00765 for i in range(0, length):
00766 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
00767 start = end
00768 end += 4
00769 (val2.model_id,) = _struct_i.unpack(str[start:end])
00770 _v27 = val2.pose
00771 _v28 = _v27.header
00772 start = end
00773 end += 4
00774 (_v28.seq,) = _struct_I.unpack(str[start:end])
00775 _v29 = _v28.stamp
00776 _x = _v29
00777 start = end
00778 end += 8
00779 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00780 start = end
00781 end += 4
00782 (length,) = _struct_I.unpack(str[start:end])
00783 start = end
00784 end += length
00785 _v28.frame_id = str[start:end]
00786 _v30 = _v27.pose
00787 _v31 = _v30.position
00788 _x = _v31
00789 start = end
00790 end += 24
00791 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00792 _v32 = _v30.orientation
00793 _x = _v32
00794 start = end
00795 end += 32
00796 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
00797 start = end
00798 end += 4
00799 (val2.confidence,) = _struct_f.unpack(str[start:end])
00800 start = end
00801 end += 4
00802 (length,) = _struct_I.unpack(str[start:end])
00803 start = end
00804 end += length
00805 val2.detector_name = str[start:end]
00806 val1.model_list.append(val2)
00807 self.detection_result.models.append(val1)
00808 start = end
00809 end += 4
00810 (length,) = _struct_I.unpack(str[start:end])
00811 pattern = '<%si'%length
00812 start = end
00813 end += struct.calcsize(pattern)
00814 self.detection_result.cluster_model_indices = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
00815 _x = self
00816 start = end
00817 end += 6
00818 (_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models,) = _struct_i2B.unpack(str[start:end])
00819 self.reset_collision_models = bool(self.reset_collision_models)
00820 self.reset_attached_models = bool(self.reset_attached_models)
00821 start = end
00822 end += 4
00823 (length,) = _struct_I.unpack(str[start:end])
00824 start = end
00825 end += length
00826 self.desired_frame = str[start:end]
00827 return self
00828 except struct.error as e:
00829 raise roslib.message.DeserializationError(e) #most likely buffer underfill
00830
00831 _struct_I = roslib.message.struct_I
00832 _struct_i2B = struct.Struct("<i2B")
00833 _struct_f = struct.Struct("<f")
00834 _struct_7d4fb = struct.Struct("<7d4fb")
00835 _struct_2I = struct.Struct("<2I")
00836 _struct_i = struct.Struct("<i")
00837 _struct_3I = struct.Struct("<3I")
00838 _struct_4d = struct.Struct("<4d")
00839 _struct_3f = struct.Struct("<3f")
00840 _struct_3d = struct.Struct("<3d")
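# ---------------------------------------------------------------------------
# Editor's usage sketch (not produced by genmsg_py): a minimal serialize /
# deserialize round trip for the request class defined above. It assumes a
# Python 2, rosbuild-era ROS install where roslib.message and the imported
# message packages resolve; 'base_link' is just an illustrative frame name.
if __name__ == '__main__':
    from StringIO import StringIO

    _demo_req = TabletopCollisionMapProcessingRequest(
        reset_collision_models=True,
        reset_attached_models=False,
        desired_frame='base_link')

    _demo_buff = StringIO()
    _demo_req.serialize(_demo_buff)                      # pack into the buffer
    _demo_bytes = _demo_buff.getvalue()                  # raw wire representation

    _demo_copy = TabletopCollisionMapProcessingRequest().deserialize(_demo_bytes)
    assert _demo_copy.desired_frame == 'base_link'
    assert _demo_copy.reset_collision_models is True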
00841 """autogenerated by genmsg_py from TabletopCollisionMapProcessingResponse.msg. Do not edit."""
00842 import roslib.message
00843 import struct
00844
00845 import geometry_msgs.msg
00846 import sensor_msgs.msg
00847 import std_msgs.msg
00848 import object_manipulation_msgs.msg
00849 import household_objects_database_msgs.msg
00850
00851 class TabletopCollisionMapProcessingResponse(roslib.message.Message):
00852 _md5sum = "684fb6e369cb671a9c4149f683c75e88"
00853 _type = "tabletop_collision_map_processing/TabletopCollisionMapProcessingResponse"
00854 _has_header = False #flag to mark the presence of a Header object
00855 _full_text = """
00856
00857 object_manipulation_msgs/GraspableObject[] graspable_objects
00858
00859
00860
00861 string[] collision_object_names
00862
00863
00864 string collision_support_surface_name
00865
00866
00867 ================================================================================
00868 MSG: object_manipulation_msgs/GraspableObject
00869 # an object that the object_manipulator can work on
00870
00871 # a graspable object can be represented in multiple ways. This message
00872 # can contain all of them. Which one is actually used is up to the receiver
00873 # of this message. When adding new representations, one must be careful that
00874 # they have reasonable lightweight defaults indicating that that particular
00875 # representation is not available.
00876
00877 # the tf frame to be used as a reference frame when combining information from
00878 # the different representations below
00879 string reference_frame_id
00880
00881 # potential recognition results from a database of models
00882 # all poses are relative to the object reference pose
00883 household_objects_database_msgs/DatabaseModelPose[] potential_models
00884
00885 # the point cloud itself
00886 sensor_msgs/PointCloud cluster
00887
00888 # a region of a PointCloud2 of interest
00889 object_manipulation_msgs/SceneRegion region
00890
00891 # the name that this object has in the collision environment
00892 string collision_name
00893 ================================================================================
00894 MSG: household_objects_database_msgs/DatabaseModelPose
00895 # Informs that a specific model from the Model Database has been
00896 # identified at a certain location
00897
00898 # the database id of the model
00899 int32 model_id
00900
00901 # the pose that it can be found in
00902 geometry_msgs/PoseStamped pose
00903
00904 # a measure of the confidence level in this detection result
00905 float32 confidence
00906
00907 # the name of the object detector that generated this detection result
00908 string detector_name
00909
00910 ================================================================================
00911 MSG: geometry_msgs/PoseStamped
00912 # A Pose with reference coordinate frame and timestamp
00913 Header header
00914 Pose pose
00915
00916 ================================================================================
00917 MSG: std_msgs/Header
00918 # Standard metadata for higher-level stamped data types.
00919 # This is generally used to communicate timestamped data
00920 # in a particular coordinate frame.
00921 #
00922 # sequence ID: consecutively increasing ID
00923 uint32 seq
00924 #Two-integer timestamp that is expressed as:
00925 # * stamp.secs: seconds (stamp_secs) since epoch
00926 # * stamp.nsecs: nanoseconds since stamp_secs
00927 # time-handling sugar is provided by the client library
00928 time stamp
00929 #Frame this data is associated with
00930 # 0: no frame
00931 # 1: global frame
00932 string frame_id
00933
00934 ================================================================================
00935 MSG: geometry_msgs/Pose
00936 # A representation of pose in free space, composed of position and orientation.
00937 Point position
00938 Quaternion orientation
00939
00940 ================================================================================
00941 MSG: geometry_msgs/Point
00942 # This contains the position of a point in free space
00943 float64 x
00944 float64 y
00945 float64 z
00946
00947 ================================================================================
00948 MSG: geometry_msgs/Quaternion
00949 # This represents an orientation in free space in quaternion form.
00950
00951 float64 x
00952 float64 y
00953 float64 z
00954 float64 w
00955
00956 ================================================================================
00957 MSG: sensor_msgs/PointCloud
00958 # This message holds a collection of 3d points, plus optional additional
00959 # information about each point.
00960
00961 # Time of sensor data acquisition, coordinate frame ID.
00962 Header header
00963
00964 # Array of 3d points. Each Point32 should be interpreted as a 3d point
00965 # in the frame given in the header.
00966 geometry_msgs/Point32[] points
00967
00968 # Each channel should have the same number of elements as points array,
00969 # and the data in each channel should correspond 1:1 with each point.
00970 # Channel names in common practice are listed in ChannelFloat32.msg.
00971 ChannelFloat32[] channels
00972
00973 ================================================================================
00974 MSG: geometry_msgs/Point32
00975 # This contains the position of a point in free space (with 32 bits of precision).
00976 # It is recommended to use Point wherever possible instead of Point32.
00977 #
00978 # This recommendation is to promote interoperability.
00979 #
00980 # This message is designed to take up less space when sending
00981 # lots of points at once, as in the case of a PointCloud.
00982
00983 float32 x
00984 float32 y
00985 float32 z
00986 ================================================================================
00987 MSG: sensor_msgs/ChannelFloat32
00988 # This message is used by the PointCloud message to hold optional data
00989 # associated with each point in the cloud. The length of the values
00990 # array should be the same as the length of the points array in the
00991 # PointCloud, and each value should be associated with the corresponding
00992 # point.
00993
00994 # Channel names in existing practice include:
00995 # "u", "v" - row and column (respectively) in the left stereo image.
00996 # This is opposite to usual conventions but remains for
00997 # historical reasons. The newer PointCloud2 message has no
00998 # such problem.
00999 # "rgb" - For point clouds produced by color stereo cameras. uint8
01000 # (R,G,B) values packed into the least significant 24 bits,
01001 # in order.
01002 # "intensity" - laser or pixel intensity.
01003 # "distance"
01004
01005 # The channel name should give semantics of the channel (e.g.
01006 # "intensity" instead of "value").
01007 string name
01008
01009 # The values array should be 1-1 with the elements of the associated
01010 # PointCloud.
01011 float32[] values
01012
01013 ================================================================================
01014 MSG: object_manipulation_msgs/SceneRegion
01015 # Point cloud
01016 sensor_msgs/PointCloud2 cloud
01017
01018 # Indices for the region of interest
01019 int32[] mask
01020
01021 # One of the corresponding 2D images, if applicable
01022 sensor_msgs/Image image
01023
01024 # The disparity image, if applicable
01025 sensor_msgs/Image disparity_image
01026
01027 # Camera info for the camera that took the image
01028 sensor_msgs/CameraInfo cam_info
01029
01030 # a 3D region of interest for grasp planning
01031 geometry_msgs/PoseStamped roi_box_pose
01032 geometry_msgs/Vector3 roi_box_dims
01033
01034 ================================================================================
01035 MSG: sensor_msgs/PointCloud2
01036 # This message holds a collection of N-dimensional points, which may
01037 # contain additional information such as normals, intensity, etc. The
01038 # point data is stored as a binary blob, its layout described by the
01039 # contents of the "fields" array.
01040
01041 # The point cloud data may be organized 2d (image-like) or 1d
01042 # (unordered). Point clouds organized as 2d images may be produced by
01043 # camera depth sensors such as stereo or time-of-flight.
01044
01045 # Time of sensor data acquisition, and the coordinate frame ID (for 3d
01046 # points).
01047 Header header
01048
01049 # 2D structure of the point cloud. If the cloud is unordered, height is
01050 # 1 and width is the length of the point cloud.
01051 uint32 height
01052 uint32 width
01053
01054 # Describes the channels and their layout in the binary data blob.
01055 PointField[] fields
01056
01057 bool is_bigendian # Is this data bigendian?
01058 uint32 point_step # Length of a point in bytes
01059 uint32 row_step # Length of a row in bytes
01060 uint8[] data # Actual point data, size is (row_step*height)
01061
01062 bool is_dense # True if there are no invalid points
01063
01064 ================================================================================
01065 MSG: sensor_msgs/PointField
01066 # This message holds the description of one point entry in the
01067 # PointCloud2 message format.
01068 uint8 INT8 = 1
01069 uint8 UINT8 = 2
01070 uint8 INT16 = 3
01071 uint8 UINT16 = 4
01072 uint8 INT32 = 5
01073 uint8 UINT32 = 6
01074 uint8 FLOAT32 = 7
01075 uint8 FLOAT64 = 8
01076
01077 string name # Name of field
01078 uint32 offset # Offset from start of point struct
01079 uint8 datatype # Datatype enumeration, see above
01080 uint32 count # How many elements in the field
01081
01082 ================================================================================
01083 MSG: sensor_msgs/Image
01084 # This message contains an uncompressed image
01085 # (0, 0) is at top-left corner of image
01086 #
01087
01088 Header header # Header timestamp should be acquisition time of image
01089 # Header frame_id should be optical frame of camera
01090 # origin of frame should be optical center of camera
01091 # +x should point to the right in the image
01092 # +y should point down in the image
01093 # +z should point into the plane of the image
01094 # If the frame_id here and the frame_id of the CameraInfo
01095 # message associated with the image conflict
01096 # the behavior is undefined
01097
01098 uint32 height # image height, that is, number of rows
01099 uint32 width # image width, that is, number of columns
01100
01101 # The legal values for encoding are in file src/image_encodings.cpp
01102 # If you want to standardize a new string format, join
01103 # ros-users@lists.sourceforge.net and send an email proposing a new encoding.
01104
01105 string encoding # Encoding of pixels -- channel meaning, ordering, size
01106 # taken from the list of strings in src/image_encodings.cpp
01107
01108 uint8 is_bigendian # is this data bigendian?
01109 uint32 step # Full row length in bytes
01110 uint8[] data # actual matrix data, size is (step * rows)
01111
01112 ================================================================================
01113 MSG: sensor_msgs/CameraInfo
01114 # This message defines meta information for a camera. It should be in a
01115 # camera namespace on topic "camera_info" and accompanied by up to five
01116 # image topics named:
01117 #
01118 # image_raw - raw data from the camera driver, possibly Bayer encoded
01119 # image - monochrome, distorted
01120 # image_color - color, distorted
01121 # image_rect - monochrome, rectified
01122 # image_rect_color - color, rectified
01123 #
01124 # The image_pipeline contains packages (image_proc, stereo_image_proc)
01125 # for producing the four processed image topics from image_raw and
01126 # camera_info. The meaning of the camera parameters are described in
01127 # detail at http://www.ros.org/wiki/image_pipeline/CameraInfo.
01128 #
01129 # The image_geometry package provides a user-friendly interface to
01130 # common operations using this meta information. If you want to, e.g.,
01131 # project a 3d point into image coordinates, we strongly recommend
01132 # using image_geometry.
01133 #
01134 # If the camera is uncalibrated, the matrices D, K, R, P should be left
01135 # zeroed out. In particular, clients may assume that K[0] == 0.0
01136 # indicates an uncalibrated camera.
01137
01138 #######################################################################
01139 # Image acquisition info #
01140 #######################################################################
01141
01142 # Time of image acquisition, camera coordinate frame ID
01143 Header header # Header timestamp should be acquisition time of image
01144 # Header frame_id should be optical frame of camera
01145 # origin of frame should be optical center of camera
01146 # +x should point to the right in the image
01147 # +y should point down in the image
01148 # +z should point into the plane of the image
01149
01150
01151 #######################################################################
01152 # Calibration Parameters #
01153 #######################################################################
01154 # These are fixed during camera calibration. Their values will be the #
01155 # same in all messages until the camera is recalibrated. Note that #
01156 # self-calibrating systems may "recalibrate" frequently. #
01157 # #
01158 # The internal parameters can be used to warp a raw (distorted) image #
01159 # to: #
01160 # 1. An undistorted image (requires D and K) #
01161 # 2. A rectified image (requires D, K, R) #
01162 # The projection matrix P projects 3D points into the rectified image.#
01163 #######################################################################
01164
01165 # The image dimensions with which the camera was calibrated. Normally
01166 # this will be the full camera resolution in pixels.
01167 uint32 height
01168 uint32 width
01169
01170 # The distortion model used. Supported models are listed in
01171 # sensor_msgs/distortion_models.h. For most cameras, "plumb_bob" - a
01172 # simple model of radial and tangential distortion - is sufficient.
01173 string distortion_model
01174
01175 # The distortion parameters, size depending on the distortion model.
01176 # For "plumb_bob", the 5 parameters are: (k1, k2, t1, t2, k3).
01177 float64[] D
01178
01179 # Intrinsic camera matrix for the raw (distorted) images.
01180 # [fx 0 cx]
01181 # K = [ 0 fy cy]
01182 # [ 0 0 1]
01183 # Projects 3D points in the camera coordinate frame to 2D pixel
01184 # coordinates using the focal lengths (fx, fy) and principal point
01185 # (cx, cy).
01186 float64[9] K # 3x3 row-major matrix
01187
01188 # Rectification matrix (stereo cameras only)
01189 # A rotation matrix aligning the camera coordinate system to the ideal
01190 # stereo image plane so that epipolar lines in both stereo images are
01191 # parallel.
01192 float64[9] R # 3x3 row-major matrix
01193
01194 # Projection/camera matrix
01195 # [fx' 0 cx' Tx]
01196 # P = [ 0 fy' cy' Ty]
01197 # [ 0 0 1 0]
01198 # By convention, this matrix specifies the intrinsic (camera) matrix
01199 # of the processed (rectified) image. That is, the left 3x3 portion
01200 # is the normal camera intrinsic matrix for the rectified image.
01201 # It projects 3D points in the camera coordinate frame to 2D pixel
01202 # coordinates using the focal lengths (fx', fy') and principal point
01203 # (cx', cy') - these may differ from the values in K.
01204 # For monocular cameras, Tx = Ty = 0. Normally, monocular cameras will
01205 # also have R = the identity and P[1:3,1:3] = K.
01206 # For a stereo pair, the fourth column [Tx Ty 0]' is related to the
01207 # position of the optical center of the second camera in the first
01208 # camera's frame. We assume Tz = 0 so both cameras are in the same
01209 # stereo image plane. The first camera always has Tx = Ty = 0. For
01210 # the right (second) camera of a horizontal stereo pair, Ty = 0 and
01211 # Tx = -fx' * B, where B is the baseline between the cameras.
01212 # Given a 3D point [X Y Z]', the projection (x, y) of the point onto
01213 # the rectified image is given by:
01214 # [u v w]' = P * [X Y Z 1]'
01215 # x = u / w
01216 # y = v / w
01217 # This holds for both images of a stereo pair.
01218 float64[12] P # 3x4 row-major matrix
01219
01220
01221 #######################################################################
01222 # Operational Parameters #
01223 #######################################################################
01224 # These define the image region actually captured by the camera #
01225 # driver. Although they affect the geometry of the output image, they #
01226 # may be changed freely without recalibrating the camera. #
01227 #######################################################################
01228
01229 # Binning refers here to any camera setting which combines rectangular
01230 # neighborhoods of pixels into larger "super-pixels." It reduces the
01231 # resolution of the output image to
01232 # (width / binning_x) x (height / binning_y).
01233 # The default values binning_x = binning_y = 0 is considered the same
01234 # as binning_x = binning_y = 1 (no subsampling).
01235 uint32 binning_x
01236 uint32 binning_y
01237
01238 # Region of interest (subwindow of full camera resolution), given in
01239 # full resolution (unbinned) image coordinates. A particular ROI
01240 # always denotes the same window of pixels on the camera sensor,
01241 # regardless of binning settings.
01242 # The default setting of roi (all values 0) is considered the same as
01243 # full resolution (roi.width = width, roi.height = height).
01244 RegionOfInterest roi
01245
01246 ================================================================================
01247 MSG: sensor_msgs/RegionOfInterest
01248 # This message is used to specify a region of interest within an image.
01249 #
01250 # When used to specify the ROI setting of the camera when the image was
01251 # taken, the height and width fields should either match the height and
01252 # width fields for the associated image; or height = width = 0
01253 # indicates that the full resolution image was captured.
01254
01255 uint32 x_offset # Leftmost pixel of the ROI
01256 # (0 if the ROI includes the left edge of the image)
01257 uint32 y_offset # Topmost pixel of the ROI
01258 # (0 if the ROI includes the top edge of the image)
01259 uint32 height # Height of ROI
01260 uint32 width # Width of ROI
01261
01262 # True if a distinct rectified ROI should be calculated from the "raw"
01263 # ROI in this message. Typically this should be False if the full image
01264 # is captured (ROI not used), and True if a subwindow is captured (ROI
01265 # used).
01266 bool do_rectify
01267
01268 ================================================================================
01269 MSG: geometry_msgs/Vector3
01270 # This represents a vector in free space.
01271
01272 float64 x
01273 float64 y
01274 float64 z
01275 """
01276 __slots__ = ['graspable_objects','collision_object_names','collision_support_surface_name']
01277 _slot_types = ['object_manipulation_msgs/GraspableObject[]','string[]','string']
01278
01279 def __init__(self, *args, **kwds):
01280 """
01281 Constructor. Any message fields that are implicitly/explicitly
01282 set to None will be assigned a default value. The recommended
01283 use is keyword arguments, as this is more robust to future message
01284 changes. You cannot mix in-order arguments and keyword arguments.
01285
01286 The available fields are:
01287 graspable_objects,collision_object_names,collision_support_surface_name
01288
01289 @param args: complete set of field values, in .msg order
01290 @param kwds: use keyword arguments corresponding to message field names
01291 to set specific fields.
01292 """
01293 if args or kwds:
01294 super(TabletopCollisionMapProcessingResponse, self).__init__(*args, **kwds)
01295 #message fields cannot be None, assign default values for those that are
01296 if self.graspable_objects is None:
01297 self.graspable_objects = []
01298 if self.collision_object_names is None:
01299 self.collision_object_names = []
01300 if self.collision_support_surface_name is None:
01301 self.collision_support_surface_name = ''
01302 else:
01303 self.graspable_objects = []
01304 self.collision_object_names = []
01305 self.collision_support_surface_name = ''
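# Usage sketch (editor's note, not generated code): a response instance is
# normally obtained from the service call rather than built by hand; the field
# names below come straight from __slots__ above, the comments are the editor's
# reading of the embedded message definitions.
#
#   resp = TabletopCollisionMapProcessingResponse()
#   for obj in resp.graspable_objects:
#       pass  # each entry is an object_manipulation_msgs/GraspableObject
#   resp.collision_object_names            # names used in the collision environment
#   resp.collision_support_surface_name    # name of the detected support surface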
01306
01307 def _get_types(self):
01308 """
01309 internal API method
01310 """
01311 return self._slot_types
01312
01313 def serialize(self, buff):
01314 """
01315 serialize message into buffer
01316 @param buff: buffer
01317 @type buff: StringIO
01318 """
01319 try:
01320 length = len(self.graspable_objects)
01321 buff.write(_struct_I.pack(length))
01322 for val1 in self.graspable_objects:
01323 _x = val1.reference_frame_id
01324 length = len(_x)
01325 buff.write(struct.pack('<I%ss'%length, length, _x))
01326 length = len(val1.potential_models)
01327 buff.write(_struct_I.pack(length))
01328 for val2 in val1.potential_models:
01329 buff.write(_struct_i.pack(val2.model_id))
01330 _v33 = val2.pose
01331 _v34 = _v33.header
01332 buff.write(_struct_I.pack(_v34.seq))
01333 _v35 = _v34.stamp
01334 _x = _v35
01335 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01336 _x = _v34.frame_id
01337 length = len(_x)
01338 buff.write(struct.pack('<I%ss'%length, length, _x))
01339 _v36 = _v33.pose
01340 _v37 = _v36.position
01341 _x = _v37
01342 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01343 _v38 = _v36.orientation
01344 _x = _v38
01345 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
01346 buff.write(_struct_f.pack(val2.confidence))
01347 _x = val2.detector_name
01348 length = len(_x)
01349 buff.write(struct.pack('<I%ss'%length, length, _x))
01350 _v39 = val1.cluster
01351 _v40 = _v39.header
01352 buff.write(_struct_I.pack(_v40.seq))
01353 _v41 = _v40.stamp
01354 _x = _v41
01355 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01356 _x = _v40.frame_id
01357 length = len(_x)
01358 buff.write(struct.pack('<I%ss'%length, length, _x))
01359 length = len(_v39.points)
01360 buff.write(_struct_I.pack(length))
01361 for val3 in _v39.points:
01362 _x = val3
01363 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
01364 length = len(_v39.channels)
01365 buff.write(_struct_I.pack(length))
01366 for val3 in _v39.channels:
01367 _x = val3.name
01368 length = len(_x)
01369 buff.write(struct.pack('<I%ss'%length, length, _x))
01370 length = len(val3.values)
01371 buff.write(_struct_I.pack(length))
01372 pattern = '<%sf'%length
01373 buff.write(struct.pack(pattern, *val3.values))
01374 _v42 = val1.region
01375 _v43 = _v42.cloud
01376 _v44 = _v43.header
01377 buff.write(_struct_I.pack(_v44.seq))
01378 _v45 = _v44.stamp
01379 _x = _v45
01380 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01381 _x = _v44.frame_id
01382 length = len(_x)
01383 buff.write(struct.pack('<I%ss'%length, length, _x))
01384 _x = _v43
01385 buff.write(_struct_2I.pack(_x.height, _x.width))
01386 length = len(_v43.fields)
01387 buff.write(_struct_I.pack(length))
01388 for val4 in _v43.fields:
01389 _x = val4.name
01390 length = len(_x)
01391 buff.write(struct.pack('<I%ss'%length, length, _x))
01392 _x = val4
01393 buff.write(_struct_IBI.pack(_x.offset, _x.datatype, _x.count))
01394 _x = _v43
01395 buff.write(_struct_B2I.pack(_x.is_bigendian, _x.point_step, _x.row_step))
01396 _x = _v43.data
01397 length = len(_x)
01398 # - if encoded as a list instead, serialize as bytes instead of string
01399 if type(_x) in [list, tuple]:
01400 buff.write(struct.pack('<I%sB'%length, length, *_x))
01401 else:
01402 buff.write(struct.pack('<I%ss'%length, length, _x))
01403 buff.write(_struct_B.pack(_v43.is_dense))
01404 length = len(_v42.mask)
01405 buff.write(_struct_I.pack(length))
01406 pattern = '<%si'%length
01407 buff.write(struct.pack(pattern, *_v42.mask))
01408 _v46 = _v42.image
01409 _v47 = _v46.header
01410 buff.write(_struct_I.pack(_v47.seq))
01411 _v48 = _v47.stamp
01412 _x = _v48
01413 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01414 _x = _v47.frame_id
01415 length = len(_x)
01416 buff.write(struct.pack('<I%ss'%length, length, _x))
01417 _x = _v46
01418 buff.write(_struct_2I.pack(_x.height, _x.width))
01419 _x = _v46.encoding
01420 length = len(_x)
01421 buff.write(struct.pack('<I%ss'%length, length, _x))
01422 _x = _v46
01423 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01424 _x = _v46.data
01425 length = len(_x)
01426 # - serialize as bytes if the data was given as a list/tuple, otherwise as a string
01427 if type(_x) in [list, tuple]:
01428 buff.write(struct.pack('<I%sB'%length, length, *_x))
01429 else:
01430 buff.write(struct.pack('<I%ss'%length, length, _x))
01431 _v49 = _v42.disparity_image
01432 _v50 = _v49.header
01433 buff.write(_struct_I.pack(_v50.seq))
01434 _v51 = _v50.stamp
01435 _x = _v51
01436 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01437 _x = _v50.frame_id
01438 length = len(_x)
01439 buff.write(struct.pack('<I%ss'%length, length, _x))
01440 _x = _v49
01441 buff.write(_struct_2I.pack(_x.height, _x.width))
01442 _x = _v49.encoding
01443 length = len(_x)
01444 buff.write(struct.pack('<I%ss'%length, length, _x))
01445 _x = _v49
01446 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01447 _x = _v49.data
01448 length = len(_x)
01449 # - serialize as bytes if the data was given as a list/tuple, otherwise as a string
01450 if type(_x) in [list, tuple]:
01451 buff.write(struct.pack('<I%sB'%length, length, *_x))
01452 else:
01453 buff.write(struct.pack('<I%ss'%length, length, _x))
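# pack the region's sensor_msgs/CameraInfo (distortion model, D, K, R, P, binning, ROI)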
01454 _v52 = _v42.cam_info
01455 _v53 = _v52.header
01456 buff.write(_struct_I.pack(_v53.seq))
01457 _v54 = _v53.stamp
01458 _x = _v54
01459 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01460 _x = _v53.frame_id
01461 length = len(_x)
01462 buff.write(struct.pack('<I%ss'%length, length, _x))
01463 _x = _v52
01464 buff.write(_struct_2I.pack(_x.height, _x.width))
01465 _x = _v52.distortion_model
01466 length = len(_x)
01467 buff.write(struct.pack('<I%ss'%length, length, _x))
01468 length = len(_v52.D)
01469 buff.write(_struct_I.pack(length))
01470 pattern = '<%sd'%length
01471 buff.write(struct.pack(pattern, *_v52.D))
01472 buff.write(_struct_9d.pack(*_v52.K))
01473 buff.write(_struct_9d.pack(*_v52.R))
01474 buff.write(_struct_12d.pack(*_v52.P))
01475 _x = _v52
01476 buff.write(_struct_2I.pack(_x.binning_x, _x.binning_y))
01477 _v55 = _v52.roi
01478 _x = _v55
01479 buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
01480 _v56 = _v42.roi_box_pose
01481 _v57 = _v56.header
01482 buff.write(_struct_I.pack(_v57.seq))
01483 _v58 = _v57.stamp
01484 _x = _v58
01485 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01486 _x = _v57.frame_id
01487 length = len(_x)
01488 buff.write(struct.pack('<I%ss'%length, length, _x))
01489 _v59 = _v56.pose
01490 _v60 = _v59.position
01491 _x = _v60
01492 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01493 _v61 = _v59.orientation
01494 _x = _v61
01495 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
01496 _v62 = _v42.roi_box_dims
01497 _x = _v62
01498 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01499 _x = val1.collision_name
01500 length = len(_x)
01501 buff.write(struct.pack('<I%ss'%length, length, _x))
01502 length = len(self.collision_object_names)
01503 buff.write(_struct_I.pack(length))
01504 for val1 in self.collision_object_names:
01505 length = len(val1)
01506 buff.write(struct.pack('<I%ss'%length, length, val1))
01507 _x = self.collision_support_surface_name
01508 length = len(_x)
01509 buff.write(struct.pack('<I%ss'%length, length, _x))
01510 except struct.error as se: self._check_types(se)
01511 except TypeError as te: self._check_types(te)
01512
01513 def deserialize(self, str):
01514 """
01515 unpack serialized message in str into this message instance
01516 @param str: byte array of serialized message
01517 @type str: str
01518 """
01519 try:
01520 end = 0
01521 start = end
01522 end += 4
01523 (length,) = _struct_I.unpack(str[start:end])
01524 self.graspable_objects = []
01525 for i in range(0, length):
01526 val1 = object_manipulation_msgs.msg.GraspableObject()
01527 start = end
01528 end += 4
01529 (length,) = _struct_I.unpack(str[start:end])
01530 start = end
01531 end += length
01532 val1.reference_frame_id = str[start:end]
01533 start = end
01534 end += 4
01535 (length,) = _struct_I.unpack(str[start:end])
01536 val1.potential_models = []
01537 for i in range(0, length):
01538 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
01539 start = end
01540 end += 4
01541 (val2.model_id,) = _struct_i.unpack(str[start:end])
01542 _v63 = val2.pose
01543 _v64 = _v63.header
01544 start = end
01545 end += 4
01546 (_v64.seq,) = _struct_I.unpack(str[start:end])
01547 _v65 = _v64.stamp
01548 _x = _v65
01549 start = end
01550 end += 8
01551 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01552 start = end
01553 end += 4
01554 (length,) = _struct_I.unpack(str[start:end])
01555 start = end
01556 end += length
01557 _v64.frame_id = str[start:end]
01558 _v66 = _v63.pose
01559 _v67 = _v66.position
01560 _x = _v67
01561 start = end
01562 end += 24
01563 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
01564 _v68 = _v66.orientation
01565 _x = _v68
01566 start = end
01567 end += 32
01568 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
01569 start = end
01570 end += 4
01571 (val2.confidence,) = _struct_f.unpack(str[start:end])
01572 start = end
01573 end += 4
01574 (length,) = _struct_I.unpack(str[start:end])
01575 start = end
01576 end += length
01577 val2.detector_name = str[start:end]
01578 val1.potential_models.append(val2)
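# unpack val1.cluster, a sensor_msgs/PointCloud (header, points, channels)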
01579 _v69 = val1.cluster
01580 _v70 = _v69.header
01581 start = end
01582 end += 4
01583 (_v70.seq,) = _struct_I.unpack(str[start:end])
01584 _v71 = _v70.stamp
01585 _x = _v71
01586 start = end
01587 end += 8
01588 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01589 start = end
01590 end += 4
01591 (length,) = _struct_I.unpack(str[start:end])
01592 start = end
01593 end += length
01594 _v70.frame_id = str[start:end]
01595 start = end
01596 end += 4
01597 (length,) = _struct_I.unpack(str[start:end])
01598 _v69.points = []
01599 for i in range(0, length):
01600 val3 = geometry_msgs.msg.Point32()
01601 _x = val3
01602 start = end
01603 end += 12
01604 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
01605 _v69.points.append(val3)
01606 start = end
01607 end += 4
01608 (length,) = _struct_I.unpack(str[start:end])
01609 _v69.channels = []
01610 for i in range(0, length):
01611 val3 = sensor_msgs.msg.ChannelFloat32()
01612 start = end
01613 end += 4
01614 (length,) = _struct_I.unpack(str[start:end])
01615 start = end
01616 end += length
01617 val3.name = str[start:end]
01618 start = end
01619 end += 4
01620 (length,) = _struct_I.unpack(str[start:end])
01621 pattern = '<%sf'%length
01622 start = end
01623 end += struct.calcsize(pattern)
01624 val3.values = struct.unpack(pattern, str[start:end])
01625 _v69.channels.append(val3)
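# unpack val1.region: cloud, mask, image, disparity image, camera info and ROI box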
01626 _v72 = val1.region
01627 _v73 = _v72.cloud
01628 _v74 = _v73.header
01629 start = end
01630 end += 4
01631 (_v74.seq,) = _struct_I.unpack(str[start:end])
01632 _v75 = _v74.stamp
01633 _x = _v75
01634 start = end
01635 end += 8
01636 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01637 start = end
01638 end += 4
01639 (length,) = _struct_I.unpack(str[start:end])
01640 start = end
01641 end += length
01642 _v74.frame_id = str[start:end]
01643 _x = _v73
01644 start = end
01645 end += 8
01646 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01647 start = end
01648 end += 4
01649 (length,) = _struct_I.unpack(str[start:end])
01650 _v73.fields = []
01651 for i in range(0, length):
01652 val4 = sensor_msgs.msg.PointField()
01653 start = end
01654 end += 4
01655 (length,) = _struct_I.unpack(str[start:end])
01656 start = end
01657 end += length
01658 val4.name = str[start:end]
01659 _x = val4
01660 start = end
01661 end += 9
01662 (_x.offset, _x.datatype, _x.count,) = _struct_IBI.unpack(str[start:end])
01663 _v73.fields.append(val4)
01664 _x = _v73
01665 start = end
01666 end += 9
01667 (_x.is_bigendian, _x.point_step, _x.row_step,) = _struct_B2I.unpack(str[start:end])
01668 _v73.is_bigendian = bool(_v73.is_bigendian)
01669 start = end
01670 end += 4
01671 (length,) = _struct_I.unpack(str[start:end])
01672 start = end
01673 end += length
01674 _v73.data = str[start:end]
01675 start = end
01676 end += 1
01677 (_v73.is_dense,) = _struct_B.unpack(str[start:end])
01678 _v73.is_dense = bool(_v73.is_dense)
01679 start = end
01680 end += 4
01681 (length,) = _struct_I.unpack(str[start:end])
01682 pattern = '<%si'%length
01683 start = end
01684 end += struct.calcsize(pattern)
01685 _v72.mask = struct.unpack(pattern, str[start:end])
01686 _v76 = _v72.image
01687 _v77 = _v76.header
01688 start = end
01689 end += 4
01690 (_v77.seq,) = _struct_I.unpack(str[start:end])
01691 _v78 = _v77.stamp
01692 _x = _v78
01693 start = end
01694 end += 8
01695 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01696 start = end
01697 end += 4
01698 (length,) = _struct_I.unpack(str[start:end])
01699 start = end
01700 end += length
01701 _v77.frame_id = str[start:end]
01702 _x = _v76
01703 start = end
01704 end += 8
01705 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01706 start = end
01707 end += 4
01708 (length,) = _struct_I.unpack(str[start:end])
01709 start = end
01710 end += length
01711 _v76.encoding = str[start:end]
01712 _x = _v76
01713 start = end
01714 end += 5
01715 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
01716 start = end
01717 end += 4
01718 (length,) = _struct_I.unpack(str[start:end])
01719 start = end
01720 end += length
01721 _v76.data = str[start:end]
01722 _v79 = _v72.disparity_image
01723 _v80 = _v79.header
01724 start = end
01725 end += 4
01726 (_v80.seq,) = _struct_I.unpack(str[start:end])
01727 _v81 = _v80.stamp
01728 _x = _v81
01729 start = end
01730 end += 8
01731 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01732 start = end
01733 end += 4
01734 (length,) = _struct_I.unpack(str[start:end])
01735 start = end
01736 end += length
01737 _v80.frame_id = str[start:end]
01738 _x = _v79
01739 start = end
01740 end += 8
01741 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01742 start = end
01743 end += 4
01744 (length,) = _struct_I.unpack(str[start:end])
01745 start = end
01746 end += length
01747 _v79.encoding = str[start:end]
01748 _x = _v79
01749 start = end
01750 end += 5
01751 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
01752 start = end
01753 end += 4
01754 (length,) = _struct_I.unpack(str[start:end])
01755 start = end
01756 end += length
01757 _v79.data = str[start:end]
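# unpack the region's sensor_msgs/CameraInfo (distortion model, D, K, R, P, binning, ROI)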
01758 _v82 = _v72.cam_info
01759 _v83 = _v82.header
01760 start = end
01761 end += 4
01762 (_v83.seq,) = _struct_I.unpack(str[start:end])
01763 _v84 = _v83.stamp
01764 _x = _v84
01765 start = end
01766 end += 8
01767 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01768 start = end
01769 end += 4
01770 (length,) = _struct_I.unpack(str[start:end])
01771 start = end
01772 end += length
01773 _v83.frame_id = str[start:end]
01774 _x = _v82
01775 start = end
01776 end += 8
01777 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01778 start = end
01779 end += 4
01780 (length,) = _struct_I.unpack(str[start:end])
01781 start = end
01782 end += length
01783 _v82.distortion_model = str[start:end]
01784 start = end
01785 end += 4
01786 (length,) = _struct_I.unpack(str[start:end])
01787 pattern = '<%sd'%length
01788 start = end
01789 end += struct.calcsize(pattern)
01790 _v82.D = struct.unpack(pattern, str[start:end])
01791 start = end
01792 end += 72
01793 _v82.K = _struct_9d.unpack(str[start:end])
01794 start = end
01795 end += 72
01796 _v82.R = _struct_9d.unpack(str[start:end])
01797 start = end
01798 end += 96
01799 _v82.P = _struct_12d.unpack(str[start:end])
01800 _x = _v82
01801 start = end
01802 end += 8
01803 (_x.binning_x, _x.binning_y,) = _struct_2I.unpack(str[start:end])
01804 _v85 = _v82.roi
01805 _x = _v85
01806 start = end
01807 end += 17
01808 (_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
01809 _v85.do_rectify = bool(_v85.do_rectify)
01810 _v86 = _v72.roi_box_pose
01811 _v87 = _v86.header
01812 start = end
01813 end += 4
01814 (_v87.seq,) = _struct_I.unpack(str[start:end])
01815 _v88 = _v87.stamp
01816 _x = _v88
01817 start = end
01818 end += 8
01819 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01820 start = end
01821 end += 4
01822 (length,) = _struct_I.unpack(str[start:end])
01823 start = end
01824 end += length
01825 _v87.frame_id = str[start:end]
01826 _v89 = _v86.pose
01827 _v90 = _v89.position
01828 _x = _v90
01829 start = end
01830 end += 24
01831 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
01832 _v91 = _v89.orientation
01833 _x = _v91
01834 start = end
01835 end += 32
01836 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
01837 _v92 = _v72.roi_box_dims
01838 _x = _v92
01839 start = end
01840 end += 24
01841 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
01842 start = end
01843 end += 4
01844 (length,) = _struct_I.unpack(str[start:end])
01845 start = end
01846 end += length
01847 val1.collision_name = str[start:end]
01848 self.graspable_objects.append(val1)
01849 start = end
01850 end += 4
01851 (length,) = _struct_I.unpack(str[start:end])
01852 self.collision_object_names = []
01853 for i in range(0, length):
01854 start = end
01855 end += 4
01856 (length,) = _struct_I.unpack(str[start:end])
01857 start = end
01858 end += length
01859 val1 = str[start:end]
01860 self.collision_object_names.append(val1)
01861 start = end
01862 end += 4
01863 (length,) = _struct_I.unpack(str[start:end])
01864 start = end
01865 end += length
01866 self.collision_support_surface_name = str[start:end]
01867 return self
01868 except struct.error as e:
01869 raise roslib.message.DeserializationError(e) #most likely buffer underfill
01870
01871
01872 def serialize_numpy(self, buff, numpy):
01873 """
01874 serialize message with numpy array types into buffer
01875 @param buff: buffer
01876 @type buff: StringIO
01877 @param numpy: numpy python module
01878 @type numpy: module
01879 """
01880 try:
01881 length = len(self.graspable_objects)
01882 buff.write(_struct_I.pack(length))
01883 for val1 in self.graspable_objects:
01884 _x = val1.reference_frame_id
01885 length = len(_x)
01886 buff.write(struct.pack('<I%ss'%length, length, _x))
01887 length = len(val1.potential_models)
01888 buff.write(_struct_I.pack(length))
01889 for val2 in val1.potential_models:
01890 buff.write(_struct_i.pack(val2.model_id))
01891 _v93 = val2.pose
01892 _v94 = _v93.header
01893 buff.write(_struct_I.pack(_v94.seq))
01894 _v95 = _v94.stamp
01895 _x = _v95
01896 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01897 _x = _v94.frame_id
01898 length = len(_x)
01899 buff.write(struct.pack('<I%ss'%length, length, _x))
01900 _v96 = _v93.pose
01901 _v97 = _v96.position
01902 _x = _v97
01903 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01904 _v98 = _v96.orientation
01905 _x = _v98
01906 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
01907 buff.write(_struct_f.pack(val2.confidence))
01908 _x = val2.detector_name
01909 length = len(_x)
01910 buff.write(struct.pack('<I%ss'%length, length, _x))
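# pack val1.cluster, a sensor_msgs/PointCloud; channel value arrays are written via numpy tostring()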
01911 _v99 = val1.cluster
01912 _v100 = _v99.header
01913 buff.write(_struct_I.pack(_v100.seq))
01914 _v101 = _v100.stamp
01915 _x = _v101
01916 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01917 _x = _v100.frame_id
01918 length = len(_x)
01919 buff.write(struct.pack('<I%ss'%length, length, _x))
01920 length = len(_v99.points)
01921 buff.write(_struct_I.pack(length))
01922 for val3 in _v99.points:
01923 _x = val3
01924 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
01925 length = len(_v99.channels)
01926 buff.write(_struct_I.pack(length))
01927 for val3 in _v99.channels:
01928 _x = val3.name
01929 length = len(_x)
01930 buff.write(struct.pack('<I%ss'%length, length, _x))
01931 length = len(val3.values)
01932 buff.write(_struct_I.pack(length))
01933 pattern = '<%sf'%length
01934 buff.write(val3.values.tostring())
01935 _v102 = val1.region
01936 _v103 = _v102.cloud
01937 _v104 = _v103.header
01938 buff.write(_struct_I.pack(_v104.seq))
01939 _v105 = _v104.stamp
01940 _x = _v105
01941 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01942 _x = _v104.frame_id
01943 length = len(_x)
01944 buff.write(struct.pack('<I%ss'%length, length, _x))
01945 _x = _v103
01946 buff.write(_struct_2I.pack(_x.height, _x.width))
01947 length = len(_v103.fields)
01948 buff.write(_struct_I.pack(length))
01949 for val4 in _v103.fields:
01950 _x = val4.name
01951 length = len(_x)
01952 buff.write(struct.pack('<I%ss'%length, length, _x))
01953 _x = val4
01954 buff.write(_struct_IBI.pack(_x.offset, _x.datatype, _x.count))
01955 _x = _v103
01956 buff.write(_struct_B2I.pack(_x.is_bigendian, _x.point_step, _x.row_step))
01957 _x = _v103.data
01958 length = len(_x)
01959 # - serialize as bytes if the data was given as a list/tuple, otherwise as a string
01960 if type(_x) in [list, tuple]:
01961 buff.write(struct.pack('<I%sB'%length, length, *_x))
01962 else:
01963 buff.write(struct.pack('<I%ss'%length, length, _x))
01964 buff.write(_struct_B.pack(_v103.is_dense))
01965 length = len(_v102.mask)
01966 buff.write(_struct_I.pack(length))
01967 pattern = '<%si'%length
01968 buff.write(_v102.mask.tostring())
01969 _v106 = _v102.image
01970 _v107 = _v106.header
01971 buff.write(_struct_I.pack(_v107.seq))
01972 _v108 = _v107.stamp
01973 _x = _v108
01974 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01975 _x = _v107.frame_id
01976 length = len(_x)
01977 buff.write(struct.pack('<I%ss'%length, length, _x))
01978 _x = _v106
01979 buff.write(_struct_2I.pack(_x.height, _x.width))
01980 _x = _v106.encoding
01981 length = len(_x)
01982 buff.write(struct.pack('<I%ss'%length, length, _x))
01983 _x = _v106
01984 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01985 _x = _v106.data
01986 length = len(_x)
01987 # - serialize as bytes if the data was given as a list/tuple, otherwise as a string
01988 if type(_x) in [list, tuple]:
01989 buff.write(struct.pack('<I%sB'%length, length, *_x))
01990 else:
01991 buff.write(struct.pack('<I%ss'%length, length, _x))
01992 _v109 = _v102.disparity_image
01993 _v110 = _v109.header
01994 buff.write(_struct_I.pack(_v110.seq))
01995 _v111 = _v110.stamp
01996 _x = _v111
01997 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01998 _x = _v110.frame_id
01999 length = len(_x)
02000 buff.write(struct.pack('<I%ss'%length, length, _x))
02001 _x = _v109
02002 buff.write(_struct_2I.pack(_x.height, _x.width))
02003 _x = _v109.encoding
02004 length = len(_x)
02005 buff.write(struct.pack('<I%ss'%length, length, _x))
02006 _x = _v109
02007 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
02008 _x = _v109.data
02009 length = len(_x)
02010 # - serialize as bytes if the data was given as a list/tuple, otherwise as a string
02011 if type(_x) in [list, tuple]:
02012 buff.write(struct.pack('<I%sB'%length, length, *_x))
02013 else:
02014 buff.write(struct.pack('<I%ss'%length, length, _x))
02015 _v112 = _v102.cam_info
02016 _v113 = _v112.header
02017 buff.write(_struct_I.pack(_v113.seq))
02018 _v114 = _v113.stamp
02019 _x = _v114
02020 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02021 _x = _v113.frame_id
02022 length = len(_x)
02023 buff.write(struct.pack('<I%ss'%length, length, _x))
02024 _x = _v112
02025 buff.write(_struct_2I.pack(_x.height, _x.width))
02026 _x = _v112.distortion_model
02027 length = len(_x)
02028 buff.write(struct.pack('<I%ss'%length, length, _x))
02029 length = len(_v112.D)
02030 buff.write(_struct_I.pack(length))
02031 pattern = '<%sd'%length
02032 buff.write(_v112.D.tostring())
02033 buff.write(_v112.K.tostring())
02034 buff.write(_v112.R.tostring())
02035 buff.write(_v112.P.tostring())
02036 _x = _v112
02037 buff.write(_struct_2I.pack(_x.binning_x, _x.binning_y))
02038 _v115 = _v112.roi
02039 _x = _v115
02040 buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
02041 _v116 = _v102.roi_box_pose
02042 _v117 = _v116.header
02043 buff.write(_struct_I.pack(_v117.seq))
02044 _v118 = _v117.stamp
02045 _x = _v118
02046 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
02047 _x = _v117.frame_id
02048 length = len(_x)
02049 buff.write(struct.pack('<I%ss'%length, length, _x))
02050 _v119 = _v116.pose
02051 _v120 = _v119.position
02052 _x = _v120
02053 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
02054 _v121 = _v119.orientation
02055 _x = _v121
02056 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
02057 _v122 = _v102.roi_box_dims
02058 _x = _v122
02059 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
02060 _x = val1.collision_name
02061 length = len(_x)
02062 buff.write(struct.pack('<I%ss'%length, length, _x))
02063 length = len(self.collision_object_names)
02064 buff.write(_struct_I.pack(length))
02065 for val1 in self.collision_object_names:
02066 length = len(val1)
02067 buff.write(struct.pack('<I%ss'%length, length, val1))
02068 _x = self.collision_support_surface_name
02069 length = len(_x)
02070 buff.write(struct.pack('<I%ss'%length, length, _x))
02071 except struct.error as se: self._check_types(se)
02072 except TypeError as te: self._check_types(te)
02073
02074 def deserialize_numpy(self, str, numpy):
02075 """
02076 unpack serialized message in str into this message instance using numpy for array types
02077 @param str: byte array of serialized message
02078 @type str: str
02079 @param numpy: numpy python module
02080 @type numpy: module
02081 """
02082 try:
02083 end = 0
02084 start = end
02085 end += 4
02086 (length,) = _struct_I.unpack(str[start:end])
02087 self.graspable_objects = []
02088 for i in range(0, length):
02089 val1 = object_manipulation_msgs.msg.GraspableObject()
02090 start = end
02091 end += 4
02092 (length,) = _struct_I.unpack(str[start:end])
02093 start = end
02094 end += length
02095 val1.reference_frame_id = str[start:end]
02096 start = end
02097 end += 4
02098 (length,) = _struct_I.unpack(str[start:end])
02099 val1.potential_models = []
02100 for i in range(0, length):
02101 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
02102 start = end
02103 end += 4
02104 (val2.model_id,) = _struct_i.unpack(str[start:end])
02105 _v123 = val2.pose
02106 _v124 = _v123.header
02107 start = end
02108 end += 4
02109 (_v124.seq,) = _struct_I.unpack(str[start:end])
02110 _v125 = _v124.stamp
02111 _x = _v125
02112 start = end
02113 end += 8
02114 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02115 start = end
02116 end += 4
02117 (length,) = _struct_I.unpack(str[start:end])
02118 start = end
02119 end += length
02120 _v124.frame_id = str[start:end]
02121 _v126 = _v123.pose
02122 _v127 = _v126.position
02123 _x = _v127
02124 start = end
02125 end += 24
02126 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02127 _v128 = _v126.orientation
02128 _x = _v128
02129 start = end
02130 end += 32
02131 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
02132 start = end
02133 end += 4
02134 (val2.confidence,) = _struct_f.unpack(str[start:end])
02135 start = end
02136 end += 4
02137 (length,) = _struct_I.unpack(str[start:end])
02138 start = end
02139 end += length
02140 val2.detector_name = str[start:end]
02141 val1.potential_models.append(val2)
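# unpack val1.cluster, a sensor_msgs/PointCloud; channel values are rebuilt as numpy arrays (frombuffer)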
02142 _v129 = val1.cluster
02143 _v130 = _v129.header
02144 start = end
02145 end += 4
02146 (_v130.seq,) = _struct_I.unpack(str[start:end])
02147 _v131 = _v130.stamp
02148 _x = _v131
02149 start = end
02150 end += 8
02151 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02152 start = end
02153 end += 4
02154 (length,) = _struct_I.unpack(str[start:end])
02155 start = end
02156 end += length
02157 _v130.frame_id = str[start:end]
02158 start = end
02159 end += 4
02160 (length,) = _struct_I.unpack(str[start:end])
02161 _v129.points = []
02162 for i in range(0, length):
02163 val3 = geometry_msgs.msg.Point32()
02164 _x = val3
02165 start = end
02166 end += 12
02167 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
02168 _v129.points.append(val3)
02169 start = end
02170 end += 4
02171 (length,) = _struct_I.unpack(str[start:end])
02172 _v129.channels = []
02173 for i in range(0, length):
02174 val3 = sensor_msgs.msg.ChannelFloat32()
02175 start = end
02176 end += 4
02177 (length,) = _struct_I.unpack(str[start:end])
02178 start = end
02179 end += length
02180 val3.name = str[start:end]
02181 start = end
02182 end += 4
02183 (length,) = _struct_I.unpack(str[start:end])
02184 pattern = '<%sf'%length
02185 start = end
02186 end += struct.calcsize(pattern)
02187 val3.values = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=length)
02188 _v129.channels.append(val3)
02189 _v132 = val1.region
02190 _v133 = _v132.cloud
02191 _v134 = _v133.header
02192 start = end
02193 end += 4
02194 (_v134.seq,) = _struct_I.unpack(str[start:end])
02195 _v135 = _v134.stamp
02196 _x = _v135
02197 start = end
02198 end += 8
02199 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02200 start = end
02201 end += 4
02202 (length,) = _struct_I.unpack(str[start:end])
02203 start = end
02204 end += length
02205 _v134.frame_id = str[start:end]
02206 _x = _v133
02207 start = end
02208 end += 8
02209 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02210 start = end
02211 end += 4
02212 (length,) = _struct_I.unpack(str[start:end])
02213 _v133.fields = []
02214 for i in range(0, length):
02215 val4 = sensor_msgs.msg.PointField()
02216 start = end
02217 end += 4
02218 (length,) = _struct_I.unpack(str[start:end])
02219 start = end
02220 end += length
02221 val4.name = str[start:end]
02222 _x = val4
02223 start = end
02224 end += 9
02225 (_x.offset, _x.datatype, _x.count,) = _struct_IBI.unpack(str[start:end])
02226 _v133.fields.append(val4)
02227 _x = _v133
02228 start = end
02229 end += 9
02230 (_x.is_bigendian, _x.point_step, _x.row_step,) = _struct_B2I.unpack(str[start:end])
02231 _v133.is_bigendian = bool(_v133.is_bigendian)
02232 start = end
02233 end += 4
02234 (length,) = _struct_I.unpack(str[start:end])
02235 start = end
02236 end += length
02237 _v133.data = str[start:end]
02238 start = end
02239 end += 1
02240 (_v133.is_dense,) = _struct_B.unpack(str[start:end])
02241 _v133.is_dense = bool(_v133.is_dense)
02242 start = end
02243 end += 4
02244 (length,) = _struct_I.unpack(str[start:end])
02245 pattern = '<%si'%length
02246 start = end
02247 end += struct.calcsize(pattern)
02248 _v132.mask = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
02249 _v136 = _v132.image
02250 _v137 = _v136.header
02251 start = end
02252 end += 4
02253 (_v137.seq,) = _struct_I.unpack(str[start:end])
02254 _v138 = _v137.stamp
02255 _x = _v138
02256 start = end
02257 end += 8
02258 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02259 start = end
02260 end += 4
02261 (length,) = _struct_I.unpack(str[start:end])
02262 start = end
02263 end += length
02264 _v137.frame_id = str[start:end]
02265 _x = _v136
02266 start = end
02267 end += 8
02268 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02269 start = end
02270 end += 4
02271 (length,) = _struct_I.unpack(str[start:end])
02272 start = end
02273 end += length
02274 _v136.encoding = str[start:end]
02275 _x = _v136
02276 start = end
02277 end += 5
02278 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
02279 start = end
02280 end += 4
02281 (length,) = _struct_I.unpack(str[start:end])
02282 start = end
02283 end += length
02284 _v136.data = str[start:end]
02285 _v139 = _v132.disparity_image
02286 _v140 = _v139.header
02287 start = end
02288 end += 4
02289 (_v140.seq,) = _struct_I.unpack(str[start:end])
02290 _v141 = _v140.stamp
02291 _x = _v141
02292 start = end
02293 end += 8
02294 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02295 start = end
02296 end += 4
02297 (length,) = _struct_I.unpack(str[start:end])
02298 start = end
02299 end += length
02300 _v140.frame_id = str[start:end]
02301 _x = _v139
02302 start = end
02303 end += 8
02304 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02305 start = end
02306 end += 4
02307 (length,) = _struct_I.unpack(str[start:end])
02308 start = end
02309 end += length
02310 _v139.encoding = str[start:end]
02311 _x = _v139
02312 start = end
02313 end += 5
02314 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
02315 start = end
02316 end += 4
02317 (length,) = _struct_I.unpack(str[start:end])
02318 start = end
02319 end += length
02320 _v139.data = str[start:end]
02321 _v142 = _v132.cam_info
02322 _v143 = _v142.header
02323 start = end
02324 end += 4
02325 (_v143.seq,) = _struct_I.unpack(str[start:end])
02326 _v144 = _v143.stamp
02327 _x = _v144
02328 start = end
02329 end += 8
02330 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02331 start = end
02332 end += 4
02333 (length,) = _struct_I.unpack(str[start:end])
02334 start = end
02335 end += length
02336 _v143.frame_id = str[start:end]
02337 _x = _v142
02338 start = end
02339 end += 8
02340 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02341 start = end
02342 end += 4
02343 (length,) = _struct_I.unpack(str[start:end])
02344 start = end
02345 end += length
02346 _v142.distortion_model = str[start:end]
02347 start = end
02348 end += 4
02349 (length,) = _struct_I.unpack(str[start:end])
02350 pattern = '<%sd'%length
02351 start = end
02352 end += struct.calcsize(pattern)
02353 _v142.D = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
02354 start = end
02355 end += 72
02356 _v142.K = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
02357 start = end
02358 end += 72
02359 _v142.R = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
02360 start = end
02361 end += 96
02362 _v142.P = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=12)
02363 _x = _v142
02364 start = end
02365 end += 8
02366 (_x.binning_x, _x.binning_y,) = _struct_2I.unpack(str[start:end])
02367 _v145 = _v142.roi
02368 _x = _v145
02369 start = end
02370 end += 17
02371 (_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
02372 _v145.do_rectify = bool(_v145.do_rectify)
02373 _v146 = _v132.roi_box_pose
02374 _v147 = _v146.header
02375 start = end
02376 end += 4
02377 (_v147.seq,) = _struct_I.unpack(str[start:end])
02378 _v148 = _v147.stamp
02379 _x = _v148
02380 start = end
02381 end += 8
02382 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02383 start = end
02384 end += 4
02385 (length,) = _struct_I.unpack(str[start:end])
02386 start = end
02387 end += length
02388 _v147.frame_id = str[start:end]
02389 _v149 = _v146.pose
02390 _v150 = _v149.position
02391 _x = _v150
02392 start = end
02393 end += 24
02394 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02395 _v151 = _v149.orientation
02396 _x = _v151
02397 start = end
02398 end += 32
02399 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
02400 _v152 = _v132.roi_box_dims
02401 _x = _v152
02402 start = end
02403 end += 24
02404 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
02405 start = end
02406 end += 4
02407 (length,) = _struct_I.unpack(str[start:end])
02408 start = end
02409 end += length
02410 val1.collision_name = str[start:end]
02411 self.graspable_objects.append(val1)
02412 start = end
02413 end += 4
02414 (length,) = _struct_I.unpack(str[start:end])
02415 self.collision_object_names = []
02416 for i in range(0, length):
02417 start = end
02418 end += 4
02419 (length,) = _struct_I.unpack(str[start:end])
02420 start = end
02421 end += length
02422 val1 = str[start:end]
02423 self.collision_object_names.append(val1)
02424 start = end
02425 end += 4
02426 (length,) = _struct_I.unpack(str[start:end])
02427 start = end
02428 end += length
02429 self.collision_support_surface_name = str[start:end]
02430 return self
02431 except struct.error as e:
02432 raise roslib.message.DeserializationError(e) #most likely buffer underfill
02433
02434 _struct_I = roslib.message.struct_I
02435 _struct_IBI = struct.Struct("<IBI")
02436 _struct_B = struct.Struct("<B")
02437 _struct_12d = struct.Struct("<12d")
02438 _struct_f = struct.Struct("<f")
02439 _struct_i = struct.Struct("<i")
02440 _struct_BI = struct.Struct("<BI")
02441 _struct_3f = struct.Struct("<3f")
02442 _struct_9d = struct.Struct("<9d")
02443 _struct_B2I = struct.Struct("<B2I")
02444 _struct_4d = struct.Struct("<4d")
02445 _struct_2I = struct.Struct("<2I")
02446 _struct_4IB = struct.Struct("<4IB")
02447 _struct_3d = struct.Struct("<3d")
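
# ---------------------------------------------------------------------------
# Editor's note (hand-written sketch, not produced by genmsg_py): the
# generated serialize()/deserialize() methods above are normally invoked by
# rospy, but they can also be exercised directly, which is handy for testing.
# The helper name and field value below are illustrative assumptions about
# typical usage rather than generated API.
def _example_roundtrip():
    import io
    original = TabletopCollisionMapProcessingResponse()
    original.collision_support_surface_name = 'table'
    buff = io.BytesIO()
    original.serialize(buff)                # pack the message into wire format
    restored = TabletopCollisionMapProcessingResponse()
    restored.deserialize(buff.getvalue())   # rebuild the message from raw bytes
    return restored.collision_support_surface_name == 'table'
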
02448 class TabletopCollisionMapProcessing(roslib.message.ServiceDefinition):
02449 _type = 'tabletop_collision_map_processing/TabletopCollisionMapProcessing'
02450 _md5sum = '58e439dda25eed20079051e6af1b5eaa'
02451 _request_class = TabletopCollisionMapProcessingRequest
02452 _response_class = TabletopCollisionMapProcessingResponse
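
# ---------------------------------------------------------------------------
# Editor's note (hand-written sketch, not produced by genmsg_py): a minimal
# client for the service defined above.  The service name used here is the
# one conventionally advertised by the tabletop collision map processing
# node; treat it, the helper name and the 'base_link' frame as assumptions to
# adapt to your own setup.  `detection_result` is expected to be a
# tabletop_object_detector/TabletopDetectionResult from a prior detection call.
def _example_client(detection_result):
    import rospy
    srv_name = 'tabletop_collision_map_processing/tabletop_collision_map_processing'
    rospy.wait_for_service(srv_name)
    process = rospy.ServiceProxy(srv_name, TabletopCollisionMapProcessing)
    # rospy maps keyword arguments onto the request fields
    reply = process(detection_result=detection_result,
                    reset_collision_models=True,
                    reset_attached_models=True,
                    desired_frame='base_link')
    return reply.graspable_objects, reply.collision_support_surface_name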