00001 """autogenerated by genmsg_py from TabletopCollisionMapProcessingRequest.msg. Do not edit."""
00002 import roslib.message
00003 import struct
00004
00005 import tabletop_object_detector.msg
00006 import sensor_msgs.msg
00007 import geometry_msgs.msg
00008 import household_objects_database_msgs.msg
00009 import std_msgs.msg
00010
00011 class TabletopCollisionMapProcessingRequest(roslib.message.Message):
00012 _md5sum = "2269a5d6f09bf4f6fd5743f2876f63a7"
00013 _type = "tabletop_collision_map_processing/TabletopCollisionMapProcessingRequest"
00014 _has_header = False #flag to mark the presence of a Header object
00015 _full_text = """
00016
00017
00018
00019
00020
00021
00022 tabletop_object_detector/TabletopDetectionResult detection_result
00023
00024
00025 bool reset_collision_models
00026
00027
00028 bool reset_attached_models
00029
00030
00031 bool reset_static_map
00032
00033
00034 bool take_static_collision_map
00035
00036
00037
00038 string desired_frame
00039
00040
00041 ================================================================================
00042 MSG: tabletop_object_detector/TabletopDetectionResult
00043 # Contains all the information from one run of the tabletop detection node
00044
00045 # The information for the plane that has been detected
00046 Table table
00047
00048 # The raw clusters detected in the scan
00049 sensor_msgs/PointCloud[] clusters
00050
00051 # The list of potential models that have been detected for each cluster
00052 # An empty list will be returned for a cluster that has no recognition results at all
00053 household_objects_database_msgs/DatabaseModelPoseList[] models
00054
00055 # For each cluster, the index of the list of models that was fit to that cluster
00056 # keep in mind that multiple raw clusters can correspond to a single fit
00057 int32[] cluster_model_indices
00058
00059 # Whether the detection has succeeded or failed
00060 int32 NO_CLOUD_RECEIVED = 1
00061 int32 NO_TABLE = 2
00062 int32 OTHER_ERROR = 3
00063 int32 SUCCESS = 4
00064 int32 result
00065
00066 ================================================================================
00067 MSG: tabletop_object_detector/Table
00068 # Informs that a planar table has been detected at a given location
00069
00070 # The pose gives you the transform that take you to the coordinate system
00071 # of the table, with the origin somewhere in the table plane and the
00072 # z axis normal to the plane
00073 geometry_msgs/PoseStamped pose
00074
00075 # These values give you the observed extents of the table, along x and y,
00076 # in the table's own coordinate system (above)
00077 # there is no guarantee that the origin of the table coordinate system is
00078 # inside the boundary defined by these values.
00079 float32 x_min
00080 float32 x_max
00081 float32 y_min
00082 float32 y_max
00083
00084 # There is no guarantee that the table does NOT extend further than these
00085 # values; this is just as far as we've observed it.
00086
00087 ================================================================================
00088 MSG: geometry_msgs/PoseStamped
00089 # A Pose with reference coordinate frame and timestamp
00090 Header header
00091 Pose pose
00092
00093 ================================================================================
00094 MSG: std_msgs/Header
00095 # Standard metadata for higher-level stamped data types.
00096 # This is generally used to communicate timestamped data
00097 # in a particular coordinate frame.
00098 #
00099 # sequence ID: consecutively increasing ID
00100 uint32 seq
00101 #Two-integer timestamp that is expressed as:
00102 # * stamp.secs: seconds (stamp_secs) since epoch
00103 # * stamp.nsecs: nanoseconds since stamp_secs
00104 # time-handling sugar is provided by the client library
00105 time stamp
00106 #Frame this data is associated with
00107 # 0: no frame
00108 # 1: global frame
00109 string frame_id
00110
00111 ================================================================================
00112 MSG: geometry_msgs/Pose
00113 # A representation of pose in free space, composed of position and orientation.
00114 Point position
00115 Quaternion orientation
00116
00117 ================================================================================
00118 MSG: geometry_msgs/Point
00119 # This contains the position of a point in free space
00120 float64 x
00121 float64 y
00122 float64 z
00123
00124 ================================================================================
00125 MSG: geometry_msgs/Quaternion
00126 # This represents an orientation in free space in quaternion form.
00127
00128 float64 x
00129 float64 y
00130 float64 z
00131 float64 w
00132
00133 ================================================================================
00134 MSG: sensor_msgs/PointCloud
00135 # This message holds a collection of 3d points, plus optional additional
00136 # information about each point.
00137
00138 # Time of sensor data acquisition, coordinate frame ID.
00139 Header header
00140
00141 # Array of 3d points. Each Point32 should be interpreted as a 3d point
00142 # in the frame given in the header.
00143 geometry_msgs/Point32[] points
00144
00145 # Each channel should have the same number of elements as points array,
00146 # and the data in each channel should correspond 1:1 with each point.
00147 # Channel names in common practice are listed in ChannelFloat32.msg.
00148 ChannelFloat32[] channels
00149
00150 ================================================================================
00151 MSG: geometry_msgs/Point32
00152 # This contains the position of a point in free space (with 32 bits of precision).
00153 # It is recommended to use Point wherever possible instead of Point32.
00154 #
00155 # This recommendation is to promote interoperability.
00156 #
00157 # This message is designed to take up less space when sending
00158 # lots of points at once, as in the case of a PointCloud.
00159
00160 float32 x
00161 float32 y
00162 float32 z
00163 ================================================================================
00164 MSG: sensor_msgs/ChannelFloat32
00165 # This message is used by the PointCloud message to hold optional data
00166 # associated with each point in the cloud. The length of the values
00167 # array should be the same as the length of the points array in the
00168 # PointCloud, and each value should be associated with the corresponding
00169 # point.
00170
00171 # Channel names in existing practice include:
00172 # "u", "v" - row and column (respectively) in the left stereo image.
00173 # This is opposite to usual conventions but remains for
00174 # historical reasons. The newer PointCloud2 message has no
00175 # such problem.
00176 # "rgb" - For point clouds produced by color stereo cameras. uint8
00177 # (R,G,B) values packed into the least significant 24 bits,
00178 # in order.
00179 # "intensity" - laser or pixel intensity.
00180 # "distance"
00181
00182 # The channel name should give semantics of the channel (e.g.
00183 # "intensity" instead of "value").
00184 string name
00185
00186 # The values array should be 1-1 with the elements of the associated
00187 # PointCloud.
00188 float32[] values
00189
00190 ================================================================================
00191 MSG: household_objects_database_msgs/DatabaseModelPoseList
00192 # stores a list of possible database models recognition results
00193 DatabaseModelPose[] model_list
00194 ================================================================================
00195 MSG: household_objects_database_msgs/DatabaseModelPose
00196 # Informs that a specific model from the Model Database has been
00197 # identified at a certain location
00198
00199 # the database id of the model
00200 int32 model_id
00201
00202 # the pose that it can be found in
00203 geometry_msgs/PoseStamped pose
00204
00205 # a measure of the confidence level in this detection result
00206 float32 confidence
00207 """
00208 __slots__ = ['detection_result','reset_collision_models','reset_attached_models','reset_static_map','take_static_collision_map','desired_frame']
00209 _slot_types = ['tabletop_object_detector/TabletopDetectionResult','bool','bool','bool','bool','string']
00210
00211 def __init__(self, *args, **kwds):
00212 """
00213 Constructor. Any message fields that are implicitly/explicitly
00214 set to None will be assigned a default value. The recommended
00215 use is keyword arguments, as this is more robust to future message
00216 changes. You cannot mix in-order arguments and keyword arguments.
00217
00218 The available fields are:
00219 detection_result,reset_collision_models,reset_attached_models,reset_static_map,take_static_collision_map,desired_frame
00220
00221 @param args: complete set of field values, in .msg order
00222 @param kwds: use keyword arguments corresponding to message field names
00223 to set specific fields.
00224 """
00225 if args or kwds:
00226 super(TabletopCollisionMapProcessingRequest, self).__init__(*args, **kwds)
00227 #message fields cannot be None, assign default values for those that are
00228 if self.detection_result is None:
00229 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00230 if self.reset_collision_models is None:
00231 self.reset_collision_models = False
00232 if self.reset_attached_models is None:
00233 self.reset_attached_models = False
00234 if self.reset_static_map is None:
00235 self.reset_static_map = False
00236 if self.take_static_collision_map is None:
00237 self.take_static_collision_map = False
00238 if self.desired_frame is None:
00239 self.desired_frame = ''
00240 else:
00241 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00242 self.reset_collision_models = False
00243 self.reset_attached_models = False
00244 self.reset_static_map = False
00245 self.take_static_collision_map = False
00246 self.desired_frame = ''
00247
00248 def _get_types(self):
00249 """
00250 internal API method
00251 """
00252 return self._slot_types
00253
00254 def serialize(self, buff):
00255 """
00256 serialize message into buffer
00257 @param buff: buffer
00258 @type buff: StringIO
00259 """
00260 try:
00261 _x = self
00262 buff.write(_struct_3I.pack(_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs))
00263 _x = self.detection_result.table.pose.header.frame_id
00264 length = len(_x)
00265 buff.write(struct.pack('<I%ss'%length, length, _x))
00266 _x = self
00267 buff.write(_struct_7d4f.pack(_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max))
00268 length = len(self.detection_result.clusters)
00269 buff.write(_struct_I.pack(length))
00270 for val1 in self.detection_result.clusters:
00271 _v1 = val1.header
00272 buff.write(_struct_I.pack(_v1.seq))
00273 _v2 = _v1.stamp
00274 _x = _v2
00275 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00276 _x = _v1.frame_id
00277 length = len(_x)
00278 buff.write(struct.pack('<I%ss'%length, length, _x))
00279 length = len(val1.points)
00280 buff.write(_struct_I.pack(length))
00281 for val2 in val1.points:
00282 _x = val2
00283 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
00284 length = len(val1.channels)
00285 buff.write(_struct_I.pack(length))
00286 for val2 in val1.channels:
00287 _x = val2.name
00288 length = len(_x)
00289 buff.write(struct.pack('<I%ss'%length, length, _x))
00290 length = len(val2.values)
00291 buff.write(_struct_I.pack(length))
00292 pattern = '<%sf'%length
00293 buff.write(struct.pack(pattern, *val2.values))
00294 length = len(self.detection_result.models)
00295 buff.write(_struct_I.pack(length))
00296 for val1 in self.detection_result.models:
00297 length = len(val1.model_list)
00298 buff.write(_struct_I.pack(length))
00299 for val2 in val1.model_list:
00300 buff.write(_struct_i.pack(val2.model_id))
00301 _v3 = val2.pose
00302 _v4 = _v3.header
00303 buff.write(_struct_I.pack(_v4.seq))
00304 _v5 = _v4.stamp
00305 _x = _v5
00306 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00307 _x = _v4.frame_id
00308 length = len(_x)
00309 buff.write(struct.pack('<I%ss'%length, length, _x))
00310 _v6 = _v3.pose
00311 _v7 = _v6.position
00312 _x = _v7
00313 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00314 _v8 = _v6.orientation
00315 _x = _v8
00316 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
00317 buff.write(_struct_f.pack(val2.confidence))
00318 length = len(self.detection_result.cluster_model_indices)
00319 buff.write(_struct_I.pack(length))
00320 pattern = '<%si'%length
00321 buff.write(struct.pack(pattern, *self.detection_result.cluster_model_indices))
00322 _x = self
00323 buff.write(_struct_i4B.pack(_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models, _x.reset_static_map, _x.take_static_collision_map))
00324 _x = self.desired_frame
00325 length = len(_x)
00326 buff.write(struct.pack('<I%ss'%length, length, _x))
00327 except struct.error, se: self._check_types(se)
00328 except TypeError, te: self._check_types(te)
00329
00330 def deserialize(self, str):
00331 """
00332 unpack serialized message in str into this message instance
00333 @param str: byte array of serialized message
00334 @type str: str
00335 """
00336 try:
00337 if self.detection_result is None:
00338 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00339 end = 0
00340 _x = self
00341 start = end
00342 end += 12
00343 (_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
00344 start = end
00345 end += 4
00346 (length,) = _struct_I.unpack(str[start:end])
00347 start = end
00348 end += length
00349 self.detection_result.table.pose.header.frame_id = str[start:end]
00350 _x = self
00351 start = end
00352 end += 72
00353 (_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max,) = _struct_7d4f.unpack(str[start:end])
00354 start = end
00355 end += 4
00356 (length,) = _struct_I.unpack(str[start:end])
00357 self.detection_result.clusters = []
00358 for i in xrange(0, length):
00359 val1 = sensor_msgs.msg.PointCloud()
00360 _v9 = val1.header
00361 start = end
00362 end += 4
00363 (_v9.seq,) = _struct_I.unpack(str[start:end])
00364 _v10 = _v9.stamp
00365 _x = _v10
00366 start = end
00367 end += 8
00368 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00369 start = end
00370 end += 4
00371 (length,) = _struct_I.unpack(str[start:end])
00372 start = end
00373 end += length
00374 _v9.frame_id = str[start:end]
00375 start = end
00376 end += 4
00377 (length,) = _struct_I.unpack(str[start:end])
00378 val1.points = []
00379 for i in xrange(0, length):
00380 val2 = geometry_msgs.msg.Point32()
00381 _x = val2
00382 start = end
00383 end += 12
00384 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
00385 val1.points.append(val2)
00386 start = end
00387 end += 4
00388 (length,) = _struct_I.unpack(str[start:end])
00389 val1.channels = []
00390 for i in xrange(0, length):
00391 val2 = sensor_msgs.msg.ChannelFloat32()
00392 start = end
00393 end += 4
00394 (length,) = _struct_I.unpack(str[start:end])
00395 start = end
00396 end += length
00397 val2.name = str[start:end]
00398 start = end
00399 end += 4
00400 (length,) = _struct_I.unpack(str[start:end])
00401 pattern = '<%sf'%length
00402 start = end
00403 end += struct.calcsize(pattern)
00404 val2.values = struct.unpack(pattern, str[start:end])
00405 val1.channels.append(val2)
00406 self.detection_result.clusters.append(val1)
00407 start = end
00408 end += 4
00409 (length,) = _struct_I.unpack(str[start:end])
00410 self.detection_result.models = []
00411 for i in xrange(0, length):
00412 val1 = household_objects_database_msgs.msg.DatabaseModelPoseList()
00413 start = end
00414 end += 4
00415 (length,) = _struct_I.unpack(str[start:end])
00416 val1.model_list = []
00417 for i in xrange(0, length):
00418 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
00419 start = end
00420 end += 4
00421 (val2.model_id,) = _struct_i.unpack(str[start:end])
00422 _v11 = val2.pose
00423 _v12 = _v11.header
00424 start = end
00425 end += 4
00426 (_v12.seq,) = _struct_I.unpack(str[start:end])
00427 _v13 = _v12.stamp
00428 _x = _v13
00429 start = end
00430 end += 8
00431 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00432 start = end
00433 end += 4
00434 (length,) = _struct_I.unpack(str[start:end])
00435 start = end
00436 end += length
00437 _v12.frame_id = str[start:end]
00438 _v14 = _v11.pose
00439 _v15 = _v14.position
00440 _x = _v15
00441 start = end
00442 end += 24
00443 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00444 _v16 = _v14.orientation
00445 _x = _v16
00446 start = end
00447 end += 32
00448 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
00449 start = end
00450 end += 4
00451 (val2.confidence,) = _struct_f.unpack(str[start:end])
00452 val1.model_list.append(val2)
00453 self.detection_result.models.append(val1)
00454 start = end
00455 end += 4
00456 (length,) = _struct_I.unpack(str[start:end])
00457 pattern = '<%si'%length
00458 start = end
00459 end += struct.calcsize(pattern)
00460 self.detection_result.cluster_model_indices = struct.unpack(pattern, str[start:end])
00461 _x = self
00462 start = end
00463 end += 8
00464 (_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models, _x.reset_static_map, _x.take_static_collision_map,) = _struct_i4B.unpack(str[start:end])
00465 self.reset_collision_models = bool(self.reset_collision_models)
00466 self.reset_attached_models = bool(self.reset_attached_models)
00467 self.reset_static_map = bool(self.reset_static_map)
00468 self.take_static_collision_map = bool(self.take_static_collision_map)
00469 start = end
00470 end += 4
00471 (length,) = _struct_I.unpack(str[start:end])
00472 start = end
00473 end += length
00474 self.desired_frame = str[start:end]
00475 return self
00476 except struct.error, e:
00477 raise roslib.message.DeserializationError(e) #most likely buffer underfill
00478
00479
00480 def serialize_numpy(self, buff, numpy):
00481 """
00482 serialize message with numpy array types into buffer
00483 @param buff: buffer
00484 @type buff: StringIO
00485 @param numpy: numpy python module
00486 @type numpy: module
00487 """
00488 try:
00489 _x = self
00490 buff.write(_struct_3I.pack(_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs))
00491 _x = self.detection_result.table.pose.header.frame_id
00492 length = len(_x)
00493 buff.write(struct.pack('<I%ss'%length, length, _x))
00494 _x = self
00495 buff.write(_struct_7d4f.pack(_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max))
00496 length = len(self.detection_result.clusters)
00497 buff.write(_struct_I.pack(length))
00498 for val1 in self.detection_result.clusters:
00499 _v17 = val1.header
00500 buff.write(_struct_I.pack(_v17.seq))
00501 _v18 = _v17.stamp
00502 _x = _v18
00503 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00504 _x = _v17.frame_id
00505 length = len(_x)
00506 buff.write(struct.pack('<I%ss'%length, length, _x))
00507 length = len(val1.points)
00508 buff.write(_struct_I.pack(length))
00509 for val2 in val1.points:
00510 _x = val2
00511 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
00512 length = len(val1.channels)
00513 buff.write(_struct_I.pack(length))
00514 for val2 in val1.channels:
00515 _x = val2.name
00516 length = len(_x)
00517 buff.write(struct.pack('<I%ss'%length, length, _x))
00518 length = len(val2.values)
00519 buff.write(_struct_I.pack(length))
00520 pattern = '<%sf'%length
00521 buff.write(val2.values.tostring())
00522 length = len(self.detection_result.models)
00523 buff.write(_struct_I.pack(length))
00524 for val1 in self.detection_result.models:
00525 length = len(val1.model_list)
00526 buff.write(_struct_I.pack(length))
00527 for val2 in val1.model_list:
00528 buff.write(_struct_i.pack(val2.model_id))
00529 _v19 = val2.pose
00530 _v20 = _v19.header
00531 buff.write(_struct_I.pack(_v20.seq))
00532 _v21 = _v20.stamp
00533 _x = _v21
00534 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
00535 _x = _v20.frame_id
00536 length = len(_x)
00537 buff.write(struct.pack('<I%ss'%length, length, _x))
00538 _v22 = _v19.pose
00539 _v23 = _v22.position
00540 _x = _v23
00541 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
00542 _v24 = _v22.orientation
00543 _x = _v24
00544 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
00545 buff.write(_struct_f.pack(val2.confidence))
00546 length = len(self.detection_result.cluster_model_indices)
00547 buff.write(_struct_I.pack(length))
00548 pattern = '<%si'%length
00549 buff.write(self.detection_result.cluster_model_indices.tostring())
00550 _x = self
00551 buff.write(_struct_i4B.pack(_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models, _x.reset_static_map, _x.take_static_collision_map))
00552 _x = self.desired_frame
00553 length = len(_x)
00554 buff.write(struct.pack('<I%ss'%length, length, _x))
00555 except struct.error, se: self._check_types(se)
00556 except TypeError, te: self._check_types(te)
00557
00558 def deserialize_numpy(self, str, numpy):
00559 """
00560 unpack serialized message in str into this message instance using numpy for array types
00561 @param str: byte array of serialized message
00562 @type str: str
00563 @param numpy: numpy python module
00564 @type numpy: module
00565 """
00566 try:
00567 if self.detection_result is None:
00568 self.detection_result = tabletop_object_detector.msg.TabletopDetectionResult()
00569 end = 0
00570 _x = self
00571 start = end
00572 end += 12
00573 (_x.detection_result.table.pose.header.seq, _x.detection_result.table.pose.header.stamp.secs, _x.detection_result.table.pose.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
00574 start = end
00575 end += 4
00576 (length,) = _struct_I.unpack(str[start:end])
00577 start = end
00578 end += length
00579 self.detection_result.table.pose.header.frame_id = str[start:end]
00580 _x = self
00581 start = end
00582 end += 72
00583 (_x.detection_result.table.pose.pose.position.x, _x.detection_result.table.pose.pose.position.y, _x.detection_result.table.pose.pose.position.z, _x.detection_result.table.pose.pose.orientation.x, _x.detection_result.table.pose.pose.orientation.y, _x.detection_result.table.pose.pose.orientation.z, _x.detection_result.table.pose.pose.orientation.w, _x.detection_result.table.x_min, _x.detection_result.table.x_max, _x.detection_result.table.y_min, _x.detection_result.table.y_max,) = _struct_7d4f.unpack(str[start:end])
00584 start = end
00585 end += 4
00586 (length,) = _struct_I.unpack(str[start:end])
00587 self.detection_result.clusters = []
00588 for i in xrange(0, length):
00589 val1 = sensor_msgs.msg.PointCloud()
00590 _v25 = val1.header
00591 start = end
00592 end += 4
00593 (_v25.seq,) = _struct_I.unpack(str[start:end])
00594 _v26 = _v25.stamp
00595 _x = _v26
00596 start = end
00597 end += 8
00598 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00599 start = end
00600 end += 4
00601 (length,) = _struct_I.unpack(str[start:end])
00602 start = end
00603 end += length
00604 _v25.frame_id = str[start:end]
00605 start = end
00606 end += 4
00607 (length,) = _struct_I.unpack(str[start:end])
00608 val1.points = []
00609 for i in xrange(0, length):
00610 val2 = geometry_msgs.msg.Point32()
00611 _x = val2
00612 start = end
00613 end += 12
00614 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
00615 val1.points.append(val2)
00616 start = end
00617 end += 4
00618 (length,) = _struct_I.unpack(str[start:end])
00619 val1.channels = []
00620 for i in xrange(0, length):
00621 val2 = sensor_msgs.msg.ChannelFloat32()
00622 start = end
00623 end += 4
00624 (length,) = _struct_I.unpack(str[start:end])
00625 start = end
00626 end += length
00627 val2.name = str[start:end]
00628 start = end
00629 end += 4
00630 (length,) = _struct_I.unpack(str[start:end])
00631 pattern = '<%sf'%length
00632 start = end
00633 end += struct.calcsize(pattern)
00634 val2.values = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=length)
00635 val1.channels.append(val2)
00636 self.detection_result.clusters.append(val1)
00637 start = end
00638 end += 4
00639 (length,) = _struct_I.unpack(str[start:end])
00640 self.detection_result.models = []
00641 for i in xrange(0, length):
00642 val1 = household_objects_database_msgs.msg.DatabaseModelPoseList()
00643 start = end
00644 end += 4
00645 (length,) = _struct_I.unpack(str[start:end])
00646 val1.model_list = []
00647 for i in xrange(0, length):
00648 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
00649 start = end
00650 end += 4
00651 (val2.model_id,) = _struct_i.unpack(str[start:end])
00652 _v27 = val2.pose
00653 _v28 = _v27.header
00654 start = end
00655 end += 4
00656 (_v28.seq,) = _struct_I.unpack(str[start:end])
00657 _v29 = _v28.stamp
00658 _x = _v29
00659 start = end
00660 end += 8
00661 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
00662 start = end
00663 end += 4
00664 (length,) = _struct_I.unpack(str[start:end])
00665 start = end
00666 end += length
00667 _v28.frame_id = str[start:end]
00668 _v30 = _v27.pose
00669 _v31 = _v30.position
00670 _x = _v31
00671 start = end
00672 end += 24
00673 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
00674 _v32 = _v30.orientation
00675 _x = _v32
00676 start = end
00677 end += 32
00678 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
00679 start = end
00680 end += 4
00681 (val2.confidence,) = _struct_f.unpack(str[start:end])
00682 val1.model_list.append(val2)
00683 self.detection_result.models.append(val1)
00684 start = end
00685 end += 4
00686 (length,) = _struct_I.unpack(str[start:end])
00687 pattern = '<%si'%length
00688 start = end
00689 end += struct.calcsize(pattern)
00690 self.detection_result.cluster_model_indices = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
00691 _x = self
00692 start = end
00693 end += 8
00694 (_x.detection_result.result, _x.reset_collision_models, _x.reset_attached_models, _x.reset_static_map, _x.take_static_collision_map,) = _struct_i4B.unpack(str[start:end])
00695 self.reset_collision_models = bool(self.reset_collision_models)
00696 self.reset_attached_models = bool(self.reset_attached_models)
00697 self.reset_static_map = bool(self.reset_static_map)
00698 self.take_static_collision_map = bool(self.take_static_collision_map)
00699 start = end
00700 end += 4
00701 (length,) = _struct_I.unpack(str[start:end])
00702 start = end
00703 end += length
00704 self.desired_frame = str[start:end]
00705 return self
00706 except struct.error, e:
00707 raise roslib.message.DeserializationError(e) #most likely buffer underfill
00708
00709 _struct_I = roslib.message.struct_I
00710 _struct_7d4f = struct.Struct("<7d4f")
00711 _struct_f = struct.Struct("<f")
00712 _struct_i4B = struct.Struct("<i4B")
00713 _struct_2I = struct.Struct("<2I")
00714 _struct_i = struct.Struct("<i")
00715 _struct_3I = struct.Struct("<3I")
00716 _struct_4d = struct.Struct("<4d")
00717 _struct_3f = struct.Struct("<3f")
00718 _struct_3d = struct.Struct("<3d")
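# ---------------------------------------------------------------------------
# Editor's sketch (not part of the generated code): a minimal round trip of the
# request message, assuming the standard Python 2 StringIO module for the
# serialization buffer. The field values used ('base_link', the True flags) are
# arbitrary examples, not values required by the service.
def _example_request_roundtrip():
  from StringIO import StringIO
  # construct with keyword arguments; unspecified fields keep their defaults
  req = TabletopCollisionMapProcessingRequest(reset_collision_models=True,
                                              reset_attached_models=True,
                                              desired_frame='base_link')
  buff = StringIO()
  req.serialize(buff)                 # pack all fields into the buffer
  copy = TabletopCollisionMapProcessingRequest()
  copy.deserialize(buff.getvalue())   # rebuild a message from the raw bytes
  return copy
# ---------------------------------------------------------------------------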
00719 """autogenerated by genmsg_py from TabletopCollisionMapProcessingResponse.msg. Do not edit."""
00720 import roslib.message
00721 import struct
00722
00723 import geometry_msgs.msg
00724 import sensor_msgs.msg
00725 import std_msgs.msg
00726 import object_manipulation_msgs.msg
00727 import household_objects_database_msgs.msg
00728
00729 class TabletopCollisionMapProcessingResponse(roslib.message.Message):
00730 _md5sum = "33936dac29953c0907375257fd65ad73"
00731 _type = "tabletop_collision_map_processing/TabletopCollisionMapProcessingResponse"
00732 _has_header = False #flag to mark the presence of a Header object
00733 _full_text = """
00734
00735 object_manipulation_msgs/GraspableObject[] graspable_objects
00736
00737
00738
00739 string[] collision_object_names
00740
00741
00742 string collision_support_surface_name
00743
00744
00745 ================================================================================
00746 MSG: object_manipulation_msgs/GraspableObject
00747 # an object that the object_manipulator can work on
00748
00749 # a graspable object can be represented in multiple ways. This message
00750 # can contain all of them. Which one is actually used is up to the receiver
00751 # of this message. When adding new representations, one must be careful that
00752 # they have reasonable lightweight defaults indicating that that particular
00753 # representation is not available.
00754
00755 # the tf frame to be used as a reference frame when combining information from
00756 # the different representations below
00757 string reference_frame_id
00758
00759 # potential recognition results from a database of models
00760 # all poses are relative to the object reference pose
00761 household_objects_database_msgs/DatabaseModelPose[] potential_models
00762
00763 # the point cloud itself
00764 sensor_msgs/PointCloud cluster
00765
00766 # a region of a PointCloud2 of interest
00767 object_manipulation_msgs/SceneRegion region
00768
00769
00770 ================================================================================
00771 MSG: household_objects_database_msgs/DatabaseModelPose
00772 # Informs that a specific model from the Model Database has been
00773 # identified at a certain location
00774
00775 # the database id of the model
00776 int32 model_id
00777
00778 # the pose that it can be found in
00779 geometry_msgs/PoseStamped pose
00780
00781 # a measure of the confidence level in this detection result
00782 float32 confidence
00783 ================================================================================
00784 MSG: geometry_msgs/PoseStamped
00785 # A Pose with reference coordinate frame and timestamp
00786 Header header
00787 Pose pose
00788
00789 ================================================================================
00790 MSG: std_msgs/Header
00791 # Standard metadata for higher-level stamped data types.
00792 # This is generally used to communicate timestamped data
00793 # in a particular coordinate frame.
00794 #
00795 # sequence ID: consecutively increasing ID
00796 uint32 seq
00797 #Two-integer timestamp that is expressed as:
00798 # * stamp.secs: seconds (stamp_secs) since epoch
00799 # * stamp.nsecs: nanoseconds since stamp_secs
00800 # time-handling sugar is provided by the client library
00801 time stamp
00802 #Frame this data is associated with
00803 # 0: no frame
00804 # 1: global frame
00805 string frame_id
00806
00807 ================================================================================
00808 MSG: geometry_msgs/Pose
00809 # A representation of pose in free space, composed of position and orientation.
00810 Point position
00811 Quaternion orientation
00812
00813 ================================================================================
00814 MSG: geometry_msgs/Point
00815 # This contains the position of a point in free space
00816 float64 x
00817 float64 y
00818 float64 z
00819
00820 ================================================================================
00821 MSG: geometry_msgs/Quaternion
00822 # This represents an orientation in free space in quaternion form.
00823
00824 float64 x
00825 float64 y
00826 float64 z
00827 float64 w
00828
00829 ================================================================================
00830 MSG: sensor_msgs/PointCloud
00831 # This message holds a collection of 3d points, plus optional additional
00832 # information about each point.
00833
00834 # Time of sensor data acquisition, coordinate frame ID.
00835 Header header
00836
00837 # Array of 3d points. Each Point32 should be interpreted as a 3d point
00838 # in the frame given in the header.
00839 geometry_msgs/Point32[] points
00840
00841 # Each channel should have the same number of elements as points array,
00842 # and the data in each channel should correspond 1:1 with each point.
00843 # Channel names in common practice are listed in ChannelFloat32.msg.
00844 ChannelFloat32[] channels
00845
00846 ================================================================================
00847 MSG: geometry_msgs/Point32
00848 # This contains the position of a point in free space (with 32 bits of precision).
00849 # It is recommended to use Point wherever possible instead of Point32.
00850 #
00851 # This recommendation is to promote interoperability.
00852 #
00853 # This message is designed to take up less space when sending
00854 # lots of points at once, as in the case of a PointCloud.
00855
00856 float32 x
00857 float32 y
00858 float32 z
00859 ================================================================================
00860 MSG: sensor_msgs/ChannelFloat32
00861 # This message is used by the PointCloud message to hold optional data
00862 # associated with each point in the cloud. The length of the values
00863 # array should be the same as the length of the points array in the
00864 # PointCloud, and each value should be associated with the corresponding
00865 # point.
00866
00867 # Channel names in existing practice include:
00868 # "u", "v" - row and column (respectively) in the left stereo image.
00869 # This is opposite to usual conventions but remains for
00870 # historical reasons. The newer PointCloud2 message has no
00871 # such problem.
00872 # "rgb" - For point clouds produced by color stereo cameras. uint8
00873 # (R,G,B) values packed into the least significant 24 bits,
00874 # in order.
00875 # "intensity" - laser or pixel intensity.
00876 # "distance"
00877
00878 # The channel name should give semantics of the channel (e.g.
00879 # "intensity" instead of "value").
00880 string name
00881
00882 # The values array should be 1-1 with the elements of the associated
00883 # PointCloud.
00884 float32[] values
00885
00886 ================================================================================
00887 MSG: object_manipulation_msgs/SceneRegion
00888 # Point cloud
00889 sensor_msgs/PointCloud2 cloud
00890
00891 # Indices for the region of interest
00892 int32[] mask
00893
00894 # One of the corresponding 2D images, if applicable
00895 sensor_msgs/Image image
00896
00897 # The disparity image, if applicable
00898 sensor_msgs/Image disparity_image
00899
00900 # Camera info for the camera that took the image
00901 sensor_msgs/CameraInfo cam_info
00902
00903 ================================================================================
00904 MSG: sensor_msgs/PointCloud2
00905 # This message holds a collection of N-dimensional points, which may
00906 # contain additional information such as normals, intensity, etc. The
00907 # point data is stored as a binary blob, its layout described by the
00908 # contents of the "fields" array.
00909
00910 # The point cloud data may be organized 2d (image-like) or 1d
00911 # (unordered). Point clouds organized as 2d images may be produced by
00912 # camera depth sensors such as stereo or time-of-flight.
00913
00914 # Time of sensor data acquisition, and the coordinate frame ID (for 3d
00915 # points).
00916 Header header
00917
00918 # 2D structure of the point cloud. If the cloud is unordered, height is
00919 # 1 and width is the length of the point cloud.
00920 uint32 height
00921 uint32 width
00922
00923 # Describes the channels and their layout in the binary data blob.
00924 PointField[] fields
00925
00926 bool is_bigendian # Is this data bigendian?
00927 uint32 point_step # Length of a point in bytes
00928 uint32 row_step # Length of a row in bytes
00929 uint8[] data # Actual point data, size is (row_step*height)
00930
00931 bool is_dense # True if there are no invalid points
00932
00933 ================================================================================
00934 MSG: sensor_msgs/PointField
00935 # This message holds the description of one point entry in the
00936 # PointCloud2 message format.
00937 uint8 INT8 = 1
00938 uint8 UINT8 = 2
00939 uint8 INT16 = 3
00940 uint8 UINT16 = 4
00941 uint8 INT32 = 5
00942 uint8 UINT32 = 6
00943 uint8 FLOAT32 = 7
00944 uint8 FLOAT64 = 8
00945
00946 string name # Name of field
00947 uint32 offset # Offset from start of point struct
00948 uint8 datatype # Datatype enumeration, see above
00949 uint32 count # How many elements in the field
00950
00951 ================================================================================
00952 MSG: sensor_msgs/Image
00953 # This message contains an uncompressed image
00954 # (0, 0) is at top-left corner of image
00955 #
00956
00957 Header header # Header timestamp should be acquisition time of image
00958 # Header frame_id should be optical frame of camera
00959 # origin of frame should be optical center of camera
00960 # +x should point to the right in the image
00961 # +y should point down in the image
00962 # +z should point into the plane of the image
00963 # If the frame_id here and the frame_id of the CameraInfo
00964 # message associated with the image conflict
00965 # the behavior is undefined
00966
00967 uint32 height # image height, that is, number of rows
00968 uint32 width # image width, that is, number of columns
00969
00970 # The legal values for encoding are in file src/image_encodings.cpp
00971 # If you want to standardize a new string format, join
00972 # ros-users@lists.sourceforge.net and send an email proposing a new encoding.
00973
00974 string encoding # Encoding of pixels -- channel meaning, ordering, size
00975 # taken from the list of strings in src/image_encodings.cpp
00976
00977 uint8 is_bigendian # is this data bigendian?
00978 uint32 step # Full row length in bytes
00979 uint8[] data # actual matrix data, size is (step * rows)
00980
00981 ================================================================================
00982 MSG: sensor_msgs/CameraInfo
00983 # This message defines meta information for a camera. It should be in a
00984 # camera namespace on topic "camera_info" and accompanied by up to five
00985 # image topics named:
00986 #
00987 # image_raw - raw data from the camera driver, possibly Bayer encoded
00988 # image - monochrome, distorted
00989 # image_color - color, distorted
00990 # image_rect - monochrome, rectified
00991 # image_rect_color - color, rectified
00992 #
00993 # The image_pipeline contains packages (image_proc, stereo_image_proc)
00994 # for producing the four processed image topics from image_raw and
00995 # camera_info. The meaning of the camera parameters are described in
00996 # detail at http://www.ros.org/wiki/image_pipeline/CameraInfo.
00997 #
00998 # The image_geometry package provides a user-friendly interface to
00999 # common operations using this meta information. If you want to, e.g.,
01000 # project a 3d point into image coordinates, we strongly recommend
01001 # using image_geometry.
01002 #
01003 # If the camera is uncalibrated, the matrices D, K, R, P should be left
01004 # zeroed out. In particular, clients may assume that K[0] == 0.0
01005 # indicates an uncalibrated camera.
01006
01007 #######################################################################
01008 # Image acquisition info #
01009 #######################################################################
01010
01011 # Time of image acquisition, camera coordinate frame ID
01012 Header header # Header timestamp should be acquisition time of image
01013 # Header frame_id should be optical frame of camera
01014 # origin of frame should be optical center of camera
01015 # +x should point to the right in the image
01016 # +y should point down in the image
01017 # +z should point into the plane of the image
01018
01019
01020 #######################################################################
01021 # Calibration Parameters #
01022 #######################################################################
01023 # These are fixed during camera calibration. Their values will be the #
01024 # same in all messages until the camera is recalibrated. Note that #
01025 # self-calibrating systems may "recalibrate" frequently. #
01026 # #
01027 # The internal parameters can be used to warp a raw (distorted) image #
01028 # to: #
01029 # 1. An undistorted image (requires D and K) #
01030 # 2. A rectified image (requires D, K, R) #
01031 # The projection matrix P projects 3D points into the rectified image.#
01032 #######################################################################
01033
01034 # The image dimensions with which the camera was calibrated. Normally
01035 # this will be the full camera resolution in pixels.
01036 uint32 height
01037 uint32 width
01038
01039 # The distortion model used. Supported models are listed in
01040 # sensor_msgs/distortion_models.h. For most cameras, "plumb_bob" - a
01041 # simple model of radial and tangential distortion - is sufficient.
01042 string distortion_model
01043
01044 # The distortion parameters, size depending on the distortion model.
01045 # For "plumb_bob", the 5 parameters are: (k1, k2, t1, t2, k3).
01046 float64[] D
01047
01048 # Intrinsic camera matrix for the raw (distorted) images.
01049 # [fx 0 cx]
01050 # K = [ 0 fy cy]
01051 # [ 0 0 1]
01052 # Projects 3D points in the camera coordinate frame to 2D pixel
01053 # coordinates using the focal lengths (fx, fy) and principal point
01054 # (cx, cy).
01055 float64[9] K # 3x3 row-major matrix
01056
01057 # Rectification matrix (stereo cameras only)
01058 # A rotation matrix aligning the camera coordinate system to the ideal
01059 # stereo image plane so that epipolar lines in both stereo images are
01060 # parallel.
01061 float64[9] R # 3x3 row-major matrix
01062
01063 # Projection/camera matrix
01064 # [fx' 0 cx' Tx]
01065 # P = [ 0 fy' cy' Ty]
01066 # [ 0 0 1 0]
01067 # By convention, this matrix specifies the intrinsic (camera) matrix
01068 # of the processed (rectified) image. That is, the left 3x3 portion
01069 # is the normal camera intrinsic matrix for the rectified image.
01070 # It projects 3D points in the camera coordinate frame to 2D pixel
01071 # coordinates using the focal lengths (fx', fy') and principal point
01072 # (cx', cy') - these may differ from the values in K.
01073 # For monocular cameras, Tx = Ty = 0. Normally, monocular cameras will
01074 # also have R = the identity and P[1:3,1:3] = K.
01075 # For a stereo pair, the fourth column [Tx Ty 0]' is related to the
01076 # position of the optical center of the second camera in the first
01077 # camera's frame. We assume Tz = 0 so both cameras are in the same
01078 # stereo image plane. The first camera always has Tx = Ty = 0. For
01079 # the right (second) camera of a horizontal stereo pair, Ty = 0 and
01080 # Tx = -fx' * B, where B is the baseline between the cameras.
01081 # Given a 3D point [X Y Z]', the projection (x, y) of the point onto
01082 # the rectified image is given by:
01083 # [u v w]' = P * [X Y Z 1]'
01084 # x = u / w
01085 # y = v / w
01086 # This holds for both images of a stereo pair.
01087 float64[12] P # 3x4 row-major matrix
01088
01089
01090 #######################################################################
01091 # Operational Parameters #
01092 #######################################################################
01093 # These define the image region actually captured by the camera #
01094 # driver. Although they affect the geometry of the output image, they #
01095 # may be changed freely without recalibrating the camera. #
01096 #######################################################################
01097
01098 # Binning refers here to any camera setting which combines rectangular
01099 # neighborhoods of pixels into larger "super-pixels." It reduces the
01100 # resolution of the output image to
01101 # (width / binning_x) x (height / binning_y).
01102 # The default values binning_x = binning_y = 0 is considered the same
01103 # as binning_x = binning_y = 1 (no subsampling).
01104 uint32 binning_x
01105 uint32 binning_y
01106
01107 # Region of interest (subwindow of full camera resolution), given in
01108 # full resolution (unbinned) image coordinates. A particular ROI
01109 # always denotes the same window of pixels on the camera sensor,
01110 # regardless of binning settings.
01111 # The default setting of roi (all values 0) is considered the same as
01112 # full resolution (roi.width = width, roi.height = height).
01113 RegionOfInterest roi
01114
01115 ================================================================================
01116 MSG: sensor_msgs/RegionOfInterest
01117 # This message is used to specify a region of interest within an image.
01118 #
01119 # When used to specify the ROI setting of the camera when the image was
01120 # taken, the height and width fields should either match the height and
01121 # width fields for the associated image; or height = width = 0
01122 # indicates that the full resolution image was captured.
01123
01124 uint32 x_offset # Leftmost pixel of the ROI
01125 # (0 if the ROI includes the left edge of the image)
01126 uint32 y_offset # Topmost pixel of the ROI
01127 # (0 if the ROI includes the top edge of the image)
01128 uint32 height # Height of ROI
01129 uint32 width # Width of ROI
01130
01131 # True if a distinct rectified ROI should be calculated from the "raw"
01132 # ROI in this message. Typically this should be False if the full image
01133 # is captured (ROI not used), and True if a subwindow is captured (ROI
01134 # used).
01135 bool do_rectify
01136
01137 """
01138 __slots__ = ['graspable_objects','collision_object_names','collision_support_surface_name']
01139 _slot_types = ['object_manipulation_msgs/GraspableObject[]','string[]','string']
01140
01141 def __init__(self, *args, **kwds):
01142 """
01143 Constructor. Any message fields that are implicitly/explicitly
01144 set to None will be assigned a default value. The recommended
01145 use is keyword arguments, as this is more robust to future message
01146 changes. You cannot mix in-order arguments and keyword arguments.
01147
01148 The available fields are:
01149 graspable_objects,collision_object_names,collision_support_surface_name
01150
01151 @param args: complete set of field values, in .msg order
01152 @param kwds: use keyword arguments corresponding to message field names
01153 to set specific fields.
01154 """
01155 if args or kwds:
01156 super(TabletopCollisionMapProcessingResponse, self).__init__(*args, **kwds)
01157 #message fields cannot be None, assign default values for those that are
01158 if self.graspable_objects is None:
01159 self.graspable_objects = []
01160 if self.collision_object_names is None:
01161 self.collision_object_names = []
01162 if self.collision_support_surface_name is None:
01163 self.collision_support_surface_name = ''
01164 else:
01165 self.graspable_objects = []
01166 self.collision_object_names = []
01167 self.collision_support_surface_name = ''
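# Editor's note (illustrative, not generated): a filled-in response is usually
# read field by field in the calling node, assuming the name and object lists
# are parallel, e.g.
#   for name, obj in zip(resp.collision_object_names, resp.graspable_objects):
#     print name, obj.reference_frame_id   # Python 2 print; `resp` is assumed
# (collision_support_surface_name presumably names the support-surface entry.)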
01168
01169 def _get_types(self):
01170 """
01171 internal API method
01172 """
01173 return self._slot_types
01174
01175 def serialize(self, buff):
01176 """
01177 serialize message into buffer
01178 @param buff: buffer
01179 @type buff: StringIO
01180 """
01181 try:
01182 length = len(self.graspable_objects)
01183 buff.write(_struct_I.pack(length))
01184 for val1 in self.graspable_objects:
01185 _x = val1.reference_frame_id
01186 length = len(_x)
01187 buff.write(struct.pack('<I%ss'%length, length, _x))
01188 length = len(val1.potential_models)
01189 buff.write(_struct_I.pack(length))
01190 for val2 in val1.potential_models:
01191 buff.write(_struct_i.pack(val2.model_id))
01192 _v33 = val2.pose
01193 _v34 = _v33.header
01194 buff.write(_struct_I.pack(_v34.seq))
01195 _v35 = _v34.stamp
01196 _x = _v35
01197 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01198 _x = _v34.frame_id
01199 length = len(_x)
01200 buff.write(struct.pack('<I%ss'%length, length, _x))
01201 _v36 = _v33.pose
01202 _v37 = _v36.position
01203 _x = _v37
01204 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01205 _v38 = _v36.orientation
01206 _x = _v38
01207 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
01208 buff.write(_struct_f.pack(val2.confidence))
01209 _v39 = val1.cluster
01210 _v40 = _v39.header
01211 buff.write(_struct_I.pack(_v40.seq))
01212 _v41 = _v40.stamp
01213 _x = _v41
01214 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01215 _x = _v40.frame_id
01216 length = len(_x)
01217 buff.write(struct.pack('<I%ss'%length, length, _x))
01218 length = len(_v39.points)
01219 buff.write(_struct_I.pack(length))
01220 for val3 in _v39.points:
01221 _x = val3
01222 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
01223 length = len(_v39.channels)
01224 buff.write(_struct_I.pack(length))
01225 for val3 in _v39.channels:
01226 _x = val3.name
01227 length = len(_x)
01228 buff.write(struct.pack('<I%ss'%length, length, _x))
01229 length = len(val3.values)
01230 buff.write(_struct_I.pack(length))
01231 pattern = '<%sf'%length
01232 buff.write(struct.pack(pattern, *val3.values))
01233 _v42 = val1.region
01234 _v43 = _v42.cloud
01235 _v44 = _v43.header
01236 buff.write(_struct_I.pack(_v44.seq))
01237 _v45 = _v44.stamp
01238 _x = _v45
01239 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01240 _x = _v44.frame_id
01241 length = len(_x)
01242 buff.write(struct.pack('<I%ss'%length, length, _x))
01243 _x = _v43
01244 buff.write(_struct_2I.pack(_x.height, _x.width))
01245 length = len(_v43.fields)
01246 buff.write(_struct_I.pack(length))
01247 for val4 in _v43.fields:
01248 _x = val4.name
01249 length = len(_x)
01250 buff.write(struct.pack('<I%ss'%length, length, _x))
01251 _x = val4
01252 buff.write(_struct_IBI.pack(_x.offset, _x.datatype, _x.count))
01253 _x = _v43
01254 buff.write(_struct_B2I.pack(_x.is_bigendian, _x.point_step, _x.row_step))
01255 _x = _v43.data
01256 length = len(_x)
01257 # - if encoded as a list instead, serialize as bytes instead of string
01258 if type(_x) in [list, tuple]:
01259 buff.write(struct.pack('<I%sB'%length, length, *_x))
01260 else:
01261 buff.write(struct.pack('<I%ss'%length, length, _x))
01262 buff.write(_struct_B.pack(_v43.is_dense))
01263 length = len(_v42.mask)
01264 buff.write(_struct_I.pack(length))
01265 pattern = '<%si'%length
01266 buff.write(struct.pack(pattern, *_v42.mask))
01267 _v46 = _v42.image
01268 _v47 = _v46.header
01269 buff.write(_struct_I.pack(_v47.seq))
01270 _v48 = _v47.stamp
01271 _x = _v48
01272 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01273 _x = _v47.frame_id
01274 length = len(_x)
01275 buff.write(struct.pack('<I%ss'%length, length, _x))
01276 _x = _v46
01277 buff.write(_struct_2I.pack(_x.height, _x.width))
01278 _x = _v46.encoding
01279 length = len(_x)
01280 buff.write(struct.pack('<I%ss'%length, length, _x))
01281 _x = _v46
01282 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01283 _x = _v46.data
01284 length = len(_x)
01285 # - if encoded as a list instead, serialize as bytes instead of string
01286 if type(_x) in [list, tuple]:
01287 buff.write(struct.pack('<I%sB'%length, length, *_x))
01288 else:
01289 buff.write(struct.pack('<I%ss'%length, length, _x))
01290 _v49 = _v42.disparity_image
01291 _v50 = _v49.header
01292 buff.write(_struct_I.pack(_v50.seq))
01293 _v51 = _v50.stamp
01294 _x = _v51
01295 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01296 _x = _v50.frame_id
01297 length = len(_x)
01298 buff.write(struct.pack('<I%ss'%length, length, _x))
01299 _x = _v49
01300 buff.write(_struct_2I.pack(_x.height, _x.width))
01301 _x = _v49.encoding
01302 length = len(_x)
01303 buff.write(struct.pack('<I%ss'%length, length, _x))
01304 _x = _v49
01305 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01306 _x = _v49.data
01307 length = len(_x)
01308 # - if encoded as a list instead, serialize as bytes instead of string
01309 if type(_x) in [list, tuple]:
01310 buff.write(struct.pack('<I%sB'%length, length, *_x))
01311 else:
01312 buff.write(struct.pack('<I%ss'%length, length, _x))
01313 _v52 = _v42.cam_info
01314 _v53 = _v52.header
01315 buff.write(_struct_I.pack(_v53.seq))
01316 _v54 = _v53.stamp
01317 _x = _v54
01318 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01319 _x = _v53.frame_id
01320 length = len(_x)
01321 buff.write(struct.pack('<I%ss'%length, length, _x))
01322 _x = _v52
01323 buff.write(_struct_2I.pack(_x.height, _x.width))
01324 _x = _v52.distortion_model
01325 length = len(_x)
01326 buff.write(struct.pack('<I%ss'%length, length, _x))
01327 length = len(_v52.D)
01328 buff.write(_struct_I.pack(length))
01329 pattern = '<%sd'%length
01330 buff.write(struct.pack(pattern, *_v52.D))
01331 buff.write(_struct_9d.pack(*_v52.K))
01332 buff.write(_struct_9d.pack(*_v52.R))
01333 buff.write(_struct_12d.pack(*_v52.P))
01334 _x = _v52
01335 buff.write(_struct_2I.pack(_x.binning_x, _x.binning_y))
01336 _v55 = _v52.roi
01337 _x = _v55
01338 buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
01339 length = len(self.collision_object_names)
01340 buff.write(_struct_I.pack(length))
01341 for val1 in self.collision_object_names:
01342 length = len(val1)
01343 buff.write(struct.pack('<I%ss'%length, length, val1))
01344 _x = self.collision_support_surface_name
01345 length = len(_x)
01346 buff.write(struct.pack('<I%ss'%length, length, _x))
01347 except struct.error, se: self._check_types(se)
01348 except TypeError, te: self._check_types(te)
01349
01350 def deserialize(self, str):
01351 """
01352 unpack serialized message in str into this message instance
01353 @param str: byte array of serialized message
01354 @type str: str
01355 """
01356 try:
01357 end = 0
01358 start = end
01359 end += 4
01360 (length,) = _struct_I.unpack(str[start:end])
01361 self.graspable_objects = []
01362 for i in xrange(0, length):
01363 val1 = object_manipulation_msgs.msg.GraspableObject()
01364 start = end
01365 end += 4
01366 (length,) = _struct_I.unpack(str[start:end])
01367 start = end
01368 end += length
01369 val1.reference_frame_id = str[start:end]
01370 start = end
01371 end += 4
01372 (length,) = _struct_I.unpack(str[start:end])
01373 val1.potential_models = []
01374 for i in xrange(0, length):
01375 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
01376 start = end
01377 end += 4
01378 (val2.model_id,) = _struct_i.unpack(str[start:end])
01379 _v56 = val2.pose
01380 _v57 = _v56.header
01381 start = end
01382 end += 4
01383 (_v57.seq,) = _struct_I.unpack(str[start:end])
01384 _v58 = _v57.stamp
01385 _x = _v58
01386 start = end
01387 end += 8
01388 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01389 start = end
01390 end += 4
01391 (length,) = _struct_I.unpack(str[start:end])
01392 start = end
01393 end += length
01394 _v57.frame_id = str[start:end]
01395 _v59 = _v56.pose
01396 _v60 = _v59.position
01397 _x = _v60
01398 start = end
01399 end += 24
01400 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
01401 _v61 = _v59.orientation
01402 _x = _v61
01403 start = end
01404 end += 32
01405 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
01406 start = end
01407 end += 4
01408 (val2.confidence,) = _struct_f.unpack(str[start:end])
01409 val1.potential_models.append(val2)
01410 _v62 = val1.cluster
01411 _v63 = _v62.header
01412 start = end
01413 end += 4
01414 (_v63.seq,) = _struct_I.unpack(str[start:end])
01415 _v64 = _v63.stamp
01416 _x = _v64
01417 start = end
01418 end += 8
01419 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01420 start = end
01421 end += 4
01422 (length,) = _struct_I.unpack(str[start:end])
01423 start = end
01424 end += length
01425 _v63.frame_id = str[start:end]
01426 start = end
01427 end += 4
01428 (length,) = _struct_I.unpack(str[start:end])
01429 _v62.points = []
01430 for i in xrange(0, length):
01431 val3 = geometry_msgs.msg.Point32()
01432 _x = val3
01433 start = end
01434 end += 12
01435 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
01436 _v62.points.append(val3)
01437 start = end
01438 end += 4
01439 (length,) = _struct_I.unpack(str[start:end])
01440 _v62.channels = []
01441 for i in xrange(0, length):
01442 val3 = sensor_msgs.msg.ChannelFloat32()
01443 start = end
01444 end += 4
01445 (length,) = _struct_I.unpack(str[start:end])
01446 start = end
01447 end += length
01448 val3.name = str[start:end]
01449 start = end
01450 end += 4
01451 (length,) = _struct_I.unpack(str[start:end])
01452 pattern = '<%sf'%length
01453 start = end
01454 end += struct.calcsize(pattern)
01455 val3.values = struct.unpack(pattern, str[start:end])
01456 _v62.channels.append(val3)
01457 _v65 = val1.region
01458 _v66 = _v65.cloud
01459 _v67 = _v66.header
01460 start = end
01461 end += 4
01462 (_v67.seq,) = _struct_I.unpack(str[start:end])
01463 _v68 = _v67.stamp
01464 _x = _v68
01465 start = end
01466 end += 8
01467 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01468 start = end
01469 end += 4
01470 (length,) = _struct_I.unpack(str[start:end])
01471 start = end
01472 end += length
01473 _v67.frame_id = str[start:end]
01474 _x = _v66
01475 start = end
01476 end += 8
01477 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01478 start = end
01479 end += 4
01480 (length,) = _struct_I.unpack(str[start:end])
01481 _v66.fields = []
01482 for i in xrange(0, length):
01483 val4 = sensor_msgs.msg.PointField()
01484 start = end
01485 end += 4
01486 (length,) = _struct_I.unpack(str[start:end])
01487 start = end
01488 end += length
01489 val4.name = str[start:end]
01490 _x = val4
01491 start = end
01492 end += 9
01493 (_x.offset, _x.datatype, _x.count,) = _struct_IBI.unpack(str[start:end])
01494 _v66.fields.append(val4)
01495 _x = _v66
01496 start = end
01497 end += 9
01498 (_x.is_bigendian, _x.point_step, _x.row_step,) = _struct_B2I.unpack(str[start:end])
01499 _v66.is_bigendian = bool(_v66.is_bigendian)
01500 start = end
01501 end += 4
01502 (length,) = _struct_I.unpack(str[start:end])
01503 start = end
01504 end += length
01505 _v66.data = str[start:end]
01506 start = end
01507 end += 1
01508 (_v66.is_dense,) = _struct_B.unpack(str[start:end])
01509 _v66.is_dense = bool(_v66.is_dense)
01510 start = end
01511 end += 4
01512 (length,) = _struct_I.unpack(str[start:end])
01513 pattern = '<%si'%length
01514 start = end
01515 end += struct.calcsize(pattern)
01516 _v65.mask = struct.unpack(pattern, str[start:end])
01517 _v69 = _v65.image
01518 _v70 = _v69.header
01519 start = end
01520 end += 4
01521 (_v70.seq,) = _struct_I.unpack(str[start:end])
01522 _v71 = _v70.stamp
01523 _x = _v71
01524 start = end
01525 end += 8
01526 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01527 start = end
01528 end += 4
01529 (length,) = _struct_I.unpack(str[start:end])
01530 start = end
01531 end += length
01532 _v70.frame_id = str[start:end]
01533 _x = _v69
01534 start = end
01535 end += 8
01536 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01537 start = end
01538 end += 4
01539 (length,) = _struct_I.unpack(str[start:end])
01540 start = end
01541 end += length
01542 _v69.encoding = str[start:end]
01543 _x = _v69
01544 start = end
01545 end += 5
01546 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
01547 start = end
01548 end += 4
01549 (length,) = _struct_I.unpack(str[start:end])
01550 start = end
01551 end += length
01552 _v69.data = str[start:end]
01553 _v72 = _v65.disparity_image
01554 _v73 = _v72.header
01555 start = end
01556 end += 4
01557 (_v73.seq,) = _struct_I.unpack(str[start:end])
01558 _v74 = _v73.stamp
01559 _x = _v74
01560 start = end
01561 end += 8
01562 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01563 start = end
01564 end += 4
01565 (length,) = _struct_I.unpack(str[start:end])
01566 start = end
01567 end += length
01568 _v73.frame_id = str[start:end]
01569 _x = _v72
01570 start = end
01571 end += 8
01572 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01573 start = end
01574 end += 4
01575 (length,) = _struct_I.unpack(str[start:end])
01576 start = end
01577 end += length
01578 _v72.encoding = str[start:end]
01579 _x = _v72
01580 start = end
01581 end += 5
01582 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
01583 start = end
01584 end += 4
01585 (length,) = _struct_I.unpack(str[start:end])
01586 start = end
01587 end += length
01588 _v72.data = str[start:end]
01589 _v75 = _v65.cam_info
01590 _v76 = _v75.header
01591 start = end
01592 end += 4
01593 (_v76.seq,) = _struct_I.unpack(str[start:end])
01594 _v77 = _v76.stamp
01595 _x = _v77
01596 start = end
01597 end += 8
01598 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01599 start = end
01600 end += 4
01601 (length,) = _struct_I.unpack(str[start:end])
01602 start = end
01603 end += length
01604 _v76.frame_id = str[start:end]
01605 _x = _v75
01606 start = end
01607 end += 8
01608 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01609 start = end
01610 end += 4
01611 (length,) = _struct_I.unpack(str[start:end])
01612 start = end
01613 end += length
01614 _v75.distortion_model = str[start:end]
01615 start = end
01616 end += 4
01617 (length,) = _struct_I.unpack(str[start:end])
01618 pattern = '<%sd'%length
01619 start = end
01620 end += struct.calcsize(pattern)
01621 _v75.D = struct.unpack(pattern, str[start:end])
01622 start = end
01623 end += 72
01624 _v75.K = _struct_9d.unpack(str[start:end])
01625 start = end
01626 end += 72
01627 _v75.R = _struct_9d.unpack(str[start:end])
01628 start = end
01629 end += 96
01630 _v75.P = _struct_12d.unpack(str[start:end])
01631 _x = _v75
01632 start = end
01633 end += 8
01634 (_x.binning_x, _x.binning_y,) = _struct_2I.unpack(str[start:end])
01635 _v78 = _v75.roi
01636 _x = _v78
01637 start = end
01638 end += 17
01639 (_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
01640 _v78.do_rectify = bool(_v78.do_rectify)
01641 self.graspable_objects.append(val1)
01642 start = end
01643 end += 4
01644 (length,) = _struct_I.unpack(str[start:end])
01645 self.collision_object_names = []
01646 for i in xrange(0, length):
01647 start = end
01648 end += 4
01649 (length,) = _struct_I.unpack(str[start:end])
01650 start = end
01651 end += length
01652 val1 = str[start:end]
01653 self.collision_object_names.append(val1)
01654 start = end
01655 end += 4
01656 (length,) = _struct_I.unpack(str[start:end])
01657 start = end
01658 end += length
01659 self.collision_support_surface_name = str[start:end]
01660 return self
01661 except struct.error, e:
01662 raise roslib.message.DeserializationError(e) #most likely buffer underfill
01663
01664
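# Illustrative round-trip sketch -- not emitted by genmsg_py. It assumes this
# module is importable as tabletop_collision_map_processing.srv (the usual
# location for generated service classes) and uses the Python 2 StringIO type
# named in the serialize() docstring; the field values are placeholders.
#
#   from cStringIO import StringIO
#   from tabletop_collision_map_processing.srv import (
#       TabletopCollisionMapProcessingResponse)
#
#   resp = TabletopCollisionMapProcessingResponse()
#   resp.collision_support_surface_name = 'table'
#   resp.collision_object_names = ['graspable_object_0']
#
#   buff = StringIO()
#   resp.serialize(buff)                        # length-prefixed wire format
#   clone = TabletopCollisionMapProcessingResponse()
#   clone.deserialize(buff.getvalue())          # returns self on success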
01665 def serialize_numpy(self, buff, numpy):
01666 """
01667 serialize message with numpy array types into buffer
01668 @param buff: buffer
01669 @type buff: StringIO
01670 @param numpy: numpy python module
01671     @type numpy: module
01672 """
01673 try:
01674 length = len(self.graspable_objects)
01675 buff.write(_struct_I.pack(length))
01676 for val1 in self.graspable_objects:
01677 _x = val1.reference_frame_id
01678 length = len(_x)
01679 buff.write(struct.pack('<I%ss'%length, length, _x))
01680 length = len(val1.potential_models)
01681 buff.write(_struct_I.pack(length))
01682 for val2 in val1.potential_models:
01683 buff.write(_struct_i.pack(val2.model_id))
01684 _v79 = val2.pose
01685 _v80 = _v79.header
01686 buff.write(_struct_I.pack(_v80.seq))
01687 _v81 = _v80.stamp
01688 _x = _v81
01689 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01690 _x = _v80.frame_id
01691 length = len(_x)
01692 buff.write(struct.pack('<I%ss'%length, length, _x))
01693 _v82 = _v79.pose
01694 _v83 = _v82.position
01695 _x = _v83
01696 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
01697 _v84 = _v82.orientation
01698 _x = _v84
01699 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
01700 buff.write(_struct_f.pack(val2.confidence))
01701 _v85 = val1.cluster
01702 _v86 = _v85.header
01703 buff.write(_struct_I.pack(_v86.seq))
01704 _v87 = _v86.stamp
01705 _x = _v87
01706 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01707 _x = _v86.frame_id
01708 length = len(_x)
01709 buff.write(struct.pack('<I%ss'%length, length, _x))
01710 length = len(_v85.points)
01711 buff.write(_struct_I.pack(length))
01712 for val3 in _v85.points:
01713 _x = val3
01714 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
01715 length = len(_v85.channels)
01716 buff.write(_struct_I.pack(length))
01717 for val3 in _v85.channels:
01718 _x = val3.name
01719 length = len(_x)
01720 buff.write(struct.pack('<I%ss'%length, length, _x))
01721 length = len(val3.values)
01722 buff.write(_struct_I.pack(length))
01723 pattern = '<%sf'%length
01724 buff.write(val3.values.tostring())
01725 _v88 = val1.region
01726 _v89 = _v88.cloud
01727 _v90 = _v89.header
01728 buff.write(_struct_I.pack(_v90.seq))
01729 _v91 = _v90.stamp
01730 _x = _v91
01731 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01732 _x = _v90.frame_id
01733 length = len(_x)
01734 buff.write(struct.pack('<I%ss'%length, length, _x))
01735 _x = _v89
01736 buff.write(_struct_2I.pack(_x.height, _x.width))
01737 length = len(_v89.fields)
01738 buff.write(_struct_I.pack(length))
01739 for val4 in _v89.fields:
01740 _x = val4.name
01741 length = len(_x)
01742 buff.write(struct.pack('<I%ss'%length, length, _x))
01743 _x = val4
01744 buff.write(_struct_IBI.pack(_x.offset, _x.datatype, _x.count))
01745 _x = _v89
01746 buff.write(_struct_B2I.pack(_x.is_bigendian, _x.point_step, _x.row_step))
01747 _x = _v89.data
01748 length = len(_x)
01749 # - if encoded as a list instead, serialize as bytes instead of string
01750 if type(_x) in [list, tuple]:
01751 buff.write(struct.pack('<I%sB'%length, length, *_x))
01752 else:
01753 buff.write(struct.pack('<I%ss'%length, length, _x))
01754 buff.write(_struct_B.pack(_v89.is_dense))
01755 length = len(_v88.mask)
01756 buff.write(_struct_I.pack(length))
01757 pattern = '<%si'%length
01758 buff.write(_v88.mask.tostring())
01759 _v92 = _v88.image
01760 _v93 = _v92.header
01761 buff.write(_struct_I.pack(_v93.seq))
01762 _v94 = _v93.stamp
01763 _x = _v94
01764 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01765 _x = _v93.frame_id
01766 length = len(_x)
01767 buff.write(struct.pack('<I%ss'%length, length, _x))
01768 _x = _v92
01769 buff.write(_struct_2I.pack(_x.height, _x.width))
01770 _x = _v92.encoding
01771 length = len(_x)
01772 buff.write(struct.pack('<I%ss'%length, length, _x))
01773 _x = _v92
01774 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01775 _x = _v92.data
01776 length = len(_x)
01777 # - if encoded as a list instead, serialize as bytes instead of string
01778 if type(_x) in [list, tuple]:
01779 buff.write(struct.pack('<I%sB'%length, length, *_x))
01780 else:
01781 buff.write(struct.pack('<I%ss'%length, length, _x))
01782 _v95 = _v88.disparity_image
01783 _v96 = _v95.header
01784 buff.write(_struct_I.pack(_v96.seq))
01785 _v97 = _v96.stamp
01786 _x = _v97
01787 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01788 _x = _v96.frame_id
01789 length = len(_x)
01790 buff.write(struct.pack('<I%ss'%length, length, _x))
01791 _x = _v95
01792 buff.write(_struct_2I.pack(_x.height, _x.width))
01793 _x = _v95.encoding
01794 length = len(_x)
01795 buff.write(struct.pack('<I%ss'%length, length, _x))
01796 _x = _v95
01797 buff.write(_struct_BI.pack(_x.is_bigendian, _x.step))
01798 _x = _v95.data
01799 length = len(_x)
01800 # - if encoded as a list instead, serialize as bytes instead of string
01801 if type(_x) in [list, tuple]:
01802 buff.write(struct.pack('<I%sB'%length, length, *_x))
01803 else:
01804 buff.write(struct.pack('<I%ss'%length, length, _x))
01805 _v98 = _v88.cam_info
01806 _v99 = _v98.header
01807 buff.write(_struct_I.pack(_v99.seq))
01808 _v100 = _v99.stamp
01809 _x = _v100
01810 buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
01811 _x = _v99.frame_id
01812 length = len(_x)
01813 buff.write(struct.pack('<I%ss'%length, length, _x))
01814 _x = _v98
01815 buff.write(_struct_2I.pack(_x.height, _x.width))
01816 _x = _v98.distortion_model
01817 length = len(_x)
01818 buff.write(struct.pack('<I%ss'%length, length, _x))
01819 length = len(_v98.D)
01820 buff.write(_struct_I.pack(length))
01821 pattern = '<%sd'%length
01822 buff.write(_v98.D.tostring())
01823 buff.write(_v98.K.tostring())
01824 buff.write(_v98.R.tostring())
01825 buff.write(_v98.P.tostring())
01826 _x = _v98
01827 buff.write(_struct_2I.pack(_x.binning_x, _x.binning_y))
01828 _v101 = _v98.roi
01829 _x = _v101
01830 buff.write(_struct_4IB.pack(_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify))
01831 length = len(self.collision_object_names)
01832 buff.write(_struct_I.pack(length))
01833 for val1 in self.collision_object_names:
01834 length = len(val1)
01835 buff.write(struct.pack('<I%ss'%length, length, val1))
01836 _x = self.collision_support_surface_name
01837 length = len(_x)
01838 buff.write(struct.pack('<I%ss'%length, length, _x))
01839 except struct.error, se: self._check_types(se)
01840 except TypeError, te: self._check_types(te)
01841
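# Illustrative sketch -- not emitted by genmsg_py. serialize_numpy() writes the
# array-valued fields with .tostring(), so it expects numpy arrays of the
# matching dtype rather than plain lists; the instance and sizes below are
# placeholders, e.g. for the CameraInfo nested in each region:
#
#   import numpy
#   cam_info = resp.graspable_objects[0].region.cam_info   # hypothetical instance
#   cam_info.K = numpy.zeros(9,  dtype=numpy.float64)
#   cam_info.R = numpy.zeros(9,  dtype=numpy.float64)
#   cam_info.P = numpy.zeros(12, dtype=numpy.float64)
#   cam_info.D = numpy.zeros(5,  dtype=numpy.float64)       # length depends on model
#
# Plain serialize() above accepts ordinary Python sequences for the same
# fields because it packs them element by element via struct.pack.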
01842 def deserialize_numpy(self, str, numpy):
01843 """
01844 unpack serialized message in str into this message instance using numpy for array types
01845 @param str: byte array of serialized message
01846 @type str: str
01847 @param numpy: numpy python module
01848 @type numpy: module
01849 """
01850 try:
01851 end = 0
01852 start = end
01853 end += 4
01854 (length,) = _struct_I.unpack(str[start:end])
01855 self.graspable_objects = []
01856 for i in xrange(0, length):
01857 val1 = object_manipulation_msgs.msg.GraspableObject()
01858 start = end
01859 end += 4
01860 (length,) = _struct_I.unpack(str[start:end])
01861 start = end
01862 end += length
01863 val1.reference_frame_id = str[start:end]
01864 start = end
01865 end += 4
01866 (length,) = _struct_I.unpack(str[start:end])
01867 val1.potential_models = []
01868 for i in xrange(0, length):
01869 val2 = household_objects_database_msgs.msg.DatabaseModelPose()
01870 start = end
01871 end += 4
01872 (val2.model_id,) = _struct_i.unpack(str[start:end])
01873 _v102 = val2.pose
01874 _v103 = _v102.header
01875 start = end
01876 end += 4
01877 (_v103.seq,) = _struct_I.unpack(str[start:end])
01878 _v104 = _v103.stamp
01879 _x = _v104
01880 start = end
01881 end += 8
01882 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01883 start = end
01884 end += 4
01885 (length,) = _struct_I.unpack(str[start:end])
01886 start = end
01887 end += length
01888 _v103.frame_id = str[start:end]
01889 _v105 = _v102.pose
01890 _v106 = _v105.position
01891 _x = _v106
01892 start = end
01893 end += 24
01894 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
01895 _v107 = _v105.orientation
01896 _x = _v107
01897 start = end
01898 end += 32
01899 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
01900 start = end
01901 end += 4
01902 (val2.confidence,) = _struct_f.unpack(str[start:end])
01903 val1.potential_models.append(val2)
01904 _v108 = val1.cluster
01905 _v109 = _v108.header
01906 start = end
01907 end += 4
01908 (_v109.seq,) = _struct_I.unpack(str[start:end])
01909 _v110 = _v109.stamp
01910 _x = _v110
01911 start = end
01912 end += 8
01913 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01914 start = end
01915 end += 4
01916 (length,) = _struct_I.unpack(str[start:end])
01917 start = end
01918 end += length
01919 _v109.frame_id = str[start:end]
01920 start = end
01921 end += 4
01922 (length,) = _struct_I.unpack(str[start:end])
01923 _v108.points = []
01924 for i in xrange(0, length):
01925 val3 = geometry_msgs.msg.Point32()
01926 _x = val3
01927 start = end
01928 end += 12
01929 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
01930 _v108.points.append(val3)
01931 start = end
01932 end += 4
01933 (length,) = _struct_I.unpack(str[start:end])
01934 _v108.channels = []
01935 for i in xrange(0, length):
01936 val3 = sensor_msgs.msg.ChannelFloat32()
01937 start = end
01938 end += 4
01939 (length,) = _struct_I.unpack(str[start:end])
01940 start = end
01941 end += length
01942 val3.name = str[start:end]
01943 start = end
01944 end += 4
01945 (length,) = _struct_I.unpack(str[start:end])
01946 pattern = '<%sf'%length
01947 start = end
01948 end += struct.calcsize(pattern)
01949 val3.values = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=length)
01950 _v108.channels.append(val3)
01951 _v111 = val1.region
01952 _v112 = _v111.cloud
01953 _v113 = _v112.header
01954 start = end
01955 end += 4
01956 (_v113.seq,) = _struct_I.unpack(str[start:end])
01957 _v114 = _v113.stamp
01958 _x = _v114
01959 start = end
01960 end += 8
01961 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
01962 start = end
01963 end += 4
01964 (length,) = _struct_I.unpack(str[start:end])
01965 start = end
01966 end += length
01967 _v113.frame_id = str[start:end]
01968 _x = _v112
01969 start = end
01970 end += 8
01971 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
01972 start = end
01973 end += 4
01974 (length,) = _struct_I.unpack(str[start:end])
01975 _v112.fields = []
01976 for i in xrange(0, length):
01977 val4 = sensor_msgs.msg.PointField()
01978 start = end
01979 end += 4
01980 (length,) = _struct_I.unpack(str[start:end])
01981 start = end
01982 end += length
01983 val4.name = str[start:end]
01984 _x = val4
01985 start = end
01986 end += 9
01987 (_x.offset, _x.datatype, _x.count,) = _struct_IBI.unpack(str[start:end])
01988 _v112.fields.append(val4)
01989 _x = _v112
01990 start = end
01991 end += 9
01992 (_x.is_bigendian, _x.point_step, _x.row_step,) = _struct_B2I.unpack(str[start:end])
01993 _v112.is_bigendian = bool(_v112.is_bigendian)
01994 start = end
01995 end += 4
01996 (length,) = _struct_I.unpack(str[start:end])
01997 start = end
01998 end += length
01999 _v112.data = str[start:end]
02000 start = end
02001 end += 1
02002 (_v112.is_dense,) = _struct_B.unpack(str[start:end])
02003 _v112.is_dense = bool(_v112.is_dense)
02004 start = end
02005 end += 4
02006 (length,) = _struct_I.unpack(str[start:end])
02007 pattern = '<%si'%length
02008 start = end
02009 end += struct.calcsize(pattern)
02010 _v111.mask = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
02011 _v115 = _v111.image
02012 _v116 = _v115.header
02013 start = end
02014 end += 4
02015 (_v116.seq,) = _struct_I.unpack(str[start:end])
02016 _v117 = _v116.stamp
02017 _x = _v117
02018 start = end
02019 end += 8
02020 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02021 start = end
02022 end += 4
02023 (length,) = _struct_I.unpack(str[start:end])
02024 start = end
02025 end += length
02026 _v116.frame_id = str[start:end]
02027 _x = _v115
02028 start = end
02029 end += 8
02030 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02031 start = end
02032 end += 4
02033 (length,) = _struct_I.unpack(str[start:end])
02034 start = end
02035 end += length
02036 _v115.encoding = str[start:end]
02037 _x = _v115
02038 start = end
02039 end += 5
02040 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
02041 start = end
02042 end += 4
02043 (length,) = _struct_I.unpack(str[start:end])
02044 start = end
02045 end += length
02046 _v115.data = str[start:end]
02047 _v118 = _v111.disparity_image
02048 _v119 = _v118.header
02049 start = end
02050 end += 4
02051 (_v119.seq,) = _struct_I.unpack(str[start:end])
02052 _v120 = _v119.stamp
02053 _x = _v120
02054 start = end
02055 end += 8
02056 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02057 start = end
02058 end += 4
02059 (length,) = _struct_I.unpack(str[start:end])
02060 start = end
02061 end += length
02062 _v119.frame_id = str[start:end]
02063 _x = _v118
02064 start = end
02065 end += 8
02066 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02067 start = end
02068 end += 4
02069 (length,) = _struct_I.unpack(str[start:end])
02070 start = end
02071 end += length
02072 _v118.encoding = str[start:end]
02073 _x = _v118
02074 start = end
02075 end += 5
02076 (_x.is_bigendian, _x.step,) = _struct_BI.unpack(str[start:end])
02077 start = end
02078 end += 4
02079 (length,) = _struct_I.unpack(str[start:end])
02080 start = end
02081 end += length
02082 _v118.data = str[start:end]
02083 _v121 = _v111.cam_info
02084 _v122 = _v121.header
02085 start = end
02086 end += 4
02087 (_v122.seq,) = _struct_I.unpack(str[start:end])
02088 _v123 = _v122.stamp
02089 _x = _v123
02090 start = end
02091 end += 8
02092 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
02093 start = end
02094 end += 4
02095 (length,) = _struct_I.unpack(str[start:end])
02096 start = end
02097 end += length
02098 _v122.frame_id = str[start:end]
02099 _x = _v121
02100 start = end
02101 end += 8
02102 (_x.height, _x.width,) = _struct_2I.unpack(str[start:end])
02103 start = end
02104 end += 4
02105 (length,) = _struct_I.unpack(str[start:end])
02106 start = end
02107 end += length
02108 _v121.distortion_model = str[start:end]
02109 start = end
02110 end += 4
02111 (length,) = _struct_I.unpack(str[start:end])
02112 pattern = '<%sd'%length
02113 start = end
02114 end += struct.calcsize(pattern)
02115 _v121.D = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
02116 start = end
02117 end += 72
02118 _v121.K = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
02119 start = end
02120 end += 72
02121 _v121.R = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
02122 start = end
02123 end += 96
02124 _v121.P = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=12)
02125 _x = _v121
02126 start = end
02127 end += 8
02128 (_x.binning_x, _x.binning_y,) = _struct_2I.unpack(str[start:end])
02129 _v124 = _v121.roi
02130 _x = _v124
02131 start = end
02132 end += 17
02133 (_x.x_offset, _x.y_offset, _x.height, _x.width, _x.do_rectify,) = _struct_4IB.unpack(str[start:end])
02134 _v124.do_rectify = bool(_v124.do_rectify)
02135 self.graspable_objects.append(val1)
02136 start = end
02137 end += 4
02138 (length,) = _struct_I.unpack(str[start:end])
02139 self.collision_object_names = []
02140 for i in xrange(0, length):
02141 start = end
02142 end += 4
02143 (length,) = _struct_I.unpack(str[start:end])
02144 start = end
02145 end += length
02146 val1 = str[start:end]
02147 self.collision_object_names.append(val1)
02148 start = end
02149 end += 4
02150 (length,) = _struct_I.unpack(str[start:end])
02151 start = end
02152 end += length
02153 self.collision_support_surface_name = str[start:end]
02154 return self
02155 except struct.error, e:
02156 raise roslib.message.DeserializationError(e) #most likely buffer underfill
02157
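# Illustrative sketch -- not emitted by genmsg_py. deserialize_numpy() takes
# the numpy module itself as its second argument and maps array fields onto
# the input buffer with numpy.frombuffer, so the resulting arrays are
# read-only views; wire_bytes below is a placeholder for a serialized string.
#
#   import numpy
#   clone = TabletopCollisionMapProcessingResponse()
#   clone.deserialize_numpy(wire_bytes, numpy)
#   mask = clone.graspable_objects[0].region.mask   # numpy.int32 array
#   mask = mask.copy()                              # copy before mutating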
02158 _struct_I = roslib.message.struct_I
02159 _struct_IBI = struct.Struct("<IBI")
02160 _struct_B = struct.Struct("<B")
02161 _struct_12d = struct.Struct("<12d")
02162 _struct_f = struct.Struct("<f")
02163 _struct_i = struct.Struct("<i")
02164 _struct_BI = struct.Struct("<BI")
02165 _struct_3f = struct.Struct("<3f")
02166 _struct_9d = struct.Struct("<9d")
02167 _struct_B2I = struct.Struct("<B2I")
02168 _struct_4d = struct.Struct("<4d")
02169 _struct_2I = struct.Struct("<2I")
02170 _struct_4IB = struct.Struct("<4IB")
02171 _struct_3d = struct.Struct("<3d")
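# Illustrative note -- not emitted by genmsg_py. The precompiled Struct objects
# above mirror the fixed-size spans consumed in deserialize(): the 72-byte
# reads of CameraInfo.K and CameraInfo.R use _struct_9d, the 96-byte read of
# CameraInfo.P uses _struct_12d, and the 17-byte RegionOfInterest block uses
# _struct_4IB.
#
#   import struct
#   assert struct.Struct('<9d').size  == 72
#   assert struct.Struct('<12d').size == 96
#   assert struct.Struct('<4IB').size == 17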
02172 class TabletopCollisionMapProcessing(roslib.message.ServiceDefinition):
02173 _type = 'tabletop_collision_map_processing/TabletopCollisionMapProcessing'
02174 _md5sum = '2a1cb6170b4cb73398aa70e26c6ba0b8'
02175 _request_class = TabletopCollisionMapProcessingRequest
02176 _response_class = TabletopCollisionMapProcessingResponse
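# Illustrative client sketch -- not emitted by genmsg_py. The service type
# comes from the definition above; the service name
# '/tabletop_collision_map_processing/tabletop_collision_map_processing' and
# the field values are assumptions -- use whatever name the processing node
# actually advertises.
#
#   import rospy
#   from tabletop_collision_map_processing.srv import (
#       TabletopCollisionMapProcessing, TabletopCollisionMapProcessingRequest)
#
#   service_name = '/tabletop_collision_map_processing/tabletop_collision_map_processing'
#   rospy.wait_for_service(service_name)
#   proxy = rospy.ServiceProxy(service_name, TabletopCollisionMapProcessing)
#
#   req = TabletopCollisionMapProcessingRequest()
#   req.reset_collision_models = True
#   req.reset_attached_models = True
#   req.desired_frame = 'base_link'            # placeholder frame id
#   # req.detection_result = <TabletopDetectionResult from the detector>
#   resp = proxy(req)                          # TabletopCollisionMapProcessingResponse
#   print resp.collision_object_names, resp.collision_support_surface_name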