
#ifndef ARTICULATION_MSGS_SERVICE_ALIGNMODELSRV_H
#define ARTICULATION_MSGS_SERVICE_ALIGNMODELSRV_H
#include <string>
#include <vector>
#include <ostream>
#include "ros/serialization.h"
#include "ros/builtin_message_traits.h"
#include "ros/message_operations.h"
#include "ros/message.h"
#include "ros/time.h"

#include "ros/service_traits.h"

#include "articulation_msgs/ModelMsg.h"

namespace articulation_msgs
{
template <class ContainerAllocator>
struct AlignModelSrvRequest_ : public ros::Message
{
typedef AlignModelSrvRequest_<ContainerAllocator> Type;

AlignModelSrvRequest_()
: model()
, data()
{
}

AlignModelSrvRequest_(const ContainerAllocator& _alloc)
: model(_alloc)
, data(_alloc)
{
}

typedef ::articulation_msgs::ModelMsg_<ContainerAllocator> _model_type;
::articulation_msgs::ModelMsg_<ContainerAllocator> model;

typedef ::articulation_msgs::ModelMsg_<ContainerAllocator> _data_type;
::articulation_msgs::ModelMsg_<ContainerAllocator> data;


private:
static const char* __s_getDataType_() { return "articulation_msgs/AlignModelSrvRequest"; }
public:
ROS_DEPRECATED static const std::string __s_getDataType() { return __s_getDataType_(); }

ROS_DEPRECATED const std::string __getDataType() const { return __s_getDataType_(); }

private:
static const char* __s_getMD5Sum_() { return "41219d340df4fefcab2da62ad6d15697"; }
public:
ROS_DEPRECATED static const std::string __s_getMD5Sum() { return __s_getMD5Sum_(); }

ROS_DEPRECATED const std::string __getMD5Sum() const { return __s_getMD5Sum_(); }

private:
static const char* __s_getServerMD5Sum_() { return "24f8639d9e5b811cbb8a46156e9ac900"; }
public:
ROS_DEPRECATED static const std::string __s_getServerMD5Sum() { return __s_getServerMD5Sum_(); }

ROS_DEPRECATED const std::string __getServerMD5Sum() const { return __s_getServerMD5Sum_(); }

private:
static const char* __s_getMessageDefinition_() { return "\n\
\n\
\n\
\n\
\n\
\n\
\n\
articulation_msgs/ModelMsg model\n\
articulation_msgs/ModelMsg data\n\
\n\
================================================================================\n\
MSG: articulation_msgs/ModelMsg\n\
# Single kinematic model\n\
#\n\
# A kinematic model is defined by its model class name, and a set of parameters. \n\
# The client may additionally specify a model id, that can be used to colorize the\n\
# model when visualized using the RVIZ model display.\n\
# \n\
# For a list of currently implemented models, see the documetation at\n\
# http://www.ros.org/wiki/articulation_models\n\
#\n\
#\n\
\n\
Header header # frame and timestamp\n\
\n\
int32 id # user specified model id\n\
string name # name of the model family (e.g. \"rotational\",\n\
# \"prismatic\", \"pca-gp\", \"rigid\")\n\
articulation_msgs/ParamMsg[] params # model parameters\n\
articulation_msgs/TrackMsg track # trajectory from which the model is estimated, or\n\
# that is evaluated using the model\n\
\n\
================================================================================\n\
MSG: std_msgs/Header\n\
# Standard metadata for higher-level stamped data types.\n\
# This is generally used to communicate timestamped data \n\
# in a particular coordinate frame.\n\
# \n\
# sequence ID: consecutively increasing ID \n\
uint32 seq\n\
#Two-integer timestamp that is expressed as:\n\
# * stamp.secs: seconds (stamp_secs) since epoch\n\
# * stamp.nsecs: nanoseconds since stamp_secs\n\
# time-handling sugar is provided by the client library\n\
time stamp\n\
#Frame this data is associated with\n\
# 0: no frame\n\
# 1: global frame\n\
string frame_id\n\
\n\
================================================================================\n\
MSG: articulation_msgs/ParamMsg\n\
# Single parameter passed to or from model fitting\n\
#\n\
# This mechanism allows to flexibly pass parameters to \n\
# model fitting (and vice versa). Note that these parameters \n\
# are model-specific: A client may supply additional\n\
# parameters to the model estimator, and, similarly, a estimator\n\
# may add the estimated model parameters to the model message.\n\
# When the model is then evaluated, for example to make predictions\n\
# or to compute the likelihood, the model class can then use\n\
# these parameters.\n\
#\n\
# A parameter has a name, a value, and a type. The type globally\n\
# indicates whether it is a prior parameter (prior to model fitting),\n\
# or a model parameter (found during model fitting, using a maximum-\n\
# likelihood estimator), or a cached evaluation (e.g., the likelihood\n\
# or the BIC are a typical \"side\"-product of model estimation, and\n\
# can therefore already be cached).\n\
#\n\
# For a list of currently used parameters, see the documentation at\n\
# http://www.ros.org/wiki/articulation_models\n\
#\n\
\n\
uint8 PRIOR=0 # indicates a prior model parameter \n\
# (e.g., \"sigma_position\")\n\
uint8 PARAM=1 # indicates a estimated model parameter \n\
# (e.g., \"rot_radius\", the estimated radius)\n\
uint8 EVAL=2 # indicates a cached evaluation of the model, given \n\
# the current trajectory\n\
# (e.g., \"loglikelihood\", the log likelihood of the\n\
# data, given the model and its parameters)\n\
\n\
string name # name of the parameter\n\
float64 value # value of the parameter\n\
uint8 type # type of the parameter (PRIOR, PARAM, EVAL)\n\
\n\
\n\
================================================================================\n\
MSG: articulation_msgs/TrackMsg\n\
# Single kinematic trajectory\n\
#\n\
# This message contains a kinematic trajectory. The trajectory is specified\n\
# as a vector of 6D poses. An additional flag, track_type, indicates whether\n\
# the track is valid, and whether it includes orientation. The track id\n\
# can be used for automatic coloring in the RVIZ track plugin, and can be \n\
# freely chosen by the client. \n\
#\n\
# A model is fitting only from the trajectory stored in the pose[]-vector. \n\
# Additional information may be associated to each pose using the channels\n\
# vector, with arbitrary # fields (e.g., to include applied/measured forces). \n\
#\n\
# After model evaluation,\n\
# also the associated configurations of the object are stored in the channels,\n\
# named \"q[0]\"..\"q[DOF-1]\", where DOF is the number of degrees of freedom.\n\
# Model evaluation also projects the poses in the pose vector onto the model,\n\
# and stores these ideal poses in the vector pose_projected. Further, during model\n\
# evaluation, a new set of (uniform) configurations over the valid configuration\n\
# range is sampled, and the result is stored in pose_resampled.\n\
# The vector pose_flags contains additional display flags for the poses in the\n\
# pose vector, for example, whether a pose is visible and/or\n\
# the end of a trajectory segment. At the moment, this is only used by the\n\
# prior_model_learner.\n\
#\n\
\n\
Header header # Timestamp and frame\n\
int32 id # used-specified track id\n\
\n\
geometry_msgs/Pose[] pose # sequence of poses, defining the observed trajectory\n\
geometry_msgs/Pose[] pose_projected # sequence of poses, projected to the model \n\
# (after model evaluation)\n\
geometry_msgs/Pose[] pose_resampled # sequence of poses, re-sampled from the model in\n\
# the valid configuration range\n\
uint32[] pose_flags # bit-wise combination of POSE_VISIBLE and POSE_END_OF_SEGMENT\n\
\n\
uint32 POSE_VISIBLE=1\n\
uint32 POSE_END_OF_SEGMENT=2\n\
\n\
# Each channel should have the same number of elements as pose array, \n\
# and the data in each channel should correspond 1:1 with each pose\n\
# possible channels: \"width\", \"height\", \"rgb\", ...\n\
sensor_msgs/ChannelFloat32[] channels \n\
\n\
\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Pose\n\
# A representation of pose in free space, composed of postion and orientation. \n\
Point position\n\
Quaternion orientation\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Point\n\
# This contains the position of a point in free space\n\
float64 x\n\
float64 y\n\
float64 z\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Quaternion\n\
# This represents an orientation in free space in quaternion form.\n\
\n\
float64 x\n\
float64 y\n\
float64 z\n\
float64 w\n\
\n\
================================================================================\n\
MSG: sensor_msgs/ChannelFloat32\n\
# This message is used by the PointCloud message to hold optional data\n\
# associated with each point in the cloud. The length of the values\n\
# array should be the same as the length of the points array in the\n\
# PointCloud, and each value should be associated with the corresponding\n\
# point.\n\
\n\
# Channel names in existing practice include:\n\
# \"u\", \"v\" - row and column (respectively) in the left stereo image.\n\
# This is opposite to usual conventions but remains for\n\
# historical reasons. The newer PointCloud2 message has no\n\
# such problem.\n\
# \"rgb\" - For point clouds produced by color stereo cameras. uint8\n\
# (R,G,B) values packed into the least significant 24 bits,\n\
# in order.\n\
# \"intensity\" - laser or pixel intensity.\n\
# \"distance\"\n\
\n\
# The channel name should give semantics of the channel (e.g.\n\
# \"intensity\" instead of \"value\").\n\
string name\n\
\n\
# The values array should be 1-1 with the elements of the associated\n\
# PointCloud.\n\
float32[] values\n\
\n\
"; }
public:
ROS_DEPRECATED static const std::string __s_getMessageDefinition() { return __s_getMessageDefinition_(); }

ROS_DEPRECATED const std::string __getMessageDefinition() const { return __s_getMessageDefinition_(); }

ROS_DEPRECATED virtual uint8_t *serialize(uint8_t *write_ptr, uint32_t seq) const
{
ros::serialization::OStream stream(write_ptr, 1000000000);
ros::serialization::serialize(stream, model);
ros::serialization::serialize(stream, data);
return stream.getData();
}

ROS_DEPRECATED virtual uint8_t *deserialize(uint8_t *read_ptr)
{
ros::serialization::IStream stream(read_ptr, 1000000000);
ros::serialization::deserialize(stream, model);
ros::serialization::deserialize(stream, data);
return stream.getData();
}

ROS_DEPRECATED virtual uint32_t serializationLength() const
{
uint32_t size = 0;
size += ros::serialization::serializationLength(model);
size += ros::serialization::serializationLength(data);
return size;
}

typedef boost::shared_ptr< ::articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> > Ptr;
typedef boost::shared_ptr< ::articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> const> ConstPtr;
};
typedef ::articulation_msgs::AlignModelSrvRequest_<std::allocator<void> > AlignModelSrvRequest;

typedef boost::shared_ptr< ::articulation_msgs::AlignModelSrvRequest> AlignModelSrvRequestPtr;
typedef boost::shared_ptr< ::articulation_msgs::AlignModelSrvRequest const> AlignModelSrvRequestConstPtr;
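
// Illustrative sketch (not part of the generated code): filling a request by
// hand. The field names come from the service definition embedded above; the
// model family name "rotational" and the frame id are placeholder values.
//
//   articulation_msgs::AlignModelSrvRequest req;
//   req.model.name = "rotational";
//   req.model.header.frame_id = "/world";
//   req.data.name = "rotational";
//   req.data.header.frame_id = "/world";
//   // req.model.track.pose and req.data.track.pose would carry the two
//   // trajectories to be aligned.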


template <class ContainerAllocator>
struct AlignModelSrvResponse_ : public ros::Message
{
typedef AlignModelSrvResponse_<ContainerAllocator> Type;

AlignModelSrvResponse_()
: model_aligned()
, data_aligned()
, R()
, T()
, dist_rot(0.0)
, dist_trans(0.0)
{
R.assign(0.0);
T.assign(0.0);
}

AlignModelSrvResponse_(const ContainerAllocator& _alloc)
: model_aligned(_alloc)
, data_aligned(_alloc)
, R()
, T()
, dist_rot(0.0)
, dist_trans(0.0)
{
R.assign(0.0);
T.assign(0.0);
}

typedef ::articulation_msgs::ModelMsg_<ContainerAllocator> _model_aligned_type;
::articulation_msgs::ModelMsg_<ContainerAllocator> model_aligned;

typedef ::articulation_msgs::ModelMsg_<ContainerAllocator> _data_aligned_type;
::articulation_msgs::ModelMsg_<ContainerAllocator> data_aligned;

typedef boost::array<double, 9> _R_type;
boost::array<double, 9> R;

typedef boost::array<double, 3> _T_type;
boost::array<double, 3> T;

typedef double _dist_rot_type;
double dist_rot;

typedef float _dist_trans_type;
float dist_trans;


ROS_DEPRECATED uint32_t get_R_size() const { return (uint32_t)R.size(); }
ROS_DEPRECATED uint32_t get_T_size() const { return (uint32_t)T.size(); }
private:
static const char* __s_getDataType_() { return "articulation_msgs/AlignModelSrvResponse"; }
public:
ROS_DEPRECATED static const std::string __s_getDataType() { return __s_getDataType_(); }

ROS_DEPRECATED const std::string __getDataType() const { return __s_getDataType_(); }

private:
static const char* __s_getMD5Sum_() { return "bf4815f3efa4d7f5c033d2485524a4e6"; }
public:
ROS_DEPRECATED static const std::string __s_getMD5Sum() { return __s_getMD5Sum_(); }

ROS_DEPRECATED const std::string __getMD5Sum() const { return __s_getMD5Sum_(); }

private:
static const char* __s_getServerMD5Sum_() { return "24f8639d9e5b811cbb8a46156e9ac900"; }
public:
ROS_DEPRECATED static const std::string __s_getServerMD5Sum() { return __s_getServerMD5Sum_(); }

ROS_DEPRECATED const std::string __getServerMD5Sum() const { return __s_getServerMD5Sum_(); }

private:
static const char* __s_getMessageDefinition_() { return "articulation_msgs/ModelMsg model_aligned\n\
\n\
\n\
articulation_msgs/ModelMsg data_aligned\n\
\n\
\n\
float64[9] R\n\
float64[3] T\n\
float64 dist_rot\n\
float32 dist_trans\n\
\n\
\n\
\n\
================================================================================\n\
MSG: articulation_msgs/ModelMsg\n\
# Single kinematic model\n\
#\n\
# A kinematic model is defined by its model class name, and a set of parameters. \n\
# The client may additionally specify a model id, that can be used to colorize the\n\
# model when visualized using the RVIZ model display.\n\
# \n\
# For a list of currently implemented models, see the documetation at\n\
# http://www.ros.org/wiki/articulation_models\n\
#\n\
#\n\
\n\
Header header # frame and timestamp\n\
\n\
int32 id # user specified model id\n\
string name # name of the model family (e.g. \"rotational\",\n\
# \"prismatic\", \"pca-gp\", \"rigid\")\n\
articulation_msgs/ParamMsg[] params # model parameters\n\
articulation_msgs/TrackMsg track # trajectory from which the model is estimated, or\n\
# that is evaluated using the model\n\
\n\
================================================================================\n\
MSG: std_msgs/Header\n\
# Standard metadata for higher-level stamped data types.\n\
# This is generally used to communicate timestamped data \n\
# in a particular coordinate frame.\n\
# \n\
# sequence ID: consecutively increasing ID \n\
uint32 seq\n\
#Two-integer timestamp that is expressed as:\n\
# * stamp.secs: seconds (stamp_secs) since epoch\n\
# * stamp.nsecs: nanoseconds since stamp_secs\n\
# time-handling sugar is provided by the client library\n\
time stamp\n\
#Frame this data is associated with\n\
# 0: no frame\n\
# 1: global frame\n\
string frame_id\n\
\n\
================================================================================\n\
MSG: articulation_msgs/ParamMsg\n\
# Single parameter passed to or from model fitting\n\
#\n\
# This mechanism allows to flexibly pass parameters to \n\
# model fitting (and vice versa). Note that these parameters \n\
# are model-specific: A client may supply additional\n\
# parameters to the model estimator, and, similarly, a estimator\n\
# may add the estimated model parameters to the model message.\n\
# When the model is then evaluated, for example to make predictions\n\
# or to compute the likelihood, the model class can then use\n\
# these parameters.\n\
#\n\
# A parameter has a name, a value, and a type. The type globally\n\
# indicates whether it is a prior parameter (prior to model fitting),\n\
# or a model parameter (found during model fitting, using a maximum-\n\
# likelihood estimator), or a cached evaluation (e.g., the likelihood\n\
# or the BIC are a typical \"side\"-product of model estimation, and\n\
# can therefore already be cached).\n\
#\n\
# For a list of currently used parameters, see the documentation at\n\
# http://www.ros.org/wiki/articulation_models\n\
#\n\
\n\
uint8 PRIOR=0 # indicates a prior model parameter \n\
# (e.g., \"sigma_position\")\n\
uint8 PARAM=1 # indicates a estimated model parameter \n\
# (e.g., \"rot_radius\", the estimated radius)\n\
uint8 EVAL=2 # indicates a cached evaluation of the model, given \n\
# the current trajectory\n\
# (e.g., \"loglikelihood\", the log likelihood of the\n\
# data, given the model and its parameters)\n\
\n\
string name # name of the parameter\n\
float64 value # value of the parameter\n\
uint8 type # type of the parameter (PRIOR, PARAM, EVAL)\n\
\n\
\n\
================================================================================\n\
MSG: articulation_msgs/TrackMsg\n\
# Single kinematic trajectory\n\
#\n\
# This message contains a kinematic trajectory. The trajectory is specified\n\
# as a vector of 6D poses. An additional flag, track_type, indicates whether\n\
# the track is valid, and whether it includes orientation. The track id\n\
# can be used for automatic coloring in the RVIZ track plugin, and can be \n\
# freely chosen by the client. \n\
#\n\
# A model is fitting only from the trajectory stored in the pose[]-vector. \n\
# Additional information may be associated to each pose using the channels\n\
# vector, with arbitrary # fields (e.g., to include applied/measured forces). \n\
#\n\
# After model evaluation,\n\
# also the associated configurations of the object are stored in the channels,\n\
# named \"q[0]\"..\"q[DOF-1]\", where DOF is the number of degrees of freedom.\n\
# Model evaluation also projects the poses in the pose vector onto the model,\n\
# and stores these ideal poses in the vector pose_projected. Further, during model\n\
# evaluation, a new set of (uniform) configurations over the valid configuration\n\
# range is sampled, and the result is stored in pose_resampled.\n\
# The vector pose_flags contains additional display flags for the poses in the\n\
# pose vector, for example, whether a pose is visible and/or\n\
# the end of a trajectory segment. At the moment, this is only used by the\n\
# prior_model_learner.\n\
#\n\
\n\
Header header # Timestamp and frame\n\
int32 id # used-specified track id\n\
\n\
geometry_msgs/Pose[] pose # sequence of poses, defining the observed trajectory\n\
geometry_msgs/Pose[] pose_projected # sequence of poses, projected to the model \n\
# (after model evaluation)\n\
geometry_msgs/Pose[] pose_resampled # sequence of poses, re-sampled from the model in\n\
# the valid configuration range\n\
uint32[] pose_flags # bit-wise combination of POSE_VISIBLE and POSE_END_OF_SEGMENT\n\
\n\
uint32 POSE_VISIBLE=1\n\
uint32 POSE_END_OF_SEGMENT=2\n\
\n\
# Each channel should have the same number of elements as pose array, \n\
# and the data in each channel should correspond 1:1 with each pose\n\
# possible channels: \"width\", \"height\", \"rgb\", ...\n\
sensor_msgs/ChannelFloat32[] channels \n\
\n\
\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Pose\n\
# A representation of pose in free space, composed of postion and orientation. \n\
Point position\n\
Quaternion orientation\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Point\n\
# This contains the position of a point in free space\n\
float64 x\n\
float64 y\n\
float64 z\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Quaternion\n\
# This represents an orientation in free space in quaternion form.\n\
\n\
float64 x\n\
float64 y\n\
float64 z\n\
float64 w\n\
\n\
================================================================================\n\
MSG: sensor_msgs/ChannelFloat32\n\
# This message is used by the PointCloud message to hold optional data\n\
# associated with each point in the cloud. The length of the values\n\
# array should be the same as the length of the points array in the\n\
# PointCloud, and each value should be associated with the corresponding\n\
# point.\n\
\n\
# Channel names in existing practice include:\n\
# \"u\", \"v\" - row and column (respectively) in the left stereo image.\n\
# This is opposite to usual conventions but remains for\n\
# historical reasons. The newer PointCloud2 message has no\n\
# such problem.\n\
# \"rgb\" - For point clouds produced by color stereo cameras. uint8\n\
# (R,G,B) values packed into the least significant 24 bits,\n\
# in order.\n\
# \"intensity\" - laser or pixel intensity.\n\
# \"distance\"\n\
\n\
# The channel name should give semantics of the channel (e.g.\n\
# \"intensity\" instead of \"value\").\n\
string name\n\
\n\
# The values array should be 1-1 with the elements of the associated\n\
# PointCloud.\n\
float32[] values\n\
\n\
"; }
public:
ROS_DEPRECATED static const std::string __s_getMessageDefinition() { return __s_getMessageDefinition_(); }

ROS_DEPRECATED const std::string __getMessageDefinition() const { return __s_getMessageDefinition_(); }

ROS_DEPRECATED virtual uint8_t *serialize(uint8_t *write_ptr, uint32_t seq) const
{
ros::serialization::OStream stream(write_ptr, 1000000000);
ros::serialization::serialize(stream, model_aligned);
ros::serialization::serialize(stream, data_aligned);
ros::serialization::serialize(stream, R);
ros::serialization::serialize(stream, T);
ros::serialization::serialize(stream, dist_rot);
ros::serialization::serialize(stream, dist_trans);
return stream.getData();
}

ROS_DEPRECATED virtual uint8_t *deserialize(uint8_t *read_ptr)
{
ros::serialization::IStream stream(read_ptr, 1000000000);
ros::serialization::deserialize(stream, model_aligned);
ros::serialization::deserialize(stream, data_aligned);
ros::serialization::deserialize(stream, R);
ros::serialization::deserialize(stream, T);
ros::serialization::deserialize(stream, dist_rot);
ros::serialization::deserialize(stream, dist_trans);
return stream.getData();
}

ROS_DEPRECATED virtual uint32_t serializationLength() const
{
uint32_t size = 0;
size += ros::serialization::serializationLength(model_aligned);
size += ros::serialization::serializationLength(data_aligned);
size += ros::serialization::serializationLength(R);
size += ros::serialization::serializationLength(T);
size += ros::serialization::serializationLength(dist_rot);
size += ros::serialization::serializationLength(dist_trans);
return size;
}

typedef boost::shared_ptr< ::articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> > Ptr;
typedef boost::shared_ptr< ::articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> const> ConstPtr;
};
typedef ::articulation_msgs::AlignModelSrvResponse_<std::allocator<void> > AlignModelSrvResponse;

typedef boost::shared_ptr< ::articulation_msgs::AlignModelSrvResponse> AlignModelSrvResponsePtr;
typedef boost::shared_ptr< ::articulation_msgs::AlignModelSrvResponse const> AlignModelSrvResponseConstPtr;

struct AlignModelSrv
{

typedef AlignModelSrvRequest Request;
typedef AlignModelSrvResponse Response;
Request request;
Response response;

typedef Request RequestType;
typedef Response ResponseType;
};
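
// Illustrative sketch (not part of the generated code): calling the service
// through ros::ServiceClient. The service name "align_model" is an assumption;
// the actual name depends on the node that advertises this service, and the
// meaning of R, T, dist_rot and dist_trans follows from their names and is not
// documented in the definition above.
//
//   #include <ros/ros.h>
//   #include "articulation_msgs/AlignModelSrv.h"
//
//   int main(int argc, char** argv)
//   {
//     ros::init(argc, argv, "align_model_client");
//     ros::NodeHandle nh;
//     ros::ServiceClient client =
//         nh.serviceClient<articulation_msgs::AlignModelSrv>("align_model");
//
//     articulation_msgs::AlignModelSrv srv;
//     // fill srv.request.model and srv.request.data with the two models
//     // (including their tracks) that should be aligned
//     if (client.call(srv))
//     {
//       // srv.response.R holds the 9 elements of the estimated rotation,
//       // srv.response.T the 3-element translation.
//       ROS_INFO("dist_rot=%f dist_trans=%f",
//                srv.response.dist_rot,
//                static_cast<double>(srv.response.dist_trans));
//     }
//     else
//     {
//       ROS_ERROR("Failed to call align_model");
//     }
//     return 0;
//   }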
}

namespace ros
{
namespace message_traits
{
template<class ContainerAllocator>
struct MD5Sum< ::articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> > {
static const char* value()
{
return "41219d340df4fefcab2da62ad6d15697";
}

static const char* value(const ::articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> &) { return value(); }
static const uint64_t static_value1 = 0x41219d340df4fefcULL;
static const uint64_t static_value2 = 0xab2da62ad6d15697ULL;
};

template<class ContainerAllocator>
struct DataType< ::articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> > {
static const char* value()
{
return "articulation_msgs/AlignModelSrvRequest";
}

static const char* value(const ::articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct Definition< ::articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> > {
static const char* value()
{
return "\n\
\n\
\n\
\n\
\n\
\n\
\n\
articulation_msgs/ModelMsg model\n\
articulation_msgs/ModelMsg data\n\
\n\
================================================================================\n\
MSG: articulation_msgs/ModelMsg\n\
# Single kinematic model\n\
#\n\
# A kinematic model is defined by its model class name, and a set of parameters. \n\
# The client may additionally specify a model id, that can be used to colorize the\n\
# model when visualized using the RVIZ model display.\n\
# \n\
# For a list of currently implemented models, see the documetation at\n\
# http://www.ros.org/wiki/articulation_models\n\
#\n\
#\n\
\n\
Header header # frame and timestamp\n\
\n\
int32 id # user specified model id\n\
string name # name of the model family (e.g. \"rotational\",\n\
# \"prismatic\", \"pca-gp\", \"rigid\")\n\
articulation_msgs/ParamMsg[] params # model parameters\n\
articulation_msgs/TrackMsg track # trajectory from which the model is estimated, or\n\
# that is evaluated using the model\n\
\n\
================================================================================\n\
MSG: std_msgs/Header\n\
# Standard metadata for higher-level stamped data types.\n\
# This is generally used to communicate timestamped data \n\
# in a particular coordinate frame.\n\
# \n\
# sequence ID: consecutively increasing ID \n\
uint32 seq\n\
#Two-integer timestamp that is expressed as:\n\
# * stamp.secs: seconds (stamp_secs) since epoch\n\
# * stamp.nsecs: nanoseconds since stamp_secs\n\
# time-handling sugar is provided by the client library\n\
time stamp\n\
#Frame this data is associated with\n\
# 0: no frame\n\
# 1: global frame\n\
string frame_id\n\
\n\
================================================================================\n\
MSG: articulation_msgs/ParamMsg\n\
# Single parameter passed to or from model fitting\n\
#\n\
# This mechanism allows to flexibly pass parameters to \n\
# model fitting (and vice versa). Note that these parameters \n\
# are model-specific: A client may supply additional\n\
# parameters to the model estimator, and, similarly, a estimator\n\
# may add the estimated model parameters to the model message.\n\
# When the model is then evaluated, for example to make predictions\n\
# or to compute the likelihood, the model class can then use\n\
# these parameters.\n\
#\n\
# A parameter has a name, a value, and a type. The type globally\n\
# indicates whether it is a prior parameter (prior to model fitting),\n\
# or a model parameter (found during model fitting, using a maximum-\n\
# likelihood estimator), or a cached evaluation (e.g., the likelihood\n\
# or the BIC are a typical \"side\"-product of model estimation, and\n\
# can therefore already be cached).\n\
#\n\
# For a list of currently used parameters, see the documentation at\n\
# http://www.ros.org/wiki/articulation_models\n\
#\n\
\n\
uint8 PRIOR=0 # indicates a prior model parameter \n\
# (e.g., \"sigma_position\")\n\
uint8 PARAM=1 # indicates a estimated model parameter \n\
# (e.g., \"rot_radius\", the estimated radius)\n\
uint8 EVAL=2 # indicates a cached evaluation of the model, given \n\
# the current trajectory\n\
# (e.g., \"loglikelihood\", the log likelihood of the\n\
# data, given the model and its parameters)\n\
\n\
string name # name of the parameter\n\
float64 value # value of the parameter\n\
uint8 type # type of the parameter (PRIOR, PARAM, EVAL)\n\
\n\
\n\
================================================================================\n\
MSG: articulation_msgs/TrackMsg\n\
# Single kinematic trajectory\n\
#\n\
# This message contains a kinematic trajectory. The trajectory is specified\n\
# as a vector of 6D poses. An additional flag, track_type, indicates whether\n\
# the track is valid, and whether it includes orientation. The track id\n\
# can be used for automatic coloring in the RVIZ track plugin, and can be \n\
# freely chosen by the client. \n\
#\n\
# A model is fitting only from the trajectory stored in the pose[]-vector. \n\
# Additional information may be associated to each pose using the channels\n\
# vector, with arbitrary # fields (e.g., to include applied/measured forces). \n\
#\n\
# After model evaluation,\n\
# also the associated configurations of the object are stored in the channels,\n\
# named \"q[0]\"..\"q[DOF-1]\", where DOF is the number of degrees of freedom.\n\
# Model evaluation also projects the poses in the pose vector onto the model,\n\
# and stores these ideal poses in the vector pose_projected. Further, during model\n\
# evaluation, a new set of (uniform) configurations over the valid configuration\n\
# range is sampled, and the result is stored in pose_resampled.\n\
# The vector pose_flags contains additional display flags for the poses in the\n\
# pose vector, for example, whether a pose is visible and/or\n\
# the end of a trajectory segment. At the moment, this is only used by the\n\
# prior_model_learner.\n\
#\n\
\n\
Header header # Timestamp and frame\n\
int32 id # used-specified track id\n\
\n\
geometry_msgs/Pose[] pose # sequence of poses, defining the observed trajectory\n\
geometry_msgs/Pose[] pose_projected # sequence of poses, projected to the model \n\
# (after model evaluation)\n\
geometry_msgs/Pose[] pose_resampled # sequence of poses, re-sampled from the model in\n\
# the valid configuration range\n\
uint32[] pose_flags # bit-wise combination of POSE_VISIBLE and POSE_END_OF_SEGMENT\n\
\n\
uint32 POSE_VISIBLE=1\n\
uint32 POSE_END_OF_SEGMENT=2\n\
\n\
# Each channel should have the same number of elements as pose array, \n\
# and the data in each channel should correspond 1:1 with each pose\n\
# possible channels: \"width\", \"height\", \"rgb\", ...\n\
sensor_msgs/ChannelFloat32[] channels \n\
\n\
\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Pose\n\
# A representation of pose in free space, composed of postion and orientation. \n\
Point position\n\
Quaternion orientation\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Point\n\
# This contains the position of a point in free space\n\
float64 x\n\
float64 y\n\
float64 z\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Quaternion\n\
# This represents an orientation in free space in quaternion form.\n\
\n\
float64 x\n\
float64 y\n\
float64 z\n\
float64 w\n\
\n\
================================================================================\n\
MSG: sensor_msgs/ChannelFloat32\n\
# This message is used by the PointCloud message to hold optional data\n\
# associated with each point in the cloud. The length of the values\n\
# array should be the same as the length of the points array in the\n\
# PointCloud, and each value should be associated with the corresponding\n\
# point.\n\
\n\
# Channel names in existing practice include:\n\
# \"u\", \"v\" - row and column (respectively) in the left stereo image.\n\
# This is opposite to usual conventions but remains for\n\
# historical reasons. The newer PointCloud2 message has no\n\
# such problem.\n\
# \"rgb\" - For point clouds produced by color stereo cameras. uint8\n\
# (R,G,B) values packed into the least significant 24 bits,\n\
# in order.\n\
# \"intensity\" - laser or pixel intensity.\n\
# \"distance\"\n\
\n\
# The channel name should give semantics of the channel (e.g.\n\
# \"intensity\" instead of \"value\").\n\
string name\n\
\n\
# The values array should be 1-1 with the elements of the associated\n\
# PointCloud.\n\
float32[] values\n\
\n\
";
}

static const char* value(const ::articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> &) { return value(); }
};

}
}


namespace ros
{
namespace message_traits
{
template<class ContainerAllocator>
struct MD5Sum< ::articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> > {
static const char* value()
{
return "bf4815f3efa4d7f5c033d2485524a4e6";
}

static const char* value(const ::articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> &) { return value(); }
static const uint64_t static_value1 = 0xbf4815f3efa4d7f5ULL;
static const uint64_t static_value2 = 0xc033d2485524a4e6ULL;
};

template<class ContainerAllocator>
struct DataType< ::articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> > {
static const char* value()
{
return "articulation_msgs/AlignModelSrvResponse";
}

static const char* value(const ::articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct Definition< ::articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> > {
static const char* value()
{
return "articulation_msgs/ModelMsg model_aligned\n\
\n\
\n\
articulation_msgs/ModelMsg data_aligned\n\
\n\
\n\
float64[9] R\n\
float64[3] T\n\
float64 dist_rot\n\
float32 dist_trans\n\
\n\
\n\
\n\
================================================================================\n\
MSG: articulation_msgs/ModelMsg\n\
# Single kinematic model\n\
#\n\
# A kinematic model is defined by its model class name, and a set of parameters. \n\
# The client may additionally specify a model id, that can be used to colorize the\n\
# model when visualized using the RVIZ model display.\n\
# \n\
# For a list of currently implemented models, see the documetation at\n\
# http://www.ros.org/wiki/articulation_models\n\
#\n\
#\n\
\n\
Header header # frame and timestamp\n\
\n\
int32 id # user specified model id\n\
string name # name of the model family (e.g. \"rotational\",\n\
# \"prismatic\", \"pca-gp\", \"rigid\")\n\
articulation_msgs/ParamMsg[] params # model parameters\n\
articulation_msgs/TrackMsg track # trajectory from which the model is estimated, or\n\
# that is evaluated using the model\n\
\n\
================================================================================\n\
MSG: std_msgs/Header\n\
# Standard metadata for higher-level stamped data types.\n\
# This is generally used to communicate timestamped data \n\
# in a particular coordinate frame.\n\
# \n\
# sequence ID: consecutively increasing ID \n\
uint32 seq\n\
#Two-integer timestamp that is expressed as:\n\
# * stamp.secs: seconds (stamp_secs) since epoch\n\
# * stamp.nsecs: nanoseconds since stamp_secs\n\
# time-handling sugar is provided by the client library\n\
time stamp\n\
#Frame this data is associated with\n\
# 0: no frame\n\
# 1: global frame\n\
string frame_id\n\
\n\
================================================================================\n\
MSG: articulation_msgs/ParamMsg\n\
# Single parameter passed to or from model fitting\n\
#\n\
# This mechanism allows to flexibly pass parameters to \n\
# model fitting (and vice versa). Note that these parameters \n\
# are model-specific: A client may supply additional\n\
# parameters to the model estimator, and, similarly, a estimator\n\
# may add the estimated model parameters to the model message.\n\
# When the model is then evaluated, for example to make predictions\n\
# or to compute the likelihood, the model class can then use\n\
# these parameters.\n\
#\n\
# A parameter has a name, a value, and a type. The type globally\n\
# indicates whether it is a prior parameter (prior to model fitting),\n\
# or a model parameter (found during model fitting, using a maximum-\n\
# likelihood estimator), or a cached evaluation (e.g., the likelihood\n\
# or the BIC are a typical \"side\"-product of model estimation, and\n\
# can therefore already be cached).\n\
#\n\
# For a list of currently used parameters, see the documentation at\n\
# http://www.ros.org/wiki/articulation_models\n\
#\n\
\n\
uint8 PRIOR=0 # indicates a prior model parameter \n\
# (e.g., \"sigma_position\")\n\
uint8 PARAM=1 # indicates a estimated model parameter \n\
# (e.g., \"rot_radius\", the estimated radius)\n\
uint8 EVAL=2 # indicates a cached evaluation of the model, given \n\
# the current trajectory\n\
# (e.g., \"loglikelihood\", the log likelihood of the\n\
# data, given the model and its parameters)\n\
\n\
string name # name of the parameter\n\
float64 value # value of the parameter\n\
uint8 type # type of the parameter (PRIOR, PARAM, EVAL)\n\
\n\
\n\
================================================================================\n\
MSG: articulation_msgs/TrackMsg\n\
# Single kinematic trajectory\n\
#\n\
# This message contains a kinematic trajectory. The trajectory is specified\n\
# as a vector of 6D poses. An additional flag, track_type, indicates whether\n\
# the track is valid, and whether it includes orientation. The track id\n\
# can be used for automatic coloring in the RVIZ track plugin, and can be \n\
# freely chosen by the client. \n\
#\n\
# A model is fitting only from the trajectory stored in the pose[]-vector. \n\
# Additional information may be associated to each pose using the channels\n\
# vector, with arbitrary # fields (e.g., to include applied/measured forces). \n\
#\n\
# After model evaluation,\n\
# also the associated configurations of the object are stored in the channels,\n\
# named \"q[0]\"..\"q[DOF-1]\", where DOF is the number of degrees of freedom.\n\
# Model evaluation also projects the poses in the pose vector onto the model,\n\
# and stores these ideal poses in the vector pose_projected. Further, during model\n\
# evaluation, a new set of (uniform) configurations over the valid configuration\n\
# range is sampled, and the result is stored in pose_resampled.\n\
# The vector pose_flags contains additional display flags for the poses in the\n\
# pose vector, for example, whether a pose is visible and/or\n\
# the end of a trajectory segment. At the moment, this is only used by the\n\
# prior_model_learner.\n\
#\n\
\n\
Header header # Timestamp and frame\n\
int32 id # used-specified track id\n\
\n\
geometry_msgs/Pose[] pose # sequence of poses, defining the observed trajectory\n\
geometry_msgs/Pose[] pose_projected # sequence of poses, projected to the model \n\
# (after model evaluation)\n\
geometry_msgs/Pose[] pose_resampled # sequence of poses, re-sampled from the model in\n\
# the valid configuration range\n\
uint32[] pose_flags # bit-wise combination of POSE_VISIBLE and POSE_END_OF_SEGMENT\n\
\n\
uint32 POSE_VISIBLE=1\n\
uint32 POSE_END_OF_SEGMENT=2\n\
\n\
# Each channel should have the same number of elements as pose array, \n\
# and the data in each channel should correspond 1:1 with each pose\n\
# possible channels: \"width\", \"height\", \"rgb\", ...\n\
sensor_msgs/ChannelFloat32[] channels \n\
\n\
\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Pose\n\
# A representation of pose in free space, composed of postion and orientation. \n\
Point position\n\
Quaternion orientation\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Point\n\
# This contains the position of a point in free space\n\
float64 x\n\
float64 y\n\
float64 z\n\
\n\
================================================================================\n\
MSG: geometry_msgs/Quaternion\n\
# This represents an orientation in free space in quaternion form.\n\
\n\
float64 x\n\
float64 y\n\
float64 z\n\
float64 w\n\
\n\
================================================================================\n\
MSG: sensor_msgs/ChannelFloat32\n\
# This message is used by the PointCloud message to hold optional data\n\
# associated with each point in the cloud. The length of the values\n\
# array should be the same as the length of the points array in the\n\
# PointCloud, and each value should be associated with the corresponding\n\
# point.\n\
\n\
# Channel names in existing practice include:\n\
# \"u\", \"v\" - row and column (respectively) in the left stereo image.\n\
# This is opposite to usual conventions but remains for\n\
# historical reasons. The newer PointCloud2 message has no\n\
# such problem.\n\
# \"rgb\" - For point clouds produced by color stereo cameras. uint8\n\
# (R,G,B) values packed into the least significant 24 bits,\n\
# in order.\n\
# \"intensity\" - laser or pixel intensity.\n\
# \"distance\"\n\
\n\
# The channel name should give semantics of the channel (e.g.\n\
# \"intensity\" instead of \"value\").\n\
string name\n\
\n\
# The values array should be 1-1 with the elements of the associated\n\
# PointCloud.\n\
float32[] values\n\
\n\
";
}

static const char* value(const ::articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> &) { return value(); }
};

}
}

namespace ros
{
namespace serialization
{

template<class ContainerAllocator> struct Serializer< ::articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> >
{
template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)
{
stream.next(m.model);
stream.next(m.data);
}

ROS_DECLARE_ALLINONE_SERIALIZER;
};
}
}


namespace ros
{
namespace serialization
{

template<class ContainerAllocator> struct Serializer< ::articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> >
{
template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)
{
stream.next(m.model_aligned);
stream.next(m.data_aligned);
stream.next(m.R);
stream.next(m.T);
stream.next(m.dist_rot);
stream.next(m.dist_trans);
}

ROS_DECLARE_ALLINONE_SERIALIZER;
};
}
}
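
// Illustrative sketch (not part of the generated code): with the serializers
// above, the request/response types can be serialized like any other message,
// for example to inspect the size of the serialized buffer.
//
//   #include <ros/serialization.h>
//
//   articulation_msgs::AlignModelSrvRequest req;
//   ros::SerializedMessage m = ros::serialization::serializeMessage(req);
//   size_t buffer_bytes = m.num_bytes;  // size of the serialized buffer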

namespace ros
{
namespace service_traits
{
template<>
struct MD5Sum<articulation_msgs::AlignModelSrv> {
static const char* value()
{
return "24f8639d9e5b811cbb8a46156e9ac900";
}

static const char* value(const articulation_msgs::AlignModelSrv&) { return value(); }
};

template<>
struct DataType<articulation_msgs::AlignModelSrv> {
static const char* value()
{
return "articulation_msgs/AlignModelSrv";
}

static const char* value(const articulation_msgs::AlignModelSrv&) { return value(); }
};

template<class ContainerAllocator>
struct MD5Sum<articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> > {
static const char* value()
{
return "24f8639d9e5b811cbb8a46156e9ac900";
}

static const char* value(const articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct DataType<articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> > {
static const char* value()
{
return "articulation_msgs/AlignModelSrv";
}

static const char* value(const articulation_msgs::AlignModelSrvRequest_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct MD5Sum<articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> > {
static const char* value()
{
return "24f8639d9e5b811cbb8a46156e9ac900";
}

static const char* value(const articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct DataType<articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> > {
static const char* value()
{
return "articulation_msgs/AlignModelSrv";
}

static const char* value(const articulation_msgs::AlignModelSrvResponse_<ContainerAllocator> &) { return value(); }
};

}
}
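
// Illustrative sketch (not part of the generated code): the service_traits
// specializations above are what ros::service::call consults to check the
// service md5sum and type at call time. The service name is again assumed.
//
//   #include <ros/service.h>
//
//   articulation_msgs::AlignModelSrv srv;
//   bool ok = ros::service::call("align_model", srv);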

#endif // ARTICULATION_MSGS_SERVICE_ALIGNMODELSRV_H
