/* Auto-generated by genmsg_cpp for file /home/rosbuild/hudson/workspace/doc-electric-common_msgs/doc_stacks/2013-03-01_14-58-52.505545/common_msgs/sensor_msgs/srv/SetCameraInfo.srv */
#ifndef SENSOR_MSGS_SERVICE_SETCAMERAINFO_H
#define SENSOR_MSGS_SERVICE_SETCAMERAINFO_H
#include <string>
#include <vector>
#include <map>
#include <ostream>
#include "ros/serialization.h"
#include "ros/builtin_message_traits.h"
#include "ros/message_operations.h"
#include "ros/time.h"

#include "ros/macros.h"

#include "ros/assert.h"

#include "ros/service_traits.h"

#include "sensor_msgs/CameraInfo.h"



namespace sensor_msgs
{
template <class ContainerAllocator>
struct SetCameraInfoRequest_ {
  typedef SetCameraInfoRequest_<ContainerAllocator> Type;

  SetCameraInfoRequest_()
  : camera_info()
  {
  }

  SetCameraInfoRequest_(const ContainerAllocator& _alloc)
  : camera_info(_alloc)
  {
  }

  typedef ::sensor_msgs::CameraInfo_<ContainerAllocator> _camera_info_type;
  ::sensor_msgs::CameraInfo_<ContainerAllocator> camera_info;


private:
  static const char* __s_getDataType_() { return "sensor_msgs/SetCameraInfoRequest"; }
public:
  ROS_DEPRECATED static const std::string __s_getDataType() { return __s_getDataType_(); }

  ROS_DEPRECATED const std::string __getDataType() const { return __s_getDataType_(); }

private:
  static const char* __s_getMD5Sum_() { return "ee34be01fdeee563d0d99cd594d5581d"; }
public:
  ROS_DEPRECATED static const std::string __s_getMD5Sum() { return __s_getMD5Sum_(); }

  ROS_DEPRECATED const std::string __getMD5Sum() const { return __s_getMD5Sum_(); }

private:
  static const char* __s_getServerMD5Sum_() { return "bef1df590ed75ed1f393692395e15482"; }
public:
  ROS_DEPRECATED static const std::string __s_getServerMD5Sum() { return __s_getServerMD5Sum_(); }

  ROS_DEPRECATED const std::string __getServerMD5Sum() const { return __s_getServerMD5Sum_(); }

private:
  static const char* __s_getMessageDefinition_() { return "\n\
\n\
\n\
\n\
\n\
\n\
\n\
\n\
sensor_msgs/CameraInfo camera_info\n\
\n\
================================================================================\n\
MSG: sensor_msgs/CameraInfo\n\
# This message defines meta information for a camera. It should be in a\n\
# camera namespace on topic \"camera_info\" and accompanied by up to five\n\
# image topics named:\n\
#\n\
# image_raw - raw data from the camera driver, possibly Bayer encoded\n\
# image - monochrome, distorted\n\
# image_color - color, distorted\n\
# image_rect - monochrome, rectified\n\
# image_rect_color - color, rectified\n\
#\n\
# The image_pipeline contains packages (image_proc, stereo_image_proc)\n\
# for producing the four processed image topics from image_raw and\n\
# camera_info. The meaning of the camera parameters are described in\n\
# detail at http://www.ros.org/wiki/image_pipeline/CameraInfo.\n\
#\n\
# The image_geometry package provides a user-friendly interface to\n\
# common operations using this meta information. If you want to, e.g.,\n\
# project a 3d point into image coordinates, we strongly recommend\n\
# using image_geometry.\n\
#\n\
# If the camera is uncalibrated, the matrices D, K, R, P should be left\n\
# zeroed out. In particular, clients may assume that K[0] == 0.0\n\
# indicates an uncalibrated camera.\n\
\n\
#######################################################################\n\
# Image acquisition info #\n\
#######################################################################\n\
\n\
# Time of image acquisition, camera coordinate frame ID\n\
Header header # Header timestamp should be acquisition time of image\n\
# Header frame_id should be optical frame of camera\n\
# origin of frame should be optical center of camera\n\
# +x should point to the right in the image\n\
# +y should point down in the image\n\
# +z should point into the plane of the image\n\
\n\
\n\
#######################################################################\n\
# Calibration Parameters #\n\
#######################################################################\n\
# These are fixed during camera calibration. Their values will be the #\n\
# same in all messages until the camera is recalibrated. Note that #\n\
# self-calibrating systems may \"recalibrate\" frequently. #\n\
# #\n\
# The internal parameters can be used to warp a raw (distorted) image #\n\
# to: #\n\
# 1. An undistorted image (requires D and K) #\n\
# 2. A rectified image (requires D, K, R) #\n\
# The projection matrix P projects 3D points into the rectified image.#\n\
#######################################################################\n\
\n\
# The image dimensions with which the camera was calibrated. Normally\n\
# this will be the full camera resolution in pixels.\n\
uint32 height\n\
uint32 width\n\
\n\
# The distortion model used. Supported models are listed in\n\
# sensor_msgs/distortion_models.h. For most cameras, \"plumb_bob\" - a\n\
# simple model of radial and tangential distortion - is sufficent.\n\
string distortion_model\n\
\n\
# The distortion parameters, size depending on the distortion model.\n\
# For \"plumb_bob\", the 5 parameters are: (k1, k2, t1, t2, k3).\n\
float64[] D\n\
\n\
# Intrinsic camera matrix for the raw (distorted) images.\n\
# [fx 0 cx]\n\
# K = [ 0 fy cy]\n\
# [ 0 0 1]\n\
# Projects 3D points in the camera coordinate frame to 2D pixel\n\
# coordinates using the focal lengths (fx, fy) and principal point\n\
# (cx, cy).\n\
float64[9] K # 3x3 row-major matrix\n\
\n\
# Rectification matrix (stereo cameras only)\n\
# A rotation matrix aligning the camera coordinate system to the ideal\n\
# stereo image plane so that epipolar lines in both stereo images are\n\
# parallel.\n\
float64[9] R # 3x3 row-major matrix\n\
\n\
# Projection/camera matrix\n\
# [fx' 0 cx' Tx]\n\
# P = [ 0 fy' cy' Ty]\n\
# [ 0 0 1 0]\n\
# By convention, this matrix specifies the intrinsic (camera) matrix\n\
# of the processed (rectified) image. That is, the left 3x3 portion\n\
# is the normal camera intrinsic matrix for the rectified image.\n\
# It projects 3D points in the camera coordinate frame to 2D pixel\n\
# coordinates using the focal lengths (fx', fy') and principal point\n\
# (cx', cy') - these may differ from the values in K.\n\
# For monocular cameras, Tx = Ty = 0. Normally, monocular cameras will\n\
# also have R = the identity and P[1:3,1:3] = K.\n\
# For a stereo pair, the fourth column [Tx Ty 0]' is related to the\n\
# position of the optical center of the second camera in the first\n\
# camera's frame. We assume Tz = 0 so both cameras are in the same\n\
# stereo image plane. The first camera always has Tx = Ty = 0. For\n\
# the right (second) camera of a horizontal stereo pair, Ty = 0 and\n\
# Tx = -fx' * B, where B is the baseline between the cameras.\n\
# Given a 3D point [X Y Z]', the projection (x, y) of the point onto\n\
# the rectified image is given by:\n\
# [u v w]' = P * [X Y Z 1]'\n\
# x = u / w\n\
# y = v / w\n\
# This holds for both images of a stereo pair.\n\
float64[12] P # 3x4 row-major matrix\n\
\n\
\n\
#######################################################################\n\
# Operational Parameters #\n\
#######################################################################\n\
# These define the image region actually captured by the camera #\n\
# driver. Although they affect the geometry of the output image, they #\n\
# may be changed freely without recalibrating the camera. #\n\
#######################################################################\n\
\n\
# Binning refers here to any camera setting which combines rectangular\n\
# neighborhoods of pixels into larger \"super-pixels.\" It reduces the\n\
# resolution of the output image to\n\
# (width / binning_x) x (height / binning_y).\n\
# The default values binning_x = binning_y = 0 is considered the same\n\
# as binning_x = binning_y = 1 (no subsampling).\n\
uint32 binning_x\n\
uint32 binning_y\n\
\n\
# Region of interest (subwindow of full camera resolution), given in\n\
# full resolution (unbinned) image coordinates. A particular ROI\n\
# always denotes the same window of pixels on the camera sensor,\n\
# regardless of binning settings.\n\
# The default setting of roi (all values 0) is considered the same as\n\
# full resolution (roi.width = width, roi.height = height).\n\
RegionOfInterest roi\n\
\n\
================================================================================\n\
MSG: std_msgs/Header\n\
# Standard metadata for higher-level stamped data types.\n\
# This is generally used to communicate timestamped data \n\
# in a particular coordinate frame.\n\
# \n\
# sequence ID: consecutively increasing ID \n\
uint32 seq\n\
#Two-integer timestamp that is expressed as:\n\
# * stamp.secs: seconds (stamp_secs) since epoch\n\
# * stamp.nsecs: nanoseconds since stamp_secs\n\
# time-handling sugar is provided by the client library\n\
time stamp\n\
#Frame this data is associated with\n\
# 0: no frame\n\
# 1: global frame\n\
string frame_id\n\
\n\
================================================================================\n\
MSG: sensor_msgs/RegionOfInterest\n\
# This message is used to specify a region of interest within an image.\n\
#\n\
# When used to specify the ROI setting of the camera when the image was\n\
# taken, the height and width fields should either match the height and\n\
# width fields for the associated image; or height = width = 0\n\
# indicates that the full resolution image was captured.\n\
\n\
uint32 x_offset # Leftmost pixel of the ROI\n\
# (0 if the ROI includes the left edge of the image)\n\
uint32 y_offset # Topmost pixel of the ROI\n\
# (0 if the ROI includes the top edge of the image)\n\
uint32 height # Height of ROI\n\
uint32 width # Width of ROI\n\
\n\
# True if a distinct rectified ROI should be calculated from the \"raw\"\n\
# ROI in this message. Typically this should be False if the full image\n\
# is captured (ROI not used), and True if a subwindow is captured (ROI\n\
# used).\n\
bool do_rectify\n\
\n\
"; }
public:
  ROS_DEPRECATED static const std::string __s_getMessageDefinition() { return __s_getMessageDefinition_(); }

  ROS_DEPRECATED const std::string __getMessageDefinition() const { return __s_getMessageDefinition_(); }

  ROS_DEPRECATED virtual uint8_t *serialize(uint8_t *write_ptr, uint32_t seq) const
  {
    ros::serialization::OStream stream(write_ptr, 1000000000);
    ros::serialization::serialize(stream, camera_info);
    return stream.getData();
  }

  ROS_DEPRECATED virtual uint8_t *deserialize(uint8_t *read_ptr)
  {
    ros::serialization::IStream stream(read_ptr, 1000000000);
    ros::serialization::deserialize(stream, camera_info);
    return stream.getData();
  }

  ROS_DEPRECATED virtual uint32_t serializationLength() const
  {
    uint32_t size = 0;
    size += ros::serialization::serializationLength(camera_info);
    return size;
  }

  typedef boost::shared_ptr< ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> > Ptr;
  typedef boost::shared_ptr< ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> const> ConstPtr;
  boost::shared_ptr<std::map<std::string, std::string> > __connection_header;
}; // struct SetCameraInfoRequest
typedef ::sensor_msgs::SetCameraInfoRequest_<std::allocator<void> > SetCameraInfoRequest;

typedef boost::shared_ptr< ::sensor_msgs::SetCameraInfoRequest> SetCameraInfoRequestPtr;
typedef boost::shared_ptr< ::sensor_msgs::SetCameraInfoRequest const> SetCameraInfoRequestConstPtr;


template <class ContainerAllocator>
struct SetCameraInfoResponse_ {
  typedef SetCameraInfoResponse_<ContainerAllocator> Type;

  SetCameraInfoResponse_()
  : success(false)
  , status_message()
  {
  }

  SetCameraInfoResponse_(const ContainerAllocator& _alloc)
  : success(false)
  , status_message(_alloc)
  {
  }

  typedef uint8_t _success_type;
  uint8_t success;

  typedef std::basic_string<char, std::char_traits<char>, typename ContainerAllocator::template rebind<char>::other > _status_message_type;
  std::basic_string<char, std::char_traits<char>, typename ContainerAllocator::template rebind<char>::other > status_message;


private:
  static const char* __s_getDataType_() { return "sensor_msgs/SetCameraInfoResponse"; }
public:
  ROS_DEPRECATED static const std::string __s_getDataType() { return __s_getDataType_(); }

  ROS_DEPRECATED const std::string __getDataType() const { return __s_getDataType_(); }

private:
  static const char* __s_getMD5Sum_() { return "2ec6f3eff0161f4257b808b12bc830c2"; }
public:
  ROS_DEPRECATED static const std::string __s_getMD5Sum() { return __s_getMD5Sum_(); }

  ROS_DEPRECATED const std::string __getMD5Sum() const { return __s_getMD5Sum_(); }

private:
  static const char* __s_getServerMD5Sum_() { return "bef1df590ed75ed1f393692395e15482"; }
public:
  ROS_DEPRECATED static const std::string __s_getServerMD5Sum() { return __s_getServerMD5Sum_(); }

  ROS_DEPRECATED const std::string __getServerMD5Sum() const { return __s_getServerMD5Sum_(); }

private:
  static const char* __s_getMessageDefinition_() { return "bool success\n\
string status_message\n\
\n\
\n\
"; }
public:
  ROS_DEPRECATED static const std::string __s_getMessageDefinition() { return __s_getMessageDefinition_(); }

  ROS_DEPRECATED const std::string __getMessageDefinition() const { return __s_getMessageDefinition_(); }

  ROS_DEPRECATED virtual uint8_t *serialize(uint8_t *write_ptr, uint32_t seq) const
  {
    ros::serialization::OStream stream(write_ptr, 1000000000);
    ros::serialization::serialize(stream, success);
    ros::serialization::serialize(stream, status_message);
    return stream.getData();
  }

  ROS_DEPRECATED virtual uint8_t *deserialize(uint8_t *read_ptr)
  {
    ros::serialization::IStream stream(read_ptr, 1000000000);
    ros::serialization::deserialize(stream, success);
    ros::serialization::deserialize(stream, status_message);
    return stream.getData();
  }

  ROS_DEPRECATED virtual uint32_t serializationLength() const
  {
    uint32_t size = 0;
    size += ros::serialization::serializationLength(success);
    size += ros::serialization::serializationLength(status_message);
    return size;
  }

  typedef boost::shared_ptr< ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> > Ptr;
  typedef boost::shared_ptr< ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> const> ConstPtr;
  boost::shared_ptr<std::map<std::string, std::string> > __connection_header;
}; // struct SetCameraInfoResponse
typedef ::sensor_msgs::SetCameraInfoResponse_<std::allocator<void> > SetCameraInfoResponse;

typedef boost::shared_ptr< ::sensor_msgs::SetCameraInfoResponse> SetCameraInfoResponsePtr;
typedef boost::shared_ptr< ::sensor_msgs::SetCameraInfoResponse const> SetCameraInfoResponseConstPtr;

struct SetCameraInfo
{

  typedef SetCameraInfoRequest Request;
  typedef SetCameraInfoResponse Response;
  Request request;
  Response response;

  typedef Request RequestType;
  typedef Response ResponseType;
}; // struct SetCameraInfo
} // namespace sensor_msgs

namespace ros
{
namespace message_traits
{
template<class ContainerAllocator> struct IsMessage< ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> > : public TrueType {};
template<class ContainerAllocator> struct IsMessage< ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> const> : public TrueType {};
template<class ContainerAllocator>
struct MD5Sum< ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> > {
  static const char* value()
  {
    return "ee34be01fdeee563d0d99cd594d5581d";
  }

  static const char* value(const ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> &) { return value(); }
  static const uint64_t static_value1 = 0xee34be01fdeee563ULL;
  static const uint64_t static_value2 = 0xd0d99cd594d5581dULL;
};

template<class ContainerAllocator>
struct DataType< ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> > {
  static const char* value()
  {
    return "sensor_msgs/SetCameraInfoRequest";
  }

  static const char* value(const ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct Definition< ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> > {
  static const char* value()
  {
    return "\n\
\n\
\n\
\n\
\n\
\n\
\n\
\n\
sensor_msgs/CameraInfo camera_info\n\
\n\
================================================================================\n\
MSG: sensor_msgs/CameraInfo\n\
# This message defines meta information for a camera. It should be in a\n\
# camera namespace on topic \"camera_info\" and accompanied by up to five\n\
# image topics named:\n\
#\n\
# image_raw - raw data from the camera driver, possibly Bayer encoded\n\
# image - monochrome, distorted\n\
# image_color - color, distorted\n\
# image_rect - monochrome, rectified\n\
# image_rect_color - color, rectified\n\
#\n\
# The image_pipeline contains packages (image_proc, stereo_image_proc)\n\
# for producing the four processed image topics from image_raw and\n\
# camera_info. The meaning of the camera parameters are described in\n\
# detail at http://www.ros.org/wiki/image_pipeline/CameraInfo.\n\
#\n\
# The image_geometry package provides a user-friendly interface to\n\
# common operations using this meta information. If you want to, e.g.,\n\
# project a 3d point into image coordinates, we strongly recommend\n\
# using image_geometry.\n\
#\n\
# If the camera is uncalibrated, the matrices D, K, R, P should be left\n\
# zeroed out. In particular, clients may assume that K[0] == 0.0\n\
# indicates an uncalibrated camera.\n\
\n\
#######################################################################\n\
# Image acquisition info #\n\
#######################################################################\n\
\n\
# Time of image acquisition, camera coordinate frame ID\n\
Header header # Header timestamp should be acquisition time of image\n\
# Header frame_id should be optical frame of camera\n\
# origin of frame should be optical center of camera\n\
# +x should point to the right in the image\n\
# +y should point down in the image\n\
# +z should point into the plane of the image\n\
\n\
\n\
#######################################################################\n\
# Calibration Parameters #\n\
#######################################################################\n\
# These are fixed during camera calibration. Their values will be the #\n\
# same in all messages until the camera is recalibrated. Note that #\n\
# self-calibrating systems may \"recalibrate\" frequently. #\n\
# #\n\
# The internal parameters can be used to warp a raw (distorted) image #\n\
# to: #\n\
# 1. An undistorted image (requires D and K) #\n\
# 2. A rectified image (requires D, K, R) #\n\
# The projection matrix P projects 3D points into the rectified image.#\n\
#######################################################################\n\
\n\
# The image dimensions with which the camera was calibrated. Normally\n\
# this will be the full camera resolution in pixels.\n\
uint32 height\n\
uint32 width\n\
\n\
# The distortion model used. Supported models are listed in\n\
# sensor_msgs/distortion_models.h. For most cameras, \"plumb_bob\" - a\n\
# simple model of radial and tangential distortion - is sufficent.\n\
string distortion_model\n\
\n\
# The distortion parameters, size depending on the distortion model.\n\
# For \"plumb_bob\", the 5 parameters are: (k1, k2, t1, t2, k3).\n\
float64[] D\n\
\n\
# Intrinsic camera matrix for the raw (distorted) images.\n\
# [fx 0 cx]\n\
# K = [ 0 fy cy]\n\
# [ 0 0 1]\n\
# Projects 3D points in the camera coordinate frame to 2D pixel\n\
# coordinates using the focal lengths (fx, fy) and principal point\n\
# (cx, cy).\n\
float64[9] K # 3x3 row-major matrix\n\
\n\
# Rectification matrix (stereo cameras only)\n\
# A rotation matrix aligning the camera coordinate system to the ideal\n\
# stereo image plane so that epipolar lines in both stereo images are\n\
# parallel.\n\
float64[9] R # 3x3 row-major matrix\n\
\n\
# Projection/camera matrix\n\
# [fx' 0 cx' Tx]\n\
# P = [ 0 fy' cy' Ty]\n\
# [ 0 0 1 0]\n\
# By convention, this matrix specifies the intrinsic (camera) matrix\n\
# of the processed (rectified) image. That is, the left 3x3 portion\n\
# is the normal camera intrinsic matrix for the rectified image.\n\
# It projects 3D points in the camera coordinate frame to 2D pixel\n\
# coordinates using the focal lengths (fx', fy') and principal point\n\
# (cx', cy') - these may differ from the values in K.\n\
# For monocular cameras, Tx = Ty = 0. Normally, monocular cameras will\n\
# also have R = the identity and P[1:3,1:3] = K.\n\
# For a stereo pair, the fourth column [Tx Ty 0]' is related to the\n\
# position of the optical center of the second camera in the first\n\
# camera's frame. We assume Tz = 0 so both cameras are in the same\n\
# stereo image plane. The first camera always has Tx = Ty = 0. For\n\
# the right (second) camera of a horizontal stereo pair, Ty = 0 and\n\
# Tx = -fx' * B, where B is the baseline between the cameras.\n\
# Given a 3D point [X Y Z]', the projection (x, y) of the point onto\n\
# the rectified image is given by:\n\
# [u v w]' = P * [X Y Z 1]'\n\
# x = u / w\n\
# y = v / w\n\
# This holds for both images of a stereo pair.\n\
float64[12] P # 3x4 row-major matrix\n\
\n\
\n\
#######################################################################\n\
# Operational Parameters #\n\
#######################################################################\n\
# These define the image region actually captured by the camera #\n\
# driver. Although they affect the geometry of the output image, they #\n\
# may be changed freely without recalibrating the camera. #\n\
#######################################################################\n\
\n\
# Binning refers here to any camera setting which combines rectangular\n\
# neighborhoods of pixels into larger \"super-pixels.\" It reduces the\n\
# resolution of the output image to\n\
# (width / binning_x) x (height / binning_y).\n\
# The default values binning_x = binning_y = 0 is considered the same\n\
# as binning_x = binning_y = 1 (no subsampling).\n\
uint32 binning_x\n\
uint32 binning_y\n\
\n\
# Region of interest (subwindow of full camera resolution), given in\n\
# full resolution (unbinned) image coordinates. A particular ROI\n\
# always denotes the same window of pixels on the camera sensor,\n\
# regardless of binning settings.\n\
# The default setting of roi (all values 0) is considered the same as\n\
# full resolution (roi.width = width, roi.height = height).\n\
RegionOfInterest roi\n\
\n\
================================================================================\n\
MSG: std_msgs/Header\n\
# Standard metadata for higher-level stamped data types.\n\
# This is generally used to communicate timestamped data \n\
# in a particular coordinate frame.\n\
# \n\
# sequence ID: consecutively increasing ID \n\
uint32 seq\n\
#Two-integer timestamp that is expressed as:\n\
# * stamp.secs: seconds (stamp_secs) since epoch\n\
# * stamp.nsecs: nanoseconds since stamp_secs\n\
# time-handling sugar is provided by the client library\n\
time stamp\n\
#Frame this data is associated with\n\
# 0: no frame\n\
# 1: global frame\n\
string frame_id\n\
\n\
================================================================================\n\
MSG: sensor_msgs/RegionOfInterest\n\
# This message is used to specify a region of interest within an image.\n\
#\n\
# When used to specify the ROI setting of the camera when the image was\n\
# taken, the height and width fields should either match the height and\n\
# width fields for the associated image; or height = width = 0\n\
# indicates that the full resolution image was captured.\n\
\n\
uint32 x_offset # Leftmost pixel of the ROI\n\
# (0 if the ROI includes the left edge of the image)\n\
uint32 y_offset # Topmost pixel of the ROI\n\
# (0 if the ROI includes the top edge of the image)\n\
uint32 height # Height of ROI\n\
uint32 width # Width of ROI\n\
\n\
# True if a distinct rectified ROI should be calculated from the \"raw\"\n\
# ROI in this message. Typically this should be False if the full image\n\
# is captured (ROI not used), and True if a subwindow is captured (ROI\n\
# used).\n\
bool do_rectify\n\
\n\
";
  }

  static const char* value(const ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> &) { return value(); }
};

} // namespace message_traits
} // namespace ros


namespace ros
{
namespace message_traits
{
template<class ContainerAllocator> struct IsMessage< ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> > : public TrueType {};
template<class ContainerAllocator> struct IsMessage< ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> const> : public TrueType {};
template<class ContainerAllocator>
struct MD5Sum< ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> > {
  static const char* value()
  {
    return "2ec6f3eff0161f4257b808b12bc830c2";
  }

  static const char* value(const ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> &) { return value(); }
  static const uint64_t static_value1 = 0x2ec6f3eff0161f42ULL;
  static const uint64_t static_value2 = 0x57b808b12bc830c2ULL;
};

template<class ContainerAllocator>
struct DataType< ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> > {
  static const char* value()
  {
    return "sensor_msgs/SetCameraInfoResponse";
  }

  static const char* value(const ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct Definition< ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> > {
  static const char* value()
  {
    return "bool success\n\
string status_message\n\
\n\
\n\
";
  }

  static const char* value(const ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> &) { return value(); }
};

} // namespace message_traits
} // namespace ros

namespace ros
{
namespace serialization
{

template<class ContainerAllocator> struct Serializer< ::sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> >
{
  template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)
  {
    stream.next(m.camera_info);
  }

  ROS_DECLARE_ALLINONE_SERIALIZER;
}; // struct SetCameraInfoRequest_
} // namespace serialization
} // namespace ros


namespace ros
{
namespace serialization
{

template<class ContainerAllocator> struct Serializer< ::sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> >
{
  template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)
  {
    stream.next(m.success);
    stream.next(m.status_message);
  }

  ROS_DECLARE_ALLINONE_SERIALIZER;
}; // struct SetCameraInfoResponse_
} // namespace serialization
} // namespace ros

namespace ros
{
namespace service_traits
{
template<>
struct MD5Sum<sensor_msgs::SetCameraInfo> {
  static const char* value()
  {
    return "bef1df590ed75ed1f393692395e15482";
  }

  static const char* value(const sensor_msgs::SetCameraInfo&) { return value(); }
};

template<>
struct DataType<sensor_msgs::SetCameraInfo> {
  static const char* value()
  {
    return "sensor_msgs/SetCameraInfo";
  }

  static const char* value(const sensor_msgs::SetCameraInfo&) { return value(); }
};

template<class ContainerAllocator>
struct MD5Sum<sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> > {
  static const char* value()
  {
    return "bef1df590ed75ed1f393692395e15482";
  }

  static const char* value(const sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct DataType<sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> > {
  static const char* value()
  {
    return "sensor_msgs/SetCameraInfo";
  }

  static const char* value(const sensor_msgs::SetCameraInfoRequest_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct MD5Sum<sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> > {
  static const char* value()
  {
    return "bef1df590ed75ed1f393692395e15482";
  }

  static const char* value(const sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> &) { return value(); }
};

template<class ContainerAllocator>
struct DataType<sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> > {
  static const char* value()
  {
    return "sensor_msgs/SetCameraInfo";
  }

  static const char* value(const sensor_msgs::SetCameraInfoResponse_<ContainerAllocator> &) { return value(); }
};

} // namespace service_traits
} // namespace ros

#endif // SENSOR_MSGS_SERVICE_SETCAMERAINFO_H
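
Usage note (not part of the generated header): the request carries a complete sensor_msgs/CameraInfo and the response reports success plus a human-readable status_message. A minimal roscpp client sketch follows; the node name, the service name "camera/set_camera_info", and the placeholder calibration values are illustrative assumptions, since each camera driver advertises this service under its own namespace and expects a real calibration.

// Sketch only: fills a SetCameraInfo request and calls an assumed
// "camera/set_camera_info" service; the values below are placeholders.
#include <ros/ros.h>
#include <sensor_msgs/SetCameraInfo.h>

int main(int argc, char** argv)
{
  ros::init(argc, argv, "set_camera_info_client");
  ros::NodeHandle nh;

  ros::ServiceClient client =
      nh.serviceClient<sensor_msgs::SetCameraInfo>("camera/set_camera_info");

  sensor_msgs::SetCameraInfo srv;
  srv.request.camera_info.header.stamp = ros::Time::now();
  srv.request.camera_info.header.frame_id = "camera_optical_frame";  // assumed frame
  srv.request.camera_info.width = 640;
  srv.request.camera_info.height = 480;
  srv.request.camera_info.distortion_model = "plumb_bob";
  srv.request.camera_info.D.assign(5, 0.0);  // placeholder distortion coefficients
  // K, R and P would normally come from a calibration routine; left zeroed here,
  // which the CameraInfo comments above define as "uncalibrated".

  if (client.call(srv) && srv.response.success)
    ROS_INFO("Calibration stored: %s", srv.response.status_message.c_str());
  else
    ROS_ERROR("set_camera_info failed: %s", srv.response.status_message.c_str());

  return 0;
}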