/******************************************************************************
 * \file
 *
 * $Id:$
 *
 * Copyright (C) Brno University of Technology
 *
 * This file is part of software developed by dcgm-robotics@FIT group.
 *
 * Author: Vladimir Blahoz (xblaho02@stud.fit.vutbr.cz)
 * Supervised by: Michal Spanel (spanel@fit.vutbr.cz)
 * Date: dd/mm/2012
 *
 * This file is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This file is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this file. If not, see <http://www.gnu.org/licenses/>.
 */

#include <ros/ros.h>

#include <OgreVector3.h>

#include <srs_ui_but/topics_list.h>
#include <srs_ui_but/services_list.h>
#include <srs_ui_but/ButCamMsg.h>

#include <visualization_msgs/Marker.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/image_encodings.h>

#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>

#include <pcl_ros/point_cloud.h>
#include <pcl_ros/transforms.h>

#include <tf/transform_listener.h>
#include <tf/message_filter.h>

#include <boost/foreach.hpp>

#include <stdio.h>
#include <utility>
#include <sstream>
#include <float.h>

// maximal depth of the view frustum lines
#define MAX_FRUSTUM_DEPTH 15.0f

// maximal distance at which the camera display is rendered
#define MAX_DISPLAY_DEPTH 15.0f

using namespace std;
using namespace sensor_msgs;

// function prototypes
void countCameraParams(const CameraInfoConstPtr& camInfo);
pair<float, float> countPclDepths(const PointCloud2ConstPtr& pcl);
void publishViewFrustumMarker(const CameraInfoConstPtr cam_info, float frustum_depth);
void publishButDisplay(const CameraInfoConstPtr cam_info, float display_depth);
void updateCameraTopic(ros::NodeHandle& nh);

// camera parameters
float elev_d = 0.0f, steer_r = 0.0f, elev_u = 0.0f, steer_l = 0.0f;

// TF transform listener for the camera info messages
tf::TransformListener *tf_cam_info_Listener;

// ROS message subscribers
message_filters::Subscriber<CameraInfo> *cam_info_sub;
message_filters::Subscriber<Image> *cam_image_sub;

// ROS message publishers
ros::Publisher frustum_marker_pub, but_display_pub;

// parameters regularly updated from the parameter server (defaults below)
//std::string camera_topic_par = "/stereo/left/camera_info";
std::string camera_topic_par = srs_ui_but::DEFAULT_CAMERA_INFO;
double depth_par = 1.0;

#ifndef NDEBUG
void callback1(const CameraInfoConstPtr& cam_info)
{
  ROS_DEBUG("got cam_info");
}

void callback2(const PointCloud2ConstPtr& pcl)
{
  ROS_DEBUG("got pcl");
}

void callback3(const ImageConstPtr& im)
{
  ROS_DEBUG("got image");
}
#endif

/**
 * @brief Callback for time-synchronized CameraInfo and PointCloud2 messages;
 *        the CameraInfo message additionally has a TF transform available.
 *
 * @param nh Node handle
 * @param cam_info CameraInfo message
 * @param pcl PointCloud2 message
 */
void callback(ros::NodeHandle& nh, const CameraInfoConstPtr& cam_info, const PointCloud2ConstPtr& pcl)
{
  ROS_DEBUG("Got everything synced");

  // check the parameter server for a change of the desired camera
  ros::param::getCached(srs_ui_but::Camera_PARAM, camera_topic_par);
  if (!camera_topic_par.empty())
    updateCameraTopic(nh);

  // check the parameter server for a change of the desired polygon depth
  ros::param::getCached(srs_ui_but::Depth_PARAM, depth_par);

  // compute the internal parameters of the view volume
  countCameraParams(cam_info);

  // compute the minimal and maximal point cloud distance
  pair<float, float> distances = countPclDepths(pcl);

  ROS_DEBUG_STREAM("nearest point " << distances.first << " most distant point " << distances.second);

  publishViewFrustumMarker(cam_info, distances.second);

  publishButDisplay(cam_info, distances.first * depth_par);
}

/**
 * @brief Re-subscribes the camera info and camera image subscribers according to the
 *        camera selected by the camera_topic_par parameter.
 *
 * @param nh Node handle
 */
void updateCameraTopic(ros::NodeHandle& nh)
{
  ROS_DEBUG_STREAM("updateCameraTopic :" << camera_topic_par << "!");

  std::string cam3dRGB(srs_ui_but::CAM3D_BASE);
  std::string camLeft(srs_ui_but::STEREO_LEFT_BASE);
  std::string camRight(srs_ui_but::STEREO_RIGHT_BASE);

  if (!camera_topic_par.compare(0, cam3dRGB.size(), cam3dRGB))
  {
    ROS_DEBUG_STREAM("cam3dRGB");
    cam_info_sub->subscribe(nh, cam3dRGB + "camera_info", 10);
    cam_image_sub->subscribe(nh, cam3dRGB + "image_raw", 10);
  }
  else if (!camera_topic_par.compare(0, camLeft.size(), camLeft))
  {
    ROS_DEBUG_STREAM("left");
    cam_info_sub->subscribe(nh, camLeft + "camera_info", 10);
    cam_image_sub->subscribe(nh, camLeft + "image_raw", 10);
  }
  else if (!camera_topic_par.compare(0, camRight.size(), camRight))
  {
    ROS_DEBUG_STREAM("right");
    cam_info_sub->subscribe(nh, camRight + "camera_info", 10);
    cam_image_sub->subscribe(nh, camRight + "image_raw", 10);
  }
  else
    ROS_ERROR("UNKNOWN CAMERA");

  ROS_DEBUG_STREAM("Subscribed to " << cam_info_sub->getTopic() << " and " << cam_image_sub->getTopic());
}

/**
 * @brief Publishes the camera display rectangle (srs_ui_but::ButCamMsg) placed at the
 *        given depth in front of the camera.
 *
 * @param cam_info CameraInfo message
 * @param display_depth distance of the display from the camera
 */
void publishButDisplay(const CameraInfoConstPtr cam_info, float display_depth)
{
  ROS_DEBUG("publishButDisplay");

  // recalculate the real maximal polygon depth from the closest point cloud point
  float real_depth = cos(elev_d) * display_depth;

  srs_ui_but::ButCamMsg rectangle;

  rectangle.header.frame_id = srs_ui_but::MAP_TOPIC;
  rectangle.header.stamp = cam_info->header.stamp;

  // retrieve the transform for the display rotation
  tf::StampedTransform cameraToWorldTf;
  try
  {
    tf_cam_info_Listener->waitForTransform(srs_ui_but::MAP_TOPIC, cam_info->header.frame_id, cam_info->header.stamp,
                                           ros::Duration(0.2));
    tf_cam_info_Listener->lookupTransform(srs_ui_but::MAP_TOPIC, cam_info->header.frame_id, cam_info->header.stamp, cameraToWorldTf);
  }
  // in case no transform path is available
  catch (tf::TransformException& ex)
  {
    ROS_ERROR_STREAM("Camera info transform error: " << ex.what() << ", quitting callback");
    return;
  }

  rectangle.pose.orientation.x = cameraToWorldTf.getRotation().x();
  rectangle.pose.orientation.y = cameraToWorldTf.getRotation().y();
  rectangle.pose.orientation.z = cameraToWorldTf.getRotation().z();
  rectangle.pose.orientation.w = cameraToWorldTf.getRotation().w();

  // three corner vectors of the display, used to compute its size
  Ogre::Vector3 tl_vec;
  tl_vec.x = +steer_l * real_depth;
  tl_vec.y = +elev_u * real_depth;
  tl_vec.z = real_depth;

  Ogre::Vector3 tr_vec;
  tr_vec.x = -steer_r * real_depth;
  tr_vec.y = +elev_u * real_depth;
  tr_vec.z = real_depth;

  Ogre::Vector3 bl_vec;
  bl_vec.x = +steer_l * real_depth;
  bl_vec.y = -elev_d * real_depth;
  bl_vec.z = real_depth;

  rectangle.scale.x = tl_vec.distance(tr_vec);
  rectangle.scale.y = tl_vec.distance(bl_vec);
  rectangle.scale.z = 1.0;

  // bottom right point transformed to the /map frame, used as the display position
  geometry_msgs::PointStamped br_map;

  br_map.header.frame_id = cam_info->header.frame_id;
  br_map.header.stamp = cam_info->header.stamp;
  br_map.point.x = -steer_r * real_depth;
  br_map.point.y = -elev_d * real_depth;
  br_map.point.z = real_depth;
  // transform the point into the /map frame
  tf_cam_info_Listener->transformPoint(srs_ui_but::MAP_TOPIC, br_map, br_map);

  rectangle.pose.position.x = br_map.point.x;
  rectangle.pose.position.y = br_map.point.y;
  rectangle.pose.position.z = br_map.point.z;

  // publish the display message
  but_display_pub.publish(rectangle);
}

/**
 * @brief Publishes a LINE_LIST marker visualizing the camera view frustum up to the
 *        given depth.
 *
 * @param cam_info CameraInfo message
 * @param frustum_depth depth of the view frustum lines
 */
void publishViewFrustumMarker(const CameraInfoConstPtr cam_info, float frustum_depth)
{
  ROS_DEBUG("publishViewFrustumMarker");

  visualization_msgs::Marker marker;

  // global attributes for all points
  marker.id = 0;
  marker.header.stamp = cam_info->header.stamp;
  marker.header.frame_id = srs_ui_but::MAP_TOPIC;
  marker.ns = "view_frustum";
  marker.type = visualization_msgs::Marker::LINE_LIST;
  marker.action = visualization_msgs::Marker::ADD;
  marker.pose.position.x = 0;
  marker.pose.position.y = 0;
  marker.pose.position.z = 0;
  marker.pose.orientation.x = 0;
  marker.pose.orientation.y = 0;
  marker.pose.orientation.z = 0;
  marker.pose.orientation.w = 1.0;
  marker.scale.x = 0.01;
  marker.color.a = 1.0;
  marker.color.r = 1.0;

  // 5 vertices of the view frustum and the corresponding points
  // transformed to the /map frame
  geometry_msgs::PointStamped tl, tr, bl, br, camera;
  geometry_msgs::PointStamped tl_map, tr_map, bl_map, br_map, camera_map;

  /*
   * central point - camera position
   */
  camera.header.frame_id = cam_info->header.frame_id;
  camera.header.stamp = cam_info->header.stamp;
  camera.point.x = 0.0;
  camera.point.y = 0.0;
  camera.point.z = 0.0;
  // transform the point into the /map frame
  tf_cam_info_Listener->transformPoint(srs_ui_but::MAP_TOPIC, camera, camera_map);

  /*
   * bottom right point
   */
  br.header.frame_id = cam_info->header.frame_id;
  br.header.stamp = cam_info->header.stamp;
  br.point.x = -steer_r * frustum_depth;
  br.point.y = -elev_d * frustum_depth;
  br.point.z = frustum_depth;
  // transform the point into the /map frame
  tf_cam_info_Listener->transformPoint(srs_ui_but::MAP_TOPIC, br, br_map);

  /*
   * bottom left point
   */
  bl.header.frame_id = cam_info->header.frame_id;
  bl.header.stamp = cam_info->header.stamp;
  bl.point.x = +steer_l * frustum_depth;
  bl.point.y = -elev_d * frustum_depth;
  bl.point.z = frustum_depth;
  // transform the point into the /map frame
  tf_cam_info_Listener->transformPoint(srs_ui_but::MAP_TOPIC, bl, bl_map);

  /*
   * top left point
   */
  tl.header.frame_id = cam_info->header.frame_id;
  tl.header.stamp = cam_info->header.stamp;
  tl.point.x = +steer_l * frustum_depth;
  tl.point.y = +elev_u * frustum_depth;
  tl.point.z = frustum_depth;
  // transform the point into the /map frame
  tf_cam_info_Listener->transformPoint(srs_ui_but::MAP_TOPIC, tl, tl_map);

  /*
   * top right point
   */
  tr.header.frame_id = cam_info->header.frame_id;
  tr.header.stamp = cam_info->header.stamp;
  tr.point.x = -steer_r * frustum_depth;
  tr.point.y = +elev_u * frustum_depth;
  tr.point.z = frustum_depth;
  // transform the point into the /map frame
  tf_cam_info_Listener->transformPoint(srs_ui_but::MAP_TOPIC, tr, tr_map);

  // the view frustum is made of 4 lines,
  // so two points are pushed for each line
  marker.points.push_back(camera_map.point);
  marker.points.push_back(br_map.point);
  marker.points.push_back(camera_map.point);
  marker.points.push_back(bl_map.point);
  marker.points.push_back(camera_map.point);
  marker.points.push_back(tl_map.point);
  marker.points.push_back(camera_map.point);
  marker.points.push_back(tr_map.point);

  // publish the marker
  frustum_marker_pub.publish(marker);
  return;
}

/**
 * @brief Computes the distance at which the camera display can be rendered (from the
 *        closest point in the point cloud, so that the display does not collide with
 *        it) and the depth of the view frustum (from the most distant point).
 *
 * @param pcl PointCloud2 message
 * @return pair of (near distance, far distance)
 */
pair<float, float> countPclDepths(const PointCloud2ConstPtr& pcl)
{
  ROS_DEBUG("countPclDepths");

  // PCL point cloud
  pcl::PointCloud<pcl::PointXYZ> pcl_pointCloud;

  // transform the PointCloud2 message into a PCL point cloud
  pcl::fromROSMsg(*pcl, pcl_pointCloud);

  // depth of the view frustum
  float far_distance = 0.0f;

  // distance of the but display from the camera
  float near_distance = FLT_MAX;

  // squared distance of the current point from the origin (3D camera position)
  float dist;

  // find the closest and the most distant point
  BOOST_FOREACH (const pcl::PointXYZ& pt, pcl_pointCloud.points)
  {
    dist = pt.x * pt.x + pt.y * pt.y + pt.z * pt.z;
    if (dist > far_distance) far_distance = dist;
    if (dist < near_distance) near_distance = dist;
  }

  far_distance = sqrt(far_distance);
  near_distance = sqrt(near_distance);

  // some points could be too far away from the camera, causing an infinite frustum
  if (far_distance > MAX_FRUSTUM_DEPTH)
    far_distance = MAX_FRUSTUM_DEPTH;
  if (near_distance > MAX_DISPLAY_DEPTH)
    near_distance = MAX_DISPLAY_DEPTH;

  return make_pair(near_distance, far_distance);
}

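// With the pinhole model, a pixel (u, v) back-projects to the ray
// ((u - cx) / fx, (v - cy) / fy, 1) in the camera frame, so the ratios computed in
// countCameraParams() below are the tangents of the angles between the optical axis
// and the rays through the four image borders; multiplying them by a depth (as done
// in publishViewFrustumMarker() and publishButDisplay()) places the frustum corners
// at that depth.
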
/**
 * @brief Computes the angular parameters of the view frustum of the camera described
 *        by the given CameraInfo message and sets the internal variables
 *        elev_d, elev_u, steer_l and steer_r.
 *
 * @param camInfo given CameraInfo message
 */
void countCameraParams(const CameraInfoConstPtr& camInfo)
{
  ROS_DEBUG("countCameraParams");

  // do other distortion models have the same camera matrices?
  if (camInfo->distortion_model != "plumb_bob")
  {
    ROS_ERROR("Unknown distortion model in CameraInfo message. "
              "Optimized only for the plumb bob distortion model.");
  }

  unsigned int height = 0;
  unsigned int width = 0;

  // focal lengths and principal point coordinates
  float fx, fy, cx, cy;

  height = camInfo->height;
  width = camInfo->width;

  // getting the essential parameters from the intrinsic camera matrix
  //     [fx  0 cx]
  // K = [ 0 fy cy]
  //     [ 0  0  1]
  fx = camInfo->K[0];

  if (fx == 0)
  {
    ROS_ERROR("Uncalibrated camera, unable to compute view frustum parameters.");
  }

  fy = camInfo->K[4];
  cx = camInfo->K[2];
  cy = camInfo->K[5];

  // computing the view frustum parameters from the camera parameters
  elev_d = cy / fy;
  steer_r = cx / fx;

  elev_u = (height - cy) / fy;
  steer_l = (width - cx) / fx;

  return;
}

int main(int argc, char** argv)
{
  ROS_DEBUG_STREAM("test " << "default_camera_info" << ", " << srs_ui_but::CAM3D_BASE << ", " << srs_ui_but::DEFAULT_CAMERA_INFO << ", " << srs_ui_but::STEREO_LEFT_BASE << ", " << srs_ui_but::STEREO_RIGHT_BASE << ", " << srs_ui_but::MAP_TOPIC << ", " << srs_ui_but::DEPTH_IMAGE_IN << ", " << srs_ui_but::DEFAULT_CAMERA_IMAGE);

  ros::init(argc, argv, "view");

  ros::NodeHandle nh;

  // initialize the ROS message publishers:
  // the frustum is published as a simple marker
  frustum_marker_pub = nh.advertise<visualization_msgs::Marker>(srs_ui_but::ViewFrustum_TOPIC, 1);
  // the but rectangle is published as srs_ui_but::ButCamMsg
  but_display_pub = nh.advertise<srs_ui_but::ButCamMsg>(srs_ui_but::CameraView_TOPIC, 1);

  // subscribers to the required topics
  cam_info_sub = new message_filters::Subscriber<CameraInfo>(nh, camera_topic_par, 10);

  message_filters::Subscriber<PointCloud2> pcl_sub(nh, srs_ui_but::DEPTH_IMAGE_IN, 10);

  // bugfix: ROS messes with the camera_info topic when the camera image is not subscribed
  cam_image_sub = new message_filters::Subscriber<Image>(nh, srs_ui_but::DEFAULT_CAMERA_IMAGE, 10);

#ifndef NDEBUG
  cam_info_sub->registerCallback(boost::bind(&callback1, _1));
  pcl_sub.registerCallback(boost::bind(&callback2, _1));
  cam_image_sub->registerCallback(boost::bind(&callback3, _1));
#endif

  // initialize the listener for tf transforms and the tf message filter
  tf_cam_info_Listener = new tf::TransformListener();
  tf::MessageFilter<CameraInfo> *cam_info_transform_filter = new tf::MessageFilter<CameraInfo>(*cam_info_sub,
                                                                                               *tf_cam_info_Listener,
                                                                                               srs_ui_but::MAP_TOPIC, 10);

  // synchronization policy - approximate time (exact time has too low a hit rate)
  typedef message_filters::sync_policies::ApproximateTime<CameraInfo, PointCloud2> App_sync_policy;

  // time-synchronizing both messages, with the CameraInfo tf transform available
  message_filters::Synchronizer<App_sync_policy> time_sync(App_sync_policy(10),
                                                            *cam_info_transform_filter, pcl_sub);

  // message_filters::TimeSynchronizer<CameraInfo, PointCloud2> time_sync(
  //     *cam_info_transform_filter, pcl_sub, 10);

  // callback for the synchronizer
  time_sync.registerCallback(boost::bind(&callback, nh, _1, _2));

  ros::spin();

  return 0;
}
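
For reference, the following standalone sketch (separate from the node above, built on its own, and using made-up intrinsic values) reproduces the arithmetic of countCameraParams() and shows where the four frustum corners end up at a chosen depth in the camera frame, matching the corner convention used in publishViewFrustumMarker():

#include <cstdio>

int main()
{
  // made-up pinhole intrinsics: focal lengths (pixels) and principal point
  const float fx = 525.0f, fy = 525.0f, cx = 319.5f, cy = 239.5f;
  const unsigned int width = 640, height = 480;

  // same ratios as countCameraParams(): tangents of the half-angles
  // toward the four image borders
  const float elev_d  = cy / fy;
  const float steer_r = cx / fx;
  const float elev_u  = (height - cy) / fy;
  const float steer_l = (width - cx) / fx;

  // arbitrary frustum depth for the example, in metres
  const float depth = 2.0f;

  // frustum corners at that depth, using the node's convention
  const float corners[4][3] = {
    { -steer_r * depth, -elev_d * depth, depth },  // bottom right
    { +steer_l * depth, -elev_d * depth, depth },  // bottom left
    { +steer_l * depth, +elev_u * depth, depth },  // top left
    { -steer_r * depth, +elev_u * depth, depth },  // top right
  };

  for (int i = 0; i < 4; ++i)
    std::printf("corner %d: (%.3f, %.3f, %.3f)\n", i, corners[i][0], corners[i][1], corners[i][2]);

  return 0;
}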