led_finder.cpp
/*
 * Copyright (C) 2015 Fetch Robotics Inc.
 * Copyright (C) 2013-2014 Unbounded Robotics Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Author: Michael Ferguson

#include <algorithm>
#include <cmath>
#include <stdexcept>

#include <robot_calibration/capture/led_finder.h>
#include <sensor_msgs/point_cloud2_iterator.h>
#include <sensor_msgs/image_encodings.h>

namespace robot_calibration
{

// We use a number of PC2 iterators; define the field indexes here
const unsigned X = 0;
const unsigned Y = 1;
const unsigned Z = 2;
const unsigned R = 0;
const unsigned G = 1;
const unsigned B = 2;

double distancePoints(
  const geometry_msgs::Point& p1,
  const geometry_msgs::Point& p2)
{
  return std::sqrt((p1.x-p2.x) * (p1.x-p2.x) +
                   (p1.y-p2.y) * (p1.y-p2.y) +
                   (p1.z-p2.z) * (p1.z-p2.z));
}

LedFinder::LedFinder(ros::NodeHandle& nh) :
  FeatureFinder(nh),
  waiting_(false)
{
  // Setup the action client
  std::string topic_name;
  nh.param<std::string>("action", topic_name, "/gripper_led_action");
  client_.reset(new LedClient(topic_name, true));
  ROS_INFO("Waiting for %s...", topic_name.c_str());
  client_->waitForServer();

  // Setup subscriber
  nh.param<std::string>("topic", topic_name, "/points");
  subscriber_ = nh.subscribe(topic_name,
                             1,
                             &LedFinder::cameraCallback,
                             this);

  // Publish where LEDs were seen
  publisher_ = nh.advertise<sensor_msgs::PointCloud2>("led_points", 10);

  // Maximum distance an LED can be from its expected pose
  nh.param<double>("max_error", max_error_, 0.1);
  // Maximum relative difference between two LEDs
  nh.param<double>("max_inconsistency", max_inconsistency_, 0.01);

  // Parameters for detection
  nh.param<double>("threshold", threshold_, 1000.0);
  nh.param<int>("max_iterations", max_iterations_, 50);

  // Should we output a debug image/cloud?
  nh.param<bool>("debug", output_debug_, false);

  // Get sensor names
  nh.param<std::string>("camera_sensor_name", camera_sensor_name_, "camera");
  nh.param<std::string>("chain_sensor_name", chain_sensor_name_, "arm");

  // Parameters for the LEDs themselves
  std::string gripper_led_frame;
  nh.param<std::string>("gripper_led_frame", gripper_led_frame, "wrist_roll_link");
  XmlRpc::XmlRpcValue led_poses;
  nh.getParam("poses", led_poses);
  ROS_ASSERT(led_poses.getType() == XmlRpc::XmlRpcValue::TypeArray);
  // Each LED has a code and a pose in the gripper_led_frame
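  //
  // An illustrative (hypothetical) parameter layout -- the keys match
  // the lookups below, while the values here are made up:
  //
  //   poses:
  //     - {code: 1, x: 0.0, y: -0.03, z: 0.0, topic: led_0_debug}
  //     - {code: 2, x: 0.0, y:  0.03, z: 0.0, topic: led_1_debug}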
  for (int i = 0; i < led_poses.size(); ++i)
  {
    codes_.push_back(static_cast<int>(led_poses[i]["code"]));
    codes_.push_back(0);  // assumes "0" is the code for "OFF"

    double x, y, z;
    x = static_cast<double>(led_poses[i]["x"]);
    y = static_cast<double>(led_poses[i]["y"]);
    z = static_cast<double>(led_poses[i]["z"]);
    trackers_.push_back(CloudDifferenceTracker(gripper_led_frame, x, y, z));

    // Publisher for this tracker's debug image
    boost::shared_ptr<ros::Publisher> pub(new ros::Publisher);
    *pub = nh.advertise<sensor_msgs::Image>(static_cast<std::string>(led_poses[i]["topic"]), 10);
    tracker_publishers_.push_back(pub);
  }

  // Setup to get camera depth info
  if (!depth_camera_manager_.init(nh))
  {
    // An error will already have been printed by the manager; a bare
    // `throw;` with no active exception would call std::terminate,
    // so throw a real exception instead
    throw std::runtime_error("failed to initialize the depth camera manager");
  }
}

void LedFinder::cameraCallback(const sensor_msgs::PointCloud2::ConstPtr& cloud)
{
  if (waiting_)
  {
    cloud_ = *cloud;
    waiting_ = false;
  }
}

// Returns true if we got a message, false if we timed out
bool LedFinder::waitForCloud()
{
  // Initial wait cycle so that the camera is definitely up to date
  ros::Duration(1/10.0).sleep();

  waiting_ = true;
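  // Wait up to ~2.5 seconds (250 cycles of 10 ms) for the callback
  // above to store a fresh cloud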
  int count = 250;
  while (--count)
  {
    if (!waiting_)
    {
      // success
      return true;
    }
    ros::Duration(0.01).sleep();
    ros::spinOnce();
  }
  ROS_ERROR("Failed to get cloud");
  return !waiting_;
}

bool LedFinder::find(robot_calibration_msgs::CalibrationData * msg)
{
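  // Overall approach: command each LED on and then off while
  // differencing consecutive point clouds. Each CloudDifferenceTracker
  // accumulates the per-pixel color change for its LED; once every
  // tracker's best pixel exceeds threshold_, the refined centroids
  // become the camera observations and the expected LED poses on the
  // gripper become the chain observations.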
  // Index into codes_; start one step before the first code so the
  // increment at the top of the loop below wraps it to 0
  size_t code_idx = codes_.size() - 1;

  sensor_msgs::PointCloud2 prev_cloud;

  robot_calibration_msgs::GripperLedCommandGoal command;
  command.led_code = 0;
  client_->sendGoal(command);
  client_->waitForResult(ros::Duration(10.0));

  // Get initial cloud
  if (!waitForCloud())
  {
    return false;
  }
  prev_cloud = cloud_;

  // Initialize difference trackers
  for (size_t i = 0; i < trackers_.size(); ++i)
  {
    trackers_[i].reset(cloud_.height, cloud_.width);
  }

  int cycles = 0;
  while (true)
  {
    // Toggle LED to next state
    code_idx = (code_idx + 1) % codes_.size();
    command.led_code = codes_[code_idx];
    client_->sendGoal(command);
    client_->waitForResult(ros::Duration(10.0));

    // Get a point cloud
    if (!waitForCloud())
    {
      return false;
    }

    // Commands are organized as an on/off pair for each LED
    int tracker = code_idx/2;
    // Even indexes turn an LED on, odd indexes turn it off
    double weight = (code_idx%2 == 0) ? 1 : -1;
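    // For example, with two LEDs whose codes are 1 and 2, codes_ holds
    // {1, 0, 2, 0}: code_idx 0 turns LED 0 on, 1 turns it off,
    // 2 turns LED 1 on, 3 turns it off, and then the cycle repeats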

    // Has each point converged?
    bool done = true;
    for (size_t t = 0; t < trackers_.size(); ++t)
    {
      done &= trackers_[t].isFound(cloud_, threshold_);
    }
    // Only break when the LED is off, so the pixel is not washed out
    if (done && (weight == -1))
    {
      break;
    }

    // Get expected pose of LED in the cloud frame
    geometry_msgs::PointStamped led;
    led.point = trackers_[tracker].point_;
    led.header.frame_id = trackers_[tracker].frame_;
    try
    {
      listener_.transformPoint(cloud_.header.frame_id, ros::Time(0), led,
                               led.header.frame_id, led);
    }
    catch (const tf::TransformException& ex)
    {
      ROS_ERROR_STREAM("Failed to transform feature to " << cloud_.header.frame_id);
      return false;
    }

    // Update the tracker
    trackers_[tracker].process(cloud_, prev_cloud, led.point, max_error_, weight);

    if (++cycles > max_iterations_)
    {
      ROS_ERROR("Failed to find features within the maximum number of iterations");
      return false;
    }

    prev_cloud = cloud_;

    // Publish state of each tracker
    for (size_t i = 0; i < trackers_.size(); i++)
    {
      sensor_msgs::Image image = trackers_[i].getImage();
      tracker_publishers_[i]->publish(image);
    }
  }

  // Create a PointCloud2 of the detections to publish
  sensor_msgs::PointCloud2 cloud;
  cloud.width = 0;
  cloud.height = 0;
  cloud.header.stamp = ros::Time::now();
  cloud.header.frame_id = cloud_.header.frame_id;
  sensor_msgs::PointCloud2Modifier cloud_mod(cloud);
  cloud_mod.setPointCloud2FieldsByString(1, "xyz");
  cloud_mod.resize(trackers_.size());
  sensor_msgs::PointCloud2Iterator<float> iter_cloud(cloud, "x");
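  // Each accepted centroid below is written through iter_cloud so the
  // detections can be visualized (e.g. in rviz)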

  // Collect Results
  const int CAMERA = 0;
  const int CHAIN = 1;
  robot_calibration_msgs::Observation observations[2];
  observations[CAMERA].sensor_name = camera_sensor_name_;
  observations[CHAIN].sensor_name = chain_sensor_name_;
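  // observations[CAMERA] holds detected centroids in the camera frame;
  // observations[CHAIN] holds the corresponding expected LED positions
  // on the gripper, paired index-for-index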

  for (size_t t = 0; t < trackers_.size(); ++t)
  {
    geometry_msgs::PointStamped rgbd_pt;
    geometry_msgs::PointStamped world_pt;

    // Get point
    if (!trackers_[t].getRefinedCentroid(cloud_, rgbd_pt))
    {
      ROS_ERROR_STREAM("No centroid for feature " << t);
      return false;
    }

    // Check that the point is close enough to the expected pose
    try
    {
      listener_.transformPoint(trackers_[t].frame_, ros::Time(0), rgbd_pt,
                               rgbd_pt.header.frame_id, world_pt);
    }
    catch (const tf::TransformException& ex)
    {
      ROS_ERROR_STREAM("Failed to transform feature to " << trackers_[t].frame_);
      return false;
    }
    double distance = distancePoints(world_pt.point, trackers_[t].point_);
    if (distance > max_error_)
    {
      ROS_ERROR_STREAM("Feature was too far away from expected pose in " << trackers_[t].frame_ << ": " << distance);
      return false;
    }

    // Check that the points are consistent with one another
    for (size_t t2 = 0; t2 < t; ++t2)
    {
      double expected = distancePoints(trackers_[t2].point_, trackers_[t].point_);
      double actual = distancePoints(observations[CAMERA].features[t2].point, rgbd_pt.point);
      if (std::fabs(expected-actual) > max_inconsistency_)
      {
        ROS_ERROR_STREAM("Features not internally consistent: " << expected << " " << actual);
        return false;
      }
    }

    // Push back the camera observation
    observations[CAMERA].features.push_back(rgbd_pt);
    observations[CAMERA].ext_camera_info = depth_camera_manager_.getDepthCameraInfo();

    // Visualize
    iter_cloud[0] = rgbd_pt.point.x;
    iter_cloud[1] = rgbd_pt.point.y;
    iter_cloud[2] = rgbd_pt.point.z;
    ++iter_cloud;

    // Push back the expected location of the point on the robot
    world_pt.header.frame_id = trackers_[t].frame_;
    world_pt.point = trackers_[t].point_;
    observations[CHAIN].features.push_back(world_pt);
  }

  // Final check that all points are valid
  if (observations[CAMERA].features.size() != trackers_.size())
  {
    return false;
  }

  // Add debug cloud to message
  if (output_debug_)
  {
    observations[CAMERA].cloud = cloud_;
  }

  // Copy results to message
  msg->observations.push_back(observations[CAMERA]);
  msg->observations.push_back(observations[CHAIN]);

  // Publish results
  publisher_.publish(cloud);

  return true;
}

LedFinder::CloudDifferenceTracker::CloudDifferenceTracker(
  std::string frame, double x, double y, double z) :
    frame_(frame)
{
  point_.x = x;
  point_.y = y;
  point_.z = z;
}

void LedFinder::CloudDifferenceTracker::reset(size_t height, size_t width)
{
  // Save for creating images
  height_ = height;
  width_ = width;

  // Number of clouds processed
  count_ = 0;
  // Maximum difference observed
  max_ = -1000.0;
  // Pixel this was observed in
  max_idx_ = -1;

  // Setup difference tracker
  diff_.resize(height * width);
  for (std::vector<double>::iterator it = diff_.begin(); it != diff_.end(); ++it)
  {
    *it = 0.0;
  }
}

// Weight should typically be +/- 1
bool LedFinder::CloudDifferenceTracker::process(
  sensor_msgs::PointCloud2& cloud,
  sensor_msgs::PointCloud2& prev,
  geometry_msgs::Point& led_point,
  double max_distance,
  double weight)
{
  if ((cloud.width * cloud.height) != diff_.size())
  {
    ROS_ERROR("Cloud size has changed");
    return false;
  }

  sensor_msgs::PointCloud2ConstIterator<float> xyz(cloud, "x");
  sensor_msgs::PointCloud2ConstIterator<uint8_t> rgb(cloud, "rgb");
  sensor_msgs::PointCloud2ConstIterator<uint8_t> prev_rgb(prev, "rgb");

  // We want to compare each point to the expected LED pose, but when
  // the LED is on the points may be NAN, so we fall back on the most
  // recent valid distance for those points
  double last_distance = 1000.0;

  // Update each point in the tracker
  const size_t num_points = cloud.data.size() / cloud.point_step;
  int valid = 0;
  int used = 0;
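  // valid and used are diagnostic counters: valid counts points with a
  // finite distance to the expected LED pose, used counts points whose
  // color change matched the expected sign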
  for (size_t i = 0; i < num_points; i++)
  {
    // If within range of LED pose...
    geometry_msgs::Point p;
    p.x = (xyz + i)[X];
    p.y = (xyz + i)[Y];
    p.z = (xyz + i)[Z];
    double distance = distancePoints(p, led_point);

    if (std::isfinite(distance))
    {
      last_distance = distance;
      valid++;
    }
    else
    {
      distance = last_distance;
    }

    if (!std::isfinite(distance) || distance > max_distance)
    {
      continue;
    }

    // ...and has the proper change in sign: when the LED turns on
    // (weight > 0) all three channels should brighten; when it turns
    // off (weight < 0) they should all darken
    double r = (double)((rgb + i)[R]) - (double)((prev_rgb + i)[R]);
    double g = (double)((rgb + i)[G]) - (double)((prev_rgb + i)[G]);
    double b = (double)((rgb + i)[B]) - (double)((prev_rgb + i)[B]);
    if (r > 0 && g > 0 && b > 0 && weight > 0)
    {
      diff_[i] += (r + g + b) * weight;
      used++;
    }
    else if (r < 0 && g < 0 && b < 0 && weight < 0)
    {
      diff_[i] += (r + g + b) * weight;
      used++;
    }

    // Is this a new max value?
    if (diff_[i] > max_)
    {
      max_ = diff_[i];
      max_idx_ = i;
    }
  }

  return true;
}

bool LedFinder::CloudDifferenceTracker::isFound(
  const sensor_msgs::PointCloud2& cloud,
  double threshold)
{
  // The maximum difference must exceed the threshold...
  if (max_ < threshold)
  {
    return false;
  }

  // Access the best point in the cloud
  sensor_msgs::PointCloud2ConstIterator<float> point(cloud, "x");
  point += max_idx_;

  // ...AND that index must be a valid point in the cloud
  if (std::isnan(point[X]) ||
      std::isnan(point[Y]) ||
      std::isnan(point[Z]))
  {
    return false;
  }

  return true;
}

bool LedFinder::CloudDifferenceTracker::getRefinedCentroid(
  const sensor_msgs::PointCloud2& cloud,
  geometry_msgs::PointStamped& centroid)
{
  // Access points in the cloud
  sensor_msgs::PointCloud2ConstIterator<float> iter(cloud, "x");
  const size_t num_points = cloud.data.size() / cloud.point_step;

  // Get initial centroid from the max-difference pixel
  centroid.header = cloud.header;
  centroid.point.x = (iter + max_idx_)[X];
  centroid.point.y = (iter + max_idx_)[Y];
  centroid.point.z = (iter + max_idx_)[Z];

  // Do not accept NANs
  if (std::isnan(centroid.point.x) ||
      std::isnan(centroid.point.y) ||
      std::isnan(centroid.point.z))
  {
    return false;
  }

  // Refine the centroid by averaging in nearby high-confidence points
  int points = 0;
  double sum_x = 0.0;
  double sum_y = 0.0;
  double sum_z = 0.0;
  for (size_t i = 0; i < num_points; i++)
  {
    sensor_msgs::PointCloud2ConstIterator<float> point = iter + i;

    // Use only highly likely points (within 75% of the max difference)
    if (diff_[i] > (max_*0.75))
    {
      if (std::isnan(point[X]) || std::isnan(point[Y]) || std::isnan(point[Z]))
      {
        continue;
      }

      double dx = point[X] - centroid.point.x;
      double dy = point[Y] - centroid.point.y;
      double dz = point[Z] - centroid.point.z;

      // ...that are less than 5cm from the max point
      if ((dx*dx) + (dy*dy) + (dz*dz) < (0.05*0.05))
      {
        sum_x += point[X];
        sum_y += point[Y];
        sum_z += point[Z];
        ++points;
      }
    }
  }

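  // Average the accepted neighbors together with the initial max point
  // (the +1 in the denominator counts the max point itself)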
  if (points > 0)
  {
    centroid.point.x = (centroid.point.x + sum_x)/(points+1);
    centroid.point.y = (centroid.point.y + sum_y)/(points+1);
    centroid.point.z = (centroid.point.z + sum_z)/(points+1);
  }

  return true;
}

sensor_msgs::Image LedFinder::CloudDifferenceTracker::getImage()
{
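  // Render the difference buffer for debugging: pixels within 90% of
  // the maximum difference are drawn with the first channel saturated,
  // smaller positive differences as gray proportional to their value,
  // and everything else black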
  sensor_msgs::Image image;

  image.height = height_;
  image.width = width_;

  image.encoding = sensor_msgs::image_encodings::BGR8;
  image.step = width_ * 3;

  image.data.resize(width_ * height_ * 3);

  for (size_t i = 0; i < diff_.size(); i++)
  {
    if (diff_[i] > max_ * 0.9)
    {
      image.data[i*3] = 255;
      image.data[i*3 + 1] = 0;
      image.data[i*3 + 2] = 0;
    }
    else if (diff_[i] > 0)
    {
      // Clamp so large differences do not wrap around the uint8 range
      uint8_t gray = static_cast<uint8_t>(std::min(255.0, diff_[i]/2.0));
      image.data[i*3] = gray;
      image.data[i*3 + 1] = gray;
      image.data[i*3 + 2] = gray;
    }
    else
    {
      image.data[i*3] = 0;
      image.data[i*3 + 1] = 0;
      image.data[i*3 + 2] = 0;
    }
  }

  return image;
}

}  // namespace robot_calibration

