led_finder.cpp
/*
 * Copyright (C) 2015 Fetch Robotics Inc.
 * Copyright (C) 2013-2014 Unbounded Robotics Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Author: Michael Ferguson

#include <math.h>
#include <robot_calibration/capture/led_finder.h>
#include <sensor_msgs/point_cloud2_iterator.h>
#include <sensor_msgs/image_encodings.h>
#include <pluginlib/class_list_macros.h>

PLUGINLIB_EXPORT_CLASS(robot_calibration::LedFinder, robot_calibration::FeatureFinder)

namespace robot_calibration
{

// We use a number of PC2 iterators; define the index offsets here.
// These index the position floats (X, Y, Z) and the color bytes
// (R, G, B) of a point through the PointCloud2 iterators below.
const unsigned X = 0;
const unsigned Y = 1;
const unsigned Z = 2;
const unsigned R = 0;
const unsigned G = 1;
const unsigned B = 2;

// Euclidean distance between two points
double distancePoints(const geometry_msgs::Point p1,
                      const geometry_msgs::Point p2)
{
  return std::sqrt((p1.x-p2.x) * (p1.x-p2.x) +
                   (p1.y-p2.y) * (p1.y-p2.y) +
                   (p1.z-p2.z) * (p1.z-p2.z));
}

LedFinder::LedFinder() :
  waiting_(false)
{
}

bool LedFinder::init(const std::string& name,
                     ros::NodeHandle& nh)
{
  if (!FeatureFinder::init(name, nh))
    return false;

  // Setup the action client
  std::string topic_name;
  nh.param<std::string>("action", topic_name, "/gripper_led_action");
  client_.reset(new LedClient(topic_name, true));
  ROS_INFO("Waiting for %s...", topic_name.c_str());
  client_->waitForServer();

  // Setup subscriber
  nh.param<std::string>("topic", topic_name, "/points");
  subscriber_ = nh.subscribe(topic_name,
                             1,
                             &LedFinder::cameraCallback,
                             this);

  // Publish where LEDs were seen
  publisher_ = nh.advertise<sensor_msgs::PointCloud2>(getName() + "_points", 10);

  // Maximum distance LED can be from expected pose
  nh.param<double>("max_error", max_error_, 0.1);
  // Maximum relative difference between two LEDs
  nh.param<double>("max_inconsistency", max_inconsistency_, 0.01);

  // Parameters for detection
  nh.param<double>("threshold", threshold_, 1000.0);
  nh.param<int>("max_iterations", max_iterations_, 50);

  // Should we output debug image/cloud?
  nh.param<bool>("debug", output_debug_, false);

  // Names of the sensor models that will be used during optimization
  nh.param<std::string>("camera_sensor_name", camera_sensor_name_, "camera");
  nh.param<std::string>("chain_sensor_name", chain_sensor_name_, "arm");

  // Parameters for the LEDs themselves
  std::string gripper_led_frame;
  nh.param<std::string>("gripper_led_frame", gripper_led_frame, "wrist_roll_link");
  XmlRpc::XmlRpcValue led_poses;
  nh.getParam("poses", led_poses);
  ROS_ASSERT(led_poses.getType() == XmlRpc::XmlRpcValue::TypeArray);
  // Each LED has a code, and a pose in the gripper_led_frame
  for (int i = 0; i < led_poses.size(); ++i)
  {
    codes_.push_back(static_cast<int>(led_poses[i]["code"]));
    codes_.push_back(0);  // assumes "0" is the code for "OFF"

    double x, y, z;
    x = static_cast<double>(led_poses[i]["x"]);
    y = static_cast<double>(led_poses[i]["y"]);
    z = static_cast<double>(led_poses[i]["z"]);
    trackers_.push_back(CloudDifferenceTracker(gripper_led_frame, x, y, z));

    // Publisher for this tracker's debug image
    boost::shared_ptr<ros::Publisher> pub(new ros::Publisher);
    *pub = nh.advertise<sensor_msgs::Image>(static_cast<std::string>(led_poses[i]["topic"]), 10);
    tracker_publishers_.push_back(pub);
  }

  // Setup to get camera depth info
  if (!depth_camera_manager_.init(nh))
  {
    // Error will have been printed by manager
    return false;
  }

  return true;
}
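
// For reference, a sketch of the parameters init() reads above. The values
// here are purely illustrative (real codes, offsets, and topics depend on
// the robot), but the structure matches the XmlRpc parsing in the loop:
//
//   gripper_led_frame: wrist_roll_link
//   poses:
//     - {code: 1, x: 0.026, y: -0.018, z: 0.0, topic: led_0_debug}
//     - {code: 2, x: 0.026, y:  0.018, z: 0.0, topic: led_1_debug}
//
// Each entry yields one CloudDifferenceTracker plus one debug image
// publisher, and each LED code is followed by a 0 ("OFF") in codes_.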

void LedFinder::cameraCallback(const sensor_msgs::PointCloud2::ConstPtr& cloud)
{
  if (waiting_)
  {
    cloud_ = *cloud;
    waiting_ = false;
  }
}

// Returns true if we got a message, false if we timed out.
bool LedFinder::waitForCloud()
{
  // Initial wait cycle so that camera is definitely up to date.
  ros::Duration(1/10.0).sleep();

  waiting_ = true;
  int count = 250;
  while (--count)
  {
    if (!waiting_)
    {
      // success
      return true;
    }
    ros::Duration(0.01).sleep();
    ros::spinOnce();
  }
  ROS_ERROR("Failed to get cloud");
  return !waiting_;
}
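
// Timing note: waitForCloud() above polls at 10 ms intervals for up to 250
// iterations, so a fresh cloud must arrive within roughly 2.5 seconds
// (after the initial 0.1 s settling sleep) or the capture attempt fails.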

bool LedFinder::find(robot_calibration_msgs::CalibrationData * msg)
{
  // Index of the current LED code; signed so that the initial -1 increments
  // to 0 on the first pass regardless of codes_.size().
  int code_idx = -1;

  std::vector<geometry_msgs::PointStamped> rgbd;
  std::vector<geometry_msgs::PointStamped> world;

  sensor_msgs::PointCloud2 prev_cloud;

  // Turn off all LEDs to get a baseline cloud
  robot_calibration_msgs::GripperLedCommandGoal command;
  command.led_code = 0;
  client_->sendGoal(command);
  client_->waitForResult(ros::Duration(10.0));

  // Get initial cloud
  if (!waitForCloud())
  {
    return false;
  }
  prev_cloud = cloud_;

  // Initialize difference trackers
  for (size_t i = 0; i < trackers_.size(); ++i)
  {
    trackers_[i].reset(cloud_.height, cloud_.width);
  }

  int cycles = 0;
  while (true)
  {
    // Toggle LED to next state
    code_idx = (code_idx + 1) % codes_.size();
    command.led_code = codes_[code_idx];
    client_->sendGoal(command);
    client_->waitForResult(ros::Duration(10.0));

    // Get a point cloud
    if (!waitForCloud())
    {
      return false;
    }

    // Commands are organized as on-off pairs for each LED.
    int tracker = code_idx / 2;
    // Even indexes are turning on, odd are turning off
    double weight = (code_idx % 2 == 0) ? 1 : -1;

    // Has each point converged?
    bool done = true;
    for (size_t t = 0; t < trackers_.size(); ++t)
    {
      done &= trackers_[t].isFound(cloud_, threshold_);
    }
    // We want to break only if the LED is off, so that the pixel is not washed out
    if (done && (weight == -1))
    {
      break;
    }

    // Get expected pose of LED in the cloud frame
    geometry_msgs::PointStamped led;
    led.point = trackers_[tracker].point_;
    led.header.frame_id = trackers_[tracker].frame_;
    try
    {
      listener_.transformPoint(cloud_.header.frame_id, ros::Time(0), led,
                               led.header.frame_id, led);
    }
    catch (const tf::TransformException& ex)
    {
      ROS_ERROR_STREAM("Failed to transform feature to " << cloud_.header.frame_id);
      return false;
    }

    // Update the tracker
    trackers_[tracker].process(cloud_, prev_cloud, led.point, max_error_, weight);

    if (++cycles > max_iterations_)
    {
      ROS_ERROR("Failed to find features before reaching the maximum number of iterations.");
      return false;
    }

    prev_cloud = cloud_;

    // Publish state of each tracker
    for (size_t i = 0; i < trackers_.size(); i++)
    {
      sensor_msgs::Image image = trackers_[i].getImage();
      tracker_publishers_[i]->publish(image);
    }
  }
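
  // To recap the loop above: each on/off command pair drives one tracker.
  // "On" frames add positive color differences and "off" frames add the
  // negated (again positive) differences, so diff_ accumulates only at
  // pixels that change with the LED. The loop exits once every tracker's
  // max response exceeds threshold_, and only on an "off" frame so the
  // sampled pixel is not washed out.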

  // Create PointCloud2 to publish
  sensor_msgs::PointCloud2 cloud;
  cloud.width = 0;
  cloud.height = 0;
  cloud.header.stamp = ros::Time::now();
  cloud.header.frame_id = cloud_.header.frame_id;
  sensor_msgs::PointCloud2Modifier cloud_mod(cloud);
  cloud_mod.setPointCloud2FieldsByString(1, "xyz");
  cloud_mod.resize(trackers_.size());  // one point per tracked LED
  sensor_msgs::PointCloud2Iterator<float> iter_cloud(cloud, "x");

  // Collect results
  const int CAMERA = 0;
  const int CHAIN = 1;
  robot_calibration_msgs::Observation observations[2];
  observations[CAMERA].sensor_name = camera_sensor_name_;
  observations[CHAIN].sensor_name = chain_sensor_name_;

  for (size_t t = 0; t < trackers_.size(); ++t)
  {
    geometry_msgs::PointStamped rgbd_pt;
    geometry_msgs::PointStamped world_pt;

    // Get point
    if (!trackers_[t].getRefinedCentroid(cloud_, rgbd_pt))
    {
      ROS_ERROR_STREAM("No centroid for feature " << t);
      return false;
    }

    // Check that point is close enough to expected pose
    try
    {
      listener_.transformPoint(trackers_[t].frame_, ros::Time(0), rgbd_pt,
                               rgbd_pt.header.frame_id, world_pt);
    }
    catch (const tf::TransformException& ex)
    {
      ROS_ERROR_STREAM("Failed to transform feature to " << trackers_[t].frame_);
      return false;
    }
    double distance = distancePoints(world_pt.point, trackers_[t].point_);
    if (distance > max_error_)
    {
      ROS_ERROR_STREAM("Feature was too far away from expected pose in " << trackers_[t].frame_ << ": " << distance);
      return false;
    }

    // Check that points are consistent with one another
    for (size_t t2 = 0; t2 < t; ++t2)
    {
      double expected = distancePoints(trackers_[t2].point_, trackers_[t].point_);
      double actual = distancePoints(observations[CAMERA].features[t2].point, rgbd_pt.point);
      if (fabs(expected - actual) > max_inconsistency_)
      {
        ROS_ERROR_STREAM("Features not internally consistent: " << expected << " " << actual);
        return false;
      }
    }

    // Push back observation
    observations[CAMERA].features.push_back(rgbd_pt);
    observations[CAMERA].ext_camera_info = depth_camera_manager_.getDepthCameraInfo();

    // Visualize
    iter_cloud[0] = rgbd_pt.point.x;
    iter_cloud[1] = rgbd_pt.point.y;
    iter_cloud[2] = rgbd_pt.point.z;
    ++iter_cloud;

    // Push back expected location of point on robot
    world_pt.header.frame_id = trackers_[t].frame_;
    world_pt.point = trackers_[t].point_;
    observations[CHAIN].features.push_back(world_pt);
  }

  // Final check that all points are valid
  if (observations[CAMERA].features.size() != trackers_.size())
  {
    return false;
  }

  // Add debug cloud to message
  if (output_debug_)
  {
    observations[CAMERA].cloud = cloud_;
  }

  // Copy results to message
  msg->observations.push_back(observations[CAMERA]);
  msg->observations.push_back(observations[CHAIN]);

  // Publish results
  publisher_.publish(cloud);

  return true;
}
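
// Note: the two observations above are paired index-for-index: for each
// LED, observations[CAMERA] holds where the camera saw it and
// observations[CHAIN] holds where the kinematic chain says it should be.
// Downstream calibration can then minimize the error between these pairs.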

LedFinder::CloudDifferenceTracker::CloudDifferenceTracker(
  std::string frame, double x, double y, double z) :
  frame_(frame)
{
  point_.x = x;
  point_.y = y;
  point_.z = z;
}

void LedFinder::CloudDifferenceTracker::reset(size_t height, size_t width)
{
  // Save for creating images
  height_ = height;
  width_ = width;

  // Number of clouds processed
  count_ = 0;
  // Maximum difference observed
  max_ = -1000.0;
  // Pixel this was observed in
  max_idx_ = -1;

  // Setup difference tracker
  diff_.resize(height * width);
  for (std::vector<double>::iterator it = diff_.begin(); it != diff_.end(); ++it)
  {
    *it = 0.0;
  }
}

// Weight should be +/- 1 typically
bool LedFinder::CloudDifferenceTracker::process(
  sensor_msgs::PointCloud2& cloud,
  sensor_msgs::PointCloud2& prev,
  geometry_msgs::Point& led_point,
  double max_distance,
  double weight)
{
  if ((cloud.width * cloud.height) != diff_.size())
  {
    ROS_ERROR("Cloud size has changed");
    return false;
  }

  sensor_msgs::PointCloud2ConstIterator<float> xyz(cloud, "x");
  sensor_msgs::PointCloud2ConstIterator<uint8_t> rgb(cloud, "rgb");
  sensor_msgs::PointCloud2ConstIterator<uint8_t> prev_rgb(prev, "rgb");

  // We want to compare each point to the expected LED pose,
  // but when the LED is on, the points may be NaN;
  // fall back on the most recent distance for these points
  double last_distance = 1000.0;

  // Update each point in the tracker
  const size_t num_points = cloud.data.size() / cloud.point_step;
  int valid = 0;
  int used = 0;
  for (size_t i = 0; i < num_points; i++)
  {
    // If within range of LED pose...
    geometry_msgs::Point p;
    p.x = (xyz + i)[X];
    p.y = (xyz + i)[Y];
    p.z = (xyz + i)[Z];
    double distance = distancePoints(p, led_point);

    if (std::isfinite(distance))
    {
      last_distance = distance;
      valid++;
    }
    else
    {
      distance = last_distance;
    }

    if (!std::isfinite(distance) || distance > max_distance)
    {
      continue;
    }

    // ...and has the proper change in sign
    double r = (double)((rgb + i)[R]) - (double)((prev_rgb + i)[R]);
    double g = (double)((rgb + i)[G]) - (double)((prev_rgb + i)[G]);
    double b = (double)((rgb + i)[B]) - (double)((prev_rgb + i)[B]);
    if (r > 0 && g > 0 && b > 0 && weight > 0)
    {
      diff_[i] += (r + g + b) * weight;
      used++;
    }
    else if (r < 0 && g < 0 && b < 0 && weight < 0)
    {
      diff_[i] += (r + g + b) * weight;
      used++;
    }

    // Is this a new max value?
    if (diff_[i] > max_)
    {
      max_ = diff_[i];
      max_idx_ = i;
    }
  }

  return true;
}
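
// A worked example of the accumulation in process(), with illustrative
// numbers only: suppose toggling an LED changes a nearby pixel by about
// +30 counts per color channel. The "on" frame adds (30+30+30) * (+1) = 90
// to diff_[i], and the following "off" frame adds (-30-30-30) * (-1) = 90
// more, so each full on/off cycle contributes ~180. With the default
// threshold_ of 1000, such a pixel would be declared found after roughly
// six on/off cycles.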

bool LedFinder::CloudDifferenceTracker::isFound(
  const sensor_msgs::PointCloud2& cloud,
  double threshold)
{
  // Returns true only if the max exceeds the threshold...
  if (max_ < threshold)
  {
    return false;
  }

  // Access the max point in the cloud
  sensor_msgs::PointCloud2ConstIterator<float> point(cloud, "x");
  point += max_idx_;

  // ...AND the current index is a valid point in the cloud.
  if (std::isnan(point[X]) ||
      std::isnan(point[Y]) ||
      std::isnan(point[Z]))
  {
    return false;
  }

  return true;
}

bool LedFinder::CloudDifferenceTracker::getRefinedCentroid(
  const sensor_msgs::PointCloud2& cloud,
  geometry_msgs::PointStamped& centroid)
{
  // Access points in the cloud
  sensor_msgs::PointCloud2ConstIterator<float> iter(cloud, "x");
  const size_t num_points = cloud.data.size() / cloud.point_step;

  // Get initial centroid
  centroid.header = cloud.header;
  centroid.point.x = (iter + max_idx_)[X];
  centroid.point.y = (iter + max_idx_)[Y];
  centroid.point.z = (iter + max_idx_)[Z];

  // Do not accept NaNs
  if (std::isnan(centroid.point.x) ||
      std::isnan(centroid.point.y) ||
      std::isnan(centroid.point.z))
  {
    return false;
  }

  // Get a better centroid
  int points = 0;
  double sum_x = 0.0;
  double sum_y = 0.0;
  double sum_z = 0.0;
  for (size_t i = 0; i < num_points; i++)
  {
    sensor_msgs::PointCloud2ConstIterator<float> point = iter + i;

    // Using highly likely points
    if (diff_[i] > (max_ * 0.75))
    {
      if (std::isnan(point[X]) || std::isnan(point[Y]) || std::isnan(point[Z]))
      {
        continue;
      }

      double dx = point[X] - centroid.point.x;
      double dy = point[Y] - centroid.point.y;
      double dz = point[Z] - centroid.point.z;

      // That are less than 5cm from the max point
      if ((dx * dx) + (dy * dy) + (dz * dz) < (0.05 * 0.05))
      {
        sum_x += point[X];
        sum_y += point[Y];
        sum_z += point[Z];
        ++points;
      }
    }
  }

  if (points > 0)
  {
    centroid.point.x = (centroid.point.x + sum_x) / (points + 1);
    centroid.point.y = (centroid.point.y + sum_y) / (points + 1);
    centroid.point.z = (centroid.point.z + sum_z) / (points + 1);
  }

  return true;
}
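
// Note on the averaging above: the seed (max-response) point initializes
// the centroid and is then folded in with the `points` accumulated
// high-likelihood neighbors, hence the division by (points + 1).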

sensor_msgs::Image LedFinder::CloudDifferenceTracker::getImage()
{
  sensor_msgs::Image image;

  image.height = height_;
  image.width = width_;

  image.encoding = sensor_msgs::image_encodings::BGR8;
  image.step = width_ * 3;

  image.data.resize(width_ * height_ * 3);

  for (size_t i = 0; i < diff_.size(); i++)
  {
    if (diff_[i] > max_ * 0.9)
    {
      image.data[i * 3] = 255;
      image.data[i * 3 + 1] = 0;
      image.data[i * 3 + 2] = 0;
    }
    else if (diff_[i] > 0)
    {
      image.data[i * 3] = static_cast<uint8_t>(diff_[i] / 2.0);
      image.data[i * 3 + 1] = static_cast<uint8_t>(diff_[i] / 2.0);
      image.data[i * 3 + 2] = static_cast<uint8_t>(diff_[i] / 2.0);
    }
    else
    {
      image.data[i * 3] = 0;
      image.data[i * 3 + 1] = 0;
      image.data[i * 3 + 2] = 0;
    }
  }

  return image;
}
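
// Reading the debug image: the encoding is BGR8, so data[i*3] is the blue
// channel. Pixels within 10% of the strongest response are drawn bright
// blue, other positive responses render as gray proportional to diff_/2,
// and everything else is black.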

}  // namespace robot_calibration