// Euclidean distance between two points
double distancePoints(
  const geometry_msgs::Point p1,
  const geometry_msgs::Point p2)
{
  return std::sqrt((p1.x-p2.x) * (p1.x-p2.x) +
                   (p1.y-p2.y) * (p1.y-p2.y) +
                   (p1.z-p2.z) * (p1.z-p2.z));
}
LedFinder::LedFinder() :
  // ... (member initializers not captured)

bool LedFinder::init(const std::string& name, ros::NodeHandle& nh)
{
  // ...

  // Action client used to command the gripper LEDs
  std::string topic_name;
  nh.param<std::string>("action", topic_name, "/gripper_led_action");
  // ...
  ROS_INFO("Waiting for %s...", topic_name.c_str());
  // ...

  // Subscribe to the incoming point cloud
  nh.param<std::string>("topic", topic_name, "/points");
  // ...
  // Frame in which the LED offsets below are expressed
  std::string gripper_led_frame;
  nh.param<std::string>("gripper_led_frame", gripper_led_frame, "wrist_roll_link");
  // ...

  // Each LED entry provides a command code, an (x, y, z) offset in
  // gripper_led_frame, and a topic for its debug image
  for (int i = 0; i < led_poses.size(); ++i)
  {
    codes_.push_back(static_cast<int>(led_poses[i]["code"]));
    // ...
    x = static_cast<double>(led_poses[i]["x"]);
    y = static_cast<double>(led_poses[i]["y"]);
    z = static_cast<double>(led_poses[i]["z"]);
    // ...
    *pub = nh.advertise<sensor_msgs::Image>(static_cast<std::string>(led_poses[i]["topic"]), 10);
    // ...
  }
bool LedFinder::find(robot_calibration_msgs::CalibrationData * msg)
{
  // ...

  // code_idx is unsigned: -1 wraps to 255, and the modulo in the capture loop
  // below brings it back into range on the first increment
  uint8_t code_idx = -1;

  std::vector<geometry_msgs::PointStamped> rgbd;
  std::vector<geometry_msgs::PointStamped> world;

  sensor_msgs::PointCloud2 prev_cloud;
  // ...

  // Command used to toggle the gripper LEDs
  robot_calibration_msgs::GripperLedCommandGoal command;
  command.led_code = 0;
  // ...
  // Prepare each difference tracker for this capture
  for (size_t i = 0; i < trackers_.size(); ++i)
  {
    // ...
  }

  // Capture loop: cycle through the LED codes until every tracker converges
  // or the iteration limit is reached
  // ...
    // Command the next LED code
    code_idx = (code_idx + 1) % codes_.size();
    command.led_code = codes_[code_idx];
    // ...
    // Two consecutive codes address the same LED: tracker selects it and
    // weight gives the expected direction of the brightness change
    int tracker = code_idx/2;
    double weight = (code_idx%2 == 0) ? 1 : -1;
    // ...

    // Stop only on a "darkening" step so the LED pixels are not washed out
    if (done && (weight == -1))
    {
      // ...
    }

    // Expected position of this LED, transformed into the cloud frame
    geometry_msgs::PointStamped led;
    // ...
    led.header.frame_id = trackers_[tracker].frame_;
    // ...
    listener_.transformPoint(/* leading arguments not captured */,
                             led.header.frame_id, led);
    // ...
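    // Concretely, the indexing above walks the codes in pairs per tracker:
    //   code_idx 0 -> tracker 0, weight +1 (its pixels are expected to brighten)
    //   code_idx 1 -> tracker 0, weight -1 (its pixels are expected to darken)
    //   code_idx 2 -> tracker 1, weight +1
    //   ...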
  ROS_ERROR("Failed to find features before using maximum iterations.");
  // ...
  // Publish each tracker's debug image
  for (size_t i = 0; i < trackers_.size(); i++)
  {
    sensor_msgs::Image image = trackers_[i].getImage();
    // ...
  }
  // ...

  // Point cloud of the detected features, published for visualization
  sensor_msgs::PointCloud2 cloud;
  // ...
  cloud.header.frame_id = cloud_.header.frame_id;
  // ...
  // Two observations are produced: one from the camera, one from the arm chain
  const int CAMERA = 0;
  const int CHAIN = 1;
  robot_calibration_msgs::Observation observations[2];
  // ...

  geometry_msgs::PointStamped rgbd_pt;
  geometry_msgs::PointStamped world_pt;
  // ...

  // The body below runs once per tracker t (loop header not captured)
  // ...
    // Transform the detected point into the tracker's frame so it can be
    // compared against the expected LED position
    listener_.transformPoint(/* leading arguments not captured */,
                             rgbd_pt.header.frame_id, world_pt);
    // ...
    // Consistency check: the measured distance between two detected LEDs must
    // agree (within a tolerance) with the distance between their expected poses
    for (size_t t2 = 0; t2 < t; ++t2)
    {
      // ...
      double actual = distancePoints(observations[CAMERA].features[t2].point, rgbd_pt.point);
      // ...
        ROS_ERROR_STREAM("Features not internally consistent: " << expected <<
                         " " << actual);
      // ...
    }

    // Accept the detection as a camera feature
    observations[CAMERA].features.push_back(rgbd_pt);
    // ...

    // Also add it to the debug cloud
    iter_cloud[0] = rgbd_pt.point.x;
    iter_cloud[1] = rgbd_pt.point.y;
    iter_cloud[2] = rgbd_pt.point.z;
    // ...

    // The matching chain feature is the LED's expected position on the gripper
    world_pt.header.frame_id = trackers_[t].frame_;
    // ...
    observations[CHAIN].features.push_back(world_pt);
  // ...

  // Every tracker must have produced a feature
  if (observations[CAMERA].features.size() != trackers_.size())
  {
    // ...
  }
  // ...
    observations[CAMERA].cloud = cloud_;
  // ...

  // Copy both observations into the outgoing calibration message
  msg->observations.push_back(observations[CAMERA]);
  msg->observations.push_back(observations[CHAIN]);
  // ...
CloudDifferenceTracker::CloudDifferenceTracker(
  std::string frame,
  double x,
  double y,
  double z) :
  // ... (member initializers not captured)

void CloudDifferenceTracker::reset(size_t height, size_t width)
{
  // ...

  // Reset the per-pixel difference accumulator
  diff_.resize(height * width);
  for (std::vector<double>::iterator it = diff_.begin(); it != diff_.end(); ++it)
  {
    *it = 0.0;
  }
}
bool CloudDifferenceTracker::process(
  sensor_msgs::PointCloud2& cloud,
  sensor_msgs::PointCloud2& prev,
  geometry_msgs::Point& led_point,
  double max_distance,
  double weight)
{
  // The cloud must match the size the tracker was reset() with
  if ((cloud.width * cloud.height) != diff_.size())
  {
    // ...
  }
  // ...

  // Points on a lit LED are often NaN; fall back on the last valid distance
  double last_distance = 1000.0;

  const size_t num_points = cloud.data.size() / cloud.point_step;
  for (size_t i = 0; i < num_points; i++)
  {
    // Distance from this point to the expected LED position
    geometry_msgs::Point p;
    // ...
    if (std::isfinite(distance))
    {
      last_distance = distance;
      // ...
    }
    // ...
      distance = last_distance;
    // ...

    // Ignore points that are too far from the expected LED position
    if (!std::isfinite(distance) || distance > max_distance)
    {
      // ...
    }

    // Per-channel change relative to the previous cloud
    double r = (double)((rgb + i)[R]) - (double)((prev_rgb + i)[R]);
    double g = (double)((rgb + i)[G]) - (double)((prev_rgb + i)[G]);
    double b = (double)((rgb + i)[B]) - (double)((prev_rgb + i)[B]);

    // Accumulate only when all channels change in the direction the weight expects
    if (r > 0 && g > 0 && b > 0 && weight > 0)
    {
      diff_[i] += (r + g + b) * weight;
      // ...
    }
    else if (r < 0 && g < 0 && b < 0 && weight < 0)
    {
      diff_[i] += (r + g + b) * weight;
      // ...
    }
    // ...
  }
  // ...
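// Illustrative helper (not part of the original source; the name is made up):
// the per-pixel update rule from process() above, factored out. A pixel only
// contributes when every colour channel moves in the direction the weight
// expects, and because a negative sum is multiplied by a negative weight, the
// accumulated value grows in both the "on" and the "off" step for pixels that
// follow the LED.
static double ledDiffContribution(double dr, double dg, double db, double weight)
{
  if (dr > 0 && dg > 0 && db > 0 && weight > 0)
    return (dr + dg + db) * weight;   // all channels brighter while expecting "on"
  if (dr < 0 && dg < 0 && db < 0 && weight < 0)
    return (dr + dg + db) * weight;   // all channels darker while expecting "off"
  return 0.0;                         // inconsistent change contributes nothing
}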
bool CloudDifferenceTracker::isFound(
  const sensor_msgs::PointCloud2& cloud,
  double threshold)
{
  // The accumulated difference must exceed the threshold...
  if (max_ < threshold)
  {
    return false;
  }

  // ...and the best-scoring pixel must be a valid point in the cloud
  // ...
  if (std::isnan(point[X]) ||
      std::isnan(point[Y]) ||
      std::isnan(point[Z]))
  {
    return false;
  }

  return true;
}
bool CloudDifferenceTracker::getRefinedCentroid(
  const sensor_msgs::PointCloud2& cloud,
  geometry_msgs::PointStamped& centroid)
{
  // ...
  const size_t num_points = cloud.data.size() / cloud.point_step;

  // Seed the centroid with the pixel that accumulated the largest difference
  centroid.header = cloud.header;
  centroid.point.x = (iter + max_idx_)[X];
  centroid.point.y = (iter + max_idx_)[Y];
  centroid.point.z = (iter + max_idx_)[Z];

  // Reject a NaN seed
  if (std::isnan(centroid.point.x) ||
      std::isnan(centroid.point.y) ||
      std::isnan(centroid.point.z))
  {
    return false;
  }
  // ...

  // Refine the centroid by averaging in accepted nearby points
  for (size_t i = 0; i < num_points; i++)
  {
    // ...

    // Skip invalid points
    if (std::isnan(point[X]) || std::isnan(point[Y]) || std::isnan(point[Z]))
      continue;

    // Offset from the seed point
    double dx = point[X] - centroid.point.x;
    double dy = point[Y] - centroid.point.y;
    double dz = point[Z] - centroid.point.z;

    // Only accept points within 5 cm of the seed
    if ((dx*dx) + (dy*dy) + (dz*dz) < (0.05*0.05))
    {
      // ...
    }
  }
  // ...

  centroid.point.x = (centroid.point.x + sum_x)/(points+1);
  centroid.point.y = (centroid.point.y + sum_y)/(points+1);
  centroid.point.z = (centroid.point.z + sum_z)/(points+1);
  // ...
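  // Worked example for the averaging above (illustrative values): with a seed
  // at x = 0.500 and two accepted neighbours at x = 0.480 and x = 0.520,
  // points = 2 and sum_x = 1.000, so the refined x is
  // (0.500 + 1.000) / (2 + 1) = 0.500, i.e. the seed is averaged together with
  // the neighbours rather than replaced by them.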
sensor_msgs::Image CloudDifferenceTracker::getImage()
{
  sensor_msgs::Image image;
  // ...

  // Render the accumulated differences: the strongest pixels get a single
  // saturated channel, weaker positive differences render as grey, the rest black
  for (size_t i = 0; i < diff_.size(); i++)
  {
    if (/* condition for the strongest pixels not captured */)
    {
      image.data[i*3] = 255;
      image.data[i*3 + 1] = 0;
      image.data[i*3 + 2] = 0;
    }
    else if (diff_[i] > 0)
    {
      image.data[i*3] = static_cast<uint8_t>(diff_[i]/2.0);
      image.data[i*3 + 1] = static_cast<uint8_t>(diff_[i]/2.0);
      image.data[i*3 + 2] = static_cast<uint8_t>(diff_[i]/2.0);
    }
    else
    {
      // ...
      image.data[i*3 + 1] = 0;
      image.data[i*3 + 2] = 0;
    }
  }
  // ...
  return image;
}
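// Usage sketch (not part of the original source): how a single
// CloudDifferenceTracker can be driven, using only the member functions shown
// above. The header path, the robot_calibration namespace and the numeric
// values are assumptions made for illustration.
#include <robot_calibration/capture/led_finder.h>  // assumed header location
#include <sensor_msgs/PointCloud2.h>
#include <geometry_msgs/PointStamped.h>

void trackSingleLed(const sensor_msgs::PointCloud2& current,
                    const sensor_msgs::PointCloud2& previous)
{
  // One tracker per LED: a frame plus the LED's (x, y, z) offset in that frame
  robot_calibration::CloudDifferenceTracker tracker("wrist_roll_link", 0.01, 0.0, 0.02);

  // Size the per-pixel accumulator to the incoming cloud
  tracker.reset(current.height, current.width);

  // Expected LED position in the cloud frame (would normally come from TF)
  geometry_msgs::Point expected;

  // Accumulate the change seen right after the LED is commanded on; a matching
  // call with weight -1.0 would follow once the LED is commanded off again
  sensor_msgs::PointCloud2 cur = current;    // process() takes non-const references
  sensor_msgs::PointCloud2 prev = previous;
  tracker.process(cur, prev, expected, 0.1 /* max_distance, metres */, 1.0);

  // Once the accumulated difference is large enough, extract the LED centroid
  if (tracker.isFound(cur, 1000.0 /* threshold, placeholder */))
  {
    geometry_msgs::PointStamped centroid;
    if (tracker.getRefinedCentroid(cur, centroid))
    {
      // centroid.point now holds the detected LED position in the cloud frame
    }
  }
}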