#include <cmath>
#include <stdexcept>
#include <robot_calibration/capture/led_finder.h>
#include <sensor_msgs/point_cloud2_iterator.h>
#include <sensor_msgs/image_encodings.h>

namespace robot_calibration
{

// Indices of the XYZ position and RGB color channels in the point cloud iterators
const unsigned X = 0;
const unsigned Y = 1;
const unsigned Z = 2;
const unsigned R = 0;
const unsigned G = 1;
const unsigned B = 2;

// Euclidean distance between two points
double distancePoints(
  const geometry_msgs::Point& p1,
  const geometry_msgs::Point& p2)
{
  return std::sqrt((p1.x-p2.x) * (p1.x-p2.x) +
                   (p1.y-p2.y) * (p1.y-p2.y) +
                   (p1.z-p2.z) * (p1.z-p2.z));
}

LedFinder::LedFinder(ros::NodeHandle& nh) :
  FeatureFinder(nh),
  waiting_(false)
{
  // Setup the action client used to command the gripper LEDs
  std::string topic_name;
  nh.param<std::string>("action", topic_name, "/gripper_led_action");
  client_.reset(new LedClient(topic_name, true));
  ROS_INFO("Waiting for %s...", topic_name.c_str());
  client_->waitForServer();

  // Subscribe to the point cloud topic
  nh.param<std::string>("topic", topic_name, "/points");
  subscriber_ = nh.subscribe(topic_name,
                             1,
                             &LedFinder::cameraCallback,
                             this);

  // Publisher for the detected feature points (visualization/debugging)
  publisher_ = nh.advertise<sensor_msgs::PointCloud2>("led_points", 10);

  // Maximum distance a detected LED may be from its expected pose
  nh.param<double>("max_error", max_error_, 0.1);
  // Maximum allowed mismatch between expected and measured inter-LED distances
  nh.param<double>("max_inconsistency", max_inconsistency_, 0.01);

  // Detection threshold and iteration limit for the LED cycling loop
  nh.param<double>("threshold", threshold_, 1000.0);
  nh.param<int>("max_iterations", max_iterations_, 50);

  // Should we output debug data?
  nh.param<bool>("debug", output_debug_, false);

  // Names of the sensors as they appear in the calibration data
  nh.param<std::string>("camera_sensor_name", camera_sensor_name_, "camera");
  nh.param<std::string>("chain_sensor_name", chain_sensor_name_, "arm");

  // Expected poses of the LEDs on the gripper
  std::string gripper_led_frame;
  nh.param<std::string>("gripper_led_frame", gripper_led_frame, "wrist_roll_link");
  XmlRpc::XmlRpcValue led_poses;
  nh.getParam("poses", led_poses);
  ROS_ASSERT(led_poses.getType() == XmlRpc::XmlRpcValue::TypeArray);

  for (int i = 0; i < led_poses.size(); ++i)
  {
    // Each LED's "on" code is followed by a 0 code that turns it back off
    codes_.push_back(static_cast<int>(led_poses[i]["code"]));
    codes_.push_back(0);

    // Create a difference tracker at the expected pose of this LED
    double x, y, z;
    x = static_cast<double>(led_poses[i]["x"]);
    y = static_cast<double>(led_poses[i]["y"]);
    z = static_cast<double>(led_poses[i]["z"]);
    trackers_.push_back(CloudDifferenceTracker(gripper_led_frame, x, y, z));

    // Publisher for this tracker's debug image
    boost::shared_ptr<ros::Publisher> pub(new ros::Publisher);
    *pub = nh.advertise<sensor_msgs::Image>(static_cast<std::string>(led_poses[i]["topic"]), 10);
    tracker_publishers_.push_back(pub);
  }

  // Setup the depth camera manager
  if (!depth_camera_manager_.init(nh))
  {
    // A bare "throw;" with no active exception would terminate; throw a real exception instead
    throw std::runtime_error("LedFinder: failed to initialize depth camera manager");
  }
}

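// Store the incoming cloud, but only when waitForCloud() has requested one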
void LedFinder::cameraCallback(const sensor_msgs::PointCloud2::ConstPtr& cloud)
{
  if (waiting_)
  {
    cloud_ = *cloud;
    waiting_ = false;
  }
}

// Returns true if a new cloud was received, false if we timed out
bool LedFinder::waitForCloud()
{
  // Brief delay before latching the next cloud, to avoid using stale data
  ros::Duration(1/10.0).sleep();

  waiting_ = true;
  int count = 250;
  while (--count)
  {
    if (!waiting_)
    {
      // The callback has filled cloud_
      return true;
    }
    ros::Duration(0.01).sleep();
    ros::spinOnce();
  }
  ROS_ERROR("Failed to get cloud");
  return !waiting_;
}

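// Capture one calibration sample: cycle each gripper LED on and off while
// watching the point cloud, use the accumulated color differences to locate
// each LED, then record the detected points (camera observation) alongside
// the expected LED poses on the gripper (chain observation).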
bool LedFinder::find(robot_calibration_msgs::CalibrationData * msg)
{
  // Index into codes_, advanced (with wraparound) at the start of each cycle
  int code_idx = -1;

  std::vector<geometry_msgs::PointStamped> rgbd;
  std::vector<geometry_msgs::PointStamped> world;

  sensor_msgs::PointCloud2 prev_cloud;

  // Start with all LEDs off
  robot_calibration_msgs::GripperLedCommandGoal command;
  command.led_code = 0;
  client_->sendGoal(command);
  client_->waitForResult(ros::Duration(10.0));

  // Grab a baseline cloud with the LEDs off
  if (!waitForCloud())
  {
    return false;
  }
  prev_cloud = cloud_;

  // Size the difference trackers to match the incoming cloud
  for (size_t i = 0; i < trackers_.size(); ++i)
  {
    trackers_[i].reset(cloud_.height, cloud_.width);
  }

  int cycles = 0;
  while (true)
  {
    // Move to the next code; codes_ holds an on/off pair for each LED
    code_idx = (code_idx + 1) % codes_.size();
    command.led_code = codes_[code_idx];
    client_->sendGoal(command);
    client_->waitForResult(ros::Duration(10.0));

    // Get a cloud reflecting the new LED state
    if (!waitForCloud())
    {
      return false;
    }

    // Each pair of codes maps to one tracker
    int tracker = code_idx/2;
    // Even codes turn the LED on, odd codes turn it off
    double weight = (code_idx%2 == 0) ? 1 : -1;

    // Check whether every tracker has already found its LED
    bool done = true;
    for (size_t t = 0; t < trackers_.size(); ++t)
    {
      done &= trackers_[t].isFound(cloud_, threshold_);
    }
    // Stop on an "off" transition once all LEDs have been found
    if (done && (weight == -1))
    {
      break;
    }

    // Transform the expected LED pose into the cloud frame
    geometry_msgs::PointStamped led;
    led.point = trackers_[tracker].point_;
    led.header.frame_id = trackers_[tracker].frame_;
    try
    {
      listener_.transformPoint(cloud_.header.frame_id, ros::Time(0), led,
                               led.header.frame_id, led);
    }
    catch (const tf::TransformException& ex)
    {
      ROS_ERROR_STREAM("Failed to transform feature to " << cloud_.header.frame_id);
      return false;
    }

    // Accumulate the color difference between this cloud and the previous one
    trackers_[tracker].process(cloud_, prev_cloud, led.point, max_error_, weight);

    if (++cycles > max_iterations_)
    {
      ROS_ERROR("Failed to find features before using maximum iterations.");
      return false;
    }

    prev_cloud = cloud_;

    // Publish each tracker's debug image
    for (size_t i = 0; i < trackers_.size(); i++)
    {
      sensor_msgs::Image image = trackers_[i].getImage();
      tracker_publishers_[i]->publish(image);
    }
  }

  // Create a point cloud of the detected features for visualization
  sensor_msgs::PointCloud2 cloud;
  cloud.width = 0;
  cloud.height = 0;
  cloud.header.stamp = ros::Time::now();
  cloud.header.frame_id = cloud_.header.frame_id;
  sensor_msgs::PointCloud2Modifier cloud_mod(cloud);
  cloud_mod.setPointCloud2FieldsByString(1, "xyz");
  cloud_mod.resize(trackers_.size());
  sensor_msgs::PointCloud2Iterator<float> iter_cloud(cloud, "x");

  // Add one observation for the camera and one for the arm chain
  int idx_cam = msg->observations.size() + 0;
  int idx_chain = msg->observations.size() + 1;
  msg->observations.resize(msg->observations.size() + 2);
  msg->observations[idx_cam].sensor_name = camera_sensor_name_;
  msg->observations[idx_chain].sensor_name = chain_sensor_name_;

  for (size_t t = 0; t < trackers_.size(); ++t)
  {
    geometry_msgs::PointStamped rgbd_pt;
    geometry_msgs::PointStamped world_pt;

    // Get the refined centroid of this LED in the camera frame
    if (!trackers_[t].getRefinedCentroid(cloud_, rgbd_pt))
    {
      ROS_ERROR_STREAM("No centroid for feature " << t);
      return false;
    }

    // Check that the detected point is near the expected pose on the gripper
    try
    {
      listener_.transformPoint(trackers_[t].frame_, ros::Time(0), rgbd_pt,
                               rgbd_pt.header.frame_id, world_pt);
    }
    catch (const tf::TransformException& ex)
    {
      ROS_ERROR_STREAM("Failed to transform feature to " << trackers_[t].frame_);
      return false;
    }
    double distance = distancePoints(world_pt.point, trackers_[t].point_);
    if (distance > max_error_)
    {
      ROS_ERROR_STREAM("Feature was too far away from expected pose in " << trackers_[t].frame_ << ": " << distance);
      return false;
    }

    // Check that distances between detected features match distances between expected poses
    for (size_t t2 = 0; t2 < t; ++t2)
    {
      double expected = distancePoints(trackers_[t2].point_, trackers_[t].point_);
      double actual = distancePoints(msg->observations[idx_cam].features[t2].point, rgbd_pt.point);
      if (std::fabs(expected-actual) > max_inconsistency_)
      {
        ROS_ERROR_STREAM("Features not internally consistent: " << expected << " " << actual);
        return false;
      }
    }

    // Push the detected point into the camera observation
    msg->observations[idx_cam].features.push_back(rgbd_pt);
    msg->observations[idx_cam].ext_camera_info = depth_camera_manager_.getDepthCameraInfo();

    // Add the detected point to the visualization cloud
    iter_cloud[0] = rgbd_pt.point.x;
    iter_cloud[1] = rgbd_pt.point.y;
    iter_cloud[2] = rgbd_pt.point.z;
    ++iter_cloud;

    // Push the expected LED pose into the chain observation
    world_pt.header.frame_id = trackers_[t].frame_;
    world_pt.point = trackers_[t].point_;
    msg->observations[idx_chain].features.push_back(world_pt);
  }

  // All trackers must have produced a feature
  if (msg->observations[idx_cam].features.size() != trackers_.size())
  {
    return false;
  }

  // Optionally attach the full cloud for debugging
  if (output_debug_)
  {
    msg->observations[idx_cam].cloud = cloud_;
  }

  // Publish the visualization cloud
  publisher_.publish(cloud);

  return true;
}

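// CloudDifferenceTracker accumulates, for every point in the cloud, the signed
// color change observed as a single LED is toggled on and off. The point with
// the largest accumulated change is taken as the LED location once it exceeds
// the detection threshold.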
LedFinder::CloudDifferenceTracker::CloudDifferenceTracker(
  std::string frame, double x, double y, double z) :
    frame_(frame)
{
  point_.x = x;
  point_.y = y;
  point_.z = z;
}

void LedFinder::CloudDifferenceTracker::reset(size_t height, size_t width)
{
  // Save the dimensions of the clouds we will process
  height_ = height;
  width_ = width;

  // Reset accumulation state
  count_ = 0;
  max_ = -1000.0;
  max_idx_ = -1;

  // Clear the accumulated per-point differences
  diff_.resize(height * width);
  for (std::vector<double>::iterator it = diff_.begin(); it != diff_.end(); ++it)
  {
    *it = 0.0;
  }
}

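// Accumulate the per-point color difference between 'cloud' and 'prev'.
// weight is +1 when the LED was just turned on and -1 when it was just
// turned off, so only changes consistent with the LED transition add to
// the score; points farther than max_distance from the expected LED pose
// are ignored.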
bool LedFinder::CloudDifferenceTracker::process(
  sensor_msgs::PointCloud2& cloud,
  sensor_msgs::PointCloud2& prev,
  geometry_msgs::Point& led_point,
  double max_distance,
  double weight)
{
  if ((cloud.width * cloud.height) != diff_.size())
  {
    ROS_ERROR("Cloud size has changed");
    return false;
  }

  sensor_msgs::PointCloud2ConstIterator<float> xyz(cloud, "x");
  sensor_msgs::PointCloud2ConstIterator<uint8_t> rgb(cloud, "rgb");
  sensor_msgs::PointCloud2ConstIterator<uint8_t> prev_rgb(prev, "rgb");

  // We compare each point to the expected LED pose, but some points have NAN
  // positions; fall back on the last valid distance for those points
  double last_distance = 1000.0;

  // Update the difference tracker for every point in the cloud
  const size_t num_points = cloud.data.size() / cloud.point_step;
  int valid = 0;
  int used = 0;
  for (size_t i = 0; i < num_points; i++)
  {
    // Distance of this point from the expected LED pose
    geometry_msgs::Point p;
    p.x = (xyz + i)[X];
    p.y = (xyz + i)[Y];
    p.z = (xyz + i)[Z];
    double distance = distancePoints(p, led_point);

    if (std::isfinite(distance))
    {
      last_distance = distance;
      valid++;
    }
    else
    {
      distance = last_distance;
    }

    // Skip points that are too far from the expected LED pose
    if (!std::isfinite(distance) || distance > max_distance)
    {
      continue;
    }

    // Color change relative to the previous cloud
    double r = (double)((rgb + i)[R]) - (double)((prev_rgb + i)[R]);
    double g = (double)((rgb + i)[G]) - (double)((prev_rgb + i)[G]);
    double b = (double)((rgb + i)[B]) - (double)((prev_rgb + i)[B]);
    if (r > 0 && g > 0 && b > 0 && weight > 0)
    {
      // Point got brighter while the LED was being turned on
      diff_[i] += (r + g + b) * weight;
      used++;
    }
    else if (r < 0 && g < 0 && b < 0 && weight < 0)
    {
      // Point got darker while the LED was being turned off
      diff_[i] += (r + g + b) * weight;
      used++;
    }

    // Is this a new maximum?
    if (diff_[i] > max_)
    {
      max_ = diff_[i];
      max_idx_ = i;
    }
  }

  return true;
}

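// An LED is considered found once its maximum accumulated difference exceeds
// the threshold and the corresponding point has a valid position.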
bool LedFinder::CloudDifferenceTracker::isFound(
  const sensor_msgs::PointCloud2& cloud,
  double threshold)
{
  // Difference must exceed the detection threshold
  if (max_ < threshold)
  {
    return false;
  }

  // Look up the point with the maximum difference
  sensor_msgs::PointCloud2ConstIterator<float> point(cloud, "x");
  point += max_idx_;

  // That point must have a valid (non-NAN) position
  if (std::isnan(point[X]) ||
      std::isnan(point[Y]) ||
      std::isnan(point[Z]))
  {
    return false;
  }

  return true;
}

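// Refine the LED location by averaging the strongest point with nearby points
// that also responded strongly to the LED toggling.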
bool LedFinder::CloudDifferenceTracker::getRefinedCentroid(
  const sensor_msgs::PointCloud2& cloud,
  geometry_msgs::PointStamped& centroid)
{
  // Access the point data
  sensor_msgs::PointCloud2ConstIterator<float> iter(cloud, "x");
  const size_t num_points = cloud.data.size() / cloud.point_step;

  // Start from the point with the maximum accumulated difference
  centroid.header = cloud.header;
  centroid.point.x = (iter + max_idx_)[X];
  centroid.point.y = (iter + max_idx_)[Y];
  centroid.point.z = (iter + max_idx_)[Z];

  // That point must be valid
  if (std::isnan(centroid.point.x) ||
      std::isnan(centroid.point.y) ||
      std::isnan(centroid.point.z))
  {
    return false;
  }

  // Sum nearby points that also had a strong response
  int points = 0;
  double sum_x = 0.0;
  double sum_y = 0.0;
  double sum_z = 0.0;
  for (size_t i = 0; i < num_points; i++)
  {
    sensor_msgs::PointCloud2ConstIterator<float> point = iter + i;

    // Only consider points with at least 75% of the maximum response
    if (diff_[i] > (max_*0.75))
    {
      // Skip invalid points
      if (std::isnan(point[X]) || std::isnan(point[Y]) || std::isnan(point[Z]))
      {
        continue;
      }

      double dx = point[X] - centroid.point.x;
      double dy = point[Y] - centroid.point.y;
      double dz = point[Z] - centroid.point.z;

      // Only use points within 5cm of the strongest point
      if ((dx*dx) + (dy*dy) + (dz*dz) < (0.05*0.05))
      {
        sum_x += point[X];
        sum_y += point[Y];
        sum_z += point[Z];
        ++points;
      }
    }
  }

  // Average the strongest point with its accepted neighbors
  if (points > 0)
  {
    centroid.point.x = (centroid.point.x + sum_x)/(points+1);
    centroid.point.y = (centroid.point.y + sum_y)/(points+1);
    centroid.point.z = (centroid.point.z + sum_z)/(points+1);
  }

  return true;
}

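// Render the accumulated per-point differences as an image for debugging.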
sensor_msgs::Image LedFinder::CloudDifferenceTracker::getImage()
{
  sensor_msgs::Image image;

  image.height = height_;
  image.width = width_;

  image.encoding = sensor_msgs::image_encodings::BGR8;
  image.step = width_ * 3;

  image.data.resize(width_ * height_ * 3);

  for (size_t i = 0; i < diff_.size(); i++)
  {
    if (diff_[i] > max_ * 0.9)
    {
      // Highlight points near the maximum response
      image.data[i*3] = 255;
      image.data[i*3 + 1] = 0;
      image.data[i*3 + 2] = 0;
    }
    else if (diff_[i] > 0)
    {
      // Scale weaker responses into a gray value
      image.data[i*3] = static_cast<uint8_t>(diff_[i]/2.0);
      image.data[i*3 + 1] = static_cast<uint8_t>(diff_[i]/2.0);
      image.data[i*3 + 2] = static_cast<uint8_t>(diff_[i]/2.0);
    }
    else
    {
      image.data[i*3] = 0;
      image.data[i*3 + 1] = 0;
      image.data[i*3 + 2] = 0;
    }
  }

  return image;
}

}  // namespace robot_calibration