kinfu_nodelet.cpp
/*********************************************************************
 * Software License Agreement (BSD License)
 *
 * Copyright (c) 2017, Kentaro Wada and JSK Lab
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following
 * disclaimer in the documentation and/or other materials provided
 * with the distribution.
 * * Neither the name of Kentaro Wada and JSK Lab nor the names of its
 * contributors may be used to endorse or promote products derived
 * from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *********************************************************************/

#define BOOST_PARAMETER_MAX_ARITY 7

#include <boost/thread/thread.hpp>
#include <boost/filesystem.hpp>

#include <pcl/features/normal_3d.h>
#include <pcl/io/obj_io.h>

#include <cv_bridge/cv_bridge.h>
#include <jsk_recognition_msgs/TrackingStatus.h>
#include <jsk_recognition_msgs/SaveMesh.h>
#include <jsk_recognition_utils/pcl_ros_util.h>
#include <sensor_msgs/fill_image.h>
#include <sensor_msgs/image_encodings.h>

#include "jsk_pcl_ros/kinfu.h"

namespace enc = sensor_msgs::image_encodings;

// TODO(wkentaro): Move to upstream.
namespace pcl
{
  void
  concatenateFields(PointCloud<PointXYZ>& cloud_xyz, PointCloud<RGB>& cloud_rgb, PointCloud<PointXYZRGB>& cloud)
  {
    if (cloud_xyz.points.size() != cloud_rgb.points.size())
    {
      std::cout << "Clouds being concatenated must have the same size." << std::endl;
      return;
    }
    cloud.points.resize(cloud_xyz.points.size());
    for (size_t i = 0; i < cloud_xyz.points.size(); i++)
    {
      cloud.points[i].x = cloud_xyz.points[i].x;
      cloud.points[i].y = cloud_xyz.points[i].y;
      cloud.points[i].z = cloud_xyz.points[i].z;
      cloud.points[i].r = cloud_rgb.points[i].r;
      cloud.points[i].g = cloud_rgb.points[i].g;
      cloud.points[i].b = cloud_rgb.points[i].b;
    }
    cloud.width = cloud_xyz.width;
    cloud.height = cloud_xyz.height;
  }
}

namespace jsk_pcl_ros
{
  void
  Kinfu::onInit()
  {
    ConnectionBasedNodelet::onInit();
    always_subscribe_ = true;  // for mapping

    pnh_->param("device", device_, 0);
    pnh_->param("auto_reset", auto_reset_, true);
    pnh_->param("integrate_color", integrate_color_, false);
    pnh_->param("slam", slam_, false);
    pnh_->param<std::string>("fixed_frame_id", fixed_frame_id_, "odom_init");
    pnh_->param("n_textures", n_textures_, -1);
    pnh_->param("volume_size", volume_size_, pcl::device::kinfuLS::VOLUME_SIZE);

    srv_ = boost::make_shared<dynamic_reconfigure::Server<Config> >(*pnh_);
    dynamic_reconfigure::Server<Config>::CallbackType f = boost::bind(&Kinfu::configCallback, this, _1, _2);
    srv_->setCallback(f);

    tf_listener_.reset(new tf::TransformListener());

    pub_camera_pose_ = advertise<geometry_msgs::PoseStamped>(*pnh_, "output", 1);
    pub_cloud_ = advertise<sensor_msgs::PointCloud2>(*pnh_, "output/cloud", 1);
    pub_depth_ = advertise<sensor_msgs::Image>(*pnh_, "output/depth", 1);
    pub_rendered_image_ = advertise<sensor_msgs::Image>(*pnh_, "output/rendered_image", 1);
    pub_status_ = advertise<jsk_recognition_msgs::TrackingStatus>(*pnh_, "output/status", 1);

    srv_reset_ = pnh_->advertiseService("reset", &Kinfu::resetCallback, this);
    srv_save_mesh_ = pnh_->advertiseService("save_mesh", &Kinfu::saveMeshCallback, this);
    srv_save_mesh_with_context_ = pnh_->advertiseService(
      "save_mesh_with_context", &Kinfu::saveMeshWithContextCallback, this);
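
    // Example client calls (assuming this nodelet is loaded under the name
    // "kinfu"; adjust the namespace to your launch setup):
    //   rosservice call /kinfu/reset
    //   rosservice call /kinfu/save_mesh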

    onInitPostProcess();
  }

  Kinfu::~Kinfu()
  {
    // message_filters::Synchronizer must have reset() called on it
    // before the message_filters::Subscriber members are freed.
    // Calling reset fixes the following error on shutdown of the nodelet:
    //   terminate called after throwing an instance of
    //   'boost::exception_detail::clone_impl<boost::exception_detail::error_info_injector<boost::lock_error> >'
    //   what(): boost: mutex lock failed in pthread_mutex_lock: Invalid argument
    // Also see https://github.com/ros/ros_comm/issues/720 .
    sync_.reset();
    sync_with_color_.reset();
  }

  void
  Kinfu::configCallback(Config& config, uint32_t level)
  {
    boost::mutex::scoped_lock lock(mutex_);
    save_dir_ = config.save_dir;
  }

  void
  Kinfu::initKinfu(const sensor_msgs::CameraInfo::ConstPtr& caminfo_msg)
  {
    pcl::gpu::setDevice(device_);
    pcl::gpu::printShortCudaDeviceInfo(device_);

    /* below are copied from pcl/gpu/kinfu_large_scale/src/kinfuLS_app.cpp */

    float shift_distance = pcl::device::kinfuLS::DISTANCE_THRESHOLD;
    Eigen::Vector3f volume_size_vector = Eigen::Vector3f::Constant(volume_size_/*meters*/);
    if (shift_distance > 2.5 * volume_size_)
    {
      NODELET_WARN("WARNING Shifting distance (%.2f) is very large compared to the volume size (%.2f).",
                   shift_distance, volume_size_);
    }

    kinfu_.reset(new pcl::gpu::kinfuLS::KinfuTracker(
      volume_size_vector, shift_distance, caminfo_msg->height, caminfo_msg->width));

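    // Initial camera pose: centered on the volume in x and y, and pulled back
    // along z to 1.2 x half the volume depth behind the volume center, so the
    // camera starts just outside the front face of the TSDF volume.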
    Eigen::Matrix3f R = Eigen::Matrix3f::Identity();
    Eigen::Vector3f t = volume_size_vector * 0.5f - Eigen::Vector3f(0, 0, volume_size_vector(2) / 2 * 1.2f);

    Eigen::Affine3f pose = Eigen::Translation3f(t) * Eigen::AngleAxisf(R);

    kinfu_->setInitialCameraPose(pose);
    kinfu_->volume().setTsdfTruncDist(0.030f/*meters*/);
    kinfu_->setIcpCorespFilteringParams(0.1f/*meters*/, sin(pcl::deg2rad(20.f)));
    //kinfu_->setDepthTruncationForICP(3.0f/*meters*/);
    kinfu_->setCameraMovementThreshold(0.001f);

    if (integrate_color_)
    {
      const int max_color_integration_weight = 2;
      kinfu_->initColorIntegration(max_color_integration_weight);
    }
  }

  void
  Kinfu::subscribe()
  {
    int queue_size;
    pnh_->param("queue_size", queue_size, 10);

    sub_camera_info_.subscribe(*pnh_, "input/camera_info", 1);
    sub_depth_.subscribe(*pnh_, "input/depth", 1);
    if (integrate_color_)
    {
      sub_color_.subscribe(*pnh_, "input/color", 1);
      sync_with_color_.reset(new message_filters::Synchronizer<SyncPolicyWithColor>(queue_size));
      sync_with_color_->connectInput(sub_camera_info_, sub_depth_, sub_color_);
      sync_with_color_->registerCallback(boost::bind(&Kinfu::update, this, _1, _2, _3));
    }
    else
    {
      sync_.reset(new message_filters::Synchronizer<SyncPolicy>(queue_size));
      sync_->connectInput(sub_camera_info_, sub_depth_);
      sync_->registerCallback(boost::bind(&Kinfu::update, this, _1, _2));
    }
  }

  void
  Kinfu::unsubscribe()
  {
  }

  void
  Kinfu::update(const sensor_msgs::CameraInfo::ConstPtr& caminfo_msg,
                const sensor_msgs::Image::ConstPtr& depth_msg)
  {
    update(caminfo_msg, depth_msg, sensor_msgs::ImageConstPtr());
  }

  void
  Kinfu::update(const sensor_msgs::CameraInfo::ConstPtr& caminfo_msg,
                const sensor_msgs::Image::ConstPtr& depth_msg,
                const sensor_msgs::Image::ConstPtr& color_msg)
  {
    boost::mutex::scoped_lock lock(mutex_);

    if ((depth_msg->height != caminfo_msg->height) || (depth_msg->width != caminfo_msg->width))
    {
      ROS_ERROR("Image size of input depth and camera info must be the same. Depth: (%d, %d), Camera Info: (%d, %d)",
                depth_msg->height, depth_msg->width, caminfo_msg->height, caminfo_msg->width);
      return;
    }
    if (integrate_color_ && ((color_msg->height != caminfo_msg->height) || (color_msg->width != caminfo_msg->width)))
    {
      ROS_ERROR("Image size of input color image and camera info must be the same. Color: (%d, %d), Camera Info: (%d, %d)",
                color_msg->height, color_msg->width, caminfo_msg->height, caminfo_msg->width);
      return;
    }

    if (!kinfu_)
    {
      initKinfu(caminfo_msg);
    }

    // run kinfu
    {
      kinfu_->setDepthIntrinsics(/*fx=*/caminfo_msg->K[0], /*fy=*/caminfo_msg->K[4],
                                 /*cx=*/caminfo_msg->K[2], /*cy=*/caminfo_msg->K[5]);

      // depth: 32fc1 -> 16uc1
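      // (ROS 32FC1 depth is in meters, while KinfuTracker consumes 16UC1
      // depth in millimeters, hence the x1000 scaling before conversion.)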
      cv::Mat depth;
      if (depth_msg->encoding == enc::TYPE_32FC1)
      {
        cv::Mat depth_32fc1 = cv_bridge::toCvShare(depth_msg, enc::TYPE_32FC1)->image;
        depth_32fc1 *= 1000.;
        depth_32fc1.convertTo(depth, CV_16UC1);
      }
      else if (depth_msg->encoding == enc::TYPE_16UC1)
      {
        depth = cv_bridge::toCvShare(depth_msg, enc::TYPE_16UC1)->image;
      }
      else
      {
        NODELET_FATAL("Unsupported depth image encoding: %s", depth_msg->encoding.c_str());
        return;
      }

      // depth: cpu -> gpu
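      // Note: the second argument of upload() is the row step in bytes,
      // i.e. 2 bytes per pixel for 16UC1 depth.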
      pcl::gpu::kinfuLS::KinfuTracker::DepthMap depth_device;
      depth_device.upload(&(depth.data[0]), depth.cols * 2, depth.rows, depth.cols);

      if (integrate_color_)
      {
        // color: cpu -> gpu
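        // The color volume expects RGB pixel order, so BGR8 input is
        // converted with cv_bridge before uploading.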
        if (color_msg->encoding == enc::BGR8)
        {
          cv_bridge::CvImagePtr tmp_image_ptr_ = cv_bridge::toCvCopy(color_msg, enc::RGB8);
          colors_device_.upload(&(tmp_image_ptr_->toImageMsg()->data[0]),
                                color_msg->step, color_msg->height, color_msg->width);
        }
        else
        {
          colors_device_.upload(&(color_msg->data[0]), color_msg->step, color_msg->height, color_msg->width);
        }

        (*kinfu_)(depth_device, colors_device_);
      }
      else
      {
        (*kinfu_)(depth_device);
      }
      frame_idx_++;
    }

    jsk_recognition_msgs::TrackingStatus status;
    status.header = caminfo_msg->header;
    if (kinfu_->icpIsLost())
    {
      NODELET_FATAL_THROTTLE(10, "Tracking by ICP in kinect fusion is lost. auto_reset: %d", auto_reset_);
      if (auto_reset_)
      {
        kinfu_.reset();
        frame_idx_ = 0;
        cameras_.clear();
      }
      status.is_lost = true;
      pub_status_.publish(status);
      return;
    }
    status.is_lost = false;
    pub_status_.publish(status);

    // save texture
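    // Record a texture snapshot and its camera pose every SNAPSHOT_RATE
    // frames; convertToTextureMesh() consumes these when saving the mesh.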
    if (integrate_color_ && (frame_idx_ % pcl::device::kinfuLS::SNAPSHOT_RATE == 1))
    {
      cv::Mat texture = cv_bridge::toCvCopy(color_msg, color_msg->encoding)->image;
      if (color_msg->encoding == enc::RGB8)
      {
        cv::cvtColor(texture, texture, cv::COLOR_RGB2BGR);
      }
      textures_.push_back(texture);

      pcl::TextureMapping<pcl::PointXYZ>::Camera camera;
      camera.pose = kinfu_->getCameraPose();
      camera.focal_length = caminfo_msg->K[0];  // must be equal to caminfo_msg->K[4]
      camera.height = caminfo_msg->height;
      camera.width = caminfo_msg->width;
      cameras_.push_back(camera);
    }

    // publish kinfu origin and slam
    {
      Eigen::Affine3f camera_pose = kinfu_->getCameraPose();

      // publish camera pose
      if (pub_camera_pose_.getNumSubscribers() > 0)
      {
        geometry_msgs::PoseStamped camera_pose_msg;
        tf::poseEigenToMsg(camera_pose, camera_pose_msg.pose);
        camera_pose_msg.header.stamp = caminfo_msg->header.stamp;
        camera_pose_msg.header.frame_id = "kinfu_origin";
        pub_camera_pose_.publish(camera_pose_msg);
      }

      Eigen::Affine3f camera_to_kinfu_origin = camera_pose.inverse();
      tf::Transform tf_camera_to_kinfu_origin;
      tf::transformEigenToTF(camera_to_kinfu_origin, tf_camera_to_kinfu_origin);
      tf_camera_to_kinfu_origin.setRotation(tf_camera_to_kinfu_origin.getRotation().normalized());
      tf_broadcaster_.sendTransform(
        tf::StampedTransform(tf_camera_to_kinfu_origin, caminfo_msg->header.stamp,
                             caminfo_msg->header.frame_id, "kinfu_origin"));

      if (slam_)
      {
        // use kinfu as slam, and publish tf: map -> fixed_frame_id_ (usually odom_init)
        try
        {
          tf::StampedTransform tf_odom_to_camera;
          tf_listener_->lookupTransform(
            fixed_frame_id_, caminfo_msg->header.frame_id, ros::Time(0), tf_odom_to_camera);
          Eigen::Affine3f odom_to_camera;
          tf::transformTFToEigen(tf_odom_to_camera, odom_to_camera);

          if (frame_idx_ == 1)
          {
            odom_init_to_kinfu_origin_ = odom_to_camera * camera_to_kinfu_origin;
          }

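          // kinfu_origin is assumed fixed in the map frame, so the chain
          // map -> odom -> camera -> kinfu_origin must stay equal to its value
          // at the first frame; solving the identity below for map_to_odom
          // yields the drift correction published as tf map -> fixed_frame_id_.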
          Eigen::Affine3f map_to_odom;
          // map_to_odom * odom_to_camera * camera_to_kinfu_origin == odom_init_to_kinfu_origin_
          map_to_odom = odom_init_to_kinfu_origin_ * (odom_to_camera * camera_to_kinfu_origin).inverse();
          tf::StampedTransform tf_map_to_odom;
          tf::transformEigenToTF(map_to_odom, tf_map_to_odom);
          tf_map_to_odom.setRotation(tf_map_to_odom.getRotation().normalized());
          tf_broadcaster_.sendTransform(
            tf::StampedTransform(tf_map_to_odom, caminfo_msg->header.stamp, "map", fixed_frame_id_));
        }
        catch (const tf::TransformException& e)
        {
          NODELET_FATAL("%s", e.what());
        }
      }
    }

    // publish depth image
    if (pub_depth_.getNumSubscribers() > 0)
    {
      pcl::gpu::kinfuLS::KinfuTracker::DepthMap depth_gpu;
      Eigen::Affine3f camera_pose = kinfu_->getCameraPose();
      if (!raycaster_)
      {
        raycaster_ = pcl::gpu::kinfuLS::RayCaster::Ptr(
          new pcl::gpu::kinfuLS::RayCaster(
            /*height=*/kinfu_->rows(), /*width=*/kinfu_->cols(),
            /*fx=*/caminfo_msg->K[0], /*fy=*/caminfo_msg->K[4],
            /*cx=*/caminfo_msg->K[2], /*cy=*/caminfo_msg->K[5]));
      }
      raycaster_->run(kinfu_->volume(), camera_pose, kinfu_->getCyclicalBufferStructure());
      raycaster_->generateDepthImage(depth_gpu);

      int cols;
      std::vector<unsigned short> data;
      depth_gpu.download(data, cols);

      sensor_msgs::Image output_depth_msg;
      sensor_msgs::fillImage(output_depth_msg,
                             enc::TYPE_16UC1,
                             depth_gpu.rows(),
                             depth_gpu.cols(),
                             depth_gpu.cols() * 2,
                             reinterpret_cast<unsigned short*>(&data[0]));
      output_depth_msg.header = caminfo_msg->header;
      pub_depth_.publish(output_depth_msg);
    }

    // publish rendered image
    if (pub_rendered_image_.getNumSubscribers() > 0)
    {
      pcl::gpu::kinfuLS::KinfuTracker::View view_device;
      std::vector<pcl::gpu::kinfuLS::PixelRGB> view_host;
      kinfu_->getImage(view_device);

      if (integrate_color_)
      {
        pcl::gpu::kinfuLS::paint3DView(colors_device_, view_device);
      }

      int cols;
      view_device.download(view_host, cols);

      sensor_msgs::Image rendered_image_msg;
      sensor_msgs::fillImage(rendered_image_msg,
                             enc::RGB8,
                             view_device.rows(),
                             view_device.cols(),
                             view_device.cols() * 3,
                             reinterpret_cast<unsigned char*>(&view_host[0]));
      rendered_image_msg.header = caminfo_msg->header;
      pub_rendered_image_.publish(rendered_image_msg);
    }

    // publish cloud
    if (pub_cloud_.getNumSubscribers() > 0)
    {
      pcl::gpu::DeviceArray<pcl::PointXYZ> cloud_buffer_device;
      pcl::gpu::DeviceArray<pcl::PointXYZ> extracted = kinfu_->volume().fetchCloud(cloud_buffer_device);

      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz(new pcl::PointCloud<pcl::PointXYZ>());
      extracted.download(cloud_xyz->points);
      cloud_xyz->width = static_cast<int>(cloud_xyz->points.size());
      cloud_xyz->height = 1;

      sensor_msgs::PointCloud2 output_cloud_msg;
      if (integrate_color_)
      {
        pcl::gpu::DeviceArray<pcl::RGB> point_colors_device;
        kinfu_->colorVolume().fetchColors(extracted, point_colors_device);

        pcl::PointCloud<pcl::RGB>::Ptr cloud_rgb(new pcl::PointCloud<pcl::RGB>());
        point_colors_device.download(cloud_rgb->points);
        cloud_rgb->width = static_cast<int>(cloud_rgb->points.size());
        cloud_rgb->height = 1;

        pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
        cloud->points.resize(cloud_xyz->width);
        pcl::concatenateFields(*cloud_xyz, *cloud_rgb, *cloud);
        pcl::toROSMsg(*cloud, output_cloud_msg);
      }
      else
      {
        pcl::toROSMsg(*cloud_xyz, output_cloud_msg);
      }
      output_cloud_msg.header.frame_id = "kinfu_origin";
      pub_cloud_.publish(output_cloud_msg);
    }
  }

  bool
  Kinfu::resetCallback(std_srvs::Empty::Request& req, std_srvs::Empty::Response& res)
  {
    boost::mutex::scoped_lock lock(mutex_);
    kinfu_.reset();
    textures_.clear();
    cameras_.clear();
    NODELET_INFO("Reset kinect fusion by request.");
    return true;
  }

  bool
  Kinfu::saveMeshWithContextCallback(
    jsk_recognition_msgs::SaveMesh::Request& req, jsk_recognition_msgs::SaveMesh::Response& res)
  {
    pcl::PolygonMesh polygon_mesh = createPolygonMesh(req.box, req.ground_frame_id);

    boost::filesystem::path dir(integrate_color_ ? save_dir_ + "/textures" : save_dir_);
    if (boost::filesystem::create_directories(dir))
    {
      NODELET_INFO("Created save_dir: %s", save_dir_.c_str());
    }

    std::string out_file = save_dir_ + "/mesh.obj";
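    // n_textures_ < 0 uses every recorded snapshot for texture mapping;
    // n_textures_ > 0 uses only the most recent n_textures_ snapshots.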
    if (integrate_color_ && n_textures_ != 0)
    {
      pcl::TextureMesh texture_mesh;
      if (n_textures_ > 0)
      {
        std::vector<cv::Mat> textures(textures_.end() - n_textures_, textures_.end());
        pcl::texture_mapping::CameraVector cameras(cameras_.end() - n_textures_, cameras_.end());
        texture_mesh = convertToTextureMesh(polygon_mesh, textures, cameras);
      }
      else
      {
        texture_mesh = convertToTextureMesh(polygon_mesh, textures_, cameras_);
      }
      pcl::io::saveOBJFile(out_file, texture_mesh, 5);
    }
    else
    {
      pcl::io::saveOBJFile(out_file, polygon_mesh);
    }
    NODELET_INFO("Saved mesh file: %s", out_file.c_str());
    return true;
  }

  bool
  Kinfu::saveMeshCallback(std_srvs::Empty::Request& req, std_srvs::Empty::Response& res)
  {
    pcl::PolygonMesh polygon_mesh = createPolygonMesh();

    boost::filesystem::path dir(integrate_color_ ? save_dir_ + "/textures" : save_dir_);
    if (boost::filesystem::create_directories(dir))
    {
      NODELET_INFO("Created save_dir: %s", save_dir_.c_str());
    }

    std::string out_file = save_dir_ + "/mesh.obj";
    if (integrate_color_ && n_textures_ != 0)
    {
      pcl::TextureMesh texture_mesh;
      if (n_textures_ > 0)
      {
        std::vector<cv::Mat> textures(textures_.end() - n_textures_, textures_.end());
        pcl::texture_mapping::CameraVector cameras(cameras_.end() - n_textures_, cameras_.end());
        texture_mesh = convertToTextureMesh(polygon_mesh, textures, cameras);
      }
      else
      {
        texture_mesh = convertToTextureMesh(polygon_mesh, textures_, cameras_);
      }
      pcl::io::saveOBJFile(out_file, texture_mesh, 5);
    }
    else
    {
      pcl::io::saveOBJFile(out_file, polygon_mesh);
    }
    NODELET_INFO("Saved mesh file: %s", out_file.c_str());
    return true;
  }

  pcl::TextureMesh
  Kinfu::convertToTextureMesh(const pcl::PolygonMesh& triangles,
                              const std::vector<cv::Mat> textures,
                              pcl::texture_mapping::CameraVector cameras)
  {
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::fromPCLPointCloud2(triangles.cloud, *cloud);

    // Create the TextureMesh object that will contain our UV-mapped mesh
    pcl::TextureMesh mesh;
    mesh.cloud = triangles.cloud;
    std::vector<pcl::Vertices> polygon_1;

    // push faces into the TextureMesh object
    polygon_1.resize(triangles.polygons.size());
    for (size_t i = 0; i < triangles.polygons.size(); ++i)
    {
      polygon_1[i] = triangles.polygons[i];
    }
    mesh.tex_polygons.push_back(polygon_1);
    NODELET_INFO("Input mesh contains %lu faces, %lu vertices and %lu textures.",
                 mesh.tex_polygons[0].size(), cloud->points.size(), cameras.size());

    // Create materials for each texture (and one extra for occluded faces)
    mesh.tex_materials.resize(cameras.size() + 1);
    for (size_t i = 0; i <= cameras.size(); ++i)
    {
      pcl::TexMaterial mesh_material;
      mesh_material.tex_Ka.r = 0.2f;
      mesh_material.tex_Ka.g = 0.2f;
      mesh_material.tex_Ka.b = 0.2f;

      mesh_material.tex_Kd.r = 0.8f;
      mesh_material.tex_Kd.g = 0.8f;
      mesh_material.tex_Kd.b = 0.8f;

      mesh_material.tex_Ks.r = 1.0f;
      mesh_material.tex_Ks.g = 1.0f;
      mesh_material.tex_Ks.b = 1.0f;

      mesh_material.tex_d = 1.0f;
      mesh_material.tex_Ns = 75.0f;
      mesh_material.tex_illum = 2;

      std::stringstream tex_name;
      tex_name << "material_" << i;
      tex_name >> mesh_material.tex_name;

      if (i < cameras.size())
      {
        std::stringstream ss;
        ss << "textures/" << i << ".jpg";
        std::string texture_file = ss.str();
        cv::imwrite(save_dir_ + "/" + texture_file, textures[i]);
        cameras[i].texture_file = texture_file;
        mesh_material.tex_file = texture_file;
      }
      else
      {
        std::string texture_file = "textures/occluded.jpg";
        cv::imwrite(save_dir_ + "/" + texture_file,
                    cv::Mat::zeros(textures[0].rows, textures[0].cols, CV_8UC1));
        mesh_material.tex_file = texture_file;
      }

      mesh.tex_materials[i] = mesh_material;
    }

    // sort faces
    pcl::TextureMapping<pcl::PointXYZ> tm;  // TextureMapping object that will perform the sort
    tm.textureMeshwithMultipleCameras(mesh, cameras);

    // compute normals for the mesh
    pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
    pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>);
    tree->setInputCloud(cloud);
    n.setInputCloud(cloud);
    n.setSearchMethod(tree);
    n.setKSearch(20);
    n.compute(*normals);
    pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);
    pcl::concatenateFields(*cloud, *normals, *cloud_with_normals);
    pcl::toPCLPointCloud2(*cloud_with_normals, mesh.cloud);

    return mesh;
  }

  pcl::PolygonMesh
  Kinfu::createPolygonMesh()
  {
    // create triangles
    if (!marching_cubes_)
    {
      marching_cubes_ = pcl::gpu::kinfuLS::MarchingCubes::Ptr(new pcl::gpu::kinfuLS::MarchingCubes());
    }
    pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_buffer_device;
    pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_device =
      marching_cubes_->run(kinfu_->volume(), triangles_buffer_device);

    if (triangles_device.empty())
    {
      return pcl::PolygonMesh();
    }

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    cloud->width = static_cast<int>(triangles_device.size());
    cloud->height = 1;
    cloud->header.frame_id = "kinfu_origin";
    triangles_device.download(cloud->points);

    pcl::PolygonMesh mesh;
    pcl::toPCLPointCloud2(*cloud, mesh.cloud);
    mesh.polygons.resize(triangles_device.size() / 3);
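    // Marching cubes outputs a flat vertex array with three consecutive
    // vertices per triangle; indices are pushed in (0, 2, 1) order to flip
    // the winding of the generated faces.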
    for (size_t i = 0; i < mesh.polygons.size(); ++i)
    {
      pcl::Vertices v;
      v.vertices.push_back(i * 3 + 0);
      v.vertices.push_back(i * 3 + 2);
      v.vertices.push_back(i * 3 + 1);
      mesh.polygons[i] = v;
    }
    return mesh;
  }

  pcl::PolygonMesh
  Kinfu::createPolygonMesh(const jsk_recognition_msgs::BoundingBox& box_msg, const std::string& ground_frame_id)
  {
    // create triangles
    if (!marching_cubes_)
    {
      marching_cubes_ = pcl::gpu::kinfuLS::MarchingCubes::Ptr(new pcl::gpu::kinfuLS::MarchingCubes());
    }
    pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_buffer_device;
    pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_device =
      marching_cubes_->run(kinfu_->volume(), triangles_buffer_device);

    if (triangles_device.empty())
    {
      return pcl::PolygonMesh();
    }

    // get original polygon mesh
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>());
    cloud->width = static_cast<int>(triangles_device.size());
    cloud->height = 1;
    cloud->header.frame_id = "kinfu_origin";
    triangles_device.download(cloud->points);

    // get tf
    Eigen::Affine3f kinfu_origin_to_box = Eigen::Affine3f::Identity();
    if (box_msg.header.frame_id != "kinfu_origin")
    {
      tf::StampedTransform tf_kinfu_origin_to_box;
      tf_listener_->lookupTransform("kinfu_origin", box_msg.header.frame_id, ros::Time(0), tf_kinfu_origin_to_box);
      tf::transformTFToEigen(tf_kinfu_origin_to_box, kinfu_origin_to_box);
    }

    // transform bounding box to kinfu frame
    jsk_recognition_msgs::BoundingBox transformed_box_msg;
    transformed_box_msg.dimensions = box_msg.dimensions;
    Eigen::Affine3f box_pose;
    tf::poseMsgToEigen(box_msg.pose, box_pose);
    box_pose = kinfu_origin_to_box * box_pose;
    tf::poseEigenToMsg(box_pose, transformed_box_msg.pose);
    transformed_box_msg.header.frame_id = "kinfu_origin";

    // crop cloud
    std::vector<int> indices;
    jsk_recognition_utils::cropPointCloud<pcl::PointXYZ>(cloud, transformed_box_msg, &indices);

    // generate filtered polygon mesh
    pcl::PolygonMesh mesh;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_in_box(new pcl::PointCloud<pcl::PointXYZ>());
    cloud_in_box->width = static_cast<int>(indices.size());
    cloud_in_box->height = 1;
    cloud_in_box->points.resize(indices.size());
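    // A cropped triangle is kept only if all three of its vertices survived
    // the crop, i.e. the original indices still form an aligned consecutive
    // triple (3k, 3k+1, 3k+2).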
    for (size_t i = 0; i < indices.size(); i++)
    {
      cloud_in_box->points[i] = cloud->points[indices[i]];
      if (i + 2 < indices.size() &&
          indices[i] % 3 == 0 && (indices[i] + 1) == indices[i + 1] && (indices[i] + 2) == indices[i + 2])
      {
        pcl::Vertices v;
        v.vertices.push_back(i + 0);
        v.vertices.push_back(i + 2);
        v.vertices.push_back(i + 1);
        mesh.polygons.push_back(v);
      }
    }

    // fill the face occluded by placing the object on a plane (e.g. tabletop)
    if (!ground_frame_id.empty())
    {
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_ground_frame(new pcl::PointCloud<pcl::PointXYZ>());
      tf::StampedTransform tf_transform;
      tf_listener_->lookupTransform(ground_frame_id, "kinfu_origin", ros::Time(0), tf_transform);
      Eigen::Affine3f transform;
      tf::transformTFToEigen(tf_transform, transform);
      pcl::transformPointCloud(*cloud_in_box, *cloud_ground_frame, transform);

      pcl::PointXYZ min_pt, max_pt;
      pcl::getMinMax3D(*cloud_ground_frame, min_pt, max_pt);

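      // Close the bottom of the mesh: project every visible point straight
      // down to the lowest z in the ground frame, and triangulate the
      // projected points over consecutive indices.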
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_occluded(new pcl::PointCloud<pcl::PointXYZ>());
      for (size_t i = 0; i < cloud_ground_frame->points.size(); i++)
      {
        pcl::PointXYZ pt_visible = cloud_ground_frame->points[i];
        pcl::PointXYZ pt_occluded(pt_visible.x, pt_visible.y, min_pt.z);
        cloud_occluded->points.push_back(pt_occluded);
        if (i >= 3)
        {
          pcl::Vertices v;
          v.vertices.push_back(cloud_in_box->width + i - 2);
          v.vertices.push_back(cloud_in_box->width + i - 1);
          v.vertices.push_back(cloud_in_box->width + i - 0);
          mesh.polygons.push_back(v);
        }
      }
      cloud_occluded->width = cloud_occluded->points.size();
      cloud_occluded->height = 1;
      pcl::transformPointCloud(*cloud_occluded, *cloud_occluded, transform.inverse());
      *cloud_in_box = *cloud_in_box + *cloud_occluded;
    }

    pcl::toPCLPointCloud2(*cloud_in_box, mesh.cloud);
    return mesh;
  }
}  // namespace jsk_pcl_ros

#include <pluginlib/class_list_macros.h>
PLUGINLIB_EXPORT_CLASS(jsk_pcl_ros::Kinfu, nodelet::Nodelet);