kinfu_nodelet.cpp
/*********************************************************************
 * Software License Agreement (BSD License)
 *
 * Copyright (c) 2017, Kentaro Wada and JSK Lab
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above
 *   copyright notice, this list of conditions and the following
 *   disclaimer in the documentation and/or other materials provided
 *   with the distribution.
 * * Neither the name of Kentaro Wada and JSK Lab nor the names of its
 *   contributors may be used to endorse or promote products derived
 *   from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *********************************************************************/

#define BOOST_PARAMETER_MAX_ARITY 7

#include <boost/thread/thread.hpp>
#include <boost/filesystem.hpp>

#include <pcl/features/normal_3d.h>
#include <pcl/io/obj_io.h>

#include <cv_bridge/cv_bridge.h>
#include <jsk_recognition_msgs/TrackingStatus.h>
#include <jsk_recognition_msgs/SaveMesh.h>
#include <jsk_recognition_utils/pcl_conversion_util.h>
#include <sensor_msgs/fill_image.h>
#include <sensor_msgs/image_encodings.h>

#include "jsk_pcl_ros/kinfu.h"

namespace enc = sensor_msgs::image_encodings;

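// Local overload that merges an XYZ cloud and a same-sized RGB cloud into one
// XYZRGB cloud; used below when publishing the colored TSDF point cloud.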
// TODO(wkentaro): Move to upstream.
namespace pcl
{
  void
  concatenateFields(PointCloud<PointXYZ>& cloud_xyz, PointCloud<RGB>& cloud_rgb, PointCloud<PointXYZRGB>& cloud)
  {
    if (cloud_xyz.points.size() != cloud_rgb.points.size())
    {
      std::cout << "Clouds being concatenated must have same size." << std::endl;
      return;
    }
    cloud.points.resize(cloud_xyz.points.size());
    for (size_t i = 0; i < cloud_xyz.points.size(); i++)
    {
      cloud.points[i].x = cloud_xyz.points[i].x;
      cloud.points[i].y = cloud_xyz.points[i].y;
      cloud.points[i].z = cloud_xyz.points[i].z;
      cloud.points[i].r = cloud_rgb.points[i].r;
      cloud.points[i].g = cloud_rgb.points[i].g;
      cloud.points[i].b = cloud_rgb.points[i].b;
    }
    cloud.width = cloud_xyz.width;
    cloud.height = cloud_xyz.height;
  }
}

namespace jsk_pcl_ros
{
  void
  Kinfu::onInit()
  {
    ConnectionBasedNodelet::onInit();
    always_subscribe_ = true;  // for mapping

    pnh_->param("device", device_, 0);
    pnh_->param("auto_reset", auto_reset_, true);
    pnh_->param("integrate_color", integrate_color_, false);
    pnh_->param("slam", slam_, false);
    pnh_->param<std::string>("fixed_frame_id", fixed_frame_id_, "odom_init");
    pnh_->param("n_textures", n_textures_, -1);
    pnh_->param("volume_size", volume_size_, pcl::device::kinfuLS::VOLUME_SIZE);

    srv_ = boost::make_shared<dynamic_reconfigure::Server<Config> >(*pnh_);
    dynamic_reconfigure::Server<Config>::CallbackType f = boost::bind(&Kinfu::configCallback, this, _1, _2);
    srv_->setCallback(f);

    tf_listener_.reset(new tf::TransformListener());

    pub_camera_pose_ = advertise<geometry_msgs::PoseStamped>(*pnh_, "output", 1);
    pub_cloud_ = advertise<sensor_msgs::PointCloud2>(*pnh_, "output/cloud", 1);
    pub_depth_ = advertise<sensor_msgs::Image>(*pnh_, "output/depth", 1);
    pub_rendered_image_ = advertise<sensor_msgs::Image>(*pnh_, "output/rendered_image", 1);
    pub_status_ = advertise<jsk_recognition_msgs::TrackingStatus>(*pnh_, "output/status", 1);

    srv_reset_ = pnh_->advertiseService("reset", &Kinfu::resetCallback, this);
    srv_save_mesh_ = pnh_->advertiseService("save_mesh", &Kinfu::saveMeshCallback, this);
    srv_save_mesh_with_context_ = pnh_->advertiseService(
      "save_mesh_with_context", &Kinfu::saveMeshWithContextCallback, this);

    onInitPostProcess();
  }

  void
  Kinfu::configCallback(Config& config, uint32_t level)
  {
    boost::mutex::scoped_lock lock(mutex_);
    save_dir_ = config.save_dir;
  }

  void
  Kinfu::initKinfu(const sensor_msgs::CameraInfo::ConstPtr& caminfo_msg)
  {
    pcl::gpu::setDevice(device_);
    pcl::gpu::printShortCudaDeviceInfo(device_);

    /* below are copied from pcl/gpu/kinfu_large_scale/src/kinfuLS_app.cpp */

    float shift_distance = pcl::device::kinfuLS::DISTANCE_THRESHOLD;
    Eigen::Vector3f volume_size_vector = Eigen::Vector3f::Constant(volume_size_/*meters*/);
    if (shift_distance > 2.5 * volume_size_)
    {
      NODELET_WARN("WARNING Shifting distance (%.2f) is very large compared to the volume size (%.2f).",
                   shift_distance, volume_size_);
    }

    kinfu_.reset(new pcl::gpu::kinfuLS::KinfuTracker(
      volume_size_vector, shift_distance, caminfo_msg->height, caminfo_msg->width));

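    // The initial camera pose is centered on the volume in x/y and shifted
    // along z so the camera sits just behind the volume's front face
    // (t_z = volume_size / 2 - 1.2 * volume_size / 2 = -0.1 * volume_size).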
    Eigen::Matrix3f R = Eigen::Matrix3f::Identity();
    Eigen::Vector3f t = volume_size_vector * 0.5f - Eigen::Vector3f(0, 0, volume_size_vector(2) / 2 * 1.2f);

    Eigen::Affine3f pose = Eigen::Translation3f(t) * Eigen::AngleAxisf(R);

    kinfu_->setInitialCameraPose(pose);
    kinfu_->volume().setTsdfTruncDist(0.030f/*meters*/);
    kinfu_->setIcpCorespFilteringParams(0.1f/*meters*/, sin(pcl::deg2rad(20.f)));
    //kinfu_->setDepthTruncationForICP(3.0f/*meters*/);
    kinfu_->setCameraMovementThreshold(0.001f);

    if (integrate_color_)
    {
      const int max_color_integration_weight = 2;
      kinfu_->initColorIntegration(max_color_integration_weight);
    }
  }

  void
  Kinfu::subscribe()
  {
    int queue_size;
    pnh_->param("queue_size", queue_size, 10);

    sub_camera_info_.subscribe(*pnh_, "input/camera_info", 1);
    sub_depth_.subscribe(*pnh_, "input/depth", 1);
    if (integrate_color_)
    {
      sub_color_.subscribe(*pnh_, "input/color", 1);
      sync_with_color_.reset(new message_filters::Synchronizer<SyncPolicyWithColor>(queue_size));
      sync_with_color_->connectInput(sub_camera_info_, sub_depth_, sub_color_);
      sync_with_color_->registerCallback(boost::bind(&Kinfu::update, this, _1, _2, _3));
    }
    else
    {
      sync_.reset(new message_filters::Synchronizer<SyncPolicy>(queue_size));
      sync_->connectInput(sub_camera_info_, sub_depth_);
      sync_->registerCallback(boost::bind(&Kinfu::update, this, _1, _2));
    }
  }

  void
  Kinfu::unsubscribe()
  {
  }

  void
  Kinfu::update(const sensor_msgs::CameraInfo::ConstPtr& caminfo_msg,
                const sensor_msgs::Image::ConstPtr& depth_msg)
  {
    update(caminfo_msg, depth_msg, sensor_msgs::ImageConstPtr());
  }

  void
  Kinfu::update(const sensor_msgs::CameraInfo::ConstPtr& caminfo_msg,
                const sensor_msgs::Image::ConstPtr& depth_msg,
                const sensor_msgs::Image::ConstPtr& color_msg)
  {
    boost::mutex::scoped_lock lock(mutex_);

    if ((depth_msg->height != caminfo_msg->height) || (depth_msg->width != caminfo_msg->width))
    {
      ROS_ERROR("Image size of input depth and camera info must be same. Depth: (%d, %d), Camera Info: (%d, %d)",
                depth_msg->height, depth_msg->width, caminfo_msg->height, caminfo_msg->width);
      return;
    }
    if (integrate_color_ && ((color_msg->height != caminfo_msg->height) || (color_msg->width != caminfo_msg->width)))
    {
      ROS_ERROR("Image size of input color image and camera info must be same. Color: (%d, %d), Camera Info: (%d, %d)",
                color_msg->height, color_msg->width, caminfo_msg->height, caminfo_msg->width);
      return;
    }

    if (!kinfu_)
    {
      initKinfu(caminfo_msg);
    }

    // run kinfu
    {
      kinfu_->setDepthIntrinsics(/*fx=*/caminfo_msg->K[0], /*fy=*/caminfo_msg->K[4],
                                 /*cx=*/caminfo_msg->K[2], /*cy=*/caminfo_msg->K[5]);

      // depth: 32fc1 -> 16uc1
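      // (the tracker consumes 16UC1 depth in millimeters, so 32FC1 input,
      // which is in meters, is scaled by 1000 before conversion)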
      cv::Mat depth;
      if (depth_msg->encoding == enc::TYPE_32FC1)
      {
        cv::Mat depth_32fc1 = cv_bridge::toCvShare(depth_msg, enc::TYPE_32FC1)->image;
        depth_32fc1 *= 1000.;
        depth_32fc1.convertTo(depth, CV_16UC1);
      }
      else if (depth_msg->encoding == enc::TYPE_16UC1)
      {
        depth = cv_bridge::toCvShare(depth_msg, enc::TYPE_16UC1)->image;
      }
      else
      {
        NODELET_FATAL("Unsupported depth image encoding: %s", depth_msg->encoding.c_str());
        return;
      }

      // depth: cpu -> gpu
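      // (the row step passed to upload() is in bytes: cols * 2 for 16-bit depth)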
      pcl::gpu::kinfuLS::KinfuTracker::DepthMap depth_device;
      depth_device.upload(&(depth.data[0]), depth.cols * 2, depth.rows, depth.cols);

      if (integrate_color_)
      {
        // color: cpu -> gpu
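        // (KinFu integrates RGB8 color, so BGR8 input is converted before upload)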
        if (color_msg->encoding == enc::BGR8)
        {
          cv_bridge::CvImagePtr tmp_image_ptr = cv_bridge::toCvCopy(color_msg, enc::RGB8);
          colors_device_.upload(&(tmp_image_ptr->toImageMsg()->data[0]),
                                color_msg->step, color_msg->height, color_msg->width);
        }
        else
        {
          colors_device_.upload(&(color_msg->data[0]), color_msg->step, color_msg->height, color_msg->width);
        }

        (*kinfu_)(depth_device, colors_device_);
      }
      else
      {
        (*kinfu_)(depth_device);
      }
      frame_idx_++;
    }

    jsk_recognition_msgs::TrackingStatus status;
    status.header = caminfo_msg->header;
    if (kinfu_->icpIsLost())
    {
      NODELET_FATAL_THROTTLE(10, "Tracking by ICP in kinect fusion is lost. auto_reset: %d", auto_reset_);
      if (auto_reset_)
      {
        kinfu_.reset();
        frame_idx_ = 0;
        textures_.clear();  // keep textures_ and cameras_ paired across resets
        cameras_.clear();
      }
      status.is_lost = true;
      pub_status_.publish(status);
      return;
    }
    status.is_lost = false;
    pub_status_.publish(status);

    // save texture
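    // Every SNAPSHOT_RATE-th frame, keep the color image together with the
    // camera pose and intrinsics at which it was taken; these pairs drive the
    // texture mapping when the mesh is saved.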
    if (integrate_color_ && (frame_idx_ % pcl::device::kinfuLS::SNAPSHOT_RATE == 1))
    {
      cv::Mat texture = cv_bridge::toCvCopy(color_msg, color_msg->encoding)->image;
      if (color_msg->encoding == enc::RGB8)
      {
        cv::cvtColor(texture, texture, cv::COLOR_RGB2BGR);
      }
      textures_.push_back(texture);

      pcl::TextureMapping<pcl::PointXYZ>::Camera camera;
      camera.pose = kinfu_->getCameraPose();
      camera.focal_length = caminfo_msg->K[0];  // must be equal to caminfo_msg->K[4]
      camera.height = caminfo_msg->height;
      camera.width = caminfo_msg->width;
      cameras_.push_back(camera);
    }

    // publish kinfu origin and slam
    {
      Eigen::Affine3f camera_pose = kinfu_->getCameraPose();

      // publish camera pose
      if (pub_camera_pose_.getNumSubscribers() > 0)
      {
        geometry_msgs::PoseStamped camera_pose_msg;
        tf::poseEigenToMsg(camera_pose, camera_pose_msg.pose);
        camera_pose_msg.header.stamp = caminfo_msg->header.stamp;
        camera_pose_msg.header.frame_id = "kinfu_origin";
        pub_camera_pose_.publish(camera_pose_msg);
      }

      Eigen::Affine3f camera_to_kinfu_origin = camera_pose.inverse();
      tf::Transform tf_camera_to_kinfu_origin;
      tf::transformEigenToTF(camera_to_kinfu_origin, tf_camera_to_kinfu_origin);
      tf_camera_to_kinfu_origin.setRotation(tf_camera_to_kinfu_origin.getRotation().normalized());
      tf_broadcaster_.sendTransform(
        tf::StampedTransform(tf_camera_to_kinfu_origin, caminfo_msg->header.stamp,
                             caminfo_msg->header.frame_id, "kinfu_origin"));

      if (slam_)
      {
        // use kinfu as SLAM and publish tf: map -> fixed_frame_id_ (usually odom_init)
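        // odom_init_to_kinfu_origin_ latches the odom -> kinfu_origin relation
        // at the first frame; map -> fixed_frame_id_ is then published so that
        // this relation is preserved, i.e. KinFu's tracking corrects the
        // drift of the odometry.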
        try
        {
          tf::StampedTransform tf_odom_to_camera;
          tf_listener_->lookupTransform(
            fixed_frame_id_, caminfo_msg->header.frame_id, ros::Time(0), tf_odom_to_camera);
          Eigen::Affine3f odom_to_camera;
          tf::transformTFToEigen(tf_odom_to_camera, odom_to_camera);

          if (frame_idx_ == 1)
          {
            odom_init_to_kinfu_origin_ = odom_to_camera * camera_to_kinfu_origin;
          }

          Eigen::Affine3f map_to_odom;
          // map_to_odom * odom_to_camera * camera_to_kinfu_origin == odom_init_to_kinfu_origin_
          map_to_odom = odom_init_to_kinfu_origin_ * (odom_to_camera * camera_to_kinfu_origin).inverse();
          tf::StampedTransform tf_map_to_odom;
          tf::transformEigenToTF(map_to_odom, tf_map_to_odom);
          tf_map_to_odom.setRotation(tf_map_to_odom.getRotation().normalized());
          tf_broadcaster_.sendTransform(
            tf::StampedTransform(tf_map_to_odom, caminfo_msg->header.stamp, "map", fixed_frame_id_));
        }
        catch (tf::TransformException& e)
        {
          NODELET_FATAL("%s", e.what());
        }
      }
    }

    // publish depth image
    if (pub_depth_.getNumSubscribers() > 0)
    {
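      // Raycast the TSDF volume from the current camera pose to produce a
      // synthetic depth image of the reconstructed scene.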
      pcl::gpu::kinfuLS::KinfuTracker::DepthMap depth_gpu;
      Eigen::Affine3f camera_pose = kinfu_->getCameraPose();
      if (!raycaster_)
      {
        raycaster_ = pcl::gpu::kinfuLS::RayCaster::Ptr(
          new pcl::gpu::kinfuLS::RayCaster(
            /*height=*/kinfu_->rows(), /*width=*/kinfu_->cols(),
            /*fx=*/caminfo_msg->K[0], /*fy=*/caminfo_msg->K[4],
            /*cx=*/caminfo_msg->K[2], /*cy=*/caminfo_msg->K[5]));
      }
      raycaster_->run(kinfu_->volume(), camera_pose, kinfu_->getCyclicalBufferStructure());
      raycaster_->generateDepthImage(depth_gpu);

      int cols;
      std::vector<unsigned short> data;
      depth_gpu.download(data, cols);

      sensor_msgs::Image output_depth_msg;
      sensor_msgs::fillImage(output_depth_msg,
                             enc::TYPE_16UC1,
                             depth_gpu.rows(),
                             depth_gpu.cols(),
                             depth_gpu.cols() * 2,
                             reinterpret_cast<unsigned short*>(&data[0]));
      output_depth_msg.header = caminfo_msg->header;
      pub_depth_.publish(output_depth_msg);
    }

    // publish rendered image
    if (pub_rendered_image_.getNumSubscribers() > 0)
    {
      pcl::gpu::kinfuLS::KinfuTracker::View view_device;
      std::vector<pcl::gpu::kinfuLS::PixelRGB> view_host;
      kinfu_->getImage(view_device);

      if (integrate_color_)
      {
        pcl::gpu::kinfuLS::paint3DView(colors_device_, view_device);
      }

      int cols;
      view_device.download(view_host, cols);

      sensor_msgs::Image rendered_image_msg;
      sensor_msgs::fillImage(rendered_image_msg,
                             enc::RGB8,
                             view_device.rows(),
                             view_device.cols(),
                             view_device.cols() * 3,
                             reinterpret_cast<unsigned char*>(&view_host[0]));
      rendered_image_msg.header = caminfo_msg->header;
      pub_rendered_image_.publish(rendered_image_msg);
    }

    // publish cloud
    if (pub_cloud_.getNumSubscribers() > 0)
    {
      pcl::gpu::DeviceArray<pcl::PointXYZ> cloud_buffer_device;
      pcl::gpu::DeviceArray<pcl::PointXYZ> extracted = kinfu_->volume().fetchCloud(cloud_buffer_device);

      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz(new pcl::PointCloud<pcl::PointXYZ>());
      extracted.download(cloud_xyz->points);
      cloud_xyz->width = static_cast<int>(cloud_xyz->points.size());
      cloud_xyz->height = 1;

      sensor_msgs::PointCloud2 output_cloud_msg;
      if (integrate_color_)
      {
        pcl::gpu::DeviceArray<pcl::RGB> point_colors_device;
        kinfu_->colorVolume().fetchColors(extracted, point_colors_device);

        pcl::PointCloud<pcl::RGB>::Ptr cloud_rgb(new pcl::PointCloud<pcl::RGB>());
        point_colors_device.download(cloud_rgb->points);
        cloud_rgb->width = static_cast<int>(cloud_rgb->points.size());
        cloud_rgb->height = 1;

        pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
        cloud->points.resize(cloud_xyz->width);
        pcl::concatenateFields(*cloud_xyz, *cloud_rgb, *cloud);
        pcl::toROSMsg(*cloud, output_cloud_msg);
      }
      else
      {
        pcl::toROSMsg(*cloud_xyz, output_cloud_msg);
      }
      output_cloud_msg.header.frame_id = "kinfu_origin";
      pub_cloud_.publish(output_cloud_msg);
    }
  }

  bool
  Kinfu::resetCallback(std_srvs::Empty::Request& req, std_srvs::Empty::Response& res)
  {
    boost::mutex::scoped_lock lock(mutex_);
    kinfu_.reset();
    frame_idx_ = 0;  // match the auto-reset path in update()
    textures_.clear();
    cameras_.clear();
    NODELET_INFO("Reset kinect fusion by request.");
    return true;
  }

  bool
  Kinfu::saveMeshWithContextCallback(
    jsk_recognition_msgs::SaveMesh::Request& req, jsk_recognition_msgs::SaveMesh::Response& res)
  {
    pcl::PolygonMesh polygon_mesh = createPolygonMesh(req.box, req.ground_frame_id);

    boost::filesystem::path dir(integrate_color_ ? save_dir_ + "/textures" : save_dir_);
    if (boost::filesystem::create_directories(dir))
    {
      NODELET_INFO("Created save_dir: %s", save_dir_.c_str());
    }

    std::string out_file = save_dir_ + "/mesh.obj";
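    // n_textures_ semantics: < 0 uses every saved texture snapshot, 0 saves
    // the geometry without textures, > 0 uses only the latest n_textures_
    // snapshots (and their camera poses).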
    if (integrate_color_ && n_textures_ != 0)
    {
      pcl::TextureMesh texture_mesh;
      if (n_textures_ > 0)
      {
        std::vector<cv::Mat> textures(textures_.end() - n_textures_, textures_.end());
        pcl::texture_mapping::CameraVector cameras(cameras_.end() - n_textures_, cameras_.end());
        texture_mesh = convertToTextureMesh(polygon_mesh, textures, cameras);
      }
      else
      {
        texture_mesh = convertToTextureMesh(polygon_mesh, textures_, cameras_);
      }
      pcl::io::saveOBJFile(out_file, texture_mesh, 5);
    }
    else
    {
      pcl::io::saveOBJFile(out_file, polygon_mesh);
    }
    NODELET_INFO("Saved mesh file: %s", out_file.c_str());
    return true;
  }

  bool
  Kinfu::saveMeshCallback(std_srvs::Empty::Request& req, std_srvs::Empty::Response& res)
  {
    pcl::PolygonMesh polygon_mesh = createPolygonMesh();

    boost::filesystem::path dir(integrate_color_ ? save_dir_ + "/textures" : save_dir_);
    if (boost::filesystem::create_directories(dir))
    {
      NODELET_INFO("Created save_dir: %s", save_dir_.c_str());
    }

    std::string out_file = save_dir_ + "/mesh.obj";
    if (integrate_color_ && n_textures_ != 0)
    {
      pcl::TextureMesh texture_mesh;
      if (n_textures_ > 0)
      {
        std::vector<cv::Mat> textures(textures_.end() - n_textures_, textures_.end());
        pcl::texture_mapping::CameraVector cameras(cameras_.end() - n_textures_, cameras_.end());
        texture_mesh = convertToTextureMesh(polygon_mesh, textures, cameras);
      }
      else
      {
        texture_mesh = convertToTextureMesh(polygon_mesh, textures_, cameras_);
      }
      pcl::io::saveOBJFile(out_file, texture_mesh, 5);
    }
    else
    {
      pcl::io::saveOBJFile(out_file, polygon_mesh);
    }
    NODELET_INFO("Saved mesh file: %s", out_file.c_str());
    return true;
  }

  pcl::TextureMesh
  Kinfu::convertToTextureMesh(const pcl::PolygonMesh& triangles,
                              const std::vector<cv::Mat> textures,
                              pcl::texture_mapping::CameraVector cameras)
  {
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::fromPCLPointCloud2(triangles.cloud, *cloud);

    // Create the texturemesh object that will contain our UV-mapped mesh
    pcl::TextureMesh mesh;
    mesh.cloud = triangles.cloud;
    std::vector<pcl::Vertices> polygon_1;

    // push faces into the texturemesh object
    polygon_1.resize(triangles.polygons.size());
    for (size_t i = 0; i < triangles.polygons.size(); ++i)
    {
      polygon_1[i] = triangles.polygons[i];
    }
    mesh.tex_polygons.push_back(polygon_1);
    NODELET_INFO("Input mesh contains %lu faces, %lu vertices and %lu textures.",
                 mesh.tex_polygons[0].size(), cloud->points.size(), cameras.size());

    // Create materials for each texture (and one extra for occluded faces)
    mesh.tex_materials.resize(cameras.size() + 1);
    for (size_t i = 0; i <= cameras.size(); ++i)
    {
      pcl::TexMaterial mesh_material;
      mesh_material.tex_Ka.r = 0.2f;
      mesh_material.tex_Ka.g = 0.2f;
      mesh_material.tex_Ka.b = 0.2f;

      mesh_material.tex_Kd.r = 0.8f;
      mesh_material.tex_Kd.g = 0.8f;
      mesh_material.tex_Kd.b = 0.8f;

      mesh_material.tex_Ks.r = 1.0f;
      mesh_material.tex_Ks.g = 1.0f;
      mesh_material.tex_Ks.b = 1.0f;

      mesh_material.tex_d = 1.0f;
      mesh_material.tex_Ns = 75.0f;
      mesh_material.tex_illum = 2;

      std::stringstream tex_name;
      tex_name << "material_" << i;
      tex_name >> mesh_material.tex_name;

      if (i < cameras.size())
      {
        std::stringstream ss;
        ss << "textures/" << i << ".jpg";
        std::string texture_file = ss.str();
        cv::imwrite(save_dir_ + "/" + texture_file, textures[i]);
        cameras[i].texture_file = texture_file;
        mesh_material.tex_file = texture_file;
      }
      else
      {
        std::string texture_file = "textures/occluded.jpg";
        cv::imwrite(save_dir_ + "/" + texture_file,
                    cv::Mat::zeros(textures[0].rows, textures[0].cols, CV_8UC1));
        mesh_material.tex_file = texture_file;
      }

      mesh.tex_materials[i] = mesh_material;
    }

    // sort faces
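    // (textureMeshwithMultipleCameras splits the faces per camera, computes
    // their UV coordinates, and moves faces seen by no camera into the last,
    // "occluded" submesh/material)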
    pcl::TextureMapping<pcl::PointXYZ> tm;  // TextureMapping object that will perform the sort
    tm.textureMeshwithMultipleCameras(mesh, cameras);

    // compute normals for the mesh
    pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
    pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>);
    tree->setInputCloud(cloud);
    n.setInputCloud(cloud);
    n.setSearchMethod(tree);
    n.setKSearch(20);
    n.compute(*normals);
    pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);
    pcl::concatenateFields(*cloud, *normals, *cloud_with_normals);
    pcl::toPCLPointCloud2(*cloud_with_normals, mesh.cloud);

    return mesh;
  }

  pcl::PolygonMesh
  Kinfu::createPolygonMesh()
  {
    // create triangles
    if (!marching_cubes_)
    {
      marching_cubes_ = pcl::gpu::kinfuLS::MarchingCubes::Ptr(new pcl::gpu::kinfuLS::MarchingCubes());
    }
    pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_buffer_device;
    pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_device =
      marching_cubes_->run(kinfu_->volume(), triangles_buffer_device);

    if (triangles_device.empty())
    {
      return pcl::PolygonMesh();
    }

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    cloud->width = static_cast<int>(triangles_device.size());
    cloud->height = 1;
    cloud->header.frame_id = "kinfu_origin";
    triangles_device.download(cloud->points);

    pcl::PolygonMesh mesh;
    pcl::toPCLPointCloud2(*cloud, mesh.cloud);
    mesh.polygons.resize(triangles_device.size() / 3);
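    // The marching-cubes output is a triangle soup: every three consecutive
    // points form one face. The (0, 2, 1) order below reverses each face's
    // winding relative to the soup order.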
    for (size_t i = 0; i < mesh.polygons.size(); ++i)
    {
      pcl::Vertices v;
      v.vertices.push_back(i * 3 + 0);
      v.vertices.push_back(i * 3 + 2);
      v.vertices.push_back(i * 3 + 1);
      mesh.polygons[i] = v;
    }
    return mesh;
  }

  pcl::PolygonMesh
  Kinfu::createPolygonMesh(const jsk_recognition_msgs::BoundingBox& box_msg, const std::string& ground_frame_id)
  {
    // create triangles
    if (!marching_cubes_)
    {
      marching_cubes_ = pcl::gpu::kinfuLS::MarchingCubes::Ptr(new pcl::gpu::kinfuLS::MarchingCubes());
    }
    pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_buffer_device;
    pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_device =
      marching_cubes_->run(kinfu_->volume(), triangles_buffer_device);

    if (triangles_device.empty())
    {
      return pcl::PolygonMesh();
    }

    // get original polygon mesh
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>());
    cloud->width = static_cast<int>(triangles_device.size());
    cloud->height = 1;
    cloud->header.frame_id = "kinfu_origin";
    triangles_device.download(cloud->points);

    // get tf
    Eigen::Affine3f kinfu_origin_to_box = Eigen::Affine3f::Identity();
    if (box_msg.header.frame_id != "kinfu_origin")
    {
      tf::StampedTransform tf_kinfu_origin_to_box;
      tf_listener_->lookupTransform("kinfu_origin", box_msg.header.frame_id, ros::Time(0), tf_kinfu_origin_to_box);
      tf::transformTFToEigen(tf_kinfu_origin_to_box, kinfu_origin_to_box);
    }

    // transform bounding box to kinfu frame
    jsk_recognition_msgs::BoundingBox transformed_box_msg;
    transformed_box_msg.dimensions = box_msg.dimensions;
    Eigen::Affine3f box_pose;
    tf::poseMsgToEigen(box_msg.pose, box_pose);
    box_pose = kinfu_origin_to_box * box_pose;
    tf::poseEigenToMsg(box_pose, transformed_box_msg.pose);
    transformed_box_msg.header.frame_id = "kinfu_origin";

    // crop cloud
    std::vector<int> indices;
    jsk_recognition_utils::cropPointCloud<pcl::PointXYZ>(cloud, transformed_box_msg, &indices);

    // generate filtered polygon mesh
    pcl::PolygonMesh mesh;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_in_box(new pcl::PointCloud<pcl::PointXYZ>());
    cloud_in_box->width = static_cast<int>(indices.size());
    cloud_in_box->height = 1;
    cloud_in_box->points.resize(indices.size());
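    // Keep a triangle only when all three of its vertices survived the crop:
    // the vertices of one face are consecutive in the marching-cubes soup, so
    // the check below tests that indices[i] .. indices[i] + 2 are all present.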
    for (size_t i = 0; i < indices.size(); i++)
    {
      cloud_in_box->points[i] = cloud->points[indices[i]];
      if (indices[i] % 3 == 0 && i + 2 < indices.size() &&
          (indices[i] + 1) == indices[i + 1] && (indices[i] + 2) == indices[i + 2])
      {
        pcl::Vertices v;
        v.vertices.push_back(i + 0);
        v.vertices.push_back(i + 2);
        v.vertices.push_back(i + 1);
        mesh.polygons.push_back(v);
      }
    }

    // fill in the face occluded by placing the object on a plane (e.g. tabletop)
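    // Each cropped point is projected straight down (in the ground frame) to
    // the minimum z of the cropped cloud, and consecutive projected points are
    // stitched into triangles, closing the underside of the mesh.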
    if (!ground_frame_id.empty())
    {
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_ground_frame(new pcl::PointCloud<pcl::PointXYZ>());
      tf::StampedTransform tf_transform;
      tf_listener_->lookupTransform(ground_frame_id, "kinfu_origin", ros::Time(0), tf_transform);
      Eigen::Affine3f transform;
      tf::transformTFToEigen(tf_transform, transform);
      pcl::transformPointCloud(*cloud_in_box, *cloud_ground_frame, transform);

      pcl::PointXYZ min_pt, max_pt;
      pcl::getMinMax3D(*cloud_ground_frame, min_pt, max_pt);

      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_occluded(new pcl::PointCloud<pcl::PointXYZ>());
      for (size_t i = 0; i < cloud_ground_frame->points.size(); i++)
      {
        pcl::PointXYZ pt_visible = cloud_ground_frame->points[i];
        pcl::PointXYZ pt_occluded(pt_visible.x, pt_visible.y, min_pt.z);
        cloud_occluded->points.push_back(pt_occluded);
        if (i >= 3)
        {
          pcl::Vertices v;
          v.vertices.push_back(cloud_in_box->width + i - 2);
          v.vertices.push_back(cloud_in_box->width + i - 1);
          v.vertices.push_back(cloud_in_box->width + i - 0);
          mesh.polygons.push_back(v);
        }
      }
      cloud_occluded->width = cloud_occluded->points.size();
      cloud_occluded->height = 1;
      pcl::transformPointCloud(*cloud_occluded, *cloud_occluded, transform.inverse());
      *cloud_in_box = *cloud_in_box + *cloud_occluded;
    }

    pcl::toPCLPointCloud2(*cloud_in_box, mesh.cloud);
    return mesh;
  }
}  // namespace jsk_pcl_ros

#include <pluginlib/class_list_macros.h>
PLUGINLIB_EXPORT_CLASS(jsk_pcl_ros::Kinfu, nodelet::Nodelet);