#define BOOST_PARAMETER_MAX_ARITY 7

#include <boost/thread/thread.hpp>
#include <boost/filesystem.hpp>

#include <pcl/features/normal_3d.h>
#include <pcl/io/obj_io.h>

#include <jsk_recognition_msgs/TrackingStatus.h>
#include <jsk_recognition_msgs/SaveMesh.h>
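  // Merge an XYZ cloud and an RGB cloud of equal length into a single XYZRGB
  // cloud, copying coordinates and colors point by point.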
  if (cloud_xyz.points.size() != cloud_rgb.points.size())
  {
    std::cout << "Clouds being concatenated must have the same size." << std::endl;
  }
  cloud.points.resize(cloud_xyz.points.size());
  for (size_t i = 0; i < cloud_xyz.points.size(); i++)
  {
    cloud.points[i].x = cloud_xyz.points[i].x;
    cloud.points[i].y = cloud_xyz.points[i].y;
    cloud.points[i].z = cloud_xyz.points[i].z;
    cloud.points[i].r = cloud_rgb.points[i].r;
    cloud.points[i].g = cloud_rgb.points[i].g;
    cloud.points[i].b = cloud_rgb.points[i].b;
  }
  cloud.width = cloud_xyz.width;
  cloud.height = cloud_xyz.height;
 
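  // Nodelet initialization: read the parameters, create the dynamic_reconfigure
  // server, and advertise the camera pose, cloud, depth, and tracking status topics.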
  ConnectionBasedNodelet::onInit();
  always_subscribe_ = true;

  pnh_->param("device", device_, 0);
  pnh_->param("slam", slam_, false);
  pnh_->param<std::string>("fixed_frame_id", fixed_frame_id_, "odom_init");
  pnh_->param("volume_size", volume_size_, pcl::device::kinfuLS::VOLUME_SIZE);

  srv_ = boost::make_shared<dynamic_reconfigure::Server<Config> >(*pnh_);

  pub_camera_pose_ = advertise<geometry_msgs::PoseStamped>(*pnh_, "output", 1);
  pub_cloud_ = advertise<sensor_msgs::PointCloud2>(*pnh_, "output/cloud", 1);
  pub_depth_ = advertise<sensor_msgs::Image>(*pnh_, "output/depth", 1);
  pub_status_ = advertise<jsk_recognition_msgs::TrackingStatus>(*pnh_, "output/status", 1);
 
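  // KinfuTracker construction: report the selected CUDA device, size the TSDF
  // volume, compute an initial camera pose inside it, and set the TSDF truncation,
  // ICP filtering, and camera-movement thresholds.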
  pcl::gpu::printShortCudaDeviceInfo(device_);

  float shift_distance = pcl::device::kinfuLS::DISTANCE_THRESHOLD;
  Eigen::Vector3f volume_size_vector = Eigen::Vector3f::Constant(volume_size_);

    NODELET_WARN("WARNING Shifting distance (%.2f) is very large compared to the volume size (%.2f).",
                 shift_distance, volume_size_);
 
  kinfu_.reset(new pcl::gpu::kinfuLS::KinfuTracker(
    volume_size_vector, shift_distance, caminfo_msg->height, caminfo_msg->width));

  Eigen::Matrix3f R = Eigen::Matrix3f::Identity();
  Eigen::Vector3f t = volume_size_vector * 0.5f - Eigen::Vector3f(0, 0, volume_size_vector(2) / 2 * 1.2f);

  Eigen::Affine3f pose = Eigen::Translation3f(t) * Eigen::AngleAxisf(R);

  kinfu_->volume().setTsdfTruncDist(0.030f);
  kinfu_->setIcpCorespFilteringParams(0.1f, sin(pcl::deg2rad(20.f)));
  kinfu_->setCameraMovementThreshold(0.001f);

    const int max_color_integration_weight = 2;
    kinfu_->initColorIntegration(max_color_integration_weight);

  pnh_->param("queue_size", queue_size, 10);
 
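  // update() callbacks: the depth-only overload delegates to the depth+color
  // overload, which first validates the image sizes against the camera info.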
              const sensor_msgs::Image::ConstPtr& depth_msg)

  update(caminfo_msg, depth_msg, sensor_msgs::ImageConstPtr());

              const sensor_msgs::Image::ConstPtr& depth_msg,
              const sensor_msgs::Image::ConstPtr& color_msg)

  if ((depth_msg->height != caminfo_msg->height) || (depth_msg->width != caminfo_msg->width))
  {
    ROS_ERROR("Image size of input depth and camera info must be the same. Depth: (%d, %d), Camera Info: (%d, %d)",
              depth_msg->height, depth_msg->width, caminfo_msg->height, caminfo_msg->width);
  }
  if (integrate_color_ && ((color_msg->height != caminfo_msg->height) || (color_msg->width != caminfo_msg->width)))
  {
    ROS_ERROR("Image size of input color image and camera info must be the same. Color: (%d, %d), Camera Info: (%d, %d)",
              color_msg->height, color_msg->width, caminfo_msg->height, caminfo_msg->width);
  }
 
    kinfu_->setDepthIntrinsics(caminfo_msg->K[0], caminfo_msg->K[4],
                               caminfo_msg->K[2], caminfo_msg->K[5]);

    if (depth_msg->encoding == enc::TYPE_32FC1)

      depth_32fc1 *= 1000.;
      depth_32fc1.convertTo(depth, CV_16UC1);

    else if (depth_msg->encoding == enc::TYPE_16UC1)

      NODELET_FATAL("Unsupported depth image encoding: %s", depth_msg->encoding.c_str());

    pcl::gpu::kinfuLS::KinfuTracker::DepthMap depth_device;
    depth_device.upload(&(depth.data[0]), depth.cols * 2, depth.rows, depth.cols);

      if (color_msg->encoding == enc::BGR8)

                              color_msg->step, color_msg->height, color_msg->width);

        colors_device_.upload(&(color_msg->data[0]), color_msg->step, color_msg->height, color_msg->width);

      (*kinfu_)(depth_device);
 
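  // Per-frame outputs: fill in the tracking status message, build a texture-mapping
  // camera entry from the current image and intrinsics, and publish the camera pose
  // relative to the "kinfu_origin" frame.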
  jsk_recognition_msgs::TrackingStatus status;
  status.header = caminfo_msg->header;

    if (color_msg->encoding == enc::RGB8)

      cv::cvtColor(texture, texture, cv::COLOR_RGB2BGR);

    pcl::TextureMapping<pcl::PointXYZ>::Camera camera;
    camera.pose = kinfu_->getCameraPose();
    camera.focal_length = caminfo_msg->K[0];
    camera.height = caminfo_msg->height;
    camera.width = caminfo_msg->width;
 
    Eigen::Affine3f camera_pose = kinfu_->getCameraPose();

      geometry_msgs::PoseStamped camera_pose_msg;
      camera_pose_msg.header.stamp = caminfo_msg->header.stamp;
      camera_pose_msg.header.frame_id = "kinfu_origin";

      Eigen::Affine3f camera_to_kinfu_origin = camera_pose.inverse();

                           caminfo_msg->header.frame_id, "kinfu_origin"));

        Eigen::Affine3f odom_to_camera;

        Eigen::Affine3f map_to_odom;
 
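  // Set up a ray caster with the camera intrinsics, download the ray-cast depth
  // map from the GPU, and pack it into a sensor_msgs/Image (2 bytes per pixel).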
    pcl::gpu::kinfuLS::KinfuTracker::DepthMap depth_gpu;
    Eigen::Affine3f camera_pose = kinfu_->getCameraPose();

      raycaster_ = pcl::gpu::kinfuLS::RayCaster::Ptr(
        new pcl::gpu::kinfuLS::RayCaster(

          caminfo_msg->K[0], caminfo_msg->K[4],
          caminfo_msg->K[2], caminfo_msg->K[5]));

    std::vector<unsigned short> data;
    depth_gpu.download(data, cols);

    sensor_msgs::Image output_depth_msg;

                           depth_gpu.cols() * 2,
                           reinterpret_cast<unsigned short*>(&data[0]));
    output_depth_msg.header = caminfo_msg->header;
 
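  // Download the rendered view of the reconstruction from the GPU and wrap the
  // RGB pixels in a sensor_msgs/Image stamped with the camera info header.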
    pcl::gpu::kinfuLS::KinfuTracker::View view_device;
    std::vector<pcl::gpu::kinfuLS::PixelRGB> view_host;
    kinfu_->getImage(view_device);

    view_device.download(view_host, cols);

    sensor_msgs::Image rendered_image_msg;

                           view_device.cols() * 3,
                           reinterpret_cast<unsigned char*>(&view_host[0]));
    rendered_image_msg.header = caminfo_msg->header;
 
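  // Extract the surface point cloud from the TSDF volume on the GPU; when color
  // integration is enabled, fetch the per-point colors and combine them with the
  // XYZ points into an XYZRGB cloud published in the "kinfu_origin" frame.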
    pcl::gpu::DeviceArray<pcl::PointXYZ> cloud_buffer_device;
    pcl::gpu::DeviceArray<pcl::PointXYZ> extracted = kinfu_->volume().fetchCloud(cloud_buffer_device);

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz(new pcl::PointCloud<pcl::PointXYZ>());
    extracted.download(cloud_xyz->points);
    cloud_xyz->width = static_cast<int>(cloud_xyz->points.size());
    cloud_xyz->height = 1;

    sensor_msgs::PointCloud2 output_cloud_msg;

      pcl::gpu::DeviceArray<pcl::RGB> point_colors_device;
      kinfu_->colorVolume().fetchColors(extracted, point_colors_device);

      pcl::PointCloud<pcl::RGB>::Ptr cloud_rgb(new pcl::PointCloud<pcl::RGB>());
      point_colors_device.download(cloud_rgb->points);
      cloud_rgb->width = static_cast<int>(cloud_rgb->points.size());
      cloud_rgb->height = 1;

      pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
      cloud->points.resize(cloud_xyz->width);

    output_cloud_msg.header.frame_id = "kinfu_origin";
 
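  // SaveMesh service handler: ensure the output directory exists and write either
  // a textured mesh or a plain polygon mesh to <save_dir_>/mesh.obj.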
  jsk_recognition_msgs::SaveMesh::Request& req, jsk_recognition_msgs::SaveMesh::Response& res)

  if (boost::filesystem::create_directories(dir))

  std::string out_file = save_dir_ + "/mesh.obj";

    pcl::TextureMesh texture_mesh;

    pcl::io::saveOBJFile(out_file, texture_mesh, 5);

    pcl::io::saveOBJFile(out_file, polygon_mesh);
 
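  // A second save path repeating the same directory creation and OBJ-writing logic.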
  if (boost::filesystem::create_directories(dir))

  std::string out_file = save_dir_ + "/mesh.obj";

    pcl::TextureMesh texture_mesh;

    pcl::io::saveOBJFile(out_file, texture_mesh, 5);

    pcl::io::saveOBJFile(out_file, polygon_mesh);
 
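  // Build a pcl::TextureMesh from the reconstructed triangles: copy the polygons
  // into a single submesh, then create one OBJ material per camera plus one
  // fallback material for faces seen by no camera, writing the texture images
  // under <save_dir_>/textures/.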
                            const std::vector<cv::Mat> textures,
                            pcl::texture_mapping::CameraVector cameras)

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
  pcl::fromPCLPointCloud2(triangles.cloud, *cloud);

  pcl::TextureMesh mesh;
  mesh.cloud = triangles.cloud;
  std::vector<pcl::Vertices> polygon_1;

  polygon_1.resize(triangles.polygons.size());
  for (size_t i = 0; i < triangles.polygons.size(); ++i)
  {
    polygon_1[i] = triangles.polygons[i];
  }
  mesh.tex_polygons.push_back(polygon_1);
  NODELET_INFO("Input mesh contains %lu faces, %lu vertices and %lu textures.",
               mesh.tex_polygons[0].size(), cloud->points.size(), cameras.size());
 
  mesh.tex_materials.resize(cameras.size() + 1);
  for (int i = 0; i <= cameras.size(); ++i)
  {
    pcl::TexMaterial mesh_material;
    mesh_material.tex_Ka.r = 0.2f;
    mesh_material.tex_Ka.g = 0.2f;
    mesh_material.tex_Ka.b = 0.2f;

    mesh_material.tex_Kd.r = 0.8f;
    mesh_material.tex_Kd.g = 0.8f;
    mesh_material.tex_Kd.b = 0.8f;

    mesh_material.tex_Ks.r = 1.0f;
    mesh_material.tex_Ks.g = 1.0f;
    mesh_material.tex_Ks.b = 1.0f;

    mesh_material.tex_d = 1.0f;
    mesh_material.tex_Ns = 75.0f;
    mesh_material.tex_illum = 2;

    std::stringstream tex_name;
    tex_name << "material_" << i;
    tex_name >> mesh_material.tex_name;

    if (i < cameras.size())
    {
      std::stringstream ss;
      ss << "textures/" << i << ".jpg";
      std::string texture_file = ss.str();
      cv::imwrite(save_dir_ + "/" + texture_file, textures[i]);
      cameras[i].texture_file = texture_file;
      mesh_material.tex_file = texture_file;
    }
    else
    {
      std::string texture_file = "textures/occluded.jpg";
      cv::imwrite(save_dir_ + "/" + texture_file,
                  cv::Mat::zeros(textures[0].rows, textures[0].cols, CV_8UC1));
      mesh_material.tex_file = texture_file;
    }

    mesh.tex_materials[i] = mesh_material;
  }
 
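  // Assign texture coordinates with PCL's multi-camera texture mapping, then
  // estimate vertex normals and write the point+normal cloud back into mesh.cloud.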
  pcl::TextureMapping<pcl::PointXYZ> tm;
  tm.textureMeshwithMultipleCameras(mesh, cameras);

  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>);

  n.setSearchMethod(tree);

  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);

  pcl::toPCLPointCloud2(*cloud_with_normals, mesh.cloud);
 
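  // Run GPU marching cubes over the TSDF volume and convert the resulting triangle
  // soup into a pcl::PolygonMesh, grouping every three consecutive vertices into one face.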
      marching_cubes_ = pcl::gpu::kinfuLS::MarchingCubes::Ptr(new pcl::gpu::kinfuLS::MarchingCubes());

  pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_buffer_device;
  pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_device =

  if (triangles_device.empty())
  {
    return pcl::PolygonMesh();
  }

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
  cloud->width = static_cast<int>(triangles_device.size());

  cloud->header.frame_id = "kinfu_origin";
  triangles_device.download(cloud->points);

  pcl::PolygonMesh mesh;
  pcl::toPCLPointCloud2(*cloud, mesh.cloud);
  mesh.polygons.resize(triangles_device.size() / 3);
  for (size_t i = 0; i < mesh.polygons.size(); ++i)
  {
    pcl::Vertices v;
    v.vertices.push_back(i * 3 + 0);
    v.vertices.push_back(i * 3 + 2);
    v.vertices.push_back(i * 3 + 1);
    mesh.polygons[i] = v;
  }
 
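  // Bounding-box variant: run the same marching-cubes extraction, then crop the
  // resulting cloud to a box expressed in (or transformed into) the "kinfu_origin" frame.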
      marching_cubes_ = pcl::gpu::kinfuLS::MarchingCubes::Ptr(new pcl::gpu::kinfuLS::MarchingCubes());

  pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_buffer_device;
  pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_device =

  if (triangles_device.empty())
  {
    return pcl::PolygonMesh();
  }

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>());
  cloud->width = static_cast<int>(triangles_device.size());

  cloud->header.frame_id = "kinfu_origin";
  triangles_device.download(cloud->points);

  Eigen::Affine3f kinfu_origin_to_box = Eigen::Affine3f::Identity();
  if (box_msg.header.frame_id != "kinfu_origin")

  jsk_recognition_msgs::BoundingBox transformed_box_msg;
  transformed_box_msg.dimensions = box_msg.dimensions;
  Eigen::Affine3f box_pose;

  box_pose = kinfu_origin_to_box * box_pose;

  transformed_box_msg.header.frame_id = "kinfu_origin";

  std::vector<int> indices;
  jsk_recognition_utils::cropPointCloud<pcl::PointXYZ>(cloud, transformed_box_msg, &indices);
 
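  // Keep a face only if all three of its consecutive source vertices survived the
  // crop; when a ground frame is given, also add points projected down to the
  // minimum z value in that frame.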
  pcl::PolygonMesh mesh;
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_in_box(new pcl::PointCloud<pcl::PointXYZ>());
  cloud_in_box->width = static_cast<int>(indices.size());
  cloud_in_box->height = 1;
  cloud_in_box->points.resize(indices.size());
  for (size_t i = 0; i < indices.size(); i++)
  {
    cloud_in_box->points[i] = cloud->points[indices[i]];
    if (indices[i] % 3 == 0 && (indices[i] + 1) == indices[i + 1] && (indices[i] + 2) == indices[i + 2])
    {
      pcl::Vertices v;
      v.vertices.push_back(i + 0);
      v.vertices.push_back(i + 2);
      v.vertices.push_back(i + 1);
      mesh.polygons.push_back(v);
    }
  }

  if (!ground_frame_id.empty())
  {
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_ground_frame(new pcl::PointCloud<pcl::PointXYZ>());

    pcl::transformPointCloud(*cloud_in_box, *cloud_ground_frame, transform);

    pcl::PointXYZ min_pt, max_pt;
    pcl::getMinMax3D(*cloud_ground_frame, min_pt, max_pt);

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_occluded(new pcl::PointCloud<pcl::PointXYZ>());
    for (size_t i = 0; i < cloud_ground_frame->points.size(); i++)
    {
      pcl::PointXYZ pt_visible = cloud_ground_frame->points[i];
      pcl::PointXYZ pt_occluded(pt_visible.x, pt_visible.y, min_pt.z);
      cloud_occluded->points.push_back(pt_occluded);

        v.vertices.push_back(cloud_in_box->width + i - 2);
        v.vertices.push_back(cloud_in_box->width + i - 1);
        v.vertices.push_back(cloud_in_box->width + i - 0);
        mesh.polygons.push_back(v);
    }

    cloud_occluded->width = cloud_occluded->points.size();
    cloud_occluded->height = 1;
    pcl::transformPointCloud(*cloud_occluded, *cloud_occluded, transform.inverse());
    *cloud_in_box = *cloud_in_box + *cloud_occluded;
  }

  pcl::toPCLPointCloud2(*cloud_in_box, mesh.cloud);