CameraTango.cpp
/*
Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Universite de Sherbrooke nor the
      names of its contributors may be used to endorse or promote products
      derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "CameraTango.h"
#include "util.h"
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/core/util3d_transforms.h"
#include "rtabmap/core/util2d.h"
#include "rtabmap/core/OdometryEvent.h"
#include <tango_client_api.h>

namespace rtabmap {

// Compatibility shim for pre-C++11 toolchains; nullptr is a keyword since C++11.
#if __cplusplus < 201103L
#define nullptr 0
#endif
const int kVersionStringLength = 128;
const int holeSize = 10;           // maximum hole size filled by util2d::fillDepthHoles()
const float maxDepthError = 0.10f; // maximum error tolerated by util2d::fillDepthHoles()

// Callbacks
void onPointCloudAvailableRouter(void* context, const TangoXYZij* xyz_ij)
{
    CameraTango* app = static_cast<CameraTango*>(context);
    // Wrap the Tango point buffer in a cv::Mat header without copying;
    // cloudReceived() clones it before the callback returns.
    app->cloudReceived(cv::Mat(1, xyz_ij->xyz_count, CV_32FC3, xyz_ij->xyz[0]), xyz_ij->timestamp);
}

void onFrameAvailableRouter(void* context, TangoCameraId id, const TangoImageBuffer* color)
{
    CameraTango* app = static_cast<CameraTango*>(context);
    cv::Mat tangoImage;
    if(color->format == TANGO_HAL_PIXEL_FORMAT_RGBA_8888)
    {
        tangoImage = cv::Mat(color->height, color->width, CV_8UC4, color->data);
    }
    else if(color->format == TANGO_HAL_PIXEL_FORMAT_YV12)
    {
        // Planar YUV 4:2:0: full-resolution Y plane followed by subsampled chroma planes.
        tangoImage = cv::Mat(color->height+color->height/2, color->width, CV_8UC1, color->data);
    }
    else if(color->format == TANGO_HAL_PIXEL_FORMAT_YCrCb_420_SP)
    {
        // Semi-planar YUV 4:2:0 (NV21): Y plane followed by an interleaved VU plane.
        tangoImage = cv::Mat(color->height+color->height/2, color->width, CV_8UC1, color->data);
    }
    else
    {
        LOGE("Unsupported color format: %d.", color->format);
    }

    if(!tangoImage.empty())
    {
        app->rgbReceived(tangoImage, (unsigned int)color->format, color->timestamp);
    }
}
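
// A worked size check for the YUV wraps above (hypothetical 1280x720 frame,
// for illustration only): both YV12 and NV21 store a full-resolution Y plane
// plus chroma subsampled 2x2, so the buffer holds
//   1280*720 + 2*(640*360) = 1280*720*3/2 = 1,382,400 bytes,
// which is exactly a single-channel Mat of (720 + 720/2) rows by 1280 columns.
// cv::cvtColor() later interprets that layout with CV_YUV2BGR_YV12 / CV_YUV2BGR_NV21.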

void onPoseAvailableRouter(void* context, const TangoPoseData* pose)
{
    CameraTango* app = static_cast<CameraTango*>(context);
    app->poseReceived(app->tangoPoseToTransform(pose, true));
}

void onTangoEventAvailableRouter(void* context, const TangoEvent* event)
{
    CameraTango* app = static_cast<CameraTango*>(context);
    app->tangoEventReceived(event->type, event->event_key, event->event_value);
}

// In OpenGL, axes are x->right, y->up and z->out of the screen.
// In the image frame, axes are x->right, y->down and z->into the screen.
static rtabmap::Transform opticalRotation(
    1.0f, 0.0f, 0.0f, 0.0f,
    0.0f, -1.0f, 0.0f, 0.0f,
    0.0f, 0.0f, -1.0f, 0.0f);
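
// A minimal sketch of what this rotation does (illustrative values only): a
// point seen in the image frame at x=0.5 (right), y=0.2 (down), z=1.0 (ahead)
// ends up with y and z flipped, matching the OpenGL convention:
//
//   // cv::Point3f p = util3d::transformPoint(cv::Point3f(0.5f, 0.2f, 1.0f), opticalRotation);
//   // p is now (0.5, -0.2, -1.0): right, up, out of the screen.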

// CameraTango
CameraTango::CameraTango(int decimation, bool autoExposure) :
    Camera(0, opticalRotation),
    tango_config_(0),
    firstFrame_(true),
    decimation_(decimation),
    autoExposure_(autoExposure),
    cloudStamp_(0),
    tangoColorType_(0),
    tangoColorStamp_(0)
{
    UASSERT(decimation >= 1);
}

CameraTango::~CameraTango() {
    // Disconnect Tango service
    close();
}

bool CameraTango::init(const std::string & calibrationFolder, const std::string & cameraName)
{
    close();

    // Connect to Tango
    LOGI("NativeRTABMap: Setup tango config");
    tango_config_ = TangoService_getConfig(TANGO_CONFIG_DEFAULT);
    if (tango_config_ == nullptr)
    {
        LOGE("NativeRTABMap: Failed to get default tango config");
        return false;
    }

    // Set auto-recovery for motion tracking as requested by the user.
    bool is_auto_recovery = true;
    int ret = TangoConfig_setBool(tango_config_, "config_enable_auto_recovery", is_auto_recovery);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: config_enable_auto_recovery() failed with error code: %d", ret);
        return false;
    }

    // Enable color.
    ret = TangoConfig_setBool(tango_config_, "config_enable_color_camera", true);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: config_enable_color_camera() failed with error code: %d", ret);
        return false;
    }

    // Set the auto-exposure mode. A failure here is not fatal: this option
    // seems broken on the latest Tango releases.
    ret = TangoConfig_setBool(tango_config_, "config_color_mode_auto", autoExposure_);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: config_color_mode_auto() failed with error code: %d", ret);
        //return false;
    }
    else
    {
        if(!autoExposure_)
        {
            // With auto exposure off, fix the ISO.
            ret = TangoConfig_setInt32(tango_config_, "config_color_iso", 800);
            if (ret != TANGO_SUCCESS)
            {
                LOGE("NativeRTABMap: config_color_iso() failed with error code: %d", ret);
                return false;
            }
        }
        bool verifyAutoExposureState;
        int32_t verifyIso, verifyExp;
        TangoConfig_getBool(tango_config_, "config_color_mode_auto", &verifyAutoExposureState);
        TangoConfig_getInt32(tango_config_, "config_color_iso", &verifyIso);
        TangoConfig_getInt32(tango_config_, "config_color_exp", &verifyExp);
        LOGI("NativeRTABMap: config_color autoExposure=%s iso=%d exp=%d", verifyAutoExposureState?"On":"Off", verifyIso, verifyExp);
    }

    // Enable depth.
    ret = TangoConfig_setBool(tango_config_, "config_enable_depth", true);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: config_enable_depth() failed with error code: %d", ret);
        return false;
    }

    // Note that it's super important for AR applications that we enable low
    // latency imu integration so that we have pose information available as
    // quickly as possible. Without setting this flag, you'll often receive
    // invalid poses when calling GetPoseAtTime for an image.
    ret = TangoConfig_setBool(tango_config_, "config_enable_low_latency_imu_integration", true);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to enable low latency imu integration.");
        return false;
    }

    // Get TangoCore version string from service.
    char tango_core_version[kVersionStringLength];
    ret = TangoConfig_getString(tango_config_, "tango_service_library_version", tango_core_version, kVersionStringLength);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: get tango core version failed with error code: %d", ret);
        return false;
    }
    LOGI("NativeRTABMap: Tango version : %s", tango_core_version);

    // Callbacks
    LOGI("NativeRTABMap: Setup callbacks");
    // Attach the OnXYZijAvailable callback.
    // The callback will be called after the service is connected.
    ret = TangoService_connectOnXYZijAvailable(onPointCloudAvailableRouter);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to connect to point cloud callback with error code: %d", ret);
        return false;
    }

    ret = TangoService_connectOnFrameAvailable(TANGO_CAMERA_COLOR, this, onFrameAvailableRouter);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to connect to color callback with error code: %d", ret);
        return false;
    }

    // Attach the onPoseAvailable callback.
    // The callback will be called after the service is connected.
    TangoCoordinateFramePair pair;
    pair.base = TANGO_COORDINATE_FRAME_START_OF_SERVICE;
    pair.target = TANGO_COORDINATE_FRAME_DEVICE;
    ret = TangoService_connectOnPoseAvailable(1, &pair, onPoseAvailableRouter);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to connect to pose callback with error code: %d", ret);
        return false;
    }

    // Attach the onTangoEvent callback.
    // The callback will be called after the service is connected.
    ret = TangoService_connectOnTangoEvent(onTangoEventAvailableRouter);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to connect to event callback with error code: %d", ret);
        return false;
    }

    // Now connect the service so the callbacks above will be called.
    LOGI("NativeRTABMap: Connect to tango service");
    ret = TangoService_connect(this, tango_config_);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to connect to the Tango service with error code: %d", ret);
        return false;
    }

    // Update extrinsics.
    LOGI("NativeRTABMap: Update extrinsics");
    TangoPoseData pose_data;
    TangoCoordinateFramePair frame_pair;

    // The TangoService_getPoseAtTime function is used to query device extrinsics
    // as well. We use timestamp 0.0 and the target frame pair to get the
    // extrinsics from the sensors.
    //
    // Get the device-with-respect-to-IMU transformation matrix.
    frame_pair.base = TANGO_COORDINATE_FRAME_IMU;
    frame_pair.target = TANGO_COORDINATE_FRAME_DEVICE;
    ret = TangoService_getPoseAtTime(0.0, frame_pair, &pose_data);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to get transform between the IMU and device frames");
        return false;
    }
    imuTDevice_ = rtabmap::Transform(
        pose_data.translation[0],
        pose_data.translation[1],
        pose_data.translation[2],
        pose_data.orientation[0],
        pose_data.orientation[1],
        pose_data.orientation[2],
        pose_data.orientation[3]);

    // Get the depth-camera-with-respect-to-IMU transformation matrix.
    frame_pair.base = TANGO_COORDINATE_FRAME_IMU;
    frame_pair.target = TANGO_COORDINATE_FRAME_CAMERA_DEPTH;
    ret = TangoService_getPoseAtTime(0.0, frame_pair, &pose_data);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to get transform between the IMU and depth camera frames");
        return false;
    }
    imuTDepthCamera_ = rtabmap::Transform(
        pose_data.translation[0],
        pose_data.translation[1],
        pose_data.translation[2],
        pose_data.orientation[0],
        pose_data.orientation[1],
        pose_data.orientation[2],
        pose_data.orientation[3]);

    deviceTDepth_ = imuTDevice_.inverse() * imuTDepthCamera_;

    // Camera intrinsics.
    TangoCameraIntrinsics color_camera_intrinsics;
    ret = TangoService_getCameraIntrinsics(TANGO_CAMERA_COLOR, &color_camera_intrinsics);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to get the intrinsics for the color camera with error code: %d.", ret);
        return false;
    }
    model_ = CameraModel(
        color_camera_intrinsics.fx,
        color_camera_intrinsics.fy,
        color_camera_intrinsics.cx,
        color_camera_intrinsics.cy,
        this->getLocalTransform());
    model_.setImageSize(cv::Size(color_camera_intrinsics.width, color_camera_intrinsics.height));

    // Optical rotation: camera optical frame (x->right, y->down, z->forward)
    // expressed in the base frame (x->forward, y->left, z->up).
    model_.setLocalTransform(Transform(
        0.0f, 0.0f, 1.0f, 0.0f,
        -1.0f, 0.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f, 0.0f));

    return true;
}

void CameraTango::close()
{
    if(tango_config_)
    {
        TangoConfig_free(tango_config_);
        tango_config_ = nullptr;
        TangoService_disconnect();
    }
    firstFrame_ = true;
}

void CameraTango::cloudReceived(const cv::Mat & cloud, double timestamp)
{
    if(this->isRunning())
    {
        UASSERT(cloud.type() == CV_32FC3);
        boost::mutex::scoped_lock lock(dataMutex_);

        bool notify = cloud_.empty();
        cloud_ = cloud.clone();
        cloudStamp_ = timestamp;
        LOGD("Depth received! (%d points)", cloud.cols);
        if(!tangoColor_.empty() && notify)
        {
            LOGD("Cloud: Release semaphore");
            dataReady_.release();
        }
    }
}

void CameraTango::rgbReceived(const cv::Mat & tangoImage, int type, double timestamp)
{
    if(this->isRunning())
    {
        boost::mutex::scoped_lock lock(dataMutex_);

        if(!cloud_.empty())
        {
            if(!tangoImage.empty())
            {
                bool notify = tangoColor_.empty();
                tangoColor_ = tangoImage.clone();
                tangoColorStamp_ = timestamp;
                tangoColorType_ = type;
                LOGD("RGB received!");
                if(!cloud_.empty() && notify)
                {
                    LOGD("RGB: Release semaphore");
                    dataReady_.release();
                }
            }
        }
    }
}

void CameraTango::poseReceived(const Transform & pose)
{
    if(!pose.isNull() && pose.getNormSquared() < 100000)
    {
        this->post(new PoseEvent(pose));
    }
}

void CameraTango::tangoEventReceived(int type, const char * key, const char * value)
{
    this->post(new CameraTangoEvent(type, key, value));
}

bool CameraTango::isCalibrated() const
{
    return model_.isValidForProjection();
}

std::string CameraTango::getSerial() const
{
    return "Tango";
}

rtabmap::Transform CameraTango::tangoPoseToTransform(const TangoPoseData * tangoPose, bool inOpenGLFrame) const
{
    UASSERT(tangoPose);
    rtabmap::Transform pose;
    if(!deviceTDepth_.isNull())
    {
        pose = rtabmap::Transform(
            tangoPose->translation[0],
            tangoPose->translation[1],
            tangoPose->translation[2],
            tangoPose->orientation[0],
            tangoPose->orientation[1],
            tangoPose->orientation[2],
            tangoPose->orientation[3]);

        // Transform in OpenGL + extrinsics:
        // opengl_world_T_opengl_camera =
        //      opengl_world_T_start_service *
        //      start_service_T_device *
        //      device_T_imu *
        //      imu_T_depth_camera *
        //      depth_camera_T_opengl_camera;
        if(inOpenGLFrame)
        {
            pose = opengl_world_T_tango_world * pose * deviceTDepth_ * depth_camera_T_opengl_camera;
        }
    }

    return pose;
}
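
// A minimal sketch of the composition above with named intermediates (for
// illustration only; opengl_world_T_tango_world and depth_camera_T_opengl_camera
// are constants presumably provided by the included util.h, and deviceTDepth_
// already folds device_T_imu * imu_T_depth_camera):
//
//   // Transform start_service_T_device = pose;                      // from TangoPoseData
//   // Transform start_service_T_depth  = start_service_T_device * deviceTDepth_;
//   // Transform opengl_world_T_opengl_camera =
//   //         opengl_world_T_tango_world * start_service_T_depth * depth_camera_T_opengl_camera;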

rtabmap::Transform CameraTango::getPoseAtTimestamp(double timestamp, bool inOpenGLFrame)
{
    rtabmap::Transform pose;
    if(!deviceTDepth_.isNull())
    {
        TangoPoseData pose_start_service_T_device;
        TangoCoordinateFramePair frame_pair;
        frame_pair.base = TANGO_COORDINATE_FRAME_START_OF_SERVICE;
        frame_pair.target = TANGO_COORDINATE_FRAME_DEVICE;
        TangoErrorType status = TangoService_getPoseAtTime(timestamp, frame_pair, &pose_start_service_T_device);
        if (status != TANGO_SUCCESS)
        {
            LOGE(
                "PoseData: Failed to get transform between the start of service and "
                "device frames at timestamp %lf",
                timestamp);
        }
        if (pose_start_service_T_device.status_code != TANGO_POSE_VALID)
        {
            LOGW(
                "PoseData: Transform between the start of service and device frames "
                "is not valid at timestamp %lf",
                timestamp);
        }
        else
        {
            pose = tangoPoseToTransform(&pose_start_service_T_device, inOpenGLFrame);
        }
    }
    return pose;
}

SensorData CameraTango::captureImage(CameraInfo * info)
{
    LOGI("Capturing image...");

    SensorData data;
    if(!dataReady_.acquire(1, 2000))
    {
        if(this->isRunning())
        {
            LOGE("No frames received in the last 2 seconds, trying to restart the camera.");
            this->post(new CameraTangoEvent(0, "CameraTango", "No frames received in the last 2 seconds."));

            boost::mutex::scoped_lock lock(dataMutex_);
            if(!cloud_.empty() && !tangoColor_.empty())
            {
                UERROR("Cloud and image were set!?");
            }
        }
        cloud_ = cv::Mat();
        cloudStamp_ = 0.0;
        tangoColor_ = cv::Mat();
        tangoColorStamp_ = 0.0;
        tangoColorType_ = 0;
    }
    else
    {
        cv::Mat cloud;
        cv::Mat tangoImage;
        cv::Mat rgb;
        double cloudStamp = 0.0;
        double rgbStamp = 0.0;
        int tangoColorType = 0;

        {
            boost::mutex::scoped_lock lock(dataMutex_);
            cloud = cloud_;
            cloudStamp = cloudStamp_;
            cloud_ = cv::Mat();
            cloudStamp_ = 0.0;
            tangoImage = tangoColor_;
            rgbStamp = tangoColorStamp_;
            tangoColorType = tangoColorType_;
            tangoColor_ = cv::Mat();
            tangoColorStamp_ = 0.0;
            tangoColorType_ = 0;
        }

        if(tangoColorType == TANGO_HAL_PIXEL_FORMAT_RGBA_8888)
        {
            cv::cvtColor(tangoImage, rgb, CV_RGBA2BGR);
        }
        else if(tangoColorType == TANGO_HAL_PIXEL_FORMAT_YV12)
        {
            cv::cvtColor(tangoImage, rgb, CV_YUV2BGR_YV12);
        }
        else if(tangoColorType == TANGO_HAL_PIXEL_FORMAT_YCrCb_420_SP)
        {
            cv::cvtColor(tangoImage, rgb, CV_YUV2BGR_NV21);
        }
        else
        {
            LOGE("Unsupported color format: %d.", tangoColorType);
            return data;
        }

        //for(int i=0; i<rgb.cols; ++i)
        //{
        //    UERROR("%d,%d,%d", (int)rgb.at<cv::Vec3b>(i)[0], (int)rgb.at<cv::Vec3b>(i)[1], (int)rgb.at<cv::Vec3b>(i)[2]);
        //}

        CameraModel model = model_;
        if(decimation_ > 1)
        {
            rgb = util2d::decimate(rgb, decimation_);
            model = model.scaled(1.0/double(decimation_));
        }
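
        // A worked example of the scaling above (hypothetical numbers, for
        // illustration only): with decimation_ = 2, a 1280x720 image becomes
        // 640x360 and the intrinsics shrink by the same factor, e.g.
        // fx 1042 -> 521 and cx 640 -> 320, so projections stay consistent
        // with the decimated image.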

        // Querying the depth image's frame transformation based on the depth
        // image's timestamp.
        cv::Mat depth;
        Transform poseDepth = getPoseAtTimestamp(cloudStamp, false);
        Transform poseColor = getPoseAtTimestamp(rgbStamp, false);

        if(poseColor.getNormSquared() > 100000)
        {
            LOGE("Very large odometry color pose detected (%s)! Ignoring this frame!", poseColor.prettyPrint().c_str());
            poseColor.setNull();
        }
        if(poseDepth.getNormSquared() > 100000)
        {
            LOGE("Very large odometry depth pose detected (%s)! Ignoring this frame!", poseDepth.prettyPrint().c_str());
            poseDepth.setNull();
        }

        if(!poseDepth.isNull() && !poseColor.isNull())
        {
            // The color camera frame at timestamp t0 with respect to the depth
            // camera frame at timestamp t1.
            Transform colorToDepth = deviceTDepth_.inverse() * poseColor.inverse() * poseDepth * deviceTDepth_;
            LOGI("colorToDepth=%s", colorToDepth.prettyPrint().c_str());

            int pixelsSet = 0;
            depth = cv::Mat::zeros(model_.imageHeight()/8, model_.imageWidth()/8, CV_16UC1); // mm
            CameraModel depthModel = model_.scaled(1.0f/8.0f);
            for(unsigned int i=0; i<cloud.total(); ++i)
            {
                cv::Vec3f & p = cloud.at<cv::Vec3f>(i);
                cv::Point3f pt = util3d::transformPoint(cv::Point3f(p[0], p[1], p[2]), colorToDepth);

                // Project the point on the (downscaled) image plane.
                int pixel_x = static_cast<int>(depthModel.fx() * (pt.x / pt.z) + depthModel.cx());
                int pixel_y = static_cast<int>(depthModel.fy() * (pt.y / pt.z) + depthModel.cy());
                unsigned short depth_value(pt.z * 1000.0f);

                if(pixel_x>=0 && pixel_x<depth.cols &&
                   pixel_y>=0 && pixel_y<depth.rows &&
                   depth_value)
                {
                    // Keep the closest depth when several points fall on the same pixel.
                    unsigned short & depthPixel = depth.at<unsigned short>(pixel_y, pixel_x);
                    if(depthPixel == 0 || depthPixel > depth_value)
                    {
                        depthPixel = depth_value;
                        pixelsSet += 1;
                    }
                }
            }
            LOGI("pixels depth set= %d", pixelsSet);
        }
        else
        {
            LOGE("Poses are null?!? color=%d (stamp=%f) depth=%d (stamp=%f)", poseColor.isNull()?0:1, rgbStamp, poseDepth.isNull()?0:1, cloudStamp);
        }
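
        // A worked instance of the projection above (hypothetical intrinsics,
        // for illustration only): with depthModel.fx()=fy()=130, cx()=80 and
        // cy()=45 (a 160x90 depth image, i.e. 1280x720 / 8), a point at
        // (0.2, -0.1, 2.0) m maps to
        //   pixel_x = 130 * (0.2 / 2.0) + 80 = 93
        //   pixel_y = 130 * (-0.1 / 2.0) + 45 = 38.5 -> 38 (truncated)
        // and is stored as depth_value = 2.0 * 1000 = 2000 mm.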

        if(!rgb.empty() && !depth.empty())
        {
            depth = rtabmap::util2d::fillDepthHoles(depth, holeSize, maxDepthError);

            Transform poseColorOpenGL = getPoseAtTimestamp(rgbStamp, true);

            // Rotate into RTAB-Map's coordinate frame.
            Transform odom = rtabmap_world_T_opengl_world * poseColorOpenGL * depth_camera_T_opengl_camera * model.localTransform().inverse();

            data = SensorData(rgb, depth, model, this->getNextSeqID(), rgbStamp);
            // The odometry pose is carried in the ground-truth field; mainLoop()
            // moves it into the OdometryEvent and clears it.
            data.setGroundTruth(odom);
        }
        else
        {
            LOGE("Could not get depth and rgb images!?!");
        }
    }
    return data;
}

void CameraTango::mainLoopBegin()
{
    uSleep(2000); // just to make sure that the camera is started
}

void CameraTango::mainLoop()
{
    if(tango_config_)
    {
        SensorData data = this->captureImage();

        if(!data.groundTruth().isNull())
        {
            rtabmap::Transform pose = data.groundTruth();
            data.setGroundTruth(Transform());
            LOGI("Publish odometry message (variance=%f)", firstFrame_?9999:0.0001);
            this->post(new OdometryEvent(data, pose, firstFrame_?9999:0.0001, firstFrame_?9999:0.0001));
            firstFrame_ = false;
        }
        else if(!this->isKilled())
        {
            LOGW("Odometry lost");
            this->post(new OdometryEvent());
        }
    }
    else
    {
        UERROR("Camera not initialized, cannot start thread.");
        this->kill();
    }
}

} /* namespace rtabmap */
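
/*
 * A minimal usage sketch (not part of the original file; the exact base-class
 * API is an assumption). CameraTango runs its own capture thread: init()
 * connects to the Tango service, start() begins mainLoop(), and
 * OdometryEvent / CameraTangoEvent messages are posted on the event bus for
 * any subscribed UEventsHandler.
 *
 *   rtabmap::CameraTango camera(1, false);   // no decimation, fixed exposure
 *   if(camera.init())
 *   {
 *       camera.start();                      // capture loop posts OdometryEvent
 *       // ... a subscribed UEventsHandler (e.g. the odometry/mapping thread) consumes them ...
 *       camera.join(true);                   // stop and wait for the thread
 *   }
 */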

