#include "CameraTango.h"
#include "util.h"
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/core/util3d_transforms.h"
#include "rtabmap/core/OdometryEvent.h"
#include "rtabmap/core/util2d.h"
#include <tango_client_api.h>
#include <tango_support_api.h>

namespace rtabmap {

#define nullptr 0
const int kVersionStringLength = 128;
const int holeSize = 5;            // parameters passed to util2d::fillDepthHoles() in captureImage()
const float maxDepthError = 0.10f;
const int scanDownsampling = 1;    // keep one point out of scanDownsampling in the published scan

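// Tango C-API callbacks. Tango invokes these from its own threads with the
// CameraTango instance passed back through the opaque context pointer, so each
// router simply casts the context and forwards the data to the corresponding
// CameraTango method.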
void onPointCloudAvailableRouter(void* context, const TangoPointCloud* point_cloud)
{
    CameraTango* app = static_cast<CameraTango*>(context);
    if(app->isRunning() && point_cloud->num_points>0)
    {
        app->cloudReceived(cv::Mat(1, point_cloud->num_points, CV_32FC4, point_cloud->points[0]), point_cloud->timestamp);
    }
}

void onFrameAvailableRouter(void* context, TangoCameraId id, const TangoImageBuffer* color)
{
    CameraTango* app = static_cast<CameraTango*>(context);
    if(app->isRunning())
    {
        cv::Mat tangoImage;
        if(color->format == TANGO_HAL_PIXEL_FORMAT_RGBA_8888)
        {
            tangoImage = cv::Mat(color->height, color->width, CV_8UC4, color->data);
        }
        else if(color->format == TANGO_HAL_PIXEL_FORMAT_YV12)
        {
            tangoImage = cv::Mat(color->height+color->height/2, color->width, CV_8UC1, color->data);
        }
        else if(color->format == TANGO_HAL_PIXEL_FORMAT_YCrCb_420_SP)
        {
            tangoImage = cv::Mat(color->height+color->height/2, color->width, CV_8UC1, color->data);
        }
        else if(color->format == 35) // 35 = HAL_PIXEL_FORMAT_YCbCr_420_888
        {
            tangoImage = cv::Mat(color->height+color->height/2, color->width, CV_8UC1, color->data);
        }
        else
        {
            LOGE("Not supported color format : %d.", color->format);
        }

        if(!tangoImage.empty())
        {
            app->rgbReceived(tangoImage, (unsigned int)color->format, color->timestamp);
        }
    }
}

void onPoseAvailableRouter(void* context, const TangoPoseData* pose)
{
    if(pose->status_code == TANGO_POSE_VALID)
    {
        CameraTango* app = static_cast<CameraTango*>(context);
        app->poseReceived(app->tangoPoseToTransform(pose));
    }
}

void onTangoEventAvailableRouter(void* context, const TangoEvent* event)
{
    CameraTango* app = static_cast<CameraTango*>(context);
    app->tangoEventReceived(event->type, event->event_key, event->event_value);
}

// Parameters of util2d::fastBilateralFiltering() used when smoothing is enabled
const float CameraTango::bilateralFilteringSigmaS = 2.0f;
const float CameraTango::bilateralFilteringSigmaR = 0.075f;

CameraTango::CameraTango(bool colorCamera, int decimation, bool publishRawScan, bool smoothing) :
    Camera(0),
    tango_config_(0),
    previousStamp_(0.0),
    stampEpochOffset_(0.0),
    colorCamera_(colorCamera),
    decimation_(decimation),
    rawScanPublished_(publishRawScan),
    smoothing_(smoothing),
    cloudStamp_(0),
    tangoColorType_(0),
    tangoColorStamp_(0),
    colorCameraToDisplayRotation_(ROTATION_0),
    originUpdate_(false)
{
    UASSERT(decimation >= 1);
}

CameraTango::~CameraTango() {
    close();
}

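// FOV (field-of-view) distortion model used by the Tango fisheye camera:
// given undistorted normalized coordinates (xu, yu) with radius ru, the
// distorted radius is rd = atan(ru * 2*tan(w/2)) / w, where w is the single
// FOV distortion coefficient. This maps an undistorted pixel to its distorted
// location so the rectification maps below can be built.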
void applyFovModel(
    double xu, double yu, double w, double w_inverse, double two_tan_w_div_two,
    double* xd, double* yd) {
    double ru = sqrt(xu * xu + yu * yu);
    constexpr double epsilon = 1e-7;
    if (w < epsilon || ru < epsilon) {
        *xd = xu;
        *yd = yu;
    } else {
        double rd_div_ru = std::atan(ru * two_tan_w_div_two) * w_inverse / ru;
        *xd = xu * rd_div_ru;
        *yd = yu * rd_div_ru;
    }
}

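// Builds the cv::remap() lookup tables used to rectify the fisheye image: for
// every pixel of the rectified (undistorted) image, compute where it falls in
// the raw distorted image with the FOV model above and store the source
// coordinates in mapX/mapY.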
void initFisheyeRectificationMap(
    const CameraModel& fisheyeModel,
    cv::Mat & mapX, cv::Mat & mapY) {
    const double & fx = fisheyeModel.K().at<double>(0,0);
    const double & fy = fisheyeModel.K().at<double>(1,1);
    const double & cx = fisheyeModel.K().at<double>(0,2);
    const double & cy = fisheyeModel.K().at<double>(1,2);
    const double & w = fisheyeModel.D().at<double>(0,0);
    mapX.create(fisheyeModel.imageSize(), CV_32FC1);
    mapY.create(fisheyeModel.imageSize(), CV_32FC1);
    LOGD("initFisheyeRectificationMap: fx=%f fy=%f, cx=%f, cy=%f, w=%f", fx, fy, cx, cy, w);

    const double fy_inverse = 1.0 / fy;
    const double fx_inverse = 1.0 / fx;
    const double w_inverse = 1.0 / w;
    const double two_tan_w_div_two = 2.0 * std::tan(w * 0.5);

    for(int iu = 0; iu < fisheyeModel.imageHeight(); ++iu) {
        for (int ju = 0; ju < fisheyeModel.imageWidth(); ++ju) {
            double xu = (ju - cx) * fx_inverse;
            double yu = (iu - cy) * fy_inverse;
            double xd, yd;
            applyFovModel(xu, yu, w, w_inverse, two_tan_w_div_two, &xd, &yd);
            double jd = cx + xd * fx;
            double id = cy + yd * fy;
            mapX.at<float>(iu, ju) = jd;
            mapY.at<float>(iu, ju) = id;
        }
    }
}

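// Connects to the Tango service: configures depth, color and auto-recovery,
// registers the point-cloud, image, pose and event callbacks, then reads the
// device-to-camera extrinsics and the camera intrinsics to build the RTAB-Map
// CameraModel (and the fisheye rectification maps when the fisheye camera is used).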
bool CameraTango::init(const std::string & calibrationFolder, const std::string & cameraName)
{
    close();

    TangoSupport_initialize(TangoService_getPoseAtTime, TangoService_getCameraIntrinsics);

    LOGI("NativeRTABMap: Setup tango config");
    tango_config_ = TangoService_getConfig(TANGO_CONFIG_DEFAULT);
    if (tango_config_ == nullptr)
    {
        LOGE("NativeRTABMap: Failed to get default Tango config");
        return false;
    }

    // Enable auto-recovery so motion tracking restarts by itself after being lost.
    bool is_auto_recovery = true;
    int ret = TangoConfig_setBool(tango_config_, "config_enable_auto_recovery", is_auto_recovery);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: config_enable_auto_recovery() failed with error code: %d", ret);
        return false;
    }

    if(colorCamera_)
    {
        ret = TangoConfig_setBool(tango_config_, "config_enable_color_camera", true);
        if (ret != TANGO_SUCCESS)
        {
            LOGE("NativeRTABMap: config_enable_color_camera() failed with error code: %d", ret);
            return false;
        }
    }

    // Enable depth (point cloud) output.
    ret = TangoConfig_setBool(tango_config_, "config_enable_depth", true);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: config_enable_depth() failed with error code: %d", ret);
        return false;
    }

    // Request point clouds in XYZC format (x, y, z, confidence).
    ret = TangoConfig_setInt32(tango_config_, "config_depth_mode", TANGO_POINTCLOUD_XYZC);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("Failed to set 'depth_mode' configuration flag with error code: %d", ret);
        return false;
    }

    ret = TangoConfig_setBool(tango_config_, "config_enable_low_latency_imu_integration", true);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to enable low latency imu integration.");
        return false;
    }

    // Check the installed TangoCore version.
    char tango_core_version[kVersionStringLength];
    ret = TangoConfig_getString(tango_config_, "tango_service_library_version", tango_core_version, kVersionStringLength);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: get tango core version failed with error code: %d", ret);
        return false;
    }
    LOGI("NativeRTABMap: Tango version : %s", tango_core_version);

    LOGI("NativeRTABMap: Setup callbacks");

    ret = TangoService_connectOnPointCloudAvailable(onPointCloudAvailableRouter);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to connect to point cloud callback with error code: %d", ret);
        return false;
    }

    ret = TangoService_connectOnFrameAvailable(colorCamera_?TANGO_CAMERA_COLOR:TANGO_CAMERA_FISHEYE, this, onFrameAvailableRouter);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to connect to color callback with error code: %d", ret);
        return false;
    }

    // Poses of the device are requested in the start-of-service frame.
    TangoCoordinateFramePair pair;
    pair.base = TANGO_COORDINATE_FRAME_START_OF_SERVICE;
    pair.target = TANGO_COORDINATE_FRAME_DEVICE;
    ret = TangoService_connectOnPoseAvailable(1, &pair, onPoseAvailableRouter);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to connect to pose callback with error code: %d", ret);
        return false;
    }

    ret = TangoService_connectOnTangoEvent(onTangoEventAvailableRouter);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to connect to event callback with error code: %d", ret);
        return false;
    }

00319 LOGI("NativeRTABMap: Connect to tango service");
00320 ret = TangoService_connect(this, tango_config_);
00321 if (ret != TANGO_SUCCESS)
00322 {
00323 LOGE("NativeRTABMap: Failed to connect to the Tango service with error code: %d", ret);
00324 return false;
00325 }
00326
00327
00328 LOGI("NativeRTABMap: Update extrinsics");
00329 TangoPoseData pose_data;
00330 TangoCoordinateFramePair frame_pair;
00331
00332
00333
00334
00335
00336
00337 frame_pair.base = TANGO_COORDINATE_FRAME_DEVICE;
00338 frame_pair.target = colorCamera_?TANGO_COORDINATE_FRAME_CAMERA_COLOR:TANGO_COORDINATE_FRAME_CAMERA_FISHEYE;
00339 ret = TangoService_getPoseAtTime(0.0, frame_pair, &pose_data);
00340 if (ret != TANGO_SUCCESS)
00341 {
00342 LOGE("NativeRTABMap: Failed to get transform between the color camera frame and device frames");
00343 return false;
00344 }
00345 deviceTColorCamera_ = rtabmap::Transform(
00346 pose_data.translation[0],
00347 pose_data.translation[1],
00348 pose_data.translation[2],
00349 pose_data.orientation[0],
00350 pose_data.orientation[1],
00351 pose_data.orientation[2],
00352 pose_data.orientation[3]);
00353
00354
    TangoCameraIntrinsics color_camera_intrinsics;
    ret = TangoService_getCameraIntrinsics(colorCamera_?TANGO_CAMERA_COLOR:TANGO_CAMERA_FISHEYE, &color_camera_intrinsics);
    if (ret != TANGO_SUCCESS)
    {
        LOGE("NativeRTABMap: Failed to get the intrinsics for the color camera with error code: %d.", ret);
        return false;
    }

    LOGD("Calibration: fx=%f fy=%f cx=%f cy=%f width=%d height=%d",
        color_camera_intrinsics.fx,
        color_camera_intrinsics.fy,
        color_camera_intrinsics.cx,
        color_camera_intrinsics.cy,
        color_camera_intrinsics.width,
        color_camera_intrinsics.height);

    cv::Mat K = cv::Mat::eye(3, 3, CV_64FC1);
    K.at<double>(0,0) = color_camera_intrinsics.fx;
    K.at<double>(1,1) = color_camera_intrinsics.fy;
    K.at<double>(0,2) = color_camera_intrinsics.cx;
    K.at<double>(1,2) = color_camera_intrinsics.cy;
    cv::Mat D = cv::Mat::zeros(1, 5, CV_64FC1);
    LOGD("Calibration type = %d", color_camera_intrinsics.calibration_type);
    if(color_camera_intrinsics.calibration_type == TANGO_CALIBRATION_POLYNOMIAL_5_PARAMETERS ||
       color_camera_intrinsics.calibration_type == TANGO_CALIBRATION_EQUIDISTANT)
    {
        D.at<double>(0,0) = color_camera_intrinsics.distortion[0];
        D.at<double>(0,1) = color_camera_intrinsics.distortion[1];
        D.at<double>(0,2) = color_camera_intrinsics.distortion[2];
        D.at<double>(0,3) = color_camera_intrinsics.distortion[3];
        D.at<double>(0,4) = color_camera_intrinsics.distortion[4];
    }
    else if(color_camera_intrinsics.calibration_type == TANGO_CALIBRATION_POLYNOMIAL_3_PARAMETERS)
    {
        D.at<double>(0,0) = color_camera_intrinsics.distortion[0];
        D.at<double>(0,1) = color_camera_intrinsics.distortion[1];
        D.at<double>(0,2) = 0.;
        D.at<double>(0,3) = 0.;
        D.at<double>(0,4) = color_camera_intrinsics.distortion[2];
    }
    else if(color_camera_intrinsics.calibration_type == TANGO_CALIBRATION_POLYNOMIAL_2_PARAMETERS)
    {
        D.at<double>(0,0) = color_camera_intrinsics.distortion[0];
        D.at<double>(0,1) = color_camera_intrinsics.distortion[1];
        D.at<double>(0,2) = 0.;
        D.at<double>(0,3) = 0.;
        D.at<double>(0,4) = 0.;
    }

    cv::Mat R = cv::Mat::eye(3, 3, CV_64FC1);
    cv::Mat P;

    LOGD("Distortion params: %f, %f, %f, %f, %f", D.at<double>(0,0), D.at<double>(0,1), D.at<double>(0,2), D.at<double>(0,3), D.at<double>(0,4));
    model_ = CameraModel(colorCamera_?"color":"fisheye",
        cv::Size(color_camera_intrinsics.width, color_camera_intrinsics.height),
        K, D, R, P,
        tango_device_T_rtabmap_device.inverse()*deviceTColorCamera_);

    if(!colorCamera_)
    {
        initFisheyeRectificationMap(model_, fisheyeRectifyMapX_, fisheyeRectifyMapY_);
    }

    LOGI("deviceTColorCameraTango =%s", deviceTColorCamera_.prettyPrint().c_str());
    LOGI("deviceTColorCameraRtabmap=%s", (tango_device_T_rtabmap_device.inverse()*deviceTColorCamera_).prettyPrint().c_str());

    cameraStartedTime_.restart();

    return true;
}

void CameraTango::close()
{
    if(tango_config_)
    {
        TangoConfig_free(tango_config_);
        tango_config_ = nullptr;
        LOGI("TangoService_disconnect()");
        TangoService_disconnect();
        LOGI("TangoService_disconnect() done.");
    }
    previousPose_.setNull();
    previousStamp_ = 0.0;
    fisheyeRectifyMapX_ = cv::Mat();
    fisheyeRectifyMapY_ = cv::Mat();
    lastKnownGPS_ = GPS();
    originOffset_ = Transform();
    originUpdate_ = false;
}

// Request that the next received pose becomes the origin of the published poses.
void CameraTango::resetOrigin()
{
    originUpdate_ = true;
}

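// Point clouds and color images arrive on separate Tango threads. cloudReceived()
// keeps a cloud only if a color image with a close timestamp (< 0.5 s apart) has
// already been buffered by rgbReceived(), and releases the dataReady_ semaphore so
// that captureImage() can assemble a synchronized RGB-D frame.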
void CameraTango::cloudReceived(const cv::Mat & cloud, double timestamp)
{
    if(this->isRunning() && !cloud.empty())
    {
        UASSERT(cloud.type() == CV_32FC4);
        boost::mutex::scoped_lock lock(dataMutex_);

        if(!tangoColor_.empty())
        {
            double dt = fabs(timestamp - tangoColorStamp_);

            if(dt >= 0.0 && dt < 0.5)
            {
                bool notify = cloud_.empty();
                cloud_ = cloud.clone();
                cloudStamp_ = timestamp;
                if(notify)
                {
                    dataReady_.release();
                }
            }
        }
    }
}

void CameraTango::rgbReceived(const cv::Mat & tangoImage, int type, double timestamp)
{
    if(this->isRunning() && !tangoImage.empty())
    {
        boost::mutex::scoped_lock lock(dataMutex_);

        tangoColor_ = tangoImage.clone();
        tangoColorStamp_ = timestamp;
        tangoColorType_ = type;
    }
}

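// Rotation of 180 degrees around the x axis (y and z flipped), applied so that the
// camera pose posted below follows the optical-frame convention expected downstream.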
static rtabmap::Transform opticalRotation(
    1.0f, 0.0f, 0.0f, 0.0f,
    0.0f, -1.0f, 0.0f, 0.0f,
    0.0f, 0.0f, -1.0f, 0.0f);
void CameraTango::poseReceived(const Transform & pose)
{
    if(!pose.isNull())
    {
        // Pose of the color camera (with opticalRotation applied) in the start-of-service frame.
        Transform p = pose*deviceTColorCamera_*opticalRotation;
        if(originUpdate_)
        {
            originOffset_ = p.translation().inverse();
            originUpdate_ = false;
        }
        if(!originOffset_.isNull())
        {
            this->post(new PoseEvent(originOffset_*p));
        }
        else
        {
            this->post(new PoseEvent(p));
        }
    }
}

void CameraTango::tangoEventReceived(int type, const char * key, const char * value)
{
    this->post(new CameraTangoEvent(type, key, value));
}

bool CameraTango::isCalibrated() const
{
    return model_.isValidForProjection();
}

std::string CameraTango::getSerial() const
{
    return "Tango";
}

void CameraTango::setGPS(const GPS & gps)
{
    lastKnownGPS_ = gps;
}

rtabmap::Transform CameraTango::tangoPoseToTransform(const TangoPoseData * tangoPose) const
{
    UASSERT(tangoPose);
    rtabmap::Transform pose;

    pose = rtabmap::Transform(
        tangoPose->translation[0],
        tangoPose->translation[1],
        tangoPose->translation[2],
        tangoPose->orientation[0],
        tangoPose->orientation[1],
        tangoPose->orientation[2],
        tangoPose->orientation[3]);

    return pose;
}

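// Queries the Tango pose (start-of-service -> device) at the given timestamp;
// returns a null Transform if the pose is not available or not valid.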
rtabmap::Transform CameraTango::getPoseAtTimestamp(double timestamp)
{
    rtabmap::Transform pose;

    TangoPoseData pose_start_service_T_device;
    TangoCoordinateFramePair frame_pair;
    frame_pair.base = TANGO_COORDINATE_FRAME_START_OF_SERVICE;
    frame_pair.target = TANGO_COORDINATE_FRAME_DEVICE;
    TangoErrorType status = TangoService_getPoseAtTime(timestamp, frame_pair, &pose_start_service_T_device);
    if (status != TANGO_SUCCESS)
    {
        LOGE(
            "PoseData: Failed to get transform between the Start of service and "
            "device frames at timestamp %lf",
            timestamp);
    }
    if (pose_start_service_T_device.status_code != TANGO_POSE_VALID)
    {
        LOGW(
            "PoseData: Failed to get transform between the Start of service and "
            "device frames at timestamp %lf",
            timestamp);
    }
    else
    {
        pose = tangoPoseToTransform(&pose_start_service_T_device);
    }

    return pose;
}

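// Assembles one synchronized RGB-D frame: waits (up to 2 s) for the point cloud /
// color image pair buffered by the callbacks, converts the color image to BGR,
// registers the point cloud into a depth image aligned with the color camera,
// applies the display rotation and optional smoothing, and returns the result as a
// SensorData with the device pose stored in the ground truth field (read by mainLoop()).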
SensorData CameraTango::captureImage(CameraInfo * info)
{
    SensorData data;
    if(!dataReady_.acquire(1, 2000))
    {
        if(this->isRunning())
        {
            LOGE("No frames received in the last 2 seconds, trying to restart the camera.");
            this->post(new CameraTangoEvent(0, "CameraTango", "No frames received in the last 2 seconds."));

            boost::mutex::scoped_lock lock(dataMutex_);
            if(!cloud_.empty() && !tangoColor_.empty())
            {
                UERROR("cloud and image were set!?");
            }
        }
        cloud_ = cv::Mat();
        cloudStamp_ = 0.0;
        tangoColor_ = cv::Mat();
        tangoColorStamp_ = 0.0;
        tangoColorType_ = 0;
    }
    else
    {
        cv::Mat cloud;
        cv::Mat tangoImage;
        cv::Mat rgb;
        double cloudStamp = 0.0;
        double rgbStamp = 0.0;
        int tangoColorType = 0;

        // Take ownership of the buffered cloud/image pair.
        {
            boost::mutex::scoped_lock lock(dataMutex_);
            cloud = cloud_;
            cloudStamp = cloudStamp_;
            cloud_ = cv::Mat();
            cloudStamp_ = 0.0;
            tangoImage = tangoColor_;
            rgbStamp = tangoColorStamp_;
            tangoColorType = tangoColorType_;
            tangoColor_ = cv::Mat();
            tangoColorStamp_ = 0.0;
            tangoColorType_ = 0;
        }

        // Convert the Tango image buffer to BGR (grayscale for YUV_420_888).
        LOGD("tangoColorType=%d", tangoColorType);
        if(tangoColorType == TANGO_HAL_PIXEL_FORMAT_RGBA_8888)
        {
            cv::cvtColor(tangoImage, rgb, CV_RGBA2BGR);
        }
        else if(tangoColorType == TANGO_HAL_PIXEL_FORMAT_YV12)
        {
            cv::cvtColor(tangoImage, rgb, CV_YUV2BGR_YV12);
        }
        else if(tangoColorType == TANGO_HAL_PIXEL_FORMAT_YCrCb_420_SP)
        {
            cv::cvtColor(tangoImage, rgb, CV_YUV2BGR_NV21);
        }
        else if(tangoColorType == 35) // 35 = HAL_PIXEL_FORMAT_YCbCr_420_888
        {
            cv::cvtColor(tangoImage, rgb, cv::COLOR_YUV420sp2GRAY);
        }
        else
        {
            LOGE("Not supported color format : %d.", tangoColorType);
            return data;
        }

        CameraModel model = model_;

        if(colorCamera_)
        {
            if(decimation_ > 1)
            {
                rgb = util2d::decimate(rgb, decimation_);
                model = model.scaled(1.0/double(decimation_));
            }
        }
        else
        {
            // Rectify the fisheye image with the maps computed in init().
            cv::Mat rgbRect;
            cv::remap(rgb, rgbRect, fisheyeRectifyMapX_, fisheyeRectifyMapY_, cv::INTER_LINEAR, cv::BORDER_CONSTANT, 0);
            rgb = rgbRect;
        }

        cv::Mat depth;

        // Relative pose between the color image (taken at rgbStamp) and the point
        // cloud (taken at cloudStamp), used to register the cloud in the color camera frame.
        Transform colorToDepth;
        TangoPoseData pose_color_image_t1_T_depth_image_t0;
        if (TangoSupport_calculateRelativePose(
                rgbStamp, colorCamera_?TANGO_COORDINATE_FRAME_CAMERA_COLOR:TANGO_COORDINATE_FRAME_CAMERA_FISHEYE,
                cloudStamp, TANGO_COORDINATE_FRAME_CAMERA_DEPTH,
                &pose_color_image_t1_T_depth_image_t0) == TANGO_SUCCESS)
        {
            colorToDepth = tangoPoseToTransform(&pose_color_image_t1_T_depth_image_t0);
        }
        else
        {
            LOGE(
                "SynchronizationApplication: Could not find a valid relative pose "
                "for the color and depth cameras at the requested times.");
        }

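        // Project every point of the cloud into the depth image of the color camera:
        // each 3D point is transformed with colorToDepth, projected with the scaled
        // pinhole model, and written to the nearest pixels, keeping the closest depth
        // when several points fall on the same pixel. Points closer than 0.6 m inside
        // the central ROI are counted to emit a "TooClose" event.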
        if(colorToDepth.getNormSquared() > 100000)
        {
            LOGE("Very large color to depth error detected (%s)! Ignoring this frame!", colorToDepth.prettyPrint().c_str());
            colorToDepth.setNull();
        }
        cv::Mat scan;
        if(!colorToDepth.isNull())
        {
            LOGD("rgb=%dx%d cloud size=%d", rgb.cols, rgb.rows, (int)cloud.total());

            int pixelsSet = 0;
            int depthSizeDec = colorCamera_?8:1; // output depth is 1/8 of the color image size
            depth = cv::Mat::zeros(model_.imageHeight()/depthSizeDec, model_.imageWidth()/depthSizeDec, CV_16UC1); // mm
            CameraModel depthModel = model_.scaled(1.0f/float(depthSizeDec));
            std::vector<cv::Point3f> scanData(rawScanPublished_?cloud.total():0);
            int oi=0;
            int closePoints = 0;
            float closeROI[4];
            closeROI[0] = depth.cols/4;
            closeROI[1] = 3*(depth.cols/4);
            closeROI[2] = depth.rows/4;
            closeROI[3] = 3*(depth.rows/4);
            unsigned short minDepthValue=10000;
            for(unsigned int i=0; i<cloud.total(); ++i)
            {
                float * p = cloud.ptr<float>(0,i);
                cv::Point3f pt = util3d::transformPoint(cv::Point3f(p[0], p[1], p[2]), colorToDepth);

                if(pt.z > 0.0f && i%scanDownsampling == 0 && rawScanPublished_)
                {
                    scanData.at(oi++) = pt;
                }

                // Project the point with the pinhole model (floored and rounded pixel).
                int pixel_x_l, pixel_y_l, pixel_x_h, pixel_y_h;
                pixel_x_l = static_cast<int>((depthModel.fx()) * (pt.x / pt.z) + depthModel.cx());
                pixel_y_l = static_cast<int>((depthModel.fy()) * (pt.y / pt.z) + depthModel.cy());
                pixel_x_h = static_cast<int>((depthModel.fx()) * (pt.x / pt.z) + depthModel.cx() + 0.5f);
                pixel_y_h = static_cast<int>((depthModel.fy()) * (pt.y / pt.z) + depthModel.cy() + 0.5f);
                unsigned short depth_value(pt.z * 1000.0f); // meters -> millimeters

                if(pixel_x_l>=closeROI[0] && pixel_x_l<closeROI[1] &&
                   pixel_y_l>closeROI[2] && pixel_y_l<closeROI[3] &&
                   depth_value < 600)
                {
                    ++closePoints;
                    if(depth_value < minDepthValue)
                    {
                        minDepthValue = depth_value;
                    }
                }

                bool pixelSet = false;
                if(pixel_x_l>=0 && pixel_x_l<depth.cols &&
                   pixel_y_l>0 && pixel_y_l<depth.rows &&
                   depth_value)
                {
                    unsigned short & depthPixel = depth.at<unsigned short>(pixel_y_l, pixel_x_l);
                    if(depthPixel == 0 || depthPixel > depth_value)
                    {
                        depthPixel = depth_value;
                        pixelSet = true;
                    }
                }
                if(pixel_x_h>=0 && pixel_x_h<depth.cols &&
                   pixel_y_h>0 && pixel_y_h<depth.rows &&
                   depth_value)
                {
                    unsigned short & depthPixel = depth.at<unsigned short>(pixel_y_h, pixel_x_h);
                    if(depthPixel == 0 || depthPixel > depth_value)
                    {
                        depthPixel = depth_value;
                        pixelSet = true;
                    }
                }
                if(pixelSet)
                {
                    pixelsSet += 1;
                }
            }

            if(closePoints > 100)
            {
                this->post(new CameraTangoEvent(0, "TooClose", ""));
            }

            if(oi)
            {
                scan = cv::Mat(1, oi, CV_32FC3, scanData.data()).clone();
            }
        }
        else
        {
            LOGE("color to depth pose is null?!? (rgb stamp=%f) (depth stamp=%f)", rgbStamp, cloudStamp);
        }

        if(!rgb.empty() && !depth.empty())
        {
            // Fill small holes left by the sparse point cloud.
            depth = rtabmap::util2d::fillDepthHoles(depth, holeSize, maxDepthError);

            Transform poseDevice = getPoseAtTimestamp(rgbStamp);

            if(!originOffset_.isNull())
            {
                poseDevice = originOffset_ * poseDevice;
            }

            // Convert the Tango device pose to the RTAB-Map world/base conventions.
            Transform odom = rtabmap_world_T_tango_world * poseDevice * tango_device_T_rtabmap_device;

            Transform scanLocalTransform = model.localTransform();

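            // Rotate the RGB and depth images to match the current display orientation,
            // and adjust the camera model (intrinsics and local transform) accordingly.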
            if(colorCameraToDisplayRotation_ == ROTATION_90)
            {
                cv::Mat rgbt(rgb.cols, rgb.rows, rgb.type());
                cv::flip(rgb,rgb,1);
                cv::transpose(rgb,rgbt);
                rgb = rgbt;
                cv::Mat deptht(depth.cols, depth.rows, depth.type());
                cv::flip(depth,depth,1);
                cv::transpose(depth,deptht);
                depth = deptht;
                cv::Size sizet(model.imageHeight(), model.imageWidth());
                model = CameraModel(
                    model.fy(),
                    model.fx(),
                    model.cy(),
                    model.cx()>0?model.imageWidth()-model.cx():0,
                    model.localTransform()*rtabmap::Transform(0,0,0,0,0,1.57079632679489661923132169163975144));
                model.setImageSize(sizet);
            }
            else if(colorCameraToDisplayRotation_ == ROTATION_180)
            {
                cv::flip(rgb,rgb,1);
                cv::flip(rgb,rgb,0);
                cv::flip(depth,depth,1);
                cv::flip(depth,depth,0);
                cv::Size sizet(model.imageWidth(), model.imageHeight());
                model = CameraModel(
                    model.fx(),
                    model.fy(),
                    model.cx()>0?model.imageWidth()-model.cx():0,
                    model.cy()>0?model.imageHeight()-model.cy():0,
                    model.localTransform()*rtabmap::Transform(0,0,0,0,0,1.57079632679489661923132169163975144*2.0));
                model.setImageSize(sizet);
            }
            else if(colorCameraToDisplayRotation_ == ROTATION_270)
            {
                cv::Mat rgbt(rgb.cols, rgb.rows, rgb.type());
                cv::transpose(rgb,rgbt);
                cv::flip(rgbt,rgbt,1);
                rgb = rgbt;
                cv::Mat deptht(depth.cols, depth.rows, depth.type());
                cv::transpose(depth,deptht);
                cv::flip(deptht,deptht,1);
                depth = deptht;
                cv::Size sizet(model.imageHeight(), model.imageWidth());
                model = CameraModel(
                    model.fy(),
                    model.fx(),
                    model.cy()>0?model.imageHeight()-model.cy():0,
                    model.cx(),
                    model.localTransform()*rtabmap::Transform(0,0,0,0,0,-1.57079632679489661923132169163975144));
                model.setImageSize(sizet);
            }

            if(smoothing_)
            {
                // Edge-preserving smoothing of the registered depth image; the filtered
                // depth is passed to the SensorData constructor below.
                depth = rtabmap::util2d::fastBilateralFiltering(depth, bilateralFilteringSigmaS, bilateralFilteringSigmaR);
            }

            if(rawScanPublished_)
            {
                data = SensorData(LaserScan::backwardCompatibility(scan, cloud.total()/scanDownsampling, 0, scanLocalTransform), rgb, depth, model, this->getNextSeqID(), rgbStamp);
            }
            else
            {
                data = SensorData(rgb, depth, model, this->getNextSeqID(), rgbStamp);
            }
            // The odometry pose is carried in the ground truth field; mainLoop() moves it
            // into the OdometryEvent.
            data.setGroundTruth(odom);

            // Attach the last known GPS fix if it is recent enough (< 1 s old).
            if(lastKnownGPS_.stamp() > 0.0 && rgbStamp-lastKnownGPS_.stamp()<1.0)
            {
                data.setGPS(lastKnownGPS_);
            }
            else if(lastKnownGPS_.stamp()>0.0)
            {
                LOGD("GPS too old (current time=%f, gps time = %f)", rgbStamp, lastKnownGPS_.stamp());
            }
        }
        else
        {
            LOGE("Could not get depth and rgb images!?!");
        }
    }
    return data;
}

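// Called once when the capture thread starts: wait until at least 5 seconds have
// elapsed since init() so that the Tango service has time to start streaming.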
void CameraTango::mainLoopBegin()
{
    double t = cameraStartedTime_.elapsed();
    if(t < 5.0)
    {
        uSleep((5.0-t)*1000);
    }
}

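// Capture loop: grabs a synchronized frame, converts the pose stored in the ground
// truth field into an OdometryEvent (with a large covariance on the first frame),
// and posts it; when no valid pose is available, an empty OdometryEvent is posted
// to signal that odometry is lost.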
void CameraTango::mainLoop()
{
    if(tango_config_)
    {
        SensorData data = this->captureImage();

        if(!data.groundTruth().isNull())
        {
            // The pose was stored in the ground truth field by captureImage().
            rtabmap::Transform pose = data.groundTruth();
            data.setGroundTruth(Transform());

            bool firstFrame = previousPose_.isNull();
            if(firstFrame)
            {
                // Align Tango timestamps with the host clock (UTimer::now()).
                stampEpochOffset_ = UTimer::now()-data.stamp();
            }
            data.setStamp(stampEpochOffset_ + data.stamp());
            OdometryInfo info;
            if(!firstFrame)
            {
                info.interval = data.stamp()-previousStamp_;
                info.transform = previousPose_.inverse() * pose;
            }
            info.reg.covariance = cv::Mat::eye(6,6,CV_64FC1) * (firstFrame?9999.0:0.0001);
            LOGI("Publish odometry message (variance=%f)", firstFrame?9999.0:0.0001);
            this->post(new OdometryEvent(data, pose, info));
            previousPose_ = pose;
            previousStamp_ = data.stamp();
        }
        else if(!this->isKilled())
        {
            LOGW("Odometry lost");
            this->post(new OdometryEvent());
        }
    }
    else
    {
        UERROR("Camera not initialized, cannot start thread.");
        this->kill();
    }
}

} // namespace rtabmap