// NOTE(review): this span is a garbled extraction -- the file's #include lines
// and the DC1394Device stop/cleanup (destructor) body are fused together, and
// interior source lines (84, 86-88, 91-94) are missing. The leading numbers
// embedded in the text are the original file's line numbers, not code.
31 #include "rtabmap/core/Version.h" 42 #include <opencv2/imgproc/types_c.h> 43 #if CV_MAJOR_VERSION >= 3 44 #include <opencv2/videoio/videoio_c.h> 48 #include <dc1394/dc1394.h> 51 #ifdef RTABMAP_FLYCAPTURE2 53 #include <fc2triclops.h> 57 #include <sl/Camera.hpp> 82 if (DC1394_SUCCESS != dc1394_video_set_transmission(camera_, DC1394_OFF) ||
83 DC1394_SUCCESS != dc1394_capture_stop(camera_))
// Failure to stop the camera is non-fatal: warn and fall through to free it.
85 UWARN(
"unable to stop camera");
// Release capture resources and the camera handle unconditionally
// (dc1394_capture_stop is presumably safe to call twice -- TODO confirm).
89 dc1394_capture_stop(camera_);
90 dc1394_camera_free(camera_);
// Tear down the libdc1394 context last, after the camera it owns.
95 dc1394_free(context_);
// Accessor for this device's GUID as a string. The value is presumably a hex
// rendering built elsewhere (the init code at original lines 152-155 formats
// camera_->guid with uFormat("%06x%02x%08x", ...)) -- TODO confirm, since the
// assignment site is only partially visible in this extraction.
100 const std::string & guid()
const {
return guid_;}
107 dc1394_capture_stop(camera_);
108 dc1394_camera_free(camera_);
116 context_ = dc1394_new ();
117 if (context_ == NULL)
119 UERROR(
"Could not initialize dc1394_context.\n" 120 "Make sure /dev/raw1394 exists, you have access permission,\n" 121 "and libraw1394 development package is installed.");
126 dc1394camera_list_t *list;
127 err = dc1394_camera_enumerate(context_, &list);
128 if (err != DC1394_SUCCESS)
130 UERROR(
"Could not get camera list");
136 UERROR(
"No cameras found");
137 dc1394_camera_free_list (list);
141 dc1394_camera_free_list (list);
144 camera_ = dc1394_camera_new (context_, guid);
147 UERROR(
"Failed to initialize camera with GUID [%016lx]", guid);
152 value[0]= camera_->guid & 0xffffffff;
153 value[1]= (camera_->guid >>32) & 0x000000ff;
154 value[2]= (camera_->guid >>40) & 0xfffff;
155 guid_ =
uFormat(
"%06x%02x%08x", value[2], value[1], value[0]);
157 UINFO(
"camera model: %s %s", camera_->vendor, camera_->model);
161 bool bmode = camera_->bmode_capable;
163 && (DC1394_SUCCESS !=
164 dc1394_video_set_operation_mode(camera_,
165 DC1394_OPERATION_MODE_1394B)))
168 UWARN(
"failed to set IEEE1394b mode");
172 dc1394speed_t request = DC1394_ISO_SPEED_3200;
177 request = DC1394_ISO_SPEED_400;
184 if (request <= DC1394_ISO_SPEED_MIN)
187 dc1394speed_t curSpeed;
188 if (DC1394_SUCCESS == dc1394_video_get_iso_speed(camera_, &curSpeed) && curSpeed <= DC1394_ISO_SPEED_MAX)
194 rate = 100 << (curSpeed - DC1394_ISO_SPEED_MIN);
198 UWARN(
"Unable to get ISO speed; assuming 400Mb/s");
200 request = DC1394_ISO_SPEED_400;
205 request = (dc1394speed_t) ((
int) request - 1);
210 if (DC1394_SUCCESS != dc1394_video_set_iso_speed(camera_, request))
212 UERROR(
"Failed to set iso speed");
217 dc1394video_modes_t vmodes;
218 err = dc1394_video_get_supported_modes(camera_, &vmodes);
219 if (err != DC1394_SUCCESS)
221 UERROR(
"unable to get supported video modes");
222 return (dc1394video_mode_t) 0;
227 dc1394video_mode_t videoMode = DC1394_VIDEO_MODE_FORMAT7_3;
228 for (
uint32_t i = 0; i < vmodes.num; ++i)
230 if (vmodes.modes[i] == videoMode)
237 UERROR(
"unable to get video mode %d", videoMode);
241 if (DC1394_SUCCESS != dc1394_video_set_mode(camera_, videoMode))
243 UERROR(
"Failed to set video mode %d", videoMode);
248 if (dc1394_is_video_mode_scalable(videoMode) == DC1394_TRUE)
250 if (DC1394_SUCCESS != dc1394_format7_set_color_coding(camera_, videoMode, DC1394_COLOR_CODING_RAW16))
252 UERROR(
"Could not set color coding");
256 if (DC1394_SUCCESS != dc1394_format7_get_recommended_packet_size(camera_, videoMode, &packetSize))
258 UERROR(
"Could not get default packet size");
262 if (DC1394_SUCCESS != dc1394_format7_set_packet_size(camera_, videoMode, packetSize))
264 UERROR(
"Could not set packet size");
270 UERROR(
"Video is not in mode scalable");
275 if (DC1394_SUCCESS != dc1394_capture_setup(camera_, 4, DC1394_CAPTURE_FLAGS_DEFAULT))
277 UERROR(
"Failed to open device!");
282 if (DC1394_SUCCESS != dc1394_video_set_transmission(camera_, DC1394_ON))
284 UERROR(
"Failed to start device!");
// Dequeues one interlaced stereo frame from the DC1394 camera and splits it
// into a color left image (Bayer RG -> BGR) and a grayscale right image.
// Returns true/false per the (missing) surrounding control flow -- several
// original lines (292-294, 298-299, 301-302, 311-312, 315-317, etc.) are
// absent from this extraction, so error/return paths cannot be fully read.
291 bool getImages(cv::Mat & left, cv::Mat & right)
295 dc1394video_frame_t * frame =
NULL;
296 UDEBUG(
"[%016lx] waiting camera", camera_->guid);
// Blocking dequeue: POLICY_WAIT stalls until a frame is available.
297 dc1394_capture_dequeue (camera_, DC1394_CAPTURE_POLICY_WAIT, &frame);
300 UERROR(
"Unable to capture frame");
// Make a shallow copy of the frame header, then give it its own buffer so
// the deinterlaced output does not clobber the driver-owned ring buffer.
303 dc1394video_frame_t frame1 = *frame;
305 size_t frame1_size = frame->total_bytes;
306 frame1.image = (
unsigned char *) malloc(frame1_size);
307 frame1.allocated_image_bytes = frame1_size;
308 frame1.color_coding = DC1394_COLOR_CODING_RAW8;
// Split the RAW16 interlaced stereo stream into two stacked RAW8 images.
309 int err = dc1394_deinterlace_stereo_frames(frame, &frame1, DC1394_STEREO_METHOD_INTERLACED);
310 if (err != DC1394_SUCCESS)
// On failure, return the driver frame to the ring buffer before bailing.
// NOTE(review): frame1.image (malloc'd above) does not appear to be freed on
// this path in the visible lines -- possible leak; confirm against lines 311-317.
313 dc1394_capture_enqueue(camera_, frame);
314 UERROR(
"Could not extract stereo frames");
318 uint8_t* capture_buffer =
reinterpret_cast<uint8_t *
>(frame1.image);
321 cv::Mat image(frame->size[1], frame->size[0], CV_8UC3);
322 cv::Mat image2 = image.clone();
// Left half of the deinterlaced buffer -> color left; the second image-sized
// chunk (offset by image.total() pixels) -> grayscale right.
326 cv::cvtColor(cv::Mat(frame->size[1], frame->size[0], CV_8UC1, capture_buffer), left, CV_BayerRG2BGR);
327 cv::cvtColor(cv::Mat(frame->size[1], frame->size[0], CV_8UC1, capture_buffer+image.total()), right, CV_BayerRG2GRAY);
// Hand the driver frame back; frame1.image is presumably freed in the missing
// lines after 329 -- TODO confirm.
329 dc1394_capture_enqueue(camera_, frame);
339 dc1394camera_t *camera_;
347 #ifdef RTABMAP_DC1394 355 Camera(imageRate, localTransform)
356 #ifdef RTABMAP_DC1394
361 #ifdef RTABMAP_DC1394 362 device_ =
new DC1394Device();
368 #ifdef RTABMAP_DC1394 375 #ifdef RTABMAP_DC1394 378 bool ok = device_->init();
382 if(!calibrationFolder.empty())
384 if(!stereoModel_.load(calibrationFolder, cameraName.empty()?device_->guid():cameraName,
false))
386 UWARN(
"Missing calibration files for camera \"%s\" in \"%s\" folder, you should calibrate the camera!",
387 cameraName.empty()?device_->guid().c_str():cameraName.c_str(), calibrationFolder.c_str());
391 UINFO(
"Stereo parameters: fx=%f cx=%f cy=%f baseline=%f",
392 stereoModel_.left().fx(),
393 stereoModel_.left().cx(),
394 stereoModel_.left().cy(),
395 stereoModel_.baseline());
402 UERROR(
"CameraDC1394: RTAB-Map is not built with dc1394 support!");
409 #ifdef RTABMAP_DC1394 410 return stereoModel_.isValidForProjection();
418 #ifdef RTABMAP_DC1394 421 return device_->guid();
430 #ifdef RTABMAP_DC1394 434 device_->getImages(left, right);
436 if(!left.empty() && !right.empty())
439 if(stereoModel_.left().isValidForRectification())
441 left = stereoModel_.left().rectifyImage(left);
443 if(stereoModel_.right().isValidForRectification())
445 right = stereoModel_.right().rectifyImage(right);
448 if(stereoModel_.isValidForProjection())
451 stereoModel_.left().fx(),
452 stereoModel_.left().fy(),
453 stereoModel_.left().cx(),
454 stereoModel_.left().cy(),
455 stereoModel_.baseline(),
463 UERROR(
"CameraDC1394: RTAB-Map is not built with dc1394 support!");
472 Camera(imageRate, localTransform)
473 #ifdef RTABMAP_FLYCAPTURE2
479 #ifdef RTABMAP_FLYCAPTURE2 480 camera_ =
new FlyCapture2::Camera();
486 #ifdef RTABMAP_FLYCAPTURE2 488 camera_->StopCapture();
489 camera_->Disconnect();
492 triclopsDestroyContext( triclopsCtx_ ) ;
500 #ifdef RTABMAP_FLYCAPTURE2 509 #ifdef RTABMAP_FLYCAPTURE2 513 camera_->StopCapture();
514 camera_->Disconnect();
518 triclopsDestroyContext(triclopsCtx_);
523 FlyCapture2::Error fc2Error = camera_->Connect();
524 if(fc2Error != FlyCapture2::PGRERROR_OK)
526 UERROR(
"Failed to connect the camera.");
531 Fc2Triclops::StereoCameraMode mode = Fc2Triclops::TWO_CAMERA_NARROW;
532 if(Fc2Triclops::setStereoMode(*camera_, mode ))
534 UERROR(
"Failed to set stereo mode.");
539 FlyCapture2::CameraInfo camInfo;
540 if(camera_->GetCameraInfo(&camInfo) != FlyCapture2::PGRERROR_OK)
542 UERROR(
"Failed to get camera info.");
548 FlyCapture2::Format7ImageSettings imageSettings;
551 if(camera_->GetFormat7Configuration(&imageSettings, &packetSz, &dummy) == FlyCapture2::PGRERROR_OK)
553 maxHeight = imageSettings.height;
554 maxWidth = imageSettings.width;
558 if(Fc2Triclops::getContextFromCamera(camInfo.serialNumber, &triclopsCtx_))
560 UERROR(
"Failed to get calibration from the camera.");
564 float fx, cx, cy, baseline;
565 triclopsGetFocalLength(triclopsCtx_, &fx);
566 triclopsGetImageCenter(triclopsCtx_, &cy, &cx);
567 triclopsGetBaseline(triclopsCtx_, &baseline);
568 UINFO(
"Stereo parameters: fx=%f cx=%f cy=%f baseline=%f", fx, cx, cy, baseline);
570 triclopsSetCameraConfiguration(triclopsCtx_, TriCfg_2CAM_HORIZONTAL_NARROW );
571 UASSERT(triclopsSetResolutionAndPrepare(triclopsCtx_, maxHeight, maxWidth, maxHeight, maxWidth) == Fc2Triclops::ERRORTYPE_OK);
573 if(camera_->StartCapture() != FlyCapture2::PGRERROR_OK)
575 UERROR(
"Failed to start capture.");
581 UERROR(
"CameraStereoFlyCapture2: RTAB-Map is not built with Triclops support!");
588 #ifdef RTABMAP_FLYCAPTURE2 591 float fx, cx, cy, baseline;
592 triclopsGetFocalLength(triclopsCtx_, &fx);
593 triclopsGetImageCenter(triclopsCtx_, &cy, &cx);
594 triclopsGetBaseline(triclopsCtx_, &baseline);
595 return fx > 0.0f && cx > 0.0f && cy > 0.0f && baseline > 0.0f;
603 #ifdef RTABMAP_FLYCAPTURE2 604 if(camera_ && camera_->IsConnected())
606 FlyCapture2::CameraInfo camInfo;
607 if(camera_->GetCameraInfo(&camInfo) == FlyCapture2::PGRERROR_OK)
617 #ifdef RTABMAP_FLYCAPTURE2 618 struct ImageContainer
620 FlyCapture2::Image tmp[2];
621 FlyCapture2::Image unprocessed[2];
628 #ifdef RTABMAP_FLYCAPTURE2 629 if(camera_ && triclopsCtx_ && camera_->IsConnected())
633 FlyCapture2::Image grabbedImage;
634 if(camera_->RetrieveBuffer(&grabbedImage) == FlyCapture2::PGRERROR_OK)
637 ImageContainer imageCont;
640 FlyCapture2::Image imageRawRight;
641 FlyCapture2::Image imageRawLeft;
642 FlyCapture2::Image * unprocessedImage = imageCont.unprocessed;
645 if(Fc2Triclops::unpackUnprocessedRawOrMono16Image(
649 imageRawRight ) == Fc2Triclops::ERRORTYPE_OK)
652 FlyCapture2::Image srcImgRightRef(imageRawRight);
653 FlyCapture2::Image srcImgLeftRef(imageRawLeft);
656 if ( srcImgRightRef.SetColorProcessing(FlyCapture2::HQ_LINEAR) != FlyCapture2::PGRERROR_OK ||
657 srcImgLeftRef.SetColorProcessing(FlyCapture2::HQ_LINEAR) != FlyCapture2::PGRERROR_OK)
664 FlyCapture2::Image imageColorRight;
665 FlyCapture2::Image imageColorLeft;
666 if ( srcImgRightRef.Convert(FlyCapture2::PIXEL_FORMAT_MONO8, &imageColorRight) != FlyCapture2::PGRERROR_OK ||
667 srcImgLeftRef.Convert(FlyCapture2::PIXEL_FORMAT_BGRU, &imageColorLeft) != FlyCapture2::PGRERROR_OK)
675 TriclopsInput triclopsColorInputs;
676 triclopsBuildRGBTriclopsInput(
677 grabbedImage.GetCols(),
678 grabbedImage.GetRows(),
679 imageColorRight.GetStride(),
680 (
unsigned long)grabbedImage.GetTimeStamp().seconds,
681 (
unsigned long)grabbedImage.GetTimeStamp().microSeconds,
682 imageColorRight.GetData(),
683 imageColorRight.GetData(),
684 imageColorRight.GetData(),
685 &triclopsColorInputs);
687 triclopsRectify(triclopsCtx_, const_cast<TriclopsInput *>(&triclopsColorInputs) );
689 TriclopsImage rectifiedImage;
690 triclopsGetImage( triclopsCtx_,
696 right = cv::Mat(rectifiedImage.nrows, rectifiedImage.ncols, CV_8UC1, rectifiedImage.data).clone();
699 triclopsBuildPackedTriclopsInput(
700 grabbedImage.GetCols(),
701 grabbedImage.GetRows(),
702 imageColorLeft.GetStride(),
703 (
unsigned long)grabbedImage.GetTimeStamp().seconds,
704 (
unsigned long)grabbedImage.GetTimeStamp().microSeconds,
705 imageColorLeft.GetData(),
706 &triclopsColorInputs );
708 cv::Mat pixelsLeftBuffer( grabbedImage.GetRows(), grabbedImage.GetCols(), CV_8UC4);
709 TriclopsPackedColorImage colorImage;
710 triclopsSetPackedColorImageBuffer(
713 (TriclopsPackedColorPixel*)pixelsLeftBuffer.data );
715 triclopsRectifyPackedColorImage(
718 &triclopsColorInputs,
721 cv::cvtColor(pixelsLeftBuffer, left, CV_RGBA2RGB);
724 float fx, cy, cx, baseline;
725 triclopsGetFocalLength(triclopsCtx_, &fx);
726 triclopsGetImageCenter(triclopsCtx_, &cy, &cx);
727 triclopsGetBaseline(triclopsCtx_, &baseline);
745 UERROR(
"CameraStereoFlyCapture2: RTAB-Map is not built with Triclops support!");
768 bool computeOdometry,
771 bool selfCalibration) :
772 Camera(imageRate, localTransform)
777 usbDevice_(deviceId),
779 resolution_(resolution),
781 selfCalibration_(selfCalibration),
782 sensingMode_(sensingMode),
783 confidenceThr_(confidenceThr),
784 computeOdometry_(computeOdometry),
790 UASSERT(resolution_ >= sl::RESOLUTION_HD2K && resolution_ <sl::RESOLUTION_LAST);
791 UASSERT(quality_ >= sl::DEPTH_MODE_NONE && quality_ <sl::DEPTH_MODE_LAST);
792 UASSERT(sensingMode_ >= sl::SENSING_MODE_STANDARD && sensingMode_ <sl::SENSING_MODE_LAST);
793 UASSERT(confidenceThr_ >= 0 && confidenceThr_ <=100);
798 const std::string & filePath,
802 bool computeOdometry,
805 bool selfCalibration) :
806 Camera(imageRate, localTransform)
812 svoFilePath_(filePath),
815 selfCalibration_(selfCalibration),
816 sensingMode_(sensingMode),
817 confidenceThr_(confidenceThr),
818 computeOdometry_(computeOdometry),
824 UASSERT(resolution_ >= sl::RESOLUTION_HD2K && resolution_ <sl::RESOLUTION_LAST);
825 UASSERT(quality_ >= sl::DEPTH_MODE_NONE && quality_ <sl::DEPTH_MODE_LAST);
826 UASSERT(sensingMode_ >= sl::SENSING_MODE_STANDARD && sensingMode_ <sl::SENSING_MODE_LAST);
827 UASSERT(confidenceThr_ >= 0 && confidenceThr_ <=100);
850 sl::InitParameters param;
851 param.camera_resolution=
static_cast<sl::RESOLUTION
>(resolution_);
853 param.camera_linux_id=usbDevice_;
854 param.depth_mode=(sl::DEPTH_MODE)quality_;
855 param.coordinate_units=sl::UNIT_METER;
856 param.coordinate_system=(sl::COORDINATE_SYSTEM)sl::COORDINATE_SYSTEM_IMAGE ;
857 param.sdk_verbose=
true;
859 param.depth_minimum_distance=-1;
860 param.camera_disable_self_calib=!selfCalibration_;
862 sl::ERROR_CODE r = sl::ERROR_CODE::SUCCESS;
865 UINFO(
"svo file = %s", svoFilePath_.c_str());
866 zed_ =
new sl::Camera();
867 param.svo_input_filename=svoFilePath_.c_str();
868 r = zed_->open(param);
872 UINFO(
"Resolution=%d imagerate=%f device=%d", resolution_,
getImageRate(), usbDevice_);
873 zed_ =
new sl::Camera();
874 r = zed_->open(param);
877 if(r!=sl::ERROR_CODE::SUCCESS)
879 UERROR(
"Camera initialization failed: \"%s\"", toString(r).c_str());
886 UINFO(
"Init ZED: Mode=%d Unit=%d CoordinateSystem=%d Verbose=false device=-1 minDist=-1 self-calibration=%s vflip=false",
887 quality_, sl::UNIT_METER, sl::COORDINATE_SYSTEM_IMAGE , selfCalibration_?
"true":
"false");
890 if(quality_!=sl::DEPTH_MODE_NONE)
892 zed_->setConfidenceThreshold(confidenceThr_);
895 if (computeOdometry_)
897 sl::TrackingParameters tparam;
898 tparam.enable_spatial_memory=
false;
899 zed_->enableTracking(tparam);
900 if(r!=sl::ERROR_CODE::SUCCESS)
902 UERROR(
"Camera tracking initialization failed: \"%s\"", toString(r).c_str());
906 sl::CameraInformation infos = zed_->getCameraInformation();
907 sl::CalibrationParameters *stereoParams = &(infos.calibration_parameters );
908 sl::Resolution res = stereoParams->left_cam.image_size;
911 stereoParams->left_cam.fx,
912 stereoParams->left_cam.fy,
913 stereoParams->left_cam.cx,
914 stereoParams->left_cam.cy,
916 this->getLocalTransform(),
917 cv::Size(res.width, res.height));
919 UINFO(
"Calibration: fx=%f, fy=%f, cx=%f, cy=%f, baseline=%f, width=%d, height=%d, transform=%s",
920 stereoParams->left_cam.fx,
921 stereoParams->left_cam.fy,
922 stereoParams->left_cam.cx,
923 stereoParams->left_cam.cy,
927 this->getLocalTransform().prettyPrint().c_str());
931 UERROR(
"CameraStereoZED: RTAB-Map is not built with ZED sdk support!");
939 return stereoModel_.isValidForProjection();
950 return uFormat(
"%x", zed_->getCameraInformation ().serial_number);
959 return computeOdometry_;
// Wraps a ZED SDK sl::Mat as a cv::Mat WITHOUT copying: the returned cv::Mat
// aliases the sl::Mat's CPU buffer (getPtr(sl::MEM_CPU)), so it is only valid
// while `input` is alive and unmodified. Callers that need ownership must
// .clone() (as the visible captureImage code does via copyTo/clone).
// NOTE(review): the switch's default branch and the function's closing brace
// fall in lines missing from this extraction (977-980).
965 static cv::Mat slMat2cvMat(sl::Mat& input) {
// Map each sl::MAT_TYPE to the matching OpenCV element type.
968 switch (input.getDataType()) {
969 case sl::MAT_TYPE_32F_C1: cv_type = CV_32FC1;
break;
970 case sl::MAT_TYPE_32F_C2: cv_type = CV_32FC2;
break;
971 case sl::MAT_TYPE_32F_C3: cv_type = CV_32FC3;
break;
972 case sl::MAT_TYPE_32F_C4: cv_type = CV_32FC4;
break;
973 case sl::MAT_TYPE_8U_C1: cv_type = CV_8UC1;
break;
974 case sl::MAT_TYPE_8U_C2: cv_type = CV_8UC2;
break;
975 case sl::MAT_TYPE_8U_C3: cv_type = CV_8UC3;
break;
976 case sl::MAT_TYPE_8U_C4: cv_type = CV_8UC4;
break;
// Header-only construction over the existing CPU buffer (no deep copy).
981 return cv::Mat(input.getHeight(), input.getWidth(), cv_type, input.getPtr<sl::uchar1>(sl::MEM_CPU));
// Converts a ZED SDK sl::Pose into an rtabmap Transform by copying the top
// three rows (3x4) of the row-major 4x4 pose matrix; the constant bottom row
// is dropped. NOTE(review): the opening brace and the `return Transform(`
// line (originals 985-986) are missing from this extraction.
984 Transform zedPoseToTransform(
const sl::Pose & pose)
987 pose.pose_data.m[0], pose.pose_data.m[1], pose.pose_data.m[2], pose.pose_data.m[3],
988 pose.pose_data.m[4], pose.pose_data.m[5], pose.pose_data.m[6], pose.pose_data.m[7],
989 pose.pose_data.m[8], pose.pose_data.m[9], pose.pose_data.m[10], pose.pose_data.m[11]);
997 sl::RuntimeParameters rparam((sl::SENSING_MODE)sensingMode_, quality_ > 0, quality_ > 0, sl::REFERENCE_FRAME_CAMERA);
1001 bool res = zed_->grab(rparam);
1006 res = zed_->grab(rparam);
1008 if(res==sl::SUCCESS)
1012 zed_->retrieveImage(tmp,sl::VIEW_LEFT);
1013 cv::Mat rgbaLeft = slMat2cvMat(tmp);
1016 cv::cvtColor(rgbaLeft, left, cv::COLOR_BGRA2BGR);
1023 zed_->retrieveMeasure(tmp,sl::MEASURE_DEPTH);
1024 slMat2cvMat(tmp).copyTo(depth);
1031 sl::Mat tmp;zed_->retrieveImage(tmp,sl::VIEW_RIGHT );
1032 cv::Mat rgbaRight = slMat2cvMat(tmp);
1034 cv::cvtColor(rgbaRight, right, cv::COLOR_BGRA2GRAY);
1039 if (computeOdometry_ && info)
1042 sl::TRACKING_STATE tracking_state = zed_->getPosition(pose);
1043 if (tracking_state == sl::TRACKING_STATE_OK)
1045 int trackingConfidence = pose.pose_confidence;
1047 if (trackingConfidence>0)
1049 info->
odomPose = zedPoseToTransform(pose);
1053 Transform opticalTransform(0, 0, 1, 0, -1, 0, 0, 0, 0, -1, 0, 0);
1064 info->
odomCovariance = cv::Mat::eye(6, 6, CV_64FC1) * 1.0f / float(trackingConfidence);
1072 UWARN(
"ZED lost! (trackingConfidence=%d)", trackingConfidence);
1079 UWARN(
"ZED lost! (trackingConfidence=%d)", trackingConfidence);
1084 UWARN(
"Tracking not ok: state=\"%s\"", toString(tracking_state).c_str());
1090 UERROR(
"CameraStereoZed: Failed to grab images after 2 seconds!");
1094 UWARN(
"CameraStereoZed: end of stream is reached!");
1098 UERROR(
"CameraStereoZED: RTAB-Map is not built with ZED sdk support!");
1113 const std::string & pathLeftImages,
1114 const std::string & pathRightImages,
1118 CameraImages(pathLeftImages, imageRate, localTransform),
1125 const std::string & pathLeftRightImages,
1133 if(paths.size() >= 1)
1138 if(paths.size() >= 2)
1145 UERROR(
"The path is empty!");
1159 if(!calibrationFolder.empty() && !cameraName.empty())
1163 UWARN(
"Missing calibration files for camera \"%s\" in \"%s\" folder, you should calibrate the camera!",
1164 cameraName.c_str(), calibrationFolder.c_str());
1168 UINFO(
"Stereo parameters: fx=%f cx=%f cy=%f baseline=%f",
1180 UERROR(
"Parameter \"rectifyImages\" is set, but no stereo model is loaded or valid.");
1188 bool success =
false;
1202 UERROR(
"Cameras don't have the same number of images (%d vs %d)",
1208 UERROR(
"Cannot initialize the second camera.");
1250 cv::Mat leftImage = left.
imageRaw();
1251 cv::Mat rightImage = right.
imageRaw();
1252 if(rightImage.type() != CV_8UC1)
1255 cv::cvtColor(rightImage, tmp, CV_BGR2GRAY);
1285 const std::string & path,
1289 Camera(imageRate, localTransform),
1291 rectifyImages_(rectifyImages),
1299 const std::string & pathLeft,
1300 const std::string & pathRight,
1304 Camera(imageRate, localTransform),
1319 Camera(imageRate, localTransform),
1333 Camera(imageRate, localTransform),
1392 ULOGGER_ERROR(
"CameraStereoVideo: Failed to create a capture object!");
1400 unsigned int guid = (
unsigned int)
capture_.get(CV_CAP_PROP_GUID);
1401 if (guid != 0 && guid != 0xffffffff)
1408 if(!calibrationFolder.empty() && !
cameraName_.empty())
1412 UWARN(
"Missing calibration files for camera \"%s\" in \"%s\" folder, you should calibrate the camera!",
1417 UINFO(
"Stereo parameters: fx=%f cx=%f cy=%f baseline=%f",
1428 UERROR(
"Parameter \"rectifyImages\" is set, but no stereo model is loaded or valid.");
1460 leftImage = cv::Mat(img, cv::Rect( 0, 0, img.size().width/2, img.size().height ));
1461 rightImage = cv::Mat(img, cv::Rect( img.size().width/2, 0, img.size().width/2, img.size().height ));
1467 else if(leftImage.cols != rightImage.cols || leftImage.rows != rightImage.rows)
1469 UERROR(
"Left and right streams don't have image of the same size: left=%dx%d right=%dx%d",
1470 leftImage.cols, leftImage.rows, rightImage.cols, rightImage.rows);
1475 bool rightCvt =
false;
1476 if(rightImage.type() != CV_8UC1)
1479 cv::cvtColor(rightImage, tmp, CV_BGR2GRAY);
1491 leftImage = leftImage.clone();
1494 rightImage = rightImage.clone();
1507 ULOGGER_WARN(
"The camera must be initialized before requesting an image.");
void setPath(const std::string &dir)
StereoCameraModel stereoModel_
virtual ~CameraStereoFlyCapture2()
cv::VideoCapture capture_
void setImagesRectified(bool enabled)
const Transform & getLocalTransform() const
virtual SensorData captureImage(CameraInfo *info=0)
virtual SensorData captureImage(CameraInfo *info=0)
virtual SensorData captureImage(CameraInfo *info=0)
virtual bool init(const std::string &calibrationFolder=".", const std::string &cameraName="")
CameraStereoVideo(const std::string &pathSideBySide, bool rectifyImages=false, float imageRate=0.0f, const Transform &localTransform=Transform::getIdentity())
virtual SensorData captureImage(CameraInfo *info=0)
Basic mathematics functions.
virtual ~CameraStereoZed()
unsigned int imagesCount() const
Some conversion functions.
const LaserScan & laserScanRaw() const
const Transform & groundTruth() const
const cv::Mat & imageRaw() const
bool uStrContains(const std::string &string, const std::string &substring)
virtual bool init(const std::string &calibrationFolder=".", const std::string &cameraName="")
virtual bool isCalibrated() const
std::list< std::string > uSplit(const std::string &str, char separator= ' ')
#define UASSERT(condition)
SensorData takeImage(CameraInfo *info=0)
Wrappers of STL for convenient functions.
virtual std::string getSerial() const
bool isValidForProjection() const
const CameraModel & left() const
void setBayerMode(int mode)
virtual SensorData captureImage(CameraInfo *info=0)
#define ULOGGER_DEBUG(...)
virtual ~CameraStereoImages()
CameraStereoImages(const std::string &pathLeftImages, const std::string &pathRightImages, bool rectifyImages=false, float imageRate=0.0f, const Transform &localTransform=Transform::getIdentity())
virtual bool isCalibrated() const
bool isValidForRectification() const
StereoCameraModel stereoModel_
virtual bool isCalibrated() const
cv::Mat rectifyImage(const cv::Mat &raw, int interpolation=cv::INTER_LINEAR) const
virtual std::string getSerial() const
void uSleep(unsigned int ms)
std::vector< V > uListToVector(const std::list< V > &list)
float getImageRate() const
bool load(const std::string &directory, const std::string &cameraName, bool ignoreStereoTransform=true)
bool isValidForRectification() const
const CameraModel & right() const
virtual bool init(const std::string &calibrationFolder=".", const std::string &cameraName="")
virtual std::string getSerial() const
#define ULOGGER_WARN(...)
virtual ~CameraStereoVideo()
virtual bool init(const std::string &calibrationFolder=".", const std::string &cameraName="")
void setName(const std::string &name, const std::string &leftSuffix="left", const std::string &rightSuffix="right")
virtual std::string getSerial() const
virtual bool isCalibrated() const
virtual ~CameraStereoDC1394()
CameraStereoDC1394(float imageRate=0.0f, const Transform &localTransform=Transform::getIdentity())
void setGroundTruth(const Transform &pose)
void setLocalTransform(const Transform &transform)
CameraStereoFlyCapture2(float imageRate=0.0f, const Transform &localTransform=Transform::getIdentity())
void setImageSize(const cv::Size &size)
bool isImagesRectified() const
virtual bool isCalibrated() const
virtual bool init(const std::string &calibrationFolder=".", const std::string &cameraName="")
#define ULOGGER_ERROR(...)
virtual bool init(const std::string &calibrationFolder=".", const std::string &cameraName="")
std::string UTILITE_EXP uFormat(const char *fmt,...)
virtual bool odomProvided() const
std::string UTILITE_EXP uNumber2Str(unsigned int number)
CameraStereoZed(int deviceId, int resolution=2, int quality=1, int sensingMode=0, int confidenceThr=100, bool computeOdometry=false, float imageRate=0.0f, const Transform &localTransform=Transform::getIdentity(), bool selfCalibration=true)
virtual std::string getSerial() const
cv::VideoCapture capture2_
virtual SensorData captureImage(CameraInfo *info=0)
const std::string & name() const