21 ROS_INFO(
"Received a new configuration via dynamic_reconfigure");
49 }
catch(visiontransfer::ParameterException& e) {
50 ROS_ERROR(
"ParameterException while connecting to parameter service: %s", e.what());
53 param::ParameterSet ssParams;
56 }
catch(visiontransfer::TransferException& e) {
57 ROS_ERROR(
"TransferException while obtaining parameter set: %s", e.what());
59 }
catch(visiontransfer::ParameterException& e) {
60 ROS_ERROR(
"ParameterException while obtaining parameter set: %s", e.what());
66 dynReconfServer.reset(
new dynamic_reconfigure::Server<nerian_stereo::NerianStereoConfig>());
80 std::string intensityChannel =
"mono8";
81 privateNh.
getParam(
"point_cloud_intensity_channel", intensityChannel);
82 if(intensityChannel ==
"none") {
84 }
else if(intensityChannel ==
"rgb8") {
86 }
else if(intensityChannel ==
"rgb32f") {
151 "/nerian_stereo/disparity_map", 5)));
153 "/nerian_stereo/left_image", 5)));
155 "/nerian_stereo/right_image", 5)));
157 "/nerian_stereo/color_image", 5)));
162 "/nerian_stereo/stereo_camera_info", 1)));
164 "/nerian_stereo/point_cloud", 5)));
187 useTcp ? ImageProtocol::PROTOCOL_TCP : ImageProtocol::PROTOCOL_UDP));
193 if(
asyncTransfer->collectReceivedImageSet(imageSet, 0.005)) {
200 int secs = 0, microsecs = 0;
201 imageSet.getTimestamp(secs, microsecs);
205 bool hasLeft =
false, hasRight =
false, hasColor =
false, hasDisparity =
false;
208 if (imageSet.hasImageType(ImageSet::IMAGE_LEFT)) {
212 if (imageSet.hasImageType(ImageSet::IMAGE_DISPARITY)) {
216 if (imageSet.hasImageType(ImageSet::IMAGE_RIGHT)) {
220 if (imageSet.hasImageType(ImageSet::IMAGE_COLOR)) {
227 ROS_INFO(
"Topics currently being served, based on the device \"Output Channels\" settings:");
228 if (hasLeft)
ROS_INFO(
" /nerian_stereo/left_image");
229 if (hasRight)
ROS_INFO(
" /nerian_stereo/right_image");
230 if (hasColor)
ROS_INFO(
" /nerian_stereo/color_image");
232 ROS_INFO(
" /nerian_stereo/disparity_map");
233 ROS_INFO(
" /nerian_stereo/point_cloud");
235 ROS_WARN(
"Disparity channel deactivated on device -> no disparity or point cloud data!");
272 ROS_WARN(
"No camera calibration file configured. Cannot publish detailed camera information!");
274 bool success =
false;
283 ROS_WARN(
"Error reading calibration file: %s\n" 284 "Cannot publish detailed camera information!",
calibFile.c_str());
298 cvImg.
header.stamp = stamp;
299 cvImg.
header.seq = imageSet.getSequenceNumber();
301 bool format12Bit = (imageSet.getPixelFormat(imageIndex) == ImageSet::FORMAT_12_BIT_MONO);
302 string encoding =
"";
306 switch (imageSet.getPixelFormat(imageIndex)) {
307 case ImageSet::FORMAT_8_BIT_RGB: {
308 cv::Mat rgbImg(imageSet.getHeight(), imageSet.getWidth(),
310 imageSet.getPixelData(imageIndex), imageSet.getRowStride(imageIndex));
311 cvImg.
image = rgbImg;
315 case ImageSet::FORMAT_8_BIT_MONO:
316 case ImageSet::FORMAT_12_BIT_MONO: {
317 cv::Mat monoImg(imageSet.getHeight(), imageSet.getWidth(),
318 format12Bit ? CV_16UC1 : CV_8UC1,
319 imageSet.getPixelData(imageIndex), imageSet.getRowStride(imageIndex));
320 cvImg.
image = monoImg;
321 encoding = (format12Bit ?
"mono16":
"mono8");
325 ROS_WARN(
"Omitting an image with unhandled pixel format");
330 cv::Mat monoImg(imageSet.getHeight(), imageSet.getWidth(),
331 format12Bit ? CV_16UC1 : CV_8UC1,
332 imageSet.getPixelData(imageIndex), imageSet.getRowStride(imageIndex));
335 int dispMin = 0, dispMax = 0;
336 imageSet.getDisparityRange(dispMin, dispMax);
339 colorCodeDispMap ==
"rainbow" ? ColorCoder::COLOR_RAINBOW_BGR : ColorCoder::COLOR_RED_BLUE_BGR,
340 dispMin*16, dispMax*16,
true,
true));
345 colDispMap = cv::Mat_<cv::Vec3b>(monoImg.rows, monoImg.cols);
349 cv::Mat_<cv::Vec3b> dispSection =
colDispMap(cv::Rect(0, 0, monoImg.cols, monoImg.rows));
351 colCoder->codeImage(cv::Mat_<unsigned short>(monoImg), dispSection);
357 sensor_msgs::ImagePtr msg = cvImg.
toImageMsg();
358 msg->encoding = encoding;
364 dst[0] = src[8]; dst[1] = src[9];
365 dst[2] = src[10]; dst[3] = src[11];
367 dst[4] = -src[0]; dst[5] = -src[1];
368 dst[6] = -src[2]; dst[7] = -src[3];
370 dst[8] = -src[4]; dst[9] = -src[5];
371 dst[10] = -src[6]; dst[11] = -src[7];
373 dst[12] = src[12]; dst[13] = src[13];
374 dst[14] = src[14]; dst[15] = src[15];
378 if ((!imageSet.hasImageType(ImageSet::IMAGE_DISPARITY))
379 || (imageSet.getPixelFormat(ImageSet::IMAGE_DISPARITY) != ImageSet::FORMAT_12_BIT_MONO)) {
385 static std::vector<float> q;
387 imageSet.setQMatrix(&q[0]);
394 imageSet.setQMatrix(qRos);
398 float* pointMap =
nullptr;
400 pointMap =
recon3d->createPointMap(imageSet, 0);
401 }
catch(std::exception& ex) {
402 cerr <<
"Error creating point cloud: " << ex.what() << endl;
412 if(
pointCloudMsg->data.size() != imageSet.getWidth()*imageSet.getHeight()*4*
sizeof(float)) {
414 pointCloudMsg->data.resize(imageSet.getWidth()*imageSet.getHeight()*4*
sizeof(float));
428 imageSet.getWidth()*imageSet.getHeight()*4*
sizeof(float));
432 copyPointCloudClamped<0>(pointMap,
reinterpret_cast<float*
>(&
pointCloudMsg->data[0]),
433 imageSet.getWidth()*imageSet.getHeight());
435 copyPointCloudClamped<2>(pointMap,
reinterpret_cast<float*
>(&
pointCloudMsg->data[0]),
436 imageSet.getWidth()*imageSet.getHeight());
440 if (imageSet.hasImageType(ImageSet::IMAGE_LEFT) || imageSet.hasImageType(ImageSet::IMAGE_COLOR)) {
444 copyPointCloudIntensity<INTENSITY>(imageSet);
447 copyPointCloudIntensity<RGB_COMBINED>(imageSet);
450 copyPointCloudIntensity<RGB_SEPARATE>(imageSet);
461 auto imageIndex = imageSet.hasImageType(ImageSet::IMAGE_COLOR) ? ImageSet::IMAGE_COLOR : ImageSet::IMAGE_LEFT;
465 + imageSet.getWidth()*imageSet.getHeight()*4*
sizeof(float);
467 if(imageSet.getPixelFormat(imageIndex) == ImageSet::FORMAT_8_BIT_MONO) {
469 unsigned char* imagePtr = imageSet.getPixelData(imageIndex);
470 unsigned char* rowEndPtr = imagePtr + imageSet.getWidth();
471 int rowIncrement = imageSet.getRowStride(imageIndex) - imageSet.getWidth();
473 for(
unsigned char* cloudPtr = cloudStart + 3*
sizeof(
float);
474 cloudPtr < cloudEnd; cloudPtr+= 4*
sizeof(float)) {
476 *
reinterpret_cast<float*
>(cloudPtr) = static_cast<float>(*imagePtr) / 255.0F;
478 const unsigned char intensity = *imagePtr;
479 *
reinterpret_cast<unsigned int*
>(cloudPtr) = (intensity << 16) | (intensity << 8) | intensity;
481 *cloudPtr = *imagePtr;
485 if(imagePtr == rowEndPtr) {
487 imagePtr += rowIncrement;
488 rowEndPtr = imagePtr + imageSet.getWidth();
491 }
else if(imageSet.getPixelFormat(imageIndex) == ImageSet::FORMAT_12_BIT_MONO) {
493 unsigned short* imagePtr =
reinterpret_cast<unsigned short*
>(imageSet.getPixelData(imageIndex));
494 unsigned short* rowEndPtr = imagePtr + imageSet.getWidth();
495 int rowIncrement = imageSet.getRowStride(imageIndex) - 2*imageSet.getWidth();
497 for(
unsigned char* cloudPtr = cloudStart + 3*
sizeof(
float);
498 cloudPtr < cloudEnd; cloudPtr+= 4*
sizeof(float)) {
501 *
reinterpret_cast<float*
>(cloudPtr) = static_cast<float>(*imagePtr) / 4095.0F;
503 const unsigned char intensity = *imagePtr/16;
504 *
reinterpret_cast<unsigned int*
>(cloudPtr) = (intensity << 16) | (intensity << 8) | intensity;
506 *cloudPtr = *imagePtr/16;
510 if(imagePtr == rowEndPtr) {
512 imagePtr += rowIncrement;
513 rowEndPtr = imagePtr + imageSet.getWidth();
516 }
else if(imageSet.getPixelFormat(imageIndex) == ImageSet::FORMAT_8_BIT_RGB) {
518 unsigned char* imagePtr = imageSet.getPixelData(imageIndex);
519 unsigned char* rowEndPtr = imagePtr + 3*imageSet.getWidth();
520 int rowIncrement = imageSet.getRowStride(imageIndex) - 3*imageSet.getWidth();
522 static bool warned =
false;
525 ROS_WARN(
"RGBF32 is not supported for color images. Please use RGB8!");
528 for(
unsigned char* cloudPtr = cloudStart + 3*
sizeof(
float);
529 cloudPtr < cloudEnd; cloudPtr+= 4*
sizeof(float)) {
531 *
reinterpret_cast<float*
>(cloudPtr) = static_cast<float>(imagePtr[2]) / 255.0F;
533 *
reinterpret_cast<unsigned int*
>(cloudPtr) = (imagePtr[0] << 16) | (imagePtr[1] << 8) | imagePtr[2];
535 *cloudPtr = (imagePtr[0] + imagePtr[1]*2 + imagePtr[2])/4;
539 if(imagePtr == rowEndPtr) {
541 imagePtr += rowIncrement;
542 rowEndPtr = imagePtr + imageSet.getWidth();
546 throw std::runtime_error(
"Invalid pixel format!");
552 float* endPtr = src + 4*size;
553 for(
float *srcPtr = src, *dstPtr = dst; srcPtr < endPtr; srcPtr+=4, dstPtr+=4) {
555 dstPtr[0] = std::numeric_limits<float>::quiet_NaN();
556 dstPtr[1] = std::numeric_limits<float>::quiet_NaN();
557 dstPtr[2] = std::numeric_limits<float>::quiet_NaN();
559 dstPtr[0] = srcPtr[0];
560 dstPtr[1] = srcPtr[1];
561 dstPtr[2] = srcPtr[2];
570 recon3d.reset(
new Reconstruct3D);
576 sensor_msgs::PointField fieldX;
579 fieldX.datatype = sensor_msgs::PointField::FLOAT32;
583 sensor_msgs::PointField fieldY;
585 fieldY.offset =
sizeof(float);
586 fieldY.datatype = sensor_msgs::PointField::FLOAT32;
590 sensor_msgs::PointField fieldZ;
592 fieldZ.offset = 2*
sizeof(float);
593 fieldZ.datatype = sensor_msgs::PointField::FLOAT32;
598 sensor_msgs::PointField fieldI;
599 fieldI.name =
"intensity";
600 fieldI.offset = 3*
sizeof(float);
601 fieldI.datatype = sensor_msgs::PointField::UINT8;
606 sensor_msgs::PointField fieldRed;
608 fieldRed.offset = 3*
sizeof(float);
609 fieldRed.datatype = sensor_msgs::PointField::FLOAT32;
613 sensor_msgs::PointField fieldGreen;
614 fieldGreen.name =
"g";
615 fieldGreen.offset = 3*
sizeof(float);
616 fieldGreen.datatype = sensor_msgs::PointField::FLOAT32;
617 fieldGreen.count = 1;
620 sensor_msgs::PointField fieldBlue;
622 fieldBlue.offset = 3*
sizeof(float);
623 fieldBlue.datatype = sensor_msgs::PointField::FLOAT32;
627 sensor_msgs::PointField fieldRGB;
628 fieldRGB.name =
"rgb";
629 fieldRGB.offset = 3*
sizeof(float);
630 fieldRGB.datatype = sensor_msgs::PointField::UINT32;
639 camInfoMsg.reset(
new nerian_stereo::StereoCameraInfo);
642 camInfoMsg->header.seq = imageSet.getSequenceNumber();
645 std::vector<int> sizeVec;
647 if(sizeVec.size() != 2) {
648 std::runtime_error(
"Calibration file format error!");
654 camInfoMsg->left_info.distortion_model =
"plumb_bob";
670 camInfoMsg->right_info.distortion_model =
"plumb_bob";
677 camInfoMsg->right_info.roi.do_rectify =
false;
692 const float* qMatrix = imageSet.getQMatrix();
693 if(qMatrix[0] != 0.0) {
694 for(
int i=0; i<16; i++) {
695 camInfoMsg->Q[i] =
static_cast<double>(qMatrix[i]);
710 std::vector<double> doubleVec;
713 if(doubleVec.size() != dest.size()) {
714 std::runtime_error(
"Calibration file format error!");
717 std::copy(doubleVec.begin(), doubleVec.end(), dest.begin());
virtual ros::NodeHandle & getPrivateNH()=0
void prepareAsyncTransfer()
Connects to the image service to request the stream of image sets.
std::string internalFrame
sensor_msgs::PointCloud2Ptr pointCloudMsg
boost::scoped_ptr< dynamic_reconfigure::Server< nerian_stereo::NerianStereoConfig > > dynReconfServer
boost::scoped_ptr< ColorCoder > colCoder
boost::scoped_ptr< DataChannelService > dataChannelService
void copyPointCloudIntensity(ImageSet &imageSet)
Copies the intensity or RGB data to the point cloud.
virtual ros::NodeHandle & getNH()=0
void processDataChannels()
void loadCameraCalibration()
Loads a camera calibration file if configured.
nerian_stereo::StereoCameraInfoPtr camInfoMsg
bool initialConfigReceived
boost::scoped_ptr< ros::Publisher > rightImagePublisher
void updateParameterServerFromDevice(param::ParameterSet &cfg)
void updateDynamicReconfigureFromDevice(param::ParameterSet &cfg)
void dynamicReconfigureCallback(nerian_stereo::NerianStereoConfig &config, uint32_t level)
cv::FileStorage calibStorage
void autogen_updateDynamicReconfigureFromDevice(param::ParameterSet &cfg)
Auto-generated code to override the dynamic_reconfigure limits and defaults for all parameters...
geometry_msgs::TransformStamped currentTransform
void processOneImageSet()
cv::Mat_< cv::Vec3b > colDispMap
void autogen_updateParameterServerFromDevice(param::ParameterSet &cfg)
Auto-generated code to set initial parameters according to those obtained from the device...
PointCloudColorMode pointCloudColorMode
sensor_msgs::ImagePtr toImageMsg() const
void publish(const boost::shared_ptr< M > &message) const
void publishImageMsg(const ImageSet &imageSet, int imageIndex, ros::Time stamp, bool allowColorCode, ros::Publisher *publisher)
Publishes the disparity map as 16-bit grayscale image or color coded RGB image.
void initDataChannelService()
boost::scoped_ptr< ros::Publisher > thirdImagePublisher
bool getParam(const std::string &key, std::string &s) const
void initPointCloud()
Performs all necessary initializations for point cloud publishing.
void readCalibrationArray(const char *key, T &dest)
Reads a vector from the calibration file to a boost::array.
boost::scoped_ptr< Reconstruct3D > recon3d
void initDynamicReconfigure()
ros::Time lastCamInfoPublish
boost::scoped_ptr< ros::Publisher > cloudPublisher
boost::scoped_ptr< ros::Publisher > disparityPublisher
boost::scoped_ptr< tf2_ros::TransformBroadcaster > transformBroadcaster
void setParam(const std::string &key, const XmlRpc::XmlRpcValue &v) const
void publishPointCloudMsg(ImageSet &imageSet, ros::Time stamp)
Reconstructs the 3D locations from the disparity map and publishes them as a point cloud.
A driver node that receives data from Nerian stereo devices and forwards it to ROS.
boost::scoped_ptr< ros::Publisher > leftImagePublisher
boost::scoped_ptr< DeviceParameters > deviceParameters
void copyPointCloudClamped(float *src, float *dst, int size)
Copies all points in a point cloud that have a depth smaller than maxDepth. Other points are set to NaN.
uint32_t getNumSubscribers() const
std::string colorCodeDispMap
void publishCameraInfo(ros::Time stamp, const ImageSet &imageSet)
Publishes the camera info once per second.
void qMatrixToRosCoords(const float *src, float *dst)
Transform Q matrix to match the ROS coordinate system: Swap y/z axis, then swap x/y axis...
void init()
Performs general initializations.
void autogen_dynamicReconfigureCallback(nerian_stereo::NerianStereoConfig &config, uint32_t level)
Auto-generated code to check for parameter changes and forward them to the device.
boost::scoped_ptr< ros::Publisher > cameraInfoPublisher
nerian_stereo::NerianStereoConfig lastKnownConfig
boost::scoped_ptr< AsyncTransfer > asyncTransfer