21 ROS_INFO(
"Received a new configuration via dynamic_reconfigure");
45 std::map<std::string, ParameterInfo> ssParams;
50 }
catch(visiontransfer::ParameterException& e) {
51 ROS_ERROR(
"ParameterException while connecting to parameter service: %s", e.what());
56 }
catch(visiontransfer::TransferException& e) {
57 ROS_ERROR(
"TransferException while obtaining parameter enumeration: %s", e.what());
59 }
catch(visiontransfer::ParameterException& e) {
60 ROS_ERROR(
"ParameterException while obtaining parameter enumeration: %s", e.what());
66 dynReconfServer.reset(
new dynamic_reconfigure::Server<nerian_stereo::NerianStereoConfig>());
80 std::string intensityChannel =
"mono8";
81 privateNh.
getParam(
"point_cloud_intensity_channel", intensityChannel);
82 if(intensityChannel ==
"none") {
84 }
else if(intensityChannel ==
"rgb8") {
86 }
else if(intensityChannel ==
"rgb32f") {
151 "/nerian_stereo/disparity_map", 5)));
153 "/nerian_stereo/left_image", 5)));
155 "/nerian_stereo/right_image", 5)));
160 "/nerian_stereo/stereo_camera_info", 1)));
162 "/nerian_stereo/point_cloud", 5)));
185 useTcp ? ImageProtocol::PROTOCOL_TCP : ImageProtocol::PROTOCOL_UDP));
191 if(
asyncTransfer->collectReceivedImageSet(imageSet, 0.005)) {
198 int secs = 0, microsecs = 0;
199 imageSet.getTimestamp(secs, microsecs);
204 if (imageSet.hasImageType(ImageSet::IMAGE_LEFT)) {
207 if (imageSet.hasImageType(ImageSet::IMAGE_DISPARITY)) {
210 if (imageSet.hasImageType(ImageSet::IMAGE_RIGHT)) {
243 ROS_WARN(
"No camera calibration file configured. Cannot publish detailed camera information!");
245 bool success =
false;
254 ROS_WARN(
"Error reading calibration file: %s\n" 255 "Cannot publish detailed camera information!",
calibFile.c_str());
269 cvImg.
header.stamp = stamp;
270 cvImg.
header.seq = imageSet.getSequenceNumber();
272 bool format12Bit = (imageSet.getPixelFormat(imageIndex) == ImageSet::FORMAT_12_BIT_MONO);
273 string encoding =
"";
277 switch (imageSet.getPixelFormat(imageIndex)) {
278 case ImageSet::FORMAT_8_BIT_RGB: {
279 cv::Mat rgbImg(imageSet.getHeight(), imageSet.getWidth(),
281 imageSet.getPixelData(imageIndex), imageSet.getRowStride(imageIndex));
282 cvImg.
image = rgbImg;
286 case ImageSet::FORMAT_8_BIT_MONO:
287 case ImageSet::FORMAT_12_BIT_MONO: {
288 cv::Mat monoImg(imageSet.getHeight(), imageSet.getWidth(),
289 format12Bit ? CV_16UC1 : CV_8UC1,
290 imageSet.getPixelData(imageIndex), imageSet.getRowStride(imageIndex));
291 cvImg.
image = monoImg;
292 encoding = (format12Bit ?
"mono16":
"mono8");
296 ROS_WARN(
"Omitting an image with unhandled pixel format");
301 cv::Mat monoImg(imageSet.getHeight(), imageSet.getWidth(),
302 format12Bit ? CV_16UC1 : CV_8UC1,
303 imageSet.getPixelData(imageIndex), imageSet.getRowStride(imageIndex));
306 int dispMin = 0, dispMax = 0;
307 imageSet.getDisparityRange(dispMin, dispMax);
310 colorCodeDispMap ==
"rainbow" ? ColorCoder::COLOR_RAINBOW_BGR : ColorCoder::COLOR_RED_BLUE_BGR,
311 dispMin*16, dispMax*16,
true,
true));
316 colDispMap = cv::Mat_<cv::Vec3b>(monoImg.rows, monoImg.cols);
320 cv::Mat_<cv::Vec3b> dispSection =
colDispMap(cv::Rect(0, 0, monoImg.cols, monoImg.rows));
322 colCoder->codeImage(cv::Mat_<unsigned short>(monoImg), dispSection);
328 sensor_msgs::ImagePtr msg = cvImg.
toImageMsg();
329 msg->encoding = encoding;
335 dst[0] = src[8]; dst[1] = src[9];
336 dst[2] = src[10]; dst[3] = src[11];
338 dst[4] = -src[0]; dst[5] = -src[1];
339 dst[6] = -src[2]; dst[7] = -src[3];
341 dst[8] = -src[4]; dst[9] = -src[5];
342 dst[10] = -src[6]; dst[11] = -src[7];
344 dst[12] = src[12]; dst[13] = src[13];
345 dst[14] = src[14]; dst[15] = src[15];
349 if ((!imageSet.hasImageType(ImageSet::IMAGE_DISPARITY))
350 || (imageSet.getPixelFormat(ImageSet::IMAGE_DISPARITY) != ImageSet::FORMAT_12_BIT_MONO)) {
356 static std::vector<float> q;
358 imageSet.setQMatrix(&q[0]);
365 imageSet.setQMatrix(qRos);
369 float* pointMap =
nullptr;
371 pointMap =
recon3d->createPointMap(imageSet, 0);
372 }
catch(std::exception& ex) {
373 cerr <<
"Error creating point cloud: " << ex.what() << endl;
383 if(
pointCloudMsg->data.size() != imageSet.getWidth()*imageSet.getHeight()*4*
sizeof(float)) {
385 pointCloudMsg->data.resize(imageSet.getWidth()*imageSet.getHeight()*4*
sizeof(float));
399 imageSet.getWidth()*imageSet.getHeight()*4*
sizeof(float));
403 copyPointCloudClamped<0>(pointMap,
reinterpret_cast<float*
>(&
pointCloudMsg->data[0]),
404 imageSet.getWidth()*imageSet.getHeight());
406 copyPointCloudClamped<2>(pointMap,
reinterpret_cast<float*
>(&
pointCloudMsg->data[0]),
407 imageSet.getWidth()*imageSet.getHeight());
411 if (imageSet.hasImageType(ImageSet::IMAGE_LEFT)) {
415 copyPointCloudIntensity<INTENSITY>(imageSet);
418 copyPointCloudIntensity<RGB_COMBINED>(imageSet);
421 copyPointCloudIntensity<RGB_SEPARATE>(imageSet);
435 + imageSet.getWidth()*imageSet.getHeight()*4*
sizeof(float);
437 if(imageSet.getPixelFormat(ImageSet::IMAGE_LEFT) == ImageSet::FORMAT_8_BIT_MONO) {
439 unsigned char* imagePtr = imageSet.getPixelData(ImageSet::IMAGE_LEFT);
440 unsigned char* rowEndPtr = imagePtr + imageSet.getWidth();
441 int rowIncrement = imageSet.getRowStride(ImageSet::IMAGE_LEFT) - imageSet.getWidth();
443 for(
unsigned char* cloudPtr = cloudStart + 3*
sizeof(
float);
444 cloudPtr < cloudEnd; cloudPtr+= 4*
sizeof(float)) {
446 *
reinterpret_cast<float*
>(cloudPtr) = static_cast<float>(*imagePtr) / 255.0F;
448 const unsigned char intensity = *imagePtr;
449 *
reinterpret_cast<unsigned int*
>(cloudPtr) = (intensity << 16) | (intensity << 8) | intensity;
451 *cloudPtr = *imagePtr;
455 if(imagePtr == rowEndPtr) {
457 imagePtr += rowIncrement;
458 rowEndPtr = imagePtr + imageSet.getWidth();
461 }
else if(imageSet.getPixelFormat(ImageSet::IMAGE_LEFT) == ImageSet::FORMAT_12_BIT_MONO) {
463 unsigned short* imagePtr =
reinterpret_cast<unsigned short*
>(imageSet.getPixelData(ImageSet::IMAGE_LEFT));
464 unsigned short* rowEndPtr = imagePtr + imageSet.getWidth();
465 int rowIncrement = imageSet.getRowStride(ImageSet::IMAGE_LEFT) - 2*imageSet.getWidth();
467 for(
unsigned char* cloudPtr = cloudStart + 3*
sizeof(
float);
468 cloudPtr < cloudEnd; cloudPtr+= 4*
sizeof(float)) {
471 *
reinterpret_cast<float*
>(cloudPtr) = static_cast<float>(*imagePtr) / 4095.0F;
473 const unsigned char intensity = *imagePtr/16;
474 *
reinterpret_cast<unsigned int*
>(cloudPtr) = (intensity << 16) | (intensity << 8) | intensity;
476 *cloudPtr = *imagePtr/16;
480 if(imagePtr == rowEndPtr) {
482 imagePtr += rowIncrement;
483 rowEndPtr = imagePtr + imageSet.getWidth();
486 }
else if(imageSet.getPixelFormat(ImageSet::IMAGE_LEFT) == ImageSet::FORMAT_8_BIT_RGB) {
488 unsigned char* imagePtr = imageSet.getPixelData(ImageSet::IMAGE_LEFT);
489 unsigned char* rowEndPtr = imagePtr + imageSet.getWidth();
490 int rowIncrement = imageSet.getRowStride(ImageSet::IMAGE_LEFT) - imageSet.getWidth();
492 static bool warned =
false;
495 ROS_WARN(
"RGBF32 is not supported for color images. Please use RGB8!");
498 for(
unsigned char* cloudPtr = cloudStart + 3*
sizeof(
float);
499 cloudPtr < cloudEnd; cloudPtr+= 4*
sizeof(float)) {
501 *
reinterpret_cast<float*
>(cloudPtr) = static_cast<float>(imagePtr[2]) / 255.0F;
503 *
reinterpret_cast<unsigned int*
>(cloudPtr) = (imagePtr[0] << 16) | (imagePtr[1] << 8) | imagePtr[2];
505 *cloudPtr = (imagePtr[0] + imagePtr[1]*2 + imagePtr[2])/4;
509 if(imagePtr == rowEndPtr) {
511 imagePtr += rowIncrement;
512 rowEndPtr = imagePtr + imageSet.getWidth();
516 throw std::runtime_error(
"Invalid pixel format!");
522 float* endPtr = src + 4*size;
523 for(
float *srcPtr = src, *dstPtr = dst; srcPtr < endPtr; srcPtr+=4, dstPtr+=4) {
525 dstPtr[0] = std::numeric_limits<float>::quiet_NaN();
526 dstPtr[1] = std::numeric_limits<float>::quiet_NaN();
527 dstPtr[2] = std::numeric_limits<float>::quiet_NaN();
529 dstPtr[0] = srcPtr[0];
530 dstPtr[1] = srcPtr[1];
531 dstPtr[2] = srcPtr[2];
540 recon3d.reset(
new Reconstruct3D);
546 sensor_msgs::PointField fieldX;
549 fieldX.datatype = sensor_msgs::PointField::FLOAT32;
553 sensor_msgs::PointField fieldY;
555 fieldY.offset =
sizeof(float);
556 fieldY.datatype = sensor_msgs::PointField::FLOAT32;
560 sensor_msgs::PointField fieldZ;
562 fieldZ.offset = 2*
sizeof(float);
563 fieldZ.datatype = sensor_msgs::PointField::FLOAT32;
568 sensor_msgs::PointField fieldI;
569 fieldI.name =
"intensity";
570 fieldI.offset = 3*
sizeof(float);
571 fieldI.datatype = sensor_msgs::PointField::UINT8;
576 sensor_msgs::PointField fieldRed;
578 fieldRed.offset = 3*
sizeof(float);
579 fieldRed.datatype = sensor_msgs::PointField::FLOAT32;
583 sensor_msgs::PointField fieldGreen;
584 fieldGreen.name =
"g";
585 fieldGreen.offset = 3*
sizeof(float);
586 fieldGreen.datatype = sensor_msgs::PointField::FLOAT32;
587 fieldGreen.count = 1;
590 sensor_msgs::PointField fieldBlue;
592 fieldBlue.offset = 3*
sizeof(float);
593 fieldBlue.datatype = sensor_msgs::PointField::FLOAT32;
597 sensor_msgs::PointField fieldRGB;
598 fieldRGB.name =
"rgb";
599 fieldRGB.offset = 3*
sizeof(float);
600 fieldRGB.datatype = sensor_msgs::PointField::UINT32;
609 camInfoMsg.reset(
new nerian_stereo::StereoCameraInfo);
612 camInfoMsg->header.seq = imageSet.getSequenceNumber();
615 std::vector<int> sizeVec;
617 if(sizeVec.size() != 2) {
618 std::runtime_error(
"Calibration file format error!");
624 camInfoMsg->left_info.distortion_model =
"plumb_bob";
640 camInfoMsg->right_info.distortion_model =
"plumb_bob";
647 camInfoMsg->right_info.roi.do_rectify =
false;
662 const float* qMatrix = imageSet.getQMatrix();
663 if(qMatrix[0] != 0.0) {
664 for(
int i=0; i<16; i++) {
665 camInfoMsg->Q[i] =
static_cast<double>(qMatrix[i]);
680 std::vector<double> doubleVec;
683 if(doubleVec.size() != dest.size()) {
684 std::runtime_error(
"Calibration file format error!");
687 std::copy(doubleVec.begin(), doubleVec.end(), dest.begin());
virtual ros::NodeHandle & getPrivateNH()=0
void prepareAsyncTransfer()
Connects to the image service to request the stream of image sets.
std::string internalFrame
sensor_msgs::PointCloud2Ptr pointCloudMsg
boost::scoped_ptr< dynamic_reconfigure::Server< nerian_stereo::NerianStereoConfig > > dynReconfServer
boost::scoped_ptr< ColorCoder > colCoder
boost::scoped_ptr< DataChannelService > dataChannelService
void publish(const boost::shared_ptr< M > &message) const
void copyPointCloudIntensity(ImageSet &imageSet)
Copies the intensity or RGB data to the point cloud.
virtual ros::NodeHandle & getNH()=0
void processDataChannels()
void loadCameraCalibration()
Loads a camera calibration file if configured.
nerian_stereo::StereoCameraInfoPtr camInfoMsg
bool initialConfigReceived
boost::scoped_ptr< ros::Publisher > rightImagePublisher
void autogen_updateParameterServerFromDevice(std::map< std::string, ParameterInfo > &cfg)
Auto-generated code to set initial parameters according to those obtained from the device...
void autogen_updateDynamicReconfigureFromDevice(std::map< std::string, ParameterInfo > &cfg)
Auto-generated code to override the dynamic_reconfigure limits and defaults for all parameters...
void dynamicReconfigureCallback(nerian_stereo::NerianStereoConfig &config, uint32_t level)
cv::FileStorage calibStorage
geometry_msgs::TransformStamped currentTransform
void processOneImageSet()
cv::Mat_< cv::Vec3b > colDispMap
PointCloudColorMode pointCloudColorMode
void publishImageMsg(const ImageSet &imageSet, int imageIndex, ros::Time stamp, bool allowColorCode, ros::Publisher *publisher)
Publishes the disparity map as 16-bit grayscale image or color coded RGB image.
void initDataChannelService()
void initPointCloud()
Performs all necessary initializations for point cloud publishing.
void readCalibrationArray(const char *key, T &dest)
Reads a vector from the calibration file to a boost::array.
boost::scoped_ptr< Reconstruct3D > recon3d
void initDynamicReconfigure()
ros::Time lastCamInfoPublish
boost::scoped_ptr< ros::Publisher > cloudPublisher
boost::scoped_ptr< ros::Publisher > disparityPublisher
boost::scoped_ptr< tf2_ros::TransformBroadcaster > transformBroadcaster
uint32_t getNumSubscribers() const
void publishPointCloudMsg(ImageSet &imageSet, ros::Time stamp)
Reconstructs the 3D locations from the disparity map and publishes them as point cloud.
bool getParam(const std::string &key, std::string &s) const
A driver node that receives data from Nerian stereo devices and forwards it to ROS.
boost::scoped_ptr< ros::Publisher > leftImagePublisher
void updateParameterServerFromDevice(std::map< std::string, ParameterInfo > &cfg)
void updateDynamicReconfigureFromDevice(std::map< std::string, ParameterInfo > &cfg)
boost::scoped_ptr< DeviceParameters > deviceParameters
void copyPointCloudClamped(float *src, float *dst, int size)
Copies all points in a point cloud that have a depth smaller than maxDepth. Other points are set to NaN.
std::string colorCodeDispMap
void publishCameraInfo(ros::Time stamp, const ImageSet &imageSet)
Publishes the camera info once per second.
void qMatrixToRosCoords(const float *src, float *dst)
Transform Q matrix to match the ROS coordinate system: Swap y/z axis, then swap x/y axis...
void setParam(const std::string &key, const XmlRpc::XmlRpcValue &v) const
sensor_msgs::ImagePtr toImageMsg() const
void init()
Performs general initializations.
void autogen_dynamicReconfigureCallback(nerian_stereo::NerianStereoConfig &config, uint32_t level)
Auto-generated code to check for parameter changes and forward them to the device.
boost::scoped_ptr< ros::Publisher > cameraInfoPublisher
nerian_stereo::NerianStereoConfig lastKnownConfig
boost::scoped_ptr< AsyncTransfer > asyncTransfer