: node_handler_(node_handler)
std::string default_calib_file = "~/.ros/camera_info/default.yaml";
float range_1, range_2, temp_range = 0;
uint16_t intensity_1, intensity_2 = 0;
uint8_t classification, ch = 0;
byte_offset = start_byte + (col_ * 4);

range_1 = float(uint16_t(big_to_native(
    *reinterpret_cast<const uint16_t*>(&packet[byte_offset])))) / 256.0;
range_2 = float(uint16_t(big_to_native(
    *reinterpret_cast<const uint16_t*>(&packet[byte_offset + 2])))) / 256.0;
byte_offset = start_byte + 512 + (col_ * 4);

intensity_1 = uint16_t(big_to_native(
    *reinterpret_cast<const uint16_t*>(&packet[byte_offset])));
intensity_2 = uint16_t(big_to_native(
    *reinterpret_cast<const uint16_t*>(&packet[byte_offset + 2])));
byte_offset = start_byte + 1152 + col_;

classification = big_to_native(
    *reinterpret_cast<const uint8_t*>(&packet[byte_offset]));
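// Sketch (not part of the original driver): an equivalent, strict-aliasing-safe way to pull
// the big-endian 16-bit samples above out of the packet is to memcpy into a local before
// byte-swapping. 'read_be16' is a hypothetical helper; big_to_native is assumed to be
// boost::endian::big_to_native as used above, and <cstring> provides std::memcpy.
auto read_be16 = [&packet](size_t offset) -> uint16_t {
  uint16_t value;                                        // host-order destination
  std::memcpy(&value, &packet[offset], sizeof(value));   // copy avoids alignment/aliasing issues
  return big_to_native(value);                           // swap from big-endian (network) order
};
// e.g. range_1 = float(read_be16(byte_offset)) / 256.0;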
ROS_ERROR("Unexpected packet (dropped packet?) expecting: %i, received: %i",
232 "x", 1, sensor_msgs::PointField::FLOAT32,
233 "y", 1, sensor_msgs::PointField::FLOAT32,
234 "z", 1, sensor_msgs::PointField::FLOAT32,
235 "intensity", 1, sensor_msgs::PointField::FLOAT32,
236 "return", 1, sensor_msgs::PointField::UINT8,
237 "crosstalk", 1, sensor_msgs::PointField::UINT8,
238 "saturated", 1, sensor_msgs::PointField::UINT8,
239 "superimposed", 1, sensor_msgs::PointField::UINT8);
float fx = *reinterpret_cast<const float*>(&frame_data[20]);
float fy = *reinterpret_cast<const float*>(&frame_data[24]);
float ux = *reinterpret_cast<const float*>(&frame_data[28]);
float uy = *reinterpret_cast<const float*>(&frame_data[32]);
float r1 = *reinterpret_cast<const float*>(&frame_data[36]);
float r2 = *reinterpret_cast<const float*>(&frame_data[40]);
float t1 = *reinterpret_cast<const float*>(&frame_data[44]);
float t2 = *reinterpret_cast<const float*>(&frame_data[48]);
float r4 = *reinterpret_cast<const float*>(&frame_data[52]);

float intrinsic_yaw = *reinterpret_cast<const float*>(&frame_data[56]);
float intrinsic_pitch = *reinterpret_cast<const float*>(&frame_data[60]);
float extrinsic_yaw = *reinterpret_cast<const float*>(&frame_data[64]);
float extrinsic_pitch = *reinterpret_cast<const float*>(&frame_data[68]);
float extrinsic_roll = *reinterpret_cast<const float*>(&frame_data[72]);
float extrinsic_z = *reinterpret_cast<const float*>(&frame_data[76]);
float extrinsic_y = *reinterpret_cast<const float*>(&frame_data[80]);
float extrinsic_x = *reinterpret_cast<const float*>(&frame_data[84]);
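// The focal lengths (fx, fy), principal point (ux, uy), distortion terms (r1, r2, t1, t2, r4)
// and the mounting extrinsics above are read as host-order floats from fixed byte offsets
// 20-84 of the frame header; they are used below for the CameraInfo and the sensor transform.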
double r = -1.5707, p = 0.0, y = -1.5707;

global_tf_.transform.translation.x = extrinsic_x;
global_tf_.transform.translation.y = extrinsic_y;
global_tf_.transform.translation.z = extrinsic_z;
q_orig.setRPY(extrinsic_roll, extrinsic_pitch, extrinsic_yaw);
q_final = q_orig * q_rot;
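// Sketch (assumption about code not shown here): q_rot is presumably a tf2::Quaternion built
// from the fixed offset (r, p, y) above, with the combined rotation written back into the
// transform, e.g.:
//
//   tf2::Quaternion q_rot;
//   q_rot.setRPY(r, p, y);
//   q_final.normalize();
//   global_tf_.transform.rotation = tf2::toMsg(q_final);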
ROS_WARN("Initialized intrinsics do not match those received from sensor");
ROS_WARN("Setting intrinsics to values received from sensor");
ci.distortion_model = "rational_polynomial";
    cv::Mat(ci.D), ci.width, ci.height, true);
sensor_msgs::CameraInfoPtr flash_cam_info(new sensor_msgs::CameraInfo(ci));
const cv::Vec3f &cvPoint =
*out_x = cvPoint2(0);
*out_y = cvPoint2(1);
*out_z = cvPoint2(2);
} else if (count == 11) {
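  // The loop below walks the payload in 129-byte object records: geometry floats at byte
  // offsets 0-40, kinematics floats at 44-120, and quality / classification / confidence
  // bytes at 126-128.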
  for (int i = start_byte; i < packet.size(); i += 129)
  {
    if (count == last_object)
    objects_[count].geometry.x_rear_r = *reinterpret_cast<const float*>(&packet[i + 0]);
    objects_[count].geometry.y_rear_r = *reinterpret_cast<const float*>(&packet[i + 4]);
    objects_[count].geometry.x_rear_l = *reinterpret_cast<const float*>(&packet[i + 8]);
    objects_[count].geometry.y_rear_l = *reinterpret_cast<const float*>(&packet[i + 12]);
    objects_[count].geometry.x_front_l = *reinterpret_cast<const float*>(&packet[i + 16]);
    objects_[count].geometry.y_front_l = *reinterpret_cast<const float*>(&packet[i + 20]);
    objects_[count].geometry.height = *reinterpret_cast<const float*>(&packet[i + 24]);
    objects_[count].geometry.ground_offset = *reinterpret_cast<const float*>(&packet[i + 28]);
    objects_[count].geometry.fDistX = *reinterpret_cast<const float*>(&packet[i + 32]);
    objects_[count].geometry.fDistY = *reinterpret_cast<const float*>(&packet[i + 36]);
    objects_[count].geometry.yaw = *reinterpret_cast<const float*>(&packet[i + 40]);

    objects_[count].kinematics.fVabsX = *reinterpret_cast<const float*>(&packet[i + 44]);
    objects_[count].kinematics.fVabsY = *reinterpret_cast<const float*>(&packet[i + 48]);
    objects_[count].kinematics.fVrelX = *reinterpret_cast<const float*>(&packet[i + 52]);
    objects_[count].kinematics.fVrelY = *reinterpret_cast<const float*>(&packet[i + 56]);
    objects_[count].kinematics.fAabsX = *reinterpret_cast<const float*>(&packet[i + 60]);
    objects_[count].kinematics.fDistXDistY = *reinterpret_cast<const float*>(&packet[i + 64]);
    objects_[count].kinematics.fDistXVx = *reinterpret_cast<const float*>(&packet[i + 68]);
    objects_[count].kinematics.fDistXVy = *reinterpret_cast<const float*>(&packet[i + 72]);
    objects_[count].kinematics.fDistXAx = *reinterpret_cast<const float*>(&packet[i + 76]);
    objects_[count].kinematics.fDistXAy = *reinterpret_cast<const float*>(&packet[i + 80]);
    objects_[count].kinematics.fDistYVx = *reinterpret_cast<const float*>(&packet[i + 84]);
    objects_[count].kinematics.fDistYVy = *reinterpret_cast<const float*>(&packet[i + 88]);
    objects_[count].kinematics.fDistYAx = *reinterpret_cast<const float*>(&packet[i + 92]);
    objects_[count].kinematics.fDistYAy = *reinterpret_cast<const float*>(&packet[i + 96]);
    objects_[count].kinematics.fVxVy = *reinterpret_cast<const float*>(&packet[i + 100]);
    objects_[count].kinematics.fVxAx = *reinterpret_cast<const float*>(&packet[i + 104]);
    objects_[count].kinematics.fVxAy = *reinterpret_cast<const float*>(&packet[i + 108]);
    objects_[count].kinematics.fVyAx = *reinterpret_cast<const float*>(&packet[i + 112]);
    objects_[count].kinematics.fVyAy = *reinterpret_cast<const float*>(&packet[i + 116]);
    objects_[count].kinematics.fAxAy = *reinterpret_cast<const float*>(&packet[i + 120]);

    objects_[count].quality = *reinterpret_cast<const uint8_t*>(&packet[i + 126]);
    objects_[count].classification = *reinterpret_cast<const uint8_t*>(&packet[i + 127]);
    objects_[count].confidence = *reinterpret_cast<const uint8_t*>(&packet[i + 128]);
uint32_t obj_packet =
visualization_msgs::Marker bBox;
visualization_msgs::MarkerArray marker_array;
for (int i = 0; i < objects_.size(); i += 1)
{
  bBox.pose.position.x = (objects_[i].geometry.x_rear_r + 0.5 *
  bBox.pose.position.y = (objects_[i].geometry.y_rear_r + 0.5 *
  bBox.pose.position.z =

  float width = sqrt((objects_[i].geometry.x_rear_r - objects_[i].geometry.x_rear_l) *

  bBox.scale.y = width;
  bBox.scale.z = objects_[i].geometry.height + objects_[i].geometry.ground_offset;
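  // The marker colour below is chosen by object classification; in every branch the alpha
  // channel maps the reported confidence (0-100) onto opacity.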
  if (objects_[i].classification == 9) {
    bBox.color.r = 240.0 / 255.0;
    bBox.color.g = 230.0 / 255.0;
    bBox.color.b = 140.0 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  } else if (objects_[i].classification == 8) {
    bBox.color.r = 238.0 / 255.0;
    bBox.color.g = 232.0 / 255.0;
    bBox.color.b = 170.0 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  } else if (objects_[i].classification == 7) {
    bBox.color.r = 238.0 / 255.0;
    bBox.color.g = 232.0 / 255.0;
    bBox.color.b = 170.0 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  } else if (objects_[i].classification == 6) {
    bBox.color.r = 238.0 / 255.0;
    bBox.color.g = 232.0 / 255.0;
    bBox.color.b = 170.0 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  } else if (objects_[i].classification == 5) {
    bBox.color.r = 255.0 / 255.0;
    bBox.color.g = 140.0 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  } else if (objects_[i].classification == 4) {
    bBox.color.r = 230 / 255.0;
    bBox.color.g = 190 / 255.0;
    bBox.color.b = 138 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  } else if (objects_[i].classification == 3) {
    bBox.color.r = 215 / 255.0;
    bBox.color.g = 215 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  } else if (objects_[i].classification == 2) {
    bBox.color.r = 218 / 255.0;
    bBox.color.g = 165 / 255.0;
    bBox.color.b = 32 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  } else if (objects_[i].classification == 1) {
    bBox.color.r = 139 / 255.0;
    bBox.color.g = 69 / 255.0;
    bBox.color.b = 19 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  } else if (objects_[i].classification == 0) {
    bBox.color.r = 210 / 255.0;
    bBox.color.g = 105 / 255.0;
    bBox.color.b = 30 / 255.0;
    bBox.color.a = objects_[i].confidence / 100.0;
  bBox.frame_locked = false;
  bBox.action = visualization_msgs::Marker::ADD;

  marker_array.markers.push_back(bBox);
    (big_to_native(*reinterpret_cast<const uint32_t*>(&tele_data[0])));
    (*reinterpret_cast<const float*>(&tele_data[4]));
    (-*reinterpret_cast<const float*>(&tele_data[8]));
    (big_to_native(*reinterpret_cast<const uint32_t*>(&tele_data[12])));
    (*reinterpret_cast<const float*>(&tele_data[16]));
    (*reinterpret_cast<const float*>(&tele_data[20]));
    (*reinterpret_cast<const float*>(&tele_data[24]));
    (*reinterpret_cast<const float*>(&tele_data[28]));
    (*reinterpret_cast<const float*>(&tele_data[32]));
    (*reinterpret_cast<const float*>(&tele_data[36]));
    (unsigned(*reinterpret_cast<const uint8_t*>(&tele_data[40])));
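// The loop below reads the 26 bytes at tele_data[41]..tele_data[66] one character at a time,
// highest index first.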
for (int i = 25; i >= 0; i--)

    (*reinterpret_cast<const char*>(&tele_data[41 + i]));
                           int width, int height, bool radial)
  int totalsize = width * height;
  cv::Mat pixelVectors(1, totalsize, CV_32FC3);
  cv::Mat dst(1, totalsize, CV_32FC3);

  cv::Mat sensorPoints(cv::Size(height, width), CV_32FC2);
  cv::Mat undistortedSensorPoints(1, totalsize, CV_32FC2);

  std::vector<cv::Mat> ch;
  for (j = 0; j < height; j++)

    for (i = 0; i < width; i++)

      cv::Vec2f &p = sensorPoints.at<cv::Vec2f>(i, j);

  sensorPoints = sensorPoints.reshape(2, 1);

  cv::undistortPoints(sensorPoints, undistortedSensorPoints, cameraMatrix, distCoeffs);

  ch.push_back(undistortedSensorPoints);
  ch.push_back(cv::Mat::ones(1, totalsize, CV_32FC1));
  cv::merge(ch, pixelVectors);

  for (i = 0; i < totalsize; i++)

    normalize(pixelVectors.at<cv::Vec3f>(i), dst.at<cv::Vec3f>(i));

  return pixelVectors.reshape(3, width);
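// Illustrative note (not the driver's own code): pixelVectors holds one undistorted,
// homogeneous viewing direction (x, y, 1) per pixel, and dst holds the unit-length copies;
// a measured range is typically expanded to a 3-D point by scaling that pixel's vector,
// roughly: x = range * v(0); y = range * v(1); z = range * v(2), where 'v' is the pixel's
// cv::Vec3f entry.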
stat.level = diagnostic_msgs::DiagnosticStatus::OK;