node_handler_(node_handler)
std::string default_calib_file = "~/.ros/camera_info/default.yaml";
float range_1 = 0, range_2 = 0, temp_range = 0;
uint16_t intensity_1 = 0, intensity_2 = 0;
uint8_t classification = 0, ch = 0;
byte_offset = start_byte + (col_ * 4);

range_1 = float(uint16_t(big_to_native(*reinterpret_cast<const uint16_t*>(&packet[byte_offset])))) / 256.0;
range_2 = float(uint16_t(big_to_native(*reinterpret_cast<const uint16_t*>(&packet[byte_offset + 2])))) / 256.0;
byte_offset = start_byte + 512 + (col_ * 4);

intensity_1 = uint16_t(big_to_native(*reinterpret_cast<const uint16_t*>(&packet[byte_offset])));
intensity_2 = uint16_t(big_to_native(*reinterpret_cast<const uint16_t*>(&packet[byte_offset + 2])));
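Note the repeated decode idiom here: a big-endian 16-bit value is reinterpreted in place and byte-swapped with big_to_native. A minimal alias-safe equivalent, assuming boost::endian supplies big_to_native (the helper name read_u16_be is hypothetical):

#include <boost/endian/conversion.hpp>
#include <cstdint>
#include <cstring>
#include <vector>

// Hypothetical helper, not part of the driver: decode a big-endian uint16_t
// at `offset` without the alignment assumptions of reinterpret_cast.
static uint16_t read_u16_be(const std::vector<uint8_t>& buf, std::size_t offset)
{
  std::uint16_t raw = 0;
  std::memcpy(&raw, buf.data() + offset, sizeof(raw));  // byte-wise copy is alias-safe
  return boost::endian::big_to_native(raw);             // swap on little-endian hosts
}

// Usage sketch: the range above is this raw value divided by 256 (8.8 fixed point).
// float range = read_u16_be(packet, byte_offset) / 256.0f;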
byte_offset = start_byte + 1152 + col_;

classification = big_to_native(*reinterpret_cast<const uint8_t*>(&packet[byte_offset]));
int size = frame_data.size();

int frame_num = big_to_native(*reinterpret_cast<const uint32_t*>(&frame_data[12]));
ROS_ERROR("Unexpected packet (dropped packet?) expecting: %i, received: %i",
232 "x", 1, sensor_msgs::PointField::FLOAT32,
233 "y", 1, sensor_msgs::PointField::FLOAT32,
234 "z", 1, sensor_msgs::PointField::FLOAT32,
235 "intensity", 1, sensor_msgs::PointField::FLOAT32,
236 "return", 1, sensor_msgs::PointField::UINT8,
237 "crosstalk", 1, sensor_msgs::PointField::UINT8,
238 "saturated", 1, sensor_msgs::PointField::UINT8,
239 "superimposed", 1, sensor_msgs::PointField::UINT8);
float fx = *reinterpret_cast<const float*>(&frame_data[20]);
float fy = *reinterpret_cast<const float*>(&frame_data[24]);
float ux = *reinterpret_cast<const float*>(&frame_data[28]);
float uy = *reinterpret_cast<const float*>(&frame_data[32]);
float r1 = *reinterpret_cast<const float*>(&frame_data[36]);
float r2 = *reinterpret_cast<const float*>(&frame_data[40]);
float t1 = *reinterpret_cast<const float*>(&frame_data[44]);
float t2 = *reinterpret_cast<const float*>(&frame_data[48]);
float r4 = *reinterpret_cast<const float*>(&frame_data[52]);

float intrinsic_yaw = *reinterpret_cast<const float*>(&frame_data[56]);
float intrinsic_pitch = *reinterpret_cast<const float*>(&frame_data[60]);
float extrinsic_yaw = *reinterpret_cast<const float*>(&frame_data[64]);
float extrinsic_pitch = *reinterpret_cast<const float*>(&frame_data[68]);
float extrinsic_roll = *reinterpret_cast<const float*>(&frame_data[72]);
float extrinsic_z = *reinterpret_cast<const float*>(&frame_data[76]);
float extrinsic_y = *reinterpret_cast<const float*>(&frame_data[80]);
float extrinsic_x = *reinterpret_cast<const float*>(&frame_data[84]);
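The nine floats fx..r4 look like pinhole intrinsics plus distortion terms, and the code later selects the rational_polynomial model. A sketch of how they could populate a sensor_msgs::CameraInfo, where the placement of r1, r2, t1, t2, r4 in D is an assumption rather than the driver's confirmed mapping:

#include <sensor_msgs/CameraInfo.h>

sensor_msgs::CameraInfo makeCameraInfo(float fx, float fy, float ux, float uy,
                                       float r1, float r2, float t1, float t2,
                                       float r4)
{
  sensor_msgs::CameraInfo ci;
  ci.distortion_model = "rational_polynomial";  // matches the assignment below
  ci.K[0] = fx;  ci.K[2] = ux;                  // row-major 3x3 camera matrix
  ci.K[4] = fy;  ci.K[5] = uy;
  ci.K[8] = 1.0;
  // rational_polynomial expects (k1, k2, p1, p2, k3, k4, k5, k6);
  // this ordering of the sensor's terms is an assumption.
  ci.D = {r1, r2, t1, t2, 0.0, r4, 0.0, 0.0};
  return ci;
}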
double r = -1.5707, p = 0.0, y = -1.5707;  // ~ -pi/2 about roll and yaw
global_tf_.transform.translation.x = extrinsic_x;
global_tf_.transform.translation.y = extrinsic_y;
global_tf_.transform.translation.z = extrinsic_z;
q_orig.setRPY(extrinsic_roll, extrinsic_pitch, extrinsic_yaw);
q_final = q_orig * q_rot;
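The fixed r, p, y above suggest the usual body-to-optical frame alignment. A sketch of how the composed quaternion would be written into the transform and broadcast; the broadcaster setup and frame handling here are assumptions, not the driver's verbatim code:

#include <cmath>
#include <geometry_msgs/TransformStamped.h>
#include <tf2/LinearMath/Quaternion.h>
#include <tf2_ros/static_transform_broadcaster.h>

void broadcastExtrinsics(const geometry_msgs::TransformStamped& base_tf,
                         double roll, double pitch, double yaw)
{
  static tf2_ros::StaticTransformBroadcaster broadcaster;

  tf2::Quaternion q_rot, q_orig, q_final;
  q_rot.setRPY(-M_PI_2, 0.0, -M_PI_2);  // fixed optical-frame alignment, as above
  q_orig.setRPY(roll, pitch, yaw);      // extrinsics reported by the sensor
  q_final = q_orig * q_rot;
  q_final.normalize();

  geometry_msgs::TransformStamped tf = base_tf;
  tf.transform.rotation.x = q_final.x();
  tf.transform.rotation.y = q_final.y();
  tf.transform.rotation.z = q_final.z();
  tf.transform.rotation.w = q_final.w();
  broadcaster.sendTransform(tf);
}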
ROS_WARN("Initialized intrinsics do not match those received from sensor");
ROS_WARN("Setting intrinsics to values received from sensor");

ci.distortion_model = "rational_polynomial";
cv::Mat(ci.D), ci.width, ci.height, true);

sensor_msgs::CameraInfoPtr flash_cam_info(new sensor_msgs::CameraInfo(ci));
const cv::Vec3f &cvPoint =

*out_x = cvPoint2(0);
*out_y = cvPoint2(1);
*out_z = cvPoint2(2);
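For reference, the surrounding lookup is the standard flash-lidar reprojection step: the LUT entry is a viewing ray, and the output point is the ray scaled by the measured range. A hypothetical self-contained version (assumed intent, not the driver's exact code):

#include <opencv2/core.hpp>

// Each LUT entry is a unit-norm viewing ray; scaling it by the measured
// range yields the Cartesian point for that pixel.
inline void rangeToPoint(const cv::Mat& pixel_vectors, int row, int col,
                         float range, float* out_x, float* out_y, float* out_z)
{
  const cv::Vec3f& ray = pixel_vectors.at<cv::Vec3f>(row, col);
  cv::Vec3f p = ray * range;
  *out_x = p(0);
  *out_y = p(1);
  *out_z = p(2);
}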
}
else if (count == 11) {

for (int i = start_byte; i < static_cast<int>(packet.size()); i += 129)

if (count == last_object)
objects_[count].geometry.x_rear_r = *reinterpret_cast<const float*>(&packet[i + 0]);
objects_[count].geometry.y_rear_r = *reinterpret_cast<const float*>(&packet[i + 4]);
objects_[count].geometry.x_rear_l = *reinterpret_cast<const float*>(&packet[i + 8]);
objects_[count].geometry.y_rear_l = *reinterpret_cast<const float*>(&packet[i + 12]);
objects_[count].geometry.x_front_l = *reinterpret_cast<const float*>(&packet[i + 16]);
objects_[count].geometry.y_front_l = *reinterpret_cast<const float*>(&packet[i + 20]);
objects_[count].geometry.height = *reinterpret_cast<const float*>(&packet[i + 24]);
objects_[count].geometry.ground_offset = *reinterpret_cast<const float*>(&packet[i + 28]);
objects_[count].geometry.fDistX = *reinterpret_cast<const float*>(&packet[i + 32]);
objects_[count].geometry.fDistY = *reinterpret_cast<const float*>(&packet[i + 36]);
objects_[count].geometry.yaw = *reinterpret_cast<const float*>(&packet[i + 40]);

objects_[count].kinematics.fVabsX = *reinterpret_cast<const float*>(&packet[i + 44]);
objects_[count].kinematics.fVabsY = *reinterpret_cast<const float*>(&packet[i + 48]);
objects_[count].kinematics.fVrelX = *reinterpret_cast<const float*>(&packet[i + 52]);
objects_[count].kinematics.fVrelY = *reinterpret_cast<const float*>(&packet[i + 56]);
objects_[count].kinematics.fAabsX = *reinterpret_cast<const float*>(&packet[i + 60]);
objects_[count].kinematics.fDistXDistY = *reinterpret_cast<const float*>(&packet[i + 64]);
objects_[count].kinematics.fDistXVx = *reinterpret_cast<const float*>(&packet[i + 68]);
objects_[count].kinematics.fDistXVy = *reinterpret_cast<const float*>(&packet[i + 72]);
objects_[count].kinematics.fDistXAx = *reinterpret_cast<const float*>(&packet[i + 76]);
objects_[count].kinematics.fDistXAy = *reinterpret_cast<const float*>(&packet[i + 80]);
objects_[count].kinematics.fDistYVx = *reinterpret_cast<const float*>(&packet[i + 84]);
objects_[count].kinematics.fDistYVy = *reinterpret_cast<const float*>(&packet[i + 88]);
objects_[count].kinematics.fDistYAx = *reinterpret_cast<const float*>(&packet[i + 92]);
objects_[count].kinematics.fDistYAy = *reinterpret_cast<const float*>(&packet[i + 96]);
objects_[count].kinematics.fVxVy = *reinterpret_cast<const float*>(&packet[i + 100]);
objects_[count].kinematics.fVxAx = *reinterpret_cast<const float*>(&packet[i + 104]);
objects_[count].kinematics.fVxAy = *reinterpret_cast<const float*>(&packet[i + 108]);
objects_[count].kinematics.fVyAx = *reinterpret_cast<const float*>(&packet[i + 112]);
objects_[count].kinematics.fVyAy = *reinterpret_cast<const float*>(&packet[i + 116]);
objects_[count].kinematics.fAxAy = *reinterpret_cast<const float*>(&packet[i + 120]);

objects_[count].quality = *reinterpret_cast<const uint8_t*>(&packet[i + 126]);
objects_[count].classification = *reinterpret_cast<const uint8_t*>(&packet[i + 127]);
objects_[count].confidence = *reinterpret_cast<const uint8_t*>(&packet[i + 128]);
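Each object record occupies a 129-byte stride (hence i += 129). An alternative sketch, not the driver's code, that documents the layout as a packed struct and copies it in one memcpy; the meaning of the two bytes before quality and the host byte order are assumptions:

#include <cstdint>
#include <cstring>

#pragma pack(push, 1)
struct ObjectRecord
{
  float x_rear_r, y_rear_r, x_rear_l, y_rear_l;
  float x_front_l, y_front_l, height, ground_offset;
  float fDistX, fDistY, yaw;   // geometry: 11 floats, bytes 0..43
  float kinematics[20];        // bytes 44..123
  uint8_t reserved[2];         // bytes 124..125, meaning unknown
  uint8_t quality;             // byte 126
  uint8_t classification;      // byte 127
  uint8_t confidence;          // byte 128
};
#pragma pack(pop)
static_assert(sizeof(ObjectRecord) == 129, "record layout must match packet stride");

inline ObjectRecord parseObjectRecord(const uint8_t* bytes)
{
  ObjectRecord rec;
  std::memcpy(&rec, bytes, sizeof(rec));  // single copy instead of 34 casts
  return rec;
}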
uint32_t obj_packet =
  (big_to_native(*reinterpret_cast<const uint32_t*>(&object_data[10])) >> 0) & 1;
visualization_msgs::Marker bBox;
visualization_msgs::MarkerArray marker_array;

for (int i = 0; i < static_cast<int>(objects_.size()); i += 1)
bBox.pose.position.x = (objects_[i].geometry.x_rear_r + 0.5 *
bBox.pose.position.y = (objects_[i].geometry.y_rear_r + 0.5 *
bBox.pose.position.z =
bBox.scale.x = length;
bBox.scale.y = width;
bBox.scale.z = objects_[i].geometry.height + objects_[i].geometry.ground_offset;
if (objects_[i].classification == 9)
{
  bBox.color.r = 240.0 / 255.0;
  bBox.color.g = 230.0 / 255.0;
  bBox.color.b = 140.0 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
else if (objects_[i].classification == 8)
{
  bBox.color.r = 238.0 / 255.0;
  bBox.color.g = 232.0 / 255.0;
  bBox.color.b = 170.0 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
else if (objects_[i].classification == 7)
{
  bBox.color.r = 238.0 / 255.0;
  bBox.color.g = 232.0 / 255.0;
  bBox.color.b = 170.0 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
else if (objects_[i].classification == 6)
{
  bBox.color.r = 238.0 / 255.0;
  bBox.color.g = 232.0 / 255.0;
  bBox.color.b = 170.0 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
else if (objects_[i].classification == 5)
{
  bBox.color.r = 255.0 / 255.0;
  bBox.color.g = 140.0 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
else if (objects_[i].classification == 4)
{
  bBox.color.r = 230 / 255.0;
  bBox.color.g = 190 / 255.0;
  bBox.color.b = 138 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
else if (objects_[i].classification == 3)
{
  bBox.color.r = 215 / 255.0;
  bBox.color.g = 215 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
else if (objects_[i].classification == 2)
{
  bBox.color.r = 218 / 255.0;
  bBox.color.g = 165 / 255.0;
  bBox.color.b = 32 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
else if (objects_[i].classification == 1)
{
  bBox.color.r = 139 / 255.0;
  bBox.color.g = 69 / 255.0;
  bBox.color.b = 19 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
else if (objects_[i].classification == 0)
{
  bBox.color.r = 210 / 255.0;
  bBox.color.g = 105 / 255.0;
  bBox.color.b = 30 / 255.0;
  bBox.color.a = objects_[i].confidence / 100.0;
}
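The chain above maps classification codes 0-9 to fixed RViz colours, with classes 8, 7 and 6 sharing one palette entry. A table-driven refactor sketch (not the driver's code) makes the palette explicit; the blue channel for classes 5 and 3 is not shown above, so the table assumes 0:

#include <array>
#include <cstdint>
#include <visualization_msgs/Marker.h>

struct Rgb { float r, g, b; };

static const std::array<Rgb, 10> kClassColors = {{
    {210 / 255.0f, 105 / 255.0f, 30 / 255.0f},   // 0
    {139 / 255.0f, 69 / 255.0f, 19 / 255.0f},    // 1
    {218 / 255.0f, 165 / 255.0f, 32 / 255.0f},   // 2
    {215 / 255.0f, 215 / 255.0f, 0.0f},          // 3 (blue channel assumed 0)
    {230 / 255.0f, 190 / 255.0f, 138 / 255.0f},  // 4
    {255 / 255.0f, 140 / 255.0f, 0.0f},          // 5 (blue channel assumed 0)
    {238 / 255.0f, 232 / 255.0f, 170 / 255.0f},  // 6
    {238 / 255.0f, 232 / 255.0f, 170 / 255.0f},  // 7
    {238 / 255.0f, 232 / 255.0f, 170 / 255.0f},  // 8
    {240 / 255.0f, 230 / 255.0f, 140 / 255.0f},  // 9
}};

void setMarkerColor(visualization_msgs::Marker& m, uint8_t cls, uint8_t confidence)
{
  if (cls < kClassColors.size())
  {
    m.color.r = kClassColors[cls].r;
    m.color.g = kClassColors[cls].g;
    m.color.b = kClassColors[cls].b;
  }
  m.color.a = confidence / 100.0f;  // alpha scales with reported confidence
}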
bBox.frame_locked = false;
bBox.action = visualization_msgs::Marker::ADD;
marker_array.markers.push_back(bBox);
(big_to_native(*reinterpret_cast<const uint32_t*>(&tele_data[0])));
(*reinterpret_cast<const float*>(&tele_data[4]));
(-*reinterpret_cast<const float*>(&tele_data[8]));
(big_to_native(*reinterpret_cast<const uint32_t*>(&tele_data[12])));
(*reinterpret_cast<const float*>(&tele_data[16]));
(*reinterpret_cast<const float*>(&tele_data[20]));
(*reinterpret_cast<const float*>(&tele_data[24]));
(*reinterpret_cast<const float*>(&tele_data[28]));
(*reinterpret_cast<const float*>(&tele_data[32]));
(*reinterpret_cast<const float*>(&tele_data[36]));
(unsigned(*reinterpret_cast<const uint8_t*>(&tele_data[40])));

for (int i = 25; i >= 0; i--)
(*reinterpret_cast<const char*>(&tele_data[41 + i]));
int width, int height, bool radial)

int totalsize = width * height;
cv::Mat pixelVectors(1, totalsize, CV_32FC3);
cv::Mat dst(1, totalsize, CV_32FC3);

cv::Mat sensorPoints(cv::Size(height, width), CV_32FC2);
cv::Mat undistortedSensorPoints(1, totalsize, CV_32FC2);

std::vector<cv::Mat> ch;
for (j = 0; j < height; j++)
for (i = 0; i < width; i++)
cv::Vec2f &p = sensorPoints.at<cv::Vec2f>(i, j);

sensorPoints = sensorPoints.reshape(2, 1);

cv::undistortPoints(sensorPoints, undistortedSensorPoints, cameraMatrix, distCoeffs);

ch.push_back(undistortedSensorPoints);
ch.push_back(cv::Mat::ones(1, totalsize, CV_32FC1));
cv::merge(ch, pixelVectors);

for (i = 0; i < totalsize; i++)
dst.at<cv::Vec3f>(i));

return pixelVectors.reshape(3, width);
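A condensed sketch of the same LUT construction, assuming the elided lines fill sensorPoints with pixel coordinates and normalize each merged ray; function and variable names here are illustrative, not the driver's:

#include <opencv2/calib3d.hpp>
#include <opencv2/core.hpp>
#include <vector>

cv::Mat buildRayLut(const cv::Mat& cameraMatrix, const cv::Mat& distCoeffs,
                    int width, int height)
{
  const int total = width * height;
  cv::Mat pixels(1, total, CV_32FC2);
  for (int j = 0; j < height; j++)
    for (int i = 0; i < width; i++)
      pixels.at<cv::Vec2f>(j * width + i) = cv::Vec2f(i, j);  // pixel grid

  cv::Mat undistorted;
  cv::undistortPoints(pixels, undistorted, cameraMatrix, distCoeffs);

  cv::Mat rays(1, total, CV_32FC3);
  std::vector<cv::Mat> ch = {undistorted, cv::Mat::ones(1, total, CV_32FC1)};
  cv::merge(ch, rays);  // (x, y, 1) per pixel

  for (int k = 0; k < total; k++)  // unit-norm viewing rays
    rays.at<cv::Vec3f>(k) = cv::normalize(rays.at<cv::Vec3f>(k));

  return rays.reshape(3, height);  // height rows x width cols; the driver's own reshape differs
}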
stat.level = diagnostic_msgs::DiagnosticStatus::OK;
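For context, this line belongs to update_diagnostics, listed among the members below. A sketch of how such a task is typically registered with the updater_ member; the task name and summary text are hypothetical:

#include <diagnostic_updater/diagnostic_updater.h>

void HFL110DCU::update_diagnostics(diagnostic_updater::DiagnosticStatusWrapper& stat)
{
  // Report a healthy sensor; real checks would inspect packet counters, etc.
  stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Sensor streaming");
}

// Registration sketch, e.g. in the constructor:
// updater_.add("hfl110dcu: status", this, &HFL110DCU::update_diagnostics);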
std::shared_ptr< std_msgs::Header > tele_header_message_
Telemetry Header message.
image_transport::CameraPublisher pub_depth_
Depth image publisher.
geometry_msgs::TransformStamped global_tf_
ROS Transform.
image_transport::CameraPublisher pub_depth2_
Depth image publisher, second return.
This file defines the HFL110DCU image processor class.
void setRPY(const tf2Scalar &roll, const tf2Scalar &pitch, const tf2Scalar &yaw)
void publish(const sensor_msgs::Image &image, const sensor_msgs::CameraInfo &info) const
uint32_t uiHardwareRevision
sensor_msgs::CameraInfo getCameraInfo(void)
bool processObjectData(const std::vector< uint8_t > &data) override
void publish(const boost::shared_ptr< M > &message) const
#define ROS_INFO_ONCE(...)
image_transport::CameraPublisher pub_si_
Superimposed flag image publisher.
std::shared_ptr< std_msgs::Header > tf_header_message_
TF Header message.
diagnostic_updater::Updater updater_
ros::Publisher pub_slices_
Slices publisher.
ros::Publisher pub_objects_
Objects publisher.
void add(const std::string &name, TaskFunction f)
const std::string TYPE_8UC1
cv_bridge::CvImagePtr p_image_depth2_
Pointer to depth image second return.
void setPointCloud2Fields(int n_fields,...)
cv_bridge::CvImagePtr p_image_crosstalk2_
Pointer to crosstalk2 flags.
image_transport::CameraPublisher pub_sat2_
Saturated2 flag image publisher.
cv::Mat transform_
Transform.
HFL110DCU v1 object dynamic property.
bool processFrameData(const std::vector< uint8_t > &data) override
const uint16_t FRAME_COLUMNS
Default frame cols.
std::shared_ptr< std_msgs::Header > frame_header_message_
Frame Header message.
double global_offset_
Global range offset.
std::string version_
Current camera version.
image_transport::CameraPublisher pub_ct2_
Crosstalk2 flag image publisher.
cv_bridge::CvImagePtr p_image_superimposed2_
Pointer to superimposed2 flags.
cv_bridge::CvImagePtr p_image_saturated_
Pointer to saturated flags.
HFL110DCU v1 ethernet object struct.
cv_bridge::CvImagePtr p_image_crosstalk_
Pointer to crosstalk flags.
std::shared_ptr< std_msgs::Header > pdm_header_message_
PDM Header message.
CameraPublisher advertiseCamera(const std::string &base_topic, uint32_t queue_size, bool latch=false)
cv_bridge::CvImagePtr p_image_intensity2_
Pointer to 16-bit intensity image, second return.
uint8_t expected_packet_
Return counter.
cv::Mat initTransform(cv::Mat cameraMatrix, cv::Mat distCoeffs, int width, int height, bool radial)
image_transport::CameraPublisher pub_ct_
Crosstalk flag image publisher.
const std::string TYPE_32FC1
const uint16_t FRAME_ROWS
Default frame rows.
telemetry telem_
Telemetry Data.
const std::string TYPE_16UC1
bool parseFrame(int start_byte, const std::vector< uint8_t > &packet) override
HFL110DCU v1 object state.
Publisher advertise(const std::string &topic, uint32_t queue_size, bool latch=false)
HFL110DCU(std::string model, std::string version, std::string frame_id, ros::NodeHandle &node_handler)
cv_bridge::CvImagePtr p_image_saturated2_
Pointer to saturated2 flags.
ros::NodeHandle node_handler_
ROS node handler.
void update_diagnostics(diagnostic_updater::DiagnosticStatusWrapper &stat)
image_transport::CameraPublisher pub_intensity_
16-bit intensity image publisher.
std::shared_ptr< std_msgs::Header > slice_header_message_
Slice Header message.
bool parseObjects(int start_byte, const std::vector< uint8_t > &packet) override
uint8_t row_
Row and column counter.
image_transport::CameraPublisher pub_si2_
Superimposed2 flag image publisher.
bool setCameraInfo(const sensor_msgs::CameraInfo &camera_info)
std::shared_ptr< std_msgs::Header > object_header_message_
Object Header message.
camera_info_manager::CameraInfoManager * camera_info_manager_
bool processTelemetryData(const std::vector< uint8_t > &data) override
bool processSliceData(const std::vector< uint8_t > &data) override
cv_bridge::CvImagePtr p_image_depth_
Pointer to depth image.
cv_bridge::CvImagePtr p_image_superimposed_
Pointer to superimposed flags.
std::shared_ptr< sensor_msgs::PointCloud2 > pointcloud_
Pointcloud msg.
cv_bridge::CvImagePtr p_image_intensity_
Pointer to 16 bit intensity image.
void add(const std::string &key, const T &val)
std::vector< hflObj > objects_
Objects vector.
void setHardwareIDf(const char *format,...)
image_transport::CameraPublisher pub_intensity2_
16-bit intensity image publisher, second return.
static float big_to_native(float x)
std::string model_
Current camera model.
image_transport::CameraPublisher pub_sat_
Saturated flag image publisher.
unsigned uiTempSensorFeedback
ros::Publisher pub_points_
Pointcloud publisher.