BosonCamera.cpp
/*
 * Copyright © 2019 AutonomouStuff, LLC
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify,
 * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies
 * or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
 * OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "flir_boson_usb/BosonCamera.h"

#include <pluginlib/class_list_macros.h>

PLUGINLIB_EXPORT_CLASS(flir_boson_usb::BosonCamera, nodelet::Nodelet)

using namespace cv;
using namespace flir_boson_usb;

BosonCamera::BosonCamera() :
  cv_img()
{
}

BosonCamera::~BosonCamera()
{
  closeCamera();
}

void BosonCamera::onInit()
{
  nh = getNodeHandle();
  pnh = getPrivateNodeHandle();

  camera_info = std::shared_ptr<camera_info_manager::CameraInfoManager>(
      new camera_info_manager::CameraInfoManager(nh));
  it = std::shared_ptr<image_transport::ImageTransport>(new image_transport::ImageTransport(nh));
  image_pub = it->advertiseCamera("image_raw", 1);

  bool exit = false;

  pnh.param<std::string>("frame_id", frame_id, "boson_camera");
  pnh.param<std::string>("dev", dev_path, "/dev/video0");
  pnh.param<float>("frame_rate", frame_rate, 60.0);
  pnh.param<std::string>("video_mode", video_mode_str, "RAW16");
  pnh.param<bool>("zoom_enable", zoom_enable, false);
  pnh.param<std::string>("sensor_type", sensor_type_str, "Boson_640");
  pnh.param<std::string>("camera_info_url", camera_info_url, "");

  ROS_INFO("flir_boson_usb - Got frame_id: %s.", frame_id.c_str());
  ROS_INFO("flir_boson_usb - Got dev: %s.", dev_path.c_str());
  ROS_INFO("flir_boson_usb - Got frame rate: %f.", frame_rate);
  ROS_INFO("flir_boson_usb - Got video mode: %s.", video_mode_str.c_str());
  ROS_INFO("flir_boson_usb - Got zoom enable: %s.", (zoom_enable ? "true" : "false"));
  ROS_INFO("flir_boson_usb - Got sensor type: %s.", sensor_type_str.c_str());
  ROS_INFO("flir_boson_usb - Got camera_info_url: %s.", camera_info_url.c_str());

  if (video_mode_str == "RAW16")
  {
    video_mode = RAW16;
  }
  else if (video_mode_str == "YUV")
  {
    video_mode = YUV;
  }
  else
  {
    exit = true;
    ROS_ERROR("flir_boson_usb - Invalid video_mode value provided. Exiting.");
  }

  if (sensor_type_str == "Boson_320" ||
      sensor_type_str == "boson_320")
  {
    sensor_type = Boson320;
    camera_info->setCameraName("Boson320");
  }
  else if (sensor_type_str == "Boson_640" ||
           sensor_type_str == "boson_640")
  {
    sensor_type = Boson640;
    camera_info->setCameraName("Boson640");
  }
  else
  {
    exit = true;
    ROS_ERROR("flir_boson_usb - Invalid sensor_type value provided. Exiting.");
  }

  if (camera_info->validateURL(camera_info_url))
  {
    camera_info->loadCameraInfo(camera_info_url);
  }
  else
  {
    ROS_INFO("flir_boson_usb - camera_info_url could not be validated. Publishing with unconfigured camera.");
  }

  if (!exit)
    exit = openCamera() ? exit : true;

  if (exit)
  {
    ros::shutdown();
    return;
  }
  else
  {
    capture_timer = nh.createTimer(ros::Duration(1.0 / frame_rate),
        boost::bind(&BosonCamera::captureAndPublish, this, _1));
  }
}
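
// Configuration summary: onInit() reads all of its settings from the nodelet's
// private namespace: ~frame_id, ~dev, ~frame_rate, ~video_mode (RAW16 or YUV),
// ~zoom_enable, ~sensor_type (Boson_320 or Boson_640), and ~camera_info_url.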

// AGC Sample ONE: Linear from min to max.
// Input is a MATRIX (height x width) of 16 bits. (OpenCV mat)
// Output is a MATRIX (height x width) of 8 bits (OpenCV mat)
void BosonCamera::agcBasicLinear(const Mat& input_16,
                                 Mat* output_8,
                                 const int& height,
                                 const int& width)
{
  int i, j;  // aux variables

  // auxiliary variables for AGC calculation
  unsigned int max1 = 0;       // 16 bits
  unsigned int min1 = 0xFFFF;  // 16 bits
  unsigned int value1, value2, value3, value4;

  // RUN a super basic AGC
  // First pass: find the minimum and maximum raw values in the frame
  for (i = 0; i < height; i++)
  {
    for (j = 0; j < width; j++)
    {
      value1 = input_16.at<uchar>(i, j * 2 + 1) & 0xFF;  // High Byte
      value2 = input_16.at<uchar>(i, j * 2) & 0xFF;      // Low Byte
      value3 = (value1 << 8) + value2;

      if (value3 <= min1)
        min1 = value3;

      if (value3 >= max1)
        max1 = value3;
    }
  }

  // Second pass: map [min1, max1] linearly onto the 8-bit range [0, 255]
  for (int i = 0; i < height; i++)
  {
    for (int j = 0; j < width; j++)
    {
      value1 = input_16.at<uchar>(i, j * 2 + 1) & 0xFF;  // High Byte
      value2 = input_16.at<uchar>(i, j * 2) & 0xFF;      // Low Byte
      value3 = (value1 << 8) + value2;
      value4 = (255 * (value3 - min1)) / (max1 - min1);

      output_8->at<uchar>(i, j) = static_cast<uint8_t>(value4 & 0xFF);
    }
  }
}
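
// Worked example of the linear mapping above (illustrative values only):
// with min1 = 2000 and max1 = 6000, a raw count of 4000 maps to
// 255 * (4000 - 2000) / (6000 - 2000) = 127 after integer division,
// i.e. mid-grey in the 8-bit output.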

bool BosonCamera::openCamera()
{
  // Open the Video device
  if ((fd = open(dev_path.c_str(), O_RDWR)) < 0)
  {
    ROS_ERROR("flir_boson_usb - ERROR : OPEN. Invalid Video Device.");
    return false;
  }

  // Check VideoCapture mode is available
  if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
  {
    ROS_ERROR("flir_boson_usb - ERROR : VIDIOC_QUERYCAP. Video Capture is not available.");
    return false;
  }

  if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
  {
    ROS_ERROR("flir_boson_usb - The device does not handle single-planar video capture.");
    return false;
  }

  struct v4l2_format format;

  // Two different FORMAT modes, 8 bits vs RAW16
  if (video_mode == RAW16)
  {
    // Request the thermal 16-bit mode
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_Y16;

    // Select the frame SIZE (will depend on the type of sensor)
    switch (sensor_type)
    {
      case Boson320:  // Boson320
        width = 320;
        height = 256;
        break;
      case Boson640:  // Boson640
        width = 640;
        height = 512;
        break;
      default:  // Boson320
        width = 320;
        height = 256;
        break;
    }
  }
  else  // 8 bits is always 640x512 (even for a Boson 320)
  {
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420;  // thermal, works LUMA, full Cr, full Cb
    width = 640;
    height = 512;
  }

  // Common variables
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  format.fmt.pix.width = width;
  format.fmt.pix.height = height;

  // Request desired FORMAT
  if (ioctl(fd, VIDIOC_S_FMT, &format) < 0)
  {
    ROS_ERROR("flir_boson_usb - VIDIOC_S_FMT error. The camera does not support the requested video format.");
    return false;
  }

  // We need to inform the device about the buffers to use,
  // and we need to allocate them.
  // We'll use a single buffer and map our memory using mmap.
  // All this information is sent using the VIDIOC_REQBUFS call and a
  // v4l2_requestbuffers structure:
  struct v4l2_requestbuffers bufrequest;
  bufrequest.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  bufrequest.memory = V4L2_MEMORY_MMAP;
  bufrequest.count = 1;  // we are asking for one buffer

  if (ioctl(fd, VIDIOC_REQBUFS, &bufrequest) < 0)
  {
    ROS_ERROR("flir_boson_usb - VIDIOC_REQBUFS error. The camera failed to allocate a buffer.");
    return false;
  }

  // Now that the device knows how to provide its data,
  // we need to ask it about the amount of memory it needs,
  // and allocate it. This information is retrieved using the VIDIOC_QUERYBUF call,
  // and its v4l2_buffer structure.

  memset(&bufferinfo, 0, sizeof(bufferinfo));

  bufferinfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  bufferinfo.memory = V4L2_MEMORY_MMAP;
  bufferinfo.index = 0;

  if (ioctl(fd, VIDIOC_QUERYBUF, &bufferinfo) < 0)
  {
    ROS_ERROR("flir_boson_usb - VIDIOC_QUERYBUF error. Failed to retrieve buffer information.");
    return false;
  }

  // Map fd+offset into a process location (the kernel decides where, due to our NULL).
  // Length and access properties are also passed.
  buffer_start = mmap(NULL, bufferinfo.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, bufferinfo.m.offset);

  if (buffer_start == MAP_FAILED)
  {
    ROS_ERROR("flir_boson_usb - mmap error. Failed to create a memory map for buffer.");
    return false;
  }

  // Fill this buffer with zeros. Initialization. Optional but nice to do.
  memset(buffer_start, 0, bufferinfo.length);

  // Activate streaming
  int type = bufferinfo.type;
  if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
  {
    ROS_ERROR("flir_boson_usb - VIDIOC_STREAMON error. Failed to activate streaming on the camera.");
    return false;
  }

  // Declarations for RAW16 representation
  // Will be used in case we are reading RAW16 format
  // Boson320, Boson640
  // OpenCV input buffer: asking for all info, two bytes per pixel (RAW16 mode)
  thermal16 = Mat(height, width, CV_16U, buffer_start);
  // OpenCV output buffer: data used to display the video
  thermal16_linear = Mat(height, width, CV_8U, 1);

  // Declarations for 8-bit YCbCr mode
  // Will be used in case we are reading YUV format
  // Boson320, 640: 4:2:0
  int luma_height = height + height / 2;
  int luma_width = width;
  int color_space = CV_8UC1;

  // Declarations for Zoom representation
  // Will be used or not depending on program arguments
  thermal_luma = Mat(luma_height, luma_width, color_space, buffer_start);  // OpenCV input buffer
  // OpenCV output buffer, BGR -> three color planes:
  // (640 - 640 - 640 : p11 p21 p31 ... / p12 p22 p32 ... / p13 p23 p33 ...)
  thermal_rgb = Mat(height, width, CV_8UC3, 1);

  return true;
}
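
// Note: thermal16 and thermal_luma above are Mat headers wrapping the mmap'd V4L2
// buffer directly (no copy), so every dequeued frame refreshes the pixels they point
// at, while thermal16_linear and thermal_rgb own the converted output data.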

bool BosonCamera::closeCamera()
{
  // Finish loop. Exiting.
  // Deactivate streaming
  int type = bufferinfo.type;
  if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0)
  {
    ROS_ERROR("flir_boson_usb - VIDIOC_STREAMOFF error. Failed to disable streaming on the camera.");
    return false;
  }

  close(fd);

  return true;
}

void BosonCamera::captureAndPublish(const ros::TimerEvent& evt)
{
  Size size(640, 512);

  sensor_msgs::CameraInfoPtr
    ci(new sensor_msgs::CameraInfo(camera_info->getCameraInfo()));

  ci->header.frame_id = frame_id;

  // Put the buffer in the incoming queue.
  if (ioctl(fd, VIDIOC_QBUF, &bufferinfo) < 0)
  {
    ROS_ERROR("flir_boson_usb - VIDIOC_QBUF error. Failed to queue the image buffer.");
    return;
  }

  // The buffer's waiting in the outgoing queue.
  if (ioctl(fd, VIDIOC_DQBUF, &bufferinfo) < 0)
  {
    ROS_ERROR("flir_boson_usb - VIDIOC_DQBUF error. Failed to dequeue the image buffer.");
    return;
  }

  if (video_mode == RAW16)
  {
    // -----------------------------
    // RAW16 DATA
    agcBasicLinear(thermal16, &thermal16_linear, height, width);

    // Display thermal after 16-bits AGC... will display an image
    if (!zoom_enable)
    {
      // Threshold using Otsu's method, then use the result as a mask on the original image
      Mat mask_mat, masked_img;
      threshold(thermal16_linear, mask_mat, 0, 255, CV_THRESH_BINARY | CV_THRESH_OTSU);
      thermal16_linear.copyTo(masked_img, mask_mat);

      // Normalize the pixel values to the range [0, 1], raise to the power (gamma),
      // then convert back for display.
      Mat d_out_img, norm_image, d_norm_image, gamma_corrected_image, d_gamma_corrected_image;
      double gamma = 0.8;
      masked_img.convertTo(d_out_img, CV_64FC1);
      normalize(d_out_img, d_norm_image, 0, 1, NORM_MINMAX, CV_64FC1);
      pow(d_out_img, gamma, d_gamma_corrected_image);
      d_gamma_corrected_image.convertTo(gamma_corrected_image, CV_8UC1);
      normalize(gamma_corrected_image, gamma_corrected_image, 0, 255, NORM_MINMAX, CV_8UC1);

      // Apply top hat filter
      int erosion_size = 5;
      Mat top_hat_img, kernel = getStructuringElement(MORPH_ELLIPSE,
          Size(2 * erosion_size + 1, 2 * erosion_size + 1));
      morphologyEx(gamma_corrected_image, top_hat_img, MORPH_TOPHAT, kernel);

      cv_img.image = top_hat_img;
      cv_img.header.stamp = ros::Time::now();
      cv_img.header.frame_id = frame_id;
      cv_img.encoding = "mono8";
      pub_image = cv_img.toImageMsg();

      ci->header.stamp = pub_image->header.stamp;
      image_pub.publish(pub_image, ci);
    }
    else
    {
      // Zoom the AGC output up to 640x512 before publishing
      resize(thermal16_linear, thermal16_linear_zoom, size);

      cv_img.image = thermal16_linear_zoom;
      cv_img.header.stamp = ros::Time::now();
      cv_img.header.frame_id = frame_id;
      cv_img.encoding = "mono8";
      pub_image = cv_img.toImageMsg();

      ci->header.stamp = pub_image->header.stamp;
      image_pub.publish(pub_image, ci);
    }
  }
  else  // Video is in 8 bits YUV
  {
    // ---------------------------------
    // DATA in YUV
    cvtColor(thermal_luma, thermal_rgb, COLOR_YUV2GRAY_I420, 0);

    cv_img.image = thermal_rgb;
    cv_img.encoding = "mono8";
    cv_img.header.stamp = ros::Time::now();
    cv_img.header.frame_id = frame_id;
    pub_image = cv_img.toImageMsg();

    ci->header.stamp = pub_image->header.stamp;
    image_pub.publish(pub_image, ci);
  }
}
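
For reference, the per-frame stretch that agcBasicLinear() implements by hand can be
written more compactly with OpenCV's own minMaxLoc() and convertTo(). The sketch below
is not part of BosonCamera.cpp; the function name linearAgc and the assumption that the
input is a CV_16U frame (like thermal16 above) are illustrative only.

#include <opencv2/opencv.hpp>

// Linear AGC: map the frame's [min, max] range onto [0, 255] and return an 8-bit image.
cv::Mat linearAgc(const cv::Mat& raw16)  // raw16: CV_16U thermal frame
{
  double min_val = 0.0, max_val = 0.0;
  cv::minMaxLoc(raw16, &min_val, &max_val);

  // out = raw * scale + offset, so min_val -> 0 and max_val -> 255
  double scale = (max_val > min_val) ? 255.0 / (max_val - min_val) : 1.0;

  cv::Mat out8;
  raw16.convertTo(out8, CV_8U, scale, -min_val * scale);
  return out8;
}

The guard on max_val > min_val avoids a divide-by-zero on a perfectly flat frame, a
corner case the hand-written loop in agcBasicLinear() does not handle.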


flir_boson_usb
Author(s): Joe Driscoll, Joshua Whitley
autogenerated on Mon Jul 1 2019 20:02:05