levinson.cpp
/*
  velo2cam_calibration - Automatic calibration algorithm for extrinsic parameters of a stereo camera and a velodyne
  Copyright (C) 2017-2018 Jorge Beltran, Carlos Guindel

  This file is part of velo2cam_calibration.

  velo2cam_calibration is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 2 of the License, or
  (at your option) any later version.

  velo2cam_calibration is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with velo2cam_calibration. If not, see <http://www.gnu.org/licenses/>.
*/

/*
  Includes code from https://github.com/Flos/ros-image_cloud licensed
  under the MIT license, set out below.

  The MIT License (MIT)

  Copyright (c) 2015

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
  in the Software without restriction, including without limitation the rights
  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  copies of the Software, and to permit persons to whom the Software is
  furnished to do so, subject to the following conditions:

  The above copyright notice and this permission notice shall be included in all
  copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
*/

/*
  Calculates the miscalibration value according to:
  J. Levinson and S. Thrun, “Automatic Online Calibration of Cameras and Lasers,”
  in Robotics: Science and Systems, 2013.
*/
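
/*
  Objective evaluated in checkExtrinics(): every Velodyne edge point is projected into
  the camera image and the score accumulates

      J = sum_i  D(u_i, v_i) * w_i

  where w_i is the depth-discontinuity weight stored in the point intensity field
  (see edges_pointcloud) and D is the inverse-distance-transformed edge image
  (see edge_max / inverse_distance_transformation). A good extrinsic calibration
  aligns range discontinuities with image edges, so J is larger there.
*/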

#define PCL_NO_PRECOMPILE

#include "velo2cam_utils.h"
#include <ros/ros.h>
#include <ros/package.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/CameraInfo.h>
#include <pcl/point_cloud.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h>
#include <opencv2/opencv.hpp>
#include <cv_bridge/cv_bridge.h>
#include <image_transport/image_transport.h>
#include <image_geometry/pinhole_camera_model.h>
#include <tf/tf.h>
#include <tf/transform_listener.h>
#include <pcl_ros/transforms.h>
#include <fstream>

bool laserReceived, cameraReceived;
pcl::PointCloud<Velodyne::Point>::Ptr edges_cloud;
ros::Publisher cloud_pub;
cv::Mat img_out;
std::string target_frame, source_frame;
bool listen_to_tf_, save_to_file_;
int step_;
std::ofstream savefile;

typedef struct {
  double r, g, b;
} COLOUR;

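// Maps a scalar v in [vmin, vmax] to a blue-to-red ("jet"-like) RGB colour; used only
// to visualise the strength of each projected edge point on top of the camera image.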
COLOUR GetColour(double v, double vmin, double vmax)
{
  COLOUR c = {1.0, 1.0, 1.0}; // white
  double dv;

  if (v < vmin)
    v = vmin;
  if (v > vmax)
    v = vmax;
  dv = vmax - vmin;

  if (v < (vmin + 0.25 * dv)) {
    c.r = 0;
    c.g = 4 * (v - vmin) / dv;
  } else if (v < (vmin + 0.5 * dv)) {
    c.r = 0;
    c.b = 1 + 4 * (vmin + 0.25 * dv - v) / dv;
  } else if (v < (vmin + 0.75 * dv)) {
    c.r = 4 * (v - vmin - 0.5 * dv) / dv;
    c.b = 0;
  } else {
    c.g = 1 + 4 * (vmin + 0.75 * dv - v) / dv;
    c.b = 0;
  }

  return c;
}

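// Turns the intensity field of every point into an "edgeness" weight: for each laser
// ring, a point scores the square root of the largest positive range jump towards
// either of its two ring neighbours (clamped at zero); ring endpoints score 0.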
void edges_pointcloud(pcl::PointCloud<Velodyne::Point>::Ptr pc){
  std::vector<std::vector<Velodyne::Point*> > rings = Velodyne::getRings(*pc);
  for (std::vector<std::vector<Velodyne::Point*> >::iterator ring = rings.begin(); ring < rings.end(); ++ring){
    if (ring->empty()) continue;

    (*ring->begin())->intensity = 0;
    (*(ring->end() - 1))->intensity = 0;
    for (vector<Velodyne::Point*>::iterator pt = ring->begin() + 1; pt < ring->end() - 1; ++pt){
      Velodyne::Point *prev = *(pt - 1);
      Velodyne::Point *succ = *(pt + 1);
      (*pt)->intensity = pow(std::max(std::max(prev->range - (*pt)->range, succ->range - (*pt)->range), 0.f), 0.5);
    }
  }
}

template <class T>
inline bool between(T min, T test, T max){
  return min < test && test < max;
}

template <typename ImageT>
inline
void get_diff(int col, int row, int col_c, int row_c, int &result, cv::Mat& in) {
  if (between(-1, col, in.cols) && between(-1, row, in.rows)) {
    // neighbour lies inside the image: keep the largest absolute difference
    // seen so far with respect to the centre pixel
    result = std::max(abs(in.at<ImageT>(row, col) - in.at<ImageT>(row_c, col_c)), result);
  }
}

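// Returns the largest absolute intensity difference between pixel (row_c, col_c)
// and its 8-connected neighbours, i.e. a simple max-gradient edge response.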
template <typename ImageT>
inline
int max_diff_neighbors(int row_c, int col_c, cv::Mat &in){
  int result = 0;

  get_diff<ImageT>(col_c - 1, row_c - 1, col_c, row_c, result, in);
  get_diff<ImageT>(col_c    , row_c - 1, col_c, row_c, result, in);
  get_diff<ImageT>(col_c + 1, row_c - 1, col_c, row_c, result, in);
  get_diff<ImageT>(col_c - 1, row_c    , col_c, row_c, result, in);
  get_diff<ImageT>(col_c + 1, row_c    , col_c, row_c, result, in);
  get_diff<ImageT>(col_c - 1, row_c + 1, col_c, row_c, result, in);
  get_diff<ImageT>(col_c    , row_c + 1, col_c, row_c, result, in);
  get_diff<ImageT>(col_c + 1, row_c + 1, col_c, row_c, result, in);

  return result;
}

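// Edge image: every output pixel is the maximum absolute difference between the
// corresponding input pixel and its 8 neighbours.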
template <typename ImageT>
inline void
edge_max(cv::Mat &in, cv::Mat &out)
{
  assert(in.rows == out.rows);
  assert(in.cols == out.cols);
  assert(in.depth() == out.depth());
  assert(in.channels() == out.channels());

  for(int r = 0; r < in.rows; r++)
  {
    for(int c = 0; c < in.cols; c++)
    {
      out.at<ImageT>(r, c) = max_diff_neighbors<ImageT>(r, c, in);
    }
  }
}

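// One step of the directional edge propagation: the running value decays by the factor
// psi at each pixel, is refreshed whenever the input edge image is stronger, and the
// output keeps the maximum seen so far. Sweeping this in all four directions
// approximates an exponentially discounted distance transform of the edge image.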
template <typename Type_in, typename Type_out>
inline float calc(float &val, const float &psi, int row, int col, const cv::Mat& in, cv::Mat& out) {

  val = val * psi; /* fade out the propagated value */

  if (in.at<Type_in>(row, col) > val) /* is the value in the input image bigger than the propagated value? */
  {
    val = in.at<Type_in>(row, col); /* yes, take the bigger value */
  }

  if (out.at<Type_out>(row, col) < val) /* is the propagated value bigger than the value already stored in the filtered image? */
  {
    out.at<Type_out>(row, col) = val; /* yes, store the propagated value in the filtered image */
  }
  else{
    val = out.at<Type_out>(row, col); /* no, the value in the filtered image is bigger, so use it */
  }

  return val;
}

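// Four directional sweeps (left-to-right, right-to-left, top-to-bottom, bottom-to-top)
// that propagate edge strength with the per-pixel decay psi. The alpha parameter is
// accepted for interface symmetry but is not used inside the sweeps.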
template <typename Type_in, typename Type_out>
inline void
neighbors_x_pos(cv::Mat &in, cv::Mat &out, float psi, float alpha){
  float val = 0;

  for(int row = 0; row < in.rows; ++row)
  {
    val = 0;
    for(int col = 0; col < in.cols; ++col)
    {
      val = calc<Type_in, Type_out>(val, psi, row, col, in, out);
    }
  }
}

template <typename Type_in, typename Type_out>
inline void
neighbors_x_neg(cv::Mat &in, cv::Mat &out, float psi, float alpha){
  float val = 0;

  for(int row = 0; row < in.rows; ++row)
  {
    val = 0;
    for(int col = in.cols - 1; col >= 0; --col)
    {
      val = calc<Type_in, Type_out>(val, psi, row, col, in, out);
    }
  }
}

template <typename Type_in, typename Type_out>
inline void
neighbors_y_pos(cv::Mat &in, cv::Mat &out, float psi, float alpha){
  float val = 0;

  for(int col = 0; col < in.cols; ++col)
  {
    val = 0;
    for(int row = 0; row < in.rows; ++row)
    {
      val = calc<Type_in, Type_out>(val, psi, row, col, in, out);
    }
  }
}

template <typename Type_in, typename Type_out>
inline void
neighbors_y_neg(cv::Mat &in, cv::Mat &out, float psi, float alpha){
  float val = 0;

  for(int col = 0; col < in.cols; ++col)
  {
    val = 0;
    for(int row = in.rows - 1; row >= 0; --row)
    {
      val = calc<Type_in, Type_out>(val, psi, row, col, in, out);
    }
  }
}

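// Inverse distance transformation of the edge image (Levinson & Thrun): after the four
// decayed sweeps, every output pixel is blended as
//   alpha * edge(x, y) + (1 - alpha) * propagated(x, y),
// so pixels near strong edges keep a high value even if they are not edges themselves.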
template <typename Type_in, typename Type_out>
void
inverse_distance_transformation(cv::Mat &in, cv::Mat &out, float alpha = 0.333333333, float psi = 0.98)
{
  assert(in.channels() == 1);
  assert(in.depth() == CV_8U);

  assert(in.size == out.size);
  assert(in.rows == out.rows);
  assert(in.cols == out.cols);

  neighbors_x_pos<Type_in, Type_out>(in, out, psi, alpha);
  neighbors_x_neg<Type_in, Type_out>(in, out, psi, alpha);
  neighbors_y_pos<Type_in, Type_out>(in, out, psi, alpha);
  neighbors_y_neg<Type_in, Type_out>(in, out, psi, alpha);

  for(int row = 0; row < in.rows; row++)
  {
    for(int col = 0; col < in.cols; col++)
    {
      int val = alpha * in.at<Type_in>(row, col) + (1 - alpha) * (float)out.at<Type_out>(row, col);
      out.at<Type_out>(row, col) = val;
    }
  }
}

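// Converts a tf::Transform into the homogeneous 4x4 matrix expected by
// pcl::transformPointCloud.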
void
transformAsMatrix(const tf::Transform& bt, Eigen::Matrix4f &out_mat)
{
  double mv[12];
  bt.getBasis().getOpenGLSubMatrix(mv);

  tf::Vector3 origin = bt.getOrigin();

  out_mat (0, 0) = mv[0]; out_mat (0, 1) = mv[4]; out_mat (0, 2) = mv[8];
  out_mat (1, 0) = mv[1]; out_mat (1, 1) = mv[5]; out_mat (1, 2) = mv[9];
  out_mat (2, 0) = mv[2]; out_mat (2, 1) = mv[6]; out_mat (2, 2) = mv[10];

  out_mat (3, 0) = out_mat (3, 1) = out_mat (3, 2) = 0; out_mat (3, 3) = 1;
  out_mat (0, 3) = origin.x();
  out_mat (1, 3) = origin.y();
  out_mat (2, 3) = origin.z();
}

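// CameraInfo callback. Once an edge cloud and an edge image are available, it projects
// the transformed edge cloud into the image and evaluates the objective J. With
// listen_to_tf_ enabled the transform currently published on TF is scored once;
// otherwise that transform is only used as the centre of a small 6-DoF grid search and
// every perturbed candidate is scored (optionally logged to the CSV file).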
void checkExtrinics(const sensor_msgs::CameraInfoConstPtr& cinfo_msg){

  if(!laserReceived || !cameraReceived){
    return;
  }

  ROS_INFO("Both received");

  image_geometry::PinholeCameraModel cam_model_;
  cam_model_.fromCameraInfo(cinfo_msg);

  pcl::PointCloud<Velodyne::Point>::Ptr trans_cloud(new pcl::PointCloud<Velodyne::Point>);

  double x, y, z;
  double r, p, yaw;

  tf::StampedTransform transform;
  if (listen_to_tf_){
    ROS_INFO("Using available tf transformation");
    tf::TransformListener listener;

    try{
      listener.waitForTransform(target_frame.c_str(), source_frame.c_str(), ros::Time(0), ros::Duration(20.0));
      listener.lookupTransform(target_frame.c_str(), source_frame.c_str(), ros::Time(0), transform);
    }catch (tf::TransformException& ex) {
      ROS_WARN("TF exception:\n%s", ex.what());
      return;
    }
    Eigen::Matrix4f transf_mat;
    transformAsMatrix(transform, transf_mat);
    pcl::transformPointCloud(*edges_cloud, *trans_cloud, transf_mat);

    trans_cloud->header.frame_id = target_frame;

    cv::Mat img_color;
    cv::cvtColor(img_out, img_color, cv::COLOR_GRAY2BGR);

    double objective_function = 0;

    ROS_INFO("Accumulating");

    for (pcl::PointCloud<Velodyne::Point>::iterator pt = trans_cloud->points.begin(); pt < trans_cloud->points.end(); ++pt)
    {
      cv::Point3d pt_cv((*pt).x, (*pt).y, (*pt).z);
      cv::Point2d uv;
      uv = cam_model_.project3dToPixel(pt_cv);

      COLOUR c = GetColour(int((*pt).intensity*255.0), 0, 255);

      cv::circle(img_color, uv, 4, cv::Scalar(int(255*c.b), int(255*c.g), int(255*c.r)), -1);

      if (uv.x >= 0 && uv.x < img_out.cols && uv.y >= 0 && uv.y < img_out.rows){
        objective_function += img_out.at<uchar>(uv.y, uv.x)*(*pt).intensity;
      }

    }

    if (save_to_file_){
      savefile << transform.getOrigin()[0] << ", " << transform.getOrigin()[1] << ", " << transform.getOrigin()[2] << ", " << transform.getRotation()[0] << ", " << transform.getRotation()[1] << ", " << transform.getRotation()[2] << ", " << transform.getRotation()[3] << ", " << objective_function << ", " << endl;
    }

    ROS_INFO("Objective function: %lf", objective_function);

    cv::imshow("Final Levinson", img_color);
    cv::waitKey(3);
    edges_cloud.reset();

    laserReceived = false;
    cameraReceived = false;

    step_++;
    return;

  }else{

    tf::TransformListener listener;

    try{
      listener.waitForTransform(target_frame.c_str(), source_frame.c_str(), ros::Time(0), ros::Duration(20.0));
      listener.lookupTransform(target_frame.c_str(), source_frame.c_str(), ros::Time(0), transform);
    }catch (tf::TransformException& ex) {
      ROS_WARN("TF exception:\n%s", ex.what());
      return;
    }

    x = transform.getOrigin().getX();
    y = transform.getOrigin().getY();
    z = transform.getOrigin().getZ();
    transform.getBasis().getRPY(r, p, yaw);

  }

  ROS_INFO("%lf %lf %lf %lf %lf %lf", x, y, z, r, p, yaw);

  double x_mod, y_mod, z_mod;
  double r_mod, yaw_mod, p_mod;

  int MAX = 5;
  int M = (MAX-1)/2;
  double INC = 0.001, INC_ANG = 0.001;

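  // Exhaustive grid search: MAX = 5 steps per dimension centred on the TF values
  // (M = 2), with 1 mm translational and 0.001 rad rotational increments,
  // i.e. 5^6 = 15625 candidate transforms are evaluated.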
  //#pragma omp parallel for
  for (int iter_x = 0; iter_x < MAX; iter_x++){
    ROS_INFO("iter_x %d", iter_x);
    for (int iter_y = 0; iter_y < MAX; iter_y++){
      for (int iter_z = 0; iter_z < MAX; iter_z++){
        for (int iter_r = 0; iter_r < MAX; iter_r++){
          for (int iter_p = 0; iter_p < MAX; iter_p++){
            for (int iter_yaw = 0; iter_yaw < MAX; iter_yaw++){
              x_mod = x + (iter_x-M)*INC;
              y_mod = y + (iter_y-M)*INC;
              z_mod = z + (iter_z-M)*INC;
              r_mod = r + (iter_r-M)*INC_ANG;
              p_mod = p + (iter_p-M)*INC_ANG;
              yaw_mod = yaw + (iter_yaw-M)*INC_ANG;

              tf::Vector3 origin(x_mod, y_mod, z_mod);
              tf::Quaternion q;
              q.setRPY(r_mod, p_mod, yaw_mod);
              transform.setOrigin(origin);
              transform.setRotation(q);

              Eigen::Matrix4f transf_mat;
              transformAsMatrix(transform, transf_mat);
              pcl::transformPointCloud(*edges_cloud, *trans_cloud, transf_mat);

              trans_cloud->header.frame_id = target_frame;

              cv::Mat img_color;
              cv::cvtColor(img_out, img_color, cv::COLOR_GRAY2BGR);

              double objective_function = 0;

              for (pcl::PointCloud<Velodyne::Point>::iterator pt = trans_cloud->points.begin(); pt < trans_cloud->points.end(); ++pt)
              {
                cv::Point3d pt_cv((*pt).x, (*pt).y, (*pt).z);
                cv::Point2d uv;
                uv = cam_model_.project3dToPixel(pt_cv);

                COLOUR c = GetColour(int((*pt).intensity*255.0), 0, 255);

                cv::circle(img_color, uv, 4, cv::Scalar(int(255*c.b), int(255*c.g), int(255*c.r)), -1);

                if (uv.x >= 0 && uv.x < img_out.cols && uv.y >= 0 && uv.y < img_out.rows){
                  objective_function += img_out.at<uchar>(uv.y, uv.x)*(*pt).intensity;
                }

              }

              if (save_to_file_){
                savefile << transform.getOrigin()[0] << ", " << transform.getOrigin()[1] << ", " << transform.getOrigin()[2] << ", " << r_mod << ", " << p_mod << ", " << yaw_mod << ", " << objective_function << ", " << endl;
              }

            }
          }
        }
      }
    }
  }

  edges_cloud.reset();

  laserReceived = false;
  cameraReceived = false;

  step_++;

}

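// LiDAR callback: computes per-point edge weights, keeps the points whose squared
// weight exceeds THRESHOLD as the edge cloud used by checkExtrinics, and republishes
// them for visualisation.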
void laser_callback(const sensor_msgs::PointCloud2::ConstPtr& velo_cloud){
  ROS_INFO("Velodyne pattern ready!");
  laserReceived = true;
  pcl::PointCloud<Velodyne::Point>::Ptr t_laser_cloud(new pcl::PointCloud<Velodyne::Point>);
  pcl::PointCloud<Velodyne::Point>::Ptr laser_cloud(new pcl::PointCloud<Velodyne::Point>);
  fromROSMsg(*velo_cloud, *laser_cloud);

  Velodyne::addRange(*laser_cloud);

  edges_pointcloud(laser_cloud);

  edges_cloud = pcl::PointCloud<Velodyne::Point>::Ptr(new pcl::PointCloud<Velodyne::Point>);

  float THRESHOLD = 0.30;
  for (pcl::PointCloud<Velodyne::Point>::iterator pt = laser_cloud->points.begin(); pt < laser_cloud->points.end(); ++pt){
    if(pow(pt->intensity, 2) > THRESHOLD){
      edges_cloud->push_back(*pt);
    }
  }

  sensor_msgs::PointCloud2 cloud_ros;
  pcl::toROSMsg(*edges_cloud, cloud_ros);
  cloud_ros.header = velo_cloud->header;
  cloud_pub.publish(cloud_ros);

  t_laser_cloud.reset();
  laser_cloud.reset();

}

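// Image callback: converts the incoming image to grayscale, extracts the max-gradient
// edge image and stores its inverse distance transform in img_out for checkExtrinics.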
void stereo_callback(const sensor_msgs::ImageConstPtr& image_msg){
  ROS_INFO("Camera pattern ready!");
  cameraReceived = true;

  cv::Mat img_in;
  try{
    img_in = cv_bridge::toCvShare(image_msg)->image;
  }catch (cv_bridge::Exception& e){
    ROS_ERROR("cv_bridge exception: %s", e.what());
    return;
  }

  cv::Mat img_gray;
  cv::cvtColor(img_in, img_gray, cv::COLOR_RGB2GRAY);

  cv::Mat img_edge(img_gray.size(), CV_8UC1);
  edge_max<uchar>(img_gray, img_edge);

  img_out = cv::Mat::zeros(img_edge.size(), CV_8UC1);
  inverse_distance_transformation<uchar,uchar>(img_edge, img_out);
}

// Get current date/time, format is YYYY-MM-DD.HH:mm:ss
const std::string currentDateTime() {
  time_t now = time(0);
  struct tm tstruct;
  char buf[80];
  tstruct = *localtime(&now);
  // Visit http://en.cppreference.com/w/cpp/chrono/c/strftime
  // for more information about date/time format
  strftime(buf, sizeof(buf), "%Y-%m-%d.%X", &tstruct);

  return buf;
}


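// Sets up the node: reads the frame and behaviour parameters, subscribes to the point
// cloud, camera image and camera info topics, and (optionally) opens a CSV file where
// every evaluated transform and its objective value are appended.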
int main(int argc, char **argv){
  ros::init(argc, argv, "levinson");
  ros::NodeHandle nh_("~"); // LOCAL

  // Parameters
  nh_.param<std::string>("target_frame", target_frame, "/stereo_camera");
  nh_.param<std::string>("source_frame", source_frame, "/velodyne");
  nh_.param<bool>("listen_to_tf", listen_to_tf_, true);
  nh_.param<bool>("save_to_file", save_to_file_, true);

  laserReceived = false;
  cameraReceived = false;

  ros::Subscriber laser_sub = nh_.subscribe("cloud", 1, laser_callback);
  ros::Subscriber cinfo_sub = nh_.subscribe("camera_info", 1, checkExtrinics);
  image_transport::ImageTransport it(nh_);
  image_transport::Subscriber image_sub = it.subscribe("image", 1, stereo_callback); // feeds stereo_callback; topic name assumed

  cloud_pub = nh_.advertise<sensor_msgs::PointCloud2>("levinson_cloud", 1);

  step_ = 0;

  ostringstream os;
  os << getenv("HOME") << "/" << "levinson_" << currentDateTime() << ".csv";

  if (save_to_file_){
    ROS_INFO("Opening %s", os.str().c_str());
    savefile.open(os.str().c_str());

    savefile << "x, y, z, r, p, y, obj" << endl;
  }

  ros::spin();

  if (save_to_file_) savefile.close();
  return 0;
}