eddiebot_line_follower.cpp
#include <ros/ros.h>
#include <stdio.h>
#include <iostream>
#include "std_msgs/String.h"
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <opencv2/imgproc/imgproc.hpp>     // OpenCV image-processing and GUI header files
#include <opencv2/highgui/highgui.hpp>
#include <opencv/cv.h>
#include <opencv/highgui.h>
#include <cv_bridge/CvBridge.h>
#include <cvaux.h>
#include <math.h>
#include <cxcore.h>
#include <geometry_msgs/Twist.h>
#include "boost/thread/mutex.hpp"
#include "boost/thread/thread.hpp"

/* Here is a simple program which demonstrates the use of ROS and OpenCV to do
   image manipulation on video streams published as image topics from the
   monocular vision of a robot; here the device used is an AR.Drone (quad-rotor). */
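/* Processing pipeline used below: each incoming frame is Gaussian-smoothed,
   converted to grayscale and inverted, run through a Canny edge detector, and
   a probabilistic Hough transform then extracts line segments. For every
   detected segment the node computes its orientation and horizontal midpoint
   and publishes a geometry_msgs/Twist on "cmd_vel": it turns left when the
   midpoint is at or below left_threshold, turns right when it is at or above
   right_threshold, and drives straight ahead otherwise. */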
using namespace std;
using namespace cv;
namespace enc = sensor_msgs::image_encodings;
static const char WINDOW[] = "Image window";

float prevVelocity_angular, prevVelocity_linear, newVelocity_angular,
        newVelocity_linear;
float derive_angular, derive_linear, dt = 0.5;
float horizontalcount;

class ImageConverter {
    ros::NodeHandle nh_, ph_;
    ros::NodeHandle n;
    ros::Publisher pub;
    ros::Publisher tog;
    image_transport::ImageTransport it_;
    image_transport::Subscriber image_sub_;  // image subscriber (subscribes to the camera's image_raw topic)
    image_transport::Publisher image_pub_;   // image publisher (republishes the converted frame on /arcv/Image)
    std_msgs::String msg;

    double linear_, angular_;
    double l_scale_, a_scale_;
    double left_threshold, right_threshold;
public:
    ImageConverter() :
            linear_(0.05),
            angular_(0.05),
            l_scale_(1.0),
            a_scale_(1.0),
            left_threshold(150),
            right_threshold(450),
            it_(nh_) {
        pub = n.advertise<geometry_msgs::Twist>("cmd_vel", 1);
        image_sub_ = it_.subscribe("/usb_cam/image_raw", 1,
                &ImageConverter::imageCb, this);
        image_pub_ = it_.advertise("/arcv/Image", 1);

        // Initialize parameters
        nh_.param("scale_angular", a_scale_, a_scale_);
        nh_.param("scale_linear", l_scale_, l_scale_);
        nh_.param("angular", angular_, angular_);
        nh_.param("linear", linear_, linear_);
        nh_.param("left_threshold", left_threshold, left_threshold);
        nh_.param("right_threshold", right_threshold, right_threshold);
    }
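
    // Note: the nh_.param lookups in the constructor above use a plain
    // (non-private) NodeHandle, so the parameters resolve in the node's
    // namespace rather than under the node name. A hypothetical override,
    // assuming the node runs in the global namespace, would be set before
    // starting the node, e.g.:
    //   rosparam set /scale_linear 2.0
    //   rosparam set /left_threshold 200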

    ~ImageConverter() {
        cv::destroyWindow(WINDOW);
    }

    void imageCb(const sensor_msgs::ImageConstPtr& msg) {

        sensor_msgs::CvBridge bridge;  // bridge object for converting the ROS image message to an OpenCV image
        IplImage* img = bridge.imgMsgToCv(msg, "rgb8");  // ROS image converted to an IplImage via CvBridge
        geometry_msgs::Twist velMsg;
        CvMemStorage* storage = cvCreateMemStorage(0);
        CvSeq* lines = 0;
        int i, c, d;
        float c1[50];
        float m, angle;
        float buf;
        float m1;
        float dis;
        int k = 0, k1 = 0;
        int count = 0;

        float xv;
        float yv;
        int vc = 0;
        float xvan = 0, yvan = 0;
        static float xvan1 = 0, yvan1 = 0;
        float num = 0;
        static float count1 = 0;
        float dv;
        float vxx, vyy;

        cvSetImageROI(img, cvRect(0, 0, 640, 480));  // assumes a 640x480 frame
        IplImage* out1 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 3);  // intermediate images for the smoothing/Canny pipeline
        IplImage* gray_out = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
        IplImage* canny_out = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
        IplImage* gray_out1 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 3);
        IplImage* canny_out1 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
        IplImage* canny_out2 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);

        IplImage* gray_out2 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);

        cvSmooth(img, out1, CV_GAUSSIAN, 11, 11);

        cvCvtColor(out1, gray_out, CV_RGB2GRAY);
        cvNot(gray_out, gray_out2);
        cvCanny(gray_out2, canny_out, 80, 125, 3);
        cvCvtColor(canny_out, gray_out1, CV_GRAY2BGR);

//      //////////////// Color Filtering //////////////
//
//      //IplImage *imgHsv,*imgResult;
//      int mR_val=255,mG_val=128,mB_val=64,MAR_val=128,MAG_val=0,MAB_val=0;//default green; ctrl BLUE to find color
//
//
//      IplImage* test=cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 3);
//      IplImage* imgResult=cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 3);
//      IplImage* imgHsv=cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 3);
//
//      cvCvtColor( test, imgHsv, CV_BGR2HSV);//convert the color space
//      CvScalar min_color = CV_RGB(mR_val,mG_val,mB_val);
//      CvScalar max_color = CV_RGB(MAR_val,MAG_val,MAB_val);
//      cvInRangeS(imgHsv, min_color,max_color, imgResult);//search for the color in image
//      //////////////// Color Filtering //////////////


        lines = cvHoughLines2(canny_out, storage, CV_HOUGH_PROBABILISTIC, 1,
                CV_PI / 180, 80, 50, 10);
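        // cvHoughLines2 with CV_HOUGH_PROBABILISTIC: distance resolution 1 px,
        // angle resolution 1 degree, accumulator threshold 80, minimum line
        // length 50 px, maximum gap 10 px between collinear segments.
        // Each returned element is a pair of CvPoint endpoints.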
        for (i = 0; i < lines->total; i++) {

            CvPoint* line = (CvPoint*) cvGetSeqElem(lines, i);
            cvLine(out1, line[0], line[1], CV_RGB(0, 255, 0), 1, 8);
            cvLine(gray_out1, line[0], line[1], CV_RGB(0, 255, 0), 2, 8);
            xv = line[0].x - line[1].x;
            yv = line[0].y - line[1].y;
//          velMsg.linear = atan2(xv, yv) * 180 / 3.14159265;
//          angle = velMsg.linear;
//          printf("test");
//          if (velMsg.linear < -90) {
//              printf("one");
//              velMsg.linear = velMsg.linear + 180;
//          }
//          buf = (line[0].x + line[1].x) / 2;
//
//          if (abs(85 - buf) <= 15) {
//              velMsg.angular = 0;
//              printf("two");
//          } else {
//              velMsg.angular = (85 - (line[0].x + line[1].x) / 2);
//              printf("three");
//          }

//          if (abs(velMsg.angular) > 50) {
//              velMsg.angular = 0;
//              printf("four");
//          }
// Modified
            angle = atan2(xv, yv) * 180 / 3.14159265;  // segment orientation in degrees, measured from the vertical image axis

            buf = (line[0].x + line[1].x) / 2;         // horizontal midpoint of the segment
            printf("line[0].x: %d", line[0].x);
            printf("line[1].x: %d", line[1].x);
            velMsg.linear.z = buf;   // pass the midpoint and angle through the
            velMsg.linear.y = angle; // otherwise-unset linear.y/linear.z fields

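            // Steering rule: if the segment's midpoint lies at or left of
            // left_threshold, turn left (positive angular.z); if it lies at or
            // right of right_threshold, turn right; otherwise drive straight.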
            if (fabs(buf) <= left_threshold) {          // turn left
                velMsg.angular.z = angular_ * a_scale_;
            }
            else if (fabs(buf) >= right_threshold) {    // turn right
                velMsg.angular.z = -angular_ * a_scale_;
            }
            else {
                velMsg.linear.x = linear_ * l_scale_;
            }

            printf("\nX::Y::X1::Y1::%d:%d:%d:%d", line[0].x, line[0].y,
                    line[1].x, line[1].y);

            pub.publish(velMsg);

            cvSeqRemove(lines, i);
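            // Note: removing element i while i still advances means the loop
            // effectively processes (and publishes for) every other detected
            // segment, since the remaining elements shift down by one.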

        }
        cvShowImage("OUT1", out1);         // detected lines projected onto the real picture
        cvShowImage("GRAY_OUT1", gray_out1);

        // Added display
//      cvShowImage("GRAY_OUT", gray_out);
//      cvShowImage("CANNY_OUT", canny_out);
//      cvShowImage("CANNY_OUT1", canny_out1);
//      cvShowImage("gray_out2", gray_out2);
//      cvShowImage("imgResult", imgResult);
        cv::waitKey(3);
        sensor_msgs::ImagePtr out = sensor_msgs::CvBridge::cvToImgMsg(img,
                "rgb8");  // image converted from OpenCV back to ROS for publishing
        image_pub_.publish(out);
        cvClearMemStorage(storage);
        cvReleaseMemStorage(&storage);
        cvReleaseImage(&out1);
        cvReleaseImage(&gray_out);
        cvReleaseImage(&canny_out);
        cvReleaseImage(&gray_out1);
        cvReleaseImage(&canny_out1);
        cvReleaseImage(&canny_out2);
        cvReleaseImage(&gray_out2);
    }

};

int main(int argc, char** argv) {
    ros::init(argc, argv, "image_converter");
    ImageConverter ic;
    ros::spin();
    return 0;
}