projection_2D.cpp
/* Projection 2D
*/

#include <ucl_drone/map/projection_2D.h>

//#define PROJECTION_2D_2015  // uncomment to use the 2015 projection instead of the 2016 one

// This function prepares the structures needed to compute all point projections.
void projection2D_utils1(double h, const ucl_drone::Pose3D pose, cv::Mat &T, cv::Mat &n_cam)
{
  // Conversion: invert the pose angles to get the world-to-drone rotation
  double yaw = -pose.rotZ;
  double pitch = -pose.rotY;
  double roll = -pose.rotX;

  // compute the rotation matrix from the world to the drone
  cv::Mat world2drone = rollPitchYawToRotationMatrix(roll, pitch, yaw);
  // compute the rotation matrix from the drone to the camera
  cv::Mat drone2cam = rollPitchYawToRotationMatrix(PI, 0, -PI / 2);
  // compute the rotation matrix from the world to the camera
  T = drone2cam * world2drone;

  // ground normal vector in world coordinates
  cv::Mat n_world = (cv::Mat_< double >(3, 1) << 0.0, 0.0, 1.0);
  // ground normal vector in camera coordinates
  n_cam = T * n_world;
}
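/* Note (added for clarity): T maps world-frame vectors to the camera frame, so
   n_cam is the ground normal expressed in camera coordinates; its transpose
   T.t() is used below to bring camera-frame points back to world axes. */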
00028 
00029 // this function compute the ground projection for one point
00030 void projection2D_utils2(double pixelx, double pixely, double h, const cv::Mat &T,
00031                          const cv::Mat &n_cam, cv::Mat &p_world)
00032 {
00033   // compute point coordinates in calibrated image coordinates
00034   cv::Mat d = (cv::Mat_< double >(3, 1) << (pixelx - Read::img_center_x()) / Read::focal_length_x(),
00035                (pixely - Read::img_center_y()) / Read::focal_length_y(), 1);
00036 
00037   // see report 2016 for details section workspace transformation
00038   cv::Mat temp = n_cam.t() * d;
00039   double t = -h / (temp.at< double >(0, 0));
00040 
00041   cv::Mat p_cam = d * t;
00042   p_world = T.t() * p_cam;
00043 }
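/* Sketch of the underlying geometry (added; not in the original source): the
   back-projected ray through the pixel is p_cam = t * d, and the ground plane
   seen from a camera at height h satisfies n_cam . p_cam = -h. Substituting
   the ray into the plane equation gives n_cam . (t * d) = -h, hence
   t = -h / (n_cam . d), which is exactly the value computed above. */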

#ifndef PROJECTION_2D_2015

void projection_2D(std::vector< cv::Point2f > &points_in, ucl_drone::Pose3D &pose,
                   std::vector< cv::Point3f > &points_out, bool h_flag /*= false*/)
{
  cv::Mat T;
  cv::Mat n_cam;
  double h = pose.z;
  if (h_flag)  // useful when the drone is not flying: the altitude is not published
  {
    h = 0.73;
  }
  projection2D_utils1(h, pose, T, n_cam);
  points_out.resize(points_in.size());  // prepare the output structure
  for (unsigned i = 0; i < points_in.size(); i++)
  {
    cv::Mat p_world;
    projection2D_utils2(points_in[i].x, points_in[i].y, h, T, n_cam, p_world);
    points_out[i] = cv::Point3f((float)(p_world.at< double >(0, 0) + pose.x),
                                (float)(p_world.at< double >(1, 0) + pose.y),
                                (float)(p_world.at< double >(2, 0) + h));

    // ROS_DEBUG("POINT[%d] (%f,%f) ::: (%f,%f,%f)", i, points_in[i].x, points_in[i].y,
    //           points_out[i].x, points_out[i].y, points_out[i].z);
  }
}
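/* Minimal usage sketch (added; not in the original source). The pose is
   assumed to be filled in by the caller, e.g. from the pose estimation node:

     std::vector< cv::Point2f > image_points(1, cv::Point2f(320.0f, 180.0f));
     std::vector< cv::Point3f > ground_points;
     ucl_drone::Pose3D pose;  // assumed populated before the call
     projection_2D(image_points, pose, ground_points);  // h_flag defaults to false
*/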

#endif

#ifdef PROJECTION_2D_2015

#define MAP_WIDTH 640
#define MAP_HEIGHT 360
// Half of the total view angle for x and y respectively
#define FOV_X 0.2575
#define FOV_Y 0.465
// Measured 93cm along y axis and 51.5cm along x axis for height = 100cm
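/* Added note: under a small-angle approximation these values follow from the
   measurement above as half-width / height, i.e.
   FOV_X ~= (0.515 / 2) / 1.00 = 0.2575 rad and FOV_Y ~= (0.93 / 2) / 1.00 = 0.465 rad. */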

void projection_2D(std::vector< cv::Point2f > &points_in, ucl_drone::Pose3D &pose,
                   std::vector< cv::Point3f > &points_out, bool h_flag /*= false*/)

// boost::shared_ptr< ProcessedImage > image, ucl_drone::Pose3D pose,
// pcl::PointCloud< pcl::PointXYZRGB >::Ptr pointcloud)
{
  // see the 2015 report, section "workspace transformation", for details

  double altitude = 1;  // image->navdata.altd; // Altitude given by the vertical
                        // distance between the drone and the Cocard [in m]

  double theta = -(double)pose.rotY / 1000 / 180 * PI;  // Conversion from 10^-3 degrees to radians
  double phi = (double)pose.rotX / 1000 / 180 * PI;
  double theta_z = (double)pose.rotZ / 1000 / 180 * PI;
  double theta_x = cos(theta_z) * theta - sin(theta_z) * phi;
  double theta_y = -sin(theta_z) * theta - cos(theta_z) * phi;

  // Pixel projection on the image plane
  // Distance from the drone to the center of the image plane (= center on the ground)
  double plane_dist = altitude / cos(theta_x) / cos(theta_y);

  points_out.resize(points_in.size());

  for (unsigned i = 0; i < points_in.size(); i++)
  {
    double pixelx = points_in[i].x;
    double pixely = points_in[i].y;
    /* (x,y,z) is the relative position between the camera and the point on the image.
       !! Watch out, the x and y axes are switched between the image and the map:
       looking forwards is along positive y values on the image, as opposed to
       positive x values on the map. */
    double x = plane_dist *
               sin(((double)pixelx - (double)MAP_WIDTH / 2) / ((double)MAP_WIDTH / 2) * FOV_Y);
    double y = plane_dist *
               sin(-((double)pixely - (double)MAP_HEIGHT / 2) / ((double)MAP_HEIGHT / 2) * FOV_X);
    double z = 1 / cos(theta_x) / cos(theta_y);
    /* The plane_dist terms cancel out by division; however, if the depths of the
       points were not all the same, the results would change. plane_dist is
       therefore kept in case a 3D implementation is added at some point. */

    /* (dx,dy,dz) is the result of multiplying (x,y,z) by three rotation matrices,
       around the x, y and z axes respectively */
    double dx = cos(theta_y) * (sin(theta_z) * y + cos(theta_z) * x) - sin(theta_y) * z;
    double dy =
        sin(theta_x) * (cos(theta_y) * z + sin(theta_y) * (sin(theta_z) * y + cos(theta_z) * x)) +
        cos(theta_x) * (cos(theta_z) * y - sin(theta_z) * x);
    double dz =
        cos(theta_x) * (cos(theta_y) * z + sin(theta_y) * (sin(theta_z) * y + cos(theta_z) * x)) -
        sin(theta_x) * (cos(theta_z) * y - sin(theta_z) * x);

    // printf("dx = %f, dy = %f, dz = %f\n", dx, dy, dz);

    cv::Point3f point;
    point.x = pose.x + dx / dz;
    point.y = pose.y + dy / dz;
    point.z = 0;
    points_out[i] = point;
  }
}
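/* Added note: dividing dx and dy by dz intersects the rotated ray with the
   ground plane one altitude unit below the camera; since altitude is
   hard-coded to 1 m above, dx / dz and dy / dz come out directly in meters. */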

/* Function convert_imgpoint_to_mappoint
 *      @pre  : Takes as inputs the pixel positions on the image (pixelx, pixely)
 *              and the navdata.
 *      @post : Pointers to the output map positions (output_mapx, output_mapy).
 *      Converts a point from the image workspace to the map workspace.
 *      This requires geometric conversions based on rotation matrices as well as
 *      the calibrated values FOV_X and FOV_Y and the image dimensions (MAP_WIDTH
 *      and MAP_HEIGHT), all defined in the header (.h).
 */
void convert_imgpoint_to_mappoint(int pixelx, int pixely, double *output_mapx, double *output_mapy,
                                  navdata_struct navdata)
{
  double altitude = 1;  // navdata.altitude; // Altitude given by the vertical distance between the
                        // drone and the Cocard [in m]
  double theta =
      (double)navdata.Theta / 1000 / 180 * PI;  // Conversion from 10^-3 degrees to radians
  double phi = -(double)navdata.Phi / 1000 / 180 * PI;
  double theta_z = (double)navdata.Psi / 1000 / 180 * PI;
  double theta_x = cos(theta_z) * theta - sin(theta_z) * phi;
  double theta_y = -sin(theta_z) * theta - cos(theta_z) * phi;

  // Pixel projection on the image plane
  // Distance from the drone to the center of the image plane (= center on the ground)
  double plane_dist = altitude / cos(theta_x) / cos(theta_y);
  /* (x,y,z) is the relative position between the camera and the point on the image.
     !! Watch out, the x and y axes are switched between the image and the map:
     looking forwards is along positive y values on the image, as opposed to
     positive x values on the map. */
  double x =
      plane_dist * sin(((double)pixelx - (double)MAP_WIDTH / 2) / ((double)MAP_WIDTH / 2) * FOV_Y);
  double y = plane_dist *
             sin(-((double)pixely - (double)MAP_HEIGHT / 2) / ((double)MAP_HEIGHT / 2) * FOV_X);
  double z = 1 / cos(theta_x) / cos(theta_y);
  /* The plane_dist terms cancel out by division; however, if the depths of the
     points were not all the same, the results would change. plane_dist is
     therefore kept in case a 3D implementation is added at some point. */

  /* (dx,dy,dz) is the result of multiplying (x,y,z) by three rotation matrices,
     around the x, y and z axes respectively */
  double dx = cos(theta_y) * (sin(theta_z) * y + cos(theta_z) * x) - sin(theta_y) * z;
  double dy =
      sin(theta_x) * (cos(theta_y) * z + sin(theta_y) * (sin(theta_z) * y + cos(theta_z) * x)) +
      cos(theta_x) * (cos(theta_z) * y - sin(theta_z) * x);
  double dz =
      cos(theta_x) * (cos(theta_y) * z + sin(theta_y) * (sin(theta_z) * y + cos(theta_z) * x)) -
      sin(theta_x) * (cos(theta_z) * y - sin(theta_z) * x);

  *output_mapx = navdata.posx + dx / dz;  // Result is in meters
  *output_mapy = navdata.posy + dy / dz;
}
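/* Minimal usage sketch (added; not in the original source). The navdata_struct
   is assumed to be filled in by the caller, with Theta, Phi and Psi in
   10^-3 degrees and posx, posy in meters:

     navdata_struct navdata;  // assumed populated from the navdata topic
     double mapx, mapy;
     convert_imgpoint_to_mappoint(320, 180, &mapx, &mapy, navdata);
*/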

#endif

