models.cpp
/*
 * Copyright (C) 2015 Fetch Robotics Inc.
 * Copyright (C) 2013-2014 Unbounded Robotics Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Author: Michael Ferguson

#include <cmath>     // sqrt, sin, cos, acos (explicit include; assumed rather than relying on transitive includes)
#include <iostream>  // std::cerr
#include <robot_calibration/models/chain.h>
#include <robot_calibration/models/camera3d.h>

namespace robot_calibration
{

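// Look up the position of a single joint by name in a sensor_msgs::JointState.
// Returns 0.0 (and prints an error) if the joint is not present in the message.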
double positionFromMsg(const std::string& name,
                       const sensor_msgs::JointState& msg)
{
  for (size_t i = 0; i < msg.name.size(); ++i)
  {
    if (msg.name[i] == name)
      return msg.position[i];
  }

  std::cerr << "Unable to find " << name << " in sensor_msgs::JointState" << std::endl;

  return 0.0;
}

ChainModel::ChainModel(const std::string& name, KDL::Tree model, std::string root, std::string tip) :
    root_(root), tip_(tip), name_(name)
{
  // Create a KDL::Chain
  if (!model.getChain(root, tip, chain_))
    std::cerr << "Failed to get chain" << std::endl;
}

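// Project the observed features for this sensor into the root_ frame, applying
// any joint and frame calibration offsets through the chain forward kinematics.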
std::vector<geometry_msgs::PointStamped> ChainModel::project(
    const robot_calibration_msgs::CalibrationData& data,
    const CalibrationOffsetParser& offsets)
{
  std::vector<geometry_msgs::PointStamped> points;

  // Determine which observation to use
  int sensor_idx = -1;
  for (size_t obs = 0; obs < data.observations.size(); obs++)
  {
    if (data.observations[obs].sensor_name == name_)
    {
      sensor_idx = obs;
      break;
    }
  }

  if (sensor_idx < 0)
  {
    // TODO: any sort of error message?
    return points;
  }

  // Resize to match # of features
  points.resize(data.observations[sensor_idx].features.size());

  KDL::Frame fk = getChainFK(offsets, data.joint_states);

  for (size_t i = 0; i < points.size(); ++i)
  {
    points[i].header.frame_id = root_;  // fk returns point in root_ frame

    KDL::Frame p(KDL::Frame::Identity());
    p.p.x(data.observations[sensor_idx].features[i].point.x);
    p.p.y(data.observations[sensor_idx].features[i].point.y);
    p.p.z(data.observations[sensor_idx].features[i].point.z);

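    // If the feature was reported in some other frame (for instance a calibrated
    // free frame such as a checkerboard), apply that frame's estimated transform
    // to bring the point into the tip_ frame before applying the chain FK.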
    if (data.observations[sensor_idx].features[i].header.frame_id != tip_)
    {
      KDL::Frame p2(KDL::Frame::Identity());
      if (offsets.getFrame(data.observations[sensor_idx].features[i].header.frame_id, p2))
      {
        p = p2 * p;
      }
    }

    p = fk * p;

    points[i].point.x = p.p.x();
    points[i].point.y = p.p.y();
    points[i].point.z = p.p.z();
  }

  return points;
}

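// Compute the pose of tip_ relative to root_ by stepping through each segment
// of the chain, applying any calibrated frame corrections and joint offsets.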
KDL::Frame ChainModel::getChainFK(const CalibrationOffsetParser& offsets,
                                  const sensor_msgs::JointState& state)
{
  // FK from root to tip
  KDL::Frame p_out = KDL::Frame::Identity();

  // Step through joints
  for (size_t i = 0; i < chain_.getNrOfSegments(); ++i)
  {
    std::string name = chain_.getSegment(i).getJoint().getName();
    KDL::Frame correction;

    // Apply any frame calibration
    if (offsets.getFrame(name, correction))
      p_out = p_out * correction;

    // Apply any joint offset calibration
    if (chain_.getSegment(i).getJoint().getType() != KDL::Joint::None)
    {
      double p = positionFromMsg(name, state) + offsets.get(name);
      p_out = p_out * chain_.getSegment(i).pose(p);
    }
    else
    {
      p_out = p_out * chain_.getSegment(i).pose(0.0);
    }
  }
  return p_out;
}

Camera3dModel::Camera3dModel(const std::string& name, KDL::Tree model, std::string root, std::string tip) :
    ChainModel(name, model, root, tip)
{
  // TODO add additional parameters for unprojecting observations using initial parameters
}

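// Project observed 3d camera points into the root_ frame: recover the pixel
// coordinates and raw depth using the intrinsics in effect when the cloud was
// generated, re-project using the calibrated intrinsics and depth correction,
// then transform through the chain forward kinematics.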
std::vector<geometry_msgs::PointStamped> Camera3dModel::project(
    const robot_calibration_msgs::CalibrationData& data,
    const CalibrationOffsetParser& offsets)
{
  std::vector<geometry_msgs::PointStamped> points;

  // Determine which observation to use
  int sensor_idx = -1;
  for (size_t obs = 0; obs < data.observations.size(); obs++)
  {
    if (data.observations[obs].sensor_name == name_)
    {
      sensor_idx = obs;
      break;
    }
  }

  if (sensor_idx < 0)
  {
    // TODO: any sort of error message?
    return points;
  }

  // Get existing camera info
  if (data.observations[sensor_idx].ext_camera_info.camera_info.P.size() != 12)
    std::cerr << "Unexpected CameraInfo projection matrix size" << std::endl;

  double camera_fx = data.observations[sensor_idx].ext_camera_info.camera_info.P[CAMERA_INFO_P_FX_INDEX];
  double camera_fy = data.observations[sensor_idx].ext_camera_info.camera_info.P[CAMERA_INFO_P_FY_INDEX];
  double camera_cx = data.observations[sensor_idx].ext_camera_info.camera_info.P[CAMERA_INFO_P_CX_INDEX];
  double camera_cy = data.observations[sensor_idx].ext_camera_info.camera_info.P[CAMERA_INFO_P_CY_INDEX];

  /*
   * z_scale and z_offset defined in openni2_camera/src/openni2_driver.cpp
   * new_depth_mm = (depth_mm + z_offset_mm) * z_scale
   * NOTE: these work on integer values not floats
   */
  double z_offset = 0.0;
  double z_scaling = 1.0;
  for (size_t i = 0; i < data.observations[sensor_idx].ext_camera_info.parameters.size(); i++)
  {
    if (data.observations[sensor_idx].ext_camera_info.parameters[i].name == "z_scaling")
    {
      z_scaling = data.observations[sensor_idx].ext_camera_info.parameters[i].value;
    }
    else if (data.observations[sensor_idx].ext_camera_info.parameters[i].name == "z_offset_mm")
    {
      z_offset = data.observations[sensor_idx].ext_camera_info.parameters[i].value / 1000.0;  // (mm -> m)
    }
  }

  // Get calibrated camera info
  double new_camera_fx = camera_fx * (1.0 + offsets.get(name_+"_fx"));
  double new_camera_fy = camera_fy * (1.0 + offsets.get(name_+"_fy"));
  double new_camera_cx = camera_cx * (1.0 + offsets.get(name_+"_cx"));
  double new_camera_cy = camera_cy * (1.0 + offsets.get(name_+"_cy"));
  double new_z_offset = offsets.get(name_+"_z_offset");
  double new_z_scaling = 1.0 + offsets.get(name_+"_z_scaling");

  points.resize(data.observations[sensor_idx].features.size());

  // Get position of camera frame
  KDL::Frame fk = getChainFK(offsets, data.joint_states);

  for (size_t i = 0; i < points.size(); ++i)
  {
    // TODO: warn if frame_id != tip?
    double x = data.observations[sensor_idx].features[i].point.x;
    double y = data.observations[sensor_idx].features[i].point.y;
    double z = data.observations[sensor_idx].features[i].point.z;

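    // Pinhole model: u = fx * x / z + cx, v = fy * y / z + cy; the depth is
    // mapped back through the driver's z_scaling / z_offset to its raw value.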
    // Unproject through parameters stored at runtime
    double u = x * camera_fx / z + camera_cx;
    double v = y * camera_fy / z + camera_cy;
    double depth = z/z_scaling - z_offset;

    KDL::Frame pt(KDL::Frame::Identity());

    // Reproject through new calibrated parameters
    pt.p.z((depth + new_z_offset) * new_z_scaling);
    pt.p.x((u - new_camera_cx) * pt.p.z() / new_camera_fx);
    pt.p.y((v - new_camera_cy) * pt.p.z() / new_camera_fy);

    // Project through fk
    pt = fk * pt;

    points[i].point.x = pt.p.x();
    points[i].point.y = pt.p.y();
    points[i].point.z = pt.p.z();
    points[i].header.frame_id = root_;
  }

  return points;
}

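// Convert an axis-magnitude rotation (a rotation vector whose norm is the angle
// in radians) to a KDL::Rotation. A zero vector maps to the identity rotation.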
KDL::Rotation rotation_from_axis_magnitude(const double x, const double y, const double z)
{
  double magnitude = sqrt(x*x + y*y + z*z);

  if (magnitude == 0.0)
    return KDL::Rotation::Quaternion(0.0, 0.0, 0.0, 1.0);

  return KDL::Rotation::Quaternion(x/magnitude * sin(magnitude/2.0),
                                   y/magnitude * sin(magnitude/2.0),
                                   z/magnitude * sin(magnitude/2.0),
                                   cos(magnitude/2.0));
}

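// Inverse of rotation_from_axis_magnitude: recover the rotation vector from a
// KDL::Rotation. An identity quaternion maps to the zero vector.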
void axis_magnitude_from_rotation(const KDL::Rotation& r, double& x, double& y, double& z)
{
  double qx, qy, qz, qw;
  r.GetQuaternion(qx, qy, qz, qw);

  if (qw == 1.0)
  {
    x = y = z = 0.0;
    return;
  }

  double magnitude = 2 * acos(qw);
  double k = sqrt(1 - (qw*qw));

  x = (qx / k) * magnitude;
  y = (qy / k) * magnitude;
  z = (qz / k) * magnitude;
}

}  // namespace robot_calibration

