models.cpp
1 /*
2  * Copyright (C) 2015 Fetch Robotics Inc.
3  * Copyright (C) 2013-2014 Unbounded Robotics Inc.
4  *
5  * Licensed under the Apache License, Version 2.0 (the "License");
6  * you may not use this file except in compliance with the License.
7  * You may obtain a copy of the License at
8  *
9  * http://www.apache.org/licenses/LICENSE-2.0
10  *
11  * Unless required by applicable law or agreed to in writing, software
12  * distributed under the License is distributed on an "AS IS" BASIS,
13  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14  * See the License for the specific language governing permissions and
15  * limitations under the License.
16  */
17 
18 // Author: Michael Ferguson
19 
20 #include <ros/console.h>
21 #include <robot_calibration/models/chain.h>
22 #include <robot_calibration/models/camera3d.h>
23 
24 namespace robot_calibration
25 {
26 
27 double positionFromMsg(const std::string& name,
28  const sensor_msgs::JointState& msg)
29 {
30  for (size_t i = 0; i < msg.name.size(); ++i)
31  {
32  if (msg.name[i] == name)
33  return msg.position[i];
34  }
35 
36  std::cerr << "Unable to find " << name << " in sensor_msgs::JointState" << std::endl;
37 
38  return 0.0;
39 }
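A minimal usage sketch of positionFromMsg; the joint names and values are made up for illustration, and the header assumed to declare the function is a guess. The lookup returns the matching position entry, or 0.0 with a message on stderr when the joint is absent.

#include <sensor_msgs/JointState.h>
#include <robot_calibration/models/chain.h>  // assumed to declare positionFromMsg

int main()
{
  sensor_msgs::JointState msg;
  msg.name = {"shoulder_pan_joint", "elbow_flex_joint"};  // hypothetical joint names
  msg.position = {0.10, -1.25};

  // Matches the second entry and returns -1.25; an unknown name would print
  // to stderr and return 0.0, as in the function above.
  double q = robot_calibration::positionFromMsg("elbow_flex_joint", msg);
  (void)q;
  return 0;
}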
40 
41 ChainModel::ChainModel(const std::string& name, KDL::Tree model, std::string root, std::string tip) :
42  root_(root), tip_(tip), name_(name)
43 {
44  // Create a KDL::Chain
45  if (!model.getChain(root, tip, chain_))
46  {
47  auto error_msg = std::string{"Failed to build a chain model from "} + root + " to " + tip + ", check the link names";
48  ROS_ERROR("%s", error_msg.c_str());
49  throw std::runtime_error(error_msg);
50  }
51 }
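A construction sketch, assuming the URDF is available on the parameter server and kdl_parser is used to build the KDL::Tree; the node name and link names are placeholders. The constructor throws std::runtime_error if no chain connects root to tip.

#include <ros/ros.h>
#include <kdl_parser/kdl_parser.hpp>
#include <robot_calibration/models/chain.h>  // assumed to declare ChainModel

int main(int argc, char** argv)
{
  ros::init(argc, argv, "chain_model_example");

  KDL::Tree tree;
  if (!kdl_parser::treeFromParam("robot_description", tree))
  {
    ROS_FATAL("Failed to parse the URDF into a KDL::Tree");
    return -1;
  }

  // "base_link" and "wrist_roll_link" are placeholder link names
  robot_calibration::ChainModel arm("arm", tree, "base_link", "wrist_roll_link");
  return 0;
}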
52 
53 std::vector<geometry_msgs::PointStamped> ChainModel::project(
54  const robot_calibration_msgs::CalibrationData& data,
55  const CalibrationOffsetParser& offsets)
56 {
57  // Projected points, to be returned
58  std::vector<geometry_msgs::PointStamped> points;
59 
60  // Determine which observation to use
61  int sensor_idx = -1;
62  for (size_t obs = 0; obs < data.observations.size(); obs++)
63  {
64  if (data.observations[obs].sensor_name == name_)
65  {
66  sensor_idx = obs;
67  break;
68  }
69  }
70 
71  if (sensor_idx < 0)
72  {
73  // TODO: any sort of error message?
74  return points;
75  }
76 
77  // Resize to match # of features
78  points.resize(data.observations[sensor_idx].features.size());
79 
80  // Get the projection from forward kinematics of the robot chain
81  KDL::Frame fk = getChainFK(offsets, data.joint_states);
82 
83  // Project each individual point
84  for (size_t i = 0; i < points.size(); ++i)
85  {
86  points[i].header.frame_id = root_; // fk returns point in root_ frame
87 
88  KDL::Frame p(KDL::Frame::Identity());
89  p.p.x(data.observations[sensor_idx].features[i].point.x);
90  p.p.y(data.observations[sensor_idx].features[i].point.y);
91  p.p.z(data.observations[sensor_idx].features[i].point.z);
92 
93  // This is primarily for the case of checkerboards
94  // The observation is in "checkerboard" frame, but the tip of the
95  // kinematic chain is typically something like "wrist_roll_link".
96  if (data.observations[sensor_idx].features[i].header.frame_id != tip_)
97  {
98  KDL::Frame p2(KDL::Frame::Identity());
99  if (offsets.getFrame(data.observations[sensor_idx].features[i].header.frame_id, p2))
100  {
101  // We have to apply the frame offset before the FK projection
102  p = p2 * p;
103  }
104  }
105 
106  // Apply the FK projection
107  p = fk * p;
108 
109  points[i].point.x = p.p.x();
110  points[i].point.y = p.p.y();
111  points[i].point.z = p.p.z();
112  }
113 
114  return points;
115 }
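In summary, each returned point is the observed feature pushed through at most two transforms: the optional per-frame offset $T_{obs}$ (applied only when the feature is not already reported in the tip frame) and the chain forward kinematics $T_{fk}$ from getChainFK:

$$ p_{root} = T_{fk} \cdot T_{obs} \cdot p_{feature} $$

so every point in the returned vector is expressed in the root_ frame.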
116 
117 KDL::Frame ChainModel::getChainFK(const CalibrationOffsetParser& offsets,
118  const sensor_msgs::JointState& state)
119 {
120  // FK from root to tip
121  KDL::Frame p_out = KDL::Frame::Identity();
122 
123  // Step through joints
124  for (size_t i = 0; i < chain_.getNrOfSegments(); ++i)
125  {
126  std::string name = chain_.getSegment(i).getJoint().getName();
127  KDL::Frame correction = KDL::Frame::Identity();
128  offsets.getFrame(name, correction);
129 
130  KDL::Frame pose;
131  if (chain_.getSegment(i).getJoint().getType() != KDL::Joint::None)
132  {
133  // Apply any joint offset calibration
134  double p = positionFromMsg(name, state) + offsets.get(name);
135  pose = chain_.getSegment(i).pose(p);
136  }
137  else
138  {
139  pose = chain_.getSegment(i).pose(0.0);
140  }
141 
142  KDL::Frame totip = chain_.getSegment(i).getFrameToTip();
143 
144  // Apply any frame calibration on the joint <origin> frame
145  p_out = p_out * KDL::Frame(pose.p + totip.M * correction.p);
146  p_out = p_out * KDL::Frame(totip.M * correction.M * totip.M.Inverse() * pose.M);
147 
148  }
149  return p_out;
150 }
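The two composition lines at the end of the loop can be read as one per-segment update. Writing $\mathrm{Frame}(t)$ for a pure translation and $\mathrm{Frame}(R)$ for a pure rotation, with $R_{tip}$ the rotation of getFrameToTip(), $R_{corr}$, $t_{corr}$ the frame calibration for this joint, and $R_{pose}$, $t_{pose}$ the segment pose at the offset-corrected joint position, the running transform becomes:

$$ T_{out} \leftarrow T_{out} \cdot \mathrm{Frame}\big(t_{pose} + R_{tip}\, t_{corr}\big) \cdot \mathrm{Frame}\big(R_{tip}\, R_{corr}\, R_{tip}^{-1}\, R_{pose}\big) $$

The translational correction is rotated by $R_{tip}$ and the rotational correction is conjugated by it, which is what the comment above means by applying the frame calibration on the joint <origin> frame rather than at the segment tip.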
151 
152 std::string ChainModel::getName() const
153 {
154  return name_;
155 }
156 
157 std::string ChainModel::getType() const
158 {
159  return "ChainModel";
160 }
161 
162 Camera3dModel::Camera3dModel(const std::string& name, const std::string& param_name, KDL::Tree model, std::string root, std::string tip) :
163  ChainModel(name, model, root, tip),
164  param_name_(param_name)
165 {
166  // TODO add additional parameters for unprojecting observations using initial parameters
167 }
168 
169 std::vector<geometry_msgs::PointStamped> Camera3dModel::project(
170  const robot_calibration_msgs::CalibrationData& data,
171  const CalibrationOffsetParser& offsets)
172 {
173  std::vector<geometry_msgs::PointStamped> points;
174 
175  // Determine which observation to use
176  int sensor_idx = -1;
177  for (size_t obs = 0; obs < data.observations.size(); obs++)
178  {
179  if (data.observations[obs].sensor_name == name_)
180  {
181  sensor_idx = obs;
182  break;
183  }
184  }
185 
186  if (sensor_idx < 0)
187  {
188  // TODO: any sort of error message?
189  return points;
190  }
191 
192  // Get existing camera info
193  if (data.observations[sensor_idx].ext_camera_info.camera_info.P.size() != 12)
194  std::cerr << "Unexpected CameraInfo projection matrix size" << std::endl;
195 
196  double camera_fx = data.observations[sensor_idx].ext_camera_info.camera_info.P[CAMERA_INFO_P_FX_INDEX];
197  double camera_fy = data.observations[sensor_idx].ext_camera_info.camera_info.P[CAMERA_INFO_P_FY_INDEX];
198  double camera_cx = data.observations[sensor_idx].ext_camera_info.camera_info.P[CAMERA_INFO_P_CX_INDEX];
199  double camera_cy = data.observations[sensor_idx].ext_camera_info.camera_info.P[CAMERA_INFO_P_CY_INDEX];
200 
201  /*
202  * z_scale and z_offset defined in openni2_camera/src/openni2_driver.cpp
203  * new_depth_mm = (depth_mm + z_offset_mm) * z_scale
204  * NOTE: these work on integer values not floats
205  */
206  double z_offset = 0.0;
207  double z_scaling = 1.0;
208  for (size_t i = 0; i < data.observations[sensor_idx].ext_camera_info.parameters.size(); i++)
209  {
210  if (data.observations[sensor_idx].ext_camera_info.parameters[i].name == "z_scaling")
211  {
212  z_scaling = data.observations[sensor_idx].ext_camera_info.parameters[i].value;
213  }
214  else if (data.observations[sensor_idx].ext_camera_info.parameters[i].name == "z_offset_mm")
215  {
216  z_offset = data.observations[sensor_idx].ext_camera_info.parameters[i].value / 1000.0; // (mm -> m)
217  }
218  }
219 
220  // Get calibrated camera info
221  double new_camera_fx = camera_fx * (1.0 + offsets.get(param_name_ + "_fx"));
222  double new_camera_fy = camera_fy * (1.0 + offsets.get(param_name_ + "_fy"));
223  double new_camera_cx = camera_cx * (1.0 + offsets.get(param_name_ + "_cx"));
224  double new_camera_cy = camera_cy * (1.0 + offsets.get(param_name_ + "_cy"));
225  double new_z_offset = offsets.get(param_name_ + "_z_offset");
226  double new_z_scaling = 1.0 + offsets.get(param_name_ + "_z_scaling");
227 
228  points.resize(data.observations[sensor_idx].features.size());
229 
230  // Get position of camera frame
231  KDL::Frame fk = getChainFK(offsets, data.joint_states);
232 
233  for (size_t i = 0; i < points.size(); ++i)
234  {
235  // TODO: warn if frame_id != tip?
236  double x = data.observations[sensor_idx].features[i].point.x;
237  double y = data.observations[sensor_idx].features[i].point.y;
238  double z = data.observations[sensor_idx].features[i].point.z;
239 
240  // Unproject through parameters stored at runtime
241  double u = x * camera_fx / z + camera_cx;
242  double v = y * camera_fy / z + camera_cy;
243  double depth = z/z_scaling - z_offset;
244 
245  KDL::Frame pt(KDL::Frame::Identity());
246 
247  // Reproject through new calibrated parameters
248  pt.p.z((depth + new_z_offset) * new_z_scaling);
249  pt.p.x((u - new_camera_cx) * pt.p.z() / new_camera_fx);
250  pt.p.y((v - new_camera_cy) * pt.p.z() / new_camera_fy);
251 
252  // Project through fk
253  pt = fk * pt;
254 
255  points[i].point.x = pt.p.x();
256  points[i].point.y = pt.p.y();
257  points[i].point.z = pt.p.z();
258  points[i].header.frame_id = root_;
259  }
260 
261  return points;
262 }
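The per-point math above is the pinhole camera model run backwards and then forwards. With the original intrinsics $(f_x, f_y, c_x, c_y)$ and depth parameters $(s_z, z_0)$, each feature $(x, y, z)$ is first mapped back to pixel coordinates and raw depth:

$$ u = f_x \frac{x}{z} + c_x, \qquad v = f_y \frac{y}{z} + c_y, \qquad d = \frac{z}{s_z} - z_0 $$

and then re-projected through the calibrated parameters $(f_x', f_y', c_x', c_y', s_z', z_0')$:

$$ z' = (d + z_0')\, s_z', \qquad x' = \frac{(u - c_x')\, z'}{f_x'}, \qquad y' = \frac{(v - c_y')\, z'}{f_y'} $$

before the chain FK moves the result into the root_ frame.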
263 
264 std::string Camera3dModel::getType() const
265 {
266  return "Camera3dModel";
267 }
268 
269 KDL::Rotation rotation_from_axis_magnitude(const double x, const double y, const double z)
270 {
271  double magnitude = sqrt(x*x + y*y + z*z);
272 
273  if (magnitude == 0.0)
274  return KDL::Rotation::Quaternion(0.0, 0.0, 0.0, 1.0);
275 
276  return KDL::Rotation::Quaternion(x/magnitude * sin(magnitude/2.0),
277  y/magnitude * sin(magnitude/2.0),
278  z/magnitude * sin(magnitude/2.0),
279  cos(magnitude/2.0));
280 }
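This is the standard rotation-vector (axis times angle) to quaternion conversion: for $\mathbf{r} = (x, y, z)$ with $\theta = \|\mathbf{r}\|$ and unit axis $\hat{\mathbf{n}} = \mathbf{r}/\theta$,

$$ q = \big( \hat{n}_x \sin\tfrac{\theta}{2},\ \hat{n}_y \sin\tfrac{\theta}{2},\ \hat{n}_z \sin\tfrac{\theta}{2},\ \cos\tfrac{\theta}{2} \big) $$

with the identity quaternion returned for a zero-length vector.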
281 
282 void axis_magnitude_from_rotation(const KDL::Rotation& r, double& x, double& y, double& z)
283 {
284  double qx, qy, qz, qw;
285  r.GetQuaternion(qx, qy, qz, qw);
286 
287  if (qw == 1.0)
288  {
289  x = y = z = 0.0;
290  return;
291  }
292 
293  double magnitude = 2 * acos(qw);
294  double k = sqrt(1 - (qw*qw));
295 
296  x = (qx / k) * magnitude;
297  y = (qy / k) * magnitude;
298  z = (qz / k) * magnitude;
299 }
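A small round-trip sketch of the two helpers; the input vector is arbitrary and the header assumed to declare them is a guess. Converting a rotation vector to a KDL::Rotation and back should recover the original components, up to floating-point error and the qw == 1.0 identity shortcut.

#include <cstdio>
#include <kdl/frames.hpp>
#include <robot_calibration/models/chain.h>  // assumed to declare the conversion helpers

int main()
{
  // Arbitrary rotation vector (axis * angle): magnitude 0.3 rad about a tilted axis
  double x = 0.1, y = 0.2, z = 0.2;

  KDL::Rotation r = robot_calibration::rotation_from_axis_magnitude(x, y, z);

  double x2, y2, z2;
  robot_calibration::axis_magnitude_from_rotation(r, x2, y2, z2);

  // Expect x2, y2, z2 to be approximately 0.1, 0.2, 0.2
  std::printf("%f %f %f\n", x2, y2, z2);
  return 0;
}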
300 
301 } // namespace robot_calibration