mono_camera.cpp
#include "ensenso_camera/mono_camera.h"

MonoCamera::MonoCamera(ensenso::ros::NodeHandle& nh, CameraParameters params) : Camera(nh, std::move(params))
{
  cameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();
  rectifiedCameraInfo = ensenso::std::make_shared<sensor_msgs::msg::CameraInfo>();

  requestDataServer = MAKE_MONO_SERVER(MonoCamera, RequestData, "request_data");
  locatePatternServer = MAKE_MONO_SERVER(MonoCamera, LocatePattern, "locate_pattern");
}

void MonoCamera::advertiseTopics()
{
  // Assumed topic names for the raw and rectified image streams.
  rawImagePublisher = create_camera_publisher(nh, "raw/image");
  rectifiedImagePublisher = create_camera_publisher(nh, "rectified/image");
}

void MonoCamera::init()
{
  advertiseTopics();
  startServers();
  initTfPublishTimer();
  initStatusTimer();
}

void MonoCamera::startServers() const
{
  Camera::startServers();
  requestDataServer->start();
  locatePatternServer->start();
}

ensenso::ros::Time MonoCamera::capture() const
{
  ENSENSO_DEBUG(nh, "Capturing an image...");

  NxLibCommand capture(cmdCapture, params.serial);
  capture.parameters()[itmCameras] = params.serial;
  capture.execute();

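  // The raw image node may hold a single image or an array of images; in the latter case we use the first entry.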
  NxLibItem imageNode = cameraNode[itmImages][itmRaw];
  if (imageNode.isArray())
  {
    imageNode = imageNode[0];
  }

  if (params.isFileCamera)
  {
    // This workaround is needed because the timestamp of captures from a file camera does not change over time.
    // Looking up the current tf tree with that timestamp results in errors, because it refers to a time in the past
    // (usually further back than the tf buffer stores transforms).
    return ensenso::ros::now(nh);
  }

  return timestampFromNxLibNode(imageNode);
}

void MonoCamera::updateCameraInfo()
{
  fillCameraInfoFromNxLib(cameraInfo);
  fillCameraInfoFromNxLib(rectifiedCameraInfo, true);
}

void MonoCamera::fillCameraInfoFromNxLib(sensor_msgs::msg::CameraInfoPtr const& info, bool rectified)
{
  fillBasicCameraInfoFromNxLib(info);

  NxLibItem calibrationNode = cameraNode[itmCalibration];

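  // Rectified images are free of distortion, so their distortion coefficients are all zero. For raw images the
  // distortion model is taken from the camera's calibration node.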
  if (rectified)
  {
    GET_D_MATRIX(info).resize(5, 0.);
  }
  else
  {
    fillDistortionParamsFromNxLib(calibrationNode[itmDistortion], info);
  }

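  // The camera matrix in the NxLib calibration node is indexed column first. Copy it into the row-major K matrix,
  // reuse it for the projection matrix P (a mono camera has no baseline, so the fourth column stays zero) and set
  // R to the identity.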
  for (int row = 0; row < 3; row++)
  {
    for (int column = 0; column < 3; column++)
    {
      GET_K_MATRIX(info)[3 * row + column] = calibrationNode[itmCamera][column][row].asDouble();
      GET_P_MATRIX(info)[4 * row + column] = GET_K_MATRIX(info)[3 * row + column];
      if (row == column)
      {
        GET_R_MATRIX(info)[3 * row + column] = 1.0f;
      }
    }
  }
}

void MonoCamera::onRequestData(ensenso::action::RequestDataMonoGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(RequestDataMono, requestDataServer)

  ensenso::action::RequestDataMonoResult result;
  ensenso::action::RequestDataMonoFeedback feedback;

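  // If the goal neither requests publishing nor including the results in the response, publish them by default.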
  bool publishResults = goal->publish_results;
  if (!goal->publish_results && !goal->include_results_in_response)
  {
    publishResults = true;
  }

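  // If neither raw nor rectified images are requested explicitly, default to rectified images.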
  bool requestRectified = goal->request_rectified_images;
  if (!goal->request_rectified_images && !goal->request_raw_images)
  {
    requestRectified = true;
  }

  capture();

  PREEMPT_ACTION_IF_REQUESTED

  feedback.images_acquired = true;
  requestDataServer->publishFeedback(feedback);

  if (goal->request_raw_images)
  {
    auto rawImages = imagesFromNxLibNode(cameraNode[itmImages][itmRaw], params.cameraFrame, params.isFileCamera);
    cameraInfo->header.stamp = rawImages[0]->header.stamp;
    cameraInfo->header.frame_id = params.cameraFrame;
    if (goal->include_results_in_response)
    {
      for (auto const& image : rawImages)
      {
        result.raw_images.push_back(*image);
      }
      result.camera_info = *cameraInfo;
    }
    if (publishResults)
    {
      rawImagePublisher.publish(rawImages[0], cameraInfo);
    }
  }

  PREEMPT_ACTION_IF_REQUESTED

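  // Rectify the captured raw images with the camera's calibration before returning or publishing them.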
  if (requestRectified)
  {
    NxLibCommand rectify(cmdRectifyImages, params.serial);
    rectify.parameters()[itmCameras] = params.serial;
    rectify.execute();

    auto rectifiedImages =
        imagesFromNxLibNode(cameraNode[itmImages][itmRectified], params.cameraFrame, params.isFileCamera);
    rectifiedCameraInfo->header.stamp = rectifiedImages[0]->header.stamp;
    rectifiedCameraInfo->header.frame_id = params.cameraFrame;
    if (goal->include_results_in_response)
    {
      for (auto const& image : rectifiedImages)
      {
        result.rectified_images.push_back(*image);
      }
      result.rectified_camera_info = *rectifiedCameraInfo;
    }
    if (publishResults)
    {
      rectifiedImagePublisher.publish(rectifiedImages[0], rectifiedCameraInfo);
    }
  }

  requestDataServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(RequestDataMono)
}

void MonoCamera::onSetParameter(ensenso::action::SetParameterGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(SetParameter, setParameterServer)

  ensenso::action::SetParameterResult result;

  loadParameterSet(goal->parameter_set);

  result.parameter_file_applied = false;
  if (!goal->parameter_file.empty())
  {
    result.parameter_file_applied = loadSettings(goal->parameter_file);
    if (!result.parameter_file_applied)
    {
      server->setAborted(std::move(result));
      return;
    }
  }

  for (auto const& parameter : goal->parameters)
  {
    writeParameter(parameter);
  }

  // Synchronize to make sure that we read back the correct values.
  NxLibCommand synchronize(cmdSynchronize, params.serial);
  synchronize.parameters()[itmCameras] = params.serial;
  synchronize.execute();

  saveParameterSet(goal->parameter_set);

  // Calibration could have changed after setting new parameters.
  updateCameraInfo();

  // Read back the actual values.
  for (auto const& parameter : goal->parameters)
  {
    result.results.push_back(*readParameter(parameter.key));
  }

  setParameterServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(SetParameter)
}

void MonoCamera::onLocatePattern(ensenso::action::LocatePatternMonoGoalConstPtr const& goal)
{
  START_NXLIB_ACTION(LocatePatternMono, locatePatternServer)

  ensenso::action::LocatePatternMonoResult result;
  ensenso::action::LocatePatternMonoFeedback feedback;

  loadParameterSet(goal->parameter_set);

  int numberOfShots = goal->number_of_shots;
  if (numberOfShots < 1)
  {
    numberOfShots = 1;
  }

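  // Capture the requested number of images and collect the calibration pattern from each of them.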
  std::vector<MonoCalibrationPattern> patterns;
  ensenso::ros::Time imageTimestamp;
  for (int i = 0; i < numberOfShots; i++)
  {
    PREEMPT_ACTION_IF_REQUESTED

    ensenso::ros::Time timestamp = capture();
    if (i == 0)
      imageTimestamp = timestamp;

    PREEMPT_ACTION_IF_REQUESTED

    if (i == (numberOfShots - 1))
    {
      // The last image has been captured.
      feedback.images_acquired = true;
      locatePatternServer->publishFeedback(feedback);
    }

    bool clearBuffer = (i == 0);
    patterns = collectPattern(clearBuffer);
    if (patterns.empty())
    {
      result.found_pattern = false;
      locatePatternServer->setSucceeded(std::move(result));
      return;
    }

    if (patterns.size() > 1)
    {
      // We cannot average over multiple shots when multiple patterns are visible. Stop capturing and estimate the
      // pose of each pattern individually.
      break;
    }
  }

  result.found_pattern = true;
  result.mono_patterns.resize(patterns.size());
  for (size_t i = 0; i < patterns.size(); i++)
  {
    patterns[i].writeToMessage(result.mono_patterns[i]);
  }

  PREEMPT_ACTION_IF_REQUESTED

  std::string patternFrame = params.targetFrame;
  if (!goal->target_frame.empty())
  {
    patternFrame = goal->target_frame;
  }

  result.frame = patternFrame;

  if (patterns.size() > 1)
  {
    // Estimate the pose of all the patterns we found.
    auto patternPoses = estimatePatternPoses(imageTimestamp, patternFrame);

    result.mono_pattern_poses.resize(patternPoses.size());
    for (size_t i = 0; i < patternPoses.size(); i++)
    {
      result.mono_pattern_poses[i] = patternPoses[i];
    }
  }
  else
  {
    // Estimate the pose of a single pattern, averaging over the different shots.
    auto patternPose = estimatePatternPose(imageTimestamp, patternFrame);

    result.mono_pattern_poses.resize(1);
    result.mono_pattern_poses[0] = patternPose;
  }

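  // Optionally publish the estimated pattern pose in tf. This is only possible when exactly one pattern was found.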
  if (!goal->tf_frame.empty())
  {
    if (patterns.size() == 1)
    {
      geometry_msgs::msg::TransformStamped transform = transformFromPose(result.mono_pattern_poses[0], goal->tf_frame);
      transformBroadcaster->sendTransform(transform);
    }
    else
    {
      ENSENSO_WARN(nh, "Cannot publish the pattern pose in tf, because there are multiple patterns!");
    }
  }

  locatePatternServer->setSucceeded(std::move(result));

  FINISH_NXLIB_ACTION(LocatePatternMono)
}

std::vector<MonoCalibrationPattern> MonoCamera::collectPattern(bool clearBuffer) const
{
  if (clearBuffer)
  {
    NxLibCommand(cmdDiscardPatterns, params.serial).execute();
  }

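  // Search for calibration patterns in the most recent capture. A missing or undecodable pattern is not treated as
  // an error; it simply yields an empty result below.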
  NxLibCommand collectPattern(cmdCollectPattern, params.serial);
  collectPattern.parameters()[itmCameras] = params.serial;
  collectPattern.parameters()[itmDecodeData] = true;
  try
  {
    collectPattern.execute();
  }
  catch (NxLibException& e)
  {
    if (e.getErrorCode() == NxLibExecutionFailed)
    {
      if (collectPattern.result()[itmErrorSymbol] == errPatternNotFound ||
          collectPattern.result()[itmErrorSymbol] == errPatternNotDecodable)
      {
        return {};
      }
    }
    throw;
  }

  std::vector<MonoCalibrationPattern> result;

  for (int i = 0; i < collectPattern.result()[itmPatterns][0][params.serial].count(); i++)
  {
    result.emplace_back(collectPattern.result()[itmPatterns][0][params.serial][i][itmPattern]);
  }

  // Extract the pattern's image points from the result.
  NxLibItem pattern = collectPattern.result()[itmPatterns][0][params.serial];

  for (size_t i = 0; i < result.size(); i++)
  {
    for (int j = 0; j < pattern[i][itmPoints].count(); j++)
    {
      NxLibItem pointNode = pattern[i][itmPoints][j];

      ensenso::msg::ImagePoint point;
      point.x = pointNode[0].asDouble();
      point.y = pointNode[1].asDouble();
      result[i].points.push_back(point);
    }
  }

  return result;
}

geometry_msgs::msg::PoseStamped MonoCamera::estimatePatternPose(ensenso::ros::Time imageTimestamp,
                                                                std::string const& targetFrame,
                                                                bool latestPatternOnly) const
{
  updateGlobalLink(imageTimestamp, targetFrame);

  NxLibCommand estimatePatternPose(cmdEstimatePatternPose, params.serial);
  estimatePatternPose.parameters()[itmType] = itmMonocular;
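  // Either restrict the estimation to the most recently collected pattern or average over all patterns in the
  // pattern buffer.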
  if (latestPatternOnly)
  {
    estimatePatternPose.parameters()[itmAverage] = false;

    int patternCount = NxLibItem()[itmParameters][itmPatternCount].asInt();
    estimatePatternPose.parameters()[itmFilter][itmOr][0][itmAnd][0][itmType] = valIndex;
    estimatePatternPose.parameters()[itmFilter][itmOr][0][itmAnd][0][itmValue] = patternCount - 1;
  }
  else
  {
    estimatePatternPose.parameters()[itmAverage] = true;
  }
  estimatePatternPose.execute();

  auto patterns = estimatePatternPose.result()[itmPatterns];
  ENSENSO_ASSERT(patterns.count() == 1);

  return stampedPoseFromNxLib(patterns[0][itmPatternPose], targetFrame, imageTimestamp);
}

std::vector<geometry_msgs::msg::PoseStamped> MonoCamera::estimatePatternPoses(ensenso::ros::Time imageTimestamp,
                                                                              std::string const& targetFrame) const
{
  updateGlobalLink(imageTimestamp, targetFrame);

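  // Estimate the pose of every pattern in the buffer individually (no averaging), considering only patterns that
  // were collected by this camera.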
  NxLibCommand estimatePatternPose(cmdEstimatePatternPose, params.serial);
  estimatePatternPose.parameters()[itmAverage] = false;
  estimatePatternPose.parameters()[itmType] = itmMonocular;
  estimatePatternPose.parameters()[itmFilter][itmCameras] = params.serial;

  estimatePatternPose.execute();

  auto patterns = estimatePatternPose.result()[itmPatterns];

  std::vector<geometry_msgs::msg::PoseStamped> result;
  result.reserve(patterns.count());

  for (int i = 0; i < patterns.count(); i++)
  {
    result.push_back(stampedPoseFromNxLib(patterns[i][itmPatternPose], targetFrame, imageTimestamp));
  }

  return result;
}