unit-tests-post-processing-from-bag.cpp
Go to the documentation of this file.
1 // License: Apache 2.0. See LICENSE file in root directory.
2 // Copyright(c) 2018 Intel Corporation. All Rights Reserved.
3 
5 // This set of tests is valid for any number and combination of RealSense cameras, including R200 and F200 //
7 #include <cmath>
8 #include "unit-tests-common.h"
9 #include "../include/librealsense2/rs_advanced_mode.hpp"
11 #include <iostream>
12 #include <chrono>
13 #include <ctime>
14 #include <algorithm>
15 #include <librealsense2/rsutil.h>
16 
17 # define SECTION_FROM_TEST_NAME space_to_underscore(Catch::getCurrentContext().getResultCapture()->getCurrentTestName()).c_str()
18 
19 typedef struct _sw_context
20 {
21 
23  std::map<std::string, std::shared_ptr<rs2::software_sensor>> sw_sensors;
24  std::map<std::string, rs2::syncer> sw_syncers;
25  std::map<std::string, std::map<rs2_stream, rs2::stream_profile>> sw_stream_profiles;
26 } sw_context;
27 
29 {
30  uint32_t data_size = f.get_width() * f.get_bytes_per_pixel() * f.get_height();
31  uint8_t* data = new uint8_t[data_size];
32  std::memcpy(data, f.get_data(), data_size);
33  rs2_software_video_frame new_frame = {
34  (void*)data,
35  [](void* data) { delete[] (uint8_t*)data; },
38  f.get_timestamp(),
40  static_cast<int>(f.get_frame_number()),
41  profile
42  };
43  return new_frame;
44 }
45 
47 {
48 public:
49  virtual void record(std::string sensor_name, const rs2::frame& frame, sw_context sctx) = 0;
50  virtual rs2::frame process(const rs2::frame& frame) = 0;
51 };
52 
54 {
55 public:
56  align_record_block(rs2_stream align_to, rs2_stream align_from) : _align(align_to), _align_from(align_from) {}
57  virtual void record(std::string sensor_name, const rs2::frame& frame, sw_context sctx) override
58  {
59  auto ss = sctx.sw_sensors[sensor_name];
60  ss->on_video_frame(create_sw_frame(frame, sctx.sw_stream_profiles[sensor_name][_align_from]));
61  ss->stop();
62  ss->close();
63  }
64  virtual rs2::frame process(const rs2::frame& frame) override
65  {
66  auto fs = frame.as<rs2::frameset>();
67  return (_align.process(fs)).first_or_default(_align_from);
68  }
69 
70 private:
73 
74 };
75 
77 {
78 public:
80  virtual void record(std::string sensor_name, const rs2::frame& frame, sw_context sctx) override
81  {
83  const int points_bpp = 20;
84  uint32_t data_size = profile.width() * points_bpp * profile.height();
85  uint8_t* data = new uint8_t[data_size];
86  std::memcpy(data, frame.get_data(), data_size);
87  rs2_software_video_frame points_frame = {
88  (void*)data,
89  [](void* data) { delete[] (uint8_t*)data; },
90  profile.width() * points_bpp,
91  points_bpp,
92  frame.get_timestamp(),
94  static_cast<int>(frame.get_frame_number()),
95  sctx.sw_stream_profiles[sensor_name][RS2_STREAM_DEPTH]
96  };
97 
98  auto ss = sctx.sw_sensors[sensor_name];
99  ss->on_video_frame(points_frame);
100  ss->stop();
101  ss->close();
102  }
103  virtual rs2::frame process(const rs2::frame& frame) override
104  {
105  auto fs = frame.as<rs2::frameset>();
106  return _pc.calculate(fs.get_depth_frame());
107  }
108 
109 private:
111 };
112 
114 {
115  std::string dres = std::to_string(d.width()) + "x" + std::to_string(d.height());
116  std::string cres = std::to_string(c.width()) + "x" + std::to_string(c.height());
117  std::string name = "depth_" + dres + "_color_" + std::to_string(c.format()) + "_" + cres;
118  return name;
119 }
120 
// Register a video stream on the software sensor that mirrors an existing
// profile (type, index, uid, resolution, fps, format, intrinsics), and return
// the profile handle the software sensor created for it.
rs2::stream_profile init_stream_profile(std::shared_ptr<rs2::software_sensor> ss, rs2::video_stream_profile stream_profile)
{
    rs2_video_stream new_stream = {
        stream_profile.stream_type(),
        stream_profile.stream_index(),
        stream_profile.unique_id(),
        stream_profile.width(),
        stream_profile.height(),
        stream_profile.fps(),
        stream_profile.height(),  // NOTE(review): this slot is rs2_video_stream::bpp — passing height() looks wrong; confirm intent
        stream_profile.format(),
        stream_profile.get_intrinsics()
    };

    return ss->add_video_stream(new_stream);
}
137 
138 std::vector<rs2::stream_profile> init_stream_profiles(sw_context& sctx, std::shared_ptr<rs2::software_sensor> ss, std::string sensor_name, rs2::video_stream_profile c, rs2::video_stream_profile d)
139 {
140  sctx.sw_stream_profiles[sensor_name][RS2_STREAM_DEPTH] = init_stream_profile(ss, d);
141  sctx.sw_stream_profiles[sensor_name][RS2_STREAM_COLOR] = init_stream_profile(ss, c);
142  std::vector<rs2::stream_profile> profiles = {
143  sctx.sw_stream_profiles[sensor_name][RS2_STREAM_DEPTH],
144  sctx.sw_stream_profiles[sensor_name][RS2_STREAM_COLOR]
145  };
146  return profiles;
147 }
148 
149 sw_context init_sw_device(std::vector<rs2::video_stream_profile> depth_profiles,
150  std::vector<rs2::video_stream_profile> color_profiles, float depth_units)
151 {
152  sw_context sctx;
153  for (auto depth_profile : depth_profiles)
154  {
155  for (auto color_profile : color_profiles)
156  {
157  if (depth_profile.width() == color_profile.width() && depth_profile.height() == color_profile.height())
158  continue;
159 
161  auto sensor = std::make_shared<rs2::software_sensor>(sctx.sdev.add_sensor(name));
162 
163  sensor->add_read_only_option(RS2_OPTION_DEPTH_UNITS, depth_units);
164 
167  sensor->start(sync);
168 
169  sctx.sw_sensors[name] = sensor;
170  sctx.sw_syncers[name] = sync;
171  }
172  }
173  return sctx;
174 }
175 
177 {
178  auto ss = sctx.sw_sensors[sensor_name];
181  ss->stop();
182  ss->close();
183 }
184 
185 std::vector<rs2::frameset> get_composite_frames(std::vector<rs2::sensor> sensors)
186 {
187  std::vector<rs2::frameset> composite_frames;
188 
189  std::vector<rs2::frame> frames;
190  std::mutex frame_processor_lock;
192  {
193  std::lock_guard<std::mutex> lock(frame_processor_lock);
194  frames.push_back(data);
195  if (frames.size() == 2)
196  {
197  source.frame_ready(source.allocate_composite_frame(frames));
198  frames.clear();
199  }
200  });
201 
202  rs2::frame_queue postprocessed_frames;
203  frame_processor >> postprocessed_frames;
204 
205  for (auto s : sensors)
206  {
207  s.open(s.get_stream_profiles());
208  }
209 
210  for (auto s : sensors)
211  {
212  s.start([&](rs2::frame f)
213  {
214  frame_processor.invoke(f);
215  });
216  }
217 
218  while (composite_frames.size() < sensors.size())
219  {
220  rs2::frameset composite_fs;
221  if (postprocessed_frames.try_wait_for_frame(&composite_fs))
222  {
223  composite_fs.keep();
224  composite_frames.push_back(composite_fs);
225  }
226  }
227 
228  return composite_frames;
229 }
230 
231 std::vector<rs2::frame> get_frames(std::vector<rs2::sensor> sensors)
232 {
233  std::vector<rs2::frame> frames;
234  std::mutex frames_lock;
235 
236  for (auto s : sensors)
237  {
238  s.open(s.get_stream_profiles());
239  }
240 
241  for (auto s : sensors)
242  {
243  s.start([&](rs2::frame f)
244  {
245  std::lock_guard<std::mutex> lock(frames_lock);
246  frames.push_back(f);
247  });
248  }
249 
250  while (frames.size() < sensors.size())
251  {
252  std::this_thread::sleep_for(std::chrono::microseconds(100));
253  }
254 
255  return frames;
256 }
257 
258 sw_context init_sw_device(std::vector<std::string> sensor_names, std::vector<rs2::frame> processed_frames)
259 {
260  sw_context sctx;
261  for (int i = 0; i < processed_frames.size(); i++)
262  {
263  auto processed_frame = processed_frames[i];
264 
265  auto sensor = std::make_shared<rs2::software_sensor>(sctx.sdev.add_sensor(sensor_names[i]));
266 
267  auto profile = processed_frame.get_profile().as<rs2::video_stream_profile>();
268 
269  sctx.sw_stream_profiles[sensor_names[i]][profile.stream_type()] = init_stream_profile(sensor, profile);
270 
271  sensor->open(sctx.sw_stream_profiles[sensor_names[i]][profile.stream_type()]);
273  sensor->start(sync);
274 
275  sctx.sw_sensors[sensor_names[i]] = sensor;
276  sctx.sw_syncers[sensor_names[i]] = sync;
277  }
278  return sctx;
279 }
280 
282 {
285  return;
286 
288  auto dev = ctx.load_device(folder_name + "all_combinations_depth_color.bag");
289  dev.set_real_time(false);
290 
291  std::cout << "Recording was loaded" << std::endl;
292 
293  std::vector<rs2::sensor> sensors = dev.query_sensors();
294  auto original_frames = get_composite_frames(sensors);
295 
296  std::cout << "Recieved all recorded composite frames" << std::endl;
297 
298  std::vector<rs2::frame> processed_frames;
299  std::vector<std::string> sensor_names;
300 
301  for (auto f : original_frames)
302  {
303  auto processed_frame = record_block.process(f);
304  processed_frame.get_data();
305  processed_frame.keep();
306  processed_frames.push_back(processed_frame);
307 
308  auto color_stream_profile = f.get_color_frame().get_profile().as<rs2::video_stream_profile>();
309  auto depth_stream_profile = f.get_depth_frame().get_profile().as<rs2::video_stream_profile>();
310  sensor_names.push_back(get_sensor_name(color_stream_profile, depth_stream_profile));
311  }
312  std::cout << "All frames were processed" << std::endl;
313 
314  auto sctx = init_sw_device(sensor_names, processed_frames);
315  rs2::recorder recorder(folder_name + file, sctx.sdev);
316 
317  std::cout << "SW device initilized" << std::endl;
318 
319  for (int i = 0; i < processed_frames.size(); i++)
320  {
321  record_block.record(sensor_names[i], processed_frames[i], sctx);
322  }
323  std::cout << "All frames were recorded" << std::endl;
324 
325  for (auto s : sensors)
326  {
327  s.stop();
328  s.close();
329  }
330  std::cout << "Done" << std::endl;
331 }
332 
333 void validate_ppf_results(const rs2::frame& result_frame, const rs2::frame& reference_frame)
334 {
335  auto result_profile = result_frame.get_profile().as<rs2::video_stream_profile>();
336  REQUIRE(result_profile);
337  CAPTURE(result_profile.width());
338  CAPTURE(result_profile.height());
339  CAPTURE(result_profile.format());
340 
341  auto reference_profile = reference_frame.get_profile().as<rs2::video_stream_profile>();
342  REQUIRE(reference_profile);
343  CAPTURE(reference_profile.width());
344  CAPTURE(reference_profile.height());
345  CAPTURE(reference_profile.format());
346 
347  REQUIRE(result_profile.width() == reference_profile.width());
348  REQUIRE(result_profile.height() == reference_profile.height());
349 
350  size_t pixels_as_bytes = reference_frame.as<rs2::video_frame>().get_bytes_per_pixel() * result_profile.width() * result_profile.height();
351 
352  // Pixel-by-pixel comparison of the resulted filtered depth vs data ercorded with external tool
353  auto v1 = reinterpret_cast<const uint8_t*>(result_frame.get_data());
354  auto v2 = reinterpret_cast<const uint8_t*>(reference_frame.get_data());
355 
356  REQUIRE(std::memcmp(v1, v2, pixels_as_bytes) == 0);
357 }
358 
360 {
363  return;
364 
366 
367  auto dev = ctx.load_device(folder_name + "all_combinations_depth_color.bag");
368  dev.set_real_time(false);
369 
370  std::vector<rs2::sensor> sensors = dev.query_sensors();
371  auto frames = get_composite_frames(sensors);
372 
373 
374  auto ref_dev = ctx.load_device(folder_name + file);
375  ref_dev.set_real_time(false);
376 
377  std::vector<rs2::sensor> ref_sensors = ref_dev.query_sensors();
378  auto ref_frames = get_frames(ref_sensors);
379  CAPTURE(ref_frames.size());
380  CAPTURE(frames.size());
381  REQUIRE(ref_frames.size() == frames.size());
382  std::cout << "---------------------------------------------------------------------------------------------\n";
383  std::cout << "Calculated time interval to process frame\n";
384  std::cout << "---------------------------------------------------------------------------------------------\n";
385  for (int i = 0; i < frames.size(); i++)
386  {
387  auto d = frames[i].get_depth_frame().get_profile().as<rs2::video_stream_profile>();
388  auto c = frames[i].get_color_frame().get_profile().as<rs2::video_stream_profile>();
390  auto fs_res = record_block.process(frames[i]);
392 
393  std::cout << "DEPTH " << std::setw(4) << d.format() << " " << std::setw(10) << std::to_string(d.width()) + "x" + std::to_string(d.height()) << " | " <<
394  "COLOR " << std::setw(6) << c.format() << " " << std::setw(10) << std::to_string(c.width()) + "x" + std::to_string(c.height()) << std::setw(4) << " [" <<
395  std::setw(6) << std::chrono::duration_cast<std::chrono::microseconds>(done - started).count() << " us]" << std::endl;
396 
397  validate_ppf_results(fs_res, ref_frames[i]);
398  }
399 
400  for (auto s : sensors)
401  {
402  s.stop();
403  s.close();
404  }
405 
406  for (auto s : ref_sensors)
407  {
408  s.stop();
409  s.close();
410  }
411 }
412 
413 TEST_CASE("Record software-device all resolutions", "[record-bag]")
414 {
417  return;
418 
419  auto dev = ctx.query_devices()[0];
420  auto sensors = dev.query_sensors();
421 
422  std::vector<rs2::video_stream_profile> depth_profiles;
423  for (auto p : sensors[0].get_stream_profiles())
424  {
425  if (p.stream_type() == rs2_stream::RS2_STREAM_DEPTH)
426  {
427  auto pv = p.as<rs2::video_stream_profile>();
428  if (!std::any_of(depth_profiles.begin(), depth_profiles.end(), [&](rs2::video_stream_profile i)
429  {
430  return i.height() == pv.height() && i.width() == pv.width() && i.format() == pv.format();
431  }))
432  {
433  depth_profiles.push_back(pv);
434  }
435  }
436  }
437 
438  std::vector<rs2::video_stream_profile> color_profiles;
439  for (auto p : sensors[1].get_stream_profiles())
440  {
441  if (p.stream_type() == rs2_stream::RS2_STREAM_COLOR)
442  {
443  auto pv = p.as<rs2::video_stream_profile>();
444  if (!std::any_of(color_profiles.begin(), color_profiles.end(), [&](rs2::video_stream_profile i)
445  {
446  return i.height() == pv.height() || i.width() == pv.width() || i.format() == pv.format();
447  }))
448  {
449  color_profiles.push_back(pv);
450  }
451  }
452  }
453 
454  auto sctx = init_sw_device(depth_profiles, color_profiles, sensors[0].get_option(RS2_OPTION_DEPTH_UNITS));
456  rs2::recorder recorder(folder_name + "all_combinations_depth_color.bag", sctx.sdev);
457 
458  for (auto depth_profile : depth_profiles)
459  {
460  for (auto color_profile : color_profiles)
461  {
462  if (depth_profile.width() == color_profile.width() && depth_profile.height() == color_profile.height())
463  continue;
465  sensors[0].open(depth_profile);
466  sensors[1].open(color_profile);
467 
468  sensors[0].start(sync);
469  sensors[1].start(sync);
470 
471  while (true)
472  {
473  auto fs = sync.wait_for_frames();
474  if (fs.size() == 2)
475  {
476  std::cout << "Recording : " << "Depth : " << depth_profile.format() << " " << depth_profile.width() << "x" << depth_profile.height() <<
477  ", Color : " << color_profile.format() << " " << color_profile.width() << "x" << color_profile.height() << std::endl;
479  record_sw_frame(sensor_name, fs, sctx);
480  break;
481  }
482  }
483  sensors[0].stop();
484  sensors[1].stop();
485  sensors[0].close();
486  sensors[1].close();
487  }
488  }
489 }
490 
491 TEST_CASE("Record align color to depth software-device all resolutions", "[record-bag][align]")
492 {
494  record_frames_all_res(record_block, "[aligned_2d]_all_combinations_depth_color.bag");
495 }
496 
497 TEST_CASE("Record align depth to color software-device all resolutions", "[record-bag][align]")
498 {
500  record_frames_all_res(record_block, "[aligned_2c]_all_combinations_depth_color.bag");
501 }
502 
// Generate and record a point cloud for every depth/color resolution combination.
TEST_CASE("Record point cloud software-device all resolutions", "[record-bag][point-cloud]")
{
    auto record_block = pointcloud_record_block();
    record_frames_all_res(record_block, "[pointcloud]_all_combinations_depth_color.bag");
}
508 
509 TEST_CASE("Test align color to depth from recording", "[software-device][align]")
510 {
512  compare_processed_frames_vs_recorded_frames(record_block, "[aligned_2d]_all_combinations_depth_color.bag");
513 }
514 
515 TEST_CASE("Test align depth to color from recording", "[software-device][align]")
516 {
518  compare_processed_frames_vs_recorded_frames(record_block, "[aligned_2c]_all_combinations_depth_color.bag");
519 }
520 
// Re-compute point clouds from the raw recording and compare against the
// reference point-cloud bag produced by the record test above.
TEST_CASE("Test point cloud from recording", "[software-device][point-cloud]")
{
    auto record_block = pointcloud_record_block();
    compare_processed_frames_vs_recorded_frames(record_block, "[pointcloud]_all_combinations_depth_color.bag");
}
static const textual_icon lock
Definition: model-views.h:218
void record_frames_all_res(processing_recordable_block &record_block, std::string file)
rs2_software_video_frame create_sw_frame(const rs2::video_frame &f, rs2::stream_profile profile)
align_record_block(rs2_stream align_to, rs2_stream align_from)
GLuint const GLchar * name
std::map< std::string, rs2::syncer > sw_syncers
GLdouble s
rs2::stream_profile init_stream_profile(std::shared_ptr< rs2::software_sensor > ss, rs2::video_stream_profile stream_profile)
GLfloat GLfloat p
Definition: glext.h:12687
sw_context init_sw_device(std::vector< rs2::video_stream_profile > depth_profiles, std::vector< rs2::video_stream_profile > color_profiles, float depth_units)
virtual rs2::frame process(const rs2::frame &frame) override
playback load_device(const std::string &file)
Definition: rs_context.hpp:184
stream_profile get_profile() const
Definition: rs_frame.hpp:557
device_list query_devices() const
Definition: rs_context.hpp:112
All the parameters required to define a video stream.
Definition: rs_internal.h:41
int get_bytes_per_pixel() const
Definition: rs_frame.hpp:707
std::string get_folder_path(special_folder f)
Definition: os.cpp:247
struct _sw_context sw_context
virtual void record(std::string sensor_name, const rs2::frame &frame, sw_context sctx) override
const void * get_data() const
Definition: rs_frame.hpp:545
GLsizei const GLchar *const * string
d
Definition: rmse.py:171
software_sensor add_sensor(std::string name)
unsigned char uint8_t
Definition: stdint.h:78
void frame_ready(frame result) const
frameset wait_for_frames(unsigned int timeout_ms=5000) const
depth_frame get_depth_frame() const
Definition: rs_frame.hpp:1006
video_frame get_color_frame() const
Definition: rs_frame.hpp:1015
virtual rs2::frame process(const rs2::frame &frame) override
GLdouble f
std::map< std::string, std::map< rs2_stream, rs2::stream_profile > > sw_stream_profiles
std::ostream & cout()
const GLubyte * c
Definition: glext.h:12690
std::vector< rs2::stream_profile > init_stream_profiles(sw_context &sctx, std::shared_ptr< rs2::software_sensor > ss, std::string sensor_name, rs2::video_stream_profile c, rs2::video_stream_profile d)
#define CAPTURE(...)
Definition: catch.hpp:17432
REQUIRE(n_callbacks==1)
unsigned int uint32_t
Definition: stdint.h:80
double get_timestamp() const
Definition: rs_frame.hpp:474
std::vector< rs2::frame > get_frames(std::vector< rs2::sensor > sensors)
frame allocate_composite_frame(std::vector< frame > frames) const
static int done
void keep()
Definition: rs_frame.hpp:437
int stream_index() const
Definition: rs_frame.hpp:34
std::map< std::string, std::shared_ptr< rs2::software_sensor > > sw_sensors
virtual rs2::frame process(const rs2::frame &frame)=0
rs2_intrinsics get_intrinsics() const
Definition: rs_frame.hpp:238
rs2_stream
Streams are different types of data provided by RealSense devices.
Definition: rs_sensor.h:42
bool make_context(const char *id, rs2::context *ctx, std::string min_api_version="0.0.0")
std::vector< rs2::frameset > get_composite_frames(std::vector< rs2::sensor > sensors)
virtual void record(std::string sensor_name, const rs2::frame &frame, sw_context sctx) override
GLdouble GLdouble GLint GLint GLdouble v1
TEST_CASE("Record software-device all resolutions","[record-bag]")
rs2_format format() const
Definition: rs_frame.hpp:44
void compare_processed_frames_vs_recorded_frames(processing_recordable_block &record_block, std::string file)
std::string get_sensor_name(rs2::video_stream_profile c, rs2::video_stream_profile d)
void validate_ppf_results(const rs2::frame &result_frame, const rs2::frame &reference_frame)
rs2_timestamp_domain get_frame_timestamp_domain() const
Definition: rs_frame.hpp:485
int get_height() const
Definition: rs_frame.hpp:671
virtual void record(std::string sensor_name, const rs2::frame &frame, sw_context sctx)=0
void set_real_time(bool real_time) const
GLint GLsizei count
GLsizei GLsizei GLchar * source
void record_sw_frame(std::string sensor_name, rs2::frameset fs, sw_context sctx)
int i
const GLdouble * v2
All the parameters required to define a video frame.
Definition: rs_internal.h:76
int unique_id() const
Definition: rs_frame.hpp:54
unsigned long long get_frame_number() const
Definition: rs_frame.hpp:521
rs2_stream stream_type() const
Definition: rs_frame.hpp:39
int get_width() const
Definition: rs_frame.hpp:659
int fps() const
Definition: rs_frame.hpp:49
Definition: parser.hpp:150
#define SECTION_FROM_TEST_NAME
T as() const
Definition: rs_frame.hpp:580
std::string to_string(T value)


librealsense2
Author(s): Sergey Dorodnicov , Doron Hirshberg , Mark Horn , Reagan Lopez , Itay Carpis
autogenerated on Mon May 3 2021 02:50:13