#include "../include/librealsense2/rs_advanced_mode.hpp"

#define SECTION_FROM_TEST_NAME space_to_underscore(Catch::getCurrentContext().getResultCapture()->getCurrentTestName()).c_str()

std::map<std::string, std::shared_ptr<rs2::software_sensor>> sw_sensors;
// Fragment of create_sw_frame(const rs2::video_frame& f, rs2::stream_profile profile):
// copy the frame payload into a heap buffer and hand it to the software frame together
// with a deleter that releases the copy.
std::memcpy(data, f.get_data(), data_size);
// ...
[](void* data) { delete[] (uint8_t*)data; },
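// A minimal sketch (not the original implementation) of how such a helper can assemble
// an rs2_software_video_frame from an rs2::video_frame. The struct fields come from
// librealsense2/hpp/rs_internal.hpp; the variable names and the stride-based size
// computation are choices of this sketch.
rs2_software_video_frame create_sw_frame(const rs2::video_frame& f, rs2::stream_profile profile)
{
    size_t data_size = f.get_stride_in_bytes() * f.get_height();
    auto data = new uint8_t[data_size];
    std::memcpy(data, f.get_data(), data_size);               // deep-copy the pixels

    rs2_software_video_frame sw_frame = {};
    sw_frame.pixels = data;
    sw_frame.deleter = [](void* p) { delete[] (uint8_t*)p; }; // free the copy when the frame is released
    sw_frame.stride = f.get_stride_in_bytes();
    sw_frame.bpp = f.get_bytes_per_pixel();
    sw_frame.timestamp = f.get_timestamp();
    sw_frame.domain = f.get_frame_timestamp_domain();
    sw_frame.frame_number = static_cast<int>(f.get_frame_number());
    sw_frame.profile = profile.get();                         // underlying rs2_stream_profile pointer
    return sw_frame;
}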
// Fragment of align_record_block::process(): align the frameset and return the frame
// of the "from" stream.
return (_align.process(fs)).first_or_default(_align_from);
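// A hedged sketch of the rs2::align usage the fragment above relies on: construct the
// filter once with the target stream, feed it a frameset, then pick the re-projected
// frame of the source stream. The stream constants here are examples, not the test's choice.
rs2::align align_to_color(RS2_STREAM_COLOR);           // re-project onto the color viewport
rs2::frameset aligned = align_to_color.process(fs);    // fs is an rs2::frameset
rs2::frame aligned_depth = aligned.first_or_default(RS2_STREAM_DEPTH);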
// Fragment of pointcloud_record_block::record(): copy the rs2::points payload
// (20 bytes per point: a 12-byte XYZ vertex plus an 8-byte UV texture coordinate)
// into a software frame and push it to the software sensor.
const int points_bpp = 20;
// ...
std::memcpy(data, frame.get_data(), data_size);
// ...
[](void* data) { delete[] (uint8_t*)data; },
// ...
ss->on_video_frame(points_frame);
// Fragment of pointcloud_record_block::process(): compute the point cloud from the
// depth frame of the incoming frameset.
return _pc.calculate(fs.get_depth_frame());
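// A minimal sketch, under the assumption that _pc is an rs2::pointcloud, of the
// calculate/map_to pattern the block above builds on; map_to is optional and shown
// only for completeness.
rs2::pointcloud pc;
pc.map_to(fs.get_color_frame());                          // texture-map against the color frame (optional)
rs2::points points = pc.calculate(fs.get_depth_frame());
const rs2::vertex* vertices = points.get_vertices();                   // XYZ, 12 bytes per point
const rs2::texture_coordinate* uv = points.get_texture_coordinates();  // UV, 8 bytes per point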
// Fragment of init_stream_profile(): build an rs2_video_stream from the recorded
// profile and register it on the software sensor.
stream_profile.width(),
// ...
stream_profile.fps(),
// ...
return ss->add_video_stream(new_stream);
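// A hedged sketch of registering a stream on an rs2::software_sensor from an existing
// rs2::video_stream_profile. The field values mirror the recorded profile; the bpp value
// is an example assumption (2 bytes/pixel as for Z16 depth).
rs2_video_stream new_stream = {};
new_stream.type = stream_profile.stream_type();
new_stream.index = stream_profile.stream_index();
new_stream.uid = stream_profile.unique_id();
new_stream.width = stream_profile.width();
new_stream.height = stream_profile.height();
new_stream.fps = stream_profile.fps();
new_stream.bpp = 2;
new_stream.fmt = stream_profile.format();
new_stream.intrinsics = stream_profile.get_intrinsics();
rs2::stream_profile registered = ss->add_video_stream(new_stream);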
// Fragment of init_stream_profiles(): collect the stream profiles registered on the
// software sensor for this color/depth combination.
std::vector<rs2::stream_profile> profiles = {
    // ...
};
sw_context init_sw_device(std::vector<rs2::video_stream_profile> depth_profiles,
                          std::vector<rs2::video_stream_profile> color_profiles, float depth_units)
// Fragments of get_composite_frames(std::vector<rs2::sensor> sensors): a processing
// block collects incoming frames and, once two have arrived, emits them as one
// composite frameset; the loop at the end drains the results from a frame queue.
std::vector<rs2::frameset> composite_frames;

std::vector<rs2::frame> frames;
std::mutex frame_processor_lock;
// ...
std::lock_guard<std::mutex> lock(frame_processor_lock);
frames.push_back(data);
if (frames.size() == 2)
// ...
frame_processor >> postprocessed_frames;

for (auto s : sensors)
    s.open(s.get_stream_profiles());

for (auto s : sensors)
    // ...
    frame_processor.invoke(f);

while (composite_frames.size() < sensors.size())
    // ...
    if (postprocessed_frames.try_wait_for_frame(&composite_fs))
        // ...
        composite_frames.push_back(composite_fs);

return composite_frames;
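// A hedged sketch of the batching processing block the fragments above revolve around:
// frames are collected under a lock and, once a pair is present, emitted as a single
// composite frameset. The pair size of 2 follows the fragment's `frames.size() == 2`
// check; the queue capacity and variable names are assumptions of this sketch.
std::vector<rs2::frame> frames;
std::mutex frame_processor_lock;
rs2::processing_block frame_processor([&](rs2::frame data, rs2::frame_source& source)
{
    std::lock_guard<std::mutex> lock(frame_processor_lock);
    frames.push_back(data);
    if (frames.size() == 2)
    {
        source.frame_ready(source.allocate_composite_frame(frames));
        frames.clear();
    }
});

rs2::frame_queue postprocessed_frames(100);   // generous capacity so composites are not dropped
frame_processor >> postprocessed_frames;      // route the block's output into the queue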
// Fragments of get_frames(std::vector<rs2::sensor> sensors): open every sensor, start it
// with a callback that appends the arriving frame under a lock, and wait until there are
// at least as many frames as sensors.
std::vector<rs2::frame> get_frames(std::vector<rs2::sensor> sensors)
{
    std::vector<rs2::frame> frames;
    std::mutex frames_lock;
    // ...
    for (auto s : sensors)
        s.open(s.get_stream_profiles());
    // ...
    for (auto s : sensors)
    {
        // ... inside the frame callback:
        std::lock_guard<std::mutex> lock(frames_lock);
        // ...
    }
    // ...
    while (frames.size() < sensors.size())
        std::this_thread::sleep_for(std::chrono::microseconds(100));
    // ...
}
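// A minimal sketch, under assumptions, of the start-with-callback pattern hinted at by
// the lock_guard fragment above: each sensor delivers frames on its own thread, so the
// shared vector is guarded by a mutex and keep() pins the frame data for later use.
for (auto s : sensors)
{
    s.start([&](rs2::frame f)
    {
        std::lock_guard<std::mutex> lock(frames_lock);
        f.keep();              // keep the frame alive after the callback returns
        frames.push_back(f);
    });
}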
for (int i = 0; i < processed_frames.size(); i++)
{
    auto processed_frame = processed_frames[i];
    // ...
}
// Fragments of record_frames_all_res(processing_recordable_block& record_block, std::string file):
auto dev = ctx.load_device(folder_name + "all_combinations_depth_color.bag");
dev.set_real_time(false);
// ...
std::cout << "Recording was loaded" << std::endl;
// ...
std::vector<rs2::sensor> sensors = dev.query_sensors();
// ...
std::cout << "Received all recorded composite frames" << std::endl;
// ...
std::vector<rs2::frame> processed_frames;
std::vector<std::string> sensor_names;
// ...
for (auto f : original_frames)
{
    auto processed_frame = record_block.process(f);
    // ...
    processed_frame.keep();
    processed_frames.push_back(processed_frame);
    // ...
    sensor_names.push_back(get_sensor_name(color_stream_profile, depth_stream_profile));
}
std::cout << "All frames were processed" << std::endl;
// ...
std::cout << "SW device initialized" << std::endl;
// ...
for (int i = 0; i < processed_frames.size(); i++)
    record_block.record(sensor_names[i], processed_frames[i], sctx);
// ...
std::cout << "All frames were recorded" << std::endl;
// ...
for (auto s : sensors)
    // ...
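// A hedged sketch of how a software device can be recorded to a .bag file, which is the
// pattern the record() step above builds on: wrap an rs2::software_device in an
// rs2::recorder and push frames through the software sensor. The sensor name, stream
// parameters, and output file name are examples, not the test's actual values.
rs2::software_device sdev;
rs2::software_sensor depth_sensor = sdev.add_sensor("Depth");

rs2_video_stream depth_stream = {};
depth_stream.type = RS2_STREAM_DEPTH;
depth_stream.uid = 0;
depth_stream.width = 640;
depth_stream.height = 480;
depth_stream.fps = 30;
depth_stream.bpp = 2;
depth_stream.fmt = RS2_FORMAT_Z16;
rs2::stream_profile depth_profile = depth_sensor.add_video_stream(depth_stream);

rs2::recorder recorder("sw_record.bag", sdev);   // record everything the software device emits
rs2::syncer sync;
depth_sensor.open(depth_profile);
depth_sensor.start(sync);
// depth_sensor.on_video_frame(create_sw_frame(frame, depth_profile));  // push frames here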
// Fragments of validate_ppf_results(const rs2::frame& result_frame, const rs2::frame& reference_frame):
CAPTURE(result_profile.width());
CAPTURE(result_profile.height());
CAPTURE(result_profile.format());
// ...
CAPTURE(reference_profile.width());
CAPTURE(reference_profile.height());
CAPTURE(reference_profile.format());
// ...
REQUIRE(result_profile.width() == reference_profile.width());
REQUIRE(result_profile.height() == reference_profile.height());
// ...
size_t pixels_as_bytes = reference_frame.as<rs2::video_frame>().get_bytes_per_pixel()
                       * result_profile.width() * result_profile.height();
// ...
REQUIRE(std::memcmp(v1, v2, pixels_as_bytes) == 0);
// Fragments of compare_processed_frames_vs_recorded_frames(processing_recordable_block& record_block, std::string file):
auto dev = ctx.load_device(folder_name + "all_combinations_depth_color.bag");
dev.set_real_time(false);
// ...
std::vector<rs2::sensor> sensors = dev.query_sensors();
// ...
auto ref_dev = ctx.load_device(folder_name + file);
// ...
std::vector<rs2::sensor> ref_sensors = ref_dev.query_sensors();
// ...
std::cout << "---------------------------------------------------------------------------------------------\n";
std::cout << "Calculated time interval to process frame\n";
std::cout << "---------------------------------------------------------------------------------------------\n";
// ...
std::cout << "DEPTH " << std::setw(4) << d.format() << " " << std::setw(10)
          << std::to_string(d.width()) + "x" + std::to_string(d.height()) << " | " <<
    // ...
    std::setw(6) << std::chrono::duration_cast<std::chrono::microseconds>(done - started).count()
          << " us]" << std::endl;
// ...
for (auto s : sensors)
    // ...
for (auto s : ref_sensors)
    // ...
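// A small sketch of the per-frame timing the printout above reports; the names
// 'started'/'done' follow the fragment, while the clock choice is an assumption.
auto started = std::chrono::high_resolution_clock::now();
rs2::frame processed = record_block.process(f);      // process one recorded frame
auto done = std::chrono::high_resolution_clock::now();
auto us = std::chrono::duration_cast<std::chrono::microseconds>(done - started).count();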
TEST_CASE("Record software-device all resolutions", "[record-bag]")
{
    // ...
    auto sensors = dev.query_sensors();
    // ...
    std::vector<rs2::video_stream_profile> depth_profiles;
    for (auto p : sensors[0].get_stream_profiles())
    {
        // ... body of the de-duplication predicate over already collected depth profiles:
        return i.height() == pv.height() && i.width() == pv.width() && i.format() == pv.format();
        // ...
        depth_profiles.push_back(pv);
    }

    std::vector<rs2::video_stream_profile> color_profiles;
    for (auto p : sensors[1].get_stream_profiles())
    {
        // ... body of the predicate that rejects color profiles sharing height, width or format:
        return i.height() == pv.height() || i.width() == pv.width() || i.format() == pv.format();
        // ...
        color_profiles.push_back(pv);
    }
    // ...
    sensors[0].start(sync);
    sensors[1].start(sync);
    // ...
}
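// A hedged sketch of how such a predicate is typically wired up with std::any_of
// (<algorithm> assumed included): the profile is added only if no already-collected
// profile matches it. The depth variant above compares with &&, which is what this
// sketch shows.
auto pv = p.as<rs2::video_stream_profile>();
if (!std::any_of(depth_profiles.begin(), depth_profiles.end(),
                 [&](const rs2::video_stream_profile& i)
                 {
                     return i.height() == pv.height() &&
                            i.width() == pv.width() &&
                            i.format() == pv.format();
                 }))
{
    depth_profiles.push_back(pv);
}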
TEST_CASE("Record align color to depth software-device all resolutions", "[record-bag][align]")

TEST_CASE("Record align depth to color software-device all resolutions", "[record-bag][align]")

TEST_CASE("Record point cloud software-device all resolutions", "[record-bag][point-cloud]")

TEST_CASE("Test align color to depth from recording", "[software-device][align]")

TEST_CASE("Test align depth to color from recording", "[software-device][align]")

TEST_CASE("Test point cloud from recording", "[software-device][point-cloud]")