model-views.cpp
1 // License: Apache 2.0. See LICENSE file in root directory.
2 // Copyright(c) 2017 Intel Corporation. All Rights Reserved.
3 
4 #ifdef _MSC_VER
5 #ifndef NOMINMAX
6 #define NOMINMAX
7 #endif
8 #endif
9 
10 #include <thread>
11 #include <algorithm>
12 #include <regex>
13 #include <cmath>
14 
15 #include <opengl3.h>
16 
18 #include <librealsense2/rsutil.h>
19 #include <librealsense2/rs.hpp>
20 
21 #include "model-views.h"
22 #include "updates-model.h"
23 #include "notifications.h"
24 #include "fw-update-helper.h"
25 #include "on-chip-calib.h"
26 #include "viewer.h"
29 #include <imgui_internal.h>
30 #include <time.h>
31 
32 #include "imgui-fonts-karla.hpp"
34 #include "imgui-fonts-monofont.hpp"
35 
36 #include "os.h"
37 
38 #include "metadata-helper.h"
39 #include "calibration-model.h"
41 
42 using namespace rs400;
43 using namespace nlohmann;
44 using namespace rs2::sw_update;
45 
46 rs2_sensor_mode resolution_from_width_height(int width, int height)
47 {
48  if ((width == 240 && height == 320) || (width == 320 && height == 240))
49  return RS2_SENSOR_MODE_QVGA;
50  else if ((width == 640 && height == 480) || (height == 640 && width == 480))
51  return RS2_SENSOR_MODE_VGA;
52  else if ((width == 1024 && height == 768) || (height == 768 && width == 1024))
53  return RS2_SENSOR_MODE_XGA;
54  else
55  return RS2_SENSOR_MODE_COUNT;
56 }
57 
58 void width_height_from_resolution(rs2_sensor_mode mode, int& width, int& height)
59 {
60  switch (mode)
61  {
62  case RS2_SENSOR_MODE_VGA:
63  width = 640;
64  height = 480;
65  break;
66  case RS2_SENSOR_MODE_XGA:
67  width = 1024;
68  height = 768;
69  break;
70  case RS2_SENSOR_MODE_QVGA:
71  width = 320;
72  height = 240;
73  break;
74  default:
75  width = height = 0;
76  break;
77  }
78 }
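// Editorial note: a minimal round-trip sketch (not part of the original file) for the two
// helpers above; the assert is illustrative only.
//
//     int w = 0, h = 0;
//     width_height_from_resolution(RS2_SENSOR_MODE_XGA, w, h);            // w == 1024, h == 768
//     assert(resolution_from_width_height(w, h) == RS2_SENSOR_MODE_XGA);  // round-trips to XGA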
79 
80 int get_resolution_id_from_sensor_mode( rs2_sensor_mode sensor_mode,
81  const rs2::sensor & s,
82  const std::vector< std::pair< int, int > > & res_values )
83 {
84  int width = 0, height = 0;
85  width_height_from_resolution( sensor_mode, width, height );
86  auto iter = std::find_if( res_values.begin(),
87  res_values.end(),
88  [width, height]( std::pair< int, int > res ) {
89  if( ( res.first == width ) && ( res.second == height )
90  || ( res.first == height ) && ( res.second == width ) )
91  return true;
92  return false;
93  } );
94  if( iter != res_values.end() )
95  {
96  return static_cast< int >( std::distance( res_values.begin(), iter ) );
97  }
98 
99  throw std::runtime_error( "cannot convert sensor mode to resolution ID" );
100 }
101 
102 ImVec4 flip(const ImVec4& c)
103 {
104  return{ c.y, c.x, c.z, c.w };
105 }
106 
107 ImVec4 from_rgba(uint8_t r, uint8_t g, uint8_t b, uint8_t a, bool consistent_color)
108 {
109  auto res = ImVec4(r / (float)255, g / (float)255, b / (float)255, a / (float)255);
110 #ifdef FLIP_COLOR_SCHEME
111  if (!consistent_color) return flip(res);
112 #endif
113  return res;
114 }
115 
116 ImVec4 operator+(const ImVec4& c, float v)
117 {
118  return ImVec4(
119  std::max(0.f, std::min(1.f, c.x + v)),
120  std::max(0.f, std::min(1.f, c.y + v)),
121  std::max(0.f, std::min(1.f, c.z + v)),
122  std::max(0.f, std::min(1.f, c.w))
123  );
124 }
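// Editorial note: an illustrative sketch (not part of the original file) of how these color
// helpers are combined later in this file: build an ImVec4 from 8-bit RGBA, then shift the RGB
// channels with operator+ (alpha is clamped but not shifted). The color values are hypothetical.
//
//     ImVec4 button_color = from_rgba(0x2d, 0x37, 0x40, 0xff, false);
//     ImVec4 hovered = button_color + 0.1f;     // slightly lighter
//     ImVec4 active  = button_color + (-0.1f);  // slightly darker, as in the style setup below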
125 
126 struct attribute
127 {
131 };
132 
133 namespace rs2
134 {
135  template <typename T>
137  {
138  try
139  {
140  t();
141  return "";
142  }
143  catch (const error& e)
144  {
145  return error_to_string(e);
146  }
147  catch (const std::exception& e)
148  {
149  return e.what();
150  }
151  catch (...)
152  {
153  return "Unknown error occurred";
154  }
155  }
156 
157  std::vector<uint8_t> bytes_from_bin_file(const std::string& filename)
158  {
159  std::ifstream file(filename.c_str(), std::ios::binary);
160  if (!file.good())
161  throw std::runtime_error(to_string() << "Invalid binary file specified " << filename << " verify the source path and location permissions");
162 
163  // Determine the file length
164  file.seekg(0, std::ios_base::end);
165  std::size_t size = file.tellg();
166  if (!size)
167  throw std::runtime_error(to_string() << "Invalid binary file " << filename << " provided - zero-size ");
168  file.seekg(0, std::ios_base::beg);
169 
170  // Create a vector to store the data
171  std::vector<uint8_t> v(size);
172 
173  // Load the data
174  file.read((char*)&v[0], size);
175 
176  return v;
177  }
178 
179  // Flush binary stream to file, override previous if exists
180  void bin_file_from_bytes(const std::string& filename, const std::vector<uint8_t> bytes)
181  {
182  std::ofstream file(filename, std::ios::binary | std::ios::trunc);
183  if (!file.good())
184  throw std::runtime_error(to_string() << "Invalid binary file specified " << filename << " verify the target path and location permissions");
185  file.write((char*)bytes.data(), bytes.size());
186  }
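// Editorial note: an illustrative round trip (not part of the original file) for the two file
// helpers above -- read a binary blob into memory, then flush it back out. The file names are
// hypothetical.
//
//     auto blob = bytes_from_bin_file("calibration_table.bin");
//     bin_file_from_bytes("calibration_table.copy.bin", blob);  // overwrites the target if it exists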
187 
188  void imgui_easy_theming(ImFont*& font_14, ImFont*& font_18, ImFont*& monofont)
189  {
190  ImGuiStyle& style = ImGui::GetStyle();
191 
192  ImGuiIO& io = ImGui::GetIO();
193  io.IniFilename = nullptr;
194 
195  const int OVERSAMPLE = config_file::instance().get(configurations::performance::font_oversample);
196 
197  static const ImWchar icons_ranges[] = { 0xf000, 0xf999, 0 }; // will not be copied by AddFont* so keep in scope.
198 
199  {
200  ImFontConfig config_words;
201  config_words.OversampleV = OVERSAMPLE;
202  config_words.OversampleH = OVERSAMPLE;
204 
205  ImFontConfig config_glyphs;
206  config_glyphs.MergeMode = true;
207  config_glyphs.OversampleV = OVERSAMPLE;
208  config_glyphs.OversampleH = OVERSAMPLE;
210  font_awesome_compressed_size, 14.f, &config_glyphs, icons_ranges);
211  }
212 
213  // Load 18px size fonts
214  {
215  ImFontConfig config_words;
216  config_words.OversampleV = OVERSAMPLE;
217  config_words.OversampleH = OVERSAMPLE;
219 
220  ImFontConfig config_glyphs;
221  config_glyphs.MergeMode = true;
222  config_glyphs.OversampleV = OVERSAMPLE;
223  config_glyphs.OversampleH = OVERSAMPLE;
225  font_awesome_compressed_size, 20.f, &config_glyphs, icons_ranges);
226 
227  }
228 
229  // Load monofont
230  {
231  ImFontConfig config_words;
232  config_words.OversampleV = OVERSAMPLE;
233  config_words.OversampleH = OVERSAMPLE;
235 
236  ImFontConfig config_glyphs;
237  config_glyphs.MergeMode = true;
238  config_glyphs.OversampleV = OVERSAMPLE;
239  config_glyphs.OversampleH = OVERSAMPLE;
241  font_awesome_compressed_size, 14.f, &config_glyphs, icons_ranges);
242  }
243 
244  style.WindowRounding = 0.0f;
245  style.ScrollbarRounding = 0.0f;
246 
248  style.Colors[ImGuiCol_Border] = black;
261  style.Colors[ImGuiCol_ButtonActive] = button_color + (-0.1f);
263  style.Colors[ImGuiCol_HeaderActive] = header_color + (-0.1f);
268  }
269 
270  void save_processing_block_to_config_file(const char* name,
271  std::shared_ptr<rs2::processing_block> pb, bool enable)
272  {
273  for (auto opt : pb->get_supported_options())
274  {
275  auto val = pb->get_option(opt);
276  std::string key = name;
277  key += ".";
278  key += pb->get_option_name(opt);
279  config_file::instance().set(key.c_str(), val);
280  }
281 
282  std::string key = name;
283  key += ".enabled";
284  config_file::instance().set(key.c_str(), enable);
285  }
286 
287  bool restore_processing_block(const char* name,
288  std::shared_ptr<rs2::processing_block> pb, bool enable = true)
289  {
290  for (auto opt : pb->get_supported_options())
291  {
292  std::string key = name;
293  key += ".";
294  key += pb->get_option_name(opt);
295  if (config_file::instance().contains(key.c_str()))
296  {
297  float val = config_file::instance().get(key.c_str());
298  try
299  {
300  auto range = pb->get_option_range(opt);
301  if (val >= range.min && val <= range.max)
302  pb->set_option(opt, val);
303  }
304  catch (...)
305  {
306  }
307  }
308  }
309 
310  std::string key = name;
311  key += ".enabled";
312  if (config_file::instance().contains(key.c_str()))
313  {
314  return config_file::instance().get(key.c_str());
315  }
316  return enable;
317  }
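// Editorial note: a sketch (not part of the original file) of the intended save/restore round
// trip for a processing block's options, assuming the save_processing_block_to_config_file
// signature reconstructed above. The key "post_processing.colorizer" is hypothetical.
//
//     auto colorizer = std::make_shared<rs2::colorizer>();
//     bool enabled = restore_processing_block("post_processing.colorizer", colorizer);
//     // ... the user edits the block's options in the UI ...
//     save_processing_block_to_config_file("post_processing.colorizer", colorizer, enabled);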
318 
319  void hyperlink(ux_window& window, const char* title, const char* link)
320  {
321  if (ImGui::Button(title))
322  {
323  open_url(link);
324  }
325  if (ImGui::IsItemHovered())
326  {
327  window.link_hovered();
328  }
329  }
330 
331  void open_issue(const std::string& body)
332  {
333  std::string link = "https://github.com/IntelRealSense/librealsense/issues/new?body=" + url_encode(body);
334  open_url(link.c_str());
335  }
336 
338  {
339  std::stringstream ss;
340 
341  rs2_error* e = nullptr;
342 
343  ss << "| | |\n";
344  ss << "|---|---|\n";
345  ss << "|**librealsense**|" << api_version_to_string(rs2_get_api_version(&e)) << (is_debug() ? " DEBUG" : " RELEASE") << "|\n";
346  ss << "|**OS**|" << get_os_name() << "|\n";
347 
348  for (auto& dm : devices)
349  {
350  for (auto& kvp : dm->infos)
351  {
352  if (kvp.first != "Recommended Firmware Version" &&
353  kvp.first != "Debug Op Code" &&
354  kvp.first != "Physical Port" &&
355  kvp.first != "Product Id")
356  ss << "|**" << kvp.first << "**|" << kvp.second << "|\n";
357  }
358  }
359 
360  ss << "\nPlease provide a description of the problem";
361 
362  open_issue(ss.str());
363  }
364 
365  std::tuple<uint8_t, uint8_t, uint8_t> get_texcolor(video_frame texture, texture_coordinate texcoords)
366  {
367  const int w = texture.get_width(), h = texture.get_height();
368  int x = std::min(std::max(int(texcoords.u*w + .5f), 0), w - 1);
369  int y = std::min(std::max(int(texcoords.v*h + .5f), 0), h - 1);
370  int idx = x * texture.get_bytes_per_pixel() + y * texture.get_stride_in_bytes();
371  const auto texture_data = reinterpret_cast<const uint8_t*>(texture.get_data());
372  return std::tuple<uint8_t, uint8_t, uint8_t>(
373  texture_data[idx], texture_data[idx + 1], texture_data[idx + 2]);
374  }
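// Editorial note: an illustrative use (not part of the original file) of get_texcolor -- mapping
// a point-cloud texture coordinate back to an 8-bit RGB triple. It assumes an RGB8 texture (the
// indexing reads three consecutive bytes); variable names are hypothetical.
//
//     rs2::video_frame color = frames.get_color_frame();
//     rs2::texture_coordinate uv = tex_coords[i];  // e.g. from rs2::points::get_texture_coordinates()
//     uint8_t r, g, b;
//     std::tie(r, g, b) = get_texcolor(color, uv);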
375 
376  void export_frame(const std::string& fname, std::unique_ptr<rs2::filter> exporter,
377  notifications_model& ns, frame data, bool notify)
378  {
379  auto manager = std::make_shared<export_manager>(fname, std::move(exporter), data);
380 
381  auto n = std::make_shared<export_notification_model>(manager);
382  ns.add_notification(n);
383  n->forced = true;
384 
385  auto invoke = [n](std::function<void()> action) {
386  n->invoke(action);
387  };
388  manager->start(invoke);
389  }
390 
391  bool save_frame_raw_data(const std::string& filename, rs2::frame frame)
392  {
393  bool ret = false;
394  auto image = frame.as<video_frame>();
395  if (image)
396  {
397  std::ofstream outfile(filename.data(), std::ofstream::binary);
398  outfile.write(static_cast<const char*>(image.get_data()), image.get_height()*image.get_stride_in_bytes());
399 
400  outfile.close();
401  ret = true;
402  }
403 
404  return ret;
405  }
406 
407  bool frame_metadata_to_csv(const std::string& filename, rs2::frame frame)
408  {
409  bool ret = false;
410  auto image = frame.as<video_frame>();
411  if (image)
412  {
413  std::ofstream csv(filename);
414 
415  auto profile = image.get_profile();
416  csv << "Frame Info: " << std::endl << "Type," << profile.stream_name() << std::endl;
417  csv << "Format," << rs2_format_to_string(profile.format()) << std::endl;
418  csv << "Frame Number," << image.get_frame_number() << std::endl;
419  csv << "Timestamp (ms)," << std::fixed << std::setprecision(2) << image.get_timestamp() << std::endl;
420  csv << "Resolution x," << (int)image.get_width() << std::endl;
421  csv << "Resolution y," << (int)image.get_height() << std::endl;
422  csv << "Bytes per pixel," << (int)image.get_bytes_per_pixel() << std::endl;
423 
424  if (auto vsp = profile.as<video_stream_profile>())
425  {
426  auto intrinsics = vsp.get_intrinsics();
427  csv << std::endl << "Intrinsic:," << std::fixed << std::setprecision(6) << std::endl;
428  csv << "Fx," << intrinsics.fx << std::endl;
429  csv << "Fy," << intrinsics.fy << std::endl;
430  csv << "PPx," << intrinsics.ppx << std::endl;
431  csv << "PPy," << intrinsics.ppy << std::endl;
432  csv << "Distortion," << rs2_distortion_to_string(intrinsics.model) << std::endl;
433  }
434 
435  ret = true;
436  }
437 
438  return ret;
439  }
440 
441  bool motion_data_to_csv( const std::string & filename, rs2::frame frame )
442  {
443  bool ret = false;
444  if( auto motion = frame.as< motion_frame >() )
445  {
446  std::string units;
447  if( motion.get_profile().stream_type() == RS2_STREAM_GYRO )
448  units = "( deg/sec )";
449  else
450  units = "( m/sec^2 )";
451  auto axes = motion.get_motion_data();
452  std::ofstream csv( filename );
453 
454  auto profile = frame.get_profile();
455  csv << "Frame Info: " << std::endl << "Type," << profile.stream_name() << std::endl;
456  csv << "Format," << rs2_format_to_string( profile.format() ) << std::endl;
457  csv << "Frame Number," << frame.get_frame_number() << std::endl;
458  csv << "Timestamp (ms)," << std::fixed << std::setprecision( 2 )
459  << frame.get_timestamp() << std::endl;
460  csv << std::setprecision( 7 ) << "Axes" << units << ", " << axes << std::endl;
461 
462  ret = true;
463  }
464 
465  return ret;
466  }
467 
468  bool pose_data_to_csv( const std::string & filename, rs2::frame frame )
469  {
470  bool ret = false;
471  if( auto pose = frame.as< pose_frame >() )
472  {
473  auto pose_data = pose.get_pose_data();
474  std::ofstream csv( filename );
475 
476  auto profile = frame.get_profile();
477  csv << "Frame Info: " << std::endl << "Type," << profile.stream_name() << std::endl;
478  csv << "Format," << rs2_format_to_string( profile.format() ) << std::endl;
479  csv << "Frame Number," << frame.get_frame_number() << std::endl;
480  csv << "Timestamp (ms)," << std::fixed << std::setprecision( 2 )
481  << frame.get_timestamp() << std::endl;
482  csv << std::setprecision( 7 ) << "Acceleration( meters/sec^2 ), "
483  << pose_data.acceleration << std::endl;
484  csv << std::setprecision( 7 ) << "Angular_acceleration( radians/sec^2 ), "
485  << pose_data.angular_acceleration << std::endl;
486  csv << std::setprecision( 7 ) << "Angular_velocity( radians/sec ), "
487  << pose_data.angular_velocity << std::endl;
488  csv << std::setprecision( 7 )
489  << "Mapper_confidence( 0x0 - Failed 0x1 - Low 0x2 - Medium 0x3 - High ), "
490  << pose_data.mapper_confidence << std::endl;
491  csv << std::setprecision( 7 )
492  << "Rotation( quaternion rotation (relative to initial position) ), "
493  << pose_data.rotation << std::endl;
494  csv << std::setprecision( 7 )
495  << "Tracker_confidence( 0x0 - Failed 0x1 - Low 0x2 - Medium 0x3 - High ), "
496  << pose_data.tracker_confidence << std::endl;
497  csv << std::setprecision( 7 ) << "Translation( meters ), " << pose_data.translation
498  << std::endl;
499  csv << std::setprecision( 7 ) << "Velocity( meters/sec ), " << pose_data.velocity
500  << std::endl;
501 
502  ret = true;
503  }
504 
505  return ret;
506  }
507 
508  std::vector<const char*> get_string_pointers(const std::vector<std::string>& vec)
509  {
510  std::vector<const char*> res;
511  for (auto&& s : vec) res.push_back(s.c_str());
512  return res;
513  }
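// Editorial note: a sketch (not part of the original file) of why get_string_pointers exists --
// ImGui::Combo expects an array of const char*, while the models keep std::string lists. The
// returned pointers stay valid only while the source vector is alive and unmodified.
//
//     std::vector<std::string> resolutions = { "640 x 480", "1280 x 720" };
//     auto res_chars = get_string_pointers(resolutions);
//     int selected = 0;
//     ImGui::Combo("##resolution", &selected, res_chars.data(), static_cast<int>(res_chars.size()));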
514 
515  option_model create_option_model(rs2_option opt,
516  const std::string& opt_base_label,
517  subdevice_model* model,
518  std::shared_ptr<options> options,
519  bool* options_invalidated,
520  std::string& error_message)
521  {
522  option_model option = {};
523 
524  std::stringstream ss;
525 
526  ss << opt_base_label << "/" << options->get_option_name(opt);
527  option.id = ss.str();
528  option.opt = opt;
529  option.endpoint = options;
530  option.label = options->get_option_name(opt) + std::string("##") + ss.str();
531  option.invalidate_flag = options_invalidated;
532  option.dev = model;
533 
534  option.supported = options->supports(opt);
535  if (option.supported)
536  {
537  try
538  {
539  option.range = options->get_option_range(opt);
540  option.read_only = options->is_option_read_only(opt);
541  option.value = options->get_option(opt);
542  }
543  catch (const error& e)
544  {
545  option.range = { 0, 1, 0, 0 };
546  option.value = 0;
547  error_message = error_to_string(e);
548  }
549  }
550  return option;
551  }
552 
553  bool option_model::draw(std::string& error_message, notifications_model& model, bool new_line, bool use_option_name)
554  {
555  auto res = false;
556  if (endpoint->supports(opt))
557  {
558  // The option's rendering model supports an alternative option title derived from its description rather than its name.
559  // This is applied to Holes Filling, whose display must conform to the names used by third-party tools for consistency.
560  if (opt == RS2_OPTION_HOLES_FILL)
561  use_option_name = false;
562 
563  auto desc = endpoint->get_option_description(opt);
564 
565  // when not starting a new line, append this option to the current line
566  if (!new_line)
567  ImGui::SameLine();
568 
569  if (is_checkbox())
570  {
571  auto bool_value = value > 0.0f;
572  if (ImGui::Checkbox(label.c_str(), &bool_value))
573  {
574  if (allow_change((bool_value ? 1.0f : 0.0f), error_message))
575  {
576  res = true;
577  model.add_log(to_string() << "Setting " << opt << " to "
578  << (bool_value? "1.0" : "0.0") << " (" << (bool_value ? "ON" : "OFF") << ")");
579 
580  set_option(opt, bool_value ? 1.f : 0.f, error_message);
581  *invalidate_flag = true;
582  }
583  }
584  if (ImGui::IsItemHovered() && desc)
585  {
586  ImGui::SetTooltip("%s", desc);
587  }
588  }
589  else
590  {
591  if (!is_enum())
592  {
593  std::string txt = to_string() << endpoint->get_option_name(opt) << ":";
594  ImGui::Text("%s", txt.c_str());
595 
596  ImGui::SameLine();
597  ImGui::SetCursorPosX(read_only ? 268.f : 245.f);
600  ImGui::PushStyleColor(ImGuiCol_ButtonActive, { 1.f,1.f,1.f,0.f });
601  ImGui::PushStyleColor(ImGuiCol_ButtonHovered, { 1.f,1.f,1.f,0.f });
602  ImGui::PushStyleColor(ImGuiCol_Button, { 1.f,1.f,1.f,0.f });
605  if (ImGui::IsItemHovered() && desc)
606  {
607  ImGui::SetTooltip("%s", desc);
608  }
609 
610  if (!read_only)
611  {
612  ImGui::SameLine();
614  if (!edit_mode)
615  {
616  std::string edit_id = to_string() << textual_icons::edit << "##" << id;
619  ImGui::PushStyleColor(ImGuiCol_ButtonHovered, { 1.f,1.f,1.f,0.f });
620  ImGui::PushStyleColor(ImGuiCol_Button, { 1.f,1.f,1.f,0.f });
621  if (ImGui::Button(edit_id.c_str(), { 20, 20 }))
622  {
623  if (is_all_integers())
624  edit_value = to_string() << (int)value;
625  else
626  edit_value = to_string() << value;
627  edit_mode = true;
628  }
629  if (ImGui::IsItemHovered())
630  {
631  ImGui::SetTooltip("Enter text-edit mode");
632  }
634  }
635  else
636  {
637  std::string edit_id = to_string() << textual_icons::edit << "##" << id;
640  ImGui::PushStyleColor(ImGuiCol_ButtonHovered, { 1.f,1.f,1.f,0.f });
641  ImGui::PushStyleColor(ImGuiCol_Button, { 1.f,1.f,1.f,0.f });
642  if (ImGui::Button(edit_id.c_str(), { 20, 20 }))
643  {
644  edit_mode = false;
645  }
646  if (ImGui::IsItemHovered())
647  {
648  ImGui::SetTooltip("Exit text-edit mode");
649  }
651  }
652  }
653 
655 
656  try
657  {
658  if (read_only)
659  {
660  ImVec2 vec{ 0, 20 };
661  std::string text = (value == (int)value) ? std::to_string((int)value) : std::to_string(value);
662  if (range.min != range.max)
663  {
664  ImGui::ProgressBar((value / (range.max - range.min)), vec, text.c_str());
665  }
666  else //constant value options
667  {
671  float dummy = std::floor(value);
672  ImGui::DragFloat(id.c_str(), &dummy, 1, 0, 0, text.c_str());
674  }
675  }
676  else if (edit_mode)
677  {
678  char buff[TEXT_BUFF_SIZE];
679  memset(buff, 0, TEXT_BUFF_SIZE);
680  strcpy(buff, edit_value.c_str());
681  if (ImGui::InputText(id.c_str(), buff, TEXT_BUFF_SIZE,
682  ImGuiInputTextFlags_EnterReturnsTrue))
683  {
684  float new_value;
685  if (!string_to_int(buff, new_value))
686  {
687  error_message = "Invalid numeric input!";
688  }
689  else if (new_value < range.min || new_value > range.max)
690  {
691  error_message = to_string() << new_value
692  << " is out of bounds [" << range.min << ", "
693  << range.max << "]";
694  }
695  else
696  {
697  set_option(opt, new_value, error_message);
698  }
699 
700  edit_mode = false;
701  }
702  edit_value = buff;
703  }
704  else if (is_all_integers())
705  {
706  auto int_value = static_cast<int>(value);
707 
708  if (ImGui::SliderIntWithSteps(id.c_str(), &int_value,
709  static_cast<int>(range.min),
710  static_cast<int>(range.max),
711  static_cast<int>(range.step)))
712  {
713  // TODO: Round to step?
714  model.add_log(to_string() << "Setting " << opt << " to " << int_value);
715  set_option(opt, static_cast<float>(int_value), error_message);
716  *invalidate_flag = true;
717  res = true;
718  }
719  }
720  else
721  {
722  float tmp_value = value;
723  if (ImGui::SliderFloat(id.c_str(), &tmp_value,
724  range.min, range.max, "%.4f"))
725  {
726  auto loffset = std::abs(fmod(tmp_value, range.step));
727  auto roffset = range.step - loffset;
728  if (tmp_value >= 0)
729  tmp_value = (loffset < roffset) ? tmp_value - loffset : tmp_value + roffset;
730  else
731  tmp_value = (loffset < roffset) ? tmp_value + loffset : tmp_value - roffset;
732  tmp_value = (tmp_value < range.min) ? range.min : tmp_value;
733  tmp_value = (tmp_value > range.max) ? range.max : tmp_value;
734  model.add_log(to_string() << "Setting " << opt << " to " << tmp_value);
735  set_option(opt, tmp_value, error_message);
736  *invalidate_flag = true;
737  res = true;
738  }
739  }
740  }
741  catch (const error& e)
742  {
743  error_message = error_to_string(e);
744  }
745  }
746  else
747  {
748  std::string txt = to_string() << (use_option_name ? endpoint->get_option_name(opt) : desc) << ":";
749 
750  auto pos_x = ImGui::GetCursorPosX();
751 
752  ImGui::Text("%s", txt.c_str());
753  if (ImGui::IsItemHovered() && desc)
754  {
755  ImGui::SetTooltip("%s", desc);
756  }
757 
758  ImGui::SameLine();
759  if (new_line)
760  ImGui::SetCursorPosX(pos_x + 120);
761 
762  ImGui::PushItemWidth(new_line ? -1.f : 100.f);
763 
764  std::vector<const char*> labels;
765  auto selected = 0, counter = 0;
766  for (auto i = range.min; i <= range.max; i += range.step, counter++)
767  {
768  if (std::fabs(i - value) < 0.001f) selected = counter;
769  labels.push_back(endpoint->get_option_value_description(opt, i));
770  }
772 
773  try
774  {
775  int tmp_selected = selected;
776  float tmp_value = value;
777  ImGui::PushItemWidth(135.f);
778  if (ImGui::Combo(id.c_str(), &tmp_selected, labels.data(),
779  static_cast<int>(labels.size())))
780  {
781  tmp_value = range.min + range.step * tmp_selected;
782  model.add_log(to_string() << "Setting " << opt << " to "
783  << tmp_value << " (" << labels[tmp_selected] << ")");
784  set_option(opt, tmp_value, error_message);
785  selected = tmp_selected;
786  if (invalidate_flag) *invalidate_flag = true;
787  res = true;
788  }
789  }
790  catch (const error& e)
791  {
792  error_message = error_to_string(e);
793  }
794 
796 
798  }
799 
800 
801  }
802 
803  if (!read_only && opt == RS2_OPTION_ENABLE_AUTO_EXPOSURE && dev->auto_exposure_enabled && dev->s->is<roi_sensor>() && dev->streaming)
804  {
805  ImGui::SameLine(0, 10);
806  std::string button_label = label;
807  auto index = label.find_last_of('#');
808  if (index != std::string::npos)
809  {
810  button_label = label.substr(index + 1);
811  }
812 
813  ImGui::PushStyleColor(ImGuiCol_TextSelectedBg, { 1.f,1.f,1.f,1.f });
814  if (!dev->roi_checked)
815  {
816  std::string caption = to_string() << "Set ROI##" << button_label;
817  if (ImGui::Button(caption.c_str(), { 55, 0 }))
818  {
819  dev->roi_checked = true;
820  }
821  }
822  else
823  {
824  std::string caption = to_string() << "Cancel##" << button_label;
825  if (ImGui::Button(caption.c_str(), { 55, 0 }))
826  {
827  dev->roi_checked = false;
828  }
829  }
831 
832  if (ImGui::IsItemHovered())
833  ImGui::SetTooltip("Select custom region of interest for the auto-exposure algorithm\nClick the button, then draw a rect on the frame");
834  }
835  }
836 
837  return res;
838  }
839 
840  void option_model::update_supported(std::string& error_message)
841  {
842  try
843  {
844  supported = endpoint->supports(opt);
845  }
846  catch (const error& e)
847  {
848  error_message = error_to_string(e);
849  }
850  }
851 
852  void option_model::update_read_only_status(std::string& error_message)
853  {
854  try
855  {
856  read_only = endpoint->is_option_read_only(opt);
857  }
858  catch (const error& e)
859  {
860  error_message = error_to_string(e);
861  }
862  }
863 
864  void option_model::update_all_fields(std::string& error_message, notifications_model& model)
865  {
866  try
867  {
868  if ((supported = endpoint->supports(opt)))
869  {
870  value = endpoint->get_option(opt);
871  range = endpoint->get_option_range(opt);
872  read_only = endpoint->is_option_read_only(opt);
873  }
874  }
875  catch (const error& e)
876  {
877  if (read_only) {
878  model.add_notification({ to_string() << "Could not refresh read-only option " << endpoint->get_option_name(opt) << ": " << e.what(),
881  }
882  else
883  error_message = error_to_string(e);
884  }
885  }
886 
887  bool option_model::is_all_integers() const
888  {
889  return is_integer(range.min) && is_integer(range.max) &&
890  is_integer(range.def) && is_integer(range.step);
891  }
892 
893  bool option_model::is_enum() const
894  {
895  if (range.step < 0.001f) return false;
896 
897  for (auto i = range.min; i <= range.max; i += range.step)
898  {
899  if (endpoint->get_option_value_description(opt, i) == nullptr)
900  return false;
901  }
902  return true;
903  }
904 
905  bool option_model::is_checkbox() const
906  {
907  return range.max == 1.0f &&
908  range.min == 0.0f &&
909  range.step == 1.0f;
910  }
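// Editorial note: an illustrative summary (not part of the original file) of how an option's
// range picks its widget in draw() above: {min 0, max 1, step 1} renders as a checkbox; a
// stepped range where every value has a textual description renders as an enum combo-box; an
// all-integer range renders as an integer slider; anything else falls back to a float slider.
//
//     rs2::option_range r{ 0.f, 1.f, 1.f, 0.f };  // min, max, step, default
//     // is_checkbox() would return true for this range, so draw() emits ImGui::Checkbox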
911 
912  bool option_model::allow_change(float val, std::string& error_message) const
913  {
914  // Place option restrictions here
915  return true;
916  }
917 
918  void subdevice_model::populate_options(std::map<int, option_model>& opt_container,
919  const std::string& opt_base_label,
920  subdevice_model* model,
921  std::shared_ptr<options> options,
922  bool* options_invalidated,
923  std::string& error_message)
924  {
925  for (auto&& i : options->get_supported_options())
926  {
927  auto opt = static_cast<rs2_option>(i);
928 
929  opt_container[opt] = create_option_model(opt, opt_base_label, model, options, options_invalidated, error_message);
930  }
931  }
932 
934  {
935  std::stringstream ss;
937  << "." << sub->dev.get_info(RS2_CAMERA_INFO_NAME)
938  << "." << sub->s->get_info(RS2_CAMERA_INFO_NAME);
939  return ss.str();
940  }
941 
942  processing_block_model::processing_block_model(
943  subdevice_model* owner,
944  const std::string& name,
945  std::shared_ptr<rs2::filter> block,
946  std::function<rs2::frame(rs2::frame)> invoker,
947  std::string& error_message, bool enable)
948  : _owner(owner), _name(name), _block(block), _invoker(invoker), _enabled(enable)
949  {
950  std::stringstream ss;
951  ss << "##" << ((owner) ? owner->dev.get_info(RS2_CAMERA_INFO_NAME) : _name)
952  << "/" << ((owner) ? (*owner->s).get_info(RS2_CAMERA_INFO_NAME) : "_")
953  << "/" << (long long)this;
954 
955  if (_owner)
957  else
958  _full_name = _name;
959 
960  _enabled = restore_processing_block(_full_name.c_str(),
961  block, _enabled);
962 
963  populate_options(ss.str().c_str(), owner, owner ? &owner->_options_invalidated : nullptr, error_message);
964  }
965 
966  void processing_block_model::save_to_config_file()
967  {
969  }
970 
971  option_model& processing_block_model::get_option(rs2_option opt)
972  {
973  if (options_metadata.find(opt) != options_metadata.end())
974  return options_metadata[opt];
975 
976  std::string error_message;
977  options_metadata[opt] = create_option_model(opt, get_name(), _owner, _block, _owner ? &_owner->_options_invalidated : nullptr, error_message);
978  return options_metadata[opt];
979  }
980 
981  void processing_block_model::populate_options(const std::string& opt_base_label,
982  subdevice_model* model,
983  bool* options_invalidated,
984  std::string& error_message)
985  {
986  for (auto opt : _block->get_supported_options())
987  {
988  options_metadata[opt] = create_option_model(opt, opt_base_label, model, _block, model ? &model->_options_invalidated : nullptr, error_message);
989  }
990  }
991 
992  subdevice_model::subdevice_model(
993  device& dev,
994  std::shared_ptr<sensor> s,
995  std::shared_ptr< atomic_objects_in_frame > device_detected_objects,
996  std::string& error_message,
997  viewer_model& viewer,
998  bool new_device_connected
999  )
1000  : s(s), dev(dev), tm2(), ui(), last_valid_ui(),
1001  streaming(false), _pause(false),
1002  depth_colorizer(std::make_shared<rs2::gl::colorizer>()),
1003  yuy2rgb(std::make_shared<rs2::gl::yuy_decoder>()),
1004  depth_decoder(std::make_shared<rs2::depth_huffman_decoder>()),
1005  viewer(viewer),
1006  detected_objects(device_detected_objects)
1007  {
1008  supported_options = s->get_supported_options();
1010  restore_processing_block("yuy2rgb", yuy2rgb);
1011 
1012  std::string device_name(dev.get_info(RS2_CAMERA_INFO_NAME));
1013  std::string sensor_name(s->get_info(RS2_CAMERA_INFO_NAME));
1014 
1015  std::stringstream ss;
1017  << "." << device_name
1018  << "." << sensor_name;
1019  auto key = ss.str();
1020 
1021  bool const is_rgb_camera = s->is< color_sensor >();
1022 
1023  if (config_file::instance().contains(key.c_str()))
1024  {
1026  }
1027 
1028  try
1029  {
1030  if (s->supports(RS2_OPTION_ENABLE_AUTO_EXPOSURE))
1031  auto_exposure_enabled = s->get_option(RS2_OPTION_ENABLE_AUTO_EXPOSURE) > 0;
1032  }
1033  catch (...)
1034  {
1035 
1036  }
1037 
1038  try
1039  {
1040  if (s->supports(RS2_OPTION_DEPTH_UNITS))
1041  depth_units = s->get_option(RS2_OPTION_DEPTH_UNITS);
1042  }
1043  catch (...) {}
1044 
1045  try
1046  {
1047  if (s->supports(RS2_OPTION_STEREO_BASELINE))
1048  stereo_baseline = s->get_option(RS2_OPTION_STEREO_BASELINE);
1049  }
1050  catch (...) {}
1051 
1052  auto filters = s->get_recommended_filters();
1053 
1054  auto it = std::find_if(filters.begin(), filters.end(), [&](const filter &f)
1055  {
1056  if (f.is<zero_order_invalidation>())
1057  return true;
1058  return false;
1059  });
1060 
1061  auto is_zo = it != filters.end();
1062 
1063  for (auto&& f : s->get_recommended_filters())
1064  {
1065  auto shared_filter = std::make_shared<filter>(f);
1066  auto model = std::make_shared<processing_block_model>(
1067  this, shared_filter->get_info(RS2_CAMERA_INFO_NAME), shared_filter,
1068  [=](rs2::frame f) { return shared_filter->process(f); }, error_message);
1069 
1070  if (shared_filter->is<depth_huffman_decoder>())
1071  model->visible = false;
1072 
1073  if (is_zo)
1074  {
1075  if (shared_filter->is<zero_order_invalidation>())
1076  {
1078  viewer.zo_sensors++;
1079  }
1080  else
1081  model->enable(false);
1082  }
1083 
1084  if (shared_filter->is<hole_filling_filter>())
1085  model->enable(false);
1086 
1087  if (shared_filter->is<sequence_id_filter>())
1088  model->enable(false);
1089 
1090  if (shared_filter->is<decimation_filter>())
1091  {
1092  if (is_rgb_camera)
1093  model->enable(false);
1094  }
1095 
1096  if (shared_filter->is<threshold_filter>())
1097  {
1098  if (s->supports(RS2_CAMERA_INFO_PRODUCT_ID))
1099  {
1100  // using short range for D405
1101  std::string device_pid = s->get_info(RS2_CAMERA_INFO_PRODUCT_ID);
1102  if (device_pid == "0B5B")
1103  {
1104  std::string error_msg;
1105  auto threshold_pb = shared_filter->as<threshold_filter>();
1106  threshold_pb.set_option(RS2_OPTION_MIN_DISTANCE, SHORT_RANGE_MIN_DISTANCE);
1107  threshold_pb.set_option(RS2_OPTION_MAX_DISTANCE, SHORT_RANGE_MAX_DISTANCE);
1108  }
1109  }
1110  }
1111 
1112  if (shared_filter->is<hdr_merge>())
1113  {
1114  // processing block will be skipped if the requested option is not supported
1115  auto supported_options = s->get_supported_options();
1117  continue;
1118  }
1119 
1120  post_processing.push_back(model);
1121  }
1122 
1123  if (is_rgb_camera)
1124  {
1125  for (auto & create_filter : post_processing_filters_list::get())
1126  {
1127  auto filter = create_filter();
1128  if (!filter)
1129  continue;
1130  filter->start(*this);
1131  std::shared_ptr< processing_block_model > model(
1133  this, filter,
1134  [=](rs2::frame f) { return filter->process(f); },
1135  error_message
1136  });
1137  post_processing.push_back(model);
1138  }
1139  }
1140 
1141  auto colorizer = std::make_shared<processing_block_model>(
1142  this, "Depth Visualization", depth_colorizer,
1143  [=](rs2::frame f) { return depth_colorizer->colorize(f); }, error_message);
1144  const_effects.push_back(colorizer);
1145 
1146 
1147  if (s->supports(RS2_CAMERA_INFO_PRODUCT_ID))
1148  {
1149  std::string device_pid = s->get_info(RS2_CAMERA_INFO_PRODUCT_ID);
1150 
1151  // using short range for D405
1152  if (device_pid == "0B5B")
1153  {
1154  std::string error_msg;
1155  depth_colorizer->set_option(RS2_OPTION_MIN_DISTANCE, SHORT_RANGE_MIN_DISTANCE);
1156  depth_colorizer->set_option(RS2_OPTION_MAX_DISTANCE, SHORT_RANGE_MAX_DISTANCE);
1157  }
1158  }
1159 
1160  ss.str("");
1161  ss << "##" << dev.get_info(RS2_CAMERA_INFO_NAME)
1162  << "/" << s->get_info(RS2_CAMERA_INFO_NAME)
1163  << "/" << (long long)this;
1164 
1166  {
1168  std::string id = s->get_info(RS2_CAMERA_INFO_PHYSICAL_PORT);
1169 
1170  bool has_metadata = !rs2::metadata_helper::instance().can_support_metadata(product)
1172  static bool showed_metadata_prompt = false;
1173 
1174  if (!has_metadata && !showed_metadata_prompt)
1175  {
1176  auto n = std::make_shared<metadata_warning_model>();
1177  viewer.not_model->add_notification(n);
1178  showed_metadata_prompt = true;
1179  }
1180  }
1181 
1182  try
1183  {
1184  auto sensor_profiles = s->get_stream_profiles();
1185  reverse(begin(sensor_profiles), end(sensor_profiles));
1186  std::map<int, rs2_format> def_format{ {0, RS2_FORMAT_ANY} };
1187  auto default_resolution = std::make_pair(1280, 720);
1188  auto default_fps = 30;
1189  for (auto&& profile : sensor_profiles)
1190  {
1191  std::stringstream res;
1192  if (auto vid_prof = profile.as<video_stream_profile>())
1193  {
1194  if (profile.is_default())
1195  {
1196  default_resolution = std::pair<int, int>(vid_prof.width(), vid_prof.height());
1197  default_fps = profile.fps();
1198  }
1199  res << vid_prof.width() << " x " << vid_prof.height();
1200  push_back_if_not_exists(res_values, std::pair<int, int>(vid_prof.width(), vid_prof.height()));
1202  }
1203 
1204  std::stringstream fps;
1205  fps << profile.fps();
1208  push_back_if_not_exists(fpses_per_stream[profile.unique_id()], fps.str());
1210  stream_display_names[profile.unique_id()] = profile.stream_name();
1211 
1213 
1216 
1217  if (profile.is_default())
1218  {
1219  stream_enabled[profile.unique_id()] = true;
1220  def_format[profile.unique_id()] = profile.format();
1221  }
1222 
1223  profiles.push_back(profile);
1224  }
1225  auto any_stream_enabled = std::any_of(std::begin(stream_enabled), std::end(stream_enabled), [](const std::pair<int, bool>& p) { return p.second; });
1226  if (!any_stream_enabled)
1227  {
1228  if (sensor_profiles.size() > 0)
1229  stream_enabled[sensor_profiles.rbegin()->unique_id()] = true;
1230  }
1231 
1232  for (auto&& fps_list : fps_values_per_stream)
1233  {
1234  sort_together(fps_list.second, fpses_per_stream[fps_list.first]);
1235  }
1238 
1240 
1241  int selection_index{};
1242 
1243  if (!show_single_fps_list)
1244  {
1245  for (auto fps_array : fps_values_per_stream)
1246  {
1247  if (get_default_selection_index(fps_array.second, default_fps, &selection_index))
1248  {
1249  ui.selected_fps_id[fps_array.first] = selection_index;
1250  break;
1251  }
1252  }
1253  }
1254  else
1255  {
1256  if (get_default_selection_index(shared_fps_values, default_fps, &selection_index))
1257  ui.selected_shared_fps_id = selection_index;
1258  }
1259 
1260  for (auto format_array : format_values)
1261  {
1262  if (get_default_selection_index(format_array.second, def_format[format_array.first], &selection_index))
1263  {
1264  ui.selected_format_id[format_array.first] = selection_index;
1265  }
1266  }
1267 
1268  get_default_selection_index(res_values, default_resolution, &selection_index);
1269  ui.selected_res_id = selection_index;
1270 
1271  if (new_device_connected)
1272  {
1273  // Have the various preset options automatically update based on the resolution of the
1274  // (closed) stream...
1275  // TODO we have no res_values when loading color rosbag, and color sensor isn't
1276  // even supposed to support SENSOR_MODE... see RS5-7726
1277  if( s->supports( RS2_OPTION_SENSOR_MODE ) && !res_values.empty() )
1278  {
1279  // Watch out for read-only options in the playback sensor!
1280  try
1281  {
1282  s->set_option( RS2_OPTION_SENSOR_MODE,
1283  static_cast< float >( resolution_from_width_height(
1284  res_values[ui.selected_res_id].first,
1285  res_values[ui.selected_res_id].second ) ) );
1286  }
1287  catch( not_implemented_error const &)
1288  {
1289  // Just ignore for now: need to figure out a way to write to playback sensors...
1290  }
1291  }
1292  }
1293 
1295  last_valid_ui = ui;
1296  }
1297  catch (const error& e)
1298  {
1299  error_message = error_to_string(e);
1300  }
1301  populate_options(options_metadata, ss.str().c_str(), this, s, &_options_invalidated, error_message);
1302 
1303  }
1304 
1305  subdevice_model::~subdevice_model()
1306  {
1307  if (zero_order_artifact_fix)
1308  viewer.zo_sensors--;
1309  }
1310 
1312  {
1313  std::vector<int> first_fps_group;
1314  size_t group_index = 0;
1315  for (; group_index < fps_values_per_stream.size(); ++group_index)
1316  {
1317  if (!fps_values_per_stream[(rs2_stream)group_index].empty())
1318  {
1319  first_fps_group = fps_values_per_stream[(rs2_stream)group_index];
1320  break;
1321  }
1322  }
1323 
1324  for (size_t i = group_index + 1; i < fps_values_per_stream.size(); ++i)
1325  {
1326  auto fps_group = fps_values_per_stream[(rs2_stream)i];
1327  if (fps_group.empty())
1328  continue;
1329 
1330  for (auto& fps1 : first_fps_group)
1331  {
1332  auto it = std::find_if(std::begin(fps_group),
1333  std::end(fps_group),
1334  [&](const int& fps2)
1335  {
1336  return fps2 == fps1;
1337  });
1338  if (it != std::end(fps_group))
1339  {
1340  break;
1341  }
1342  return false;
1343  }
1344  }
1345  return true;
1346  }
1347 
1349  {
1350  bool res = false;
1351 
1352  std::string label = to_string() << "Stream Selection Columns##" << dev.get_info(RS2_CAMERA_INFO_NAME)
1353  << s->get_info(RS2_CAMERA_INFO_NAME);
1354 
1355  auto streaming_tooltip = [&]() {
1356  if( streaming
1357  && ImGui::IsItemHovered() )
1358  ImGui::SetTooltip( "Can't modify while streaming" );
1359  };
1360 
1361  //ImGui::Columns(2, label.c_str(), false);
1362  //ImGui::SetColumnOffset(1, 135);
1363  auto col0 = ImGui::GetCursorPosX();
1364  auto col1 = 155.f;
1365 
1366  // Draw combo-box with all resolution options for this device
1367  auto res_chars = get_string_pointers(resolutions);
1368  if (res_chars.size() > 0)
1369  {
1370  ImGui::Text("Resolution:");
1371  streaming_tooltip();
1373 
1374  label = to_string() << "##" << dev.get_info(RS2_CAMERA_INFO_NAME)
1375  << s->get_info(RS2_CAMERA_INFO_NAME) << " resolution";
1376 
1377  if (streaming)
1378  {
1379  ImGui::Text("%s", res_chars[ui.selected_res_id]);
1380  streaming_tooltip();
1381  }
1382  else
1383  {
1386  auto tmp_selected_res_id = ui.selected_res_id;
1387  if (ImGui::Combo(label.c_str(), &tmp_selected_res_id, res_chars.data(),
1388  static_cast<int>(res_chars.size())))
1389  {
1390  res = true;
1391  _options_invalidated = true;
1392 
1393  if (s->supports(RS2_OPTION_SENSOR_MODE))
1394  {
1395  auto width = res_values[tmp_selected_res_id].first;
1396  auto height = res_values[tmp_selected_res_id].second;
1397  auto res = resolution_from_width_height(width, height);
1398  if (res >= RS2_SENSOR_MODE_VGA && res < RS2_SENSOR_MODE_COUNT)
1399  {
1400  try
1401  {
1402  s->set_option(RS2_OPTION_SENSOR_MODE, float(res));
1403  }
1404  catch (const error& e)
1405  {
1406  error_message = error_to_string(e);
1407  }
1408 
1409  // Only update the cached value once set_option is done! That way, if it doesn't change anything...
1410  try
1411  {
1412  int sensor_mode_val = static_cast<int>(s->get_option( RS2_OPTION_SENSOR_MODE ));
1413  {
1414  ui.selected_res_id = get_resolution_id_from_sensor_mode(
1415  static_cast< rs2_sensor_mode >( sensor_mode_val ),
1416  *s,
1417  res_values );
1418  }
1419  }
1420  catch (...) {}
1421  }
1422  else
1423  {
1424  error_message = to_string() << "Resolution " << width << "x" << height
1425  << " is not supported on this device";
1426  }
1427  }
1428  else
1429  {
1430  ui.selected_res_id = tmp_selected_res_id;
1431  }
1432  }
1435  }
1436  ImGui::SetCursorPosX(col0);
1437  }
1438 
1439  if (draw_fps_selector)
1440  {
1441  // FPS
1443  {
1444  auto fps_chars = get_string_pointers(shared_fpses);
1445  ImGui::Text("Frame Rate (FPS):");
1446  streaming_tooltip();
1448 
1449  label = to_string() << "##" << dev.get_info(RS2_CAMERA_INFO_NAME)
1450  << s->get_info(RS2_CAMERA_INFO_NAME) << " fps";
1451 
1452  if (streaming)
1453  {
1454  ImGui::Text("%s", fps_chars[ui.selected_shared_fps_id]);
1455  streaming_tooltip();
1456  }
1457  else
1458  {
1461  if (ImGui::Combo(label.c_str(), &ui.selected_shared_fps_id, fps_chars.data(),
1462  static_cast<int>(fps_chars.size())))
1463  {
1464  res = true;
1465  }
1468  }
1469 
1470  ImGui::SetCursorPosX(col0);
1471  }
1472  }
1473 
1475  {
1476  if (!streaming)
1477  {
1478  ImGui::Text("Available Streams:");
1479  }
1480 
1481  // Draw combo-box with all format options for current device
1482  for (auto&& f : formats)
1483  {
1484  // Format
1485  if (f.second.size() == 0)
1486  continue;
1487 
1488  auto formats_chars = get_string_pointers(f.second);
1489  if (!streaming || (streaming && stream_enabled[f.first]))
1490  {
1491  if (streaming)
1492  {
1493  label = to_string() << stream_display_names[f.first] << (show_single_fps_list ? "" : " stream:");
1494  ImGui::Text("%s", label.c_str());
1495  streaming_tooltip();
1496  }
1497  else
1498  {
1499  auto tmp = stream_enabled;
1500  label = to_string() << stream_display_names[f.first] << "##" << f.first;
1501  if (ImGui::Checkbox(label.c_str(), &stream_enabled[f.first]))
1502  {
1504  }
1505  }
1506  }
1507 
1508  if (stream_enabled[f.first])
1509  {
1511  {
1512  ImGui::SameLine();
1513  ImGui::SetCursorPosX(col1);
1514  }
1515 
1516  label = to_string() << "##" << dev.get_info(RS2_CAMERA_INFO_NAME)
1517  << s->get_info(RS2_CAMERA_INFO_NAME)
1518  << " " << f.first << " format";
1519 
1520  if (!show_single_fps_list)
1521  {
1522  ImGui::Text("Format:");
1523  streaming_tooltip();
1525  }
1526 
1527  if (streaming)
1528  {
1529  ImGui::Text("%s", formats_chars[ui.selected_format_id[f.first]]);
1530  streaming_tooltip();
1531  }
1532  else
1533  {
1536  ImGui::Combo(label.c_str(), &ui.selected_format_id[f.first], formats_chars.data(),
1537  static_cast<int>(formats_chars.size()));
1540  }
1541  ImGui::SetCursorPosX(col0);
1542  // FPS
1543  // Draw combo-box with all FPS options for this device
1544  if (!show_single_fps_list && !fpses_per_stream[f.first].empty() && stream_enabled[f.first])
1545  {
1546  auto fps_chars = get_string_pointers(fpses_per_stream[f.first]);
1547  ImGui::Text("Frame Rate (FPS):");
1548  streaming_tooltip();
1550 
1551  label = to_string() << "##" << s->get_info(RS2_CAMERA_INFO_NAME)
1552  << s->get_info(RS2_CAMERA_INFO_NAME)
1553  << f.first << " fps";
1554 
1555  if (streaming)
1556  {
1557  ImGui::Text("%s", fps_chars[ui.selected_fps_id[f.first]]);
1558  streaming_tooltip();
1559  }
1560  else
1561  {
1564  ImGui::Combo(label.c_str(), &ui.selected_fps_id[f.first], fps_chars.data(),
1565  static_cast<int>(fps_chars.size()));
1568  }
1569  ImGui::SetCursorPosX(col0);
1570  }
1571  }
1572  else
1573  {
1574  //ImGui::NextColumn();
1575  }
1576  }
1577  }
1578 
1580  return res;
1581  }
1582 
1584  {
1585  std::vector<stream_profile> results;
1586 
1587  for (auto&& f : formats)
1588  {
1589  auto stream = f.first;
1590  if (stream_enabled[stream])
1591  {
1592  auto fps = 0;
1595  else
1597 
1599 
1600  for (auto&& p : profiles)
1601  {
1602  if (auto vid_prof = p.as<video_stream_profile>())
1603  {
1604  if (res_values.size() > 0)
1605  {
1606  auto width = res_values[ui.selected_res_id].first;
1607  auto height = res_values[ui.selected_res_id].second;
1608 
1609  if (vid_prof.width() == width &&
1610  vid_prof.height() == height &&
1611  p.unique_id() == stream &&
1612  p.fps() == fps &&
1613  p.format() == format)
1614  results.push_back(p);
1615  }
1616  }
1617  else
1618  {
1619  if (p.fps() == fps &&
1620  p.unique_id() == stream &&
1621  p.format() == format)
1622  results.push_back(p);
1623  }
1624  }
1625  }
1626  }
1627 
1628  if (results.size() == 0)
1629  return false;
1630  // Verify that the number of found matches corresponds to the number of requested streams
1631  // TODO - review whether the comparison can be made strict (==)
1632  return results.size() >= size_t(std::count_if(stream_enabled.begin(), stream_enabled.end(), [](const std::pair<int, bool>& kpv)-> bool { return kpv.second == true; }));
1633  }
1634 
1635  void subdevice_model::update_ui(std::vector<stream_profile> profiles_vec)
1636  {
1637  if (profiles_vec.empty())
1638  return;
1639  for (auto& s : stream_enabled)
1640  s.second = false;
1641  for (auto& p : profiles_vec)
1642  {
1643  stream_enabled[p.unique_id()] = true;
1644 
1645  auto format_vec = format_values[p.unique_id()];
1646  for (int i = 0; i < format_vec.size(); i++)
1647  {
1648  if (format_vec[i] == p.format())
1649  {
1650  ui.selected_format_id[p.unique_id()] = i;
1651  break;
1652  }
1653  }
1654  for (int i = 0; i < res_values.size(); i++)
1655  {
1656  if (auto vid_prof = p.as<video_stream_profile>())
1657  if (res_values[i].first == vid_prof.width() && res_values[i].second == vid_prof.height())
1658  {
1659  ui.selected_res_id = i;
1660  break;
1661  }
1662  }
1663  for (int i = 0; i < shared_fps_values.size(); i++)
1664  {
1665  if (shared_fps_values[i] == p.fps())
1666  {
1667  ui.selected_shared_fps_id = i;
1668  break;
1669  }
1670  }
1671  }
1672  last_valid_ui = ui;
1673  prev_stream_enabled = stream_enabled; // prev differs from curr only after user changes
1674  }
1675 
1676  template<typename T, typename V>
1677  bool subdevice_model::check_profile(stream_profile p, T cond, std::map<V, std::map<int, stream_profile>>& profiles_map,
1678  std::vector<stream_profile>& results, V key, int num_streams, stream_profile& def_p)
1679  {
1680  bool found = false;
1681  if (auto vid_prof = p.as<video_stream_profile>())
1682  {
1683  for (auto& s : stream_enabled)
1684  {
1685  // find profiles that have an enabled stream and match the required condition
1686  if (s.second == true && vid_prof.unique_id() == s.first && cond(vid_prof))
1687  {
1688  profiles_map[key].insert(std::pair<int, stream_profile>(p.unique_id(), p));
1689  if (profiles_map[key].size() == num_streams)
1690  {
1691  results.clear(); // make sure only current profiles are saved
1692  for (auto& it : profiles_map[key])
1693  results.push_back(it.second);
1694  found = true;
1695  }
1696  else if (results.empty() && num_streams > 1 && profiles_map[key].size() == num_streams - 1)
1697  {
1698  for (auto& it : profiles_map[key])
1699  results.push_back(it.second);
1700  }
1701  }
1702  else if (!def_p.get() && cond(vid_prof))
1703  def_p = p; // in case no matching profile for current stream will be found, we'll use some profile that matches the condition
1704  }
1705  }
1706  return found;
1707  }
1708 
1709 
1710  void subdevice_model::get_sorted_profiles(std::vector<stream_profile>& profiles)
1711  {
1713  auto width = res_values[ui.selected_res_id].first;
1714  auto height = res_values[ui.selected_res_id].second;
1715  std::sort(profiles.begin(), profiles.end(), [&](stream_profile a, stream_profile b) {
1716  int score_a = 0, score_b = 0;
1717  if (a.fps() != fps)
1718  score_a++;
1719  if (b.fps() != fps)
1720  score_b++;
1721 
1722  if (a.format() != format_values[a.unique_id()][ui.selected_format_id[a.unique_id()]])
1723  score_a++;
1724  if (b.format() != format_values[b.unique_id()][ui.selected_format_id[b.unique_id()]])
1725  score_b++;
1726 
1727  auto a_vp = a.as<video_stream_profile>();
1728  auto b_vp = b.as<video_stream_profile>();
1729  if (!a_vp || !b_vp)
1730  return score_a < score_b;
1731  if (a_vp.width() != width || a_vp.height() != height)
1732  score_a++;
1733  if (b_vp.width() != width || b_vp.height() != height)
1734  score_b++;
1735  return score_a < score_b;
1736  });
1737  }
1738 
1739  std::vector<stream_profile> subdevice_model::get_supported_profiles()
1740  {
1741  std::vector<stream_profile> results;
1742  if (!show_single_fps_list || res_values.size() == 0)
1743  return results;
1744 
1745  int num_streams = 0;
1746  for (auto& s : stream_enabled)
1747  if (s.second == true)
1748  num_streams++;
1749  stream_profile def_p;
1751  auto width = res_values[ui.selected_res_id].first;
1752  auto height = res_values[ui.selected_res_id].second;
1753  std::vector<stream_profile> sorted_profiles = profiles;
1754 
1755  if (ui.selected_shared_fps_id != last_valid_ui.selected_shared_fps_id)
1756  {
1757  get_sorted_profiles(sorted_profiles);
1758  std::map<int, std::map<int, stream_profile>> profiles_by_fps;
1759  for (auto&& p : sorted_profiles)
1760  {
1761  if (check_profile(p, [&](video_stream_profile vsp)
1762  { return (vsp.width() == width && vsp.height() == height); },
1763  profiles_by_fps, results, p.fps(), num_streams, def_p))
1764  break;
1765  }
1766  }
1767  else if (ui.selected_res_id != last_valid_ui.selected_res_id)
1768  {
1769  get_sorted_profiles(sorted_profiles);
1770  std::map<std::tuple<int, int>, std::map<int, stream_profile>> profiles_by_res;
1771 
1772  for (auto&& p : sorted_profiles)
1773  {
1774  if (auto vid_prof = p.as<video_stream_profile>())
1775  {
1776  if (check_profile(p, [&](video_stream_profile vsp) { return (vsp.fps() == fps); },
1777  profiles_by_res, results, std::make_tuple(vid_prof.width(), vid_prof.height()), num_streams, def_p))
1778  break;
1779  }
1780  }
1781  }
1782  else if (ui.selected_format_id != last_valid_ui.selected_format_id)
1783  {
1784  if (num_streams == 0)
1785  {
1786  last_valid_ui = ui;
1787  return results;
1788  }
1789  get_sorted_profiles(sorted_profiles);
1790  std::vector<stream_profile> matching_profiles;
1791  std::map<std::tuple<int, int, int>, std::map<int, stream_profile>> profiles_by_fps_res; //fps, width, height
1793  int stream_id;
1794  // find the stream to which the user made changes
1795  for (auto& it : ui.selected_format_id)
1796  {
1797  if (stream_enabled[it.first])
1798  {
1799  auto last_valid_it = last_valid_ui.selected_format_id.find(it.first);
1800  if ((last_valid_it == last_valid_ui.selected_format_id.end() || it.second != last_valid_it->second))
1801  {
1802  format = format_values[it.first][it.second];
1803  stream_id = it.first;
1804  }
1805  }
1806  }
1807  for (auto&& p : sorted_profiles)
1808  {
1809  if (auto vid_prof = p.as<video_stream_profile>())
1810  if (p.unique_id() == stream_id && p.format() == format) // && stream_enabled[stream_id]
1811  {
1812  profiles_by_fps_res[std::make_tuple(p.fps(), vid_prof.width(), vid_prof.height())].insert(std::pair<int, stream_profile>(p.unique_id(), p));
1813  matching_profiles.push_back(p);
1814  if (!def_p.get())
1815  def_p = p;
1816  }
1817  }
1818  // take profiles not in matching_profiles with enabled stream and fps+resolution matching some profile in matching_profiles
1819  for (auto&& p : sorted_profiles)
1820  {
1821  if (auto vid_prof = p.as<video_stream_profile>())
1822  {
1823  if (check_profile(p, [&](stream_profile prof) { return (std::find_if(matching_profiles.begin(), matching_profiles.end(), [&](stream_profile sp)
1824  { return (stream_id != p.unique_id() && sp.fps() == p.fps() && sp.as<video_stream_profile>().width() == vid_prof.width() &&
1825  sp.as<video_stream_profile>().height() == vid_prof.height()); }) != matching_profiles.end()); },
1826  profiles_by_fps_res, results, std::make_tuple(p.fps(), vid_prof.width(), vid_prof.height()), num_streams, def_p))
1827  break;
1828  }
1829  }
1830  }
1831  else if (stream_enabled != prev_stream_enabled)
1832  {
1833  if (num_streams == 0)
1834  return results;
1835  get_sorted_profiles(sorted_profiles);
1836  std::vector<stream_profile> matching_profiles;
1837  std::map<rs2_format, std::map<int, stream_profile>> profiles_by_format;
1838 
1839  for (auto&& p : sorted_profiles)
1840  {
1841  // first try to find a profile from the new stream to match the current configuration
1842  if (check_profile(p, [&](video_stream_profile vid_prof)
1843  { return (p.fps() == fps && vid_prof.width() == width && vid_prof.height() == height); },
1844  profiles_by_format, results, p.format(), num_streams, def_p))
1845  break;
1846  }
1847  if (results.size() < num_streams)
1848  {
1849  results.clear();
1850  std::map<std::tuple<int, int, int>, std::map<int, stream_profile>> profiles_by_fps_res;
1851  for (auto&& p : sorted_profiles)
1852  {
1853  if (auto vid_prof = p.as<video_stream_profile>())
1854  {
1855  // if no stream with current configuration was found, try to find some configuration to match all enabled streams
1856  if (check_profile(p, [&](video_stream_profile vsp) { return true; }, profiles_by_fps_res, results,
1857  std::make_tuple(p.fps(), vid_prof.width(), vid_prof.height()), num_streams, def_p))
1858  break;
1859  }
1860  }
1861  }
1862  }
1863  if (results.empty())
1864  results.push_back(def_p);
1865  update_ui(results);
1866  return results;
1867  }
1868 
1869  std::vector<stream_profile> subdevice_model::get_selected_profiles()
1870  {
1871  std::vector<stream_profile> results;
1872 
1873  std::stringstream error_message;
1874  error_message << "The profile ";
1875 
1876  for (auto&& f : formats)
1877  {
1878  auto stream = f.first;
1879  if (stream_enabled[stream])
1880  {
1882 
1883  auto fps = 0;
1886  else
1888 
1889  for (auto&& p : profiles)
1890  {
1891  if (auto vid_prof = p.as<video_stream_profile>())
1892  {
1893  if (res_values.size() > 0)
1894  {
1895  auto width = res_values[ui.selected_res_id].first;
1896  auto height = res_values[ui.selected_res_id].second;
1897 
1898  error_message << "\n{" << stream_display_names[stream] << ","
1899  << width << "x" << height << " at " << fps << "Hz, "
1900  << rs2_format_to_string(format) << "} ";
1901 
1902  if (vid_prof.width() == width &&
1903  vid_prof.height() == height &&
1904  p.unique_id() == stream &&
1905  p.fps() == fps &&
1906  p.format() == format)
1907  results.push_back(p);
1908  }
1909  }
1910  else
1911  {
1912  error_message << "\n{" << stream_display_names[stream] << ", at " << fps << "Hz, "
1913  << rs2_format_to_string(format) << "} ";
1914 
1915  if (p.fps() == fps &&
1916  p.unique_id() == stream &&
1917  p.format() == format)
1918  results.push_back(p);
1919  }
1920  }
1921  }
1922  }
1923  if (results.size() == 0)
1924  {
1925  error_message << " is unsupported!";
1926  throw std::runtime_error(error_message.str());
1927  }
1928  return results;
1929  }
1930 
1931  void subdevice_model::stop(std::shared_ptr<notifications_model> not_model)
1932  {
1933  if ( not_model )
1934  not_model->add_log("Stopping streaming");
1935 
1936  for_each(stream_display_names.begin(), stream_display_names.end(), [this](std::pair<int, std::string> kvp)
1937  {
1938  if ("Pose" == kvp.second)
1939  {
1940  this->tm2.reset_trajectory();
1941  this->tm2.record_trajectory(false);
1942  }
1943  });
1944 
1945  streaming = false;
1946  _pause = false;
1947 
1948  if (profiles[0].stream_type() == RS2_STREAM_COLOR)
1949  {
1950  std::lock_guard< std::mutex > lock(detected_objects->mutex);
1951  detected_objects->clear();
1952  detected_objects->sensor_is_on = false;
1953  }
1954 
1955  s->stop();
1956 
1957  _options_invalidated = true;
1958 
1959  queues.foreach([&](frame_queue& q)
1960  {
1961  frame f;
1962  while (q.poll_for_frame(&f));
1963  });
1964 
1965  s->close();
1966  }
1967 
1968  bool subdevice_model::is_paused() const
1969  {
1970  return _pause.load();
1971  }
1972 
1973  void subdevice_model::pause()
1974  {
1975  _pause = true;
1976  }
1977 
1978  void subdevice_model::resume()
1979  {
1980  _pause = false;
1981  }
1982 
1983  bool subdevice_model::can_enable_zero_order()
1984  {
1985  auto ir = std::find_if(profiles.begin(), profiles.end(), [&](stream_profile p) { return (p.stream_type() == RS2_STREAM_INFRARED); });
1986  auto depth = std::find_if(profiles.begin(), profiles.end(), [&](stream_profile p) { return (p.stream_type() == RS2_STREAM_DEPTH); });
1987 
1988  return !(ir == profiles.end() || depth == profiles.end() || !stream_enabled[depth->unique_id()] || !stream_enabled[ir->unique_id()]);
1989  }
1990 
1991  void subdevice_model::verify_zero_order_conditions()
1992  {
1993  if (!can_enable_zero_order())
1994  throw std::runtime_error(to_string() << "Zero order filter requires both IR and Depth streams turned on.\nPlease rectify the configuration and rerun");
1995  }
1996 
1997  // Decides whether a specific frame should be sent to the syncer
1998  bool subdevice_model::is_synchronized_frame(viewer_model& viewer, const frame& f)
1999  {
2000  if (zero_order_artifact_fix && zero_order_artifact_fix->is_enabled() &&
2002  return true;
2003  if (!viewer.is_3d_view || viewer.is_3d_depth_source(f) || viewer.is_3d_texture_source(f))
2004  return true;
2005 
2006  return false;
2007  }
2008 
2009  void subdevice_model::play(const std::vector<stream_profile>& profiles, viewer_model& viewer, std::shared_ptr<rs2::asynchronous_syncer> syncer)
2010  {
2012  {
2014  }
2015 
2016  std::stringstream ss;
2017  ss << "Starting streaming of ";
2018  for (size_t i = 0; i < profiles.size(); i++)
2019  {
2020  ss << profiles[i].stream_type();
2021  if (i < profiles.size() - 1) ss << ", ";
2022  }
2023  ss << "...";
2024  viewer.not_model->add_log(ss.str());
2025 
2026  // TODO - refactor tm2 from viewer to subdevice
2027  for_each(stream_display_names.begin(), stream_display_names.end(), [this](std::pair<int, std::string> kvp)
2028  {
2029  if ("Pose" == kvp.second)
2030  {
2031  this->tm2.record_trajectory(true);
2032  }
2033  });
2034 
2035  s->open(profiles);
2036 
2037  try {
2038  s->start([&, syncer](frame f)
2039  {
2040  if (viewer.synchronization_enable && is_synchronized_frame(viewer, f))
2041  {
2042  syncer->invoke(f);
2043  }
2044  else
2045  {
2046  auto id = f.get_profile().unique_id();
2047  viewer.ppf.frames_queue[id].enqueue(f);
2048 
2049  on_frame();
2050  }
2051  });
2052  }
2053 
2054  catch (...)
2055  {
2056  s->close();
2057  throw;
2058  }
2059 
2060  _options_invalidated = true;
2061  streaming = true;
2062  if (s->is< color_sensor >())
2063  {
2064  std::lock_guard< std::mutex > lock(detected_objects->mutex);
2065  detected_objects->sensor_is_on = true;
2066  }
2067  }
2068  void subdevice_model::update(std::string& error_message, notifications_model& notifications)
2069  {
2070  if (_options_invalidated)
2071  {
2072  next_option = 0;
2073  _options_invalidated = false;
2074 
2077 
2078  for (auto&& pbm : post_processing) pbm->save_to_config_file();
2079  }
2080 
2081  if (next_option < supported_options.size())
2082  {
2084  if (options_metadata.find(static_cast<rs2_option>(next)) != options_metadata.end())
2085  {
2086  auto& opt_md = options_metadata[static_cast<rs2_option>(next)];
2087  opt_md.update_all_fields(error_message, notifications);
2088 
2089  if (next == RS2_OPTION_ENABLE_AUTO_EXPOSURE)
2090  {
2091  auto old_ae_enabled = auto_exposure_enabled;
2092  auto_exposure_enabled = opt_md.value > 0;
2093 
2094  if (!old_ae_enabled && auto_exposure_enabled)
2095  {
2096  try
2097  {
2098  if (s->is<roi_sensor>())
2099  {
2100  auto r = s->as<roi_sensor>().get_region_of_interest();
2101  roi_rect.x = static_cast<float>(r.min_x);
2102  roi_rect.y = static_cast<float>(r.min_y);
2103  roi_rect.w = static_cast<float>(r.max_x - r.min_x);
2104  roi_rect.h = static_cast<float>(r.max_y - r.min_y);
2105  }
2106  }
2107  catch (...)
2108  {
2109  auto_exposure_enabled = false;
2110  }
2111  }
2112  }
2113 
2114  if (next == RS2_OPTION_DEPTH_UNITS)
2115  {
2116  opt_md.dev->depth_units = opt_md.value;
2117  }
2118 
2119  if (next == RS2_OPTION_STEREO_BASELINE)
2120  opt_md.dev->stereo_baseline = opt_md.value;
2121  }
2122 
2123  next_option++;
2124  }
2125  }
2126 
2127  void subdevice_model::draw_options(const std::vector<rs2_option>& drawing_order,
2128  bool update_read_only_options, std::string& error_message,
2129  notifications_model& notifications)
2130  {
2131  for (auto& opt : drawing_order)
2132  {
2133  draw_option(opt, update_read_only_options, error_message, notifications);
2134  }
2135 
2136  for (auto i = 0; i < RS2_OPTION_COUNT; i++)
2137  {
2138  auto opt = static_cast<rs2_option>(i);
2139  if (viewer.is_option_skipped(opt)) continue;
2140  if (std::find(drawing_order.begin(), drawing_order.end(), opt) == drawing_order.end())
2141  {
2142  draw_option(opt, update_read_only_options, error_message, notifications);
2143  }
2144  }
2145  }
2146 
2148  {
2149  return (uint64_t)std::count_if(
2152  [&](const std::pair<int, option_model>& p) {return p.second.supported && !viewer.is_option_skipped(p.second.opt); });
2153  }
2154 
2155  bool option_model::draw_option(bool update_read_only_options,
2156  bool is_streaming,
2157  std::string& error_message, notifications_model& model)
2158  {
2159  if (update_read_only_options)
2160  {
2161  update_supported(error_message);
2162  if (supported && is_streaming)
2163  {
2164  update_read_only_status(error_message);
2165  if (read_only)
2166  {
2167  update_all_fields(error_message, model);
2168  }
2169  }
2170  }
2171  if (custom_draw_method)
2172  return custom_draw_method(*this, error_message, model);
2173  else
2174  return draw(error_message, model);
2175  }
2176 
2177  void option_model::set_option(rs2_option opt, float req_value, std::string &error_message)
2178  {
2179  try
2180  {
2181  endpoint->set_option(opt, req_value);
2182  }
2183  catch (const error& e)
2184  {
2185  error_message = error_to_string(e);
2186  }
2187  // Only update the cached value once set_option is done! That way, if it doesn't change anything...
2188  try { value = endpoint->get_option(opt); }
2189  catch (...) {}
2190  }
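
set_option above writes the requested value and then reads it back, so the cached value reflects what the device actually applied (firmware may clamp or reject a request). A minimal sketch of the same write-then-read-back pattern on a plain rs2::sensor, assuming nothing beyond the public option API:

#include <librealsense2/rs.hpp>

// Write an option, then refresh from hardware so the cached copy matches what was applied.
float set_and_read_back(rs2::sensor& s, rs2_option opt, float requested)
{
    if (!s.supports(opt))
        return 0.f;
    try { s.set_option(opt, requested); }
    catch (const rs2::error&) { /* keep going - read back whatever is currently set */ }
    return s.get_option(opt); // the device may have clamped the requested value
}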
2191 
2192  stream_model::stream_model()
2193  : texture(std::unique_ptr<texture_buffer>(new texture_buffer())),
2194  _stream_not_alive(std::chrono::milliseconds(1500)),
2195  _stabilized_reflectivity(10)
2196  {
2201  }
2202 
2203  std::shared_ptr<texture_buffer> stream_model::upload_frame(frame&& f)
2204  {
2205  if (dev && dev->is_paused()) return nullptr;
2206 
2208 
2209  auto image = f.as<video_frame>();
2210  auto width = (image) ? image.get_width() : 640.f;
2211  auto height = (image) ? image.get_height() : 480.f;
2212 
2213  size = { static_cast<float>(width), static_cast<float>(height) };
2214  profile = f.get_profile();
2215  frame_number = f.get_frame_number();
2216  timestamp_domain = f.get_frame_timestamp_domain();
2217  timestamp = f.get_timestamp();
2218  fps.add_timestamp(f.get_timestamp(), f.get_frame_number());
2219 
2220  view_fps.add_timestamp(glfwGetTime() * 1000, count++);
2221 
2222  // populate frame metadata attributes
2223  for (auto i = 0; i < RS2_FRAME_METADATA_COUNT; i++)
2224  {
2225  if (f.supports_frame_metadata((rs2_frame_metadata_value)i))
2226  frame_md.md_attributes[i] = std::make_pair(true, f.get_frame_metadata((rs2_frame_metadata_value)i));
2227  else
2228  frame_md.md_attributes[i].first = false;
2229  }
2230 
2231  texture->upload(f);
2232  return texture;
2233  }
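
The loop above probes every rs2_frame_metadata_value and caches the attributes the frame supports; these feed the metadata overlay further down. A minimal sketch of the same enumeration outside the viewer:

#include <librealsense2/rs.hpp>
#include <iostream>

void print_frame_metadata(const rs2::frame& f)
{
    for (int i = 0; i < RS2_FRAME_METADATA_COUNT; i++)
    {
        auto md = static_cast<rs2_frame_metadata_value>(i);
        if (f.supports_frame_metadata(md))
            std::cout << rs2_frame_metadata_to_string(md) << ": "
                      << f.get_frame_metadata(md) << std::endl;
    }
}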
2234 
2235  void outline_rect(const rect& r)
2236  {
2238 
2239  glLineWidth(1);
2240  glLineStipple(1, 0xAAAA);
2242 
2244  glVertex2f(r.x, r.y);
2245  glVertex2f(r.x, r.y + r.h);
2246  glVertex2f(r.x + r.w, r.y + r.h);
2247  glVertex2f(r.x + r.w, r.y);
2248  glVertex2f(r.x, r.y);
2249  glEnd();
2250 
2251  glPopAttrib();
2252  }
2253 
2254  void draw_rect(const rect& r, int line_width)
2255  {
2257 
2258  glLineWidth((GLfloat)line_width);
2259 
2261  glVertex2f(r.x, r.y);
2262  glVertex2f(r.x, r.y + r.h);
2263  glVertex2f(r.x + r.w, r.y + r.h);
2264  glVertex2f(r.x + r.w, r.y);
2265  glVertex2f(r.x, r.y);
2266  glVertex2f(r.x, r.y + r.h);
2267  glVertex2f(r.x + r.w, r.y + r.h);
2268  glVertex2f(r.x + r.w, r.y);
2269  glVertex2f(r.x, r.y);
2270  glEnd();
2271 
2272  glPopAttrib();
2273  }
2274 
2276  {
2277  if (dev &&
2278  (dev->is_paused() ||
2279  (dev->streaming && dev->dev.is<playback>()) ||
2280  (dev->streaming /*&& texture->get_last_frame()*/)))
2281  {
2282  return true;
2283  }
2284  return false;
2285  }
2286 
2287  bool stream_model::is_stream_alive()
2288  {
2289  if (dev &&
2290  (dev->is_paused() ||
2291  (dev->streaming && dev->dev.is<playback>())))
2292  {
2294  return true;
2295  }
2296 
2297  using namespace std::chrono;
2299  auto diff = now - last_frame;
2300  auto ms = duration_cast<milliseconds>(diff).count();
2302  return !_stream_not_alive.eval();
2303  }
2304 
2305  void stream_model::begin_stream(std::shared_ptr<subdevice_model> d, rs2::stream_profile p, const viewer_model& viewer)
2306  {
2307  dev = d;
2308  original_profile = p;
2309 
2310  profile = p;
2311  texture->colorize = d->depth_colorizer;
2312  texture->yuy2rgb = d->yuy2rgb;
2313  texture->depth_decode = d->depth_decoder;
2314 
2315  if (auto vd = p.as<video_stream_profile>())
2316  {
2317  size = {
2318  static_cast<float>(vd.width()),
2319  static_cast<float>(vd.height()) };
2320 
2321  original_size = {
2322  static_cast<float>(vd.width()),
2323  static_cast<float>(vd.height()) };
2324  }
2326 
2327  try
2328  {
2329  auto ds = d->dev.first< depth_sensor >();
2330  if( viewer._support_ir_reflectivity
2331  && ds.supports( RS2_OPTION_ENABLE_IR_REFLECTIVITY )
2332  && ds.supports( RS2_OPTION_ENABLE_MAX_USABLE_RANGE )
2333  && ( ( p.stream_type() == RS2_STREAM_INFRARED )
2334  || ( p.stream_type() == RS2_STREAM_DEPTH ) ) )
2335  {
2336  _reflectivity = std::unique_ptr< reflectivity >( new reflectivity() );
2337  }
2338  }
2339  catch(...) {};
2340 
2341  }
2342 
2343  bool stream_model::draw_reflectivity( int x,
2344  int y,
2345  rs2::depth_sensor ds,
2346  const std::map< int, stream_model > & streams,
2347  std::stringstream & ss,
2348  bool same_line )
2349  {
2350  bool reflectivity_valid = false;
2351 
2352  static const int MAX_PIXEL_MOVEMENT_TOLERANCE = 0;
2353 
2354  if( std::abs( _prev_mouse_pos_x - x ) > MAX_PIXEL_MOVEMENT_TOLERANCE
2355  || std::abs( _prev_mouse_pos_y - y ) > MAX_PIXEL_MOVEMENT_TOLERANCE )
2356  {
2357  _reflectivity->reset_history();
2359  _prev_mouse_pos_x = x;
2360  _prev_mouse_pos_y = y;
2361  }
2362 
2363  // Get IR sample for getting current reflectivity
2364  auto ir_stream
2365  = std::find_if( streams.cbegin(),
2366  streams.cend(),
2367  []( const std::pair< const int, stream_model > & stream ) {
2368  return stream.second.profile.stream_type() == RS2_STREAM_INFRARED;
2369  } );
2370 
2371  // Get depth sample for adding to reflectivity history
2372  auto depth_stream
2373  = std::find_if( streams.cbegin(),
2374  streams.cend(),
2375  []( const std::pair< const int, stream_model > & stream ) {
2376  return stream.second.profile.stream_type() == RS2_STREAM_DEPTH;
2377  } );
2378 
2379  if ((ir_stream != streams.end()) && (depth_stream != streams.end()))
2380  {
2381  auto depth_val = 0.0f;
2382  auto ir_val = 0.0f;
2383  depth_stream->second.texture->try_pick( x, y, &depth_val );
2384  ir_stream->second.texture->try_pick( x, y, &ir_val );
2385 
2386  _reflectivity->add_depth_sample( depth_val, x, y ); // Add depth sample to the history
2387 
2388  float noise_est = ds.get_option( RS2_OPTION_NOISE_ESTIMATION );
2389  auto mur_sensor = ds.as< max_usable_range_sensor >();
2390  if( mur_sensor )
2391  {
2392  auto max_usable_range = mur_sensor.get_max_usable_depth_range();
2393  reflectivity_valid = true;
2394  std::string ref_str = "N/A";
2395  try
2396  {
2397  if (_reflectivity->is_history_full())
2398  {
2399  auto pixel_ref
2400  = _reflectivity->get_reflectivity(noise_est, max_usable_range, ir_val);
2401  _stabilized_reflectivity.add(pixel_ref);
2402  auto stabilized_pixel_ref = _stabilized_reflectivity.get( 0.75f ); // We use 75% stability for preventing spikes
2403  ref_str = to_string() << std::dec << round( stabilized_pixel_ref * 100 ) << "%";
2404  }
2405  else
2406  {
2407  // Show dots while calculating; total dots count in range [3-10]
2408  int dots_count = static_cast<int>(_reflectivity->get_samples_ratio() * 7);
2409  ref_str = "calculating...";
2410  ref_str += std::string(dots_count, '.');
2411  }
2412  }
2413  catch( ... )
2414  {
2415  }
2416 
2417  if( same_line )
2418  ss << ", Reflectivity: " << ref_str;
2419  else
2420  ss << "\nReflectivity: " << ref_str;
2421  }
2422  }
2423 
2424  return reflectivity_valid;
2425  }
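
Reflectivity above combines an IR sample, a depth sample, the noise-estimation option and the sensor's maximum usable range. A minimal sketch of just the max-usable-range query, assuming an L500-class depth sensor that exposes the option (the call may throw if the sensor is not in a supported state, hence the try/catch):

#include <librealsense2/rs.hpp>
#include <iostream>

void print_max_usable_range(rs2::depth_sensor ds)
{
    try
    {
        if (ds.supports(RS2_OPTION_ENABLE_MAX_USABLE_RANGE))
        {
            ds.set_option(RS2_OPTION_ENABLE_MAX_USABLE_RANGE, 1.f);
            if (auto mur = ds.as<rs2::max_usable_range_sensor>())
                std::cout << "Max usable range: " << mur.get_max_usable_depth_range()
                          << " meters" << std::endl;
        }
    }
    catch (const rs2::error& e)
    {
        std::cerr << e.what() << std::endl;
    }
}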
2426 
2427  void stream_model::update_ae_roi_rect(const rect& stream_rect, const mouse_info& mouse, std::string& error_message)
2428  {
2429  if (dev->roi_checked)
2430  {
2431  auto&& sensor = dev->s;
2432  // Case 1: Start dragging the ROI rect
2433  // Pre-condition: not capturing already + mouse is down + we are inside stream rect
2434  if (!capturing_roi && mouse.mouse_down[0] && stream_rect.contains(mouse.cursor))
2435  {
2436  // Initialize roi_display_rect with drag-start position
2437  roi_display_rect.x = mouse.cursor.x;
2438  roi_display_rect.y = mouse.cursor.y;
2439  roi_display_rect.w = 0; // Still unknown, will be updated later
2440  roi_display_rect.h = 0;
2441  capturing_roi = true; // Mark that we are in process of capturing the ROI rect
2442  }
2443  // Case 2: We are in the middle of dragging (capturing) ROI rect and we did not leave the stream boundaries
2444  if (capturing_roi && stream_rect.contains(mouse.cursor))
2445  {
2446  // x,y remain the same, only update the width,height with new mouse position relative to starting mouse position
2449  }
2450  // Case 3: We are in the middle of dragging (capturing) and the mouse was released
2451  if (!mouse.mouse_down[0] && capturing_roi && stream_rect.contains(mouse.cursor))
2452  {
2453  // Update width,height one last time
2456  capturing_roi = false; // Mark that we are no longer dragging
2457 
2458  if (roi_display_rect) // If the rect is not empty?
2459  {
2460  // Convert from local (pixel) coordinate system to device coordinate system
2461  auto r = roi_display_rect;
2463  dev->roi_rect = r; // Store new rect in device coordinates into the subdevice object
2464 
2465  // Send it to firmware:
2466  // Step 1: get rid of negative width / height
2467  region_of_interest roi{};
2468  roi.min_x = static_cast<int>(std::min(r.x, r.x + r.w));
2469  roi.max_x = static_cast<int>(std::max(r.x, r.x + r.w));
2470  roi.min_y = static_cast<int>(std::min(r.y, r.y + r.h));
2471  roi.max_y = static_cast<int>(std::max(r.y, r.y + r.h));
2472 
2473  try
2474  {
2475  // Step 2: send it to firmware
2476  if (sensor->is<roi_sensor>())
2477  {
2478  sensor->as<roi_sensor>().set_region_of_interest(roi);
2479  }
2480  }
2481  catch (const error& e)
2482  {
2483  error_message = error_to_string(e);
2484  }
2485  }
2486  else // If the rect is empty
2487  {
2488  try
2489  {
2490  // To reset ROI, just set ROI to the entire frame
2491  auto x_margin = (int)size.x / 8;
2492  auto y_margin = (int)size.y / 8;
2493 
2494  // Default ROI behavior is center 3/4 of the screen:
2495  if (sensor->is<roi_sensor>())
2496  {
2497  sensor->as<roi_sensor>().set_region_of_interest({ x_margin, y_margin,
2498  (int)size.x - x_margin - 1,
2499  (int)size.y - y_margin - 1 });
2500  }
2501 
2502  roi_display_rect = { 0, 0, 0, 0 };
2503  dev->roi_rect = { 0, 0, 0, 0 };
2504  }
2505  catch (const error& e)
2506  {
2507  error_message = error_to_string(e);
2508  }
2509  }
2510 
2511  dev->roi_checked = false;
2512  }
2513  // If we left stream bounds while capturing, stop capturing
2514  if (capturing_roi && !stream_rect.contains(mouse.cursor))
2515  {
2516  capturing_roi = false;
2517  }
2518 
2519  // When not capturing, just refresh the ROI rect in case the stream box moved
2520  if (!capturing_roi)
2521  {
2522  auto r = dev->roi_rect; // Take the current from device, convert to local coordinates
2523  r = r.normalize(_normalized_zoom.unnormalize(get_original_stream_bounds())).unnormalize(stream_rect).cut_by(stream_rect);
2524  roi_display_rect = r;
2525  }
2526 
2527  // Display ROI rect
2528  glColor3f(1.0f, 1.0f, 1.0f);
2530  }
2531  }
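
The handler above converts the dragged rectangle to device pixel coordinates and sends it to firmware through the roi_sensor interface; an empty drag resets the ROI to the centered 3/4 of the frame. A minimal sketch of that reset call alone, assuming a streaming sensor that exposes the ROI interface and a 640x480 stream (auto-exposure ROI is typically only accepted while streaming):

#include <librealsense2/rs.hpp>

// Set the auto-exposure ROI to the centered 3/4 of a 640x480 frame (the reset behavior above).
void reset_ae_roi(rs2::sensor& s)
{
    if (auto roi_s = s.as<rs2::roi_sensor>())
    {
        rs2::region_of_interest roi{};
        roi.min_x = 640 / 8;           // 80
        roi.min_y = 480 / 8;           // 60
        roi.max_x = 640 - 640 / 8 - 1; // 559
        roi.max_y = 480 - 480 / 8 - 1; // 419
        roi_s.set_region_of_interest(roi);
    }
}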
2532 
2533  bool draw_combo_box(const std::string& id, const std::vector<std::string>& device_names, int& new_index)
2534  {
2535  std::vector<const char*> device_names_chars = get_string_pointers(device_names);
2536  return ImGui::Combo(id.c_str(), &new_index, device_names_chars.data(), static_cast<int>(device_names.size()));
2537  }
2538 
2539  void stream_model::show_stream_header(ImFont* font, const rect &stream_rect, viewer_model& viewer)
2540  {
2541  const auto top_bar_height = 32.f;
2542  auto num_of_buttons = 5;
2543 
2544  if (!viewer.allow_stream_close) --num_of_buttons;
2545  if (viewer.streams.size() > 1) ++num_of_buttons;
2546  if (RS2_STREAM_DEPTH == profile.stream_type()) ++num_of_buttons; // Color map ruler button
2547 
2548  ImGui_ScopePushFont(font);
2551 
2555 
2556  std::string label = to_string() << "Stream of " << profile.unique_id();
2557 
2558  ImGui::GetWindowDrawList()->AddRectFilled({ stream_rect.x, stream_rect.y - top_bar_height },
2559  { stream_rect.x + stream_rect.w, stream_rect.y }, ImColor(sensor_bg));
2560 
2561  int offset = 5;
2562  if (dev->_is_being_recorded) offset += 23;
2563  auto p = dev->dev.as<playback>();
2564  if (dev->is_paused() || (p && p.current_status() == RS2_PLAYBACK_STATUS_PAUSED)) offset += 23;
2565 
2566  ImGui::SetCursorScreenPos({ stream_rect.x + 4 + offset, stream_rect.y - top_bar_height + 7 });
2567 
2568  std::string tooltip;
2569  if (dev && dev->dev.supports(RS2_CAMERA_INFO_NAME) &&
2570  dev->dev.supports(RS2_CAMERA_INFO_SERIAL_NUMBER) &&
2571  dev->s->supports(RS2_CAMERA_INFO_NAME))
2572  {
2573  std::string dev_name = dev->dev.get_info(RS2_CAMERA_INFO_NAME);
2574  std::string dev_serial = dev->dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER);
2575  std::string sensor_name = dev->s->get_info(RS2_CAMERA_INFO_NAME);
2576  std::string stream_name = rs2_stream_to_string(profile.stream_type());
2577 
2578  tooltip = to_string() << dev_name << " s.n:" << dev_serial << " | " << sensor_name << ", " << stream_name << " stream";
2579  const auto approx_char_width = 12;
2580  if (stream_rect.w - 32 * num_of_buttons >= (dev_name.size() + dev_serial.size() + sensor_name.size() + stream_name.size()) * approx_char_width)
2581  label = tooltip;
2582  else
2583  {
2584  // Use only the SKU type for compact representation and use only the last three digits for S.N
2585  auto short_name = split_string(dev_name, ' ').back();
2586  auto short_sn = dev_serial;
2587  short_sn.erase(0, dev_serial.size() - 5).replace(0, 2, "..");
2588 
2589  auto label_length = stream_rect.w - 32 * num_of_buttons;
2590 
2591  if (label_length >= (short_name.size() + dev_serial.size() + sensor_name.size() + stream_name.size()) * approx_char_width)
2592  label = to_string() << short_name << " s.n:" << dev_serial << " | " << sensor_name << " " << stream_name << " stream";
2593  else if (label_length >= (short_name.size() + short_sn.size() + stream_name.size()) * approx_char_width)
2594  label = to_string() << short_name << " s.n:" << short_sn << " " << stream_name << " stream";
2595  else if (label_length >= short_name.size() * approx_char_width)
2596  label = to_string() << short_name << " " << stream_name;
2597  else
2598  label = "";
2599  }
2600  }
2601  else
2602  {
2603  label = to_string() << "Unknown " << rs2_stream_to_string(profile.stream_type()) << " stream";
2604  tooltip = label;
2605  }
2606 
2607  ImGui::PushTextWrapPos(stream_rect.x + stream_rect.w - 32 * num_of_buttons - 5);
2608  ImGui::Text("%s", label.c_str());
2609  if (tooltip != label && ImGui::IsItemHovered())
2610  ImGui::SetTooltip("%s", tooltip.c_str());
2612 
2613  ImGui::SetCursorScreenPos({ stream_rect.x + stream_rect.w - 32 * num_of_buttons, stream_rect.y - top_bar_height });
2614 
2615 
2616  label = to_string() << textual_icons::metadata << "##Metadata" << profile.unique_id();
2617  if (show_metadata)
2618  {
2621  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2622  {
2623  show_metadata = false;
2624  }
2625  if (ImGui::IsItemHovered())
2626  {
2627  ImGui::SetTooltip("Hide frame metadata");
2628  }
2630  }
2631  else
2632  {
2633  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2634  {
2635  show_metadata = true;
2636  }
2637  if (ImGui::IsItemHovered())
2638  {
2639  ImGui::SetTooltip("Show frame metadata");
2640  }
2641  }
2642  ImGui::SameLine();
2643 
2644  if (RS2_STREAM_DEPTH == profile.stream_type())
2645  {
2646  label = to_string() << textual_icons::bar_chart << "##Color map";
2647  if (show_map_ruler)
2648  {
2651  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2652  {
2653  show_map_ruler = false;
2655  }
2656  if (ImGui::IsItemHovered())
2657  {
2658  ImGui::SetTooltip("Hide color map ruler");
2659  }
2661  }
2662  else
2663  {
2664  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2665  {
2666  show_map_ruler = true;
2668  }
2669  if (ImGui::IsItemHovered())
2670  {
2671  ImGui::SetTooltip("Show color map ruler");
2672  }
2673  }
2674  ImGui::SameLine();
2675  }
2676 
2677  if (dev->is_paused() || (p && p.current_status() == RS2_PLAYBACK_STATUS_PAUSED))
2678  {
2681  label = to_string() << textual_icons::play << "##Resume " << profile.unique_id();
2682  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2683  {
2684  if (p)
2685  {
2686  p.resume();
2687  }
2688  dev->resume();
2689  viewer.paused = false;
2690  }
2691  if (ImGui::IsItemHovered())
2692  {
2693  ImGui::SetTooltip("Resume sensor");
2694  }
2696  }
2697  else
2698  {
2699  label = to_string() << textual_icons::pause << "##Pause " << profile.unique_id();
2700  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2701  {
2702  if (p)
2703  {
2704  p.pause();
2705  }
2706  dev->pause();
2707  viewer.paused = true;
2708  }
2709  if (ImGui::IsItemHovered())
2710  {
2711  ImGui::SetTooltip("Pause sensor");
2712  }
2713  }
2714  ImGui::SameLine();
2715 
2716  label = to_string() << textual_icons::camera << "##Snapshot " << profile.unique_id();
2717  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2718  {
2719  auto filename = file_dialog_open(save_file, "Portable Network Graphics (PNG)\0*.png\0", NULL, NULL);
2720 
2721  if (filename)
2722  {
2723  snapshot_frame(filename, viewer);
2724  }
2725  }
2726  if (ImGui::IsItemHovered())
2727  {
2728  ImGui::SetTooltip("Save snapshot");
2729  }
2730  ImGui::SameLine();
2731 
2732  label = to_string() << textual_icons::info_circle << "##Info " << profile.unique_id();
2733  if (show_stream_details)
2734  {
2737 
2738  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2739  {
2740  show_stream_details = false;
2744  }
2745  if (ImGui::IsItemHovered())
2746  {
2747  ImGui::SetTooltip("Hide stream info overlay");
2748  }
2749 
2751  }
2752  else
2753  {
2754  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2755  {
2756  show_stream_details = true;
2760  }
2761  if (ImGui::IsItemHovered())
2762  {
2763  ImGui::SetTooltip("Show stream info overlay");
2764  }
2765  }
2766  ImGui::SameLine();
2767 
2768  if (viewer.streams.size() > 1)
2769  {
2770  if (!viewer.fullscreen)
2771  {
2772  label = to_string() << textual_icons::window_maximize << "##Maximize " << profile.unique_id();
2773 
2774  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2775  {
2776  viewer.fullscreen = true;
2777  viewer.selected_stream = this;
2778  }
2779  if (ImGui::IsItemHovered())
2780  {
2781  ImGui::SetTooltip("Maximize stream to full-screen");
2782  }
2783 
2784  ImGui::SameLine();
2785  }
2786  else if (viewer.fullscreen)
2787  {
2790 
2791  label = to_string() << textual_icons::window_restore << "##Restore " << profile.unique_id();
2792 
2793  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2794  {
2795  viewer.fullscreen = false;
2796  }
2797  if (ImGui::IsItemHovered())
2798  {
2799  ImGui::SetTooltip("Restore tile view");
2800  }
2801 
2803  ImGui::SameLine();
2804  }
2805  }
2806  else
2807  {
2808  viewer.fullscreen = false;
2809  }
2810 
2811  if (viewer.allow_stream_close)
2812  {
2813  label = to_string() << textual_icons::times << "##Stop " << profile.unique_id();
2814  if (ImGui::Button(label.c_str(), { 24, top_bar_height }))
2815  {
2816  dev->stop(viewer.not_model);
2817  }
2818  if (ImGui::IsItemHovered())
2819  {
2820  ImGui::SetTooltip("Stop this sensor");
2821  }
2822  }
2823 
2825 
2826  _info_height = (show_stream_details || show_metadata) ? (show_metadata ? stream_rect.h : 32.f) : 0.f;
2827 
2828  static const auto y_offset_info_rect = 0.f;
2829  static const auto x_offset_info_rect = 0.f;
2830  auto width_info_rect = stream_rect.w - 2.f * x_offset_info_rect;
2831 
2832  curr_info_rect = rect{ stream_rect.x + x_offset_info_rect,
2833  stream_rect.y + y_offset_info_rect,
2834  width_info_rect,
2835  _info_height };
2836 
2840 
2843 
2847 
2848  float line_y = curr_info_rect.y + 8;
2849 
2851  {
2852  if (_info_height.get() > line_y + ImGui::GetTextLineHeight() - curr_info_rect.y)
2853  {
2854  ImGui::SetCursorScreenPos({ curr_info_rect.x + 10, line_y });
2855 
2858 
2859  label = to_string() << "Time: " << std::left << std::fixed << std::setprecision(1) << timestamp << " ";
2860 
2862 
2863  ImGui::Text("%s", label.c_str());
2864 
2866 
2867  if (ImGui::IsItemHovered())
2868  {
2870  {
2872  ImGui::PushTextWrapPos(450.0f);
2873  ImGui::TextUnformatted("Timestamp Domain: System Time. Hardware Timestamps unavailable!\nPlease refer to frame_metadata.md for more information");
2876  }
2878  {
2879  ImGui::SetTooltip("Timestamp: Global Time");
2880  }
2881  else
2882  {
2883  ImGui::SetTooltip("Timestamp: Hardware Clock");
2884  }
2885  }
2886 
2887  ImGui::SameLine();
2888 
2889  label = to_string() << " Frame: " << std::left << frame_number;
2890  ImGui::Text("%s", label.c_str());
2891 
2892  ImGui::SameLine();
2893 
2894  std::string res;
2896  res = to_string() << size.x << "x" << size.y << ", ";
2897  label = to_string() << res << truncate_string(rs2_format_to_string(profile.format()), 9) << ", ";
2898  ImGui::Text("%s", label.c_str());
2899  if (ImGui::IsItemHovered())
2900  {
2901  ImGui::SetTooltip("%s", "Stream Resolution, Format");
2902  }
2903 
2904  ImGui::SameLine();
2905 
2906  label = to_string() << "FPS: " << std::setprecision(2) << std::setw(7) << std::fixed << fps.get_fps();
2907  ImGui::Text("%s", label.c_str());
2908  if (ImGui::IsItemHovered())
2909  {
2910  ImGui::SetTooltip("%s", "FPS is calculated based on timestamps and not viewer time");
2911  }
2912 
2913  line_y += ImGui::GetTextLineHeight() + 5;
2914  }
2915  }
2916 
2917 
2918 
2919  if (show_metadata)
2920  {
2921  std::vector<attribute> stream_details;
2922 
2923  if (true) // Always show stream details options
2924  {
2925  stream_details.push_back({ "Frame Timestamp",
2926  to_string() << std::fixed << std::setprecision(1) << timestamp,
2927  "Frame Timestamp is a normalized representation of when the frame was taken.\n"
2928  "It's a property of every frame, so when exact creation time is not provided by the hardware, an approximation will be used.\n"
2929  "The Clock Domain field helps to interpret the meaning of the timestamp.\n"
2930  "Timestamp is measured in milliseconds, and is allowed to roll-over (reset to zero) in some situations" });
2931  stream_details.push_back({ "Clock Domain",
2933  "Clock Domain describes the format of Timestamp field. It can be one of the following:\n"
2934  "1. System Time - When no hardware timestamp is available, system time of arrival will be used.\n"
2935  " System time benefits from being comparable between device, but suffers from not being able to approximate latency.\n"
2936  "2. Hardware Clock - Hardware timestamp is attached to the frame by the device, and is consistent across device sensors.\n"
2937  " Hardware timestamp encodes precisely when frame was captured, but cannot be compared across devices\n"
2938  "3. Global Time - Global time is provided when the device can both offer hardware timestamp and implements Global Timestamp Protocol.\n"
2939  " Global timestamps encode exact time of capture and at the same time are comparable across devices." });
2940  stream_details.push_back({ "Frame Number",
2941  to_string() << frame_number, "Frame Number is a rolling ID assigned to frames.\n"
2942  "Most devices do not guarantee consecutive frames to have consecutive frame numbers,\n"
2943  "but in practice this usually holds" });
2944 
2946  {
2947  stream_details.push_back({ "Hardware Size",
2948  to_string() << original_size.x << " x " << original_size.y, "" });
2949 
2950  stream_details.push_back({ "Display Size",
2951  to_string() << size.x << " x " << size.y,
2952  "When Post-Processing is enabled, the actual display size of the frame may differ from original capture size" });
2953  }
2954  stream_details.push_back({ "Pixel Format",
2956 
2957  stream_details.push_back({ "Hardware FPS",
2958  to_string() << std::setprecision(2) << std::fixed << fps.get_fps(),
2959  "Hardware FPS captures the number of frames per second produced by the device.\n"
2960  "It is possible and likely that not all of these frames will make it to the application." });
2961 
2962  stream_details.push_back({ "Viewer FPS",
2963  to_string() << std::setprecision(2) << std::fixed << view_fps.get_fps(),
2964  "Viewer FPS captures how many frames the application manages to render.\n"
2965  "Frame drops can occur for a variety of reasons." });
2966 
2967  stream_details.push_back({ "", "", "" });
2968  }
2969 
2970  const std::string no_md = "no md";
2971 
2973  {
2974  stream_details.push_back({ no_md, "", "" });
2975  }
2976 
2977  std::map<rs2_frame_metadata_value, std::string> descriptions = {
2978  { RS2_FRAME_METADATA_FRAME_COUNTER , "A sequential index managed per-stream. Integer value" },
2979  { RS2_FRAME_METADATA_FRAME_TIMESTAMP , "Timestamp set by device clock when data readout and transmit commence. Units are device dependent" },
2980  { RS2_FRAME_METADATA_SENSOR_TIMESTAMP , "Timestamp of the middle of sensor's exposure calculated by device. usec" },
2981  { RS2_FRAME_METADATA_ACTUAL_EXPOSURE , "Sensor's exposure width. When Auto Exposure (AE) is on the value is controlled by firmware. usec" },
2982  { RS2_FRAME_METADATA_GAIN_LEVEL , "A relative value increasing which will increase the Sensor's gain factor.\n"
2983  "When AE is set On, the value is controlled by firmware. Integer value" },
2984  { RS2_FRAME_METADATA_AUTO_EXPOSURE , "Auto Exposure Mode indicator. Zero corresponds to AE switched off. " },
2985  { RS2_FRAME_METADATA_WHITE_BALANCE , "White Balance setting as a color temperature. Kelvin degrees" },
2986  { RS2_FRAME_METADATA_TIME_OF_ARRIVAL , "Time of arrival in system clock " },
2987  { RS2_FRAME_METADATA_TEMPERATURE , "Temperature of the device, measured at the time of the frame capture. Celsius degrees " },
2988  { RS2_FRAME_METADATA_BACKEND_TIMESTAMP , "Timestamp taken from the UVC driver. usec" },
2989  { RS2_FRAME_METADATA_ACTUAL_FPS , "Actual hardware FPS. May differ from requested due to Auto-Exposure" },
2990  { RS2_FRAME_METADATA_FRAME_LASER_POWER_MODE , "Laser power mode. Zero corresponds to Laser power switched off and one for switched on." },
2991  { RS2_FRAME_METADATA_EXPOSURE_PRIORITY , "Exposure priority. When enabled, the Auto-exposure algorithm is allowed to reduce the requested FPS to sufficiently increase exposure time (and get enough light)" },
2992  { RS2_FRAME_METADATA_POWER_LINE_FREQUENCY , "Power Line Frequency for anti-flickering Off/50Hz/60Hz/Auto. " },
2993  };
2994 
2995  for (auto i = 0; i < RS2_FRAME_METADATA_COUNT; i++)
2996  {
2997  auto&& kvp = frame_md.md_attributes[i];
2998  if (kvp.first)
2999  {
3000  auto val = (rs2_frame_metadata_value)i;
3002  std::string desc = "";
3003  if (descriptions.find(val) != descriptions.end()) desc = descriptions[val];
3004  stream_details.push_back({ name, to_string() << kvp.second, desc });
3005  }
3006  }
3007 
3008  float max_text_width = 0.;
3009  for (auto&& kvp : stream_details)
3010  max_text_width = std::max(max_text_width, ImGui::CalcTextSize(kvp.name.c_str()).x);
3011 
3012  for (auto&& at : stream_details)
3013  {
3014  if (_info_height.get() > line_y + ImGui::GetTextLineHeight() - curr_info_rect.y)
3015  {
3016  ImGui::SetCursorScreenPos({ curr_info_rect.x + 10, line_y });
3017 
3018  if (at.name == no_md)
3019  {
3020  auto text = "Per-frame metadata is not enabled at the OS level!\nPlease follow the installation guide for the details";
3021  auto size = ImGui::CalcTextSize(text);
3022 
3023  for (int i = 3; i > 0; i -= 1)
3024  ImGui::GetWindowDrawList()->AddRectFilled({ curr_info_rect.x + 10 - i, line_y - i },
3025  { curr_info_rect.x + 10 + i + size.x, line_y + size.y + i },
3026  ImColor(alpha(sensor_bg, 0.1f)));
3027 
3029  ImGui::Text("%s", text);
3031 
3032  line_y += ImGui::GetTextLineHeight() + 3;
3033  }
3034  else
3035  {
3036  std::string text = "";
3037  if (at.name != "") text = to_string() << at.name << ":";
3038  auto size = ImGui::CalcTextSize(text.c_str());
3039 
3040  for (int i = 3; i > 0; i -= 1)
3041  ImGui::GetWindowDrawList()->AddRectFilled({ curr_info_rect.x + 10 - i, line_y - i },
3042  { curr_info_rect.x + 10 + i + size.x, line_y + size.y + i },
3043  ImColor(alpha(sensor_bg, 0.1f)));
3044 
3046  ImGui::Text("%s", text.c_str()); ImGui::SameLine();
3047 
3048  if (at.description != "")
3049  {
3050  if (ImGui::IsItemHovered())
3051  {
3052  ImGui::SetTooltip("%s", at.description.c_str());
3053  }
3054  }
3055 
3056  text = at.value;
3057  size = ImGui::CalcTextSize(text.c_str());
3058 
3059  for (int i = 3; i > 0; i -= 1)
3060  ImGui::GetWindowDrawList()->AddRectFilled({ curr_info_rect.x + 20 + max_text_width - i, line_y - i },
3061  { curr_info_rect.x + 30 + max_text_width + i + size.x, line_y + size.y + i },
3062  ImColor(alpha(sensor_bg, 0.1f)));
3063 
3065 
3066  ImGui::SetCursorScreenPos({ curr_info_rect.x + 20 + max_text_width, line_y });
3067 
3068  std::string id = to_string() << "##" << at.name << "-" << profile.unique_id();
3069 
3072 
3073  ImGui::InputText(id.c_str(),
3074  (char*)text.c_str(),
3075  text.size() + 1,
3077 
3079  }
3080 
3081  line_y += ImGui::GetTextLineHeight() + 3;
3082  }
3083  }
3084  }
3085 
3087  }
3088 
3089  void stream_model::show_stream_footer(ImFont* font, const rect &stream_rect, const mouse_info& mouse, const std::map<int, stream_model> &streams, viewer_model& viewer)
3090  {
3091  auto non_visual_stream = (profile.stream_type() == RS2_STREAM_GYRO)
3092  || (profile.stream_type() == RS2_STREAM_ACCEL)
3093  || (profile.stream_type() == RS2_STREAM_GPIO)
3094  || (profile.stream_type() == RS2_STREAM_POSE);
3095 
3096  if (stream_rect.contains(mouse.cursor) && !non_visual_stream && !show_metadata)
3097  {
3098  std::stringstream ss;
3099  rect cursor_rect{ mouse.cursor.x, mouse.cursor.y };
3100  auto ts = cursor_rect.normalize(stream_rect);
3101  auto pixels = ts.unnormalize(_normalized_zoom.unnormalize(get_stream_bounds()));
3102  auto x = (int)pixels.x;
3103  auto y = (int)pixels.y;
3104 
3105  ss << std::fixed << std::setprecision(0) << x << ", " << y;
3106 
3107  float val{};
3108  if (texture->try_pick(x, y, &val))
3109  {
3110  ss << " 0x" << std::hex << static_cast< int >( round( val ) );
3111  }
3112 
3113  bool show_max_range = false;
3114  bool show_reflectivity = false;
3115 
3116  if (texture->get_last_frame().is<depth_frame>())
3117  {
3118  // Draw pixel distance
3119  auto meters = texture->get_last_frame().as<depth_frame>().get_distance(x, y);
3120 
3121  if (viewer.metric_system)
3122  {
3123  // depth is displayed in mm when distance is below 20 cm and gets back to meters when above 30 cm
3124  static bool display_in_mm = false;
3125  if (!display_in_mm && meters > 0.f && meters < 0.2f)
3126  {
3127  display_in_mm = true;
3128  }
3129  else if (display_in_mm && meters > 0.3f)
3130  {
3131  display_in_mm = false;
3132  }
3133  if (display_in_mm)
3134  ss << std::dec << " = " << std::setprecision(3) << meters * 1000 << " millimeters";
3135  else
3136  ss << std::dec << " = " << std::setprecision(3) << meters << " meters";
3137  }
3138  else
3139  ss << std::dec << " = " << std::setprecision(3) << meters / FEET_TO_METER << " feet";
3140 
3141  // Draw maximum usable depth range
3142  auto ds = sensor_from_frame(texture->get_last_frame())->as<depth_sensor>();
3144  {
3145  if (ds.supports(RS2_OPTION_ENABLE_MAX_USABLE_RANGE) &&
3146  (ds.get_option(RS2_OPTION_ENABLE_MAX_USABLE_RANGE) == 1.0f))
3147  {
3148  auto mur_sensor = ds.as<max_usable_range_sensor>();
3149  if (mur_sensor)
3150  {
3151  show_max_range = true;
3152  auto max_usable_range = mur_sensor.get_max_usable_depth_range();
3153  const float MIN_RANGE = 1.5f;
3154  const float MAX_RANGE = 9.0f;
3155  // display maximum usable range in range 1.5-9 [m] at 1.5 [m] resolution (rounded)
3156  auto max_usable_range_limited = std::min(std::max(max_usable_range, MIN_RANGE), MAX_RANGE);
3157 
3158  // round down to a multiple of 1.5 [m]
3159  auto max_usable_range_rounded = static_cast<int>(max_usable_range_limited / 1.5f) * 1.5f;
3160 
3161  if (viewer.metric_system)
3162  ss << std::dec << "\nMax usable range: " << std::setprecision(1) << max_usable_range_rounded << " meters";
3163  else
3164  ss << std::dec << "\nMax usable range: " << std::setprecision(1) << max_usable_range_rounded / FEET_TO_METER << " feet";
3165  }
3166  }
3167  }
3168 
3169  // Draw IR reflectivity on depth frame
3170  if (_reflectivity)
3171  {
3172  if (ds.get_option(RS2_OPTION_ENABLE_IR_REFLECTIVITY) == 1.0f)
3173  {
3174  if ((0.2f == roi_percentage) && roi_display_rect.contains(mouse.cursor))
3175  {
3176  // Add reflectivity information on frame, if max usable range is displayed, display reflectivity on the same line
3177  show_reflectivity = draw_reflectivity(x, y, ds, streams, ss, show_max_range);
3178  }
3179  }
3180  }
3181  }
3182 
3183  // Draw IR reflectivity on IR frame
3184  if (_reflectivity)
3185  {
3186  bool lf_exist = texture->get_last_frame();
3187  if (lf_exist)
3188  {
3189  auto ds = sensor_from_frame(texture->get_last_frame())->as<depth_sensor>();
3190  if (ds.get_option( RS2_OPTION_ENABLE_IR_REFLECTIVITY ) == 1.0f )
3191  {
3192  bool lf_exist = texture->get_last_frame();
3193  if (is_stream_alive() && texture->get_last_frame().get_profile().stream_type() == RS2_STREAM_INFRARED)
3194  {
3195  if ((0.2f == roi_percentage) && roi_display_rect.contains(mouse.cursor))
3196  {
3197  show_reflectivity = draw_reflectivity(x, y, ds, streams, ss, show_max_range);
3198  }
3199  }
3200  }
3201  }
3202  }
3203 
3204  std::string msg(ss.str().c_str());
3205 
3206  ImGui_ScopePushFont(font);
3207 
3208  // adjust window size to the message length
3209  auto new_line_start_idx = msg.find_first_of('\n');
3210  int footer_vertical_size = 35;
3211  auto width = float(msg.size() * 8);
3212 
3213  // adjust width according to the longest line
3214  if (show_max_range || show_reflectivity)
3215  {
3216  footer_vertical_size = 50;
3217  auto first_line_size = msg.find_first_of('\n') + 1;
3218  auto second_line_size = msg.substr(new_line_start_idx).size();
3219  width = std::max(first_line_size, second_line_size) * 8.0f;
3220  }
3221 
3222  auto align = 20;
3223  width += align - (int)width % align;
3224 
3225  ImVec2 pos{ stream_rect.x + 5, stream_rect.y + stream_rect.h - footer_vertical_size };
3227  { pos.x + width, pos.y + footer_vertical_size - 5 }, ImColor(dark_sensor_bg));
3228 
3229  ImGui::SetCursorScreenPos({ pos.x + 10, pos.y + 5 });
3230 
3231  std::string label = to_string() << "Footer for stream of " << profile.unique_id();
3232 
3235 
3236  ImGui::Text("%s", msg.c_str());
3238  }
3239  else
3240  {
3241  if (_reflectivity)
3242  {
3243  _reflectivity->reset_history();
3245  }
3246  }
3247  }
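
The footer reads the depth value under the cursor with depth_frame::get_distance and switches between millimeters, meters and feet for display. A minimal sketch of that distance readout, assuming a default pipeline configuration that includes a depth stream (the FEET_PER_METER constant below is local to the sketch):

#include <librealsense2/rs.hpp>
#include <iostream>

int main()
{
    rs2::pipeline pipe;
    pipe.start();
    auto frames = pipe.wait_for_frames();
    if (auto depth = frames.get_depth_frame())
    {
        int x = depth.get_width() / 2, y = depth.get_height() / 2; // center pixel
        float meters = depth.get_distance(x, y);
        const float FEET_PER_METER = 3.2808f;
        std::cout << meters << " meters (" << meters * FEET_PER_METER << " feet)" << std::endl;
    }
    pipe.stop();
    return 0;
}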
3248 
3249  void stream_model::show_stream_imu(ImFont* font, const rect &stream_rect, const rs2_vector& axis, const mouse_info& mouse)
3250  {
3251  if (stream_rect.contains(mouse.cursor))
3252  {
3253  const auto precision = 3;
3254  rs2_stream stream_type = profile.stream_type();
3255 
3258 
3262 
3263  float y_offset = 0;
3264  if (show_stream_details)
3265  {
3266  y_offset += 30;
3267  }
3268 
3269  std::string label = to_string() << "IMU Stream Info of " << profile.unique_id();
3270 
3271  ImVec2 pos{ stream_rect.x, stream_rect.y + y_offset };
3272  ImGui::SetCursorScreenPos({ pos.x + 5, pos.y + 5 });
3273 
3274  struct motion_data {
3275  std::string name;
3276  float coordinate;
3278  std::string toolTip;
3279  ImVec4 colorFg;
3280  ImVec4 colorBg;
3281  int nameExtraSpace;
3282  };
3283 
3284  float norm = std::sqrt((axis.x*axis.x) + (axis.y*axis.y) + (axis.z*axis.z));
3285 
3286  std::map<rs2_stream, std::string> motion_unit = { { RS2_STREAM_GYRO, "Radians/Sec" },{ RS2_STREAM_ACCEL, "Meter/Sec^2" } };
3287  std::vector<motion_data> motion_vector = { { "X", axis.x, motion_unit[stream_type].c_str(), "Vector X", from_rgba(233, 0, 0, 255, true) , from_rgba(233, 0, 0, 255, true), 0},
3288  { "Y", axis.y, motion_unit[stream_type].c_str(), "Vector Y", from_rgba(0, 255, 0, 255, true) , from_rgba(2, 100, 2, 255, true), 0},
3289  { "Z", axis.z, motion_unit[stream_type].c_str(), "Vector Z", from_rgba(85, 89, 245, 255, true) , from_rgba(0, 0, 245, 255, true), 0},
3290  { "N", norm, "Norm", "||V|| = SQRT(X^2 + Y^2 + Z^2)",from_rgba(255, 255, 255, 255, true) , from_rgba(255, 255, 255, 255, true), 0} };
3291 
3292  int line_h = 18;
3293  for (auto&& motion : motion_vector)
3294  {
3295  auto rc = ImGui::GetCursorPos();
3296  ImGui::SetCursorPos({ rc.x + 12, rc.y + 4 });
3297  ImGui::PushStyleColor(ImGuiCol_Text, motion.colorFg);
3298  ImGui::Text("%s:", motion.name.c_str());
3299  if (ImGui::IsItemHovered())
3300  {
3301  ImGui::SetTooltip("%s", motion.toolTip.c_str());
3302  }
3304 
3305  ImGui::SameLine();
3308 
3309  ImGui::PushItemWidth(100);
3310  ImGui::SetCursorPos({ rc.x + 27 + motion.nameExtraSpace, rc.y + 1 });
3311  std::string label = to_string() << "##" << profile.unique_id() << " " << motion.name.c_str();
3312  std::string coordinate = to_string() << std::fixed << std::setprecision(precision) << std::showpos << motion.coordinate;
3313  ImGui::InputText(label.c_str(), (char*)coordinate.c_str(), coordinate.size() + 1, ImGuiInputTextFlags_AutoSelectAll | ImGuiInputTextFlags_ReadOnly);
3315 
3316  ImGui::SetCursorPos({ rc.x + 80 + motion.nameExtraSpace, rc.y + 4 });
3317  ImGui::PushStyleColor(ImGuiCol_Text, from_rgba(255, 255, 255, 100, true));
3318  ImGui::Text("(%s)", motion.units.c_str());
3319 
3321  ImGui::SetCursorPos({ rc.x, rc.y + line_h });
3322  }
3323 
3325  }
3326  }
3327 
3328  void stream_model::show_stream_pose(ImFont* font, const rect &stream_rect,
3329  const rs2_pose& pose_frame, rs2_stream stream_type, bool fullScreen, float y_offset,
3330  viewer_model& viewer)
3331  {
3334 
3338 
3339  std::string label = to_string() << "Pose Stream Info of " << profile.unique_id();
3340 
3341  ImVec2 pos{ stream_rect.x, stream_rect.y + y_offset };
3342  ImGui::SetCursorScreenPos({ pos.x + 5, pos.y + 5 });
3343 
3344  std::string confidenceName[4] = { "Failed", "Low", "Medium", "High" };
3345  struct pose_data {
3346  std::string name;
3347  float floatData[4];
3348  std::string strData;
3350  bool signedNumber;
3352  std::string toolTip;
3353  uint32_t nameExtraSpace;
3354  bool showOnNonFullScreen;
3355  bool fixedPlace;
3356  bool fixedColor;
3357  };
3358 
3359  rs2_vector velocity = pose_frame.velocity;
3360  rs2_vector acceleration = pose_frame.acceleration;
3361  rs2_vector translation = pose_frame.translation;
3362  const float feetTranslator = 3.2808f;
3363  std::string unit = viewer.metric_system ? "meters" : "feet";
3364 
3365  if (!viewer.metric_system)
3366  {
3367  velocity.x *= feetTranslator; velocity.y *= feetTranslator; velocity.z *= feetTranslator;
3368  acceleration.x *= feetTranslator; acceleration.y *= feetTranslator; acceleration.z *= feetTranslator;
3369  translation.x *= feetTranslator; translation.y *= feetTranslator; translation.z *= feetTranslator;
3370  }
3371 
3372  std::vector<pose_data> pose_vector = {
3373  { "Confidence",{ FLT_MAX , FLT_MAX , FLT_MAX , FLT_MAX }, confidenceName[pose_frame.tracker_confidence], 3, true, "", "Tracker confidence: High=Green, Medium=Yellow, Low=Red, Failed=Grey", 50, false, true, false },
3374  { "Velocity", {velocity.x, velocity.y , velocity.z , FLT_MAX }, "", 3, true, "(" + unit + "/Sec)", "Velocity: X, Y, Z values of velocity, in " + unit + "/Sec", 50, false, true, false},
3375  { "Angular Velocity",{ pose_frame.angular_velocity.x, pose_frame.angular_velocity.y , pose_frame.angular_velocity.z , FLT_MAX }, "", 3, true, "(Radians/Sec)", "Angular Velocity: X, Y, Z values of angular velocity, in Radians/Sec", 50, false, true, false },
3376  { "Acceleration",{ acceleration.x, acceleration.y , acceleration.z , FLT_MAX }, "", 3, true, "(" + unit + "/Sec^2)", "Acceleration: X, Y, Z values of acceleration, in " + unit + "/Sec^2", 50, false, true, false },
3377  { "Angular Acceleration",{ pose_frame.angular_acceleration.x, pose_frame.angular_acceleration.y , pose_frame.angular_acceleration.z , FLT_MAX }, "", 3, true, "(Radians/Sec^2)", "Angular Acceleration: X, Y, Z values of angular acceleration, in Radians/Sec^2", 50, false, true, false },
3378  { "Translation",{ translation.x, translation.y , translation.z , FLT_MAX }, "", 3, true, "(" + unit + ")", "Translation: X, Y, Z values of translation in " + unit + " (relative to initial position)", 50, true, true, false },
3379  { "Rotation",{ pose_frame.rotation.x, pose_frame.rotation.y , pose_frame.rotation.z , pose_frame.rotation.w }, "", 3, true, "(Quaternion)", "Rotation: Qi, Qj, Qk, Qr components of rotation as represented in quaternion rotation (relative to initial position)", 50, true, true, false },
3380  };
3381 
3382  int line_h = 18;
3383  if (fullScreen)
3384  {
3385  line_h += 2;
3386  }
3387 
3388  for (auto&& pose : pose_vector)
3389  {
3390  if ((fullScreen == false) && (pose.showOnNonFullScreen == false))
3391  {
3392  continue;
3393  }
3394 
3395  auto rc = ImGui::GetCursorPos();
3396  ImGui::SetCursorPos({ rc.x + 12, rc.y + 4 });
3397  ImGui::Text("%s:", pose.name.c_str());
3398  if (ImGui::IsItemHovered())
3399  {
3400  ImGui::SetTooltip("%s", pose.toolTip.c_str());
3401  }
3402 
3403  if (pose.fixedColor == false)
3404  {
3405  switch (pose_frame.tracker_confidence) //color the line according to confidence
3406  {
3407  case 3: // High confidence - Green
3409  break;
3410  case 2: // Medium confidence - Yellow
3412  break;
3413  case 1: // Low confidence - Red
3415  break;
3416  case 0: // Failed confidence - Grey
3417  default: // Fall through
3419  break;
3420  }
3421  }
3422 
3423  ImGui::SetCursorPos({ rc.x + 100 + (fullScreen ? pose.nameExtraSpace : 0), rc.y + 1 });
3424  std::string label = to_string() << "##" << profile.unique_id() << " " << pose.name.c_str();
3425  std::string data = "";
3426 
3427  if (pose.strData.empty())
3428  {
3429  data = "[";
3430  std::string comma = "";
3431  unsigned int i = 0;
3432  while ((i < 4) && (pose.floatData[i] != FLT_MAX))
3433  {
3434 
3435  data += to_string() << std::fixed << std::setprecision(pose.precision) << (pose.signedNumber ? std::showpos : std::noshowpos) << comma << pose.floatData[i];
3436  comma = ", ";
3437  i++;
3438  }
3439  data += "]";
3440  }
3441  else
3442  {
3443  data = pose.strData;
3444  }
3445 
3446  auto textSize = ImGui::CalcTextSize((char*)data.c_str(), (char*)data.c_str() + data.size() + 1);
3447  ImGui::PushItemWidth(textSize.x);
3448  ImGui::InputText(label.c_str(), (char*)data.c_str(), data.size() + 1, ImGuiInputTextFlags_AutoSelectAll | ImGuiInputTextFlags_ReadOnly);
3450 
3451  if (pose.fixedColor == false)
3452  {
3454  }
3455 
3456  if (pose.fixedPlace == true)
3457  {
3458  ImGui::SetCursorPos({ rc.x + 300 + (fullScreen ? pose.nameExtraSpace : 0), rc.y + 4 });
3459  }
3460  else
3461  {
3462  ImGui::SameLine();
3463  }
3464 
3465  ImGui::PushStyleColor(ImGuiCol_Text, from_rgba(255, 255, 255, 100, true));
3466  ImGui::Text("%s", pose.units.c_str());
3468 
3469  ImGui::SetCursorPos({ rc.x, rc.y + line_h });
3470  }
3471 
3473  }
3474 
3475  void stream_model::snapshot_frame(const char* filename, viewer_model& viewer) const
3476  {
3477  std::stringstream ss;
3478  std::string stream_desc{};
3479  std::string filename_base(filename);
3480 
3481  // Trim the file extension when provided. Note that this may amend user-provided file name in case it uses the "." character, e.g. "my.file.name"
3482  auto loc = filename_base.find_last_of(".");
3483  if (loc != std::string::npos)
3484  filename_base.erase(loc, std::string::npos);
3485 
3486  // Snapshot the color-augmented version of the frame
3487  if (auto colorized_frame = texture->get_last_frame(true).as<video_frame>())
3488  {
3489  stream_desc = rs2_stream_to_string(colorized_frame.get_profile().stream_type());
3490  auto filename_png = filename_base + "_" + stream_desc + ".png";
3491  save_to_png(filename_png.data(), colorized_frame.get_width(), colorized_frame.get_height(), colorized_frame.get_bytes_per_pixel(),
3492  colorized_frame.get_data(), colorized_frame.get_width() * colorized_frame.get_bytes_per_pixel());
3493 
3494  ss << "PNG snapshot was saved to " << filename_png << std::endl;
3495  }
3496 
3497  auto last_frame = texture->get_last_frame( false );
3498  auto original_frame = last_frame.as< video_frame >();
3499 
3500  // For Depth-originated streams also provide a copy of the raw data accompanied by sensor-specific metadata
3501  if (original_frame && val_in_range(original_frame.get_profile().stream_type(), { RS2_STREAM_DEPTH , RS2_STREAM_INFRARED }))
3502  {
3503  stream_desc = rs2_stream_to_string(original_frame.get_profile().stream_type());
3504 
3505  //Capture raw frame
3506  auto filename = filename_base + "_" + stream_desc + ".raw";
3507  if (save_frame_raw_data(filename, original_frame))
3508  ss << "Raw data is captured into " << filename << std::endl;
3509  else
3510  viewer.not_model->add_notification({ to_string() << "Failed to save frame raw data " << filename,
3512 
3513  // And the frame's attributes
3514  filename = filename_base + "_" + stream_desc + "_metadata.csv";
3515 
3516  try
3517  {
3518  if (frame_metadata_to_csv(filename, original_frame))
3519  ss << "The frame attributes are saved into " << filename;
3520  else
3521  viewer.not_model->add_notification({ to_string() << "Failed to save frame metadata file " << filename,
3523  }
3524  catch (std::exception& e)
3525  {
3526  viewer.not_model->add_notification({ to_string() << e.what(),
3528  }
3529  }
3530 
3531  auto motion = last_frame.as< motion_frame >();
3532  if( motion )
3533  {
3534  stream_desc = rs2_stream_to_string( motion.get_profile().stream_type() );
3535 
3536  // And the frame's attributes
3537  auto filename = filename_base + "_" + stream_desc + ".csv";
3538 
3539  try
3540  {
3541  if( motion_data_to_csv( filename, motion ) )
3542  ss << "The frame attributes are saved into\n" << filename;
3543  else
3544  viewer.not_model->add_notification(
3545  { to_string() << "Failed to save frame file " << filename,
3548  }
3549  catch( std::exception & e )
3550  {
3551  viewer.not_model->add_notification( { to_string() << e.what(),
3554  }
3555  }
3556 
3557  auto pose = last_frame.as< pose_frame >();
3558  if( pose )
3559  {
3560  stream_desc = rs2_stream_to_string( pose.get_profile().stream_type() );
3561 
3562  // And the frame's attributes
3563  auto filename = filename_base + "_" + stream_desc + ".csv";
3564 
3565  try
3566  {
3567  if( pose_data_to_csv( filename, pose ) )
3568  ss << "The frame attributes are saved into\n" << filename;
3569  else
3570  viewer.not_model->add_notification(
3571  { to_string() << "Failed to save frame file " << filename,
3574  }
3575  catch( std::exception & e )
3576  {
3577  viewer.not_model->add_notification( { to_string() << e.what(),
3580  }
3581  }
3582  if (ss.str().size())
3583  viewer.not_model->add_notification(notification_data{
3584  ss.str().c_str(), RS2_LOG_SEVERITY_INFO, RS2_NOTIFICATION_CATEGORY_HARDWARE_EVENT });
3585 
3586  }
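
snapshot_frame saves a colorized PNG plus, for depth and IR streams, the raw buffer and a metadata CSV, using helpers defined elsewhere in this project (save_to_png, save_frame_raw_data, frame_metadata_to_csv). A minimal sketch of producing the colorized frame such a writer would consume, assuming a running pipeline; actually writing the PNG is left to an image library such as stb_image_write:

#include <librealsense2/rs.hpp>
#include <iostream>

void snapshot_colorized_depth(rs2::pipeline& pipe)
{
    rs2::colorizer colorize; // depth -> RGB visualization, the same block the viewer uses
    auto depth = pipe.wait_for_frames().get_depth_frame();
    if (!depth) return;
    auto colored = colorize.process(depth).as<rs2::video_frame>();
    // colored.get_data(), get_bytes_per_pixel() and get_stride_in_bytes() are exactly
    // what a PNG writer needs; here we only report the dimensions.
    std::cout << "Colorized frame: " << colored.get_width() << "x" << colored.get_height()
              << ", " << colored.get_bytes_per_pixel() << " bytes per pixel" << std::endl;
}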
3587 
3588  rect stream_model::get_normalized_zoom(const rect& stream_rect, const mouse_info& g, bool is_middle_clicked, float zoom_val)
3589  {
3590  rect zoomed_rect = dev->normalized_zoom.unnormalize(stream_rect);
3591  if (stream_rect.contains(g.cursor))
3592  {
3593  if (!is_middle_clicked)
3594  {
3595  if (zoom_val < 1.f)
3596  {
3597  zoomed_rect = zoomed_rect.center_at(g.cursor)
3598  .zoom(zoom_val)
3599  .fit({ 0, 0, 40, 40 })
3600  .enclose_in(zoomed_rect)
3601  .enclose_in(stream_rect);
3602  }
3603  else if (zoom_val > 1.f)
3604  {
3605  zoomed_rect = zoomed_rect.zoom(zoom_val).enclose_in(stream_rect);
3606  }
3607  }
3608  else
3609  {
3610  auto dir = g.cursor - _middle_pos;
3611 
3612  if (dir.length() > 0)
3613  {
3614  zoomed_rect = zoomed_rect.pan(1.1f * dir).enclose_in(stream_rect);
3615  }
3616  }
3617  dev->normalized_zoom = zoomed_rect.normalize(stream_rect);
3618  }
3619  return dev->normalized_zoom;
3620  }
3621 
3622  void stream_model::show_frame(const rect& stream_rect, const mouse_info& g, std::string& error_message)
3623  {
3624  auto zoom_val = 1.f;
3625  if (stream_rect.contains(g.cursor))
3626  {
3627  static const auto wheel_step = 0.1f;
3628  auto mouse_wheel_value = -g.mouse_wheel * 0.1f;
3629  if (mouse_wheel_value > 0)
3630  zoom_val += wheel_step;
3631  else if (mouse_wheel_value < 0)
3632  zoom_val -= wheel_step;
3633  }
3634 
3635  auto is_middle_clicked = ImGui::GetIO().MouseDown[0] ||
3636  ImGui::GetIO().MouseDown[2];
3637 
3638  if (!_mid_click && is_middle_clicked)
3639  _middle_pos = g.cursor;
3640 
3641  _mid_click = is_middle_clicked;
3642 
3643  _normalized_zoom = get_normalized_zoom(stream_rect,
3644  g, is_middle_clicked,
3645  zoom_val);
3646  texture->show(stream_rect, 1.f, _normalized_zoom);
3647 
3648  if (dev && dev->show_algo_roi)
3649  {
3650  rect r{ float(dev->algo_roi.min_x), float(dev->algo_roi.min_y),
3651  float(dev->algo_roi.max_x - dev->algo_roi.min_x),
3652  float(dev->algo_roi.max_y - dev->algo_roi.min_y) };
3653 
3654  r = r.normalize(_normalized_zoom.unnormalize(get_original_stream_bounds())).unnormalize(stream_rect).cut_by(stream_rect);
3656  draw_rect(r, 2);
3657 
3658  std::string message = "Metrics Region of Interest";
3659  auto msg_width = stb_easy_font_width((char*)message.c_str());
3660  if (msg_width < r.w)
3661  draw_text(static_cast<int>(r.x + r.w / 2 - msg_width / 2), static_cast<int>(r.y + 10), message.c_str());
3662 
3663  glColor3f(1.f, 1.f, 1.f);
3664  roi_percentage = dev->roi_percentage;
3665  roi_display_rect = r;
3666  }
3667 
3668  update_ae_roi_rect(stream_rect, g, error_message);
3669  texture->show_preview(stream_rect, _normalized_zoom);
3670 
3671  if (is_middle_clicked)
3672  _middle_pos = g.cursor;
3673  }
3674 
3676  {
3677  syncer->remove_syncer(dev_syncer);
3678  subdevices.resize(0);
3679  _recorder.reset();
3680 
3681  }
3682 
3683  std::pair<std::string, std::string> get_device_name(const device& dev)
3684  {
3685  // retrieve device name
3686  std::string name = (dev.supports(RS2_CAMERA_INFO_NAME)) ? dev.get_info(RS2_CAMERA_INFO_NAME) : "Unknown";
3687 
3688  // retrieve device serial number
3690 
3691  std::stringstream s;
3692 
3693  if (dev.is<playback>())
3694  {
3695  auto playback_dev = dev.as<playback>();
3696 
3697  s << "Playback device: ";
3698  name += (to_string() << " (File: " << playback_dev.file_name() << ")");
3699  }
3700  s << std::setw(25) << std::left << name;
3701  return std::make_pair(s.str(), serial); // push name and sn to list
3702  }
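
get_device_name combines the NAME and SERIAL_NUMBER camera-info fields and annotates playback devices with their file name. A minimal sketch of listing every connected device the same way:

#include <librealsense2/rs.hpp>
#include <iostream>
#include <string>

int main()
{
    rs2::context ctx;
    for (auto&& dev : ctx.query_devices())
    {
        std::string name = dev.supports(RS2_CAMERA_INFO_NAME)
            ? dev.get_info(RS2_CAMERA_INFO_NAME) : "Unknown";
        std::string serial = dev.supports(RS2_CAMERA_INFO_SERIAL_NUMBER)
            ? dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) : "???";
        std::cout << name << " (S/N " << serial << ")" << std::endl;
    }
    return 0;
}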
3703 
3705  {
3706  for (auto&& n : related_notifications) n->dismiss(false);
3707 
3708  _updates->set_device_status(*_updates_profile, false);
3709  }
3710 
3711  void device_model::check_for_bundled_fw_update(const rs2::context &ctx, std::shared_ptr<notifications_model> not_model)
3712  {
3713  if( dev.supports( RS2_CAMERA_INFO_FIRMWARE_VERSION )
3715  && dev.supports( RS2_CAMERA_INFO_PRODUCT_LINE ) )
3716  {
3718  std::string recommended
3720 
3721  int product_line
3723 
3726  false );
3727  bool is_rc = ( product_line == RS2_PRODUCT_LINE_D400 ) && allow_rc_firmware;
3728  std::string available = get_available_firmware_version( product_line );
3729 
3730  std::shared_ptr< firmware_update_manager > manager = nullptr;
3731 
3732  if( is_upgradeable( fw, available ) )
3733  {
3734  recommended = available;
3735 
3736  static auto table = create_default_fw_table();
3737 
3738  manager = std::make_shared< firmware_update_manager >( not_model,
3739  *this,
3740  dev,
3741  ctx,
3743  true );
3744  }
3745 
3746  if( is_upgradeable( fw, recommended ) )
3747  {
3748  auto dev_name = get_device_name( dev );
3749  std::stringstream msg;
3750  msg << dev_name.first << " (S/N " << dev_name.second << ")\n"
3751  << "Current Version: " << fw << "\n";
3752 
3753  if( is_rc )
3754  msg << "Release Candidate: " << recommended << " Pre-Release";
3755  else
3756  msg << "Recommended Version: " << recommended;
3757 
3758  auto n = std::make_shared< fw_update_notification_model >( msg.str(),
3759  manager,
3760  false );
3761  n->delay_id = "dfu." + dev_name.second;
3762  n->enable_complex_dismiss = true;
3763  if( ! n->is_delayed() )
3764  {
3765  not_model->add_notification( n );
3766  related_notifications.push_back( n );
3767  }
3768  }
3769  }
3770  }
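
The bundled-update check compares the device's current firmware with the recommended version and with firmware shipped alongside the tool (is_upgradeable and create_default_fw_table come from fw-update-helper.h). A minimal sketch of the camera-info part of that check, assuming both fields are reported by the device; the version comparison itself is only described in a comment, not implemented:

#include <librealsense2/rs.hpp>
#include <iostream>

void report_fw_versions(const rs2::device& dev)
{
    if (dev.supports(RS2_CAMERA_INFO_FIRMWARE_VERSION)
        && dev.supports(RS2_CAMERA_INFO_RECOMMENDED_FIRMWARE_VERSION))
    {
        std::cout << "Current firmware:     "
                  << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION) << "\n"
                  << "Recommended firmware: "
                  << dev.get_info(RS2_CAMERA_INFO_RECOMMENDED_FIRMWARE_VERSION) << std::endl;
        // A numeric comparison of the two "x.y.z.w" strings (what is_upgradeable does above)
        // would then decide whether to surface an update notification.
    }
}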
3771 
3772  void device_model::refresh_notifications(viewer_model& viewer)
3773  {
3774  for (auto&& n : related_notifications) n->dismiss(false);
3775 
3776  auto name = get_device_name(dev);
3777 
3778  // Inhibit on DQT / Playback device
3779  if( _allow_remove && ( ! dev.is< playback >() ) )
3780  check_for_device_updates(viewer);
3781 
3783  {
3784  for (auto&& model : subdevices)
3785  {
3786  if (model->supports_on_chip_calib())
3787  {
3788  // Make sure we don't spam calibration reminders too often:
3789  time_t rawtime;
3790  time(&rawtime);
3791  std::string id = to_string() << configurations::viewer::last_calib_notice << "." << name.second;
3792  long long last_time = config_file::instance().get_or_default(id.c_str(), (long long)0);
3793 
3794  std::string msg = to_string()
3795  << name.first << " (S/N " << name.second << ")";
3796  auto manager = std::make_shared<on_chip_calib_manager>(viewer, model, *this, dev);
3797  auto n = std::make_shared<autocalib_notification_model>(
3798  msg, manager, false);
3799 
3800  // Recommend calibration once a week per device
3801  if (rawtime - last_time < 60)
3802  {
3803  n->snoozed = true;
3804  }
3805 
3806  // NOTE: For now do not pre-emptively suggest auto-calibration
3807  // TODO: Revert in later release
3808  //viewer.not_model->add_notification(n);
3809  //related_notifications.push_back(n);
3810  }
3811  }
3812  }
3813  }
3814 
3815  device_model::device_model(device& dev, std::string& error_message, viewer_model& viewer, bool new_device_connected, bool remove)
3816  : dev(dev),
3817  _calib_model(dev),
3818  syncer(viewer.syncer),
3819  _update_readonly_options_timer(std::chrono::seconds(6)),
3820  _detected_objects(std::make_shared< atomic_objects_in_frame >()),
3821  _updates(viewer.updates),
3822  _updates_profile(std::make_shared<dev_updates_profile::update_profile>()),
3823  _allow_remove(remove)
3824  {
3825 
3827  {
3828  _accuracy_health_model = std::unique_ptr< cah_model >( new cah_model( *this, viewer ) );
3829  }
3830 
3831  auto name = get_device_name(dev);
3832  id = to_string() << name.first << ", " << name.second;
3833 
3834  for (auto&& sub : dev.query_sensors())
3835  {
3836  auto s = std::make_shared<sensor>(sub);
3837  auto objects = std::make_shared< atomic_objects_in_frame >();
3838  // checking if the sensor is color_sensor or is D405 (with integrated RGB in depth sensor)
3839  if (s->is<color_sensor>() || (dev.supports(RS2_CAMERA_INFO_PRODUCT_ID) && !strcmp(dev.get_info(RS2_CAMERA_INFO_PRODUCT_ID), "0B5B")))
3840  objects = _detected_objects;
3841  auto model = std::make_shared<subdevice_model>(dev, std::make_shared<sensor>(sub), objects, error_message, viewer, new_device_connected);
3842  subdevices.push_back(model);
3843  }
3844 
3845  // Initialize static camera info:
3846  for (auto i = 0; i < RS2_CAMERA_INFO_COUNT; i++)
3847  {
3848  auto info = static_cast<rs2_camera_info>(i);
3849 
3850  try
3851  {
3852  if (dev.supports(info))
3853  {
3854  auto value = dev.get_info(info);
3856  std::string(value) });
3857  }
3858  }
3859  catch (...)
3860  {
3862  std::string("???") });
3863  }
3864  }
3865 
3866  if (dev.is<playback>())
3867  {
3868  for (auto&& sub : subdevices)
3869  {
3870  for (auto&& p : sub->profiles)
3871  {
3872  sub->stream_enabled[p.unique_id()] = true;
3873  }
3874  }
3875  play_defaults(viewer);
3876  }
3877 
3878  refresh_notifications(viewer);
3879  }
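  // play_defaults: start streaming every idle subdevice with its currently selected profiles
  // and register each opened profile with the viewer; synchronization is disabled when a
  // Tracking or Motion module is present.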
3881  {
3882  if (!dev_syncer)
3883  dev_syncer = viewer.syncer->create_syncer();
3884  for (auto&& sub : subdevices)
3885  {
3886  if (!sub->streaming)
3887  {
3888  std::vector<rs2::stream_profile> profiles;
3889  try
3890  {
3891  profiles = sub->get_selected_profiles();
3892  }
3893  catch (...)
3894  {
3895  continue;
3896  }
3897 
3898  if (profiles.empty())
3899  continue;
3900 
3901  std::string friendly_name = sub->s->get_info(RS2_CAMERA_INFO_NAME);
3902  if ((friendly_name.find("Tracking") != std::string::npos) ||
3903  (friendly_name.find("Motion") != std::string::npos))
3904  {
3906  viewer.synchronization_enable = false;
3907  }
3908  sub->play(profiles, viewer, dev_syncer);
3909 
3910  for (auto&& profile : profiles)
3911  {
3912  viewer.begin_stream(sub, profile);
3913  }
3914  }
3915  }
3916  }
3917 
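  // apply_filters: run the enabled post-processing blocks on a frame (or on every frame in a
  // frameset), using the originating subdevice of each frame to decide which filters apply.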
3919  {
3920  std::vector<rs2::frame> frames;
3921  if (auto composite = f.as<rs2::frameset>())
3922  {
3923  for (auto&& f : composite)
3924  frames.push_back(f);
3925  }
3926  else
3927  frames.push_back(f);
3928 
3929  auto res = f;
3930 
 3931  // To know which processing blocks to apply,
 3932  // we need to find all the subdevices related to these frames
3933  std::set<std::shared_ptr<subdevice_model>> subdevices;
3934  for (auto f : frames)
3935  {
3936  auto sub = get_frame_origin(f);
3937  if (sub)
3938  subdevices.insert(sub);
3939  }
3940 
3941  for (auto sub : subdevices)
3942  {
3943  if (!sub->post_processing_enabled)
3944  continue;
3945 
3946  for (auto&& pp : sub->post_processing)
3947  if (pp->is_enabled())
3948  res = pp->invoke(res);
3949  }
3950 
3951  return res;
3952  }
3953 
3954  std::shared_ptr<subdevice_model> post_processing_filters::get_frame_origin(const rs2::frame& f)
3955  {
3956  for (auto&& s : viewer.streams)
3957  {
3958  if (s.second.dev)
3959  {
3960  auto dev = s.second.dev;
3961 
3962  if (s.second.original_profile.unique_id() == f.get_profile().unique_id())
3963  {
3964  return dev;
3965  }
3966  }
3967  }
3968  return nullptr;
3969  }
3970 
 3971  // Zero the first pixel of the frame; used to invalidate occluded pixels
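  // A plausible reading of this trick: occluded vertices get texture coordinates pointing at
  // pixel (0,0), so blanking that first pixel makes occluded areas render as black.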
3973  {
3974  auto stream_type = f.get_profile().stream_type();
3975 
3976  switch (stream_type)
3977  {
3978  case RS2_STREAM_COLOR:
3979  {
3980  auto rgb_stream = const_cast<uint8_t*>(static_cast<const uint8_t*>(f.get_data()));
3981  memset(rgb_stream, 0, 3);
3982  // Alternatively, enable the next two lines to render invalidation with magenta color for inspection
3983  //rgb_stream[0] = rgb_stream[2] = 0xff; // Use magenta to highlight the occlusion areas
3984  //rgb_stream[1] = 0;
3985  }
3986  break;
3987  case RS2_STREAM_INFRARED:
3988  {
3989  auto ir_stream = const_cast<uint8_t*>(static_cast<const uint8_t*>(f.get_data()));
3990  memset(ir_stream, 0, 2); // Override the first two bytes to cover Y8/Y16 formats
3991  }
3992  break;
3993  default:
3994  break;
3995  }
3996  }
3997 
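  // map_id and its helpers below record, in viewer.streams_origin, the two-way association
  // between the unique ids of post-processed streams and the original streams they came from,
  // handling frameset-to-frameset, frameset-to-frame and frame-to-frame combinations.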
3999  {
4000  if (auto new_set = new_frame.as<rs2::frameset>())
4001  {
4002  if (auto old_set = old_frame.as<rs2::frameset>())
4003  {
4004  map_id_frameset_to_frameset(new_set, old_set);
4005  }
4006  else
4007  {
4008  map_id_frameset_to_frame(new_set, old_frame);
4009  }
4010  }
4011  else if (auto old_set = old_frame.as<rs2::frameset>())
4012  {
4013  map_id_frameset_to_frame(old_set, new_frame);
4014  }
4015  else
4016  map_id_frame_to_frame(new_frame, old_frame);
4017  }
4018 
4020  {
4021  if (auto f = first.first_or_default(second.get_profile().stream_type()))
4022  {
4023  auto first_uid = f.get_profile().unique_id();
4024  auto second_uid = second.get_profile().unique_id();
4025 
4026  viewer.streams_origin[first_uid] = second_uid;
4027  viewer.streams_origin[second_uid] = first_uid;
4028  }
4029  }
4030 
4032  {
4033  for (auto&& f : first)
4034  {
4035  auto first_uid = f.get_profile().unique_id();
4036  if (auto second_f = second.first_or_default(f.get_profile().stream_type()))
4037  {
4038  auto second_uid = second_f.get_profile().unique_id();
4039 
4040  viewer.streams_origin[first_uid] = second_uid;
4041  viewer.streams_origin[second_uid] = first_uid;
4042  }
4043  }
4044  }
4045 
4047  {
4048  if (first.get_profile().stream_type() == second.get_profile().stream_type())
4049  {
4050  auto first_uid = first.get_profile().unique_id();
4051  auto second_uid = second.get_profile().unique_id();
4052 
4053  viewer.streams_origin[first_uid] = second_uid;
4054  viewer.streams_origin[second_uid] = first_uid;
4055  }
4056  }
4057 
4058 
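  // handle_frame: optionally upload the frame to the GPU, run the post-processing filters,
  // map the resulting stream ids back to their origins, and in 3D view also generate a point
  // cloud from the depth frame and refresh the texture source.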
4060  {
4061  std::vector<rs2::frame> res;
4062 
4063  if (uploader) f = uploader->process(f);
4064 
4065  auto filtered = apply_filters(f, source);
4066 
4067  map_id(filtered, f);
4068 
4069  if (auto composite = filtered.as<rs2::frameset>())
4070  {
4071  for (auto&& frame : composite)
4072  {
4073  res.push_back(frame);
4074  }
4075  }
4076  else
4077  res.push_back(filtered);
4078 
4079  if (viewer.is_3d_view)
4080  {
4081  if (auto depth = viewer.get_3d_depth_source(filtered))
4082  {
4083  switch (depth.get_profile().format())
4084  {
4085  case RS2_FORMAT_DISPARITY32: depth = disp_to_depth.process(depth); break;
4086  case RS2_FORMAT_Z16H: depth = depth_decoder.process(depth); break;
4087  default: break;
4088  }
4089 
4090  pc->set_option(RS2_OPTION_FILTER_MAGNITUDE,
4091  viewer.occlusion_invalidation ? 2.f : 1.f);
4092  res.push_back(pc->calculate(depth));
4093  }
4094  if (auto texture = viewer.get_3d_texture_source(filtered))
4095  {
4096  update_texture(texture);
4097  }
4098  }
4099 
4100  return res;
4101  }
4102 
4103 
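  // Processing-block callback: run handle_frame on the incoming frame and publish the results
  // as a single composite frame.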
4105  {
4106  points p;
4107  std::vector<frame> results;
4108 
4109  auto res = handle_frame(f, source);
4110  auto frame = source.allocate_composite_frame(res);
4111 
4112  if (frame)
4113  source.frame_ready(std::move(frame));
4114  }
4115 
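  // Start / stop the background rendering thread that drains frames and feeds them through
  // the processing block (see render_loop below).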
4117  {
4118  stop();
4119  if (render_thread_active.exchange(true) == false)
4120  {
4121  viewer.syncer->start();
4122  render_thread = std::make_shared<std::thread>([&]() {post_processing_filters::render_loop(); });
4123  }
4124  }
4125 
4127  {
4128  if (render_thread_active.exchange(false) == true)
4129  {
4130  viewer.syncer->stop();
4131  render_thread->join();
4132  render_thread.reset();
4133  }
4134  }
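  // render_loop: while active, either pull synchronized framesets from the viewer's syncer or
  // poll the per-stream queues (30 ms timeout) and invoke the processing block on each frame;
  // exceptions are swallowed so the loop keeps running.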
4136  {
4137  while (render_thread_active)
4138  {
4139  try
4140  {
4141  if (viewer.synchronization_enable)
4142  {
4143  auto frames = viewer.syncer->try_wait_for_frames();
4144  for (auto f : frames)
4145  {
4147  }
4148  }
4149  else
4150  {
4151  std::map<int, rs2::frame_queue> frames_queue_local;
4152  {
4153  std::lock_guard<std::mutex> lock(viewer.streams_mutex);
4154  frames_queue_local = frames_queue;
4155  }
4156  for (auto&& q : frames_queue_local)
4157  {
4158  frame frm;
4159  if (q.second.try_wait_for_frame(&frm, 30))
4160  {
4161  processing_block.invoke(frm);
4162  }
4163  }
4164  }
4165  }
4166  catch (...) {}
4167  }
4168  }
4169 
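  // Start recording the device to the given file path; the compression mode selects how the
  // recorder is constructed. Every subdevice is flagged as being recorded, and errors are
  // reported through error_message instead of being thrown.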
4171  {
4172  if (_recorder != nullptr)
4173  {
4174  return; //already recording
4175  }
4176 
4177  try
4178  {
4180  if (compression_mode == 2)
4181  _recorder = std::make_shared<recorder>(path, dev);
4182  else
4183  _recorder = std::make_shared<recorder>(path, dev, compression_mode == 0);
4184 
4185  for (auto&& sub_dev_model : subdevices)
4186  {
4187  sub_dev_model->_is_being_recorded = true;
4188  }
4189  is_recording = true;
4190  }
4191  catch (const rs2::error& e)
4192  {
4193  error_message = error_to_string(e);
4194  }
4195  catch (const std::exception& e)
4196  {
4197  error_message = e.what();
4198  }
4199  }
4200 
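  // Stop the active recording, clear the per-subdevice recording flags, and notify the user
  // where the file was saved. The two short helpers further down simply pause / resume the
  // recorder.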
4202  {
4203  auto saved_to_filename = _recorder->filename();
4204  _recorder.reset();
4205  for (auto&& sub_dev_model : subdevices)
4206  {
4207  sub_dev_model->_is_being_recorded = false;
4208  }
4209  is_recording = false;
4210  notification_data nd{ to_string() << "Saved recording to:\n" << saved_to_filename,
4213  viewer.not_model->add_notification(nd);
4214  }
4215 
4217  {
4218  _recorder->pause();
4219  }
4220 
4222  {
4223  _recorder->resume();
4224  }
4225 
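  // draw_playback_controls: draw the playback control row (step backward, stop, play/pause,
  // step forward, repeat toggle, speed combo box and info icon) and return its height.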
4227  {
4228  auto p = dev.as<playback>();
4229  rs2_playback_status current_playback_status = p.current_status();
4230 
4231  const int playback_control_height = 35;
4232  const float combo_box_width = 90.f;
4233  const float icon_width = 28;
4234  const float line_width = 255; //Ideally should use: ImGui::GetContentRegionMax().x
4235  //Line looks like this ("-" == space, "[]" == icon, "[ ]" == combo_box): |-[]-[]-[]-[]-[]-[ ]-[]-|
4236  const int num_icons_in_line = 6;
4237  const int num_combo_boxes_in_line = 1;
4238  const int num_spaces_in_line = num_icons_in_line + num_combo_boxes_in_line + 1;
4239  const float required_row_width = (num_combo_boxes_in_line * combo_box_width) + (num_icons_in_line * icon_width);
4240  float space_width = std::max(line_width - required_row_width, 0.f) / num_spaces_in_line;
4241  ImVec2 button_dim = { icon_width, icon_width };
4242 
4243  const bool supports_playback_step = current_playback_status == RS2_PLAYBACK_STATUS_PAUSED;
4244 
4245  ImGui::PushFont(font);
4246 
4248  ImGui::SetCursorPosX(ImGui::GetCursorPosX() + space_width);
4249  std::string label = to_string() << textual_icons::step_backward << "##Step Backward " << id;
4250  if (ImGui::ButtonEx(label.c_str(), button_dim, supports_playback_step ? 0 : ImGuiButtonFlags_Disabled))
4251  {
4252  int fps = 0;
4253  for (auto&& s : viewer.streams)
4254  {
4255  if (s.second.profile.fps() > fps)
4256  fps = s.second.profile.fps();
4257  }
4258  auto curr_frame = p.get_position();
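  // Step size is one frame period in nanoseconds: (1000 / fps) ms * 1e6; e.g. 30 fps gives ~33,333,333 ns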
4259  uint64_t step = uint64_t(1000.0 / (float)fps * 1e6);
4260  if (curr_frame >= step)
4261  {
4262  p.seek(std::chrono::nanoseconds(curr_frame - step));
4263  }
4264  }
4265  if (ImGui::IsItemHovered())
4266  {
 4267  std::string tooltip = to_string() << "Step Backwards" << (supports_playback_step ? "" : " (Not available)");
4268  ImGui::SetTooltip("%s", tooltip.c_str());
4269  }
4270  ImGui::SameLine();
4272 
4273 
4275  ImGui::SetCursorPosX(ImGui::GetCursorPosX() + space_width);
4276  label = to_string() << textual_icons::stop << "##Stop Playback " << id;
4277 
4278  if (ImGui::ButtonEx(label.c_str(), button_dim))
4279  {
4280  bool prev = _playback_repeat;
4281  _playback_repeat = false;
4282  p.stop();
4283  _playback_repeat = prev;
4284  }
4285  if (ImGui::IsItemHovered())
4286  {
4287  std::string tooltip = to_string() << "Stop Playback";
4288  ImGui::SetTooltip("%s", tooltip.c_str());
4289  }
4290  ImGui::SameLine();
4292 
4293 
4294 
4296  ImGui::SetCursorPosX(ImGui::GetCursorPosX() + space_width);
4297  if (current_playback_status == RS2_PLAYBACK_STATUS_PAUSED || current_playback_status == RS2_PLAYBACK_STATUS_STOPPED)
4298  {
4299  label = to_string() << textual_icons::play << "##Play " << id;
4300  if (ImGui::ButtonEx(label.c_str(), button_dim))
4301  {
4302  if (current_playback_status == RS2_PLAYBACK_STATUS_STOPPED)
4303  {
4304  play_defaults(viewer);
4305  }
4306  else
4307  {
4308  syncer->on_frame = [] {};
4309  for (auto&& s : subdevices)
4310  {
4311  s->on_frame = [] {};
4312  if (s->streaming)
4313  s->resume();
4314  }
4315  viewer.paused = false;
4316  p.resume();
4317  }
4318 
4319  }
4320  if (ImGui::IsItemHovered())
4321  {
4322  ImGui::SetTooltip(current_playback_status == RS2_PLAYBACK_STATUS_PAUSED ? "Resume Playback" : "Start Playback");
4323  }
4324  }
4325  else
4326  {
4327  label = to_string() << textual_icons::pause << "##Pause Playback " << id;
4328  if (ImGui::ButtonEx(label.c_str(), button_dim))
4329  {
4330  p.pause();
4331  for (auto&& s : subdevices)
4332  {
4333  if (s->streaming)
4334  s->pause();
4335  }
4336  viewer.paused = true;
4337  }
4338  if (ImGui::IsItemHovered())
4339  {
4340  ImGui::SetTooltip("Pause Playback");
4341  }
4342  }
4343 
4344  ImGui::SameLine();
4346 
4347 
4348 
4349 
4351  ImGui::SetCursorPosX(ImGui::GetCursorPosX() + space_width);
4352  label = to_string() << textual_icons::step_forward << "##Step Forward " << id;
4353  if (ImGui::ButtonEx(label.c_str(), button_dim, supports_playback_step ? 0 : ImGuiButtonFlags_Disabled))
4354  {
4355  int fps = 0;
4356  for (auto&& s : viewer.streams)
4357  {
4358  if (s.second.profile.fps() > fps)
4359  fps = s.second.profile.fps();
4360  }
4361  auto curr_frame = p.get_position();
4362  uint64_t step = uint64_t(1000.0 / (float)fps * 1e6);
4363  p.seek(std::chrono::nanoseconds(curr_frame + step));
4364  }
4365  if (ImGui::IsItemHovered())
4366  {
 4367  std::string tooltip = to_string() << "Step Forward" << (supports_playback_step ? "" : " (Not available)");
4368  ImGui::SetTooltip("%s", tooltip.c_str());
4369  }
4370  ImGui::SameLine();
4372 
4373 
4374 
4375 
4377  ImGui::SetCursorPosX(ImGui::GetCursorPosX() + space_width);
4378  if (_playback_repeat)
4379  {
4382  }
4383  else
4384  {
4387  }
4388  label = to_string() << textual_icons::repeat << "##Repeat " << id;
4389  if (ImGui::ButtonEx(label.c_str(), button_dim))
4390  {
4392  }
4393  if (ImGui::IsItemHovered())
4394  {
4395  std::string tooltip = to_string() << (_playback_repeat ? "Disable " : "Enable ") << "Repeat ";
4396  ImGui::SetTooltip("%s", tooltip.c_str());
4397  }
4399  ImGui::SameLine();
4401 
4402 
4404  auto pos = ImGui::GetCursorPos();
4405  const float speed_combo_box_v_alignment = 3.f;
4406  ImGui::SetCursorPos({ pos.x + space_width, pos.y + speed_combo_box_v_alignment });
4407  ImGui::PushItemWidth(combo_box_width);
4408 
4411 
4412  label = to_string() << "## " << id;
4413  if (ImGui::Combo(label.c_str(), &playback_speed_index, "Speed: x0.25\0Speed: x0.5\0Speed: x1\0Speed: x1.5\0Speed: x2\0\0", -1, false))
4414  {
4415  float speed = 1;
4416  switch (playback_speed_index)
4417  {
4418  case 0: speed = 0.25f; break;
4419  case 1: speed = 0.5f; break;
4420  case 2: speed = 1.0f; break;
4421  case 3: speed = 1.5f; break;
4422  case 4: speed = 2.0f; break;
4423  default:
4424  throw std::runtime_error(to_string() << "Speed #" << playback_speed_index << " is unhandled");
4425  }
4426  p.set_playback_speed(speed);
4427  }
4428  if (ImGui::IsItemHovered())
4429  {
4430  ImGui::SetTooltip("Change playback speed rate");
4431  }
4433  ImGui::SameLine();
4434  //Restore Y movement
4436  ImGui::SetCursorPos({ pos.x, pos.y - speed_combo_box_v_alignment });
4438 
4440  ImGui::SetCursorPosX(ImGui::GetCursorPosX() + space_width);
4441  draw_info_icon(window, font, button_dim);
4443 
4444  ImGui::PopFont();
4445 
4446  return playback_control_height;
4447  }
4448 
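  // pretty_time: format a duration as H:MM:SS.mmm, e.g. 83 seconds becomes "0:01:23.000".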
4450  {
4451  using namespace std::chrono;
4452  auto hhh = duration_cast<hours>(duration);
4453  duration -= hhh;
4454  auto mm = duration_cast<minutes>(duration);
4455  duration -= mm;
4456  auto ss = duration_cast<seconds>(duration);
4457  duration -= ss;
4458  auto ms = duration_cast<milliseconds>(duration);
4459 
4460  std::ostringstream stream;
4461  stream << std::setfill('0') << std::setw(hhh.count() >= 10 ? 2 : 1) << hhh.count() << ':' <<
4462  std::setfill('0') << std::setw(2) << mm.count() << ':' <<
4463  std::setfill('0') << std::setw(2) << ss.count() << '.' <<
4464  std::setfill('0') << std::setw(3) << ms.count();
4465  return stream.str();
4466  }
4467 
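  // draw_seek_bar: map the current playback position onto a 0-100 slider, seek when the user
  // drags it, and print the elapsed and total time beneath it.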
4469  {
4470  auto pos = ImGui::GetCursorPos();
4471 
4472  auto p = dev.as<playback>();
4473  //rs2_playback_status current_playback_status = p.current_status();
4474  int64_t playback_total_duration = p.get_duration().count();
4475  auto progress = p.get_position();
4476  double part = (1.0 * progress) / playback_total_duration;
4477  seek_pos = static_cast<int>(std::max(0.0, std::min(part, 1.0)) * 100);
4478  auto playback_status = p.current_status();
4479  if (seek_pos != 0 && playback_status == RS2_PLAYBACK_STATUS_STOPPED)
4480  {
4481  seek_pos = 0;
4482  }
4483  float seek_bar_width = 300.f;
4484  ImGui::PushItemWidth(seek_bar_width);
4485  std::string label1 = "## " + id;
4486  if (ImGui::SeekSlider(label1.c_str(), &seek_pos, ""))
4487  {
4488  //Seek was dragged
4489  if (playback_status != RS2_PLAYBACK_STATUS_STOPPED) //Ignore seek when playback is stopped
4490  {
4491  auto duration_db = std::chrono::duration_cast<std::chrono::duration<double, std::nano>>(p.get_duration());
4492  auto single_percent = duration_db.count() / 100;
4493  auto seek_time = std::chrono::duration<double, std::nano>(seek_pos * single_percent);
4494  p.seek(std::chrono::duration_cast<std::chrono::nanoseconds>(seek_time));
4495  }
4496  }
4497 
4498  ImGui::SetCursorPos({ pos.x, pos.y + 17 });
4499 
4501  std::string duration_str = pretty_time(std::chrono::nanoseconds(playback_total_duration));
4502  ImGui::Text("%s", time_elapsed.c_str());
4503  ImGui::SameLine();
4504  float pos_y = ImGui::GetCursorPosY();
4505 
4506  ImGui::SetCursorPos({ pos.x + seek_bar_width - 45 , pos_y });
4507  ImGui::Text("%s", duration_str.c_str());
4508 
4509  return 50;
4510  }
4511 
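  // Draw the complete playback panel: the controls row followed by the seek bar; returns the
  // combined height of both.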
4513  {
4519 
4520 
4521  auto pos = ImGui::GetCursorPos();
4522  auto controls_height = draw_playback_controls(window, font, view);
4523  float seek_bar_left_alignment = 4.f;
4524  ImGui::SetCursorPos({ pos.x + seek_bar_left_alignment, pos.y + controls_height });
4525  ImGui::PushFont(font);
4526  auto seek_bar_height = draw_seek_bar();
4527  ImGui::PopFont();
4529  return controls_height + seek_bar_height;
4530 
4531  }
4532 
4533  std::vector<std::string> get_device_info(const device& dev, bool include_location)
4534  {
4535  std::vector<std::string> res;
4536  for (auto i = 0; i < RS2_CAMERA_INFO_COUNT; i++)
4537  {
4538  auto info = static_cast<rs2_camera_info>(i);
4539 
 4540  // When the camera is being reset, either because of a "hardware reset"
 4541  // or because of a switch into advanced mode,
 4542  // we don't want to capture info that is about to change
4545  && !include_location) continue;
4546 
4547  if (dev.supports(info))
4548  {
4549  auto value = dev.get_info(info);
4550  res.push_back(value);
4551  }
4552  }
4553  return res;
4554  }
4555 
4556  std::vector<std::pair<std::string, std::string>> get_devices_names(const device_list& list)
4557  {
4558  std::vector<std::pair<std::string, std::string>> device_names;
4559 
4560  for (uint32_t i = 0; i < list.size(); i++)
4561  {
4562  try
4563  {
4564  auto dev = list[i];
4565  device_names.push_back(get_device_name(dev)); // push name and sn to list
4566  }
4567  catch (...)
4568  {
4569  device_names.push_back(std::pair<std::string, std::string>(to_string() << "Unknown Device #" << i, ""));
4570  }
4571  }
4572  return device_names;
4573  }
4574 
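  // Modal Yes/No dialog. Returns true once the user clicks a button; the choice is reported
  // through 'approved'. When 'disabled' is set, only the disabled_reason text and a Close
  // button are shown.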
4575  bool yes_no_dialog(const std::string& title, const std::string& message_text, bool& approved, ux_window& window, const std::string& error_message, bool disabled, const std::string& disabled_reason)
4576  {
4577  ImGui_ScopePushFont(window.get_font());
4586  auto clicked = false;
4587 
4588  ImGui::OpenPopup(title.c_str());
4589  ImGui::SetNextWindowPos( {window.width() * 0.35f, window.height() * 0.35f });
4590  if (ImGui::BeginPopup(title.c_str()))
4591  {
4592  {
4594 
4596  ImGui::Text("%s", title.c_str());
4597  }
4598  {
4600  ImGui::Separator();
4602  ImGui::Text("\n%s\n", message_text.c_str());
4603 
4604  if (!disabled)
4605  {
4606  ImGui::SameLine();
4607  auto width = ImGui::GetWindowWidth();
4608  ImGui::Dummy(ImVec2(0, 0));
4609  ImGui::Dummy(ImVec2(width / 3.f, 0));
4610  ImGui::SameLine();
4611  if (ImGui::Button("Yes", ImVec2(60, 30)))
4612  {
4614  approved = true;
4615  clicked = true;
4616  }
4617  ImGui::SameLine();
4618  if (ImGui::Button("No", ImVec2(60, 30)))
4619  {
4621  approved = false;
4622  clicked = true;
4623  }
4624  }
4625  else
4626  {
4627  ImGui::NewLine();
4628  {
4630  ImGui::Text("%s\n\n", disabled_reason.c_str());
4631  }
4632  auto window_width = ImGui::GetWindowWidth();
4633  ImGui::SetCursorPosX(ImGui::GetCursorPosX() + window_width / 2.f - 30.f - ImGui::GetStyle().WindowPadding.x);
4634  if (ImGui::Button("Close", ImVec2(60, 30)))
4635  {
4637  approved = false;
4638  clicked = true;
4639  }
4640  }
4641  }
4642  ImGui::EndPopup();
4643  }
4644  return clicked;
4645  }
4646 
 4647  // Create a process-status window with process details supplied by the caller,
 4648  // and a close button whose availability is controlled by the caller
4649  bool status_dialog(const std::string& title, const std::string& process_topic_text, const std::string& process_status_text , bool enable_close, ux_window& window)
4650  {
4651  ImGui_ScopePushFont(window.get_font());
4661  auto close_clicked = false;
4662 
4663  ImGui::OpenPopup(title.c_str());
4664  ImGui::SetNextWindowPos({ window.width() * 0.35f, window.height() * 0.35f });
4665  if (ImGui::BeginPopup(title.c_str()))
4666  {
4667  {
4669 
4671  ImGui::Text("%s", title.c_str());
4672  }
4673  {
4674 
4675  ImGui::Separator();
4677 
4678  ImGui::NewLine();
4679  ImGui::Text("%s", process_topic_text.c_str());
4680  ImGui::NewLine();
4681 
4682  auto window_width = ImGui::GetWindowWidth();
4683  auto process_status_text_size = ImGui::CalcTextSize(process_status_text.c_str()).x + ImGui::CalcTextSize("Status: ").x + ImGui::GetStyle().WindowPadding.x;
4684  auto future_window_width = std::max(process_status_text_size, window_width);
4685  ImGui::SetCursorPosX(ImGui::GetCursorPosX() + future_window_width / 2.f - process_status_text_size / 2.f);
4686 
4687  ImGui::Text("Status: %s", process_status_text.c_str());
4688  ImGui::NewLine();
4689  window_width = ImGui::GetWindowWidth();
4690 
4691  if (enable_close)
4692  {
4693  ImGui::SetCursorPosX(ImGui::GetCursorPosX() + window_width / 2.f - 50.f); // 50 = 30 (button size) + 20 (padding)
4694  if (ImGui::Button("Close", ImVec2(60, 30)))
4695  {
4697  close_clicked = true;
4698  }
4699  }
4700  }
4701  ImGui::EndPopup();
4702  }
4703  return close_clicked;
4704  }
4705 
4706  bool device_model::prompt_toggle_advanced_mode(bool enable_advanced_mode, const std::string& message_text, std::vector<std::string>& restarting_device_info, viewer_model& view, ux_window& window, const std::string& error_message)
4707  {
4708  bool keep_showing = true;
4709  bool yes_was_chosen = false;
4710  if (yes_no_dialog("Advanced Mode", message_text, yes_was_chosen, window, error_message))
4711  {
4712  if (yes_was_chosen)
4713  {
4714  dev.as<advanced_mode>().toggle_advanced_mode(enable_advanced_mode);
4715  restarting_device_info = get_device_info(dev, false);
4716  view.not_model->add_log(enable_advanced_mode ? "Turning on advanced mode..." : "Turning off advanced mode...");
4717  }
4718  keep_showing = false;
4719  }
4720  return keep_showing;
4721  }
4722 
4723 
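  // Draw the "Advanced Controls" tree node for devices exposing advanced mode; when the mode
  // is disabled, offer a button (guarded by a yes/no confirmation dialog) to turn it on.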
4725  {
4726  bool was_set = false;
4727 
4728  ImGui::PushStyleColor(ImGuiCol_TextSelectedBg, { 0.9f, 0.9f, 0.9f, 1 });
4729 
4730  auto is_advanced_mode = dev.is<advanced_mode>();
4731  if (is_advanced_mode && ImGui::TreeNode("Advanced Controls"))
4732  {
4733  try
4734  {
4735  auto advanced = dev.as<advanced_mode>();
4736  if (advanced.is_enabled())
4737  {
4738  draw_advanced_mode_controls(advanced, amc, get_curr_advanced_controls, was_set, error_message);
4739  }
4740  else
4741  {
4742  ImGui::TextColored(redish, "Device is not in advanced mode");
4743  std::string button_text = to_string() << "Turn on Advanced Mode" << "##" << id;
4744  static bool show_yes_no_modal = false;
4745  if (ImGui::Button(button_text.c_str(), ImVec2{ 226, 0 }))
4746  {
4747  show_yes_no_modal = true;
4748  }
4749  if (ImGui::IsItemHovered())
4750  {
4751  ImGui::SetTooltip("Advanced mode is a persistent camera state unlocking calibration formats and depth generation controls\nYou can always reset the camera to factory defaults by disabling advanced mode");
4752  }
4753  if (show_yes_no_modal)
4754  {
4755  show_yes_no_modal = prompt_toggle_advanced_mode(true, "\t\tAre you sure you want to turn on Advanced Mode?\t\t", restarting_device_info, view, window, error_message);
4756  }
4757  }
4758  }
4759  catch (const std::exception& ex)
4760  {
4761  error_message = ex.what();
4762  }
4763 
4764  ImGui::TreePop();
4765  }
4766 
4768  return was_set;
4769  }
4770 
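  // draw_info_icon: draw the info-circle button that toggles the device-details view.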
4772  {
4773  std::string info_button_name = to_string() << textual_icons::info_circle << "##" << id;
4774  auto info_button_color = show_device_info ? light_blue : light_grey;
4775  ImGui::PushStyleColor(ImGuiCol_Text, info_button_color);
4776  ImGui::PushStyleColor(ImGuiCol_TextSelectedBg, info_button_color);
4777  if (ImGui::Button(info_button_name.c_str(), size))
4778  {
4780  }
4781  if (ImGui::IsItemHovered())
4782  {
4783  ImGui::SetTooltip("%s", show_device_info ? "Hide Device Details" : "Show Device Details");
4784  window.link_hovered();
4785  }
4787  }
4788 
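  // Open a file dialog for an unsigned firmware image (*.bin) and, if a file is chosen, read
  // its contents into memory.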
4790  {
4791  try
4792  {
4793  std::vector<uint8_t> data;
4794  auto ret = file_dialog_open(open_file, "Unsigned Firmware Image\0*.bin\0", NULL, NULL);
4795  if (ret)
4796  {
4797  std::ifstream file(ret, std::ios::binary | std::ios::in);
4798  if (file.good())
4799  {
4800  data = std::vector<uint8_t>((std::istreambuf_iterator<char>(file)),
4801  std::istreambuf_iterator<char>());