archive.cpp
// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2019 Intel Corporation. All Rights Reserved.
#include "metadata-parser.h"
#include "archive.h"
#include <fstream>
#include "core/processing.h"
#include "core/video.h"
#include "frame-archive.h"

#define MIN_DISTANCE 1e-6

namespace librealsense
{
    std::shared_ptr<sensor_interface> frame::get_sensor() const
    {
        auto res = sensor.lock();
        if (!res)
        {
            auto archive = get_owner();
            if (archive) return archive->get_sensor();
        }
        return res;
    }

    void frame::set_sensor(std::shared_ptr<sensor_interface> s) { sensor = s; }

    float3* points::get_vertices()
    {
        get_frame_data(); // call GetData to ensure data is in main memory
        auto xyz = (float3*)data.data();
        return xyz;
    }

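    // Sample the color of the texture frame at normalized coordinates (u, v).
    // The lookup rounds to the nearest texel and clamps to the image bounds; it
    // assumes the texture provides at least three bytes per pixel (e.g. RGB8),
    // since three consecutive bytes are returned as an (r, g, b) tuple.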
    std::tuple<uint8_t, uint8_t, uint8_t> get_texcolor(const frame_holder& texture, float u, float v)
    {
        auto ptr = dynamic_cast<video_frame*>(texture.frame);
        if (ptr == nullptr) {
            throw librealsense::invalid_value_exception("frame must be video frame");
        }
        const int w = ptr->get_width(), h = ptr->get_height();
        int x = std::min(std::max(int(u * w + .5f), 0), w - 1);
        int y = std::min(std::max(int(v * h + .5f), 0), h - 1);
        int idx = x * ptr->get_bpp() / 8 + y * ptr->get_stride();
        const auto texture_data = reinterpret_cast<const uint8_t*>(ptr->get_frame_data());
        return std::make_tuple(texture_data[idx], texture_data[idx + 1], texture_data[idx + 2]);
    }

    void points::export_to_ply(const std::string& fname, const frame_holder& texture)
    {
        auto stream_profile = get_stream().get();
        auto video_stream_profile = dynamic_cast<video_stream_profile_interface*>(stream_profile);
        if (!video_stream_profile)
            throw librealsense::invalid_value_exception("stream must be video stream");
        const auto vertices = get_vertices();
        const auto texcoords = get_texture_coordinates();
        std::vector<float3> new_vertices;
        std::vector<std::tuple<uint8_t, uint8_t, uint8_t>> new_tex;
        std::map<int, int> index2reducedIndex;

        new_vertices.reserve(get_vertex_count());
        new_tex.reserve(get_vertex_count());
        assert(get_vertex_count());
        // Keep only vertices with a meaningful distance from the origin; flip Y and Z
        // so the exported cloud matches the conventional PLY orientation
        for (int i = 0; i < get_vertex_count(); ++i)
            if (fabs(vertices[i].x) >= MIN_DISTANCE || fabs(vertices[i].y) >= MIN_DISTANCE ||
                fabs(vertices[i].z) >= MIN_DISTANCE)
            {
                index2reducedIndex[i] = (int)new_vertices.size();
                new_vertices.push_back({ vertices[i].x, -1 * vertices[i].y, -1 * vertices[i].z });
                if (texture)
                {
                    auto color = get_texcolor(texture, texcoords[i].x, texcoords[i].y);
                    new_tex.push_back(color);
                }
            }

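        // Build a triangle mesh from the organized point grid: every 2x2 neighborhood of
        // valid vertices whose depth values differ by less than `threshold` (5 cm) is split
        // into two triangles. Quads touching a discarded (near-zero) vertex are skipped.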
        const auto threshold = 0.05f;
        auto width = video_stream_profile->get_width();
        std::vector<std::tuple<int, int, int>> faces;
        for (uint32_t x = 0; x < width - 1; ++x)
        {
            for (uint32_t y = 0; y < video_stream_profile->get_height() - 1; ++y)
            {
                auto a = y * width + x, b = y * width + x + 1, c = (y + 1) * width + x, d = (y + 1) * width + x + 1;
                if (vertices[a].z && vertices[b].z && vertices[c].z && vertices[d].z
                    && std::fabs(vertices[a].z - vertices[b].z) < threshold && std::fabs(vertices[a].z - vertices[c].z) < threshold
                    && std::fabs(vertices[b].z - vertices[d].z) < threshold && std::fabs(vertices[c].z - vertices[d].z) < threshold)
                {
                    if (index2reducedIndex.count(a) == 0 || index2reducedIndex.count(b) == 0 || index2reducedIndex.count(c) == 0 ||
                        index2reducedIndex.count(d) == 0)
                        continue;

                    faces.emplace_back(index2reducedIndex[a], index2reducedIndex[d], index2reducedIndex[b]);
                    faces.emplace_back(index2reducedIndex[d], index2reducedIndex[a], index2reducedIndex[c]);
                }
            }
        }

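        // Write the PLY header as plain text, then reopen the file in binary append mode
        // for the vertex and face payload ("format binary_little_endian 1.0").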
        std::ofstream out(fname);
        out << "ply\n";
        out << "format binary_little_endian 1.0\n";
        out << "comment pointcloud saved from Realsense Viewer\n";
        out << "element vertex " << new_vertices.size() << "\n";
        out << "property float" << sizeof(float) * 8 << " x\n";
        out << "property float" << sizeof(float) * 8 << " y\n";
        out << "property float" << sizeof(float) * 8 << " z\n";
        if (texture)
        {
            out << "property uchar red\n";
            out << "property uchar green\n";
            out << "property uchar blue\n";
        }
        out << "element face " << faces.size() << "\n";
        out << "property list uchar int vertex_indices\n";
        out << "end_header\n";
        out.close();

        out.open(fname, std::ios_base::app | std::ios_base::binary);
        for (size_t i = 0; i < new_vertices.size(); ++i)
        {
            // we assume little endian architecture on your device
            out.write(reinterpret_cast<const char*>(&(new_vertices[i].x)), sizeof(float));
            out.write(reinterpret_cast<const char*>(&(new_vertices[i].y)), sizeof(float));
            out.write(reinterpret_cast<const char*>(&(new_vertices[i].z)), sizeof(float));

            if (texture)
            {
                uint8_t x, y, z;
                std::tie(x, y, z) = new_tex[i];
                out.write(reinterpret_cast<const char*>(&x), sizeof(uint8_t));
                out.write(reinterpret_cast<const char*>(&y), sizeof(uint8_t));
                out.write(reinterpret_cast<const char*>(&z), sizeof(uint8_t));
            }
        }
        auto size = faces.size();
        for (size_t i = 0; i < size; ++i) {
            // each face record starts with its vertex count (always 3 for triangles)
            uint8_t three = 3;
            out.write(reinterpret_cast<const char*>(&three), sizeof(uint8_t));
            out.write(reinterpret_cast<const char*>(&(std::get<0>(faces[i]))), sizeof(int));
            out.write(reinterpret_cast<const char*>(&(std::get<1>(faces[i]))), sizeof(int));
            out.write(reinterpret_cast<const char*>(&(std::get<2>(faces[i]))), sizeof(int));
        }
    }

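    // The points frame buffer packs an array of float3 XYZ vertices followed by an
    // array of (u, v) texture coordinates, one pair per vertex, which is why the
    // vertex count below divides the buffer size by sizeof(float3) + sizeof(int2).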
    size_t points::get_vertex_count() const
    {
        return data.size() / (sizeof(float3) + sizeof(int2));
    }

    float2* points::get_texture_coordinates()
    {
        get_frame_data(); // call GetData to ensure data is in main memory
        auto xyz = (float3*)data.data();
        auto ijs = (float2*)(xyz + get_vertex_count());
        return ijs;
    }

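    // Create the frame pool ("archive") that owns and recycles frames of the requested
    // extension type; each supported rs2_extension maps to a frame_archive specialized
    // on the corresponding concrete frame class.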
    std::shared_ptr<archive_interface> make_archive(rs2_extension type,
        std::atomic<uint32_t>* in_max_frame_queue_size,
        std::shared_ptr<platform::time_service> ts,
        std::shared_ptr<metadata_parser_map> parsers)
    {
        switch (type)
        {
        case RS2_EXTENSION_VIDEO_FRAME:
            return std::make_shared<frame_archive<video_frame>>(in_max_frame_queue_size, ts, parsers);

        case RS2_EXTENSION_COMPOSITE_FRAME:
            return std::make_shared<frame_archive<composite_frame>>(in_max_frame_queue_size, ts, parsers);

        case RS2_EXTENSION_MOTION_FRAME:
            return std::make_shared<frame_archive<motion_frame>>(in_max_frame_queue_size, ts, parsers);

        case RS2_EXTENSION_POINTS:
            return std::make_shared<frame_archive<points>>(in_max_frame_queue_size, ts, parsers);

        case RS2_EXTENSION_DEPTH_FRAME:
            return std::make_shared<frame_archive<depth_frame>>(in_max_frame_queue_size, ts, parsers);

        case RS2_EXTENSION_POSE_FRAME:
            return std::make_shared<frame_archive<pose_frame>>(in_max_frame_queue_size, ts, parsers);

        case RS2_EXTENSION_DISPARITY_FRAME:
            return std::make_shared<frame_archive<disparity_frame>>(in_max_frame_queue_size, ts, parsers);

        default:
            throw std::runtime_error("Requested frame type is not supported!");
        }
    }

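    // Frame lifecycle: frames are reference counted. publish() hands a frame to its
    // archive and resets the "kept" flag; release() drops one reference and returns
    // the frame to the archive's pool once the count reaches zero; keep() asks the
    // archive to hold the frame beyond the normal streaming lifetime.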
    void frame::release()
    {
        if (ref_count.fetch_sub(1) == 1)
        {
            unpublish();
            on_release();
            owner->unpublish_frame(this);
        }
    }

    void frame::keep()
    {
        if (!_kept.exchange(true))
        {
            owner->keep_frame(this);
        }
    }

    frame_interface* frame::publish(std::shared_ptr<archive_interface> new_owner)
    {
        owner = new_owner;
        _kept = false;
        return owner->publish_frame(this);
    }


    rs2_metadata_type frame::get_frame_metadata(const rs2_frame_metadata_value& frame_metadata) const
    {
        if (!metadata_parsers)
            throw invalid_value_exception(to_string() << "metadata not available for "
                << get_string(get_stream()->get_stream_type()) << " stream");

        auto parsers = metadata_parsers->equal_range(frame_metadata);
        if (parsers.first == metadata_parsers->end()) // Possible user error - md attribute is not supported by this frame type
            throw invalid_value_exception(to_string() << get_string(frame_metadata)
                << " attribute is not applicable for "
                << get_string(get_stream()->get_stream_type()) << " stream ");

        // Several parsers may be registered for the same attribute; return the first value
        // that parses successfully, or rethrow the last failure if none succeeds
        rs2_metadata_type result = 0;
        bool value_retrieved = false;
        std::string exc_str;
        for (auto it = parsers.first; it != parsers.second; ++it)
        {
            try
            {
                result = it->second->get(*this);
                value_retrieved = true;
                break;
            }
            catch (invalid_value_exception& e)
            {
                exc_str = e.what();
            }
        }
        if (!value_retrieved)
            throw invalid_value_exception(exc_str);

        return result;
    }

    bool frame::supports_frame_metadata(const rs2_frame_metadata_value& frame_metadata) const
    {
        // verify preconditions
        if (!metadata_parsers)
            return false; // No parsers are available or no metadata was attached

        bool ret = false;
        auto found = metadata_parsers->equal_range(frame_metadata);
        if (found.first == metadata_parsers->end())
            return false;

        for (auto it = found.first; it != found.second; ++it)
            if (it->second->supports(*this))
            {
                ret = true;
                break;
            }

        return ret;
    }

    int frame::get_frame_data_size() const
    {
        return (int)data.size();
    }

    const byte* frame::get_frame_data() const
    {
        const byte* frame_data = data.data();

        // If an external buffer was attached via the release continuation,
        // return it instead of the archive-owned copy
        if (on_release.get_data())
        {
            frame_data = static_cast<const byte*>(on_release.get_data());
        }

        return frame_data;
    }

    rs2_timestamp_domain frame::get_frame_timestamp_domain() const
    {
        return additional_data.timestamp_domain;
    }

    rs2_time_t frame::get_frame_timestamp() const
    {
        return additional_data.timestamp;
    }

    unsigned long long frame::get_frame_number() const
    {
        return additional_data.frame_number;
    }

    rs2_time_t frame::get_frame_system_time() const
    {
        return additional_data.system_time;
    }

    void frame::update_frame_callback_start_ts(rs2_time_t ts)
    {
        additional_data.frame_callback_started = ts;
    }

    rs2_time_t frame::get_frame_callback_start_time_point() const
    {
        return additional_data.frame_callback_started;
    }

    void frame::log_callback_start(rs2_time_t timestamp)
    {
        update_frame_callback_start_ts(timestamp);
        LOG_DEBUG("CallbackStarted," << std::dec << librealsense::get_string(get_stream()->get_stream_type()) << "," << get_frame_number() << ",DispatchedAt," << std::fixed << timestamp);
    }

    void frame::log_callback_end(rs2_time_t timestamp) const
    {
        auto callback_warning_duration = 1000.f / (get_stream()->get_framerate() + 1);
        auto callback_duration = timestamp - get_frame_callback_start_time_point();

        LOG_DEBUG("CallbackFinished," << librealsense::get_string(get_stream()->get_stream_type()) << ","
            << std::dec << get_frame_number() << ",DispatchedAt," << std::fixed << timestamp);

        if (callback_duration > callback_warning_duration)
        {
            LOG_INFO("Frame Callback " << librealsense::get_string(get_stream()->get_stream_type())
                << " #" << std::dec << get_frame_number()
                << " overdue. (Duration: " << callback_duration
                << "ms, FPS: " << get_stream()->get_framerate() << ", Max Duration: " << callback_warning_duration << "ms)");
        }
    }
}
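
For reference, a minimal sketch of how the export path above is typically reached through the public librealsense API. This is an illustration, not part of archive.cpp: identifiers such as rs2::pipeline, rs2::pointcloud and rs2::points come from the public headers, and the output filename "pointcloud.ply" is arbitrary.

#include <librealsense2/rs.hpp>

int main()
{
    rs2::pipeline pipe;
    pipe.start();                                  // start depth (and color, if available) streaming

    rs2::frameset frames = pipe.wait_for_frames();
    rs2::depth_frame depth = frames.get_depth_frame();
    rs2::video_frame color = frames.get_color_frame();

    rs2::pointcloud pc;
    if (color) pc.map_to(color);                   // attach texture coordinates to the cloud
    rs2::points points = pc.calculate(depth);      // produces the points frame handled above

    // Forwards internally to points::export_to_ply() defined in this file
    points.export_to_ply("pointcloud.ply", color);
    return 0;
}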