disparity_encoding.cpp
#include <atomic>
#include <csignal>
#include <fstream>
#include <iostream>

// Includes the common headers needed for development with the depthai library
#include "depthai/depthai.hpp"

static std::atomic<bool> alive{true};
static void sigintHandler(int signum) {
    alive = false;
}

int main() {
    using namespace std;
    std::signal(SIGINT, &sigintHandler);

    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoLeft->setCamera("left");

    auto monoRight = pipeline.create<dai::node::MonoCamera>();
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoRight->setCamera("right");

    auto stereo = pipeline.create<dai::node::StereoDepth>();
    stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_DENSITY);
    stereo->initialConfig.setMedianFilter(dai::MedianFilter::KERNEL_7x7);
    stereo->setLeftRightCheck(false);
    stereo->setExtendedDisparity(false);
    // Subpixel disparity is of UINT16 format, which is unsupported by VideoEncoder
    stereo->setSubpixel(false);
    monoLeft->out.link(stereo->left);
    monoRight->out.link(stereo->right);

    auto videoEnc = pipeline.create<dai::node::VideoEncoder>();
    videoEnc->setDefaultProfilePreset(30, dai::VideoEncoderProperties::Profile::MJPEG);
    stereo->disparity.link(videoEnc->input);

    auto xout = pipeline.create<dai::node::XLinkOut>();
    xout->setStreamName("disparity");
    videoEnc->bitstream.link(xout->input);

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    // Output queue will be used to get the disparity frames from the outputs defined above
    auto q = device.getOutputQueue("disparity");

    auto videoFile = std::ofstream("disparity.mjpeg", std::ios::binary);
    cout << "Press Ctrl+C to stop encoding..." << endl;

    while(alive) {
        auto h265Packet = q->get<dai::ImgFrame>();
        videoFile.write((char*)(h265Packet->getData().data()), h265Packet->getData().size());
    }

    cout << "To view the encoded data, convert the stream file (.mjpeg) into a video file (.mp4) using the command below:" << endl;
    cout << "ffmpeg -framerate 30 -i disparity.mjpeg -c copy video.mp4" << endl;

    return 0;
}
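
The example above disables subpixel mode because, as its comment notes, subpixel disparity frames are UINT16 and cannot be consumed by the VideoEncoder. A minimal sketch of the alternative, assuming the same depthai API used in the example: skip the encoder and send the raw subpixel disparity frames to the host over XLinkOut, writing them to disk unencoded. The stream name "raw_disparity", the output file "disparity_u16.raw", and the fixed frame count are illustrative choices, not part of the documented example.

#include <fstream>

#include "depthai/depthai.hpp"

int main() {
    dai::Pipeline pipeline;

    // Same mono camera + stereo setup as in the example above
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    monoLeft->setCamera("left");
    auto monoRight = pipeline.create<dai::node::MonoCamera>();
    monoRight->setCamera("right");

    auto stereo = pipeline.create<dai::node::StereoDepth>();
    stereo->setSubpixel(true);  // disparity output becomes UINT16, so no VideoEncoder in this path
    monoLeft->out.link(stereo->left);
    monoRight->out.link(stereo->right);

    // Send the raw (unencoded) disparity frames straight to the host
    auto xout = pipeline.create<dai::node::XLinkOut>();
    xout->setStreamName("raw_disparity");  // illustrative stream name
    stereo->disparity.link(xout->input);

    dai::Device device(pipeline);
    auto q = device.getOutputQueue("raw_disparity");

    std::ofstream file("disparity_u16.raw", std::ios::binary);
    for(int i = 0; i < 300; i++) {  // grab a fixed number of frames for brevity
        auto frame = q->get<dai::ImgFrame>();
        file.write((char*)(frame->getData().data()), frame->getData().size());
    }
    return 0;
}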


depthai
Author(s): Martin Peterlin
autogenerated on Sat Mar 22 2025 02:58:19