rgb_mobilenet.cpp
#include <chrono>
#include <cstdio>
#include <iomanip>
#include <iostream>
#include <sstream>

#include "utility.hpp"

// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"

// MobilenetSSD label texts
static const std::vector<std::string> labelMap = {"background", "aeroplane", "bicycle", "bird", "boat", "bottle", "bus",
                                                  "car", "cat", "chair", "cow", "diningtable", "dog", "horse",
                                                  "motorbike", "person", "pottedplant", "sheep", "sofa", "train", "tvmonitor"};

static std::atomic<bool> syncNN{true};

int main(int argc, char** argv) {
    using namespace std;
    using namespace std::chrono;
    // Default blob path provided by Hunter private data download
    // Applicable for easier example usage only
    std::string nnPath(BLOB_PATH);

    // If path to blob specified, use that
    if(argc > 1) {
        nnPath = std::string(argv[1]);
    }

    // Print which blob we are using
    printf("Using blob at path: %s\n", nnPath.c_str());

    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto nn = pipeline.create<dai::node::MobileNetDetectionNetwork>();
    auto xoutRgb = pipeline.create<dai::node::XLinkOut>();
    auto nnOut = pipeline.create<dai::node::XLinkOut>();
    auto nnNetworkOut = pipeline.create<dai::node::XLinkOut>();

    xoutRgb->setStreamName("rgb");
    nnOut->setStreamName("nn");
    nnNetworkOut->setStreamName("nnNetwork");

    // Properties
    camRgb->setPreviewSize(300, 300);  // NN input
    camRgb->setInterleaved(false);
    camRgb->setFps(40);
    // Define a neural network that will make predictions based on the source frames
    nn->setConfidenceThreshold(0.5);
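    // Only detections with a confidence of at least 0.5 are kept in the parsed detection output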
    nn->setBlobPath(nnPath);
    nn->setNumInferenceThreads(2);
    nn->input.setBlocking(false);

    // Linking
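    // With syncNN enabled, the frames shown on the host are taken from the detection
    // network's passthrough output, so every displayed frame is the exact frame the
    // detections were computed on; otherwise the camera preview is streamed out directly
    // and may run ahead of the detection results.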
    if(syncNN) {
        nn->passthrough.link(xoutRgb->input);
    } else {
        camRgb->preview.link(xoutRgb->input);
    }

    camRgb->preview.link(nn->input);
    nn->out.link(nnOut->input);
    nn->outNetwork.link(nnNetworkOut->input);

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    // Output queues will be used to get the rgb frames and nn data from the outputs defined above
    auto qRgb = device.getOutputQueue("rgb", 4, false);
    auto qDet = device.getOutputQueue("nn", 4, false);
    auto qNN = device.getOutputQueue("nnNetwork", 4, false);
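    // Each host-side queue holds at most 4 messages; with blocking disabled, the oldest
    // message is discarded when a queue fills up instead of back-pressuring the device.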

    cv::Mat frame;
    std::vector<dai::ImgDetection> detections;
    auto startTime = steady_clock::now();
    int counter = 0;
    float fps = 0;
    auto color2 = cv::Scalar(255, 255, 255);

    // Add bounding boxes and text to the frame and show it to the user
    auto displayFrame = [](std::string name, cv::Mat frame, std::vector<dai::ImgDetection>& detections) {
        auto color = cv::Scalar(255, 0, 0);
        // nn data (bounding box locations) are in the <0..1> range - they need to be scaled by the frame width/height
        for(auto& detection : detections) {
            int x1 = detection.xmin * frame.cols;
            int y1 = detection.ymin * frame.rows;
            int x2 = detection.xmax * frame.cols;
            int y2 = detection.ymax * frame.rows;

            uint32_t labelIndex = detection.label;
            std::string labelStr = to_string(labelIndex);
            if(labelIndex < labelMap.size()) {
                labelStr = labelMap[labelIndex];
            }
            cv::putText(frame, labelStr, cv::Point(x1 + 10, y1 + 20), cv::FONT_HERSHEY_TRIPLEX, 0.5, color);
            std::stringstream confStr;
            confStr << std::fixed << std::setprecision(2) << detection.confidence * 100;
            cv::putText(frame, confStr.str(), cv::Point(x1 + 10, y1 + 40), cv::FONT_HERSHEY_TRIPLEX, 0.5, color);
            cv::rectangle(frame, cv::Rect(cv::Point(x1, y1), cv::Point(x2, y2)), color, 1);
        }
        // Show the frame
        cv::imshow(name, frame);
    };

    bool printOutputLayersOnce = true;

    while(true) {
        std::shared_ptr<dai::ImgFrame> inRgb;
        std::shared_ptr<dai::ImgDetections> inDet;
        std::shared_ptr<dai::NNData> inNN;

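        // In synced mode, get() blocks until each queue has a message, so the frame,
        // detections and raw NN data stay in lockstep; tryGet() returns nullptr right
        // away when nothing new has arrived.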
        if(syncNN) {
            inRgb = qRgb->get<dai::ImgFrame>();
            inDet = qDet->get<dai::ImgDetections>();
            inNN = qNN->get<dai::NNData>();
        } else {
            inRgb = qRgb->tryGet<dai::ImgFrame>();
            inDet = qDet->tryGet<dai::ImgDetections>();
            inNN = qNN->tryGet<dai::NNData>();
        }

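        // Update the reported NN FPS estimate roughly once per second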
        counter++;
        auto currentTime = steady_clock::now();
        auto elapsed = duration_cast<duration<float>>(currentTime - startTime);
        if(elapsed > seconds(1)) {
            fps = counter / elapsed.count();
            counter = 0;
            startTime = currentTime;
        }

        if(inRgb) {
            frame = inRgb->getCvFrame();
            std::stringstream fpsStr;
            fpsStr << "NN fps: " << std::fixed << std::setprecision(2) << fps;
            cv::putText(frame, fpsStr.str(), cv::Point(2, inRgb->getHeight() - 4), cv::FONT_HERSHEY_TRIPLEX, 0.4, color2);
        }

        if(inDet) {
            detections = inDet->detections;
        }

        if(printOutputLayersOnce && inNN) {
            std::cout << "Output layer names: ";
            for(const auto& ten : inNN->getAllLayerNames()) {
                std::cout << ten << ", ";
            }
            std::cout << std::endl;
            printOutputLayersOnce = false;
        }

        if(!frame.empty()) {
            displayFrame("video", frame, detections);
        }

        int key = cv::waitKey(1);
        if(key == 'q' || key == 'Q') {
            return 0;
        }
    }
    return 0;
}