yolov8_nano.cpp
Go to the documentation of this file.
1 #include <chrono>
2 #include <iostream>
3 
4 // Includes common necessary includes for development using depthai library
5 #include "depthai/depthai.hpp"
6 
/*
The code is the same as for Tiny YOLOv3/v4; the only differences are the blob file
and the anchors, which are not needed for YOLOv8.
The blob was compiled following this tutorial: https://github.com/TNTWEN/OpenVINO-YOLOV4
*/
11 
// The 80 COCO class names, in the index order produced by the YOLO network.
// A detection's `label` field is used directly as an index into this list.
static const std::vector<std::string> labelMap = {
    "person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", "boat",
    "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse",
    "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag",
    "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove",
    "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon",
    "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza",
    "donut", "cake", "chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor",
    "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink",
    "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"};
22 
23 static std::atomic<bool> syncNN{true};
24 
25 int main(int argc, char** argv) {
26  using namespace std;
27  using namespace std::chrono;
28  std::string nnPath(BLOB_PATH);
29 
30  // If path to blob specified, use that
31  if(argc > 1) {
32  nnPath = std::string(argv[1]);
33  }
34 
35  // Print which blob we are using
36  printf("Using blob at path: %s\n", nnPath.c_str());
37 
38  // Create pipeline
39  dai::Pipeline pipeline;
40 
41  // Define sources and outputs
42  auto camRgb = pipeline.create<dai::node::ColorCamera>();
43  auto detectionNetwork = pipeline.create<dai::node::YoloDetectionNetwork>();
44  auto xoutRgb = pipeline.create<dai::node::XLinkOut>();
45  auto nnOut = pipeline.create<dai::node::XLinkOut>();
46 
47  xoutRgb->setStreamName("rgb");
48  nnOut->setStreamName("detections");
49 
50  // Properties
51  camRgb->setPreviewSize(640, 352);
53  camRgb->setInterleaved(false);
54  camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);
55  camRgb->setFps(40);
56 
57  // Network specific settings
58  detectionNetwork->setConfidenceThreshold(0.5f);
59  detectionNetwork->setNumClasses(80);
60  detectionNetwork->setCoordinateSize(4);
61  detectionNetwork->setIouThreshold(0.5f);
62  detectionNetwork->setBlobPath(nnPath);
63  detectionNetwork->setNumInferenceThreads(2);
64  detectionNetwork->input.setBlocking(false);
65 
66  // Linking
67  camRgb->preview.link(detectionNetwork->input);
68  if(syncNN) {
69  detectionNetwork->passthrough.link(xoutRgb->input);
70  } else {
71  camRgb->preview.link(xoutRgb->input);
72  }
73 
74  detectionNetwork->out.link(nnOut->input);
75 
76  // Connect to device and start pipeline
77  dai::Device device(pipeline);
78 
79  // Output queues will be used to get the rgb frames and nn data from the outputs defined above
80  auto qRgb = device.getOutputQueue("rgb", 4, false);
81  auto qDet = device.getOutputQueue("detections", 4, false);
82 
83  cv::Mat frame;
84  std::vector<dai::ImgDetection> detections;
85  auto startTime = steady_clock::now();
86  int counter = 0;
87  float fps = 0;
88  auto color2 = cv::Scalar(255, 255, 255);
89 
90  // Add bounding boxes and text to the frame and show it to the user
91  auto displayFrame = [](std::string name, cv::Mat frame, std::vector<dai::ImgDetection>& detections) {
92  auto color = cv::Scalar(255, 0, 0);
93  // nn data, being the bounding box locations, are in <0..1> range - they need to be normalized with frame width/height
94  for(auto& detection : detections) {
95  int x1 = detection.xmin * frame.cols;
96  int y1 = detection.ymin * frame.rows;
97  int x2 = detection.xmax * frame.cols;
98  int y2 = detection.ymax * frame.rows;
99 
100  uint32_t labelIndex = detection.label;
101  std::string labelStr = to_string(labelIndex);
102  if(labelIndex < labelMap.size()) {
103  labelStr = labelMap[labelIndex];
104  }
105  cv::putText(frame, labelStr, cv::Point(x1 + 10, y1 + 20), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
106  std::stringstream confStr;
107  confStr << std::fixed << std::setprecision(2) << detection.confidence * 100;
108  cv::putText(frame, confStr.str(), cv::Point(x1 + 10, y1 + 40), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
109  cv::rectangle(frame, cv::Rect(cv::Point(x1, y1), cv::Point(x2, y2)), color, cv::FONT_HERSHEY_SIMPLEX);
110  }
111  // Show the frame
112  cv::imshow(name, frame);
113  };
114 
115  while(true) {
116  std::shared_ptr<dai::ImgFrame> inRgb;
117  std::shared_ptr<dai::ImgDetections> inDet;
118 
119  if(syncNN) {
120  inRgb = qRgb->get<dai::ImgFrame>();
121  inDet = qDet->get<dai::ImgDetections>();
122  } else {
123  inRgb = qRgb->tryGet<dai::ImgFrame>();
124  inDet = qDet->tryGet<dai::ImgDetections>();
125  }
126 
127  counter++;
128  auto currentTime = steady_clock::now();
129  auto elapsed = duration_cast<duration<float>>(currentTime - startTime);
130  if(elapsed > seconds(1)) {
131  fps = counter / elapsed.count();
132  counter = 0;
133  startTime = currentTime;
134  }
135 
136  if(inRgb) {
137  frame = inRgb->getCvFrame();
138  std::stringstream fpsStr;
139  fpsStr << "NN fps: " << std::fixed << std::setprecision(2) << fps;
140  cv::putText(frame, fpsStr.str(), cv::Point(2, inRgb->getHeight() - 4), cv::FONT_HERSHEY_TRIPLEX, 0.4, color2);
141  }
142 
143  if(inDet) {
144  detections = inDet->detections;
145  }
146 
147  if(!frame.empty()) {
148  displayFrame("rgb", frame, detections);
149  }
150 
151  int key = cv::waitKey(1);
152  if(key == 'q' || key == 'Q') {
153  return 0;
154  }
155  }
156  return 0;
157 }
dai::node::XLinkOut
XLinkOut node. Sends messages over XLink.
Definition: XLinkOut.hpp:14
dai::Pipeline
Represents the pipeline, set of nodes and connections between them.
Definition: Pipeline.hpp:100
fps
static constexpr int fps
Definition: rgb_depth_aligned.cpp:12
dai::node::ColorCamera
ColorCamera node. For use with color sensors.
Definition: ColorCamera.hpp:16
dai::Device::getOutputQueue
std::shared_ptr< DataOutputQueue > getOutputQueue(const std::string &name)
Definition: Device.cpp:86
dai::node::YoloDetectionNetwork
YoloDetectionNetwork node. Parses Yolo results.
Definition: DetectionNetwork.hpp:65
dai::node::XLinkOut::input
Input input
Definition: XLinkOut.hpp:27
depthai.hpp
dai::Pipeline::create
std::shared_ptr< N > create()
Definition: Pipeline.hpp:145
dai::ColorCameraProperties::ColorOrder::BGR
@ BGR
dai::ImgFrame
Definition: ImgFrame.hpp:25
nanorpc::core::exception::to_string
std::string to_string(std::exception const &e)
Definition: exception.h:46
dai::ColorCameraProperties::SensorResolution::THE_1080_P
@ THE_1080_P
1920 × 1080
dai::Device
Definition: Device.hpp:21
dai::node::ColorCamera::preview
Output preview
Definition: ColorCamera.hpp:69
std
Definition: Node.hpp:366
labelMap
static const std::vector< std::string > labelMap
Definition: yolov8_nano.cpp:12
syncNN
static std::atomic< bool > syncNN
Definition: yolov8_nano.cpp:23
main
int main(int argc, char **argv)
Definition: yolov8_nano.cpp:25
dai::Node::Output::link
void link(const Input &in)
Definition: Node.cpp:84
dai::node::XLinkOut::setStreamName
void setStreamName(const std::string &name)
Definition: XLinkOut.cpp:13
dai::ImgDetections
Definition: ImgDetections.hpp:14


depthai
Author(s): Martin Peterlin
autogenerated on Sat Mar 22 2025 02:58:19