// MobileNet-SSD label texts (PASCAL VOC classes, index 0 is background)
static const std::vector<std::string> labelMap = {
    "background", "aeroplane",   "bicycle", "bird",  "boat",
    "bottle",     "bus",         "car",     "cat",   "chair",
    "cow",        "diningtable", "dog",     "horse", "motorbike",
    "person",     "pottedplant", "sheep",   "sofa",  "train",
    "tvmonitor"};
// If true, display the NN passthrough frames so boxes stay in sync with detections
static std::atomic<bool> syncNN{true};
int main(int argc, char** argv) {
    using namespace std::chrono;

    // Default blob path
    std::string nnPath(BLOB_PATH);

    // If a path to a blob is given on the command line, use that one instead
    if(argc > 1) {
        nnPath = std::string(argv[1]);
    }

    // Print which blob we are using
    printf("Using blob at path: %s\n", nnPath.c_str());
    xoutRgb->setStreamName("rgb");
    // Camera properties: the 300x300 preview matches the MobileNet-SSD input size
    camRgb->setPreviewSize(300, 300);
    camRgb->setInterleaved(false);
    // Network properties
    nn->setConfidenceThreshold(0.5);
    nn->setBlobPath(nnPath);
    nn->setNumInferenceThreads(2);
    nn->input.setBlocking(false);
    // Linking: when syncNN is set, show the NN passthrough so the displayed frame
    // is the exact frame the detections were computed on
    if(syncNN) {
        nn->passthrough.link(xoutRgb->input);
    } else {
        camRgb->preview.link(xoutRgb->input);
    }
    camRgb->preview.link(nn->input);
    nn->out.link(nnOut->input);
    nn->outNetwork.link(nnNetworkOut->input);
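    // Connect to the device, start the pipeline, and open the output queues.
    // (Reconstructed from the standard example; queue names must match the
    //  stream names set above.)
    dai::Device device(pipeline);

    auto qRgb = device.getOutputQueue("rgb", 4, false);
    auto qDet = device.getOutputQueue("nn", 4, false);
    auto qNN = device.getOutputQueue("nnNetwork", 4, false);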
    cv::Mat frame;
    std::vector<dai::ImgDetection> detections;
    auto startTime = steady_clock::now();
    int counter = 0;
    float fps = 0;
    auto color2 = cv::Scalar(255, 255, 255);
    // Draw detection bounding boxes and labels on the frame and show it
    auto displayFrame = [](std::string name, cv::Mat frame, std::vector<dai::ImgDetection>& detections) {
        auto color = cv::Scalar(255, 0, 0);
        // Detection coordinates are normalized to <0..1>; scale them to the frame size
        for(auto& detection : detections) {
            int x1 = detection.xmin * frame.cols;
            int y1 = detection.ymin * frame.rows;
            int x2 = detection.xmax * frame.cols;
            int y2 = detection.ymax * frame.rows;

            uint32_t labelIndex = detection.label;
            std::string labelStr = std::to_string(labelIndex);
            if(labelIndex < labelMap.size()) {
                labelStr = labelMap[labelIndex];
            }
            cv::putText(frame, labelStr, cv::Point(x1 + 10, y1 + 20), cv::FONT_HERSHEY_TRIPLEX, 0.5, color);
            std::stringstream confStr;
            confStr << std::fixed << std::setprecision(2) << detection.confidence * 100;
            cv::putText(frame, confStr.str(), cv::Point(x1 + 10, y1 + 40), cv::FONT_HERSHEY_TRIPLEX, 0.5, color);
            cv::rectangle(frame, cv::Rect(cv::Point(x1, y1), cv::Point(x2, y2)), color, 1);
        }
        // Show the frame
        cv::imshow(name, frame);
    };
    bool printOutputLayersOnce = true;
    while(true) {
        std::shared_ptr<dai::ImgFrame> inRgb;
        std::shared_ptr<dai::ImgDetections> inDet;
        std::shared_ptr<dai::NNData> inNN;
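        // Fetch the next RGB frame, detections, and raw NN data. In synced mode,
        // block until each packet arrives so the frame matches its detections;
        // otherwise take whatever is currently available.
        // (Reconstructed from the standard example, using the queues opened above.)
        if(syncNN) {
            inRgb = qRgb->get<dai::ImgFrame>();
            inDet = qDet->get<dai::ImgDetections>();
            inNN = qNN->get<dai::NNData>();
        } else {
            inRgb = qRgb->tryGet<dai::ImgFrame>();
            inDet = qDet->tryGet<dai::ImgDetections>();
            inNN = qNN->tryGet<dai::NNData>();
        }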
        // Update the FPS estimate roughly once per second
        counter++;
        auto currentTime = steady_clock::now();
        auto elapsed = duration_cast<duration<float>>(currentTime - startTime);
        if(elapsed > seconds(1)) {
            fps = counter / elapsed.count();
            counter = 0;
            startTime = currentTime;
        }
        if(inRgb) {
            frame = inRgb->getCvFrame();
            std::stringstream fpsStr;
            fpsStr << "NN fps: " << std::fixed << std::setprecision(2) << fps;
            cv::putText(frame, fpsStr.str(), cv::Point(2, inRgb->getHeight() - 4), cv::FONT_HERSHEY_TRIPLEX, 0.4, color2);
        }
        if(inDet) {
            detections = inDet->detections;
        }
        // Print the network's output layer names once (useful when debugging custom blobs)
        if(printOutputLayersOnce && inNN) {
            std::cout << "Output layer names: ";
            for(const auto& ten : inNN->getAllLayerNames()) {
                std::cout << ten << ", ";
            }
            std::cout << std::endl;
            printOutputLayersOnce = false;
        }
        if(!frame.empty()) {
            displayFrame("video", frame, detections);
        }
        int key = cv::waitKey(1);
        if(key == 'q' || key == 'Q') {
            return 0;
        }
    }
    return 0;
}