// Lookup-table fragment: dai::RawImgFrame::Type -> ROS sensor_msgs image
// encoding string, used for frames that can be copied out without a planar
// conversion. The declaration head (original line 8) is elided in this
// extraction; per the member listing at the end of this file it is
// `static std::unordered_map<dai::RawImgFrame::Type, std::string> encodingEnumMap`.
9 {dai::RawImgFrame::Type::RGBA8888,
"rgba8"},
10 {dai::RawImgFrame::Type::RGB888i,
"rgb8"},
11 {dai::RawImgFrame::Type::BGR888i,
"bgr8"},
12 {dai::RawImgFrame::Type::GRAY8,
"mono8"},
// RAW8 is published with the same "mono8" encoding as GRAY8.
13 {dai::RawImgFrame::Type::RAW8,
"mono8"},
// 16-bit raw (e.g. depth) frames map to the OpenCV-style "16UC1" encoding.
14 {dai::RawImgFrame::Type::RAW16,
"16UC1"},
// NOTE(review): "YUV420" is not a standard sensor_msgs encoding constant —
// confirm downstream consumers accept it.
15 {dai::RawImgFrame::Type::YUV420p,
"YUV420"}};
// Lookup-table fragment: planar / semi-planar frame types that toRosMsg()
// converts to interleaved BGR before publishing — hence every entry maps to
// "rgb8". Declaration head elided; per the member listing this is
// `static std::unordered_map<dai::RawImgFrame::Type, std::string> planarEncodingEnumMap`.
19 {dai::RawImgFrame::Type::BGR888p,
"rgb8"},
20 {dai::RawImgFrame::Type::RGB888p,
"rgb8"},
21 {dai::RawImgFrame::Type::NV12,
"rgb8"},
22 {dai::RawImgFrame::Type::YUV420p,
"rgb8"}};
// Body fragment of ImageConverter::toRosMsg(std::shared_ptr<dai::ImgFrame>,
// std::deque<ImageMsgs::Image>&): converts a depthai frame into a ROS Image
// message and appends it to the output queue. The signature and several
// interior lines (preprocessor ROS1/ROS2 branches, `break;`s, declarations of
// `header`, `mat`, `type`, `temp_str`) are elided in this extraction.
29 auto tstamp = inData->getTimestamp();
30 ImageMsgs::Image outImageMsg;
// Timestamp conversion, ROS2 branch: the device timestamp is on the
// std::chrono::steady_clock timeline, so map it onto ROS time by subtracting
// the steady-clock age of the frame from "now".
35 auto rclNow = rclcpp::Clock().now();
36 auto steadyTime = std::chrono::steady_clock::now();
37 auto diffTime = steadyTime - tstamp;
38 auto rclStamp = rclNow - diffTime;
39 header.stamp = rclStamp;
// Timestamp conversion, ROS1 branch (the #ifdef/#else lines are elided):
// same idea expressed in nanoseconds via ros::Time.
42 auto steadyTime = std::chrono::steady_clock::now();
43 auto diffTime = steadyTime - tstamp;
44 uint64_t nsec = rosNow.toNSec() - diffTime.count();
45 auto rosStamp = rosNow.fromNSec(nsec);
46 header.stamp = rosStamp;
// header.seq exists only in ROS1 headers — this line is presumably inside the
// ROS1 branch.
47 header.seq = inData->getSequenceNum();
// Planar-type path: compute the cv::Mat geometry that wraps the raw buffer.
53 cv::Size size = {0, 0};
55 switch(inData->getType()) {
56 case dai::RawImgFrame::Type::BGR888p:
57 case dai::RawImgFrame::Type::RGB888p:
58 size = cv::Size(inData->getWidth(), inData->getHeight());
// YUV420p/NV12 carry 1.5 rows of data per image row (luma + subsampled chroma).
61 case dai::RawImgFrame::Type::YUV420p:
62 case dai::RawImgFrame::Type::NV12:
63 size = cv::Size(inData->getWidth(), inData->getHeight() * 3 / 2);
// NOTE(review): a std::runtime_error is constructed here but no `throw`
// keyword is visible in this extraction — confirm the elided text actually
// throws it, otherwise the error is silently discarded.
68 std::runtime_error(
"Invalid dataType inputs..");
// Zero-copy wrap of the depthai buffer (`mat`/`type` declared on elided lines).
71 mat = cv::Mat(size, type, inData->getData().data());
73 switch(inData->getType()) {
// RGB888p: build an interleaved image by merging the three W*H planes.
// Planes are pushed in reverse order (offset 2, 1, 0), i.e. the last plane
// becomes channel 0 of the merged output.
74 case dai::RawImgFrame::Type::RGB888p: {
75 cv::Size
s(inData->getWidth(), inData->getHeight());
76 std::vector<cv::Mat> channels;
78 channels.push_back(cv::Mat(
s, CV_8UC1, inData->getData().data() +
s.area() * 2));
79 channels.push_back(cv::Mat(
s, CV_8UC1, inData->getData().data() +
s.area() * 1));
80 channels.push_back(cv::Mat(
s, CV_8UC1, inData->getData().data() +
s.area() * 0));
81 cv::merge(channels, output);
// BGR888p: same merge but with planes in natural order (offset 0, 1, 2).
84 case dai::RawImgFrame::Type::BGR888p: {
85 cv::Size
s(inData->getWidth(), inData->getHeight());
86 std::vector<cv::Mat> channels;
88 channels.push_back(cv::Mat(
s, CV_8UC1, inData->getData().data() +
s.area() * 0));
89 channels.push_back(cv::Mat(
s, CV_8UC1, inData->getData().data() +
s.area() * 1));
90 channels.push_back(cv::Mat(
s, CV_8UC1, inData->getData().data() +
s.area() * 2));
91 cv::merge(channels, output);
// Chroma-subsampled formats: let OpenCV convert to BGR.
94 case dai::RawImgFrame::Type::YUV420p:
95 cv::cvtColor(mat, output, cv::ColorConversionCodes::COLOR_YUV2BGR_IYUV);
98 case dai::RawImgFrame::Type::NV12:
99 cv::cvtColor(mat, output, cv::ColorConversionCodes::COLOR_YUV2BGR_NV12);
// Default: pass the wrapped buffer through unchanged.
103 output = mat.clone();
// Non-planar path: fill the ROS message directly from the frame metadata.
110 outImageMsg.header = header;
112 outImageMsg.encoding = temp_str;
113 outImageMsg.height = inData->getHeight();
114 outImageMsg.width = inData->getWidth();
// Row stride derived from total payload size; assumes a tightly packed buffer.
115 outImageMsg.step = inData->getData().size() / inData->getHeight();
// NOTE(review): is_bigendian is set to false only for "16UC1" and true for
// everything else — endianness is a property of the data, not the encoding,
// and this looks inverted for typical little-endian producers; verify intent.
116 if(outImageMsg.encoding ==
"16UC1")
117 outImageMsg.is_bigendian =
false;
119 outImageMsg.is_bigendian =
true;
// Raw byte copy of the frame payload into the message buffer.
121 size_t size = inData->getData().size();
122 outImageMsg.data.resize(size);
123 unsigned char* imageMsgDataPtr =
reinterpret_cast<unsigned char*
>(&outImageMsg.data[0]);
124 unsigned char* daiImgData =
reinterpret_cast<unsigned char*
>(inData->getData().data());
128 memcpy(imageMsgDataPtr, daiImgData, size);
130 outImageMsgs.push_back(outImageMsg);
// Body fragment of ImageConverter::toDaiMsg(const ImageMsgs::Image&,
// dai::ImgFrame&): reverse conversion, ROS Image -> depthai frame. Signature
// and several interior lines (std::find_if calls, if/else scaffolding) are
// elided in this extraction.
136 std::unordered_map<dai::RawImgFrame::Type, std::string>::iterator revEncodingIter;
// Predicate fragment (from a find_if over encodingEnumMap): exact-match the
// incoming ROS encoding string to recover the dai frame type.
139 return pair.second == inMsg.encoding;
// Error message when no interleaved encoding matches. The bare "144" between
// the two string literals is extraction residue from the original line
// numbering, not program text.
143 "Unable to find DAI encoding for the corresponding " 144 "sensor_msgs::image.encoding stream");
// Interleaved path: the ROS payload can be handed to the device as-is.
146 outData.setData(inMsg.data);
// Predicate fragment (from a find_if over planarEncodingEnumMap): substring
// match, since the map value may carry extra metadata around the encoding.
149 return pair.second.find(inMsg.encoding) != std::string::npos;
// Planar path: split the matched map value on '_' to recover conversion
// parameters (plane count, bytes per pixel).
152 std::istringstream
f(revEncodingIter->second);
153 std::vector<std::string> encoding_info;
156 while(getline(f, s,
'_')) encoding_info.push_back(s);
// NOTE(review): stoi(encoding_info[0]) requires the map value to start with a
// numeric token (e.g. "3_1_rgb8"), but the planarEncodingEnumMap entries
// visible above are plain "rgb8" — confirm against the full file, else this
// would throw std::invalid_argument.
158 std::vector<std::uint8_t> opData(inMsg.data.size());
159 interleavedToPlanar(inMsg.data, opData, inMsg.height, inMsg.width, std::stoi(encoding_info[0]), std::stoi(encoding_info[1]));
160 outData.setData(opData);
// Common tail: propagate geometry and the recovered dai frame type.
176 outData.setWidth(inMsg.width);
177 outData.setHeight(inMsg.height);
178 outData.setType(revEncodingIter->first);
// Body fragment of ImageConverter::toRosMsgPtr(std::shared_ptr<dai::ImgFrame>):
// converts via the queue-filling overload (the toRosMsg call is elided), then
// wraps the first resulting message in a shared pointer.
182 std::deque<ImageMsgs::Image> msgQueue;
184 auto msg = msgQueue.front();
// ROS2 branch: std::make_shared; ROS1 branch below uses boost::make_shared.
// The #ifdef/#else lines separating them are elided in this extraction.
187 ImagePtr ptr = std::make_shared<ImageMsgs::Image>(msg);
189 ImagePtr ptr = boost::make_shared<ImageMsgs::Image>(msg);
// Body fragment of ImageConverter::planarToInterleaved(const std::vector<uint8_t>&,
// std::vector<uint8_t>&, int w, int h, int numPlanes, int bpp): gathers three
// W*H planes (plane p at offset w*h*p) into interleaved 3-byte pixels
// (channel p at i*3+p). The enclosing if/switch on numPlanes/bpp and the
// closing braces are elided in this extraction.
// Plane 0 -> interleaved channel 0 (named `b` here).
197 for(
int i = 0; i < w * h; i++) {
198 uint8_t b = srcData.data()[i + w * h * 0];
199 destData[i * 3 + 0] = b;
// Plane 1 -> interleaved channel 1.
201 for(
int i = 0; i < w * h; i++) {
202 uint8_t g = srcData.data()[i + w * h * 1];
203 destData[i * 3 + 1] = g;
// Plane 2 -> interleaved channel 2.
205 for(
int i = 0; i < w * h; i++) {
206 uint8_t r = srcData.data()[i + w * h * 2];
207 destData[i * 3 + 2] = r;
// Tail of an error message for unsupported plane/bpp combinations; the
// surrounding runtime_error construction is elided. The bare "212" between the
// string literals is extraction residue from the original line numbering.
211 "If you encounter the scenario where you need this " 212 "please create an issue on github");
// Body fragment of ImageConverter::interleavedToPlanar(const std::vector<uint8_t>&,
// std::vector<uint8_t>&, int w, int h, int numPlanes, int bpp): the inverse of
// planarToInterleaved — one pass reads interleaved 3-byte pixels and scatters
// channel p of pixel i to plane offset i + w*h*p. Enclosing control flow and
// closing braces are elided in this extraction.
220 for(
int i = 0; i < w * h; i++) {
221 uint8_t b = srcData[i * 3 + 0];
222 uint8_t g = srcData[i * 3 + 1];
223 uint8_t r = srcData[i * 3 + 2];
225 destData[i + w * h * 0] = b;
226 destData[i + w * h * 1] = g;
227 destData[i + w * h * 2] = r;
// Tail of the unsupported-configuration error message (construction elided);
// the bare "240" is extraction residue from the original line numbering.
239 "If you encounter the scenario where you need this " 240 "please create an issue on github");
// Body fragment of ImageConverter::rosMsgtoCvMat(ImageMsgs::Image&): converts
// a ROS Image message into a BGR cv::Mat. Only the "nv12" encoding path is
// implemented; any other encoding reaches the error path below (the else/
// return scaffolding is elided in this extraction).
246 cv::Mat rgb(inMsg.height, inMsg.width, CV_8UC3);
247 if(inMsg.encoding ==
"nv12") {
// NV12 payloads carry H*3/2 rows (luma plane + interleaved chroma); wrap the
// message buffer without copying and let OpenCV perform the YUV -> BGR
// conversion into `rgb`.
248 cv::Mat nv_frame(inMsg.height * 3 / 2, inMsg.width, CV_8UC1, inMsg.data.data());
249 cv::cvtColor(nv_frame, rgb, cv::COLOR_YUV2BGR_NV12);
// Fixed typo in the user-facing error message ("frature" -> "feature").
// NOTE(review): the runtime_error is constructed but no `throw` keyword is
// visible in this extraction — confirm the elided text on line 252 actually
// throws it, otherwise unsupported encodings fail silently.
252 std::runtime_error(
"This feature is still WIP");
// Fragment of ImageConverter::calibrationToCameraInfo(...): builds a ROS
// sensor_msgs CameraInfo message (K, D, R, P, size, distortion model) from a
// depthai CalibrationHandler. Return statement, if/else scaffolding and the
// ROS1/ROS2 preprocessor lines are elided in this extraction.
258 dai::CalibrationHandler calibHandler, dai::CameraBoardSocket cameraId,
int width,
int height, Point2f topLeftPixelId, Point2f bottomRightPixelId) {
259 std::vector<std::vector<float>> camIntrinsics, rectifiedRotation;
260 std::vector<float> distCoeffs;
261 std::vector<double> flatIntrinsics, distCoeffsDouble;
262 int defWidth, defHeight;
263 ImageMsgs::CameraInfo cameraData;
// Query the sensor's calibrated default resolution; used when the caller does
// not supply an explicit size (the selecting if/else lines are elided —
// presumably triggered by the width=-1/height=-1 defaults; TODO confirm).
264 std::tie(std::ignore, defWidth, defHeight) = calibHandler.getDefaultIntrinsics(cameraId);
267 cameraData.width =
static_cast<uint32_t
>(defWidth);
269 cameraData.width =
static_cast<uint32_t
>(width);
273 cameraData.height =
static_cast<uint32_t
>(defHeight);
275 cameraData.height =
static_cast<uint32_t
>(height);
278 camIntrinsics = calibHandler.getCameraIntrinsics(cameraId, cameraData.width, cameraData.height, topLeftPixelId, bottomRightPixelId);
// Flatten the 3x3 intrinsic matrix row-major into 9 doubles.
280 flatIntrinsics.resize(9);
281 for(
int i = 0; i < 3; i++) {
282 std::copy(camIntrinsics[i].begin(), camIntrinsics[i].end(), flatIntrinsics.begin() + 3 * i);
// CameraInfo member aliases — ROS2 uses lowercase k/d/p/r ...
286 auto& intrinsics = cameraData.k;
287 auto& distortions = cameraData.d;
288 auto& projection = cameraData.p;
289 auto& rotation = cameraData.r;
// ... while ROS1 uses uppercase K/D/P/R; the #if/#else lines separating the
// two branches are elided in this extraction.
291 auto& intrinsics = cameraData.K;
292 auto& distortions = cameraData.D;
293 auto& projection = cameraData.P;
294 auto& rotation = cameraData.R;
// Default rectification rotation R = identity.
297 rotation[0] = rotation[4] = rotation[8] = 1;
// Monocular projection P = [K | 0]: copy each 3-wide intrinsic row into the
// corresponding 4-wide row of the 3x4 projection matrix.
298 for(
size_t i = 0; i < 3; i++) {
299 std::copy(flatIntrinsics.begin() + i * 3, flatIntrinsics.begin() + (i + 1) * 3, projection.begin() + i * 4);
301 std::copy(flatIntrinsics.begin(), flatIntrinsics.end(), intrinsics.begin());
// Copy exactly the first 8 distortion coefficients (rational_polynomial).
// NOTE(review): distCoeffs[i] is indexed without a size check — confirm
// getDistortionCoefficients() always returns >= 8 values for this device.
303 distCoeffs = calibHandler.getDistortionCoefficients(cameraId);
305 for(
size_t i = 0; i < 8; i++) {
306 distortions.push_back(static_cast<double>(distCoeffs[i]));
// Stereo pair handling: when a stereo pair is calibrated and this camera is
// one of its two sockets, override P and R with rectified stereo values.
310 if(calibHandler.getStereoRightCameraId() != dai::CameraBoardSocket::AUTO && calibHandler.getStereoLeftCameraId() != dai::CameraBoardSocket::AUTO) {
311 if(calibHandler.getStereoRightCameraId() == cameraId || calibHandler.getStereoLeftCameraId() == cameraId) {
// The stereo projection is built from the RIGHT camera's intrinsics for both
// sockets (standard for a rectified pair sharing one projection center).
312 std::vector<std::vector<float>> stereoIntrinsics = calibHandler.getCameraIntrinsics(
313 calibHandler.getStereoRightCameraId(), cameraData.width, cameraData.height, topLeftPixelId, bottomRightPixelId);
314 std::vector<double> stereoFlatIntrinsics(12), flatRectifiedRotation(9);
// Lay out a 3x4 matrix with the translation column zeroed.
315 for(
int i = 0; i < 3; i++) {
316 std::copy(stereoIntrinsics[i].begin(), stereoIntrinsics[i].end(), stereoFlatIntrinsics.begin() + 4 * i);
317 stereoFlatIntrinsics[(4 * i) + 3] = 0;
// Left camera: set Tx = fx * baseline (extrinsic x-translation between the
// stereo sockets). The expression continues on an elided line — likely a
// unit-conversion divisor; TODO confirm against the full file.
320 if(calibHandler.getStereoLeftCameraId() == cameraId) {
323 stereoFlatIntrinsics[3] = stereoFlatIntrinsics[0]
324 * calibHandler.getCameraExtrinsics(calibHandler.getStereoRightCameraId(), calibHandler.getStereoLeftCameraId())[0][3]
326 rectifiedRotation = calibHandler.getStereoLeftRectificationRotation();
// Right camera (else branch, elided braces): Tx stays 0, use right rotation.
328 rectifiedRotation = calibHandler.getStereoRightRectificationRotation();
// Flatten the 3x3 rectification rotation and install P and R.
331 for(
int i = 0; i < 3; i++) {
332 std::copy(rectifiedRotation[i].begin(), rectifiedRotation[i].end(), flatRectifiedRotation.begin() + 3 * i);
335 std::copy(stereoFlatIntrinsics.begin(), stereoFlatIntrinsics.end(), projection.begin());
336 std::copy(flatRectifiedRotation.begin(), flatRectifiedRotation.end(), rotation.begin());
// Distortion model matching the 8 coefficients copied above.
339 cameraData.distortion_model =
"rational_polynomial";
static std::unordered_map< dai::RawImgFrame::Type, std::string > planarEncodingEnumMap
const std::string _frameName
ImagePtr toRosMsgPtr(std::shared_ptr< dai::ImgFrame > inData)
ImageConverter(const std::string frameName, bool interleaved)
cv::Mat rosMsgtoCvMat(ImageMsgs::Image &inMsg)
void planarToInterleaved(const std::vector< uint8_t > &srcData, std::vector< uint8_t > &destData, int w, int h, int numPlanes, int bpp)
sensor_msgs::ImagePtr toImageMsg() const
void toDaiMsg(const ImageMsgs::Image &inMsg, dai::ImgFrame &outData)
void interleavedToPlanar(const std::vector< uint8_t > &srcData, std::vector< uint8_t > &destData, int w, int h, int numPlanes, int bpp)
ImageMsgs::ImagePtr ImagePtr
static std::unordered_map< dai::RawImgFrame::Type, std::string > encodingEnumMap
ImageMsgs::CameraInfo calibrationToCameraInfo(dai::CalibrationHandler calibHandler, dai::CameraBoardSocket cameraId, int width=-1, int height=-1, Point2f topLeftPixelId=Point2f(), Point2f bottomRightPixelId=Point2f())
void toRosMsg(std::shared_ptr< dai::ImgFrame > inData, std::deque< ImageMsgs::Image > &outImageMsgs)