camera_pusher.cpp
1 
2 
3 #include <stdlib.h>
4 #include <iostream>
5 #include <stdio.h>
6 #include <string>
7 #include <opencv2/opencv.hpp>
8 #include <opencv2/core/core.hpp>
9 #include <opencv2/highgui/highgui.hpp>
10 #include <opencv2/imgproc/imgproc.hpp>
11 #include <openni2/OpenNI.h>
12 #include <ros/ros.h>
13 #include <std_msgs/String.h>
14 #include <signal.h>
15 
16 extern "C"{
17 #include <libavutil/mathematics.h>
18 #include <libavutil/avassert.h>
19 #include <libavutil/channel_layout.h>
20 #include <libavutil/opt.h>
21 #include <libavutil/imgutils.h>
22 #include <libswscale/swscale.h>
23 #include <libavformat/avformat.h>
24 #include <libavutil/time.h>
25 #include <libavcodec/avcodec.h>
26 #include <libavutil/pixfmt.h>
27 }
28 
29 
48 using namespace std;
49 using namespace cv;
50 using namespace openni;
51 
52 void startOpenni();
53 void initializeFFmpeg();
54 void send_mat_to_server(cv::Mat * mat,bool is_rgb);
55 void checkOpenNIError( Status result, const char* status );
56 void checkFFmpegError(const char* info, int ret);
57 void signal_handler(int s);
58 
59 //global member variables
60 int64_t frame_count_rgb = 0;
61 int64_t frame_count_depth = 0;
62 int64_t start_time;
63 int t,t1,t2;
64 int hz = 25;
65 AVCodec* encoder = NULL;
66 AVCodecContext* codec_ctx_rgb = NULL;
67 AVCodecContext* codec_ctx_depth = NULL;
68 AVStream* out_stream_rgb = NULL;
69 AVStream* out_stream_depth = NULL;
70 AVOutputFormat* outputfmt_rgb = NULL;
71 AVOutputFormat* outputfmt_depth = NULL;
72 AVFormatContext* format_ctx_rgb =NULL;
73 AVFormatContext* format_ctx_depth = NULL;
74 const char * rtmp_addr_rgb = "rtmp://localhost/rgb";
75 const char * rtmp_addr_depth="rtmp://localhost/depth";
76 //publisher used to report streamer status information to other nodes
77 ros::Publisher info_publisher;
78 openni::Device any_device;
79 openni::VideoStream video_stream_depth;
80 openni::VideoStream video_stream_color;
81 
82 int main( int argc, char** argv ){
83  ros::init(argc,argv,"camera_pusher");
84  ros::NodeHandle nh("~");
85  info_publisher = nh.advertise<std_msgs::String>("/camera/streamer_info",100);
86 
87  initializeFFmpeg();
88  if(ros::ok()){
89  startOpenni();
90  }else{
91  cout<<"ROS error"<<endl;
92  }
93 
94  return 0;
95 }
96 
97 //initialize FFmpeg: encoder contexts, output streams, and RTMP output URLs
98 void initializeFFmpeg(){
99  av_register_all();
100  t = avformat_network_init();
101  checkFFmpegError("network init",t);
102 
103  encoder = avcodec_find_encoder(AV_CODEC_ID_H264);
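  //NOTE: AV_CODEC_ID_H264 is normally provided by libx264 in FFmpeg builds; the
  //"preset"/"tune"/"profile" private options set below are libx264 options, so the
  //FFmpeg build must include that encoder for this pusher to work.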
104 
105  if(!encoder){
106  checkFFmpegError("find avcodec", -1);
107  }else{
108  checkFFmpegError("find avcodec", 1);
109  }
110  codec_ctx_rgb = avcodec_alloc_context3(encoder);
111  codec_ctx_rgb->codec_type = AVMEDIA_TYPE_VIDEO;
112  codec_ctx_rgb->codec_id = AV_CODEC_ID_H264;
113  codec_ctx_rgb->pix_fmt = AV_PIX_FMT_YUV420P;
114  codec_ctx_rgb->framerate = (AVRational){hz,1};
115  codec_ctx_rgb->bit_rate = 400000;//bit rate 400kbps
116  codec_ctx_rgb->width = 640;
117  codec_ctx_rgb->height = 480;
118  codec_ctx_rgb->gop_size = 20;
119  codec_ctx_rgb->qmin = 2;
120  codec_ctx_rgb->qmax = 30;
121  codec_ctx_rgb->max_b_frames =0;
122  codec_ctx_rgb->codec = encoder;
123 
124  codec_ctx_depth = avcodec_alloc_context3(encoder);
125  codec_ctx_depth->codec_type = AVMEDIA_TYPE_VIDEO;
126  codec_ctx_depth->codec_id = AV_CODEC_ID_H264;
127  codec_ctx_depth->pix_fmt = AV_PIX_FMT_YUV420P;
128  codec_ctx_depth->framerate = (AVRational){hz,1};
129  codec_ctx_depth->bit_rate = 200000;//bit rate
130  codec_ctx_depth->width = 640;
131  codec_ctx_depth->height = 480;
132  codec_ctx_depth->gop_size = 20;
133  codec_ctx_depth->qmin = 2;
134  codec_ctx_depth->qmax = 30;
135  codec_ctx_depth->max_b_frames =0;
136  codec_ctx_depth->codec = encoder;
137 
138  //set x264 options
139  t = av_opt_set(codec_ctx_rgb->priv_data,"preset","ultrafast",0);
140  checkFFmpegError("set preset policy to ultrafast ",t);
141  t = av_opt_set(codec_ctx_rgb->priv_data,"tune","zerolatency",0);
142  checkFFmpegError("set tune policy to zerolatency ",t);
143  t = av_opt_set(codec_ctx_rgb->priv_data,"profile","baseline",0);
144  checkFFmpegError("set profile policy to baseline",t);
145 
146  av_opt_set(codec_ctx_depth->priv_data,"preset","ultrafast",0);
147  av_opt_set(codec_ctx_depth->priv_data,"tune","zerolatency",0);
148  av_opt_set(codec_ctx_depth->priv_data,"profile","baseline",0);
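  //Rationale for the x264 settings: "ultrafast" minimizes per-frame encoding cost,
  //"zerolatency" disables lookahead and frame buffering so each input frame is
  //emitted immediately, and the "baseline" profile avoids B-frames (matching
  //max_b_frames = 0 above) for the widest decoder compatibility; all three choices
  //keep the RTMP push latency low.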
149 
150 
151  avcodec_open2(codec_ctx_rgb,encoder,NULL);
152  t = avcodec_open2(codec_ctx_depth,encoder,NULL);
153  checkFFmpegError("open avcodec",t);
154 
155  avformat_alloc_output_context2(&format_ctx_rgb,NULL,"flv",rtmp_addr_rgb);
156  avformat_alloc_output_context2(&format_ctx_depth,NULL,"flv",rtmp_addr_depth);
157 
158  if(!format_ctx_rgb){
159  std_msgs::String msg;
160  string str = "Could not create output format context";
161  cout<<str<<endl;
162  msg.data = str;
163  info_publisher.publish(msg);
164  }
165  format_ctx_rgb->oformat->flags |= AVFMT_NOTIMESTAMPS;
166  format_ctx_depth->oformat->flags |= AVFMT_NOTIMESTAMPS;
167  outputfmt_rgb = format_ctx_rgb->oformat;
168  outputfmt_depth = format_ctx_depth->oformat;
169 
170  if(!outputfmt_rgb){
171  std_msgs::String msg;
172  string str ="Could not create OutputFormat";
173  cout<<str<<endl;
174  msg.data =str;
175  info_publisher.publish(msg);
176  }
177 
178  out_stream_rgb = avformat_new_stream(format_ctx_rgb, encoder);
179  out_stream_depth = avformat_new_stream(format_ctx_depth,encoder);
180  if(!out_stream_rgb){
181  std_msgs::String msg;
182  string str ="Could not create output stream ";
183  cout<<str<<endl;
184  msg.data = str;
185  info_publisher.publish(msg);
186  return ;
187  }
188 
189  out_stream_rgb->time_base = (AVRational){1,10000};
190  out_stream_rgb->index = 0;
191  out_stream_rgb->codec = codec_ctx_rgb;
192  out_stream_depth->time_base = (AVRational){1,10000};
193  out_stream_depth->index = 0;
194  out_stream_depth->codec = codec_ctx_depth;
195 
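  //NOTE: the 1/10000 time_base set above is only a request; avformat_write_header()
  //may replace it with whatever the muxer requires (the FLV muxer uses 1/1000).
  //That is why packet timestamps are rescaled against out_stream_*->time_base with
  //av_rescale_q() in send_mat_to_server() instead of assuming a fixed value.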
196  if (outputfmt_rgb->flags & AVFMT_GLOBALHEADER) {
197  out_stream_rgb->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
198  }
199  if(outputfmt_depth->flags & AVFMT_GLOBALHEADER){
200  out_stream_depth->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
201  }
202 
203  format_ctx_rgb->nb_streams = 1;
204  format_ctx_rgb->streams[0] = out_stream_rgb;
205  format_ctx_depth->nb_streams = 1;
206  format_ctx_depth->streams[0] = out_stream_depth;
207  snprintf(format_ctx_rgb->filename, sizeof(format_ctx_rgb->filename), "%s", rtmp_addr_rgb);
208  snprintf(format_ctx_depth->filename, sizeof(format_ctx_depth->filename), "%s", rtmp_addr_depth);
209 
210  cout<<"---------------Stream Info----------------------"<<endl;
211  av_dump_format(format_ctx_rgb,0,rtmp_addr_rgb,1);
212  av_dump_format(format_ctx_depth,0,rtmp_addr_depth,1);
213  cout<<"------------------------------------------------"<<endl;
214 
215  //Open output URL
216  if (!(outputfmt_rgb->flags & AVFMT_NOFILE)) {
217  t1 = avio_open(&format_ctx_rgb->pb, rtmp_addr_rgb, AVIO_FLAG_WRITE);
218  checkFFmpegError("open RGB output URL ", t1);
219  }
220  if(!(outputfmt_depth->flags & AVFMT_NOFILE)){
221  t2 = avio_open(&format_ctx_depth->pb, rtmp_addr_depth, AVIO_FLAG_WRITE);
222  checkFFmpegError("open Depth output URL ", t2);
223  }
224 
225  //Write file header
226  t1 = avformat_write_header(format_ctx_rgb, NULL);
227  t2 = avformat_write_header(format_ctx_depth,NULL);
228  checkFFmpegError("write RGB output header ",t1);
229  checkFFmpegError("write Depth output header ",t2);
230 }
231 
232 
233 //start OpenNI to read image data from the camera device
234 void startOpenni(){
235  // initialize OpenNI environment
236  Status result;
237  result = openni::OpenNI::initialize();
238  checkOpenNIError(result ,"initialize openni ");
239 
240  // open device, such as a Kinect or an ASUS Xtion Pro
241  result = any_device.open(openni::ANY_DEVICE );
242  checkOpenNIError(result ,"open camera device ");
243 
244  // create depth camera stream
245  video_stream_depth.create( any_device, openni::SENSOR_DEPTH );
246  //create RGB camera stream
247  video_stream_color.create( any_device, openni::SENSOR_COLOR );
248 
249  //set Depth-Image mode
250  openni::VideoMode depth_mode;
251  //image width :640 height:480
252  depth_mode.setResolution( 640, 480 );
253  depth_mode.setFps( 30 );
254  // pixel format
255  depth_mode.setPixelFormat( openni::PIXEL_FORMAT_DEPTH_1_MM );
256  video_stream_depth.setVideoMode( depth_mode);
257 
258  // set RGB-Image mode
259  openni::VideoMode color_mode;
260  color_mode.setResolution( 640, 480 );
261  color_mode.setFps( 30 );
262  color_mode.setPixelFormat( openni::PIXEL_FORMAT_RGB888 );
263  video_stream_color.setVideoMode( color_mode);
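  //NOTE: both sensor streams are configured for 30 fps while the encoder contexts and
  //the packet timestamps assume hz = 25; the pacing logic in send_mat_to_server()
  //therefore effectively pushes the streams at 25 fps.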
264  //set depth and color image registration mode
265  if( any_device.isImageRegistrationModeSupported(
266  openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR ) ){
267  any_device.setImageRegistrationMode( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR );
268  }
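  //Depth-to-color registration makes OpenNI reproject the depth map into the color
  //camera's viewpoint, so a pixel (x, y) in the depth image corresponds to the same
  //scene point as pixel (x, y) in the RGB image.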
269 
270  // open depth stream and color stream
271  Status depthStatus = video_stream_depth.start();
272  checkOpenNIError(depthStatus,"start depth stream ");
273  Status colorStatus = video_stream_color.start();
274  checkOpenNIError(colorStatus,"start color stream ");
275  // create OpenCV image window
276  // namedWindow( "Depth Image", CV_WINDOW_AUTOSIZE );
277  // namedWindow( "Color Image", CV_WINDOW_AUTOSIZE );
278 
279  openni::VideoFrameRef frame_depth;
280  openni::VideoFrameRef frame_color;
281 
282  frame_count_rgb =0;
283  frame_count_depth = 0;
284  start_time = av_gettime();
285 
286  signal(SIGINT,signal_handler);
287  //read the data streams periodically and save each frame into a VideoFrameRef
288  while( 1){
289  if(video_stream_color.isValid()){
290  if(video_stream_color.readFrame( &frame_color )==STATUS_OK){
291  //transform VideoFrameRef to cv::Mat
292  const cv::Mat mImageRGB(frame_color.getHeight(), frame_color.getWidth(), CV_8UC3, (void*)frame_color.getData());
293  //make RGB pixel format to BGR pixel format
294  cv::Mat imageBGRSrc , imageBGRDest;
295  cv::cvtColor( mImageRGB, imageBGRSrc, CV_RGB2BGR );
296  imageBGRDest.create(imageBGRSrc.size(),imageBGRSrc.type());
297 
298  //get the mirror image of source image
299  Mat map_x;
300  Mat map_y;
301  map_x.create(imageBGRSrc.size(),CV_32FC1);
302  map_y.create(imageBGRSrc.size(),CV_32FC1);
303  for(int i=0;i<imageBGRSrc.rows;i++){
304  for(int j=0;j<imageBGRSrc.cols;j++){
305  map_x.at<float>(i,j) =(float) (imageBGRSrc.cols-j);
306  map_y.at<float>(i,j) = (float) i;
307  }
308  }
309  remap(imageBGRSrc,imageBGRDest,map_x,map_y,CV_INTER_LINEAR);
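  //The remap above mirrors each row horizontally (map_x(i, j) = cols - j, map_y keeps
  //the row index), i.e. the same effect as cv::flip(src, dst, 1), which the depth
  //branch below uses directly.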
310 
311  //show Image
312  //cv::imshow( "Color Image", imageBGRDest );
313  //waitKey(1);
314  //send image to rtmp server
315  send_mat_to_server(&imageBGRDest,true);
316  }else{
317  cout<<"read color stream error"<<endl;
318  }
319  }
320 
321  if(video_stream_depth.isValid()){
322  Status readFrameStatus ;
323  readFrameStatus = video_stream_depth.readFrame( &frame_depth );
324  checkOpenNIError(readFrameStatus,"read depth frame");
325  if(readFrameStatus ==STATUS_OK){
326  const cv::Mat mImageDepth( frame_depth.getHeight(), frame_depth.getWidth(), CV_16UC1, (void*)frame_depth.getData());
327  // change pixel format to CV_8U
328  cv::Mat mScaledDepth;
329  // get max depth value
330  int iMaxDepth = video_stream_depth.getMaxPixelValue();
331  mImageDepth.convertTo( mScaledDepth, CV_8U, 255.0 / iMaxDepth );
332  flip(mScaledDepth,mScaledDepth,1);
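  //PIXEL_FORMAT_DEPTH_1_MM delivers depth in millimetres; convertTo() linearly
  //compresses it to an 8-bit grey image (scale 255/iMaxDepth) so it can be displayed
  //and H.264-encoded, which means absolute depth values are not preserved in the
  //pushed stream. The flip mirrors the image to match the mirrored RGB frames above.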
333 
334  cv::Mat imageDepthSrc ;
335  cv::cvtColor( mScaledDepth, imageDepthSrc, CV_GRAY2BGR );
336  //cv::imshow( "Depth Image", imageDepthSrc );
337  //cv::waitKey(1);
338  send_mat_to_server(&imageDepthSrc,false);
339  }else{
340  cout<<"read depth stream error"<<endl;
341  }
342  }else{
343  cout<<"depth stream is not valid"<<endl;
344  }
345  }
346 
347 
348 }
349 
356 void send_mat_to_server(cv::Mat * mat,bool is_rgb){
357  int nbytes;
358  int got_pkt =0 ;
359  const int strides[] = {mat->step[0]};
360  AVPacket* pkt =NULL;
361 
362  pkt = av_packet_alloc();
363  pkt->data = NULL;
364  pkt->size = 0;
365  av_init_packet(pkt);
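  //av_packet_alloc() already returns a packet with default (empty) fields, so the
  //explicit data/size reset and av_init_packet() call are redundant but harmless
  //with this older FFmpeg API.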
366 
367  // Creating two frames for conversion
368  AVFrame *pFrameYUV =av_frame_alloc();
369  AVFrame *pFrameBGR =av_frame_alloc();
370  int width = mat->cols;
371  int height = mat->rows;
372  pFrameBGR->width = width;
373  pFrameBGR->height = height;
374 
375  // Assign image buffers
376  avpicture_fill((AVPicture *)pFrameBGR, mat->data, AV_PIX_FMT_BGR24,width, height);
377 
378  // Determine required buffer size and allocate buffer for YUV frame
379  int numBytesYUV=av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width,height,1);
380  uint8_t* bufferYUV=(uint8_t *)av_malloc(numBytesYUV*sizeof(uint8_t));
381  avpicture_fill((AVPicture *)pFrameYUV, bufferYUV, AV_PIX_FMT_YUV420P,width, height);
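  //avpicture_fill() only attaches the existing buffers (mat->data for BGR, bufferYUV
  //for YUV420P) to the frames' data/linesize pointers; no pixels are copied until
  //sws_scale() runs below.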
382 
383  //Initialise Software scaling context
384  SwsContext *sws_ctx = sws_getContext(width,height,
385  AV_PIX_FMT_BGR24,
386  width,height,
387  AV_PIX_FMT_YUV420P,
388  SWS_BICUBIC,
389  NULL,NULL,NULL);
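  //The scaler only converts the pixel format here: packed BGR24 from OpenCV into the
  //planar YUV420P that the encoder contexts were opened with; source and destination
  //resolutions are identical, so no resizing takes place.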
390  if(is_rgb){
391  pFrameYUV->width = width;
392  pFrameYUV->height = height;
393  pFrameYUV->format = AV_PIX_FMT_YUV420P;
394  pFrameYUV->pts = frame_count_rgb;
395  }else{
396  pFrameYUV->width = width;
397  pFrameYUV->height = height;
398  pFrameYUV->format = AV_PIX_FMT_YUV420P;
399  pFrameYUV->pts = frame_count_depth;
400  }
401 
402  // Convert the image from BGR to YUV
403  sws_scale(sws_ctx, (uint8_t const * const *)pFrameBGR->data,
404  pFrameBGR->linesize, 0, height,
405  pFrameYUV->data, pFrameYUV->linesize);
406 
407  if(is_rgb){
408  t1 = avcodec_encode_video2(format_ctx_rgb->streams[0]->codec,pkt,pFrameYUV,&got_pkt);
409  char info[100];
410  snprintf(info,sizeof(info),"encode RGB video frame number: %lld",(long long)frame_count_rgb);
411  checkFFmpegError(info,t1);
412  }else{
413  t2 = avcodec_encode_video2(format_ctx_depth->streams[0]->codec,pkt,pFrameYUV,&got_pkt);
414  char info[100];
415  snprintf(info,sizeof(info),"encode Depth video frame number: %lld",(long long)frame_count_depth);
416  checkFFmpegError(info,t2);
417  }
418 
419 
420  if(got_pkt==1){
421  pkt->stream_index = 0;
422  if(is_rgb){
423  AVRational time_base = out_stream_rgb->time_base; //
424  AVRational framerate = codec_ctx_rgb->framerate; //FPS
425  AVRational time_base_q = {1,AV_TIME_BASE}; //AV_TIME_BASE 1000000
426 
427  //duration between 2 frame
428  int64_t interval_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(framerate)); //40000 us per frame at 25 fps
429 
430  //change ffmpeg internal time to the outputstream time
431  pkt->pts = av_rescale_q(frame_count_rgb *interval_duration, time_base_q, time_base);
432  pkt->dts = pkt->pts;
433  pkt->duration = av_rescale_q(interval_duration, time_base_q, time_base);
434  pkt->pos = -1;
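  //Worked example of the rescale with hz = 25: interval_duration = AV_TIME_BASE/25 =
  //40000 us per frame, so frame N gets pts = N*40000 in the 1/1000000 base; if the
  //FLV muxer ends up with a 1/1000 time_base, av_rescale_q() turns that into
  //pts = N*40 ticks and duration = 40 ticks per packet.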
435 
436  //Delay
437  int64_t pts_time = av_rescale_q(pkt->pts, time_base, time_base_q);
438  int64_t now_time = av_gettime() - start_time;
439  if (pts_time > now_time){
440  av_usleep(pts_time - now_time);
441  }
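  //This sleep paces the push to real time: if the frame's presentation time is still
  //ahead of the wall-clock time elapsed since start_time, wait before writing, so the
  //RTMP server receives the live stream at roughly playback speed.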
442  t1 = av_write_frame(format_ctx_rgb,pkt);
443  checkFFmpegError("write RGB frame to output URL ",t1);
444  frame_count_rgb++;
445  }else{
446  AVRational time_base = out_stream_depth->time_base; //
447  AVRational framerate = codec_ctx_depth->framerate; //FPS
448  AVRational time_base_q = {1,AV_TIME_BASE}; //AV_TIME_BASE 1000000
449 
450  //duration between 2 frame
451  int64_t interval_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(framerate)); //40000 us per frame at 25 fps
452 
453  //change ffmpeg internal time to the outputstream time
454  pkt->pts = av_rescale_q(frame_count_depth *interval_duration, time_base_q, time_base);
455  pkt->dts = pkt->pts;
456  pkt->duration = av_rescale_q(interval_duration, time_base_q, time_base);
457  pkt->pos = -1;
458 
459  //Delay
460  int64_t pts_time = av_rescale_q(pkt->pts, time_base, time_base_q);
461  int64_t now_time = av_gettime() - start_time;
462  if (pts_time > now_time){
463  av_usleep(pts_time - now_time);
464  }
465  t2 = av_write_frame(format_ctx_depth,pkt);
466  checkFFmpegError("write Depth frame to output URL ",t2);
467  frame_count_depth++;
468  }
469 
470  }
471 
472  av_free(bufferYUV);
473  av_frame_free(&pFrameBGR);
474  av_frame_free(&pFrameYUV);
475  av_packet_free(&pkt); //also releases the packet struct allocated by av_packet_alloc()
476  sws_freeContext(sws_ctx);
477 }
478 
479 void signal_handler(int s){
480  std_msgs::String msg ;
481  string str = "caught interrupt signal, program exiting";
482  msg.data = str;
483  info_publisher.publish(msg);
484 
485  //close streams and close the device
486  video_stream_depth.destroy();
487  video_stream_color.destroy();
488  any_device.close();
489  openni::OpenNI::shutdown();
490 
491  av_write_trailer(format_ctx_rgb);
     av_write_trailer(format_ctx_depth); //finalize the depth output as well
492  avformat_close_input(&format_ctx_rgb);
493  avformat_free_context(format_ctx_rgb);
494  exit(1);
495 }
496 
497 void checkOpenNIError( Status result, const char* status ) {
498  std_msgs::String msg;
499  string str;
500  if( result != STATUS_OK ) {
501  cerr << "[Error] -- " << status << endl;
502  cout << OpenNI::getExtendedError() << endl;
503  str.append("[Error] -- ");
504  }else{
505  cout << "[OK] -- " << status << endl;
506  str.append("[OK] -- ");
507  }
508  str.append(status);
509  msg.data = str;
510  info_publisher.publish(msg);
511  if( result != STATUS_OK ) exit(1); //publish the failure before terminating
512 }
513 
514 void checkFFmpegError(const char* info, int ret){
515  std_msgs::String msg;
516  string str;
517  if(ret < 0){
518  cerr << "[Error] -- " << info << endl;
519  str = "[Error] -- ";
520  }else{
521  cout << "[OK] -- " << info << endl;
522  str = "[OK] -- ";
523  }
524  str.append(info);
525  msg.data = str;
526  info_publisher.publish(msg);
527  if(ret < 0) exit(1); //publish the failure before terminating
528 }


xbot_tools
Author(s):
autogenerated on Sat Oct 10 2020 03:28:22