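/*
 * GStreamer-based video capture driver for ARToolKit (libARvideo).
 *
 * A GStreamer pipeline is built from a textual description (the config
 * string passed to arVideoOpen()/ar2VideoOpen(), or the ARTOOLKIT_CONFIG
 * environment variable).  The pipeline must contain an element named
 * "artoolkit"; a buffer probe on that element's src pad copies each frame
 * into a buffer that arVideoGetImage() hands back to the caller.
 */
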
#include <AR/config.h>
#include <AR/ar.h>
#include <AR/video.h>

#include <glib.h>

#include <gst/gst.h>

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Per-device state: the pipeline, the probed element, the negotiated frame
   size, and the buffer the latest frame is copied into. */
struct _AR2VideoParamT {
    GstElement *pipeline;
    GstElement *probe;          /* element named "artoolkit" in the pipeline */
    int width, height;          /* negotiated frame dimensions */
    ARUint8 *videoBuffer;       /* latest frame, filled by the buffer probe */
};

/* Single global instance used by the arVideo*() convenience wrappers. */
static AR2VideoParamT *gVid = 0;

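/*
 * Buffer probe callback, attached in ar2VideoOpen() to the "artoolkit"
 * element's src pad (GStreamer 0.10 style gst_pad_add_buffer_probe()).
 * It runs in the streaming thread for every buffer that passes the pad:
 * the first buffer is used to read the negotiated caps and size the frame
 * buffer, every later buffer is memcpy'd into vid->videoBuffer so that
 * ar2VideoGetImage() can return it.  Returning TRUE lets the buffer
 * continue through the pipeline.
 */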
static gboolean
cb_have_data (GstPad    *pad,
              GstBuffer *buffer,
              gpointer   u_data)
{
    const GstCaps *caps;
    GstStructure *str;

    gint width, height;
    gint rate_n, rate_d;

    AR2VideoParamT *vid = (AR2VideoParamT*)u_data;

    if (vid->videoBuffer == 0)
    {
        /* First buffer: read the negotiated caps and allocate the frame
           buffer.  Note that "framerate" is a GstFraction in the caps,
           not a double. */
        caps = gst_pad_get_negotiated_caps(pad);
        str  = gst_caps_get_structure(caps, 0);

        gst_structure_get_int(str, "width", &width);
        gst_structure_get_int(str, "height", &height);
        gst_structure_get_fraction(str, "framerate", &rate_n, &rate_d);

        g_print("libARvideo: GStreamer negotiated %dx%d\n", width, height);

        vid->width  = width;
        vid->height = height;

        vid->videoBuffer = malloc(buffer->size);
    }
    else
    {
        /* Subsequent buffers: copy the frame for ar2VideoGetImage(). */
        memcpy(vid->videoBuffer, buffer->data, buffer->size);
    }

    return TRUE;
}

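/* Debug helper: print the caps negotiated on a pad, if any have been
   negotiated yet.  Used below on the probe pad's peer after pre-rolling. */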
void
testing_pad(GstPad *pad)
{
    const GstCaps *caps;
    GstStructure *str;

    gint width, height;
    gint rate_n, rate_d;

    caps = gst_pad_get_negotiated_caps(pad);

    if (caps) {
        str = gst_caps_get_structure(caps, 0);

        gst_structure_get_int(str, "width", &width);
        gst_structure_get_int(str, "height", &height);
        gst_structure_get_fraction(str, "framerate", &rate_n, &rate_d);

        g_print("libARvideo: GStreamer negotiated %dx%d\n", width, height);
    }
    /* No caps yet: nothing to report. */
}

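/*
 * arVideo*(): ARToolKit's single-camera convenience API, implemented as thin
 * wrappers around the ar2Video*() functions and the single global gVid.
 *
 * Illustrative usage sketch (an assumption, not mandated by this file; in
 * particular the pipeline string and the fixed-length frame loop are only
 * examples):
 *
 *     #include <AR/video.h>
 *
 *     int main(void)
 *     {
 *         int xsize, ysize, i;
 *         ARUint8 *image;
 *
 *         if (arVideoOpen("v4l2src ! ffmpegcolorspace ! video/x-raw-rgb,bpp=24 "
 *                         "! identity name=artoolkit ! fakesink") < 0)
 *             return 1;
 *
 *         arVideoCapStart();
 *         arVideoInqSize(&xsize, &ysize);
 *
 *         for (i = 0; i < 100; i++) {            // grab 100 frames
 *             if ((image = arVideoGetImage()) != NULL) {
 *                 // ... hand the frame to marker detection, etc. ...
 *             }
 *             arVideoCapNext();
 *         }
 *
 *         arVideoCapStop();
 *         arVideoClose();
 *         return 0;
 *     }
 */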
int
arVideoOpen( char *config ) {
    if( gVid != NULL ) {
        printf("Device has already been opened!\n");
        return -1;
    }
    gVid = ar2VideoOpen( config );
    if( gVid == NULL ) return -1;

    return 0;
}

int
arVideoClose( void )
{
    int result;

    if( gVid == NULL ) return -1;

    result = ar2VideoClose(gVid);
    gVid = NULL;            /* allow the device to be reopened */

    return result;
}

int
arVideoDispOption( void )
{
    return 0;
}

int
arVideoInqSize( int *x, int *y ) {

    ar2VideoInqSize(gVid, x, y);

    return 0;
}

ARUint8 *
arVideoGetImage( void )
{
    return ar2VideoGetImage(gVid);
}

int
arVideoCapStart( void ) {

    ar2VideoCapStart(gVid);
    return 0;
}

int
arVideoCapStop( void )
{
    ar2VideoCapStop(gVid);
    return 0;
}

int
arVideoCapNext( void )
{
    ar2VideoCapNext(gVid);
    return 0;
}

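/*
 * ar2VideoOpen(): build and pre-roll the capture pipeline.
 *
 * The pipeline description comes from the config string argument, or from
 * the ARTOOLKIT_CONFIG environment variable if none is supplied.  It is fed
 * to gst_parse_launch() and must contain an element named "artoolkit"; a
 * buffer probe is attached to that element's src pad (cb_have_data above).
 * The pipeline is then pre-rolled to PAUSED, and for live sources
 * (v4l2src, dv1394src) briefly set to PLAYING so that caps get negotiated
 * and the frame size is known before this function returns.
 *
 * An example description (an assumption; no default is defined in this
 * file) would be something like:
 *
 *     v4l2src ! ffmpegcolorspace ! video/x-raw-rgb,bpp=24
 *         ! identity name=artoolkit ! fakesink
 */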
AR2VideoParamT *
ar2VideoOpen(char *config_in ) {

    AR2VideoParamT *vid = 0;
    GError *error = 0;
    int i;
    GstPad *pad, *peerpad;
    GstXML *xml;
    GstStateChangeReturn _ret;
    char *config;

    /* Choose the pipeline description: argument first, then environment. */
    if (!config_in || !(config_in[0])) {

        char *envconf = getenv ("ARTOOLKIT_CONFIG");
        if (envconf && envconf[0]) {
            config = envconf;
            g_print ("Using config string from environment [%s].\n", envconf);
        } else {
            /* No built-in default pipeline exists here, and gst_parse_launch()
               needs a non-NULL description, so give up cleanly. */
            g_print ("No video config string supplied and ARTOOLKIT_CONFIG is not set.\n");
            return 0;
        }
    } else {
        config = config_in;
        g_print ("Using supplied video config string [%s].\n", config_in);
    }

    /* Initialise GStreamer (NULL argc/argv: no command-line options). */
    gst_init(0, 0);

    arMalloc( vid, AR2VideoParamT, 1 );

    vid->videoBuffer = 0;

    g_print ("libARvideo: %s\n", gst_version_string());

#if 0
    /* Disabled alternative: load the pipeline from a GstXML description. */
    xml = gst_xml_new();

    if (gst_xml_parse_file(xml, config, NULL))
    {

    } else
    {
        vid->pipeline = gst_xml_get_element(xml, "pipeline");
    }
#endif

    /* Build the pipeline from the textual description. */
    vid->pipeline = gst_parse_launch (config, &error);

    if (!vid->pipeline) {
        g_print ("Parse error: %s\n", error->message);
        free(vid);
        return 0;
    }

    /* The pipeline must contain an element named "artoolkit" to probe. */
    vid->probe = gst_bin_get_by_name(GST_BIN(vid->pipeline), "artoolkit");

    if (!vid->probe) {
        g_print("Pipeline has no element named 'artoolkit'!\n");
        free(vid);
        return 0;
    }

    /* Attach the buffer probe to the "artoolkit" element's src pad, so that
       cb_have_data() sees every buffer flowing out of it. */
    pad = gst_element_get_pad (vid->probe, "src");

    gst_pad_add_buffer_probe (pad, G_CALLBACK (cb_have_data), vid);

#if 0
    gst_element_set_state (vid->pipeline, GST_STATE_READY);

    if (gst_element_get_state (vid->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE) {
        g_error ("libARvideo: failed to put GStreamer into READY state!\n");
    } else {
        g_print ("libARvideo: GStreamer pipeline is READY!\n");
    }
#endif

    /* Pre-roll the pipeline. */
    gst_element_set_state (vid->pipeline, GST_STATE_PAUSED);

    peerpad = gst_pad_get_peer(pad);

    testing_pad(peerpad);

    gst_object_unref (pad);

    if (gst_element_get_state (vid->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE) {
        g_error ("libARvideo: failed to put GStreamer into PAUSED state!\n");
    } else {
        g_print ("libARvideo: GStreamer pipeline is PAUSED!\n");
    }

    /* Live sources do not produce data while PAUSED, so briefly run the
       pipeline to force caps negotiation and let the first buffer reach the
       probe, then pause it again until arVideoCapStart() is called. */
    if ((strstr(config, "v4l2src") != 0) ||
        (strstr(config, "dv1394src") != 0))
    {
        gst_element_set_state (vid->pipeline, GST_STATE_PLAYING);

        if (gst_element_get_state (vid->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE) {
            g_error ("libARvideo: failed to put GStreamer into PLAYING state!\n");
        } else {
            g_print ("libARvideo: GStreamer pipeline is PLAYING!\n");
        }

        gst_element_set_state (vid->pipeline, GST_STATE_PAUSED);

        if (gst_element_get_state (vid->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE) {
            g_error ("libARvideo: failed to put GStreamer into PAUSED state!\n");
        } else {
            g_print ("libARvideo: GStreamer pipeline is PAUSED!\n");
        }
    }

#if 0
    /* Disabled: dump the pipeline as XML for debugging. */
    gst_xml_write_file (GST_ELEMENT (vid->pipeline), stdout);
#endif

    return vid;
}

int
ar2VideoClose(AR2VideoParamT *vid) {

    gst_element_set_state (vid->pipeline, GST_STATE_NULL);

    gst_object_unref (GST_OBJECT (vid->pipeline));

    free(vid);

    return 0;
}

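/* Return a pointer to the most recently captured frame (NULL until the first
   buffer has passed the probe).  Note that the buffer is overwritten in place
   by cb_have_data() from the streaming thread; there is no locking between
   capture and the caller. */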
ARUint8 *
ar2VideoGetImage(AR2VideoParamT *vid) {

    return vid->videoBuffer;
}

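/* Start capturing: set the pipeline to PLAYING.  The state change of a live
   pipeline usually completes asynchronously, so on GST_STATE_CHANGE_ASYNC we
   block in gst_element_get_state() until it has finished (or failed). */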
int
ar2VideoCapStart(AR2VideoParamT *vid)
{
    GstStateChangeReturn _ret;

    _ret = gst_element_set_state (vid->pipeline, GST_STATE_PLAYING);

    if (_ret == GST_STATE_CHANGE_ASYNC)
    {
        if (gst_element_get_state (vid->pipeline,
                NULL, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE)
        {
            g_error ("libARvideo: failed to put GStreamer into PLAYING state!\n");
            return 0;
        } else {
            g_print ("libARvideo: GStreamer pipeline is PLAYING!\n");
        }
    }
    return 1;
}

int
ar2VideoCapStop(AR2VideoParamT *vid) {

    return gst_element_set_state (vid->pipeline, GST_STATE_NULL);
}

int
ar2VideoCapNext(AR2VideoParamT *vid)
{
    return TRUE;
}

int
ar2VideoInqSize(AR2VideoParamT *vid, int *x, int *y )
{
    *x = vid->width;
    *y = vid->height;

    return 0;
}