38 #include <gst/interfaces/propertyprobe.h> 
   52   #include <gst/app/gstappsink.h> 
   53   #define DECODEBIN "decodebin" 
   54   #define VIDEOCONVERT "videoconvert" 
   56   #define VIDEOCONVERT "ffmpegcolorspace" 
   58    #define DECODEBIN "decodebin" 
   60     #define DECODEBIN "decodebin2" 
   64 static gboolean bus_call ( GstBus *bus, GstMessage *msg, void* user_data)
 
   71       const GstStructure *s;
 
   73       s = gst_message_get_structure ((GstMessage *)msg);
 
   86         sstr = gst_structure_to_string (s);
 
   97   switch (GST_MESSAGE_TYPE (msg))
 
  108       case GST_MESSAGE_ERROR:
 
  114           gst_message_parse_error ((GstMessage *)msg, &err, &debug);
 
  212   if (!pGsGraph) return GST_FLOW_ERROR;

  216   GstAppSink* psink = (GstAppSink*) appsink;

  217   if (!psink) return GST_FLOW_ERROR;

  219   GstSample* sample = gst_app_sink_pull_sample( psink );

  220   if (!sample) return GST_FLOW_OK;

  222   GstCaps* bcaps = gst_sample_get_caps( sample );

  223   if (!bcaps) return GST_FLOW_OK;

  225   GstBuffer* Gbuffer = gst_sample_get_buffer (sample);

  226   int bsize = gst_buffer_get_size( Gbuffer );

  227   if (!( bsize>0 && (int)bsize<=(h*w*4) )) return GST_FLOW_ERROR;

  233   if (!pGsGraph->m_pBucketsPool) return GST_FLOW_ERROR;

  234   if (pGsGraph->m_pBucketsPool->IsFull()) {

  239   if (pbucket==NULL) return GST_FLOW_ERROR;

  242   gst_buffer_map ( Gbuffer, &mapinfo, GST_MAP_READ);

  244   MOubyte color = mapinfo.data[0];

  249   bool added_bucket = pGsGraph->m_pBucketsPool->AddBucket( pbucket );
 
  253   gst_buffer_unmap ( Gbuffer, &mapinfo );
 
  254   gst_sample_unref(sample);
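The lines above pull a sample from the appsink, check its size against the target frame (h*w*4), map it read-only, hand it to the buckets pool, and release everything. A minimal standalone sketch of that pull/map/unmap cycle, using illustrative names (on_new_sample, user_data) rather than the original members, could look like this:

static GstFlowReturn on_new_sample (GstAppSink *appsink, gpointer user_data)
{
  /* Pull the sample that triggered the "new-sample" signal. */
  GstSample *sample = gst_app_sink_pull_sample (appsink);
  if (!sample)
    return GST_FLOW_OK;

  GstBuffer *buffer = gst_sample_get_buffer (sample);
  GstMapInfo mapinfo;
  if (buffer && gst_buffer_map (buffer, &mapinfo, GST_MAP_READ)) {
    /* mapinfo.data / mapinfo.size are valid here; copy the frame out. */
    gst_buffer_unmap (buffer, &mapinfo);
  }

  gst_sample_unref (sample);   /* always release the pulled sample */
  return GST_FLOW_OK;
}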
 
  283     GstStructure* str = NULL;
 
  285     GstCaps* caps = NULL;
 
  289     Gbuffer = (GstBuffer*)buffer;
 
  290     caps = Gbuffer->caps;
 
  292     GstPadProbeInfo* Ginfo = (GstPadProbeInfo*) info;
 
  293     Gbuffer = GST_PAD_PROBE_INFO_BUFFER ( Ginfo );
 
  296       caps = gst_pad_get_current_caps(  Gpad );
 
  300       str = gst_caps_get_structure ( (caps), 0);
 
  309     const gchar *strname;
 
  311     strname = gst_structure_get_name( str );
 
  312     sstr = gst_structure_to_string (str);
 
  317     gchar* isaudio =  NULL;
 
  318     gchar* isvideo =  NULL;
 
  320     isaudio = g_strrstr (strname, "audio");

  321     isvideo = g_strrstr (strname, "video");

  352     bsize = Gbuffer->size;

  354     bsize = gst_buffer_get_size( Gbuffer );

  357       if ( bsize>0 && (int)bsize<=(h*w*4) ) {

  361         if (pGsGraph->m_pBucketsPool)

  362         if (!pGsGraph->m_pBucketsPool->IsFull()) {

  372                 gint value_numerator, value_denominator;

  373                 gst_structure_get_fraction( str, "framerate", &value_numerator, &value_denominator );

  387                 if (!pGsGraph->m_pBucketsPool->AddBucket( pbucket )) {
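The gst_pad_add_buffer_probe_full calls used elsewhere in this file are the 0.10 form of this data tap; in GStreamer 1.x the same pattern uses gst_pad_add_probe. A hedged sketch, with illustrative names (on_src_buffer, my_data):

static GstPadProbeReturn on_src_buffer (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
  GstCaps *caps = gst_pad_get_current_caps (pad);   /* negotiated caps, may be NULL */
  if (caps) {
    GstStructure *s = gst_caps_get_structure (caps, 0);
    gint num = 0, den = 1;
    gst_structure_get_fraction (s, "framerate", &num, &den);
    gst_caps_unref (caps);
  }
  (void) buffer;
  return GST_PAD_PROBE_OK;     /* let the buffer pass through */
}

/* Attached once the source pad exists: */
/*   gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BUFFER, on_src_buffer, my_data, NULL); */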
 
  414     GstCaps *caps = NULL;
 
  415     GstPadLinkReturn padlink;
 
  416     gchar* padname = NULL;
 
  417     const gchar* strname = NULL;
 
  418     const gchar* medianame = NULL;
 
  419     GstStructure *str = NULL;
 
  420     GstPad* Gpad = (GstPad*) pad;
 
  423     if (gst_pad_is_linked(Gpad)) {
 
  430       caps = gst_pad_get_caps (Gpad);
 
  431       padname = gst_pad_get_name(Gpad);
 
  433         str = gst_caps_get_structure (caps, 0);
 
  437         sstr = gst_structure_to_string (str);
 
  438         strname = gst_structure_get_name (str);
 
  439         medianame = gst_structure_get_string (str, "media");

  442         moText dbgstr = medianame;

  445         if (g_strrstr (medianame, "video")) {
 
  447             if ( pGsGraph->m_pRTSPDepaySink ) {
 
  448                 padlink = gst_pad_link ( Gpad, (GstPad*)pGsGraph->m_pRTSPDepaySink);
 
  449                 if (padlink==GST_PAD_LINK_OK) {
 
  453             if ( pGsGraph->m_pHTTPSource ) {
 
  454                 padlink = gst_pad_link ( Gpad, (GstPad*)pGsGraph->m_pDecoderBin );
 
  455                 if (padlink==GST_PAD_LINK_OK) {
 
  483   GstCaps *caps = NULL;
 
  484   GstPad  *videopad = NULL;
 
  486   GstPad  *audiopadinconverter = NULL;
 
  487   GstPadLinkReturn padlink;
 
  488   gchar* padname = NULL;
 
  489   const gchar* strname = NULL;
 
  490   GstStructure *str = NULL;
 
  491   GstPad* Gpad = (GstPad*) pad;
 
  494   GstElement* SinkElement = NULL;
 
  496     cout << "cb_pad_added_new" << endl;

  499   if (gst_pad_is_linked(Gpad)) {

  500       cout << "cb_pad_added_new already linked!" << endl;
 
  509       caps = gst_pad_get_caps (Gpad);
 
  511       caps = gst_pad_get_current_caps(Gpad);
 
  513       padname = gst_pad_get_name(Gpad);
 
  515         str = gst_caps_get_structure (caps, 0);
 
  517         const gchar *sstr=NULL;
 
  519             sstr = gst_structure_to_string (str);
 
  520             cout << "cb_newpad: new pad: " << padname << "caps:" << sstr << endl;

  522         MODebug2->Error( moText("moGsGraph::cb_newpad > gst_caps_get_structure is empty") );

  526         MODebug2->Error( moText("moGsGraph::cb_newpad > sstr gst_structure_to_string is empty") );

  527     } else strname = gst_structure_get_name (str);

  529         bool forcing_video = false;

  530     bool is_video = false;

  531     bool is_audio = false;

  534             MODebug2->Error( moText("moGsGraph::cb_newpad > gst_structure_to_string is empty, forcing video!") );

  536         forcing_video = true;

  538         is_video = g_strrstr (strname, "video");

  539         is_audio = g_strrstr (strname, "audio");
 
  546             pGsGraph->m_pAudioPad = Gpad;
 
  550             if (pGsGraph->m_pAudioConverter) {
 
  552                 audiopadinconverter = gst_element_get_pad ( (GstElement*) pGsGraph->m_pAudioConverter, "sink");

  554 audiopadinconverter = gst_element_get_static_pad ( (GstElement*) pGsGraph->m_pAudioConverter, "sink");

  556                 padlink = gst_pad_link (Gpad, audiopadinconverter);

  558                 GstPad* srcAudio = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pAudioConverter, "src");

  560                 if (padlink==GST_PAD_LINK_OK) {

  562                     pGsGraph->cb_have_data_handler_id = gst_pad_add_buffer_probe_full ( srcAudio, G_CALLBACK (cb_have_data), pGsGraph, (GDestroyNotify) (cb_buffer_disconnected) );

  572             } else if (pGsGraph->m_pAudioSink) {

  573                 audiopadinconverter = gst_element_get_static_pad ( (GstElement*) pGsGraph->m_pAudioSink, "sink");

  574                 padlink = gst_pad_link (Gpad, audiopadinconverter);

  578           } else if (is_video || forcing_video ) {
 
  579             pGsGraph->m_pVideoPad = Gpad;
 
  581             MODebug2->Message(
moText(
"moGsGraph::cb_newpad: video pad created"));
 
  582             if (pGsGraph->m_pVideoScale==NULL) {
 
  584                 if (!(GstElement*)pGsGraph->m_pColorSpaceInterlace) {
 
  585                     SinkElement = (GstElement*)pGsGraph->m_pColorSpace;
 
  587                     SinkElement = (GstElement*)pGsGraph->m_pColorSpaceInterlace;
 
  590                 videopad = gst_element_get_pad ( SinkElement, "sink");

  592                     padlink = gst_pad_link( Gpad, videopad );

  595                 videopad = gst_element_get_static_pad( SinkElement, "sink");
 
  597                     padlink = gst_pad_link( Gpad, videopad );
 
  605                 if (padlink==GST_PAD_LINK_OK) {
 
  609                     GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");

  610                     pGsGraph->cb_have_data_handler_id = gst_pad_add_buffer_probe_full ( srcRGB, G_CALLBACK (cb_have_data), pGsGraph, (GDestroyNotify) (cb_buffer_disconnected) );

  612                     GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pFakeSink, "sink");

  622                 } else MODebug2->Error( moText("moGsGraph::cb_newpad > padlink BAD!") );

  629 videopad = gst_element_get_pad ( (GstElement*)pGsGraph->m_pVideoScale, "sink");

  631 videopad = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pVideoScale, "sink");
 
  637                 padlink = gst_pad_link( Gpad, videopad );
 
  639                 if (padlink==GST_PAD_LINK_OK) {
 
  643                     GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");

  644                     pGsGraph->cb_have_data_handler_id = gst_pad_add_buffer_probe_full ( srcRGB, G_CALLBACK (cb_have_data), pGsGraph, (GDestroyNotify) (cb_buffer_disconnected) );

  646                     GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");
 
  648                                                                            GST_PAD_PROBE_TYPE_BUFFER,
 
  649                                                                           (GstPadProbeCallback) cb_have_data,
 
  651                                                                            (GDestroyNotify) (cb_buffer_disconnected) );
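The pad-added handlers in this file all follow the same shape: inspect the caps of the pad decodebin just exposed, then link it to the matching converter's sink pad. A hedged, self-contained sketch of that pattern (the element and callback names convert and on_pad_added are illustrative, not the original members):

static void on_pad_added (GstElement *decodebin, GstPad *newpad, gpointer user_data)
{
  GstElement *convert = GST_ELEMENT (user_data);
  GstCaps *caps = gst_pad_get_current_caps (newpad);
  if (!caps)
    caps = gst_pad_query_caps (newpad, NULL);

  if (caps && gst_caps_get_size (caps) > 0) {
    const gchar *name = gst_structure_get_name (gst_caps_get_structure (caps, 0));
    if (g_str_has_prefix (name, "video/")) {
      GstPad *sinkpad = gst_element_get_static_pad (convert, "sink");
      if (!gst_pad_is_linked (sinkpad))
        gst_pad_link (newpad, sinkpad);   /* returns GST_PAD_LINK_OK on success */
      gst_object_unref (sinkpad);
    }
  }
  if (caps)
    gst_caps_unref (caps);
}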
 
  669   GstCaps *caps = NULL;
 
  670   GstPad  *videopad = NULL;
 
  672   GstPad  *audiopadinconverter = NULL;
 
  673   GstPadLinkReturn padlink;
 
  674   gchar* padname = NULL;
 
  675   const gchar* strname = NULL;
 
  676   GstStructure *str = NULL;
 
  677   GstPad* Gpad = (GstPad*) pad;
 
  680   GstElement* SinkElement = NULL;
 
  682     cout << 
"pad added" << endl;
 
  683   if (gst_pad_is_linked(Gpad)) {
 
  692       caps = gst_pad_get_caps (Gpad);
 
  694       caps = gst_pad_get_current_caps(Gpad);
 
  696       padname = gst_pad_get_name(Gpad);
 
  698         str = gst_caps_get_structure (caps, 0);
 
  702         sstr = gst_structure_to_string (str);
 
  703         cout << "cb_newpad: new pad: " << padname << "caps:" << sstr << endl;

  705         strname = gst_structure_get_name (str);

  707           if (g_strrstr (strname, "audio")) {
 
  708             pGsGraph->m_pAudioPad = Gpad;
 
  714             if (pGsGraph->m_pAudioConverter && 1==1) {
 
  716                 gboolean link_audioresult = gst_element_link_many( (GstElement*)pGsGraph->m_pAudioConverter,
 
  717                                       (GstElement*)pGsGraph->m_pAudioVolume,
 
  718                                       (GstElement*)pGsGraph->m_pAudioPanorama,
 
  719                                       (GstElement*)pGsGraph->m_pAudioSink, NULL );
 
  720                 if (link_audioresult) {
 
  722                     audiopadinconverter = gst_element_get_pad ( (GstElement*) pGsGraph->m_pAudioConverter, "sink");

  724                     audiopadinconverter = gst_element_get_static_pad( (GstElement*) pGsGraph->m_pAudioConverter, "sink");

  726                     padlink = gst_pad_link (Gpad, audiopadinconverter);

  729                     GstPad* srcAudio = gst_element_get_pad ( (GstElement*)pGsGraph->m_pAudioConverter, "src");

  731                     GstPad* srcAudio = gst_element_get_static_pad( (GstElement*)pGsGraph->m_pAudioConverter, "src");

  733                     if (padlink==GST_PAD_LINK_OK) {

  738                                                                               GST_PAD_PROBE_TYPE_BUFFER,

  746             } else if (pGsGraph->m_pAudioSink && 1==1) {

  748                 audiopadinconverter = gst_element_get_pad ( (GstElement*) pGsGraph->m_pAudioSink, "sink");

  750                 audiopadinconverter = gst_element_get_static_pad ( (GstElement*) pGsGraph->m_pAudioSink, "sink");

  752                 padlink = gst_pad_link (Gpad, audiopadinconverter);

  756           } else if (g_strrstr (strname, "video")) {
 
  757             pGsGraph->m_pVideoPad = Gpad;
 
  760             if (pGsGraph->m_pVideoScale==NULL) {
 
  762                 if (!(GstElement*)pGsGraph->m_pColorSpaceInterlace) {
 
  763                     SinkElement = (GstElement*)pGsGraph->m_pColorSpace;
 
  765                     SinkElement = (GstElement*)pGsGraph->m_pColorSpaceInterlace;
 
  768                 videopad = gst_element_get_pad ( SinkElement, "sink");

  770                     padlink = gst_pad_link( Gpad, videopad );

  773                 videopad = gst_element_get_static_pad( SinkElement, "sink");
 
  775                     padlink = gst_pad_link( Gpad, videopad );
 
  783                 if (padlink==GST_PAD_LINK_OK) {
 
  786                     GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");

  790                     GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");

  792                                                                            GST_PAD_PROBE_TYPE_BUFFER,

  803 videopad = gst_element_get_pad ( (GstElement*)pGsGraph->m_pVideoScale, "sink");

  805 videopad = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pVideoScale, "sink");
 
  811                 padlink = gst_pad_link( Gpad, videopad );
 
  813                 if (padlink==GST_PAD_LINK_OK) {
 
  817                     GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");

  820                     GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->m_pColorSpace, "src");
 
  822                                                                            GST_PAD_PROBE_TYPE_BUFFER,
 
  846     static gboolean white = FALSE;
 
  848     GstElement* Gfakesrc = (GstElement*)fakesrc;
 
  849     GstBuffer* Gbuffer = (GstBuffer*)buffer;
 
  850     GstPad* Gpad = (GstPad*)pad;
 
  860       pGsGraph->
CopyVideoFrame( GST_BUFFER_DATA (Gbuffer), GST_BUFFER_SIZE (Gbuffer) );
 
  864         memset (GST_BUFFER_DATA (Gbuffer), white ? 0xff : 0x0, GST_BUFFER_SIZE (Gbuffer));
 
  872     caps = gst_caps_new_simple ("video/x-raw-rgb", "width", G_TYPE_INT, 400,
 
  873                                                     "height", G_TYPE_INT, 300,
 
  874                                                     "bpp", G_TYPE_INT, 24,
 
  875                                                     "depth", G_TYPE_INT, 24,
 
  876                                                     "framerate", GST_TYPE_FRACTION, 10, 1,
 
  878     gst_buffer_set_caps (Gbuffer, caps);
 
  879     gst_caps_unref (caps);
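The caps above use the 0.10-era "video/x-raw-rgb" media type with bpp/depth fields; the same fixed format expressed the GStreamer 1.x way (a sketch using the values from this fragment, 400x300 at 10/1) would be:

GstCaps *caps_1x = gst_caps_new_simple ("video/x-raw",
    "format",    G_TYPE_STRING,     "RGB",
    "width",     G_TYPE_INT,        400,
    "height",    G_TYPE_INT,        300,
    "framerate", GST_TYPE_FRACTION, 10, 1,
    NULL);
/* e.g. g_object_set (capsfilter, "caps", caps_1x, NULL); gst_caps_unref (caps_1x); */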
 
  891 event_loop (GstElement * pipeline, gboolean blocking, GstState target_state)
 
  895   gboolean res = FALSE;
 
  896   gboolean buffering = FALSE;
 
  898   bus = gst_element_get_bus (GST_ELEMENT (pipeline));
 
  903     message = gst_bus_poll (bus, GST_MESSAGE_ANY, blocking ? -1 : 0);
 
  911       const GstStructure *s;
 
  913       s = gst_message_get_structure (message);
 
  915       g_print (("Got Message from element \"%s\" (%s): "),
 
  916           GST_STR_NULL (GST_ELEMENT_NAME (GST_MESSAGE_SRC (message))),
 
  917           gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
 
  921         sstr = gst_structure_to_string (s);
 
  922         g_print ("%s\n", sstr);

  925         g_print ("no message details\n");
 
  929     switch (GST_MESSAGE_TYPE (message)) {
 
  931       case GST_MESSAGE_WARNING:{
 
  934         gchar *name = gst_object_get_path_string (GST_MESSAGE_SRC (message));
 
  937         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
 
  938             GST_DEBUG_GRAPH_SHOW_ALL, "gst-launch.warning");

  940         gst_message_parse_warning (message, &gerror, &debug);

  941         g_print (("WARNING: from element %s: %s\n"), name, gerror->message);

  943           g_print (("Additional debug info:\n%s\n"), debug);
 
  945         g_error_free (gerror);
 
  950       case GST_MESSAGE_ERROR:{
 
  955         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
 
  956             GST_DEBUG_GRAPH_SHOW_ALL, 
"gst-launch.error");
 
  958         gst_message_parse_error (message, &gerror, &debug);
 
  959         gst_object_default_error (GST_MESSAGE_SRC (message), gerror, debug);
 
  960         g_error_free (gerror);
 
  966       case GST_MESSAGE_STATE_CHANGED:{
 
  967         GstState old, mnew, pending;
 
  969         gst_message_parse_state_changed (message, &old, &mnew, &pending);
 
  976         if (GST_MESSAGE_SRC (message) != GST_OBJECT_CAST (pipeline))
 
  993               (
"Prerolled, waiting for buffering to finish...\n"));
 
  998         if (target_state == GST_STATE_PAUSED && mnew == target_state)
 
 1004       case GST_MESSAGE_BUFFERING:{
 
 1007         gst_message_parse_buffering (message, &percent);
 
 1008         fprintf (stderr, (
"buffering... %d  \r"), percent);
 
 1016         if (percent == 100) {
 
 1020           if (target_state == GST_STATE_PLAYING) {
 
 1022                 (
"Done buffering, setting pipeline to PLAYING ...\n"));
 
 1023             gst_element_set_state (pipeline, GST_STATE_PLAYING);
 
 1028           if (buffering == FALSE && target_state == GST_STATE_PLAYING) {
 
 1030             fprintf (stderr, (
"Buffering, setting pipeline to PAUSED ...\n"));
 
 1031             gst_element_set_state (pipeline, GST_STATE_PAUSED);
 
 1037       case GST_MESSAGE_APPLICATION:{
 
 1038         const GstStructure *s;
 
 1040         s = gst_message_get_structure (message);
 
 1042         if (gst_structure_has_name (s, "GstLaunchInterrupt")) {

 1045           fprintf (stderr, ("Interrupt: Stopping pipeline ...\n"));
 
 1056       gst_message_unref (message);
 
 1058   g_assert_not_reached ();
 
 1063       gst_message_unref (message);
 
 1064     gst_object_unref (bus);
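The event_loop shown above polls the bus, prints each message, and reacts to warnings, errors, state changes and buffering. A reduced, hedged sketch of the same poll-and-dispatch loop (this is illustrative, not the original function):

static void run_bus_loop (GstElement *pipeline)
{
  GstBus *bus = gst_element_get_bus (pipeline);
  gboolean done = FALSE;
  while (!done) {
    GstMessage *msg = gst_bus_poll (bus, GST_MESSAGE_ANY, -1);   /* block */
    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR: {
        GError *err = NULL; gchar *dbg = NULL;
        gst_message_parse_error (msg, &err, &dbg);
        g_printerr ("ERROR: %s\n", err->message);
        g_error_free (err); g_free (dbg);
        done = TRUE;
        break;
      }
      case GST_MESSAGE_EOS:
        done = TRUE;
        break;
      case GST_MESSAGE_BUFFERING: {
        gint percent = 0;
        gst_message_parse_buffering (msg, &percent);
        /* pause while buffering, resume when it reaches 100% */
        gst_element_set_state (pipeline,
            percent < 100 ? GST_STATE_PAUSED : GST_STATE_PLAYING);
        break;
      }
      default:
        break;
    }
    gst_message_unref (msg);
  }
  gst_object_unref (bus);
}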
 
 1163         GstPropertyProbe* probe;
 
 1169         GValue valDef = { 0, };
 
 1192         moText dname( "ksvideosrc" );

 1194         moText dname( "dshowvideosrc" );

 1196         device_name = dname;

 1231         device_name = "wrappercamerabinsrc";

 1233         device_name = moText("v4l2src");

 1256     device = gst_element_factory_make (device_name, "source");
 
 1257     gst_element_get_state(device, NULL, NULL, 5 * GST_SECOND);
 
 1258      moText probepname = 
"device-name";
 
 1259     if (!device || !GST_IS_PROPERTY_PROBE(device))
 
 1261     probe = GST_PROPERTY_PROBE (device);
 
 1263         plist = (GList *)gst_property_probe_get_properties( probe );
 
 1265             plist = (GList *)g_list_first(plist);
 
 1267                 pm = (GParamSpec *)plist->data;
 
 1270                         probepname = moText((char*)pm->name);

 1271                         MODebug2->Message( "moGsFramework::LoadCaptureDevices > probe property:"+probepname);

 1272                         va = gst_property_probe_get_values(probe, pm);

 1274                             MODebug2->Message( "moGsFramework::LoadCaptureDevices > probe property:"+probepname+" has values!");

 1278             } while( plist=g_list_next(plist) );

 1281     va = gst_property_probe_get_values_name (probe, (char*)probepname);
 
 1285         g_value_init( &valDef, G_PARAM_SPEC_VALUE_TYPE(pm) );
 
 1287         g_param_value_set_default( pm, &valDef );
 
 1290             moText defaultText(g_value_get_string( vdefault ));
 
 1291             MODebug2->Message("moGsFramework::LoadCaptureDevices > Default value for: \""+moText((char*)probepname)+"\" is "+defaultText);
 
 1293             moText cap_dev_name = defaultText;
 
 1307     for(guint i=0; i < va->n_values; ++i) {

 1308       GValue* v = g_value_array_get_nth(va, i);
 
 1310       GString* stv = g_string_new( g_value_get_string(v) );
 
 1325     g_value_array_free(va);
 
 1329     gst_element_set_state (device, GST_STATE_NULL);
 
 1330     gst_object_unref(GST_OBJECT (device));
 
 1334     MODebug2->
Error(
"moGsFramework::LoadCaptureDevices > exception error.");
 
 1337 #if (GST_VERSION_MINOR > 8) 
 1338   GstDeviceMonitor *monitor = NULL;
 
 1339   GList *devices = NULL;
 
 1341   monitor = gst_device_monitor_new();
 
 1342   if (!gst_device_monitor_start (monitor))
 
 1343       g_error (
"Failed to start device monitor!");
 
 1345     devices = gst_device_monitor_get_devices (monitor);
 
 1347     if (devices != NULL) {
 
 1348       while (devices != NULL) {
 
 1349         GstDevice *device = (GstDevice*)devices->data;
 
 1351         gchar *device_class, *caps_str, *name;
 
 1355         caps = gst_device_get_caps (device);
 
 1357           size = gst_caps_get_size (caps);
 
 1359         name = gst_device_get_display_name (device);
 
 1360         device_class = gst_device_get_device_class (device);
 
 1361         for (i = 0; i < size; ++
i) {
 
 1362             GstStructure *s = gst_caps_get_structure (caps, i);
 
 1363             caps_str = gst_structure_to_string (s);
 
 1373             moText cap_dev_name = name;
 
 1387         gst_object_unref (device);
 
 1388         devices = g_list_remove_link (devices, devices);
 
 1392       g_print (
"No devices found!\n");
 
 1394 #endif // GST_VERSION_MINOR 
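The block guarded by GST_VERSION_MINOR above replaces the old GstPropertyProbe enumeration with GstDeviceMonitor (GStreamer >= 1.4). A hedged sketch of that enumeration path, kept separate from the framework's member variables:

static void list_capture_devices (void)
{
  GstDeviceMonitor *monitor = gst_device_monitor_new ();
  if (!gst_device_monitor_start (monitor)) {
    g_printerr ("Failed to start device monitor!\n");
    g_object_unref (monitor);
    return;
  }
  GList *devices = gst_device_monitor_get_devices (monitor);
  for (GList *l = devices; l != NULL; l = l->next) {
    GstDevice *device = GST_DEVICE (l->data);
    gchar *name  = gst_device_get_display_name (device);
    gchar *klass = gst_device_get_device_class (device);
    g_print ("%s (%s)\n", name, klass);   /* one line per detected device */
    g_free (name);
    g_free (klass);
  }
  g_list_free_full (devices, gst_object_unref);
  gst_device_monitor_stop (monitor);
  g_object_unref (monitor);
}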
 1448     m_pGMainLoop = NULL;
 
 1449     m_pGMainContext = NULL;
 
 1450     m_pGstPipeline = NULL;
 
 1451     m_pGsFramework = NULL;
 
 1453     m_pFileSource = NULL;
 
 1454     m_pFinalSource = NULL;
 
 1456     m_pRTSPSource = NULL;
 
 1457     m_pRTSPDepay = NULL;
 
 1458     m_pHTTPSource = NULL;
 
 1459     m_pMultipartDemux = NULL;
 
 1460     m_pJpegDecode = NULL;
 
 1461     m_pDecoderBin = NULL;
 
 1465     m_pCapsFilter = NULL;
 
 1467     m_pFakeSource = NULL;
 
 1470     m_pBucketsPool = NULL;
 
 1471     m_pVideoScale = NULL;
 
 1472     m_pVideoBalance = NULL;
 
 1474     m_pVideoDeinterlace = NULL;
 
 1475     m_pColorSpaceInterlace = NULL;
 
 1476     m_pColorSpace = NULL;
 
 1478     m_pAudioConverter = NULL;
 
 1479     m_pAudioConverter2 = NULL;
 
 1480     m_pAudioConverter3 = NULL;
 
 1481     m_pAudioConverter4 = NULL;
 
 1482     m_pAudioEcho =  NULL;
 
 1483     m_pAudioPanorama =  NULL;
 
 1484     m_pAudioAmplify =  NULL;
 
 1485     m_pAudioSpeed =  NULL;
 
 1486     m_pAudioVolume =  NULL;
 
 1487     m_pAudioSink = NULL;
 
 1562     m_pGstPipeline = gst_pipeline_new ("pipeline");
 
 1570     m_pGstBus = gst_pipeline_get_bus (GST_PIPELINE (m_pGstPipeline));
 
 1571     m_BusWatchId = gst_bus_add_watch ( (GstBus*)m_pGstBus, bus_call, this );
 
 1572     gst_object_unref (m_pGstBus);
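Stripped of the member variables, the initialization above is the standard pipeline/bus-watch setup; a small sketch (bus_call is the callback defined earlier in this file, the NULL user_data is an assumption):

GstElement *pipeline = gst_pipeline_new ("pipeline");
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
guint bus_watch_id = gst_bus_add_watch (bus, bus_call, NULL);
gst_object_unref (bus);   /* the watch keeps its own reference */
/* later: g_source_remove (bus_watch_id); gst_object_unref (pipeline); */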
 
 1621       g_main_loop_quit( (GMainLoop*) m_pGMainLoop );
 
 1622       g_main_loop_unref( (GMainLoop*) m_pGMainLoop);
 
 1624       m_pGMainLoop = NULL;
 
 1625       m_pGMainContext = NULL;
 
 1628     if (m_pColorSpace) {
 
 1630         GstPad* srcRGB = gst_element_get_pad ( (GstElement*)m_pColorSpace, 
"src");
 
 1636     if (m_pColorSpaceInterlace) {
 
 1638         GstPad* srcRGB = gst_element_get_pad ( (GstElement*)m_pColorSpaceInterlace, 
"src");
 
 1645     if (m_pFileSource) {
 
 1647         m_pFileSource = NULL;
 
 1650     if (m_pJpegDecode) {
 
 1652         m_pJpegDecode = NULL;
 
 1655     if (m_pMultipartDemux) {
 
 1657         m_pMultipartDemux = NULL;
 
 1660     if (m_pHTTPSource) {
 
 1662         m_pHTTPSource = NULL;
 
 1667         m_pRTSPDepay = NULL;
 
 1670     if (m_pRTSPSource) {
 
 1672         m_pRTSPSource = NULL;
 
 1676     if (m_pFinalSource) {
 
 1677         m_pFinalSource = NULL;
 
 1680     if (m_pColorSpace) {
 
 1682         m_pColorSpace = NULL;
 
 1685     if (m_pColorSpaceInterlace) {
 
 1687         m_pColorSpaceInterlace = NULL;
 
 1690     if (m_pCapsFilter) {
 
 1692         m_pCapsFilter = NULL;
 
 1695     if (m_pDecoderBin) {
 
 1696         if (g_signal_handler_is_connected((GstElement*)m_pDecoderBin, signal_newpad_id))

 1697             g_signal_handler_disconnect ( (GstElement*)m_pDecoderBin, signal_newpad_id );
 
 1700         m_pDecoderBin = NULL;
 
 1708     if (m_pAudioConverter) {
 
 1710         m_pAudioConverter = NULL;
 
 1715         m_pAudioSink = NULL;
 
 1728     if (m_pFakeSource) {
 
 1729         if (g_signal_handler_is_connected((GstElement*)m_pFakeSource, 
signal_handoff_id))
 
 1733         m_pFakeSource = NULL;
 
 1746     if (m_pVideoDeinterlace) {
 
 1747         gst_object_unref( (GstElement*) m_pVideoDeinterlace);
 
 1748         m_pVideoDeinterlace = NULL;
 
 1751     if (m_pVideoScale) {
 
 1753         m_pVideoScale = NULL;
 
 1757     if (m_pGstPipeline) {
 
 1758         gst_object_unref( (GstElement*) m_pGstPipeline);
 
 1759         m_pGstPipeline = NULL;
 
 1793     if (m_pBucketsPool) {
 
 1800             memcpy( bufferdst, (
void*)pbuf, size );
 
 1844     pBucketsPool = NULL;
 
 1856     m_pBucketsPool = pBucketsPool;
 
 1857     bool link_result = false;

 1870         m_pFakeSource = gst_element_factory_make ("fakesrc", "source");
 
 1873         if (m_pFakeSource) {
 
 1874             g_object_set (G_OBJECT (m_pFakeSource),
 
 1875                 "signal-handoffs", TRUE,
 
 1876                 "sizemax", 400 * 300 * 3,
 
 1879                 "num-buffers", 30*200,
 
 1880                 "sizetype", 2, NULL);
 
 1885             res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pFakeSource );
 
 1889         m_pCapsFilter = gst_element_factory_make ("capsfilter", "filtsource");

 1890         if (m_pCapsFilter) {

 1891            g_object_set (G_OBJECT (m_pCapsFilter), "caps", gst_caps_new_simple ("video/x-raw-rgb",
 
 1892            "width", G_TYPE_INT, 400,
 
 1893            "height", G_TYPE_INT, 300,
 
 1894            "framerate", GST_TYPE_FRACTION, 10, 1,
 
 1895            "bpp", G_TYPE_INT, 24,
 
 1896            "depth", G_TYPE_INT, 24,
 
 1897            "red_mask",G_TYPE_INT, 255,
 
 1898            "green_mask",G_TYPE_INT, 65280,
 
 1899            "blue_mask",G_TYPE_INT, 16711680,
 
 1900            "endianness", G_TYPE_INT, 4321,
 
 1903            res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilter );
 
 1907        m_pColorSpace = gst_element_factory_make (
VIDEOCONVERT, 
"color");
 
 1908        if (m_pColorSpace) {
 
 1909             res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pColorSpace );
 
 1912        link_result = gst_element_link_many( (GstElement*) m_pFakeSource, (GstElement*) m_pCapsFilter, (GstElement*) m_pColorSpace, NULL );
 
 1916            m_pEncoder = gst_element_factory_make( 
"ffenc_mpeg1video", 
"encoder");
 
 1918                 res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pEncoder );
 
 1921            m_pMultiplexer = gst_element_factory_make( 
"ffmux_mpeg", 
"multiplexer");
 
 1922            if (m_pMultiplexer) {
 
 1923                 res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pMultiplexer );
 
 1926            m_pFileSink = gst_element_factory_make( "filesink", "filesink");

 1928                 g_object_set (G_OBJECT (m_pFileSink), "location", (char*)filename, NULL);
 
 1929                 res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pFileSink );
 
 1932            link_result = gst_element_link_many( (GstElement*) m_pColorSpace, (GstElement*) m_pEncoder, (GstElement*) m_pMultiplexer, (GstElement*) m_pFileSink, NULL );
 
 1938                 gst_element_set_state ( (GstElement*) m_pGstPipeline, GST_STATE_PLAYING);
 
 1943        } else return false;
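The recording branch built above is source -> capsfilter -> videoconvert -> encoder -> muxer -> filesink. A hedged, self-contained sketch of the same chain; videotestsrc, x264enc and mp4mux here are illustrative substitutes for the fakesrc and the ffenc_mpeg1video / ffmux_mpeg factories used in the original (those come from the old gst-ffmpeg 0.10 plugins):

GstElement *pipe    = gst_pipeline_new ("rec");
GstElement *src     = gst_element_factory_make ("videotestsrc", "source");
GstElement *convert = gst_element_factory_make ("videoconvert", "color");
GstElement *enc     = gst_element_factory_make ("x264enc",      "encoder");     /* assumption */
GstElement *mux     = gst_element_factory_make ("mp4mux",       "multiplexer"); /* assumption */
GstElement *sink    = gst_element_factory_make ("filesink",     "filesink");
g_object_set (G_OBJECT (sink), "location", "capture.mp4", NULL);

gst_bin_add_many (GST_BIN (pipe), src, convert, enc, mux, sink, NULL);
if (gst_element_link_many (src, convert, enc, mux, sink, NULL))
  gst_element_set_state (pipe, GST_STATE_PLAYING);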
 
 1956   pBucketsPool = NULL;
 
 1983     m_pBucketsPool = pBucketsPool;
 
 1984     GstCaps *caps = NULL;
 
 1985     bool link_result = false;

 1987     bool b_sourceselect = false;

 1988     bool b_forcevideoscale = false;

 1989     bool b_forcevideoflip = false;

 1991     bool b_forcevideointerlace = false;
 
 2004     MOint p_sourcewidth;
 
 2005     MOint p_sourceheight;
 
 2008     MOint p_forceheight;
 
 2016     devicename = p_capdev.GetName();

 2019         colormode = moText("video/x-raw-yuv");

 2022         colormode = moText("video/x-raw-rgb");
 
 2037     if (p_forcewidth!=0 || p_forceheight!=0) {
 
 2038         b_forcevideoscale = true;

 2041     if (p_forceflipH!=0 || p_forceflipV!=0) {

 2042         b_forcevideoflip = true;

 2045     if (p_sourcewidth!=0 || p_sourceheight!=0) {

 2046         b_sourceselect = true;

 2049     if (devicename.Length()>0)

 2056         if (labelname==moText("RTSP")) {

 2058             m_pRTSPSource = gst_element_factory_make ("rtspsrc", "source");

 2059             m_pRTSPDepay = gst_element_factory_make ("rtpmp4vdepay", "depay");

 2062                 m_pRTSPDepaySink = gst_element_get_static_pad ( (GstElement*)m_pRTSPDepay, "sink"  );
 
 2067         } else if (labelname==moText("HTTP") || dname.find("http")==0 ) {

 2068             m_pHTTPSource = gst_element_factory_make ("souphttpsrc", "source");

 2071             if ( m_pHTTPSource && m_pMultipartDemux ) {

 2079                 m_pFileSource = gst_element_factory_make ("ksvideosrc", "source");

 2081                 m_pFileSource = gst_element_factory_make ("dshowvideosrc", "source");

 2086                 m_pFileSource = gst_element_factory_make ("wrappercamerabinsrc", "source");

 2087             cout << "wrappercamerabinsrc created!" << endl;

 2089                 if (devicename==moText("DV"))

 2090                     m_pFileSource = gst_element_factory_make ("dv1394src", "source");

 2095                     m_pFileSource = gst_element_factory_make ("v4l2src", "source");

 2100             if (devicename==moText("DV"))

 2101                 m_pFileSource = gst_element_factory_make ("dv1394src", "source");

 2103                     m_pFileSource = gst_element_factory_make ("v4l2src", "source");
 
 2107             m_pFinalSource = m_pFileSource;
 
 2110         if (m_pRTSPDepay && m_pRTSPSource) {
 
 2111             if (devicename.Length() > 0 && ( devicename!=moText("default")) ) {

 2112                 g_object_set (G_OBJECT (m_pRTSPSource), "location", (char*)devicename, NULL);

 2113                 g_object_set (G_OBJECT (m_pRTSPSource), "latency", (guint) 0, NULL);

 2114                 g_object_set (G_OBJECT (m_pRTSPSource), "debug", (gboolean) true, NULL);

 2116                 g_object_set (G_OBJECT (m_pRTSPSource), "protocols", (guint) 0x00000004, NULL);
 
 2119             res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pRTSPSource );
 
 2122                 res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pRTSPDepay );
 
 2136                 m_pFinalSource = m_pRTSPDepay;
 
 2138                 m_pFinalSource = NULL;
 
 2144         if ( m_pHTTPSource  ) {
 
 2146             g_object_set (G_OBJECT (m_pHTTPSource), "location", (char*)devicename, NULL);

 2147             g_object_set (G_OBJECT (m_pHTTPSource), "automatic-redirect", TRUE, NULL);

 2154             res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pHTTPSource );

 2157             link_result  = false;
 
 2163             if ( link_result ) {
 
 2165                 m_pFinalSource = m_pHTTPSource;
 
 2166                 m_pDecoderBin = gst_element_factory_make ( "decodebin", "decoder");

 2168                 MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > SOUP HTTP source failed linking with MultipartDemux"));
 
 2169                 m_pFinalSource = NULL;
 
 2176        if (m_pFileSource) {
 
 2179            if (devicename.Length() > 0 && ( devicename!=moText("default")) ) {

 2180                 g_object_set (G_OBJECT (m_pFileSource), "device-name", (char*)devicename, NULL);

 2183             if (devicename==moText("DV") ) {

 2184                 g_object_set (G_OBJECT (m_pFileSource), "port", 0, NULL);

 2187                 if ( devicename.Length() > 0 && ( devicename!=moText("default") ) ) {

 2188                     if (devicename.Find( "/dev/" )==0 ) {

 2189                         g_object_set (G_OBJECT (m_pFileSource), "device", (char*)devicename, NULL);

 2191                         g_object_set (G_OBJECT (m_pFileSource), "device-name", (char*)devicename, NULL);
 
 2197            res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pFileSource );
 
 2199            m_pFinalSource = m_pFileSource;
 
 2202        if (m_pFinalSource) {
 
 2208            GstIterator* iterator = NULL;
 
 2209            iterator = gst_element_iterate_src_pads( (GstElement*) m_pFinalSource );
 
 2214            GValue item = G_VALUE_INIT;
 
 2216            GstPad* srcpad = NULL;
 
 2217            GstCaps* itemcaps = NULL;
 
 2218            GstCaps* capstpl = NULL;
 
 2219            GstCaps* capsQuery = NULL;
 
 2220            GstPad* peerPad = NULL;
 
 2231             switch (gst_iterator_next (iterator, &item)) {
 
 2233             switch (gst_iterator_next (iterator, &item)) {
 
 2235                case GST_ITERATOR_OK:
 
 2238                  srcpad = (GstPad*)item;
 
 2240                 srcpad = (GstPad*)g_value_dup_object (&item);
 
 2242                  padname = gst_object_get_name((GstObject*) srcpad );
 
 2247                  itemcaps = gst_pad_get_caps( srcpad );
 
 2249                  itemcaps = gst_pad_get_current_caps( srcpad );
 
 2250                  capstpl = gst_pad_get_pad_template_caps( srcpad );
 
 2251                  capsQuery = gst_pad_query_caps( srcpad, NULL );
 
 2252                  peerPad = gst_pad_get_peer( srcpad );
 
 2260                      icapsstr = moText( gst_caps_to_string(capsQuery) );
 
 2265                 g_value_reset (&item);
 
 2268                case GST_ITERATOR_RESYNC:
 
 2270                  gst_iterator_resync (iterator);
 
 2272                case GST_ITERATOR_ERROR:
 
 2276                case GST_ITERATOR_DONE:
 
 2281            gst_iterator_free (iterator);
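The iteration above mixes the 0.10 pointer-based iterator with the 1.x GValue-based one; a hedged sketch of the 1.x form alone (element is an illustrative stand-in for m_pFinalSource):

GstIterator *it = gst_element_iterate_src_pads (element);
GValue item = G_VALUE_INIT;
gboolean done = FALSE;
while (!done) {
  switch (gst_iterator_next (it, &item)) {
    case GST_ITERATOR_OK: {
      GstPad *srcpad = GST_PAD (g_value_get_object (&item));   /* no extra ref taken */
      gchar *name = gst_object_get_name (GST_OBJECT (srcpad));
      g_print ("src pad: %s\n", name);
      g_free (name);
      g_value_reset (&item);
      break;
    }
    case GST_ITERATOR_RESYNC:
      gst_iterator_resync (it);
      break;
    case GST_ITERATOR_ERROR:
    case GST_ITERATOR_DONE:
      done = TRUE;
      break;
  }
}
g_value_unset (&item);
gst_iterator_free (it);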
 
 2287            if (b_sourceselect) {
 
 2290               b_sourceselect = 
false;
 
 2291               #endif // GSTVERSION 
 2295            if (b_sourceselect) {
 
 2300                m_pCapsFilterSource = gst_element_factory_make (
"capsfilter", 
"filtsource");
 
 2302                if (m_pCapsFilterSource) {
 
 2310                   if (colormode=="") colormode = "video/x-raw-yuv";

 2312                    g_object_set (G_OBJECT (m_pCapsFilterSource), "caps", gst_caps_new_simple ( colormode,
 
 2313                    "width", G_TYPE_INT, p_sourcewidth,
 
 2314                    "height", G_TYPE_INT, p_sourceheight,
 
 2315                     "depth", G_TYPE_INT, 24,
 
 2316                    "red_mask",G_TYPE_INT, 16711680,
 
 2317                    "green_mask",G_TYPE_INT, 65280,
 
 2318                    "blue_mask",G_TYPE_INT, 255,
 
 2324                     int opt_framerate = 15;
 
 2325                   if (colormode=="") {

 2326                     colormode = "video/x-raw";

 2341                       moText fullf = colormode+ ","+ colormodef;

 2344                       g_object_set (G_OBJECT (m_pCapsFilterSource), "caps", gst_caps_new_simple ( colormode,

 2345                                                                                                  "format", G_TYPE_STRING, (char*)colormodef,
 
 2346                                                                                                  "width", G_TYPE_INT, p_sourcewidth,
 
 2347                                                                                                  "height", G_TYPE_INT, p_sourceheight,
 
 2348                                                  "framerate", GST_TYPE_FRACTION, opt_framerate, 1,
 
 2352                     colormode="video/x-raw-yuv";

 2354                     if (colormode=="video/x-raw-rgb") {

 2356                     } else if (colormode=="video/x-raw-yuv") {

 2360                     colormode="video/x-raw";

 2362                     g_object_set (G_OBJECT (m_pCapsFilterSource), "caps", gst_caps_new_simple ( colormode,
 
 2365                      "width", G_TYPE_INT, p_sourcewidth,
 
 2366                      "height", G_TYPE_INT, p_sourceheight,
 
 2382                    res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilterSource );
 
 2388             b_forcevideoscale = 
false;
 
 2389            if (b_forcevideoscale) {
 
 2391                m_pVideoScale = gst_element_factory_make ("videoscale", "scale");

 2392                if (m_pVideoScale) {

 2394                    colormode = "video/x-raw";

 2397                    g_object_set (G_OBJECT (m_pVideoScale), "method", &method, NULL);
 
 2399                    res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pVideoScale );
 
 2401                     m_pCapsFilter2 = gst_element_factory_make (
"capsfilter", 
"filt2");
 
 2402                     if (m_pCapsFilter2) {
 
 2403                         if (b_forcevideoscale) {
 
 2404                             g_object_set (G_OBJECT (m_pCapsFilter2), 
"caps", gst_caps_new_simple ( colormode,
 
 2405                                 "width", G_TYPE_INT, p_forcewidth,
 
 2406                                 "height", G_TYPE_INT, p_forceheight,
 
 2409                             g_object_set (G_OBJECT (m_pCapsFilter2), 
"caps", gst_caps_new_simple ( colormode,
 
 2410                                 "width", G_TYPE_INT, 240,
 
 2411                                 "height", G_TYPE_INT, 160,
 
 2415                         res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilter2 );
 
 2422             b_forcevideointerlace = 
false;
 
 2423            if (b_forcevideointerlace) {
 
 2424                m_pColorSpaceInterlace = gst_element_factory_make (
VIDEOCONVERT, 
"colordeinterlace");
 
 2425                if (m_pColorSpaceInterlace) {
 
 2427                     res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pColorSpaceInterlace );
 
 2431                m_pVideoDeinterlace = gst_element_factory_make (
"ffdeinterlace", 
"deinterlace");
 
 2432                if (m_pVideoDeinterlace) {
 
 2436                     res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pVideoDeinterlace );
 
 2440            m_pColorSpace = gst_element_factory_make (
VIDEOCONVERT, 
"color");
 
 2441            if (m_pColorSpace) {
 
 2443                 res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pColorSpace );
 
 2446            m_pCapsFilter = gst_element_factory_make (
"capsfilter", 
"filt");
 
 2447            if (m_pCapsFilter) {
 
 2450                g_object_set (G_OBJECT (m_pCapsFilter), 
"caps", gst_caps_new_simple (
"video/x-raw-rgb",
 
 2451                "bpp", G_TYPE_INT, 24,
 
 2452                "depth", G_TYPE_INT, 24,
 
 2453                "red_mask",G_TYPE_INT, 255,
 
 2454                "green_mask",G_TYPE_INT, 65280,
 
 2455                "blue_mask",G_TYPE_INT, 16711680,
 
 2456                "endianness", G_TYPE_INT, 4321,
 
 2458                res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilter );
 
 2460               caps = gst_caps_new_simple ( "video/x-raw",

 2461                    "format", G_TYPE_STRING, "RGB",

 2463               g_object_set (G_OBJECT (m_pCapsFilter), "caps", caps, NULL);
 
 2464               res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pCapsFilter );
 
 2481             if (m_pDecoderBin==NULL) m_pDecoderBin = gst_element_factory_make ( 
DECODEBIN, 
"decoder");
 
 2482             if (m_pDecoderBin) {
 
 2484                 res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pDecoderBin );
 
 2488                 signal_newpad_id = g_signal_connect (m_pDecoderBin, "pad-added", G_CALLBACK (cb_pad_added_new), (gpointer)this);
 
 2493                 m_pFakeSink = gst_element_factory_make ("fakesink", "destout");

 2495                 cout << "creating FakeSink from appsink" << endl;

 2496                 m_pFakeSink = gst_element_factory_make ("appsink", "destout");

 2502                      g_object_set (G_OBJECT (m_pFakeSink), "caps", caps, NULL);

 2503                      g_object_set (G_OBJECT (m_pFakeSink), "sync", false, NULL);

 2504                      g_object_set (G_OBJECT (m_pFakeSink), "drop", true, NULL);
 
 2506                      res = gst_bin_add (GST_BIN (m_pGstPipeline), (GstElement*) m_pFakeSink );
 
 2510                     if (b_sourceselect) {
 
 2511                         cout << "linking m_pFinalSource, m_pCapsFilterSource, m_pDecoderBin" << endl;

 2512                         link_result = gst_element_link_many( (GstElement*) m_pFinalSource,  (GstElement*) m_pCapsFilterSource, (GstElement*) m_pDecoderBin, NULL );

 2515                          cout << "linking m_pFinalSource, m_pDecoderBin" << endl;
 
 2516                         link_result = gst_element_link_many( (GstElement*) m_pFinalSource, (GstElement*) m_pDecoderBin, NULL );
 
 2522                         if (b_forcevideoscale) {
 
 2523                             cout << 
"linking forcing videoscale" << endl;
 
 2524                             if (b_forcevideointerlace)
 
 2525                                 link_result = gst_element_link_many( (GstElement*) m_pVideoScale, (GstElement*)m_pCapsFilter2, (GstElement*) m_pColorSpaceInterlace, (GstElement*) m_pVideoDeinterlace, (GstElement*) m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );
 
 2527                                 link_result = gst_element_link_many( (GstElement*) m_pVideoScale, (GstElement*)m_pCapsFilter2, (GstElement*) m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );
 
 2532                             cout << "linking no videoscale" << endl;

 2534                             if (b_forcevideointerlace) {

 2535                                 cout << "linking m_pColorSpaceInterlace, m_pVideoDeinterlace, m_pColorSpace, m_pCapsFilter, m_pFakeSink" << endl;

 2536                                 link_result = gst_element_link_many( (GstElement*) m_pColorSpaceInterlace, (GstElement*) m_pVideoDeinterlace, (GstElement*)m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );

 2538                                 cout << "linking m_pColorSpace, /*m_pCapsFilter*/, m_pFakeSink" << endl;
 
 2539                                 link_result = gst_element_link_many(
 
 2540                                         (GstElement*) m_pColorSpace,
 
 2542                                         (GstElement*) m_pCapsFilter,
 
 2544                                         (GstElement*) m_pFakeSink, NULL );
 
 2554                             CheckState( gst_element_set_state ((GstElement*) m_pGstPipeline, GST_STATE_PLAYING), true  );

 2559                             MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > gst_app_sink_pull_preroll for appsink"));

 2562                             sample = gst_app_sink_pull_preroll( (GstAppSink*) m_pFakeSink );

 2564                 MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > RECEIVED sample from gst_app_sink_pull_preroll!"));
 
 2573                                 bcaps = gst_sample_get_caps( sample );
 
 2575                                   Gbuffer = gst_sample_get_buffer (sample);
 
 2577                                   gst_app_sink_set_emit_signals((GstAppSink*)m_pFakeSink, true);

 2578                                   gst_app_sink_set_drop((GstAppSink*)m_pFakeSink, true);

 2580                                   gst_app_sink_set_max_buffers((GstAppSink*)m_pFakeSink, 1);

 2581                                   g_signal_connect( (GstElement*)m_pFakeSink, "new-sample", G_CALLBACK (appsink_new_sample), (gpointer)this );
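Isolated from the member variables, the appsink configuration done once the first preroll sample arrives boils down to this sketch (appsink_new_sample is the callback used in this file; sink and user_data are illustrative):

GstAppSink *sink = GST_APP_SINK (appsink_element);   /* appsink_element: assumption */
gst_app_sink_set_emit_signals (sink, TRUE);          /* fire "new-sample" per buffer */
gst_app_sink_set_drop (sink, TRUE);                  /* discard old buffers when full */
gst_app_sink_set_max_buffers (sink, 1);              /* keep only the latest frame */
g_signal_connect (sink, "new-sample", G_CALLBACK (appsink_new_sample), user_data);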
 
 2585                             } else MODebug2->Error( moText("moGsGraph::BuildLiveWebcamGraph > NO sample from gst_app_sink_pull_preroll!"));

 2586                 MODebug2->Message( moText("moGsGraph::BuildLiveWebcamGraph > gst_app_sink_pull_preroll for appsink ended"));
 
 2600                             MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > m_pColorSpace m_pCapsFilter m_pFakeSink linking failed"));

 2601                             event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);

 2604                         MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > src and decodebin linkage failed: ") + devicename );

 2605                         event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);

 2609                     MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > fakesink construction failed"));

 2610                     event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);

 2613                 MODebug2->Error(moText("moGsGraph::BuildLiveWebcamGraph > decodebin construction failed"));

 2614                 event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);

 2618             event_loop( (GstElement*) m_pGstPipeline, false, GST_STATE_PAUSED);
 
 2644     GValue gvalue = G_VALUE_INIT;
 
 2649     piter = gst_element_iterate_pads( (GstElement*)FilterElement );
 
 2654         switch (gst_iterator_next (piter, &ppointer)) {
 
 2656         switch (gst_iterator_next (piter, &gvalue)) {
 
 2658             case GST_ITERATOR_OK:
 
 2661                 ppad = (GstPad*) ppointer;
 
 2663                 ppad = (GstPad*) g_value_dup_object( &gvalue );
 
 2665                 nname = gst_pad_get_name(ppad);
 
 2666                 res = gst_pad_is_active(ppad);
 
 2667                 res = gst_pad_is_linked(ppad);
 
 2668                 res = gst_pad_is_blocking(ppad);
 
 2670                 gst_object_unref (ppointer);
 
 2672                 g_value_reset( &gvalue );
 
 2676             case GST_ITERATOR_RESYNC:
 
 2678                 gst_iterator_resync (piter);
 
 2681             case GST_ITERATOR_ERROR:
 
 2686             case GST_ITERATOR_DONE:
 
 2691     gst_iterator_free (piter);done = FALSE;
 
 2724     while((time1 - time0) < timeout) {
 
 2733     MODebug2->Error("moGsGraph::WaitForFormatDefinition > time out !!! " + IntToStr(timeout) + " ms elapsed!");
 
 2739     bool link_result = false;

 2745     moFile SoundFile( filename );

 2747     if ( !SoundFile.Exists() ) return false;
 
 2752         moText extension = filename;
 
 2755         m_pFileSource = gst_element_factory_make ("filesrc", "source");

 2757         if (m_pFileSource) {

 2759            g_object_set (G_OBJECT (m_pFileSource), "location", (char*)filename, NULL);
 
 2761            res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pFileSource );
 
 2763            m_pAudioConverter = NULL;
 
 2771            if (extension==moText(".wav")) {

 2772               m_pAudioConverter = gst_element_factory_make ("audioresample", "resample");

 2775               m_pAudioConverter =  gst_element_factory_make ("audioconvert", "converter");
 
 2778            if (m_pAudioConverter) {
 
 2779                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter );
 
 2782            m_pAudioSink = gst_element_factory_make (
"autoaudiosink", 
"audioout");
 
 2785                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioSink );
 
 2792             m_pAudioSpeed = gst_element_factory_make (
"speed", 
"speed");
 
 2794             if (m_pAudioSpeed) {
 
 2795                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioSpeed );
 
 2798            m_pAudioVolume = gst_element_factory_make (
"volume", 
"volume");
 
 2800            if (m_pAudioVolume) {
 
 2801                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioVolume );
 
 2804            m_pAudioPanorama = gst_element_factory_make (
"audiopanorama", 
"audiopanorama");
 
 2806            if (m_pAudioPanorama) {
 
 2807                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioPanorama );
 
 2810            m_pAudioConverter2 = gst_element_factory_make (
"audioconvert", 
"audioconvert2");
 
 2812            if (m_pAudioConverter2) {
 
 2813                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter2 );
 
 2816            m_pAudioConverter3 = gst_element_factory_make (
"audioconvert", 
"audioconvert3");
 
 2818            if (m_pAudioConverter3) {
 
 2819                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter3 );
 
 2839            m_pAudioConverter4 = gst_element_factory_make (
"audioconvert", 
"audioconvert4");
 
 2841            if (m_pAudioConverter4) {
 
 2842                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter4 );
 
 2845            m_pDecoderBin = gst_element_factory_make ( 
DECODEBIN, 
"decoder");
 
 2846             if (m_pDecoderBin) {
 
 2848                 signal_newpad_id = g_signal_connect ((GstElement*)m_pDecoderBin, "new-decoded-pad", G_CALLBACK (cb_newpad), (gpointer)this);

 2850                 signal_newpad_id = g_signal_connect ((GstElement*)m_pDecoderBin, "pad-added", G_CALLBACK (cb_pad_added_new), (gpointer)this);
 
 2852                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pDecoderBin );
 
 2857             link_result = gst_element_link_many( (GstElement*)m_pFileSource, (GstElement*)m_pDecoderBin, NULL );
 
 2874                 if (m_pAudioConverter) link_result = gst_element_link_many(
 
 2875                                                                            (GstElement*)m_pAudioConverter,
 
 2876                                                                            (GstElement*)m_pAudioSpeed,
 
 2877                                                                            (GstElement*)m_pAudioConverter2,
 
 2878                                                                            (GstElement*)m_pAudioPanorama,
 
 2879                                                                            (GstElement*)m_pAudioConverter3,
 
 2880                                                                            (GstElement*)m_pAudioVolume,
 
 2881                                                                            (GstElement*)m_pAudioConverter4,
 
 2882                                                                            (GstElement*)m_pAudioSink,
 
 2889                     CheckState( gst_element_set_state ((GstElement*)m_pGstPipeline, GST_STATE_PAUSED), true  );

 2893                     cout << "state gstreamer finish" << endl;
 
 2898                     MODebug2->Error(moText("moGsGraph::error: m_pAudioConverter m_pAudioResample m_pAudioSink linking failed"));

 2899                     event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);

 2903                event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
 
 2922     if (m_pGstPipeline) {
 
 2923            m_pAudioConverter = gst_element_factory_make (
"audioconvert", 
"convert");
 
 2925            if (m_pAudioConverter) {
 
 2926                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioConverter );
 
 2929            m_pAudioVolume = gst_element_factory_make (
"volume", 
"volume");
 
 2931            if (m_pAudioVolume) {
 
 2932                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioVolume );
 
 2935            m_pAudioPanorama = gst_element_factory_make (
"audiopanorama", 
"balance");
 
 2937            if (m_pAudioPanorama) {
 
 2938                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioPanorama );
 
 2941            m_pAudioSink = gst_element_factory_make (
"autoaudiosink", 
"audioout");
 
 2944                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pAudioSink );
 
 2954     m_pBucketsPool = pBucketsPool;
 
 2955     bool link_result = 
false;
 
 2959     moFile VideoFile( filename );
 
 2961     if ( !VideoFile.Exists() ) return false;

 2966         m_pFileSource = gst_element_factory_make ("filesrc", "source");
 
 2968         if (m_pFileSource) {
 
 2969            g_object_set (G_OBJECT (m_pFileSource), 
"location", (
char*)filename, NULL);
 
 2976            res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pFileSource );
 
 2979            m_pColorSpaceInterlace = gst_element_factory_make (
VIDEOCONVERT, 
"color0");
 
 2980            if (m_pColorSpaceInterlace) {
 
 2981                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pColorSpaceInterlace );
 
 2984            m_pVideoBalance = gst_element_factory_make (
"videobalance", 
"videobalance");
 
 2985            if (m_pVideoBalance) {
 
 2986                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pVideoBalance );
 
 2989            m_pColorSpace = gst_element_factory_make (
VIDEOCONVERT, 
"color");
 
 2990            if (m_pColorSpace) {
 
 2991                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pColorSpace );
 
 3024             m_pDecoderBin = gst_element_factory_make ( 
DECODEBIN, 
"decoder");
 
 3025             if (m_pDecoderBin) {
 
 3026                 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pDecoderBin );
 
 3028                 m_pFakeSink = gst_element_factory_make ("fakesink", "destout");

 3033                 signal_newpad_id = g_signal_connect (m_pDecoderBin, "pad-added", G_CALLBACK (cb_pad_added_new), (gpointer)this);

 3038                 m_pFakeSink = gst_element_factory_make ("fakesink", "destout");

 3040                 cout << "creating FakeSink from appsink" << endl;

 3041                 m_pFakeSink = gst_element_factory_make ("appsink", "destout");

 3047                     g_object_set (G_OBJECT (m_pFakeSink), "caps", gst_caps_new_simple ( "video/x-raw",
 
 3048                                                                                        "format", G_TYPE_STRING, "RGB",

 3050                     g_object_set (G_OBJECT (m_pFakeSink), "sync", (bool)true, NULL);

 3051                     g_object_set (G_OBJECT (m_pFakeSink), "drop", true, NULL);
 
 3053                     gst_app_sink_set_max_buffers( (GstAppSink*)m_pFakeSink, 100 );
 
 3055                     g_object_set (G_OBJECT (m_pFakeSink), 
"sync", (
bool)
true, NULL);
 
 3059                      res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pFakeSink );
 
 3061                     link_result = gst_element_link_many( (GstElement*)m_pFileSource, (GstElement*)m_pDecoderBin, NULL );
 
 3064                         if (m_pVideoBalance)
 
 3065                             link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pVideoBalance, (GstElement*)m_pColorSpace, (GstElement*)m_pCapsFilter, (GstElement*)m_pFakeSink, NULL );
 
 3067                             link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pColorSpace, (GstElement*)m_pCapsFilter, (GstElement*)m_pFakeSink, NULL );
 
 3069                         if (m_pVideoBalance)
 
 3070                             link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pVideoBalance, (GstElement*)m_pColorSpace, (GstElement*)m_pFakeSink, NULL );
 
 3072                             link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pColorSpace, (GstElement*)m_pFakeSink, NULL );
 
 3076                         if (m_pAudioConverter)
 
 3077                           bool link_audio_result = gst_element_link_many( (GstElement*)m_pAudioConverter, (GstElement*)m_pAudioVolume, (GstElement*)m_pAudioPanorama, (GstElement*)m_pAudioSink, NULL );
 
 3082                             CheckState( gst_element_set_state ((GstElement*)m_pGstPipeline, GST_STATE_PAUSED), true  );

 3086                             MODebug2->Message( moText("moGsGraph::BuildLiveVideoGraph > gst_app_sink_pull_preroll for appsink"));
 
 3089                             sample = gst_app_sink_pull_preroll( (GstAppSink*) m_pFakeSink );
 
 3099                                 bcaps = gst_sample_get_caps( sample );
 
 3101                                     Gbuffer = gst_sample_get_buffer (sample);
 
 3103                                     gst_app_sink_set_emit_signals((GstAppSink*)m_pFakeSink, true);

 3104                                     gst_app_sink_set_drop((GstAppSink*)m_pFakeSink, true);

 3106                                     gst_app_sink_set_max_buffers((GstAppSink*)m_pFakeSink, 10000 );

 3107                                     g_signal_connect( (GstElement*)m_pFakeSink, "new-sample", G_CALLBACK (appsink_new_sample), (gpointer)this );
 
 3113                                 cout << 
"gst_app_sink_is_eos: " << gst_app_sink_is_eos((GstAppSink*)m_pFakeSink) << endl;
 
 3114                                 cout << 
"gst_app_sink_get_emit_signals: " << gst_app_sink_get_emit_signals((GstAppSink*)m_pFakeSink) << endl;
 
 3115                                 cout << 
"gst_app_sink_get_max_buffers: " << gst_app_sink_get_max_buffers((GstAppSink*)m_pFakeSink) << endl;
 
MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > m_pColorSpace m_pCapsFilter m_pFakeSink linking failed"));
event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);

MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > filesrc and decodebin linkage failed: ") + filename );
event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);

MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > fakesink construction failed"));
event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);

MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > decodebin construction failed"));
event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);

MODebug2->Error( moText("moGsGraph::BuildLiveVideoGraph > file source failed: ") + filename);
event_loop( (GstElement*)m_pGstPipeline, false, GST_STATE_PAUSED);
 
bool isfixed = false;
GstBuffer* Gbuffer = (GstBuffer*)buffer;

isfixed = gst_caps_is_fixed((GstCaps*)caps);

str = gst_caps_get_structure ((GstCaps*)caps, 0);
sstr = gst_structure_to_string (str);

if (g_strrstr( sstr, "channels" )) {

    gint channels, rate;

    gst_structure_get_int( str, "width", &width);
    gst_structure_get_int( str, "depth", &depth);
    gst_structure_get_int( str, "channels", &channels);
    gst_structure_get_int( str, "rate", &rate);

if (Gbuffer!=NULL) {

    // debug message assembled here (excerpted):
    //   "SetAudioFormat: we have a format!! " + ... + " bytes per buffer, " + ... + " nanoseconds per sample "
bool isfixed = false;
GstBuffer* Gbuffer = (GstBuffer*)buffer;

isfixed = gst_caps_is_fixed((GstCaps*)caps);

str = gst_caps_get_structure ((GstCaps*)caps, 0);
sstr = gst_structure_to_string (str);

if (g_strrstr( sstr, "width" )) {

    gint width, height, value_numerator, value_denominator, redmask, greenmask, bluemask, bitcount;

    gst_structure_get_int( str, "width", &width);
    gst_structure_get_int( str, "height", &height);
    gst_structure_get_fraction( str, "framerate", &value_numerator, &value_denominator );
    gst_structure_get_int( str, "red_mask", &redmask );
    gst_structure_get_int( str, "green_mask", &greenmask );
    gst_structure_get_int( str, "blue_mask", &bluemask );
    gst_structure_get_int( str, "bpp", &bitcount );

    // debug message assembled here (excerpted):
    //   "SetVideoFormat: we have a format!!" + ... + " buffer duration: " + ...
GstStateChangeReturn Gstate_change_result = (GstStateChangeReturn)state_change_result;

switch(Gstate_change_result) {

    case GST_STATE_CHANGE_FAILURE:

    case GST_STATE_CHANGE_SUCCESS:

    case GST_STATE_CHANGE_ASYNC:

    case GST_STATE_CHANGE_NO_PREROLL:

GstStateChangeReturn state_wait;
GstState current_state, pending_state;
GstClockTime time_out = GST_CLOCK_TIME_NONE;
time_out = GST_SECOND;

while(waitforsync) {

    state_wait = gst_element_get_state(GST_ELEMENT (m_pGstPipeline), &current_state, &pending_state, time_out);

    switch(state_wait) {

        case GST_STATE_CHANGE_SUCCESS:
            waitforsync = false;

        case GST_STATE_CHANGE_FAILURE:
            waitforsync = false;

            waitforsync = false;
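The four GstStateChangeReturn values drive the branches above; a compact sketch of how they are usually interpreted (this summarises CheckState's behaviour rather than reproducing it; `pipeline` stands in for the pipeline element):

    switch (gst_element_set_state (pipeline, GST_STATE_PAUSED)) {
      case GST_STATE_CHANGE_FAILURE:    /* abort and inspect the bus for the error     */ break;
      case GST_STATE_CHANGE_SUCCESS:    /* the state was reached synchronously         */ break;
      case GST_STATE_CHANGE_ASYNC:      /* poll gst_element_get_state() until it ends  */ break;
      case GST_STATE_CHANGE_NO_PREROLL: /* live source: no data flows until PLAYING    */ break;
      default: break;
    }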
 
GstStateChangeReturn state_wait;
GstState current_state, pending_state;
GstClockTime time_out = GST_CLOCK_TIME_NONE;
time_out = GST_SECOND;

GstPad* srcRGB = NULL;
bool padactive = false;
bool padlinked = false;
bool padblocked = false;
bool padblocking = false;

if (m_pColorSpace) {

    srcRGB = gst_element_get_pad ( (GstElement*)m_pColorSpace, "src");          // GStreamer 0.10 build
    srcRGB = gst_element_get_static_pad ( (GstElement*)m_pColorSpace, "src" );   // GStreamer 1.x build

    padactive = gst_pad_is_active( srcRGB );
    padlinked = gst_pad_is_linked( srcRGB );
    padblocked = gst_pad_is_blocked( srcRGB );
    padblocking = gst_pad_is_blocking( srcRGB );

if (m_pGMainContext) {

    if (g_main_context_iteration( (GMainContext*)m_pGMainContext, false )) {

state_wait = gst_element_get_state(GST_ELEMENT (m_pGstPipeline), &current_state, &pending_state, time_out);

switch(current_state) {

    case GST_STATE_VOID_PENDING:

    case GST_STATE_NULL:

    case GST_STATE_READY:

    case GST_STATE_PAUSED:

    case GST_STATE_PLAYING:

CheckState( gst_element_set_state (GST_ELEMENT (m_pGstPipeline), GST_STATE_PLAYING), true );
 
CheckState( gst_element_set_state (GST_ELEMENT (m_pGstPipeline), GST_STATE_NULL) );

CheckState( gst_element_set_state (GST_ELEMENT (m_pGstPipeline), GST_STATE_PAUSED));

#define MO_INFINITE -1

gint64 time_nanoseconds;

    frame = m_FramesLength - 1;

res = gst_element_seek_simple(
                              (GstElement*)m_pGstPipeline,
                              /* ...format and flags elided in this excerpt... */
                              | GST_SEEK_FLAG_KEY_UNIT

time_nanoseconds = frame * GST_MSECOND;
res = gst_element_seek_simple( (GstElement*)m_pGstPipeline, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT ), time_nanoseconds );
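The seek above treats the frame index as a millisecond position (frame * GST_MSECOND). A sketch of a framerate-based conversion, if frame-accurate positioning were wanted; the helper name and the fps arguments are illustrative.

    // frame -> nanoseconds, scaled without intermediate overflow
    static gint64 frame_to_time (guint64 frame, gint fps_n, gint fps_d)
    {
      return (gint64) gst_util_uint64_scale (frame, GST_SECOND * fps_d, fps_n);
    }

    // gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
    //     (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT),
    //     frame_to_time (frame, 25, 1));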
 
GstFormat fmt = GST_FORMAT_TIME;

if (gst_element_query_duration ((GstElement*)m_pGstPipeline, &fmt, &len)) {   // GStreamer 0.10 signature
if (gst_element_query_duration ((GstElement*)m_pGstPipeline, fmt, &len)) {    // GStreamer 1.x signature

     m_FramesLength = lenF;
     return m_FramesLength;

GstFormat fmt = GST_FORMAT_TIME;

if (gst_element_query_duration ((GstElement*)m_pGstPipeline, &fmt, &len)) {   // 0.10
if (gst_element_query_duration ((GstElement*)m_pGstPipeline, fmt, &len)) {    // 1.x

     m_SamplesLength = lenF;
     return m_SamplesLength;

GstFormat fmt = GST_FORMAT_TIME;

if (gst_element_query_duration ((GstElement*)m_pGstPipeline, &fmt, &dur)) {   // 0.10
if (gst_element_query_duration ((GstElement*)m_pGstPipeline, fmt, &dur)) {    // 1.x

      m_Duration = GST_TIME_AS_MSECONDS(dur);

GstFormat fmt = GST_FORMAT_TIME;

if (gst_element_query_position ((GstElement*)m_pGstPipeline, &fmt, &pos)) {   // 0.10
if (gst_element_query_position ((GstElement*)m_pGstPipeline, fmt, &pos)) {    // 1.x

        return (pos / 1000000);

GstFormat fmt = GST_FORMAT_TIME;

if (gst_element_query_position ((GstElement*)m_pGstPipeline, &fmt, &pos)) {   // 0.10
if (gst_element_query_position ((GstElement*)m_pGstPipeline, fmt, &pos)) {    // 1.x

    return (MOulong)GST_TIME_AS_MSECONDS(pos);
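The two call forms reflect the 0.10 signature (GstFormat passed by pointer) versus the 1.x one (GstFormat by value). A sketch of deriving a frame count from the queried duration, assuming the framerate is known; the helper name is illustrative.

    static guint64 duration_in_frames (GstElement* pipeline, gint fps_n, gint fps_d)
    {
      gint64 duration_ns = 0;
      if (!gst_element_query_duration (pipeline, GST_FORMAT_TIME, &duration_ns)) /* 1.x form */
        return 0;
      // frames = duration * (fps_n / fps_d) / GST_SECOND
      return gst_util_uint64_scale ((guint64) duration_ns, fps_n, GST_SECOND * fps_d);
    }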
 
if (!m_pGstPipeline) return false;
if (gst_element_get_state ((GstElement*)m_pGstPipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE ) return false;

g_object_set ( (GstElement*)m_pAudioVolume, "volume", volume, NULL);

g_object_set ( (GstElement*)m_pAudioPanorama, "panorama", balance, NULL);

g_object_set ( (GstElement*)m_pAudioSpeed, "speed", pitch, NULL);

unsigned long long delayl = delay;
g_object_set ( (GstElement*)m_pAudioEcho, "delay", delayl, NULL);

g_object_set ( (GstElement*)m_pAudioEcho, "intensity", intensity, NULL);

g_object_set ( (GstElement*)m_pAudioEcho, "feedback", feedback, NULL);

g_object_set ( (GstElement*)m_pVideoBalance, "brightness", brightness, NULL);

g_object_set ( (GstElement*)m_pVideoBalance, "contrast", contrast, NULL);

g_object_set ( (GstElement*)m_pVideoBalance, "hue", hue, NULL);

g_object_set ( (GstElement*)m_pVideoBalance, "saturation", saturation, NULL);
 
moCaptureDevices m_PreferredDevices
Available video devices.

moAudioFormat m_AudioFormat
Audio format.
 
virtual ~moGsGraph()
Destructor. 
 
void SetEchoIntensity(float intensity)
 
bool BuildLiveVideoGraph(moText filename, moBucketsPool *pBucketsPool)
Live-mode video playback graph, played back asynchronously against the clock...

void SetAudioFormat(moGstCaps *caps, moGstBuffer *buffer=NULL)

moVideoFormat GetVideoFormat()
Returns the video format.
 
virtual void SetEOS(bool iseos)
 
void Error(moText p_text)
Announces and logs an error.
 
static moGBoolean cb_have_data(moGstPad *pad, moGstBuffer *buffer, moGPointer u_data)
 
int GetSourceHeight() const 
Returns the height of the source image.

int GetSourceBpp() const 
Returns the bits per pixel of the source image.
 
bool DestroyRetreivedBucket()
 
void Stop()
Stops video playback.

void SetPitch(float pitch)

int GetSourceFlipH() const 
Returns the horizontal flip value of the source image.
 
moCaptureDevices m_CaptureDevices
 
void SetContrast(float contrast)
 
bool Unlock()
Releases access to the internal buffer.

virtual bool CheckCaptureDevice(int i)
Checks whether the given video device is still available.
 
void Seek(MOuint frame, float rate=1.0)
 
bool Lock()
Locks access to the internal buffer.
 
int moGstStateChangeReturn
 
bool CheckState(moGstStateChangeReturn state_change_result, bool waitforsync=false)
 
MOulong moGetTicksAbsolute(bool force_real_absolute)
Returns the Moldeo clock value in milliseconds.
 
void RetreivePads(moGstElement *FilterElement)
 
MOulong GetSamplesLength()
 
bool BuildLiveGraph(moBucketsPool *pBucketsPool, moCaptureDevice p_capdev)
 
bool BuildLiveStreamingGraph(moBucketsPool *pBucketsPool, moText p_location)
 
int GetSourceFlipV() const 
Returns the vertical flip value of the source image.
 
bool BuildLiveSound(moText filename)
 
virtual bool InitGraph()
Graph initialization.
 
static moGBoolean cb_buffer_disconnected(moGPointer u_data)
 
moVideoFormat & GetVideoFormat()
Returns the device's video format.

virtual moCaptureDevices * UpdateCaptureDevices()
Updates the available video devices.
 
Class for handling text.
 
static void cb_pad_added(moGstElement *decodebin2, moGstPad *pad, moGPointer u_data)
 
 
virtual moCaptureDevices * LoadCaptureDevices()
 
const moText & GetLabelName() const 
Returns the device's label (code name).

virtual MOulong GetDuration()
The total duration of the stream in nanoseconds.
 
static void cb_handoff(moGstElement *fakesrc, moGstBuffer *buffer, moGstPad *pad, moGPointer user_data)
 
void SetLabelName(const moText &p_labelname)
Sets the device's label (code name).

bool IsRunning()
Whether playback is running.

void Present(bool p=true)
Sets the device's presence.

const moText & GetName() const 
Returns the device name.
 
void SetBrightness(float brightness)
 
void Pause()
Pauses video playback.
 
void SetEchoFeedback(float feedback)
 
bool BuildLiveWebcamGraph(moBucketsPool *pBucketsPool, moCaptureDevice &p_capdev)
 
Manager of moBucket objects.
 
void WaitForFormatDefinition(MOulong timeout)
 
int GetSourceWidth() const 
Returns the width of the source image.

virtual bool AddCaptureDevice(moCaptureDevice &p_capdev)
Adds a video device.
 
static moDebug * MODebug2
Error-printing class for debugging.
 
static void cb_newpad(moGstElement *decodebin, moGstPad *pad, moGBoolean last, moGPointer u_data)
 
Definition of a video device, typically a video capture device or camera.
 
static void on_rtsppadd_added(moGstElement *rtspsrc, moGstPad *pad, moGPointer u_data)
 
Memory space for sharing data between objects.
 
void Push(moText p_text)
Pushes the message onto the message stack.

MOulong GetFramesLength()
The number of frames, i.e. the length of the stream.
 
virtual MOulong GetPositionMS()
 
void SetVolume(float volume)
 
virtual moStreamState GetState()
Playback state.
 
void SetName(const moText &p_name)
 
bool BuildLiveQTVideoGraph(moText filename, moBucketsPool *pBucketsPool)
 
void Play()
Plays the video.
 
bool BuildRecordGraph(moText filename, moBucketsPool *pBucketsPool)
 
LIBMOLDEO_API moText0 IntToStr(int a)
 
MObyte * GetFrameBuffer(MOlong *size)
 
MOubyte * GetBuffer()
Returns the pointer to the data buffer.
 
void SetSaturation(float saturation)
 
moBucket * RetreiveBucket()
 
void SetBuffer(MOlong size, MOubyte *pbuf)
Creates a memory block and copies the values from a pointer into that memory block...
 
void CopyVideoFrame(void *bufferdst, int size)
 
void SetEchoDelay(float delay)
 
void SetVideoFormat(moGstCaps *caps, moGstBuffer *buffer=NULL)
 
MOboolean m_bInitialized
Initialization value.
 
int Find(const moText0 &target)
Splits the text by the specified separator character.
 
virtual MOulong GetPosition()
 
bool SetCaptureDevice(moText deviceport, MOint idevice=0)
 
moVideoFormat m_VideoFormat
 
void Message(moText p_text)
Announces a message to the user and also stores it in the text log.
 
long cb_have_data_handler_id
 
bool BuildLiveDVGraph(moBucketsPool *pBucketsPool, moCaptureDevice &p_capdev)
 
virtual bool FinishGraph()
Graph finalization.
 
void SetBalance(float balance)
Sets the balance between the left and right channels, if present.
 
bool AddBucket(moBucket *pBucket)
 
long signal_rtsppad_added_id
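Taken together, a minimal sketch of how this class is typically driven (assumed usage based on the member index above; the file name is a placeholder):

    moGsGraph graph;
    moBucketsPool pool;

    if (graph.InitGraph() && graph.BuildLiveVideoGraph( moText("clip.mov"), &pool )) {
        graph.Play();
        moVideoFormat fmt = graph.GetVideoFormat();   // width/height known once prerolled
        // ... consume frames from the buckets pool while graph.IsRunning() ...
        graph.Stop();
        graph.FinishGraph();
    }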