38 #include <gst/interfaces/propertyprobe.h> 52 #include <gst/app/gstappsink.h> 53 #define DECODEBIN "decodebin" 54 #define VIDEOCONVERT "videoconvert" 56 #define VIDEOCONVERT "ffmpegcolorspace" 58 #define DECODEBIN "decodebin" 60 #define DECODEBIN "decodebin2" 64 #define USING_SYNC_FRAMEBUFFER 66 static gboolean
bus_call ( GstBus *bus, GstMessage *msg,
void* user_data)
73 const GstStructure *s;
75 s = gst_message_get_structure ((GstMessage *)msg);
88 sstr = gst_structure_to_string (s);
99 switch (GST_MESSAGE_TYPE (msg))
101 case GST_MESSAGE_EOS:
110 case GST_MESSAGE_ERROR:
116 gst_message_parse_error ((GstMessage *)msg, &err, &debug);
212 GstAppSink* psink = (GstAppSink*) appsink;
213 if (!psink)
return GST_FLOW_ERROR;
219 if (!pGsGraph)
return GST_FLOW_ERROR;
227 GstSample* sample = gst_app_sink_pull_sample ( psink );
232 return GST_FLOW_ERROR;
237 GstCaps* bcaps = gst_sample_get_caps( sample );
238 if (!bcaps)
return GST_FLOW_OK;
240 GstBuffer* Gbuffer = gst_sample_get_buffer (sample);
241 int bsize = gst_buffer_get_size( Gbuffer );
242 if (!( bsize>0 && (
int)bsize<=(h*w*4) ))
return GST_FLOW_ERROR;
252 gst_sample_unref(sample);
257 if (pbucket==NULL)
return GST_FLOW_ERROR;
261 gst_buffer_map ( Gbuffer, &mapinfo, GST_MAP_READ);
280 gst_buffer_unmap ( Gbuffer, &mapinfo );
293 gst_sample_unref(sample);
323 GstStructure* str = NULL;
325 GstCaps* caps = NULL;
329 Gbuffer = (GstBuffer*)buffer;
330 caps = Gbuffer->caps;
332 GstPadProbeInfo* Ginfo = (GstPadProbeInfo*) info;
333 Gbuffer = GST_PAD_PROBE_INFO_BUFFER ( Ginfo );
336 caps = gst_pad_get_current_caps( Gpad );
340 str = gst_caps_get_structure ( (caps), 0);
349 const gchar *strname;
351 strname = gst_structure_get_name( str );
352 sstr = gst_structure_to_string (str);
357 gchar* isaudio = NULL;
358 gchar* isvideo = NULL;
360 isaudio = g_strrstr (strname,
"audio");
361 isvideo = g_strrstr (strname,
"video");
392 bsize = Gbuffer->size;
394 bsize = gst_buffer_get_size( Gbuffer );
397 if ( bsize>0 && (
int)bsize<=(h*w*4) ) {
412 gint value_numerator, value_denominator;
413 gst_structure_get_fraction( str,
"framerate", &value_numerator, &value_denominator );
449 gst_object_unref( caps );
460 GstCaps *caps = NULL;
461 GstPadLinkReturn padlink;
462 gchar* padname = NULL;
463 const gchar* strname = NULL;
464 const gchar* medianame = NULL;
465 GstStructure *str = NULL;
466 GstPad* Gpad = (GstPad*) pad;
469 if (gst_pad_is_linked(Gpad)) {
476 caps = gst_pad_get_caps (Gpad);
477 padname = gst_pad_get_name(Gpad);
479 str = gst_caps_get_structure (caps, 0);
483 sstr = gst_structure_to_string (str);
484 strname = gst_structure_get_name (str);
485 medianame = gst_structure_get_string (str,
"media");
488 moText dbgstr = medianame;
491 if (g_strrstr (medianame,
"video")) {
495 if (padlink==GST_PAD_LINK_OK) {
500 padlink = gst_pad_link ( Gpad, (GstPad*)pGsGraph->
m_pDecoderBin );
501 if (padlink==GST_PAD_LINK_OK) {
529 GstCaps *caps = NULL;
530 GstPad *videopad = NULL;
532 GstPad *audiopadinconverter = NULL;
533 GstPadLinkReturn padlink;
534 gchar* padname = NULL;
535 const gchar* strname = NULL;
536 GstStructure *str = NULL;
537 GstPad* Gpad = (GstPad*) pad;
540 GstElement* SinkElement = NULL;
542 cout <<
"cb_pad_added_new" << endl;
545 if (gst_pad_is_linked(Gpad)) {
546 cout <<
"cb_pad_added_new already linked!" << endl;
555 caps = gst_pad_get_caps (Gpad);
557 caps = gst_pad_get_current_caps(Gpad);
559 padname = gst_pad_get_name(Gpad);
561 str = gst_caps_get_structure (caps, 0);
563 const gchar *sstr=NULL;
565 sstr = gst_structure_to_string (str);
566 cout <<
"cb_pad_added_new: new pad: " << padname <<
"caps:" << sstr << endl;
572 MODebug2->
Error(
moText(
"moGsGraph::cb_pad_added_new > sstr gst_structure_to_string is empty") );
573 }
else strname = gst_structure_get_name (str);
574 cout <<
"cb_newpad: new pad: " << padname <<
"strname:" << strname << endl;
576 bool is_rtsp =
false;
577 if (g_strrstr (strname,
"application/x-rtp")) {
579 strname = gst_structure_get_string(str,
"media");
580 cout <<
"application/x-rtp: media: " << strname << endl;
584 bool forcing_video =
false;
585 bool is_video =
false;
586 bool is_audio =
false;
589 MODebug2->
Error(
moText(
"moGsGraph::cb_pad_added_new > gst_structure_to_string is empty, forcing video!") );
591 forcing_video =
true;
593 is_video = g_strrstr (strname,
"video");
594 is_audio = g_strrstr (strname,
"audio");
610 audiopadinconverter = gst_element_get_pad ( (GstElement*) pGsGraph->
m_pAudioConverter,
"sink");
613 audiopadinconverter = gst_element_get_static_pad ( (GstElement*) pGsGraph->
m_pAudioConverter,
"sink");
616 padlink = gst_pad_link (Gpad, audiopadinconverter);
619 GstPad* srcAudio = gst_element_get_static_pad ( (GstElement*)pGsGraph->
m_pAudioConverter,
"src");
622 if (padlink==GST_PAD_LINK_OK) {
636 audiopadinconverter = gst_element_get_static_pad ( (GstElement*) pGsGraph->
m_pAudioSink,
"sink");
637 padlink = gst_pad_link (Gpad, audiopadinconverter);
641 }
else if (is_video || forcing_video ) {
648 padlink = gst_pad_link( Gpad, videopad );
650 if (padlink==GST_PAD_LINK_OK) {
651 cout <<
"moGsGraph::cb_pad_added_new: linked rtsp source with rtsp depay" << endl;
653 cout <<
"moGsGraph::cb_pad_added_new: ERROR: UNlinked rtsp source with rtsp depay" << endl;
664 videopad = gst_element_get_pad ( SinkElement,
"sink");
666 padlink = gst_pad_link( Gpad, videopad );
669 videopad = gst_element_get_static_pad( SinkElement,
"sink");
671 padlink = gst_pad_link( Gpad, videopad );
679 if (padlink==GST_PAD_LINK_OK) {
683 GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->
m_pColorSpace,
"src");
687 MODebug2->
Message(
moText(
"moGsGraph::cb_pad_added_new > padlink success, rock and rolling live video.") );
689 GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->
m_pFakeSink,
"sink");
706 videopad = gst_element_get_pad ( (GstElement*)pGsGraph->
m_pVideoScale,
"sink");
708 videopad = gst_element_get_static_pad ( (GstElement*)pGsGraph->
m_pVideoScale,
"sink");
714 padlink = gst_pad_link( Gpad, videopad );
716 if (padlink==GST_PAD_LINK_OK) {
720 GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->
m_pColorSpace,
"src");
723 GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->
m_pColorSpace,
"src");
725 GST_PAD_PROBE_TYPE_BUFFER,
746 GstCaps *caps = NULL;
747 GstPad *videopad = NULL;
749 GstPad *audiopadinconverter = NULL;
750 GstPadLinkReturn padlink;
751 gchar* padname = NULL;
752 const gchar* strname = NULL;
753 GstStructure *str = NULL;
754 GstPad* Gpad = (GstPad*) pad;
757 GstElement* SinkElement = NULL;
759 cout <<
"pad added" << endl;
760 if (gst_pad_is_linked(Gpad)) {
769 caps = gst_pad_get_caps (Gpad);
771 caps = gst_pad_get_current_caps(Gpad);
773 padname = gst_pad_get_name(Gpad);
775 str = gst_caps_get_structure (caps, 0);
779 sstr = gst_structure_to_string (str);
780 cout <<
"cb_newpad: new pad: " << padname <<
" caps:" << sstr << endl;
782 strname = gst_structure_get_name (str);
783 cout <<
"cb_newpad: new pad: " << padname <<
" strname:" << strname << endl;
786 if (g_strrstr (strname,
"audio")) {
789 MODebug2->
Message(
"moGsGraph::cb_pad_added: audio pad created > creating audio filters!");
796 gboolean link_audioresult = gst_element_link_many( (GstElement*)pGsGraph->
m_pAudioConverter,
800 if (link_audioresult) {
802 audiopadinconverter = gst_element_get_pad ( (GstElement*) pGsGraph->
m_pAudioConverter,
"sink");
804 audiopadinconverter = gst_element_get_static_pad( (GstElement*) pGsGraph->
m_pAudioConverter,
"sink");
806 padlink = gst_pad_link (Gpad, audiopadinconverter);
809 GstPad* srcAudio = gst_element_get_pad ( (GstElement*)pGsGraph->
m_pAudioConverter,
"src");
811 GstPad* srcAudio = gst_element_get_static_pad( (GstElement*)pGsGraph->
m_pAudioConverter,
"src");
813 if (padlink==GST_PAD_LINK_OK) {
818 GST_PAD_PROBE_TYPE_BUFFER,
828 audiopadinconverter = gst_element_get_pad ( (GstElement*) pGsGraph->
m_pAudioSink,
"sink");
830 audiopadinconverter = gst_element_get_static_pad ( (GstElement*) pGsGraph->
m_pAudioSink,
"sink");
832 padlink = gst_pad_link (Gpad, audiopadinconverter);
836 }
else if (g_strrstr (strname,
"video")) {
840 cout <<
"is video" << endl;
845 cout <<
"SinkElement: m_pColorSpace" << endl;
850 videopad = gst_element_get_pad ( SinkElement,
"sink");
852 padlink = gst_pad_link( Gpad, videopad );
855 videopad = gst_element_get_static_pad( SinkElement,
"sink");
857 padlink = gst_pad_link( Gpad, videopad );
865 if (padlink==GST_PAD_LINK_OK) {
868 GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->
m_pColorSpace,
"src");
872 GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->
m_pColorSpace,
"src");
874 GST_PAD_PROBE_TYPE_BUFFER,
886 videopad = gst_element_get_pad ( (GstElement*)pGsGraph->
m_pVideoScale,
"sink");
888 videopad = gst_element_get_static_pad ( (GstElement*)pGsGraph->
m_pVideoScale,
"sink");
894 padlink = gst_pad_link( Gpad, videopad );
896 if (padlink==GST_PAD_LINK_OK) {
900 GstPad* srcRGB = gst_element_get_pad ( (GstElement*)pGsGraph->
m_pColorSpace,
"src");
903 GstPad* srcRGB = gst_element_get_static_pad ( (GstElement*)pGsGraph->
m_pColorSpace,
"src");
905 GST_PAD_PROBE_TYPE_BUFFER,
929 static gboolean white = FALSE;
931 GstElement* Gfakesrc = (GstElement*)fakesrc;
932 GstBuffer* Gbuffer = (GstBuffer*)buffer;
933 GstPad* Gpad = (GstPad*)pad;
943 pGsGraph->
CopyVideoFrame( GST_BUFFER_DATA (Gbuffer), GST_BUFFER_SIZE (Gbuffer) );
947 memset (GST_BUFFER_DATA (Gbuffer), white ? 0xff : 0x0, GST_BUFFER_SIZE (Gbuffer));
955 caps = gst_caps_new_simple (
"video/x-raw-rgb",
"width", G_TYPE_INT, 400,
956 "height", G_TYPE_INT, 300,
957 "bpp", G_TYPE_INT, 24,
958 "depth", G_TYPE_INT, 24,
959 "framerate", GST_TYPE_FRACTION, 10, 1,
961 gst_buffer_set_caps (Gbuffer, caps);
962 gst_caps_unref (caps);
974 event_loop (GstElement * pipeline, gboolean blocking, GstState target_state)
978 gboolean res = FALSE;
979 gboolean buffering = FALSE;
981 bus = gst_element_get_bus (GST_ELEMENT (pipeline));
986 message = gst_bus_poll (bus, GST_MESSAGE_ANY, blocking ? -1 : 0);
994 const GstStructure *s;
996 s = gst_message_get_structure (message);
998 g_print ((
"Got Message from element \"%s\" (%s): "),
999 GST_STR_NULL (GST_ELEMENT_NAME (GST_MESSAGE_SRC (message))),
1000 gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
1004 sstr = gst_structure_to_string (s);
1005 g_print (
"%s\n", sstr);
1008 g_print (
"no message details\n");
1012 switch (GST_MESSAGE_TYPE (message)) {
1014 case GST_MESSAGE_WARNING:{
1017 gchar *name = gst_object_get_path_string (GST_MESSAGE_SRC (message));
1020 GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
1021 GST_DEBUG_GRAPH_SHOW_ALL,
"gst-launch.warning");
1023 gst_message_parse_warning (message, &gerror, &debug);
1024 g_print ((
"WARNING: from element %s: %s\n"), name, gerror->message);
1026 g_print ((
"Additional debug info:\n%s\n"), debug);
1028 g_error_free (gerror);
1033 case GST_MESSAGE_ERROR:{
1038 GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
1039 GST_DEBUG_GRAPH_SHOW_ALL,
"gst-launch.error");
1041 gst_message_parse_error (message, &gerror, &debug);
1042 gst_object_default_error (GST_MESSAGE_SRC (message), gerror, debug);
1043 g_error_free (gerror);
1049 case GST_MESSAGE_STATE_CHANGED:{
1050 GstState old, mnew, pending;
1052 gst_message_parse_state_changed (message, &old, &mnew, &pending);
1059 if (GST_MESSAGE_SRC (message) != GST_OBJECT_CAST (pipeline))
1076 (
"Prerolled, waiting for buffering to finish...\n"));
1081 if (target_state == GST_STATE_PAUSED && mnew == target_state)
1087 case GST_MESSAGE_BUFFERING:{
1090 gst_message_parse_buffering (message, &percent);
1091 fprintf (stderr, (
"buffering... %d \r"), percent);
1099 if (percent == 100) {
1103 if (target_state == GST_STATE_PLAYING) {
1105 (
"Done buffering, setting pipeline to PLAYING ...\n"));
1106 gst_element_set_state (pipeline, GST_STATE_PLAYING);
1111 if (buffering == FALSE && target_state == GST_STATE_PLAYING) {
1113 fprintf (stderr, (
"Buffering, setting pipeline to PAUSED ...\n"));
1114 gst_element_set_state (pipeline, GST_STATE_PAUSED);
1120 case GST_MESSAGE_APPLICATION:{
1121 const GstStructure *s;
1123 s = gst_message_get_structure (message);
1125 if (gst_structure_has_name (s,
"GstLaunchInterrupt")) {
1128 fprintf (stderr, (
"Interrupt: Stopping pipeline ...\n"));
1139 gst_message_unref (message);
1141 g_assert_not_reached ();
1146 gst_message_unref (message);
1147 gst_object_unref (bus);
1246 GstPropertyProbe* probe;
1252 GValue valDef = { 0, };
1259 m_CaptureDevices.Empty();
1261 for(
int i=0; i<m_PreferredDevices.Count(); i++) {
1263 +
moText(
" Name: ") + m_PreferredDevices[i].GetName()
1264 +
moText(
" LabelName: ") + m_PreferredDevices[i].GetLabelName()
1265 +
moText(
" Path: ") + m_PreferredDevices[i].GetPath()
1267 +
moText(
" W: ") +
IntToStr(m_PreferredDevices[i].GetSourceWidth())
1268 +
moText(
" H: ") +
IntToStr(m_PreferredDevices[i].GetSourceHeight())
1269 +
moText(
" FlipH: ") +
IntToStr(m_PreferredDevices[i].GetSourceFlipH())
1270 +
moText(
" FlipV: ") +
IntToStr(m_PreferredDevices[i].GetSourceFlipV())
1271 +
moText(
" Bpp: ") +
IntToStr(m_PreferredDevices[i].GetSourceBpp()) );
1274 if (m_PreferredDevices.Count()==0) {
1282 m_PreferredDevices.Add( newdev );
1288 moText dname(
"ksvideosrc" );
1290 moText dname(
"dshowvideosrc" );
1292 device_name = dname;
1294 for(
MOuint i=0; i<m_PreferredDevices.Count();i++) {
1297 AddCaptureDevice( CaptDev );
1327 device_name =
"wrappercamerabinsrc";
1329 device_name =
moText(
"v4l2src");
1337 for(
int i=0; i<m_PreferredDevices.Count();i++) {
1340 AddCaptureDevice( CaptDev );
1352 device = gst_element_factory_make (device_name,
"source");
1353 gst_element_get_state(device, NULL, NULL, 5 * GST_SECOND);
1354 moText probepname =
"device-name";
1355 if (!device || !GST_IS_PROPERTY_PROBE(device))
1357 probe = GST_PROPERTY_PROBE (device);
1359 plist = (GList *)gst_property_probe_get_properties( probe );
1361 plist = (GList *)g_list_first(plist);
1363 pm = (GParamSpec *)plist->data;
1366 probepname =
moText((
char*)pm->name);
1367 MODebug2->
Message(
"moGsFramework::LoadCaptureDevices > probe property:"+probepname);
1368 va = gst_property_probe_get_values(probe, pm);
1370 MODebug2->
Message(
"moGsFramework::LoadCaptureDevices > probe property:"+probepname+
" has values!");
1374 }
while( plist=g_list_next(plist) );
1377 va = gst_property_probe_get_values_name (probe, (
char*)probepname);
1381 g_value_init( &valDef, G_PARAM_SPEC_VALUE_TYPE(pm) );
1383 g_param_value_set_default( pm, &valDef );
1386 moText defaultText(g_value_get_string( vdefault ));
1387 MODebug2->
Message(
"moGsFramework::LoadCaptureDevices > Default value for: \""+
moText((
char*)probepname)+
"\" is "+defaultText);
1389 moText cap_dev_name = defaultText;
1396 m_CaptureDevices.Add( newdev );
1403 for(guint i=0; i < va->n_values; ++i) {
1404 GValue* v = g_value_array_get_nth(va, i);
1406 GString* stv = g_string_new( g_value_get_string(v) );
1415 m_CaptureDevices.Add( newdev );
1416 MODebug2->
Message(
"moGsFramework::LoadCaptureDevices > AUTO Added" );
1420 g_value_array_free(va);
1424 gst_element_set_state (device, GST_STATE_NULL);
1425 gst_object_unref(GST_OBJECT (device));
1429 MODebug2->
Error(
"moGsFramework::LoadCaptureDevices > exception error.");
1432 #if (GST_VERSION_MINOR >= 8) 1433 GstDeviceMonitor *monitor = NULL;
1434 GList *devices = NULL;
1435 GstStructure* properties = NULL;
1437 monitor = gst_device_monitor_new();
1438 GstCaps *mon_caps = gst_caps_new_empty_simple (
"video/x-raw");
1439 gst_device_monitor_add_filter (monitor,
"Video/Source", mon_caps);
1440 gst_caps_unref (mon_caps);
1442 if (!gst_device_monitor_start (monitor))
1443 g_error (
"Failed to start device monitor!");
1445 devices = gst_device_monitor_get_devices (monitor);
1447 if (devices != NULL) {
1448 while (devices != NULL) {
1449 GstDevice *device = (GstDevice*)devices->data;
1451 gchar *device_class, *caps_str, *name,*device_path;
1455 caps = gst_device_get_caps (device);
1457 size = gst_caps_get_size (caps);
1459 name = gst_device_get_display_name (device);
1460 properties = gst_device_get_properties(device);
1463 device_path = gst_structure_get_string(properties,
"device.path");
1471 device_class = gst_device_get_device_class (device);
1473 for (i = 0; i < size; ++i) {
1474 GstStructure *s = gst_caps_get_structure (caps, i);
1475 caps_str = gst_structure_to_string (s);
1481 moText cap_dev_name = name;
1483 if (m_CaptureDevices.Count()>idev) {
1487 if (idev>0) upddev.
SetName(cap_dev_name);
1488 upddev.
m_Path = device_path;
1491 m_CaptureDevices.Set(idev,upddev);
1499 newdev.
m_Path = device_path;
1502 m_CaptureDevices.Add( newdev );
1508 gst_object_unref (device);
1509 devices = g_list_remove_link (devices, devices);
1513 g_print (
"No devices found!\n");
1515 #endif // GST_VERSION_MINOR 1520 return &m_CaptureDevices;
1529 return &m_CaptureDevices;
1547 for(
int i=0; i<(int)m_CaptureDevices.Count(); i++) {
1548 if ( m_CaptureDevices[i].GetName() == p_capdev.
GetName() ) {
1553 m_CaptureDevices.Add( p_capdev );
1694 gst_object_unref (m_pGstBus);
1744 g_main_loop_unref( (GMainLoop*) m_pGMainLoop);
1746 m_pGMainLoop = NULL;
1752 GstPad* srcRGB = gst_element_get_pad ( (GstElement*)
m_pColorSpace,
"src");
1819 g_signal_handler_disconnect ( (GstElement*)m_pDecoderBin,
signal_newpad_id );
1822 m_pDecoderBin = NULL;
1855 m_pFakeSource = NULL;
1870 m_pVideoDeinterlace = NULL;
1886 m_pGstPipeline = NULL;
1927 memcpy( bufferdst, (
void*)pbuf, size );
1971 pBucketsPool = NULL;
1984 bool link_result =
false;
1997 m_pFakeSource = gst_element_factory_make (
"fakesrc",
"source");
2002 "signal-handoffs", TRUE,
2003 "sizemax", 400 * 300 * 3,
2006 "num-buffers", 30*200,
2007 "sizetype", 2, NULL);
2016 m_pCapsFilter = gst_element_factory_make (
"capsfilter",
"filtsource");
2018 g_object_set (G_OBJECT (
m_pCapsFilter),
"caps", gst_caps_new_simple (
"video/x-raw-rgb",
2019 "width", G_TYPE_INT, 400,
2020 "height", G_TYPE_INT, 300,
2021 "framerate", GST_TYPE_FRACTION, 10, 1,
2022 "bpp", G_TYPE_INT, 24,
2023 "depth", G_TYPE_INT, 24,
2024 "red_mask",G_TYPE_INT, 255,
2025 "green_mask",G_TYPE_INT, 65280,
2026 "blue_mask",G_TYPE_INT, 16711680,
2027 "endianness", G_TYPE_INT, 4321,
2043 m_pEncoder = gst_element_factory_make(
"ffenc_mpeg1video",
"encoder");
2048 m_pMultiplexer = gst_element_factory_make(
"ffmux_mpeg",
"multiplexer");
2053 m_pFileSink = gst_element_factory_make(
"filesink",
"filesink");
2055 g_object_set (G_OBJECT (
m_pFileSink),
"location", (
char*)filename, NULL);
2065 gst_element_set_state ( (GstElement*)
m_pGstPipeline, GST_STATE_PLAYING);
2070 }
else return false;
2083 pBucketsPool = NULL;
2111 GstCaps *caps = NULL;
2112 GstCaps *rsrc_caps = NULL;
2113 bool link_result =
false;
2115 bool b_sourceselect =
false;
2116 bool b_forcevideoscale =
false;
2117 bool b_forcevideoflip =
false;
2119 bool b_forcevideointerlace =
false;
2133 MOint p_sourcewidth;
2134 MOint p_sourceheight;
2137 MOint p_forceheight;
2145 devicename = p_capdev.
GetName();
2146 devicepath = p_capdev.
GetPath();
2149 colormode =
moText(
"video/x-raw-yuv");
2152 colormode =
moText(
"video/x-raw-rgb");
2167 if (p_forcewidth!=0 || p_forceheight!=0) {
2168 b_forcevideoscale =
true;
2171 if (p_forceflipH!=0 || p_forceflipV!=0) {
2172 b_forcevideoflip =
true;
2175 if (p_sourcewidth!=0 || p_sourceheight!=0) {
2176 b_sourceselect =
true;
2179 devinfo =
moText(
"Label/Texture ") + labelname;
2180 devinfo+=
moText(
"; DeviceName ") + devicename;
2181 devinfo+=
moText(
"; DevicePath ") + devicepath;
2182 devinfo+=
moText(
"; colormode ") + colormode;
2187 if (devicename.
Length()>0)
2197 rtspindex = dname.find(
"rtsp");
2198 httpindex = dname.find(
"http");
2201 if ( labelname==
moText(
"RTSP") || rtspindex == 0 ) {
2203 m_pRTSPSource = gst_element_factory_make (
"rtspsrc",
"source");
2204 m_pRTSPDepay = gst_element_factory_make (
"rtph264depay",
"depay");
2214 }
else if (labelname==
moText(
"HTTP") || httpindex==0 ) {
2215 m_pHTTPSource = gst_element_factory_make (
"souphttpsrc",
"source");
2226 m_pFileSource = gst_element_factory_make (
"ksvideosrc",
"source");
2228 m_pFileSource = gst_element_factory_make (
"dshowvideosrc",
"source");
2233 m_pFileSource = gst_element_factory_make (
"wrappercamerabinsrc",
"source");
2234 cout <<
"wrappercamerabinsrc created!" << endl;
2236 if (devicename==
moText(
"DV"))
2237 m_pFileSource = gst_element_factory_make (
"dv1394src",
"source");
2242 m_pFileSource = gst_element_factory_make (
"v4l2src",
"source");
2247 if (devicename==
moText(
"DV"))
2248 m_pFileSource = gst_element_factory_make (
"dv1394src",
"source");
2250 m_pFileSource = gst_element_factory_make (
"v4l2src",
"source");
2258 if (devicename.
Length() > 0 && ( devicename!=
moText(
"default")) ) {
2259 g_object_set (G_OBJECT (
m_pRTSPSource),
"location", (
char*)devicename, NULL);
2260 g_object_set (G_OBJECT (
m_pRTSPSource),
"latency", (guint) 0, NULL);
2261 g_object_set (G_OBJECT (
m_pRTSPSource),
"debug", (gboolean)
false, NULL);
2288 link_result = gst_element_link_many( m_pRTSPDepay, m_pMultipartDemux, NULL );
2313 g_object_set (G_OBJECT (
m_pHTTPSource),
"location", (
char*)devicename, NULL);
2314 g_object_set (G_OBJECT (
m_pHTTPSource),
"automatic-redirect", TRUE, NULL);
2324 link_result =
false;
2330 if ( link_result ) {
2333 m_pDecoderBin = gst_element_factory_make (
"decodebin",
"decoder");
2335 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveWebcamGraph > SOUP HTTP source failed linking with MultipartDemux"));
2346 if (devicename.
Length() > 0 && ( devicename!=
moText(
"default")) ) {
2347 g_object_set (G_OBJECT (
m_pFileSource),
"device-name", (
char*)devicename, NULL);
2350 if (devicename==
moText(
"DV") ) {
2354 if ( devicename.
Length() > 0 && ( devicename!=
moText(
"default") ) ) {
2355 if (dname.find(
"/dev/" )==0 ) {
2356 g_object_set (G_OBJECT (
m_pFileSource),
"device", (
char*)devicename, NULL);
2357 }
else if (devicepath.
Find(
"/dev/" )==0) {
2358 g_object_set (G_OBJECT (
m_pFileSource),
"device", (
char*)devicepath, NULL);
2360 g_object_set (G_OBJECT (
m_pFileSource),
"device-name", (
char*)devicename, NULL);
2377 GstIterator* iterator = NULL;
2378 iterator = gst_element_iterate_src_pads( (GstElement*)
m_pFinalSource );
2383 GValue item = G_VALUE_INIT;
2385 GstPad* srcpad = NULL;
2386 GstCaps* itemcaps = NULL;
2387 GstCaps* capstpl = NULL;
2388 GstCaps* capsQuery = NULL;
2389 GstPad* peerPad = NULL;
2400 switch (gst_iterator_next (iterator, &item)) {
2402 switch (gst_iterator_next (iterator, &item)) {
2404 case GST_ITERATOR_OK:
2407 srcpad = (GstPad*)item;
2409 srcpad = (GstPad*)g_value_dup_object (&item);
2411 padname = gst_object_get_name((GstObject*) srcpad );
2416 itemcaps = gst_pad_get_caps( srcpad );
2418 itemcaps = gst_pad_get_current_caps( srcpad );
2419 capstpl = gst_pad_get_pad_template_caps( srcpad );
2420 capsQuery = gst_pad_query_caps( srcpad, NULL );
2421 peerPad = gst_pad_get_peer( srcpad );
2429 icapsstr =
moText( gst_caps_to_string(capsQuery) );
2434 g_value_reset (&item);
2437 case GST_ITERATOR_RESYNC:
2439 gst_iterator_resync (iterator);
2441 case GST_ITERATOR_ERROR:
2445 case GST_ITERATOR_DONE:
2451 gst_iterator_free (iterator);
2457 if (b_sourceselect) {
2461 #endif // GSTVERSION 2465 if (b_sourceselect) {
2470 m_pCapsFilterSource = gst_element_factory_make (
"capsfilter",
"filtsource");
2472 if (m_pCapsFilterSource) {
2480 if (colormode==
"") colormode =
"video/x-raw-yuv";
2482 g_object_set (G_OBJECT (m_pCapsFilterSource),
"caps", gst_caps_new_simple ( colormode,
2483 "width", G_TYPE_INT, p_sourcewidth,
2484 "height", G_TYPE_INT, p_sourceheight,
2485 "depth", G_TYPE_INT, 24,
2486 "red_mask",G_TYPE_INT, 16711680,
2487 "green_mask",G_TYPE_INT, 65280,
2488 "blue_mask",G_TYPE_INT, 255,
2494 int opt_framerate = 15;
2495 if (colormode==
"") {
2496 colormode =
"video/x-raw";
2511 moText fullf = colormode+
","+ colormodef;
2521 g_object_set (G_OBJECT (m_pCapsFilterSource),
"caps", gst_caps_new_simple ( colormode,
2523 "width", G_TYPE_INT, p_sourcewidth,
2524 "height", G_TYPE_INT, p_sourceheight,
2529 colormode=
"video/x-raw-yuv";
2531 if (colormode==
"video/x-raw-rgb") {
2533 }
else if (colormode==
"video/x-raw-yuv") {
2537 colormode=
"video/x-raw";
2539 g_object_set (G_OBJECT (m_pCapsFilterSource),
"caps", gst_caps_new_simple ( colormode,
2542 "width", G_TYPE_INT, p_sourcewidth,
2543 "height", G_TYPE_INT, p_sourceheight,
2559 res = gst_bin_add (GST_BIN (
m_pGstPipeline), (GstElement*) m_pCapsFilterSource );
2566 if (b_forcevideoflip) {
2592 m_pVideoFlip = gst_element_factory_make (
"videoflip",
"flip");
2595 if (p_forceflipH==1 && p_forceflipV==1) {
2597 }
else if (p_forceflipH==1) {
2599 }
else if (p_forceflipV==1) {
2603 g_object_set (G_OBJECT (
m_pVideoScale),
"method", &method, NULL);
2606 g_object_set (G_OBJECT (m_pVideoFlip),
"method", (
int)method, NULL);
2613 b_forcevideoscale =
false;
2614 if (b_forcevideoscale) {
2616 m_pVideoScale = gst_element_factory_make (
"videoscale",
"scale");
2619 colormode =
"video/x-raw";
2622 g_object_set (G_OBJECT (
m_pVideoScale),
"method", &method, NULL);
2626 m_pCapsFilter2 = gst_element_factory_make (
"capsfilter",
"filt2");
2627 if (m_pCapsFilter2) {
2628 if (b_forcevideoscale) {
2629 g_object_set (G_OBJECT (m_pCapsFilter2),
"caps", gst_caps_new_simple ( colormode,
2630 "width", G_TYPE_INT, p_forcewidth,
2631 "height", G_TYPE_INT, p_forceheight,
2634 g_object_set (G_OBJECT (m_pCapsFilter2),
"caps", gst_caps_new_simple ( colormode,
2635 "width", G_TYPE_INT, 240,
2636 "height", G_TYPE_INT, 160,
2640 res = gst_bin_add (GST_BIN (
m_pGstPipeline), (GstElement*) m_pCapsFilter2 );
2647 b_forcevideointerlace =
false;
2648 if (b_forcevideointerlace) {
2671 m_pCapsFilter = gst_element_factory_make (
"capsfilter",
"filt");
2675 g_object_set (G_OBJECT (
m_pCapsFilter),
"caps", gst_caps_new_simple (
"video/x-raw-rgb",
2676 "bpp", G_TYPE_INT, 24,
2677 "depth", G_TYPE_INT, 24,
2678 "red_mask",G_TYPE_INT, 255,
2679 "green_mask",G_TYPE_INT, 65280,
2680 "blue_mask",G_TYPE_INT, 16711680,
2681 "endianness", G_TYPE_INT, 4321,
2685 caps = gst_caps_new_simple (
"video/x-raw",
2686 "format", G_TYPE_STRING,
"RGB",
2688 g_object_set (G_OBJECT (m_pCapsFilter),
"caps", caps, NULL);
2689 res = gst_bin_add (GST_BIN (
m_pGstPipeline), (GstElement*) m_pCapsFilter );
2713 signal_newpad_id = g_signal_connect (m_pDecoderBin,
"pad-added", G_CALLBACK (cb_pad_added_new), (gpointer)
this);
2718 m_pFakeSink = gst_element_factory_make (
"fakesink",
"destout");
2720 cout <<
"creating FakeSink from appsink" << endl;
2721 m_pFakeSink = gst_element_factory_make (
"appsink",
"destout");
2727 g_object_set (G_OBJECT (
m_pFakeSink),
"caps", caps, NULL);
2728 g_object_set (G_OBJECT (
m_pFakeSink),
"sync",
false, NULL);
2729 g_object_set (G_OBJECT (
m_pFakeSink),
"drop",
true, NULL);
2736 if (b_sourceselect) {
2737 cout <<
"linking m_pFinalSource, m_pCapsFilterSource, m_pDecoderBin" << endl;
2738 if (b_forcevideoflip) {
2739 link_result = gst_element_link_many( (GstElement*) m_pFinalSource, (GstElement*) m_pCapsFilterSource,
2742 (GstElement*) m_pDecoderBin, NULL );
2744 link_result = gst_element_link_many( (GstElement*) m_pFinalSource, (GstElement*) m_pCapsFilterSource,
2746 (GstElement*) m_pDecoderBin, NULL );
2749 if (b_forcevideoflip) {
2750 link_result = gst_element_link_many( (GstElement*) m_pFinalSource,
2752 (GstElement*) m_pDecoderBin, NULL );
2754 cout <<
"linking m_pFinalSource, m_pDecoderBin" << endl;
2755 link_result = gst_element_link_many( (GstElement*) m_pFinalSource,
2756 (GstElement*) m_pDecoderBin, NULL );
2763 if (b_forcevideoscale) {
2764 cout <<
"linking forcing videoscale" << endl;
2765 if (b_forcevideointerlace)
2768 link_result = gst_element_link_many( (GstElement*) m_pVideoScale, (GstElement*)m_pCapsFilter2, (GstElement*) m_pColorSpace, (GstElement*) m_pCapsFilter, (GstElement*) m_pFakeSink, NULL );
2773 cout <<
"linking no videoscale" << endl;
2775 if (b_forcevideointerlace) {
2776 cout <<
"linking m_pColorSpaceInterlace, m_pVideoDeinterlace, m_pColorSpace, m_pCapsFilter, m_pFakeSink" << endl;
2781 (GstElement*) m_pFakeSink,
2784 cout <<
"linking m_pColorSpace, /*m_pCapsFilter*/, m_pFakeSink" << endl;
2785 link_result = gst_element_link_many(
2790 (GstElement*) m_pFakeSink, NULL );
2801 if (ret==
false) ret =
CheckState( gst_element_set_state ((GstElement*) m_pGstPipeline, GST_STATE_PLAYING),
true );
2812 GstSample *sample=NULL;
2816 MODebug2->
Message(
moText(
"moGsGraph::BuildLiveWebcamGraph > gst_app_sink_pull_preroll for appsink. ")+devinfo);
2817 sample = gst_app_sink_pull_preroll( (GstAppSink*) m_pFakeSink );
2821 MODebug2->
Message(
moText(
"moGsGraph::BuildLiveWebcamGraph > RECEIVED sample from gst_app_sink_pull_preroll!")+devinfo);
2830 bcaps = gst_sample_get_caps( sample );
2832 Gbuffer = gst_sample_get_buffer (sample);
2834 gst_app_sink_set_emit_signals((GstAppSink*)m_pFakeSink,
true);
2835 gst_app_sink_set_drop((GstAppSink*)m_pFakeSink,
true);
2836 gst_app_sink_set_wait_on_eos ((GstAppSink*)m_pFakeSink,
false);
2838 gst_app_sink_set_max_buffers((GstAppSink*)m_pFakeSink, 1);
2839 g_signal_connect( (GstElement*)m_pFakeSink,
"new-sample", G_CALLBACK (appsink_new_sample), (gpointer)
this );
2843 }
else MODebug2->
Error(
moText(
"moGsGraph::BuildLiveWebcamGraph > NO sample from gst_app_sink_pull_preroll!")+devinfo);
2845 MODebug2->
Message(
moText(
"moGsGraph::BuildLiveWebcamGraph > gst_app_sink_pull_preroll for appsink ended.")+devinfo);
2858 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveWebcamGraph > m_pColorSpace m_pCapsFilter m_pFakeSink linking failed.")+devinfo);
2859 event_loop( (GstElement*)
m_pGstPipeline,
false, GST_STATE_PAUSED);
2862 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveWebcamGraph > src and decodebin linkage failed:")+devinfo);
2863 event_loop( (GstElement*)
m_pGstPipeline,
false, GST_STATE_PAUSED);
2867 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveWebcamGraph > fakesink construction failed.")+devinfo);
2868 event_loop( (GstElement*)
m_pGstPipeline,
false, GST_STATE_PAUSED);
2871 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveWebcamGraph > decodebin construction failed.")+devinfo);
2872 event_loop( (GstElement*)
m_pGstPipeline,
false, GST_STATE_PAUSED);
2875 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveWebcamGraph > final source failed.")+devinfo);
2876 event_loop( (GstElement*)
m_pGstPipeline,
false, GST_STATE_PAUSED);
2902 GValue gvalue = G_VALUE_INIT;
2907 piter = gst_element_iterate_pads( (GstElement*)FilterElement );
2912 switch (gst_iterator_next (piter, &ppointer)) {
2914 switch (gst_iterator_next (piter, &gvalue)) {
2916 case GST_ITERATOR_OK:
2919 ppad = (GstPad*) ppointer;
2921 ppad = (GstPad*) g_value_dup_object( &gvalue );
2923 nname = gst_pad_get_name(ppad);
2924 res = gst_pad_is_active(ppad);
2925 res = gst_pad_is_linked(ppad);
2926 res = gst_pad_is_blocking(ppad);
2928 gst_object_unref (ppointer);
2930 g_value_reset( &gvalue );
2934 case GST_ITERATOR_RESYNC:
2936 gst_iterator_resync (piter);
2939 case GST_ITERATOR_ERROR:
2944 case GST_ITERATOR_DONE:
2949 gst_iterator_free (piter);done = FALSE;
2982 while((time1 - time0) < timeout) {
2991 MODebug2->
Error(
"moGsGraph::WaitForFormatDefinition > time out !!! " +
IntToStr(timeout) +
" ms elapsed!");
2997 bool link_result =
false;
3003 moFile SoundFile( filename );
3005 if ( !SoundFile.
Exists() )
return false;
3010 moText extension = filename;
3013 m_pFileSource = gst_element_factory_make (
"filesrc",
"source");
3017 g_object_set (G_OBJECT (
m_pFileSource),
"location", (
char*)filename, NULL);
3029 if (extension==
moText(
".wav")) {
3037 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pAudioConverter );
3040 m_pAudioSink = gst_element_factory_make (
"autoaudiosink",
"audioout");
3043 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pAudioSink );
3050 m_pAudioSpeed = gst_element_factory_make (
"speed",
"speed");
3053 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pAudioSpeed );
3059 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pAudioVolume );
3062 m_pAudioPanorama = gst_element_factory_make (
"audiopanorama",
"audiopanorama");
3065 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pAudioPanorama );
3071 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pAudioConverter2 );
3077 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pAudioConverter3 );
3100 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pAudioConverter4 );
3108 signal_newpad_id = g_signal_connect ((GstElement*)m_pDecoderBin,
"pad-added", G_CALLBACK (cb_pad_added_new), (gpointer)
this);
3110 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pDecoderBin );
3115 link_result = gst_element_link_many( (GstElement*)m_pFileSource, (GstElement*)
m_pDecoderBin, NULL );
3147 CheckState( gst_element_set_state ((GstElement*)m_pGstPipeline, GST_STATE_PAUSED),
true );
3151 cout <<
"state gstreamer finish" << endl;
3156 MODebug2->
Error(
moText(
"moGsGraph::error: m_pAudioConverter m_pAudioResample m_pAudioSink linking failed"));
3157 event_loop( (GstElement*)m_pGstPipeline,
false, GST_STATE_PAUSED);
3161 event_loop( (GstElement*)m_pGstPipeline,
false, GST_STATE_PAUSED);
3199 m_pAudioSink = gst_element_factory_make (
"autoaudiosink",
"audioout");
3216 bool link_result =
false;
3220 moFile VideoFile( filename );
3222 if ( !VideoFile.
Exists() )
return false;
3227 m_pFileSource = gst_element_factory_make (
"filesrc",
"source");
3230 g_object_set (G_OBJECT (
m_pFileSource),
"location", (
char*)filename, NULL);
3245 m_pVideoBalance = gst_element_factory_make (
"videobalance",
"videobalance");
3247 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pVideoBalance );
3252 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pColorSpace );
3287 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)
m_pDecoderBin );
3289 m_pFakeSink = gst_element_factory_make (
"fakesink",
"destout");
3294 signal_newpad_id = g_signal_connect (m_pDecoderBin,
"pad-added", G_CALLBACK (cb_pad_added_new), (gpointer)
this);
3299 m_pFakeSink = gst_element_factory_make (
"fakesink",
"destout");
3301 cout <<
"creating FakeSink from appsink" << endl;
3302 m_pFakeSink = gst_element_factory_make (
"appsink",
"destout");
3308 g_object_set (G_OBJECT (
m_pFakeSink),
"caps", gst_caps_new_simple (
"video/x-raw",
3309 "format", G_TYPE_STRING,
"RGB",
3311 g_object_set (G_OBJECT (
m_pFakeSink),
"sync", (
bool)
true, NULL);
3312 g_object_set (G_OBJECT (
m_pFakeSink),
"drop",
true, NULL);
3314 gst_app_sink_set_max_buffers( (GstAppSink*)
m_pFakeSink, 100 );
3316 g_object_set (G_OBJECT (m_pFakeSink),
"sync", (
bool)
true, NULL);
3320 res = gst_bin_add (GST_BIN ((GstElement*)m_pGstPipeline), (GstElement*)m_pFakeSink );
3322 link_result = gst_element_link_many( (GstElement*)m_pFileSource, (GstElement*)m_pDecoderBin, NULL );
3328 link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pColorSpace, (GstElement*)m_pCapsFilter, (GstElement*)m_pFakeSink, NULL );
3330 if (m_pVideoBalance)
3331 link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pVideoBalance, (GstElement*)m_pColorSpace, (GstElement*)m_pFakeSink, NULL );
3333 link_result = gst_element_link_many( (GstElement*)m_pColorSpaceInterlace, (GstElement*)m_pColorSpace, (GstElement*)m_pFakeSink, NULL );
3342 CheckState( gst_element_set_state ((GstElement*)m_pGstPipeline, GST_STATE_PAUSED),
true );
3351 MODebug2->
Message(
moText(
"moGsGraph::BuildLiveVideoGraph > gst_app_sink_pull_preroll for appsink"));
3354 sample = gst_app_sink_pull_preroll( (GstAppSink*) m_pFakeSink );
3364 bcaps = gst_sample_get_caps( sample );
3366 Gbuffer = gst_sample_get_buffer (sample);
3368 gst_app_sink_set_emit_signals((GstAppSink*)m_pFakeSink,
true);
3369 gst_app_sink_set_drop((GstAppSink*)m_pFakeSink,
true);
3370 gst_app_sink_set_wait_on_eos ((GstAppSink*)m_pFakeSink,
false);
3372 gst_app_sink_set_max_buffers((GstAppSink*)m_pFakeSink, 100 );
3373 #ifndef USING_SYNC_FRAMEBUFFER 3374 g_signal_connect( (GstElement*)m_pFakeSink,
"new-sample", G_CALLBACK (appsink_new_sample), (gpointer)
this );
3383 cout <<
"gst_app_sink_is_eos: " << gst_app_sink_is_eos((GstAppSink*)m_pFakeSink) << endl;
3384 cout <<
"gst_app_sink_get_emit_signals: " << gst_app_sink_get_emit_signals((GstAppSink*)m_pFakeSink) << endl;
3385 cout <<
"gst_app_sink_get_max_buffers: " << gst_app_sink_get_max_buffers((GstAppSink*)m_pFakeSink) << endl;
3398 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveVideoGraph > m_pColorSpace m_pCapsFilter m_pFakeSink linking failed"));
3399 event_loop( (GstElement*)m_pGstPipeline,
false, GST_STATE_PAUSED);
3402 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveVideoGraph > filesrc and decodebin linkage failed: ") + filename );
3403 event_loop( (GstElement*)m_pGstPipeline,
false, GST_STATE_PAUSED);
3407 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveVideoGraph > fakesink construction failed"));
3408 event_loop( (GstElement*)m_pGstPipeline,
false, GST_STATE_PAUSED);
3411 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveVideoGraph > decodebin construction failed"));
3412 event_loop( (GstElement*)m_pGstPipeline,
false, GST_STATE_PAUSED);
3415 MODebug2->
Error(
moText(
"moGsGraph::BuildLiveVideoGraph > file source failed: ") + filename);
3416 event_loop( (GstElement*)
m_pGstPipeline,
false, GST_STATE_PAUSED);
3550 bool isfixed =
false;
3551 GstBuffer* Gbuffer = (GstBuffer*)buffer;
3553 isfixed = gst_caps_is_fixed((GstCaps*)caps);
3562 str = gst_caps_get_structure ((GstCaps*)caps, 0);
3566 sstr = gst_structure_to_string (str);
3570 if (g_strrstr( sstr,
"channels" )) {
3575 gint channels, rate;
3577 gst_structure_get_int( str,
"width", &width);
3578 gst_structure_get_int( str,
"depth", &depth);
3579 gst_structure_get_int( str,
"channels", &channels);
3580 gst_structure_get_int( str,
"rate", &rate);
3600 if (Gbuffer!=NULL) {
3614 "SetAudioFormat: we have a format!! " 3622 +
" bytes per buffer, " 3624 +
" nanoseconds per sample " 3634 bool isfixed =
false;
3635 GstBuffer* Gbuffer = (GstBuffer*)buffer;
3637 isfixed = gst_caps_is_fixed((GstCaps*)caps);
3646 str = gst_caps_get_structure ((GstCaps*)caps, 0);
3650 sstr = gst_structure_to_string (str);
3654 if (g_strrstr( sstr,
"width" )) {
3657 gint width, height, value_numerator, value_denominator, redmask, greenmask, bluemask, bitcount;
3659 gst_structure_get_int( str,
"width", &width);
3660 gst_structure_get_int( str,
"height", &height);
3661 gst_structure_get_fraction( str,
"framerate", &value_numerator, &value_denominator );
3662 gst_structure_get_int( str,
"red_mask", &redmask );
3663 gst_structure_get_int( str,
"green_mask", &greenmask );
3664 gst_structure_get_int( str,
"blue_mask", &bluemask );
3665 gst_structure_get_int( str,
"bpp", &bitcount );
3699 "SetVideoFormat: we have a format!!" 3707 +
" buffer duration: " 3771 GstStateChangeReturn Gstate_change_result = (GstStateChangeReturn)state_change_result;
3774 switch(Gstate_change_result) {
3775 case GST_STATE_CHANGE_FAILURE:
3779 case GST_STATE_CHANGE_SUCCESS:
3783 case GST_STATE_CHANGE_ASYNC:
3787 case GST_STATE_CHANGE_NO_PREROLL:
3793 GstStateChangeReturn state_wait;
3794 GstState current_state, pending_state;
3795 GstClockTime time_out = GST_CLOCK_TIME_NONE;
3796 time_out = GST_SECOND;
3798 while(waitforsync) {
3800 state_wait = gst_element_get_state(GST_ELEMENT (
m_pGstPipeline), &current_state, &pending_state, time_out);
3802 switch(state_wait) {
3803 case GST_STATE_CHANGE_SUCCESS:
3804 waitforsync =
false;
3808 case GST_STATE_CHANGE_FAILURE:
3809 waitforsync =
false;
3813 case GST_STATE_CHANGE_ASYNC:
3835 GstStateChangeReturn state_wait;
3836 GstState current_state, pending_state;
3837 GstClockTime time_out = GST_CLOCK_TIME_NONE;
3838 time_out = GST_SECOND;
3840 GstPad* srcRGB = NULL;
3841 bool padactive =
false;
3842 bool padlinked =
false;
3843 bool padblocked =
false;
3844 bool padblocking =
false;
3849 srcRGB = gst_element_get_pad ( (GstElement*)
m_pColorSpace,
"src");
3851 srcRGB = gst_element_get_static_pad ( (GstElement*)m_pColorSpace,
"src" );
3854 padactive = gst_pad_is_active( srcRGB );
3855 padlinked = gst_pad_is_linked( srcRGB );
3856 padblocked = gst_pad_is_blocked( srcRGB );
3857 padblocking = gst_pad_is_blocking( srcRGB );
3861 if (g_main_context_iteration( (GMainContext*)
m_pGMainContext,
false )) {
3881 state_wait = gst_element_get_state(GST_ELEMENT (
m_pGstPipeline), &current_state, &pending_state, time_out);
3890 switch(current_state) {
3891 case GST_STATE_VOID_PENDING:
3895 case GST_STATE_NULL:
3899 case GST_STATE_READY:
3903 case GST_STATE_PAUSED:
3907 case GST_STATE_PLAYING:
3951 GstStateChangeReturn st = gst_element_set_state (GST_ELEMENT (
m_pGstPipeline), GST_STATE_PAUSED);
3959 #define MO_INFINITE -1 3967 gint64 time_nanoseconds;
3987 res = gst_element_seek_simple(
3992 | GST_SEEK_FLAG_KEY_UNIT
4000 time_nanoseconds = frame * GST_MSECOND;
4001 res = gst_element_seek_simple( (GstElement*)
m_pGstPipeline, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT ), time_nanoseconds );
4016 GstFormat fmt = GST_FORMAT_TIME;
4020 if (gst_element_query_duration ((GstElement*)
m_pGstPipeline, &fmt, &len)) {
4022 if (gst_element_query_duration ((GstElement*)m_pGstPipeline, fmt, &len)) {
4040 GstFormat fmt = GST_FORMAT_TIME;
4044 if (gst_element_query_duration ((GstElement*)m_pGstPipeline, &fmt, &len)) {
4046 if (gst_element_query_duration ((GstElement*)m_pGstPipeline, fmt, &len)) {
4064 GstFormat fmt = GST_FORMAT_TIME;
4069 if (gst_element_query_duration ((GstElement*)m_pGstPipeline, &fmt, &dur)) {
4071 if (gst_element_query_duration ((GstElement*)m_pGstPipeline, fmt, &dur)) {
4087 GstFormat fmt = GST_FORMAT_TIME;
4091 if (gst_element_query_position ((GstElement*)m_pGstPipeline, &fmt, &pos)) {
4093 if (gst_element_query_position ((GstElement*)m_pGstPipeline, fmt, &pos)) {
4096 return (pos / 1000000);
4107 GstFormat fmt = GST_FORMAT_TIME;
4111 if (gst_element_query_position ((GstElement*)m_pGstPipeline, &fmt, &pos)) {
4113 if (gst_element_query_position ((GstElement*)m_pGstPipeline, fmt, &pos)) {
4115 return (
MOulong)GST_TIME_AS_MSECONDS(pos);
4123 if (!m_pGstPipeline)
return false;
4124 if (gst_element_get_state ((GstElement*)m_pGstPipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE )
return false;
4131 g_object_set ( (GstElement*)
m_pAudioVolume,
"volume", volume, NULL);
4146 g_object_set ( (GstElement*)
m_pAudioSpeed,
"speed", pitch, NULL);
4153 unsigned long long delayl = delay;
4155 g_object_set ( (GstElement*)
m_pAudioEcho,
"delay", delayl, NULL);
4162 g_object_set ( (GstElement*)
m_pAudioEcho,
"intensity", intensity, NULL);
4169 g_object_set ( (GstElement*)
m_pAudioEcho,
"feedback", feedback, NULL);
4176 g_object_set ( (GstElement*)
m_pVideoBalance,
"brightness", brightness, NULL);
4184 g_object_set ( (GstElement*)
m_pVideoBalance,
"contrast", contrast, NULL);
4200 g_object_set ( (GstElement*)
m_pVideoBalance,
"saturation", saturation, NULL);
4209 #ifdef USING_SYNC_FRAMEBUFFER 4217 sample = gst_app_sink_pull_sample ( psink);
4222 GstCaps* bcaps = gst_sample_get_caps( sample );
4223 if (!bcaps)
return NULL;
4225 GstBuffer* Gbuffer = gst_sample_get_buffer (sample);
4226 int bsize = gst_buffer_get_size( Gbuffer );
4227 if (!( bsize>0 && (
int)bsize<=(h*w*4) ))
return NULL;
4231 gst_sample_unref(sample);
4236 if (pbucket==NULL)
return NULL;
4239 gst_buffer_map ( Gbuffer, &mapinfo, GST_MAP_READ);
4248 gst_buffer_unmap ( Gbuffer, &mapinfo );
4253 gst_sample_unref(sample);
moGstElement * m_pAudioConverter4
moAudioFormat m_AudioFormat
Formato de video.
void SetEchoIntensity(float intensity)
virtual moStreamState GetState()
Estado de la reproducción.
int GetSourceFlipH() const
Devuelve el valor de inversión de imagen horizontal.
moGstElement * m_pAudioConverter
bool BuildLiveVideoGraph(moText filename, moBucketsPool *pBucketsPool)
Grafo de reproducción de video en modo vivo, asyncronicamente reproducido en función del clock...
void SetAudioFormat(moGstCaps *caps, moGstBuffer *buffer=NULL)
moGstElement * m_pColorSpace
moVideoFormat GetVideoFormat()
Devuelve el formato de video.
moGstElement * m_pAudioVolume
volume: volume, 0 - 10 [1]
int GetSourceHeight() const
Devuelve el alto de la imagen de origen.
moGstPad * m_pRTSPDepaySink
void Error(moText p_text)
Anuncia y registra un error.
moGstElement * m_pRTSPSource
bool DestroyRetreivedBucket()
moGstElement * m_pAudioPanorama
stereo balance-1 a 1: panorama
void Stop()
Detener la reproducción del video.
moGstElement * m_pAudioSink
void SetPitch(float pitch)
virtual moCaptureDevices * LoadCaptureDevices()
Carga los dispositivos de video disponibles.
moGMainContext * m_pGMainContext
moGstElement * m_pVideoDeinterlace
moGstElement * m_pHTTPSource
void SetContrast(float contrast)
bool Unlock()
Libera el acceso al buffer interno.
static void on_rtsppadd_added(moGstElement *rtspsrc, moGstPad *pad, moGPointer u_data)
void Seek(MOuint frame, float rate=1.0)
Busca y posiciona.
bool Lock()
Paraliza el acceso al buffer interno.
int moGstStateChangeReturn
moGstElement * m_pAudioConverter3
bool CheckState(moGstStateChangeReturn state_change_result, bool waitforsync=false)
MOulong moGetTicksAbsolute(bool force_real_absolute)
Devuelve en milisegundos el valor del reloj de Moldeo.
void RetreivePads(moGstElement *FilterElement)
const moText & GetName() const
Devuelve el nombre del dispositivo.
MOulong GetSamplesLength()
bool BuildLiveGraph(moBucketsPool *pBucketsPool, moCaptureDevice p_capdev)
Grafo de captura de video.
bool BuildLiveStreamingGraph(moBucketsPool *pBucketsPool, moText p_location)
virtual ~moGsGraph()
Destructor.
bool BuildLiveSound(moText filename)
moGstElement * m_pMultiplexer
virtual MOulong GetPositionMS()
static moGBoolean cb_buffer_disconnected(moGPointer u_data)
moVideoFormat & GetVideoFormat()
Devuelve el formato de video del dispositivo.
clase de para manejar textos
moGstElement * m_pEncoder
static gboolean bus_call(GstBus *bus, GstMessage *msg, void *user_data)
static void cb_pad_added(moGstElement *decodebin2, moGstPad *pad, moGPointer u_data)
moGstElement * m_pAudioEcho
echo effect : audioecho > delay [ nanoseconds 10E-9, intensity, feedback ]
moGstElement * m_pCapsFilter
moGstElement * m_pAudioConverter2
virtual void SetEOS(bool iseos)
virtual bool CheckCaptureDevice(int i)
Chequea si el dispositivos de video disponible está aún disponible.
void SetLabelName(const moText &p_labelname)
Fija el nombre de código del dispositivo.
bool IsRunning()
Está corriendo.
void Present(bool p=true)
Fija la presencia del dispositivo.
void SetBrightness(float brightness)
static void cb_handoff(moGstElement *fakesrc, moGstBuffer *buffer, moGstPad *pad, moGPointer user_data)
moGstElement * m_pFakeSink
virtual bool FinishGraph()
Finalización del grafo.
static void cb_newpad(moGstElement *decodebin, moGstPad *pad, moGBoolean last, moGPointer u_data)
moGstElement * m_pDecoderBin
void Pause()
Pausa la reproducción del video.
int GetSourceWidth() const
Devuelve el ancho de la imagen de origen.
const moText & GetLabelName() const
Devuelve el nombre de código del dispositivo.
void SetEchoFeedback(float feedback)
bool BuildLiveWebcamGraph(moBucketsPool *pBucketsPool, moCaptureDevice &p_capdev)
Administrador de moBucket 's.
moGstElement * m_pJpegDecode
moGstElement * m_pFileSink
void WaitForFormatDefinition(MOulong timeout)
moGstElement * m_pVideoScale
moBucketsPool * m_pBucketsPool
virtual bool InitGraph()
Inicialización del grafo.
virtual MOulong GetPosition()
moGstElement * m_pFinalSource
moGstElement * m_pAudioAmplify
audioamplify: amplification: -inf +inf (dangerous)
virtual bool AddCaptureDevice(moCaptureDevice &p_capdev)
Agrega un dispositivo de video.
moGsFramework * m_pGsFramework
moGstElement * m_pAudioSpeed
speed:pitch speed 0.1 - 40.0
static moDebug * MODebug2
Clase de impresión de errores para depuración.
moGstElement * m_pVideoFlip
virtual moCaptureDevices * UpdateCaptureDevices()
Actualiza los dispositivos de video disponibles.
moGstElement * m_pVideoBalance
Definición de un dispositivo de video, generalmente uno de captura de video, o camara.
Espacio en memoria para compartir datos entre objetos.
void Push(moText p_text)
Apila el mensaje dentro de la pila de mensajes.
MOulong GetFramesLength()
La cantidad de frames, el largo del stream.
void SetVolume(float volume)
moGMainLoop * m_pGMainLoop
MObyte * GetFrameBuffer(MOlong *size)
int GetSourceBpp() const
Devuelve los bits por pixel de la imagen de origen.
moGstElement * m_pTypeFind
void SetName(const moText &p_name)
bool BuildLiveQTVideoGraph(moText filename, moBucketsPool *pBucketsPool)
moGstElement * m_pMultipartDemux
void Play()
Reproducir el video.
bool BuildRecordGraph(moText filename, moBucketsPool *pBucketsPool)
Grafo de grabación.
moGstElement * m_pGstPipeline
MOlong GetSize()
Devuelve el tamaño en bytes asignado por el buffer.
LIBMOLDEO_API moText0 IntToStr(int a)
MOubyte * GetBuffer()
Devuelve el puntero al buffer de datos.
void SetSaturation(float saturation)
moGstElement * m_pColorSpaceInterlace
moBucket * RetreiveBucket()
moGstElement * m_pFakeSource
int GetSourceFlipV() const
Devuelve el valor de inversión de imagen vertical.
void SetBuffer(MOlong size, MOubyte *pbuf)
Crea un espacio de memoria y asigna los valores desde un puntero a otro espacio de memoria...
void CopyVideoFrame(void *bufferdst, int size)
void SetEchoDelay(float delay)
void SetVideoFormat(moGstCaps *caps, moGstBuffer *buffer=NULL)
MOboolean m_bInitialized
Valor de inicialización.
int Find(const moText0 &target)
divide el texto separado por el caracter especificado
bool SetCaptureDevice(moText deviceport, MOint idevice=0)
moVideoFormat m_VideoFormat
virtual MOulong GetDuration()
La duración total del stream en nanosegundos.
void Message(moText p_text)
Anuncia un mensaje al usuario además de guardarlo en el log de texto.
long cb_have_data_handler_id
bool BuildLiveDVGraph(moBucketsPool *pBucketsPool, moCaptureDevice &p_capdev)
void SetBalance(float balance)
Fija el balance entre canal izquierdo y derecho en caso de haberlos.
static moGBoolean cb_have_data(moGstPad *pad, moGstBuffer *buffer, moGPointer u_data)
bool AddBucket(moBucket *pBucket)
long signal_rtsppad_added_id
moGstElement * m_pFileSource
const moText & GetPath() const
Devuelve el camino al dispositivo.
moGstElement * m_pIdentity
moText m_Path
Descripción del dispositivo.
moGstElement * m_pRTSPDepay
void BuildBucket(MOlong size, int setvalue)
Habilita el buffer en memoria con el valor prefijado.