Thanks. Eventually I "cheated" and used gst_parse_launch(), which does work,
but I was not able to get the same result with the hand-built pipeline in this code:
(Note: the Qt includes were blanked by the mailer's HTML handling — the angle-bracketed header names below have been restored.)
#include <QApplication>
#include <QSettings>
#include <QTimer>
#include <QWidget>

#include <gst/gst.h>
#include <gst/video/videooverlay.h>
int main (int argc, char *argv[])
{
// Initialize Qt
QApplication app (argc, argv);
// Initialize GStreamer
gst_init (&argc, &argv);
// Create the pipeline
GstElement *pipeline = gst_pipeline_new ("pipeline");
// Create UDP sources for video and audio
GstElement *udpsrc_video = gst_element_factory_make ("udpsrc", NULL);
g_object_set (udpsrc_video, "multicast-group", "224.1.1.1", NULL);
g_object_set (udpsrc_video, "auto-multicast", TRUE, NULL);
g_object_set (udpsrc_video, "port", 5002, NULL);
g_object_set (udpsrc_video, "close-socket", FALSE, NULL);
GstElement *udpsrc_audio = gst_element_factory_make ("udpsrc", NULL);
g_object_set (udpsrc_audio, "multicast-group", "224.1.1.1", NULL);
g_object_set (udpsrc_audio, "auto-multicast", TRUE, NULL);
g_object_set (udpsrc_audio, "port", 5000, NULL);
g_object_set (udpsrc_audio, "caps", gst_caps_from_string ("application/x-rtp, media=(string)audio,"
" clock-rate=(int)8000, encoding-name=(string)PCMU, payload=(int)0, ssrc=(guint)1350777638,"
" clock-base=(guint)2942119800, seqnum-base=(guint)47141"), NULL);
// Create video processing elements
GstElement *video_rtpjitterbuffer = gst_element_factory_make ("rtpjitterbuffer", NULL);
g_object_set (video_rtpjitterbuffer, "drop-on-latency", TRUE, NULL);
// g_object_set(video_rtpjitterbuffer, "caps", gst_caps_from_string("application/x-rtp, encoding-name=(string)H264, media=(string)video, clock-rate=(int)90000"), NULL);
GstElement *video_rtph264depay = gst_element_factory_make ("rtph264depay", NULL);
GstElement *video_h264parse = gst_element_factory_make ("h264parse", NULL);
GstElement *video_avdec_h264 = gst_element_factory_make ("avdec_h264", NULL);
GstElement *videosink = gst_element_factory_make ("glimagesink", NULL);
// Create audio processing elements
GstElement *audio_queue = gst_element_factory_make ("queue", NULL);
GstElement *audio_rtppcmudepay = gst_element_factory_make ("rtppcmudepay", NULL);
GstElement *audio_mulawdec = gst_element_factory_make ("mulawdec", NULL);
GstElement *audio_audioconvert = gst_element_factory_make ("audioconvert", NULL);
GstElement *audio_audioresample = gst_element_factory_make ("audioresample", NULL);
GstElement *audio_autoaudiosink = gst_element_factory_make ("autoaudiosink", NULL);
// Add elements to the pipeline
gst_bin_add_many (GST_BIN (pipeline),
udpsrc_video, video_rtpjitterbuffer, video_rtph264depay, video_h264parse, video_avdec_h264,
videosink, udpsrc_audio, audio_queue, audio_rtppcmudepay, audio_mulawdec, audio_audioconvert,
audio_audioresample, audio_autoaudiosink, NULL);
// Link elements
gst_element_link_many (udpsrc_video, video_rtpjitterbuffer, video_rtph264depay, video_h264parse, video_avdec_h264,
videosink, NULL);
gst_element_link_many (udpsrc_audio, audio_queue, audio_rtppcmudepay, audio_mulawdec, audio_audioconvert,
audio_audioresample, audio_autoaudiosink, NULL);
QWidget window;
QSettings settings;
//restoreGeometry (settings.value ("geometry").toByteArray ());
window.resize (1200, 720);
window.show ();
WId xwinid = window.winId ();
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (videosink), xwinid);
// run the pipeline
GstStateChangeReturn sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (sret == GST_STATE_CHANGE_FAILURE) {
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
// Exit application
QTimer::singleShot (0, QApplication::activeWindow (), SLOT (quit ()));
}
//GstCaps* caps = gst_caps_new_simple("application/x-rtp", "clock-rate", G_TYPE_INT, 90000, NULL);
//gst_pad_set_caps(filter->srcpad, caps);
//gst_caps_unref(caps);
int ret = app.exec ();
//settings.setValue ("geometry", saveGeometry ());
window.hide ();
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
// Run the Qt application
return app.exec ();
}
An extra question: I've got an rtpjitterbuffer on the video branch; should I add one on the audio branch too?
Thanks
James