Hello, I am fairly new to GStreamer and need some direction. I will pay whoever can help me, as I am sick of dealing with this.
I have to take in multiple raw audio and video streams that contain no timestamps, then timestamp them and preferably encode them into something more useful.
Currently I am setting up a separate pipeline for each stream, as they come from participants in a meeting.
My main goal is to maintain perfect sync for the full meeting, which could last 7 hours, while participants' streams stop and start every time they mute and unmute.
I am currently locking every stream's clock to the mixed audio channel that runs for the entire meeting, so they all share the same clock, and that part seems to be working correctly. My main issue is that when I use audiorate to smooth out the jitter and keep everything locked, I get massively choppy/stuttery and dropped audio.
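For context, this is roughly what I mean by locking the clocks (a simplified sketch, not my exact code; masterPipeline and participantPipeline are placeholder names):

// Make every per-participant pipeline use the same clock as the
// master/mixed-audio pipeline so their timestamps are comparable.
GstClock *sharedClock = gst_system_clock_obtain();
gst_pipeline_use_clock(GST_PIPELINE(masterPipeline), sharedClock);
gst_pipeline_use_clock(GST_PIPELINE(participantPipeline), sharedClock);
gst_object_unref(sharedClock);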
I am splitting the output into 5-second chunks to be uploaded to a server.
I have been struggling with this for weeks: no matter what I try, the audio is still stuttery, and if I remove audiorate the streams do not stay in sync. I will admit I am new and could be doing this all wrong.
Adjusting audiorate's tolerance does not help, and I have plenty of CPU headroom, so it is not load that is causing the issues.
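To be concrete about "adjusting the tolerance": audiorate has a tolerance property in nanoseconds, and gaps/overlaps smaller than it are fixed by re-stamping timestamps instead of inserting or dropping samples. This sketch assumes the audiorate element is given name=rate in the launch string:

GstElement *rate = gst_bin_get_by_name(GST_BIN(m_pipeline), "rate");
if (rate) {
    // anything up to 100 ms of drift gets absorbed by re-timestamping
    g_object_set(rate, "tolerance", (guint64)(100 * GST_MSECOND), NULL);
    gst_object_unref(rate);
}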
I will pay someone $100+ to help me solve this issue, either through some pair programming or by providing the right solution.
Here is the relevant code. Feel free to shame me if I am doing this completely wrong. I am also new to C++, so sorry if it is weird.
Pipeline descriptions:
if (m_type == "video") {
    Log::info("Building pipeline for video");
    pipelineDesc =
        "appsrc name=video_src is-live=true format=time do-timestamp=true ! "
        "queue max-size-buffers=60 max-size-time=0 max-size-bytes=0 leaky=downstream ! "
        "videoconvert ! videorate ! video/x-raw,framerate=30/1 ! "
        "identity sync=true ! "
        "x264enc tune=zerolatency bitrate=1024 speed-preset=ultrafast key-int-max=30 ! "
        "h264parse ! video/x-h264,stream-format=avc,alignment=au ! "
        "queue ! mux.video " // connect to the splitmuxsink video pad
        "splitmuxsink name=mux max-size-time=" + std::to_string(5 * GST_SECOND) +
        " muxer=mp4mux location=" + locationPattern;
} else if (m_type == "audio" || m_type == "master" || m_type == "shared_audio") {
    Log::info("Building pipeline for audio");
    pipelineDesc =
        "appsrc name=audio_src is-live=true format=time do-timestamp=true ! "
        "queue max-size-buffers=60 max-size-time=0 max-size-bytes=0 leaky=downstream ! "
        "audioconvert ! audioresample quality=10 ! audiorate ! "
        "audio/x-raw,rate=" + std::to_string(m_sampleRate) + ",channels=" + std::to_string(m_channels) + " ! "
        "identity sync=true ! "
        "voaacenc bitrate=128000 ! aacparse ! audio/mpeg,mpegversion=4,stream-format=raw ! "
        "queue ! mux.audio_0 " // connect to the splitmuxsink audio_0 pad
        "splitmuxsink name=mux max-size-time=" + std::to_string(5 * GST_SECOND) +
        " muxer=mp4mux location=" + locationPattern;
} else {
    Log::error("Invalid recorder type specified: " + m_type);
    return false;
}
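For completeness, the description string is parsed into the pipeline with gst_parse_launch, roughly like this (simplified; the real code has more error handling):

GError *err = nullptr;
m_pipeline = gst_parse_launch(pipelineDesc.c_str(), &err);
if (!m_pipeline) {
    Log::error(std::string("Failed to parse pipeline: ") + (err ? err->message : "unknown error"));
    g_clear_error(&err);
    return false;
}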
Caps and setup:
// Get the splitmuxsink element so we can hook up callbacks
m_splitmuxsink = gst_bin_get_by_name(GST_BIN(m_pipeline), "mux");
if (m_splitmuxsink) {
    // format-location names each new fragment; fragment-closed tells us when a file is finished
    g_signal_connect(m_splitmuxsink, "format-location", G_CALLBACK(GStreamerRecorder::onFormatLocation), this);
    g_signal_connect(m_splitmuxsink, "fragment-closed", G_CALLBACK(GStreamerRecorder::onFragmentClosed), this);
}
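The format-location callback hands back a newly allocated path for each fragment; mine looks roughly like this (sketch; m_outputDir is a placeholder for however the path is actually built):

static gchar *onFormatLocation(GstElement *splitmux, guint fragmentId, gpointer userData) {
    auto *self = static_cast<GStreamerRecorder *>(userData);
    // splitmuxsink frees the returned string, so hand it a fresh allocation
    return g_strdup_printf("%s/chunk_%05u.mp4", self->m_outputDir.c_str(), fragmentId);
}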
// Get the appsrc elements
if (m_type == "video") {
    m_videoAppSrc = gst_bin_get_by_name(GST_BIN(m_pipeline), "video_src");
    if (!m_videoAppSrc) {
        Log::error("Could not get video appsrc element");
        return false;
    }
    Log::info("Video appsrc element retrieved");

    // Configure video appsrc
    GstCaps *videoCaps = gst_caps_new_simple("video/x-raw",
                                             "format", G_TYPE_STRING, "I420",
                                             "width", G_TYPE_INT, m_width,
                                             "height", G_TYPE_INT, m_height,
                                             "framerate", GST_TYPE_FRACTION, 30, 1,
                                             "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
                                             "interlace-mode", G_TYPE_STRING, "progressive",
                                             NULL);
    g_object_set(m_videoAppSrc,
                 "caps", videoCaps,
                 "format", GST_FORMAT_TIME,
                 "is-live", TRUE,
                 "do-timestamp", TRUE, // let GStreamer timestamp buffers on arrival
                 "stream-type", 0,     // GST_APP_STREAM_TYPE_STREAM
                 NULL);
    gst_caps_unref(videoCaps);
    Log::info("Video appsrc configured with caps");
} else {
    m_audioAppSrc = gst_bin_get_by_name(GST_BIN(m_pipeline), "audio_src");
    if (!m_audioAppSrc) {
        Log::error("Could not get audio appsrc element");
        return false;
    }
    Log::info("Audio appsrc element retrieved");

    // Configure audio appsrc
    GstCaps *audioCaps = gst_caps_new_simple("audio/x-raw",
                                             "format", G_TYPE_STRING, "S16LE",
                                             "rate", G_TYPE_INT, m_sampleRate,
                                             "channels", G_TYPE_INT, m_channels,
                                             "layout", G_TYPE_STRING, "interleaved",
                                             NULL);
    g_object_set(m_audioAppSrc,
                 "caps", audioCaps,
                 "format", GST_FORMAT_TIME,
                 "is-live", TRUE,
                 "do-timestamp", TRUE, // let GStreamer timestamp buffers on arrival
                 "stream-type", 0,     // GST_APP_STREAM_TYPE_STREAM
                 NULL);
    gst_caps_unref(audioCaps);
    Log::info("Audio appsrc configured with caps");
}
// Set the pipeline to PLAYING
Log::info("Setting pipeline to PLAYING state");
GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
    Log::error("Failed to set pipeline to PLAYING");
    gst_object_unref(m_pipeline);
    m_pipeline = nullptr;
    return false;
}
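And for completeness, a simplified sketch of how the raw audio gets pushed into the appsrc. pushAudio and m_samplesPushed are placeholder names, and the manual PTS stamping shown here is one alternative I am considering to do-timestamp=true (I believe it only works with do-timestamp disabled, so appsrc does not overwrite the stamps):

#include <gst/app/gstappsrc.h>

// Wrap raw S16LE samples in a GstBuffer and stamp PTS/duration from the running
// sample count, so timing comes from the data itself rather than arrival time.
void GStreamerRecorder::pushAudio(const guint8 *data, gsize size) {
    GstBuffer *buf = gst_buffer_new_allocate(NULL, size, NULL);
    gst_buffer_fill(buf, 0, data, size);

    guint64 numSamples = size / (m_channels * sizeof(gint16)); // bytes -> samples
    GST_BUFFER_PTS(buf) = gst_util_uint64_scale(m_samplesPushed, GST_SECOND, m_sampleRate);
    GST_BUFFER_DURATION(buf) = gst_util_uint64_scale(numSamples, GST_SECOND, m_sampleRate);
    m_samplesPushed += numSamples;

    // push_buffer takes ownership of buf
    if (gst_app_src_push_buffer(GST_APP_SRC(m_audioAppSrc), buf) != GST_FLOW_OK) {
        Log::error("push_buffer failed");
    }
}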