Hello Everyone,
I’m trying to set the frame rate while the device is in streaming mode. Essentially, the application receives input from the user and sets the frame rate in the pipeline.
Regards.
Hello @parthjenex,
We would suggest you take a look at the videorate element.
You should be able to dynamically change the frame rate with it.
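For example, a minimal sketch (the helper name is illustrative, and the element pointer is assumed to come from your pipeline): videorate exposes a max-rate property that caps the output rate by dropping buffers, and it should be adjustable while the pipeline is running, without caps renegotiation.

#include <gst/gst.h>

/* Illustrative helper (cap_frame_rate is a hypothetical name): limit the
 * output rate of an existing videorate element. "max-rate" makes videorate
 * drop buffers so the stream never exceeds the given frames per second. */
static void cap_frame_rate(GstElement *videorate, gint fps)
{
    g_object_set(G_OBJECT(videorate), "max-rate", fps, NULL);
}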
Please let us know if you have any other questions or comments.
Best regards,
Andres Campos
Hi,
I’ve integrated the videorate element in my GStreamer pipeline to modify the frame rate dynamically. However, despite explicitly setting a new frame rate, the pipeline continues to run at the original frame rate; the new setting is not being applied as expected. I’m trying to determine where exactly this configuration is being overridden or ignored.
void start_rtsp_server() {
    gst_init(NULL, NULL);
    main_loop = g_main_loop_new(NULL, FALSE);
    g_mutex_init(&pipeline_mutex);

    // Create source pipeline
    source_pipeline = gst_pipeline_new("source-pipeline");
    v4l2src = gst_element_factory_make("v4l2src", "camera_source");
    videorate = gst_element_factory_make("videorate", "videorate");
    GstElement *videoscale = gst_element_factory_make("videoscale", "videoscale");
    capsfilter = gst_element_factory_make("capsfilter", "capsfilter");
    inter_sink = gst_element_factory_make("intervideosink", "sink");
    if (!source_pipeline || !v4l2src || !videorate ||
        !videoscale || !capsfilter || !inter_sink) {
        LOGError("Failed to create source pipeline elements");
        return;
    }

    // Build and link source pipeline
    gst_bin_add_many(GST_BIN(source_pipeline),
                     v4l2src, videorate, videoscale, capsfilter, inter_sink, NULL);
    if (!gst_element_link_many(
            v4l2src, videorate, videoscale, capsfilter, inter_sink, NULL)) {
        LOGError("Source pipeline linking failed");
        return;
    }

    // Set initial caps (DEFAULT_FRAMERATE is applied here, after videorate)
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
        "width", G_TYPE_INT, atoi(VID_RES_WIDTH),
        "height", G_TYPE_INT, atoi(VID_RES_HEIGHT),
        "framerate", GST_TYPE_FRACTION, atoi(DEFAULT_FRAMERATE), 1,
        NULL);
    g_object_set(G_OBJECT(capsfilter), "caps", caps, NULL);
    gst_caps_unref(caps);

    // Start source pipeline
    GstStateChangeReturn ret = gst_element_set_state(source_pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        LOGError("Failed to start source pipeline");
        return;
    }

    // Create RTSP server
    rtsp_server = gst_rtsp_server_new();
    gst_rtsp_server_set_service(rtsp_server, RTSP_PORT);
    GstRTSPMountPoints *mounts = gst_rtsp_server_get_mount_points(rtsp_server);
    factory = gst_rtsp_media_factory_new();

    // Launch pipeline without a fixed framerate in the caps
    gst_rtsp_media_factory_set_launch(factory,
        "( "
        "intervideosrc ! video/x-raw,format=NV12,width=" VID_RES_WIDTH ",height=" VID_RES_HEIGHT " ! "
        "videoconvert name=videoconvert ! queue ! "
        "mpph265enc name=mpph265enc0 ! h265parse ! tee name=video_tee "
        "video_tee. ! queue ! rtph265pay pt=96 name=pay0 config-interval=1 "
        "alsasrc device=hw:0 ! audio/x-raw,rate=44100,channels=2 ! audioconvert ! audioresample ! "
        "voaacenc bitrate=" AUDIO_BITRATE " ! aacparse ! tee name=audio_tee "
        "audio_tee. ! queue ! rtpmp4gpay pt=97 name=pay1 "
        "video_tee. ! queue leaky=2 ! valve name=recordvalve drop=true ! identity single-segment=true ! queue ! rec_sink. "
        "audio_tee. ! queue leaky=2 ! identity single-segment=true ! queue ! rec_sink.audio_0 "
        "splitmuxsink name=rec_sink muxer=matroskamux location=" RECORDING_FILE " max-size-time=30000000000 async-finalize=false "
        ")");
    gst_rtsp_media_factory_set_shared(factory, TRUE);
    g_signal_connect(factory, "media-configure", (GCallback)media_configure, NULL);
    gst_rtsp_mount_points_add_factory(mounts, RTSP_MOUNT_POINT, factory);
    g_object_unref(mounts);
    gst_rtsp_server_attach(rtsp_server, NULL);
    g_signal_connect(rtsp_server, "client-connected", G_CALLBACK(on_client_connected), NULL);

    // Start local capture client
    pthread_t capture_thread;
    pthread_create(&capture_thread, NULL, capture_client_thread, NULL);
    pthread_detach(capture_thread);

    // Start main loop
    g_main_loop_run(main_loop);
}
void update_fps(gint new_fps) {
    g_mutex_lock(&pipeline_mutex);
    if (!capsfilter) {
        g_mutex_unlock(&pipeline_mutex);
        return;
    }

    // Update the source pipeline's capsfilter (placed after videorate)
    GstCaps *new_caps = gst_caps_new_simple("video/x-raw",
        "width", G_TYPE_INT, atoi(VID_RES_WIDTH),
        "height", G_TYPE_INT, atoi(VID_RES_HEIGHT),
        "framerate", GST_TYPE_FRACTION, new_fps, 1,
        NULL);
    g_object_set(G_OBJECT(capsfilter), "caps", new_caps, NULL);
    gst_caps_unref(new_caps);

    // Note: v4l2src exposes no "framerate" property; its capture rate is
    // negotiated through the downstream caps, so there is nothing to set
    // on the source element directly.

    // Note: videoconvert exposes no "caps" property either, so caps cannot
    // be pushed into the RTSP media by setting them on the element named
    // "videoconvert"; that would require a named capsfilter in the launch
    // string.

    LOGInfo("[INFO] Updated FPS to: %d\n", new_fps);
    g_mutex_unlock(&pipeline_mutex);
}
There is a capsfilter on which the DEFAULT_FRAMERATE is being set, and this capsfilter comes after the videorate element in the pipeline.
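A quick way to check where the new rate is lost (a debugging sketch; log_negotiated_fps is a hypothetical helper, not part of the code above) is to read back the caps actually negotiated on the capsfilter's source pad after update_fps() runs; if they still show the old framerate, the renegotiation never took effect.

#include <gst/gst.h>

/* Hypothetical debug helper: print the caps currently negotiated on the
 * capsfilter's src pad so the applied framerate can be compared with the
 * one requested in update_fps(). */
static void log_negotiated_fps(GstElement *capsfilter)
{
    GstPad *pad = gst_element_get_static_pad(capsfilter, "src");
    GstCaps *caps = gst_pad_get_current_caps(pad);

    if (caps) {
        gchar *str = gst_caps_to_string(caps);
        g_print("negotiated caps: %s\n", str);
        g_free(str);
        gst_caps_unref(caps);
    } else {
        g_print("no caps negotiated yet on capsfilter src pad\n");
    }
    gst_object_unref(pad);
}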