Hi Team,
I encountered an issue while converting H264-encoded buffers to an MP4 file. The goal is to skip the first 20 seconds of buffers, store the buffers from the 21-second mark up to 2 minutes, and then convert the stored buffers to an MP4 file using an appsrc element. However, the resulting video file has a problem: during playback it starts from the 21-second mark, whereas I expected it to start from 0 seconds, and the total duration is not displayed correctly.
Here is the v4l2src capture pipeline:
gst-launch-1.0 v4l2src device=/dev/video0 ! videoconvert ! queue ! x264enc tune=zerolatency ! identity ! fakesink
We use a buffer probe on the identity element's source pad to capture the encoded frames.
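For context, the capture pipeline is built in code; a minimal sketch of how the identity element could be looked up before attaching the probe (the element name used here is illustrative, not my exact code):

// Sketch: build the capture pipeline and fetch the identity element by name
// so the probe can be attached to its src pad.
GstElement *pipeline = gst_parse_launch(
    "v4l2src device=/dev/video0 ! videoconvert ! queue ! "
    "x264enc tune=zerolatency ! identity name=probe-point ! fakesink", NULL);
GstElement *identity = gst_bin_get_by_name(GST_BIN(pipeline), "probe-point");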
Callback Code:
// Get the source pad of the identity element
identity_src_pad = gst_element_get_static_pad(identity, "src");
// Add probe to the identity source pad
gst_pad_add_probe(identity_src_pad, GST_PAD_PROBE_TYPE_BUFFER, pad_probe_cb, pipeline, NULL);
static GstPadProbeReturn pad_probe_cb(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    GstBuffer* buff;
    GstBuffer* new_buffer;

    // Get the H264-encoded buffer flowing through the identity src pad
    buff = gst_pad_probe_info_get_buffer(info);

    // Deep-copy it so it can be kept after the probe returns
    new_buffer = gst_buffer_copy_deep(buff);
    g_print("timestamp: %" GST_TIME_FORMAT "\n",
            GST_TIME_ARGS(GST_BUFFER_PTS(new_buffer)));

    // Store the copy in the global buffer list
    gst_buffer_list_add(buflist, new_buffer);

    return GST_PAD_PROBE_OK;
}
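For completeness, buflist is a global GstBufferList; it is assumed to be created once before the capture pipeline starts, roughly like this:

// Global list the probe appends to (assumed; created once before the
// capture pipeline is set to PLAYING).
static GstBufferList *buflist = NULL;

// somewhere during initialization:
buflist = gst_buffer_list_new();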
File Conversion Code (H264 encoded frames to mp4 file):
(Note: the first 20 seconds of buffers, i.e. 200 buffers at 10 FPS, are removed before conversion.)
void fileconversion()
{
    gst_buffer_list_remove(buflist, 0, 200); // Remove the first 200 buffers (20 seconds at 10 FPS) from the front of the list
convertToMP4(buflist);
}
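My suspicion is that the remaining buffers still carry their original PTS/DTS values (starting around the 20-second mark), and that they may need to be re-based to zero before being pushed into appsrc. Something like the sketch below, run after gst_buffer_list_remove() and before convertToMP4(), is what I have in mind, but I have not verified that this is the right approach (the helper name is mine):

// Unverified sketch: shift every stored buffer's PTS/DTS so that the first
// kept buffer starts at 0 before the list is pushed to appsrc.
static void rebase_timestamps(GstBufferList *list)
{
    GstClockTime offset = GST_BUFFER_PTS(gst_buffer_list_get(list, 0));
    guint n = gst_buffer_list_length(list);
    for (guint i = 0; i < n; i++) {
        GstBuffer *buf = gst_buffer_list_get_writable(list, i);
        if (GST_BUFFER_PTS_IS_VALID(buf))
            GST_BUFFER_PTS(buf) -= offset;
        if (GST_BUFFER_DTS_IS_VALID(buf))
            GST_BUFFER_DTS(buf) -= offset;
    }
}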
void convertToMP4(GstBufferList *buflist)
{
    // Conversion pipeline: appsrc ! h264parse ! mp4mux ! filesink
    GstElement *pipeline, *appsrc, *parse, *muxer, *file_sink;
    GstBufferList *copy_buflist;
    GstFlowReturn retval;

    pipeline  = gst_pipeline_new("MP4-pipeline");
    appsrc    = gst_element_factory_make("appsrc", "source");
    parse     = gst_element_factory_make("h264parse", "source-h264parse");
    muxer     = gst_element_factory_make("mp4mux", "mp4-muxer");
    file_sink = gst_element_factory_make("filesink", "filesink");

    g_object_set(G_OBJECT(appsrc),
                 "stream-type", 0,               // GST_APP_STREAM_TYPE_STREAM
                 "format", GST_FORMAT_TIME, NULL);

    // Set the resolution, framerate and stream-format caps on appsrc
    GstCaps *caps = gst_caps_new_simple("video/x-h264",
                                        "width", G_TYPE_INT, 1280,
                                        "height", G_TYPE_INT, 720,
                                        "framerate", GST_TYPE_FRACTION, 10, 1,
                                        "stream-format", G_TYPE_STRING, "byte-stream",
                                        NULL);
    gst_app_src_set_caps(GST_APP_SRC(appsrc), caps);
    gst_caps_unref(caps);

    g_object_set(G_OBJECT(muxer), "fragment-duration", 2000, NULL);
    g_object_set(file_sink, "location", "camera.mp4", NULL);

    gst_bin_add_many(GST_BIN(pipeline), appsrc, parse, muxer, file_sink, NULL);
    if (gst_element_link_many(appsrc, parse, muxer, file_sink, NULL) != TRUE)
    {
        g_printerr("Elements could not be linked in the pipeline.\n");
        gst_object_unref(pipeline);
        exit(1);
    }

    // Deep-copy the stored buffers and push the whole list into appsrc
    copy_buflist = gst_buffer_list_copy_deep(buflist);
    g_print("Buffer list is filled, length is %u\n", gst_buffer_list_length(copy_buflist));

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    retval = gst_app_src_push_buffer_list(GST_APP_SRC(appsrc), copy_buflist);
    g_print("push_buffer_list returned %d\n", retval);

    g_print("Sending EOS\n");
    g_signal_emit_by_name(appsrc, "end-of-stream", &retval);
}
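The snippet above ends right after signalling end-of-stream. As far as I understand, mp4mux can only finalize the file once the EOS has reached the sink, so a wait-and-teardown step along these lines is assumed to follow (simplified sketch; this may also be related to the wrong duration, but I am not sure):

// Sketch: wait until EOS (or an error) reaches the bus so the muxer can
// finalize the file, then shut the conversion pipeline down.
GstBus *bus = gst_element_get_bus(pipeline);
GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
        (GstMessageType)(GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
if (msg)
    gst_message_unref(msg);
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);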
Any help or pointers on what I am missing here would be much appreciated.
Best Regards,
Sulthan