How to tile multiple videos using GStreamer in C

I’m trying to display multiple videos tiled in one window. When I run the code below, only one video is shown, without any tiling.

#include <gst/gst.h>

static void pad_added_handler_1(GstElement *src, GstPad *new_pad, gpointer user_data) {
    GstElement *decoder = GST_ELEMENT(user_data);
    GstPad *sink_pad = gst_element_get_static_pad(decoder, "sink");
    GstPadLinkReturn ret;
    
    /* Attempt to link the newly created pad */
    ret = gst_pad_link(new_pad, sink_pad);
    if (ret != GST_PAD_LINK_OK) {
        g_printerr("Failed to link pads\n");
    } else {
        g_print("Pads linked\n");
    }
    
    gst_object_unref(sink_pad);
}

static void pad_added_handler_2(GstElement *src, GstPad *new_pad, gpointer user_data) {
    GstElement *decoder = GST_ELEMENT(user_data);
    GstPad *sink_pad = gst_element_get_static_pad(decoder, "sink");
    GstPadLinkReturn ret;
    
    /* Attempt to link the newly created pad */
    ret = gst_pad_link(new_pad, sink_pad);
    if (ret != GST_PAD_LINK_OK) {
        g_printerr("Failed to link pads\n");
    } else {
        g_print("Pads linked\n");
    }
    
    gst_object_unref(sink_pad);
}

int main(int argc, char *argv[]) {
    GstElement *pipeline, *source1, *source2, *decodebin1, *decodebin2, *videoconvert1, *videoconvert2, *videobox1, *videobox2, *videomixer, *videoconvert_final, *videosink;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init(&argc, &argv);

    /* Create the elements */
    pipeline = gst_pipeline_new("video-player");
    source1 = gst_element_factory_make("filesrc", "file-source1");
    source2 = gst_element_factory_make("filesrc", "file-source2");
    decodebin1 = gst_element_factory_make("decodebin", "decoder1");
    decodebin2 = gst_element_factory_make("decodebin", "decoder2");
    videoconvert1 = gst_element_factory_make("videoconvert", "videoconvert1");
    videoconvert2 = gst_element_factory_make("videoconvert", "videoconvert2");
    videobox1 = gst_element_factory_make("videobox", "videobox1");
    videobox2 = gst_element_factory_make("videobox", "videobox2");
    videomixer = gst_element_factory_make("videomixer", "videomixer");
    videoconvert_final = gst_element_factory_make("videoconvert", "videoconvert_final");
    videosink = gst_element_factory_make("autovideosink", "video-sink");

    /* Check if elements are created successfully */
    if (!pipeline || !source1 || !source2 || !decodebin1 || !decodebin2 || !videoconvert1 || !videoconvert2 || !videobox1 || !videobox2 || !videomixer || !videoconvert_final || !videosink) {
        g_printerr("One or more elements could not be created. Exiting.\n");
        return -1;
    }

    /* Set the properties of filesrc elements */
    g_object_set(G_OBJECT(source1), "location", "/video1.mp4", NULL);
    g_object_set(G_OBJECT(source2), "location", "/video2.mp4", NULL);
    g_object_set(G_OBJECT(videobox1), "border-alpha", 0, "top", 0, "left", 0, NULL);
    g_object_set(G_OBJECT(videobox2), "border-alpha", 0, "top", 40, "left", 640, NULL);

    gst_bin_add_many(GST_BIN(pipeline), source1, source2, decodebin1, decodebin2, videoconvert1, videoconvert2, videobox1, videobox2, videomixer, videoconvert_final, videosink, NULL);

    /* Link the elements */
    if (!gst_element_link_many(source1, decodebin1, NULL)) {
        g_printerr("Failed to link source1 and decodebin1. Exiting.\n");
        return -1;
    }

    if (!gst_element_link_many(source2, decodebin2, NULL)) {
        g_printerr("Failed to link source2 and decodebin2. Exiting.\n");
        return -1;
    }

    if (!gst_element_link_many(videoconvert1, videobox1, NULL)) {
        g_printerr("Failed to link videoconvert1 and videobox1. Exiting.\n");
        return -1;
    }

    if (!gst_element_link_many(videoconvert2, videobox2, NULL)) {
        g_printerr("Failed to link videoconvert2 and videobox2. Exiting.\n");
        return -1;
    }

    if (!gst_element_link_many(videobox1, videomixer, NULL)) {
        g_printerr("Failed to link videobox1 and videomixer. Exiting.\n");
        return -1;
    }

    if (!gst_element_link_many(videobox2, videomixer, NULL)) {
        g_printerr("Failed to link videobox2 and videomixer. Exiting.\n");
        return -1;
    }

    if (!gst_element_link_many(videomixer, videoconvert_final, videosink, NULL)) {
        g_printerr("Failed to link videomixer and videoconvert_final. Exiting.\n");
        return -1;
    }

    g_signal_connect(decodebin1, "pad-added", G_CALLBACK(pad_added_handler_1), videoconvert1);
    g_signal_connect(decodebin2, "pad-added", G_CALLBACK(pad_added_handler_2), videoconvert2);

    /* Set the pipeline to "playing" state */
    ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state. Exiting.\n");
        return -1;
    }

    /* Wait until error or EOS (End of Stream) */
    bus = gst_element_get_bus(pipeline);
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Free resources */
    if (msg != NULL)
        gst_message_unref(msg);

    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);

    return 0;
}

videomixer is obsolete; use compositor instead.
Here is a sample that may help for your case (the commented-out pipeline string shows the same layout reading two files through uridecodebin):

#include <gst/gst.h>

int main(int argc, char *argv[]) {
	gst_init (&argc, &argv);

	const char* pipelineStr = "videotestsrc pattern=snow is-live=1 ! video/x-raw,format=RGBA,width=640,height=480,framerate=30/1 ! queue ! comp.sink_0 "
		                      "videotestsrc pattern=ball is-live=1 ! video/x-raw,format=RGBA,width=640,height=480,framerate=30/1 ! queue ! comp.sink_1 "
		                      "compositor name=comp "
		                      " sink_0::xpos=0   sink_0::ypos=0 sink_0::width=640 sink_0::height=480 sink_0::zorder=0 sink_0::alpha=1 "
		                      " sink_1::xpos=640 sink_1::ypos=0 sink_1::width=640 sink_1::height=480 sink_1::zorder=0 sink_1::alpha=1 "
		                      "! video/x-raw,format=RGBA,width=1280,height=480 ! videoconvert ! x264enc key-int-max=30 speed-preset=veryfast tune=zerolatency qinsert-vui=1 ! queue ! h264parse ! rtph264pay ! udpsink auto-multicast=1 host=224.1.1.1 port=5002 "; 

	/* const char* pipelineStr = "uridecodebin uri=file:///home/user/Videos/test1.mp4 ! videoconvert ! videoscale ! video/x-raw,format=RGBA,width=640,height=480,framerate=30/1 ! queue ! comp.sink_0"
		                  "uridecodebin uri=file:///home/user/Videos/test2.mp4 ! videoconvert ! videoscale ! video/x-raw,format=RGBA,width=640,height=480,framerate=30/1 ! queue ! comp.sink_1"
		                  "compositor name=comp "
		                  " sink_0::xpos=0   sink_0::ypos=0 sink_0::width=640 sink_0::height=480 sink_0::zorder=0 sink_0::alpha=1 "
		                  " sink_1::xpos=640 sink_1::ypos=0 sink_1::width=640 sink_1::height=480 sink_1::zorder=0 sink_1::alpha=1 "
		                  "! video/x-raw,format=RGBA,width=1280,height=480 ! videoconvert ! x264enc key-int-max=30 speed-preset=veryfast tune=zerolatency qinsert-vui=1 ! queue ! h264parse ! rtph264pay ! udpsink auto-multicast=1 host=224.1.1.1 port=5002 "; */

	/* Create the pipeline... this will negotiate unspecified caps between elements */
	g_print("Creating pipeline: [%s]\n", pipelineStr);
	GstElement *pipeline = gst_parse_launch (pipelineStr, NULL);

	/* Check if pipeline has successfully been created  */
	if (!pipeline ) {
		g_printerr("Pipeline could not be created. Exiting.\n");
		return -1;
	}
	g_print("Pipeline created\n");

	/* Set the pipeline to "playing" state */
	GstStateChangeReturn ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
	if (ret == GST_STATE_CHANGE_FAILURE) {
		g_printerr("Unable to set the pipeline to the playing state. Exiting.\n");
		return -1;
	}
	g_print("Pipeline started\n");	

	/* Wait until error or EOS (End of Stream) */
	GstBus *bus = gst_element_get_bus(pipeline);
	GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
	g_print("Pipeline got EOS or Error\n");		

	/* Free resources */
	if (msg != NULL)
		gst_message_unref(msg);

	gst_object_unref(bus);
	
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(pipeline);

	return 0;
}

/* Built on Linux with:
gcc -o test_compose test_compose.c $(pkg-config --cflags --libs gstreamer-1.0) 
# and tested with:
gst-launch-1.0 udpsrc address=224.1.1.1 port=5002 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! queue ! videoconvert ! autovideosink
*/
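
If you would rather keep building the pipeline programmatically with gst_element_factory_make(), as in your original code, the compositor inputs can be positioned by requesting its sink pads and setting the pad properties directly. Below is a minimal sketch, assuming the compositor and its upstream elements already exist in your pipeline; gst_element_request_pad_simple() needs GStreamer >= 1.20 (older versions use gst_element_get_request_pad() instead):

#include <gst/gst.h>

/* Sketch only: link the "src" pad of an upstream element to a freshly
 * requested compositor sink pad and place it at (xpos, ypos) in the
 * composed picture. Element names and the surrounding pipeline setup
 * are assumed to already exist. */
static gboolean link_to_compositor(GstElement *upstream, GstElement *compositor, gint xpos, gint ypos)
{
    GstPad *mixer_pad = gst_element_request_pad_simple(compositor, "sink_%u");
    GstPad *src_pad = gst_element_get_static_pad(upstream, "src");
    gboolean ok;

    /* Compositor pad properties control where this input is drawn */
    g_object_set(mixer_pad, "xpos", xpos, "ypos", ypos, NULL);

    ok = (gst_pad_link(src_pad, mixer_pad) == GST_PAD_LINK_OK);
    if (!ok)
        g_printerr("Failed to link to compositor pad\n");

    gst_object_unref(src_pad);
    gst_object_unref(mixer_pad);
    return ok;
}

/* Usage, e.g. for a 1280x480 side-by-side layout:
 *   link_to_compositor(videobox1, comp, 0, 0);
 *   link_to_compositor(videobox2, comp, 640, 0);
 */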