How to use cv::VideoCapture cap to forward the obtained video through gstreamer rtsp

My platform is NVIDIA jeston nano board gstreamer1.14.5 on Ubuntu.

1.VLC in the LAN can work a few second,and is stuck and stopped.

I have 2 question.

1. Am I using GStreamer correctly? Is this code solution correct?
2.Hoe can I accelerate it.

Thanks.

#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <gst/app/gstappsrc.h>
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"
#include <queue>
#include <mutex>
#include <thread>
#include <chrono> 
#define DEFAULT_RTSP_PORT "8554"

static char *port = (char *)DEFAULT_RTSP_PORT;
using namespace cv;

std::queue<cv::Mat> frameQueue;
std::mutex frameQueueMutex;

VideoCapture cap;

int g_cnt = 0;
int g_camera_cnt = 0;
int g_need_data = 0;
typedef struct
{
    gboolean white;
    GstClockTime timestamp;
} MyContext;

static void need_data(GstElement *appsrc, guint unused, MyContext *ctx)
{
    cv::Mat frame;

    // Get frame from the queue
    std::unique_lock<std::mutex> lock(frameQueueMutex);
    if (!frameQueue.empty())
    {
        frame = frameQueue.front();
        frameQueue.pop();
    }
    lock.unlock();
	g_need_data++;
    if (!frame.empty())
    {
		g_cnt++;
        GstBuffer *buffer;
        GstFlowReturn ret;

        buffer = gst_buffer_new_allocate(NULL, frame.total() * frame.elemSize(), NULL);

        GstMapInfo map;
        gst_buffer_map(buffer, &map, GST_MAP_WRITE);
        memcpy(map.data, frame.data, frame.total() * frame.elemSize());
        gst_buffer_unmap(buffer, &map);

		GST_BUFFER_PTS (buffer) = ctx->timestamp;
		GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 30);
		ctx->timestamp += GST_BUFFER_DURATION (buffer);
        g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
        gst_buffer_unref(buffer);
    }
}

void captureThread()
{
    while (true)
    {
        cv::Mat frame;
        cap >> frame;

        if (!frame.empty())
        {
            // Add the frame to the queue
            std::unique_lock<std::mutex> lock(frameQueueMutex);
			g_print("2frame size is %d, need-data:%d, %d, cap:%d, \n", frame.total() * frame.elemSize(), g_cnt, g_need_data, g_camera_cnt);
            frameQueue.push(frame);
			g_camera_cnt++;
            lock.unlock();
        }
		 std::this_thread::sleep_for(std::chrono::milliseconds(33)); 
        // You might want to add a delay or use some synchronization mechanism here
        // to control the frame acquisition rate
    }
}

static void media_configure(GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer user_data)
{
    GstElement *element, *appsrc;
    MyContext *ctx;

    element = gst_rtsp_media_get_element(media);
    appsrc = gst_bin_get_by_name_recurse_up(GST_BIN(element), "mysrc");

    gst_util_set_object_arg(G_OBJECT(appsrc), "format", "time");
    g_object_set(G_OBJECT(appsrc), "caps",
                 gst_caps_new_simple("video/x-raw",
                                     "format", G_TYPE_STRING, "BGR",
                                     "width", G_TYPE_INT, 1920,
                                     "height", G_TYPE_INT, 1080,
                                     "framerate", GST_TYPE_FRACTION, 30, 1,
                                     NULL),
                 "format", GST_FORMAT_TIME,
                 NULL);

    ctx = g_new0(MyContext, 1);
    ctx->white = FALSE;
    ctx->timestamp = 0;
    g_object_set_data_full(G_OBJECT(media), "my-extra-data", ctx,
                           (GDestroyNotify)g_free);

    g_signal_connect(appsrc, "need-data", (GCallback)need_data, ctx);
    gst_object_unref(appsrc);
    gst_object_unref(element);
}

int main(int argc, char *argv[])
{
	putenv("GST_DEBUG=*:3");
    putenv("GST_DEBUG_FILE=debug.log");
    putenv("GST_DEBUG_NO_COLOR=1");
    GMainLoop *loop;
    GstRTSPServer *server;
    GstRTSPMountPoints *mounts;
    GstRTSPMediaFactory *factory;

    gst_init(&argc, &argv);

    cap.open(0); // Open default camera
    if (!cap.isOpened())
    {
        g_print("Error: Couldn't open the camera.\n");
        return -1;
    }

    std::thread captureThreadObj(captureThread);

    loop = g_main_loop_new(NULL, FALSE);

    server = gst_rtsp_server_new();
    g_object_set(server, "service", port, NULL);
    gst_rtsp_server_set_address(server, "0.0.0.0");

    mounts = gst_rtsp_server_get_mount_points(server);

    factory = gst_rtsp_media_factory_new();
	gst_rtsp_media_factory_set_launch(factory,
    "( appsrc name=mysrc ! videoconvert ! capsfilter caps=\"video/x-raw,format=I420\" ! x264enc tune=zerolatency ! rtph264pay name=pay0 pt=96 )");

    g_signal_connect(factory, "media-configure", (GCallback)media_configure,
                     NULL);

    gst_rtsp_mount_points_add_factory(mounts, "/test", factory);
    g_object_unref(mounts);

    gst_rtsp_server_attach(server, NULL);

    g_print("stream ready at rtsp://127.0.0.1:8554/test\n");
    g_main_loop_run(loop);

    captureThreadObj.join(); // Wait for the capture thread to finish
    cap.release();           // Release the camera

    return 0;
}