How to play rtsp H265 on Android

When I use the Android universal 1.24.4 build to play an RTSP video stream, I can only play H264; H265 cannot be played. How can I solve this problem?

You need an H265 decoder. How did you install GStreamer on Android? Which version do you have?
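As a first check, you can ask the registry whether any installed decoder accepts H265 caps; a minimal sketch, assuming GStreamer is already initialized:

#include <gst/gst.h>

/* Returns TRUE if any registered decoder factory accepts video/x-h265 */
static gboolean
have_h265_decoder (void)
{
  GstCaps *caps = gst_caps_new_empty_simple ("video/x-h265");
  GList *decoders = gst_element_factory_list_get_elements (
      GST_ELEMENT_FACTORY_TYPE_DECODER, GST_RANK_MARGINAL);
  GList *filtered = gst_element_factory_list_filter (decoders, caps,
      GST_PAD_SINK, FALSE);
  gboolean found = (filtered != NULL);

  gst_plugin_feature_list_free (filtered);
  gst_plugin_feature_list_free (decoders);
  gst_caps_unref (caps);
  return found;
}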

Using the GStreamer 1.0 Android universal 1.24.5 binaries.

The player reported the following errors:

2024-06-26 09:47:07.601 18652-25221 GStreamer+basetransform org...streamer.tutorials.tutorial_5  W  0:16:35.264030792 0xb400007c277b2980 ../libs/gst/base/gstbasetransform.c:1373:gst_base_transform_setcaps:<capsfilter320> transform could not transform video/x-h265, stream-format=(string)byte-stream, alignment=(string)au, width=(int)1920, height=(int)1080, chroma-format=(string)4:2:0, bit-depth-luma=(uint)8, bit-depth-chroma=(uint)8, parsed=(boolean)true, profile=(string)main, tier=(string)main, level=(string)4 in anything we support
2024-06-26 09:47:07.601 18652-25221 GStreamer+basetransform org...streamer.tutorials.tutorial_5  W  0:16:35.264166708 0xb400007c277b2980 ../libs/gst/base/gstbasetransform.c:1433:gst_base_transform_reconfigure_unlocked:<capsfilter320> warning: not negotiated
2024-06-26 09:47:07.601 18652-25221 GStreamer+basetransform org...streamer.tutorials.tutorial_5  W  0:16:35.264190333 0xb400007c277b2980 ../libs/gst/base/gstbasetransform.c:1433:gst_base_transform_reconfigure_unlocked:<capsfilter320> warning: not negotiated

2024-06-26 09:47:07.684 18652-25224 CCodecBufferChannel org...streamer.tutorials.tutorial_5 D [OMX.c2.rk.hevc.decoder#166] MediaCodec discarded an unknown buffer
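To see why negotiation fails and which decoder (if any) gets picked on the device, it can help to raise the debug level before building the pipeline; a sketch (the amc* category names for the androidmedia plugin are an assumption here):

/* More verbose logging for MediaCodec elements and caps negotiation */
gst_debug_set_threshold_from_string ("2,amc*:6,GST_CAPS:5", TRUE);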

I made the following configuration in the code:

/* Configure jitterbuffer properties */
static void configure_jitterbuffer(GstElement *jitterbuffer) {
    g_object_set(jitterbuffer, "latency", 0, NULL);            /* ms */
    g_object_set(jitterbuffer, "do-lost", TRUE, NULL);
    g_object_set(jitterbuffer, "drop-on-latency", TRUE, NULL);
    g_object_set(jitterbuffer, "do-retransmission", FALSE, NULL);
    g_object_set(jitterbuffer, "max-dropout-time", 0, NULL);   /* ms */
    g_object_set(jitterbuffer, "max-misorder-time", 0, NULL);  /* ms */
}
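Note: configure_jitterbuffer above is only called on a standalone element further down, not on the jitterbuffer that rtspsrc creates internally. One way to reach the internal instance (a sketch, assuming rtspsrc's manager is an rtpbin) is rtspsrc's new-manager signal combined with rtpbin's new-jitterbuffer signal:

/* Called for every jitterbuffer the internal rtpbin creates */
static void
on_new_jitterbuffer (GstElement *rtpbin, GstElement *jitterbuffer,
    guint session, guint ssrc, gpointer user_data)
{
  configure_jitterbuffer (jitterbuffer);
}

/* Called once rtspsrc has created its internal manager (an rtpbin) */
static void
on_new_manager (GstElement *rtspsrc, GstElement *manager, gpointer user_data)
{
  g_signal_connect (manager, "new-jitterbuffer",
      G_CALLBACK (on_new_jitterbuffer), NULL);
}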


/* Main method for the native code. This is executed on its own thread. */
static void *
app_function (void *userdata)
{
  JavaVMAttachArgs args;
  GstBus *bus;
  CustomData *data = (CustomData *) userdata;
  GSource *timeout_source;
  GSource *bus_source;
  GError *error = NULL;
  guint flags;

  GST_DEBUG ("Creating pipeline in CustomData at %p", data);

  /* Create our own GLib Main Context and make it the default one */
  data->context = g_main_context_new ();
  g_main_context_push_thread_default (data->context);
  /* Build pipeline */
  gchar *descr = g_strconcat ("playbin", NULL);
  data->pipeline = gst_parse_launch (descr, &error);
  g_free (descr);

  if (error) {
    gchar *message =
        g_strdup_printf ("Unable to build pipeline: %s", error->message);
    g_clear_error (&error);
    GST_DEBUG ("error_cb g_free %s", message);
    set_ui_error (message, data);
    g_free (message);
    return NULL;
  }
  /* Set the RTSP transport mode to TCP. Note: playbin only creates its
   * source element later, so "source" is likely still NULL at this point;
   * see the "source-setup" sketch after this function. */
  g_object_get(data->pipeline, "source", &data->rtspsrc, NULL);
  if (data->rtspsrc) {
    g_object_set(data->rtspsrc, "protocols", GST_RTSP_LOWER_TRANS_TCP, NULL);
    gst_object_unref(data->rtspsrc);
  }
  /* Add jitterbuffer to handle timestamp issues (note: it is added to the
   * bin here but never linked into playbin's internal chain) */
  data->jitterbuffer = gst_element_factory_make("rtpjitterbuffer", "jitterbuffer");
  if (!data->jitterbuffer) {
    GST_ERROR("Failed to create jitterbuffer element.");
    return NULL;
  }
  configure_jitterbuffer(data->jitterbuffer);
  gst_bin_add(GST_BIN(data->pipeline), data->jitterbuffer);
  gst_element_sync_state_with_parent(data->jitterbuffer);
  /* Create a queue element and set it to leaky downstream mode
   * (note: the queue is created but never added to the pipeline or linked) */
  GstElement *queue = gst_element_factory_make("queue", "queue");
  if (!queue) {
    GST_ERROR("Failed to create queue element.");
    return NULL;
  }
  g_object_set(queue, "leaky", 2, NULL); /* 2 means downstream */

  /* Create video sink and set sync=false */
  GstElement *video_sink = gst_element_factory_make("glimagesink", "video_sink");
  if (!video_sink) {
    GST_ERROR("Failed to create video sink element.");
    return NULL;
  }
  g_object_set(video_sink, "sync", FALSE, NULL);
  g_object_set(data->pipeline, "video-sink", video_sink, NULL);

  /* Set the pipeline to READY, so it can already accept a window handle, if we have one */
  data->target_state = GST_STATE_READY;
  gst_element_set_state (data->pipeline, GST_STATE_READY);
  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data->pipeline);
  bus_source = gst_bus_create_watch (bus);
  g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func,
      NULL, NULL);
  g_source_attach (bus_source, data->context);
  g_source_unref (bus_source);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback) error_cb,
      data);
  g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback) eos_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed",
      (GCallback) state_changed_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::duration",
      (GCallback) duration_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::buffering",
      (GCallback) buffering_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::clock-lost",
      (GCallback) clock_lost_cb, data);
  gst_object_unref (bus);
  /* Register a function that GLib will call every 100 ms */
  timeout_source = g_timeout_source_new (100);
  g_source_set_callback (timeout_source, (GSourceFunc) refresh_ui, data, NULL);
  g_source_attach (timeout_source, data->context);
  g_source_unref (timeout_source);
  /* Create a GLib Main Loop and set it to run */
  GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
  data->main_loop = g_main_loop_new (data->context, FALSE);
  check_initialization_complete (data);
  g_main_loop_run (data->main_loop);
  GST_DEBUG ("Exited main loop");
  g_main_loop_unref (data->main_loop);
  data->main_loop = NULL;
  /* Free resources */
  g_main_context_pop_thread_default (data->context);
  g_main_context_unref (data->context);
  data->target_state = GST_STATE_NULL;
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
  gst_object_unref (data->pipeline);
  return NULL;
}
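A note on the source configuration above: playbin only creates its source element once the URI is being prepared, so reading the "source" property right after gst_parse_launch usually yields NULL. The usual pattern (a sketch, assuming an rtsp:// URI so that the source is rtspsrc) is the "source-setup" signal, where the jitterbuffer knobs are also available directly as rtspsrc properties:

/* Configure rtspsrc as soon as playbin creates it */
static void
source_setup_cb (GstElement *pipeline, GstElement *source, gpointer user_data)
{
  g_object_set (source,
      "protocols", GST_RTSP_LOWER_TRANS_TCP,
      "latency", 200,           /* ms; an arbitrary starting point to tune */
      "drop-on-latency", TRUE,
      "do-retransmission", FALSE,
      NULL);
}

/* ... connected right after the pipeline is parsed: */
g_signal_connect (data->pipeline, "source-setup",
    G_CALLBACK (source_setup_cb), NULL);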

How do you build your app for Android? On Linux? If so, try running your app on Linux first and see if your code works there.

Building the Android app on a Mac.

The code was downloaded from examples/tutorials/android · discontinued-for-monorepo · GStreamer / gst-docs · GitLab. Using the GStreamer 1.0 Android universal 1.24.5 binaries to play an RTSP video stream, H264 plays normally but H265 does not.

With 1.22.5 I do not have any problem with H265 in RTSP streaming on Ubuntu. But I have not tried 1.24.5.

What commands need to be entered on Ubuntu?

Try playbin3 to decode the RTSP stream. Playbin is old.
gst-launch-1.0 playbin3 uri=rtsp://your_rtsp_stream_url

gst-launch-1.0 -v playbin3 uri=rtsp://your_rtsp_stream_url
to find out which decoder is applied. Try to use a better decoder. I guess you also need hardware acceleration on Android. playbin3 has a latency property; try setting it to 0.

List the properties of playbin3; latency can be seen, even though it is not listed in the GStreamer docs.
gst-inspect-1.0 playbin3

Code Configuration:

/* Configure jitterbuffer properties */
static void configure_jitterbuffer(GstElement *jitterbuffer) {
    g_object_set(jitterbuffer, "latency", 0, NULL);
    g_object_set(jitterbuffer, "do-lost", TRUE, NULL);
    g_object_set(jitterbuffer, "drop-on-latency", TRUE, NULL);
    g_object_set(jitterbuffer, "do-retransmissions", FALSE, NULL);
    g_object_set(jitterbuffer, "max-dropout-time", 0, NULL);
    g_object_set(jitterbuffer, "max-misorder-time", 0, NULL);
}

    /* Create our own GLib Main Context and make it the default one */
    data->context = g_main_context_new();
    g_main_context_push_thread_default(data->context);
    /* Build pipeline */
    gchar *descr = g_strconcat("playbin3", NULL);
    data->pipeline = gst_parse_launch(descr, &error);
    if (error) {
        gchar *message =
                g_strdup_printf("Unable to build pipeline: %s", error->message);
        g_clear_error(&error);
        GST_DEBUG ("error_cb g_free %s", message);
        set_ui_error(message, data);
        g_free(message);
        return NULL;
    }
    /* Set the RTSP transport mode to TCP (same caveat as before: "source"
     * is only valid once playbin3 has created the source element) */
    g_object_get(data->pipeline, "source", &data->rtspsrc, NULL);
    if (data->rtspsrc) {
        g_object_set(data->rtspsrc, "protocols", GST_RTSP_LOWER_TRANS_TCP, NULL);
        gst_object_unref(data->rtspsrc);
    }
    /* Add jitterbuffer to handle timestamp issues */
    data->jitterbuffer = gst_element_factory_make("rtpjitterbuffer", "jitterbuffer");
    if (!data->jitterbuffer) {
        GST_ERROR("Failed to create jitterbuffer element.");
        return NULL;
    }
    configure_jitterbuffer(data->jitterbuffer);
    gst_bin_add(GST_BIN(data->pipeline), data->jitterbuffer);
    gst_element_sync_state_with_parent(data->jitterbuffer);
    /* Create a queue element and set it to leaky downstream mode */
    GstElement *queue = gst_element_factory_make("queue", "queue");
    if (!queue) {
        GST_ERROR("Failed to create queue element.");
        return NULL;
    }
    g_object_set(queue, "leaky", 2, NULL); // 2 means downstream
    g_object_set(queue, "max-size-buffers", 0, NULL);
    g_object_set(queue, "max-size-bytes", 0, NULL);
    g_object_set(queue, "max-size-time", 0, NULL);

    /* Create video sink and set sync=false */
    GstElement *video_sink = gst_element_factory_make("glimagesink", "video-sink");
    if (!video_sink) {
        GST_ERROR("Failed to create video sink element.");
        return NULL;
    }
    g_object_set(video_sink, "sync", FALSE, NULL);
    g_object_set(data->pipeline, "video-sink", video_sink, NULL);

Added latency to playbin3:
gst-launch-1.0 -v playbin3 uri=rtsp://your_rtsp_stream_url latency=0

@Joe. Please don’t set latency to 0, it will break on packet loss or any packet jitter over the network. The value of latency should be chosen based on the network properties but should (almost) never be 0.
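Programmatically, the same property can be set on the parsed pipeline; a sketch (per the gst-inspect-1.0 output mentioned above, playbin3 exposes latency even though the docs do not list it, and 200 ms is just a starting point to tune):

g_object_set (data->pipeline, "latency", 200, NULL); /* jitterbuffer budget in ms */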


You can try different values; using playbin3 is only to make things work. Also, in the end you need a customized pipeline for better performance with GPU acceleration.
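As a starting point for such a customized pipeline, here is a sketch of a manual RTSP H265 chain to try from the command line first (the element choice is an assumption; on Android, decodebin3 should pick a hardware amcviddec-* decoder if the device exposes one):

gst-launch-1.0 -v rtspsrc location=rtsp://your_rtsp_stream_url protocols=tcp latency=200 ! rtph265depay ! h265parse ! decodebin3 ! glimagesink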