Creating both ghost source as well as sink pads

I was working on a transcoding pipeline. I added and linked all the elements like decoding, scaling, encoding into a bin called videoTranscodeBin. Let us assume only 1 media type - video. I could create a ghost sink pad as follows:

 // sink
  GstPad* padSink = gst_element_get_static_pad(binFirstElement, "sink");
  GstPad* ghostPadSink = gst_ghost_pad_new("sink", padSink);
  gst_pad_set_active(ghostPadSink, TRUE);
  gst_element_add_pad(videoTrancodeBin, ghostPadSink);
  gst_object_unref(padSink); 

Here binFirstElement is the first element in the videoTranscode bin. How does one create a ghost source pad? I tried the same approach as above with "source" replacing "sink", but I get an error.

What error do you get, and can you share the exact code you tried?

Here are the errors I get upon execution:


GSTREAMER VERSION (Major, Minor, Macro, Nano): 1 24 3 0 Release

(gstMediaTranscoder:17643): GStreamer-**CRITICAL** **: 15:58:54.578: gst_ghost_pad_new: assertion 'GST_IS_PAD (target)' failed

(gstMediaTranscoder:17643): GStreamer-**CRITICAL** **: 15:58:54.579: gst_pad_set_active: assertion 'GST_IS_PAD (pad)' failed

(gstMediaTranscoder:17643): GStreamer-**CRITICAL** **: 15:58:54.579: gst_element_add_pad: assertion 'GST_IS_PAD (pad)' failed

(gstMediaTranscoder:17643): GStreamer-**CRITICAL** **: 15:58:54.579: gst_object_unref: assertion 'object != NULL' failed

Elements audEnc and mux could not be linked.

The application involves a media (both audio and video) transcoding chain from filesrc to filesink. I am attaching the whole code here.

#include <gst/gst.h>

#ifdef __APPLE__
#include <TargetConditionals.h>
#endif

/* Holds every element of the transcoding pipeline so the construction code
 * and the bus-watch code can share them.  Topology:
 *   filesrc -> qtdemux -+-> [audio transcode bin] -+-> mp4mux -> queue -> filesink
 *                       +-> [video transcode bin] -+
 */
typedef struct CustomData_s {
  // overall pipeline (owns all elements once they are added to it)
  GstElement *pipeline;
  // filesource - demux
  GstElement *source;
  GstElement *demux;
  // audio transcode bin:
  // queue - mpegaudioparse - mpg123audiodec - audioconvert - audioresample - opusenc - mux
  GstElement *audQueueD;
  GstElement *audParse;
  GstElement *audDec;
  GstElement *audConv;
  GstElement *audResample;
  GstElement *audEnc;
  GstElement *audioTranscodeBin;
  // video transcode bin:
  // queue - h264parse - avdec_h264 - videoscale - capsfilter - x264enc - h264timestamper - muxer - queue - filesink
  GstElement *vidQueueD;
  GstElement *h264parse;
  GstElement *h264dec;
  GstElement *scale;
  GstElement *capsfilter;
  GstElement *x264enc;
  GstElement *h264ts;
  // NOTE(review): field name is misspelled ("Trancode" vs "Transcode");
  // kept as-is because every reference below uses this spelling.
  GstElement *videoTrancodeBin;
  // mux onward to sink
  GstElement *mux;
  GstElement *queueM;
  GstElement *sink;
} CustomData_t;

/* Called on the demuxer's "pad-added" signal; links the new pad to the
 * "sink" ghost pad of the video transcode bin when the pad carries video.
 * Audio and unknown pads are ignored (the audio callback handles audio). */
static void on_pad_added_video (GstElement * src, GstPad * new_pad, GstElement * dest) {
  GstPad *sink_pad = gst_element_get_static_pad (dest, "sink");
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad),
      GST_ELEMENT_NAME (src));

  // If our converter is already linked, we have nothing to do here
  if (gst_pad_is_linked (sink_pad)) {
    g_print ("We are already linked. Ignoring.\n");
    goto exit;
  }

  /* Check the new pad's type.  gst_pad_get_current_caps() can return NULL
   * if caps are not yet negotiated when pad-added fires; fall back to the
   * (possibly unfixed) query caps rather than dereferencing NULL. */
  new_pad_caps = gst_pad_get_current_caps (new_pad);
  if (new_pad_caps == NULL)
    new_pad_caps = gst_pad_query_caps (new_pad, NULL);
  if (new_pad_caps == NULL || gst_caps_is_empty (new_pad_caps)) {
    g_print ("Pad has no usable caps. Ignoring.\n");
    goto exit;
  }
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);
  if (g_str_has_prefix (new_pad_type, "audio")) {
    g_print ("It has type '%s' which is audio. Ignoring. \n", new_pad_type);
    goto exit;
  } else if (g_str_has_prefix (new_pad_type, "video")) {
    g_print ("It has type '%s' which is video. \n", new_pad_type);
  } else {
    g_print ("It has type '%s' which is unknown. Ignoring. \n", new_pad_type);
    goto exit;
  }

  /* Attempt the link */
  ret = gst_pad_link (new_pad, sink_pad);
  if (GST_PAD_LINK_FAILED (ret)) {
    g_print ("Type is '%s' but link failed.\n", new_pad_type);
  } else {
    g_print ("Link succeeded (type '%s').\n", new_pad_type);
  }

exit:
  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);

  /* Unreference the sink pad */
  gst_object_unref (sink_pad);
}

/* Called on the demuxer's "pad-added" signal; links the new pad to the
 * "sink" ghost pad of the audio transcode bin when the pad carries audio.
 * Video and unknown pads are ignored (the video callback handles video). */
static void on_pad_added_audio (GstElement * src, GstPad * new_pad, GstElement * dest)
{
  GstPad *sink_pad = gst_element_get_static_pad (dest, "sink");
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad),
      GST_ELEMENT_NAME (src));

  // If our converter is already linked, we have nothing to do here
  if (gst_pad_is_linked (sink_pad)) {
    g_print ("We are already linked. Ignoring.\n");
    goto exit;
  }

  /* Check the new pad's type.  gst_pad_get_current_caps() can return NULL
   * if caps are not yet negotiated when pad-added fires; fall back to the
   * (possibly unfixed) query caps rather than dereferencing NULL. */
  new_pad_caps = gst_pad_get_current_caps (new_pad);
  if (new_pad_caps == NULL)
    new_pad_caps = gst_pad_query_caps (new_pad, NULL);
  if (new_pad_caps == NULL || gst_caps_is_empty (new_pad_caps)) {
    g_print ("Pad has no usable caps. Ignoring.\n");
    goto exit;
  }
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);
  if (g_str_has_prefix (new_pad_type, "audio")) {
    g_print ("It has type '%s' which is audio. \n", new_pad_type);
  } else if (g_str_has_prefix (new_pad_type, "video")) {
    g_print ("It has type '%s' which is video. Ignoring. \n", new_pad_type);
    goto exit;
  } else {
    g_print ("It has type '%s' which is unknown. Ignoring. \n", new_pad_type);
    goto exit;
  }

  /* Attempt the link */
  ret = gst_pad_link (new_pad, sink_pad);
  if (GST_PAD_LINK_FAILED (ret)) {
    g_print ("Type is '%s' but link failed.\n", new_pad_type);
  } else {
    g_print ("Link succeeded (type '%s').\n", new_pad_type);
  }

exit:
  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);

  /* Unreference the sink pad */
  gst_object_unref (sink_pad);
}

int
tutorial_main (int argc, char *argv[])
{
  CustomData_t data;
  GstBus *bus;
  GstMessage *msg;

  // Initialize GStreamer
  gst_init (&argc, &argv);

  // Print version
  guint major, minor, micro, nano;
  const gchar* nano_str;
  gst_version(&major, &minor, &micro, &nano);
  nano_str = nano == 1 ? "(CVS)" : nano == 2 ? "Prerelease" : "Release";
  g_print("GSTREAMER VERSION (Major, Minor, Macro, Nano): %u %u %u %u %s \n", major, minor, micro, nano, nano_str);

  // Init the pipeline
  data.pipeline = gst_pipeline_new("test-pipeline");

  // Create the elements from source to demux
  data.source = gst_element_factory_make("filesrc", "source");
  data.demux = gst_element_factory_make("qtdemux", "demux");

  // Create the audio transcoding elements
  data.audQueueD = gst_element_factory_make("queue", "auQueueD");
  data.audParse = gst_element_factory_make("mpegaudioparse", "audParse");
  data.audDec = gst_element_factory_make("mpg123audiodec", "audDec");
  data.audConv = gst_element_factory_make("audioconvert", "audConv");
  data.audResample = gst_element_factory_make("audioresample", "audResample");
  data.audEnc = gst_element_factory_make("opusenc", "audEnc");

  // Create the video transcoding elements
  data.vidQueueD = gst_element_factory_make("queue", "vidQueueD");
  data.h264parse = gst_element_factory_make("h264parse", "h264parse");
  data.h264dec = gst_element_factory_make("avdec_h264", "h264dec");
  data.scale = gst_element_factory_make("videoscale", "scale");
  data.capsfilter = gst_element_factory_make("capsfilter", "capsfilter");
  data.x264enc = gst_element_factory_make("x264enc", "x264enc");
  data.h264ts = gst_element_factory_make("h264timestamper", "h264ts");

  // Create the elements from mux to sink
  data.mux = gst_element_factory_make("mp4mux", "mux");
  data.queueM = gst_element_factory_make("queue", "queueM");
  data.sink = gst_element_factory_make("filesink", "sink");

  // sanity check
  if (  !data.pipeline || 
        // common front end
        !data.source ||
        !data.demux ||
        // audio 
        !data.audQueueD ||
        !data.audParse ||
        !data.audDec ||
        !data.audConv ||
        !data.audResample ||
        !data.audEnc ||
        // video
        !data.vidQueueD || 
        !data.h264parse ||
        !data.h264dec ||
        !data.scale ||
        !data.capsfilter ||
        !data.x264enc ||
        !data.h264ts ||
        // common back end
        !data.mux ||
        !data.queueM ||
        !data.sink ) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  // create audio bin
  data.audioTranscodeBin = gst_bin_new("audioTranscodeBin");
  gst_bin_add_many(GST_BIN(data.audioTranscodeBin), data.audQueueD, data.audParse, data.audDec, data.audConv, 
                                                    data.audResample, data.audEnc, NULL);
  if (!gst_element_link_many( data.audQueueD, data.audParse, data.audDec, data.audConv, 
                              data.audResample, data.audEnc, NULL)) {
    g_printerr ("Elements could not be linked into the audio bin.\n");
    gst_object_unref(data.audioTranscodeBin);
    gst_object_unref(data.pipeline);
    return -1;
  }

  // add ghost pads to audio bin - sink
  GstPad* padAudSink = gst_element_get_static_pad(data.audQueueD, "sink");
  GstPad* ghostPadAudSink = gst_ghost_pad_new("sink", padAudSink);
  gst_pad_set_active(ghostPadAudSink, TRUE);
  gst_element_add_pad(data.audioTranscodeBin, ghostPadAudSink);
  gst_object_unref(padAudSink);
  // add ghost pads to audio bin - source
  GstPad* padAudSrc = gst_element_get_static_pad(data.audEnc, "source");
  GstPad* ghostPadAudSrc = gst_ghost_pad_new("sink", padAudSrc);
  gst_pad_set_active(ghostPadAudSrc, TRUE);
  gst_element_add_pad(data.audioTranscodeBin, ghostPadAudSrc);
  gst_object_unref(padAudSrc);

  // create video bin
  data.videoTrancodeBin = gst_bin_new ("videoTranscodeBin");
  gst_bin_add_many(GST_BIN(data.videoTrancodeBin), data.vidQueueD, data.h264parse, data.h264dec, 
                                                   data.scale, data.capsfilter, data.x264enc, data.h264ts, 
                                                   NULL);
  if (!gst_element_link_many(data.vidQueueD, data.h264parse, data.h264dec, data.scale, data.capsfilter, 
                             data.x264enc, data.h264ts, NULL)) {
    g_printerr ("Elements could not be linked into the video bin.\n");
    gst_object_unref(data.audioTranscodeBin);
    gst_object_unref(data.videoTrancodeBin);
    gst_object_unref(data.pipeline);
    return -1;
  }

  // add ghost pads to video bin - sink
  GstPad* padVidSink = gst_element_get_static_pad(data.vidQueueD, "sink");
  GstPad* ghostPadVidSink = gst_ghost_pad_new("sink", padVidSink);
  gst_pad_set_active(ghostPadVidSink, TRUE);
  gst_element_add_pad(data.videoTrancodeBin, ghostPadVidSink);
  gst_object_unref(padVidSink);

  // Add all elements to pipeline
  gst_bin_add_many(GST_BIN(data.pipeline),  data.source, data.demux,
                                            data.audioTranscodeBin,
                                            data.videoTrancodeBin, 
                                            data.mux, data.queueM, data.sink, NULL);

  // set the source element
  g_object_set(G_OBJECT(data.source), "location", 
               "/Users/kumarsnathan/Movies/youtube/1920x1080/AhalyaShortFilm.m4v", NULL);

  // audio bin settings
  g_object_set(G_OBJECT(data.audQueueD),  "max-size-bytes", (guint) 0,
                                          "max-size-buffers", (guint) 0,
                                          "max-size-time", (guint64) 0,
                                          NULL);

  // video bin settings
  // set the caps filter element
  GstCaps* caps = gst_caps_new_simple("video/x-raw",
                                      "format", G_TYPE_STRING, "I420",
                                      "width", G_TYPE_INT, 1280,
                                      "height", G_TYPE_INT, 720,
                                      NULL);
  g_object_set(G_OBJECT(data.capsfilter), "caps", caps, NULL);

  // set the x264enc element
  g_object_set(G_OBJECT(data.x264enc),  "bitrate", (guint) 4000, 
                                        "dct8x8", (gboolean) TRUE,
                                        "noise-reduction", (guint) 2,
                                        "ref", (guint) 4,
                                        "rc-lookahead", (gint) 25,
                                        "vbv-buf-capacity", (guint) 1000,
                                        NULL);

  // set the sink element
  g_object_set(G_OBJECT(data.sink), "location", "./data/AhalyaShortFilm-video-try0.mp4", NULL);

  // start linking all pipeline elements
  if (!gst_element_link(data.source, data.demux)) {
    g_printerr ("Elements source and demux could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  if (!gst_element_link(data.audioTranscodeBin, data.mux)) {
    g_printerr ("Elements audEnc and mux could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  if (!gst_element_link(data.h264ts, data.mux)) {
    g_printerr ("Elements h264ts and mux could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  if (!gst_element_link(data.mux, data.queueM)) {
    g_printerr ("Elements mux and queueM could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  if (!gst_element_link(data.queueM, data.sink)) {
    g_printerr ("Elements queueM and sink could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  g_signal_connect(data.demux, "pad-added", G_CALLBACK (on_pad_added_audio), data.audioTranscodeBin);
  g_signal_connect(data.demux, "pad-added", G_CALLBACK (on_pad_added_video), data.videoTrancodeBin);

  /* Start playing */
  gst_element_set_state (data.pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (data.pipeline);
  msg =
      gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}

/* Entry point.  On desktop macOS, GStreamer requires the application to run
 * inside gst_macos_main so it can own the NSApplication main thread; on all
 * other platforms the tutorial code is called directly. */
int
main (int argc, char *argv[])
{
#if defined(__APPLE__) && TARGET_OS_MAC && !TARGET_OS_IPHONE
  return gst_macos_main ((GstMainFunc) tutorial_main, argc, argv, NULL);
#else
  return tutorial_main (argc, argv);
#endif
}

Note that in the code above, I was able to successfully use ghost sink pads for both the audio and video transcode chains. But when I tried using a ghost src pad for either audio or video, I get this error and it says that the audio/video transcode bin could not be linked to the muxer. Yet the last element inside either bin can be linked to the muxer successfully. Why is this?

-  GstPad* padAudSrc = gst_element_get_static_pad(data.audEnc, "source");
-  GstPad* ghostPadAudSrc = gst_ghost_pad_new("sink", padAudSrc);
+  GstPad* padAudSrc = gst_element_get_static_pad(data.audEnc, "src");
+  GstPad* ghostPadAudSrc = gst_ghost_pad_new("src", padAudSrc);

is what you want here.

To debug such critical warnings, you can set the environment variable G_DEBUG=fatal_warnings and run in a debugger to get a breakpoint on the first of those warnings and see where it comes from. In this case, the GStreamer debug logs would also have helped.

Thank you for pointing out this error in my code. Yes, the changes you suggest do work. Much appreciated.