Embedding 2 pipelines in the same wayland window at a time

I have 2 simple gstreamer pipelines using waylandsink, and I would like to embed them into a tauri window, one at a time.

pub fn start(app_handle: AppHandle) {
    std::thread::sleep(Duration::from_secs(10));
    gstreamer::init().unwrap();
    let mut pipeline_str = String::from("videotestsrc is-live=true ! videoconvert ! waylandsink name=videosink");
    let pipeline = gstreamer::parse::launch(&pipeline_str).unwrap();
    let pipeline_orig = pipeline.dynamic_cast::<gstreamer::Pipeline>().unwrap();

    // tauri's webview window
    let webview_window = app_handle.get_webview_window("main").unwrap();

    std::thread::sleep(Duration::from_secs(2));
    let video_sink = pipeline_orig.by_name("videosink").unwrap();
    if video_sink
        .context("GstWlDisplayHandleContextType")
        .is_none()
    {
        if let RawDisplayHandle::Wayland(display_handle) =
            webview_window.display_handle().unwrap().as_raw()
        {
            let mut context = gstreamer::Context::new("GstWlDisplayHandleContextType", false);
            let context_ref = context.get_mut().unwrap();
            let structure_ref = context_ref.structure_mut();
            let mut value = MaybeUninit::zeroed();
            let send_value = unsafe {
                gstreamer::glib::gobject_ffi::g_value_init(
                    value.as_mut_ptr(),
                    gstreamer::glib::gobject_ffi::G_TYPE_POINTER,
                );

                gstreamer::glib::gobject_ffi::g_value_set_pointer(
                    value.as_mut_ptr(),
                    display_handle.display.as_ptr(),
                );
                gstreamer::glib::SendValue::from_glib_none(&value.assume_init() as *const _)
            };
            structure_ref.set_value("handle", send_value);
            video_sink.set_context(&context);
            println!("Set the context for sink");
        }
    }
    let video_overlay = video_sink
        .dynamic_cast::<gstreamer_video::VideoOverlay>()
        .expect("Sink doesn't implement overlay");

    let scale_factor = webview_window.scale_factor().unwrap();
    let display_handle = webview_window.window_handle().unwrap();

    match display_handle.as_raw() {
        RawWindowHandle::Wayland(raw_handle) => unsafe {
            video_overlay.set_window_handle(raw_handle.surface.as_ptr() as usize);
            println!("Set the window handle");
            let inner_size = webview_window.inner_size().unwrap();
            let inner_position = webview_window.inner_position().unwrap();
            let logical_size = inner_size.to_logical::<i32>(scale_factor);
            video_overlay
                .set_render_rectangle(
                    inner_position.x,
                    inner_position.y,
                    // 1920,
                    // 1080,
                    // inner_size.width as i32,
                    // inner_size.height as i32,
                    logical_size.width,
                    logical_size.height,
                )
                .unwrap();
            println!("{:?} {:?} {:?}", inner_size, inner_position, logical_size);
        },
        _ => (),
    };

    pipeline_orig.set_state(gstreamer::State::Playing).unwrap();
    let (tx, rx) = std::sync::mpsc::channel::<()>();
    std::thread::spawn(move || {
        std::thread::sleep(Duration::from_secs(5));
        tx.send(()).unwrap();
    });
    rx.recv().unwrap();

    pipeline_orig.set_state(gstreamer::State::Null).unwrap();

    let pipeline_str = String::from(
        "videotestsrc is-live=true pattern=snow ! videoconvert name=videopadsrc ! fakesink name=fakesink",
    );

    let pipeline = gstreamer::parse::launch(&pipeline_str).unwrap();
    let pipeline = pipeline.dynamic_cast::<gstreamer::Pipeline>().unwrap();
    pipeline.set_state(gstreamer::State::Playing).unwrap();

    let (tx, rx) = std::sync::mpsc::channel::<()>();
    std::thread::spawn(move || {
        std::thread::sleep(Duration::from_secs(5));
        tx.send(()).unwrap();
    });
    rx.recv().unwrap();

    if let Some(fakevideosink) = pipeline.by_name("fakesink") {
        let videopadsrc = pipeline.by_name("videopadsrc").unwrap();
        let source_pad = videopadsrc.static_pad("src").unwrap();
        let pipeline = pipeline.clone();
        let (tx, rx) = std::sync::mpsc::channel::<()>();
        source_pad.add_probe(
            gstreamer::PadProbeType::BLOCK_DOWNSTREAM | gstreamer::PadProbeType::IDLE,
            move |_pad, _pad_probe_info| {
                videopadsrc.unlink(&fakevideosink);
                fakevideosink.set_state(gstreamer::State::Null).unwrap();
                if pipeline.by_name("fakevideosink").is_some() {
                    println!("It exists");
                    pipeline.remove(&fakevideosink).unwrap();
                }

                let videosink = gstreamer::ElementFactory::make("waylandsink")
                    .name("videosink")
                    .property("sync", true)
                    .property("async", false)
                    .build()
                    .unwrap();
                pipeline.add(&videosink).unwrap();
                videopadsrc.link(&videosink).unwrap();
                videosink.sync_state_with_parent().unwrap();
                let _ = tx.send(());
                gstreamer::PadProbeReturn::Remove
            },
        );
        rx.recv().unwrap();
        println!("Received removal of fakesink");
    }

    let webview_window = app_handle.get_webview_window("main").unwrap();

    let video_sink = pipeline.by_name("videosink").unwrap();
    if video_sink
        .context("GstWlDisplayHandleContextType")
        .is_none()
    {
        if let RawDisplayHandle::Wayland(display_handle) =
            webview_window.display_handle().unwrap().as_raw()
        {
            let mut context = gstreamer::Context::new("GstWlDisplayHandleContextType", true);
            let context_ref = context.get_mut().unwrap();
            let structure_ref = context_ref.structure_mut();
            let mut value = MaybeUninit::zeroed();
            let send_value = unsafe {
                gstreamer::glib::gobject_ffi::g_value_init(
                    value.as_mut_ptr(),
                    gstreamer::glib::gobject_ffi::G_TYPE_POINTER,
                );

                gstreamer::glib::gobject_ffi::g_value_set_pointer(
                    value.as_mut_ptr(),
                    display_handle.display.as_ptr(),
                );
                gstreamer::glib::SendValue::from_glib_none(&value.assume_init() as *const _)
            };
            structure_ref.set_value("handle", send_value);
            video_sink.set_context(&context);
            println!("Set the context for sink");
        }
    }
    let video_overlay = video_sink
        .dynamic_cast::<gstreamer_video::VideoOverlay>()
        .expect("Sink doesn't implement overlay");

    let scale_factor = webview_window.scale_factor().unwrap();
    let display_handle = webview_window.window_handle().unwrap();

    match display_handle.as_raw() {
        RawWindowHandle::Wayland(raw_handle) => unsafe {
            video_overlay.set_window_handle(raw_handle.surface.as_ptr() as usize);
            println!("Set the window handle");
            let inner_size = webview_window.inner_size().unwrap();
            let inner_position = webview_window.inner_position().unwrap();
            let logical_size = inner_size.to_logical::<i32>(scale_factor);
            video_overlay
                .set_render_rectangle(
                    inner_position.x,
                    inner_position.y,
                    // 1920,
                    // 1080,
                    // inner_size.width as i32,
                    // inner_size.height as i32,
                    logical_size.width,
                    logical_size.height,
                )
                .unwrap();
            println!("{:?} {:?} {:?}", inner_size, inner_position, logical_size);
        },
        _ => (),
    };
    let (tx, rx) = std::sync::mpsc::channel::<()>();
    std::thread::spawn(move || {
        std::thread::sleep(Duration::from_secs(5));
        tx.send(()).unwrap();
    });
    rx.recv().unwrap();
}


When I stop the first pipeline and try embedding the second pipeline, it fails and I get the warning message "changing display handle is not supported".
Why do I get this warning despite cleaning up the previous pipeline? Is there a way I can embed the second pipeline to the sink?

I just discovered that if I move the line pipeline.set_state(gstreamer::State::Playing).unwrap(); for the second pipeline after setting the window handle, it works! But any reason why it doesn’t work when the pipeline is already in the Playing state?

If you move to playing state before setting a handle (or without using a sync handler), the sink will create an internal debug top level surface.

Thanks!

So if I understood correctly, if I do a

// Drain the pipeline bus so EOS/errors are at least logged.
// NOTE(review): `bus.iter_timed(ClockTime::NONE)` blocks the thread; inside
// `tokio::spawn` this should be `spawn_blocking` (or the async bus stream).
tokio::spawn(async move {
    let bus = pipeline_clone.bus().unwrap();
    for msg in bus.iter_timed(gstreamer::ClockTime::NONE) {
        match msg.view() {
            MessageView::Eos(_) => {
                println!("End of stream");
            }
            MessageView::Error(e) => {
                println!("stream error {}", e);
            }
            _ => (),
        }
    }
});
// Fixed typo: `gstreamer::State:Playing` (single colon) -> `State::Playing`.
pipeline.set_state(gstreamer::State::Playing).unwrap();

then, it won’t create an internal surface and I won’t have an issue?

Not really. I was referring to the Bus sync handler. This handler gets messages before they are queued, and it is how window-handle request messages are supposed to be dealt with. The sink won't create an internal surface as long as you provide a surface to parent to within that handler.

1 Like