Set_window_handle?

• Hardware Platform (Jetson / GPU)
Jetson
• DeepStream Version
5.0
• JetPack Version (valid for Jetson only)
4.4
• TensorRT Version
n/a

Are there any nvidia gstreamer sinks that allow one to set the window handle, as would be done with xvimagesink? I have a Gtk based gui and a Gtk.DrawingArea. I’d like to give an overlay sink the xid of the drawing area so it knows where to draw. None of the Nvidia sinks I’ve tried seem to emit a prepare-window-handle message. Are there any examples for using Nvidia sinks with a Gui toolkit? Either Gtk or Qt is fine.

Thanks,
Mike

Ended up solving the issue by using the GObject properties on nvoverlaysink. I added a refresh_overlay_position method that’s called on various UI events; it calculates the position for the overlay and sets the appropriate nvoverlaysink properties:

    def refresh_overlay_position(self, *_, **__):
        if self.win and self.win.video_area and self._preview:
            allocation = self.win.video_area.get_allocation()
            win = self.win.video_area.get_window()
            x, y, = win.get_root_origin()  # the root origin of the window
            x_pos, y_pos = win.get_position()  # the relative position of D Area
            x += x_pos
            y += y_pos
            self._preview.set_property("overlay-x", x)
            self._preview.set_property("overlay-y", y)
            self._preview.set_property("overlay-w", allocation.width)
            self._preview.set_property("overlay-h", allocation.height)

This works. Only small issue is nothing can draw on top other than the mouse pointer, but for this app, that’s not a problem. If anybody has a better solution, or a better sink, would appreciate sharing!

Just to update the solution. It turns out that nveglglessink implements the GstVideoOverlay interface, so that can be used instead of setting the coordinates manually. Just set the xid as shown in the 5th GStreamer tutorial. It also supports Wayland, according to its GObject properties.

Just to further update. Nveglglessink does work with Gtk. Here is a sample Vala app that shows the CSI camera in a window. It’s mostly ported from the 5th gstreamer tutorial.

// Copyright (C) GStreamer developers
// Copyright (C) Michael de Gans

// this just tests the nveglglessink works with Gtk in X11 (and indeed it does)

// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.

public class GuiPlayer: Gtk.Application {
	dynamic Gst.Pipeline pipe;
	dynamic Gst.Element camera;
	dynamic Gst.Element converter;
	dynamic Gst.Element sink;

	Gst.State state { get; default=Gst.State.NULL; } // cached pipeline state

	// Gtk

	/** The uppermost window, containing all other windows */
	Gtk.ApplicationWindow win;

	/* The drawing area where the video will be shown */
	Gtk.DrawingArea area;

	/**
	 * Create a GuiPlayer.
	 *
	 * @param sensor_id CSI sensor id passed to nvarguscamerasrc (default 0)
	 */
	public GuiPlayer(int sensor_id=0) {
		// chain up to Object
		Object(
			application_id: "dev.mdegans.GuiPlayer",
			flags: ApplicationFlags.FLAGS_NONE);

		/* Create the pipeline */
		// NOTE: "as Gst.Pipeline" does a dynamic cast
		this.pipe = Gst.ElementFactory.make("pipeline", null) as Gst.Pipeline;

		/* Create the elements */
		this.camera = Gst.ElementFactory.make("nvarguscamerasrc", null);
		this.converter = Gst.ElementFactory.make("nvegltransform", null);
		this.sink = Gst.ElementFactory.make("nveglglessink", null);

		// FIX: ElementFactory.make returns null when a plugin is missing.
		// Fail loudly here instead of crashing in add_many/link_many below.
		if (this.pipe == null || this.camera == null ||
			this.converter == null || this.sink == null) {
			error("Not all elements could be created. " +
				"Check that the nvidia GStreamer plugins are installed.");
		}

		/* Add the elements */
		this.pipe.add_many(this.camera, this.converter, this.sink);

		/* Link the elements */
		// FIX: link_many returns false on failure (e.g. caps mismatch);
		// the result was previously ignored.
		if (!this.camera.link_many(this.converter, this.sink)) {
			error("Elements could not be linked.");
		}

		/* Choose which CSI sensor to play */
		this.camera.sensor_id = sensor_id;

		/* Instruct the bus to emit signals for each received message,
		 * and connect to the interesting signals */
		var bus = this.pipe.get_bus();
		bus.add_signal_watch();
		bus.message.connect(this.on_bus_message);
	}

	protected override void activate () {
		/* C version create_ui stuff here */

		// create windows
		this.win = new Gtk.ApplicationWindow (this);
		this.win.delete_event.connect(this.on_delete_event);

		// create the video area
		this.area = new Gtk.DrawingArea();
		this.area.realize.connect(this.on_video_window_realize);
		this.area.draw.connect(this.on_video_window_draw);

		// add the drawing area to the window
		this.win.add(this.area);
		this.win.set_default_size(640, 480);

		// Create the window of this application and show it
		this.win.show_all();

		/* Start playing */
		this.play();
	}

	/* This function is called when the main window is closed */
	bool on_delete_event(Gtk.Widget? widget, Gdk.EventAny? event) {
		// tear the pipeline down to NULL before quitting so the camera
		// and EGL resources are released cleanly
		this.pipe.set_state(Gst.State.NULL);
		this.quit();
		return false;
	}

	// shortcuts to change pipeline state
	public void play() {
		this.pipe.set_state(Gst.State.PLAYING);
	}
	public void pause() {
		this.pipe.set_state(Gst.State.PAUSED);
	}
	public void stop() {
		this.pipe.set_state(Gst.State.READY);
	}

	/** set_window_handle on the sink */
	void on_video_window_realize(Gtk.Widget widget) {
		// TODO(mdegans): support other platforms (right now this is X11 only,
		// while the C original supports Mac and Windows)
		var window = widget.get_window() as Gdk.X11.Window;
		// Gst.Video.Overlay is an interface implemented by nveglglessink
		var overlay = this.sink as Gst.Video.Overlay;
		// NOTE(mdegans): "foo as bar" is a dynamic cast with runtime checking
		//  while (bar)foo would be a static cast (like below).
		overlay.set_window_handle((uint*)window.get_xid());
	}

	/* This function is called everytime the video window needs to be redrawn
	 * (due to damage/exposure, rescaling, etc). GStreamer takes care of this in
	 * the PAUSED and PLAYING states, otherwise, we simply draw a black
	 * rectangle to avoid garbage showing up. */
	bool on_video_window_draw(Cairo.Context ctx) {
		if (this.state < Gst.State.PAUSED) {
			Gtk.Allocation allocation;
			this.area.get_allocation(out allocation);
			ctx.set_source_rgb(0,0,0);
			ctx.rectangle(0,0,allocation.width, allocation.height);
			ctx.fill();
		}
		return false;
	}

	/* This function is called when a message is posted on the bus */
	void on_bus_message(Gst.Bus bus, Gst.Message msg) {
		switch (msg.type) {
			case Gst.MessageType.ERROR: {
				Error err;
				string debug_info;
				/* Print error details on the screen */
				msg.parse_error(out err, out debug_info);
				printerr("Error received from element %s: %s\n",
					msg.src.name, err.message);
				// FIX: added the missing trailing newline
				printerr(@"Debugging information: $(debug_info)\n");
				this.pipe.set_state(Gst.State.NULL);
				this.quit();
				break;
			}
			case Gst.MessageType.EOS: {
				print("End-Of-Stream reached.\n");
				this.pipe.set_state(Gst.State.NULL);
				this.quit();
				break;
			}
			case Gst.MessageType.STATE_CHANGED: {
				// only cache state changes of the pipeline itself, not of
				// its child elements
				if (msg.src == this.pipe) {
					msg.parse_state_changed(null, out this._state, null);
				}
				break;
			}
			default:
				// ignore all other message types
				break;
		}
	}
}

/** Entry point: initialize GStreamer, then run the Gtk application. */
static int main(string[] argv) {
	/* GStreamer must be initialized before any of its API is used */
	Gst.init(ref argv);

	/* run() blocks until the application quits */
	var app = new GuiPlayer();
	return app.run(argv);
}