// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// from gst-gir-files (https://gitlab.freedesktop.org/gstreamer/gir-files-rs.git)
// DO NOT EDIT

use glib::{prelude::*, translate::*};

glib::wrapper! {
    /// The [`VideoOverlay`][crate::VideoOverlay] interface is used for two main purposes:
    ///
    /// * To get a grab on the Window where the video sink element is going to render.
    ///  This is achieved by either being informed about the Window identifier that
    ///  the video sink element generated, or by forcing the video sink element to use
    ///  a specific Window identifier for rendering.
    /// * To force a redrawing of the latest video frame the video sink element
    ///  displayed on the Window. If the [`gst::Pipeline`][crate::gst::Pipeline] is in the
    ///  [`gst::State::Paused`][crate::gst::State::Paused] state, moving the Window around will damage its
    ///  content. Application developers will want to handle the Expose events
    ///  themselves and force the video sink element to refresh the Window's content.
    ///
    /// Using the Window created by the video sink is probably the simplest
    /// scenario; in some cases, though, it might not be flexible enough for
    /// application developers who need to catch events such as mouse moves and
    /// button clicks.
    ///
    /// Setting a specific Window identifier on the video sink element is the most
    /// flexible solution, but it has one catch: the application needs to set
    /// its Window identifier at the right time to avoid internal Window creation
    /// from the video sink element. To solve this issue a [`gst::Message`][crate::gst::Message] is posted on
    /// the bus to inform the application that it should set the Window identifier
    /// immediately. Here is an example of how to do that correctly:
    ///
    /// ```text
    /// static GstBusSyncReply
    /// create_window (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
    /// {
    ///  // ignore anything but 'prepare-window-handle' element messages
    ///  if (!gst_is_video_overlay_prepare_window_handle_message (message))
    ///    return GST_BUS_PASS;
    ///
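    ///  // 'disp' (the X Display), 'root' (the root Window) and 'win' are assumed
    ///  // to be declared and obtained elsewhere, e.g. via XOpenDisplay () and
    ///  // DefaultRootWindow ()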
    ///  win = XCreateSimpleWindow (disp, root, 0, 0, 320, 240, 0, 0, 0);
    ///
    ///  XSetWindowBackgroundPixmap (disp, win, None);
    ///
    ///  XMapRaised (disp, win);
    ///
    ///  XSync (disp, FALSE);
    ///
    ///  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)),
    ///      win);
    ///
    ///  gst_message_unref (message);
    ///
    ///  return GST_BUS_DROP;
    /// }
    /// ...
    /// int
    /// main (int argc, char **argv)
    /// {
    /// ...
    ///  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    ///  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, pipeline,
    ///         NULL);
    /// ...
    /// }
    /// ```
    ///
    /// ## Two basic usage scenarios
    ///
    /// There are two basic usage scenarios. In the simplest case, the application
    /// uses `playbin` or `playsink`, or knows exactly which element is used
    /// for video output; this is usually the case when the application creates
    /// the videosink to use (e.g. `xvimagesink`, `ximagesink`, etc.) itself. The
    /// application can then just create the videosink element, create and
    /// realize the window to render the video on, and
    /// call [`VideoOverlayExtManual::set_window_handle()`][crate::prelude::VideoOverlayExtManual::set_window_handle()] directly with the XID or native
    /// window handle before starting up the pipeline.
    /// As `playbin` and `playsink` implement the video overlay interface and proxy
    /// it transparently to the actual video sink even if it is created later, this
    /// case also applies when using these elements.
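    ///
    /// As a minimal, hedged sketch of this first scenario with the Rust bindings
    /// (`window_xid` is assumed to come from an already realized native window
    /// created by the application):
    ///
    /// ```text
    /// use gst::prelude::*;
    /// use gst_video::prelude::*;
    ///
    /// // Create the sink explicitly, so the overlay interface is available
    /// // before the pipeline is started.
    /// let sink = gst::ElementFactory::make("xvimagesink").build().unwrap();
    /// let overlay = sink
    ///     .clone()
    ///     .dynamic_cast::<gst_video::VideoOverlay>()
    ///     .expect("xvimagesink implements GstVideoOverlay");
    ///
    /// // Safety: the window behind `window_xid` must outlive the video sink.
    /// unsafe { overlay.set_window_handle(window_xid) };
    /// ```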
    ///
    /// In the other and more common case, the application does not know in advance
    /// what GStreamer video sink element will be used for video output. This is
    /// usually the case when an element such as `autovideosink` is used.
    /// In this case, the video sink element itself is created
    /// asynchronously from a GStreamer streaming thread some time after the
    /// pipeline has been started up. When that happens, however, the video sink
    /// will need to know right then whether to render onto an already existing
    /// application window or whether to create its own window. This is when it
    /// posts a prepare-window-handle message, and that is also why this message needs
    /// to be handled in a sync bus handler which will be called from the streaming
    /// thread directly (because the video sink will need an answer right then).
    ///
    /// In response to the prepare-window-handle element message in the bus sync
    /// handler, the application may use [`VideoOverlayExtManual::set_window_handle()`][crate::prelude::VideoOverlayExtManual::set_window_handle()] to tell
    /// the video sink to render onto an existing window surface. At this point the
    /// application should already have obtained the window handle / XID, so it
    /// just needs to set it. It is generally not advisable to call any GUI toolkit
    /// functions or window system functions from the streaming thread in which the
    /// prepare-window-handle message is handled, because most GUI toolkits and
    /// windowing systems are not thread-safe at all and a lot of care would be
    /// required to coordinate the toolkit and window system calls of the
    /// different threads. (Gtk+ users please note: prior to Gtk+ 2.18
    /// `GDK_WINDOW_XID` was just a simple structure access and therefore generally
    /// fine to do within the bus sync handler; in Gtk+ 2.18 and later this macro
    /// was changed to a function call, which is likely to cause problems when
    /// called from a sync handler. See below for a better approach that avoids
    /// `GDK_WINDOW_XID` in the callback.)
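    ///
    /// A hedged sketch of this pattern with the Rust bindings (assuming a
    /// `pipeline` and a native `window_handle` obtained beforehand on the
    /// application side) could look like this:
    ///
    /// ```text
    /// use gst::prelude::*;
    /// use gst_video::prelude::*;
    ///
    /// let bus = pipeline.bus().expect("pipeline has a bus");
    /// bus.set_sync_handler(move |_bus, msg| {
    ///     // Only react to 'prepare-window-handle' element messages.
    ///     if gst_video::is_video_overlay_prepare_window_handle_message(msg) {
    ///         // The message source is the video sink that needs a window.
    ///         if let Some(overlay) = msg
    ///             .src()
    ///             .and_then(|src| src.clone().dynamic_cast::<gst_video::VideoOverlay>().ok())
    ///         {
    ///             // Safety: the window behind `window_handle` must stay alive
    ///             // for as long as the sink renders into it.
    ///             unsafe { overlay.set_window_handle(window_handle) };
    ///         }
    ///         return gst::BusSyncReply::Drop;
    ///     }
    ///     gst::BusSyncReply::Pass
    /// });
    /// ```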
    ///
    /// ## GstVideoOverlay and Gtk+
    ///
    ///
    /// ```text
    /// #include <gst/video/videooverlay.h>
    /// #include <gtk/gtk.h>
    /// #ifdef GDK_WINDOWING_X11
    /// #include <gdk/gdkx.h>  // for GDK_WINDOW_XID
    /// #endif
    /// #ifdef GDK_WINDOWING_WIN32
    /// #include <gdk/gdkwin32.h>  // for GDK_WINDOW_HWND
    /// #endif
    /// ...
    /// static guintptr video_window_handle = 0;
    /// ...
    /// static GstBusSyncReply
    /// bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
    /// {
    ///  // ignore anything but 'prepare-window-handle' element messages
    ///  if (!gst_is_video_overlay_prepare_window_handle_message (message))
    ///    return GST_BUS_PASS;
    ///
    ///  if (video_window_handle != 0) {
    ///    GstVideoOverlay *overlay;
    ///
    ///    // GST_MESSAGE_SRC (message) will be the video sink element
    ///    overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
    ///    gst_video_overlay_set_window_handle (overlay, video_window_handle);
    ///  } else {
    ///    g_warning ("Should have obtained video_window_handle by now!");
    ///  }
    ///
    ///  gst_message_unref (message);
    ///  return GST_BUS_DROP;
    /// }
    /// ...
    /// static void
    /// video_widget_realize_cb (GtkWidget * widget, gpointer data)
    /// {
    /// #if GTK_CHECK_VERSION(2,18,0)
    ///   // Tell Gtk+/Gdk to create a native window for this widget instead of
    ///   // drawing onto the parent widget.
    ///   // This is here just for pedagogical purposes, GDK_WINDOW_XID will call
    ///   // it as well in newer Gtk versions
    ///   if (!gdk_window_ensure_native (gtk_widget_get_window (widget)))
    ///     g_error ("Couldn't create native window needed for GstVideoOverlay!");
    /// #endif
    ///
    /// #ifdef GDK_WINDOWING_X11
    ///   {
    ///     gulong xid = GDK_WINDOW_XID (gtk_widget_get_window (widget));
    ///     video_window_handle = xid;
    ///   }
    /// #endif
    /// #ifdef GDK_WINDOWING_WIN32
    ///   {
    ///     HWND wnd = GDK_WINDOW_HWND (gtk_widget_get_window (widget));
    ///     video_window_handle = (guintptr) wnd;
    ///   }
    /// #endif
    /// }
    /// ...
    /// int
    /// main (int argc, char **argv)
    /// {
    ///   GtkWidget *video_window;
    ///   GtkWidget *app_window;
    ///   ...
    ///   app_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
    ///   ...
    ///   video_window = gtk_drawing_area_new ();
    ///   g_signal_connect (video_window, "realize",
    ///       G_CALLBACK (video_widget_realize_cb), NULL);
    ///   gtk_widget_set_double_buffered (video_window, FALSE);
    ///   ...
    ///   // usually the video_window will not be directly embedded into the
    ///   // application window like this, but there will be many other widgets
    ///   // and the video window will be embedded in one of them instead
    ///   gtk_container_add (GTK_CONTAINER (app_window), video_window);
    ///   ...
    ///   // show the GUI
    ///   gtk_widget_show_all (app_window);
    ///
    ///   // realize window now so that the video window gets created and we can
    ///   // obtain its XID/HWND before the pipeline is started up and the videosink
    ///   // asks for the XID/HWND of the window to render onto
    ///   gtk_widget_realize (video_window);
    ///
    ///   // we should have the XID/HWND now
    ///   g_assert (video_window_handle != 0);
    ///   ...
    ///   // set up sync handler for setting the xid once the pipeline is started
    ///   bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    ///   gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler, NULL,
    ///       NULL);
    ///   gst_object_unref (bus);
    ///   ...
    ///   gst_element_set_state (pipeline, GST_STATE_PLAYING);
    ///   ...
    /// }
    /// ```
    ///
    /// ## GstVideoOverlay and Qt
    ///
    ///
    /// ```text
    /// #include <glib.h>
    /// #include <gst/gst.h>
    /// #include <gst/video/videooverlay.h>
    ///
    /// #include <QApplication>
    /// #include <QTimer>
    /// #include <QWidget>
    ///
    /// int main(int argc, char *argv[])
    /// {
    ///   if (!g_thread_supported ())
    ///     g_thread_init (NULL);
    ///
    ///   gst_init (&argc, &argv);
    ///   QApplication app(argc, argv);
    ///   app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));
    ///
    ///   // prepare the pipeline
    ///
    ///   GstElement *pipeline = gst_pipeline_new ("xvoverlay");
    ///   GstElement *src = gst_element_factory_make ("videotestsrc", NULL);
    ///   GstElement *sink = gst_element_factory_make ("xvimagesink", NULL);
    ///   gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
    ///   gst_element_link (src, sink);
    ///
    ///   // prepare the ui
    ///
    ///   QWidget window;
    ///   window.resize(320, 240);
    ///   window.show();
    ///
    ///   WId xwinid = window.winId();
    ///   gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), xwinid);
    ///
    ///   // run the pipeline
    ///
    ///   GstStateChangeReturn sret = gst_element_set_state (pipeline,
    ///       GST_STATE_PLAYING);
    ///   if (sret == GST_STATE_CHANGE_FAILURE) {
    ///     gst_element_set_state (pipeline, GST_STATE_NULL);
    ///     gst_object_unref (pipeline);
    ///     // Exit application
    ///     QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
    ///   }
    ///
    ///   int ret = app.exec();
    ///
    ///   window.hide();
    ///   gst_element_set_state (pipeline, GST_STATE_NULL);
    ///   gst_object_unref (pipeline);
    ///
    ///   return ret;
    /// }
    /// ```
    ///
    /// # Implements
    ///
    /// [`VideoOverlayExt`][trait@crate::prelude::VideoOverlayExt], [`VideoOverlayExtManual`][trait@crate::prelude::VideoOverlayExtManual]
    #[doc(alias = "GstVideoOverlay")]
    pub struct VideoOverlay(Interface<ffi::GstVideoOverlay, ffi::GstVideoOverlayInterface>);

    match fn {
        type_ => || ffi::gst_video_overlay_get_type(),
    }
}

impl VideoOverlay {
    pub const NONE: Option<&'static VideoOverlay> = None;

    //#[doc(alias = "gst_video_overlay_install_properties")]
    //pub fn install_properties(oclass: /*Ignored*/&mut glib::ObjectClass, last_prop_id: i32) {
    //    unsafe { TODO: call ffi:gst_video_overlay_install_properties() }
    //}
}

unsafe impl Send for VideoOverlay {}
unsafe impl Sync for VideoOverlay {}

mod sealed {
    pub trait Sealed {}
    impl<T: super::IsA<super::VideoOverlay>> Sealed for T {}
}

/// Trait containing all [`struct@VideoOverlay`] methods.
///
/// # Implementors
///
/// [`VideoOverlay`][struct@crate::VideoOverlay]
pub trait VideoOverlayExt: IsA<VideoOverlay> + sealed::Sealed + 'static {
    /// Tell an overlay that it has been exposed. This will redraw the current frame
    /// in the drawable even if the pipeline is PAUSED.
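    ///
    /// A minimal sketch (assuming `overlay` is any element implementing
    /// [`VideoOverlay`][crate::VideoOverlay], e.g. obtained from a bus sync handler), typically
    /// called from the toolkit's expose/draw callback:
    ///
    /// ```text
    /// // Force a redraw of the last frame, e.g. after the window was obscured
    /// // while the pipeline is paused.
    /// overlay.expose();
    /// ```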
    #[doc(alias = "gst_video_overlay_expose")]
    fn expose(&self) {
        unsafe {
            ffi::gst_video_overlay_expose(self.as_ref().to_glib_none().0);
        }
    }

    //#[doc(alias = "gst_video_overlay_got_window_handle")]
    //fn got_window_handle(&self, handle: /*Unimplemented*/Basic: UIntPtr) {
    //    unsafe { TODO: call ffi:gst_video_overlay_got_window_handle() }
    //}

    /// Tell an overlay that it should handle events from the window system. These
    /// events are forwarded upstream as navigation events. In some window systems,
    /// events are not propagated in the window hierarchy if a client is listening
    /// for them. This method allows you to disable event handling completely
    /// from the [`VideoOverlay`][crate::VideoOverlay].
    /// ## `handle_events`
    /// a `gboolean` indicating if events should be handled or not.
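    ///
    /// A short sketch (assuming `overlay` is a sink implementing
    /// [`VideoOverlay`][crate::VideoOverlay] and the application wants to receive mouse and
    /// keyboard events itself instead of having the sink forward them as
    /// navigation events):
    ///
    /// ```text
    /// // Let the application handle window-system events directly.
    /// overlay.handle_events(false);
    /// ```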
    #[doc(alias = "gst_video_overlay_handle_events")]
    fn handle_events(&self, handle_events: bool) {
        unsafe {
            ffi::gst_video_overlay_handle_events(
                self.as_ref().to_glib_none().0,
                handle_events.into_glib(),
            );
        }
    }

    /// This will post a "prepare-window-handle" element message on the bus
    /// to give applications an opportunity to call
    /// [`VideoOverlayExtManual::set_window_handle()`][crate::prelude::VideoOverlayExtManual::set_window_handle()] before a plugin creates its own
    /// window.
    ///
    /// This function should only be used by video overlay plugin developers.
    #[doc(alias = "gst_video_overlay_prepare_window_handle")]
    fn prepare_window_handle(&self) {
        unsafe {
            ffi::gst_video_overlay_prepare_window_handle(self.as_ref().to_glib_none().0);
        }
    }

    /// Configure a subregion as a video target within the window set by
    /// [`VideoOverlayExtManual::set_window_handle()`][crate::prelude::VideoOverlayExtManual::set_window_handle()]. If this is not used or not supported,
    /// the video will fill the whole area of the window set as the overlay.
    /// By specifying the rectangle, the video can be overlaid onto a specific
    /// region of that window only. After setting the new rectangle one should call
    /// [`expose()`][Self::expose()] to force a redraw. To unset the region pass -1 for
    /// the `width` and `height` parameters.
    ///
    /// This method is needed for non-fullscreen video overlay in UI toolkits that
    /// do not support subwindows.
    /// ## `x`
    /// the horizontal offset of the render area inside the window
    /// ## `y`
    /// the vertical offset of the render area inside the window
    /// ## `width`
    /// the width of the render area inside the window
    /// ## `height`
    /// the height of the render area inside the window
    ///
    /// # Returns
    ///
    /// [`false`] if not supported by the sink.
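    ///
    /// A hedged sketch (assuming `overlay` is the video sink and a 1280x720
    /// window in which the video should only occupy the lower-right quadrant):
    ///
    /// ```text
    /// if overlay.set_render_rectangle(640, 360, 640, 360).is_ok() {
    ///     // Force a redraw so the new rectangle takes effect immediately.
    ///     overlay.expose();
    /// }
    ///
    /// // Later, pass -1 for width and height to unset the region again.
    /// let _ = overlay.set_render_rectangle(0, 0, -1, -1);
    /// ```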
    #[doc(alias = "gst_video_overlay_set_render_rectangle")]
    fn set_render_rectangle(
        &self,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
    ) -> Result<(), glib::error::BoolError> {
        unsafe {
            glib::result_from_gboolean!(
                ffi::gst_video_overlay_set_render_rectangle(
                    self.as_ref().to_glib_none().0,
                    x,
                    y,
                    width,
                    height
                ),
                "Failed to set render rectangle"
            )
        }
    }
}

impl<O: IsA<VideoOverlay>> VideoOverlayExt for O {}