// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// from gst-gir-files (https://gitlab.freedesktop.org/gstreamer/gir-files-rs.git)
// DO NOT EDIT

use crate::{ffi, VideoCodecFrame};
use glib::{
    prelude::*,
    signal::{connect_raw, SignalHandlerId},
    translate::*,
};
use std::boxed::Box as Box_;

glib::wrapper! {
    /// This base class is for video encoders turning raw video into
    /// encoded video data.
    ///
    /// GstVideoEncoder and its subclass should cooperate as follows.
    ///
    /// ## Configuration
    ///
    ///  * Initially, GstVideoEncoder calls `start` when the encoder element
    ///  is activated, which allows the subclass to perform any global setup.
    ///  * GstVideoEncoder calls `set_format` to inform the subclass of the format
    ///  of the input video data that it is about to receive. The subclass should
    ///  set up for encoding and configure the base class as appropriate
    ///  (e.g. latency). While unlikely, it might be called more than once
    ///  if changing input parameters requires reconfiguration. The base class
    ///  will ensure that processing of the current configuration is finished.
    ///  * GstVideoEncoder calls `stop` at the end of all processing.
    ///
    /// ## Data processing
    ///
    ///  * The base class collects input data and metadata into a frame and hands
    ///  it to the subclass' `handle_frame`.
    ///
    ///  * If codec processing results in encoded data, subclass should call
    ///  [`VideoEncoderExt::finish_frame()`][crate::prelude::VideoEncoderExt::finish_frame()] to have encoded data pushed
    ///  downstream.
    ///
    ///  * If implemented, the base class calls the subclass' `pre_push` just prior to
    ///  pushing, to allow subclasses to modify some metadata on the buffer.
    ///  If it returns GST_FLOW_OK, the buffer is pushed downstream.
    ///
    ///  * GstVideoEncoderClass will handle both srcpad and sinkpad events.
    ///  Sink events will be passed to the subclass if an `event` callback has been
    ///  provided.
    ///
    /// ## Shutdown phase
    ///
    ///  * The GstVideoEncoder class calls `stop` to inform the subclass that data
    ///  processing will be stopped.
    ///
    /// The subclass is responsible for providing pad template caps for the
    /// source and sink pads. The pads need to be named "sink" and "src". It should
    /// also be able to provide fixed src pad caps in `getcaps` by the time it calls
    /// [`VideoEncoderExt::finish_frame()`][crate::prelude::VideoEncoderExt::finish_frame()].
    ///
    /// Things that the subclass needs to take care of:
    ///
    ///  * Provide pad templates
    ///  * Provide source pad caps before pushing the first buffer
    ///  * Accept data in `handle_frame` and provide encoded results to
    ///  [`VideoEncoderExt::finish_frame()`][crate::prelude::VideoEncoderExt::finish_frame()].
    ///
    ///
    /// The [`qos`][struct@crate::VideoEncoder#qos] property will enable the Quality-of-Service
    /// features of the encoder which gather statistics about the real-time
    /// performance of the downstream elements. If enabled, subclasses can
    /// use [`VideoEncoderExt::max_encode_time()`][crate::prelude::VideoEncoderExt::max_encode_time()] to check if input frames
    /// are already late and drop them right away to give the pipeline a chance
    /// to catch up.
    ///
    /// This is an Abstract Base Class; you cannot instantiate it.
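    ///
    /// For example, an application typically creates a concrete encoder element
    /// and drives it through this base-class API. The sketch below assumes an
    /// application that depends on the `gstreamer` and `gstreamer-video` crates
    /// and has the `x264enc` element available; any [`VideoEncoder`][crate::VideoEncoder]
    /// subclass works the same way.
    ///
    /// ```ignore
    /// use gstreamer as gst;
    /// use gstreamer_video as gst_video;
    /// use gst::prelude::*;
    /// use gst_video::prelude::*;
    ///
    /// gst::init().unwrap();
    ///
    /// // Create a concrete encoder element and cast it to the VideoEncoder base class.
    /// let element = gst::ElementFactory::make("x264enc").build().unwrap();
    /// let encoder = element
    ///     .dynamic_cast::<gst_video::VideoEncoder>()
    ///     .expect("not a VideoEncoder");
    ///
    /// // Enable Quality-of-Service handling so late frames can be dropped.
    /// encoder.set_qos_enabled(true);
    /// assert!(encoder.is_qos_enabled());
    /// ```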
    ///
    /// ## Properties
    ///
    ///
    /// #### `min-force-key-unit-interval`
    ///  Minimum interval between force-keyunit requests in nanoseconds. See
    /// [`VideoEncoderExt::set_min_force_key_unit_interval()`][crate::prelude::VideoEncoderExt::set_min_force_key_unit_interval()] for more details.
    ///
    /// Readable | Writeable
    ///
    ///
    /// #### `qos`
    ///  Readable | Writeable
    /// <details><summary><h4>Object</h4></summary>
    ///
    ///
    /// #### `name`
    ///  Readable | Writeable | Construct
    ///
    ///
    /// #### `parent`
    ///  The parent of the object. Please note that when changing the 'parent'
    /// property, we don't emit [`notify`][struct@crate::glib::Object#notify] and [`deep-notify`][struct@crate::gst::Object#deep-notify]
    /// signals due to locking issues. In some cases one can use
    /// `GstBin::element-added` or `GstBin::element-removed` signals on the parent to
    /// achieve a similar effect.
    ///
    /// Readable | Writeable
    /// </details>
    ///
    /// # Implements
    ///
    /// [`VideoEncoderExt`][trait@crate::prelude::VideoEncoderExt], [`trait@gst::prelude::ElementExt`], [`trait@gst::prelude::ObjectExt`], [`trait@glib::ObjectExt`], [`VideoEncoderExtManual`][trait@crate::prelude::VideoEncoderExtManual]
    #[doc(alias = "GstVideoEncoder")]
    pub struct VideoEncoder(Object<ffi::GstVideoEncoder, ffi::GstVideoEncoderClass>) @extends gst::Element, gst::Object;

    match fn {
        type_ => || ffi::gst_video_encoder_get_type(),
    }
}

impl VideoEncoder {
    pub const NONE: Option<&'static VideoEncoder> = None;
}

unsafe impl Send for VideoEncoder {}
unsafe impl Sync for VideoEncoder {}

mod sealed {
    pub trait Sealed {}
    impl<T: super::IsA<super::VideoEncoder>> Sealed for T {}
}

/// Trait containing all [`struct@VideoEncoder`] methods.
///
/// # Implementors
///
/// [`VideoEncoder`][struct@crate::VideoEncoder]
pub trait VideoEncoderExt: IsA<VideoEncoder> + sealed::Sealed + 'static {
    /// Helper function that allocates a buffer to hold an encoded video frame
    /// for `self`'s current [`VideoCodecState`][crate::VideoCodecState].
    /// ## `size`
    /// size of the buffer
    ///
    /// # Returns
    ///
    /// allocated buffer
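    ///
    /// A minimal sketch of how a subclass might use this from its
    /// `handle_frame()` implementation (`encoded_size` is a placeholder for
    /// the size the codec actually needs):
    ///
    /// ```ignore
    /// let outbuf = self.obj().allocate_output_buffer(encoded_size);
    /// ```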
    #[doc(alias = "gst_video_encoder_allocate_output_buffer")]
    fn allocate_output_buffer(&self, size: usize) -> gst::Buffer {
        unsafe {
            from_glib_full(ffi::gst_video_encoder_allocate_output_buffer(
                self.as_ref().to_glib_none().0,
                size,
            ))
        }
    }

    /// `frame` must have a valid encoded data buffer (whose metadata fields
    /// are then set appropriately according to the frame data), or no buffer at
    /// all if the frame should be dropped.
    /// It is subsequently pushed downstream or provided to `pre_push`.
    /// In any case, the frame is considered finished and released.
    ///
    /// After calling this function the output buffer of the frame is to be
    /// considered read-only. This function will also change the metadata
    /// of the buffer.
    /// ## `frame`
    /// an encoded [`VideoCodecFrame`][crate::VideoCodecFrame]
    ///
    /// # Returns
    ///
    /// a [`gst::FlowReturn`][crate::gst::FlowReturn] resulting from sending data downstream
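    ///
    /// A minimal sketch of the call pattern inside a subclass'
    /// `handle_frame()` implementation (`encoded` is a placeholder for the
    /// `gst::Buffer` produced by the codec):
    ///
    /// ```ignore
    /// // Attach the encoded data, then hand the frame back to the base class,
    /// // which pushes the output buffer downstream.
    /// frame.set_output_buffer(encoded);
    /// self.obj().finish_frame(frame)
    /// ```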
    #[doc(alias = "gst_video_encoder_finish_frame")]
    fn finish_frame(&self, frame: VideoCodecFrame) -> Result<gst::FlowSuccess, gst::FlowError> {
        unsafe {
            try_from_glib(ffi::gst_video_encoder_finish_frame(
                self.as_ref().to_glib_none().0,
                frame.into_glib_ptr(),
            ))
        }
    }

    /// Determines the maximum possible encoding time for `frame` that will
    /// allow it to be encoded and arrive in time (as determined by QoS events).
    /// In particular, a negative result means that encoding in time is no longer
    /// possible, and the frame should therefore be encoded as quickly as possible
    /// or skipped altogether.
    ///
    /// If no QoS events have been received from downstream, or if
    /// [`qos`][struct@crate::VideoEncoder#qos] is disabled, this function returns `G_MAXINT64`.
    /// ## `frame`
    /// a [`VideoCodecFrame`][crate::VideoCodecFrame]
    ///
    /// # Returns
    ///
    /// the maximum encoding time.
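    ///
    /// A minimal sketch of QoS-driven frame dropping inside a subclass'
    /// `handle_frame()` implementation:
    ///
    /// ```ignore
    /// // A negative budget means the frame is already late: finish it without
    /// // an output buffer so it is dropped rather than encoded.
    /// if self.obj().max_encode_time(&frame) < 0 {
    ///     return self.obj().finish_frame(frame);
    /// }
    /// ```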
    #[doc(alias = "gst_video_encoder_get_max_encode_time")]
    #[doc(alias = "get_max_encode_time")]
    fn max_encode_time(&self, frame: &VideoCodecFrame) -> gst::ClockTimeDiff {
        unsafe {
            ffi::gst_video_encoder_get_max_encode_time(
                self.as_ref().to_glib_none().0,
                frame.to_glib_none().0,
            )
        }
    }

    /// Returns the minimum force-keyunit interval; see [`set_min_force_key_unit_interval()`][Self::set_min_force_key_unit_interval()]
    /// for more details.
    ///
    /// # Returns
    ///
    /// the minimum force-keyunit interval
    #[cfg(feature = "v1_18")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_18")))]
    #[doc(alias = "gst_video_encoder_get_min_force_key_unit_interval")]
    #[doc(alias = "get_min_force_key_unit_interval")]
    #[doc(alias = "min-force-key-unit-interval")]
    fn min_force_key_unit_interval(&self) -> Option<gst::ClockTime> {
        unsafe {
            from_glib(ffi::gst_video_encoder_get_min_force_key_unit_interval(
                self.as_ref().to_glib_none().0,
            ))
        }
    }

    /// Checks if `self` is currently configured to handle Quality-of-Service
    /// events from downstream.
    ///
    /// # Returns
    ///
    /// [`true`] if the encoder is configured to perform Quality-of-Service.
    #[doc(alias = "gst_video_encoder_is_qos_enabled")]
    fn is_qos_enabled(&self) -> bool {
        unsafe {
            from_glib(ffi::gst_video_encoder_is_qos_enabled(
                self.as_ref().to_glib_none().0,
            ))
        }
    }

    /// Sets the video encoder tags and how they should be merged with any
    /// upstream stream tags. This will override any tags previously set
    /// with [`merge_tags()`][Self::merge_tags()].
    ///
    /// Note that this is provided for convenience, and the subclass is
    /// not required to use this and can still do tag handling on its own.
    ///
    /// MT safe.
    /// ## `tags`
    /// a [`gst::TagList`][crate::gst::TagList] to merge, or `None` to unset
    ///  previously set tags
    /// ## `mode`
    /// the [`gst::TagMergeMode`][crate::gst::TagMergeMode] to use, usually [`gst::TagMergeMode::Replace`][crate::gst::TagMergeMode::Replace]
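    ///
    /// A minimal sketch (the tag value is arbitrary, and the `gst`/`gst_video`
    /// crate aliases are the usual application-side imports):
    ///
    /// ```ignore
    /// use gstreamer as gst;
    /// use gstreamer_video as gst_video;
    /// use gst_video::prelude::*;
    ///
    /// fn set_encoder_tag(encoder: &gst_video::VideoEncoder) {
    ///     let mut tags = gst::TagList::new();
    ///     tags.get_mut()
    ///         .unwrap()
    ///         .add::<gst::tags::Encoder>(&"example-encoder", gst::TagMergeMode::Append);
    ///     encoder.merge_tags(Some(&tags), gst::TagMergeMode::Replace);
    /// }
    /// ```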
    #[doc(alias = "gst_video_encoder_merge_tags")]
    fn merge_tags(&self, tags: Option<&gst::TagList>, mode: gst::TagMergeMode) {
        unsafe {
            ffi::gst_video_encoder_merge_tags(
                self.as_ref().to_glib_none().0,
                tags.to_glib_none().0,
                mode.into_glib(),
            );
        }
    }

    /// Returns caps that express `caps` (or the sink template caps if `caps` is `None`)
    /// restricted to resolution/format/... combinations supported by downstream
    /// elements (e.g. muxers).
    /// ## `caps`
    /// initial caps
    /// ## `filter`
    /// filter caps
    ///
    /// # Returns
    ///
    /// a [`gst::Caps`][crate::gst::Caps] owned by the caller
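    ///
    /// A minimal sketch of how a subclass' caps-query handling might use this
    /// (`filter` stands for the filter caps received with the query):
    ///
    /// ```ignore
    /// // Restrict the sink template caps to what downstream (e.g. a muxer)
    /// // can actually accept.
    /// let caps = self.obj().proxy_getcaps(None, filter);
    /// ```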
    #[doc(alias = "gst_video_encoder_proxy_getcaps")]
    fn proxy_getcaps(&self, caps: Option<&gst::Caps>, filter: Option<&gst::Caps>) -> gst::Caps {
        unsafe {
            from_glib_full(ffi::gst_video_encoder_proxy_getcaps(
                self.as_ref().to_glib_none().0,
                caps.to_glib_none().0,
                filter.to_glib_none().0,
            ))
        }
    }

    /// Sets the minimum interval for requesting keyframes based on force-keyunit
    /// events. Setting this to 0 will allow handling every event; setting this to
    /// `GST_CLOCK_TIME_NONE` (`None`) causes force-keyunit events to be ignored.
    /// ## `interval`
    /// minimum interval
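    ///
    /// A minimal sketch (the one-second interval is an arbitrary example value):
    ///
    /// ```ignore
    /// use gstreamer as gst;
    /// use gstreamer_video as gst_video;
    /// use gst_video::prelude::*;
    ///
    /// fn limit_keyunit_requests(encoder: &gst_video::VideoEncoder) {
    ///     // Ignore force-keyunit events that arrive less than one second apart.
    ///     encoder.set_min_force_key_unit_interval(gst::ClockTime::from_seconds(1));
    /// }
    /// ```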
    #[cfg(feature = "v1_18")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_18")))]
    #[doc(alias = "gst_video_encoder_set_min_force_key_unit_interval")]
    #[doc(alias = "min-force-key-unit-interval")]
    fn set_min_force_key_unit_interval(&self, interval: impl Into<Option<gst::ClockTime>>) {
        unsafe {
            ffi::gst_video_encoder_set_min_force_key_unit_interval(
                self.as_ref().to_glib_none().0,
                interval.into().into_glib(),
            );
        }
    }

    /// Requests a minimal value for the PTS passed to handle_frame.
    ///
    /// For streams with reordered frames this can be used to ensure that there
    /// is enough time to accommodate the first DTS, which may be less than the first PTS.
    /// ## `min_pts`
    /// minimal PTS that will be passed to handle_frame
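    ///
    /// A minimal sketch (the one-second headroom is an arbitrary example value):
    ///
    /// ```ignore
    /// use gstreamer as gst;
    /// use gstreamer_video as gst_video;
    /// use gst_video::prelude::*;
    ///
    /// fn reserve_dts_headroom(encoder: &gst_video::VideoEncoder) {
    ///     // Shift the minimal PTS so the first DTS can be smaller than the
    ///     // first PTS without becoming negative.
    ///     encoder.set_min_pts(gst::ClockTime::from_seconds(1));
    /// }
    /// ```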
    #[doc(alias = "gst_video_encoder_set_min_pts")]
    fn set_min_pts(&self, min_pts: impl Into<Option<gst::ClockTime>>) {
        unsafe {
            ffi::gst_video_encoder_set_min_pts(
                self.as_ref().to_glib_none().0,
                min_pts.into().into_glib(),
            );
        }
    }

    /// Configures `self` to handle Quality-of-Service events from downstream.
    /// ## `enabled`
    /// the new qos value.
    #[doc(alias = "gst_video_encoder_set_qos_enabled")]
    fn set_qos_enabled(&self, enabled: bool) {
        unsafe {
            ffi::gst_video_encoder_set_qos_enabled(
                self.as_ref().to_glib_none().0,
                enabled.into_glib(),
            );
        }
    }

    /// Whether the encoder is configured to handle Quality-of-Service events
    /// from downstream (the `qos` property).
    fn is_qos(&self) -> bool {
        ObjectExt::property(self.as_ref(), "qos")
    }

    /// Sets the `qos` property, configuring whether the encoder handles
    /// Quality-of-Service events from downstream.
    fn set_qos(&self, qos: bool) {
        ObjectExt::set_property(self.as_ref(), "qos", qos)
    }

    #[cfg(feature = "v1_18")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_18")))]
    #[doc(alias = "min-force-key-unit-interval")]
    fn connect_min_force_key_unit_interval_notify<F: Fn(&Self) + Send + Sync + 'static>(
        &self,
        f: F,
    ) -> SignalHandlerId {
        unsafe extern "C" fn notify_min_force_key_unit_interval_trampoline<
            P: IsA<VideoEncoder>,
            F: Fn(&P) + Send + Sync + 'static,
        >(
            this: *mut ffi::GstVideoEncoder,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            let f: &F = &*(f as *const F);
            f(VideoEncoder::from_glib_borrow(this).unsafe_cast_ref())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::min-force-key-unit-interval\0".as_ptr() as *const _,
                Some(std::mem::transmute::<*const (), unsafe extern "C" fn()>(
                    notify_min_force_key_unit_interval_trampoline::<Self, F> as *const (),
                )),
                Box_::into_raw(f),
            )
        }
    }

    #[doc(alias = "qos")]
    fn connect_qos_notify<F: Fn(&Self) + Send + Sync + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_qos_trampoline<
            P: IsA<VideoEncoder>,
            F: Fn(&P) + Send + Sync + 'static,
        >(
            this: *mut ffi::GstVideoEncoder,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            let f: &F = &*(f as *const F);
            f(VideoEncoder::from_glib_borrow(this).unsafe_cast_ref())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::qos\0".as_ptr() as *const _,
                Some(std::mem::transmute::<*const (), unsafe extern "C" fn()>(
                    notify_qos_trampoline::<Self, F> as *const (),
                )),
                Box_::into_raw(f),
            )
        }
    }
}

impl<O: IsA<VideoEncoder>> VideoEncoderExt for O {}