gstreamer_video/auto/video_encoder.rs

// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// from gst-gir-files (https://gitlab.freedesktop.org/gstreamer/gir-files-rs.git)
// DO NOT EDIT

use crate::{ffi, VideoCodecFrame};
use glib::{
    prelude::*,
    signal::{connect_raw, SignalHandlerId},
    translate::*,
};
use std::boxed::Box as Box_;

glib::wrapper! {
    /// This base class is for video encoders turning raw video into
    /// encoded video data.
    ///
    /// GstVideoEncoder and its subclass should cooperate as follows.
    ///
    /// ## Configuration
    ///
    ///  * Initially, GstVideoEncoder calls `start` when the encoder element
    ///  is activated, which allows the subclass to perform any global setup.
    ///  * GstVideoEncoder calls `set_format` to inform the subclass of the format
    ///  of input video data that it is about to receive. The subclass should
    ///  set up for encoding and configure the base class as appropriate
    ///  (e.g. latency). While unlikely, it might be called more than once
    ///  if changing input parameters requires reconfiguration. The base class
    ///  will ensure that processing of the current configuration is finished.
    ///  * GstVideoEncoder calls `stop` at the end of all processing.
    ///
    /// ## Data processing
    ///
    ///  * The base class collects input data and metadata into a frame and hands
    ///  this to the subclass' `handle_frame`.
    ///
    ///  * If codec processing results in encoded data, the subclass should call
    ///  [`VideoEncoderExt::finish_frame()`][crate::prelude::VideoEncoderExt::finish_frame()] to have the encoded data pushed
    ///  downstream.
    ///
    ///  * If implemented, the base class calls the subclass' `pre_push` just prior to
    ///  pushing, to allow the subclass to modify some metadata on the buffer.
    ///  If `pre_push` returns GST_FLOW_OK, the buffer is pushed downstream.
    ///
    ///  * GstVideoEncoderClass will handle both srcpad and sinkpad events.
    ///  Sink events will be passed to the subclass if an `event` callback has been
    ///  provided.
    ///
    /// ## Shutdown phase
    ///
    ///  * GstVideoEncoder calls `stop` to inform the subclass that data
    ///  processing will be stopped.
    ///
    /// The subclass is responsible for providing pad template caps for the
    /// source and sink pads. The pads need to be named "sink" and "src". It should
    /// also be able to provide fixed src pad caps in `getcaps` by the time it calls
    /// [`VideoEncoderExt::finish_frame()`][crate::prelude::VideoEncoderExt::finish_frame()].
    ///
    /// Things that the subclass needs to take care of:
    ///
    ///  * Provide pad templates
    ///  * Provide source pad caps before pushing the first buffer
    ///  * Accept data in `handle_frame` and provide encoded results to
    ///  [`VideoEncoderExt::finish_frame()`][crate::prelude::VideoEncoderExt::finish_frame()].
    ///
    ///
    /// The [`qos`][struct@crate::VideoEncoder#qos] property will enable the Quality-of-Service
    /// features of the encoder, which gather statistics about the real-time
    /// performance of the downstream elements. If enabled, subclasses can
    /// use [`VideoEncoderExt::max_encode_time()`][crate::prelude::VideoEncoderExt::max_encode_time()] to check if input frames
    /// are already late and drop them right away to give the pipeline a chance
    /// to catch up.
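    ///
    /// As a rough usage sketch (not from the GStreamer documentation; `x264enc`
    /// merely stands in for whatever encoder element is available, and the usual
    /// `gst` / `gst_video` crate aliases are assumed), an application can toggle
    /// this behaviour through [`VideoEncoderExt::set_qos_enabled()`][crate::prelude::VideoEncoderExt::set_qos_enabled()]
    /// or the `qos` property:
    ///
    /// ```ignore
    /// use gst::prelude::*;
    /// use gst_video::prelude::*;
    ///
    /// let element = gst::ElementFactory::make("x264enc").build().unwrap();
    /// let encoder = element
    ///     .downcast::<gst_video::VideoEncoder>()
    ///     .expect("x264enc is a GstVideoEncoder subclass");
    /// encoder.set_qos_enabled(true);
    /// assert!(encoder.is_qos_enabled());
    /// ```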
    ///
    /// This is an Abstract Base Class; you cannot instantiate it.
    ///
    /// ## Properties
    ///
    ///
    /// #### `min-force-key-unit-interval`
    ///  Minimum interval between force-keyunit requests in nanoseconds. See
    /// [`VideoEncoderExt::set_min_force_key_unit_interval()`][crate::prelude::VideoEncoderExt::set_min_force_key_unit_interval()] for more details.
    ///
    /// Readable | Writeable
    ///
    ///
    /// #### `qos`
    ///  Readable | Writeable
    /// <details><summary><h4>Object</h4></summary>
    ///
    ///
    /// #### `name`
    ///  Readable | Writeable | Construct
    ///
    ///
    /// #### `parent`
    ///  The parent of the object. Please note that when changing the 'parent'
    /// property, we don't emit [`notify`][struct@crate::glib::Object#notify] and [`deep-notify`][struct@crate::gst::Object#deep-notify]
    /// signals due to locking issues. In some cases one can use
    /// `GstBin::element-added` or `GstBin::element-removed` signals on the parent to
    /// achieve a similar effect.
    ///
    /// Readable | Writeable
    /// </details>
    ///
    /// # Implements
    ///
    /// [`VideoEncoderExt`][trait@crate::prelude::VideoEncoderExt], [`trait@gst::prelude::ElementExt`], [`trait@gst::prelude::ObjectExt`], [`trait@glib::ObjectExt`], [`VideoEncoderExtManual`][trait@crate::prelude::VideoEncoderExtManual]
    #[doc(alias = "GstVideoEncoder")]
    pub struct VideoEncoder(Object<ffi::GstVideoEncoder, ffi::GstVideoEncoderClass>) @extends gst::Element, gst::Object;

    match fn {
        type_ => || ffi::gst_video_encoder_get_type(),
    }
}

impl VideoEncoder {
    pub const NONE: Option<&'static VideoEncoder> = None;
}

unsafe impl Send for VideoEncoder {}
unsafe impl Sync for VideoEncoder {}

/// Trait containing all [`struct@VideoEncoder`] methods.
///
/// # Implementors
///
/// [`VideoEncoder`][struct@crate::VideoEncoder]
pub trait VideoEncoderExt: IsA<VideoEncoder> + 'static {
    /// Helper function that allocates a buffer to hold an encoded video frame
    /// for `self`'s current [`VideoCodecState`][crate::VideoCodecState].
    /// ## `size`
    /// size of the buffer
    ///
    /// # Returns
    ///
    /// allocated buffer
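    ///
    /// A minimal sketch (not an official example), assuming `encoder` is a
    /// `VideoEncoder` and `encoded` is a byte slice produced by the codec:
    ///
    /// ```ignore
    /// let mut buffer = encoder.allocate_output_buffer(encoded.len());
    /// buffer
    ///     .get_mut()
    ///     .unwrap()
    ///     .copy_from_slice(0, encoded)
    ///     .unwrap();
    /// ```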
    #[doc(alias = "gst_video_encoder_allocate_output_buffer")]
    fn allocate_output_buffer(&self, size: usize) -> gst::Buffer {
        unsafe {
            from_glib_full(ffi::gst_video_encoder_allocate_output_buffer(
                self.as_ref().to_glib_none().0,
                size,
            ))
        }
    }

    /// Removes `frame` from the list of pending frames, releases it
    /// and posts a QoS message with the frame's details on the bus.
    /// Similar to calling [`finish_frame()`][Self::finish_frame()] without a buffer
    /// attached to `frame`, but this function additionally stores events
    /// from `frame` as pending, to be pushed out alongside the next frame
    /// submitted via [`finish_frame()`][Self::finish_frame()].
    /// ## `frame`
    /// a [`VideoCodecFrame`][crate::VideoCodecFrame]
    #[cfg(feature = "v1_26")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
    #[doc(alias = "gst_video_encoder_drop_frame")]
    fn drop_frame(&self, frame: VideoCodecFrame) {
        unsafe {
            ffi::gst_video_encoder_drop_frame(
                self.as_ref().to_glib_none().0,
                frame.into_glib_ptr(),
            );
        }
    }

    /// `frame` must have a valid encoded data buffer, whose metadata fields
    /// are then appropriately set according to frame data, or no buffer at
    /// all if the frame should be dropped.
    /// The buffer is subsequently pushed downstream or provided to `pre_push`.
    /// In any case, the frame is considered finished and released.
    ///
    /// If `frame` does not have a buffer attached, it will be dropped, and
    /// a QoS message will be posted on the bus. Events from `frame` will be
    /// pushed out immediately.
    ///
    /// After calling this function, the output buffer of the frame is to be
    /// considered read-only. This function will also change the metadata
    /// of the buffer.
    /// ## `frame`
    /// an encoded [`VideoCodecFrame`][crate::VideoCodecFrame]
    ///
    /// # Returns
    ///
    /// a [`gst::FlowReturn`][crate::gst::FlowReturn] resulting from sending data downstream
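    ///
    /// A minimal sketch (not an official example) of the end of a subclass'
    /// `handle_frame`, assuming `output` is an encoded `gst::Buffer` and that
    /// `VideoCodecFrame::set_output_buffer()` from the manual bindings is available:
    ///
    /// ```ignore
    /// // `frame` is the mutable `VideoCodecFrame` handed to `handle_frame`.
    /// frame.set_output_buffer(output);
    /// encoder.finish_frame(frame)
    /// ```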
    #[doc(alias = "gst_video_encoder_finish_frame")]
    fn finish_frame(&self, frame: VideoCodecFrame) -> Result<gst::FlowSuccess, gst::FlowError> {
        unsafe {
            try_from_glib(ffi::gst_video_encoder_finish_frame(
                self.as_ref().to_glib_none().0,
                frame.into_glib_ptr(),
            ))
        }
    }

    /// Determines the maximum possible encoding time for `frame` that will
    /// allow it to be encoded and arrive in time (as determined by QoS events).
    /// In particular, a negative result means encoding in time is no longer possible
    /// and should therefore be done as quickly as possible, or be skipped altogether.
    ///
    /// If no QoS events have been received from downstream, or if
    /// [`qos`][struct@crate::VideoEncoder#qos] is disabled, this function returns `G_MAXINT64`.
    /// ## `frame`
    /// a [`VideoCodecFrame`][crate::VideoCodecFrame]
    ///
    /// # Returns
    ///
    /// max encoding time.
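    ///
    /// A minimal sketch (not an official example) of a QoS check inside a
    /// subclass' `handle_frame`:
    ///
    /// ```ignore
    /// if encoder.is_qos_enabled() && encoder.max_encode_time(&frame) < 0 {
    ///     // The frame is already late; a subclass may choose to skip
    ///     // encoding it instead of spending more time on it.
    /// }
    /// ```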
    #[doc(alias = "gst_video_encoder_get_max_encode_time")]
    #[doc(alias = "get_max_encode_time")]
    fn max_encode_time(&self, frame: &VideoCodecFrame) -> gst::ClockTimeDiff {
        unsafe {
            ffi::gst_video_encoder_get_max_encode_time(
                self.as_ref().to_glib_none().0,
                frame.to_glib_none().0,
            )
        }
    }

    /// Returns the minimum force-keyunit interval, see [`set_min_force_key_unit_interval()`][Self::set_min_force_key_unit_interval()]
    /// for more details.
    ///
    /// # Returns
    ///
    /// the minimum force-keyunit interval
    #[cfg(feature = "v1_18")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_18")))]
    #[doc(alias = "gst_video_encoder_get_min_force_key_unit_interval")]
    #[doc(alias = "get_min_force_key_unit_interval")]
    #[doc(alias = "min-force-key-unit-interval")]
    fn min_force_key_unit_interval(&self) -> Option<gst::ClockTime> {
        unsafe {
            from_glib(ffi::gst_video_encoder_get_min_force_key_unit_interval(
                self.as_ref().to_glib_none().0,
            ))
        }
    }

    /// Checks if `self` is currently configured to handle Quality-of-Service
    /// events from downstream.
    ///
    /// # Returns
    ///
    /// [`true`] if the encoder is configured to perform Quality-of-Service.
    #[doc(alias = "gst_video_encoder_is_qos_enabled")]
    fn is_qos_enabled(&self) -> bool {
        unsafe {
            from_glib(ffi::gst_video_encoder_is_qos_enabled(
                self.as_ref().to_glib_none().0,
            ))
        }
    }

    /// Sets the video encoder tags and how they should be merged with any
    /// upstream stream tags. This will override any tags previously-set
    /// with [`merge_tags()`][Self::merge_tags()].
    ///
    /// Note that this is provided for convenience, and the subclass is
    /// not required to use this and can still do tag handling on its own.
    ///
    /// MT safe.
    /// ## `tags`
    /// a [`gst::TagList`][crate::gst::TagList] to merge, or NULL to unset
    ///  previously-set tags
    /// ## `mode`
    /// the [`gst::TagMergeMode`][crate::gst::TagMergeMode] to use, usually [`gst::TagMergeMode::Replace`][crate::gst::TagMergeMode::Replace]
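    ///
    /// A minimal sketch (not an official example) of setting an encoder tag,
    /// assuming `encoder` is a `VideoEncoder`:
    ///
    /// ```ignore
    /// let mut tags = gst::TagList::new();
    /// tags.get_mut()
    ///     .unwrap()
    ///     .add::<gst::tags::Encoder>(&"my-encoder", gst::TagMergeMode::Append);
    /// encoder.merge_tags(Some(&tags), gst::TagMergeMode::Replace);
    /// ```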
    #[doc(alias = "gst_video_encoder_merge_tags")]
    fn merge_tags(&self, tags: Option<&gst::TagList>, mode: gst::TagMergeMode) {
        unsafe {
            ffi::gst_video_encoder_merge_tags(
                self.as_ref().to_glib_none().0,
                tags.to_glib_none().0,
                mode.into_glib(),
            );
        }
    }

    /// Returns caps that express `caps` (or sink template caps if `caps` == NULL)
    /// restricted to resolution/format/... combinations supported by downstream
    /// elements (e.g. muxers).
    /// ## `caps`
    /// initial caps
    /// ## `filter`
    /// filter caps
    ///
    /// # Returns
    ///
    /// a [`gst::Caps`][crate::gst::Caps] owned by caller
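    ///
    /// A minimal sketch (not an official example), as it might be used from a
    /// subclass' `getcaps` implementation, where `filter` is the optional
    /// `gst::Caps` filter from the caps query:
    ///
    /// ```ignore
    /// // Restrict the sink template caps to what downstream can actually handle.
    /// let caps = encoder.proxy_getcaps(None, filter);
    /// ```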
    #[doc(alias = "gst_video_encoder_proxy_getcaps")]
    fn proxy_getcaps(&self, caps: Option<&gst::Caps>, filter: Option<&gst::Caps>) -> gst::Caps {
        unsafe {
            from_glib_full(ffi::gst_video_encoder_proxy_getcaps(
                self.as_ref().to_glib_none().0,
                caps.to_glib_none().0,
                filter.to_glib_none().0,
            ))
        }
    }

    /// Removes `frame` from the list of pending frames and releases it, similar
    /// to calling [`finish_frame()`][Self::finish_frame()] without a buffer attached
    /// to the frame, but does not post a QoS message or do any additional
    /// processing. Events from `frame` are moved to the pending events list.
    /// ## `frame`
    /// a [`VideoCodecFrame`][crate::VideoCodecFrame]
    #[cfg(feature = "v1_26")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
    #[doc(alias = "gst_video_encoder_release_frame")]
    fn release_frame(&self, frame: VideoCodecFrame) {
        unsafe {
            ffi::gst_video_encoder_release_frame(
                self.as_ref().to_glib_none().0,
                frame.into_glib_ptr(),
            );
        }
    }

    /// Sets the minimum interval for requesting keyframes based on force-keyunit
    /// events. Setting this to 0 will allow handling every event, while setting it to
    /// `GST_CLOCK_TIME_NONE` causes force-keyunit events to be ignored.
    /// ## `interval`
    /// minimum interval
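    ///
    /// A minimal sketch (not an official example), assuming `encoder` is a
    /// `VideoEncoder`:
    ///
    /// ```ignore
    /// // Honour force-keyunit requests at most once every two seconds.
    /// encoder.set_min_force_key_unit_interval(gst::ClockTime::from_seconds(2));
    /// // Or ignore force-keyunit events entirely.
    /// encoder.set_min_force_key_unit_interval(gst::ClockTime::NONE);
    /// ```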
    #[cfg(feature = "v1_18")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_18")))]
    #[doc(alias = "gst_video_encoder_set_min_force_key_unit_interval")]
    #[doc(alias = "min-force-key-unit-interval")]
    fn set_min_force_key_unit_interval(&self, interval: impl Into<Option<gst::ClockTime>>) {
        unsafe {
            ffi::gst_video_encoder_set_min_force_key_unit_interval(
                self.as_ref().to_glib_none().0,
                interval.into().into_glib(),
            );
        }
    }

    /// Requests a minimal value for the PTS passed to handle_frame.
    ///
    /// For streams with reordered frames this can be used to ensure that there
    /// is enough time to accommodate the first DTS, which may be less than the first PTS.
    /// ## `min_pts`
    /// minimal PTS that will be passed to handle_frame
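    ///
    /// A minimal sketch (not an official example), assuming `encoder` is a
    /// `VideoEncoder` producing B-frames and therefore reordered output:
    ///
    /// ```ignore
    /// // Shift PTS values forward so the first DTS does not become negative.
    /// encoder.set_min_pts(gst::ClockTime::from_seconds(1));
    /// ```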
    #[doc(alias = "gst_video_encoder_set_min_pts")]
    fn set_min_pts(&self, min_pts: impl Into<Option<gst::ClockTime>>) {
        unsafe {
            ffi::gst_video_encoder_set_min_pts(
                self.as_ref().to_glib_none().0,
                min_pts.into().into_glib(),
            );
        }
    }

    /// Configures `self` to handle Quality-of-Service events from downstream.
    /// ## `enabled`
    /// the new qos value.
    #[doc(alias = "gst_video_encoder_set_qos_enabled")]
    fn set_qos_enabled(&self, enabled: bool) {
        unsafe {
            ffi::gst_video_encoder_set_qos_enabled(
                self.as_ref().to_glib_none().0,
                enabled.into_glib(),
            );
        }
    }

    /// Returns the current value of the [`qos`][struct@crate::VideoEncoder#qos] property.
    fn is_qos(&self) -> bool {
        ObjectExt::property(self.as_ref(), "qos")
    }

    /// Sets the [`qos`][struct@crate::VideoEncoder#qos] property.
    fn set_qos(&self, qos: bool) {
        ObjectExt::set_property(self.as_ref(), "qos", qos)
    }

    #[cfg(feature = "v1_18")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_18")))]
    #[doc(alias = "min-force-key-unit-interval")]
    fn connect_min_force_key_unit_interval_notify<F: Fn(&Self) + Send + Sync + 'static>(
        &self,
        f: F,
    ) -> SignalHandlerId {
        unsafe extern "C" fn notify_min_force_key_unit_interval_trampoline<
            P: IsA<VideoEncoder>,
            F: Fn(&P) + Send + Sync + 'static,
        >(
            this: *mut ffi::GstVideoEncoder,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            let f: &F = &*(f as *const F);
            f(VideoEncoder::from_glib_borrow(this).unsafe_cast_ref())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                c"notify::min-force-key-unit-interval".as_ptr() as *const _,
                Some(std::mem::transmute::<*const (), unsafe extern "C" fn()>(
                    notify_min_force_key_unit_interval_trampoline::<Self, F> as *const (),
                )),
                Box_::into_raw(f),
            )
        }
    }

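    /// Connects a handler that is run whenever the [`qos`][struct@crate::VideoEncoder#qos]
    /// property changes. A minimal sketch (not an official example), assuming
    /// `encoder` is a `VideoEncoder`:
    ///
    /// ```ignore
    /// encoder.connect_qos_notify(|enc| {
    ///     println!("qos is now {}", enc.is_qos());
    /// });
    /// ```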
    #[doc(alias = "qos")]
    fn connect_qos_notify<F: Fn(&Self) + Send + Sync + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_qos_trampoline<
            P: IsA<VideoEncoder>,
            F: Fn(&P) + Send + Sync + 'static,
        >(
            this: *mut ffi::GstVideoEncoder,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            let f: &F = &*(f as *const F);
            f(VideoEncoder::from_glib_borrow(this).unsafe_cast_ref())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                c"notify::qos".as_ptr() as *const _,
                Some(std::mem::transmute::<*const (), unsafe extern "C" fn()>(
                    notify_qos_trampoline::<Self, F> as *const (),
                )),
                Box_::into_raw(f),
            )
        }
    }
}

impl<O: IsA<VideoEncoder>> VideoEncoderExt for O {}