gstreamer_video/auto/video_encoder.rs
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// from gst-gir-files (https://gitlab.freedesktop.org/gstreamer/gir-files-rs.git)
// DO NOT EDIT

use crate::{ffi, VideoCodecFrame};
use glib::{
    prelude::*,
    signal::{connect_raw, SignalHandlerId},
    translate::*,
};
use std::boxed::Box as Box_;

glib::wrapper! {
    /// This base class is for video encoders turning raw video into
    /// encoded video data.
    ///
    /// GstVideoEncoder and its subclass should cooperate as follows.
    ///
    /// ## Configuration
    ///
    /// * Initially, GstVideoEncoder calls `start` when the encoder element
    /// is activated, which allows the subclass to perform any global setup.
    /// * GstVideoEncoder calls `set_format` to inform the subclass of the format
    /// of the input video data that it is about to receive. The subclass should
    /// set up for encoding and configure the base class as appropriate
    /// (e.g. latency). While unlikely, it might be called more than once
    /// if changing input parameters requires reconfiguration. The base class
    /// will ensure that processing of the current configuration is finished.
    /// * GstVideoEncoder calls `stop` at the end of all processing.
    ///
    /// ## Data processing
    ///
    /// * The base class collects input data and metadata into a frame and
    /// hands it to the subclass' `handle_frame`.
    ///
    /// * If codec processing results in encoded data, the subclass should call
    /// [`VideoEncoderExt::finish_frame()`][crate::prelude::VideoEncoderExt::finish_frame()] to have the encoded data pushed
    /// downstream.
    ///
    /// * If implemented, the base class calls the subclass' `pre_push` just
    /// prior to pushing, to allow the subclass to modify metadata on the buffer.
    /// If it returns GST_FLOW_OK, the buffer is pushed downstream.
    ///
    /// * GstVideoEncoderClass will handle both srcpad and sinkpad events.
    /// Sink events will be passed to the subclass if the `event` callback has
    /// been provided.
    ///
    /// ## Shutdown phase
    ///
    /// * The GstVideoEncoder class calls `stop` to inform the subclass that
    /// data processing will be stopped.
    ///
    /// The subclass is responsible for providing pad template caps for the
    /// source and sink pads. The pads need to be named "sink" and "src". It should
    /// also be able to provide fixed src pad caps in `getcaps` by the time it calls
    /// [`VideoEncoderExt::finish_frame()`][crate::prelude::VideoEncoderExt::finish_frame()].
    ///
    /// Things that the subclass needs to take care of:
    ///
    /// * Provide pad templates
    /// * Provide source pad caps before pushing the first buffer
    /// * Accept data in `handle_frame` and provide encoded results to
    /// [`VideoEncoderExt::finish_frame()`][crate::prelude::VideoEncoderExt::finish_frame()].
    ///
    ///
    /// The [`qos`][struct@crate::VideoEncoder#qos] property will enable the Quality-of-Service
    /// features of the encoder, which gather statistics about the real-time
    /// performance of the downstream elements. If enabled, subclasses can
    /// use [`VideoEncoderExt::max_encode_time()`][crate::prelude::VideoEncoderExt::max_encode_time()] to check if input frames
    /// are already late and drop them right away to give the pipeline a
    /// chance to catch up.
    ///
    /// This is an Abstract Base Class; you cannot instantiate it.
    ///
    /// ## Properties
    ///
    ///
    /// #### `min-force-key-unit-interval`
    /// Minimum interval between force-keyunit requests in nanoseconds. See
    /// [`VideoEncoderExt::set_min_force_key_unit_interval()`][crate::prelude::VideoEncoderExt::set_min_force_key_unit_interval()] for more details.
    ///
    /// Readable | Writeable
    ///
    ///
    /// #### `qos`
    /// Readable | Writeable
    /// <details><summary><h4>Object</h4></summary>
    ///
    ///
    /// #### `name`
    /// Readable | Writeable | Construct
    ///
    ///
    /// #### `parent`
    /// The parent of the object. Please note that when changing the 'parent'
    /// property, we don't emit the [`notify`][struct@crate::glib::Object#notify] and [`deep-notify`][struct@crate::gst::Object#deep-notify]
    /// signals due to locking issues. In some cases one can use the
    /// `GstBin::element-added` or `GstBin::element-removed` signals on the parent to
    /// achieve a similar effect.
    ///
    /// Readable | Writeable
    /// </details>
    ///
    /// # Implements
    ///
    /// [`VideoEncoderExt`][trait@crate::prelude::VideoEncoderExt], [`trait@gst::prelude::ElementExt`], [`trait@gst::prelude::ObjectExt`], [`trait@glib::ObjectExt`], [`VideoEncoderExtManual`][trait@crate::prelude::VideoEncoderExtManual]
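    ///
    /// # Example
    ///
    /// A minimal configuration sketch: it assumes an encoder element such as
    /// `x264enc` is installed and that the crate is imported under its usual
    /// `gst_video` alias, but any element deriving from GstVideoEncoder can be
    /// used instead.
    ///
    /// ```no_run
    /// use gstreamer_video as gst_video;
    /// use gst_video::{gst, prelude::*};
    ///
    /// gst::init().unwrap();
    ///
    /// // Any element deriving from GstVideoEncoder can be cast to `VideoEncoder`.
    /// let element = gst::ElementFactory::make("x264enc").build().unwrap();
    /// let encoder = element
    ///     .dynamic_cast::<gst_video::VideoEncoder>()
    ///     .expect("x264enc derives from GstVideoEncoder");
    ///
    /// // Let the base class react to QoS events from downstream.
    /// encoder.set_qos_enabled(true);
    /// assert!(encoder.is_qos_enabled());
    /// ```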
    #[doc(alias = "GstVideoEncoder")]
    pub struct VideoEncoder(Object<ffi::GstVideoEncoder, ffi::GstVideoEncoderClass>) @extends gst::Element, gst::Object;

    match fn {
        type_ => || ffi::gst_video_encoder_get_type(),
    }
}

impl VideoEncoder {
    pub const NONE: Option<&'static VideoEncoder> = None;
}

unsafe impl Send for VideoEncoder {}
unsafe impl Sync for VideoEncoder {}

mod sealed {
    pub trait Sealed {}
    impl<T: super::IsA<super::VideoEncoder>> Sealed for T {}
}

/// Trait containing all [`struct@VideoEncoder`] methods.
///
/// # Implementors
///
/// [`VideoEncoder`][struct@crate::VideoEncoder]
pub trait VideoEncoderExt: IsA<VideoEncoder> + sealed::Sealed + 'static {
    /// Helper function that allocates a buffer to hold an encoded video frame
    /// for `self`'s current [`VideoCodecState`][crate::VideoCodecState].
    /// ## `size`
    /// size of the buffer
    ///
    /// # Returns
    ///
    /// allocated buffer
    #[doc(alias = "gst_video_encoder_allocate_output_buffer")]
    fn allocate_output_buffer(&self, size: usize) -> gst::Buffer {
        unsafe {
            from_glib_full(ffi::gst_video_encoder_allocate_output_buffer(
                self.as_ref().to_glib_none().0,
                size,
            ))
        }
    }

    /// Removes `frame` from the list of pending frames, releases it
    /// and posts a QoS message with the frame's details on the bus.
    /// Similar to calling [`finish_frame()`][Self::finish_frame()] without a buffer
    /// attached to `frame`, but this function additionally stores events
    /// from `frame` as pending, to be pushed out alongside the next frame
    /// submitted via [`finish_frame()`][Self::finish_frame()].
    /// ## `frame`
    /// a [`VideoCodecFrame`][crate::VideoCodecFrame]
    #[cfg(feature = "v1_26")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
    #[doc(alias = "gst_video_encoder_drop_frame")]
    fn drop_frame(&self, frame: VideoCodecFrame) {
        unsafe {
            ffi::gst_video_encoder_drop_frame(
                self.as_ref().to_glib_none().0,
                frame.into_glib_ptr(),
            );
        }
    }

    /// `frame` must have a valid encoded data buffer, whose metadata fields
    /// are then set appropriately according to the frame data, or no buffer at
    /// all if the frame should be dropped.
    /// The buffer is subsequently pushed downstream or provided to `pre_push`.
    /// In any case, the frame is considered finished and released.
    ///
    /// If `frame` does not have a buffer attached, it will be dropped, and
    /// a QoS message will be posted on the bus. Events from `frame` will be
    /// pushed out immediately.
    ///
    /// After calling this function the output buffer of the frame is to be
    /// considered read-only. This function will also change the metadata
    /// of the buffer.
    /// ## `frame`
    /// an encoded [`VideoCodecFrame`][crate::VideoCodecFrame]
    ///
    /// # Returns
    ///
    /// a [`gst::FlowReturn`][crate::gst::FlowReturn] resulting from sending data downstream
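    ///
    /// # Example
    ///
    /// A minimal sketch of how a subclass might hand encoded bytes back to the
    /// base class. The helper name `push_encoded` and the subclass context are
    /// illustrative only, and it assumes `VideoCodecFrame::set_output_buffer()`
    /// from this crate's manual bindings.
    ///
    /// ```no_run
    /// use gstreamer_video as gst_video;
    /// use gst_video::{gst, prelude::*, VideoCodecFrame, VideoEncoder};
    ///
    /// // Hypothetical helper called from a subclass's `handle_frame()`:
    /// // attach the encoded bytes to `frame` and push it downstream.
    /// fn push_encoded(
    ///     encoder: &VideoEncoder,
    ///     mut frame: VideoCodecFrame,
    ///     encoded: &[u8],
    /// ) -> Result<gst::FlowSuccess, gst::FlowError> {
    ///     let mut buffer = encoder.allocate_output_buffer(encoded.len());
    ///     buffer
    ///         .get_mut()
    ///         .expect("freshly allocated buffer is writable")
    ///         .copy_from_slice(0, encoded)
    ///         .expect("buffer was allocated with exactly `encoded.len()` bytes");
    ///     frame.set_output_buffer(buffer);
    ///     // From here on the frame is finished and its buffer is read-only.
    ///     encoder.finish_frame(frame)
    /// }
    /// ```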
    #[doc(alias = "gst_video_encoder_finish_frame")]
    fn finish_frame(&self, frame: VideoCodecFrame) -> Result<gst::FlowSuccess, gst::FlowError> {
        unsafe {
            try_from_glib(ffi::gst_video_encoder_finish_frame(
                self.as_ref().to_glib_none().0,
                frame.into_glib_ptr(),
            ))
        }
    }

    /// Determines the maximum possible encoding time for `frame` that will
    /// still allow it to be encoded and arrive in time (as determined by QoS
    /// events). In particular, a negative result means that encoding in time
    /// is no longer possible and should therefore happen as soon as possible,
    /// possibly by skipping the frame altogether.
    ///
    /// If no QoS events have been received from downstream, or if
    /// [`qos`][struct@crate::VideoEncoder#qos] is disabled, this function returns `G_MAXINT64`.
    /// ## `frame`
    /// a [`VideoCodecFrame`][crate::VideoCodecFrame]
    ///
    /// # Returns
    ///
    /// max encoding time.
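    ///
    /// # Example
    ///
    /// A sketch of a check a subclass might perform at the start of its
    /// `handle_frame()`, treating the returned `gst::ClockTimeDiff` as
    /// nanoseconds. The helper name `frame_is_late` is illustrative only.
    ///
    /// ```no_run
    /// use gstreamer_video as gst_video;
    /// use gst_video::{prelude::*, VideoCodecFrame, VideoEncoder};
    ///
    /// // A negative deadline means the frame is already late, so it is better
    /// // to drop it than to spend time encoding it.
    /// fn frame_is_late(encoder: &VideoEncoder, frame: &VideoCodecFrame) -> bool {
    ///     encoder.is_qos_enabled() && encoder.max_encode_time(frame) < 0
    /// }
    /// ```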
    #[doc(alias = "gst_video_encoder_get_max_encode_time")]
    #[doc(alias = "get_max_encode_time")]
    fn max_encode_time(&self, frame: &VideoCodecFrame) -> gst::ClockTimeDiff {
        unsafe {
            ffi::gst_video_encoder_get_max_encode_time(
                self.as_ref().to_glib_none().0,
                frame.to_glib_none().0,
            )
        }
    }

    /// Returns the minimum force-keyunit interval, see [`set_min_force_key_unit_interval()`][Self::set_min_force_key_unit_interval()]
    /// for more details.
    ///
    /// # Returns
    ///
    /// the minimum force-keyunit interval
    #[cfg(feature = "v1_18")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_18")))]
    #[doc(alias = "gst_video_encoder_get_min_force_key_unit_interval")]
    #[doc(alias = "get_min_force_key_unit_interval")]
    #[doc(alias = "min-force-key-unit-interval")]
    fn min_force_key_unit_interval(&self) -> Option<gst::ClockTime> {
        unsafe {
            from_glib(ffi::gst_video_encoder_get_min_force_key_unit_interval(
                self.as_ref().to_glib_none().0,
            ))
        }
    }

    /// Checks if `self` is currently configured to handle Quality-of-Service
    /// events from downstream.
    ///
    /// # Returns
    ///
    /// [`true`] if the encoder is configured to perform Quality-of-Service.
    #[doc(alias = "gst_video_encoder_is_qos_enabled")]
    fn is_qos_enabled(&self) -> bool {
        unsafe {
            from_glib(ffi::gst_video_encoder_is_qos_enabled(
                self.as_ref().to_glib_none().0,
            ))
        }
    }

    /// Sets the video encoder tags and how they should be merged with any
    /// upstream stream tags. This will override any tags previously set
    /// with [`merge_tags()`][Self::merge_tags()].
    ///
    /// Note that this is provided for convenience, and the subclass is
    /// not required to use this and can still do tag handling on its own.
    ///
    /// MT safe.
    /// ## `tags`
    /// a [`gst::TagList`][crate::gst::TagList] to merge, or `None` to unset
    /// previously-set tags
    /// ## `mode`
    /// the [`gst::TagMergeMode`][crate::gst::TagMergeMode] to use, usually [`gst::TagMergeMode::Replace`][crate::gst::TagMergeMode::Replace]
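    ///
    /// # Example
    ///
    /// A sketch of merging an encoder tag; the helper name `set_encoder_tag`
    /// and the tag value are illustrative only.
    ///
    /// ```no_run
    /// use gstreamer_video as gst_video;
    /// use gst_video::{gst, prelude::*, VideoEncoder};
    ///
    /// fn set_encoder_tag(encoder: &VideoEncoder) {
    ///     // Build a tag list and let the base class merge it with any
    ///     // upstream stream tags.
    ///     let mut tags = gst::TagList::new();
    ///     tags.get_mut()
    ///         .unwrap()
    ///         .add::<gst::tags::Encoder>(&"my-encoder", gst::TagMergeMode::Append);
    ///     encoder.merge_tags(Some(&tags), gst::TagMergeMode::Replace);
    /// }
    /// ```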
    #[doc(alias = "gst_video_encoder_merge_tags")]
    fn merge_tags(&self, tags: Option<&gst::TagList>, mode: gst::TagMergeMode) {
        unsafe {
            ffi::gst_video_encoder_merge_tags(
                self.as_ref().to_glib_none().0,
                tags.to_glib_none().0,
                mode.into_glib(),
            );
        }
    }

    /// Returns caps that express `caps` (or the sink template caps if `caps`
    /// is `None`) restricted to resolution/format/... combinations supported
    /// by downstream elements (e.g. muxers).
    /// ## `caps`
    /// initial caps
    /// ## `filter`
    /// filter caps
    ///
    /// # Returns
    ///
    /// a [`gst::Caps`][crate::gst::Caps] owned by caller
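    ///
    /// # Example
    ///
    /// A sketch of how a subclass's `getcaps` handler might use this helper;
    /// the function name `sink_getcaps` is illustrative only.
    ///
    /// ```no_run
    /// use gstreamer_video as gst_video;
    /// use gst_video::{gst, prelude::*, VideoEncoder};
    ///
    /// fn sink_getcaps(encoder: &VideoEncoder, filter: Option<&gst::Caps>) -> gst::Caps {
    ///     // `None` starts from the sink pad template caps and restricts them
    ///     // to what downstream elements can accept.
    ///     encoder.proxy_getcaps(None, filter)
    /// }
    /// ```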
    #[doc(alias = "gst_video_encoder_proxy_getcaps")]
    fn proxy_getcaps(&self, caps: Option<&gst::Caps>, filter: Option<&gst::Caps>) -> gst::Caps {
        unsafe {
            from_glib_full(ffi::gst_video_encoder_proxy_getcaps(
                self.as_ref().to_glib_none().0,
                caps.to_glib_none().0,
                filter.to_glib_none().0,
            ))
        }
    }

    /// Removes `frame` from the list of pending frames and releases it, similar
    /// to calling [`finish_frame()`][Self::finish_frame()] without a buffer attached
    /// to the frame, but does not post a QoS message or do any additional
    /// processing. Events from `frame` are moved to the pending events list.
    /// ## `frame`
    /// a [`VideoCodecFrame`][crate::VideoCodecFrame]
    #[cfg(feature = "v1_26")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
    #[doc(alias = "gst_video_encoder_release_frame")]
    fn release_frame(&self, frame: VideoCodecFrame) {
        unsafe {
            ffi::gst_video_encoder_release_frame(
                self.as_ref().to_glib_none().0,
                frame.into_glib_ptr(),
            );
        }
    }

    /// Sets the minimum interval for requesting keyframes based on force-keyunit
    /// events. Setting this to 0 will allow every event to be handled; setting
    /// this to `GST_CLOCK_TIME_NONE` causes force-keyunit events to be ignored.
    /// ## `interval`
    /// minimum interval
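    ///
    /// # Example
    ///
    /// A sketch of rate-limiting forced keyframes (requires the `v1_18`
    /// feature); the helper name `limit_forced_keyframes` and the two-second
    /// value are illustrative only.
    ///
    /// ```no_run
    /// use gstreamer_video as gst_video;
    /// use gst_video::{gst, prelude::*, VideoEncoder};
    ///
    /// fn limit_forced_keyframes(encoder: &VideoEncoder) {
    ///     // Handle at most one force-keyunit request every two seconds.
    ///     // `gst::ClockTime::ZERO` would handle every request, while
    ///     // `gst::ClockTime::NONE` would ignore force-keyunit events entirely.
    ///     encoder.set_min_force_key_unit_interval(gst::ClockTime::from_seconds(2));
    /// }
    /// ```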
    #[cfg(feature = "v1_18")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_18")))]
    #[doc(alias = "gst_video_encoder_set_min_force_key_unit_interval")]
    #[doc(alias = "min-force-key-unit-interval")]
    fn set_min_force_key_unit_interval(&self, interval: impl Into<Option<gst::ClockTime>>) {
        unsafe {
            ffi::gst_video_encoder_set_min_force_key_unit_interval(
                self.as_ref().to_glib_none().0,
                interval.into().into_glib(),
            );
        }
    }

    /// Requests a minimal value for the PTS passed to `handle_frame`.
    ///
    /// For streams with reordered frames this can be used to ensure that there
    /// is enough time to accommodate the first DTS, which may be less than the
    /// first PTS.
    /// ## `min_pts`
    /// minimal PTS that will be passed to `handle_frame`
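    ///
    /// # Example
    ///
    /// A sketch of a call a subclass might make from its `set_format`; the
    /// helper name `reserve_dts_headroom` and the one-second value are
    /// illustrative only.
    ///
    /// ```no_run
    /// use gstreamer_video as gst_video;
    /// use gst_video::{gst, prelude::*, VideoEncoder};
    ///
    /// fn reserve_dts_headroom(encoder: &VideoEncoder) {
    ///     // Leave one second of headroom so that the first DTS can be
    ///     // smaller than the first PTS when frames are reordered
    ///     // (e.g. with B-frames).
    ///     encoder.set_min_pts(gst::ClockTime::from_seconds(1));
    /// }
    /// ```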
    #[doc(alias = "gst_video_encoder_set_min_pts")]
    fn set_min_pts(&self, min_pts: impl Into<Option<gst::ClockTime>>) {
        unsafe {
            ffi::gst_video_encoder_set_min_pts(
                self.as_ref().to_glib_none().0,
                min_pts.into().into_glib(),
            );
        }
    }

    /// Configures `self` to handle Quality-of-Service events from downstream.
    /// ## `enabled`
    /// the new qos value.
    #[doc(alias = "gst_video_encoder_set_qos_enabled")]
    fn set_qos_enabled(&self, enabled: bool) {
        unsafe {
            ffi::gst_video_encoder_set_qos_enabled(
                self.as_ref().to_glib_none().0,
                enabled.into_glib(),
            );
        }
    }

    fn is_qos(&self) -> bool {
        ObjectExt::property(self.as_ref(), "qos")
    }

    fn set_qos(&self, qos: bool) {
        ObjectExt::set_property(self.as_ref(), "qos", qos)
    }

    #[cfg(feature = "v1_18")]
    #[cfg_attr(docsrs, doc(cfg(feature = "v1_18")))]
    #[doc(alias = "min-force-key-unit-interval")]
    fn connect_min_force_key_unit_interval_notify<F: Fn(&Self) + Send + Sync + 'static>(
        &self,
        f: F,
    ) -> SignalHandlerId {
        unsafe extern "C" fn notify_min_force_key_unit_interval_trampoline<
            P: IsA<VideoEncoder>,
            F: Fn(&P) + Send + Sync + 'static,
        >(
            this: *mut ffi::GstVideoEncoder,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            let f: &F = &*(f as *const F);
            f(VideoEncoder::from_glib_borrow(this).unsafe_cast_ref())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::min-force-key-unit-interval\0".as_ptr() as *const _,
                Some(std::mem::transmute::<*const (), unsafe extern "C" fn()>(
                    notify_min_force_key_unit_interval_trampoline::<Self, F> as *const (),
                )),
                Box_::into_raw(f),
            )
        }
    }

    #[doc(alias = "qos")]
    fn connect_qos_notify<F: Fn(&Self) + Send + Sync + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_qos_trampoline<
            P: IsA<VideoEncoder>,
            F: Fn(&P) + Send + Sync + 'static,
        >(
            this: *mut ffi::GstVideoEncoder,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            let f: &F = &*(f as *const F);
            f(VideoEncoder::from_glib_borrow(this).unsafe_cast_ref())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::qos\0".as_ptr() as *const _,
                Some(std::mem::transmute::<*const (), unsafe extern "C" fn()>(
                    notify_qos_trampoline::<Self, F> as *const (),
                )),
                Box_::into_raw(f),
            )
        }
    }
}

impl<O: IsA<VideoEncoder>> VideoEncoderExt for O {}