Skip to main content

gstreamer_video/
functions.rs

1// Take a look at the license at the top of the repository in the LICENSE file.
2
3use std::{mem, ptr};
4
5use crate::ffi;
6use glib::translate::{IntoGlib, ToGlibPtr, from_glib, from_glib_full};
7
8#[doc(alias = "gst_video_convert_sample")]
9pub fn convert_sample(
10    sample: &gst::Sample,
11    caps: &gst::Caps,
12    timeout: Option<gst::ClockTime>,
13) -> Result<gst::Sample, glib::Error> {
14    skip_assert_initialized!();
15    unsafe {
16        let mut error = ptr::null_mut();
17        let ret = ffi::gst_video_convert_sample(
18            sample.to_glib_none().0,
19            caps.to_glib_none().0,
20            timeout.into_glib(),
21            &mut error,
22        );
23
24        if error.is_null() {
25            Ok(from_glib_full(ret))
26        } else {
27            Err(from_glib_full(error))
28        }
29    }
30}
31
32pub fn convert_sample_async<F>(
33    sample: &gst::Sample,
34    caps: &gst::Caps,
35    timeout: Option<gst::ClockTime>,
36    func: F,
37) where
38    F: FnOnce(Result<gst::Sample, glib::Error>) + Send + 'static,
39{
40    skip_assert_initialized!();
41    unsafe { convert_sample_async_unsafe(sample, caps, timeout, func) }
42}
43
44pub fn convert_sample_async_local<F>(
45    sample: &gst::Sample,
46    caps: &gst::Caps,
47    timeout: Option<gst::ClockTime>,
48    func: F,
49) where
50    F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
51{
52    skip_assert_initialized!();
53    unsafe {
54        let ctx = glib::MainContext::ref_thread_default();
55        let _acquire = ctx
56            .acquire()
57            .expect("thread default main context already acquired by another thread");
58
59        let func = glib::thread_guard::ThreadGuard::new(func);
60
61        convert_sample_async_unsafe(sample, caps, timeout, move |res| (func.into_inner())(res))
62    }
63}
64
// Shared implementation behind `convert_sample_async` and
// `convert_sample_async_local`.
//
// # Safety
//
// The caller must ensure it is sound to invoke `func` on whatever thread
// GStreamer delivers the result from (either `F: Send`, or the callers
// arrange for same-thread dispatch via the thread-default main context).
unsafe fn convert_sample_async_unsafe<F>(
    sample: &gst::Sample,
    caps: &gst::Caps,
    timeout: Option<gst::ClockTime>,
    func: F,
) where
    F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
{
    unsafe {
        // C callback invoked by GStreamer exactly when the conversion
        // completes (successfully or with an error).
        unsafe extern "C" fn convert_sample_async_trampoline<F>(
            sample: *mut gst::ffi::GstSample,
            error: *mut glib::ffi::GError,
            user_data: glib::ffi::gpointer,
        ) where
            F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
        {
            unsafe {
                // Recover the boxed closure; `take()` moves the `FnOnce` out
                // so it can only ever run once. The box itself is freed later
                // by the destroy-notify below.
                let callback: &mut Option<F> = &mut *(user_data as *mut Option<F>);
                let callback = callback.take().unwrap();

                if error.is_null() {
                    callback(Ok(from_glib_full(sample)))
                } else {
                    callback(Err(from_glib_full(error)))
                }
            }
        }
        // Destroy-notify passed to the C API: reclaims the box, dropping the
        // closure if the trampoline never ran.
        unsafe extern "C" fn convert_sample_async_free<F>(user_data: glib::ffi::gpointer)
        where
            F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
        {
            unsafe {
                let _: Box<Option<F>> = Box::from_raw(user_data as *mut _);
            }
        }

        // `Option` wrapper lets the trampoline move the `FnOnce` out of the
        // heap allocation without taking ownership of the box.
        let user_data: Box<Option<F>> = Box::new(Some(func));

        ffi::gst_video_convert_sample_async(
            sample.to_glib_none().0,
            caps.to_glib_none().0,
            timeout.into_glib(),
            Some(convert_sample_async_trampoline::<F>),
            Box::into_raw(user_data) as glib::ffi::gpointer,
            Some(convert_sample_async_free::<F>),
        );
    }
}
113
114pub fn convert_sample_future(
115    sample: &gst::Sample,
116    caps: &gst::Caps,
117    timeout: Option<gst::ClockTime>,
118) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<gst::Sample, glib::Error>> + 'static>>
119{
120    skip_assert_initialized!();
121
122    use futures_channel::oneshot;
123
124    let (sender, receiver) = oneshot::channel();
125
126    let sample = sample.clone();
127    let caps = caps.clone();
128    let future = async move {
129        assert!(
130            glib::MainContext::ref_thread_default().is_owner(),
131            "Spawning futures only allowed if the thread is owning the MainContext"
132        );
133
134        convert_sample_async(&sample, &caps, timeout, move |res| {
135            let _ = sender.send(res);
136        });
137
138        receiver
139            .await
140            .expect("Sender dropped before callback was called")
141    };
142
143    Box::pin(future)
144}
145
146#[doc(alias = "gst_video_calculate_display_ratio")]
147pub fn calculate_display_ratio(
148    video_width: u32,
149    video_height: u32,
150    video_par: gst::Fraction,
151    display_par: gst::Fraction,
152) -> Option<gst::Fraction> {
153    skip_assert_initialized!();
154
155    unsafe {
156        let mut dar_n = mem::MaybeUninit::uninit();
157        let mut dar_d = mem::MaybeUninit::uninit();
158
159        let res: bool = from_glib(ffi::gst_video_calculate_display_ratio(
160            dar_n.as_mut_ptr(),
161            dar_d.as_mut_ptr(),
162            video_width,
163            video_height,
164            video_par.numer() as u32,
165            video_par.denom() as u32,
166            display_par.numer() as u32,
167            display_par.denom() as u32,
168        ));
169        if res {
170            Some(gst::Fraction::new(
171                dar_n.assume_init() as i32,
172                dar_d.assume_init() as i32,
173            ))
174        } else {
175            None
176        }
177    }
178}
179
180#[doc(alias = "gst_video_guess_framerate")]
181pub fn guess_framerate(duration: gst::ClockTime) -> Option<gst::Fraction> {
182    skip_assert_initialized!();
183
184    unsafe {
185        let mut dest_n = mem::MaybeUninit::uninit();
186        let mut dest_d = mem::MaybeUninit::uninit();
187        let res: bool = from_glib(ffi::gst_video_guess_framerate(
188            duration.into_glib(),
189            dest_n.as_mut_ptr(),
190            dest_d.as_mut_ptr(),
191        ));
192        if res {
193            Some(gst::Fraction::new(
194                dest_n.assume_init(),
195                dest_d.assume_init(),
196            ))
197        } else {
198            None
199        }
200    }
201}
202
// Returns `true` when `width` x `height` with pixel aspect ratio `par`
// corresponds to a commonly used display aspect ratio.
#[cfg(feature = "v1_22")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_22")))]
#[doc(alias = "gst_video_is_common_aspect_ratio")]
pub fn is_common_aspect_ratio(width: u32, height: u32, par: gst::Fraction) -> bool {
    skip_assert_initialized!();

    // The C API takes signed ints for the dimensions and the PAR parts.
    let par_n = par.numer();
    let par_d = par.denom();

    unsafe {
        from_glib(ffi::gst_video_is_common_aspect_ratio(
            width as i32,
            height as i32,
            par_n,
            par_d,
        ))
    }
}
218
219pub fn video_make_raw_caps(
220    formats: &[crate::VideoFormat],
221) -> crate::VideoCapsBuilder<gst::caps::NoFeature> {
222    skip_assert_initialized!();
223
224    let formats = formats.iter().copied().map(|f| match f {
225        crate::VideoFormat::Encoded => panic!("Invalid encoded format"),
226        crate::VideoFormat::Unknown => panic!("Invalid unknown format"),
227        _ => f,
228    });
229
230    crate::VideoCapsBuilder::new().format_list(formats)
231}
232
#[cfg(test)]
mod tests {
    use std::sync::{Arc, Mutex};

    use super::*;

    // Round-trips a solid-color RGBA frame through `convert_sample_async`
    // to ABGR and checks every pixel came back byte-swapped as expected.
    #[test]
    fn test_convert_sample_async() {
        gst::init().unwrap();

        let l = glib::MainLoop::new(None, false);

        // 320x240 RGBA frame, 4 bytes per pixel, filled with a fixed pattern.
        let mut in_buffer = gst::Buffer::with_size(320 * 240 * 4).unwrap();
        {
            let buffer = in_buffer.get_mut().unwrap();
            let mut data = buffer.map_writable().unwrap();

            for p in data.as_mut_slice().chunks_mut(4) {
                p[0] = 63;
                p[1] = 127;
                p[2] = 191;
                p[3] = 255;
            }
        }
        let in_caps = crate::VideoInfo::builder(crate::VideoFormat::Rgba, 320, 240)
            .build()
            .unwrap()
            .to_caps()
            .unwrap();
        let sample = gst::Sample::builder()
            .buffer(&in_buffer)
            .caps(&in_caps)
            .build();

        // Target format: same dimensions, channel order reversed.
        let out_caps = crate::VideoInfo::builder(crate::VideoFormat::Abgr, 320, 240)
            .build()
            .unwrap()
            .to_caps()
            .unwrap();

        // The async callback stores its result and quits the main loop,
        // which `l.run()` below blocks on.
        let l_clone = l.clone();
        let res_store = Arc::new(Mutex::new(None));
        let res_store_clone = res_store.clone();
        convert_sample_async(&sample, &out_caps, gst::ClockTime::NONE, move |res| {
            *res_store_clone.lock().unwrap() = Some(res);
            l_clone.quit();
        });

        l.run();

        let res = res_store.lock().unwrap().take().unwrap();
        let res = res.unwrap();

        let converted_out_caps = res.caps().unwrap();
        assert_eq!(out_caps.as_ref(), converted_out_caps);
        let out_buffer = res.buffer().unwrap();
        {
            let data = out_buffer.map_readable().unwrap();

            // RGBA [63, 127, 191, 255] reversed to ABGR.
            for p in data.as_slice().chunks(4) {
                assert_eq!(p, &[255, 191, 127, 63]);
            }
        }
    }

    // Checks the caps string produced by `video_make_raw_caps`, both with
    // builder defaults and with explicit width/height/framerate.
    #[test]
    fn video_caps() {
        gst::init().unwrap();

        let caps =
            video_make_raw_caps(&[crate::VideoFormat::Nv12, crate::VideoFormat::Nv16]).build();
        assert_eq!(
            caps.to_string(),
            "video/x-raw, format=(string){ NV12, NV16 }, width=(int)[ 1, 2147483647 ], height=(int)[ 1, 2147483647 ], framerate=(fraction)[ 0/1, 2147483647/1 ]"
        );

        #[cfg(feature = "v1_18")]
        {
            /* video_make_raw_caps() is a re-implementation so ensure it returns the same caps as the C API */
            let c_caps = unsafe {
                let formats: Vec<ffi::GstVideoFormat> =
                    [crate::VideoFormat::Nv12, crate::VideoFormat::Nv16]
                        .iter()
                        .map(|f| f.into_glib())
                        .collect();
                let caps = ffi::gst_video_make_raw_caps(formats.as_ptr(), formats.len() as u32);
                gst::Caps::from_glib_full(caps)
            };
            assert_eq!(caps, c_caps);
        }

        let caps = video_make_raw_caps(&[crate::VideoFormat::Nv12, crate::VideoFormat::Nv16])
            .width(800)
            .height(600)
            .framerate((30, 1).into())
            .build();
        assert_eq!(
            caps.to_string(),
            "video/x-raw, format=(string){ NV12, NV16 }, width=(int)800, height=(int)600, framerate=(fraction)30/1"
        );
    }

    // The `Encoded` placeholder must be rejected with a panic.
    #[test]
    #[should_panic(expected = "Invalid encoded format")]
    fn video_caps_encoded() {
        gst::init().unwrap();
        let _caps = video_make_raw_caps(&[crate::VideoFormat::Encoded]);
    }

    // The `Unknown` placeholder must be rejected with a panic.
    #[test]
    #[should_panic(expected = "Invalid unknown format")]
    fn video_caps_unknown() {
        gst::init().unwrap();
        let _caps = video_make_raw_caps(&[crate::VideoFormat::Unknown]);
    }
}
348}