// gstreamer/subclass/allocator.rs

1// Take a look at the license at the top of the repository in the LICENSE file.
2
3use std::ptr;
4
5use glib::{prelude::*, subclass::prelude::*, translate::*};
6
7use super::prelude::*;
8use crate::{ffi, AllocationParams, Allocator};
9
10pub unsafe trait AllocatorImpl:
11    GstObjectImpl + ObjectSubclass<Type: IsA<Allocator>>
12{
13    /// Use `self` to allocate a new memory block with memory that is at least
14    /// `size` big.
15    ///
16    /// The optional `params` can specify the prefix and padding for the memory. If
17    /// [`None`] is passed, no flags, no extra prefix/padding and a default alignment is
18    /// used.
19    ///
20    /// The prefix/padding will be filled with 0 if flags contains
21    /// [`MemoryFlags::ZERO_PREFIXED`][crate::MemoryFlags::ZERO_PREFIXED] and [`MemoryFlags::ZERO_PADDED`][crate::MemoryFlags::ZERO_PADDED] respectively.
22    ///
23    /// When `self` is [`None`], the default allocator will be used.
24    ///
25    /// The alignment in `params` is given as a bitmask so that `align` + 1 equals
26    /// the amount of bytes to align to. For example, to align to 8 bytes,
27    /// use an alignment of 7.
28    /// ## `size`
29    /// size of the visible memory area
30    /// ## `params`
31    /// optional parameters
32    ///
33    /// # Returns
34    ///
35    /// a new [`Memory`][crate::Memory].
36    unsafe fn alloc(&self, size: usize, params: &AllocationParams) -> *mut ffi::GstMemory {
37        self.parent_alloc(size, params)
38    }
39
40    unsafe fn free(&self, memory: *mut ffi::GstMemory) {
41        unsafe { self.parent_free(memory) }
42    }
43}
44
pub trait AllocatorImplExt: AllocatorImpl {
    /// Chains up to the parent class' `alloc` vfunc.
    ///
    /// Returns a NULL pointer if no ancestor class implements `alloc`.
    unsafe fn parent_alloc(&self, size: usize, params: &AllocationParams) -> *mut ffi::GstMemory {
        unsafe {
            let data = Self::type_data();
            // The parent class struct of a GstAllocator subclass is (at least)
            // a GstAllocatorClass, so this cast is valid.
            let parent_class = data.as_ref().parent_class() as *mut ffi::GstAllocatorClass;

            if let Some(f) = (*parent_class).alloc {
                f(
                    self.obj().unsafe_cast_ref::<Allocator>().to_glib_none().0,
                    size,
                    // The C vfunc takes a mutable pointer although the params
                    // are only read, hence the const-to-mut override.
                    mut_override(params.to_glib_none().0),
                )
            } else {
                ptr::null_mut()
            }
        }
    }

    /// Chains up to the parent class' `free` vfunc.
    ///
    /// Does nothing if no ancestor class implements `free`.
    unsafe fn parent_free(&self, memory: *mut ffi::GstMemory) {
        unsafe {
            let data = Self::type_data();
            let parent_class = data.as_ref().parent_class() as *mut ffi::GstAllocatorClass;

            if let Some(f) = (*parent_class).free {
                f(
                    self.obj().unsafe_cast_ref::<Allocator>().to_glib_none().0,
                    memory,
                )
            }
        }
    }
}

// Blanket impl: every `AllocatorImpl` automatically gets the parent-chaining
// methods above.
impl<T: AllocatorImpl> AllocatorImplExt for T {}
79
unsafe impl<T: AllocatorImpl> IsSubclassable<T> for Allocator {
    fn class_init(klass: &mut glib::Class<Self>) {
        Self::parent_class_init::<T>(klass);
        let klass = klass.as_mut();
        // Install the C vfunc trampolines that dispatch into the Rust
        // `AllocatorImpl` implementation.
        klass.alloc = Some(alloc::<T>);
        klass.free = Some(free::<T>);
    }
}
88
// C vfunc trampoline: dispatches `GstAllocatorClass::alloc` to the Rust
// `AllocatorImpl::alloc` implementation.
unsafe extern "C" fn alloc<T: AllocatorImpl>(
    ptr: *mut ffi::GstAllocator,
    size: usize,
    params: *mut ffi::GstAllocationParams,
) -> *mut ffi::GstMemory {
    let instance = &*(ptr as *mut T::Instance);
    let imp = instance.imp();

    // `AllocationParams` is a wrapper around `GstAllocationParams`, so the
    // pointer cast is valid.
    // NOTE(review): assumes `params` is never NULL here — `gst_allocator_alloc()`
    // substitutes default params before calling the vfunc, but a direct vfunc
    // call with NULL would be UB; confirm no such caller exists.
    let params = &*(params as *mut AllocationParams);

    imp.alloc(size, params)
}
101
// C vfunc trampoline: dispatches `GstAllocatorClass::free` to the Rust
// `AllocatorImpl::free` implementation.
unsafe extern "C" fn free<T: AllocatorImpl>(
    ptr: *mut ffi::GstAllocator,
    memory: *mut ffi::GstMemory,
) {
    // The free vfunc is only called once the last reference to the memory is
    // gone, i.e. its refcount has already dropped to zero.
    debug_assert_eq!((*memory).mini_object.refcount, 0);

    let instance = &*(ptr as *mut T::Instance);
    let imp = instance.imp();

    imp.free(memory);
}
113
114#[cfg(test)]
115mod tests {
116    use super::*;
117    use crate::prelude::*;
118
119    // The test allocator below is basically replicating GStreamer's default
120    // sysmem allocator except that the memory allocation is separate from the
121    // memory struct for clarity.
122
123    pub mod imp {
124        use glib::translate::*;
125        use std::alloc;
126
127        use super::*;
128
        // Memory struct used by the test allocator. `repr(C)` with the
        // `GstMemory` header as first field so a `*mut GstMemory` can be cast
        // to `*mut Memory` and back.
        #[repr(C)]
        struct Memory {
            mem: ffi::GstMemory,
            // Layout of the separate data allocation, needed for dealloc.
            layout: alloc::Layout,
            // Separately allocated backing storage.
            data: *mut u8,
        }

        // Layout of the `Memory` struct itself (header allocation).
        const LAYOUT: alloc::Layout = alloc::Layout::new::<Memory>();

        #[derive(Default)]
        pub struct TestAllocator;
140
        // No properties, signals or GstObject behaviour to override.
        impl ObjectImpl for TestAllocator {}
        impl GstObjectImpl for TestAllocator {}
143        unsafe impl AllocatorImpl for TestAllocator {
144            unsafe fn alloc(&self, size: usize, params: &AllocationParams) -> *mut ffi::GstMemory {
145                unsafe {
146                    let Some(maxsize) = size
147                        .checked_add(params.prefix())
148                        .and_then(|s| s.checked_add(params.padding()))
149                    else {
150                        return ptr::null_mut();
151                    };
152
153                    let align = params.align() | crate::Memory::default_alignment();
154                    let Ok(layout) = alloc::Layout::from_size_align(maxsize, align + 1) else {
155                        return ptr::null_mut();
156                    };
157
158                    let mem = alloc::alloc(LAYOUT) as *mut Memory;
159
160                    let data = alloc::alloc(layout);
161
162                    if params.prefix() > 0
163                        && params.flags().contains(crate::MemoryFlags::ZERO_PREFIXED)
164                    {
165                        ptr::write_bytes(data, 0, params.prefix());
166                    }
167
168                    if params.flags().contains(crate::MemoryFlags::ZERO_PADDED) {
169                        ptr::write_bytes(data.add(params.prefix()).add(size), 0, params.padding());
170                    }
171
172                    ffi::gst_memory_init(
173                        ptr::addr_of_mut!((*mem).mem),
174                        params.flags().into_glib(),
175                        self.obj().as_ptr() as *mut ffi::GstAllocator,
176                        ptr::null_mut(),
177                        maxsize,
178                        params.align(),
179                        params.prefix(),
180                        size,
181                    );
182                    ptr::write(ptr::addr_of_mut!((*mem).layout), layout);
183                    ptr::write(ptr::addr_of_mut!((*mem).data), data);
184
185                    mem as *mut ffi::GstMemory
186                }
187            }
188
189            unsafe fn free(&self, mem: *mut ffi::GstMemory) {
190                unsafe {
191                    let mem = mem as *mut Memory;
192
193                    if (*mem).mem.parent.is_null() {
194                        alloc::dealloc((*mem).data, (*mem).layout);
195                        ptr::drop_in_place(ptr::addr_of_mut!((*mem).layout));
196                    }
197                    alloc::dealloc(mem as *mut u8, LAYOUT);
198                }
199            }
200        }
201
        #[glib::object_subclass]
        impl ObjectSubclass for TestAllocator {
            const NAME: &'static str = "TestAllocator";
            type Type = super::TestAllocator;
            type ParentType = Allocator;

            // Fills in the per-instance memory vtable of the `GstAllocator`
            // instance struct; the class vtable (alloc/free) is installed by
            // `IsSubclassable::class_init()`.
            fn instance_init(obj: &glib::subclass::InitializingObject<Self>) {
                // NUL-terminated, as required by the C string field below.
                static ALLOCATOR_TYPE: &[u8] = b"TestAllocatorMemory\0";

                unsafe {
                    let allocator = obj.as_ptr() as *mut ffi::GstAllocator;

                    // TODO: This should all be in some kind of trait ideally
                    (*allocator).mem_type = ALLOCATOR_TYPE.as_ptr() as *const _;
                    (*allocator).mem_map = Some(TestAllocator::mem_map);
                    (*allocator).mem_unmap = Some(TestAllocator::mem_unmap);
                    // mem_copy not set because the fallback already does the right thing
                    (*allocator).mem_share = Some(TestAllocator::mem_share);
                    (*allocator).mem_is_span = Some(TestAllocator::mem_is_span);
                }
            }
        }
224
        impl TestAllocator {
            // Map vfunc: returns the base pointer of the backing storage (of
            // the parent allocation for shared memories). `gst_memory_map()`
            // adds the memory's `offset` to the returned pointer itself.
            unsafe extern "C" fn mem_map(
                mem: *mut ffi::GstMemory,
                _maxsize: usize,
                _flags: ffi::GstMapFlags,
            ) -> glib::ffi::gpointer {
                let mem = mem as *mut Memory;

                // Shared memories have no own `data`; use the parent's.
                let parent = if (*mem).mem.parent.is_null() {
                    mem
                } else {
                    (*mem).mem.parent as *mut Memory
                };

                // `(*mem).offset` is added to the pointer by `gst_memory_map()`
                (*parent).data as *mut _
            }

            // Unmap vfunc: nothing to do — the backing storage stays valid
            // until the memory is freed.
            unsafe extern "C" fn mem_unmap(_mem: *mut ffi::GstMemory) {}

            // Share vfunc: creates a new read-only memory that references a
            // sub-range of this memory's backing storage via `parent`.
            unsafe extern "C" fn mem_share(
                mem: *mut ffi::GstMemory,
                offset: isize,
                size: isize,
            ) -> *mut ffi::GstMemory {
                let mem = mem as *mut Memory;

                // Basically a re-implementation of _sysmem_share()

                // Shares of shares still point at the original allocation.
                let parent = if (*mem).mem.parent.is_null() {
                    mem
                } else {
                    (*mem).mem.parent as *mut Memory
                };

                // Offset and size are actually usizes and the API assumes that negative values simply wrap
                // around, so let's cast to usizes here and do wrapping arithmetic.
                let offset = offset as usize;
                let mut size = size as usize;

                let new_offset = (*mem).mem.offset.wrapping_add(offset);
                debug_assert!(new_offset < (*mem).mem.maxsize);

                // `-1` (== usize::MAX after the cast) means "rest of the memory".
                if size == usize::MAX {
                    size = (*mem).mem.size.wrapping_sub(offset);
                }
                debug_assert!(new_offset <= usize::MAX - size);
                debug_assert!(new_offset + size <= (*mem).mem.maxsize);

                // Only a header is allocated; the data belongs to the parent.
                // NOTE(review): `alloc::alloc()` may return NULL on OOM; that
                // is not checked here — confirm acceptable for test code.
                let sub = alloc::alloc(LAYOUT) as *mut Memory;

                ffi::gst_memory_init(
                    sub as *mut ffi::GstMemory,
                    (*mem).mem.mini_object.flags | ffi::GST_MINI_OBJECT_FLAG_LOCK_READONLY,
                    (*mem).mem.allocator,
                    parent as *mut ffi::GstMemory,
                    (*mem).mem.maxsize,
                    (*mem).mem.align,
                    new_offset,
                    size,
                );
                // This is never actually accessed
                ptr::write(ptr::addr_of_mut!((*sub).data), ptr::null_mut());

                sub as *mut ffi::GstMemory
            }

            // Is-span vfunc: reports whether `mem1` is immediately followed by
            // `mem2` inside their common parent and, if so, at which offset a
            // combined share would start.
            unsafe extern "C" fn mem_is_span(
                mem1: *mut ffi::GstMemory,
                mem2: *mut ffi::GstMemory,
                offset: *mut usize,
            ) -> glib::ffi::gboolean {
                let mem1 = mem1 as *mut Memory;
                let mem2 = mem2 as *mut Memory;

                // Same parent is checked by `gst_memory_is_span()` already
                let parent1 = (*mem1).mem.parent as *mut Memory;
                let parent2 = (*mem2).mem.parent as *mut Memory;
                debug_assert_eq!(parent1, parent2);

                if !offset.is_null() {
                    // Offset that can be used on the parent memory to create a
                    // shared memory that starts with `mem1`.
                    //
                    // This needs to use wrapping arithmetic too as in `mem_share()`.
                    *offset = (*mem1).mem.offset.wrapping_sub((*parent1).mem.offset);
                }

                // Check if both memories are contiguous.
                let is_span = ((*mem1).mem.offset + ((*mem1).mem.size)) == (*mem2).mem.offset;

                is_span.into_glib()
            }
        }
319    }
320
    // Public GObject wrapper type for the test allocator subclass.
    glib::wrapper! {
        pub struct TestAllocator(ObjectSubclass<imp::TestAllocator>) @extends Allocator, crate::Object;
    }
324
    impl Default for TestAllocator {
        // Creates a fresh instance of the test allocator.
        fn default() -> Self {
            glib::Object::new()
        }
    }
330
331    #[test]
332    fn test_allocator_registration() {
333        crate::init().unwrap();
334
335        const TEST_ALLOCATOR_NAME: &str = "TestAllocator";
336
337        let allocator = TestAllocator::default();
338        Allocator::register(TEST_ALLOCATOR_NAME, allocator);
339
340        let allocator = Allocator::find(Some(TEST_ALLOCATOR_NAME));
341
342        assert!(allocator.is_some());
343    }
344
345    #[test]
346    fn test_allocator_alloc() {
347        crate::init().unwrap();
348
349        const SIZE: usize = 1024;
350
351        let allocator = TestAllocator::default();
352
353        let memory = allocator.alloc(SIZE, None).unwrap();
354
355        assert_eq!(memory.size(), SIZE);
356    }
357
    // Exercises the custom memory vtable: map, copy, full and partial shares,
    // and span detection.
    #[test]
    fn test_allocator_mem_ops() {
        crate::init().unwrap();

        let data = [0, 1, 2, 3, 4, 5, 6, 7];

        let allocator = TestAllocator::default();

        // Allocate and fill the memory with known data.
        let mut memory = allocator.alloc(data.len(), None).unwrap();
        assert_eq!(memory.size(), data.len());

        {
            let memory = memory.get_mut().unwrap();
            let mut map = memory.map_writable().unwrap();
            map.copy_from_slice(&data);
        }

        // A copy is a stand-alone memory (no parent) with identical content.
        let copy = memory.copy();
        assert!(copy.parent().is_none());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = copy.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        // A full share references the original as parent and exposes the
        // same content.
        let share = memory.share(..);
        assert_eq!(share.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = share.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        // Partial share of the first two bytes.
        let sub1 = memory.share(..2);
        assert_eq!(sub1.size(), 2);
        assert_eq!(sub1.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub1.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[..2]);
        }

        // Partial share of the remaining six bytes.
        let sub2 = memory.share(2..);
        assert_eq!(sub2.size(), 6);
        assert_eq!(sub2.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub2.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[2..]);
        }

        // `sub1` ends exactly where `sub2` begins, so they form a span with
        // offset 0 relative to the parent.
        let offset = sub1.is_span(&sub2).unwrap();
        assert_eq!(offset, 0);

        // Sharing a share: the parent is the original memory, not `sub2`.
        let sub3 = sub2.share(2..);
        assert_eq!(sub3.size(), 4);
        assert_eq!(sub3.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub3.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[4..]);
        }
    }
423}