// gstreamer/subclass/allocator.rs

1// Take a look at the license at the top of the repository in the LICENSE file.
2
3use std::ptr;
4
5use glib::{prelude::*, subclass::prelude::*, translate::*};
6
7use super::prelude::*;
8use crate::{AllocationParams, Allocator, ffi};
9
/// Virtual-method trait for implementing a custom [`Allocator`] subclass.
///
/// The trait is `unsafe` because implementations hand raw `GstMemory`
/// pointers across the FFI boundary and must uphold GStreamer's ownership
/// rules for them.
pub unsafe trait AllocatorImpl:
    GstObjectImpl + ObjectSubclass<Type: IsA<Allocator>>
{
    /// Use `self` to allocate a new memory block with memory that is at least
    /// `size` big.
    ///
    /// `params` specifies the flags, prefix, padding and alignment for the
    /// memory.
    ///
    /// The prefix/padding will be filled with 0 if flags contains
    /// [`MemoryFlags::ZERO_PREFIXED`][crate::MemoryFlags::ZERO_PREFIXED] and [`MemoryFlags::ZERO_PADDED`][crate::MemoryFlags::ZERO_PADDED] respectively.
    ///
    /// The alignment in `params` is given as a bitmask so that `align` + 1 equals
    /// the amount of bytes to align to. For example, to align to 8 bytes,
    /// use an alignment of 7.
    ///
    /// The default implementation chains up to the parent class.
    /// ## `size`
    /// size of the visible memory area
    /// ## `params`
    /// allocation parameters
    ///
    /// # Returns
    ///
    /// a new [`Memory`][crate::Memory].
    ///
    /// # Safety
    ///
    /// The returned pointer must be a newly allocated `GstMemory` (or null on
    /// failure) whose ownership is transferred to the caller.
    unsafe fn alloc(&self, size: usize, params: &AllocationParams) -> *mut ffi::GstMemory {
        unsafe { self.parent_alloc(size, params) }
    }

    /// Frees `memory` that was previously allocated by this allocator.
    ///
    /// The default implementation chains up to the parent class.
    ///
    /// # Safety
    ///
    /// `memory` must be a valid `GstMemory` obtained from this allocator and
    /// must not be used after this call.
    unsafe fn free(&self, memory: *mut ffi::GstMemory) {
        unsafe { self.parent_free(memory) }
    }
}
44
/// Extension trait giving [`AllocatorImpl`] implementors access to the parent
/// class' vfunc implementations for chaining up.
pub trait AllocatorImplExt: AllocatorImpl {
    /// Chains up to the parent class' `alloc` vfunc.
    ///
    /// Returns a null pointer if the parent class does not implement `alloc`.
    ///
    /// # Safety
    ///
    /// Ownership of the returned `GstMemory` (if any) is transferred to the
    /// caller.
    unsafe fn parent_alloc(&self, size: usize, params: &AllocationParams) -> *mut ffi::GstMemory {
        unsafe {
            let data = Self::type_data();
            // The parent class struct of an Allocator subclass is a
            // GstAllocatorClass, so this cast gives access to its vtable.
            let parent_class = data.as_ref().parent_class() as *mut ffi::GstAllocatorClass;

            if let Some(f) = (*parent_class).alloc {
                f(
                    self.obj().unsafe_cast_ref::<Allocator>().to_glib_none().0,
                    size,
                    // The C vfunc takes a mutable params pointer; cast away
                    // the constness of our borrowed params for the call.
                    mut_override(params.to_glib_none().0),
                )
            } else {
                ptr::null_mut()
            }
        }
    }

    /// Chains up to the parent class' `free` vfunc, doing nothing if the
    /// parent class does not implement it.
    ///
    /// # Safety
    ///
    /// `memory` must be valid and is consumed by this call.
    unsafe fn parent_free(&self, memory: *mut ffi::GstMemory) {
        unsafe {
            let data = Self::type_data();
            let parent_class = data.as_ref().parent_class() as *mut ffi::GstAllocatorClass;

            if let Some(f) = (*parent_class).free {
                f(
                    self.obj().unsafe_cast_ref::<Allocator>().to_glib_none().0,
                    memory,
                )
            }
        }
    }
}
77
// Blanket implementation: every `AllocatorImpl` automatically gets the
// chain-up helpers from `AllocatorImplExt`.
impl<T: AllocatorImpl> AllocatorImplExt for T {}
79
// Wires a Rust `AllocatorImpl` into the GObject class machinery when an
// `Allocator` subclass is registered.
unsafe impl<T: AllocatorImpl> IsSubclassable<T> for Allocator {
    fn class_init(klass: &mut glib::Class<Self>) {
        Self::parent_class_init::<T>(klass);
        let klass = klass.as_mut();
        // Install the C trampolines that forward the vfuncs to the Rust impl.
        klass.alloc = Some(alloc::<T>);
        klass.free = Some(free::<T>);
    }
}
88
// C vfunc trampoline: forwards `GstAllocatorClass::alloc` to the Rust
// `AllocatorImpl::alloc` implementation.
unsafe extern "C" fn alloc<T: AllocatorImpl>(
    ptr: *mut ffi::GstAllocator,
    size: usize,
    params: *mut ffi::GstAllocationParams,
) -> *mut ffi::GstMemory {
    unsafe {
        // `ptr` is the instance this class was registered for, so the cast to
        // the subclass instance struct is valid.
        let instance = &*(ptr as *mut T::Instance);
        let imp = instance.imp();

        // NOTE(review): this cast assumes `AllocationParams` wraps
        // `GstAllocationParams` with identical layout and that GStreamer
        // never passes a null `params` to this vfunc — confirm both.
        let params = &*(params as *mut AllocationParams);

        imp.alloc(size, params)
    }
}
103
// C vfunc trampoline: forwards `GstAllocatorClass::free` to the Rust
// `AllocatorImpl::free` implementation.
unsafe extern "C" fn free<T: AllocatorImpl>(
    ptr: *mut ffi::GstAllocator,
    memory: *mut ffi::GstMemory,
) {
    unsafe {
        // The vfunc is only invoked once the last reference to the memory is
        // gone, so its refcount must already have dropped to zero.
        debug_assert_eq!((*memory).mini_object.refcount, 0);

        let instance = &*(ptr as *mut T::Instance);
        let imp = instance.imp();

        imp.free(memory);
    }
}
117
118#[cfg(test)]
119mod tests {
120    use super::*;
121    use crate::prelude::*;
122
123    // The test allocator below is basically replicating GStreamer's default
124    // sysmem allocator except that the memory allocation is separate from the
125    // memory struct for clarity.
126
127    pub mod imp {
128        use glib::translate::*;
129        use std::alloc;
130
131        use super::*;
132
        // Backing storage for one allocation: the GstMemory header comes
        // first (so a `*mut GstMemory` can be cast to `*mut Memory`),
        // followed by the layout and pointer of the separately allocated
        // data block.
        #[repr(C)]
        struct Memory {
            mem: ffi::GstMemory,
            // Layout used to allocate `data`; needed again for dealloc.
            layout: alloc::Layout,
            // Separately allocated backing buffer of `maxsize` bytes.
            data: *mut u8,
        }

        // Layout of the `Memory` header struct itself.
        const LAYOUT: alloc::Layout = alloc::Layout::new::<Memory>();

        // Stateless allocator subclass used by the tests below.
        #[derive(Default)]
        pub struct TestAllocator;

        impl ObjectImpl for TestAllocator {}
        impl GstObjectImpl for TestAllocator {}
147        unsafe impl AllocatorImpl for TestAllocator {
148            unsafe fn alloc(&self, size: usize, params: &AllocationParams) -> *mut ffi::GstMemory {
149                unsafe {
150                    let Some(maxsize) = size
151                        .checked_add(params.prefix())
152                        .and_then(|s| s.checked_add(params.padding()))
153                    else {
154                        return ptr::null_mut();
155                    };
156
157                    let align = params.align() | crate::Memory::default_alignment();
158                    let Ok(layout) = alloc::Layout::from_size_align(maxsize, align + 1) else {
159                        return ptr::null_mut();
160                    };
161
162                    let mem = alloc::alloc(LAYOUT) as *mut Memory;
163
164                    let data = alloc::alloc(layout);
165
166                    if params.prefix() > 0
167                        && params.flags().contains(crate::MemoryFlags::ZERO_PREFIXED)
168                    {
169                        ptr::write_bytes(data, 0, params.prefix());
170                    }
171
172                    if params.flags().contains(crate::MemoryFlags::ZERO_PADDED) {
173                        ptr::write_bytes(data.add(params.prefix()).add(size), 0, params.padding());
174                    }
175
176                    ffi::gst_memory_init(
177                        ptr::addr_of_mut!((*mem).mem),
178                        params.flags().into_glib(),
179                        self.obj().as_ptr() as *mut ffi::GstAllocator,
180                        ptr::null_mut(),
181                        maxsize,
182                        params.align(),
183                        params.prefix(),
184                        size,
185                    );
186                    ptr::write(ptr::addr_of_mut!((*mem).layout), layout);
187                    ptr::write(ptr::addr_of_mut!((*mem).data), data);
188
189                    mem as *mut ffi::GstMemory
190                }
191            }
192
193            unsafe fn free(&self, mem: *mut ffi::GstMemory) {
194                unsafe {
195                    let mem = mem as *mut Memory;
196
197                    if (*mem).mem.parent.is_null() {
198                        alloc::dealloc((*mem).data, (*mem).layout);
199                        ptr::drop_in_place(ptr::addr_of_mut!((*mem).layout));
200                    }
201                    alloc::dealloc(mem as *mut u8, LAYOUT);
202                }
203            }
204        }
205
        // GObject type registration for the test allocator.
        #[glib::object_subclass]
        impl ObjectSubclass for TestAllocator {
            const NAME: &'static str = "TestAllocator";
            type Type = super::TestAllocator;
            type ParentType = Allocator;

            // Fill in the per-instance GstAllocator function pointers that
            // have no vfunc equivalent in the class struct.
            fn instance_init(obj: &glib::subclass::InitializingObject<Self>) {
                // NUL-terminated memory type name stored in the C struct.
                static ALLOCATOR_TYPE: &[u8] = b"TestAllocatorMemory\0";

                unsafe {
                    let allocator = obj.as_ptr() as *mut ffi::GstAllocator;

                    // TODO: This should all be in some kind of trait ideally
                    (*allocator).mem_type = ALLOCATOR_TYPE.as_ptr() as *const _;
                    (*allocator).mem_map = Some(TestAllocator::mem_map);
                    (*allocator).mem_unmap = Some(TestAllocator::mem_unmap);
                    // mem_copy not set because the fallback already does the right thing
                    (*allocator).mem_share = Some(TestAllocator::mem_share);
                    (*allocator).mem_is_span = Some(TestAllocator::mem_is_span);
                }
            }
        }
228
        impl TestAllocator {
            // `GstMemoryMapFunction`: returns the base pointer of the backing
            // data. For shared (sub-)memories the data lives in the root
            // parent memory.
            unsafe extern "C" fn mem_map(
                mem: *mut ffi::GstMemory,
                _maxsize: usize,
                _flags: ffi::GstMapFlags,
            ) -> glib::ffi::gpointer {
                unsafe {
                    let mem = mem as *mut Memory;

                    let parent = if (*mem).mem.parent.is_null() {
                        mem
                    } else {
                        (*mem).mem.parent as *mut Memory
                    };

                    // `(*mem).offset` is added to the pointer by `gst_memory_map()`
                    (*parent).data as *mut _
                }
            }

            // `GstMemoryUnmapFunction`: nothing to undo — mapping is just a
            // raw pointer into the allocation.
            unsafe extern "C" fn mem_unmap(_mem: *mut ffi::GstMemory) {}

            // `GstMemoryShareFunction`: creates a read-only sub-memory that
            // references a slice of the root parent's data without copying.
            unsafe extern "C" fn mem_share(
                mem: *mut ffi::GstMemory,
                offset: isize,
                size: isize,
            ) -> *mut ffi::GstMemory {
                unsafe {
                    let mem = mem as *mut Memory;

                    // Basically a re-implementation of _sysmem_share()

                    // Always hang the new memory off the root parent so the
                    // parent chain stays one level deep.
                    let parent = if (*mem).mem.parent.is_null() {
                        mem
                    } else {
                        (*mem).mem.parent as *mut Memory
                    };

                    // Offset and size are actually usizes and the API assumes that negative values simply wrap
                    // around, so let's cast to usizes here and do wrapping arithmetic.
                    let offset = offset as usize;
                    let mut size = size as usize;

                    let new_offset = (*mem).mem.offset.wrapping_add(offset);
                    debug_assert!(new_offset < (*mem).mem.maxsize);

                    // `-1` (usize::MAX after the cast) means "everything from
                    // `offset` to the end of the current visible area".
                    if size == usize::MAX {
                        size = (*mem).mem.size.wrapping_sub(offset);
                    }
                    debug_assert!(new_offset <= usize::MAX - size);
                    debug_assert!(new_offset + size <= (*mem).mem.maxsize);

                    // NOTE(review): the allocation is not checked for null,
                    // and `(*sub).layout` is left uninitialized. The latter is
                    // never read (free() only reads `layout` for parentless
                    // memories), but both are worth confirming.
                    let sub = alloc::alloc(LAYOUT) as *mut Memory;

                    ffi::gst_memory_init(
                        sub as *mut ffi::GstMemory,
                        (*mem).mem.mini_object.flags | ffi::GST_MINI_OBJECT_FLAG_LOCK_READONLY,
                        (*mem).mem.allocator,
                        parent as *mut ffi::GstMemory,
                        (*mem).mem.maxsize,
                        (*mem).mem.align,
                        new_offset,
                        size,
                    );
                    // This is never actually accessed
                    ptr::write(ptr::addr_of_mut!((*sub).data), ptr::null_mut());

                    sub as *mut ffi::GstMemory
                }
            }

            // `GstMemoryIsSpanFunction`: reports whether two sibling memories
            // cover contiguous ranges of their shared parent's data.
            unsafe extern "C" fn mem_is_span(
                mem1: *mut ffi::GstMemory,
                mem2: *mut ffi::GstMemory,
                offset: *mut usize,
            ) -> glib::ffi::gboolean {
                unsafe {
                    let mem1 = mem1 as *mut Memory;
                    let mem2 = mem2 as *mut Memory;

                    // Same parent is checked by `gst_memory_is_span()` already
                    let parent1 = (*mem1).mem.parent as *mut Memory;
                    let parent2 = (*mem2).mem.parent as *mut Memory;
                    debug_assert_eq!(parent1, parent2);

                    if !offset.is_null() {
                        // Offset that can be used on the parent memory to create a
                        // shared memory that starts with `mem1`.
                        //
                        // This needs to use wrapping arithmetic too as in `mem_share()`.
                        *offset = (*mem1).mem.offset.wrapping_sub((*parent1).mem.offset);
                    }

                    // Check if both memories are contiguous.
                    let is_span = ((*mem1).mem.offset + ((*mem1).mem.size)) == (*mem2).mem.offset;

                    is_span.into_glib()
                }
            }
        }
329    }
330
    // Public wrapper object for the `imp::TestAllocator` subclass.
    glib::wrapper! {
        pub struct TestAllocator(ObjectSubclass<imp::TestAllocator>) @extends Allocator, crate::Object;
    }
334
335    impl Default for TestAllocator {
336        fn default() -> Self {
337            glib::Object::new()
338        }
339    }
340
341    #[test]
342    fn test_allocator_registration() {
343        crate::init().unwrap();
344
345        const TEST_ALLOCATOR_NAME: &str = "TestAllocator";
346
347        let allocator = TestAllocator::default();
348        Allocator::register(TEST_ALLOCATOR_NAME, allocator);
349
350        let allocator = Allocator::find(Some(TEST_ALLOCATOR_NAME));
351
352        assert!(allocator.is_some());
353    }
354
355    #[test]
356    fn test_allocator_alloc() {
357        crate::init().unwrap();
358
359        const SIZE: usize = 1024;
360
361        let allocator = TestAllocator::default();
362
363        let memory = allocator.alloc(SIZE, None).unwrap();
364
365        assert_eq!(memory.size(), SIZE);
366    }
367
    // Exercises the custom memory operations end to end: write through a
    // mapping, copy, full/partial shares (including a share of a share), and
    // the is-span check between two adjacent sub-memories.
    #[test]
    fn test_allocator_mem_ops() {
        crate::init().unwrap();

        let data = [0, 1, 2, 3, 4, 5, 6, 7];

        let allocator = TestAllocator::default();

        let mut memory = allocator.alloc(data.len(), None).unwrap();
        assert_eq!(memory.size(), data.len());

        // Fill the memory with test data via a writable mapping.
        {
            let memory = memory.get_mut().unwrap();
            let mut map = memory.map_writable().unwrap();
            map.copy_from_slice(&data);
        }

        // A copy is an independent memory (no parent) with identical content.
        let copy = memory.copy();
        assert!(copy.parent().is_none());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = copy.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        // A full share references the original as its parent.
        let share = memory.share(..);
        assert_eq!(share.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = share.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        // Partial share: first two bytes.
        let sub1 = memory.share(..2);
        assert_eq!(sub1.size(), 2);
        assert_eq!(sub1.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub1.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[..2]);
        }

        // Partial share: remaining six bytes.
        let sub2 = memory.share(2..);
        assert_eq!(sub2.size(), 6);
        assert_eq!(sub2.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub2.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[2..]);
        }

        // sub1 ends exactly where sub2 begins, so they span contiguously and
        // the combined region starts at parent offset 0.
        let offset = sub1.is_span(&sub2).unwrap();
        assert_eq!(offset, 0);

        // Sharing a share must still hang off the root memory, not off sub2.
        let sub3 = sub2.share(2..);
        assert_eq!(sub3.size(), 4);
        assert_eq!(sub3.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub3.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[4..]);
        }
    }
433}