gstreamer/memory_wrapped.rs

use glib::{prelude::*, translate::*};

use std::{alloc, mem, ptr};

use crate::{ffi, Memory};

#[repr(C)]
struct WrappedMemory<T> {
    mem: ffi::GstMemory,

    // Pointer to the bytes exposed by the wrapped value.
    data: *mut u8,

    // Layout used to allocate this struct, needed again in `free()` for deallocation.
    layout: alloc::Layout,

    // Offset of `wrap` from the start of the struct, or 0 if there is no wrapped value.
    wrap_offset: usize,
    // Drops the wrapped value in place when the memory is freed.
    wrap_drop_in_place: unsafe fn(*mut T),
    // The wrapped value itself.
    wrap: T,
}

// `GstAllocatorClass::free` vmethod: drops the wrapped value and releases the allocation.
unsafe extern "C" fn free(_allocator: *mut ffi::GstAllocator, mem: *mut ffi::GstMemory) {
    let mem = mem as *mut WrappedMemory<()>;

    // Only the root memory owns a wrapped value; shared sub-memories have a
    // `wrap_offset` of 0 and nothing of their own to drop.
    if (*mem).wrap_offset > 0 {
        let wrap = (mem as *mut u8).add((*mem).wrap_offset) as *mut ();
        ((*mem).wrap_drop_in_place)(wrap);
    }

    alloc::dealloc(mem as *mut u8, (*mem).layout);
}

unsafe extern "C" fn mem_map(
    mem: *mut ffi::GstMemory,
    _maxsize: usize,
    _flags: ffi::GstMapFlags,
) -> glib::ffi::gpointer {
    let mem = mem as *mut WrappedMemory<()>;

    (*mem).data as glib::ffi::gpointer
}

unsafe extern "C" fn mem_unmap(_mem: *mut ffi::GstMemory) {}

unsafe extern "C" fn mem_share(
    mem: *mut ffi::GstMemory,
    offset: isize,
    size: isize,
) -> *mut ffi::GstMemory {
    let mem = mem as *mut WrappedMemory<()>;

    // Share from the root memory (which owns the wrapped value) rather than
    // from an intermediate sub-memory.
    let parent = if (*mem).mem.parent.is_null() {
        mem
    } else {
        (*mem).mem.parent as *mut WrappedMemory<()>
    };

    // A size of -1 (usize::MAX after the cast) means "from offset to the end".
    let offset = offset as usize;
    let mut size = size as usize;

    let new_offset = (*mem).mem.offset.wrapping_add(offset);
    debug_assert!(new_offset < (*mem).mem.maxsize);

    if size == usize::MAX {
        size = (*mem).mem.size.wrapping_sub(offset);
    }
    debug_assert!(new_offset <= usize::MAX - size);
    debug_assert!(new_offset + size <= (*mem).mem.maxsize);

    // The sub-memory is a plain `WrappedMemory<()>`: it only borrows the data
    // pointer and never drops a wrapped value itself.
    let layout = alloc::Layout::new::<WrappedMemory<()>>();
    let sub = alloc::alloc(layout) as *mut WrappedMemory<()>;

    ffi::gst_memory_init(
        sub as *mut ffi::GstMemory,
        (*mem).mem.mini_object.flags | ffi::GST_MINI_OBJECT_FLAG_LOCK_READONLY,
        (*mem).mem.allocator,
        parent as *mut ffi::GstMemory,
        (*mem).mem.maxsize,
        (*mem).mem.align,
        new_offset,
        size,
    );
    ptr::write(ptr::addr_of_mut!((*sub).data), (*mem).data);
    ptr::write(ptr::addr_of_mut!((*sub).layout), layout);
    ptr::write(ptr::addr_of_mut!((*sub).wrap_offset), 0);
    ptr::write(ptr::addr_of_mut!((*sub).wrap_drop_in_place), |_| ());

    sub as *mut ffi::GstMemory
}

unsafe extern "C" fn mem_is_span(
    mem1: *mut ffi::GstMemory,
    mem2: *mut ffi::GstMemory,
    offset: *mut usize,
) -> glib::ffi::gboolean {
    let mem1 = mem1 as *mut WrappedMemory<()>;
    let mem2 = mem2 as *mut WrappedMemory<()>;

    if !offset.is_null() {
        let parent = (*mem1).mem.parent as *mut WrappedMemory<()>;
        *offset = (*mem1).mem.offset - (*parent).mem.offset;
    }

    let is_span = (*mem1).data.add((*mem1).mem.offset).add((*mem1).mem.size)
        == (*mem2).data.add((*mem2).mem.offset);

    is_span.into_glib()
}

unsafe extern "C" fn class_init(class: glib::ffi::gpointer, _class_data: glib::ffi::gpointer) {
    let class = class as *mut ffi::GstAllocatorClass;

    (*class).free = Some(free);
}

unsafe extern "C" fn instance_init(
    obj: *mut glib::gobject_ffi::GTypeInstance,
    _class: glib::ffi::gpointer,
) {
    static ALLOCATOR_TYPE: &[u8] = b"RustGlobalAllocatorMemory\0";

    let allocator = obj as *mut ffi::GstAllocator;

    (*allocator).mem_type = ALLOCATOR_TYPE.as_ptr() as *const _;
    (*allocator).mem_map = Some(mem_map);
    (*allocator).mem_unmap = Some(mem_unmap);
    (*allocator).mem_share = Some(mem_share);
    (*allocator).mem_is_span = Some(mem_is_span);

    (*allocator).object.flags |= ffi::GST_ALLOCATOR_FLAG_CUSTOM_ALLOC;
    (*allocator).object.flags |= ffi::GST_OBJECT_FLAG_MAY_BE_LEAKED;
}

fn rust_allocator() -> &'static crate::Allocator {
    static RUST_ALLOCATOR: std::sync::OnceLock<crate::Allocator> = std::sync::OnceLock::new();

    RUST_ALLOCATOR.get_or_init(|| unsafe {
        struct TypeInfoWrap(glib::gobject_ffi::GTypeInfo);
        unsafe impl Send for TypeInfoWrap {}
        unsafe impl Sync for TypeInfoWrap {}

        static TYPE_INFO: TypeInfoWrap = TypeInfoWrap(glib::gobject_ffi::GTypeInfo {
            class_size: mem::size_of::<ffi::GstAllocatorClass>() as u16,
            base_init: None,
            base_finalize: None,
            class_init: Some(class_init),
            class_finalize: None,
            class_data: ptr::null_mut(),
            instance_size: mem::size_of::<ffi::GstAllocator>() as u16,
            n_preallocs: 0,
            instance_init: Some(instance_init),
            value_table: ptr::null(),
        });

        let type_name = {
            let mut idx = 0;

            loop {
                let type_name = glib::gformat!("GstRsAllocator-{}", idx);
                if glib::gobject_ffi::g_type_from_name(type_name.as_ptr())
                    == glib::gobject_ffi::G_TYPE_INVALID
                {
                    break type_name;
                }
                idx += 1;
            }
        };

        let t = glib::gobject_ffi::g_type_register_static(
            crate::Allocator::static_type().into_glib(),
            type_name.as_ptr(),
            &TYPE_INFO.0,
            0,
        );

        assert!(t != glib::gobject_ffi::G_TYPE_INVALID);

        from_glib_none(
            glib::gobject_ffi::g_object_newv(t, 0, ptr::null_mut()) as *mut ffi::GstAllocator
        )
    })
}

impl Memory {
    /// Creates a new read-only memory wrapping `slice`.
    ///
    /// The wrapped value is dropped together with the memory.
    #[doc(alias = "gst_memory_new_wrapped")]
    #[doc(alias = "gst_memory_new_wrapped_full")]
    #[inline]
    pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let len = slice.as_ref().len();
        unsafe {
            let layout = alloc::Layout::new::<WrappedMemory<T>>();
            let mem = alloc::alloc(layout) as *mut WrappedMemory<T>;

            ffi::gst_memory_init(
                mem as *mut ffi::GstMemory,
                ffi::GST_MINI_OBJECT_FLAG_LOCK_READONLY,
                rust_allocator().to_glib_none().0,
                ptr::null_mut(),
                len,
                0,
                0,
                len,
            );

            ptr::write(ptr::addr_of_mut!((*mem).wrap), slice);

            // Take the data pointer only after `slice` has been moved into its
            // final location inside the struct.
            assert_eq!(len, (*mem).wrap.as_ref().len());
            let data = (*mem).wrap.as_ref().as_ptr();
            ptr::write(ptr::addr_of_mut!((*mem).data), mut_override(data));

            ptr::write(ptr::addr_of_mut!((*mem).layout), layout);

            let wrap_offset = ptr::addr_of!((*mem).wrap) as usize - mem as usize;
            ptr::write(ptr::addr_of_mut!((*mem).wrap_offset), wrap_offset);

            ptr::write(
                ptr::addr_of_mut!((*mem).wrap_drop_in_place),
                ptr::drop_in_place::<T>,
            );

            from_glib_full(mem as *mut ffi::GstMemory)
        }
    }

    /// Creates a new memory wrapping `slice` that can also be mapped writable.
    ///
    /// The wrapped value is dropped together with the memory.
    #[doc(alias = "gst_memory_new_wrapped")]
    #[doc(alias = "gst_memory_new_wrapped_full")]
    #[inline]
    pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(mut slice: T) -> Self {
        assert_initialized_main_thread!();

        let len = slice.as_mut().len();
        unsafe {
            let layout = alloc::Layout::new::<WrappedMemory<T>>();
            let mem = alloc::alloc(layout) as *mut WrappedMemory<T>;

            ffi::gst_memory_init(
                mem as *mut ffi::GstMemory,
                0,
                rust_allocator().to_glib_none().0,
                ptr::null_mut(),
                len,
                0,
                0,
                len,
            );

            ptr::write(ptr::addr_of_mut!((*mem).wrap), slice);

            assert_eq!(len, (*mem).wrap.as_mut().len());
            let data = (*mem).wrap.as_mut().as_mut_ptr();
            ptr::write(ptr::addr_of_mut!((*mem).data), data);

            ptr::write(ptr::addr_of_mut!((*mem).layout), layout);

            let wrap_offset = ptr::addr_of!((*mem).wrap) as usize - mem as usize;
            ptr::write(ptr::addr_of_mut!((*mem).wrap_offset), wrap_offset);

            ptr::write(
                ptr::addr_of_mut!((*mem).wrap_drop_in_place),
                ptr::drop_in_place::<T>,
            );

            from_glib_full(mem as *mut ffi::GstMemory)
        }
    }
}
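
// Illustrative usage sketch, not part of the original file: exercises the
// wrapped allocator through the public API. `crate::init()`,
// `Memory::from_slice()`, `MemoryRef::size()`, `MemoryRef::map_readable()` and
// `MemoryMap::as_slice()` are existing gstreamer-rs APIs; the test itself and
// its data are assumptions for illustration only.
#[cfg(test)]
mod tests {
    #[test]
    fn test_wrapped_slice_roundtrip() {
        crate::init().unwrap();

        // Wrap an owned Vec<u8>; it is dropped when the GstMemory is freed.
        let mem = crate::Memory::from_slice(vec![1u8, 2, 3, 4]);
        assert_eq!(mem.size(), 4);

        // Mapping goes through `mem_map()` above and exposes the wrapped bytes.
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
    }
}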