1use glib::{prelude::*, translate::*};
4
5use std::{alloc, any::TypeId, mem, ptr};
6
7use crate::{Memory, ffi};
8
/// Errors that can occur when trying to extract a wrapped Rust value back
/// out of a [`Memory`] (see [`Memory::try_into_inner`]).
#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)]
pub enum MemoryIntoInnerError {
    /// The memory was not allocated by this module's Rust allocator, so it
    /// cannot contain a wrapped Rust value.
    #[error("Memory does not use the Rust allocator (uses {actual_allocator:?})")]
    WrongAllocator { actual_allocator: Option<String> },
    /// The memory is not writable, i.e. other references to it exist and the
    /// wrapped value cannot be moved out exclusively.
    #[error("Memory is not writable")]
    NotWritable,
    /// The memory is a sub-memory (share) of another memory; only the parent
    /// owns the wrapped value.
    #[error("Cannot extract wrapped value from sub-memory (shared memory)")]
    SubMemory,
    /// The memory wraps a value of a different type than requested.
    #[error("Memory does not wrap the requested type (expected {expected:?}, found {actual:?})")]
    TypeMismatch { expected: TypeId, actual: TypeId },
    /// The buffer contained more than one memory block.
    #[error("Buffer must contain exactly one memory block")]
    MultipleMemoryBlocks,
}
24
// Memory layout handed to GStreamer: a `GstMemory` header (kept first via
// `#[repr(C)]`) followed by bookkeeping for the Rust-side allocation and an
// optionally wrapped Rust value of type `T`. Code that does not know `T`
// accesses the header as `WrappedMemory<()>` and reaches the wrapped value
// through `wrap_offset` / `wrap_drop_in_place`.
#[repr(C)]
struct WrappedMemory<T> {
    // Embedded GStreamer memory header; a `*mut GstMemory` pointing here is
    // also a pointer to the whole wrapper struct.
    mem: ffi::GstMemory,

    // Pointer to the start of the usable data area.
    data: *mut u8,

    // Layout this block was allocated with, required again for `dealloc`.
    layout: alloc::Layout,

    // Byte offset of `wrap` from the start of the struct, enabling
    // type-erased access to the wrapped value.
    wrap_offset: usize,
    // Type-erased drop function for `wrap`; `None` when `T` needs no drop,
    // when nothing is wrapped, or after the value has been moved out.
    wrap_drop_in_place: Option<unsafe fn(*mut ())>,
    // `TypeId` of the wrapped value, checked before extraction.
    wrap_type_id: TypeId,
    // The wrapped Rust value itself (`()` for plain allocations).
    wrap: T,
}
43
44unsafe extern "C" fn alloc(
45 allocator: *mut ffi::GstAllocator,
46 size: usize,
47 params: *mut ffi::GstAllocationParams,
48) -> *mut ffi::GstMemory {
49 unsafe {
50 let params = &*params;
51
52 let Some(maxsize) = size
53 .checked_add(params.prefix)
54 .and_then(|s| s.checked_add(params.padding))
55 else {
56 return ptr::null_mut();
57 };
58
59 let align = params.align | crate::Memory::default_alignment();
60
61 let layout_base = alloc::Layout::new::<WrappedMemory<()>>();
62
63 let layout_data = match alloc::Layout::from_size_align(maxsize, align + 1) {
64 Ok(res) => res,
65 Err(err) => {
66 crate::warning!(
67 crate::CAT_RUST,
68 "Invalid size {maxsize} or alignment {align}: {err}"
69 );
70 return ptr::null_mut();
71 }
72 };
73 let (layout, data_offset) = match layout_base.extend(layout_data) {
74 Ok(res) => res,
75 Err(err) => {
76 crate::warning!(
77 crate::CAT_RUST,
78 "Can't extend base memory layout to {maxsize} or alignment {align}: {err}"
79 );
80 return ptr::null_mut();
81 }
82 };
83 let layout = layout.pad_to_align();
84
85 let mem = alloc::alloc(layout);
86 let data = mem.add(data_offset);
87
88 if params.prefix > 0 && (params.flags & ffi::GST_MEMORY_FLAG_ZERO_PREFIXED) != 0 {
89 ptr::write_bytes(data, 0, params.prefix);
90 }
91
92 if (params.flags & ffi::GST_MEMORY_FLAG_ZERO_PADDED) != 0 {
93 ptr::write_bytes(data.add(params.prefix).add(size), 0, params.padding);
94 }
95
96 let mem = mem as *mut WrappedMemory<()>;
97 ffi::gst_memory_init(
98 ptr::addr_of_mut!((*mem).mem),
99 params.flags,
100 allocator,
101 ptr::null_mut(),
102 maxsize,
103 params.align,
104 params.prefix,
105 size,
106 );
107 ptr::write(ptr::addr_of_mut!((*mem).data), data);
108 ptr::write(ptr::addr_of_mut!((*mem).layout), layout);
109 ptr::write(ptr::addr_of_mut!((*mem).wrap_type_id), TypeId::of::<()>());
110 ptr::write(ptr::addr_of_mut!((*mem).wrap_offset), 0);
111 ptr::write(ptr::addr_of_mut!((*mem).wrap_drop_in_place), None);
112
113 mem as *mut ffi::GstMemory
114 }
115}
116
/// `GstAllocatorClass::free` vfunc: releases a block created by `alloc`,
/// `mem_share`, or the wrapping constructors on `Memory`.
unsafe extern "C" fn free(_allocator: *mut ffi::GstAllocator, mem: *mut ffi::GstMemory) {
    unsafe {
        // GStreamer only calls this once the last reference is gone.
        debug_assert_eq!((*mem).mini_object.refcount, 0);

        let mem = mem as *mut WrappedMemory<()>;

        // If this memory still owns a wrapped Rust value, drop it in place
        // first. `wrap_drop_in_place` is `None` for plain allocations, for
        // sub-memories, and after the value has been extracted.
        if let Some(wrap_drop_in_place) = (*mem).wrap_drop_in_place {
            let wrap = (mem as *mut u8).add((*mem).wrap_offset) as *mut ();
            wrap_drop_in_place(wrap);
        }

        // `layout` records exactly how this block was allocated.
        alloc::dealloc(mem as *mut u8, (*mem).layout);
    }
}
131
132unsafe extern "C" fn mem_map(
133 mem: *mut ffi::GstMemory,
134 _maxsize: usize,
135 _flags: ffi::GstMapFlags,
136) -> glib::ffi::gpointer {
137 unsafe {
138 let mem = mem as *mut WrappedMemory<()>;
139
140 (*mem).data as glib::ffi::gpointer
142 }
143}
144
// Unmapping is a no-op: `mem_map` hands out a pointer that remains valid for
// the lifetime of the memory, so there is nothing to undo here.
unsafe extern "C" fn mem_unmap(_mem: *mut ffi::GstMemory) {}
146
147unsafe extern "C" fn mem_share(
148 mem: *mut ffi::GstMemory,
149 offset: isize,
150 size: isize,
151) -> *mut ffi::GstMemory {
152 unsafe {
153 let mem = mem as *mut WrappedMemory<()>;
154
155 let parent = if (*mem).mem.parent.is_null() {
158 mem
159 } else {
160 (*mem).mem.parent as *mut WrappedMemory<()>
161 };
162
163 let offset = offset as usize;
166 let mut size = size as usize;
167
168 let new_offset = (*mem).mem.offset.wrapping_add(offset);
169 debug_assert!(new_offset < (*mem).mem.maxsize);
170
171 if size == usize::MAX {
172 size = (*mem).mem.size.wrapping_sub(offset);
173 }
174 debug_assert!(new_offset <= usize::MAX - size);
175 debug_assert!(new_offset + size <= (*mem).mem.maxsize);
176
177 let layout = alloc::Layout::new::<WrappedMemory<()>>();
178 let sub = alloc::alloc(layout) as *mut WrappedMemory<()>;
179
180 ffi::gst_memory_init(
181 sub as *mut ffi::GstMemory,
182 (*mem).mem.mini_object.flags | ffi::GST_MINI_OBJECT_FLAG_LOCK_READONLY,
183 (*mem).mem.allocator,
184 parent as *mut ffi::GstMemory,
185 (*mem).mem.maxsize,
186 (*mem).mem.align,
187 new_offset,
188 size,
189 );
190 ptr::write(ptr::addr_of_mut!((*sub).data), (*mem).data);
191 ptr::write(ptr::addr_of_mut!((*sub).layout), layout);
192 ptr::write(ptr::addr_of_mut!((*sub).wrap_offset), 0);
193 ptr::write(ptr::addr_of_mut!((*sub).wrap_drop_in_place), None);
194
195 sub as *mut ffi::GstMemory
196 }
197}
198
/// `GstAllocator` is_span vfunc: reports whether `mem1` is immediately
/// followed by `mem2` inside the same parent allocation, optionally writing
/// `mem1`'s offset relative to its parent to `offset`.
unsafe extern "C" fn mem_is_span(
    mem1: *mut ffi::GstMemory,
    mem2: *mut ffi::GstMemory,
    offset: *mut usize,
) -> glib::ffi::gboolean {
    unsafe {
        let mem1 = mem1 as *mut WrappedMemory<()>;
        let mem2 = mem2 as *mut WrappedMemory<()>;

        // GStreamer only calls this for memories that share a parent.
        let parent1 = (*mem1).mem.parent as *mut WrappedMemory<()>;
        let parent2 = (*mem2).mem.parent as *mut WrappedMemory<()>;
        debug_assert_eq!(parent1, parent2);

        if !offset.is_null() {
            // Offset of mem1 relative to its parent's own offset.
            *offset = (*mem1).mem.offset.wrapping_sub((*parent1).mem.offset);
        }

        // Contiguous iff mem1 ends exactly where mem2 begins.
        let is_span = ((*mem1).mem.offset + ((*mem1).mem.size)) == (*mem2).mem.offset;

        is_span.into_glib()
    }
}
228
229unsafe extern "C" fn class_init(class: glib::ffi::gpointer, _class_data: glib::ffi::gpointer) {
230 unsafe {
231 let class = class as *mut ffi::GstAllocatorClass;
232
233 (*class).alloc = Some(alloc);
234 (*class).free = Some(free);
235 }
236}
237
238unsafe extern "C" fn instance_init(
239 obj: *mut glib::gobject_ffi::GTypeInstance,
240 _class: glib::ffi::gpointer,
241) {
242 unsafe {
243 static ALLOCATOR_TYPE: &[u8] = b"RustGlobalAllocatorMemory\0";
244
245 let allocator = obj as *mut ffi::GstAllocator;
246
247 (*allocator).mem_type = ALLOCATOR_TYPE.as_ptr() as *const _;
248 (*allocator).mem_map = Some(mem_map);
249 (*allocator).mem_unmap = Some(mem_unmap);
250 (*allocator).mem_share = Some(mem_share);
252 (*allocator).mem_is_span = Some(mem_is_span);
253
254 (*allocator).object.flags |= ffi::GST_OBJECT_FLAG_MAY_BE_LEAKED;
255 }
256}
257
/// Returns the process-wide singleton [`crate::Allocator`] that allocates
/// `GstMemory` through the Rust global allocator.
///
/// Panics if GStreamer has not been initialized.
pub fn rust_allocator() -> &'static crate::Allocator {
    assert_initialized_main_thread!();

    rust_allocator_internal()
}
263
/// Lazily registers the allocator's GType and constructs the singleton
/// instance on first use. Does not check for GStreamer initialization;
/// callers outside this module go through `rust_allocator()`.
fn rust_allocator_internal() -> &'static crate::Allocator {
    static RUST_ALLOCATOR: std::sync::OnceLock<crate::Allocator> = std::sync::OnceLock::new();

    RUST_ALLOCATOR.get_or_init(|| unsafe {
        // GTypeInfo contains raw pointers and is not Send/Sync by itself;
        // all pointer fields here are null or 'static, so this is sound.
        struct TypeInfoWrap(glib::gobject_ffi::GTypeInfo);
        unsafe impl Send for TypeInfoWrap {}
        unsafe impl Sync for TypeInfoWrap {}

        static TYPE_INFO: TypeInfoWrap = TypeInfoWrap(glib::gobject_ffi::GTypeInfo {
            class_size: mem::size_of::<ffi::GstAllocatorClass>() as u16,
            base_init: None,
            base_finalize: None,
            class_init: Some(class_init),
            class_finalize: None,
            class_data: ptr::null_mut(),
            instance_size: mem::size_of::<ffi::GstAllocator>() as u16,
            n_preallocs: 0,
            instance_init: Some(instance_init),
            value_table: ptr::null(),
        });

        // Probe for a type name that is not taken yet. Names can collide
        // when multiple versions of this crate live in the same process.
        let type_name = {
            let mut idx = 0;

            loop {
                let type_name = glib::gformat!("GstRsAllocator-{}", idx);
                if glib::gobject_ffi::g_type_from_name(type_name.as_ptr())
                    == glib::gobject_ffi::G_TYPE_INVALID
                {
                    break type_name;
                }
                idx += 1;
            }
        };

        let t = glib::gobject_ffi::g_type_register_static(
            crate::Allocator::static_type().into_glib(),
            type_name.as_ptr(),
            &TYPE_INFO.0,
            0,
        );

        assert!(t != glib::gobject_ffi::G_TYPE_INVALID);

        from_glib_none(
            glib::gobject_ffi::g_object_newv(t, 0, ptr::null_mut()) as *mut ffi::GstAllocator
        )
    })
}
313
#[inline]
// Moves the wrapped `T` out of a memory block created by
// `Memory::from_slice`/`from_mut_slice`, leaving the block itself alive.
//
// Safety: `mem_ptr` must point to a valid `GstMemory`. On success the
// wrapped value must not be dropped again by `free`; this is ensured by
// clearing `wrap_drop_in_place` below. The caller must still unref the
// memory afterwards.
pub(crate) unsafe fn try_into_from_memory_ptr<T: 'static>(
    mem_ptr: *mut ffi::GstMemory,
) -> Result<T, MemoryIntoInnerError> {
    skip_assert_initialized!();

    unsafe {
        // Only memories from our Rust allocator have a WrappedMemory header.
        if (*mem_ptr).allocator.is_null()
            || (*mem_ptr).allocator != rust_allocator_internal().as_ptr()
        {
            let actual_allocator = if (*mem_ptr).allocator.is_null() {
                None
            } else {
                Some(
                    std::ffi::CStr::from_ptr(glib::gobject_ffi::g_type_name_from_instance(
                        (*mem_ptr).allocator as *mut glib::gobject_ffi::GTypeInstance,
                    ))
                    .to_string_lossy()
                    .to_string(),
                )
            };
            return Err(MemoryIntoInnerError::WrongAllocator { actual_allocator });
        }

        // Writability implies exclusive ownership, so moving the value out
        // cannot race with other references.
        if ffi::gst_mini_object_is_writable(mem_ptr as *mut ffi::GstMiniObject) == glib::ffi::GFALSE
        {
            return Err(MemoryIntoInnerError::NotWritable);
        }

        // Sub-memories reference their parent's data; only the parent owns
        // the wrapped value.
        if !(*mem_ptr).parent.is_null() {
            return Err(MemoryIntoInnerError::SubMemory);
        }

        let mem_wrapper = &*(mem_ptr as *mut WrappedMemory<T>);

        // Check the stored TypeId before reinterpreting `wrap` as `T`.
        if mem_wrapper.wrap_type_id != TypeId::of::<T>() {
            return Err(MemoryIntoInnerError::TypeMismatch {
                expected: std::any::TypeId::of::<T>(),
                actual: mem_wrapper.wrap_type_id,
            });
        }

        // Move the value out by bitwise copy...
        let mem_wrapper_mut = &mut *(mem_ptr as *mut WrappedMemory<T>);
        let value = ptr::read(&mem_wrapper_mut.wrap);

        // ...and disarm the drop function so `free` won't double-drop it.
        mem_wrapper_mut.wrap_drop_in_place = None;

        Ok(value)
    }
}
376
377impl Memory {
378 #[doc(alias = "gst_memory_new_wrapped")]
379 #[doc(alias = "gst_memory_new_wrapped_full")]
380 #[inline]
381 pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
382 assert_initialized_main_thread!();
383
384 let len = slice.as_ref().len();
385 unsafe {
386 let layout = alloc::Layout::new::<WrappedMemory<T>>();
387 let mem = alloc::alloc(layout) as *mut WrappedMemory<T>;
388
389 ffi::gst_memory_init(
390 mem as *mut ffi::GstMemory,
391 ffi::GST_MINI_OBJECT_FLAG_LOCK_READONLY,
392 rust_allocator_internal().to_glib_none().0,
393 ptr::null_mut(),
394 len,
395 0,
396 0,
397 len,
398 );
399
400 ptr::write(ptr::addr_of_mut!((*mem).wrap), slice);
401
402 assert_eq!(len, (*mem).wrap.as_ref().len());
403 let data = (*mem).wrap.as_ref().as_ptr();
404 ptr::write(ptr::addr_of_mut!((*mem).data), mut_override(data));
405
406 ptr::write(ptr::addr_of_mut!((*mem).layout), layout);
407
408 let wrap_offset = ptr::addr_of!((*mem).wrap) as usize - mem as usize;
409 ptr::write(ptr::addr_of_mut!((*mem).wrap_offset), wrap_offset);
410
411 ptr::write(
412 ptr::addr_of_mut!((*mem).wrap_drop_in_place),
413 if mem::needs_drop::<T>() {
414 Some(|ptr| ptr::drop_in_place::<T>(ptr as *mut T))
415 } else {
416 None
417 },
418 );
419
420 ptr::write(ptr::addr_of_mut!((*mem).wrap_type_id), TypeId::of::<T>());
421
422 from_glib_full(mem as *mut ffi::GstMemory)
423 }
424 }
425
426 #[doc(alias = "gst_memory_new_wrapped")]
427 #[doc(alias = "gst_memory_new_wrapped_full")]
428 #[inline]
429 pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(mut slice: T) -> Self {
430 assert_initialized_main_thread!();
431
432 let len = slice.as_mut().len();
433 unsafe {
434 let layout = alloc::Layout::new::<WrappedMemory<T>>();
435 let mem = alloc::alloc(layout) as *mut WrappedMemory<T>;
436
437 ffi::gst_memory_init(
438 mem as *mut ffi::GstMemory,
439 0,
440 rust_allocator_internal().to_glib_none().0,
441 ptr::null_mut(),
442 len,
443 0,
444 0,
445 len,
446 );
447
448 ptr::write(ptr::addr_of_mut!((*mem).wrap), slice);
449
450 assert_eq!(len, (*mem).wrap.as_mut().len());
451 let data = (*mem).wrap.as_mut().as_mut_ptr();
452 ptr::write(ptr::addr_of_mut!((*mem).data), data);
453
454 ptr::write(ptr::addr_of_mut!((*mem).layout), layout);
455
456 let wrap_offset = ptr::addr_of!((*mem).wrap) as usize - mem as usize;
457 ptr::write(ptr::addr_of_mut!((*mem).wrap_offset), wrap_offset);
458
459 ptr::write(
460 ptr::addr_of_mut!((*mem).wrap_drop_in_place),
461 if mem::needs_drop::<T>() {
462 Some(|ptr| ptr::drop_in_place::<T>(ptr as *mut T))
463 } else {
464 None
465 },
466 );
467
468 ptr::write(ptr::addr_of_mut!((*mem).wrap_type_id), TypeId::of::<T>());
469
470 from_glib_full(mem as *mut ffi::GstMemory)
471 }
472 }
473
474 #[inline]
500 pub fn try_into_inner<T: 'static>(self) -> Result<T, (Self, MemoryIntoInnerError)> {
501 unsafe {
503 let mem_ptr = self.as_mut_ptr();
504
505 match try_into_from_memory_ptr(mem_ptr) {
507 Ok(value) => {
508 drop(self);
511 Ok(value)
512 }
513 Err(err) => {
514 Err((self, err))
516 }
517 }
518 }
519 }
520}
521
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_alloc() {
        use crate::prelude::AllocatorExt;

        crate::init().unwrap();

        let data = [0, 1, 2, 3, 4, 5, 6, 7];

        // Plain allocation with default parameters.
        let mut mem = rust_allocator().alloc(data.len(), None).unwrap();
        assert_eq!(mem.size(), data.len());
        assert_eq!(mem.allocator().unwrap(), rust_allocator());

        {
            let mem = mem.get_mut().unwrap();
            let mut map = mem.map_writable().unwrap();
            assert_eq!(
                map.as_ptr() as usize & crate::Memory::default_alignment(),
                0
            );
            map.copy_from_slice(&data);
        }

        let copy = mem.copy();
        assert_eq!(copy.size(), data.len());
        assert_eq!(copy.allocator().unwrap(), rust_allocator());

        {
            let map = copy.map_readable().unwrap();
            assert_eq!(
                copy.as_ptr() as usize & crate::Memory::default_alignment(),
                0
            );
            assert_eq!(map.as_slice(), &data);
        }

        // Allocation with a stricter (1024-byte) alignment request.
        let mut mem = rust_allocator()
            .alloc(
                data.len(),
                Some(&crate::AllocationParams::new(
                    crate::MemoryFlags::empty(),
                    1023,
                    0,
                    0,
                )),
            )
            .unwrap();
        assert_eq!(mem.size(), data.len());
        assert_eq!(mem.maxsize(), data.len());
        assert_eq!(mem.allocator().unwrap(), rust_allocator());

        {
            let mem = mem.get_mut().unwrap();
            let mut map = mem.map_writable().unwrap();
            assert_eq!(map.as_ptr() as usize & 1023, 0);
            map.copy_from_slice(&data);
        }

        let copy = mem.copy();
        assert_eq!(copy.size(), data.len());
        assert_eq!(copy.allocator().unwrap(), rust_allocator());

        {
            let map = copy.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data);
        }

        let share = mem.share(2..4);
        assert_eq!(share.size(), 2);
        assert_eq!(share.allocator().unwrap(), rust_allocator());
        {
            let map = share.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[2..4]);
        }

        // Allocation with zeroed 32-byte prefix and padding.
        let mut mem = rust_allocator()
            .alloc(
                data.len(),
                Some(&crate::AllocationParams::new(
                    crate::MemoryFlags::ZERO_PADDED | crate::MemoryFlags::ZERO_PREFIXED,
                    1023,
                    32,
                    32,
                )),
            )
            .unwrap();
        assert_eq!(mem.size(), data.len());
        assert_eq!(mem.maxsize(), data.len() + 32 + 32);
        assert_eq!(mem.allocator().unwrap(), rust_allocator());

        {
            let mem = mem.get_mut().unwrap();
            let mut map = mem.map_writable().unwrap();
            assert_eq!((map.as_ptr() as usize - 32) & 1023, 0);
            map.copy_from_slice(&data);
        }

        let copy = mem.copy();
        assert_eq!(copy.size(), data.len());
        assert_eq!(copy.allocator().unwrap(), rust_allocator());

        {
            let map = copy.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data);
        }

        let share = mem.share(2..4);
        assert_eq!(share.size(), 2);
        assert_eq!(share.allocator().unwrap(), rust_allocator());
        {
            let map = share.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[2..4]);
        }

        // Sharing the whole maxsize exposes the zeroed prefix and padding.
        let share = mem.share_maxsize(0..(data.len() + 32 + 32));
        assert_eq!(share.size(), data.len() + 32 + 32);
        assert_eq!(share.allocator().unwrap(), rust_allocator());
        {
            let map = share.map_readable().unwrap();
            let padding = [0; 32];
            assert_eq!(&map.as_slice()[..32], &padding);
            assert_eq!(&map.as_slice()[32..][..data.len()], &data);
            assert_eq!(&map.as_slice()[(32 + data.len())..], &padding);
        }
    }

    #[test]
    fn test_wrap_vec_u8() {
        crate::init().unwrap();

        let data = vec![1u8, 2, 3, 4, 5];
        let expected = data.clone();

        let mem = Memory::from_slice(data);
        assert_eq!(mem.size(), 5);
        {
            let map = mem.map_readable().unwrap();
            assert_eq!(&expected, map.as_slice());
        }
    }

    #[test]
    fn test_wrap_array_u8() {
        crate::init().unwrap();

        let data: [u8; 5] = [1u8, 2, 3, 4, 5];
        let expected = data;

        let mem = Memory::from_slice(data);
        assert_eq!(mem.size(), 5);
        {
            let map = mem.map_readable().unwrap();
            assert_eq!(&expected, map.as_slice());
        }
    }

    #[test]
    fn test_wrap_vec_u8_and_back() {
        crate::init().unwrap();

        let data = vec![1u8, 2, 3, 4, 5];
        let expected = data.clone();

        let mem = Memory::from_slice(data);
        assert_eq!(mem.size(), 5);
        {
            let map = mem.map_readable().unwrap();
            assert_eq!(&expected, map.as_slice());
        }

        // Extract the wrapped Vec back out of the memory.
        let extracted: Vec<u8> = mem.try_into_inner().unwrap();
        assert_eq!(extracted, expected);
    }

    #[test]
    fn test_wrap_array_u8_and_back() {
        crate::init().unwrap();

        let data: [u8; 5] = [1u8, 2, 3, 4, 5];
        let expected = data;

        let mem = Memory::from_slice(data);
        assert_eq!(mem.size(), 5);
        {
            let map = mem.map_readable().unwrap();
            assert_eq!(&expected, map.as_slice());
        }

        // Extract the wrapped array back out of the memory.
        let extracted: [u8; 5] = mem.try_into_inner().unwrap();
        assert_eq!(extracted, expected);
    }

    #[test]
    fn test_wrap_array_u8_mem_ops() {
        crate::init().unwrap();

        let data = [0, 1, 2, 3, 4, 5, 6, 7];

        let memory = Memory::from_slice(data);
        assert_eq!(memory.size(), data.len());

        {
            let map = memory.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data);
        }

        // A copy is an independent memory with no parent.
        let copy = memory.copy();
        assert!(copy.parent().is_none());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = copy.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        // A full share references the original as its parent.
        let share = memory.share(..);
        assert_eq!(share.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = share.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        let sub1 = memory.share(..2);
        assert_eq!(sub1.size(), 2);
        assert_eq!(sub1.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub1.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[..2]);
        }

        let sub2 = memory.share(2..);
        assert_eq!(sub2.size(), 6);
        assert_eq!(sub2.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub2.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[2..]);
        }

        // sub1 ends exactly where sub2 begins, so they span contiguously.
        let offset = sub1.is_span(&sub2).unwrap();
        assert_eq!(offset, 0);

        // A share of a share still points at the top-level parent.
        let sub3 = sub2.share(2..);
        assert_eq!(sub3.size(), 4);
        assert_eq!(sub3.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub3.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[4..]);
        }
    }
}