use glib::{prelude::*, translate::*};

use std::{alloc, any::TypeId, mem, ptr};

use crate::{ffi, Memory};

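/// Errors returned when a wrapped value cannot be extracted again via
/// [`Memory::try_into_inner`].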
#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)]
pub enum MemoryIntoInnerError {
    #[error("Memory does not use the Rust allocator (uses {actual_allocator:?})")]
    WrongAllocator { actual_allocator: Option<String> },
    #[error("Memory is not writable")]
    NotWritable,
    #[error("Cannot extract wrapped value from sub-memory (shared memory)")]
    SubMemory,
    #[error("Memory does not wrap the requested type (expected {expected:?}, found {actual:?})")]
    TypeMismatch { expected: TypeId, actual: TypeId },
    #[error("Buffer must contain exactly one memory block")]
    MultipleMemoryBlocks,
}

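/// Underlying storage used by the Rust allocator: a `GstMemory` header followed by
/// bookkeeping fields and either inline data (plain allocations) or a wrapped value of
/// type `T` ([`Memory::from_slice`] / [`Memory::from_mut_slice`]).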
#[repr(C)]
struct WrappedMemory<T> {
    mem: ffi::GstMemory,

    data: *mut u8,

    layout: alloc::Layout,

    wrap_offset: usize,
    wrap_drop_in_place: Option<unsafe fn(*mut ())>,
    wrap_type_id: TypeId,
    wrap: T,
}

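// `GstAllocatorClass::alloc` implementation: allocates header and data in a single
// allocation via the Rust global allocator and initializes the `GstMemory`.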
unsafe extern "C" fn alloc(
    allocator: *mut ffi::GstAllocator,
    size: usize,
    params: *mut ffi::GstAllocationParams,
) -> *mut ffi::GstMemory {
    let params = &*params;

    let Some(maxsize) = size
        .checked_add(params.prefix)
        .and_then(|s| s.checked_add(params.padding))
    else {
        return ptr::null_mut();
    };

    let align = params.align | crate::Memory::default_alignment();

    let layout_base = alloc::Layout::new::<WrappedMemory<()>>();

    let layout_data = match alloc::Layout::from_size_align(maxsize, align + 1) {
        Ok(res) => res,
        Err(err) => {
            crate::warning!(
                crate::CAT_RUST,
                "Invalid size {maxsize} or alignment {align}: {err}"
            );
            return ptr::null_mut();
        }
    };
    let (layout, data_offset) = match layout_base.extend(layout_data) {
        Ok(res) => res,
        Err(err) => {
            crate::warning!(
                crate::CAT_RUST,
                "Can't extend base memory layout to size {maxsize} and alignment {align}: {err}"
            );
            return ptr::null_mut();
        }
    };
    let layout = layout.pad_to_align();

    let mem = alloc::alloc(layout);
    let data = mem.add(data_offset);

    if params.prefix > 0 && (params.flags & ffi::GST_MEMORY_FLAG_ZERO_PREFIXED) != 0 {
        ptr::write_bytes(data, 0, params.prefix);
    }

    if (params.flags & ffi::GST_MEMORY_FLAG_ZERO_PADDED) != 0 {
        ptr::write_bytes(data.add(params.prefix).add(size), 0, params.padding);
    }

    let mem = mem as *mut WrappedMemory<()>;
    ffi::gst_memory_init(
        ptr::addr_of_mut!((*mem).mem),
        params.flags,
        allocator,
        ptr::null_mut(),
        maxsize,
        params.align,
        params.prefix,
        size,
    );
    ptr::write(ptr::addr_of_mut!((*mem).data), data);
    ptr::write(ptr::addr_of_mut!((*mem).layout), layout);
    ptr::write(ptr::addr_of_mut!((*mem).wrap_type_id), TypeId::of::<()>());
    ptr::write(ptr::addr_of_mut!((*mem).wrap_offset), 0);
    ptr::write(ptr::addr_of_mut!((*mem).wrap_drop_in_place), None);

    mem as *mut ffi::GstMemory
}

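// `GstAllocatorClass::free` implementation: drops a wrapped value (if still armed) and
// releases the allocation made in `alloc` or `mem_share`.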
unsafe extern "C" fn free(_allocator: *mut ffi::GstAllocator, mem: *mut ffi::GstMemory) {
    debug_assert_eq!((*mem).mini_object.refcount, 0);

    let mem = mem as *mut WrappedMemory<()>;

    if let Some(wrap_drop_in_place) = (*mem).wrap_drop_in_place {
        let wrap = (mem as *mut u8).add((*mem).wrap_offset) as *mut ();
        wrap_drop_in_place(wrap);
    }

    alloc::dealloc(mem as *mut u8, (*mem).layout);
}

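// Mapping simply returns the stored data pointer and unmapping is a no-op: the data
// stays valid for the whole lifetime of the `GstMemory`.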
unsafe extern "C" fn mem_map(
    mem: *mut ffi::GstMemory,
    _maxsize: usize,
    _flags: ffi::GstMapFlags,
) -> glib::ffi::gpointer {
    let mem = mem as *mut WrappedMemory<()>;

    (*mem).data as glib::ffi::gpointer
}

unsafe extern "C" fn mem_unmap(_mem: *mut ffi::GstMemory) {}

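// `GstMemory::mem_share` implementation: creates a sub-memory that borrows the data of
// its parent and only allocates a new header.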
unsafe extern "C" fn mem_share(
    mem: *mut ffi::GstMemory,
    offset: isize,
    size: isize,
) -> *mut ffi::GstMemory {
    let mem = mem as *mut WrappedMemory<()>;

    let parent = if (*mem).mem.parent.is_null() {
        mem
    } else {
        (*mem).mem.parent as *mut WrappedMemory<()>
    };

    let offset = offset as usize;
    let mut size = size as usize;

    let new_offset = (*mem).mem.offset.wrapping_add(offset);
    debug_assert!(new_offset < (*mem).mem.maxsize);

    if size == usize::MAX {
        size = (*mem).mem.size.wrapping_sub(offset);
    }
    debug_assert!(new_offset <= usize::MAX - size);
    debug_assert!(new_offset + size <= (*mem).mem.maxsize);

    let layout = alloc::Layout::new::<WrappedMemory<()>>();
    let sub = alloc::alloc(layout) as *mut WrappedMemory<()>;

    ffi::gst_memory_init(
        sub as *mut ffi::GstMemory,
        (*mem).mem.mini_object.flags | ffi::GST_MINI_OBJECT_FLAG_LOCK_READONLY,
        (*mem).mem.allocator,
        parent as *mut ffi::GstMemory,
        (*mem).mem.maxsize,
        (*mem).mem.align,
        new_offset,
        size,
    );
    ptr::write(ptr::addr_of_mut!((*sub).data), (*mem).data);
    ptr::write(ptr::addr_of_mut!((*sub).layout), layout);
    ptr::write(ptr::addr_of_mut!((*sub).wrap_offset), 0);
    ptr::write(ptr::addr_of_mut!((*sub).wrap_drop_in_place), None);

    sub as *mut ffi::GstMemory
}

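// `GstMemory::mem_is_span` implementation: two sub-memories span if they share the same
// parent and the first one ends exactly where the second one starts.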
unsafe extern "C" fn mem_is_span(
    mem1: *mut ffi::GstMemory,
    mem2: *mut ffi::GstMemory,
    offset: *mut usize,
) -> glib::ffi::gboolean {
    let mem1 = mem1 as *mut WrappedMemory<()>;
    let mem2 = mem2 as *mut WrappedMemory<()>;

    let parent1 = (*mem1).mem.parent as *mut WrappedMemory<()>;
    let parent2 = (*mem2).mem.parent as *mut WrappedMemory<()>;
    debug_assert_eq!(parent1, parent2);

    if !offset.is_null() {
        *offset = (*mem1).mem.offset.wrapping_sub((*parent1).mem.offset);
    }

    let is_span = (*mem1).mem.offset + (*mem1).mem.size == (*mem2).mem.offset;

    is_span.into_glib()
}

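// GObject class and instance initialization for the allocator type registered in
// `rust_allocator_internal()`.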
unsafe extern "C" fn class_init(class: glib::ffi::gpointer, _class_data: glib::ffi::gpointer) {
    let class = class as *mut ffi::GstAllocatorClass;

    (*class).alloc = Some(alloc);
    (*class).free = Some(free);
}

unsafe extern "C" fn instance_init(
    obj: *mut glib::gobject_ffi::GTypeInstance,
    _class: glib::ffi::gpointer,
) {
    static ALLOCATOR_TYPE: &[u8] = b"RustGlobalAllocatorMemory\0";

    let allocator = obj as *mut ffi::GstAllocator;

    (*allocator).mem_type = ALLOCATOR_TYPE.as_ptr() as *const _;
    (*allocator).mem_map = Some(mem_map);
    (*allocator).mem_unmap = Some(mem_unmap);
    (*allocator).mem_share = Some(mem_share);
    (*allocator).mem_is_span = Some(mem_is_span);

    (*allocator).object.flags |= ffi::GST_OBJECT_FLAG_MAY_BE_LEAKED;
}

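/// Returns the singleton [`crate::Allocator`] that allocates memory through the Rust
/// global allocator. It also backs the memories created by [`Memory::from_slice`] and
/// [`Memory::from_mut_slice`].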
pub fn rust_allocator() -> &'static crate::Allocator {
    assert_initialized_main_thread!();

    rust_allocator_internal()
}

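// Registers the allocator GType on first use and creates the singleton instance.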
fn rust_allocator_internal() -> &'static crate::Allocator {
    static RUST_ALLOCATOR: std::sync::OnceLock<crate::Allocator> = std::sync::OnceLock::new();

    RUST_ALLOCATOR.get_or_init(|| unsafe {
        struct TypeInfoWrap(glib::gobject_ffi::GTypeInfo);
        unsafe impl Send for TypeInfoWrap {}
        unsafe impl Sync for TypeInfoWrap {}

        static TYPE_INFO: TypeInfoWrap = TypeInfoWrap(glib::gobject_ffi::GTypeInfo {
            class_size: mem::size_of::<ffi::GstAllocatorClass>() as u16,
            base_init: None,
            base_finalize: None,
            class_init: Some(class_init),
            class_finalize: None,
            class_data: ptr::null_mut(),
            instance_size: mem::size_of::<ffi::GstAllocator>() as u16,
            n_preallocs: 0,
            instance_init: Some(instance_init),
            value_table: ptr::null(),
        });

        let type_name = {
            let mut idx = 0;

            loop {
                let type_name = glib::gformat!("GstRsAllocator-{}", idx);
                if glib::gobject_ffi::g_type_from_name(type_name.as_ptr())
                    == glib::gobject_ffi::G_TYPE_INVALID
                {
                    break type_name;
                }
                idx += 1;
            }
        };

        let t = glib::gobject_ffi::g_type_register_static(
            crate::Allocator::static_type().into_glib(),
            type_name.as_ptr(),
            &TYPE_INFO.0,
            0,
        );

        assert!(t != glib::gobject_ffi::G_TYPE_INVALID);

        from_glib_none(
            glib::gobject_ffi::g_object_newv(t, 0, ptr::null_mut()) as *mut ffi::GstAllocator
        )
    })
}

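// Checks that `mem_ptr` is an exclusively owned, non-shared memory created by the Rust
// allocator and wrapping a `T`, then moves the wrapped value out and disarms the drop
// function so the value isn't dropped again when the memory is freed.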
#[inline]
pub(crate) unsafe fn try_into_from_memory_ptr<T: 'static>(
    mem_ptr: *mut ffi::GstMemory,
) -> Result<T, MemoryIntoInnerError> {
    skip_assert_initialized!();

    if (*mem_ptr).allocator.is_null() || (*mem_ptr).allocator != rust_allocator_internal().as_ptr()
    {
        let actual_allocator = if (*mem_ptr).allocator.is_null() {
            None
        } else {
            Some(
                std::ffi::CStr::from_ptr(glib::gobject_ffi::g_type_name_from_instance(
                    (*mem_ptr).allocator as *mut glib::gobject_ffi::GTypeInstance,
                ))
                .to_string_lossy()
                .to_string(),
            )
        };
        return Err(MemoryIntoInnerError::WrongAllocator { actual_allocator });
    }

    if ffi::gst_mini_object_is_writable(mem_ptr as *mut ffi::GstMiniObject) == glib::ffi::GFALSE {
        return Err(MemoryIntoInnerError::NotWritable);
    }

    if !(*mem_ptr).parent.is_null() {
        return Err(MemoryIntoInnerError::SubMemory);
    }

    let mem_wrapper = &*(mem_ptr as *mut WrappedMemory<T>);

    if mem_wrapper.wrap_type_id != TypeId::of::<T>() {
        return Err(MemoryIntoInnerError::TypeMismatch {
            expected: std::any::TypeId::of::<T>(),
            actual: mem_wrapper.wrap_type_id,
        });
    }

    let mem_wrapper_mut = &mut *(mem_ptr as *mut WrappedMemory<T>);
    let value = ptr::read(&mem_wrapper_mut.wrap);

    mem_wrapper_mut.wrap_drop_in_place = None;

    Ok(value)
}

impl Memory {
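    /// Creates a new memory block that wraps a readable slice-like value (e.g. `Vec<u8>`
    /// or `[u8; N]`) without copying it.
    ///
    /// The resulting memory can only be mapped readable; the wrapped value is dropped
    /// together with the memory unless it is taken out again via
    /// [`Memory::try_into_inner`].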
    #[doc(alias = "gst_memory_new_wrapped")]
    #[doc(alias = "gst_memory_new_wrapped_full")]
    #[inline]
    pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let len = slice.as_ref().len();
        unsafe {
            let layout = alloc::Layout::new::<WrappedMemory<T>>();
            let mem = alloc::alloc(layout) as *mut WrappedMemory<T>;

            ffi::gst_memory_init(
                mem as *mut ffi::GstMemory,
                ffi::GST_MINI_OBJECT_FLAG_LOCK_READONLY,
                rust_allocator_internal().to_glib_none().0,
                ptr::null_mut(),
                len,
                0,
                0,
                len,
            );

            ptr::write(ptr::addr_of_mut!((*mem).wrap), slice);

            assert_eq!(len, (*mem).wrap.as_ref().len());
            let data = (*mem).wrap.as_ref().as_ptr();
            ptr::write(ptr::addr_of_mut!((*mem).data), mut_override(data));

            ptr::write(ptr::addr_of_mut!((*mem).layout), layout);

            let wrap_offset = ptr::addr_of!((*mem).wrap) as usize - mem as usize;
            ptr::write(ptr::addr_of_mut!((*mem).wrap_offset), wrap_offset);

            ptr::write(
                ptr::addr_of_mut!((*mem).wrap_drop_in_place),
                if mem::needs_drop::<T>() {
                    Some(|ptr| ptr::drop_in_place::<T>(ptr as *mut T))
                } else {
                    None
                },
            );

            ptr::write(ptr::addr_of_mut!((*mem).wrap_type_id), TypeId::of::<T>());

            from_glib_full(mem as *mut ffi::GstMemory)
        }
    }

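    /// Creates a new memory block that wraps a mutable slice-like value (e.g. `Vec<u8>`)
    /// without copying it, allowing writable mappings.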
    #[doc(alias = "gst_memory_new_wrapped")]
    #[doc(alias = "gst_memory_new_wrapped_full")]
    #[inline]
    pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(mut slice: T) -> Self {
        assert_initialized_main_thread!();

        let len = slice.as_mut().len();
        unsafe {
            let layout = alloc::Layout::new::<WrappedMemory<T>>();
            let mem = alloc::alloc(layout) as *mut WrappedMemory<T>;

            ffi::gst_memory_init(
                mem as *mut ffi::GstMemory,
                0,
                rust_allocator_internal().to_glib_none().0,
                ptr::null_mut(),
                len,
                0,
                0,
                len,
            );

            ptr::write(ptr::addr_of_mut!((*mem).wrap), slice);

            assert_eq!(len, (*mem).wrap.as_mut().len());
            let data = (*mem).wrap.as_mut().as_mut_ptr();
            ptr::write(ptr::addr_of_mut!((*mem).data), data);

            ptr::write(ptr::addr_of_mut!((*mem).layout), layout);

            let wrap_offset = ptr::addr_of!((*mem).wrap) as usize - mem as usize;
            ptr::write(ptr::addr_of_mut!((*mem).wrap_offset), wrap_offset);

            ptr::write(
                ptr::addr_of_mut!((*mem).wrap_drop_in_place),
                if mem::needs_drop::<T>() {
                    Some(|ptr| ptr::drop_in_place::<T>(ptr as *mut T))
                } else {
                    None
                },
            );

            ptr::write(ptr::addr_of_mut!((*mem).wrap_type_id), TypeId::of::<T>());

            from_glib_full(mem as *mut ffi::GstMemory)
        }
    }

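    /// Takes the value previously wrapped with [`Memory::from_slice`] or
    /// [`Memory::from_mut_slice`] out of this memory again.
    ///
    /// Fails and returns the memory unchanged if it was not created by the Rust
    /// allocator, is not writable (i.e. not exclusively owned), is a sub-memory, or
    /// wraps a different type than `T`.
    ///
    /// Illustrative usage, mirroring the `test_wrap_vec_u8_and_back` test below:
    ///
    /// ```no_run
    /// # use gstreamer as gst;
    /// gst::init().unwrap();
    /// let mem = gst::Memory::from_slice(vec![1u8, 2, 3, 4, 5]);
    /// let data: Vec<u8> = mem.try_into_inner().unwrap();
    /// assert_eq!(data, [1u8, 2, 3, 4, 5]);
    /// ```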
    #[inline]
    pub fn try_into_inner<T: 'static>(self) -> Result<T, (Self, MemoryIntoInnerError)> {
        unsafe {
            let mem_ptr = self.as_mut_ptr();

            match try_into_from_memory_ptr(mem_ptr) {
                Ok(value) => {
                    drop(self);
                    Ok(value)
                }
                Err(err) => Err((self, err)),
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_alloc() {
        use crate::prelude::AllocatorExt;

        crate::init().unwrap();

        let data = [0, 1, 2, 3, 4, 5, 6, 7];

        let mut mem = rust_allocator().alloc(data.len(), None).unwrap();
        assert_eq!(mem.size(), data.len());
        assert_eq!(mem.allocator().unwrap(), rust_allocator());

        {
            let mem = mem.get_mut().unwrap();
            let mut map = mem.map_writable().unwrap();
            assert_eq!(
                map.as_ptr() as usize & crate::Memory::default_alignment(),
                0
            );
            map.copy_from_slice(&data);
        }

        let copy = mem.copy();
        assert_eq!(copy.size(), data.len());
        assert_eq!(copy.allocator().unwrap(), rust_allocator());

        {
            let map = copy.map_readable().unwrap();
            assert_eq!(
                map.as_ptr() as usize & crate::Memory::default_alignment(),
                0
            );
            assert_eq!(map.as_slice(), &data);
        }

        let mut mem = rust_allocator()
            .alloc(
                data.len(),
                Some(&crate::AllocationParams::new(
                    crate::MemoryFlags::empty(),
                    1023,
                    0,
                    0,
                )),
            )
            .unwrap();
        assert_eq!(mem.size(), data.len());
        assert_eq!(mem.maxsize(), data.len());
        assert_eq!(mem.allocator().unwrap(), rust_allocator());

        {
            let mem = mem.get_mut().unwrap();
            let mut map = mem.map_writable().unwrap();
            assert_eq!(map.as_ptr() as usize & 1023, 0);
            map.copy_from_slice(&data);
        }

        let copy = mem.copy();
        assert_eq!(copy.size(), data.len());
        assert_eq!(copy.allocator().unwrap(), rust_allocator());

        {
            let map = copy.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data);
        }

        let share = mem.share(2..4);
        assert_eq!(share.size(), 2);
        assert_eq!(share.allocator().unwrap(), rust_allocator());
        {
            let map = share.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[2..4]);
        }

        let mut mem = rust_allocator()
            .alloc(
                data.len(),
                Some(&crate::AllocationParams::new(
                    crate::MemoryFlags::ZERO_PADDED | crate::MemoryFlags::ZERO_PREFIXED,
                    1023,
                    32,
                    32,
                )),
            )
            .unwrap();
        assert_eq!(mem.size(), data.len());
        assert_eq!(mem.maxsize(), data.len() + 32 + 32);
        assert_eq!(mem.allocator().unwrap(), rust_allocator());

        {
            let mem = mem.get_mut().unwrap();
            let mut map = mem.map_writable().unwrap();
            assert_eq!((map.as_ptr() as usize - 32) & 1023, 0);
            map.copy_from_slice(&data);
        }

        let copy = mem.copy();
        assert_eq!(copy.size(), data.len());
        assert_eq!(copy.allocator().unwrap(), rust_allocator());

        {
            let map = copy.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data);
        }

        let share = mem.share(2..4);
        assert_eq!(share.size(), 2);
        assert_eq!(share.allocator().unwrap(), rust_allocator());
        {
            let map = share.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[2..4]);
        }

        let share = mem.share_maxsize(0..(data.len() + 32 + 32));
        assert_eq!(share.size(), data.len() + 32 + 32);
        assert_eq!(share.allocator().unwrap(), rust_allocator());
        {
            let map = share.map_readable().unwrap();
            let padding = [0; 32];
            assert_eq!(&map.as_slice()[..32], &padding);
            assert_eq!(&map.as_slice()[32..][..data.len()], &data);
            assert_eq!(&map.as_slice()[(32 + data.len())..], &padding);
        }
    }

    #[test]
    fn test_wrap_vec_u8() {
        crate::init().unwrap();

        let data = vec![1u8, 2, 3, 4, 5];
        let expected = data.clone();

        let mem = Memory::from_slice(data);
        assert_eq!(mem.size(), 5);
        {
            let map = mem.map_readable().unwrap();
            assert_eq!(&expected, map.as_slice());
        }
    }

    #[test]
    fn test_wrap_array_u8() {
        crate::init().unwrap();

        let data: [u8; 5] = [1u8, 2, 3, 4, 5];
        let expected = data;

        let mem = Memory::from_slice(data);
        assert_eq!(mem.size(), 5);
        {
            let map = mem.map_readable().unwrap();
            assert_eq!(&expected, map.as_slice());
        }
    }

    #[test]
    fn test_wrap_vec_u8_and_back() {
        crate::init().unwrap();

        let data = vec![1u8, 2, 3, 4, 5];
        let expected = data.clone();

        let mem = Memory::from_slice(data);
        assert_eq!(mem.size(), 5);
        {
            let map = mem.map_readable().unwrap();
            assert_eq!(&expected, map.as_slice());
        }

        let extracted: Vec<u8> = mem.try_into_inner().unwrap();
        assert_eq!(extracted, expected);
    }

    #[test]
    fn test_wrap_array_u8_and_back() {
        crate::init().unwrap();

        let data: [u8; 5] = [1u8, 2, 3, 4, 5];
        let expected = data;

        let mem = Memory::from_slice(data);
        assert_eq!(mem.size(), 5);
        {
            let map = mem.map_readable().unwrap();
            assert_eq!(&expected, map.as_slice());
        }

        let extracted: [u8; 5] = mem.try_into_inner().unwrap();
        assert_eq!(extracted, expected);
    }

    #[test]
    fn test_wrap_array_u8_mem_ops() {
        crate::init().unwrap();

        let data = [0, 1, 2, 3, 4, 5, 6, 7];

        let memory = Memory::from_slice(data);
        assert_eq!(memory.size(), data.len());

        {
            let map = memory.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data);
        }

        let copy = memory.copy();
        assert!(copy.parent().is_none());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = copy.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        let share = memory.share(..);
        assert_eq!(share.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map1 = memory.map_readable().unwrap();
            let map2 = share.map_readable().unwrap();
            assert_eq!(map1.as_slice(), map2.as_slice());
        }

        let sub1 = memory.share(..2);
        assert_eq!(sub1.size(), 2);
        assert_eq!(sub1.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub1.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[..2]);
        }

        let sub2 = memory.share(2..);
        assert_eq!(sub2.size(), 6);
        assert_eq!(sub2.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub2.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[2..]);
        }

        let offset = sub1.is_span(&sub2).unwrap();
        assert_eq!(offset, 0);

        let sub3 = sub2.share(2..);
        assert_eq!(sub3.size(), 4);
        assert_eq!(sub3.parent().unwrap().as_ptr(), memory.as_ptr());

        {
            let map = sub3.map_readable().unwrap();
            assert_eq!(map.as_slice(), &data[4..]);
        }
    }
}