1use std::{
4 fmt,
5 marker::PhantomData,
6 mem,
7 ops::{Bound, Deref, DerefMut, RangeBounds},
8 ptr, slice,
9};
10
11use glib::{prelude::*, translate::*};
12
13use crate::{ffi, AllocationParams, Allocator, MemoryFlags};
14
// Generates the refcounted `Memory` wrapper and its borrowed `MemoryRef`
// counterpart around the C `GstMemory` mini-object.
mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || {
    ffi::gst_memory_get_type()
});
18
// RAII guard for a mapping of a borrowed `MemoryRef`; unmaps on drop.
// `T` is `Readable` or `Writable` and selects which slice accessors exist.
pub struct MemoryMap<'a, T> {
    // C-side map bookkeeping (data pointer, size, flags, memory pointer).
    map_info: ffi::GstMapInfo,
    // Ties the map to the memory's lifetime and to the access-mode marker.
    phantom: PhantomData<(&'a MemoryRef, T)>,
}
23
// Owning variant of `MemoryMap`: holds the `Memory` itself (inside
// `map_info.memory`) for as long as the mapping is alive.
pub struct MappedMemory<T> {
    // C-side map bookkeeping; `map_info.memory` owns the only reference.
    map_info: ffi::GstMapInfo,
    // Marks ownership of a `Memory` plus the access-mode marker `T`.
    phantom: PhantomData<(Memory, T)>,
}
28
29impl fmt::Debug for Memory {
30 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
31 MemoryRef::fmt(self, f)
32 }
33}
34
35impl fmt::Debug for MemoryRef {
36 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
37 f.debug_struct("Memory")
38 .field("ptr", &self.as_ptr())
39 .field("allocator", &self.allocator())
40 .field("parent", &self.parent())
41 .field("maxsize", &self.maxsize())
42 .field("align", &self.align())
43 .field("offset", &self.offset())
44 .field("size", &self.size())
45 .field("flags", &self.flags())
46 .finish()
47 }
48}
49
// Uninhabited type-level markers encoding the access mode of a mapping.
pub enum Readable {}
pub enum Writable {}
52
impl Memory {
    // Returns GStreamer's default memory alignment value. Judging by the
    // hard-coded Windows fallback of 7, this is presumably an alignment
    // mask (`alignment - 1`, i.e. 7 == 8-byte alignment) — matches the
    // semantics of the C `gst_memory_alignment` global.
    #[inline]
    pub fn default_alignment() -> usize {
        #[cfg(not(windows))]
        {
            // Read the global data symbol exported by libgstreamer.
            extern "C" {
                static gst_memory_alignment: usize;
            }
            unsafe { gst_memory_alignment }
        }
        #[cfg(windows)]
        {
            // Data symbols cannot be imported here; use the upstream default.
            7
        }
    }

    // Allocates `size` bytes with the default allocator and default
    // allocation parameters.
    #[inline]
    pub fn with_size(size: usize) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
        }
    }

    // Allocates `size` bytes with the default allocator and the given
    // allocation parameters (alignment, prefix/padding, flags).
    #[inline]
    pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                params.as_ptr() as *mut _,
            ))
        }
    }

    #[inline]
    // Maps `self` for reading, consuming it. On success the only reference
    // moves into the returned `MappedMemory` (held in `map_info.memory`);
    // on failure the original `Memory` is handed back unchanged.
    pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
        unsafe {
            // ManuallyDrop: the reference either transfers into the result
            // or is reconstructed for the `Err` case — it must not be
            // unreffed here.
            let s = mem::ManuallyDrop::new(self);
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                s.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedMemory {
                    // SAFETY: gst_memory_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(mem::ManuallyDrop::into_inner(s))
            }
        }
    }

    #[inline]
    // Same as `into_mapped_memory_readable`, but maps read-write.
    pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
        unsafe {
            let s = mem::ManuallyDrop::new(self);
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                s.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedMemory {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(mem::ManuallyDrop::into_inner(s))
            }
        }
    }
}
160
impl MemoryRef {
    // Borrows the allocator that produced this memory, if one is set.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                Some(Allocator::from_glib_ptr_borrow(&self.0.allocator))
            }
        }
    }

    // Returns the parent memory when this one is a shared sub-region.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    // Total capacity of the backing region in bytes.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    // Alignment value as stored in the C struct.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    // Offset of the valid data within the backing region.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    // Number of valid bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> MemoryFlags {
        // Memory flags live on the embedded mini-object header.
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    // Resolves `range`, interpreted relative to the valid region `0..size()`,
    // into the signed `(offset, size)` pair the C API expects.
    //
    // Panics when the range falls outside the valid region. Note that
    // `start == size` is rejected too, so an empty range at the very end is
    // not representable.
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let size = self.size();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < size, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => size,
        };
        assert!(end_offset <= size, "End offset after valid range");

        let new_offset = start_offset as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    // Like `calculate_offset_size`, but `range` is interpreted relative to
    // the whole backing region `0..maxsize()`. The returned offset is made
    // relative to the current `offset()` and may therefore be negative.
    fn calculate_offset_size_maxsize(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let maxsize = self.maxsize();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < maxsize, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => maxsize,
        };
        assert!(end_offset <= maxsize, "End offset after valid range");

        let offset = self.offset();

        // wrapping_sub + cast: a start before the current offset wraps in
        // usize and reinterprets as the intended negative isize.
        let new_offset = start_offset.wrapping_sub(offset) as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    // Copies `range` (relative to the valid region) into a new `Memory`.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    // Copies `range` (relative to the whole backing region) into a new `Memory`.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    // Returns `Some(offset)` when `self` and `mem2` are contiguous spans of
    // the same parent memory (per gst_memory_is_span), otherwise `None`.
    #[doc(alias = "gst_memory_is_span")]
    pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
        unsafe {
            let mut offset = mem::MaybeUninit::uninit();
            let res = from_glib(ffi::gst_memory_is_span(
                self.as_mut_ptr(),
                mem2.as_mut_ptr(),
                offset.as_mut_ptr(),
            ));
            if res {
                Some(offset.assume_init())
            } else {
                None
            }
        }
    }

    // Checks whether this memory is of the named memory type.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: &str) -> bool {
        unsafe {
            from_glib(ffi::gst_memory_is_type(
                self.as_mut_ptr(),
                mem_type.to_glib_none().0,
            ))
        }
    }

    // Maps the memory for reading; unmapped when the returned guard drops.
    #[inline]
    pub fn map_readable(&self) -> Result<MemoryMap<'_, Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    // SAFETY: map() succeeded, so map_info is initialized.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory readable"))
            }
        }
    }

    // Maps the memory read-write; requires exclusive access via `&mut self`.
    #[inline]
    pub fn map_writable(&mut self) -> Result<MemoryMap<'_, Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory writable"))
            }
        }
    }

    // Creates a new `Memory` sharing (not copying) `range` of the valid region.
    #[doc(alias = "gst_memory_share")]
    pub fn share(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    // Shares `range` of the whole backing region.
    #[doc(alias = "gst_memory_share")]
    pub fn share_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    // Moves/shrinks the valid region to `range` (relative to the valid region).
    #[doc(alias = "gst_memory_resize")]
    pub fn resize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    // Resizes with `range` interpreted relative to the backing region.
    #[doc(alias = "gst_memory_resize")]
    pub fn resize_maxsize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    // Returns a helper whose Debug/Display hex-dumps the whole memory.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump(&self) -> Dump<'_> {
        Dump {
            memory: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    // Returns a helper that hex-dumps only `range` of the memory.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump<'_> {
        Dump {
            memory: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
}
390
391impl<T> MemoryMap<'_, T> {
392 #[doc(alias = "get_size")]
393 #[inline]
394 pub fn size(&self) -> usize {
395 self.map_info.size
396 }
397
398 #[doc(alias = "get_memory")]
399 #[inline]
400 pub fn memory(&self) -> &MemoryRef {
401 unsafe { MemoryRef::from_ptr(self.map_info.memory) }
402 }
403
404 #[inline]
405 pub fn as_slice(&self) -> &[u8] {
406 if self.map_info.size == 0 {
407 return &[];
408 }
409 unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
410 }
411}
412
413impl MemoryMap<'_, Writable> {
414 #[inline]
415 pub fn as_mut_slice(&mut self) -> &mut [u8] {
416 if self.map_info.size == 0 {
417 return &mut [];
418 }
419 unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
420 }
421}
422
// Byte-slice view of the mapping.
impl<T> AsRef<[u8]> for MemoryMap<'_, T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
429
// Mutable byte-slice view; only available for writable mappings.
impl AsMut<[u8]> for MemoryMap<'_, Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
436
// Lets a `MemoryMap` be used directly wherever `&[u8]` is expected.
impl<T> Deref for MemoryMap<'_, T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
445
// Mutable deref; only available for writable mappings.
impl DerefMut for MemoryMap<'_, Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
452
453impl<T> fmt::Debug for MemoryMap<'_, T> {
454 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
455 f.debug_tuple("MemoryMap").field(&self.memory()).finish()
456 }
457}
458
459impl<'a, T> PartialEq for MemoryMap<'a, T> {
460 fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
461 self.as_slice().eq(other.as_slice())
462 }
463}
464
// Byte-wise equality is a total equivalence relation, so `Eq` holds.
impl<T> Eq for MemoryMap<'_, T> {}
466
// Unmaps the memory when the guard goes out of scope. The memory itself is
// only borrowed here, so no unref happens.
impl<T> Drop for MemoryMap<'_, T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}
475
// SAFETY: the map only holds a GstMapInfo (plain pointers + sizes) into
// refcounted GStreamer memory; presumably mapping/unmapping is valid from
// any thread — NOTE(review): confirm against GStreamer's threading rules.
unsafe impl<T> Send for MemoryMap<'_, T> {}
unsafe impl<T> Sync for MemoryMap<'_, T> {}
478
impl<T> MappedMemory<T> {
    // Views the mapped bytes as a slice; empty maps yield an empty slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    // Number of mapped bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    // Borrows the underlying memory without unmapping it.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        unsafe { MemoryRef::from_ptr(self.map_info.memory) }
    }

    // Unmaps and returns the owned `Memory`.
    #[inline]
    pub fn into_memory(self) -> Memory {
        // ManuallyDrop: skip our Drop impl, which would unref a second time.
        let mut s = mem::ManuallyDrop::new(self);
        // Re-take the reference that was stored in `map_info.memory`.
        let memory = unsafe { from_glib_full(s.map_info.memory) };
        unsafe {
            // Unmap does not unref, so `memory` stays valid afterwards.
            ffi::gst_memory_unmap(s.map_info.memory, &mut s.map_info);
        }

        memory
    }
}
511
512impl MappedMemory<Writable> {
513 #[inline]
514 pub fn as_mut_slice(&mut self) -> &mut [u8] {
515 if self.map_info.size == 0 {
516 return &mut [];
517 }
518 unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
519 }
520}
521
// Byte-slice view of the owned mapping.
impl<T> AsRef<[u8]> for MappedMemory<T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
528
// Mutable byte-slice view; only available for writable mappings.
impl AsMut<[u8]> for MappedMemory<Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
535
// Lets a `MappedMemory` be used directly wherever `&[u8]` is expected.
impl<T> Deref for MappedMemory<T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
544
// Mutable deref; only available for writable mappings.
impl DerefMut for MappedMemory<Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
551
// Unmaps and releases the owned memory reference.
impl<T> Drop for MappedMemory<T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // Re-take ownership of the ref held in `map_info.memory`; it is
            // unreffed when `_memory` drops, after the unmap below.
            let _memory = Memory::from_glib_full(self.map_info.memory);
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}
561
562impl<T> fmt::Debug for MappedMemory<T> {
563 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
564 f.debug_tuple("MappedMemory").field(&self.memory()).finish()
565 }
566}
567
568impl<T> PartialEq for MappedMemory<T> {
569 fn eq(&self, other: &MappedMemory<T>) -> bool {
570 self.as_slice().eq(other.as_slice())
571 }
572}
573
// Byte-wise equality is a total equivalence relation, so `Eq` holds.
impl<T> Eq for MappedMemory<T> {}
575
// SAFETY: owns its (refcounted) memory plus a GstMapInfo of plain pointers;
// presumably safe to move/share across threads — NOTE(review): confirm
// against GStreamer's threading rules.
unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}
578
// Helper returned by `MemoryRef::dump{,_range}`; hex-dumps the memory when
// formatted with `{}` or `{:?}`.
pub struct Dump<'a> {
    memory: &'a MemoryRef,
    // Bounds of the region to dump (Unbounded == whole memory).
    start: Bound<usize>,
    end: Bound<usize>,
}
584
impl Dump<'_> {
    // Shared worker for the Display/Debug impls below: maps the memory
    // readable and delegates to the crate's slice dumper.
    //
    // Panics if the memory cannot be mapped readable.
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let map = self.memory.map_readable().expect("Failed to map memory");
        let data = map.as_slice();

        let dump = crate::slice::Dump {
            data,
            start: self.start,
            end: self.end,
        };

        if debug {
            // Debug adds offsets and an ASCII column (see tests below).
            <crate::slice::Dump as fmt::Debug>::fmt(&dump, f)
        } else {
            <crate::slice::Dump as fmt::Display>::fmt(&dump, f)
        }
    }
}
603
// Plain hex bytes, no offsets/ASCII column.
impl fmt::Display for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt(f, false)
    }
}
609
// Full hexdump with offsets and ASCII column.
impl fmt::Debug for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt(f, true)
    }
}
615
// Trait implemented by concrete memory wrapper types (see
// `memory_object_wrapper!`). Unsafe because a wrong `check_memory_type`
// lets callers reinterpret a `GstMemory` as the wrong wrapper type.
pub unsafe trait MemoryType: crate::prelude::IsMiniObject + AsRef<Memory>
where
    <Self as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    // Returns `true` if `mem` is an instance of `Self`'s memory type.
    fn check_memory_type(mem: &MemoryRef) -> bool;
}
622
// Error type used by `MemoryTypeValueTypeChecker` below.
#[derive(Debug, thiserror::Error)]
pub enum MemoryTypeMismatchError {
    // The glib value did not hold a `GstMemory` at all.
    #[error(transparent)]
    ValueTypeMismatch(#[from] glib::value::ValueTypeMismatchError),
    // The value held a `GstMemory`, but not of the requested wrapper type.
    #[error("the memory is not of the requested type {requested}")]
    MemoryTypeMismatch { requested: &'static str },
}
630
// Zero-sized `glib::Value` checker for a specific memory wrapper type `M`.
pub struct MemoryTypeValueTypeChecker<M>(PhantomData<M>);
632
unsafe impl<M> glib::value::ValueTypeChecker for MemoryTypeValueTypeChecker<M>
where
    M: MemoryType + glib::prelude::StaticType,
    <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    type Error = glib::value::ValueTypeMismatchOrNoneError<MemoryTypeMismatchError>;

    // Checks that `value` holds a non-null `GstMemory` whose concrete type
    // matches `M`, translating both failure modes into `Self::Error`.
    fn check(value: &glib::Value) -> Result<(), Self::Error> {
        skip_assert_initialized!();
        // First ensure the value holds a memory at all, re-wrapping the
        // glib type error into our richer error enum.
        let mem = value.get::<&Memory>().map_err(|err| match err {
            glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone => {
                glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone
            }
            glib::value::ValueTypeMismatchOrNoneError::WrongValueType(err) => {
                glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                    MemoryTypeMismatchError::ValueTypeMismatch(err),
                )
            }
        })?;

        // Then verify the concrete memory type.
        if mem.is_memory_type::<M>() {
            Ok(())
        } else {
            Err(glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                MemoryTypeMismatchError::MemoryTypeMismatch {
                    requested: std::any::type_name::<M>(),
                },
            ))
        }
    }
}
664
// Reflexive impl so `MemoryRef` satisfies the `MemoryType` ref-type bounds.
impl AsRef<MemoryRef> for MemoryRef {
    #[inline]
    fn as_ref(&self) -> &MemoryRef {
        self
    }
}
671
// Reflexive mutable counterpart of the impl above.
impl AsMut<MemoryRef> for MemoryRef {
    #[inline]
    fn as_mut(&mut self) -> &mut MemoryRef {
        self
    }
}
678
// Reflexive impl so `Memory` satisfies the `MemoryType: AsRef<Memory>` bound.
impl AsRef<Memory> for Memory {
    #[inline]
    fn as_ref(&self) -> &Memory {
        self
    }
}
685
// The base `Memory` type matches every `GstMemory`.
unsafe impl MemoryType for Memory {
    #[inline]
    fn check_memory_type(_mem: &MemoryRef) -> bool {
        skip_assert_initialized!();
        true
    }
}
693
impl Memory {
    // Attempts to downcast this memory to the concrete wrapper type `M`,
    // returning the original `Memory` unchanged on failure.
    #[inline]
    pub fn downcast_memory<M: MemoryType>(self) -> Result<M, Self>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(&self) {
            // SAFETY: the type check above guarantees the pointer really is
            // an `M::FfiType`; ownership transfers via into_glib_ptr.
            unsafe { Ok(from_glib_full(self.into_glib_ptr() as *mut M::FfiType)) }
        } else {
            Err(self)
        }
    }
}
707
impl MemoryRef {
    // Returns `true` when this memory is an instance of wrapper type `M`.
    #[inline]
    pub fn is_memory_type<M: MemoryType>(&self) -> bool
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        M::check_memory_type(self)
    }

    // Borrowed downcast to `M`'s ref type; `None` if the type check fails.
    #[inline]
    pub fn downcast_memory_ref<M: MemoryType>(&self) -> Option<&M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // SAFETY: type check passed and ref types are repr-compatible
            // views over the same `GstMemory`.
            unsafe { Some(&*(self as *const Self as *const M::RefType)) }
        } else {
            None
        }
    }

    // Mutable counterpart of `downcast_memory_ref`.
    #[inline]
    pub fn downcast_memory_mut<M: MemoryType>(&mut self) -> Option<&mut M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // SAFETY: as above, plus we hold the unique borrow.
            unsafe { Some(&mut *(self as *mut Self as *mut M::RefType)) }
        } else {
            None
        }
    }
}
741
/// Generates a full wrapper type pair (`$name`/`$ref_name`) for a custom
/// `GstMemory` subtype: mini-object boilerplate, `MemoryType` impl with the
/// given `$mem_type_check`, up/downcasts, `Deref` to the parent ref type,
/// and the glib `Value` integration.
///
/// The second arm additionally wires `AsRef`/`AsMut` to every further
/// ancestor type passed after the direct parent.
#[macro_export]
macro_rules! memory_object_wrapper {
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path) => {
        $crate::mini_object_wrapper!($name, $ref_name, $ffi_name);

        // Hook the user-provided type check into the MemoryType machinery.
        unsafe impl $crate::memory::MemoryType for $name {
            #[inline]
            fn check_memory_type(mem: &$crate::MemoryRef) -> bool {
                skip_assert_initialized!();
                $mem_type_check(mem)
            }
        }

        impl $name {
            // Checked downcast to a more specific memory wrapper type.
            #[inline]
            pub fn downcast_memory<M: $crate::memory::MemoryType>(self) -> Result<M, Self>
            where
                <M as $crate::miniobject::IsMiniObject>::RefType: AsRef<$crate::MemoryRef>
                    + AsMut<$crate::MemoryRef>
                    + AsRef<$ref_name>
                    + AsMut<$ref_name>,
            {
                if M::check_memory_type(&self) {
                    unsafe {
                        Ok($crate::glib::translate::from_glib_full(
                            self.into_glib_ptr() as *mut M::FfiType
                        ))
                    }
                } else {
                    Err(self)
                }
            }

            // Infallible upcast to a parent memory wrapper type.
            #[inline]
            pub fn upcast_memory<M>(self) -> M
            where
                M: $crate::memory::MemoryType
                    + $crate::glib::translate::FromGlibPtrFull<
                        *const <M as $crate::miniobject::IsMiniObject>::FfiType,
                    >,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M>,
            {
                unsafe {
                    $crate::glib::translate::from_glib_full(
                        self.into_glib_ptr() as *const <M as $crate::miniobject::IsMiniObject>::FfiType
                    )
                }
            }
        }

        impl $ref_name {
            // Borrowed upcast to a parent ref type.
            #[inline]
            pub fn upcast_memory_ref<M>(&self) -> &M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_ref()
            }

            // Mutable borrowed upcast to a parent ref type.
            #[inline]
            pub fn upcast_memory_mut<M>(&mut self) -> &mut M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_mut()
            }
        }

        // Deref chains the ref type to its parent ref type (same layout).
        impl std::ops::Deref for $ref_name {
            type Target = $parent_memory_ref_type;

            #[inline]
            fn deref(&self) -> &Self::Target {
                unsafe { &*(self as *const _ as *const Self::Target) }
            }
        }

        impl std::ops::DerefMut for $ref_name {
            #[inline]
            fn deref_mut(&mut self) -> &mut Self::Target {
                unsafe { &mut *(self as *mut _ as *mut Self::Target) }
            }
        }

        impl AsRef<$parent_memory_type> for $name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_type {
                unsafe { &*(self as *const _ as *const $parent_memory_type) }
            }
        }

        impl AsRef<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_ref_type {
                self
            }
        }

        impl AsMut<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_mut(&mut self) -> &mut $parent_memory_ref_type {
                &mut *self
            }
        }

        // glib Value integration below mirrors what mini_object_wrapper
        // generates for the base Memory type, but routed through the
        // memory-type checker so `value.get::<$name>()` type-checks.
        impl $crate::glib::types::StaticType for $name {
            #[inline]
            fn static_type() -> glib::types::Type {
                $ref_name::static_type()
            }
        }

        impl $crate::glib::types::StaticType for $ref_name {
            #[inline]
            fn static_type() -> $crate::glib::types::Type {
                unsafe { $crate::glib::translate::from_glib($crate::ffi::gst_memory_get_type()) }
            }
        }

        impl $crate::glib::value::ValueType for $name {
            type Type = Self;
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<Self>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                $crate::glib::translate::from_glib_none($crate::glib::gobject_ffi::g_value_get_boxed(
                    $crate::glib::translate::ToGlibPtr::to_glib_none(value).0,
                ) as *mut $ffi_name)
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                // Relies on $name being pointer-sized so the GValue's boxed
                // pointer slot can be reinterpreted as &$name.
                assert_eq!(
                    std::mem::size_of::<$name>(),
                    std::mem::size_of::<$crate::glib::ffi::gpointer>()
                );
                let value = &*(value as *const $crate::glib::Value as *const $crate::glib::gobject_ffi::GValue);
                let ptr = &value.data[0].v_pointer as *const $crate::glib::ffi::gpointer
                    as *const *const $ffi_name;
                debug_assert!(!(*ptr).is_null());
                &*(ptr as *const $name)
            }
        }

        impl $crate::glib::value::ToValue for $name {
            fn to_value(&self) -> $crate::glib::Value {
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(self).0
                            as *mut _,
                    )
                }
                value
            }

            fn value_type(&self) -> glib::Type {
                <Self as $crate::glib::prelude::StaticType>::static_type()
            }
        }

        impl $crate::glib::value::ToValueOptional for $name {
            fn to_value_optional(s: Option<&Self>) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(&s).0
                            as *mut _,
                    )
                }
                value
            }
        }

        impl From<$name> for $crate::glib::Value {
            fn from(v: $name) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<$name>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_take_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::IntoGlibPtr::<*mut $ffi_name>::into_glib_ptr(v) as *mut _,
                    )
                }
                value
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $ref_name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a glib::Value) -> Self {
                skip_assert_initialized!();
                &*($crate::glib::gobject_ffi::g_value_get_boxed($crate::glib::translate::ToGlibPtr::to_glib_none(value).0)
                    as *const $ref_name)
            }
        }

    };
    // Variant taking additional (type, ref-type) ancestor pairs: delegates
    // to the arm above, then adds AsRef/AsMut to each further ancestor.
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path, $($parent_parent_memory_type:path, $parent_parent_memory_ref_type:path),*) => {
        $crate::memory_object_wrapper!($name, $ref_name, $ffi_name, $mem_type_check, $parent_memory_type, $parent_memory_ref_type);

        $(
            impl AsRef<$parent_parent_memory_type> for $name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_type {
                    unsafe { &*(self as *const _ as *const $parent_parent_memory_type) }
                }
            }

            impl AsRef<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_ref_type {
                    self
                }
            }

            impl AsMut<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_mut(&mut self) -> &mut $parent_parent_memory_ref_type {
                    &mut *self
                }
            }
        )*
    };
}
988
// Borrowed, read-only view over a `GstMemory` that does not assume the
// caller holds a reference (unlike `MemoryRef`); only accessors below.
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
#[doc(alias = "GstMemory")]
pub struct MemoryRefTrace(ffi::GstMemory);
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
impl MemoryRefTrace {
    // Reinterprets a raw memory pointer as a borrowed trace view.
    //
    // # Safety
    // `ptr` must be non-null and valid for the returned lifetime `'a`.
    pub unsafe fn from_ptr<'a>(ptr: *mut ffi::GstMemory) -> &'a MemoryRefTrace {
        assert!(!ptr.is_null());

        &*(ptr as *const Self)
    }

    // Raw pointer to the underlying C struct.
    pub fn as_ptr(&self) -> *const ffi::GstMemory {
        self as *const Self as *const ffi::GstMemory
    }

    // Borrows the allocator that produced this memory, if any.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                Some(Allocator::from_glib_ptr_borrow(&self.0.allocator))
            }
        }
    }

    // Returns the parent memory when this one is a shared sub-region.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    // Total capacity of the backing region in bytes.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    // Alignment value as stored in the C struct.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    // Offset of the valid data within the backing region.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    // Number of valid bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> crate::MemoryFlags {
        // Memory flags live on the embedded mini-object header.
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    // Checks whether this memory is of the named memory type.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: &str) -> bool {
        unsafe {
            from_glib(ffi::gst_memory_is_type(
                self as *const Self as *mut ffi::GstMemory,
                mem_type.to_glib_none().0,
            ))
        }
    }
}
1070
#[cfg(test)]
mod tests {
    #[test]
    fn test_map() {
        crate::init().unwrap();

        // Borrowed map on a fresh memory.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        // Owned map, then round-trip back to a plain Memory.
        let mapped = mem.into_mapped_memory_readable().unwrap();
        assert_eq!(mapped.as_slice(), &[1, 2, 3, 4]);

        let mem = mapped.into_memory();
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
    }

    #[test]
    fn test_share() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let sub = mem.share(1..=2);
        let sub_sub1 = sub.share(1..=1);
        let sub_sub2 = sub.share_maxsize(0..4);

        // The parent is unaffected by sharing.
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        let map = sub.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[2, 3]);
        drop(map);

        let map = sub_sub1.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[3]);
        drop(map);

        // share_maxsize can reach outside the sub-memory's valid region.
        let map = sub_sub2.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);
    }

    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", mem.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        write!(&mut s, "{}", mem.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", mem.dump_range(..)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(..2)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 .."
        );
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(2..=3)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 .."
        );
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>",);
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>",);
        s.clear();

        // Multi-line dump: 16 bytes per row.
        let mem = crate::Memory::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", mem.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................\n\
             0010: 00 00 00 ..."
        );
        s.clear();
    }

    #[test]
    fn test_value() {
        use glib::prelude::*;

        crate::init().unwrap();

        // None round-trips through a Value.
        let v = None::<&crate::Memory>.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(None)));

        // A real memory round-trips as Some and as the bare type.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let v = mem.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(Some(_))));
        assert!(v.get::<crate::Memory>().is_ok());
    }
}