1use std::{
4 fmt,
5 marker::PhantomData,
6 mem,
7 ops::{Bound, Deref, DerefMut, RangeBounds},
8 ptr, slice,
9};
10
11use glib::translate::*;
12
13use crate::{ffi, AllocationParams, Allocator, MemoryFlags};
14
// Generates the owned `Memory` and borrowed `MemoryRef` wrapper types around
// `ffi::GstMemory`, wired up to the GStreamer mini-object type system.
mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || {
    ffi::gst_memory_get_type()
});
18
// RAII guard over a mapped `MemoryRef`; unmaps in `Drop`.
// The marker type `T` (`Readable` or `Writable`) encodes the access mode at
// compile time so read-only maps cannot hand out mutable slices.
pub struct MemoryMap<'a, T> {
    memory: &'a MemoryRef,          // memory the map borrows from
    map_info: ffi::GstMapInfo,      // data pointer + size filled by gst_memory_map
    phantom: PhantomData<T>,        // zero-sized access-mode marker
}
24
// Like `MemoryMap`, but owns the `Memory` it maps; created by
// `Memory::into_mapped_memory_*` and convertible back via `into_memory`.
pub struct MappedMemory<T> {
    memory: Memory,                 // owned memory kept alive while mapped
    map_info: ffi::GstMapInfo,      // data pointer + size filled by gst_memory_map
    phantom: PhantomData<T>,        // zero-sized access-mode marker
}
30
31impl fmt::Debug for Memory {
32 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
33 MemoryRef::fmt(self, f)
34 }
35}
36
37impl fmt::Debug for MemoryRef {
38 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
39 f.debug_struct("Memory")
40 .field("ptr", &self.as_ptr())
41 .field("allocator", &self.allocator())
42 .field("parent", &self.parent())
43 .field("maxsize", &self.maxsize())
44 .field("align", &self.align())
45 .field("offset", &self.offset())
46 .field("size", &self.size())
47 .field("flags", &self.flags())
48 .finish()
49 }
50}
51
// Uninhabited marker types selecting the access mode of `MemoryMap` /
// `MappedMemory`; never instantiated at runtime.
pub enum Readable {}
pub enum Writable {}
54
impl Memory {
    // Allocates `size` bytes with the default allocator and default
    // allocation parameters.
    #[inline]
    pub fn with_size(size: usize) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(), // NULL allocator => default allocator
                size,
                ptr::null_mut(), // NULL params => defaults
            ))
        }
    }

    // Allocates `size` bytes with the default allocator, honoring the given
    // `AllocationParams`.
    #[inline]
    pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                params.as_ptr() as *mut _,
            ))
        }
    }

    // Consumes the memory and maps it readable. On failure, ownership of the
    // (unmapped) memory is handed back in `Err` so nothing is lost.
    #[inline]
    pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedMemory {
                    memory: self,
                    // map_info is only initialized when gst_memory_map
                    // returned TRUE.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    // Same as `into_mapped_memory_readable`, but maps read-write.
    #[inline]
    pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedMemory {
                    memory: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }
}
143
impl MemoryRef {
    // Allocator that produced this memory, if one was recorded.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                // SAFETY: the field lives as long as `self`; `Allocator`
                // wraps exactly such a pointer, so the cast is sound.
                Some(&*(&self.0.allocator as *const *mut ffi::GstAllocator as *const Allocator))
            }
        }
    }

    // Parent memory this one was shared/sub-allocated from, if any.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    // Maximum number of addressable bytes in this memory.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    // Alignment of the memory.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    // Offset of the valid data region inside the maxsize area.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    // Number of valid data bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    // Flags stored on the underlying mini object.
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    // Converts a Rust range over the valid data region (`0..size()`) into
    // the `(offset, size)` pair expected by gst_memory_copy/share/resize,
    // where `offset` is relative to the memory's current offset.
    //
    // Panics if the range starts at/after `size()` or ends past it.
    // NOTE(review): an empty tail range such as `size..size` is rejected by
    // the `start_offset < size` assertion — confirm this is intended.
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let size = self.size();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < size, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => size,
        };
        assert!(end_offset <= size, "End offset after valid range");

        let new_offset = start_offset as isize;
        // saturating_sub: an inverted range yields size 0 rather than a panic
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    // Like `calculate_offset_size`, but the range addresses the whole
    // `0..maxsize()` area; the current `offset()` is subtracted so the
    // returned offset stays relative to the valid data region (it may be
    // negative).
    fn calculate_offset_size_maxsize(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let maxsize = self.maxsize();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < maxsize, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => maxsize,
        };
        assert!(end_offset <= maxsize, "End offset after valid range");

        let offset = self.offset();

        // wrapping_sub then cast: negative results are intentional here
        let new_offset = start_offset.wrapping_sub(offset) as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    // Copies the given sub-range of the valid data into a new memory.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    // Copies a sub-range addressed against the full `maxsize` area.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    // If `self` and `mem2` are contiguous views of the same parent memory,
    // returns the parent offset of `self`; otherwise `None`.
    #[doc(alias = "gst_memory_is_span")]
    pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
        unsafe {
            let mut offset = mem::MaybeUninit::uninit();
            let res = from_glib(ffi::gst_memory_is_span(
                self.as_mut_ptr(),
                mem2.as_mut_ptr(),
                offset.as_mut_ptr(),
            ));
            if res {
                // offset is only written when the FFI call returned TRUE
                Some(offset.assume_init())
            } else {
                None
            }
        }
    }

    // Checks whether this memory was allocated by an allocator of the given
    // memory type name.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: &str) -> bool {
        unsafe {
            from_glib(ffi::gst_memory_is_type(
                self.as_mut_ptr(),
                mem_type.to_glib_none().0,
            ))
        }
    }

    // Maps the memory readable; the returned guard unmaps on drop.
    #[inline]
    pub fn map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    memory: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory readable"))
            }
        }
    }

    // Maps the memory read-write; requires `&mut self` for exclusivity.
    #[inline]
    pub fn map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    memory: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory writable"))
            }
        }
    }

    // Creates a new memory sharing (not copying) the given sub-range of the
    // valid data.
    #[doc(alias = "gst_memory_share")]
    pub fn share(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    // Shares a sub-range addressed against the full `maxsize` area.
    #[doc(alias = "gst_memory_share")]
    pub fn share_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    // Resizes the valid data region to the given sub-range.
    #[doc(alias = "gst_memory_resize")]
    pub fn resize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    // Resizes using a range addressed against the full `maxsize` area.
    #[doc(alias = "gst_memory_resize")]
    pub fn resize_maxsize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    // Returns a hexdump helper covering the whole valid data region.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump(&self) -> Dump {
        Dump {
            memory: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    // Returns a hexdump helper covering only the given range.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump {
        Dump {
            memory: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
}
375
376impl<T> MemoryMap<'_, T> {
377 #[doc(alias = "get_size")]
378 #[inline]
379 pub fn size(&self) -> usize {
380 self.map_info.size
381 }
382
383 #[doc(alias = "get_memory")]
384 #[inline]
385 pub fn memory(&self) -> &MemoryRef {
386 self.memory
387 }
388
389 #[inline]
390 pub fn as_slice(&self) -> &[u8] {
391 if self.map_info.size == 0 {
392 return &[];
393 }
394 unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
395 }
396}
397
398impl MemoryMap<'_, Writable> {
399 #[inline]
400 pub fn as_mut_slice(&mut self) -> &mut [u8] {
401 if self.map_info.size == 0 {
402 return &mut [];
403 }
404 unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
405 }
406}
407
// Byte-slice view, delegating to `as_slice`.
impl<T> AsRef<[u8]> for MemoryMap<'_, T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
414
// Mutable byte-slice view; only available for writable maps.
impl AsMut<[u8]> for MemoryMap<'_, Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
421
// Lets a map be used directly wherever `&[u8]` is expected.
impl<T> Deref for MemoryMap<'_, T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
430
// Mutable deref; only available for writable maps.
impl DerefMut for MemoryMap<'_, Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
437
// Debug shows the mapped memory, not the mapped bytes.
impl<T> fmt::Debug for MemoryMap<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MemoryMap").field(&self.memory()).finish()
    }
}
443
444impl<'a, T> PartialEq for MemoryMap<'a, T> {
445 fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
446 self.as_slice().eq(other.as_slice())
447 }
448}
449
// Byte-wise equality is total, so `Eq` holds.
impl<T> Eq for MemoryMap<'_, T> {}
451
impl<T> Drop for MemoryMap<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` was filled by the successful `gst_memory_map`
        // call on this same memory that created the map, so unmapping here
        // is valid and happens exactly once.
        unsafe {
            ffi::gst_memory_unmap(self.memory.as_mut_ptr(), &mut self.map_info);
        }
    }
}
460
// SAFETY: NOTE(review): assumes the mapped data and the `GstMemory` it
// borrows from may be accessed from other threads — confirm against
// GStreamer's threading guarantees for mapped memory.
unsafe impl<T> Send for MemoryMap<'_, T> {}
unsafe impl<T> Sync for MemoryMap<'_, T> {}
463
impl<T> MappedMemory<T> {
    // The mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // Guard the empty case: `slice::from_raw_parts` requires a non-null,
        // aligned pointer even for length 0.
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    // Number of mapped bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    // Borrows the underlying memory while it stays mapped.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        self.memory.as_ref()
    }

    // Unmaps and returns the owned `Memory`.
    #[inline]
    pub fn into_memory(self) -> Memory {
        // ManuallyDrop prevents `Drop::drop` from running (it would unmap a
        // second time); the memory is then moved out with `ptr::read`.
        let mut s = mem::ManuallyDrop::new(self);
        let memory = unsafe { ptr::read(&s.memory) };
        // SAFETY: `map_info` came from the successful map that created
        // `self`; we unmap exactly once since Drop is suppressed above.
        unsafe {
            ffi::gst_memory_unmap(memory.as_mut_ptr(), &mut s.map_info);
        }

        memory
    }
}
496
497impl MappedMemory<Writable> {
498 #[inline]
499 pub fn as_mut_slice(&mut self) -> &mut [u8] {
500 if self.map_info.size == 0 {
501 return &mut [];
502 }
503 unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
504 }
505}
506
// Byte-slice view, delegating to `as_slice`.
impl<T> AsRef<[u8]> for MappedMemory<T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
513
// Mutable byte-slice view; only available for writable mappings.
impl AsMut<[u8]> for MappedMemory<Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
520
// Lets a mapped memory be used directly wherever `&[u8]` is expected.
impl<T> Deref for MappedMemory<T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
529
// Mutable deref; only available for writable mappings.
impl DerefMut for MappedMemory<Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
536
impl<T> Drop for MappedMemory<T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` was produced by the successful map that created
        // `self` (`into_memory` suppresses this Drop, so no double unmap).
        unsafe {
            ffi::gst_memory_unmap(self.memory.as_mut_ptr(), &mut self.map_info);
        }
    }
}
545
// Debug shows the mapped memory, not the mapped bytes.
impl<T> fmt::Debug for MappedMemory<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedMemory").field(&self.memory()).finish()
    }
}
551
552impl<T> PartialEq for MappedMemory<T> {
553 fn eq(&self, other: &MappedMemory<T>) -> bool {
554 self.as_slice().eq(other.as_slice())
555 }
556}
557
// Byte-wise equality is total, so `Eq` holds.
impl<T> Eq for MappedMemory<T> {}
559
// SAFETY: NOTE(review): mirrors the `MemoryMap` impls — assumes the owned
// `GstMemory` and its mapped data are safe to access from other threads;
// confirm against GStreamer's threading guarantees.
unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}
562
// Lazy hexdump helper returned by `MemoryRef::dump`/`dump_range`; the memory
// is only mapped when the value is actually formatted.
pub struct Dump<'a> {
    memory: &'a MemoryRef,   // memory to dump
    start: Bound<usize>,     // start of the dumped byte range
    end: Bound<usize>,       // end of the dumped byte range
}
568
impl Dump<'_> {
    // Shared implementation behind `Display` and `Debug`; `debug` selects
    // the `slice::Dump` formatting variant.
    //
    // Panics if the memory cannot be mapped readable.
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let map = self.memory.map_readable().expect("Failed to map memory");
        let data = map.as_slice();

        // Delegate the actual rendering to the shared slice dumper.
        let dump = crate::slice::Dump {
            data,
            start: self.start,
            end: self.end,
        };

        if debug {
            <crate::slice::Dump as fmt::Debug>::fmt(&dump, f)
        } else {
            <crate::slice::Dump as fmt::Display>::fmt(&dump, f)
        }
    }
}
587
// Plain variant (e.g. "01 02 03 04" in the tests below).
impl fmt::Display for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt(f, false)
    }
}
593
// Verbose variant with offset and ASCII columns.
impl fmt::Debug for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt(f, true)
    }
}
599
/// Trait implemented by all wrapper types around a `GstMemory`.
///
/// # Safety
///
/// `check_memory_type` must only return `true` for memories that are really
/// of type `Self`: the `downcast_memory*` helpers perform raw pointer casts
/// based solely on its answer.
pub unsafe trait MemoryType: crate::prelude::IsMiniObject + AsRef<Memory>
where
    <Self as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    fn check_memory_type(mem: &MemoryRef) -> bool;
}
606
// Error produced when extracting a typed memory from a `glib::Value` fails:
// either the value does not hold a memory at all, or it holds a memory of a
// different concrete type than requested.
#[derive(Debug, thiserror::Error)]
pub enum MemoryTypeMismatchError {
    #[error(transparent)]
    ValueTypeMismatch(#[from] glib::value::ValueTypeMismatchError),
    #[error("the memory is not of the requested type {requested}")]
    MemoryTypeMismatch { requested: &'static str },
}
614
// Value checker for a concrete memory type `M`; see the impl below.
pub struct MemoryTypeValueTypeChecker<M>(PhantomData<M>);
616
// Verifies that a `glib::Value` holds a memory of the concrete type `M`
// before `FromValue` extraction is allowed.
unsafe impl<M> glib::value::ValueTypeChecker for MemoryTypeValueTypeChecker<M>
where
    M: MemoryType + glib::prelude::StaticType,
    <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    type Error = glib::value::ValueTypeMismatchOrNoneError<MemoryTypeMismatchError>;

    fn check(value: &glib::Value) -> Result<(), Self::Error> {
        skip_assert_initialized!();
        // Step 1: the value must hold a `Memory` at all; wrap any type
        // mismatch into our richer error type.
        let mem = value.get::<&Memory>().map_err(|err| match err {
            glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone => {
                glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone
            }
            glib::value::ValueTypeMismatchOrNoneError::WrongValueType(err) => {
                glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                    MemoryTypeMismatchError::ValueTypeMismatch(err),
                )
            }
        })?;

        // Step 2: the memory must additionally be of the concrete type `M`.
        if mem.is_memory_type::<M>() {
            Ok(())
        } else {
            Err(glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                MemoryTypeMismatchError::MemoryTypeMismatch {
                    requested: std::any::type_name::<M>(),
                },
            ))
        }
    }
}
648
// Identity conversion so generic bounds like `AsRef<MemoryRef>` accept the
// base type itself.
impl AsRef<MemoryRef> for MemoryRef {
    #[inline]
    fn as_ref(&self) -> &MemoryRef {
        self
    }
}
655
// Identity conversion (mutable counterpart of the impl above).
impl AsMut<MemoryRef> for MemoryRef {
    #[inline]
    fn as_mut(&mut self) -> &mut MemoryRef {
        self
    }
}
662
// Identity conversion required by the `MemoryType: AsRef<Memory>` bound.
impl AsRef<Memory> for Memory {
    #[inline]
    fn as_ref(&self) -> &Memory {
        self
    }
}
669
// Base case of the memory-type hierarchy: every memory is a `Memory`, so the
// check is unconditionally true.
unsafe impl MemoryType for Memory {
    #[inline]
    fn check_memory_type(_mem: &MemoryRef) -> bool {
        skip_assert_initialized!();
        true
    }
}
677
impl Memory {
    // Downcasts to the concrete memory type `M`, handing the memory back
    // unchanged in `Err` if the type check fails.
    #[inline]
    pub fn downcast_memory<M: MemoryType>(self) -> Result<M, Self>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(&self) {
            // SAFETY: the check above guarantees this memory is of type `M`
            // (see the `MemoryType` safety contract), so the cast is valid.
            unsafe { Ok(from_glib_full(self.into_glib_ptr() as *mut M::FfiType)) }
        } else {
            Err(self)
        }
    }
}
691
impl MemoryRef {
    // Returns `true` if this memory passes the type check of `M`.
    #[inline]
    pub fn is_memory_type<M: MemoryType>(&self) -> bool
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        M::check_memory_type(self)
    }

    // Borrowed downcast to the reference type of `M`, or `None` when the
    // type check fails.
    #[inline]
    pub fn downcast_memory_ref<M: MemoryType>(&self) -> Option<&M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // SAFETY: type check passed, so the layouts match per the
            // `MemoryType` safety contract.
            unsafe { Some(&*(self as *const Self as *const M::RefType)) }
        } else {
            None
        }
    }

    // Mutable counterpart of `downcast_memory_ref`.
    #[inline]
    pub fn downcast_memory_mut<M: MemoryType>(&mut self) -> Option<&mut M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // SAFETY: same reasoning as in `downcast_memory_ref`.
            unsafe { Some(&mut *(self as *mut Self as *mut M::RefType)) }
        } else {
            None
        }
    }
}
725
// Declares a new memory wrapper type (`$name` / `$ref_name`) around the FFI
// type `$ffi_name`, implementing `MemoryType` via `$mem_type_check` plus the
// up/downcast helpers and glib `Value` integration. The second arm addition-
// ally generates `AsRef`/`AsMut` impls for grandparent memory types.
#[macro_export]
macro_rules! memory_object_wrapper {
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path) => {
        $crate::mini_object_wrapper!($name, $ref_name, $ffi_name);

        // Hook the caller-provided predicate into the MemoryType contract.
        unsafe impl $crate::memory::MemoryType for $name {
            #[inline]
            fn check_memory_type(mem: &$crate::MemoryRef) -> bool {
                skip_assert_initialized!();
                $mem_type_check(mem)
            }
        }

        impl $name {
            // Checked downcast to a more concrete memory type `M`.
            #[inline]
            pub fn downcast_memory<M: $crate::memory::MemoryType>(self) -> Result<M, Self>
            where
                <M as $crate::miniobject::IsMiniObject>::RefType: AsRef<$crate::MemoryRef>
                    + AsMut<$crate::MemoryRef>
                    + AsRef<$ref_name>
                    + AsMut<$ref_name>,
            {
                if M::check_memory_type(&self) {
                    unsafe {
                        Ok($crate::glib::translate::from_glib_full(
                            self.into_glib_ptr() as *mut M::FfiType
                        ))
                    }
                } else {
                    Err(self)
                }
            }

            // Infallible upcast to a parent memory type (`Self: AsRef<M>`
            // guarantees the relationship at compile time).
            #[inline]
            pub fn upcast_memory<M>(self) -> M
            where
                M: $crate::memory::MemoryType
                    + $crate::glib::translate::FromGlibPtrFull<
                        *const <M as $crate::miniobject::IsMiniObject>::FfiType,
                    >,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M>,
            {
                unsafe {
                    $crate::glib::translate::from_glib_full(
                        self.into_glib_ptr() as *const <M as $crate::miniobject::IsMiniObject>::FfiType
                    )
                }
            }
        }

        impl $ref_name {
            // Borrowed upcast to a parent memory reference type.
            #[inline]
            pub fn upcast_memory_ref<M>(&self) -> &M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_ref()
            }

            // Mutable counterpart of `upcast_memory_ref`.
            #[inline]
            pub fn upcast_memory_mut<M>(&mut self) -> &mut M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_mut()
            }
        }

        // Deref to the parent reference type so parent methods are reachable.
        impl std::ops::Deref for $ref_name {
            type Target = $parent_memory_ref_type;

            #[inline]
            fn deref(&self) -> &Self::Target {
                unsafe { &*(self as *const _ as *const Self::Target) }
            }
        }

        impl std::ops::DerefMut for $ref_name {
            #[inline]
            fn deref_mut(&mut self) -> &mut Self::Target {
                unsafe { &mut *(self as *mut _ as *mut Self::Target) }
            }
        }

        impl AsRef<$parent_memory_type> for $name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_type {
                unsafe { &*(self as *const _ as *const $parent_memory_type) }
            }
        }

        impl AsRef<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_ref_type {
                self
            }
        }

        impl AsMut<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_mut(&mut self) -> &mut $parent_memory_ref_type {
                &mut *self
            }
        }

        impl $crate::glib::types::StaticType for $name {
            #[inline]
            fn static_type() -> glib::types::Type {
                $ref_name::static_type()
            }
        }

        // All wrapped memory types report the base GstMemory GType.
        impl $crate::glib::types::StaticType for $ref_name {
            #[inline]
            fn static_type() -> $crate::glib::types::Type {
                unsafe { $crate::glib::translate::from_glib($crate::ffi::gst_memory_get_type()) }
            }
        }

        impl $crate::glib::value::ValueType for $name {
            type Type = Self;
        }

        // Owned extraction from a `glib::Value` (type-checked by the
        // MemoryTypeValueTypeChecker before this runs).
        unsafe impl<'a> $crate::glib::value::FromValue<'a> for $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<Self>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                $crate::glib::translate::from_glib_none($crate::glib::gobject_ffi::g_value_get_boxed(
                    $crate::glib::translate::ToGlibPtr::to_glib_none(value).0,
                ) as *mut $ffi_name)
            }
        }

        // Borrowed extraction: reads the boxed pointer straight out of the
        // GValue data, relying on $name being pointer-sized.
        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                assert_eq!(
                    std::mem::size_of::<$name>(),
                    std::mem::size_of::<$crate::glib::ffi::gpointer>()
                );
                let value = &*(value as *const $crate::glib::Value as *const $crate::glib::gobject_ffi::GValue);
                let ptr = &value.data[0].v_pointer as *const $crate::glib::ffi::gpointer
                    as *const *const $ffi_name;
                debug_assert!(!(*ptr).is_null());
                &*(ptr as *const $name)
            }
        }

        impl $crate::glib::value::ToValue for $name {
            fn to_value(&self) -> $crate::glib::Value {
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    // g_value_set_boxed copies/refs, so self stays owned here.
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(self).0
                            as *mut _,
                    )
                }
                value
            }

            fn value_type(&self) -> glib::Type {
                <Self as $crate::glib::prelude::StaticType>::static_type()
            }
        }

        impl $crate::glib::value::ToValueOptional for $name {
            fn to_value_optional(s: Option<&Self>) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(&s).0
                            as *mut _,
                    )
                }
                value
            }
        }

        impl From<$name> for $crate::glib::Value {
            fn from(v: $name) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<$name>();
                unsafe {
                    // take_boxed transfers ownership of v into the value.
                    $crate::glib::gobject_ffi::g_value_take_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::IntoGlibPtr::<*mut $ffi_name>::into_glib_ptr(v) as *mut _,
                    )
                }
                value
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $ref_name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a glib::Value) -> Self {
                skip_assert_initialized!();
                &*($crate::glib::gobject_ffi::g_value_get_boxed($crate::glib::translate::ToGlibPtr::to_glib_none(value).0)
                    as *const $ref_name)
            }
        }

    };
    // Variant taking additional ancestor types: expands the base arm, then
    // adds AsRef/AsMut conversions for each grandparent level.
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path, $($parent_parent_memory_type:path, $parent_parent_memory_ref_type:path),*) => {
        $crate::memory_object_wrapper!($name, $ref_name, $ffi_name, $mem_type_check, $parent_memory_type, $parent_memory_ref_type);

        $(
            impl AsRef<$parent_parent_memory_type> for $name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_type {
                    unsafe { &*(self as *const _ as *const $parent_parent_memory_type) }
                }
            }

            impl AsRef<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_ref_type {
                    self
                }
            }

            impl AsMut<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_mut(&mut self) -> &mut $parent_parent_memory_ref_type {
                    &mut *self
                }
            }
        )*
    };
}
972
// Borrowed, non-refcounting view of a `GstMemory` used in tracing contexts
// (GStreamer 1.26+); mirrors the read-only accessors of `MemoryRef`.
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
#[doc(alias = "GstMemory")]
pub struct MemoryRefTrace(ffi::GstMemory);
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
impl MemoryRefTrace {
    /// # Safety
    ///
    /// `ptr` must be non-null and point to a valid `GstMemory` that stays
    /// alive and unmodified for the caller-chosen lifetime `'a`.
    pub unsafe fn from_ptr<'a>(ptr: *mut ffi::GstMemory) -> &'a MemoryRefTrace {
        assert!(!ptr.is_null());

        &*(ptr as *const Self)
    }

    // Raw pointer to the underlying `GstMemory`.
    pub fn as_ptr(&self) -> *const ffi::GstMemory {
        self as *const Self as *const ffi::GstMemory
    }

    // Allocator that produced this memory, if one was recorded.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                // SAFETY: same reasoning as `MemoryRef::allocator`.
                Some(&*(&self.0.allocator as *const *mut ffi::GstAllocator as *const Allocator))
            }
        }
    }

    // Parent memory this one was shared/sub-allocated from, if any.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    // Maximum number of addressable bytes in this memory.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    // Alignment of the memory.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    // Offset of the valid data region inside the maxsize area.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    // Number of valid data bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    // Flags stored on the underlying mini object.
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> crate::MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    // Checks whether this memory is of the given memory type name.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: &str) -> bool {
        unsafe {
            from_glib(ffi::gst_memory_is_type(
                self as *const Self as *mut ffi::GstMemory,
                mem_type.to_glib_none().0,
            ))
        }
    }
}
1054
#[cfg(test)]
mod tests {
    // Mapping a memory, both borrowed and owned, must expose the same bytes.
    #[test]
    fn test_map() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        let mapped = mem.into_mapped_memory_readable().unwrap();
        assert_eq!(mapped.as_slice(), &[1, 2, 3, 4]);

        let mem = mapped.into_memory();
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
    }

    // Shared sub-memories view the right windows of the parent bytes.
    #[test]
    fn test_share() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let sub = mem.share(1..=2);
        let sub_sub1 = sub.share(1..=1);
        let sub_sub2 = sub.share_maxsize(0..4);

        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        let map = sub.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[2, 3]);
        drop(map);

        let map = sub_sub1.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[3]);
        drop(map);

        // share_maxsize can reach outside the sub-memory's valid range,
        // back into the parent's full area.
        let map = sub_sub2.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);
    }

    // Display gives the plain hex dump; Debug adds offset/ASCII columns.
    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", mem.dump()).unwrap();
        assert_eq!(s, "0000:  01 02 03 04                                      ....");
        s.clear();
        write!(&mut s, "{}", mem.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", mem.dump_range(..)).unwrap();
        assert_eq!(s, "0000:  01 02 03 04                                      ....");
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(..2)).unwrap();
        assert_eq!(s, "0000:  01 02                                            ..");
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(2..=3)).unwrap();
        assert_eq!(s, "0002:  03 04                                            ..");
        s.clear();

        // Out-of-range requests render as error markers instead of panicking.
        write!(&mut s, "{:?}", mem.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>",);
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>",);
        s.clear();

        // Multi-line output wraps at 16 bytes per row.
        let mem = crate::Memory::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", mem.dump()).unwrap();
        assert_eq!(
            s,
            "0000:  00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00  ................\n\
             0010:  00 00 00                                         ..."
        );
        s.clear();
    }

    // Memories round-trip through glib::Value, including the None case.
    #[test]
    fn test_value() {
        use glib::prelude::*;

        crate::init().unwrap();

        let v = None::<&crate::Memory>.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(None)));

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let v = mem.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(Some(_))));
        assert!(v.get::<crate::Memory>().is_ok());
    }
}