1use std::{
4 fmt,
5 marker::PhantomData,
6 mem,
7 ops::{Bound, Deref, DerefMut, RangeBounds},
8 ptr, slice,
9};
10
11use glib::{prelude::*, translate::*};
12
13use crate::{AllocationParams, Allocator, MemoryFlags, ffi};
14
// Generates the owned `Memory` and borrowed `MemoryRef` wrappers around the
// `GstMemory` mini-object, including refcounting and GType integration.
mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || {
    ffi::gst_memory_get_type()
});
18
/// RAII guard for a *borrowed* mapping of a [`MemoryRef`]; unmaps on drop.
///
/// `T` is one of the [`Readable`]/[`Writable`] marker types and selects which
/// accessors are available.
pub struct MemoryMap<'a, T> {
    // Raw map info returned by `gst_memory_map()`; needed again for unmap.
    map_info: ffi::GstMapInfo,
    // Ties the map to the lifetime of the mapped memory and the access marker.
    phantom: PhantomData<(&'a MemoryRef, T)>,
}
23
/// RAII guard for an *owned* mapping of a [`Memory`]; unmaps and releases the
/// memory reference on drop (or hands the memory back via `into_memory`).
pub struct MappedMemory<T> {
    // Raw map info returned by `gst_memory_map()`; also carries the owned
    // `GstMemory` pointer whose reference this guard holds.
    map_info: ffi::GstMapInfo,
    // Marks logical ownership of a `Memory` plus the access marker type.
    phantom: PhantomData<(Memory, T)>,
}
28
29impl fmt::Debug for Memory {
30 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
31 MemoryRef::fmt(self, f)
32 }
33}
34
35impl fmt::Debug for MemoryRef {
36 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
37 f.debug_struct("Memory")
38 .field("ptr", &self.as_ptr())
39 .field("allocator", &self.allocator())
40 .field("parent", &self.parent())
41 .field("maxsize", &self.maxsize())
42 .field("align", &self.align())
43 .field("offset", &self.offset())
44 .field("size", &self.size())
45 .field("flags", &self.flags())
46 .finish()
47 }
48}
49
// Uninhabited type-state markers used as the `T` parameter of the map guards:
// `Readable` grants shared access only, `Writable` additionally grants
// mutable access.
pub enum Readable {}
pub enum Writable {}
52
impl Memory {
    /// Returns the default memory alignment used by GStreamer allocators.
    ///
    /// On non-Windows targets this reads the `gst_memory_alignment` global
    /// exported by the C library. On Windows that symbol is not available to
    /// link against, so the value `7` is hard-coded instead.
    #[inline]
    pub fn default_alignment() -> usize {
        #[cfg(not(windows))]
        {
            unsafe extern "C" {
                static gst_memory_alignment: usize;
            }
            // SAFETY: reading an immutable global exported by libgstreamer.
            unsafe { gst_memory_alignment }
        }
        #[cfg(windows)]
        {
            7
        }
    }

    /// Allocates a new memory block of `size` bytes using the default
    /// allocator and default allocation parameters.
    #[inline]
    pub fn with_size(size: usize) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(), // NULL allocator selects the default one
                size,
                ptr::null_mut(), // NULL params selects the defaults
            ))
        }
    }

    /// Allocates a new memory block of `size` bytes using the default
    /// allocator and the given allocation `params`.
    #[inline]
    pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                params.as_ptr() as *mut _,
            ))
        }
    }

    /// Maps the memory readable, consuming `self`.
    ///
    /// On failure the (unmapped) memory is handed back in `Err`.
    #[inline]
    pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
        unsafe {
            // `ManuallyDrop` keeps the reference alive: on success ownership
            // logically moves into the returned `MappedMemory` (whose Drop
            // releases it), on failure it is taken back out for `Err`.
            let s = mem::ManuallyDrop::new(self);
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                s.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedMemory {
                    // SAFETY: `gst_memory_map` initialized `map_info` on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(mem::ManuallyDrop::into_inner(s))
            }
        }
    }

    /// Maps the memory writable, consuming `self`.
    ///
    /// On failure the (unmapped) memory is handed back in `Err`.
    #[inline]
    pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
        unsafe {
            // Same ownership scheme as `into_mapped_memory_readable` above.
            let s = mem::ManuallyDrop::new(self);
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                s.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedMemory {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(mem::ManuallyDrop::into_inner(s))
            }
        }
    }
}
160
impl MemoryRef {
    /// Returns the allocator this memory was allocated with, if any.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                Some(Allocator::from_glib_ptr_borrow(&self.0.allocator))
            }
        }
    }

    /// Returns the parent memory this memory is a shared view of, if any.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    /// Returns the maximum size this memory can be resized to.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    /// Returns the alignment of the memory.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    /// Returns the offset of the usable region inside the allocated region.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    /// Returns the size of the usable region.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    /// Returns `(size, offset, maxsize)` as reported by
    /// `gst_memory_get_sizes()`.
    #[doc(alias = "gst_memory_get_sizes")]
    #[inline]
    pub fn sizes(&self) -> (usize, usize, usize) {
        unsafe {
            let mut offset = 0;
            let mut maxsize = 0;
            let total_size =
                ffi::gst_memory_get_sizes(mut_override(self.as_ptr()), &mut offset, &mut maxsize);

            (total_size, offset, maxsize)
        }
    }

    /// Returns the memory flags stored in the mini-object header.
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    // Converts a range over the usable region (`0..self.size()`) into the
    // `(offset, size)` pair expected by `gst_memory_copy/share/resize`,
    // where `offset` is relative to the memory's current offset.
    //
    // Panics if the range starts at/after `size` or ends after `size`.
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let size = self.size();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < size, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => size,
        };
        assert!(end_offset <= size, "End offset after valid range");

        let new_offset = start_offset as isize;
        // `saturating_sub` yields 0 for an empty/inverted range.
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    // Like `calculate_offset_size`, but the range is interpreted over the
    // whole allocated region (`0..self.maxsize()`). The returned offset is
    // made relative to the current offset, so it can be negative — hence the
    // `wrapping_sub` followed by the `isize` cast.
    fn calculate_offset_size_maxsize(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let maxsize = self.maxsize();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < maxsize, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => maxsize,
        };
        assert!(end_offset <= maxsize, "End offset after valid range");

        let offset = self.offset();

        let new_offset = start_offset.wrapping_sub(offset) as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    /// Returns a copy of the given `range` of this memory, with the range
    /// interpreted relative to the usable region.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    /// Returns a copy of the given `range` of this memory, with the range
    /// interpreted relative to the whole allocated region.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    /// Checks whether `mem2` directly follows this memory in the same parent
    /// allocation; on success returns the offset reported by GStreamer.
    #[doc(alias = "gst_memory_is_span")]
    pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
        unsafe {
            let mut offset = mem::MaybeUninit::uninit();
            let res = from_glib(ffi::gst_memory_is_span(
                self.as_mut_ptr(),
                mem2.as_mut_ptr(),
                offset.as_mut_ptr(),
            ));
            if res {
                // SAFETY: `gst_memory_is_span` wrote `offset` when returning TRUE.
                Some(offset.assume_init())
            } else {
                None
            }
        }
    }

    /// Checks whether this memory is of the given memory type name.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: impl IntoGStr) -> bool {
        unsafe {
            mem_type.run_with_gstr(|mem_type| {
                from_glib(ffi::gst_memory_is_type(
                    self.as_mut_ptr(),
                    mem_type.as_ptr(),
                ))
            })
        }
    }

    /// Maps this memory readable, returning a guard that unmaps on drop.
    #[inline]
    pub fn map_readable(&self) -> Result<MemoryMap<'_, Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory readable"))
            }
        }
    }

    /// Maps this memory writable, returning a guard that unmaps on drop.
    /// Requires exclusive access (`&mut self`).
    #[inline]
    pub fn map_writable(&mut self) -> Result<MemoryMap<'_, Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory writable"))
            }
        }
    }

    /// Returns a new memory sharing (not copying) the given `range` of this
    /// memory, with the range interpreted relative to the usable region.
    #[doc(alias = "gst_memory_share")]
    pub fn share(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    /// Returns a new memory sharing the given `range` of this memory, with
    /// the range interpreted relative to the whole allocated region.
    #[doc(alias = "gst_memory_share")]
    pub fn share_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    /// Resizes the usable region to the given `range`, interpreted relative
    /// to the current usable region.
    #[doc(alias = "gst_memory_resize")]
    pub fn resize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    /// Resizes the usable region to the given `range`, interpreted relative
    /// to the whole allocated region.
    #[doc(alias = "gst_memory_resize")]
    pub fn resize_maxsize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    /// Returns a helper that hex-dumps the whole memory via its
    /// `Debug`/`Display` impls.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump(&self) -> Dump<'_> {
        Dump {
            memory: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    /// Returns a helper that hex-dumps the given `range` of the memory.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump<'_> {
        Dump {
            memory: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
}
405
impl<T> MemoryMap<'_, T> {
    /// Returns the size of the mapped region in bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Returns the memory this map was created from.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        unsafe { MemoryRef::from_ptr(self.map_info.memory) }
    }

    /// Returns the mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // `data` may be NULL for an empty mapping; avoid constructing a
        // slice from it.
        if self.map_info.size == 0 {
            return &[];
        }
        // SAFETY: `data`/`size` describe the valid mapped region and stay
        // valid for the lifetime of the map.
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }
}
427
impl MemoryMap<'_, Writable> {
    /// Returns the mapped bytes as a mutable slice (writable maps only).
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        // See `as_slice`: guard against a NULL `data` pointer on empty maps.
        if self.map_info.size == 0 {
            return &mut [];
        }
        // SAFETY: the map was created writable and we have `&mut self`.
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
437
438impl<T> AsRef<[u8]> for MemoryMap<'_, T> {
439 #[inline]
440 fn as_ref(&self) -> &[u8] {
441 self.as_slice()
442 }
443}
444
445impl AsMut<[u8]> for MemoryMap<'_, Writable> {
446 #[inline]
447 fn as_mut(&mut self) -> &mut [u8] {
448 self.as_mut_slice()
449 }
450}
451
452impl<T> Deref for MemoryMap<'_, T> {
453 type Target = [u8];
454
455 #[inline]
456 fn deref(&self) -> &[u8] {
457 self.as_slice()
458 }
459}
460
461impl DerefMut for MemoryMap<'_, Writable> {
462 #[inline]
463 fn deref_mut(&mut self) -> &mut [u8] {
464 self.as_mut_slice()
465 }
466}
467
468impl<T> fmt::Debug for MemoryMap<'_, T> {
469 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
470 f.debug_tuple("MemoryMap").field(&self.memory()).finish()
471 }
472}
473
474impl<'a, T> PartialEq for MemoryMap<'a, T> {
475 fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
476 self.as_slice().eq(other.as_slice())
477 }
478}
479
480impl<T> Eq for MemoryMap<'_, T> {}
481
impl<T> Drop for MemoryMap<'_, T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // Unmap only — the borrowed memory's reference is not ours to drop.
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}
490
// SAFETY: the guard only holds a `GstMapInfo`; presumably the mapping and the
// refcounted `GstMemory` behind it are usable from any thread.
// NOTE(review): mirrors the impls for `MappedMemory` below — confirm against
// the GStreamer thread-safety documentation.
unsafe impl<T> Send for MemoryMap<'_, T> {}
unsafe impl<T> Sync for MemoryMap<'_, T> {}
493
impl<T> MappedMemory<T> {
    /// Returns the mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // `data` may be NULL for an empty mapping; avoid constructing a
        // slice from it.
        if self.map_info.size == 0 {
            return &[];
        }
        // SAFETY: `data`/`size` describe the valid mapped region, which stays
        // mapped for the lifetime of `self`.
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    /// Returns the size of the mapped region in bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Returns a reference to the underlying memory.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        unsafe { MemoryRef::from_ptr(self.map_info.memory) }
    }

    /// Unmaps and returns ownership of the underlying [`Memory`].
    #[inline]
    pub fn into_memory(self) -> Memory {
        // `ManuallyDrop` prevents our `Drop` impl from running (it would
        // unmap *and* unref); the reference is instead transferred to the
        // returned `Memory` before unmapping.
        let mut s = mem::ManuallyDrop::new(self);
        let memory = unsafe { from_glib_full(s.map_info.memory) };
        unsafe {
            ffi::gst_memory_unmap(s.map_info.memory, &mut s.map_info);
        }

        memory
    }
}
526
impl MappedMemory<Writable> {
    /// Returns the mapped bytes as a mutable slice (writable maps only).
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        // See `as_slice`: guard against a NULL `data` pointer on empty maps.
        if self.map_info.size == 0 {
            return &mut [];
        }
        // SAFETY: the map was created writable and we have `&mut self`.
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
536
537impl<T> AsRef<[u8]> for MappedMemory<T> {
538 #[inline]
539 fn as_ref(&self) -> &[u8] {
540 self.as_slice()
541 }
542}
543
544impl AsMut<[u8]> for MappedMemory<Writable> {
545 #[inline]
546 fn as_mut(&mut self) -> &mut [u8] {
547 self.as_mut_slice()
548 }
549}
550
551impl<T> Deref for MappedMemory<T> {
552 type Target = [u8];
553
554 #[inline]
555 fn deref(&self) -> &[u8] {
556 self.as_slice()
557 }
558}
559
560impl DerefMut for MappedMemory<Writable> {
561 #[inline]
562 fn deref_mut(&mut self) -> &mut [u8] {
563 self.as_mut_slice()
564 }
565}
566
impl<T> Drop for MappedMemory<T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // Take back ownership of the memory reference (it was kept alive
            // only through `map_info`), then unmap; the binding to `_memory`
            // releases the reference after the unmap call.
            let _memory = Memory::from_glib_full(self.map_info.memory);
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}
576
577impl<T> fmt::Debug for MappedMemory<T> {
578 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
579 f.debug_tuple("MappedMemory").field(&self.memory()).finish()
580 }
581}
582
583impl<T> PartialEq for MappedMemory<T> {
584 fn eq(&self, other: &MappedMemory<T>) -> bool {
585 self.as_slice().eq(other.as_slice())
586 }
587}
588
589impl<T> Eq for MappedMemory<T> {}
590
// SAFETY: presumably the owned, mapped `GstMemory` is usable from any thread
// — NOTE(review): mirrors the impls for `MemoryMap` above; confirm against
// the GStreamer thread-safety documentation.
unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}
593
/// Helper returned by [`MemoryRef::dump`]/[`MemoryRef::dump_range`] that
/// hex-dumps (a range of) the memory via its `Debug`/`Display` impls.
pub struct Dump<'a> {
    // The memory to dump; mapped readable on demand when formatting.
    memory: &'a MemoryRef,
    // Range bounds into the mapped data, as captured from the caller.
    start: Bound<usize>,
    end: Bound<usize>,
}
599
impl Dump<'_> {
    // Shared implementation for `Display` (hex only) and `Debug` (hex plus
    // offsets and an ASCII column), delegating to `crate::slice::Dump`.
    //
    // Panics if the memory cannot be mapped readable.
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let map = self.memory.map_readable().expect("Failed to map memory");
        let data = map.as_slice();

        let dump = crate::slice::Dump {
            data,
            start: self.start,
            end: self.end,
        };

        if debug {
            <crate::slice::Dump as fmt::Debug>::fmt(&dump, f)
        } else {
            <crate::slice::Dump as fmt::Display>::fmt(&dump, f)
        }
    }
}
618
619impl fmt::Display for Dump<'_> {
620 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
621 self.fmt(f, false)
622 }
623}
624
625impl fmt::Debug for Dump<'_> {
626 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
627 self.fmt(f, true)
628 }
629}
630
/// Trait implemented by all wrapper types around a `GstMemory` subtype.
///
/// # Safety
///
/// `check_memory_type` must return `true` only for memories that really are
/// of the implementing type: the downcast helpers rely on it to justify
/// their pointer casts.
pub unsafe trait MemoryType: crate::prelude::IsMiniObject + AsRef<Memory>
where
    <Self as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    // Returns whether `mem` is of the implementing memory type.
    fn check_memory_type(mem: &MemoryRef) -> bool;
}
637
/// Error returned when extracting a typed memory from a `glib::Value` fails.
#[derive(Debug, thiserror::Error)]
pub enum MemoryTypeMismatchError {
    // The value did not contain a memory at all.
    #[error(transparent)]
    ValueTypeMismatch(#[from] glib::value::ValueTypeMismatchError),
    // The value contained a memory, but not of the requested subtype.
    #[error("the memory is not of the requested type {requested}")]
    MemoryTypeMismatch { requested: &'static str },
}
645
646pub struct MemoryTypeValueTypeChecker<M>(PhantomData<M>);
647
unsafe impl<M> glib::value::ValueTypeChecker for MemoryTypeValueTypeChecker<M>
where
    M: MemoryType + glib::prelude::StaticType,
    <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    type Error = glib::value::ValueTypeMismatchOrNoneError<MemoryTypeMismatchError>;

    // Checks that `value` holds a non-NULL `GstMemory` of subtype `M`.
    fn check(value: &glib::Value) -> Result<(), Self::Error> {
        skip_assert_initialized!();
        // First check that the value holds a memory at all, re-wrapping the
        // error so callers get a `MemoryTypeMismatchError`.
        let mem = value.get::<&Memory>().map_err(|err| match err {
            glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone => {
                glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone
            }
            glib::value::ValueTypeMismatchOrNoneError::WrongValueType(err) => {
                glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                    MemoryTypeMismatchError::ValueTypeMismatch(err),
                )
            }
        })?;

        // Then verify the concrete memory subtype.
        if mem.is_memory_type::<M>() {
            Ok(())
        } else {
            Err(glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                MemoryTypeMismatchError::MemoryTypeMismatch {
                    requested: std::any::type_name::<M>(),
                },
            ))
        }
    }
}
679
680impl AsRef<MemoryRef> for MemoryRef {
681 #[inline]
682 fn as_ref(&self) -> &MemoryRef {
683 self
684 }
685}
686
687impl AsMut<MemoryRef> for MemoryRef {
688 #[inline]
689 fn as_mut(&mut self) -> &mut MemoryRef {
690 self
691 }
692}
693
694impl AsRef<Memory> for Memory {
695 #[inline]
696 fn as_ref(&self) -> &Memory {
697 self
698 }
699}
700
// SAFETY: every `GstMemory` is a plain `Memory`, so the check is trivially
// `true` and the downcast pointer cast is always valid.
unsafe impl MemoryType for Memory {
    #[inline]
    fn check_memory_type(_mem: &MemoryRef) -> bool {
        skip_assert_initialized!();
        true
    }
}
708
impl Memory {
    /// Downcasts this memory to the more specific memory type `M`, handing
    /// `self` back unchanged in `Err` if it is not of that type.
    #[inline]
    pub fn downcast_memory<M: MemoryType>(self) -> Result<M, Self>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(&self) {
            // SAFETY: the type check above justifies reinterpreting the
            // pointer as `M`'s FFI type; ownership is transferred.
            unsafe { Ok(from_glib_full(self.into_glib_ptr() as *mut M::FfiType)) }
        } else {
            Err(self)
        }
    }
}
722
impl MemoryRef {
    /// Returns whether this memory is of the specific memory type `M`.
    #[inline]
    pub fn is_memory_type<M: MemoryType>(&self) -> bool
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        M::check_memory_type(self)
    }

    /// Downcasts this reference to the more specific memory ref type, or
    /// `None` if the memory is not of type `M`.
    #[inline]
    pub fn downcast_memory_ref<M: MemoryType>(&self) -> Option<&M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // SAFETY: the type check justifies the reference cast; both ref
            // types are transparent wrappers over the same FFI struct.
            unsafe { Some(&*(self as *const Self as *const M::RefType)) }
        } else {
            None
        }
    }

    /// Mutable variant of [`Self::downcast_memory_ref`].
    #[inline]
    pub fn downcast_memory_mut<M: MemoryType>(&mut self) -> Option<&mut M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // SAFETY: as above, plus exclusive access via `&mut self`.
            unsafe { Some(&mut *(self as *mut Self as *mut M::RefType)) }
        } else {
            None
        }
    }
}
756
/// Generates an owned/borrowed wrapper pair for a `GstMemory` subtype.
///
/// Expands to the mini-object wrapper, a `MemoryType` impl using
/// `$mem_type_check`, down-/upcast helpers, `Deref` into the parent ref
/// type, `AsRef`/`AsMut` conversions to the parent (and, in the second arm,
/// to each further ancestor), and the `glib::Value` integration.
#[macro_export]
macro_rules! memory_object_wrapper {
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path) => {
        $crate::mini_object_wrapper!($name, $ref_name, $ffi_name);

        // SAFETY: correctness of the downcasts hinges on `$mem_type_check`
        // only accepting memories of this type.
        unsafe impl $crate::memory::MemoryType for $name {
            #[inline]
            fn check_memory_type(mem: &$crate::MemoryRef) -> bool {
                skip_assert_initialized!();
                $mem_type_check(mem)
            }
        }

        impl $name {
            // Downcast to an even more specific memory type, returning
            // `self` unchanged on mismatch.
            #[inline]
            pub fn downcast_memory<M: $crate::memory::MemoryType>(self) -> Result<M, Self>
            where
                <M as $crate::miniobject::IsMiniObject>::RefType: AsRef<$crate::MemoryRef>
                    + AsMut<$crate::MemoryRef>
                    + AsRef<$ref_name>
                    + AsMut<$ref_name>,
            {
                if M::check_memory_type(&self) {
                    unsafe {
                        Ok($crate::glib::translate::from_glib_full(
                            self.into_glib_ptr() as *mut M::FfiType
                        ))
                    }
                } else {
                    Err(self)
                }
            }

            // Infallible upcast to a parent memory type (enforced by the
            // `Self: AsRef<M>` bound).
            #[inline]
            pub fn upcast_memory<M>(self) -> M
            where
                M: $crate::memory::MemoryType
                    + $crate::glib::translate::FromGlibPtrFull<
                        *const <M as $crate::miniobject::IsMiniObject>::FfiType,
                    >,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M>,
            {
                unsafe {
                    $crate::glib::translate::from_glib_full(
                        self.into_glib_ptr() as *const <M as $crate::miniobject::IsMiniObject>::FfiType
                    )
                }
            }
        }

        impl $ref_name {
            // Infallible upcast of the borrowed form to a parent ref type.
            #[inline]
            pub fn upcast_memory_ref<M>(&self) -> &M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_ref()
            }

            // Mutable variant of `upcast_memory_ref`.
            #[inline]
            pub fn upcast_memory_mut<M>(&mut self) -> &mut M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_mut()
            }
        }

        // Deref into the parent ref type; sound because both are transparent
        // wrappers over the same underlying FFI struct.
        impl std::ops::Deref for $ref_name {
            type Target = $parent_memory_ref_type;

            #[inline]
            fn deref(&self) -> &Self::Target {
                unsafe { &*(self as *const _ as *const Self::Target) }
            }
        }

        impl std::ops::DerefMut for $ref_name {
            #[inline]
            fn deref_mut(&mut self) -> &mut Self::Target {
                unsafe { &mut *(self as *mut _ as *mut Self::Target) }
            }
        }

        impl AsRef<$parent_memory_type> for $name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_type {
                unsafe { &*(self as *const _ as *const $parent_memory_type) }
            }
        }

        impl AsRef<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_ref_type {
                self
            }
        }

        impl AsMut<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_mut(&mut self) -> &mut $parent_memory_ref_type {
                &mut *self
            }
        }

        // Both owned and borrowed forms share the `GstMemory` GType.
        impl $crate::glib::types::StaticType for $name {
            #[inline]
            fn static_type() -> glib::types::Type {
                $ref_name::static_type()
            }
        }

        impl $crate::glib::types::StaticType for $ref_name {
            #[inline]
            fn static_type() -> $crate::glib::types::Type {
                unsafe { $crate::glib::translate::from_glib($crate::ffi::gst_memory_get_type()) }
            }
        }

        // `glib::Value` integration: store/extract as a boxed `GstMemory`,
        // with the subtype verified by `MemoryTypeValueTypeChecker`.
        impl $crate::glib::value::ValueType for $name {
            type Type = Self;
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<Self>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                $crate::glib::translate::from_glib_none($crate::glib::gobject_ffi::g_value_get_boxed(
                    $crate::glib::translate::ToGlibPtr::to_glib_none(value).0,
                ) as *mut $ffi_name)
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                // Borrowing directly from the value's storage requires the
                // wrapper to be pointer-sized.
                assert_eq!(
                    std::mem::size_of::<$name>(),
                    std::mem::size_of::<$crate::glib::ffi::gpointer>()
                );
                let value = &*(value as *const $crate::glib::Value as *const $crate::glib::gobject_ffi::GValue);
                let ptr = &value.data[0].v_pointer as *const $crate::glib::ffi::gpointer
                    as *const *const $ffi_name;
                debug_assert!(!(*ptr).is_null());
                &*(ptr as *const $name)
            }
        }

        impl $crate::glib::value::ToValue for $name {
            fn to_value(&self) -> $crate::glib::Value {
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(self).0
                            as *mut _,
                    )
                }
                value
            }

            fn value_type(&self) -> glib::Type {
                <Self as $crate::glib::prelude::StaticType>::static_type()
            }
        }

        impl $crate::glib::value::ToValueOptional for $name {
            fn to_value_optional(s: Option<&Self>) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(&s).0
                            as *mut _,
                    )
                }
                value
            }
        }

        impl From<$name> for $crate::glib::Value {
            fn from(v: $name) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<$name>();
                unsafe {
                    // `take_boxed` transfers ownership of the memory into
                    // the value.
                    $crate::glib::gobject_ffi::g_value_take_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::IntoGlibPtr::<*mut $ffi_name>::into_glib_ptr(v) as *mut _,
                    )
                }
                value
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $ref_name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a glib::Value) -> Self {
                skip_assert_initialized!();
                &*($crate::glib::gobject_ffi::g_value_get_boxed($crate::glib::translate::ToGlibPtr::to_glib_none(value).0)
                    as *const $ref_name)
            }
        }

    };
    // Variant with additional ancestor types beyond the direct parent:
    // delegates to the arm above, then adds `AsRef`/`AsMut` conversions for
    // each further ancestor pair.
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path, $($parent_parent_memory_type:path, $parent_parent_memory_ref_type:path),*) => {
        $crate::memory_object_wrapper!($name, $ref_name, $ffi_name, $mem_type_check, $parent_memory_type, $parent_memory_ref_type);

        $(
            impl AsRef<$parent_parent_memory_type> for $name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_type {
                    unsafe { &*(self as *const _ as *const $parent_parent_memory_type) }
                }
            }

            impl AsRef<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_ref_type {
                    self
                }
            }

            impl AsMut<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_mut(&mut self) -> &mut $parent_parent_memory_ref_type {
                    &mut *self
                }
            }
        )*
    };
}
1003
/// Borrowed view of a `GstMemory` that only exposes read accessors — used by
/// tracing hooks where the memory must not be refcounted or modified.
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
#[doc(alias = "GstMemory")]
pub struct MemoryRefTrace(ffi::GstMemory);
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
impl MemoryRefTrace {
    /// Borrows a `MemoryRefTrace` from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-NULL, point to a valid `GstMemory`, and stay valid
    /// for the chosen lifetime `'a`.
    pub unsafe fn from_ptr<'a>(ptr: *mut ffi::GstMemory) -> &'a MemoryRefTrace {
        unsafe {
            assert!(!ptr.is_null());

            &*(ptr as *const Self)
        }
    }

    /// Returns the underlying raw `GstMemory` pointer.
    pub fn as_ptr(&self) -> *const ffi::GstMemory {
        self as *const Self as *const ffi::GstMemory
    }

    /// Returns the allocator this memory was allocated with, if any.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                Some(Allocator::from_glib_ptr_borrow(&self.0.allocator))
            }
        }
    }

    /// Returns the parent memory this memory is a shared view of, if any.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    /// Returns the maximum size this memory can be resized to.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    /// Returns the alignment of the memory.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    /// Returns the offset of the usable region inside the allocated region.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    /// Returns the size of the usable region.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    /// Returns the memory flags stored in the mini-object header.
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> crate::MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    /// Checks whether this memory is of the given memory type name.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: impl IntoGStr) -> bool {
        unsafe {
            mem_type.run_with_gstr(|mem_type| {
                from_glib(ffi::gst_memory_is_type(
                    self as *const Self as *mut ffi::GstMemory,
                    mem_type.as_ptr(),
                ))
            })
        }
    }
}
1089
#[cfg(test)]
mod tests {
    #[test]
    fn test_map() {
        crate::init().unwrap();

        // Borrowed readable map.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        // Consuming map, then round-trip back into a plain `Memory`.
        let mapped = mem.into_mapped_memory_readable().unwrap();
        assert_eq!(mapped.as_slice(), &[1, 2, 3, 4]);

        let mem = mapped.into_memory();
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
    }

    #[test]
    fn test_share() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        // Share a sub-range, a sub-range of that, and a maxsize-relative
        // share that recovers the full allocation.
        let sub = mem.share(1..=2);
        let sub_sub1 = sub.share(1..=1);
        let sub_sub2 = sub.share_maxsize(0..4);

        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        let map = sub.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[2, 3]);
        drop(map);

        let map = sub_sub1.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[3]);
        drop(map);

        let map = sub_sub2.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);
    }

    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();

        // Full dumps, debug and display form.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", mem.dump()).unwrap();
        assert_eq!(s, "0000: 01 02 03 04 ....");
        s.clear();
        write!(&mut s, "{}", mem.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        // Range dumps, including out-of-range handling.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", mem.dump_range(..)).unwrap();
        assert_eq!(s, "0000: 01 02 03 04 ....");
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(..2)).unwrap();
        assert_eq!(s, "0000: 01 02 ..");
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(2..=3)).unwrap();
        assert_eq!(s, "0002: 03 04 ..");
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>");
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>");
        s.clear();

        // Multi-line dump.
        let mem = crate::Memory::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", mem.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................\n\
             0010: 00 00 00 ..."
        );
        s.clear();
    }

    #[test]
    fn test_value() {
        use glib::prelude::*;

        crate::init().unwrap();

        // A `None` memory round-trips through `glib::Value`.
        let v = None::<&crate::Memory>.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(None)));

        // And so does an actual memory.
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let v = mem.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(Some(_))));
        assert!(v.get::<crate::Memory>().is_ok());
    }
}