gstreamer/buffer.rs

1// Take a look at the license at the top of the repository in the LICENSE file.
2
3use std::{
4    cmp, fmt,
5    marker::PhantomData,
6    mem, ops,
7    ops::{Bound, ControlFlow, Range, RangeBounds},
8    ptr, slice,
9};
10
11use glib::translate::*;
12
13use crate::{
14    BufferCursor, BufferFlags, BufferRefCursor, ClockTime, Memory, MemoryRef, ffi, meta::*,
15};
16
// rustdoc-stripper-ignore-next
/// Marker type selecting read-only access for [`BufferMap`] / [`MappedBuffer`].
pub enum Readable {}
// rustdoc-stripper-ignore-next
/// Marker type selecting read/write access for [`BufferMap`] / [`MappedBuffer`].
pub enum Writable {}

// rustdoc-stripper-ignore-next
/// Decision returned from the closure passed to `BufferRef::foreach_meta_mut()`:
/// whether the meta currently being visited is kept on the buffer or removed.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum BufferMetaForeachAction {
    Keep,
    Remove,
}
25
// `Buffer` is the refcounted owner of a `GstBuffer`, `BufferRef` the borrowed
// counterpart. The macro (defined elsewhere in this crate) generates the
// mini-object plumbing for the pair, using `gst_buffer_get_type()` as the
// GType getter.
mini_object_wrapper!(Buffer, BufferRef, ffi::GstBuffer, || {
    ffi::gst_buffer_get_type()
});
29
// rustdoc-stripper-ignore-next
/// A memory mapping of a [`BufferRef`], borrowing the buffer for `'a`.
///
/// `T` is [`Readable`] or [`Writable`] and selects which accessors are
/// available on the map.
pub struct BufferMap<'a, T> {
    buffer: &'a BufferRef,
    // Filled in by `gst_buffer_map()`; NOTE(review): presumably handed back
    // to `gst_buffer_unmap()` in a `Drop` impl outside this chunk — confirm.
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}

// rustdoc-stripper-ignore-next
/// Like [`BufferMap`] but owning the mapped [`Buffer`] instead of borrowing it.
pub struct MappedBuffer<T> {
    buffer: Buffer,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
41
impl Buffer {
    /// Creates a newly allocated buffer without any data.
    ///
    /// # Returns
    ///
    /// the new [`Buffer`][crate::Buffer].
    #[doc(alias = "gst_buffer_new")]
    #[inline]
    pub fn new() -> Self {
        assert_initialized_main_thread!();

        unsafe { from_glib_full(ffi::gst_buffer_new()) }
    }

    // rustdoc-stripper-ignore-next
    /// Creates a buffer with a single memory block of `size` bytes, allocated
    /// with the default allocator (both allocator and params passed as NULL).
    ///
    /// # Errors
    ///
    /// Returns an error if `gst_buffer_new_allocate()` returned NULL,
    /// i.e. the allocation failed.
    #[doc(alias = "gst_buffer_new_allocate")]
    #[doc(alias = "gst_buffer_new_and_alloc")]
    #[inline]
    pub fn with_size(size: usize) -> Result<Self, glib::BoolError> {
        assert_initialized_main_thread!();

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_new_allocate(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
            .ok_or_else(|| glib::bool_error!("Failed to allocate buffer"))
        }
    }

    // rustdoc-stripper-ignore-next
    /// Creates a buffer wrapping `slice` without copying it, with mutable
    /// access to the data.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_mut_slice(slice);
        let mut buffer = Buffer::new();
        {
            // The buffer was just created, so `get_mut()` cannot fail.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            // NOTE(review): TAG_MEMORY is unset here and in `from_slice()`,
            // presumably so a freshly wrapped buffer does not advertise a
            // memory change — confirm against the GStreamer flag docs.
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    // rustdoc-stripper-ignore-next
    /// Creates a buffer wrapping `slice` without copying it; the wrapped data
    /// is read-only.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_slice(slice);
        let mut buffer = Buffer::new();
        {
            // The buffer was just created, so `get_mut()` cannot fail.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            // NOTE(review): see the TAG_MEMORY note in `from_mut_slice()`.
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    // rustdoc-stripper-ignore-next
    /// Attempts to extract the underlying wrapped value of type `T` from this `Buffer`.
    ///
    /// This will only succeed if:
    /// - The buffer contains exactly one memory
    /// - That memory was created with `Memory::from_slice()` wrapping type `T`
    /// - The memory can be converted (see `Memory::try_into_inner()` requirements)
    ///
    /// On success, the `Buffer` is consumed and the original wrapped value is returned.
    /// On failure, the original `Buffer` and an error are returned in the `Err` variant.
    ///
    /// # Examples
    ///
    /// ```
    /// use gstreamer::Buffer;
    ///
    /// gstreamer::init().unwrap();
    ///
    /// let vec = vec![1u8, 2, 3, 4, 5];
    /// let expected = vec.clone();
    /// let buf = Buffer::from_slice(vec);
    ///
    /// let converted: Vec<u8> = buf.try_into_inner().unwrap();
    /// assert_eq!(converted, expected);
    /// ```
    #[inline]
    pub fn try_into_inner<T: 'static>(self) -> Result<T, (Self, crate::MemoryIntoInnerError)> {
        if self.n_memory() != 1 {
            return Err((self, crate::MemoryIntoInnerError::MultipleMemoryBlocks));
        }

        // Check if the buffer is writable first to ensure we have unique access to the memory
        if !self.is_writable() {
            return Err((self, crate::MemoryIntoInnerError::NotWritable));
        }

        unsafe {
            // Peek the memory as we own a reference on it via the buffer
            // and we know there is exactly one memory block
            let mem_ptr = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), 0);

            // This should never be null after validation
            assert!(
                !mem_ptr.is_null(),
                "peek_memory returned null after validation - this is a bug"
            );

            match crate::memory_wrapped::try_into_from_memory_ptr(mem_ptr) {
                Ok(value) => {
                    // Detach the (now-empty) memory from the buffer before
                    // handing the inner value back.
                    ffi::gst_buffer_remove_memory(self.as_mut_ptr(), 0);

                    Ok(value)
                }
                Err(err) => Err((self, err)),
            }
        }
    }

    // rustdoc-stripper-ignore-next
    /// Consumes the buffer and maps its memory for reading.
    ///
    /// # Errors
    ///
    /// If `gst_buffer_map()` fails, the buffer is handed back unchanged in
    /// the `Err` variant.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_readable(self) -> Result<MappedBuffer<Readable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // Safety: only read after gst_buffer_map() reported success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    // rustdoc-stripper-ignore-next
    /// Consumes the buffer and maps its memory for reading and writing.
    ///
    /// # Errors
    ///
    /// If `gst_buffer_map()` fails, the buffer is handed back unchanged in
    /// the `Err` variant.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_writable(self) -> Result<MappedBuffer<Writable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // Safety: only read after gst_buffer_map() reported success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    // rustdoc-stripper-ignore-next
    /// Consumes the buffer and returns a read cursor over its content.
    #[inline]
    pub fn into_cursor_readable(self) -> BufferCursor<Readable> {
        BufferCursor::new_readable(self)
    }

    // rustdoc-stripper-ignore-next
    /// Consumes the buffer and returns a write cursor over its content.
    ///
    /// # Errors
    ///
    /// Fails if `BufferCursor::new_writable()` cannot create a writable cursor.
    #[inline]
    pub fn into_cursor_writable(self) -> Result<BufferCursor<Writable>, glib::BoolError> {
        BufferCursor::new_writable(self)
    }

    /// Appends all the memory from `other` to `self`. Afterwards `self`
    /// contains a concatenation of its previous memory and the memory of
    /// `other`; `self` is updated in place with the buffer returned by
    /// `gst_buffer_append()`.
    /// ## `other`
    /// the second source [`Buffer`][crate::Buffer] to append.
    #[doc(alias = "gst_buffer_append")]
    pub fn append(&mut self, other: Self) {
        unsafe {
            let ptr = ffi::gst_buffer_append(self.as_mut_ptr(), other.into_glib_ptr());
            self.replace_ptr(ptr);
        }
    }
}
256
257impl Default for Buffer {
258    fn default() -> Self {
259        Self::new()
260    }
261}
262
263impl BufferRef {
264    #[doc(alias = "gst_buffer_map")]
265    #[inline]
266    pub fn map_readable(&self) -> Result<BufferMap<'_, Readable>, glib::BoolError> {
267        unsafe {
268            let mut map_info = mem::MaybeUninit::uninit();
269            let res =
270                ffi::gst_buffer_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
271            if res == glib::ffi::GTRUE {
272                Ok(BufferMap {
273                    buffer: self,
274                    map_info: map_info.assume_init(),
275                    phantom: PhantomData,
276                })
277            } else {
278                Err(glib::bool_error!("Failed to map buffer readable"))
279            }
280        }
281    }
282
283    #[doc(alias = "gst_buffer_map")]
284    #[inline]
285    pub fn map_writable(&mut self) -> Result<BufferMap<'_, Writable>, glib::BoolError> {
286        unsafe {
287            let mut map_info = mem::MaybeUninit::uninit();
288            let res = ffi::gst_buffer_map(
289                self.as_mut_ptr(),
290                map_info.as_mut_ptr(),
291                ffi::GST_MAP_READWRITE,
292            );
293            if res == glib::ffi::GTRUE {
294                Ok(BufferMap {
295                    buffer: self,
296                    map_info: map_info.assume_init(),
297                    phantom: PhantomData,
298                })
299            } else {
300                Err(glib::bool_error!("Failed to map buffer writable"))
301            }
302        }
303    }
304
    // Converts a generic `RangeBounds` over memory indices into the
    // `(idx, length)` pair expected by the `gst_buffer_*_range()` FFI calls,
    // validated against the current number of memory blocks.
    //
    // Errors if the range starts at or past `n_memory`, ends past `n_memory`,
    // or its length does not fit into an `i32`.
    fn memory_range_into_idx_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(u32, i32), glib::BoolError> {
        let n_memory = self.n_memory();
        debug_assert!(n_memory <= u32::MAX as usize);

        let start_idx = match range.start_bound() {
            // Inclusive start must point at an existing memory.
            ops::Bound::Included(idx) if *idx >= n_memory => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            // Exclusive start: `idx + 1` must not overflow and must stay in range.
            ops::Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= n_memory) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // Inclusive end: `idx + 1` must not overflow and may equal `n_memory`.
            ops::Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > n_memory) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            // Exclusive end may be at most `n_memory`.
            ops::Bound::Excluded(idx) if *idx > n_memory => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => n_memory,
        };

        Ok((
            start_idx as u32,
            // The FFI length parameter is an i32; reject ranges that don't fit.
            i32::try_from(end_idx - start_idx).map_err(|_| glib::bool_error!("Too large range"))?,
        ))
    }
341
342    #[doc(alias = "gst_buffer_map_range")]
343    #[inline]
344    pub fn map_range_readable(
345        &self,
346        range: impl RangeBounds<usize>,
347    ) -> Result<BufferMap<'_, Readable>, glib::BoolError> {
348        let (idx, len) = self.memory_range_into_idx_len(range)?;
349        unsafe {
350            let mut map_info = mem::MaybeUninit::uninit();
351            let res = ffi::gst_buffer_map_range(
352                self.as_mut_ptr(),
353                idx,
354                len,
355                map_info.as_mut_ptr(),
356                ffi::GST_MAP_READ,
357            );
358            if res == glib::ffi::GTRUE {
359                Ok(BufferMap {
360                    buffer: self,
361                    map_info: map_info.assume_init(),
362                    phantom: PhantomData,
363                })
364            } else {
365                Err(glib::bool_error!("Failed to map buffer readable"))
366            }
367        }
368    }
369
370    #[doc(alias = "gst_buffer_map_range")]
371    #[inline]
372    pub fn map_range_writable(
373        &mut self,
374        range: impl RangeBounds<usize>,
375    ) -> Result<BufferMap<'_, Writable>, glib::BoolError> {
376        let (idx, len) = self.memory_range_into_idx_len(range)?;
377        unsafe {
378            let mut map_info = mem::MaybeUninit::uninit();
379            let res = ffi::gst_buffer_map_range(
380                self.as_mut_ptr(),
381                idx,
382                len,
383                map_info.as_mut_ptr(),
384                ffi::GST_MAP_READWRITE,
385            );
386            if res == glib::ffi::GTRUE {
387                Ok(BufferMap {
388                    buffer: self,
389                    map_info: map_info.assume_init(),
390                    phantom: PhantomData,
391                })
392            } else {
393                Err(glib::bool_error!("Failed to map buffer writable"))
394            }
395        }
396    }
397
    // Converts a generic `RangeBounds` over byte positions into the
    // `(offset, length)` pair used by the byte-oriented FFI calls,
    // validated against the buffer's current total size.
    //
    // Errors if the range starts at or past `size` or ends past `size`.
    pub(crate) fn byte_range_into_offset_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(usize, usize), glib::BoolError> {
        let size = self.size();

        let start_idx = match range.start_bound() {
            // Inclusive start must point at an existing byte.
            ops::Bound::Included(idx) if *idx >= size => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            // Exclusive start: `idx + 1` must not overflow and must stay in range.
            ops::Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= size) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // Inclusive end: `idx + 1` must not overflow and may equal `size`.
            ops::Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > size) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            // Exclusive end may be at most `size`.
            ops::Bound::Excluded(idx) if *idx > size => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => size,
        };

        Ok((start_idx, end_idx - start_idx))
    }
430
431    #[doc(alias = "gst_buffer_copy_region")]
432    pub fn copy_region(
433        &self,
434        flags: crate::BufferCopyFlags,
435        range: impl RangeBounds<usize>,
436    ) -> Result<Buffer, glib::BoolError> {
437        let (offset, size) = self.byte_range_into_offset_len(range)?;
438
439        unsafe {
440            Option::<_>::from_glib_full(ffi::gst_buffer_copy_region(
441                self.as_mut_ptr(),
442                flags.into_glib(),
443                offset,
444                size,
445            ))
446            .ok_or_else(|| glib::bool_error!("Failed to copy region of buffer"))
447        }
448    }
449
450    #[doc(alias = "gst_buffer_copy_into")]
451    pub fn copy_into(
452        &self,
453        dest: &mut BufferRef,
454        flags: crate::BufferCopyFlags,
455        range: impl RangeBounds<usize>,
456    ) -> Result<(), glib::BoolError> {
457        let (offset, size) = self.byte_range_into_offset_len(range)?;
458
459        unsafe {
460            glib::result_from_gboolean!(
461                ffi::gst_buffer_copy_into(
462                    dest.as_mut_ptr(),
463                    self.as_mut_ptr(),
464                    flags.into_glib(),
465                    offset,
466                    size,
467                ),
468                "Failed to copy into destination buffer",
469            )
470        }
471    }
472
473    #[doc(alias = "gst_buffer_fill")]
474    pub fn copy_from_slice(&mut self, offset: usize, slice: &[u8]) -> Result<(), usize> {
475        let maxsize = self.maxsize();
476        let size = slice.len();
477
478        assert!(maxsize >= offset && maxsize - offset >= size);
479
480        let copied = unsafe {
481            let src = slice.as_ptr();
482            ffi::gst_buffer_fill(
483                self.as_mut_ptr(),
484                offset,
485                src as glib::ffi::gconstpointer,
486                size,
487            )
488        };
489
490        if copied == size { Ok(()) } else { Err(copied) }
491    }
492
493    #[doc(alias = "gst_buffer_extract")]
494    pub fn copy_to_slice(&self, offset: usize, slice: &mut [u8]) -> Result<(), usize> {
495        let maxsize = self.size();
496        let size = slice.len();
497
498        assert!(maxsize >= offset && maxsize - offset >= size);
499
500        let copied = unsafe {
501            let dest = slice.as_mut_ptr();
502            ffi::gst_buffer_extract(self.as_mut_ptr(), offset, dest as glib::ffi::gpointer, size)
503        };
504
505        if copied == size { Ok(()) } else { Err(copied) }
506    }
507
508    #[doc(alias = "gst_buffer_memset")]
509    pub fn memset(
510        &mut self,
511        range: impl RangeBounds<usize>,
512        val: u8,
513    ) -> Result<usize, glib::BoolError> {
514        let (offset, size) = self.byte_range_into_offset_len(range)?;
515
516        unsafe { Ok(ffi::gst_buffer_memset(self.as_mut_ptr(), offset, val, size)) }
517    }
518
519    #[doc(alias = "gst_buffer_memcmp")]
520    pub fn memcmp(
521        &mut self,
522        range: impl RangeBounds<usize>,
523        slice: &[u8],
524    ) -> Result<cmp::Ordering, glib::BoolError> {
525        let (offset, size) = self.byte_range_into_offset_len(range)?;
526
527        assert!(slice.len() >= size);
528
529        unsafe {
530            let res =
531                ffi::gst_buffer_memcmp(self.as_mut_ptr(), offset, slice.as_ptr() as *const _, size);
532
533            Ok(from_glib(res))
534        }
535    }
536
537    #[doc(alias = "gst_buffer_copy_deep")]
538    pub fn copy_deep(&self) -> Result<Buffer, glib::BoolError> {
539        unsafe {
540            Option::<_>::from_glib_full(ffi::gst_buffer_copy_deep(self.as_ptr()))
541                .ok_or_else(|| glib::bool_error!("Failed to deep copy buffer"))
542        }
543    }
544
545    #[doc(alias = "get_sizes")]
546    #[doc(alias = "gst_buffer_get_sizes")]
547    pub fn sizes(&self) -> (usize, usize, usize) {
548        unsafe {
549            let mut offset = 0;
550            let mut maxsize = 0;
551            let total_size =
552                ffi::gst_buffer_get_sizes(mut_override(self.as_ptr()), &mut offset, &mut maxsize);
553
554            (total_size, offset, maxsize)
555        }
556    }
557
558    #[doc(alias = "get_sizes_range")]
559    #[doc(alias = "gst_buffer_get_sizes_range")]
560    pub fn sizes_range(&self, range: impl RangeBounds<usize>) -> (usize, usize, usize) {
561        let (idx, len) = self
562            .memory_range_into_idx_len(range)
563            .expect("Invalid memory range");
564
565        unsafe {
566            let mut offset = 0;
567            let mut maxsize = 0;
568            let total_size = ffi::gst_buffer_get_sizes_range(
569                mut_override(self.as_ptr()),
570                idx,
571                len,
572                &mut offset,
573                &mut maxsize,
574            );
575
576            (total_size, offset, maxsize)
577        }
578    }
579
580    #[doc(alias = "get_size")]
581    #[doc(alias = "gst_buffer_get_size")]
582    pub fn size(&self) -> usize {
583        unsafe { ffi::gst_buffer_get_size(self.as_mut_ptr()) }
584    }
585
586    #[doc(alias = "get_maxsize")]
587    pub fn maxsize(&self) -> usize {
588        unsafe {
589            let mut maxsize = mem::MaybeUninit::uninit();
590            ffi::gst_buffer_get_sizes_range(
591                self.as_mut_ptr(),
592                0,
593                -1,
594                ptr::null_mut(),
595                maxsize.as_mut_ptr(),
596            );
597
598            maxsize.assume_init()
599        }
600    }
601
602    #[doc(alias = "gst_buffer_set_size")]
603    pub fn set_size(&mut self, size: usize) {
604        assert!(self.maxsize() >= size);
605
606        unsafe {
607            ffi::gst_buffer_set_size(self.as_mut_ptr(), size as isize);
608        }
609    }
610
611    #[doc(alias = "gst_buffer_resize")]
612    pub fn resize(&mut self, range: impl RangeBounds<usize>) -> Result<(), glib::BoolError> {
613        let (offset, size) = self.byte_range_into_offset_len(range)?;
614
615        unsafe {
616            ffi::gst_buffer_resize(self.as_mut_ptr(), offset as isize, size as isize);
617        }
618
619        Ok(())
620    }
621
622    #[doc(alias = "gst_buffer_resize_range")]
623    pub fn resize_range(
624        &mut self,
625        mem_range: impl RangeBounds<usize>,
626        byte_range: impl RangeBounds<usize>,
627    ) -> Result<(), glib::BoolError> {
628        let (idx, len) = self
629            .memory_range_into_idx_len(mem_range)
630            .expect("Invalid memory range");
631        let (offset, size) = self.byte_range_into_offset_len(byte_range)?;
632
633        unsafe {
634            glib::result_from_gboolean!(
635                ffi::gst_buffer_resize_range(
636                    self.as_mut_ptr(),
637                    idx,
638                    len,
639                    offset as isize,
640                    size as isize,
641                ),
642                "Failed to resize buffer with ranges"
643            )
644        }
645    }
646
    // rustdoc-stripper-ignore-next
    /// Returns the buffer's offset field (`GstBuffer.offset`).
    #[doc(alias = "get_offset")]
    #[doc(alias = "GST_BUFFER_OFFSET")]
    #[inline]
    pub fn offset(&self) -> u64 {
        self.0.offset
    }

    // rustdoc-stripper-ignore-next
    /// Sets the buffer's offset field.
    #[inline]
    pub fn set_offset(&mut self, offset: u64) {
        self.0.offset = offset;
    }

    // rustdoc-stripper-ignore-next
    /// Returns the buffer's end offset field (`GstBuffer.offset_end`).
    #[doc(alias = "get_offset_end")]
    #[doc(alias = "GST_BUFFER_OFFSET_END")]
    #[inline]
    pub fn offset_end(&self) -> u64 {
        self.0.offset_end
    }

    // rustdoc-stripper-ignore-next
    /// Sets the buffer's end offset field.
    #[inline]
    pub fn set_offset_end(&mut self, offset_end: u64) {
        self.0.offset_end = offset_end;
    }

    // rustdoc-stripper-ignore-next
    /// Returns the presentation timestamp, or `None` if unset.
    #[doc(alias = "get_pts")]
    #[doc(alias = "GST_BUFFER_PTS")]
    #[inline]
    pub fn pts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.pts) }
    }

    // rustdoc-stripper-ignore-next
    /// Sets the presentation timestamp; `None` clears it.
    #[inline]
    pub fn set_pts(&mut self, pts: impl Into<Option<ClockTime>>) {
        self.0.pts = pts.into().into_glib();
    }

    // rustdoc-stripper-ignore-next
    /// Returns the decoding timestamp, or `None` if unset.
    #[doc(alias = "get_dts")]
    #[doc(alias = "GST_BUFFER_DTS")]
    #[inline]
    pub fn dts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.dts) }
    }

    // rustdoc-stripper-ignore-next
    /// Sets the decoding timestamp; `None` clears it.
    #[inline]
    pub fn set_dts(&mut self, dts: impl Into<Option<ClockTime>>) {
        self.0.dts = dts.into().into_glib();
    }
694
695    #[doc(alias = "get_dts_or_pts")]
696    #[doc(alias = "GST_BUFFER_DTS_OR_PTS")]
697    #[inline]
698    pub fn dts_or_pts(&self) -> Option<ClockTime> {
699        let val = self.dts();
700        if val.is_none() { self.pts() } else { val }
701    }
702
    // rustdoc-stripper-ignore-next
    /// Returns the buffer's duration, or `None` if unset.
    #[doc(alias = "get_duration")]
    #[doc(alias = "GST_BUFFER_DURATION")]
    #[inline]
    pub fn duration(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.duration) }
    }

    // rustdoc-stripper-ignore-next
    /// Sets the buffer's duration; `None` clears it.
    #[inline]
    pub fn set_duration(&mut self, duration: impl Into<Option<ClockTime>>) {
        self.0.duration = duration.into().into_glib();
    }

    // rustdoc-stripper-ignore-next
    /// Returns the buffer's flags (unknown bits are truncated).
    #[doc(alias = "get_flags")]
    #[doc(alias = "GST_BUFFER_FLAGS")]
    #[inline]
    pub fn flags(&self) -> BufferFlags {
        BufferFlags::from_bits_truncate(self.0.mini_object.flags)
    }

    // rustdoc-stripper-ignore-next
    /// Sets (ORs in) the given flags on the buffer.
    #[doc(alias = "GST_BUFFER_FLAG_SET")]
    #[inline]
    pub fn set_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags |= flags.bits();
    }

    // rustdoc-stripper-ignore-next
    /// Clears the given flags on the buffer.
    #[doc(alias = "GST_BUFFER_FLAG_UNSET")]
    #[inline]
    pub fn unset_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags &= !flags.bits();
    }
733
734    #[doc(alias = "get_meta")]
735    #[doc(alias = "gst_buffer_get_meta")]
736    #[inline]
737    pub fn meta<T: MetaAPI>(&self) -> Option<MetaRef<'_, T>> {
738        unsafe {
739            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
740            if meta.is_null() {
741                None
742            } else {
743                Some(T::from_ptr(self, meta as *const <T as MetaAPI>::GstType))
744            }
745        }
746    }
747
748    #[doc(alias = "get_meta_mut")]
749    #[inline]
750    pub fn meta_mut<T: MetaAPI>(&mut self) -> Option<MetaRefMut<'_, T, crate::meta::Standalone>> {
751        unsafe {
752            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
753            if meta.is_null() {
754                None
755            } else {
756                Some(T::from_mut_ptr(self, meta as *mut <T as MetaAPI>::GstType))
757            }
758        }
759    }
760
    // rustdoc-stripper-ignore-next
    /// Returns an iterator over all metas of type `T` on the buffer.
    pub fn iter_meta<T: MetaAPI>(&self) -> MetaIter<'_, T> {
        MetaIter::new(self)
    }

    // rustdoc-stripper-ignore-next
    /// Returns a mutable iterator over all metas of type `T` on the buffer.
    pub fn iter_meta_mut<T: MetaAPI>(&mut self) -> MetaIterMut<'_, T> {
        MetaIterMut::new(self)
    }
768
    // rustdoc-stripper-ignore-next
    /// Calls `func` for each meta on the buffer until it returns
    /// `ControlFlow::Break(())` or all metas have been visited.
    ///
    /// Returns the gboolean result of `gst_buffer_foreach_meta()`, converted
    /// to `bool` (i.e. whether iteration completed without being aborted).
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(&self, func: F) -> bool {
        unsafe extern "C" fn trampoline<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            unsafe {
                // `user_data` is the pointer to the caller's closure set up below.
                let func = user_data as *mut F;
                let res = (*func)(Meta::from_ptr(BufferRef::from_ptr(buffer), *meta));

                // Continue iterating only while the closure returns Continue.
                matches!(res, ControlFlow::Continue(_)).into_glib()
            }
        }

        unsafe {
            // The closure lives on this stack frame for the whole FFI call,
            // so passing a raw pointer to it is sound.
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }
795
    // rustdoc-stripper-ignore-next
    /// Calls `func` for each meta on the buffer with mutable access.
    ///
    /// The closure's `ControlFlow` decides whether iteration continues, and
    /// its [`BufferMetaForeachAction`] payload decides whether the current
    /// meta is kept or removed.
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta_mut<
        F: FnMut(
            MetaRefMut<Meta, crate::meta::Iterated>,
        ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
    >(
        &mut self,
        func: F,
    ) -> bool {
        unsafe extern "C" fn trampoline<
            F: FnMut(
                MetaRefMut<Meta, crate::meta::Iterated>,
            ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
        >(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            unsafe {
                // `user_data` is the pointer to the caller's closure set up below.
                let func = user_data as *mut F;
                let res = (*func)(Meta::from_mut_ptr(BufferRef::from_mut_ptr(buffer), *meta));

                // Split the result into "continue iterating?" and the
                // keep/remove decision for the current meta.
                let (cont, action) = match res {
                    ControlFlow::Continue(action) => (true, action),
                    ControlFlow::Break(action) => (false, action),
                };

                // Per the gst_buffer_foreach_meta() contract, setting `*meta`
                // to NULL removes the current meta from the buffer.
                if action == BufferMetaForeachAction::Remove {
                    *meta = ptr::null_mut();
                }

                cont.into_glib()
            }
        }

        unsafe {
            // The closure lives on this stack frame for the whole FFI call,
            // so passing a raw pointer to it is sound.
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }
842
    // rustdoc-stripper-ignore-next
    /// Appends `mem` to the end of the buffer's memory list, transferring
    /// ownership of the memory to the buffer.
    #[doc(alias = "gst_buffer_append_memory")]
    pub fn append_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_append_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }
847
848    #[doc(alias = "gst_buffer_find_memory")]
849    pub fn find_memory(&self, range: impl RangeBounds<usize>) -> Option<(Range<usize>, usize)> {
850        let (offset, size) = self.byte_range_into_offset_len(range).ok()?;
851
852        unsafe {
853            let mut idx = mem::MaybeUninit::uninit();
854            let mut length = mem::MaybeUninit::uninit();
855            let mut skip = mem::MaybeUninit::uninit();
856
857            let res = from_glib(ffi::gst_buffer_find_memory(
858                self.as_mut_ptr(),
859                offset,
860                size,
861                idx.as_mut_ptr(),
862                length.as_mut_ptr(),
863                skip.as_mut_ptr(),
864            ));
865
866            if res {
867                let idx = idx.assume_init() as usize;
868                let length = length.assume_init() as usize;
869                let skip = skip.assume_init();
870                Some((idx..(idx + length), skip))
871            } else {
872                None
873            }
874        }
875    }
876
    // rustdoc-stripper-ignore-next
    /// Returns all the buffer's memory merged into a single [`Memory`], or
    /// `None` if `gst_buffer_get_all_memory()` returns NULL.
    #[doc(alias = "get_all_memory")]
    #[doc(alias = "gst_buffer_get_all_memory")]
    pub fn all_memory(&self) -> Option<Memory> {
        unsafe { from_glib_full(ffi::gst_buffer_get_all_memory(self.as_mut_ptr())) }
    }

    // rustdoc-stripper-ignore-next
    /// Returns the global maximum number of memory blocks a buffer can hold,
    /// as reported by `gst_buffer_get_max_memory()`.
    #[doc(alias = "get_max_memory")]
    #[doc(alias = "gst_buffer_get_max_memory")]
    pub fn max_memory() -> usize {
        unsafe { ffi::gst_buffer_get_max_memory() as usize }
    }
888
889    #[doc(alias = "get_memory")]
890    #[doc(alias = "gst_buffer_get_memory")]
891    pub fn memory(&self, idx: usize) -> Option<Memory> {
892        if idx >= self.n_memory() {
893            return None;
894        }
895        unsafe {
896            let res = ffi::gst_buffer_get_memory(self.as_mut_ptr(), idx as u32);
897            Some(from_glib_full(res))
898        }
899    }
900
901    #[doc(alias = "get_memory_range")]
902    #[doc(alias = "gst_buffer_get_memory_range")]
903    pub fn memory_range(&self, range: impl RangeBounds<usize>) -> Option<Memory> {
904        let (idx, len) = self.memory_range_into_idx_len(range).ok()?;
905
906        unsafe {
907            let res = ffi::gst_buffer_get_memory_range(self.as_mut_ptr(), idx, len);
908            from_glib_full(res)
909        }
910    }
911
912    #[doc(alias = "gst_buffer_insert_memory")]
913    pub fn insert_memory(&mut self, idx: impl Into<Option<usize>>, mem: Memory) {
914        let n_memory = self.n_memory();
915        let idx = idx.into();
916        let idx = idx.unwrap_or(n_memory);
917        assert!(idx <= self.n_memory());
918        unsafe { ffi::gst_buffer_insert_memory(self.as_mut_ptr(), idx as i32, mem.into_glib_ptr()) }
919    }
920
921    #[doc(alias = "gst_buffer_is_all_memory_writable")]
922    pub fn is_all_memory_writable(&self) -> bool {
923        unsafe { from_glib(ffi::gst_buffer_is_all_memory_writable(self.as_mut_ptr())) }
924    }
925
926    #[doc(alias = "gst_buffer_is_memory_range_writable")]
927    pub fn is_memory_range_writable(&self, range: impl RangeBounds<usize>) -> bool {
928        let Some((idx, len)) = self.memory_range_into_idx_len(range).ok() else {
929            return false;
930        };
931
932        unsafe {
933            from_glib(ffi::gst_buffer_is_memory_range_writable(
934                self.as_mut_ptr(),
935                idx,
936                len,
937            ))
938        }
939    }
940
941    #[doc(alias = "gst_buffer_n_memory")]
942    pub fn n_memory(&self) -> usize {
943        unsafe { ffi::gst_buffer_n_memory(self.as_ptr() as *mut _) as usize }
944    }
945
    #[doc(alias = "gst_buffer_peek_memory")]
    // Borrows the memory at `idx` without copying or taking a reference.
    //
    // Panics if `idx` is out of range.
    pub fn peek_memory(&self, idx: usize) -> &MemoryRef {
        assert!(idx < self.n_memory());
        unsafe { MemoryRef::from_ptr(ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32)) }
    }
951
    #[doc(alias = "gst_buffer_peek_memory")]
    // Mutably borrows the memory at `idx`; fails if the underlying
    // mini-object is not writable (i.e. shared with other references).
    //
    // Panics if `idx` is out of range.
    pub fn peek_memory_mut(&mut self, idx: usize) -> Result<&mut MemoryRef, glib::BoolError> {
        assert!(idx < self.n_memory());
        unsafe {
            let mem = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32);
            if ffi::gst_mini_object_is_writable(mem as *mut _) == glib::ffi::GFALSE {
                Err(glib::bool_error!("Memory not writable"))
            } else {
                Ok(MemoryRef::from_mut_ptr(mem))
            }
        }
    }
964
    #[doc(alias = "gst_buffer_prepend_memory")]
    // Adds `mem` before all existing memories, transferring ownership to C.
    pub fn prepend_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_prepend_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }
969
    #[doc(alias = "gst_buffer_remove_all_memory")]
    // Drops every memory from the buffer.
    pub fn remove_all_memory(&mut self) {
        unsafe { ffi::gst_buffer_remove_all_memory(self.as_mut_ptr()) }
    }
974
    #[doc(alias = "gst_buffer_remove_memory")]
    // Removes the memory at `idx`. Panics if `idx` is out of range.
    pub fn remove_memory(&mut self, idx: usize) {
        assert!(idx < self.n_memory());
        unsafe { ffi::gst_buffer_remove_memory(self.as_mut_ptr(), idx as u32) }
    }
980
    #[doc(alias = "gst_buffer_remove_memory_range")]
    // Removes all memories in `range`. Panics on an invalid range.
    pub fn remove_memory_range(&mut self, range: impl RangeBounds<usize>) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe { ffi::gst_buffer_remove_memory_range(self.as_mut_ptr(), idx, len) }
    }
989
    #[doc(alias = "gst_buffer_replace_all_memory")]
    // Replaces every memory of the buffer with `mem`.
    pub fn replace_all_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_replace_all_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }
994
    #[doc(alias = "gst_buffer_replace_memory")]
    // Replaces the memory at `idx` with `mem`. Panics if `idx` is out of range.
    pub fn replace_memory(&mut self, idx: usize, mem: Memory) {
        assert!(idx < self.n_memory());
        unsafe {
            ffi::gst_buffer_replace_memory(self.as_mut_ptr(), idx as u32, mem.into_glib_ptr())
        }
    }
1002
    #[doc(alias = "gst_buffer_replace_memory_range")]
    // Replaces all memories in `range` with `mem`. Panics on an invalid range.
    pub fn replace_memory_range(&mut self, range: impl RangeBounds<usize>, mem: Memory) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe {
            ffi::gst_buffer_replace_memory_range(self.as_mut_ptr(), idx, len, mem.into_glib_ptr())
        }
    }
1013
    // Iterator over borrowed `&MemoryRef`s of this buffer.
    pub fn iter_memories(&self) -> Iter<'_> {
        Iter::new(self)
    }
1017
1018    pub fn iter_memories_mut(&mut self) -> Result<IterMut<'_>, glib::BoolError> {
1019        if !self.is_all_memory_writable() {
1020            Err(glib::bool_error!("Not all memory are writable"))
1021        } else {
1022            Ok(IterMut::new(self))
1023        }
1024    }
1025
    // Iterator yielding owned `Memory` values (each carries its own reference).
    pub fn iter_memories_owned(&self) -> IterOwned<'_> {
        IterOwned::new(self)
    }
1029
    // Read cursor over the buffer's bytes across memory boundaries.
    pub fn as_cursor_readable(&self) -> BufferRefCursor<&BufferRef> {
        BufferRefCursor::new_readable(self)
    }
1033
    // Write cursor over the buffer's bytes; fallible (presumably requires
    // writable memories — see `BufferRefCursor::new_writable`).
    pub fn as_cursor_writable(
        &mut self,
    ) -> Result<BufferRefCursor<&mut BufferRef>, glib::BoolError> {
        BufferRefCursor::new_writable(self)
    }
1039
1040    #[doc(alias = "gst_util_dump_buffer")]
1041    pub fn dump(&self) -> Dump<'_> {
1042        Dump {
1043            buffer: self,
1044            start: Bound::Unbounded,
1045            end: Bound::Unbounded,
1046        }
1047    }
1048
    #[doc(alias = "gst_util_dump_buffer")]
    // Hexdump helper restricted to `range`; bounds are validated lazily when
    // the returned `Dump` is formatted.
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump<'_> {
        Dump {
            buffer: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
1057}
1058
// Generates an iterator type over a buffer's metas, driven by
// gst_buffer_iterate_meta().
//
// Parameters:
//   $name:           name of the generated iterator type
//   $typ:            borrowed buffer type stored in the iterator
//   $mtyp:           item type the iterator yields
//   $prepare_buffer: turns the raw buffer pointer back into a borrow
//   $from_ptr:       builds the item from the buffer borrow and raw meta pointer
macro_rules! define_meta_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $prepare_buffer:expr, $from_ptr:expr) => {
    #[must_use = "iterators are lazy and do nothing unless consumed"]
    pub struct $name<'a, T: MetaAPI + 'a> {
        buffer: $typ,
        // Opaque iteration state owned by gst_buffer_iterate_meta().
        state: glib::ffi::gpointer,
        // API type used for filtering; `Type::INVALID` yields every meta.
        meta_api: glib::Type,
        items: PhantomData<$mtyp>,
    }

    // NOTE(review): these rely on the thread-safety of the underlying
    // GStreamer buffer/meta types — confirm against the C API guarantees.
    unsafe impl<'a, T: MetaAPI> Send for $name<'a, T> { }
    unsafe impl<'a, T: MetaAPI> Sync for $name<'a, T> { }

    impl<'a, T: MetaAPI> fmt::Debug for $name<'a, T> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            f.debug_struct(stringify!($name))
                .field("buffer", &self.buffer)
                .field("state", &self.state)
                .field("meta_api", &self.meta_api)
                .field("items", &self.items)
                .finish()
        }
    }

    impl<'a, T: MetaAPI> $name<'a, T> {
        fn new(buffer: $typ) -> $name<'a, T> {
            skip_assert_initialized!();

            $name {
                buffer,
                state: ptr::null_mut(),
                meta_api: T::meta_api(),
                items: PhantomData,
            }
        }
    }

    #[allow(clippy::redundant_closure_call)]
    impl<'a, T: MetaAPI> Iterator for $name<'a, T> {
        type Item = $mtyp;

        fn next(&mut self) -> Option<Self::Item> {
            loop {
                unsafe {
                    let meta = ffi::gst_buffer_iterate_meta(self.buffer.as_mut_ptr(), &mut self.state);

                    if meta.is_null() {
                        // End of the meta list.
                        return None;
                    } else if self.meta_api == glib::Type::INVALID || glib::Type::from_glib((*(*meta).info).api) == self.meta_api {
                        // Matching (or unfiltered) meta: wrap and yield it.
                        // FIXME: Workaround for a lifetime issue with the mutable iterator only
                        let buffer = $prepare_buffer(self.buffer.as_mut_ptr());
                        let item = $from_ptr(buffer, meta);
                        return Some(item);
                    }
                    // Non-matching meta: keep looping.
                }
            }
        }
    }

    // Once gst_buffer_iterate_meta() returns NULL it keeps returning NULL.
    impl<'a, T: MetaAPI> std::iter::FusedIterator for $name<'a, T> { }
    }
);
1121
// Immutable meta iterator: yields `MetaRef` items from a shared buffer borrow.
define_meta_iter!(
    MetaIter,
    &'a BufferRef,
    MetaRef<'a, T>,
    |buffer: *const ffi::GstBuffer| BufferRef::from_ptr(buffer),
    |buffer, meta| T::from_ptr(buffer, meta as *const <T as MetaAPI>::GstType)
);
// Mutable meta iterator: yields `MetaRefMut` items from an exclusive borrow.
define_meta_iter!(
    MetaIterMut,
    &'a mut BufferRef,
    MetaRefMut<'a, T, crate::meta::Iterated>,
    |buffer: *mut ffi::GstBuffer| BufferRef::from_mut_ptr(buffer),
    |buffer: &'a mut BufferRef, meta| T::from_mut_ptr(buffer, meta as *mut <T as MetaAPI>::GstType)
);
1136
// Memory iterator generator: delegates to the shared fixed-size iterator
// helper, with the length fixed to the buffer's memory count.
macro_rules! define_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $get_item:expr) => {
        crate::utils::define_fixed_size_iter!(
            $name, $typ, $mtyp,
            |buffer: &BufferRef| buffer.n_memory() as usize,
            $get_item
        );
    }
);
1146
// Iterator over borrowed `&MemoryRef`s (backs `iter_memories`).
define_iter!(
    Iter,
    &'a BufferRef,
    &'a MemoryRef,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_ptr(ptr as *const ffi::GstMemory)
    }
);
1156
// Iterator over mutably borrowed `&mut MemoryRef`s (backs `iter_memories_mut`,
// which checks writability before constructing this).
define_iter!(
    IterMut,
    &'a mut BufferRef,
    &'a mut MemoryRef,
    |buffer: &mut BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_mut_ptr(ptr)
    }
);
1166
// `for mem in &buffer` iterates the buffer's memories by reference.
impl<'a> IntoIterator for &'a BufferRef {
    type IntoIter = Iter<'a>;
    type Item = &'a MemoryRef;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_memories()
    }
}
1175
1176impl From<Memory> for Buffer {
1177    fn from(value: Memory) -> Self {
1178        skip_assert_initialized!();
1179
1180        let mut buffer = Buffer::new();
1181        {
1182            let buffer = buffer.get_mut().unwrap();
1183            buffer.append_memory(value);
1184        }
1185        buffer
1186    }
1187}
1188
1189impl<const N: usize> From<[Memory; N]> for Buffer {
1190    fn from(value: [Memory; N]) -> Self {
1191        skip_assert_initialized!();
1192
1193        let mut buffer = Buffer::new();
1194        {
1195            let buffer = buffer.get_mut().unwrap();
1196            value.into_iter().for_each(|b| buffer.append_memory(b));
1197        }
1198        buffer
1199    }
1200}
1201
1202impl std::iter::FromIterator<Memory> for Buffer {
1203    fn from_iter<T: IntoIterator<Item = Memory>>(iter: T) -> Self {
1204        skip_assert_initialized!();
1205        let iter = iter.into_iter();
1206
1207        let mut buffer = Buffer::new();
1208
1209        {
1210            let buffer = buffer.get_mut().unwrap();
1211            iter.for_each(|m| buffer.append_memory(m));
1212        }
1213
1214        buffer
1215    }
1216}
1217
1218impl std::iter::Extend<Memory> for BufferRef {
1219    fn extend<T: IntoIterator<Item = Memory>>(&mut self, iter: T) {
1220        iter.into_iter().for_each(|m| self.append_memory(m));
1221    }
1222}
1223
// Iterator over owned `Memory` values (backs `iter_memories_owned`); each item
// is a full reference obtained via gst_buffer_get_memory().
define_iter!(
    IterOwned,
    &'a BufferRef,
    Memory,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_get_memory(buffer.as_mut_ptr(), idx as u32);
        from_glib_full(ptr)
    }
);
1233
// Delegate to the `BufferRef` formatting (Buffer derefs to BufferRef).
impl fmt::Debug for Buffer {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        BufferRef::fmt(self, f)
    }
}
1239
// Content equality, delegated to the `BufferRef` implementation.
impl PartialEq for Buffer {
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(self, other)
    }
}
1245
// Content equality is a full equivalence relation.
impl Eq for Buffer {}
1247
// Cross-type comparisons so `Buffer == BufferRef` works in both directions.
impl PartialEq<BufferRef> for Buffer {
    fn eq(&self, other: &BufferRef) -> bool {
        BufferRef::eq(self, other)
    }
}
// Mirror of `PartialEq<BufferRef> for Buffer` with arguments swapped.
impl PartialEq<Buffer> for BufferRef {
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(other, self)
    }
}
1258
impl fmt::Debug for BufferRef {
    // Debug output: pointer, timing fields, size/offsets, flags, and the API
    // types of all attached metas.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use std::cell::RefCell;

        use crate::utils::Displayable;

        // Adapter that lets `debug_list` consume an iterator held behind a
        // shared reference (RefCell provides the needed interior mutability).
        struct DebugIter<I>(RefCell<I>);
        impl<I: Iterator> fmt::Debug for DebugIter<I>
        where
            I::Item: fmt::Debug,
        {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_list().entries(&mut *self.0.borrow_mut()).finish()
            }
        }

        f.debug_struct("Buffer")
            .field("ptr", &self.as_ptr())
            .field("pts", &self.pts().display())
            .field("dts", &self.dts().display())
            .field("duration", &self.duration().display())
            .field("size", &self.size())
            .field("offset", &self.offset())
            .field("offset_end", &self.offset_end())
            .field("flags", &self.flags())
            .field(
                "metas",
                &DebugIter(RefCell::new(
                    self.iter_meta::<crate::Meta>().map(|m| m.api()),
                )),
            )
            .finish()
    }
}
1293
1294impl PartialEq for BufferRef {
1295    fn eq(&self, other: &BufferRef) -> bool {
1296        if self.size() != other.size() {
1297            return false;
1298        }
1299
1300        let self_map = self.map_readable();
1301        let other_map = other.map_readable();
1302
1303        match (self_map, other_map) {
1304            (Ok(self_map), Ok(other_map)) => self_map.as_slice().eq(other_map.as_slice()),
1305            _ => false,
1306        }
1307    }
1308}
1309
// Byte-content equality is a full equivalence relation.
impl Eq for BufferRef {}
1311
1312impl<T> BufferMap<'_, T> {
1313    #[doc(alias = "get_size")]
1314    #[inline]
1315    pub fn size(&self) -> usize {
1316        self.map_info.size
1317    }
1318
1319    #[doc(alias = "get_buffer")]
1320    #[inline]
1321    pub fn buffer(&self) -> &BufferRef {
1322        self.buffer
1323    }
1324
1325    #[inline]
1326    pub fn as_slice(&self) -> &[u8] {
1327        if self.map_info.size == 0 {
1328            return &[];
1329        }
1330        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
1331    }
1332}
1333
1334impl BufferMap<'_, Writable> {
1335    #[inline]
1336    pub fn as_mut_slice(&mut self) -> &mut [u8] {
1337        if self.map_info.size == 0 {
1338            return &mut [];
1339        }
1340        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
1341    }
1342}
1343
// Standard slice-access conveniences, all delegating to `as_slice`.
impl<T> AsRef<[u8]> for BufferMap<'_, T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
1350
// Mutable slice access, only available for writable maps.
impl AsMut<[u8]> for BufferMap<'_, Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1357
// Let a map be used directly wherever a `&[u8]` is expected.
impl<T> ops::Deref for BufferMap<'_, T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
1366
// Mutable deref, only for writable maps.
impl ops::DerefMut for BufferMap<'_, Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1373
// Debug shows the underlying buffer, not the (possibly large) byte contents.
impl<T> fmt::Debug for BufferMap<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("BufferMap").field(&self.buffer()).finish()
    }
}
1379
1380impl<'a, T> PartialEq for BufferMap<'a, T> {
1381    fn eq(&self, other: &BufferMap<'a, T>) -> bool {
1382        self.as_slice().eq(other.as_slice())
1383    }
1384}
1385
// Byte-content equality is a full equivalence relation.
impl<T> Eq for BufferMap<'_, T> {}
1387
impl<T> Drop for BufferMap<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` was filled by a successful gst_buffer_map() on
        // this buffer and is unmapped exactly once here.
        unsafe {
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1396
// SAFETY: NOTE(review) — presumably sound because the mapped buffer is
// reference-counted and the mapping is tied to this value's lifetime;
// confirm against GStreamer's thread-safety guarantees for mapped buffers.
unsafe impl<T> Send for BufferMap<'_, T> {}
unsafe impl<T> Sync for BufferMap<'_, T> {}
1399
impl<T> MappedBuffer<T> {
    // The mapped bytes as a slice. Zero-sized maps short-circuit because the
    // data pointer is not guaranteed to be valid for `slice::from_raw_parts`.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    // Size in bytes of the mapped region.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    // Borrow of the owned buffer.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer.as_ref()
    }

    // Unmaps and returns the owned buffer. `ManuallyDrop` suppresses the
    // `Drop` impl (which would unmap a second time); the buffer is moved out
    // with `ptr::read` and then unmapped exactly once here.
    #[inline]
    pub fn into_buffer(self) -> Buffer {
        let mut s = mem::ManuallyDrop::new(self);
        let buffer = unsafe { ptr::read(&s.buffer) };
        unsafe {
            ffi::gst_buffer_unmap(buffer.as_mut_ptr(), &mut s.map_info);
        }

        buffer
    }
}
1432
impl MappedBuffer<Readable> {
    // Extra reference to the buffer. Only offered for readable maps, where
    // sharing the buffer while mapped is fine.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer_owned(&self) -> Buffer {
        self.buffer.clone()
    }
}
1440
1441impl MappedBuffer<Writable> {
1442    #[inline]
1443    pub fn as_mut_slice(&mut self) -> &mut [u8] {
1444        if self.map_info.size == 0 {
1445            return &mut [];
1446        }
1447        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
1448    }
1449}
1450
// Standard slice-access conveniences, all delegating to `as_slice`.
impl<T> AsRef<[u8]> for MappedBuffer<T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
1457
// Mutable slice access, only available for writable maps.
impl AsMut<[u8]> for MappedBuffer<Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1464
// Let a mapped buffer be used directly wherever a `&[u8]` is expected.
impl<T> ops::Deref for MappedBuffer<T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
1473
// Mutable deref, only for writable maps.
impl ops::DerefMut for MappedBuffer<Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1480
impl<T> Drop for MappedBuffer<T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` was filled by a successful gst_buffer_map() on
        // this buffer and is unmapped exactly once (`into_buffer` bypasses
        // this via ManuallyDrop).
        unsafe {
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1489
// Debug shows the underlying buffer, not the (possibly large) byte contents.
impl<T> fmt::Debug for MappedBuffer<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedBuffer").field(&self.buffer()).finish()
    }
}
1495
// Mapped buffers compare by their mapped byte contents.
impl<T> PartialEq for MappedBuffer<T> {
    fn eq(&self, other: &MappedBuffer<T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}
1501
// Byte-content equality is a full equivalence relation.
impl<T> Eq for MappedBuffer<T> {}
1503
// SAFETY: NOTE(review) — presumably sound because the buffer is owned by this
// value and GStreamer buffers are reference-counted; confirm against the C
// API's thread-safety guarantees for mapped buffers.
unsafe impl<T> Send for MappedBuffer<T> {}
unsafe impl<T> Sync for MappedBuffer<T> {}
1506
// Convenience constants mirroring the C library's combined copy-flag values.
#[doc(alias = "GST_BUFFER_COPY_METADATA")]
pub const BUFFER_COPY_METADATA: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_METADATA);
#[doc(alias = "GST_BUFFER_COPY_ALL")]
pub const BUFFER_COPY_ALL: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_ALL);
1513
// Lazy hexdump of a buffer byte range; the actual formatting happens in the
// `Display`/`Debug` impls below. Created by `dump`/`dump_range`.
pub struct Dump<'a> {
    buffer: &'a BufferRef,
    // Byte range to dump; validated when formatting, not on construction.
    start: Bound<usize>,
    end: Bound<usize>,
}
1519
#[must_use = "iterators are lazy and do nothing unless consumed"]
// Walks a buffer's bytes in 16-byte chunks, mapping one memory at a time.
struct BufferChunked16Iter<'a> {
    buffer: &'a BufferRef,
    // Current memory index and the (exclusive) end index.
    mem_idx: usize,
    mem_len: usize,
    // Readable map of the current memory, created lazily in `next`.
    map: Option<crate::memory::MemoryMap<'a, crate::memory::Readable>>,
    // Read position inside the current map.
    map_offset: usize,
    // Total bytes still to produce.
    len: usize,
}
1529
impl Iterator for BufferChunked16Iter<'_> {
    // FIXME: Return a `&'self [u8]` once there's some GAT iterator trait
    type Item = ([u8; 16], usize);

    // Yields up to 16 bytes per step, crossing memory boundaries as needed.
    // The second tuple field is the number of valid bytes in the chunk.
    fn next(&mut self) -> Option<Self::Item> {
        if self.mem_idx == self.mem_len || self.len == 0 {
            return None;
        }

        let mut item = [0u8; 16];
        let mut data = item.as_mut_slice();

        while !data.is_empty() && self.mem_idx < self.mem_len && self.len > 0 {
            // Lazily map the current memory the first time it is read.
            if self.map.is_none() {
                let mem = self.buffer.peek_memory(self.mem_idx);
                self.map = Some(mem.map_readable().expect("failed to map memory"));
            }

            let map = self.map.as_ref().unwrap();
            // NOTE(review): assumes the current memory is non-empty; a
            // zero-sized memory would trip this debug assertion — confirm
            // callers cannot hit that.
            debug_assert!(self.map_offset < map.len());
            // Copy as much as fits: bounded by the map's remaining bytes,
            // the chunk's free space, and the total remaining length.
            let copy = cmp::min(cmp::min(map.len() - self.map_offset, data.len()), self.len);
            data[..copy].copy_from_slice(&map[self.map_offset..][..copy]);
            self.map_offset += copy;
            self.len -= copy;
            data = &mut data[copy..];

            // Exhausted this memory: unmap and advance to the next one.
            if self.map_offset == map.len() {
                self.map = None;
                self.map_offset = 0;
                self.mem_idx += 1;
            }
        }

        let copied = 16 - data.len();
        Some((item, copied))
    }
}
1567
impl Dump<'_> {
    // Shared implementation behind `Display` (plain hex lines) and `Debug`
    // (offset column + hex + ASCII column). Out-of-range or empty ranges
    // produce a descriptive marker string instead of erroring.
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let n_memory = self.buffer.n_memory();
        if n_memory == 0 {
            write!(f, "<empty>")?;
            return Ok(());
        }

        use std::fmt::Write;

        let len = self.buffer.size();

        // Kind of re-implementation of slice indexing to allow handling out of range values better
        // with specific output strings
        let mut start_idx = match self.start {
            Bound::Included(idx) if idx >= len => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            // checked_add guards against overflow of an Excluded(usize::MAX).
            Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= len) => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx,
            Bound::Excluded(idx) => idx + 1,
            Bound::Unbounded => 0,
        };

        let end_idx = match self.end {
            Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > len) => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Excluded(idx) if idx > len => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx + 1,
            Bound::Excluded(idx) => idx,
            Bound::Unbounded => len,
        };

        if start_idx >= end_idx {
            write!(f, "<empty range>")?;
            return Ok(());
        }

        // This can't really fail because of the above
        let (memory_range, skip) = self
            .buffer
            .find_memory(start_idx..)
            .expect("can't find memory");

        let chunks = BufferChunked16Iter {
            buffer: self.buffer,
            mem_idx: memory_range.start,
            mem_len: n_memory,
            map: None,
            map_offset: skip,
            len: end_idx - start_idx,
        };

        if debug {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                // Offset column width chosen to fit the largest offset.
                match end_idx {
                    0x00_00..=0xff_ff => write!(f, "{start_idx:04x}:  ")?,
                    0x01_00_00..=0xff_ff_ff => write!(f, "{start_idx:06x}:  ")?,
                    0x01_00_00_00..=0xff_ff_ff_ff => write!(f, "{start_idx:08x}:  ")?,
                    _ => write!(f, "{start_idx:016x}:  ")?,
                }

                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {v:02x}")?;
                    } else {
                        write!(f, "{v:02x}")?;
                    }
                }

                // Pad a short final line so the ASCII column stays aligned.
                for _ in line.len()..16 {
                    write!(f, "   ")?;
                }
                write!(f, "   ")?;

                // ASCII column: printable characters verbatim, '.' otherwise.
                for v in line {
                    if v.is_ascii() && !v.is_ascii_control() {
                        f.write_char((*v).into())?;
                    } else {
                        f.write_char('.')?;
                    }
                }

                start_idx = start_idx.saturating_add(16);
                // No trailing newline after the last line.
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        } else {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {v:02x}")?;
                    } else {
                        write!(f, "{v:02x}")?;
                    }
                }

                start_idx = start_idx.saturating_add(16);
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        }
    }
}
1691
// Display: plain hex lines without offsets or ASCII column.
impl fmt::Display for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt(f, false)
    }
}
1697
// Debug: full hexdump with offset and ASCII columns.
impl fmt::Debug for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt(f, true)
    }
}
1703
1704#[cfg(test)]
1705mod tests {
1706    use super::*;
1707
1708    #[test]
1709    fn test_fields() {
1710        crate::init().unwrap();
1711
1712        let mut buffer = Buffer::new();
1713
1714        {
1715            let buffer = buffer.get_mut().unwrap();
1716            buffer.set_pts(ClockTime::NSECOND);
1717            buffer.set_dts(2 * ClockTime::NSECOND);
1718            buffer.set_offset(3);
1719            buffer.set_offset_end(4);
1720            buffer.set_duration(Some(5 * ClockTime::NSECOND));
1721        }
1722        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
1723        assert_eq!(buffer.dts(), Some(2 * ClockTime::NSECOND));
1724        assert_eq!(buffer.offset(), 3);
1725        assert_eq!(buffer.offset_end(), 4);
1726        assert_eq!(buffer.duration(), Some(5 * ClockTime::NSECOND));
1727    }
1728
1729    #[test]
1730    fn test_writability() {
1731        crate::init().unwrap();
1732
1733        let mut buffer = Buffer::from_slice(vec![1, 2, 3, 4]);
1734        {
1735            let data = buffer.map_readable().unwrap();
1736            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
1737        }
1738        assert_ne!(buffer.get_mut(), None);
1739        {
1740            let buffer = buffer.get_mut().unwrap();
1741            buffer.set_pts(Some(ClockTime::NSECOND));
1742        }
1743
1744        let mut buffer2 = buffer.clone();
1745        assert_eq!(buffer.get_mut(), None);
1746
1747        assert_eq!(buffer2.as_ptr(), buffer.as_ptr());
1748
1749        {
1750            let buffer2 = buffer2.make_mut();
1751            assert_ne!(buffer2.as_ptr(), buffer.as_ptr());
1752
1753            buffer2.set_pts(Some(2 * ClockTime::NSECOND));
1754
1755            let mut data = buffer2.map_writable().unwrap();
1756            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
1757            data.as_mut_slice()[0] = 0;
1758        }
1759
1760        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
1761        assert_eq!(buffer2.pts(), Some(2 * ClockTime::NSECOND));
1762
1763        {
1764            let data = buffer.map_readable().unwrap();
1765            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
1766
1767            let data = buffer2.map_readable().unwrap();
1768            assert_eq!(data.as_slice(), vec![0, 2, 3, 4].as_slice());
1769        }
1770    }
1771
1772    #[test]
1773    #[allow(clippy::cognitive_complexity)]
1774    fn test_memories() {
1775        crate::init().unwrap();
1776
1777        let mut buffer = Buffer::new();
1778        {
1779            let buffer = buffer.get_mut().unwrap();
1780            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
1781            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
1782            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
1783            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
1784            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 10]));
1785        }
1786
1787        assert!(buffer.is_all_memory_writable());
1788        assert_eq!(buffer.n_memory(), 5);
1789        assert_eq!(buffer.size(), 30);
1790
1791        for i in 0..5 {
1792            {
1793                let mem = buffer.memory(i).unwrap();
1794                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1795                let map = mem.map_readable().unwrap();
1796                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1797            }
1798
1799            {
1800                let mem = buffer.peek_memory(i);
1801                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1802                let map = mem.map_readable().unwrap();
1803                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1804            }
1805
1806            {
1807                let buffer = buffer.get_mut().unwrap();
1808                let mem = buffer.peek_memory_mut(i).unwrap();
1809                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1810                let map = mem.map_writable().unwrap();
1811                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1812            }
1813        }
1814
1815        {
1816            let buffer = buffer.get_mut().unwrap();
1817            let mut last = 0;
1818            for (i, mem) in buffer.iter_memories_mut().unwrap().enumerate() {
1819                {
1820                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1821                    let map = mem.map_readable().unwrap();
1822                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1823                }
1824
1825                {
1826                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1827                    let map = mem.map_readable().unwrap();
1828                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1829                }
1830
1831                {
1832                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1833                    let map = mem.map_writable().unwrap();
1834                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1835                }
1836
1837                last = i;
1838            }
1839
1840            assert_eq!(last, 4);
1841        }
1842
1843        let mut last = 0;
1844        for (i, mem) in buffer.iter_memories().enumerate() {
1845            {
1846                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1847                let map = mem.map_readable().unwrap();
1848                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1849            }
1850
1851            {
1852                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1853                let map = mem.map_readable().unwrap();
1854                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1855            }
1856
1857            last = i;
1858        }
1859
1860        assert_eq!(last, 4);
1861
1862        let mut last = 0;
1863        for (i, mem) in buffer.iter_memories_owned().enumerate() {
1864            {
1865                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1866                let map = mem.map_readable().unwrap();
1867                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1868            }
1869
1870            {
1871                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
1872                let map = mem.map_readable().unwrap();
1873                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
1874            }
1875
1876            last = i;
1877        }
1878
1879        assert_eq!(last, 4);
1880    }
1881
1882    #[test]
1883    fn test_meta_foreach() {
1884        crate::init().unwrap();
1885
1886        let mut buffer = Buffer::new();
1887        {
1888            let buffer = buffer.get_mut().unwrap();
1889            crate::ReferenceTimestampMeta::add(
1890                buffer,
1891                &crate::Caps::builder("foo/bar").build(),
1892                ClockTime::ZERO,
1893                ClockTime::NONE,
1894            );
1895            crate::ReferenceTimestampMeta::add(
1896                buffer,
1897                &crate::Caps::builder("foo/bar").build(),
1898                ClockTime::SECOND,
1899                ClockTime::NONE,
1900            );
1901        }
1902
1903        let mut res = vec![];
1904        buffer.foreach_meta(|meta| {
1905            let meta = meta
1906                .downcast_ref::<crate::ReferenceTimestampMeta>()
1907                .unwrap();
1908            res.push(meta.timestamp());
1909            ControlFlow::Continue(())
1910        });
1911
1912        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);
1913    }
1914
1915    #[test]
1916    fn test_meta_foreach_mut() {
1917        crate::init().unwrap();
1918
1919        let mut buffer = Buffer::new();
1920        {
1921            let buffer = buffer.get_mut().unwrap();
1922            crate::ReferenceTimestampMeta::add(
1923                buffer,
1924                &crate::Caps::builder("foo/bar").build(),
1925                ClockTime::ZERO,
1926                ClockTime::NONE,
1927            );
1928            crate::ReferenceTimestampMeta::add(
1929                buffer,
1930                &crate::Caps::builder("foo/bar").build(),
1931                ClockTime::SECOND,
1932                ClockTime::NONE,
1933            );
1934        }
1935
1936        let mut res = vec![];
1937        buffer.get_mut().unwrap().foreach_meta_mut(|mut meta| {
1938            let meta = meta
1939                .downcast_ref::<crate::ReferenceTimestampMeta>()
1940                .unwrap();
1941            res.push(meta.timestamp());
1942            if meta.timestamp() == ClockTime::SECOND {
1943                ControlFlow::Continue(BufferMetaForeachAction::Remove)
1944            } else {
1945                ControlFlow::Continue(BufferMetaForeachAction::Keep)
1946            }
1947        });
1948
1949        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);
1950
1951        let mut res = vec![];
1952        buffer.foreach_meta(|meta| {
1953            let meta = meta
1954                .downcast_ref::<crate::ReferenceTimestampMeta>()
1955                .unwrap();
1956            res.push(meta.timestamp());
1957            ControlFlow::Continue(())
1958        });
1959
1960        assert_eq!(&[ClockTime::ZERO][..], &res[..]);
1961    }
1962
1963    #[test]
1964    fn test_ptr_eq() {
1965        crate::init().unwrap();
1966
1967        let buffer1 = Buffer::new();
1968        assert!(BufferRef::ptr_eq(&buffer1, &buffer1));
1969        let buffer2 = Buffer::new();
1970        assert!(!BufferRef::ptr_eq(&buffer1, &buffer2));
1971    }
1972
1973    #[test]
1974    fn test_copy_region() {
1975        crate::init().unwrap();
1976
1977        let buffer1 = Buffer::from_mut_slice(vec![0, 1, 2, 3, 4, 5, 6, 7]);
1978        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..).unwrap();
1979        assert_eq!(
1980            buffer2.map_readable().unwrap().as_slice(),
1981            &[0, 1, 2, 3, 4, 5, 6, 7]
1982        );
1983        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..8).unwrap();
1984        assert_eq!(
1985            buffer2.map_readable().unwrap().as_slice(),
1986            &[0, 1, 2, 3, 4, 5, 6, 7]
1987        );
1988        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..=7).unwrap();
1989        assert_eq!(
1990            buffer2.map_readable().unwrap().as_slice(),
1991            &[0, 1, 2, 3, 4, 5, 6, 7]
1992        );
1993        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=7).unwrap();
1994        assert_eq!(
1995            buffer2.map_readable().unwrap().as_slice(),
1996            &[0, 1, 2, 3, 4, 5, 6, 7]
1997        );
1998        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..8).unwrap();
1999        assert_eq!(
2000            buffer2.map_readable().unwrap().as_slice(),
2001            &[0, 1, 2, 3, 4, 5, 6, 7]
2002        );
2003        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..).unwrap();
2004        assert_eq!(
2005            buffer2.map_readable().unwrap().as_slice(),
2006            &[0, 1, 2, 3, 4, 5, 6, 7]
2007        );
2008
2009        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=8).is_err());
2010        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=10).is_err());
2011        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=10).is_err());
2012        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=8).is_err());
2013        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..).is_err());
2014        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..100).is_err());
2015
2016        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..4).unwrap();
2017        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3]);
2018
2019        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..=4).unwrap();
2020        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3, 4]);
2021
2022        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..).unwrap();
2023        assert_eq!(
2024            buffer2.map_readable().unwrap().as_slice(),
2025            &[2, 3, 4, 5, 6, 7]
2026        );
2027        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..2).unwrap();
2028        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1]);
2029        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=2).unwrap();
2030        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1, 2]);
2031    }
2032
2033    #[test]
2034    fn test_dump() {
2035        use std::fmt::Write;
2036
2037        crate::init().unwrap();
2038
2039        let mut s = String::new();
2040        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
2041        write!(&mut s, "{:?}", buffer.dump()).unwrap();
2042        assert_eq!(
2043            s,
2044            "0000:  01 02 03 04                                       ...."
2045        );
2046        s.clear();
2047        write!(&mut s, "{}", buffer.dump()).unwrap();
2048        assert_eq!(s, "01 02 03 04");
2049        s.clear();
2050
2051        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
2052        write!(&mut s, "{:?}", buffer.dump_range(..)).unwrap();
2053        assert_eq!(
2054            s,
2055            "0000:  01 02 03 04                                       ...."
2056        );
2057        s.clear();
2058        write!(&mut s, "{:?}", buffer.dump_range(..2)).unwrap();
2059        assert_eq!(
2060            s,
2061            "0000:  01 02                                             .."
2062        );
2063        s.clear();
2064        write!(&mut s, "{:?}", buffer.dump_range(2..=3)).unwrap();
2065        assert_eq!(
2066            s,
2067            "0002:  03 04                                             .."
2068        );
2069        s.clear();
2070        write!(&mut s, "{:?}", buffer.dump_range(..100)).unwrap();
2071        assert_eq!(s, "<end out of range>",);
2072        s.clear();
2073        write!(&mut s, "{:?}", buffer.dump_range(90..100)).unwrap();
2074        assert_eq!(s, "<start out of range>",);
2075        s.clear();
2076
2077        let buffer = crate::Buffer::from_slice(vec![0; 19]);
2078        write!(&mut s, "{:?}", buffer.dump()).unwrap();
2079        assert_eq!(
2080            s,
2081            "0000:  00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................\n\
2082             0010:  00 00 00                                          ..."
2083        );
2084        s.clear();
2085    }
2086
2087    #[test]
2088    fn test_dump_multi_memories() {
2089        use std::fmt::Write;
2090
2091        crate::init().unwrap();
2092
2093        let mut buffer = crate::Buffer::new();
2094        {
2095            let buffer = buffer.get_mut().unwrap();
2096
2097            let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
2098            buffer.append_memory(mem);
2099
2100            let mem = crate::Memory::from_slice(vec![5, 6, 7, 8]);
2101            buffer.append_memory(mem);
2102
2103            let mem = crate::Memory::from_slice(vec![9, 10, 11, 12]);
2104            buffer.append_memory(mem);
2105
2106            let mem = crate::Memory::from_slice(vec![13, 14, 15, 16]);
2107            buffer.append_memory(mem);
2108
2109            let mem = crate::Memory::from_slice(vec![17, 18, 19]);
2110            buffer.append_memory(mem);
2111        }
2112
2113        let mut s = String::new();
2114        write!(&mut s, "{:?}", buffer.dump()).unwrap();
2115        assert_eq!(
2116            s,
2117            "0000:  01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10   ................\n\
2118             0010:  11 12 13                                          ..."
2119        );
2120        s.clear();
2121        write!(&mut s, "{}", buffer.dump()).unwrap();
2122        assert_eq!(
2123            s,
2124            "01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10\n11 12 13"
2125        );
2126        s.clear();
2127
2128        write!(&mut s, "{:?}", buffer.dump_range(2..)).unwrap();
2129        assert_eq!(
2130            s,
2131            "0002:  03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 11 12   ................\n\
2132             0012:  13                                                ."
2133        );
2134        s.clear();
2135
2136        write!(&mut s, "{:?}", buffer.dump_range(14..17)).unwrap();
2137        assert_eq!(
2138            s,
2139            "000e:  0f 10 11                                          ..."
2140        );
2141        s.clear();
2142
2143        write!(&mut s, "{:?}", buffer.dump_range(14..20)).unwrap();
2144        assert_eq!(s, "<end out of range>");
2145        s.clear();
2146
2147        #[allow(clippy::reversed_empty_ranges)]
2148        {
2149            write!(&mut s, "{:?}", buffer.dump_range(23..20)).unwrap();
2150            assert_eq!(s, "<start out of range>");
2151            s.clear();
2152        }
2153    }
2154
2155    #[test]
2156    fn test_buffer_wrap_vec_u8() {
2157        crate::init().unwrap();
2158
2159        let data = vec![1u8, 2, 3, 4, 5];
2160        let expected = data.clone();
2161
2162        let buf = Buffer::from_slice(data);
2163        assert_eq!(buf.size(), 5);
2164        assert_eq!(buf.n_memory(), 1);
2165
2166        let converted: Vec<u8> = buf.try_into_inner().unwrap();
2167        assert_eq!(converted, expected);
2168    }
2169
2170    #[test]
2171    fn test_buffer_into_wrong_type() {
2172        crate::init().unwrap();
2173
2174        let buf = Buffer::from_slice(vec![1u8, 2, 3, 4, 5]);
2175        assert_eq!(buf.size(), 5);
2176        assert_eq!(buf.n_memory(), 1);
2177
2178        let res = buf.try_into_inner::<Vec<u32>>();
2179        assert!(res.is_err());
2180        let (_buf, err) = res.err().unwrap();
2181        assert!(matches!(
2182            err,
2183            crate::MemoryIntoInnerError::TypeMismatch { .. }
2184        ));
2185    }
2186
2187    #[test]
2188    fn test_buffer_modify_and_extract() {
2189        crate::init().unwrap();
2190
2191        let data = vec![0u8; 10];
2192        let mut buf = Buffer::from_mut_slice(data);
2193
2194        // Modify the buffer
2195        {
2196            let bufref = buf.make_mut();
2197            let mut mapped = bufref.map_writable().unwrap();
2198            let slice = mapped.as_mut_slice();
2199            for (i, byte) in slice.iter_mut().enumerate() {
2200                *byte = (i * 2) as u8;
2201            }
2202        }
2203
2204        // Extract and verify modifications
2205        let extracted: Vec<u8> = buf.try_into_inner().unwrap();
2206        assert_eq!(extracted, vec![0, 2, 4, 6, 8, 10, 12, 14, 16, 18]);
2207    }
2208}