1use std::{
4 cmp, fmt,
5 marker::PhantomData,
6 mem, ops,
7 ops::{Bound, ControlFlow, Range, RangeBounds},
8 ptr, slice,
9};
10
11use glib::translate::*;
12
13use crate::{
14 ffi, meta::*, BufferCursor, BufferFlags, BufferRefCursor, ClockTime, Memory, MemoryRef,
15};
16
// Zero-variant marker types used as the `T` parameter of `BufferMap<T>`,
// `MappedBuffer<T>` and the cursor types to encode read-only vs. read-write
// access at compile time. They are never instantiated.
pub enum Readable {}
pub enum Writable {}

/// Decision returned by the closure passed to `BufferRef::foreach_meta_mut`:
/// keep the currently visited meta on the buffer, or remove it.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum BufferMetaForeachAction {
    Keep,
    Remove,
}
25
// Generates the refcounted `Buffer` (owned) and `BufferRef` (borrowed) wrapper
// types around `GstBuffer`, including `as_ptr`/`as_mut_ptr`/`get_mut` etc.
mini_object_wrapper!(Buffer, BufferRef, ffi::GstBuffer, || {
    ffi::gst_buffer_get_type()
});
29
// RAII guard for a buffer mapping borrowed from a `BufferRef`; the mapping is
// released on drop. `T` is `Readable` or `Writable`.
pub struct BufferMap<'a, T> {
    buffer: &'a BufferRef,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}

// Like `BufferMap`, but owns the underlying `Buffer` so it is not tied to a
// borrow; the mapping is released on drop or via `into_buffer()`.
pub struct MappedBuffer<T> {
    buffer: Buffer,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
41
42impl Buffer {
    /// Creates a new, empty buffer without any memories.
    #[doc(alias = "gst_buffer_new")]
    #[inline]
    pub fn new() -> Self {
        assert_initialized_main_thread!();

        unsafe { from_glib_full(ffi::gst_buffer_new()) }
    }
55
    /// Creates a new buffer with a single memory of `size` bytes from the
    /// default allocator, or an error if the allocation fails.
    #[doc(alias = "gst_buffer_new_allocate")]
    #[doc(alias = "gst_buffer_new_and_alloc")]
    #[inline]
    pub fn with_size(size: usize) -> Result<Self, glib::BoolError> {
        assert_initialized_main_thread!();

        unsafe {
            // Null allocator / null params select the default allocator with
            // default allocation parameters.
            Option::<_>::from_glib_full(ffi::gst_buffer_new_allocate(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
            .ok_or_else(|| glib::bool_error!("Failed to allocate buffer"))
        }
    }
71
72 #[doc(alias = "gst_buffer_new_wrapped")]
73 #[doc(alias = "gst_buffer_new_wrapped_full")]
74 #[inline]
75 pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
76 assert_initialized_main_thread!();
77
78 let mem = Memory::from_mut_slice(slice);
79 let mut buffer = Buffer::new();
80 {
81 let buffer = buffer.get_mut().unwrap();
82 buffer.append_memory(mem);
83 buffer.unset_flags(BufferFlags::TAG_MEMORY);
84 }
85
86 buffer
87 }
88
89 #[doc(alias = "gst_buffer_new_wrapped")]
90 #[doc(alias = "gst_buffer_new_wrapped_full")]
91 #[inline]
92 pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
93 assert_initialized_main_thread!();
94
95 let mem = Memory::from_slice(slice);
96 let mut buffer = Buffer::new();
97 {
98 let buffer = buffer.get_mut().unwrap();
99 buffer.append_memory(mem);
100 buffer.unset_flags(BufferFlags::TAG_MEMORY);
101 }
102
103 buffer
104 }
105
    /// Maps the buffer for reading, consuming it.
    ///
    /// On failure the unmapped buffer is handed back in `Err` so the caller
    /// retains ownership.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_readable(self) -> Result<MappedBuffer<Readable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // SAFETY: gst_buffer_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }
148
    /// Maps the buffer for reading and writing, consuming it.
    ///
    /// Fails (returning the buffer in `Err`) if the buffer is not writable,
    /// e.g. because it is shared.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_writable(self) -> Result<MappedBuffer<Writable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // SAFETY: gst_buffer_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }
170
    /// Consumes the buffer and returns a read cursor over its bytes.
    #[inline]
    pub fn into_cursor_readable(self) -> BufferCursor<Readable> {
        BufferCursor::new_readable(self)
    }

    /// Consumes the buffer and returns a write cursor; fails if the buffer's
    /// memories are not all writable.
    #[inline]
    pub fn into_cursor_writable(self) -> Result<BufferCursor<Writable>, glib::BoolError> {
        BufferCursor::new_writable(self)
    }
180
    /// Appends all memories of `other` to `self`, consuming `other`.
    #[doc(alias = "gst_buffer_append")]
    pub fn append(&mut self, other: Self) {
        unsafe {
            // gst_buffer_append() may return a different buffer pointer,
            // so the wrapped pointer must be replaced with the result.
            let ptr = ffi::gst_buffer_append(self.as_mut_ptr(), other.into_glib_ptr());
            self.replace_ptr(ptr);
        }
    }
197}
198
199impl Default for Buffer {
200 fn default() -> Self {
201 Self::new()
202 }
203}
204
205impl BufferRef {
    /// Maps the whole buffer for reading; the returned guard unmaps on drop.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn map_readable(&self) -> Result<BufferMap<Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_buffer_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // SAFETY: gst_buffer_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable"))
            }
        }
    }
224
    /// Maps the whole buffer for reading and writing; the returned guard
    /// unmaps on drop. Fails if the buffer's memories are not writable.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn map_writable(&mut self) -> Result<BufferMap<Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // SAFETY: gst_buffer_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable"))
            }
        }
    }
246
    // Converts a Rust range over memory indices into the `(idx, length)` pair
    // the `gst_buffer_*_range()` C functions expect, validating both bounds
    // against the current number of memories.
    fn memory_range_into_idx_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(u32, i32), glib::BoolError> {
        let n_memory = self.n_memory();
        debug_assert!(n_memory <= u32::MAX as usize);

        let start_idx = match range.start_bound() {
            // An inclusive start must reference an existing memory.
            ops::Bound::Included(idx) if *idx >= n_memory => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            // Excluded start means the range begins at `idx + 1`; the
            // checked_add also guards against usize overflow.
            ops::Bound::Excluded(idx) if idx.checked_add(1).map_or(true, |idx| idx >= n_memory) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // Inclusive end `idx` translates to exclusive end `idx + 1`,
            // which may be at most `n_memory`.
            ops::Bound::Included(idx) if idx.checked_add(1).map_or(true, |idx| idx > n_memory) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > n_memory => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => n_memory,
        };

        Ok((
            start_idx as u32,
            // The C API takes the length as a signed integer.
            i32::try_from(end_idx - start_idx).map_err(|_| glib::bool_error!("Too large range"))?,
        ))
    }
283
    /// Maps the given range of memories for reading; the guard unmaps on drop.
    #[doc(alias = "gst_buffer_map_range")]
    #[inline]
    pub fn map_range_readable(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<Readable>, glib::BoolError> {
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // SAFETY: gst_buffer_map_range() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable"))
            }
        }
    }
311
    /// Maps the given range of memories for reading and writing; the guard
    /// unmaps on drop. Fails if the memories in range are not writable.
    #[doc(alias = "gst_buffer_map_range")]
    #[inline]
    pub fn map_range_writable(
        &mut self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<Writable>, glib::BoolError> {
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // SAFETY: gst_buffer_map_range() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable"))
            }
        }
    }
339
    // Converts a Rust range over byte positions into the `(offset, size)`
    // pair the byte-oriented C functions expect, validating both bounds
    // against the buffer's total size.
    pub(crate) fn byte_range_into_offset_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(usize, usize), glib::BoolError> {
        let size = self.size();

        let start_idx = match range.start_bound() {
            // An inclusive start must point inside the buffer.
            ops::Bound::Included(idx) if *idx >= size => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            // Excluded start begins at `idx + 1`; checked_add also guards
            // against usize overflow.
            ops::Bound::Excluded(idx) if idx.checked_add(1).map_or(true, |idx| idx >= size) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // Inclusive end `idx` translates to exclusive end `idx + 1`,
            // which may be at most `size`.
            ops::Bound::Included(idx) if idx.checked_add(1).map_or(true, |idx| idx > size) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > size => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => size,
        };

        Ok((start_idx, end_idx - start_idx))
    }
372
    /// Copies the given byte range of the buffer into a new buffer; `flags`
    /// selects which parts (memory, metadata, timestamps, ...) are copied.
    #[doc(alias = "gst_buffer_copy_region")]
    pub fn copy_region(
        &self,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<Buffer, glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_copy_region(
                self.as_mut_ptr(),
                flags.into_glib(),
                offset,
                size,
            ))
            .ok_or_else(|| glib::bool_error!("Failed to copy region of buffer"))
        }
    }
391
    /// Copies the given byte range of this buffer into `dest`; `flags`
    /// selects which parts (memory, metadata, timestamps, ...) are copied.
    #[doc(alias = "gst_buffer_copy_into")]
    pub fn copy_into(
        &self,
        dest: &mut BufferRef,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<(), glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            glib::result_from_gboolean!(
                ffi::gst_buffer_copy_into(
                    dest.as_mut_ptr(),
                    self.as_mut_ptr(),
                    flags.into_glib(),
                    offset,
                    size,
                ),
                "Failed to copy into destination buffer",
            )
        }
    }
414
    /// Copies `slice` into the buffer starting at byte `offset`.
    ///
    /// On a short write, returns the number of bytes actually copied in
    /// `Err`. Panics if `offset + slice.len()` exceeds the buffer's maxsize.
    #[doc(alias = "gst_buffer_fill")]
    pub fn copy_from_slice(&mut self, offset: usize, slice: &[u8]) -> Result<(), usize> {
        let maxsize = self.maxsize();
        let size = slice.len();

        // Overflow-safe form of `offset + size <= maxsize`.
        assert!(maxsize >= offset && maxsize - offset >= size);

        let copied = unsafe {
            let src = slice.as_ptr();
            ffi::gst_buffer_fill(
                self.as_mut_ptr(),
                offset,
                src as glib::ffi::gconstpointer,
                size,
            )
        };

        if copied == size {
            Ok(())
        } else {
            Err(copied)
        }
    }
438
    /// Extracts buffer bytes starting at `offset` into `slice`.
    ///
    /// On a short read, returns the number of bytes actually copied in
    /// `Err`. Panics if `offset + slice.len()` exceeds the buffer's size.
    #[doc(alias = "gst_buffer_extract")]
    pub fn copy_to_slice(&self, offset: usize, slice: &mut [u8]) -> Result<(), usize> {
        let maxsize = self.size();
        let size = slice.len();

        // Overflow-safe form of `offset + size <= self.size()`.
        assert!(maxsize >= offset && maxsize - offset >= size);

        let copied = unsafe {
            let dest = slice.as_mut_ptr();
            ffi::gst_buffer_extract(self.as_mut_ptr(), offset, dest as glib::ffi::gpointer, size)
        };

        if copied == size {
            Ok(())
        } else {
            Err(copied)
        }
    }
457
458 #[doc(alias = "gst_buffer_copy_deep")]
459 pub fn copy_deep(&self) -> Result<Buffer, glib::BoolError> {
460 unsafe {
461 Option::<_>::from_glib_full(ffi::gst_buffer_copy_deep(self.as_ptr()))
462 .ok_or_else(|| glib::bool_error!("Failed to deep copy buffer"))
463 }
464 }
465
    /// Total number of bytes in the buffer (sum over all memories).
    #[doc(alias = "get_size")]
    #[doc(alias = "gst_buffer_get_size")]
    pub fn size(&self) -> usize {
        unsafe { ffi::gst_buffer_get_size(self.as_mut_ptr()) }
    }

    /// Maximum number of bytes the buffer can hold without reallocating,
    /// queried over all memories.
    #[doc(alias = "get_maxsize")]
    pub fn maxsize(&self) -> usize {
        unsafe {
            let mut maxsize = mem::MaybeUninit::uninit();
            // Offset output is not needed, only maxsize.
            ffi::gst_buffer_get_sizes_range(
                self.as_mut_ptr(),
                0,
                -1,
                ptr::null_mut(),
                maxsize.as_mut_ptr(),
            );

            maxsize.assume_init()
        }
    }
487
    /// Shrinks or grows the buffer's logical size; panics if `size` exceeds
    /// the buffer's maxsize.
    #[doc(alias = "gst_buffer_set_size")]
    pub fn set_size(&mut self, size: usize) {
        assert!(self.maxsize() >= size);

        unsafe {
            ffi::gst_buffer_set_size(self.as_mut_ptr(), size as isize);
        }
    }
496
    /// Media-specific offset of this buffer (e.g. sample or byte position).
    #[doc(alias = "get_offset")]
    #[doc(alias = "GST_BUFFER_OFFSET")]
    #[inline]
    pub fn offset(&self) -> u64 {
        self.0.offset
    }

    #[inline]
    pub fn set_offset(&mut self, offset: u64) {
        self.0.offset = offset;
    }

    /// Media-specific offset of the last byte/sample in this buffer.
    #[doc(alias = "get_offset_end")]
    #[doc(alias = "GST_BUFFER_OFFSET_END")]
    #[inline]
    pub fn offset_end(&self) -> u64 {
        self.0.offset_end
    }

    #[inline]
    pub fn set_offset_end(&mut self, offset_end: u64) {
        self.0.offset_end = offset_end;
    }

    /// Presentation timestamp, or `None` if unset (CLOCK_TIME_NONE).
    #[doc(alias = "get_pts")]
    #[doc(alias = "GST_BUFFER_PTS")]
    #[inline]
    pub fn pts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.pts) }
    }

    #[inline]
    pub fn set_pts(&mut self, pts: impl Into<Option<ClockTime>>) {
        self.0.pts = pts.into().into_glib();
    }

    /// Decoding timestamp, or `None` if unset (CLOCK_TIME_NONE).
    #[doc(alias = "get_dts")]
    #[doc(alias = "GST_BUFFER_DTS")]
    #[inline]
    pub fn dts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.dts) }
    }

    #[inline]
    pub fn set_dts(&mut self, dts: impl Into<Option<ClockTime>>) {
        self.0.dts = dts.into().into_glib();
    }
544
545 #[doc(alias = "get_dts_or_pts")]
546 #[doc(alias = "GST_BUFFER_DTS_OR_PTS")]
547 #[inline]
548 pub fn dts_or_pts(&self) -> Option<ClockTime> {
549 let val = self.dts();
550 if val.is_none() {
551 self.pts()
552 } else {
553 val
554 }
555 }
556
    /// Duration of the buffer, or `None` if unset (CLOCK_TIME_NONE).
    #[doc(alias = "get_duration")]
    #[doc(alias = "GST_BUFFER_DURATION")]
    #[inline]
    pub fn duration(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.duration) }
    }

    #[inline]
    pub fn set_duration(&mut self, duration: impl Into<Option<ClockTime>>) {
        self.0.duration = duration.into().into_glib();
    }

    /// The buffer flags; unknown bits from the C side are silently dropped.
    #[doc(alias = "get_flags")]
    #[doc(alias = "GST_BUFFER_FLAGS")]
    #[inline]
    pub fn flags(&self) -> BufferFlags {
        BufferFlags::from_bits_truncate(self.0.mini_object.flags)
    }

    /// Sets the given flags in addition to the currently set ones.
    #[doc(alias = "GST_BUFFER_FLAG_SET")]
    #[inline]
    pub fn set_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags |= flags.bits();
    }

    /// Clears the given flags, leaving all others untouched.
    #[doc(alias = "GST_BUFFER_FLAG_UNSET")]
    #[inline]
    pub fn unset_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags &= !flags.bits();
    }
587
    /// Returns the first meta of type `T` attached to the buffer, if any.
    #[doc(alias = "get_meta")]
    #[doc(alias = "gst_buffer_get_meta")]
    #[inline]
    pub fn meta<T: MetaAPI>(&self) -> Option<MetaRef<T>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_ptr(self, meta as *const <T as MetaAPI>::GstType))
            }
        }
    }

    /// Returns the first meta of type `T` mutably, if any.
    #[doc(alias = "get_meta_mut")]
    #[inline]
    pub fn meta_mut<T: MetaAPI>(&mut self) -> Option<MetaRefMut<T, crate::meta::Standalone>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_mut_ptr(self, meta as *mut <T as MetaAPI>::GstType))
            }
        }
    }
614
    /// Iterator over all metas of type `T` attached to the buffer.
    pub fn iter_meta<T: MetaAPI>(&self) -> MetaIter<T> {
        MetaIter::new(self)
    }

    /// Mutable iterator over all metas of type `T` attached to the buffer.
    pub fn iter_meta_mut<T: MetaAPI>(&mut self) -> MetaIterMut<T> {
        MetaIterMut::new(self)
    }
622
    /// Calls `func` for every meta on the buffer until it returns
    /// `ControlFlow::Break` or all metas have been visited.
    ///
    /// Returns `true` if iteration completed without breaking.
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(&self, func: F) -> bool {
        unsafe extern "C" fn trampoline<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            // user_data is a pointer to the caller's closure, set up below.
            let func = user_data as *mut F;
            let res = (*func)(Meta::from_ptr(BufferRef::from_ptr(buffer), *meta));

            // TRUE continues iteration, FALSE stops it.
            matches!(res, ControlFlow::Continue(_)).into_glib()
        }

        unsafe {
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }
647
    /// Calls `func` for every meta on the buffer, allowing each visited meta
    /// to be kept or removed, until the closure breaks or all metas were
    /// visited.
    ///
    /// Returns `true` if iteration completed without breaking.
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta_mut<
        F: FnMut(
            MetaRefMut<Meta, crate::meta::Iterated>,
        ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
    >(
        &mut self,
        func: F,
    ) -> bool {
        unsafe extern "C" fn trampoline<
            F: FnMut(
                MetaRefMut<Meta, crate::meta::Iterated>,
            ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
        >(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            // user_data is a pointer to the caller's closure, set up below.
            let func = user_data as *mut F;
            let res = (*func)(Meta::from_mut_ptr(BufferRef::from_mut_ptr(buffer), *meta));

            let (cont, action) = match res {
                ControlFlow::Continue(action) => (true, action),
                ControlFlow::Break(action) => (false, action),
            };

            // The C API removes the current meta when the out-pointer is set
            // to NULL by the callback.
            if action == BufferMetaForeachAction::Remove {
                *meta = ptr::null_mut();
            }

            cont.into_glib()
        }

        unsafe {
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }
692
    /// Appends `mem` as the last memory of the buffer, taking ownership.
    #[doc(alias = "gst_buffer_append_memory")]
    pub fn append_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_append_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }
697
    /// Finds the memories spanning the given byte range.
    ///
    /// On success returns the memory index range covering the bytes and the
    /// number of bytes to skip in the first memory.
    #[doc(alias = "gst_buffer_find_memory")]
    pub fn find_memory(&self, range: impl RangeBounds<usize>) -> Option<(Range<usize>, usize)> {
        let (offset, size) = self.byte_range_into_offset_len(range).ok()?;

        unsafe {
            let mut idx = mem::MaybeUninit::uninit();
            let mut length = mem::MaybeUninit::uninit();
            let mut skip = mem::MaybeUninit::uninit();

            let res = from_glib(ffi::gst_buffer_find_memory(
                self.as_mut_ptr(),
                offset,
                size,
                idx.as_mut_ptr(),
                length.as_mut_ptr(),
                skip.as_mut_ptr(),
            ));

            if res {
                // SAFETY: all three out-parameters are initialized on success.
                let idx = idx.assume_init() as usize;
                let length = length.assume_init() as usize;
                let skip = skip.assume_init();
                Some((idx..(idx + length), skip))
            } else {
                None
            }
        }
    }
726
    /// Merges all memories of the buffer into a single `Memory`, or `None`
    /// on failure.
    #[doc(alias = "get_all_memory")]
    #[doc(alias = "gst_buffer_get_all_memory")]
    pub fn all_memory(&self) -> Option<Memory> {
        unsafe { from_glib_full(ffi::gst_buffer_get_all_memory(self.as_mut_ptr())) }
    }

    /// The global maximum number of memories a buffer can hold.
    #[doc(alias = "get_max_memory")]
    #[doc(alias = "gst_buffer_get_max_memory")]
    pub fn max_memory() -> usize {
        unsafe { ffi::gst_buffer_get_max_memory() as usize }
    }
738
739 #[doc(alias = "get_memory")]
740 #[doc(alias = "gst_buffer_get_memory")]
741 pub fn memory(&self, idx: usize) -> Option<Memory> {
742 if idx >= self.n_memory() {
743 return None;
744 }
745 unsafe {
746 let res = ffi::gst_buffer_get_memory(self.as_mut_ptr(), idx as u32);
747 Some(from_glib_full(res))
748 }
749 }
750
    /// Merges the memories in the given index range into a single `Memory`,
    /// or `None` if the range is invalid or the merge fails.
    #[doc(alias = "get_memory_range")]
    #[doc(alias = "gst_buffer_get_memory_range")]
    pub fn memory_range(&self, range: impl RangeBounds<usize>) -> Option<Memory> {
        let (idx, len) = self.memory_range_into_idx_len(range).ok()?;

        unsafe {
            let res = ffi::gst_buffer_get_memory_range(self.as_mut_ptr(), idx, len);
            from_glib_full(res)
        }
    }
761
762 #[doc(alias = "gst_buffer_insert_memory")]
763 pub fn insert_memory(&mut self, idx: impl Into<Option<usize>>, mem: Memory) {
764 let n_memory = self.n_memory();
765 let idx = idx.into();
766 let idx = idx.unwrap_or(n_memory);
767 assert!(idx <= self.n_memory());
768 unsafe { ffi::gst_buffer_insert_memory(self.as_mut_ptr(), idx as i32, mem.into_glib_ptr()) }
769 }
770
    /// Whether every memory of the buffer is writable.
    #[doc(alias = "gst_buffer_is_all_memory_writable")]
    pub fn is_all_memory_writable(&self) -> bool {
        unsafe { from_glib(ffi::gst_buffer_is_all_memory_writable(self.as_mut_ptr())) }
    }

    /// Whether the memories in the given index range are writable; an
    /// invalid range counts as not writable.
    #[doc(alias = "gst_buffer_is_memory_range_writable")]
    pub fn is_memory_range_writable(&self, range: impl RangeBounds<usize>) -> bool {
        let Some((idx, len)) = self.memory_range_into_idx_len(range).ok() else {
            return false;
        };

        unsafe {
            from_glib(ffi::gst_buffer_is_memory_range_writable(
                self.as_mut_ptr(),
                idx,
                len,
            ))
        }
    }
790
791 #[doc(alias = "gst_buffer_n_memory")]
792 pub fn n_memory(&self) -> usize {
793 unsafe { ffi::gst_buffer_n_memory(self.as_ptr() as *mut _) as usize }
794 }
795
    /// Borrows the memory at `idx` without copying; panics if out of bounds.
    #[doc(alias = "gst_buffer_peek_memory")]
    pub fn peek_memory(&self, idx: usize) -> &MemoryRef {
        assert!(idx < self.n_memory());
        unsafe { MemoryRef::from_ptr(ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32)) }
    }

    /// Mutably borrows the memory at `idx`; fails if that memory is shared
    /// (not writable). Panics if out of bounds.
    #[doc(alias = "gst_buffer_peek_memory")]
    pub fn peek_memory_mut(&mut self, idx: usize) -> Result<&mut MemoryRef, glib::BoolError> {
        assert!(idx < self.n_memory());
        unsafe {
            let mem = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32);
            // Handing out &mut to a shared mini object would be unsound.
            if ffi::gst_mini_object_is_writable(mem as *mut _) == glib::ffi::GFALSE {
                Err(glib::bool_error!("Memory not writable"))
            } else {
                Ok(MemoryRef::from_mut_ptr(mem))
            }
        }
    }
814
    /// Inserts `mem` as the first memory of the buffer, taking ownership.
    #[doc(alias = "gst_buffer_prepend_memory")]
    pub fn prepend_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_prepend_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Removes every memory from the buffer.
    #[doc(alias = "gst_buffer_remove_all_memory")]
    pub fn remove_all_memory(&mut self) {
        unsafe { ffi::gst_buffer_remove_all_memory(self.as_mut_ptr()) }
    }

    /// Removes the memory at `idx`; panics if out of bounds.
    #[doc(alias = "gst_buffer_remove_memory")]
    pub fn remove_memory(&mut self, idx: usize) {
        assert!(idx < self.n_memory());
        unsafe { ffi::gst_buffer_remove_memory(self.as_mut_ptr(), idx as u32) }
    }

    /// Removes the memories in the given index range; panics on an invalid
    /// range.
    #[doc(alias = "gst_buffer_remove_memory_range")]
    pub fn remove_memory_range(&mut self, range: impl RangeBounds<usize>) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe { ffi::gst_buffer_remove_memory_range(self.as_mut_ptr(), idx, len) }
    }
839
    /// Replaces all memories of the buffer with `mem`.
    #[doc(alias = "gst_buffer_replace_all_memory")]
    pub fn replace_all_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_replace_all_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Replaces the memory at `idx` with `mem`; panics if out of bounds.
    #[doc(alias = "gst_buffer_replace_memory")]
    pub fn replace_memory(&mut self, idx: usize, mem: Memory) {
        assert!(idx < self.n_memory());
        unsafe {
            ffi::gst_buffer_replace_memory(self.as_mut_ptr(), idx as u32, mem.into_glib_ptr())
        }
    }

    /// Replaces the memories in the given index range with `mem`; panics on
    /// an invalid range.
    #[doc(alias = "gst_buffer_replace_memory_range")]
    pub fn replace_memory_range(&mut self, range: impl RangeBounds<usize>, mem: Memory) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe {
            ffi::gst_buffer_replace_memory_range(self.as_mut_ptr(), idx, len, mem.into_glib_ptr())
        }
    }
863
    /// Iterator over `&MemoryRef` for each memory of the buffer.
    pub fn iter_memories(&self) -> Iter {
        Iter::new(self)
    }

    /// Mutable iterator over the buffer's memories; fails unless every
    /// memory is writable (handing out `&mut` to shared memory would be
    /// unsound).
    pub fn iter_memories_mut(&mut self) -> Result<IterMut, glib::BoolError> {
        if !self.is_all_memory_writable() {
            Err(glib::bool_error!("Not all memory are writable"))
        } else {
            Ok(IterMut::new(self))
        }
    }

    /// Iterator yielding an owned `Memory` (extra ref) for each memory.
    pub fn iter_memories_owned(&self) -> IterOwned {
        IterOwned::new(self)
    }
879
    /// Read cursor over the buffer's bytes, borrowing the buffer.
    pub fn as_cursor_readable(&self) -> BufferRefCursor<&BufferRef> {
        BufferRefCursor::new_readable(self)
    }

    /// Write cursor over the buffer's bytes; fails unless all memories are
    /// writable.
    pub fn as_cursor_writable(
        &mut self,
    ) -> Result<BufferRefCursor<&mut BufferRef>, glib::BoolError> {
        BufferRefCursor::new_writable(self)
    }
889
    /// Returns a helper that hex-dumps the whole buffer when formatted.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump(&self) -> Dump {
        Dump {
            buffer: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    /// Returns a helper that hex-dumps the given byte range when formatted.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump {
        Dump {
            buffer: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
907}
908
// Generates a meta iterator type (`MetaIter` / `MetaIterMut`) over a buffer's
// metas of a given `MetaAPI` type. `$prepare_buffer` re-wraps the raw buffer
// pointer and `$from_ptr` builds the yielded item from it and the raw meta.
macro_rules! define_meta_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $prepare_buffer:expr, $from_ptr:expr) => {
    #[must_use = "iterators are lazy and do nothing unless consumed"]
    pub struct $name<'a, T: MetaAPI + 'a> {
        buffer: $typ,
        // Opaque iteration state owned by gst_buffer_iterate_meta().
        state: glib::ffi::gpointer,
        meta_api: glib::Type,
        items: PhantomData<$mtyp>,
    }

    // SAFETY: the iterator only holds a buffer reference and plain-old-data
    // iteration state; sending/sharing it follows the buffer's own rules.
    unsafe impl<'a, T: MetaAPI> Send for $name<'a, T> { }
    unsafe impl<'a, T: MetaAPI> Sync for $name<'a, T> { }

    impl<'a, T: MetaAPI> fmt::Debug for $name<'a, T> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            f.debug_struct(stringify!($name))
                .field("buffer", &self.buffer)
                .field("state", &self.state)
                .field("meta_api", &self.meta_api)
                .field("items", &self.items)
                .finish()
        }
    }

    impl<'a, T: MetaAPI> $name<'a, T> {
        fn new(buffer: $typ) -> $name<'a, T> {
            skip_assert_initialized!();

            $name {
                buffer,
                state: ptr::null_mut(),
                meta_api: T::meta_api(),
                items: PhantomData,
            }
        }
    }

    #[allow(clippy::redundant_closure_call)]
    impl<'a, T: MetaAPI> Iterator for $name<'a, T> {
        type Item = $mtyp;

        fn next(&mut self) -> Option<Self::Item> {
            loop {
                unsafe {
                    let meta = ffi::gst_buffer_iterate_meta(self.buffer.as_mut_ptr(), &mut self.state);

                    if meta.is_null() {
                        return None;
                    } else if self.meta_api == glib::Type::INVALID || glib::Type::from_glib((*(*meta).info).api) == self.meta_api {
                        // Either all metas were requested (INVALID) or this
                        // meta matches the requested API; yield it.
                        let buffer = $prepare_buffer(self.buffer.as_mut_ptr());
                        let item = $from_ptr(buffer, meta);
                        return Some(item);
                    }
                }
            }
        }
    }

    impl<'a, T: MetaAPI> std::iter::FusedIterator for $name<'a, T> { }
    }
);
971
// Immutable meta iterator yielding `MetaRef`s.
define_meta_iter!(
    MetaIter,
    &'a BufferRef,
    MetaRef<'a, T>,
    |buffer: *const ffi::GstBuffer| BufferRef::from_ptr(buffer),
    |buffer, meta| T::from_ptr(buffer, meta as *const <T as MetaAPI>::GstType)
);
// Mutable meta iterator yielding `MetaRefMut`s in the `Iterated` mode.
define_meta_iter!(
    MetaIterMut,
    &'a mut BufferRef,
    MetaRefMut<'a, T, crate::meta::Iterated>,
    |buffer: *mut ffi::GstBuffer| BufferRef::from_mut_ptr(buffer),
    |buffer: &'a mut BufferRef, meta| T::from_mut_ptr(buffer, meta as *mut <T as MetaAPI>::GstType)
);
986
// Thin wrapper over the shared fixed-size iterator generator, sized by the
// buffer's current number of memories.
macro_rules! define_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $get_item:expr) => {
    crate::utils::define_fixed_size_iter!(
        $name, $typ, $mtyp,
        |buffer: &BufferRef| buffer.n_memory() as usize,
        $get_item
    );
    }
);
996
// Borrowing iterator over the buffer's memories.
define_iter!(
    Iter,
    &'a BufferRef,
    &'a MemoryRef,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_ptr(ptr as *const ffi::GstMemory)
    }
);

// Mutably borrowing iterator; only constructed via `iter_memories_mut()`,
// which checks that all memories are writable first.
define_iter!(
    IterMut,
    &'a mut BufferRef,
    &'a mut MemoryRef,
    |buffer: &mut BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_mut_ptr(ptr)
    }
);
1016
1017impl<'a> IntoIterator for &'a BufferRef {
1018 type IntoIter = Iter<'a>;
1019 type Item = &'a MemoryRef;
1020
1021 fn into_iter(self) -> Self::IntoIter {
1022 self.iter_memories()
1023 }
1024}
1025
1026impl From<Memory> for Buffer {
1027 fn from(value: Memory) -> Self {
1028 skip_assert_initialized!();
1029
1030 let mut buffer = Buffer::new();
1031 {
1032 let buffer = buffer.get_mut().unwrap();
1033 buffer.append_memory(value);
1034 }
1035 buffer
1036 }
1037}
1038
1039impl<const N: usize> From<[Memory; N]> for Buffer {
1040 fn from(value: [Memory; N]) -> Self {
1041 skip_assert_initialized!();
1042
1043 let mut buffer = Buffer::new();
1044 {
1045 let buffer = buffer.get_mut().unwrap();
1046 value.into_iter().for_each(|b| buffer.append_memory(b));
1047 }
1048 buffer
1049 }
1050}
1051
1052impl std::iter::FromIterator<Memory> for Buffer {
1053 fn from_iter<T: IntoIterator<Item = Memory>>(iter: T) -> Self {
1054 skip_assert_initialized!();
1055 let iter = iter.into_iter();
1056
1057 let mut buffer = Buffer::new();
1058
1059 {
1060 let buffer = buffer.get_mut().unwrap();
1061 iter.for_each(|m| buffer.append_memory(m));
1062 }
1063
1064 buffer
1065 }
1066}
1067
1068impl std::iter::Extend<Memory> for BufferRef {
1069 fn extend<T: IntoIterator<Item = Memory>>(&mut self, iter: T) {
1070 iter.into_iter().for_each(|m| self.append_memory(m));
1071 }
1072}
1073
// Iterator yielding an owned (extra-ref'd) `Memory` per buffer memory.
define_iter!(
    IterOwned,
    &'a BufferRef,
    Memory,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_get_memory(buffer.as_mut_ptr(), idx as u32);
        from_glib_full(ptr)
    }
);
1083
// Debug and equality for the owned wrapper simply delegate to `BufferRef`
// (via Deref coercion).
impl fmt::Debug for Buffer {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        BufferRef::fmt(self, f)
    }
}

impl PartialEq for Buffer {
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(self, other)
    }
}

impl Eq for Buffer {}

// Cross comparisons between owned and borrowed buffers.
impl PartialEq<BufferRef> for Buffer {
    fn eq(&self, other: &BufferRef) -> bool {
        BufferRef::eq(self, other)
    }
}
impl PartialEq<Buffer> for BufferRef {
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(other, self)
    }
}
1108
impl fmt::Debug for BufferRef {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use std::cell::RefCell;

        use crate::utils::Displayable;

        // Adapter that renders an iterator as a debug list; the RefCell lets
        // the iterator be advanced from the immutable Debug impl.
        struct DebugIter<I>(RefCell<I>);
        impl<I: Iterator> fmt::Debug for DebugIter<I>
        where
            I::Item: fmt::Debug,
        {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_list().entries(&mut *self.0.borrow_mut()).finish()
            }
        }

        f.debug_struct("Buffer")
            .field("ptr", &self.as_ptr())
            .field("pts", &self.pts().display())
            .field("dts", &self.dts().display())
            .field("duration", &self.duration().display())
            .field("size", &self.size())
            .field("offset", &self.offset())
            .field("offset_end", &self.offset_end())
            .field("flags", &self.flags())
            .field(
                "metas",
                &DebugIter(RefCell::new(
                    self.iter_meta::<crate::Meta>().map(|m| m.api()),
                )),
            )
            .finish()
    }
}
1143
1144impl PartialEq for BufferRef {
1145 fn eq(&self, other: &BufferRef) -> bool {
1146 if self.size() != other.size() {
1147 return false;
1148 }
1149
1150 let self_map = self.map_readable();
1151 let other_map = other.map_readable();
1152
1153 match (self_map, other_map) {
1154 (Ok(self_map), Ok(other_map)) => self_map.as_slice().eq(other_map.as_slice()),
1155 _ => false,
1156 }
1157 }
1158}
1159
1160impl Eq for BufferRef {}
1161
1162impl<T> BufferMap<'_, T> {
1163 #[doc(alias = "get_size")]
1164 #[inline]
1165 pub fn size(&self) -> usize {
1166 self.map_info.size
1167 }
1168
1169 #[doc(alias = "get_buffer")]
1170 #[inline]
1171 pub fn buffer(&self) -> &BufferRef {
1172 self.buffer
1173 }
1174
1175 #[inline]
1176 pub fn as_slice(&self) -> &[u8] {
1177 if self.map_info.size == 0 {
1178 return &[];
1179 }
1180 unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
1181 }
1182}
1183
1184impl BufferMap<'_, Writable> {
1185 #[inline]
1186 pub fn as_mut_slice(&mut self) -> &mut [u8] {
1187 if self.map_info.size == 0 {
1188 return &mut [];
1189 }
1190 unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
1191 }
1192}
1193
// Standard slice-like conversions so a map can be used wherever a byte
// slice is expected.
impl<T> AsRef<[u8]> for BufferMap<'_, T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl AsMut<[u8]> for BufferMap<'_, Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<T> ops::Deref for BufferMap<'_, T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

// Mutable deref only exists for writable mappings.
impl ops::DerefMut for BufferMap<'_, Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1223
impl<T> fmt::Debug for BufferMap<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("BufferMap").field(&self.buffer()).finish()
    }
}

// Maps compare by their mapped byte contents.
impl<'a, T> PartialEq for BufferMap<'a, T> {
    fn eq(&self, other: &BufferMap<'a, T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T> Eq for BufferMap<'_, T> {}
1237
impl<T> Drop for BufferMap<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` describes the single outstanding mapping held by
        // this `BufferMap` (created elsewhere in this file), so unmapping it
        // exactly once here is sound.
        unsafe {
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1246
// SAFETY: `BufferMap` only hands out access to the mapped byte region.
// NOTE(review): assumes GStreamer buffer mappings may be used and unmapped
// from any thread — confirm against the GstBuffer mapping contract.
unsafe impl<T> Send for BufferMap<'_, T> {}
unsafe impl<T> Sync for BufferMap<'_, T> {}
1249
impl<T> MappedBuffer<T> {
    /// Returns the mapped data as an immutable byte slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            // Empty mapping: don't build a slice from a possibly-NULL `data`
            // pointer.
            return &[];
        }
        // SAFETY: `map_info` describes a live readable mapping of `size`
        // bytes at `data`, which stays valid while `self` (which owns the
        // buffer) is alive.
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    /// Returns the number of bytes that are mapped.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Returns a reference to the mapped buffer.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer.as_ref()
    }

    /// Unmaps the buffer and returns ownership of it to the caller.
    #[inline]
    pub fn into_buffer(self) -> Buffer {
        // Suppress `Drop::drop`, which would otherwise unmap a second time.
        let mut s = mem::ManuallyDrop::new(self);
        // SAFETY: `s` is wrapped in `ManuallyDrop` and its `buffer` field is
        // never used again, so moving the buffer out cannot cause a double
        // drop.
        let buffer = unsafe { ptr::read(&s.buffer) };
        unsafe {
            ffi::gst_buffer_unmap(buffer.as_mut_ptr(), &mut s.map_info);
        }

        buffer
    }
}
1282
1283impl MappedBuffer<Readable> {
1284 #[doc(alias = "get_buffer")]
1285 #[inline]
1286 pub fn buffer_owned(&self) -> Buffer {
1287 self.buffer.clone()
1288 }
1289}
1290
impl MappedBuffer<Writable> {
    /// Returns the mapped data as a mutable byte slice.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        if self.map_info.size == 0 {
            // Avoid building a slice from a possibly-NULL `data` pointer.
            return &mut [];
        }
        // SAFETY: `map_info` describes a live writable mapping of `size`
        // bytes at `data`; `&mut self` gives exclusive access for the
        // returned borrow.
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
1300
1301impl<T> AsRef<[u8]> for MappedBuffer<T> {
1302 #[inline]
1303 fn as_ref(&self) -> &[u8] {
1304 self.as_slice()
1305 }
1306}
1307
1308impl AsMut<[u8]> for MappedBuffer<Writable> {
1309 #[inline]
1310 fn as_mut(&mut self) -> &mut [u8] {
1311 self.as_mut_slice()
1312 }
1313}
1314
1315impl<T> ops::Deref for MappedBuffer<T> {
1316 type Target = [u8];
1317
1318 #[inline]
1319 fn deref(&self) -> &[u8] {
1320 self.as_slice()
1321 }
1322}
1323
1324impl ops::DerefMut for MappedBuffer<Writable> {
1325 #[inline]
1326 fn deref_mut(&mut self) -> &mut [u8] {
1327 self.as_mut_slice()
1328 }
1329}
1330
impl<T> Drop for MappedBuffer<T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` belongs to exactly one outstanding mapping of
        // `buffer` (see `into_buffer`, which suppresses this `Drop` before
        // unmapping manually), so it is unmapped exactly once.
        unsafe {
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1339
1340impl<T> fmt::Debug for MappedBuffer<T> {
1341 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1342 f.debug_tuple("MappedBuffer").field(&self.buffer()).finish()
1343 }
1344}
1345
1346impl<T> PartialEq for MappedBuffer<T> {
1347 fn eq(&self, other: &MappedBuffer<T>) -> bool {
1348 self.as_slice().eq(other.as_slice())
1349 }
1350}
1351
// Byte-wise slice equality is reflexive, so the `Eq` contract is satisfied.
impl<T> Eq for MappedBuffer<T> {}
1353
// SAFETY: the map only exposes the mapped bytes of a buffer it owns.
// NOTE(review): assumes GStreamer buffer mappings are not thread-affine —
// confirm against the GstBuffer mapping contract.
unsafe impl<T> Send for MappedBuffer<T> {}
unsafe impl<T> Sync for MappedBuffer<T> {}
1356
// Typed convenience re-export of the C `GST_BUFFER_COPY_METADATA` flag set.
#[doc(alias = "GST_BUFFER_COPY_METADATA")]
pub const BUFFER_COPY_METADATA: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_METADATA);
// Typed convenience re-export of the C `GST_BUFFER_COPY_ALL` flag set.
#[doc(alias = "GST_BUFFER_COPY_ALL")]
pub const BUFFER_COPY_ALL: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_ALL);
1363
// Hex-dump helper for a byte range of a buffer; rendered through the
// `Display`/`Debug` impls below (see `dump()`/`dump_range()` usage in the
// tests).
pub struct Dump<'a> {
    buffer: &'a BufferRef,
    // Requested byte range, kept as bounds so `..n`, `..=n` and unbounded
    // forms are all representable.
    start: Bound<usize>,
    end: Bound<usize>,
}
1369
// Iterates over a buffer's bytes in 16-byte lines, transparently crossing
// memory boundaries; yields each line plus the count of valid bytes in it.
#[must_use = "iterators are lazy and do nothing unless consumed"]
struct BufferChunked16Iter<'a> {
    buffer: &'a BufferRef,
    // Index of the memory currently being read.
    mem_idx: usize,
    // Total number of memories in the buffer.
    mem_len: usize,
    // Readable mapping of the current memory, created lazily.
    map: Option<crate::memory::MemoryMap<'a, crate::memory::Readable>>,
    // Read offset into the current mapping.
    map_offset: usize,
    // Remaining number of bytes to yield.
    len: usize,
}
1379
impl Iterator for BufferChunked16Iter<'_> {
    // One line of up to 16 bytes plus how many of them were actually filled.
    type Item = ([u8; 16], usize);

    fn next(&mut self) -> Option<Self::Item> {
        // Exhausted: all memories consumed or the requested byte count
        // reached.
        if self.mem_idx == self.mem_len || self.len == 0 {
            return None;
        }

        let mut item = [0u8; 16];
        let mut data = item.as_mut_slice();

        // Fill the 16-byte line, crossing memory boundaries as needed.
        while !data.is_empty() && self.mem_idx < self.mem_len && self.len > 0 {
            if self.map.is_none() {
                let mem = self.buffer.peek_memory(self.mem_idx);
                self.map = Some(mem.map_readable().expect("failed to map memory"));
            }

            let map = self.map.as_ref().unwrap();
            // NOTE(review): this assert trips for a zero-sized memory in the
            // buffer (0 < 0 is false) — confirm such memories cannot occur
            // here.
            debug_assert!(self.map_offset < map.len());
            // Copy bound: remaining bytes in this memory, space left in the
            // line, and overall remaining byte budget.
            let copy = cmp::min(cmp::min(map.len() - self.map_offset, data.len()), self.len);
            data[..copy].copy_from_slice(&map[self.map_offset..][..copy]);
            self.map_offset += copy;
            self.len -= copy;
            data = &mut data[copy..];

            // Current memory fully consumed: drop its mapping and advance.
            if self.map_offset == map.len() {
                self.map = None;
                self.map_offset = 0;
                self.mem_idx += 1;
            }
        }

        let copied = 16 - data.len();
        Some((item, copied))
    }
}
1417
impl Dump<'_> {
    // Shared implementation for `Display` (debug = false: bare hex bytes) and
    // `Debug` (debug = true: offset column, hex bytes and ASCII rendering).
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let n_memory = self.buffer.n_memory();
        if n_memory == 0 {
            write!(f, "<empty>")?;
            return Ok(());
        }

        use std::fmt::Write;

        let len = self.buffer.size();

        // Resolve the start bound to an inclusive byte index, rejecting
        // out-of-range values up front.
        let mut start_idx = match self.start {
            Bound::Included(idx) if idx >= len => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            // `checked_add` guards `Excluded(usize::MAX)` against overflow.
            Bound::Excluded(idx) if idx.checked_add(1).map_or(true, |idx| idx >= len) => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx,
            Bound::Excluded(idx) => idx + 1,
            Bound::Unbounded => 0,
        };

        // Resolve the end bound to an exclusive byte index.
        let end_idx = match self.end {
            Bound::Included(idx) if idx.checked_add(1).map_or(true, |idx| idx > len) => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Excluded(idx) if idx > len => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx + 1,
            Bound::Excluded(idx) => idx,
            Bound::Unbounded => len,
        };

        if start_idx >= end_idx {
            write!(f, "<empty range>")?;
            return Ok(());
        }

        // Locate the memory (and intra-memory offset) where dumping starts.
        let (memory_range, skip) = self
            .buffer
            .find_memory(start_idx..)
            .expect("can't find memory");

        let chunks = BufferChunked16Iter {
            buffer: self.buffer,
            mem_idx: memory_range.start,
            mem_len: n_memory,
            map: None,
            map_offset: skip,
            len: end_idx - start_idx,
        };

        if debug {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                // Pick the offset-column width from the largest offset that
                // will be printed.
                match end_idx {
                    0x00_00..=0xff_ff => write!(f, "{:04x}: ", start_idx)?,
                    0x01_00_00..=0xff_ff_ff => write!(f, "{:06x}: ", start_idx)?,
                    0x01_00_00_00..=0xff_ff_ff_ff => write!(f, "{:08x}: ", start_idx)?,
                    _ => write!(f, "{:016x}: ", start_idx)?,
                }

                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {:02x}", v)?;
                    } else {
                        write!(f, "{:02x}", v)?;
                    }
                }

                // Pad short final lines (3 columns per missing byte) so the
                // ASCII column stays aligned.
                for _ in line.len()..16 {
                    write!(f, "   ")?;
                }
                write!(f, " ")?;

                // ASCII column: printable characters as-is, everything else
                // as '.'.
                for v in line {
                    if v.is_ascii() && !v.is_ascii_control() {
                        f.write_char((*v).into())?;
                    } else {
                        f.write_char('.')?;
                    }
                }

                start_idx = start_idx.saturating_add(16);
                // No trailing newline after the final line.
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        } else {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {:02x}", v)?;
                    } else {
                        write!(f, "{:02x}", v)?;
                    }
                }

                start_idx = start_idx.saturating_add(16);
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        }
    }
}
1541
1542impl fmt::Display for Dump<'_> {
1543 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1544 self.fmt(f, false)
1545 }
1546}
1547
1548impl fmt::Debug for Dump<'_> {
1549 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1550 self.fmt(f, true)
1551 }
1552}
1553
1554#[cfg(test)]
1555mod tests {
1556 use super::*;
1557
    // Field setters on a writable buffer must read back unchanged.
    #[test]
    fn test_fields() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();

        {
            // `get_mut` succeeds here because this is the only reference.
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(ClockTime::NSECOND);
            buffer.set_dts(2 * ClockTime::NSECOND);
            buffer.set_offset(3);
            buffer.set_offset_end(4);
            buffer.set_duration(Some(5 * ClockTime::NSECOND));
        }
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer.dts(), Some(2 * ClockTime::NSECOND));
        assert_eq!(buffer.offset(), 3);
        assert_eq!(buffer.offset_end(), 4);
        assert_eq!(buffer.duration(), Some(5 * ClockTime::NSECOND));
    }
1578
    // Copy-on-write semantics: cloning makes `get_mut` fail, `make_mut`
    // produces an independent copy, and writes to the copy don't affect the
    // original.
    #[test]
    fn test_writability() {
        crate::init().unwrap();

        let mut buffer = Buffer::from_slice(vec![1, 2, 3, 4]);
        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
        }
        // Sole reference: writable access must be available.
        assert_ne!(buffer.get_mut(), None);
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(Some(ClockTime::NSECOND));
        }

        // After cloning, the buffer is shared and no longer directly writable.
        let mut buffer2 = buffer.clone();
        assert_eq!(buffer.get_mut(), None);

        assert_eq!(buffer2.as_ptr(), buffer.as_ptr());

        {
            // `make_mut` must copy, yielding a distinct underlying object.
            let buffer2 = buffer2.make_mut();
            assert_ne!(buffer2.as_ptr(), buffer.as_ptr());

            buffer2.set_pts(Some(2 * ClockTime::NSECOND));

            let mut data = buffer2.map_writable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
            data.as_mut_slice()[0] = 0;
        }

        // The original keeps its own metadata and contents.
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer2.pts(), Some(2 * ClockTime::NSECOND));

        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());

            let data = buffer2.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![0, 2, 3, 4].as_slice());
        }
    }
1621
    // Multi-memory buffers: sizes are reported per memory and all access
    // paths (indexed, peeked, mutable, and the three iterator flavours) agree.
    #[test]
    #[allow(clippy::cognitive_complexity)]
    fn test_memories() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            // Four 5-byte memories plus one 10-byte memory: 30 bytes total.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 10]));
        }

        assert!(buffer.is_all_memory_writable());
        assert_eq!(buffer.n_memory(), 5);
        assert_eq!(buffer.size(), 30);

        // Indexed access: `memory` (owned) and `peek_memory` (borrowed).
        for i in 0..5 {
            {
                let mem = buffer.memory(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let mem = buffer.peek_memory(i);
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let buffer = buffer.get_mut().unwrap();
                let mem = buffer.peek_memory_mut(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_writable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }
        }

        // Mutable iteration over memories.
        {
            let buffer = buffer.get_mut().unwrap();
            let mut last = 0;
            for (i, mem) in buffer.iter_memories_mut().unwrap().enumerate() {
                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_writable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                last = i;
            }

            // All five memories must have been visited.
            assert_eq!(last, 4);
        }

        // Borrowed iteration.
        let mut last = 0;
        for (i, mem) in buffer.iter_memories().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);

        // Owned iteration.
        let mut last = 0;
        for (i, mem) in buffer.iter_memories_owned().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);
    }
1731
    // `foreach_meta` must visit all attached metas in insertion order.
    #[test]
    fn test_meta_foreach() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);
    }
1764
    // `foreach_meta_mut` with `Remove` must visit every meta once and drop
    // only the ones flagged for removal.
    #[test]
    fn test_meta_foreach_mut() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        // First pass: remove the 1-second meta, keep the other.
        let mut res = vec![];
        buffer.get_mut().unwrap().foreach_meta_mut(|mut meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            if meta.timestamp() == ClockTime::SECOND {
                ControlFlow::Continue(BufferMetaForeachAction::Remove)
            } else {
                ControlFlow::Continue(BufferMetaForeachAction::Keep)
            }
        });

        // Both metas were still visited during the removal pass.
        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);

        // Second pass: only the kept meta remains.
        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO][..], &res[..]);
    }
1812
    // `ptr_eq` compares identity (same underlying GstBuffer), not contents.
    #[test]
    fn test_ptr_eq() {
        crate::init().unwrap();

        let buffer1 = Buffer::new();
        assert!(BufferRef::ptr_eq(&buffer1, &buffer1));
        let buffer2 = Buffer::new();
        // Two freshly allocated buffers are distinct objects.
        assert!(!BufferRef::ptr_eq(&buffer1, &buffer2));
    }
1822
    // `copy_region` must honour every range form (inclusive, exclusive,
    // open-ended) and reject ranges past the end of the buffer.
    #[test]
    fn test_copy_region() {
        crate::init().unwrap();

        let buffer1 = Buffer::from_mut_slice(vec![0, 1, 2, 3, 4, 5, 6, 7]);
        // All of these forms describe the full 8-byte buffer.
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );

        // Ranges extending past the 8-byte buffer must fail.
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..100).is_err());

        // Partial regions, exclusive vs inclusive end.
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..=4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3, 4]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1]);
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1, 2]);
    }
1882
    // Exact output of `dump()`/`dump_range()`: `Debug` yields the offset +
    // hex + ASCII layout, `Display` yields bare hex; out-of-range bounds
    // produce diagnostic strings.
    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();
        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04                                      ...."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump_range(..)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04                                      ...."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..2)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02                                            .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(2..=3)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04                                            .."
        );
        s.clear();
        // Bounds past the 4-byte buffer are reported, not panicked on.
        write!(&mut s, "{:?}", buffer.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>",);
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>",);
        s.clear();

        // 19 bytes: one full 16-byte line plus a padded 3-byte line.
        let buffer = crate::Buffer::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................\n\
             0010: 00 00 00                                        ..."
        );
        s.clear();
    }
1936
    // Dumping must stitch bytes seamlessly across memory boundaries, and
    // range bounds are validated against the total buffer size.
    #[test]
    fn test_dump_multi_memories() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut buffer = crate::Buffer::new();
        {
            // Five memories of 4+4+4+4+3 = 19 bytes with ascending contents.
            let buffer = buffer.get_mut().unwrap();

            let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![5, 6, 7, 8]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![9, 10, 11, 12]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![13, 14, 15, 16]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![17, 18, 19]);
            buffer.append_memory(mem);
        }

        let mut s = String::new();
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 ................\n\
             0010: 11 12 13                                        ..."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10\n11 12 13"
        );
        s.clear();

        // A non-zero start offset shifts the 16-byte line grouping.
        write!(&mut s, "{:?}", buffer.dump_range(2..)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 11 12 ................\n\
             0012: 13                                              ."
        );
        s.clear();

        // A range fully inside the middle memories.
        write!(&mut s, "{:?}", buffer.dump_range(14..17)).unwrap();
        assert_eq!(
            s,
            "000e: 0f 10 11                                        ..."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..20)).unwrap();
        assert_eq!(s, "<end out of range>");
        s.clear();

        // A reversed (empty) range whose start is past the end reports the
        // start as out of range.
        #[allow(clippy::reversed_empty_ranges)]
        {
            write!(&mut s, "{:?}", buffer.dump_range(23..20)).unwrap();
            assert_eq!(s, "<start out of range>");
            s.clear();
        }
    }
2004}