1use std::{
4 cmp, fmt,
5 marker::PhantomData,
6 mem, ops,
7 ops::{Bound, ControlFlow, Range, RangeBounds},
8 ptr, slice,
9};
10
11use glib::translate::*;
12
13use crate::{
14 ffi, meta::*, BufferCursor, BufferFlags, BufferRefCursor, ClockTime, Memory, MemoryRef,
15};
16
/// Type-level marker for a buffer mapping that only permits read access.
pub enum Readable {}
/// Type-level marker for a buffer mapping that permits read and write access.
pub enum Writable {}

/// Action returned by the `foreach_meta_mut` callback deciding whether the
/// currently visited meta stays on the buffer or is removed from it.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum BufferMetaForeachAction {
    Keep,
    Remove,
}
25
// Generates the owned `Buffer` and borrowed `BufferRef` wrapper types around
// the `GstBuffer` mini object, tied to its GType.
mini_object_wrapper!(Buffer, BufferRef, ffi::GstBuffer, || {
    ffi::gst_buffer_get_type()
});
29
/// RAII guard for a mapped region of a borrowed `BufferRef`.
///
/// Holds the FFI `GstMapInfo` describing the mapping; the `T` marker
/// (`Readable`/`Writable`) selects the allowed access at the type level.
pub struct BufferMap<'a, T> {
    buffer: &'a BufferRef,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}

/// Like `BufferMap`, but owns the underlying `Buffer`, keeping the mapping
/// alive for as long as this value exists.
pub struct MappedBuffer<T> {
    buffer: Buffer,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
41
impl Buffer {
    /// Creates a new, empty buffer.
    #[doc(alias = "gst_buffer_new")]
    #[inline]
    pub fn new() -> Self {
        assert_initialized_main_thread!();

        unsafe { from_glib_full(ffi::gst_buffer_new()) }
    }

    /// Creates a new buffer with a single memory block of `size` bytes,
    /// allocated with the default allocator (null allocator/params).
    #[doc(alias = "gst_buffer_new_allocate")]
    #[doc(alias = "gst_buffer_new_and_alloc")]
    #[inline]
    pub fn with_size(size: usize) -> Result<Self, glib::BoolError> {
        assert_initialized_main_thread!();

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_new_allocate(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
            .ok_or_else(|| glib::bool_error!("Failed to allocate buffer"))
        }
    }

    /// Creates a buffer wrapping a mutable slice without copying it.
    ///
    /// The `TAG_MEMORY` flag is unset on the freshly created buffer.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_mut_slice(slice);
        let mut buffer = Buffer::new();
        {
            // A freshly created buffer is always writable.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    /// Creates a buffer wrapping an immutable slice without copying it.
    ///
    /// The `TAG_MEMORY` flag is unset on the freshly created buffer.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_slice(slice);
        let mut buffer = Buffer::new();
        {
            // A freshly created buffer is always writable.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    /// Consumes the buffer and maps it readable.
    ///
    /// On mapping failure the unmapped buffer is returned in the `Err` variant.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_readable(self) -> Result<MappedBuffer<Readable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // Only initialized by the FFI call when it returned TRUE.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    /// Consumes the buffer and maps it read/write.
    ///
    /// On mapping failure the unmapped buffer is returned in the `Err` variant.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_writable(self) -> Result<MappedBuffer<Writable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // Only initialized by the FFI call when it returned TRUE.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    /// Consumes the buffer into a read-only cursor over its bytes.
    #[inline]
    pub fn into_cursor_readable(self) -> BufferCursor<Readable> {
        BufferCursor::new_readable(self)
    }

    /// Consumes the buffer into a writable cursor; fails if the buffer's
    /// memory cannot be written.
    #[inline]
    pub fn into_cursor_writable(self) -> Result<BufferCursor<Writable>, glib::BoolError> {
        BufferCursor::new_writable(self)
    }

    /// Appends all memory of `other` to `self`.
    ///
    /// `gst_buffer_append` may return a different buffer pointer, so the
    /// internal pointer is replaced with the one it returns.
    #[doc(alias = "gst_buffer_append")]
    pub fn append(&mut self, other: Self) {
        unsafe {
            let ptr = ffi::gst_buffer_append(self.as_mut_ptr(), other.into_glib_ptr());
            self.replace_ptr(ptr);
        }
    }
}
198
impl Default for Buffer {
    // The default buffer is a newly allocated, empty buffer.
    fn default() -> Self {
        Self::new()
    }
}
204
impl BufferRef {
    /// Maps the buffer readable, merging all memory into one contiguous region.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn map_readable(&self) -> Result<BufferMap<Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_buffer_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // Only initialized by the FFI call when it returned TRUE.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable"))
            }
        }
    }

    /// Maps the buffer read/write; requires exclusive access via `&mut self`.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn map_writable(&mut self) -> Result<BufferMap<Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // Only initialized by the FFI call when it returned TRUE.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable"))
            }
        }
    }
246
    // Converts a generic memory-index range into the `(idx, length)` pair that
    // the C `*_range` functions expect, validating both bounds against
    // `n_memory()`. Returns an error for any out-of-range or overflowing bound.
    fn memory_range_into_idx_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(u32, i32), glib::BoolError> {
        let n_memory = self.n_memory();
        debug_assert!(n_memory <= u32::MAX as usize);

        let start_idx = match range.start_bound() {
            ops::Bound::Included(idx) if *idx >= n_memory => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            // `checked_add` guards against overflow of an excluded start bound.
            ops::Bound::Excluded(idx) if idx.checked_add(1).map_or(true, |idx| idx >= n_memory) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // Inclusive end: `idx + 1` must not overflow and must stay <= n_memory.
            ops::Bound::Included(idx) if idx.checked_add(1).map_or(true, |idx| idx > n_memory) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > n_memory => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => n_memory,
        };

        Ok((
            start_idx as u32,
            // The C API takes the length as a signed int; reject ranges too large.
            i32::try_from(end_idx - start_idx).map_err(|_| glib::bool_error!("Too large range"))?,
        ))
    }
283
    /// Maps a range of the buffer's memory blocks readable.
    #[doc(alias = "gst_buffer_map_range")]
    #[inline]
    pub fn map_range_readable(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<Readable>, glib::BoolError> {
        // Validate the memory-index range up front.
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable"))
            }
        }
    }

    /// Maps a range of the buffer's memory blocks read/write.
    #[doc(alias = "gst_buffer_map_range")]
    #[inline]
    pub fn map_range_writable(
        &mut self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<Writable>, glib::BoolError> {
        // Validate the memory-index range up front.
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable"))
            }
        }
    }
339
    // Converts a generic byte range into the `(offset, length)` pair that the
    // byte-oriented C functions expect, validating both bounds against `size()`.
    pub(crate) fn byte_range_into_offset_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(usize, usize), glib::BoolError> {
        let size = self.size();

        let start_idx = match range.start_bound() {
            ops::Bound::Included(idx) if *idx >= size => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            // `checked_add` guards against overflow of an excluded start bound.
            ops::Bound::Excluded(idx) if idx.checked_add(1).map_or(true, |idx| idx >= size) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // Inclusive end: `idx + 1` must not overflow and must stay <= size.
            ops::Bound::Included(idx) if idx.checked_add(1).map_or(true, |idx| idx > size) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > size => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => size,
        };

        Ok((start_idx, end_idx - start_idx))
    }
372
    /// Copies the given byte range of the buffer into a new buffer, copying
    /// the fields selected by `flags`.
    #[doc(alias = "gst_buffer_copy_region")]
    pub fn copy_region(
        &self,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<Buffer, glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_copy_region(
                self.as_mut_ptr(),
                flags.into_glib(),
                offset,
                size,
            ))
            .ok_or_else(|| glib::bool_error!("Failed to copy region of buffer"))
        }
    }

    /// Copies the given byte range of this buffer into `dest`, copying the
    /// fields selected by `flags`.
    #[doc(alias = "gst_buffer_copy_into")]
    pub fn copy_into(
        &self,
        dest: &mut BufferRef,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<(), glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            glib::result_from_gboolean!(
                ffi::gst_buffer_copy_into(
                    dest.as_mut_ptr(),
                    self.as_mut_ptr(),
                    flags.into_glib(),
                    offset,
                    size,
                ),
                "Failed to copy into destination buffer",
            )
        }
    }
414
    /// Copies `slice` into the buffer starting at byte `offset`.
    ///
    /// # Panics
    ///
    /// Panics if `offset + slice.len()` exceeds the buffer's `maxsize()`.
    ///
    /// Returns `Err(copied)` with the number of bytes actually written if
    /// fewer than `slice.len()` bytes could be copied.
    #[doc(alias = "gst_buffer_fill")]
    pub fn copy_from_slice(&mut self, offset: usize, slice: &[u8]) -> Result<(), usize> {
        let maxsize = self.maxsize();
        let size = slice.len();

        assert!(maxsize >= offset && maxsize - offset >= size);

        let copied = unsafe {
            let src = slice.as_ptr();
            ffi::gst_buffer_fill(
                self.as_mut_ptr(),
                offset,
                src as glib::ffi::gconstpointer,
                size,
            )
        };

        if copied == size {
            Ok(())
        } else {
            Err(copied)
        }
    }

    /// Copies bytes starting at `offset` from the buffer into `slice`.
    ///
    /// # Panics
    ///
    /// Panics if `offset + slice.len()` exceeds the buffer's `size()`.
    ///
    /// Returns `Err(copied)` with the number of bytes actually read if fewer
    /// than `slice.len()` bytes could be extracted.
    #[doc(alias = "gst_buffer_extract")]
    pub fn copy_to_slice(&self, offset: usize, slice: &mut [u8]) -> Result<(), usize> {
        // Note: bounds are checked against size(), not maxsize(), since only
        // the valid data region can be read back.
        let maxsize = self.size();
        let size = slice.len();

        assert!(maxsize >= offset && maxsize - offset >= size);

        let copied = unsafe {
            let dest = slice.as_mut_ptr();
            ffi::gst_buffer_extract(self.as_mut_ptr(), offset, dest as glib::ffi::gpointer, size)
        };

        if copied == size {
            Ok(())
        } else {
            Err(copied)
        }
    }
457
    /// Creates a deep copy of the buffer, copying memory contents as well.
    #[doc(alias = "gst_buffer_copy_deep")]
    pub fn copy_deep(&self) -> Result<Buffer, glib::BoolError> {
        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_copy_deep(self.as_ptr()))
                .ok_or_else(|| glib::bool_error!("Failed to deep copy buffer"))
        }
    }

    /// Total size in bytes of the buffer's data.
    #[doc(alias = "get_size")]
    #[doc(alias = "gst_buffer_get_size")]
    pub fn size(&self) -> usize {
        unsafe { ffi::gst_buffer_get_size(self.as_mut_ptr()) }
    }

    /// Maximum number of bytes the buffer can hold, queried over all memory
    /// blocks (offset 0, length -1 meaning "all").
    #[doc(alias = "get_maxsize")]
    pub fn maxsize(&self) -> usize {
        unsafe {
            let mut maxsize = mem::MaybeUninit::uninit();
            ffi::gst_buffer_get_sizes_range(
                self.as_mut_ptr(),
                0,
                -1,
                ptr::null_mut(),
                maxsize.as_mut_ptr(),
            );

            maxsize.assume_init()
        }
    }
487
    /// Sets the total size of the buffer's data.
    ///
    /// # Panics
    ///
    /// Panics if `size` exceeds the buffer's `maxsize()`.
    #[doc(alias = "gst_buffer_set_size")]
    pub fn set_size(&mut self, size: usize) {
        assert!(self.maxsize() >= size);

        unsafe {
            ffi::gst_buffer_set_size(self.as_mut_ptr(), size as isize);
        }
    }
496
    /// Media-specific offset of the buffer (direct field access).
    #[doc(alias = "get_offset")]
    #[doc(alias = "GST_BUFFER_OFFSET")]
    #[inline]
    pub fn offset(&self) -> u64 {
        self.0.offset
    }

    /// Sets the media-specific offset of the buffer.
    #[inline]
    pub fn set_offset(&mut self, offset: u64) {
        self.0.offset = offset;
    }

    /// Media-specific end offset of the buffer (direct field access).
    #[doc(alias = "get_offset_end")]
    #[doc(alias = "GST_BUFFER_OFFSET_END")]
    #[inline]
    pub fn offset_end(&self) -> u64 {
        self.0.offset_end
    }

    /// Sets the media-specific end offset of the buffer.
    #[inline]
    pub fn set_offset_end(&mut self, offset_end: u64) {
        self.0.offset_end = offset_end;
    }

    /// Presentation timestamp, `None` if unset.
    #[doc(alias = "get_pts")]
    #[doc(alias = "GST_BUFFER_PTS")]
    #[inline]
    pub fn pts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.pts) }
    }

    /// Sets the presentation timestamp; `None` clears it.
    #[inline]
    pub fn set_pts(&mut self, pts: impl Into<Option<ClockTime>>) {
        self.0.pts = pts.into().into_glib();
    }

    /// Decoding timestamp, `None` if unset.
    #[doc(alias = "get_dts")]
    #[doc(alias = "GST_BUFFER_DTS")]
    #[inline]
    pub fn dts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.dts) }
    }

    /// Sets the decoding timestamp; `None` clears it.
    #[inline]
    pub fn set_dts(&mut self, dts: impl Into<Option<ClockTime>>) {
        self.0.dts = dts.into().into_glib();
    }
544
545 #[doc(alias = "get_dts_or_pts")]
546 #[doc(alias = "GST_BUFFER_DTS_OR_PTS")]
547 #[inline]
548 pub fn dts_or_pts(&self) -> Option<ClockTime> {
549 let val = self.dts();
550 if val.is_none() {
551 self.pts()
552 } else {
553 val
554 }
555 }
556
    /// Duration of the buffer, `None` if unset.
    #[doc(alias = "get_duration")]
    #[doc(alias = "GST_BUFFER_DURATION")]
    #[inline]
    pub fn duration(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.duration) }
    }

    /// Sets the duration of the buffer; `None` clears it.
    #[inline]
    pub fn set_duration(&mut self, duration: impl Into<Option<ClockTime>>) {
        self.0.duration = duration.into().into_glib();
    }

    /// Buffer flags, read from the mini object's flag field.
    #[doc(alias = "get_flags")]
    #[doc(alias = "GST_BUFFER_FLAGS")]
    #[inline]
    pub fn flags(&self) -> BufferFlags {
        BufferFlags::from_bits_truncate(self.0.mini_object.flags)
    }

    /// Sets (ORs in) the given flags; existing flags stay set.
    #[doc(alias = "GST_BUFFER_FLAG_SET")]
    #[inline]
    pub fn set_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags |= flags.bits();
    }

    /// Clears the given flags; other flags are untouched.
    #[doc(alias = "GST_BUFFER_FLAG_UNSET")]
    #[inline]
    pub fn unset_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags &= !flags.bits();
    }
587
    /// Returns the first meta of type `T` attached to the buffer, if any.
    #[doc(alias = "get_meta")]
    #[doc(alias = "gst_buffer_get_meta")]
    #[inline]
    pub fn meta<T: MetaAPI>(&self) -> Option<MetaRef<T>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_ptr(self, meta as *const <T as MetaAPI>::GstType))
            }
        }
    }

    /// Returns the first meta of type `T` mutably, if any.
    #[doc(alias = "get_meta_mut")]
    #[inline]
    pub fn meta_mut<T: MetaAPI>(&mut self) -> Option<MetaRefMut<T, crate::meta::Standalone>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_mut_ptr(self, meta as *mut <T as MetaAPI>::GstType))
            }
        }
    }

    /// Iterator over all metas of type `T` attached to the buffer.
    pub fn iter_meta<T: MetaAPI>(&self) -> MetaIter<T> {
        MetaIter::new(self)
    }

    /// Mutable iterator over all metas of type `T` attached to the buffer.
    pub fn iter_meta_mut<T: MetaAPI>(&mut self) -> MetaIterMut<T> {
        MetaIterMut::new(self)
    }
622
    /// Calls `func` for every meta on the buffer; iteration stops early when
    /// the closure returns `ControlFlow::Break`. Returns `true` if all metas
    /// were visited.
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(&self, func: F) -> bool {
        // C callback bridging back into the Rust closure stored in `user_data`.
        unsafe extern "C" fn trampoline<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            let func = user_data as *mut F;
            let res = (*func)(Meta::from_ptr(BufferRef::from_ptr(buffer), *meta));

            // TRUE tells the C iteration to continue.
            matches!(res, ControlFlow::Continue(_)).into_glib()
        }

        unsafe {
            // The closure only needs to live for the duration of the FFI call,
            // so a pointer to the stack value is sufficient.
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }

    /// Calls `func` for every meta on the buffer, allowing each meta to be
    /// kept or removed; iteration stops early on `ControlFlow::Break`.
    /// Returns `true` if all metas were visited.
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta_mut<
        F: FnMut(
            MetaRefMut<Meta, crate::meta::Iterated>,
        ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
    >(
        &mut self,
        func: F,
    ) -> bool {
        // C callback bridging back into the Rust closure stored in `user_data`.
        unsafe extern "C" fn trampoline<
            F: FnMut(
                MetaRefMut<Meta, crate::meta::Iterated>,
            ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
        >(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            let func = user_data as *mut F;
            let res = (*func)(Meta::from_mut_ptr(BufferRef::from_mut_ptr(buffer), *meta));

            let (cont, action) = match res {
                ControlFlow::Continue(action) => (true, action),
                ControlFlow::Break(action) => (false, action),
            };

            // Writing NULL through the out pointer asks the C iteration to
            // remove the current meta from the buffer.
            if action == BufferMetaForeachAction::Remove {
                *meta = ptr::null_mut();
            }

            cont.into_glib()
        }

        unsafe {
            // The closure only needs to live for the duration of the FFI call.
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }
692
    /// Appends a memory block at the end of the buffer, taking ownership.
    #[doc(alias = "gst_buffer_append_memory")]
    pub fn append_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_append_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Finds the memory blocks spanning the given byte range.
    ///
    /// On success returns the memory-index range covering the bytes and the
    /// number of bytes to skip inside the first block.
    #[doc(alias = "gst_buffer_find_memory")]
    pub fn find_memory(&self, range: impl RangeBounds<usize>) -> Option<(Range<usize>, usize)> {
        let (offset, size) = self.byte_range_into_offset_len(range).ok()?;

        unsafe {
            let mut idx = mem::MaybeUninit::uninit();
            let mut length = mem::MaybeUninit::uninit();
            let mut skip = mem::MaybeUninit::uninit();

            let res = from_glib(ffi::gst_buffer_find_memory(
                self.as_mut_ptr(),
                offset,
                size,
                idx.as_mut_ptr(),
                length.as_mut_ptr(),
                skip.as_mut_ptr(),
            ));

            if res {
                // The out parameters are only initialized on success.
                let idx = idx.assume_init() as usize;
                let length = length.assume_init() as usize;
                let skip = skip.assume_init();
                Some((idx..(idx + length), skip))
            } else {
                None
            }
        }
    }

    /// Returns all memory of the buffer merged into a single memory block.
    #[doc(alias = "get_all_memory")]
    #[doc(alias = "gst_buffer_get_all_memory")]
    pub fn all_memory(&self) -> Option<Memory> {
        unsafe { from_glib_full(ffi::gst_buffer_get_all_memory(self.as_mut_ptr())) }
    }

    /// Maximum number of memory blocks a buffer can hold.
    #[doc(alias = "get_max_memory")]
    #[doc(alias = "gst_buffer_get_max_memory")]
    pub fn max_memory() -> usize {
        unsafe { ffi::gst_buffer_get_max_memory() as usize }
    }
738
739 #[doc(alias = "get_memory")]
740 #[doc(alias = "gst_buffer_get_memory")]
741 pub fn memory(&self, idx: usize) -> Option<Memory> {
742 if idx >= self.n_memory() {
743 return None;
744 }
745 unsafe {
746 let res = ffi::gst_buffer_get_memory(self.as_mut_ptr(), idx as u32);
747 Some(from_glib_full(res))
748 }
749 }
750
    /// Returns the given range of memory blocks merged into one memory,
    /// or `None` when the range is invalid or merging fails.
    #[doc(alias = "get_memory_range")]
    #[doc(alias = "gst_buffer_get_memory_range")]
    pub fn memory_range(&self, range: impl RangeBounds<usize>) -> Option<Memory> {
        let (idx, len) = self.memory_range_into_idx_len(range).ok()?;

        unsafe {
            let res = ffi::gst_buffer_get_memory_range(self.as_mut_ptr(), idx, len);
            from_glib_full(res)
        }
    }
761
762 #[doc(alias = "gst_buffer_insert_memory")]
763 pub fn insert_memory(&mut self, idx: impl Into<Option<usize>>, mem: Memory) {
764 let n_memory = self.n_memory();
765 let idx = idx.into();
766 let idx = idx.unwrap_or(n_memory);
767 assert!(idx <= self.n_memory());
768 unsafe { ffi::gst_buffer_insert_memory(self.as_mut_ptr(), idx as i32, mem.into_glib_ptr()) }
769 }
770
    /// Returns `true` when every memory block of the buffer is writable.
    #[doc(alias = "gst_buffer_is_all_memory_writable")]
    pub fn is_all_memory_writable(&self) -> bool {
        unsafe { from_glib(ffi::gst_buffer_is_all_memory_writable(self.as_mut_ptr())) }
    }

    /// Returns `true` when all memory blocks in the given range are writable;
    /// an invalid range yields `false`.
    #[doc(alias = "gst_buffer_is_memory_range_writable")]
    pub fn is_memory_range_writable(&self, range: impl RangeBounds<usize>) -> bool {
        let Some((idx, len)) = self.memory_range_into_idx_len(range).ok() else {
            return false;
        };

        unsafe {
            from_glib(ffi::gst_buffer_is_memory_range_writable(
                self.as_mut_ptr(),
                idx,
                len,
            ))
        }
    }

    /// Number of memory blocks in the buffer.
    #[doc(alias = "gst_buffer_n_memory")]
    pub fn n_memory(&self) -> usize {
        unsafe { ffi::gst_buffer_n_memory(self.as_ptr() as *mut _) as usize }
    }
795
    /// Borrows the memory block at `idx` without copying.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_peek_memory")]
    pub fn peek_memory(&self, idx: usize) -> &MemoryRef {
        assert!(idx < self.n_memory());
        unsafe { MemoryRef::from_ptr(ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32)) }
    }

    /// Mutably borrows the memory block at `idx`; fails when that memory is
    /// not writable (shared with other references).
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_peek_memory")]
    pub fn peek_memory_mut(&mut self, idx: usize) -> Result<&mut MemoryRef, glib::BoolError> {
        assert!(idx < self.n_memory());
        unsafe {
            let mem = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32);
            if ffi::gst_mini_object_is_writable(mem as *mut _) == glib::ffi::GFALSE {
                Err(glib::bool_error!("Memory not writable"))
            } else {
                Ok(MemoryRef::from_mut_ptr(mem))
            }
        }
    }
814
    /// Prepends a memory block at the start of the buffer, taking ownership.
    #[doc(alias = "gst_buffer_prepend_memory")]
    pub fn prepend_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_prepend_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Removes all memory blocks from the buffer.
    #[doc(alias = "gst_buffer_remove_all_memory")]
    pub fn remove_all_memory(&mut self) {
        unsafe { ffi::gst_buffer_remove_all_memory(self.as_mut_ptr()) }
    }

    /// Removes the memory block at `idx`.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_remove_memory")]
    pub fn remove_memory(&mut self, idx: usize) {
        assert!(idx < self.n_memory());
        unsafe { ffi::gst_buffer_remove_memory(self.as_mut_ptr(), idx as u32) }
    }

    /// Removes the given range of memory blocks.
    ///
    /// # Panics
    ///
    /// Panics if the range is invalid for this buffer.
    #[doc(alias = "gst_buffer_remove_memory_range")]
    pub fn remove_memory_range(&mut self, range: impl RangeBounds<usize>) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe { ffi::gst_buffer_remove_memory_range(self.as_mut_ptr(), idx, len) }
    }

    /// Replaces all memory of the buffer with `mem`.
    #[doc(alias = "gst_buffer_replace_all_memory")]
    pub fn replace_all_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_replace_all_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Replaces the memory block at `idx` with `mem`.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_replace_memory")]
    pub fn replace_memory(&mut self, idx: usize, mem: Memory) {
        assert!(idx < self.n_memory());
        unsafe {
            ffi::gst_buffer_replace_memory(self.as_mut_ptr(), idx as u32, mem.into_glib_ptr())
        }
    }

    /// Replaces the given range of memory blocks with `mem`.
    ///
    /// # Panics
    ///
    /// Panics if the range is invalid for this buffer.
    #[doc(alias = "gst_buffer_replace_memory_range")]
    pub fn replace_memory_range(&mut self, range: impl RangeBounds<usize>, mem: Memory) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe {
            ffi::gst_buffer_replace_memory_range(self.as_mut_ptr(), idx, len, mem.into_glib_ptr())
        }
    }
863
    /// Iterator over borrowed references to the buffer's memory blocks.
    pub fn iter_memories(&self) -> Iter {
        Iter::new(self)
    }

    /// Mutable iterator over the buffer's memory blocks; fails unless every
    /// memory block is writable.
    pub fn iter_memories_mut(&mut self) -> Result<IterMut, glib::BoolError> {
        if !self.is_all_memory_writable() {
            Err(glib::bool_error!("Not all memory are writable"))
        } else {
            Ok(IterMut::new(self))
        }
    }

    /// Iterator yielding owned (reference-counted) memory handles.
    pub fn iter_memories_owned(&self) -> IterOwned {
        IterOwned::new(self)
    }

    /// Read-only byte cursor borrowing this buffer.
    pub fn as_cursor_readable(&self) -> BufferRefCursor<&BufferRef> {
        BufferRefCursor::new_readable(self)
    }

    /// Writable byte cursor borrowing this buffer mutably.
    pub fn as_cursor_writable(
        &mut self,
    ) -> Result<BufferRefCursor<&mut BufferRef>, glib::BoolError> {
        BufferRefCursor::new_writable(self)
    }

    /// Hex-dump helper covering the whole buffer.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump(&self) -> Dump {
        Dump {
            buffer: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    /// Hex-dump helper covering only the given byte range.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump {
        Dump {
            buffer: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
}
908
// Generates an iterator type over the metas attached to a buffer.
// `$typ` is the borrowed buffer type, `$mtyp` the yielded meta reference type,
// `$prepare_buffer` re-borrows the raw buffer pointer, `$from_ptr` wraps the
// raw meta pointer into the yielded reference.
macro_rules! define_meta_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $prepare_buffer:expr, $from_ptr:expr) => {
        pub struct $name<'a, T: MetaAPI + 'a> {
            buffer: $typ,
            // Opaque iteration state owned by gst_buffer_iterate_meta().
            state: glib::ffi::gpointer,
            meta_api: glib::Type,
            items: PhantomData<$mtyp>,
        }

        unsafe impl<'a, T: MetaAPI> Send for $name<'a, T> { }
        unsafe impl<'a, T: MetaAPI> Sync for $name<'a, T> { }

        impl<'a, T: MetaAPI> fmt::Debug for $name<'a, T> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_struct(stringify!($name))
                    .field("buffer", &self.buffer)
                    .field("state", &self.state)
                    .field("meta_api", &self.meta_api)
                    .field("items", &self.items)
                    .finish()
            }
        }

        impl<'a, T: MetaAPI> $name<'a, T> {
            fn new(buffer: $typ) -> $name<'a, T> {
                skip_assert_initialized!();

                $name {
                    buffer,
                    state: ptr::null_mut(),
                    meta_api: T::meta_api(),
                    items: PhantomData,
                }
            }
        }

        #[allow(clippy::redundant_closure_call)]
        impl<'a, T: MetaAPI> Iterator for $name<'a, T> {
            type Item = $mtyp;

            fn next(&mut self) -> Option<Self::Item> {
                loop {
                    unsafe {
                        let meta = ffi::gst_buffer_iterate_meta(self.buffer.as_mut_ptr(), &mut self.state);

                        if meta.is_null() {
                            return None;
                        // An INVALID meta_api means "yield every meta"; otherwise
                        // only metas whose API type matches are yielded.
                        } else if self.meta_api == glib::Type::INVALID || glib::Type::from_glib((*(*meta).info).api) == self.meta_api {
                            let buffer = $prepare_buffer(self.buffer.as_mut_ptr());
                            let item = $from_ptr(buffer, meta);
                            return Some(item);
                        }
                    }
                }
            }
        }

        // gst_buffer_iterate_meta() keeps returning NULL once exhausted.
        impl<'a, T: MetaAPI> std::iter::FusedIterator for $name<'a, T> { }
    }
);
970
// Immutable meta iterator: yields `MetaRef`s from a shared buffer borrow.
define_meta_iter!(
    MetaIter,
    &'a BufferRef,
    MetaRef<'a, T>,
    |buffer: *const ffi::GstBuffer| BufferRef::from_ptr(buffer),
    |buffer, meta| T::from_ptr(buffer, meta as *const <T as MetaAPI>::GstType)
);
// Mutable meta iterator: yields `MetaRefMut`s from an exclusive buffer borrow.
define_meta_iter!(
    MetaIterMut,
    &'a mut BufferRef,
    MetaRefMut<'a, T, crate::meta::Iterated>,
    |buffer: *mut ffi::GstBuffer| BufferRef::from_mut_ptr(buffer),
    |buffer: &'a mut BufferRef, meta| T::from_mut_ptr(buffer, meta as *mut <T as MetaAPI>::GstType)
);
985
// Generates a double-ended, exact-size iterator over a buffer's memory blocks.
// `$typ` is the borrowed buffer type, `$mtyp` the yielded item type, and
// `$get_item` fetches the item at a given index (returning `Option`).
macro_rules! define_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $get_item:expr) => {
        pub struct $name<'a> {
            buffer: $typ,
            // Front cursor (next index to yield).
            idx: usize,
            // Back cursor (one past the last index to yield).
            n_memory: usize,
        }

        impl<'a> fmt::Debug for $name<'a> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_struct(stringify!($name))
                    .field("buffer", &self.buffer)
                    .field("idx", &self.idx)
                    .field("n_memory", &self.n_memory)
                    .finish()
            }
        }

        impl<'a> $name<'a> {
            fn new(buffer: $typ) -> $name<'a> {
                skip_assert_initialized!();

                // Snapshot the memory count once at construction time.
                let n_memory = buffer.n_memory();

                $name {
                    buffer,
                    idx: 0,
                    n_memory,
                }
            }
        }

        #[allow(clippy::redundant_closure_call)]
        impl<'a> Iterator for $name<'a> {
            type Item = $mtyp;

            fn next(&mut self) -> Option<Self::Item> {
                if self.idx >= self.n_memory {
                    return None;
                }

                #[allow(unused_unsafe)]
                unsafe {
                    let item = $get_item(self.buffer, self.idx).unwrap();
                    self.idx += 1;
                    Some(item)
                }
            }

            fn size_hint(&self) -> (usize, Option<usize>) {
                let remaining = self.n_memory - self.idx;

                (remaining, Some(remaining))
            }

            fn count(self) -> usize {
                self.n_memory - self.idx
            }

            fn nth(&mut self, n: usize) -> Option<Self::Item> {
                // Skip `n` items; exhaust the iterator on overflow/out-of-range.
                let (end, overflow) = self.idx.overflowing_add(n);
                if end >= self.n_memory || overflow {
                    self.idx = self.n_memory;
                    None
                } else {
                    #[allow(unused_unsafe)]
                    unsafe {
                        self.idx = end + 1;
                        Some($get_item(self.buffer, end).unwrap())
                    }
                }
            }

            fn last(self) -> Option<Self::Item> {
                if self.idx == self.n_memory {
                    None
                } else {
                    #[allow(unused_unsafe)]
                    unsafe {
                        Some($get_item(self.buffer, self.n_memory - 1).unwrap())
                    }
                }
            }
        }

        #[allow(clippy::redundant_closure_call)]
        impl<'a> DoubleEndedIterator for $name<'a> {
            fn next_back(&mut self) -> Option<Self::Item> {
                if self.idx == self.n_memory {
                    return None;
                }

                #[allow(unused_unsafe)]
                unsafe {
                    self.n_memory -= 1;
                    Some($get_item(self.buffer, self.n_memory).unwrap())
                }
            }

            fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
                // Skip `n` items from the back; exhaust on overflow/crossing.
                let (end, overflow) = self.n_memory.overflowing_sub(n);
                if end <= self.idx || overflow {
                    self.idx = self.n_memory;
                    None
                } else {
                    #[allow(unused_unsafe)]
                    unsafe {
                        self.n_memory = end - 1;
                        Some($get_item(self.buffer, self.n_memory).unwrap())
                    }
                }
            }
        }

        impl<'a> ExactSizeIterator for $name<'a> {}

        impl<'a> std::iter::FusedIterator for $name<'a> {}
    }
);
1105
// Immutable memory iterator yielding borrowed `&MemoryRef`s.
define_iter!(
    Iter,
    &'a BufferRef,
    &'a MemoryRef,
    |buffer: &BufferRef, idx| {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        if ptr.is_null() {
            None
        } else {
            Some(MemoryRef::from_ptr(ptr as *const ffi::GstMemory))
        }
    }
);

// Mutable memory iterator yielding `&mut MemoryRef`s; only constructed when
// all memory is writable (see `iter_memories_mut`).
define_iter!(
    IterMut,
    &'a mut BufferRef,
    &'a mut MemoryRef,
    |buffer: &mut BufferRef, idx| {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        if ptr.is_null() {
            None
        } else {
            Some(MemoryRef::from_mut_ptr(ptr))
        }
    }
);
1133
// Allows `for mem in &buffer_ref { ... }` over the memory blocks.
impl<'a> IntoIterator for &'a BufferRef {
    type IntoIter = Iter<'a>;
    type Item = &'a MemoryRef;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_memories()
    }
}
1142
1143impl From<Memory> for Buffer {
1144 fn from(value: Memory) -> Self {
1145 skip_assert_initialized!();
1146
1147 let mut buffer = Buffer::new();
1148 {
1149 let buffer = buffer.get_mut().unwrap();
1150 buffer.append_memory(value);
1151 }
1152 buffer
1153 }
1154}
1155
1156impl<const N: usize> From<[Memory; N]> for Buffer {
1157 fn from(value: [Memory; N]) -> Self {
1158 skip_assert_initialized!();
1159
1160 let mut buffer = Buffer::new();
1161 {
1162 let buffer = buffer.get_mut().unwrap();
1163 value.into_iter().for_each(|b| buffer.append_memory(b));
1164 }
1165 buffer
1166 }
1167}
1168
1169impl std::iter::FromIterator<Memory> for Buffer {
1170 fn from_iter<T: IntoIterator<Item = Memory>>(iter: T) -> Self {
1171 skip_assert_initialized!();
1172 let iter = iter.into_iter();
1173
1174 let mut buffer = Buffer::new();
1175
1176 {
1177 let buffer = buffer.get_mut().unwrap();
1178 iter.for_each(|m| buffer.append_memory(m));
1179 }
1180
1181 buffer
1182 }
1183}
1184
1185impl std::iter::Extend<Memory> for BufferRef {
1186 fn extend<T: IntoIterator<Item = Memory>>(&mut self, iter: T) {
1187 iter.into_iter().for_each(|m| self.append_memory(m));
1188 }
1189}
1190
// Owned memory iterator: yields reference-counted `Memory` handles via
// `BufferRef::memory()`.
define_iter!(
    IterOwned,
    &'a BufferRef,
    Memory,
    |buffer: &BufferRef, idx| { buffer.memory(idx) }
);
1197
impl fmt::Debug for Buffer {
    // Delegates to the `BufferRef` Debug implementation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        BufferRef::fmt(self, f)
    }
}

impl PartialEq for Buffer {
    // Delegates to the content-based `BufferRef` comparison.
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(self, other)
    }
}

impl Eq for Buffer {}

// Cross-type comparisons so `Buffer == BufferRef` works in both directions.
impl PartialEq<BufferRef> for Buffer {
    fn eq(&self, other: &BufferRef) -> bool {
        BufferRef::eq(self, other)
    }
}
impl PartialEq<Buffer> for BufferRef {
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(other, self)
    }
}
1222
impl fmt::Debug for BufferRef {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use std::cell::RefCell;

        use crate::utils::Displayable;

        // Adapter so an iterator can be rendered with `debug_list` from a
        // shared reference (the iterator must be advanced, hence RefCell).
        struct DebugIter<I>(RefCell<I>);
        impl<I: Iterator> fmt::Debug for DebugIter<I>
        where
            I::Item: fmt::Debug,
        {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_list().entries(&mut *self.0.borrow_mut()).finish()
            }
        }

        f.debug_struct("Buffer")
            .field("ptr", &self.as_ptr())
            .field("pts", &self.pts().display())
            .field("dts", &self.dts().display())
            .field("duration", &self.duration().display())
            .field("size", &self.size())
            .field("offset", &self.offset())
            .field("offset_end", &self.offset_end())
            .field("flags", &self.flags())
            .field(
                "metas",
                &DebugIter(RefCell::new(
                    self.iter_meta::<crate::Meta>().map(|m| m.api()),
                )),
            )
            .finish()
    }
}
1257
1258impl PartialEq for BufferRef {
1259 fn eq(&self, other: &BufferRef) -> bool {
1260 if self.size() != other.size() {
1261 return false;
1262 }
1263
1264 let self_map = self.map_readable();
1265 let other_map = other.map_readable();
1266
1267 match (self_map, other_map) {
1268 (Ok(self_map), Ok(other_map)) => self_map.as_slice().eq(other_map.as_slice()),
1269 _ => false,
1270 }
1271 }
1272}
1273
1274impl Eq for BufferRef {}
1275
1276impl<T> BufferMap<'_, T> {
1277 #[doc(alias = "get_size")]
1278 #[inline]
1279 pub fn size(&self) -> usize {
1280 self.map_info.size
1281 }
1282
1283 #[doc(alias = "get_buffer")]
1284 #[inline]
1285 pub fn buffer(&self) -> &BufferRef {
1286 self.buffer
1287 }
1288
1289 #[inline]
1290 pub fn as_slice(&self) -> &[u8] {
1291 if self.map_info.size == 0 {
1292 return &[];
1293 }
1294 unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
1295 }
1296}
1297
1298impl BufferMap<'_, Writable> {
1299 #[inline]
1300 pub fn as_mut_slice(&mut self) -> &mut [u8] {
1301 if self.map_info.size == 0 {
1302 return &mut [];
1303 }
1304 unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
1305 }
1306}
1307
1308impl<T> AsRef<[u8]> for BufferMap<'_, T> {
1309 #[inline]
1310 fn as_ref(&self) -> &[u8] {
1311 self.as_slice()
1312 }
1313}
1314
1315impl AsMut<[u8]> for BufferMap<'_, Writable> {
1316 #[inline]
1317 fn as_mut(&mut self) -> &mut [u8] {
1318 self.as_mut_slice()
1319 }
1320}
1321
1322impl<T> ops::Deref for BufferMap<'_, T> {
1323 type Target = [u8];
1324
1325 #[inline]
1326 fn deref(&self) -> &[u8] {
1327 self.as_slice()
1328 }
1329}
1330
1331impl ops::DerefMut for BufferMap<'_, Writable> {
1332 #[inline]
1333 fn deref_mut(&mut self) -> &mut [u8] {
1334 self.as_mut_slice()
1335 }
1336}
1337
1338impl<T> fmt::Debug for BufferMap<'_, T> {
1339 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1340 f.debug_tuple("BufferMap").field(&self.buffer()).finish()
1341 }
1342}
1343
1344impl<'a, T> PartialEq for BufferMap<'a, T> {
1345 fn eq(&self, other: &BufferMap<'a, T>) -> bool {
1346 self.as_slice().eq(other.as_slice())
1347 }
1348}
1349
1350impl<T> Eq for BufferMap<'_, T> {}
1351
impl<T> Drop for BufferMap<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` was filled by the successful map call that
        // created this `BufferMap`, and `drop` runs at most once, so the
        // buffer is unmapped exactly once here.
        unsafe {
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1360
// SAFETY: NOTE(review): relies on the mapped GstBuffer memory being safe to
// access from any thread, as for the other GStreamer mini-object wrappers in
// this crate — confirm against upstream reasoning.
unsafe impl<T> Send for BufferMap<'_, T> {}
unsafe impl<T> Sync for BufferMap<'_, T> {}
1363
impl<T> MappedBuffer<T> {
    // Returns the mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // Avoid building a slice from the (possibly null) data pointer
        // when nothing is mapped.
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    // Returns the number of mapped bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    // Returns a reference to the owned, mapped buffer.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer.as_ref()
    }

    // Unmaps the buffer and returns it, consuming the mapping.
    #[inline]
    pub fn into_buffer(self) -> Buffer {
        // Disable `self`'s Drop impl so the buffer is not unmapped twice.
        let mut s = mem::ManuallyDrop::new(self);
        // SAFETY: `s` is never dropped, so moving the buffer out with
        // `ptr::read` cannot lead to a double free.
        let buffer = unsafe { ptr::read(&s.buffer) };
        unsafe {
            ffi::gst_buffer_unmap(buffer.as_mut_ptr(), &mut s.map_info);
        }

        buffer
    }
}
1396
1397impl MappedBuffer<Readable> {
1398 #[doc(alias = "get_buffer")]
1399 #[inline]
1400 pub fn buffer_owned(&self) -> Buffer {
1401 self.buffer.clone()
1402 }
1403}
1404
1405impl MappedBuffer<Writable> {
1406 #[inline]
1407 pub fn as_mut_slice(&mut self) -> &mut [u8] {
1408 if self.map_info.size == 0 {
1409 return &mut [];
1410 }
1411 unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
1412 }
1413}
1414
1415impl<T> AsRef<[u8]> for MappedBuffer<T> {
1416 #[inline]
1417 fn as_ref(&self) -> &[u8] {
1418 self.as_slice()
1419 }
1420}
1421
1422impl AsMut<[u8]> for MappedBuffer<Writable> {
1423 #[inline]
1424 fn as_mut(&mut self) -> &mut [u8] {
1425 self.as_mut_slice()
1426 }
1427}
1428
1429impl<T> ops::Deref for MappedBuffer<T> {
1430 type Target = [u8];
1431
1432 #[inline]
1433 fn deref(&self) -> &[u8] {
1434 self.as_slice()
1435 }
1436}
1437
1438impl ops::DerefMut for MappedBuffer<Writable> {
1439 #[inline]
1440 fn deref_mut(&mut self) -> &mut [u8] {
1441 self.as_mut_slice()
1442 }
1443}
1444
impl<T> Drop for MappedBuffer<T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `map_info` was filled by the successful map call that
        // created this `MappedBuffer`, and `drop` runs at most once, so the
        // buffer is unmapped exactly once here. `into_buffer` suppresses
        // this Drop via `ManuallyDrop` before unmapping manually.
        unsafe {
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1453
1454impl<T> fmt::Debug for MappedBuffer<T> {
1455 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1456 f.debug_tuple("MappedBuffer").field(&self.buffer()).finish()
1457 }
1458}
1459
1460impl<T> PartialEq for MappedBuffer<T> {
1461 fn eq(&self, other: &MappedBuffer<T>) -> bool {
1462 self.as_slice().eq(other.as_slice())
1463 }
1464}
1465
1466impl<T> Eq for MappedBuffer<T> {}
1467
// SAFETY: NOTE(review): relies on the owned buffer and its mapped memory
// being safe to access from any thread, matching `BufferMap` above —
// confirm against upstream reasoning.
unsafe impl<T> Send for MappedBuffer<T> {}
unsafe impl<T> Sync for MappedBuffer<T> {}
1470
// Predefined `BufferCopyFlags` combinations mirroring the C-level
// `GST_BUFFER_COPY_*` constants (used e.g. with `copy_region`).
#[doc(alias = "GST_BUFFER_COPY_METADATA")]
pub const BUFFER_COPY_METADATA: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_METADATA);
#[doc(alias = "GST_BUFFER_COPY_ALL")]
pub const BUFFER_COPY_ALL: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_ALL);
1477
// Helper used by the buffer `dump`/`dump_range` APIs (see the tests below):
// formats (part of) a buffer's content as hex via `Display`/`Debug`.
pub struct Dump<'a> {
    buffer: &'a BufferRef,
    // Bounds of the byte range to dump.
    start: Bound<usize>,
    end: Bound<usize>,
}
1483
// Iterator yielding a buffer's bytes in chunks of up to 16, transparently
// crossing the boundaries between the buffer's memories.
struct BufferChunked16Iter<'a> {
    buffer: &'a BufferRef,
    // Index of the memory currently being read, and the total memory count.
    mem_idx: usize,
    mem_len: usize,
    // Currently mapped memory (mapped lazily) and read offset into it.
    map: Option<crate::memory::MemoryMap<'a, crate::memory::Readable>>,
    map_offset: usize,
    // Number of bytes still to be produced.
    len: usize,
}
1492
impl Iterator for BufferChunked16Iter<'_> {
    // A chunk of up to 16 bytes plus the number of valid bytes in it.
    type Item = ([u8; 16], usize);

    fn next(&mut self) -> Option<Self::Item> {
        if self.mem_idx == self.mem_len || self.len == 0 {
            return None;
        }

        let mut item = [0u8; 16];
        let mut data = item.as_mut_slice();

        // Fill `item` from consecutive memories until the chunk is full,
        // the memories are exhausted, or the requested length is reached.
        while !data.is_empty() && self.mem_idx < self.mem_len && self.len > 0 {
            if self.map.is_none() {
                // Lazily map the current memory for reading.
                let mem = self.buffer.peek_memory(self.mem_idx);
                self.map = Some(mem.map_readable().expect("failed to map memory"));
            }

            let map = self.map.as_ref().unwrap();
            debug_assert!(self.map_offset < map.len());
            // Copy the smallest of: what is left in this map, what still
            // fits in the chunk, and what was requested overall.
            let copy = cmp::min(cmp::min(map.len() - self.map_offset, data.len()), self.len);
            data[..copy].copy_from_slice(&map[self.map_offset..][..copy]);
            self.map_offset += copy;
            self.len -= copy;
            data = &mut data[copy..];

            // Current memory fully consumed: drop its map and advance.
            if self.map_offset == map.len() {
                self.map = None;
                self.map_offset = 0;
                self.mem_idx += 1;
            }
        }

        let copied = 16 - data.len();
        Some((item, copied))
    }
}
1530
impl Dump<'_> {
    // Shared implementation behind `Display` (plain hex) and `Debug`
    // (offset- and ASCII-annotated hex) formatting of the selected range.
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let n_memory = self.buffer.n_memory();
        if n_memory == 0 {
            write!(f, "<empty>")?;
            return Ok(());
        }

        use std::fmt::Write;

        let len = self.buffer.size();

        // Resolve the start bound into an inclusive starting byte index,
        // bailing out if the start lies beyond the buffer.
        let mut start_idx = match self.start {
            Bound::Included(idx) if idx >= len => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            Bound::Excluded(idx) if idx.checked_add(1).map_or(true, |idx| idx >= len) => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx,
            Bound::Excluded(idx) => idx + 1,
            Bound::Unbounded => 0,
        };

        // Resolve the end bound into an exclusive end byte index.
        let end_idx = match self.end {
            Bound::Included(idx) if idx.checked_add(1).map_or(true, |idx| idx > len) => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Excluded(idx) if idx > len => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx + 1,
            Bound::Excluded(idx) => idx,
            Bound::Unbounded => len,
        };

        if start_idx >= end_idx {
            write!(f, "<empty range>")?;
            return Ok(());
        }

        // Locate the memory containing the first byte of the range and the
        // offset of that byte inside it.
        let (memory_range, skip) = self
            .buffer
            .find_memory(start_idx..)
            .expect("can't find memory");

        let chunks = BufferChunked16Iter {
            buffer: self.buffer,
            mem_idx: memory_range.start,
            mem_len: n_memory,
            map: None,
            map_offset: skip,
            len: end_idx - start_idx,
        };

        if debug {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                // Offset column width scales with the largest offset printed.
                match end_idx {
                    0x00_00..=0xff_ff => write!(f, "{:04x}: ", start_idx)?,
                    0x01_00_00..=0xff_ff_ff => write!(f, "{:06x}: ", start_idx)?,
                    0x01_00_00_00..=0xff_ff_ff_ff => write!(f, "{:08x}: ", start_idx)?,
                    _ => write!(f, "{:016x}: ", start_idx)?,
                }

                // Hex column, bytes separated by single spaces.
                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {:02x}", v)?;
                    } else {
                        write!(f, "{:02x}", v)?;
                    }
                }

                // Pad a short (final) line so the ASCII column stays aligned.
                for _ in line.len()..16 {
                    write!(f, "   ")?;
                }
                write!(f, " ")?;

                // ASCII column: printable characters as-is, '.' otherwise.
                for v in line {
                    if v.is_ascii() && !v.is_ascii_control() {
                        f.write_char((*v).into())?;
                    } else {
                        f.write_char('.')?;
                    }
                }

                // No trailing newline after the last line.
                start_idx = start_idx.saturating_add(16);
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        } else {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                // Hex column only, no offsets or ASCII rendering.
                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {:02x}", v)?;
                    } else {
                        write!(f, "{:02x}", v)?;
                    }
                }

                // No trailing newline after the last line.
                start_idx = start_idx.saturating_add(16);
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        }
    }
}
1654
1655impl fmt::Display for Dump<'_> {
1656 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1657 self.fmt(f, false)
1658 }
1659}
1660
1661impl fmt::Debug for Dump<'_> {
1662 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1663 self.fmt(f, true)
1664 }
1665}
1666
#[cfg(test)]
mod tests {
    use super::*;

    // Timestamp/offset setters and getters round-trip.
    #[test]
    fn test_fields() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();

        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(ClockTime::NSECOND);
            buffer.set_dts(2 * ClockTime::NSECOND);
            buffer.set_offset(3);
            buffer.set_offset_end(4);
            buffer.set_duration(Some(5 * ClockTime::NSECOND));
        }
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer.dts(), Some(2 * ClockTime::NSECOND));
        assert_eq!(buffer.offset(), 3);
        assert_eq!(buffer.offset_end(), 4);
        assert_eq!(buffer.duration(), Some(5 * ClockTime::NSECOND));
    }

    // Copy-on-write semantics: cloning makes the buffer shared (and thus
    // not mutable) until make_mut() creates a private copy.
    #[test]
    fn test_writability() {
        crate::init().unwrap();

        let mut buffer = Buffer::from_slice(vec![1, 2, 3, 4]);
        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
        }
        assert_ne!(buffer.get_mut(), None);
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(Some(ClockTime::NSECOND));
        }

        let mut buffer2 = buffer.clone();
        assert_eq!(buffer.get_mut(), None);

        assert_eq!(buffer2.as_ptr(), buffer.as_ptr());

        {
            let buffer2 = buffer2.make_mut();
            assert_ne!(buffer2.as_ptr(), buffer.as_ptr());

            buffer2.set_pts(Some(2 * ClockTime::NSECOND));

            let mut data = buffer2.map_writable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
            data.as_mut_slice()[0] = 0;
        }

        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer2.pts(), Some(2 * ClockTime::NSECOND));

        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());

            let data = buffer2.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![0, 2, 3, 4].as_slice());
        }
    }

    // Access to individual memories through all the accessor/iterator APIs.
    #[test]
    #[allow(clippy::cognitive_complexity)]
    fn test_memories() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 10]));
        }

        assert!(buffer.is_all_memory_writable());
        assert_eq!(buffer.n_memory(), 5);
        assert_eq!(buffer.size(), 30);

        for i in 0..5 {
            {
                let mem = buffer.memory(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let mem = buffer.peek_memory(i);
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let buffer = buffer.get_mut().unwrap();
                let mem = buffer.peek_memory_mut(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_writable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }
        }

        {
            let buffer = buffer.get_mut().unwrap();
            let mut last = 0;
            for (i, mem) in buffer.iter_memories_mut().unwrap().enumerate() {
                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_writable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                last = i;
            }

            assert_eq!(last, 4);
        }

        let mut last = 0;
        for (i, mem) in buffer.iter_memories().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);

        let mut last = 0;
        for (i, mem) in buffer.iter_memories_owned().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);
    }

    // foreach_meta visits the metas in insertion order.
    #[test]
    fn test_meta_foreach() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);
    }

    // foreach_meta_mut can selectively remove metas while iterating.
    #[test]
    fn test_meta_foreach_mut() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.get_mut().unwrap().foreach_meta_mut(|mut meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            if meta.timestamp() == ClockTime::SECOND {
                ControlFlow::Continue(BufferMetaForeachAction::Remove)
            } else {
                ControlFlow::Continue(BufferMetaForeachAction::Keep)
            }
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);

        // Only the kept meta must remain after the mutable pass.
        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO][..], &res[..]);
    }

    // ptr_eq compares object identity, not content.
    #[test]
    fn test_ptr_eq() {
        crate::init().unwrap();

        let buffer1 = Buffer::new();
        assert!(BufferRef::ptr_eq(&buffer1, &buffer1));
        let buffer2 = Buffer::new();
        assert!(!BufferRef::ptr_eq(&buffer1, &buffer2));
    }

    // copy_region accepts all range forms and rejects out-of-bounds ranges.
    #[test]
    fn test_copy_region() {
        crate::init().unwrap();

        let buffer1 = Buffer::from_mut_slice(vec![0, 1, 2, 3, 4, 5, 6, 7]);
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );

        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..100).is_err());

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..=4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3, 4]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1]);
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1, 2]);
    }

    // Dump output formats (Display vs Debug) and range handling.
    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();
        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04                                      ...."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump_range(..)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04                                      ...."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..2)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02                                            .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(2..=3)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04                                            .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>",);
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>",);
        s.clear();

        let buffer = crate::Buffer::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................\n\
             0010: 00 00 00                                         ..."
        );
        s.clear();
    }

    // Dumps spanning several memories are stitched together seamlessly.
    #[test]
    fn test_dump_multi_memories() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut buffer = crate::Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();

            let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![5, 6, 7, 8]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![9, 10, 11, 12]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![13, 14, 15, 16]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![17, 18, 19]);
            buffer.append_memory(mem);
        }

        let mut s = String::new();
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 ................\n\
             0010: 11 12 13                                         ..."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10\n11 12 13"
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(2..)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 11 12 ................\n\
             0012: 13                                               ."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..17)).unwrap();
        assert_eq!(
            s,
            "000e: 0f 10 11                                         ..."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..20)).unwrap();
        assert_eq!(s, "<end out of range>");
        s.clear();

        #[allow(clippy::reversed_empty_ranges)]
        {
            write!(&mut s, "{:?}", buffer.dump_range(23..20)).unwrap();
            assert_eq!(s, "<start out of range>");
            s.clear();
        }
    }
}