1 | // Take a look at the license at the top of the repository in the LICENSE file. |
2 | |
3 | use std::{ |
4 | cmp, fmt, |
5 | marker::PhantomData, |
6 | mem, ops, |
7 | ops::{Bound, ControlFlow, Range, RangeBounds}, |
8 | ptr, slice, |
9 | }; |
10 | |
11 | use glib::translate::*; |
12 | |
13 | use crate::{ |
14 | ffi, meta::*, BufferCursor, BufferFlags, BufferRefCursor, ClockTime, Memory, MemoryRef, |
15 | }; |
16 | |
// Zero-variant marker types used as the `T` parameter of `BufferMap`,
// `MappedBuffer` and the cursor types to record, at the type level, whether
// the mapping/cursor was created for read-only or read-write access.
pub enum Readable {}
pub enum Writable {}

/// Action returned by the closure passed to `BufferRef::foreach_meta_mut`
/// for each meta item it visits.
#[derive (Copy, Clone, Debug, PartialEq, Eq)]
pub enum BufferMetaForeachAction {
    /// Leave this meta attached to the buffer.
    Keep,
    /// Remove this meta from the buffer.
    Remove,
}
25 | |
// Generates the refcounted `Buffer` wrapper and the borrowed `BufferRef`
// view over the C `GstBuffer` mini object, with GType registration via
// gst_buffer_get_type().
mini_object_wrapper!(Buffer, BufferRef, ffi::GstBuffer, || {
    ffi::gst_buffer_get_type()
});

/// A mapping of (part of) a buffer's memory, borrowed from a `BufferRef`.
///
/// Keeps the C `GstMapInfo` alive for the duration of the borrow. `T` is the
/// `Readable`/`Writable` marker recording the access mode the map was created
/// with. Unmapping presumably happens in a `Drop` impl outside this chunk.
pub struct BufferMap<'a, T> {
    buffer: &'a BufferRef,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}

/// Like `BufferMap`, but owns the underlying `Buffer` so the mapping is not
/// tied to a borrow; the buffer can be recovered after unmapping.
pub struct MappedBuffer<T> {
    buffer: Buffer,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
41 | |
42 | impl Buffer { |
    /// Creates a new, empty buffer with no memory attached.
    #[doc (alias = "gst_buffer_new" )]
    #[inline ]
    pub fn new() -> Self {
        assert_initialized_main_thread!();

        // SAFETY: gst_buffer_new() hands us a new reference we fully own,
        // hence `from_glib_full`.
        unsafe { from_glib_full(ffi::gst_buffer_new()) }
    }
50 | |
    /// Creates a new buffer with `size` bytes allocated from the default
    /// allocator (both allocator and allocation params are passed as NULL).
    ///
    /// # Errors
    ///
    /// Returns an error when the C allocation fails (NULL is returned).
    #[doc (alias = "gst_buffer_new_allocate" )]
    #[doc (alias = "gst_buffer_new_and_alloc" )]
    #[inline ]
    pub fn with_size(size: usize) -> Result<Self, glib::BoolError> {
        assert_initialized_main_thread!();

        unsafe {
            // NULL allocator + NULL params selects the defaults on the C side.
            Option::<_>::from_glib_full(ffi::gst_buffer_new_allocate(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
            .ok_or_else(|| glib::bool_error!("Failed to allocate buffer" ))
        }
    }
66 | |
67 | #[doc (alias = "gst_buffer_new_wrapped" )] |
68 | #[doc (alias = "gst_buffer_new_wrapped_full" )] |
69 | #[inline ] |
70 | pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self { |
71 | assert_initialized_main_thread!(); |
72 | |
73 | let mem = Memory::from_mut_slice(slice); |
74 | let mut buffer = Buffer::new(); |
75 | { |
76 | let buffer = buffer.get_mut().unwrap(); |
77 | buffer.append_memory(mem); |
78 | buffer.unset_flags(BufferFlags::TAG_MEMORY); |
79 | } |
80 | |
81 | buffer |
82 | } |
83 | |
84 | #[doc (alias = "gst_buffer_new_wrapped" )] |
85 | #[doc (alias = "gst_buffer_new_wrapped_full" )] |
86 | #[inline ] |
87 | pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self { |
88 | assert_initialized_main_thread!(); |
89 | |
90 | let mem = Memory::from_slice(slice); |
91 | let mut buffer = Buffer::new(); |
92 | { |
93 | let buffer = buffer.get_mut().unwrap(); |
94 | buffer.append_memory(mem); |
95 | buffer.unset_flags(BufferFlags::TAG_MEMORY); |
96 | } |
97 | |
98 | buffer |
99 | } |
100 | |
    /// Consumes the buffer and maps it for reading.
    ///
    /// On mapping failure the original buffer is handed back in the `Err`
    /// variant so the caller does not lose it.
    #[doc (alias = "gst_buffer_map" )]
    #[inline ]
    pub fn into_mapped_buffer_readable(self) -> Result<MappedBuffer<Readable>, Self> {
        unsafe {
            // map_info is only assumed initialized after the C call reports
            // success.
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }
122 | |
    /// Consumes the buffer and maps it for reading and writing.
    ///
    /// On mapping failure the original buffer is handed back in the `Err`
    /// variant so the caller does not lose it.
    #[doc (alias = "gst_buffer_map" )]
    #[inline ]
    pub fn into_mapped_buffer_writable(self) -> Result<MappedBuffer<Writable>, Self> {
        unsafe {
            // map_info is only assumed initialized after the C call reports
            // success.
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }
144 | |
    /// Consumes the buffer into a read cursor over its content.
    #[inline ]
    pub fn into_cursor_readable(self) -> BufferCursor<Readable> {
        BufferCursor::new_readable(self)
    }

    /// Consumes the buffer into a write cursor; fails when the cursor cannot
    /// be created (see `BufferCursor::new_writable`).
    #[inline ]
    pub fn into_cursor_writable(self) -> Result<BufferCursor<Writable>, glib::BoolError> {
        BufferCursor::new_writable(self)
    }
154 | |
    /// Appends the content of `other` to this buffer, consuming `other`.
    #[doc (alias = "gst_buffer_append" )]
    pub fn append(&mut self, other: Self) {
        unsafe {
            // gst_buffer_append takes ownership of both arguments and may
            // return a different buffer pointer, so swap ours for the result.
            let ptr = ffi::gst_buffer_append(self.as_mut_ptr(), other.into_glib_ptr());
            self.replace_ptr(ptr);
        }
    }
162 | } |
163 | |
impl Default for Buffer {
    // An empty buffer, same as Buffer::new().
    fn default() -> Self {
        Self::new()
    }
}
169 | |
170 | impl BufferRef { |
    /// Maps the whole buffer for reading.
    ///
    /// # Errors
    ///
    /// Returns an error when the C mapping call fails.
    #[doc (alias = "gst_buffer_map" )]
    #[inline ]
    pub fn map_readable(&self) -> Result<BufferMap<Readable>, glib::BoolError> {
        unsafe {
            // map_info is only assumed initialized after the C call succeeds.
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_buffer_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable" ))
            }
        }
    }

    /// Maps the whole buffer for reading and writing; requires `&mut self`.
    ///
    /// # Errors
    ///
    /// Returns an error when the C mapping call fails.
    #[doc (alias = "gst_buffer_map" )]
    #[inline ]
    pub fn map_writable(&mut self) -> Result<BufferMap<Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable" ))
            }
        }
    }
211 | |
    // Translates a Rust range over *memory indices* into the `(idx, length)`
    // pair the gst_buffer_*_range() C functions take, where `length` is a
    // gint counting memories. Rejects ranges that fall outside the current
    // number of memories.
    fn memory_range_into_idx_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(u32, i32), glib::BoolError> {
        let n_memory = self.n_memory();
        debug_assert!(n_memory <= u32::MAX as usize);

        let start_idx = match range.start_bound() {
            // Inclusive start at or past the end addresses nothing.
            ops::Bound::Included(idx) if *idx >= n_memory => {
                return Err(glib::bool_error!("Invalid range start" ));
            }
            ops::Bound::Included(idx) => *idx,
            // Exclusive start: first usable index is idx + 1; reject both
            // overflow and starting at/after the end.
            ops::Bound::Excluded(idx) if idx.checked_add(1).map_or(true, |idx| idx >= n_memory) => {
                return Err(glib::bool_error!("Invalid range start" ));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // Inclusive end: one-past-the-end is idx + 1; reject overflow and
            // ends beyond n_memory.
            ops::Bound::Included(idx) if idx.checked_add(1).map_or(true, |idx| idx > n_memory) => {
                return Err(glib::bool_error!("Invalid range end" ));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > n_memory => {
                return Err(glib::bool_error!("Invalid range end" ));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => n_memory,
        };

        Ok((
            start_idx as u32,
            // The C length parameter is a gint, so a span wider than i32::MAX
            // cannot be represented.
            i32::try_from(end_idx - start_idx).map_err(|_| glib::bool_error!("Too large range" ))?,
        ))
    }
248 | |
    /// Maps the given *memory index* range of the buffer for reading.
    ///
    /// # Errors
    ///
    /// Returns an error when the range is invalid or the C mapping fails.
    #[doc (alias = "gst_buffer_map_range" )]
    #[inline ]
    pub fn map_range_readable(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<Readable>, glib::BoolError> {
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            // map_info is only assumed initialized after the C call succeeds.
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable" ))
            }
        }
    }

    /// Maps the given *memory index* range of the buffer for reading and
    /// writing; requires `&mut self`.
    ///
    /// # Errors
    ///
    /// Returns an error when the range is invalid or the C mapping fails.
    #[doc (alias = "gst_buffer_map_range" )]
    #[inline ]
    pub fn map_range_writable(
        &mut self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<Writable>, glib::BoolError> {
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable" ))
            }
        }
    }
304 | |
    // Translates a Rust range over *byte offsets* into the `(offset, size)`
    // pair the byte-oriented C functions take. Same bound handling as
    // memory_range_into_idx_len, but against the buffer's byte size.
    pub(crate) fn byte_range_into_offset_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(usize, usize), glib::BoolError> {
        let size = self.size();

        let start_idx = match range.start_bound() {
            // Inclusive start at or past the end addresses nothing.
            ops::Bound::Included(idx) if *idx >= size => {
                return Err(glib::bool_error!("Invalid range start" ));
            }
            ops::Bound::Included(idx) => *idx,
            // Exclusive start: first usable offset is idx + 1; reject both
            // overflow and starting at/after the end.
            ops::Bound::Excluded(idx) if idx.checked_add(1).map_or(true, |idx| idx >= size) => {
                return Err(glib::bool_error!("Invalid range start" ));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            // Inclusive end: one-past-the-end is idx + 1; reject overflow and
            // ends beyond the buffer size.
            ops::Bound::Included(idx) if idx.checked_add(1).map_or(true, |idx| idx > size) => {
                return Err(glib::bool_error!("Invalid range end" ));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > size => {
                return Err(glib::bool_error!("Invalid range end" ));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => size,
        };

        Ok((start_idx, end_idx - start_idx))
    }
337 | |
    /// Copies the given byte range of this buffer into a new buffer, with
    /// `flags` selecting which aspects (memory, metadata, timestamps, ...)
    /// are copied.
    ///
    /// # Errors
    ///
    /// Returns an error for an invalid range or when the C copy fails.
    #[doc (alias = "gst_buffer_copy_region" )]
    pub fn copy_region(
        &self,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<Buffer, glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_copy_region(
                self.as_mut_ptr(),
                flags.into_glib(),
                offset,
                size,
            ))
            .ok_or_else(|| glib::bool_error!("Failed to copy region of buffer" ))
        }
    }

    /// Copies the given byte range of this buffer into `dest`, with `flags`
    /// selecting which aspects are copied.
    ///
    /// # Errors
    ///
    /// Returns an error for an invalid range or when the C copy fails.
    #[doc (alias = "gst_buffer_copy_into" )]
    pub fn copy_into(
        &self,
        dest: &mut BufferRef,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<(), glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            glib::result_from_gboolean!(
                ffi::gst_buffer_copy_into(
                    dest.as_mut_ptr(),
                    self.as_mut_ptr(),
                    flags.into_glib(),
                    offset,
                    size,
                ),
                "Failed to copy into destination buffer" ,
            )
        }
    }
379 | |
    /// Copies `slice` into the buffer starting at byte `offset`.
    ///
    /// On a short copy, returns `Err` carrying the number of bytes that were
    /// actually copied.
    ///
    /// # Panics
    ///
    /// Panics if `offset + slice.len()` exceeds the buffer's maximum size.
    #[doc (alias = "gst_buffer_fill" )]
    pub fn copy_from_slice(&mut self, offset: usize, slice: &[u8]) -> Result<(), usize> {
        let maxsize = self.maxsize();
        let size = slice.len();

        // Checked in two steps to avoid overflow in `offset + size`.
        assert!(maxsize >= offset && maxsize - offset >= size);

        let copied = unsafe {
            let src = slice.as_ptr();
            ffi::gst_buffer_fill(
                self.as_mut_ptr(),
                offset,
                src as glib::ffi::gconstpointer,
                size,
            )
        };

        if copied == size {
            Ok(())
        } else {
            Err(copied)
        }
    }
403 | |
404 | #[doc (alias = "gst_buffer_extract" )] |
405 | pub fn copy_to_slice(&self, offset: usize, slice: &mut [u8]) -> Result<(), usize> { |
406 | let maxsize = self.size(); |
407 | let size = slice.len(); |
408 | |
409 | assert!(maxsize >= offset && maxsize - offset >= size); |
410 | |
411 | let copied = unsafe { |
412 | let dest = slice.as_mut_ptr(); |
413 | ffi::gst_buffer_extract(self.as_mut_ptr(), offset, dest as glib::ffi::gpointer, size) |
414 | }; |
415 | |
416 | if copied == size { |
417 | Ok(()) |
418 | } else { |
419 | Err(copied) |
420 | } |
421 | } |
422 | |
    /// Creates a deep copy of the buffer (memory contents copied, not shared).
    ///
    /// # Errors
    ///
    /// Returns an error when the C copy returns NULL.
    #[doc (alias = "gst_buffer_copy_deep" )]
    pub fn copy_deep(&self) -> Result<Buffer, glib::BoolError> {
        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_copy_deep(self.as_ptr()))
                .ok_or_else(|| glib::bool_error!("Failed to deep copy buffer" ))
        }
    }

    /// Returns the total size of the buffer's data in bytes.
    #[doc (alias = "get_size" )]
    #[doc (alias = "gst_buffer_get_size" )]
    pub fn size(&self) -> usize {
        unsafe { ffi::gst_buffer_get_size(self.as_mut_ptr()) }
    }

    /// Returns the maximum size the buffer's data can grow to in bytes.
    #[doc (alias = "get_maxsize" )]
    pub fn maxsize(&self) -> usize {
        unsafe {
            let mut maxsize = mem::MaybeUninit::uninit();
            // Query the full range (idx 0, length -1 = all memories); only the
            // maxsize out-parameter is requested, the offset is not needed.
            // NOTE(review): the gboolean result is ignored — presumably the
            // full range can never be invalid here; confirm against the C docs.
            ffi::gst_buffer_get_sizes_range(
                self.as_mut_ptr(),
                0,
                -1,
                ptr::null_mut(),
                maxsize.as_mut_ptr(),
            );

            maxsize.assume_init()
        }
    }
452 | |
    /// Sets the buffer's data size in bytes.
    ///
    /// # Panics
    ///
    /// Panics if `size` exceeds `maxsize()`.
    #[doc (alias = "gst_buffer_set_size" )]
    pub fn set_size(&mut self, size: usize) {
        assert!(self.maxsize() >= size);

        unsafe {
            // The C API takes a gssize; the assert above keeps the value in a
            // range the buffer can actually hold.
            ffi::gst_buffer_set_size(self.as_mut_ptr(), size as isize);
        }
    }
461 | |
    /// Reads the raw `offset` field of the underlying `GstBuffer`.
    #[doc (alias = "get_offset" )]
    #[doc (alias = "GST_BUFFER_OFFSET" )]
    #[inline ]
    pub fn offset(&self) -> u64 {
        self.0.offset
    }

    /// Writes the raw `offset` field of the underlying `GstBuffer`.
    #[inline ]
    pub fn set_offset(&mut self, offset: u64) {
        self.0.offset = offset;
    }

    /// Reads the raw `offset_end` field of the underlying `GstBuffer`.
    #[doc (alias = "get_offset_end" )]
    #[doc (alias = "GST_BUFFER_OFFSET_END" )]
    #[inline ]
    pub fn offset_end(&self) -> u64 {
        self.0.offset_end
    }

    /// Writes the raw `offset_end` field of the underlying `GstBuffer`.
    #[inline ]
    pub fn set_offset_end(&mut self, offset_end: u64) {
        self.0.offset_end = offset_end;
    }

    /// Returns the presentation timestamp; the C "none" sentinel maps to
    /// `None`.
    #[doc (alias = "get_pts" )]
    #[doc (alias = "GST_BUFFER_PTS" )]
    #[inline ]
    pub fn pts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.pts) }
    }

    /// Sets the presentation timestamp; `None` stores the C "none" sentinel.
    #[inline ]
    pub fn set_pts(&mut self, pts: impl Into<Option<ClockTime>>) {
        self.0.pts = pts.into().into_glib();
    }

    /// Returns the decoding timestamp; the C "none" sentinel maps to `None`.
    #[doc (alias = "get_dts" )]
    #[doc (alias = "GST_BUFFER_DTS" )]
    #[inline ]
    pub fn dts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.dts) }
    }

    /// Sets the decoding timestamp; `None` stores the C "none" sentinel.
    #[inline ]
    pub fn set_dts(&mut self, dts: impl Into<Option<ClockTime>>) {
        self.0.dts = dts.into().into_glib();
    }

    /// Returns the DTS when set, otherwise falls back to the PTS.
    #[doc (alias = "get_dts_or_pts" )]
    #[doc (alias = "GST_BUFFER_DTS_OR_PTS" )]
    #[inline ]
    pub fn dts_or_pts(&self) -> Option<ClockTime> {
        let val = self.dts();
        if val.is_none() {
            self.pts()
        } else {
            val
        }
    }

    /// Returns the buffer duration; the C "none" sentinel maps to `None`.
    #[doc (alias = "get_duration" )]
    #[doc (alias = "GST_BUFFER_DURATION" )]
    #[inline ]
    pub fn duration(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.duration) }
    }

    /// Sets the buffer duration; `None` stores the C "none" sentinel.
    #[inline ]
    pub fn set_duration(&mut self, duration: impl Into<Option<ClockTime>>) {
        self.0.duration = duration.into().into_glib();
    }
533 | |
    /// Returns the buffer's flags; unknown bits from the C side are dropped.
    #[doc (alias = "get_flags" )]
    #[doc (alias = "GST_BUFFER_FLAGS" )]
    #[inline ]
    pub fn flags(&self) -> BufferFlags {
        BufferFlags::from_bits_truncate(self.0.mini_object.flags)
    }

    /// ORs the given flags into the buffer's flags (existing flags are kept).
    #[doc (alias = "GST_BUFFER_FLAG_SET" )]
    #[inline ]
    pub fn set_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags |= flags.bits();
    }

    /// Clears the given flags from the buffer's flags.
    #[doc (alias = "GST_BUFFER_FLAG_UNSET" )]
    #[inline ]
    pub fn unset_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags &= !flags.bits();
    }
552 | |
    /// Returns the first meta of type `T` attached to the buffer, if any.
    #[doc (alias = "get_meta" )]
    #[doc (alias = "gst_buffer_get_meta" )]
    #[inline ]
    pub fn meta<T: MetaAPI>(&self) -> Option<MetaRef<T>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_ptr(self, meta as *const <T as MetaAPI>::GstType))
            }
        }
    }

    /// Returns the first meta of type `T` attached to the buffer, mutably,
    /// if any.
    #[doc (alias = "get_meta_mut" )]
    #[inline ]
    pub fn meta_mut<T: MetaAPI>(&mut self) -> Option<MetaRefMut<T, crate::meta::Standalone>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_mut_ptr(self, meta as *mut <T as MetaAPI>::GstType))
            }
        }
    }

    /// Iterates over all metas of type `T` attached to the buffer.
    pub fn iter_meta<T: MetaAPI>(&self) -> MetaIter<T> {
        MetaIter::new(self)
    }

    /// Mutably iterates over all metas of type `T` attached to the buffer.
    pub fn iter_meta_mut<T: MetaAPI>(&mut self) -> MetaIterMut<T> {
        MetaIterMut::new(self)
    }
587 | |
    /// Calls `func` for every meta on the buffer; `ControlFlow::Break` stops
    /// iteration early. The returned bool is the gboolean result of the C
    /// call converted to Rust.
    #[doc (alias = "gst_buffer_foreach_meta" )]
    pub fn foreach_meta<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(&self, func: F) -> bool {
        // C callback: recover the user closure from user_data and translate
        // its ControlFlow result into the "continue iterating" gboolean.
        unsafe extern "C" fn trampoline<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            let func = user_data as *mut F;
            let res = (*func)(Meta::from_ptr(BufferRef::from_ptr(buffer), *meta));

            matches!(res, ControlFlow::Continue(_)).into_glib()
        }

        unsafe {
            let mut func = func;
            // The closure only has to outlive this call, so a raw pointer to
            // the stack slot is sufficient as user_data.
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }

    /// Like `foreach_meta`, but the closure may additionally request removal
    /// of the visited meta via `BufferMetaForeachAction::Remove` — in either
    /// the `Continue` or the `Break` payload.
    #[doc (alias = "gst_buffer_foreach_meta" )]
    pub fn foreach_meta_mut<
        F: FnMut(
            MetaRefMut<Meta, crate::meta::Iterated>,
        ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
    >(
        &mut self,
        func: F,
    ) -> bool {
        // C callback mirroring the read-only trampoline, plus translation of
        // the Remove action into the C convention of nulling *meta.
        unsafe extern "C" fn trampoline<
            F: FnMut(
                MetaRefMut<Meta, crate::meta::Iterated>,
            ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
        >(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            let func = user_data as *mut F;
            let res = (*func)(Meta::from_mut_ptr(BufferRef::from_mut_ptr(buffer), *meta));

            // Split the ControlFlow into "keep iterating?" and the per-meta
            // action so both variants can carry an action.
            let (cont, action) = match res {
                ControlFlow::Continue(action) => (true, action),
                ControlFlow::Break(action) => (false, action),
            };

            if action == BufferMetaForeachAction::Remove {
                // Writing NULL back through the out-pointer asks the C side
                // to drop this meta.
                *meta = ptr::null_mut();
            }

            cont.into_glib()
        }

        unsafe {
            let mut func = func;
            // The closure only has to outlive this call, so a raw pointer to
            // the stack slot is sufficient as user_data.
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }
657 | |
    /// Appends `mem` after the buffer's existing memory, taking ownership.
    #[doc (alias = "gst_buffer_append_memory" )]
    pub fn append_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_append_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }
662 | |
    /// Finds which memories span the given byte range.
    ///
    /// On success returns the memory index range covering the bytes and the
    /// number of bytes to skip inside the first memory; `None` when the byte
    /// range is invalid or not covered.
    #[doc (alias = "gst_buffer_find_memory" )]
    pub fn find_memory(&self, range: impl RangeBounds<usize>) -> Option<(Range<usize>, usize)> {
        let (offset, size) = self.byte_range_into_offset_len(range).ok()?;

        unsafe {
            // Out-parameters, only assumed initialized on success.
            let mut idx = mem::MaybeUninit::uninit();
            let mut length = mem::MaybeUninit::uninit();
            let mut skip = mem::MaybeUninit::uninit();

            let res = from_glib(ffi::gst_buffer_find_memory(
                self.as_mut_ptr(),
                offset,
                size,
                idx.as_mut_ptr(),
                length.as_mut_ptr(),
                skip.as_mut_ptr(),
            ));

            if res {
                let idx = idx.assume_init() as usize;
                let length = length.assume_init() as usize;
                let skip = skip.assume_init();
                Some((idx..(idx + length), skip))
            } else {
                None
            }
        }
    }
691 | |
    /// Returns all of the buffer's memory merged into a single `Memory`, or
    /// `None` when the C call returns NULL.
    #[doc (alias = "get_all_memory" )]
    #[doc (alias = "gst_buffer_get_all_memory" )]
    pub fn all_memory(&self) -> Option<Memory> {
        unsafe { from_glib_full(ffi::gst_buffer_get_all_memory(self.as_mut_ptr())) }
    }

    /// Returns the global maximum number of memories a buffer can hold.
    #[doc (alias = "get_max_memory" )]
    #[doc (alias = "gst_buffer_get_max_memory" )]
    pub fn max_memory() -> usize {
        unsafe { ffi::gst_buffer_get_max_memory() as usize }
    }
703 | |
704 | #[doc (alias = "get_memory" )] |
705 | #[doc (alias = "gst_buffer_get_memory" )] |
706 | pub fn memory(&self, idx: usize) -> Option<Memory> { |
707 | if idx >= self.n_memory() { |
708 | return None; |
709 | } |
710 | unsafe { |
711 | let res = ffi::gst_buffer_get_memory(self.as_mut_ptr(), idx as u32); |
712 | Some(from_glib_full(res)) |
713 | } |
714 | } |
715 | |
    /// Returns the memories in the given index range combined into one
    /// `Memory`; `None` for an invalid range or a NULL result from C.
    #[doc (alias = "get_memory_range" )]
    #[doc (alias = "gst_buffer_get_memory_range" )]
    pub fn memory_range(&self, range: impl RangeBounds<usize>) -> Option<Memory> {
        let (idx, len) = self.memory_range_into_idx_len(range).ok()?;

        unsafe {
            let res = ffi::gst_buffer_get_memory_range(self.as_mut_ptr(), idx, len);
            from_glib_full(res)
        }
    }
726 | |
727 | #[doc (alias = "gst_buffer_insert_memory" )] |
728 | pub fn insert_memory(&mut self, idx: impl Into<Option<usize>>, mem: Memory) { |
729 | let n_memory = self.n_memory(); |
730 | let idx = idx.into(); |
731 | let idx = idx.unwrap_or(n_memory); |
732 | assert!(idx <= self.n_memory()); |
733 | unsafe { ffi::gst_buffer_insert_memory(self.as_mut_ptr(), idx as i32, mem.into_glib_ptr()) } |
734 | } |
735 | |
    /// Returns whether every memory of the buffer is writable.
    #[doc (alias = "gst_buffer_is_all_memory_writable" )]
    pub fn is_all_memory_writable(&self) -> bool {
        unsafe { from_glib(ffi::gst_buffer_is_all_memory_writable(self.as_mut_ptr())) }
    }

    /// Returns whether every memory in the given index range is writable;
    /// an invalid range yields `false` rather than panicking.
    #[doc (alias = "gst_buffer_is_memory_range_writable" )]
    pub fn is_memory_range_writable(&self, range: impl RangeBounds<usize>) -> bool {
        let Some((idx, len)) = self.memory_range_into_idx_len(range).ok() else {
            return false;
        };

        unsafe {
            from_glib(ffi::gst_buffer_is_memory_range_writable(
                self.as_mut_ptr(),
                idx,
                len,
            ))
        }
    }
755 | |
756 | #[doc (alias = "gst_buffer_n_memory" )] |
757 | pub fn n_memory(&self) -> usize { |
758 | unsafe { ffi::gst_buffer_n_memory(self.as_ptr() as *mut _) as usize } |
759 | } |
760 | |
    /// Borrows the memory at index `idx` without taking a new reference.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc (alias = "gst_buffer_peek_memory" )]
    pub fn peek_memory(&self, idx: usize) -> &MemoryRef {
        assert!(idx < self.n_memory());
        unsafe { MemoryRef::from_ptr(ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32)) }
    }

    /// Mutably borrows the memory at index `idx`.
    ///
    /// # Errors
    ///
    /// Returns an error when the memory mini object is not writable (shared).
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc (alias = "gst_buffer_peek_memory" )]
    pub fn peek_memory_mut(&mut self, idx: usize) -> Result<&mut MemoryRef, glib::BoolError> {
        assert!(idx < self.n_memory());
        unsafe {
            let mem = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32);
            // Only hand out &mut if the mini object is exclusively owned.
            if ffi::gst_mini_object_is_writable(mem as *mut _) == glib::ffi::GFALSE {
                Err(glib::bool_error!("Memory not writable" ))
            } else {
                Ok(MemoryRef::from_mut_ptr(mem))
            }
        }
    }
779 | |
    /// Prepends `mem` before the buffer's existing memory, taking ownership.
    #[doc (alias = "gst_buffer_prepend_memory" )]
    pub fn prepend_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_prepend_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Removes all memory from the buffer.
    #[doc (alias = "gst_buffer_remove_all_memory" )]
    pub fn remove_all_memory(&mut self) {
        unsafe { ffi::gst_buffer_remove_all_memory(self.as_mut_ptr()) }
    }

    /// Removes the memory at index `idx`.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc (alias = "gst_buffer_remove_memory" )]
    pub fn remove_memory(&mut self, idx: usize) {
        assert!(idx < self.n_memory());
        unsafe { ffi::gst_buffer_remove_memory(self.as_mut_ptr(), idx as u32) }
    }

    /// Removes the memories in the given index range.
    ///
    /// # Panics
    ///
    /// Panics if the range is invalid for the current number of memories.
    #[doc (alias = "gst_buffer_remove_memory_range" )]
    pub fn remove_memory_range(&mut self, range: impl RangeBounds<usize>) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range" );

        unsafe { ffi::gst_buffer_remove_memory_range(self.as_mut_ptr(), idx, len) }
    }
804 | |
    /// Replaces all of the buffer's memory with `mem`, taking ownership.
    #[doc (alias = "gst_buffer_replace_all_memory" )]
    pub fn replace_all_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_replace_all_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Replaces the memory at index `idx` with `mem`, taking ownership.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc (alias = "gst_buffer_replace_memory" )]
    pub fn replace_memory(&mut self, idx: usize, mem: Memory) {
        assert!(idx < self.n_memory());
        unsafe {
            ffi::gst_buffer_replace_memory(self.as_mut_ptr(), idx as u32, mem.into_glib_ptr())
        }
    }

    /// Replaces the memories in the given index range with `mem`.
    ///
    /// # Panics
    ///
    /// Panics if the range is invalid for the current number of memories.
    #[doc (alias = "gst_buffer_replace_memory_range" )]
    pub fn replace_memory_range(&mut self, range: impl RangeBounds<usize>, mem: Memory) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range" );

        unsafe {
            ffi::gst_buffer_replace_memory_range(self.as_mut_ptr(), idx, len, mem.into_glib_ptr())
        }
    }
828 | |
    /// Iterates over the buffer's memories as borrowed `&MemoryRef`s.
    pub fn iter_memories(&self) -> Iter {
        Iter::new(self)
    }
832 | |
833 | pub fn iter_memories_mut(&mut self) -> Result<IterMut, glib::BoolError> { |
834 | if !self.is_all_memory_writable() { |
835 | Err(glib::bool_error!("Not all memory are writable" )) |
836 | } else { |
837 | Ok(IterMut::new(self)) |
838 | } |
839 | } |
840 | |
    /// Iterates over the buffer's memories, yielding an owned reference per
    /// memory.
    pub fn iter_memories_owned(&self) -> IterOwned {
        IterOwned::new(self)
    }

    /// Creates a read cursor borrowing this buffer.
    pub fn as_cursor_readable(&self) -> BufferRefCursor<&BufferRef> {
        BufferRefCursor::new_readable(self)
    }

    /// Creates a write cursor borrowing this buffer; fails when the cursor
    /// cannot be created (see `BufferRefCursor::new_writable`).
    pub fn as_cursor_writable(
        &mut self,
    ) -> Result<BufferRefCursor<&mut BufferRef>, glib::BoolError> {
        BufferRefCursor::new_writable(self)
    }
854 | |
855 | #[doc (alias = "gst_util_dump_buffer" )] |
856 | pub fn dump(&self) -> Dump { |
857 | Dump { |
858 | buffer: self, |
859 | start: Bound::Unbounded, |
860 | end: Bound::Unbounded, |
861 | } |
862 | } |
863 | |
864 | #[doc (alias = "gst_util_dump_buffer" )] |
865 | pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump { |
866 | Dump { |
867 | buffer: self, |
868 | start: range.start_bound().cloned(), |
869 | end: range.end_bound().cloned(), |
870 | } |
871 | } |
872 | } |
873 | |
// Generates the meta iterator types (`MetaIter`, `MetaIterMut`) over a
// buffer's metas of a given `MetaAPI` type. Parameters:
//   $name           — generated iterator type name
//   $typ            — borrowed buffer type stored in the iterator
//   $mtyp           — item type yielded by the iterator
//   $prepare_buffer — closure turning the raw buffer pointer back into $typ
//   $from_ptr       — closure building an item from buffer + raw meta pointer
macro_rules! define_meta_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $prepare_buffer:expr, $from_ptr:expr) => {
    pub struct $name<'a, T: MetaAPI + 'a> {
        buffer: $typ,
        // Opaque iteration state owned by gst_buffer_iterate_meta().
        state: glib::ffi::gpointer,
        meta_api: glib::Type,
        items: PhantomData<$mtyp>,
    }

    unsafe impl<'a, T: MetaAPI> Send for $name<'a, T> { }
    unsafe impl<'a, T: MetaAPI> Sync for $name<'a, T> { }

    impl<'a, T: MetaAPI> fmt::Debug for $name<'a, T> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            f.debug_struct(stringify!($name))
                .field("buffer" , &self.buffer)
                .field("state" , &self.state)
                .field("meta_api" , &self.meta_api)
                .field("items" , &self.items)
                .finish()
        }
    }

    impl<'a, T: MetaAPI> $name<'a, T> {
        fn new(buffer: $typ) -> $name<'a, T> {
            skip_assert_initialized!();

            $name {
                buffer,
                state: ptr::null_mut(),
                meta_api: T::meta_api(),
                items: PhantomData,
            }
        }
    }

    #[allow(clippy::redundant_closure_call)]
    impl<'a, T: MetaAPI> Iterator for $name<'a, T> {
        type Item = $mtyp;

        fn next(&mut self) -> Option<Self::Item> {
            loop {
                unsafe {
                    let meta = ffi::gst_buffer_iterate_meta(self.buffer.as_mut_ptr(), &mut self.state);

                    if meta.is_null() {
                        // End of the meta list.
                        return None;
                    } else if self.meta_api == glib::Type::INVALID || glib::Type::from_glib((*(*meta).info).api) == self.meta_api {
                        // Either iterating all metas (INVALID api) or the
                        // meta's api matches the requested type.
                        // FIXME: Workaround for a lifetime issue with the mutable iterator only
                        let buffer = $prepare_buffer(self.buffer.as_mut_ptr());
                        let item = $from_ptr(buffer, meta);
                        return Some(item);
                    }
                    // Otherwise skip this meta and keep looping.
                }
            }
        }
    }

    impl<'a, T: MetaAPI> std::iter::FusedIterator for $name<'a, T> { }
    }
);
935 | |
// Shared-borrow meta iterator: yields `MetaRef<'a, T>` items.
define_meta_iter!(
    MetaIter,
    &'a BufferRef,
    MetaRef<'a, T>,
    |buffer: *const ffi::GstBuffer| BufferRef::from_ptr(buffer),
    |buffer, meta| T::from_ptr(buffer, meta as *const <T as MetaAPI>::GstType)
);
// Mutable-borrow meta iterator: yields `MetaRefMut` items tagged `Iterated`.
define_meta_iter!(
    MetaIterMut,
    &'a mut BufferRef,
    MetaRefMut<'a, T, crate::meta::Iterated>,
    |buffer: *mut ffi::GstBuffer| BufferRef::from_mut_ptr(buffer),
    |buffer: &'a mut BufferRef, meta| T::from_mut_ptr(buffer, meta as *mut <T as MetaAPI>::GstType)
);
950 | |
// Generates a double-ended, exact-size, fused iterator over a buffer's
// memories by index. Parameters:
//   $name     — generated iterator type name
//   $typ      — borrowed buffer type stored in the iterator
//   $mtyp     — item type yielded per memory
//   $get_item — closure fetching the memory at a given index
// Invariant: items at indices in `idx..n_memory` have not been yielded yet;
// the front advances `idx`, the back shrinks `n_memory`.
macro_rules! define_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $get_item:expr) => {
    pub struct $name<'a> {
        buffer: $typ,
        idx: usize,
        n_memory: usize,
    }

    impl<'a> fmt::Debug for $name<'a> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            f.debug_struct(stringify!($name))
                .field("buffer" , &self.buffer)
                .field("idx" , &self.idx)
                .field("n_memory" , &self.n_memory)
                .finish()
        }
    }

    impl<'a> $name<'a> {
        fn new(buffer: $typ) -> $name<'a> {
            skip_assert_initialized!();

            // Snapshot the count once; the iterator does not track later
            // changes to the buffer's memory list.
            let n_memory = buffer.n_memory();

            $name {
                buffer,
                idx: 0,
                n_memory,
            }
        }
    }

    #[allow(clippy::redundant_closure_call)]
    impl<'a> Iterator for $name<'a> {
        type Item = $mtyp;

        fn next(&mut self) -> Option<Self::Item> {
            if self.idx >= self.n_memory {
                return None;
            }

            #[allow(unused_unsafe)]
            unsafe {
                let item = $get_item(self.buffer, self.idx).unwrap();
                self.idx += 1;
                Some(item)
            }
        }

        fn size_hint(&self) -> (usize, Option<usize>) {
            // Exact: the remaining window is idx..n_memory.
            let remaining = self.n_memory - self.idx;

            (remaining, Some(remaining))
        }

        fn count(self) -> usize {
            self.n_memory - self.idx
        }

        fn nth(&mut self, n: usize) -> Option<Self::Item> {
            // Skip n items from the front, then yield the next one.
            let (end, overflow) = self.idx.overflowing_add(n);
            if end >= self.n_memory || overflow {
                // Exhaust the iterator.
                self.idx = self.n_memory;
                None
            } else {
                #[allow(unused_unsafe)]
                unsafe {
                    self.idx = end + 1;
                    Some($get_item(self.buffer, end).unwrap())
                }
            }
        }

        fn last(self) -> Option<Self::Item> {
            if self.idx == self.n_memory {
                None
            } else {
                #[allow(unused_unsafe)]
                unsafe {
                    Some($get_item(self.buffer, self.n_memory - 1).unwrap())
                }
            }
        }
    }

    #[allow(clippy::redundant_closure_call)]
    impl<'a> DoubleEndedIterator for $name<'a> {
        fn next_back(&mut self) -> Option<Self::Item> {
            if self.idx == self.n_memory {
                return None;
            }

            #[allow(unused_unsafe)]
            unsafe {
                // Shrink the back of the window and yield the element there.
                self.n_memory -= 1;
                Some($get_item(self.buffer, self.n_memory).unwrap())
            }
        }

        fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
            // Skip n items from the back, then yield the next one from the back.
            let (end, overflow) = self.n_memory.overflowing_sub(n);
            if end <= self.idx || overflow {
                // Exhaust the iterator.
                self.idx = self.n_memory;
                None
            } else {
                #[allow(unused_unsafe)]
                unsafe {
                    self.n_memory = end - 1;
                    Some($get_item(self.buffer, self.n_memory).unwrap())
                }
            }
        }
    }

    impl<'a> ExactSizeIterator for $name<'a> {}

    impl<'a> std::iter::FusedIterator for $name<'a> {}
    }
);
1070 | |
// Iterator over `&MemoryRef` items of an immutably borrowed buffer.
define_iter!(
    Iter,
    &'a BufferRef,
    &'a MemoryRef,
    |buffer: &BufferRef, idx| {
        // Peek does not take a reference; the buffer keeps ownership of the memory.
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        if ptr.is_null() {
            None
        } else {
            Some(MemoryRef::from_ptr(ptr as *const ffi::GstMemory))
        }
    }
);
1084 | |
// Iterator over `&mut MemoryRef` items of a mutably borrowed buffer.
define_iter!(
    IterMut,
    &'a mut BufferRef,
    &'a mut MemoryRef,
    |buffer: &mut BufferRef, idx| {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        if ptr.is_null() {
            None
        } else {
            Some(MemoryRef::from_mut_ptr(ptr))
        }
    }
);
1098 | |
// Allows `for mem in &buffer` iteration over the buffer's memories.
impl<'a> IntoIterator for &'a BufferRef {
    type IntoIter = Iter<'a>;
    type Item = &'a MemoryRef;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_memories()
    }
}
1107 | |
1108 | impl From<Memory> for Buffer { |
1109 | fn from(value: Memory) -> Self { |
1110 | skip_assert_initialized!(); |
1111 | |
1112 | let mut buffer: Buffer = Buffer::new(); |
1113 | { |
1114 | let buffer: &mut BufferRef = buffer.get_mut().unwrap(); |
1115 | buffer.append_memory(mem:value); |
1116 | } |
1117 | buffer |
1118 | } |
1119 | } |
1120 | |
1121 | impl<const N: usize> From<[Memory; N]> for Buffer { |
1122 | fn from(value: [Memory; N]) -> Self { |
1123 | skip_assert_initialized!(); |
1124 | |
1125 | let mut buffer: Buffer = Buffer::new(); |
1126 | { |
1127 | let buffer: &mut BufferRef = buffer.get_mut().unwrap(); |
1128 | value.into_iter().for_each(|b: Memory| buffer.append_memory(mem:b)); |
1129 | } |
1130 | buffer |
1131 | } |
1132 | } |
1133 | |
1134 | impl std::iter::FromIterator<Memory> for Buffer { |
1135 | fn from_iter<T: IntoIterator<Item = Memory>>(iter: T) -> Self { |
1136 | skip_assert_initialized!(); |
1137 | let iter: ::IntoIter = iter.into_iter(); |
1138 | |
1139 | let mut buffer: Buffer = Buffer::new(); |
1140 | |
1141 | { |
1142 | let buffer: &mut BufferRef = buffer.get_mut().unwrap(); |
1143 | iter.for_each(|m: Memory| buffer.append_memory(mem:m)); |
1144 | } |
1145 | |
1146 | buffer |
1147 | } |
1148 | } |
1149 | |
1150 | impl std::iter::Extend<Memory> for BufferRef { |
1151 | fn extend<T: IntoIterator<Item = Memory>>(&mut self, iter: T) { |
1152 | iter.into_iter().for_each(|m: Memory| self.append_memory(mem:m)); |
1153 | } |
1154 | } |
1155 | |
// Iterator over owned `Memory` items (each with its own reference) of a buffer.
define_iter!(
    IterOwned,
    &'a BufferRef,
    Memory,
    |buffer: &BufferRef, idx| { buffer.memory(idx) }
);
1162 | |
1163 | impl fmt::Debug for Buffer { |
1164 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
1165 | BufferRef::fmt(self, f) |
1166 | } |
1167 | } |
1168 | |
1169 | impl PartialEq for Buffer { |
1170 | fn eq(&self, other: &Buffer) -> bool { |
1171 | BufferRef::eq(self, other) |
1172 | } |
1173 | } |
1174 | |
1175 | impl Eq for Buffer {} |
1176 | |
1177 | impl PartialEq<BufferRef> for Buffer { |
1178 | fn eq(&self, other: &BufferRef) -> bool { |
1179 | BufferRef::eq(self, other) |
1180 | } |
1181 | } |
1182 | impl PartialEq<Buffer> for BufferRef { |
1183 | fn eq(&self, other: &Buffer) -> bool { |
1184 | BufferRef::eq(self:other, self) |
1185 | } |
1186 | } |
1187 | |
impl fmt::Debug for BufferRef {
    // Formats the buffer's metadata (pointer, timestamps, size, offsets, flags)
    // plus the API types of all attached metas. Buffer payload bytes are not
    // printed; use `dump()` for that.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use std::cell::RefCell;

        use crate::utils::Displayable;

        // Adapter so a (non-Clone) iterator can be formatted as a debug list
        // from behind `&self`; `RefCell` provides the needed interior mutability.
        struct DebugIter<I>(RefCell<I>);
        impl<I: Iterator> fmt::Debug for DebugIter<I>
        where
            I::Item: fmt::Debug,
        {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_list().entries(&mut *self.0.borrow_mut()).finish()
            }
        }

        f.debug_struct("Buffer")
            .field("ptr", &self.as_ptr())
            .field("pts", &self.pts().display())
            .field("dts", &self.dts().display())
            .field("duration", &self.duration().display())
            .field("size", &self.size())
            .field("offset", &self.offset())
            .field("offset_end", &self.offset_end())
            .field("flags", &self.flags())
            .field(
                "metas",
                &DebugIter(RefCell::new(
                    self.iter_meta::<crate::Meta>().map(|m| m.api()),
                )),
            )
            .finish()
    }
}
1222 | |
1223 | impl PartialEq for BufferRef { |
1224 | fn eq(&self, other: &BufferRef) -> bool { |
1225 | if self.size() != other.size() { |
1226 | return false; |
1227 | } |
1228 | |
1229 | let self_map: Result, …> = self.map_readable(); |
1230 | let other_map: Result, …> = other.map_readable(); |
1231 | |
1232 | match (self_map, other_map) { |
1233 | (Ok(self_map: BufferMap<'_, Readable>), Ok(other_map: BufferMap<'_, Readable>)) => self_map.as_slice().eq(other_map.as_slice()), |
1234 | _ => false, |
1235 | } |
1236 | } |
1237 | } |
1238 | |
1239 | impl Eq for BufferRef {} |
1240 | |
1241 | impl<T> BufferMap<'_, T> { |
1242 | #[doc (alias = "get_size" )] |
1243 | #[inline ] |
1244 | pub fn size(&self) -> usize { |
1245 | self.map_info.size |
1246 | } |
1247 | |
1248 | #[doc (alias = "get_buffer" )] |
1249 | #[inline ] |
1250 | pub fn buffer(&self) -> &BufferRef { |
1251 | self.buffer |
1252 | } |
1253 | |
1254 | #[inline ] |
1255 | pub fn as_slice(&self) -> &[u8] { |
1256 | if self.map_info.size == 0 { |
1257 | return &[]; |
1258 | } |
1259 | unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) } |
1260 | } |
1261 | } |
1262 | |
1263 | impl BufferMap<'_, Writable> { |
1264 | #[inline ] |
1265 | pub fn as_mut_slice(&mut self) -> &mut [u8] { |
1266 | if self.map_info.size == 0 { |
1267 | return &mut []; |
1268 | } |
1269 | unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) } |
1270 | } |
1271 | } |
1272 | |
1273 | impl<T> AsRef<[u8]> for BufferMap<'_, T> { |
1274 | #[inline ] |
1275 | fn as_ref(&self) -> &[u8] { |
1276 | self.as_slice() |
1277 | } |
1278 | } |
1279 | |
1280 | impl AsMut<[u8]> for BufferMap<'_, Writable> { |
1281 | #[inline ] |
1282 | fn as_mut(&mut self) -> &mut [u8] { |
1283 | self.as_mut_slice() |
1284 | } |
1285 | } |
1286 | |
1287 | impl<T> ops::Deref for BufferMap<'_, T> { |
1288 | type Target = [u8]; |
1289 | |
1290 | #[inline ] |
1291 | fn deref(&self) -> &[u8] { |
1292 | self.as_slice() |
1293 | } |
1294 | } |
1295 | |
1296 | impl ops::DerefMut for BufferMap<'_, Writable> { |
1297 | #[inline ] |
1298 | fn deref_mut(&mut self) -> &mut [u8] { |
1299 | self.as_mut_slice() |
1300 | } |
1301 | } |
1302 | |
1303 | impl<T> fmt::Debug for BufferMap<'_, T> { |
1304 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
1305 | f.debug_tuple(name:"BufferMap" ).field(&self.buffer()).finish() |
1306 | } |
1307 | } |
1308 | |
1309 | impl<'a, T> PartialEq for BufferMap<'a, T> { |
1310 | fn eq(&self, other: &BufferMap<'a, T>) -> bool { |
1311 | self.as_slice().eq(other.as_slice()) |
1312 | } |
1313 | } |
1314 | |
1315 | impl<T> Eq for BufferMap<'_, T> {} |
1316 | |
impl<T> Drop for BufferMap<'_, T> {
    // Unmaps the buffer when the map goes out of scope.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `map_info` was filled by a successful gst_buffer_map()
            // on this same buffer and is unmapped exactly once, here.
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}

// SAFETY: asserts that a mapping may be accessed/dropped from any thread,
// mirroring GStreamer's threading model for mapped buffer memory.
// NOTE(review): soundness relies on the underlying GstMemory being
// thread-safe to unmap from another thread — confirm against GStreamer docs.
unsafe impl<T> Send for BufferMap<'_, T> {}
unsafe impl<T> Sync for BufferMap<'_, T> {}
1328 | |
// Accessors shared by readable and writable owned buffer maps.
impl<T> MappedBuffer<T> {
    // The mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // An empty map may carry a null data pointer, which
        // `slice::from_raw_parts` does not accept.
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    // Number of mapped bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    // The mapped buffer, borrowed.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer.as_ref()
    }

    // Unmaps and returns the underlying buffer.
    #[inline]
    pub fn into_buffer(self) -> Buffer {
        // ManuallyDrop suppresses this type's Drop impl (which would unmap a
        // second time); the buffer is moved out via ptr::read and then
        // unmapped exactly once, manually.
        let mut s = mem::ManuallyDrop::new(self);
        let buffer = unsafe { ptr::read(&s.buffer) };
        unsafe {
            // SAFETY: `map_info` came from a successful map of `buffer`;
            // `s` is never dropped, so no double-unmap can occur.
            ffi::gst_buffer_unmap(buffer.as_mut_ptr(), &mut s.map_info);
        }

        buffer
    }
}
1361 | |
1362 | impl MappedBuffer<Readable> { |
1363 | #[doc (alias = "get_buffer" )] |
1364 | #[inline ] |
1365 | pub fn buffer_owned(&self) -> Buffer { |
1366 | self.buffer.clone() |
1367 | } |
1368 | } |
1369 | |
1370 | impl MappedBuffer<Writable> { |
1371 | #[inline ] |
1372 | pub fn as_mut_slice(&mut self) -> &mut [u8] { |
1373 | if self.map_info.size == 0 { |
1374 | return &mut []; |
1375 | } |
1376 | unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) } |
1377 | } |
1378 | } |
1379 | |
1380 | impl<T> AsRef<[u8]> for MappedBuffer<T> { |
1381 | #[inline ] |
1382 | fn as_ref(&self) -> &[u8] { |
1383 | self.as_slice() |
1384 | } |
1385 | } |
1386 | |
1387 | impl AsMut<[u8]> for MappedBuffer<Writable> { |
1388 | #[inline ] |
1389 | fn as_mut(&mut self) -> &mut [u8] { |
1390 | self.as_mut_slice() |
1391 | } |
1392 | } |
1393 | |
1394 | impl<T> ops::Deref for MappedBuffer<T> { |
1395 | type Target = [u8]; |
1396 | |
1397 | #[inline ] |
1398 | fn deref(&self) -> &[u8] { |
1399 | self.as_slice() |
1400 | } |
1401 | } |
1402 | |
1403 | impl ops::DerefMut for MappedBuffer<Writable> { |
1404 | #[inline ] |
1405 | fn deref_mut(&mut self) -> &mut [u8] { |
1406 | self.as_mut_slice() |
1407 | } |
1408 | } |
1409 | |
impl<T> Drop for MappedBuffer<T> {
    // Unmaps the buffer when the owned map goes out of scope.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `map_info` was filled by a successful gst_buffer_map()
            // on this buffer; `into_buffer()` bypasses Drop, so this is the
            // only unmap on this path.
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1418 | |
1419 | impl<T> fmt::Debug for MappedBuffer<T> { |
1420 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
1421 | f.debug_tuple(name:"MappedBuffer" ).field(&self.buffer()).finish() |
1422 | } |
1423 | } |
1424 | |
1425 | impl<T> PartialEq for MappedBuffer<T> { |
1426 | fn eq(&self, other: &MappedBuffer<T>) -> bool { |
1427 | self.as_slice().eq(other.as_slice()) |
1428 | } |
1429 | } |
1430 | |
1431 | impl<T> Eq for MappedBuffer<T> {} |
1432 | |
// SAFETY: asserts that an owned mapping may be accessed/dropped from any
// thread, matching the `BufferMap` impls above.
// NOTE(review): soundness relies on GStreamer allowing unmap from a different
// thread than the one that mapped — confirm against GStreamer docs.
unsafe impl<T> Send for MappedBuffer<T> {}
unsafe impl<T> Sync for MappedBuffer<T> {}
1435 | |
1436 | #[doc (alias = "GST_BUFFER_COPY_METADATA" )] |
1437 | pub const BUFFER_COPY_METADATA: crate::BufferCopyFlags = |
1438 | crate::BufferCopyFlags::from_bits_truncate(bits:ffi::GST_BUFFER_COPY_METADATA); |
1439 | #[doc (alias = "GST_BUFFER_COPY_ALL" )] |
1440 | pub const BUFFER_COPY_ALL: crate::BufferCopyFlags = |
1441 | crate::BufferCopyFlags::from_bits_truncate(bits:ffi::GST_BUFFER_COPY_ALL); |
1442 | |
// Formatting helper for hex-dumping (a byte range of) a buffer.
// `Display` prints the raw hex bytes; `Debug` adds offsets and an ASCII column.
pub struct Dump<'a> {
    buffer: &'a BufferRef,
    // Start bound of the byte range to dump (as passed to `dump_range`).
    start: Bound<usize>,
    // End bound of the byte range to dump.
    end: Bound<usize>,
}
1448 | |
// Iterates over a buffer's bytes in chunks of up to 16, transparently
// crossing memory boundaries. Used by `Dump` to emit hexdump lines.
struct BufferChunked16Iter<'a> {
    buffer: &'a BufferRef,
    // Index of the memory currently being read.
    mem_idx: usize,
    // Total number of memories in the buffer (exclusive upper bound for mem_idx).
    mem_len: usize,
    // Active read mapping of the current memory, if any.
    map: Option<crate::memory::MemoryMap<'a, crate::memory::Readable>>,
    // Read position within the active mapping.
    map_offset: usize,
    // Remaining number of bytes to produce overall.
    len: usize,
}
1457 | |
impl Iterator for BufferChunked16Iter<'_> {
    // FIXME: Return a `&'self [u8]` once there's some GAT iterator trait
    // Yields a fixed 16-byte array plus the number of valid bytes in it
    // (the final chunk may be short).
    type Item = ([u8; 16], usize);

    fn next(&mut self) -> Option<Self::Item> {
        if self.mem_idx == self.mem_len || self.len == 0 {
            return None;
        }

        let mut item = [0u8; 16];
        // `data` shrinks from the front as bytes are copied in.
        let mut data = item.as_mut_slice();

        while !data.is_empty() && self.mem_idx < self.mem_len && self.len > 0 {
            // Lazily map the current memory on first use.
            if self.map.is_none() {
                let mem = self.buffer.peek_memory(self.mem_idx);
                self.map = Some(mem.map_readable().expect("failed to map memory"));
            }

            let map = self.map.as_ref().unwrap();
            debug_assert!(self.map_offset < map.len());
            // Copy as much as fits: bounded by the mapping remainder, the
            // chunk remainder and the overall remaining length.
            let copy = cmp::min(cmp::min(map.len() - self.map_offset, data.len()), self.len);
            data[..copy].copy_from_slice(&map[self.map_offset..][..copy]);
            self.map_offset += copy;
            self.len -= copy;
            data = &mut data[copy..];

            // Current mapping exhausted: drop it and advance to the next memory.
            if self.map_offset == map.len() {
                self.map = None;
                self.map_offset = 0;
                self.mem_idx += 1;
            }
        }

        let copied = 16 - data.len();
        Some((item, copied))
    }
}
1495 | |
impl Dump<'_> {
    // Shared implementation behind `Display` (`debug == false`: bytes only)
    // and `Debug` (`debug == true`: offset column + ASCII gutter).
    // Out-of-range or empty ranges print a diagnostic placeholder instead of
    // panicking like slice indexing would.
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        let n_memory = self.buffer.n_memory();
        if n_memory == 0 {
            write!(f, "<empty>")?;
            return Ok(());
        }

        use std::fmt::Write;

        let len = self.buffer.size();

        // Kind of re-implementation of slice indexing to allow handling out of range values better
        // with specific output strings
        let mut start_idx = match self.start {
            Bound::Included(idx) if idx >= len => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            // checked_add guards against usize overflow of an Excluded bound.
            Bound::Excluded(idx) if idx.checked_add(1).map_or(true, |idx| idx >= len) => {
                write!(f, "<start out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx,
            Bound::Excluded(idx) => idx + 1,
            Bound::Unbounded => 0,
        };

        // `end_idx` is exclusive from here on.
        let end_idx = match self.end {
            Bound::Included(idx) if idx.checked_add(1).map_or(true, |idx| idx > len) => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Excluded(idx) if idx > len => {
                write!(f, "<end out of range>")?;
                return Ok(());
            }
            Bound::Included(idx) => idx + 1,
            Bound::Excluded(idx) => idx,
            Bound::Unbounded => len,
        };

        if start_idx >= end_idx {
            write!(f, "<empty range>")?;
            return Ok(());
        }

        // This can't really fail because of the above
        let (memory_range, skip) = self
            .buffer
            .find_memory(start_idx..)
            .expect("can't find memory");

        let chunks = BufferChunked16Iter {
            buffer: self.buffer,
            mem_idx: memory_range.start,
            mem_len: n_memory,
            map: None,
            // Byte offset of `start_idx` within the first relevant memory.
            map_offset: skip,
            len: end_idx - start_idx,
        };

        if debug {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                // Offset column width scales with the largest offset printed.
                match end_idx {
                    0x00_00..=0xff_ff => write!(f, " {:04x}: ", start_idx)?,
                    0x01_00_00..=0xff_ff_ff => write!(f, " {:06x}: ", start_idx)?,
                    0x01_00_00_00..=0xff_ff_ff_ff => write!(f, " {:08x}: ", start_idx)?,
                    _ => write!(f, " {:016x}: ", start_idx)?,
                }

                // NOTE(review): both branches emit the same format string here;
                // presumably the first byte should omit the leading separator —
                // verify against the intended hexdump layout.
                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {:02x}", v)?;
                    } else {
                        write!(f, " {:02x}", v)?;
                    }
                }

                // Pad short (final) lines so the ASCII gutter stays aligned.
                for _ in line.len()..16 {
                    write!(f, " ")?;
                }
                write!(f, " ")?;

                // ASCII gutter: printable characters as-is, everything else as '.'.
                for v in line {
                    if v.is_ascii() && !v.is_ascii_control() {
                        f.write_char((*v).into())?;
                    } else {
                        f.write_char('.')?;
                    }
                }

                start_idx = start_idx.saturating_add(16);
                // No trailing newline after the last line.
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        } else {
            for (line, line_len) in chunks {
                let line = &line[..line_len];

                // NOTE(review): same duplicated-branch pattern as above — verify.
                for (i, v) in line.iter().enumerate() {
                    if i > 0 {
                        write!(f, " {:02x}", v)?;
                    } else {
                        write!(f, " {:02x}", v)?;
                    }
                }

                start_idx = start_idx.saturating_add(16);
                if start_idx < end_idx {
                    writeln!(f)?;
                }
            }

            Ok(())
        }
    }
}
1619 | |
1620 | impl fmt::Display for Dump<'_> { |
1621 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
1622 | self.fmt(f, debug:false) |
1623 | } |
1624 | } |
1625 | |
1626 | impl fmt::Debug for Dump<'_> { |
1627 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
1628 | self.fmt(f, debug:true) |
1629 | } |
1630 | } |
1631 | |
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trips pts/dts/offset/offset_end/duration through their setters.
    #[test]
    fn test_fields() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();

        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(ClockTime::NSECOND);
            buffer.set_dts(2 * ClockTime::NSECOND);
            buffer.set_offset(3);
            buffer.set_offset_end(4);
            buffer.set_duration(Some(5 * ClockTime::NSECOND));
        }
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer.dts(), Some(2 * ClockTime::NSECOND));
        assert_eq!(buffer.offset(), 3);
        assert_eq!(buffer.offset_end(), 4);
        assert_eq!(buffer.duration(), Some(5 * ClockTime::NSECOND));
    }

    // Verifies copy-on-write: a clone shares the pointer and blocks get_mut(),
    // and make_mut() produces an independent copy.
    #[test]
    fn test_writability() {
        crate::init().unwrap();

        let mut buffer = Buffer::from_slice(vec![1, 2, 3, 4]);
        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
        }
        assert_ne!(buffer.get_mut(), None);
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(Some(ClockTime::NSECOND));
        }

        let mut buffer2 = buffer.clone();
        // Two references exist now, so in-place mutation must be refused.
        assert_eq!(buffer.get_mut(), None);

        assert_eq!(buffer2.as_ptr(), buffer.as_ptr());

        {
            // make_mut() must deep-copy since the buffer is shared.
            let buffer2 = buffer2.make_mut();
            assert_ne!(buffer2.as_ptr(), buffer.as_ptr());

            buffer2.set_pts(Some(2 * ClockTime::NSECOND));

            let mut data = buffer2.map_writable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
            data.as_mut_slice()[0] = 0;
        }

        // The original must be unaffected by the copy's changes.
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer2.pts(), Some(2 * ClockTime::NSECOND));

        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());

            let data = buffer2.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![0, 2, 3, 4].as_slice());
        }
    }

    // Exercises all memory accessors and the three iterator flavors on a
    // buffer of five memories (four of size 5, one of size 10).
    #[test]
    #[allow(clippy::cognitive_complexity)]
    fn test_memories() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 10]));
        }

        assert!(buffer.is_all_memory_writable());
        assert_eq!(buffer.n_memory(), 5);
        assert_eq!(buffer.size(), 30);

        for i in 0..5 {
            {
                let mem = buffer.memory(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let mem = buffer.peek_memory(i);
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let buffer = buffer.get_mut().unwrap();
                let mem = buffer.peek_memory_mut(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_writable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }
        }

        {
            let buffer = buffer.get_mut().unwrap();
            let mut last = 0;
            for (i, mem) in buffer.iter_memories_mut().unwrap().enumerate() {
                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_writable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                last = i;
            }

            assert_eq!(last, 4);
        }

        let mut last = 0;
        for (i, mem) in buffer.iter_memories().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);

        let mut last = 0;
        for (i, mem) in buffer.iter_memories_owned().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);
    }

    // foreach_meta must visit metas in insertion order.
    #[test]
    fn test_meta_foreach() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);
    }

    // foreach_meta_mut with Remove must drop the selected meta; a second
    // pass confirms only the kept meta remains.
    #[test]
    fn test_meta_foreach_mut() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.get_mut().unwrap().foreach_meta_mut(|mut meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            if meta.timestamp() == ClockTime::SECOND {
                ControlFlow::Continue(BufferMetaForeachAction::Remove)
            } else {
                ControlFlow::Continue(BufferMetaForeachAction::Keep)
            }
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);

        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO][..], &res[..]);
    }

    // ptr_eq compares identity (the underlying GstBuffer pointer), not content.
    #[test]
    fn test_ptr_eq() {
        crate::init().unwrap();

        let buffer1 = Buffer::new();
        assert!(BufferRef::ptr_eq(&buffer1, &buffer1));
        let buffer2 = Buffer::new();
        assert!(!BufferRef::ptr_eq(&buffer1, &buffer2));
    }

    // copy_region must accept every RangeBounds flavor, reject out-of-range
    // bounds with Err, and copy exactly the selected bytes.
    #[test]
    fn test_copy_region() {
        crate::init().unwrap();

        let buffer1 = Buffer::from_mut_slice(vec![0, 1, 2, 3, 4, 5, 6, 7]);
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );

        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..100).is_err());

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..=4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3, 4]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1]);
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1, 2]);
    }

    // Pins the exact Display/Debug hexdump output of `dump()`/`dump_range()`,
    // including the out-of-range placeholder strings and multi-line wrapping.
    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();
        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump_range(..)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..2)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(2..=3)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>",);
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>",);
        s.clear();

        let buffer = crate::Buffer::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................ \n\
             0010: 00 00 00 ..."
        );
        s.clear();
    }

    // Dumping must cross memory boundaries seamlessly (5 memories, 19 bytes).
    #[test]
    fn test_dump_multi_memories() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut buffer = crate::Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();

            let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![5, 6, 7, 8]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![9, 10, 11, 12]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![13, 14, 15, 16]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![17, 18, 19]);
            buffer.append_memory(mem);
        }

        let mut s = String::new();
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 ................ \n\
             0010: 11 12 13 ..."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 \n11 12 13"
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(2..)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 11 12 ................ \n\
             0012: 13 ."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..17)).unwrap();
        assert_eq!(
            s,
            "000e: 0f 10 11 ..."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..20)).unwrap();
        assert_eq!(s, "<end out of range>");
        s.clear();

        // Reversed range: start resolves past the end, so it reports start
        // out of range rather than panicking.
        #[allow(clippy::reversed_empty_ranges)]
        {
            write!(&mut s, "{:?}", buffer.dump_range(23..20)).unwrap();
            assert_eq!(s, "<start out of range>");
            s.clear();
        }
    }
}
2083 | |