1 | // Take a look at the license at the top of the repository in the LICENSE file. |
2 | |
3 | use std::{fmt, marker::PhantomData, mem, ops, ops::ControlFlow, ptr, slice, u64, usize}; |
4 | |
5 | use glib::translate::{ |
6 | from_glib, from_glib_full, FromGlib, FromGlibPtrFull, IntoGlib, IntoGlibPtr, |
7 | }; |
8 | |
9 | use crate::{meta::*, BufferCursor, BufferFlags, BufferRefCursor, ClockTime, Memory, MemoryRef}; |
10 | |
11 | pub enum Readable {} |
12 | pub enum Writable {} |
13 | |
14 | #[derive (Copy, Clone, Debug, PartialEq, Eq)] |
15 | pub enum BufferMetaForeachAction { |
16 | Keep, |
17 | Remove, |
18 | } |
19 | |
20 | mini_object_wrapper!(Buffer, BufferRef, ffi::GstBuffer, || { |
21 | ffi::gst_buffer_get_type() |
22 | }); |
23 | |
24 | pub struct BufferMap<'a, T> { |
25 | buffer: &'a BufferRef, |
26 | map_info: ffi::GstMapInfo, |
27 | phantom: PhantomData<T>, |
28 | } |
29 | |
30 | pub struct MappedBuffer<T> { |
31 | buffer: Buffer, |
32 | map_info: ffi::GstMapInfo, |
33 | phantom: PhantomData<T>, |
34 | } |
35 | |
36 | impl Buffer { |
37 | #[doc (alias = "gst_buffer_new" )] |
38 | pub fn new() -> Self { |
39 | assert_initialized_main_thread!(); |
40 | |
41 | unsafe { from_glib_full(ffi::gst_buffer_new()) } |
42 | } |
43 | |
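    /// # Example
    ///
    /// A minimal sketch: allocate a buffer, fill it through a writable map,
    /// and check its size afterwards (assumes the crate is used as `gst`).
    ///
    /// ```ignore
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let mut buffer = gst::Buffer::with_size(1024).expect("allocation failed");
    /// {
    ///     // The buffer is still unique here, so `get_mut()` succeeds.
    ///     let buffer = buffer.get_mut().unwrap();
    ///     let mut map = buffer.map_writable().unwrap();
    ///     map.as_mut_slice().fill(0);
    /// }
    /// assert_eq!(buffer.size(), 1024);
    /// ```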
44 | #[doc (alias = "gst_buffer_new_allocate" )] |
45 | #[doc (alias = "gst_buffer_new_and_alloc" )] |
46 | pub fn with_size(size: usize) -> Result<Self, glib::BoolError> { |
47 | assert_initialized_main_thread!(); |
48 | |
49 | unsafe { |
50 | Option::<_>::from_glib_full(ffi::gst_buffer_new_allocate( |
51 | ptr::null_mut(), |
52 | size, |
53 | ptr::null_mut(), |
54 | )) |
55 | .ok_or_else(|| glib::bool_error!("Failed to allocate buffer" )) |
56 | } |
57 | } |
58 | |
59 | unsafe extern "C" fn drop_box<T>(vec: glib::ffi::gpointer) { |
60 | let slice: Box<T> = Box::from_raw(vec as *mut T); |
61 | drop(slice); |
62 | } |
63 | |
64 | #[doc (alias = "gst_buffer_new_wrapped_full" )] |
65 | pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self { |
66 | assert_initialized_main_thread!(); |
67 | |
68 | unsafe { |
69 | let mut b = Box::new(slice); |
70 | let (size, data) = { |
71 | let slice = (*b).as_mut(); |
72 | (slice.len(), slice.as_mut_ptr()) |
73 | }; |
74 | let user_data = Box::into_raw(b); |
75 | from_glib_full(ffi::gst_buffer_new_wrapped_full( |
76 | 0, |
77 | data as glib::ffi::gpointer, |
78 | size, |
79 | 0, |
80 | size, |
81 | user_data as glib::ffi::gpointer, |
82 | Some(Self::drop_box::<T>), |
83 | )) |
84 | } |
85 | } |
86 | |
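    /// # Example
    ///
    /// A minimal sketch: wrap an existing `Vec<u8>` without copying; the
    /// resulting memory is marked read-only.
    ///
    /// ```ignore
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let buffer = gst::Buffer::from_slice(vec![1u8, 2, 3, 4]);
    /// let map = buffer.map_readable().unwrap();
    /// assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
    /// ```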
87 | #[doc (alias = "gst_buffer_new_wrapped_full" )] |
88 | pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self { |
89 | assert_initialized_main_thread!(); |
90 | |
91 | unsafe { |
92 | let b = Box::new(slice); |
93 | let (size, data) = { |
94 | let slice = (*b).as_ref(); |
95 | (slice.len(), slice.as_ptr()) |
96 | }; |
97 | let user_data = Box::into_raw(b); |
98 | from_glib_full(ffi::gst_buffer_new_wrapped_full( |
99 | ffi::GST_MEMORY_FLAG_READONLY, |
100 | data as glib::ffi::gpointer, |
101 | size, |
102 | 0, |
103 | size, |
104 | user_data as glib::ffi::gpointer, |
105 | Some(Self::drop_box::<T>), |
106 | )) |
107 | } |
108 | } |
109 | |
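    /// # Example
    ///
    /// A minimal sketch: consume the buffer into a mapping that keeps it
    /// alive, then turn the mapping back into the buffer.
    ///
    /// ```ignore
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let buffer = gst::Buffer::from_slice([1u8, 2, 3, 4]);
    /// let mapped = buffer.into_mapped_buffer_readable().unwrap();
    /// assert_eq!(mapped.as_slice(), &[1, 2, 3, 4]);
    /// let _buffer = mapped.into_buffer();
    /// ```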
110 | #[doc (alias = "gst_buffer_map" )] |
111 | #[inline ] |
112 | pub fn into_mapped_buffer_readable(self) -> Result<MappedBuffer<Readable>, Self> { |
113 | unsafe { |
114 | let mut map_info = mem::MaybeUninit::uninit(); |
115 | let res: bool = from_glib(ffi::gst_buffer_map( |
116 | self.as_mut_ptr(), |
117 | map_info.as_mut_ptr(), |
118 | ffi::GST_MAP_READ, |
119 | )); |
120 | if res { |
121 | Ok(MappedBuffer { |
122 | buffer: self, |
123 | map_info: map_info.assume_init(), |
124 | phantom: PhantomData, |
125 | }) |
126 | } else { |
127 | Err(self) |
128 | } |
129 | } |
130 | } |
131 | |
132 | #[doc (alias = "gst_buffer_map" )] |
133 | #[inline ] |
134 | pub fn into_mapped_buffer_writable(self) -> Result<MappedBuffer<Writable>, Self> { |
135 | unsafe { |
136 | let mut map_info = mem::MaybeUninit::uninit(); |
137 | let res: bool = from_glib(ffi::gst_buffer_map( |
138 | self.as_mut_ptr(), |
139 | map_info.as_mut_ptr(), |
140 | ffi::GST_MAP_READWRITE, |
141 | )); |
142 | if res { |
143 | Ok(MappedBuffer { |
144 | buffer: self, |
145 | map_info: map_info.assume_init(), |
146 | phantom: PhantomData, |
147 | }) |
148 | } else { |
149 | Err(self) |
150 | } |
151 | } |
152 | } |
153 | |
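    /// # Example
    ///
    /// A sketch assuming the `std::io::Read`/`Seek` implementations provided
    /// by [`BufferCursor`]:
    ///
    /// ```ignore
    /// use std::io::Read;
    ///
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let buffer = gst::Buffer::from_slice([1u8, 2, 3, 4]);
    /// let mut cursor = buffer.into_cursor_readable();
    /// let mut first_two = [0u8; 2];
    /// cursor.read_exact(&mut first_two).unwrap();
    /// assert_eq!(first_two, [1, 2]);
    /// ```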
154 | #[inline ] |
155 | pub fn into_cursor_readable(self) -> BufferCursor<Readable> { |
156 | BufferCursor::new_readable(self) |
157 | } |
158 | |
159 | #[inline ] |
160 | pub fn into_cursor_writable(self) -> Result<BufferCursor<Writable>, glib::BoolError> { |
161 | BufferCursor::new_writable(self) |
162 | } |
163 | |
164 | #[doc (alias = "gst_buffer_append" )] |
165 | pub fn append(&mut self, other: Self) { |
166 | unsafe { |
167 | let ptr = ffi::gst_buffer_append(self.as_mut_ptr(), other.into_glib_ptr()); |
168 | self.replace_ptr(ptr); |
169 | } |
170 | } |
171 | } |
172 | |
173 | impl Default for Buffer { |
174 | fn default() -> Self { |
175 | Self::new() |
176 | } |
177 | } |
178 | |
179 | impl BufferRef { |
180 | #[doc (alias = "gst_buffer_map" )] |
181 | #[inline ] |
182 | pub fn map_readable(&self) -> Result<BufferMap<Readable>, glib::BoolError> { |
183 | unsafe { |
184 | let mut map_info = mem::MaybeUninit::uninit(); |
185 | let res = |
186 | ffi::gst_buffer_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ); |
187 | if res == glib::ffi::GTRUE { |
188 | Ok(BufferMap { |
189 | buffer: self, |
190 | map_info: map_info.assume_init(), |
191 | phantom: PhantomData, |
192 | }) |
193 | } else { |
194 | Err(glib::bool_error!("Failed to map buffer readable" )) |
195 | } |
196 | } |
197 | } |
198 | |
199 | #[doc (alias = "gst_buffer_map" )] |
200 | #[inline ] |
201 | pub fn map_writable(&mut self) -> Result<BufferMap<Writable>, glib::BoolError> { |
202 | unsafe { |
203 | let mut map_info = mem::MaybeUninit::uninit(); |
204 | let res = ffi::gst_buffer_map( |
205 | self.as_mut_ptr(), |
206 | map_info.as_mut_ptr(), |
207 | ffi::GST_MAP_READWRITE, |
208 | ); |
209 | if res == glib::ffi::GTRUE { |
210 | Ok(BufferMap { |
211 | buffer: self, |
212 | map_info: map_info.assume_init(), |
213 | phantom: PhantomData, |
214 | }) |
215 | } else { |
216 | Err(glib::bool_error!("Failed to map buffer writable" )) |
217 | } |
218 | } |
219 | } |
220 | |
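    /// # Example
    ///
    /// A minimal sketch using the `BUFFER_COPY_ALL` constant defined at the
    /// bottom of this module (assumed to be re-exported at the crate root):
    ///
    /// ```ignore
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let buffer = gst::Buffer::from_slice([1u8, 2, 3, 4]);
    /// let sub = buffer.copy_region(gst::BUFFER_COPY_ALL, 0, Some(2)).unwrap();
    /// assert_eq!(sub.size(), 2);
    /// ```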
221 | #[doc (alias = "gst_buffer_copy_region" )] |
222 | pub fn copy_region( |
223 | &self, |
224 | flags: crate::BufferCopyFlags, |
225 | offset: usize, |
226 | size: Option<usize>, |
227 | ) -> Result<Buffer, glib::BoolError> { |
228 | let size_real = size.unwrap_or(usize::MAX); |
229 | unsafe { |
230 | Option::<_>::from_glib_full(ffi::gst_buffer_copy_region( |
231 | self.as_mut_ptr(), |
232 | flags.into_glib(), |
233 | offset, |
234 | size_real, |
235 | )) |
236 | .ok_or_else(|| glib::bool_error!("Failed to copy region of buffer" )) |
237 | } |
238 | } |
239 | |
240 | #[doc (alias = "gst_buffer_copy_into" )] |
241 | pub fn copy_into( |
242 | &self, |
243 | dest: &mut BufferRef, |
244 | flags: crate::BufferCopyFlags, |
245 | offset: usize, |
246 | size: Option<usize>, |
247 | ) -> Result<(), glib::BoolError> { |
248 | let size_real = size.unwrap_or(usize::MAX); |
249 | unsafe { |
250 | glib::result_from_gboolean!( |
251 | ffi::gst_buffer_copy_into( |
252 | dest.as_mut_ptr(), |
253 | self.as_mut_ptr(), |
254 | flags.into_glib(), |
255 | offset, |
256 | size_real, |
257 | ), |
258 | "Failed to copy into destination buffer" , |
259 | ) |
260 | } |
261 | } |
262 | |
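    /// # Example
    ///
    /// A minimal sketch: fill an allocated buffer from a slice and read the
    /// bytes back out with [`BufferRef::copy_to_slice`].
    ///
    /// ```ignore
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let mut buffer = gst::Buffer::with_size(4).unwrap();
    /// {
    ///     let buffer = buffer.get_mut().unwrap();
    ///     buffer.copy_from_slice(0, &[1, 2, 3, 4]).unwrap();
    /// }
    /// let mut out = [0u8; 4];
    /// buffer.copy_to_slice(0, &mut out).unwrap();
    /// assert_eq!(out, [1, 2, 3, 4]);
    /// ```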
263 | #[doc (alias = "gst_buffer_fill" )] |
264 | pub fn copy_from_slice(&mut self, offset: usize, slice: &[u8]) -> Result<(), usize> { |
265 | let maxsize = self.maxsize(); |
266 | let size = slice.len(); |
267 | |
268 | assert!(maxsize >= offset && maxsize - offset >= size); |
269 | |
270 | let copied = unsafe { |
271 | let src = slice.as_ptr(); |
272 | ffi::gst_buffer_fill( |
273 | self.as_mut_ptr(), |
274 | offset, |
275 | src as glib::ffi::gconstpointer, |
276 | size, |
277 | ) |
278 | }; |
279 | |
280 | if copied == size { |
281 | Ok(()) |
282 | } else { |
283 | Err(copied) |
284 | } |
285 | } |
286 | |
287 | #[doc (alias = "gst_buffer_extract" )] |
288 | pub fn copy_to_slice(&self, offset: usize, slice: &mut [u8]) -> Result<(), usize> { |
289 | let maxsize = self.size(); |
290 | let size = slice.len(); |
291 | |
292 | assert!(maxsize >= offset && maxsize - offset >= size); |
293 | |
294 | let copied = unsafe { |
295 | let dest = slice.as_mut_ptr(); |
296 | ffi::gst_buffer_extract(self.as_mut_ptr(), offset, dest as glib::ffi::gpointer, size) |
297 | }; |
298 | |
299 | if copied == size { |
300 | Ok(()) |
301 | } else { |
302 | Err(copied) |
303 | } |
304 | } |
305 | |
306 | #[doc (alias = "gst_buffer_copy_deep" )] |
307 | pub fn copy_deep(&self) -> Result<Buffer, glib::BoolError> { |
308 | unsafe { |
309 | Option::<_>::from_glib_full(ffi::gst_buffer_copy_deep(self.as_ptr())) |
310 | .ok_or_else(|| glib::bool_error!("Failed to deep copy buffer" )) |
311 | } |
312 | } |
313 | |
314 | #[doc (alias = "get_size" )] |
315 | #[doc (alias = "gst_buffer_get_size" )] |
316 | pub fn size(&self) -> usize { |
317 | unsafe { ffi::gst_buffer_get_size(self.as_mut_ptr()) } |
318 | } |
319 | |
320 | #[doc (alias = "get_maxsize" )] |
321 | pub fn maxsize(&self) -> usize { |
322 | unsafe { |
323 | let mut maxsize = mem::MaybeUninit::uninit(); |
324 | ffi::gst_buffer_get_sizes_range( |
325 | self.as_mut_ptr(), |
326 | 0, |
327 | -1, |
328 | ptr::null_mut(), |
329 | maxsize.as_mut_ptr(), |
330 | ); |
331 | |
332 | maxsize.assume_init() |
333 | } |
334 | } |
335 | |
336 | #[doc (alias = "gst_buffer_set_size" )] |
337 | pub fn set_size(&mut self, size: usize) { |
338 | assert!(self.maxsize() >= size); |
339 | |
340 | unsafe { |
341 | ffi::gst_buffer_set_size(self.as_mut_ptr(), size as isize); |
342 | } |
343 | } |
344 | |
345 | #[doc (alias = "get_offset" )] |
346 | #[doc (alias = "GST_BUFFER_OFFSET" )] |
347 | #[inline ] |
348 | pub fn offset(&self) -> u64 { |
349 | self.0.offset |
350 | } |
351 | |
352 | #[inline ] |
353 | pub fn set_offset(&mut self, offset: u64) { |
354 | self.0.offset = offset; |
355 | } |
356 | |
357 | #[doc (alias = "get_offset_end" )] |
358 | #[doc (alias = "GST_BUFFER_OFFSET_END" )] |
359 | #[inline ] |
360 | pub fn offset_end(&self) -> u64 { |
361 | self.0.offset_end |
362 | } |
363 | |
364 | #[inline ] |
365 | pub fn set_offset_end(&mut self, offset_end: u64) { |
366 | self.0.offset_end = offset_end; |
367 | } |
368 | |
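    /// # Example
    ///
    /// A minimal sketch: timestamps are represented as `Option<ClockTime>`,
    /// with `None` meaning that no timestamp is set.
    ///
    /// ```ignore
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let mut buffer = gst::Buffer::new();
    /// {
    ///     let buffer = buffer.get_mut().unwrap();
    ///     buffer.set_pts(2 * gst::ClockTime::SECOND);
    /// }
    /// assert_eq!(buffer.pts(), Some(2 * gst::ClockTime::SECOND));
    /// assert_eq!(buffer.dts(), gst::ClockTime::NONE);
    /// ```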
369 | #[doc (alias = "get_pts" )] |
370 | #[doc (alias = "GST_BUFFER_PTS" )] |
371 | #[inline ] |
372 | pub fn pts(&self) -> Option<ClockTime> { |
373 | unsafe { from_glib(self.0.pts) } |
374 | } |
375 | |
376 | #[inline ] |
377 | pub fn set_pts(&mut self, pts: impl Into<Option<ClockTime>>) { |
378 | self.0.pts = pts.into().into_glib(); |
379 | } |
380 | |
381 | #[doc (alias = "get_dts" )] |
382 | #[doc (alias = "GST_BUFFER_DTS" )] |
383 | #[inline ] |
384 | pub fn dts(&self) -> Option<ClockTime> { |
385 | unsafe { from_glib(self.0.dts) } |
386 | } |
387 | |
388 | #[inline ] |
389 | pub fn set_dts(&mut self, dts: impl Into<Option<ClockTime>>) { |
390 | self.0.dts = dts.into().into_glib(); |
391 | } |
392 | |
393 | #[doc (alias = "get_dts_or_pts" )] |
394 | #[doc (alias = "GST_BUFFER_DTS_OR_PTS" )] |
395 | #[inline ] |
396 | pub fn dts_or_pts(&self) -> Option<ClockTime> { |
397 | let val = self.dts(); |
398 | if val.is_none() { |
399 | self.pts() |
400 | } else { |
401 | val |
402 | } |
403 | } |
404 | |
405 | #[doc (alias = "get_duration" )] |
406 | #[doc (alias = "GST_BUFFER_DURATION" )] |
407 | #[inline ] |
408 | pub fn duration(&self) -> Option<ClockTime> { |
409 | unsafe { from_glib(self.0.duration) } |
410 | } |
411 | |
412 | #[inline ] |
413 | pub fn set_duration(&mut self, duration: impl Into<Option<ClockTime>>) { |
414 | self.0.duration = duration.into().into_glib(); |
415 | } |
416 | |
417 | #[doc (alias = "get_flags" )] |
418 | #[doc (alias = "GST_BUFFER_FLAGS" )] |
419 | #[inline ] |
420 | pub fn flags(&self) -> BufferFlags { |
421 | BufferFlags::from_bits_truncate(self.0.mini_object.flags) |
422 | } |
423 | |
424 | #[doc (alias = "GST_BUFFER_FLAG_SET" )] |
425 | #[inline ] |
426 | pub fn set_flags(&mut self, flags: BufferFlags) { |
427 | self.0.mini_object.flags |= flags.bits(); |
428 | } |
429 | |
430 | #[doc (alias = "GST_BUFFER_FLAG_UNSET" )] |
431 | #[inline ] |
432 | pub fn unset_flags(&mut self, flags: BufferFlags) { |
433 | self.0.mini_object.flags &= !flags.bits(); |
434 | } |
435 | |
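    /// # Example
    ///
    /// A minimal sketch: look up a concrete meta on the buffer, here
    /// [`crate::ReferenceTimestampMeta`] as also used in the tests below.
    ///
    /// ```ignore
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let mut buffer = gst::Buffer::new();
    /// {
    ///     let buffer = buffer.get_mut().unwrap();
    ///     gst::ReferenceTimestampMeta::add(
    ///         buffer,
    ///         &gst::Caps::builder("foo/bar").build(),
    ///         gst::ClockTime::ZERO,
    ///         gst::ClockTime::NONE,
    ///     );
    /// }
    /// let meta = buffer.meta::<gst::ReferenceTimestampMeta>().unwrap();
    /// assert_eq!(meta.timestamp(), gst::ClockTime::ZERO);
    /// ```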
436 | #[doc (alias = "get_meta" )] |
437 | #[doc (alias = "gst_buffer_get_meta" )] |
438 | #[inline ] |
439 | pub fn meta<T: MetaAPI>(&self) -> Option<MetaRef<T>> { |
440 | unsafe { |
441 | let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib()); |
442 | if meta.is_null() { |
443 | None |
444 | } else { |
445 | Some(T::from_ptr(self, meta as *const <T as MetaAPI>::GstType)) |
446 | } |
447 | } |
448 | } |
449 | |
450 | #[doc (alias = "get_meta_mut" )] |
451 | #[inline ] |
452 | pub fn meta_mut<T: MetaAPI>(&mut self) -> Option<MetaRefMut<T, crate::meta::Standalone>> { |
453 | unsafe { |
454 | let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib()); |
455 | if meta.is_null() { |
456 | None |
457 | } else { |
458 | Some(T::from_mut_ptr(self, meta as *mut <T as MetaAPI>::GstType)) |
459 | } |
460 | } |
461 | } |
462 | |
463 | pub fn iter_meta<T: MetaAPI>(&self) -> MetaIter<T> { |
464 | MetaIter::new(self) |
465 | } |
466 | |
467 | pub fn iter_meta_mut<T: MetaAPI>(&mut self) -> MetaIterMut<T> { |
468 | MetaIterMut::new(self) |
469 | } |
470 | |
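    /// # Example
    ///
    /// A minimal sketch: visit every meta on the buffer; return
    /// `ControlFlow::Break(())` to stop the iteration early.
    ///
    /// ```ignore
    /// use std::ops::ControlFlow;
    ///
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let buffer = gst::Buffer::new();
    /// let mut n_metas = 0;
    /// buffer.foreach_meta(|_meta| {
    ///     n_metas += 1;
    ///     ControlFlow::Continue(())
    /// });
    /// assert_eq!(n_metas, 0);
    /// ```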
471 | #[doc (alias = "gst_buffer_foreach_meta" )] |
472 | pub fn foreach_meta<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(&self, func: F) -> bool { |
473 | unsafe extern "C" fn trampoline<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>( |
474 | buffer: *mut ffi::GstBuffer, |
475 | meta: *mut *mut ffi::GstMeta, |
476 | user_data: glib::ffi::gpointer, |
477 | ) -> glib::ffi::gboolean { |
478 | let func = user_data as *const _ as usize as *mut F; |
479 | let res = (*func)(Meta::from_ptr(BufferRef::from_ptr(buffer), *meta)); |
480 | |
481 | matches!(res, ControlFlow::Continue(_)).into_glib() |
482 | } |
483 | |
484 | unsafe { |
485 | let func_ptr: &F = &func; |
486 | |
487 | from_glib(ffi::gst_buffer_foreach_meta( |
488 | self.as_ptr() as *mut _, |
489 | Some(trampoline::<F>), |
490 | func_ptr as *const _ as usize as *mut _, |
491 | )) |
492 | } |
493 | } |
494 | |
495 | #[doc (alias = "gst_buffer_foreach_meta" )] |
496 | pub fn foreach_meta_mut< |
497 | F: FnMut( |
498 | MetaRefMut<Meta, crate::meta::Iterated>, |
499 | ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>, |
500 | >( |
501 | &mut self, |
502 | func: F, |
503 | ) -> bool { |
504 | unsafe extern "C" fn trampoline< |
505 | F: FnMut( |
506 | MetaRefMut<Meta, crate::meta::Iterated>, |
507 | ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>, |
508 | >( |
509 | buffer: *mut ffi::GstBuffer, |
510 | meta: *mut *mut ffi::GstMeta, |
511 | user_data: glib::ffi::gpointer, |
512 | ) -> glib::ffi::gboolean { |
513 | let func = user_data as *const _ as usize as *mut F; |
514 | let res = (*func)(Meta::from_mut_ptr(BufferRef::from_mut_ptr(buffer), *meta)); |
515 | |
516 | let (cont, action) = match res { |
517 | ControlFlow::Continue(action) => (true, action), |
518 | ControlFlow::Break(action) => (false, action), |
519 | }; |
520 | |
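            // Setting the meta pointer to NULL tells gst_buffer_foreach_meta()
            // to remove this meta from the buffer.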
521 | if action == BufferMetaForeachAction::Remove { |
522 | *meta = ptr::null_mut(); |
523 | } |
524 | |
525 | cont.into_glib() |
526 | } |
527 | |
528 | unsafe { |
529 | let func_ptr: &F = &func; |
530 | |
531 | from_glib(ffi::gst_buffer_foreach_meta( |
532 | self.as_ptr() as *mut _, |
533 | Some(trampoline::<F>), |
534 | func_ptr as *const _ as usize as *mut _, |
535 | )) |
536 | } |
537 | } |
538 | |
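    /// # Example
    ///
    /// A minimal sketch: a buffer can be backed by several [`Memory`]
    /// objects; appending another one grows the buffer without copying.
    ///
    /// ```ignore
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let mut buffer = gst::Buffer::new();
    /// {
    ///     let buffer = buffer.get_mut().unwrap();
    ///     buffer.append_memory(gst::Memory::from_mut_slice(vec![0u8; 5]));
    ///     buffer.append_memory(gst::Memory::from_mut_slice(vec![0u8; 10]));
    /// }
    /// assert_eq!(buffer.n_memory(), 2);
    /// assert_eq!(buffer.size(), 15);
    /// ```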
539 | #[doc (alias = "gst_buffer_append_memory" )] |
540 | pub fn append_memory(&mut self, mem: Memory) { |
541 | unsafe { ffi::gst_buffer_append_memory(self.as_mut_ptr(), mem.into_glib_ptr()) } |
542 | } |
543 | |
544 | #[doc (alias = "gst_buffer_find_memory" )] |
545 | pub fn find_memory(&self, offset: usize, size: Option<usize>) -> Option<(u32, u32, usize)> { |
546 | unsafe { |
547 | let mut idx = mem::MaybeUninit::uninit(); |
548 | let mut length = mem::MaybeUninit::uninit(); |
549 | let mut skip = mem::MaybeUninit::uninit(); |
550 | |
551 | let res = from_glib(ffi::gst_buffer_find_memory( |
552 | self.as_mut_ptr(), |
553 | offset, |
554 | size.unwrap_or(usize::MAX), |
555 | idx.as_mut_ptr(), |
556 | length.as_mut_ptr(), |
557 | skip.as_mut_ptr(), |
558 | )); |
559 | |
560 | if res { |
561 | Some((idx.assume_init(), length.assume_init(), skip.assume_init())) |
562 | } else { |
563 | None |
564 | } |
565 | } |
566 | } |
567 | |
568 | #[doc (alias = "get_all_memory" )] |
569 | #[doc (alias = "gst_buffer_get_all_memory" )] |
570 | pub fn all_memory(&self) -> Option<Memory> { |
571 | unsafe { |
572 | let res = ffi::gst_buffer_get_all_memory(self.as_mut_ptr()); |
573 | if res.is_null() { |
574 | None |
575 | } else { |
576 | Some(from_glib_full(res)) |
577 | } |
578 | } |
579 | } |
580 | |
581 | #[doc (alias = "get_max_memory" )] |
582 | #[doc (alias = "gst_buffer_get_max_memory" )] |
583 | pub fn max_memory() -> u32 { |
584 | unsafe { ffi::gst_buffer_get_max_memory() } |
585 | } |
586 | |
587 | #[doc (alias = "get_memory" )] |
588 | #[doc (alias = "gst_buffer_get_memory" )] |
589 | pub fn memory(&self, idx: u32) -> Option<Memory> { |
590 | if idx >= self.n_memory() { |
591 | None |
592 | } else { |
593 | unsafe { |
594 | let res = ffi::gst_buffer_get_memory(self.as_mut_ptr(), idx); |
595 | if res.is_null() { |
596 | None |
597 | } else { |
598 | Some(from_glib_full(res)) |
599 | } |
600 | } |
601 | } |
602 | } |
603 | |
604 | #[doc (alias = "get_memory_range" )] |
605 | #[doc (alias = "gst_buffer_get_memory_range" )] |
606 | pub fn memory_range(&self, idx: u32, length: Option<u32>) -> Option<Memory> { |
607 | assert!(idx + length.unwrap_or(0) < self.n_memory()); |
608 | unsafe { |
609 | let res = ffi::gst_buffer_get_memory_range( |
610 | self.as_mut_ptr(), |
611 | idx, |
612 | match length { |
613 | Some(val) => val as i32, |
614 | None => -1, |
615 | }, |
616 | ); |
617 | if res.is_null() { |
618 | None |
619 | } else { |
620 | Some(from_glib_full(res)) |
621 | } |
622 | } |
623 | } |
624 | |
625 | #[doc (alias = "gst_buffer_insert_memory" )] |
626 | pub fn insert_memory(&mut self, idx: Option<u32>, mem: Memory) { |
627 | unsafe { |
628 | ffi::gst_buffer_insert_memory( |
629 | self.as_mut_ptr(), |
630 | match idx { |
631 | Some(val) => val as i32, |
632 | None => -1, |
633 | }, |
634 | mem.into_glib_ptr(), |
635 | ) |
636 | } |
637 | } |
638 | |
639 | #[doc (alias = "gst_buffer_is_all_memory_writable" )] |
640 | pub fn is_all_memory_writable(&self) -> bool { |
641 | unsafe { from_glib(ffi::gst_buffer_is_all_memory_writable(self.as_mut_ptr())) } |
642 | } |
643 | |
644 | #[doc (alias = "gst_buffer_is_memory_range_writable" )] |
645 | pub fn is_memory_range_writable(&self, idx: u32, length: Option<u16>) -> bool { |
646 | unsafe { |
647 | from_glib(ffi::gst_buffer_is_memory_range_writable( |
648 | self.as_mut_ptr(), |
649 | idx, |
650 | match length { |
651 | Some(val) => val as i32, |
652 | None => -1, |
653 | }, |
654 | )) |
655 | } |
656 | } |
657 | |
658 | #[doc (alias = "gst_buffer_n_memory" )] |
659 | pub fn n_memory(&self) -> u32 { |
660 | unsafe { ffi::gst_buffer_n_memory(self.as_ptr() as *mut _) } |
661 | } |
662 | |
663 | #[doc (alias = "gst_buffer_peek_memory" )] |
664 | pub fn peek_memory(&self, idx: u32) -> &MemoryRef { |
665 | assert!(idx < self.n_memory()); |
666 | unsafe { MemoryRef::from_ptr(ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx)) } |
667 | } |
668 | |
669 | #[doc (alias = "gst_buffer_peek_memory" )] |
670 | pub fn peek_memory_mut(&mut self, idx: u32) -> Result<&mut MemoryRef, glib::BoolError> { |
671 | assert!(idx < self.n_memory()); |
672 | unsafe { |
673 | let mem = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx); |
674 | if ffi::gst_mini_object_is_writable(mem as *mut _) == glib::ffi::GFALSE { |
675 | Err(glib::bool_error!("Memory not writable" )) |
676 | } else { |
677 | Ok(MemoryRef::from_mut_ptr(ffi::gst_buffer_peek_memory( |
678 | self.as_mut_ptr(), |
679 | idx, |
680 | ))) |
681 | } |
682 | } |
683 | } |
684 | |
685 | #[doc (alias = "gst_buffer_prepend_memory" )] |
686 | pub fn prepend_memory(&mut self, mem: Memory) { |
687 | unsafe { ffi::gst_buffer_prepend_memory(self.as_mut_ptr(), mem.into_glib_ptr()) } |
688 | } |
689 | |
690 | #[doc (alias = "gst_buffer_remove_all_memory" )] |
691 | pub fn remove_all_memory(&mut self) { |
692 | unsafe { ffi::gst_buffer_remove_all_memory(self.as_mut_ptr()) } |
693 | } |
694 | |
695 | #[doc (alias = "gst_buffer_remove_memory" )] |
696 | pub fn remove_memory(&mut self, idx: u32) { |
697 | assert!(idx < self.n_memory()); |
698 | unsafe { ffi::gst_buffer_remove_memory(self.as_mut_ptr(), idx) } |
699 | } |
700 | |
701 | #[doc (alias = "gst_buffer_remove_memory_range" )] |
702 | pub fn remove_memory_range(&mut self, idx: u32, length: Option<u32>) { |
703 | assert!(idx + length.unwrap_or(0) < self.n_memory()); |
704 | unsafe { |
705 | ffi::gst_buffer_remove_memory_range( |
706 | self.as_mut_ptr(), |
707 | idx, |
708 | match length { |
709 | Some(val) => val as i32, |
710 | None => -1, |
711 | }, |
712 | ) |
713 | } |
714 | } |
715 | |
716 | #[doc (alias = "gst_buffer_replace_all_memory" )] |
717 | pub fn replace_all_memory(&mut self, mem: Memory) { |
718 | unsafe { ffi::gst_buffer_replace_all_memory(self.as_mut_ptr(), mem.into_glib_ptr()) } |
719 | } |
720 | |
721 | #[doc (alias = "gst_buffer_replace_memory" )] |
722 | pub fn replace_memory(&mut self, idx: u32, mem: Memory) { |
723 | assert!(idx < self.n_memory()); |
724 | unsafe { ffi::gst_buffer_replace_memory(self.as_mut_ptr(), idx, mem.into_glib_ptr()) } |
725 | } |
726 | |
727 | #[doc (alias = "gst_buffer_replace_memory_range" )] |
728 | pub fn replace_memory_range(&mut self, idx: u32, length: Option<u32>, mem: Memory) { |
729 | assert!(idx + length.unwrap_or(0) < self.n_memory()); |
730 | unsafe { |
731 | ffi::gst_buffer_replace_memory_range( |
732 | self.as_mut_ptr(), |
733 | idx, |
734 | match length { |
735 | Some(val) => val as i32, |
736 | None => -1, |
737 | }, |
738 | mem.into_glib_ptr(), |
739 | ) |
740 | } |
741 | } |
742 | |
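    /// # Example
    ///
    /// A minimal sketch: iterate over the individual memories of a buffer and
    /// sum up their sizes.
    ///
    /// ```ignore
    /// use gstreamer as gst;
    ///
    /// gst::init().unwrap();
    ///
    /// let mut buffer = gst::Buffer::new();
    /// {
    ///     let buffer = buffer.get_mut().unwrap();
    ///     buffer.append_memory(gst::Memory::from_mut_slice(vec![0u8; 5]));
    ///     buffer.append_memory(gst::Memory::from_mut_slice(vec![0u8; 10]));
    /// }
    /// let total: usize = buffer.iter_memories().map(|mem| mem.size()).sum();
    /// assert_eq!(total, buffer.size());
    /// ```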
743 | pub fn iter_memories(&self) -> Iter { |
744 | Iter::new(self) |
745 | } |
746 | |
747 | pub fn iter_memories_mut(&mut self) -> Result<IterMut, glib::BoolError> { |
748 | if !self.is_all_memory_writable() { |
749 | Err(glib::bool_error!("Not all memory are writable" )) |
750 | } else { |
751 | Ok(IterMut::new(self)) |
752 | } |
753 | } |
754 | |
755 | pub fn iter_memories_owned(&self) -> IterOwned { |
756 | IterOwned::new(self) |
757 | } |
758 | |
759 | pub fn as_cursor_readable(&self) -> BufferRefCursor<&BufferRef> { |
760 | BufferRefCursor::new_readable(self) |
761 | } |
762 | |
763 | pub fn as_cursor_writable( |
764 | &mut self, |
765 | ) -> Result<BufferRefCursor<&mut BufferRef>, glib::BoolError> { |
766 | BufferRefCursor::new_writable(self) |
767 | } |
768 | } |
769 | |
770 | macro_rules! define_meta_iter( |
771 | ($name:ident, $typ:ty, $mtyp:ty, $prepare_buffer:expr, $from_ptr:expr) => { |
772 | pub struct $name<'a, T: MetaAPI + 'a> { |
773 | buffer: $typ, |
774 | state: glib::ffi::gpointer, |
775 | meta_api: glib::Type, |
776 | items: PhantomData<$mtyp>, |
777 | } |
778 | |
779 | unsafe impl<'a, T: MetaAPI> Send for $name<'a, T> { } |
780 | unsafe impl<'a, T: MetaAPI> Sync for $name<'a, T> { } |
781 | |
782 | impl<'a, T: MetaAPI> fmt::Debug for $name<'a, T> { |
783 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
784 | f.debug_struct(stringify!($name)) |
785 | .field("buffer" , &self.buffer) |
786 | .field("state" , &self.state) |
787 | .field("meta_api" , &self.meta_api) |
788 | .field("items" , &self.items) |
789 | .finish() |
790 | } |
791 | } |
792 | |
793 | impl<'a, T: MetaAPI> $name<'a, T> { |
794 | fn new(buffer: $typ) -> $name<'a, T> { |
795 | skip_assert_initialized!(); |
796 | |
797 | $name { |
798 | buffer, |
799 | state: ptr::null_mut(), |
800 | meta_api: T::meta_api(), |
801 | items: PhantomData, |
802 | } |
803 | } |
804 | } |
805 | |
806 | #[allow(clippy::redundant_closure_call)] |
807 | impl<'a, T: MetaAPI> Iterator for $name<'a, T> { |
808 | type Item = $mtyp; |
809 | |
810 | fn next(&mut self) -> Option<Self::Item> { |
811 | loop { |
812 | unsafe { |
813 | let meta = ffi::gst_buffer_iterate_meta(self.buffer.as_mut_ptr(), &mut self.state); |
814 | |
815 | if meta.is_null() { |
816 | return None; |
817 | } else if self.meta_api == glib::Type::INVALID || glib::Type::from_glib((*(*meta).info).api) == self.meta_api { |
818 | // FIXME: Workaround for a lifetime issue with the mutable iterator only |
819 | let buffer = $prepare_buffer(self.buffer.as_mut_ptr()); |
820 | let item = $from_ptr(buffer, meta); |
821 | return Some(item); |
822 | } |
823 | } |
824 | } |
825 | } |
826 | } |
827 | |
828 | impl<'a, T: MetaAPI> std::iter::FusedIterator for $name<'a, T> { } |
829 | } |
830 | ); |
831 | |
832 | define_meta_iter!( |
833 | MetaIter, |
834 | &'a BufferRef, |
835 | MetaRef<'a, T>, |
836 | |buffer: *const ffi::GstBuffer| BufferRef::from_ptr(buffer), |
837 | |buffer, meta| T::from_ptr(buffer, meta as *const <T as MetaAPI>::GstType) |
838 | ); |
839 | define_meta_iter!( |
840 | MetaIterMut, |
841 | &'a mut BufferRef, |
842 | MetaRefMut<'a, T, crate::meta::Iterated>, |
843 | |buffer: *mut ffi::GstBuffer| BufferRef::from_mut_ptr(buffer), |
844 | |buffer: &'a mut BufferRef, meta| T::from_mut_ptr(buffer, meta as *mut <T as MetaAPI>::GstType) |
845 | ); |
846 | |
847 | macro_rules! define_iter( |
848 | ($name:ident, $typ:ty, $mtyp:ty, $get_item:expr) => { |
849 | pub struct $name<'a> { |
850 | buffer: $typ, |
851 | idx: usize, |
852 | n_memory: usize, |
853 | } |
854 | |
855 | impl<'a> fmt::Debug for $name<'a> { |
856 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
857 | f.debug_struct(stringify!($name)) |
858 | .field("buffer" , &self.buffer) |
859 | .field("idx" , &self.idx) |
860 | .field("n_memory" , &self.n_memory) |
861 | .finish() |
862 | } |
863 | } |
864 | |
865 | impl<'a> $name<'a> { |
866 | fn new(buffer: $typ) -> $name<'a> { |
867 | skip_assert_initialized!(); |
868 | |
869 | let n_memory = buffer.n_memory(); |
870 | |
871 | $name { |
872 | buffer, |
873 | idx: 0, |
874 | n_memory: n_memory as usize, |
875 | } |
876 | } |
877 | } |
878 | |
879 | #[allow(clippy::redundant_closure_call)] |
880 | impl<'a> Iterator for $name<'a> { |
881 | type Item = $mtyp; |
882 | |
883 | fn next(&mut self) -> Option<Self::Item> { |
884 | if self.idx >= self.n_memory { |
885 | return None; |
886 | } |
887 | |
888 | #[allow(unused_unsafe)] |
889 | unsafe { |
890 | let item = $get_item(self.buffer, self.idx as u32).unwrap(); |
891 | self.idx += 1; |
892 | Some(item) |
893 | } |
894 | } |
895 | |
896 | fn size_hint(&self) -> (usize, Option<usize>) { |
897 | let remaining = self.n_memory - self.idx; |
898 | |
899 | (remaining, Some(remaining)) |
900 | } |
901 | |
902 | fn count(self) -> usize { |
903 | self.n_memory - self.idx |
904 | } |
905 | |
906 | fn nth(&mut self, n: usize) -> Option<Self::Item> { |
907 | let (end, overflow) = self.idx.overflowing_add(n); |
908 | if end >= self.n_memory || overflow { |
909 | self.idx = self.n_memory; |
910 | None |
911 | } else { |
912 | #[allow(unused_unsafe)] |
913 | unsafe { |
914 | self.idx = end + 1; |
915 | Some($get_item(self.buffer, end as u32).unwrap()) |
916 | } |
917 | } |
918 | } |
919 | |
920 | fn last(self) -> Option<Self::Item> { |
921 | if self.idx == self.n_memory { |
922 | None |
923 | } else { |
924 | #[allow(unused_unsafe)] |
925 | unsafe { |
926 | Some($get_item(self.buffer, self.n_memory as u32 - 1).unwrap()) |
927 | } |
928 | } |
929 | } |
930 | } |
931 | |
932 | #[allow(clippy::redundant_closure_call)] |
933 | impl<'a> DoubleEndedIterator for $name<'a> { |
934 | fn next_back(&mut self) -> Option<Self::Item> { |
935 | if self.idx == self.n_memory { |
936 | return None; |
937 | } |
938 | |
939 | #[allow(unused_unsafe)] |
940 | unsafe { |
941 | self.n_memory -= 1; |
942 | Some($get_item(self.buffer, self.n_memory as u32).unwrap()) |
943 | } |
944 | } |
945 | |
946 | fn nth_back(&mut self, n: usize) -> Option<Self::Item> { |
947 | let (end, overflow) = self.n_memory.overflowing_sub(n); |
948 | if end <= self.idx || overflow { |
949 | self.idx = self.n_memory; |
950 | None |
951 | } else { |
952 | #[allow(unused_unsafe)] |
953 | unsafe { |
954 | self.n_memory = end - 1; |
955 | Some($get_item(self.buffer, self.n_memory as u32).unwrap()) |
956 | } |
957 | } |
958 | } |
959 | } |
960 | |
961 | impl<'a> ExactSizeIterator for $name<'a> {} |
962 | |
963 | impl<'a> std::iter::FusedIterator for $name<'a> {} |
964 | } |
965 | ); |
966 | |
967 | define_iter!( |
968 | Iter, |
969 | &'a BufferRef, |
970 | &'a MemoryRef, |
971 | |buffer: &BufferRef, idx| { |
972 | let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx); |
973 | if ptr.is_null() { |
974 | None |
975 | } else { |
976 | Some(MemoryRef::from_ptr(ptr as *const ffi::GstMemory)) |
977 | } |
978 | } |
979 | ); |
980 | |
981 | define_iter!( |
982 | IterMut, |
983 | &'a mut BufferRef, |
984 | &'a mut MemoryRef, |
985 | |buffer: &mut BufferRef, idx| { |
986 | let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx); |
987 | if ptr.is_null() { |
988 | None |
989 | } else { |
990 | Some(MemoryRef::from_mut_ptr(ptr)) |
991 | } |
992 | } |
993 | ); |
994 | |
995 | impl<'a> IntoIterator for &'a BufferRef { |
996 | type IntoIter = Iter<'a>; |
997 | type Item = &'a MemoryRef; |
998 | |
999 | fn into_iter(self) -> Self::IntoIter { |
1000 | self.iter_memories() |
1001 | } |
1002 | } |
1003 | |
1004 | impl From<Memory> for Buffer { |
1005 | fn from(value: Memory) -> Self { |
1006 | skip_assert_initialized!(); |
1007 | |
        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(value);
1012 | } |
1013 | buffer |
1014 | } |
1015 | } |
1016 | |
1017 | impl<const N: usize> From<[Memory; N]> for Buffer { |
1018 | fn from(value: [Memory; N]) -> Self { |
1019 | skip_assert_initialized!(); |
1020 | |
        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            value.into_iter().for_each(|b| buffer.append_memory(b));
1025 | } |
1026 | buffer |
1027 | } |
1028 | } |
1029 | |
1030 | impl std::iter::FromIterator<Memory> for Buffer { |
1031 | fn from_iter<T: IntoIterator<Item = Memory>>(iter: T) -> Self { |
1032 | skip_assert_initialized!(); |
        let iter = iter.into_iter();

        let mut buffer = Buffer::new();

        {
            let buffer = buffer.get_mut().unwrap();
            iter.for_each(|m| buffer.append_memory(m));
1040 | } |
1041 | |
1042 | buffer |
1043 | } |
1044 | } |
1045 | |
1046 | impl std::iter::Extend<Memory> for BufferRef { |
1047 | fn extend<T: IntoIterator<Item = Memory>>(&mut self, iter: T) { |
        iter.into_iter().for_each(|m| self.append_memory(m));
1049 | } |
1050 | } |
1051 | |
1052 | define_iter!( |
1053 | IterOwned, |
1054 | &'a BufferRef, |
1055 | Memory, |
1056 | |buffer: &BufferRef, idx| { buffer.memory(idx) } |
1057 | ); |
1058 | |
1059 | impl fmt::Debug for Buffer { |
1060 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
1061 | BufferRef::fmt(self, f) |
1062 | } |
1063 | } |
1064 | |
1065 | impl PartialEq for Buffer { |
1066 | fn eq(&self, other: &Buffer) -> bool { |
1067 | BufferRef::eq(self, other) |
1068 | } |
1069 | } |
1070 | |
1071 | impl Eq for Buffer {} |
1072 | |
1073 | impl PartialEq<BufferRef> for Buffer { |
1074 | fn eq(&self, other: &BufferRef) -> bool { |
1075 | BufferRef::eq(self, other) |
1076 | } |
1077 | } |
1078 | impl PartialEq<Buffer> for BufferRef { |
1079 | fn eq(&self, other: &Buffer) -> bool { |
        BufferRef::eq(other, self)
1081 | } |
1082 | } |
1083 | |
1084 | impl fmt::Debug for BufferRef { |
1085 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
1086 | use std::cell::RefCell; |
1087 | |
1088 | use crate::utils::Displayable; |
1089 | |
1090 | struct DebugIter<I>(RefCell<I>); |
1091 | impl<I: Iterator> fmt::Debug for DebugIter<I> |
1092 | where |
1093 | I::Item: fmt::Debug, |
1094 | { |
1095 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
1096 | f.debug_list().entries(&mut *self.0.borrow_mut()).finish() |
1097 | } |
1098 | } |
1099 | |
1100 | f.debug_struct("Buffer" ) |
1101 | .field("ptr" , &self.as_ptr()) |
1102 | .field("pts" , &self.pts().display()) |
1103 | .field("dts" , &self.dts().display()) |
1104 | .field("duration" , &self.duration().display()) |
1105 | .field("size" , &self.size()) |
1106 | .field("offset" , &self.offset()) |
1107 | .field("offset_end" , &self.offset_end()) |
1108 | .field("flags" , &self.flags()) |
1109 | .field( |
1110 | "metas" , |
1111 | &DebugIter(RefCell::new( |
1112 | self.iter_meta::<crate::Meta>().map(|m| m.api()), |
1113 | )), |
1114 | ) |
1115 | .finish() |
1116 | } |
1117 | } |
1118 | |
1119 | impl PartialEq for BufferRef { |
1120 | fn eq(&self, other: &BufferRef) -> bool { |
1121 | if self.size() != other.size() { |
1122 | return false; |
1123 | } |
1124 | |
        let self_map = self.map_readable();
        let other_map = other.map_readable();

        match (self_map, other_map) {
            (Ok(self_map), Ok(other_map)) => self_map.as_slice().eq(other_map.as_slice()),
1130 | _ => false, |
1131 | } |
1132 | } |
1133 | } |
1134 | |
1135 | impl Eq for BufferRef {} |
1136 | |
1137 | impl<'a, T> BufferMap<'a, T> { |
1138 | #[doc (alias = "get_size" )] |
1139 | #[inline ] |
1140 | pub fn size(&self) -> usize { |
1141 | self.map_info.size |
1142 | } |
1143 | |
1144 | #[doc (alias = "get_buffer" )] |
1145 | #[inline ] |
1146 | pub fn buffer(&self) -> &BufferRef { |
1147 | self.buffer |
1148 | } |
1149 | |
1150 | #[inline ] |
1151 | pub fn as_slice(&self) -> &[u8] { |
1152 | if self.map_info.size == 0 { |
1153 | return &[]; |
1154 | } |
1155 | unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) } |
1156 | } |
1157 | } |
1158 | |
1159 | impl<'a> BufferMap<'a, Writable> { |
1160 | #[inline ] |
1161 | pub fn as_mut_slice(&mut self) -> &mut [u8] { |
1162 | if self.map_info.size == 0 { |
1163 | return &mut []; |
1164 | } |
1165 | unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) } |
1166 | } |
1167 | } |
1168 | |
1169 | impl<'a, T> AsRef<[u8]> for BufferMap<'a, T> { |
1170 | #[inline ] |
1171 | fn as_ref(&self) -> &[u8] { |
1172 | self.as_slice() |
1173 | } |
1174 | } |
1175 | |
1176 | impl<'a> AsMut<[u8]> for BufferMap<'a, Writable> { |
1177 | #[inline ] |
1178 | fn as_mut(&mut self) -> &mut [u8] { |
1179 | self.as_mut_slice() |
1180 | } |
1181 | } |
1182 | |
1183 | impl<'a, T> ops::Deref for BufferMap<'a, T> { |
1184 | type Target = [u8]; |
1185 | |
1186 | #[inline ] |
1187 | fn deref(&self) -> &[u8] { |
1188 | self.as_slice() |
1189 | } |
1190 | } |
1191 | |
1192 | impl<'a> ops::DerefMut for BufferMap<'a, Writable> { |
1193 | #[inline ] |
1194 | fn deref_mut(&mut self) -> &mut [u8] { |
1195 | self.as_mut_slice() |
1196 | } |
1197 | } |
1198 | |
1199 | impl<'a, T> fmt::Debug for BufferMap<'a, T> { |
1200 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
        f.debug_tuple("BufferMap").field(&self.buffer()).finish()
1202 | } |
1203 | } |
1204 | |
1205 | impl<'a, T> PartialEq for BufferMap<'a, T> { |
1206 | fn eq(&self, other: &BufferMap<'a, T>) -> bool { |
1207 | self.as_slice().eq(other.as_slice()) |
1208 | } |
1209 | } |
1210 | |
1211 | impl<'a, T> Eq for BufferMap<'a, T> {} |
1212 | |
1213 | impl<'a, T> Drop for BufferMap<'a, T> { |
1214 | #[inline ] |
1215 | fn drop(&mut self) { |
1216 | unsafe { |
1217 | ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info); |
1218 | } |
1219 | } |
1220 | } |
1221 | |
1222 | unsafe impl<'a, T> Send for BufferMap<'a, T> {} |
1223 | unsafe impl<'a, T> Sync for BufferMap<'a, T> {} |
1224 | |
1225 | impl<T> MappedBuffer<T> { |
1226 | #[inline ] |
1227 | pub fn as_slice(&self) -> &[u8] { |
1228 | if self.map_info.size == 0 { |
1229 | return &[]; |
1230 | } |
1231 | unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) } |
1232 | } |
1233 | |
1234 | #[doc (alias = "get_size" )] |
1235 | #[inline ] |
1236 | pub fn size(&self) -> usize { |
1237 | self.map_info.size |
1238 | } |
1239 | |
1240 | #[doc (alias = "get_buffer" )] |
1241 | #[inline ] |
1242 | pub fn buffer(&self) -> &BufferRef { |
1243 | self.buffer.as_ref() |
1244 | } |
1245 | |
1246 | #[inline ] |
1247 | pub fn into_buffer(self) -> Buffer { |
1248 | let mut s = mem::ManuallyDrop::new(self); |
1249 | let buffer = unsafe { ptr::read(&s.buffer) }; |
1250 | unsafe { |
1251 | ffi::gst_buffer_unmap(buffer.as_mut_ptr(), &mut s.map_info); |
1252 | } |
1253 | |
1254 | buffer |
1255 | } |
1256 | } |
1257 | |
1258 | impl MappedBuffer<Writable> { |
1259 | #[inline ] |
1260 | pub fn as_mut_slice(&mut self) -> &mut [u8] { |
1261 | if self.map_info.size == 0 { |
1262 | return &mut []; |
1263 | } |
1264 | unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) } |
1265 | } |
1266 | } |
1267 | |
1268 | impl<T> AsRef<[u8]> for MappedBuffer<T> { |
1269 | #[inline ] |
1270 | fn as_ref(&self) -> &[u8] { |
1271 | self.as_slice() |
1272 | } |
1273 | } |
1274 | |
1275 | impl AsMut<[u8]> for MappedBuffer<Writable> { |
1276 | #[inline ] |
1277 | fn as_mut(&mut self) -> &mut [u8] { |
1278 | self.as_mut_slice() |
1279 | } |
1280 | } |
1281 | |
1282 | impl<T> ops::Deref for MappedBuffer<T> { |
1283 | type Target = [u8]; |
1284 | |
1285 | #[inline ] |
1286 | fn deref(&self) -> &[u8] { |
1287 | self.as_slice() |
1288 | } |
1289 | } |
1290 | |
1291 | impl ops::DerefMut for MappedBuffer<Writable> { |
1292 | #[inline ] |
1293 | fn deref_mut(&mut self) -> &mut [u8] { |
1294 | self.as_mut_slice() |
1295 | } |
1296 | } |
1297 | |
1298 | impl<T> Drop for MappedBuffer<T> { |
1299 | #[inline ] |
1300 | fn drop(&mut self) { |
1301 | unsafe { |
1302 | ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info); |
1303 | } |
1304 | } |
1305 | } |
1306 | |
1307 | impl<T> fmt::Debug for MappedBuffer<T> { |
1308 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
        f.debug_tuple("MappedBuffer").field(&self.buffer()).finish()
1310 | } |
1311 | } |
1312 | |
1313 | impl<T> PartialEq for MappedBuffer<T> { |
1314 | fn eq(&self, other: &MappedBuffer<T>) -> bool { |
1315 | self.as_slice().eq(other.as_slice()) |
1316 | } |
1317 | } |
1318 | |
1319 | impl<T> Eq for MappedBuffer<T> {} |
1320 | |
1321 | unsafe impl<T> Send for MappedBuffer<T> {} |
1322 | unsafe impl<T> Sync for MappedBuffer<T> {} |
1323 | |
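/// # Example
///
/// A minimal sketch: copy only the metadata (flags, timestamps and metas) of
/// one buffer into another, assuming this constant is re-exported at the
/// crate root.
///
/// ```ignore
/// use gstreamer as gst;
///
/// gst::init().unwrap();
///
/// let src = gst::Buffer::from_slice([1u8, 2, 3, 4]);
/// let mut dst = gst::Buffer::with_size(4).unwrap();
/// src.copy_into(dst.get_mut().unwrap(), gst::BUFFER_COPY_METADATA, 0, None)
///     .unwrap();
/// ```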
1324 | #[doc (alias = "GST_BUFFER_COPY_METADATA" )] |
1325 | pub const BUFFER_COPY_METADATA: crate::BufferCopyFlags = |
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_METADATA);
1327 | #[doc (alias = "GST_BUFFER_COPY_ALL" )] |
1328 | pub const BUFFER_COPY_ALL: crate::BufferCopyFlags = |
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_ALL);
1330 | |
1331 | #[cfg (test)] |
1332 | mod tests { |
1333 | use super::*; |
1334 | |
1335 | #[test ] |
1336 | fn test_fields() { |
1337 | crate::init().unwrap(); |
1338 | |
1339 | let mut buffer = Buffer::new(); |
1340 | |
1341 | { |
1342 | let buffer = buffer.get_mut().unwrap(); |
1343 | buffer.set_pts(ClockTime::NSECOND); |
1344 | buffer.set_dts(2 * ClockTime::NSECOND); |
1345 | buffer.set_offset(3); |
1346 | buffer.set_offset_end(4); |
1347 | buffer.set_duration(Some(5 * ClockTime::NSECOND)); |
1348 | } |
1349 | assert_eq!(buffer.pts(), Some(ClockTime::NSECOND)); |
1350 | assert_eq!(buffer.dts(), Some(2 * ClockTime::NSECOND)); |
1351 | assert_eq!(buffer.offset(), 3); |
1352 | assert_eq!(buffer.offset_end(), 4); |
1353 | assert_eq!(buffer.duration(), Some(5 * ClockTime::NSECOND)); |
1354 | } |
1355 | |
1356 | #[test ] |
1357 | fn test_writability() { |
1358 | crate::init().unwrap(); |
1359 | |
1360 | let mut buffer = Buffer::from_slice(vec![1, 2, 3, 4]); |
1361 | { |
1362 | let data = buffer.map_readable().unwrap(); |
1363 | assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice()); |
1364 | } |
1365 | assert_ne!(buffer.get_mut(), None); |
1366 | { |
1367 | let buffer = buffer.get_mut().unwrap(); |
1368 | buffer.set_pts(Some(ClockTime::NSECOND)); |
1369 | } |
1370 | |
1371 | let mut buffer2 = buffer.clone(); |
1372 | assert_eq!(buffer.get_mut(), None); |
1373 | |
1374 | assert_eq!(buffer2.as_ptr(), buffer.as_ptr()); |
1375 | |
1376 | { |
1377 | let buffer2 = buffer2.make_mut(); |
1378 | assert_ne!(buffer2.as_ptr(), buffer.as_ptr()); |
1379 | |
1380 | buffer2.set_pts(Some(2 * ClockTime::NSECOND)); |
1381 | |
1382 | let mut data = buffer2.map_writable().unwrap(); |
1383 | assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice()); |
1384 | data.as_mut_slice()[0] = 0; |
1385 | } |
1386 | |
1387 | assert_eq!(buffer.pts(), Some(ClockTime::NSECOND)); |
1388 | assert_eq!(buffer2.pts(), Some(2 * ClockTime::NSECOND)); |
1389 | |
1390 | { |
1391 | let data = buffer.map_readable().unwrap(); |
1392 | assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice()); |
1393 | |
1394 | let data = buffer2.map_readable().unwrap(); |
1395 | assert_eq!(data.as_slice(), vec![0, 2, 3, 4].as_slice()); |
1396 | } |
1397 | } |
1398 | |
1399 | #[test ] |
1400 | #[allow (clippy::cognitive_complexity)] |
1401 | fn test_memories() { |
1402 | crate::init().unwrap(); |
1403 | |
1404 | let mut buffer = Buffer::new(); |
1405 | { |
1406 | let buffer = buffer.get_mut().unwrap(); |
1407 | buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5])); |
1408 | buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5])); |
1409 | buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5])); |
1410 | buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5])); |
1411 | buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 10])); |
1412 | } |
1413 | |
1414 | assert!(buffer.is_all_memory_writable()); |
1415 | assert_eq!(buffer.n_memory(), 5); |
1416 | assert_eq!(buffer.size(), 30); |
1417 | |
1418 | for i in 0..5 { |
1419 | { |
1420 | let mem = buffer.memory(i).unwrap(); |
1421 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1422 | let map = mem.map_readable().unwrap(); |
1423 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1424 | } |
1425 | |
1426 | { |
1427 | let mem = buffer.peek_memory(i); |
1428 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1429 | let map = mem.map_readable().unwrap(); |
1430 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1431 | } |
1432 | |
1433 | { |
1434 | let buffer = buffer.get_mut().unwrap(); |
1435 | let mem = buffer.peek_memory_mut(i).unwrap(); |
1436 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1437 | let map = mem.map_writable().unwrap(); |
1438 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1439 | } |
1440 | } |
1441 | |
1442 | { |
1443 | let buffer = buffer.get_mut().unwrap(); |
1444 | let mut last = 0; |
1445 | for (i, mem) in buffer.iter_memories_mut().unwrap().enumerate() { |
1446 | { |
1447 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1448 | let map = mem.map_readable().unwrap(); |
1449 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1450 | } |
1451 | |
1452 | { |
1453 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1454 | let map = mem.map_readable().unwrap(); |
1455 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1456 | } |
1457 | |
1458 | { |
1459 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1460 | let map = mem.map_writable().unwrap(); |
1461 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1462 | } |
1463 | |
1464 | last = i; |
1465 | } |
1466 | |
1467 | assert_eq!(last, 4); |
1468 | } |
1469 | |
1470 | let mut last = 0; |
1471 | for (i, mem) in buffer.iter_memories().enumerate() { |
1472 | { |
1473 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1474 | let map = mem.map_readable().unwrap(); |
1475 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1476 | } |
1477 | |
1478 | { |
1479 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1480 | let map = mem.map_readable().unwrap(); |
1481 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1482 | } |
1483 | |
1484 | last = i; |
1485 | } |
1486 | |
1487 | assert_eq!(last, 4); |
1488 | |
1489 | let mut last = 0; |
1490 | for (i, mem) in buffer.iter_memories_owned().enumerate() { |
1491 | { |
1492 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1493 | let map = mem.map_readable().unwrap(); |
1494 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1495 | } |
1496 | |
1497 | { |
1498 | assert_eq!(mem.size(), if i < 4 { 5 } else { 10 }); |
1499 | let map = mem.map_readable().unwrap(); |
1500 | assert_eq!(map.size(), if i < 4 { 5 } else { 10 }); |
1501 | } |
1502 | |
1503 | last = i; |
1504 | } |
1505 | |
1506 | assert_eq!(last, 4); |
1507 | } |
1508 | |
1509 | #[test ] |
1510 | fn test_meta_foreach() { |
1511 | crate::init().unwrap(); |
1512 | |
1513 | let mut buffer = Buffer::new(); |
1514 | { |
1515 | let buffer = buffer.get_mut().unwrap(); |
1516 | crate::ReferenceTimestampMeta::add( |
1517 | buffer, |
1518 | &crate::Caps::builder("foo/bar" ).build(), |
1519 | ClockTime::ZERO, |
1520 | ClockTime::NONE, |
1521 | ); |
1522 | crate::ReferenceTimestampMeta::add( |
1523 | buffer, |
1524 | &crate::Caps::builder("foo/bar" ).build(), |
1525 | ClockTime::SECOND, |
1526 | ClockTime::NONE, |
1527 | ); |
1528 | } |
1529 | |
1530 | let mut res = vec![]; |
1531 | buffer.foreach_meta(|meta| { |
1532 | let meta = meta |
1533 | .downcast_ref::<crate::ReferenceTimestampMeta>() |
1534 | .unwrap(); |
1535 | res.push(meta.timestamp()); |
1536 | ControlFlow::Continue(()) |
1537 | }); |
1538 | |
1539 | assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]); |
1540 | } |
1541 | |
1542 | #[test ] |
1543 | fn test_meta_foreach_mut() { |
1544 | crate::init().unwrap(); |
1545 | |
1546 | let mut buffer = Buffer::new(); |
1547 | { |
1548 | let buffer = buffer.get_mut().unwrap(); |
1549 | crate::ReferenceTimestampMeta::add( |
1550 | buffer, |
1551 | &crate::Caps::builder("foo/bar" ).build(), |
1552 | ClockTime::ZERO, |
1553 | ClockTime::NONE, |
1554 | ); |
1555 | crate::ReferenceTimestampMeta::add( |
1556 | buffer, |
1557 | &crate::Caps::builder("foo/bar" ).build(), |
1558 | ClockTime::SECOND, |
1559 | ClockTime::NONE, |
1560 | ); |
1561 | } |
1562 | |
1563 | let mut res = vec![]; |
1564 | buffer.get_mut().unwrap().foreach_meta_mut(|mut meta| { |
1565 | let meta = meta |
1566 | .downcast_ref::<crate::ReferenceTimestampMeta>() |
1567 | .unwrap(); |
1568 | res.push(meta.timestamp()); |
1569 | if meta.timestamp() == ClockTime::SECOND { |
1570 | ControlFlow::Continue(BufferMetaForeachAction::Remove) |
1571 | } else { |
1572 | ControlFlow::Continue(BufferMetaForeachAction::Keep) |
1573 | } |
1574 | }); |
1575 | |
1576 | assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]); |
1577 | |
1578 | let mut res = vec![]; |
1579 | buffer.foreach_meta(|meta| { |
1580 | let meta = meta |
1581 | .downcast_ref::<crate::ReferenceTimestampMeta>() |
1582 | .unwrap(); |
1583 | res.push(meta.timestamp()); |
1584 | ControlFlow::Continue(()) |
1585 | }); |
1586 | |
1587 | assert_eq!(&[ClockTime::ZERO][..], &res[..]); |
1588 | } |
1589 | |
1590 | #[test ] |
1591 | fn test_ptr_eq() { |
1592 | crate::init().unwrap(); |
1593 | |
1594 | let buffer1 = Buffer::new(); |
1595 | assert!(BufferRef::ptr_eq(&buffer1, &buffer1)); |
1596 | let buffer2 = Buffer::new(); |
1597 | assert!(!BufferRef::ptr_eq(&buffer1, &buffer2)); |
1598 | } |
1599 | } |
1600 | |