1 | // Take a look at the license at the top of the repository in the LICENSE file. |
2 | |
3 | use std::{ |
4 | fmt, |
5 | marker::PhantomData, |
6 | mem, |
7 | ops::{Bound, Deref, DerefMut, RangeBounds}, |
8 | ptr, slice, |
9 | }; |
10 | |
11 | use glib::translate::*; |
12 | |
13 | use crate::{ffi, AllocationParams, Allocator, MemoryFlags}; |
14 | |
15 | mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || { |
16 | ffi::gst_memory_get_type() |
17 | }); |
18 | |
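// The two RAII mapping guards below unmap the memory when dropped:
// `MemoryMap` borrows a `MemoryRef` for the duration of the mapping, while
// `MappedMemory` owns the `Memory` and can hand it back via `into_memory()`.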
19 | pub struct MemoryMap<'a, T> { |
20 | memory: &'a MemoryRef, |
21 | map_info: ffi::GstMapInfo, |
22 | phantom: PhantomData<T>, |
23 | } |
24 | |
25 | pub struct MappedMemory<T> { |
26 | memory: Memory, |
27 | map_info: ffi::GstMapInfo, |
28 | phantom: PhantomData<T>, |
29 | } |
30 | |
31 | impl fmt::Debug for Memory { |
32 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
33 | MemoryRef::fmt(self, f) |
34 | } |
35 | } |
36 | |
37 | impl fmt::Debug for MemoryRef { |
38 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
39 | f&mut DebugStruct<'_, '_>.debug_struct("Memory" ) |
40 | .field("ptr" , &self.as_ptr()) |
41 | .field("allocator" , &self.allocator()) |
42 | .field("parent" , &self.parent()) |
43 | .field("maxsize" , &self.maxsize()) |
44 | .field("align" , &self.align()) |
45 | .field("offset" , &self.offset()) |
46 | .field("size" , &self.size()) |
47 | .field(name:"flags" , &self.flags()) |
48 | .finish() |
49 | } |
50 | } |
51 | |
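// Zero-variant marker types used as the `T` parameter of the mapping guards
// to select read-only vs. read-write access at the type level.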
52 | pub enum Readable {} |
53 | pub enum Writable {} |
54 | |
55 | impl Memory { |
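    /// Allocates a new memory block of `size` bytes with the default
    /// allocator.
    ///
    /// Illustrative sketch (assumes the crate is imported as `gst` and
    /// `gst::init()` was called):
    ///
    /// ```ignore
    /// let mem = gst::Memory::with_size(1024);
    /// assert_eq!(mem.size(), 1024);
    /// ```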
    #[inline]
57 | pub fn with_size(size: usize) -> Self { |
58 | assert_initialized_main_thread!(); |
59 | unsafe { |
60 | from_glib_full(ffi::gst_allocator_alloc( |
61 | ptr::null_mut(), |
62 | size, |
63 | ptr::null_mut(), |
64 | )) |
65 | } |
66 | } |
67 | |
    #[inline]
69 | pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self { |
70 | assert_initialized_main_thread!(); |
71 | unsafe { |
72 | from_glib_full(ffi::gst_allocator_alloc( |
73 | ptr::null_mut(), |
74 | size, |
75 | params.as_ptr() as *mut _, |
76 | )) |
77 | } |
78 | } |
79 | |
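    /// Maps the memory readable while consuming `self`; on failure the
    /// unmapped memory is handed back as the `Err` value.
    ///
    /// Illustrative sketch, mirroring `test_map` below:
    ///
    /// ```ignore
    /// let mem = gst::Memory::from_slice(vec![1, 2, 3, 4]);
    /// let mapped = mem.into_mapped_memory_readable().unwrap();
    /// assert_eq!(mapped.as_slice(), &[1, 2, 3, 4]);
    /// let mem = mapped.into_memory(); // unmaps and returns the `Memory`
    /// ```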
    #[inline]
81 | pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> { |
82 | unsafe { |
83 | let mut map_info = mem::MaybeUninit::uninit(); |
84 | let res: bool = from_glib(ffi::gst_memory_map( |
85 | self.as_mut_ptr(), |
86 | map_info.as_mut_ptr(), |
87 | ffi::GST_MAP_READ, |
88 | )); |
89 | if res { |
90 | Ok(MappedMemory { |
91 | memory: self, |
92 | map_info: map_info.assume_init(), |
93 | phantom: PhantomData, |
94 | }) |
95 | } else { |
96 | Err(self) |
97 | } |
98 | } |
99 | } |
100 | |
    #[inline]
102 | pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> { |
103 | unsafe { |
104 | let mut map_info = mem::MaybeUninit::uninit(); |
105 | let res: bool = from_glib(ffi::gst_memory_map( |
106 | self.as_mut_ptr(), |
107 | map_info.as_mut_ptr(), |
108 | ffi::GST_MAP_READWRITE, |
109 | )); |
110 | if res { |
111 | Ok(MappedMemory { |
112 | memory: self, |
113 | map_info: map_info.assume_init(), |
114 | phantom: PhantomData, |
115 | }) |
116 | } else { |
117 | Err(self) |
118 | } |
119 | } |
120 | } |
121 | } |
122 | |
123 | impl MemoryRef { |
124 | #[doc (alias = "get_allocator" )] |
125 | #[inline ] |
126 | pub fn allocator(&self) -> Option<&Allocator> { |
127 | unsafe { |
128 | if self.0.allocator.is_null() { |
129 | None |
130 | } else { |
131 | Some(&*(&self.0.allocator as *const *mut ffi::GstAllocator as *const Allocator)) |
132 | } |
133 | } |
134 | } |
135 | |
136 | #[doc (alias = "get_parent" )] |
137 | #[inline ] |
138 | pub fn parent(&self) -> Option<&MemoryRef> { |
139 | unsafe { |
140 | if self.0.parent.is_null() { |
141 | None |
142 | } else { |
143 | Some(MemoryRef::from_ptr(self.0.parent)) |
144 | } |
145 | } |
146 | } |
147 | |
148 | #[doc (alias = "get_maxsize" )] |
149 | #[inline ] |
150 | pub fn maxsize(&self) -> usize { |
151 | self.0.maxsize |
152 | } |
153 | |
154 | #[doc (alias = "get_align" )] |
155 | #[inline ] |
156 | pub fn align(&self) -> usize { |
157 | self.0.align |
158 | } |
159 | |
160 | #[doc (alias = "get_offset" )] |
161 | #[inline ] |
162 | pub fn offset(&self) -> usize { |
163 | self.0.offset |
164 | } |
165 | |
166 | #[doc (alias = "get_size" )] |
167 | #[inline ] |
168 | pub fn size(&self) -> usize { |
169 | self.0.size |
170 | } |
171 | |
172 | #[doc (alias = "get_flags" )] |
173 | #[inline ] |
174 | pub fn flags(&self) -> MemoryFlags { |
175 | unsafe { from_glib(self.0.mini_object.flags) } |
176 | } |
177 | |
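    // Translates a Rust range, interpreted relative to the current `size()`,
    // into the signed `(offset, size)` pair the C API expects, panicking if
    // the bounds fall outside the valid region.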
178 | fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (isize, isize) { |
179 | let size = self.size(); |
180 | |
        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < size, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => size,
        };
        assert!(end_offset <= size, "End offset after valid range");
194 | |
195 | // Cast from usize to isize because that's literally how this works in the |
196 | // implementation and how the upper half of the usize range can be made use of. |
197 | // |
        // The implementation works by exploiting wraparounds.
199 | let new_offset = start_offset as isize; |
200 | let new_size = end_offset.saturating_sub(start_offset) as isize; |
201 | |
202 | (new_offset, new_size) |
203 | } |
204 | |
205 | fn calculate_offset_size_maxsize(&self, range: impl RangeBounds<usize>) -> (isize, isize) { |
206 | let maxsize = self.maxsize(); |
207 | |
        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < maxsize, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => maxsize,
        };
        assert!(end_offset <= maxsize, "End offset after valid range");
221 | |
222 | // Cast from usize to isize because that's literally how this works in the |
223 | // implementation and how the upper half of the usize range can be made use of. |
224 | // |
225 | // The implementation works by exploiting wraparounds. |
226 | let offset = self.offset(); |
227 | |
228 | let new_offset = start_offset.wrapping_sub(offset) as isize; |
229 | let new_size = end_offset.saturating_sub(start_offset) as isize; |
230 | |
231 | (new_offset, new_size) |
232 | } |
233 | |
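    /// Returns a newly allocated copy of the given sub-range of `self`,
    /// interpreted relative to the current `offset()`/`size()`.
    ///
    /// Illustrative sketch (assumes `gst::init()` was called):
    ///
    /// ```ignore
    /// let mem = gst::Memory::from_slice(vec![1, 2, 3, 4]);
    /// let copy = mem.copy_range(1..3); // independent copy of [2, 3]
    /// ```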
234 | #[doc (alias = "gst_memory_copy" )] |
235 | pub fn copy_range(&self, range: impl RangeBounds<usize>) -> Memory { |
236 | let (offset, size) = self.calculate_offset_size(range); |
237 | unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) } |
238 | } |
239 | |
240 | #[doc (alias = "gst_memory_copy" )] |
241 | pub fn copy_range_maxsize(&self, range: impl RangeBounds<usize>) -> Memory { |
242 | let (offset, size) = self.calculate_offset_size_maxsize(range); |
243 | unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) } |
244 | } |
245 | |
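    /// Checks whether `self` and `mem2` are contiguous regions of the same
    /// parent memory; if so, returns the offset of `self` within that parent.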
246 | #[doc (alias = "gst_memory_is_span" )] |
247 | pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> { |
248 | unsafe { |
249 | let mut offset = mem::MaybeUninit::uninit(); |
250 | let res = from_glib(ffi::gst_memory_is_span( |
251 | self.as_mut_ptr(), |
252 | mem2.as_mut_ptr(), |
253 | offset.as_mut_ptr(), |
254 | )); |
255 | if res { |
256 | Some(offset.assume_init()) |
257 | } else { |
258 | None |
259 | } |
260 | } |
261 | } |
262 | |
263 | #[doc (alias = "gst_memory_is_type" )] |
264 | pub fn is_type(&self, mem_type: &str) -> bool { |
265 | unsafe { |
266 | from_glib(ffi::gst_memory_is_type( |
267 | self.as_mut_ptr(), |
268 | mem_type.to_glib_none().0, |
269 | )) |
270 | } |
271 | } |
272 | |
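    /// Maps the memory for read access; it is unmapped again when the
    /// returned `MemoryMap` is dropped.
    ///
    /// Illustrative sketch (assumes `gst::init()` was called):
    ///
    /// ```ignore
    /// let mem = gst::Memory::from_slice(vec![1, 2, 3, 4]);
    /// let map = mem.map_readable().unwrap();
    /// assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
    /// ```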
    #[inline]
274 | pub fn map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError> { |
275 | unsafe { |
276 | let mut map_info = mem::MaybeUninit::uninit(); |
277 | let res = |
278 | ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ); |
279 | if res == glib::ffi::GTRUE { |
280 | Ok(MemoryMap { |
281 | memory: self, |
282 | map_info: map_info.assume_init(), |
283 | phantom: PhantomData, |
284 | }) |
285 | } else { |
286 | Err(glib::bool_error!("Failed to map memory readable" )) |
287 | } |
288 | } |
289 | } |
290 | |
    #[inline]
292 | pub fn map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError> { |
293 | unsafe { |
294 | let mut map_info = mem::MaybeUninit::uninit(); |
295 | let res = ffi::gst_memory_map( |
296 | self.as_mut_ptr(), |
297 | map_info.as_mut_ptr(), |
298 | ffi::GST_MAP_READWRITE, |
299 | ); |
300 | if res == glib::ffi::GTRUE { |
301 | Ok(MemoryMap { |
302 | memory: self, |
303 | map_info: map_info.assume_init(), |
304 | phantom: PhantomData, |
305 | }) |
306 | } else { |
307 | Err(glib::bool_error!("Failed to map memory writable" )) |
308 | } |
309 | } |
310 | } |
311 | |
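    /// Creates a new `Memory` that shares the given sub-range with `self`
    /// instead of copying it, interpreted relative to the current
    /// `offset()`/`size()`.
    ///
    /// Illustrative sketch, mirroring `test_share` below:
    ///
    /// ```ignore
    /// let mem = gst::Memory::from_slice(vec![1, 2, 3, 4]);
    /// let sub = mem.share(1..=2);        // maps as [2, 3]
    /// let all = sub.share_maxsize(0..4); // back to the full [1, 2, 3, 4]
    /// ```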
312 | #[doc (alias = "gst_memory_share" )] |
313 | pub fn share(&self, range: impl RangeBounds<usize>) -> Memory { |
314 | let (offset, size) = self.calculate_offset_size(range); |
315 | unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) } |
316 | } |
317 | |
318 | #[doc (alias = "gst_memory_share" )] |
319 | pub fn share_maxsize(&self, range: impl RangeBounds<usize>) -> Memory { |
320 | let (offset, size) = self.calculate_offset_size_maxsize(range); |
321 | unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) } |
322 | } |
323 | |
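    /// Restricts the accessible region to the given sub-range of the current
    /// `size()`, leaving the underlying allocation untouched.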
324 | #[doc (alias = "gst_memory_resize" )] |
325 | pub fn resize(&mut self, range: impl RangeBounds<usize>) { |
326 | let (offset, size) = self.calculate_offset_size(range); |
327 | unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) } |
328 | } |
329 | |
330 | #[doc (alias = "gst_memory_resize" )] |
331 | pub fn resize_maxsize(&mut self, range: impl RangeBounds<usize>) { |
332 | let (offset, size) = self.calculate_offset_size_maxsize(range); |
333 | unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) } |
334 | } |
335 | |
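    /// Returns a `Dump` whose `Display` and `Debug` implementations render
    /// the whole memory as a hex dump.
    ///
    /// Illustrative sketch (assumes `gst::init()` was called):
    ///
    /// ```ignore
    /// let mem = gst::Memory::from_slice(vec![1, 2, 3, 4]);
    /// println!("{}", mem.dump());   // "01 02 03 04"
    /// println!("{:?}", mem.dump()); // adds offsets and an ASCII column
    /// ```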
336 | #[doc (alias = "gst_util_dump_mem" )] |
337 | pub fn dump(&self) -> Dump { |
338 | Dump { |
339 | memory: self, |
340 | start: Bound::Unbounded, |
341 | end: Bound::Unbounded, |
342 | } |
343 | } |
344 | |
345 | #[doc (alias = "gst_util_dump_mem" )] |
346 | pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump { |
347 | Dump { |
348 | memory: self, |
349 | start: range.start_bound().cloned(), |
350 | end: range.end_bound().cloned(), |
351 | } |
352 | } |
353 | } |
354 | |
355 | impl<T> MemoryMap<'_, T> { |
356 | #[doc (alias = "get_size" )] |
357 | #[inline ] |
358 | pub fn size(&self) -> usize { |
359 | self.map_info.size |
360 | } |
361 | |
362 | #[doc (alias = "get_memory" )] |
363 | #[inline ] |
364 | pub fn memory(&self) -> &MemoryRef { |
365 | self.memory |
366 | } |
367 | |
    #[inline]
369 | pub fn as_slice(&self) -> &[u8] { |
370 | if self.map_info.size == 0 { |
371 | return &[]; |
372 | } |
373 | unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) } |
374 | } |
375 | } |
376 | |
377 | impl MemoryMap<'_, Writable> { |
    #[inline]
379 | pub fn as_mut_slice(&mut self) -> &mut [u8] { |
380 | if self.map_info.size == 0 { |
381 | return &mut []; |
382 | } |
383 | unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) } |
384 | } |
385 | } |
386 | |
387 | impl<T> AsRef<[u8]> for MemoryMap<'_, T> { |
    #[inline]
389 | fn as_ref(&self) -> &[u8] { |
390 | self.as_slice() |
391 | } |
392 | } |
393 | |
394 | impl AsMut<[u8]> for MemoryMap<'_, Writable> { |
    #[inline]
396 | fn as_mut(&mut self) -> &mut [u8] { |
397 | self.as_mut_slice() |
398 | } |
399 | } |
400 | |
401 | impl<T> Deref for MemoryMap<'_, T> { |
402 | type Target = [u8]; |
403 | |
    #[inline]
405 | fn deref(&self) -> &[u8] { |
406 | self.as_slice() |
407 | } |
408 | } |
409 | |
410 | impl DerefMut for MemoryMap<'_, Writable> { |
    #[inline]
412 | fn deref_mut(&mut self) -> &mut [u8] { |
413 | self.as_mut_slice() |
414 | } |
415 | } |
416 | |
417 | impl<T> fmt::Debug for MemoryMap<'_, T> { |
418 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
419 | f.debug_tuple(name:"MemoryMap" ).field(&self.memory()).finish() |
420 | } |
421 | } |
422 | |
423 | impl<'a, T> PartialEq for MemoryMap<'a, T> { |
424 | fn eq(&self, other: &MemoryMap<'a, T>) -> bool { |
425 | self.as_slice().eq(other.as_slice()) |
426 | } |
427 | } |
428 | |
429 | impl<T> Eq for MemoryMap<'_, T> {} |
430 | |
431 | impl<T> Drop for MemoryMap<'_, T> { |
    #[inline]
433 | fn drop(&mut self) { |
434 | unsafe { |
435 | ffi::gst_memory_unmap(self.memory.as_mut_ptr(), &mut self.map_info); |
436 | } |
437 | } |
438 | } |
439 | |
440 | unsafe impl<T> Send for MemoryMap<'_, T> {} |
441 | unsafe impl<T> Sync for MemoryMap<'_, T> {} |
442 | |
443 | impl<T> MappedMemory<T> { |
    #[inline]
445 | pub fn as_slice(&self) -> &[u8] { |
446 | if self.map_info.size == 0 { |
447 | return &[]; |
448 | } |
449 | unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) } |
450 | } |
451 | |
452 | #[doc (alias = "get_size" )] |
453 | #[inline ] |
454 | pub fn size(&self) -> usize { |
455 | self.map_info.size |
456 | } |
457 | |
458 | #[doc (alias = "get_memory" )] |
459 | #[inline ] |
460 | pub fn memory(&self) -> &MemoryRef { |
461 | self.memory.as_ref() |
462 | } |
463 | |
    #[inline]
465 | pub fn into_memory(self) -> Memory { |
466 | let mut s = mem::ManuallyDrop::new(self); |
467 | let memory = unsafe { ptr::read(&s.memory) }; |
468 | unsafe { |
469 | ffi::gst_memory_unmap(memory.as_mut_ptr(), &mut s.map_info); |
470 | } |
471 | |
472 | memory |
473 | } |
474 | } |
475 | |
476 | impl MappedMemory<Writable> { |
    #[inline]
478 | pub fn as_mut_slice(&mut self) -> &mut [u8] { |
479 | if self.map_info.size == 0 { |
480 | return &mut []; |
481 | } |
482 | unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) } |
483 | } |
484 | } |
485 | |
486 | impl<T> AsRef<[u8]> for MappedMemory<T> { |
    #[inline]
488 | fn as_ref(&self) -> &[u8] { |
489 | self.as_slice() |
490 | } |
491 | } |
492 | |
493 | impl AsMut<[u8]> for MappedMemory<Writable> { |
    #[inline]
495 | fn as_mut(&mut self) -> &mut [u8] { |
496 | self.as_mut_slice() |
497 | } |
498 | } |
499 | |
500 | impl<T> Deref for MappedMemory<T> { |
501 | type Target = [u8]; |
502 | |
    #[inline]
504 | fn deref(&self) -> &[u8] { |
505 | self.as_slice() |
506 | } |
507 | } |
508 | |
509 | impl DerefMut for MappedMemory<Writable> { |
    #[inline]
511 | fn deref_mut(&mut self) -> &mut [u8] { |
512 | self.as_mut_slice() |
513 | } |
514 | } |
515 | |
516 | impl<T> Drop for MappedMemory<T> { |
    #[inline]
518 | fn drop(&mut self) { |
519 | unsafe { |
520 | ffi::gst_memory_unmap(self.memory.as_mut_ptr(), &mut self.map_info); |
521 | } |
522 | } |
523 | } |
524 | |
525 | impl<T> fmt::Debug for MappedMemory<T> { |
526 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
527 | f.debug_tuple(name:"MappedMemory" ).field(&self.memory()).finish() |
528 | } |
529 | } |
530 | |
531 | impl<T> PartialEq for MappedMemory<T> { |
532 | fn eq(&self, other: &MappedMemory<T>) -> bool { |
533 | self.as_slice().eq(other.as_slice()) |
534 | } |
535 | } |
536 | |
537 | impl<T> Eq for MappedMemory<T> {} |
538 | |
539 | unsafe impl<T> Send for MappedMemory<T> {} |
540 | unsafe impl<T> Sync for MappedMemory<T> {} |
541 | |
542 | pub struct Dump<'a> { |
543 | memory: &'a MemoryRef, |
544 | start: Bound<usize>, |
545 | end: Bound<usize>, |
546 | } |
547 | |
548 | impl Dump<'_> { |
549 | fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result { |
        let map = self.memory.map_readable().expect("Failed to map memory");
        let data = map.as_slice();

        let dump = crate::slice::Dump {
554 | data, |
555 | start: self.start, |
556 | end: self.end, |
557 | }; |
558 | |
559 | if debug { |
560 | <crate::slice::Dump as fmt::Debug>::fmt(&dump, f) |
561 | } else { |
562 | <crate::slice::Dump as fmt::Display>::fmt(&dump, f) |
563 | } |
564 | } |
565 | } |
566 | |
567 | impl fmt::Display for Dump<'_> { |
568 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
        self.fmt(f, false)
570 | } |
571 | } |
572 | |
573 | impl fmt::Debug for Dump<'_> { |
574 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
        self.fmt(f, true)
576 | } |
577 | } |
578 | |
579 | pub unsafe trait MemoryType: crate::prelude::IsMiniObject + AsRef<Memory> |
580 | where |
581 | <Self as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>, |
582 | { |
583 | fn check_memory_type(mem: &MemoryRef) -> bool; |
584 | } |
585 | |
#[derive(Debug, thiserror::Error)]
587 | pub enum MemoryTypeMismatchError { |
588 | #[error(transparent)] |
589 | ValueTypeMismatch(#[from] glib::value::ValueTypeMismatchError), |
590 | #[error("the memory is not of the requested type {requested}" )] |
591 | MemoryTypeMismatch { requested: &'static str }, |
592 | } |
593 | |
594 | pub struct MemoryTypeValueTypeChecker<M>(PhantomData<M>); |
595 | |
596 | unsafe impl<M> glib::value::ValueTypeChecker for MemoryTypeValueTypeChecker<M> |
597 | where |
598 | M: MemoryType + glib::prelude::StaticType, |
599 | <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>, |
600 | { |
601 | type Error = glib::value::ValueTypeMismatchOrNoneError<MemoryTypeMismatchError>; |
602 | |
603 | fn check(value: &glib::Value) -> Result<(), Self::Error> { |
604 | skip_assert_initialized!(); |
605 | let mem = value.get::<&Memory>().map_err(|err| match err { |
606 | glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone => { |
607 | glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone |
608 | } |
609 | glib::value::ValueTypeMismatchOrNoneError::WrongValueType(err) => { |
610 | glib::value::ValueTypeMismatchOrNoneError::WrongValueType( |
611 | MemoryTypeMismatchError::ValueTypeMismatch(err), |
612 | ) |
613 | } |
614 | })?; |
615 | |
616 | if mem.is_memory_type::<M>() { |
617 | Ok(()) |
618 | } else { |
619 | Err(glib::value::ValueTypeMismatchOrNoneError::WrongValueType( |
620 | MemoryTypeMismatchError::MemoryTypeMismatch { |
621 | requested: std::any::type_name::<M>(), |
622 | }, |
623 | )) |
624 | } |
625 | } |
626 | } |
627 | |
628 | impl AsRef<MemoryRef> for MemoryRef { |
    #[inline]
630 | fn as_ref(&self) -> &MemoryRef { |
631 | self |
632 | } |
633 | } |
634 | |
635 | impl AsMut<MemoryRef> for MemoryRef { |
    #[inline]
637 | fn as_mut(&mut self) -> &mut MemoryRef { |
638 | self |
639 | } |
640 | } |
641 | |
642 | impl AsRef<Memory> for Memory { |
    #[inline]
644 | fn as_ref(&self) -> &Memory { |
645 | self |
646 | } |
647 | } |
648 | |
649 | unsafe impl MemoryType for Memory { |
    #[inline]
651 | fn check_memory_type(_mem: &MemoryRef) -> bool { |
652 | skip_assert_initialized!(); |
653 | true |
654 | } |
655 | } |
656 | |
657 | impl Memory { |
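    /// Downcasts `self` into the concrete memory type `M`, handing the
    /// memory back as `Err` when the type check fails.
    ///
    /// A sketch with a hypothetical `MyMemory` wrapper declared through
    /// `memory_object_wrapper!`:
    ///
    /// ```ignore
    /// match mem.downcast_memory::<MyMemory>() {
    ///     Ok(my_mem) => { /* concrete type available */ }
    ///     Err(mem) => { /* still the generic `Memory` */ }
    /// }
    /// ```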
    #[inline]
659 | pub fn downcast_memory<M: MemoryType>(self) -> Result<M, Self> |
660 | where |
661 | <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>, |
662 | { |
663 | if M::check_memory_type(&self) { |
664 | unsafe { Ok(from_glib_full(self.into_glib_ptr() as *mut M::FfiType)) } |
665 | } else { |
666 | Err(self) |
667 | } |
668 | } |
669 | } |
670 | |
671 | impl MemoryRef { |
    #[inline]
673 | pub fn is_memory_type<M: MemoryType>(&self) -> bool |
674 | where |
675 | <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>, |
676 | { |
677 | M::check_memory_type(self) |
678 | } |
679 | |
    #[inline]
681 | pub fn downcast_memory_ref<M: MemoryType>(&self) -> Option<&M::RefType> |
682 | where |
683 | <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>, |
684 | { |
685 | if M::check_memory_type(self) { |
686 | unsafe { Some(&*(self as *const Self as *const M::RefType)) } |
687 | } else { |
688 | None |
689 | } |
690 | } |
691 | |
    #[inline]
693 | pub fn downcast_memory_mut<M: MemoryType>(&mut self) -> Option<&mut M::RefType> |
694 | where |
695 | <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>, |
696 | { |
697 | if M::check_memory_type(self) { |
698 | unsafe { Some(&mut *(self as *mut Self as *mut M::RefType)) } |
699 | } else { |
700 | None |
701 | } |
702 | } |
703 | } |
704 | |
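// A sketch of how this macro is meant to be invoked, with hypothetical
// `MyMemory`/`MyMemoryRef` wrapper types for a hypothetical `ffi::MyMemory`
// FFI struct that embeds a `GstMemory` as its first field:
//
//     memory_object_wrapper!(
//         MyMemory,
//         MyMemoryRef,
//         ffi::MyMemory,
//         |mem: &gst::MemoryRef| mem.is_type("MyMemory"),
//         gst::Memory,
//         gst::MemoryRef
//     );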
#[macro_export]
706 | macro_rules! memory_object_wrapper { |
707 | ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path) => { |
708 | $crate::mini_object_wrapper!($name, $ref_name, $ffi_name); |
709 | |
710 | unsafe impl $crate::memory::MemoryType for $name { |
711 | #[inline] |
712 | fn check_memory_type(mem: &$crate::MemoryRef) -> bool { |
713 | skip_assert_initialized!(); |
714 | $mem_type_check(mem) |
715 | } |
716 | } |
717 | |
718 | impl $name { |
719 | #[inline] |
720 | pub fn downcast_memory<M: $crate::memory::MemoryType>(self) -> Result<M, Self> |
721 | where |
722 | <M as $crate::miniobject::IsMiniObject>::RefType: AsRef<$crate::MemoryRef> |
723 | + AsMut<$crate::MemoryRef> |
724 | + AsRef<$ref_name> |
725 | + AsMut<$ref_name>, |
726 | { |
727 | if M::check_memory_type(&self) { |
728 | unsafe { |
729 | Ok($crate::glib::translate::from_glib_full( |
730 | self.into_glib_ptr() as *mut M::FfiType |
731 | )) |
732 | } |
733 | } else { |
734 | Err(self) |
735 | } |
736 | } |
737 | |
738 | #[inline] |
739 | pub fn upcast_memory<M>(self) -> M |
740 | where |
741 | M: $crate::memory::MemoryType |
742 | + $crate::glib::translate::FromGlibPtrFull< |
743 | *const <M as $crate::miniobject::IsMiniObject>::FfiType, |
744 | >, |
745 | <M as $crate::miniobject::IsMiniObject>::RefType: |
746 | AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>, |
747 | Self: AsRef<M>, |
748 | { |
749 | unsafe { |
750 | $crate::glib::translate::from_glib_full( |
751 | self.into_glib_ptr() as *const <M as $crate::miniobject::IsMiniObject>::FfiType |
752 | ) |
753 | } |
754 | } |
755 | } |
756 | |
757 | impl $ref_name { |
758 | #[inline] |
759 | pub fn upcast_memory_ref<M>(&self) -> &M::RefType |
760 | where |
761 | M: $crate::memory::MemoryType, |
762 | <M as $crate::miniobject::IsMiniObject>::RefType: |
763 | AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>, |
764 | Self: AsRef<M::RefType> + AsMut<M::RefType> |
765 | { |
766 | self.as_ref() |
767 | } |
768 | |
769 | #[inline] |
770 | pub fn upcast_memory_mut<M>(&mut self) -> &mut M::RefType |
771 | where |
772 | M: $crate::memory::MemoryType, |
773 | <M as $crate::miniobject::IsMiniObject>::RefType: |
774 | AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>, |
775 | Self: AsRef<M::RefType> + AsMut<M::RefType> |
776 | { |
777 | self.as_mut() |
778 | } |
779 | } |
780 | |
781 | impl std::ops::Deref for $ref_name { |
782 | type Target = $parent_memory_ref_type; |
783 | |
784 | #[inline] |
785 | fn deref(&self) -> &Self::Target { |
786 | unsafe { &*(self as *const _ as *const Self::Target) } |
787 | } |
788 | } |
789 | |
790 | impl std::ops::DerefMut for $ref_name { |
791 | #[inline] |
792 | fn deref_mut(&mut self) -> &mut Self::Target { |
793 | unsafe { &mut *(self as *mut _ as *mut Self::Target) } |
794 | } |
795 | } |
796 | |
797 | impl AsRef<$parent_memory_type> for $name { |
798 | #[inline] |
799 | fn as_ref(&self) -> &$parent_memory_type { |
800 | unsafe { &*(self as *const _ as *const $parent_memory_type) } |
801 | } |
802 | } |
803 | |
804 | impl AsRef<$parent_memory_ref_type> for $ref_name { |
805 | #[inline] |
806 | fn as_ref(&self) -> &$parent_memory_ref_type { |
807 | self |
808 | } |
809 | } |
810 | |
811 | impl AsMut<$parent_memory_ref_type> for $ref_name { |
812 | #[inline] |
813 | fn as_mut(&mut self) -> &mut $parent_memory_ref_type { |
814 | &mut *self |
815 | } |
816 | } |
817 | |
818 | impl $crate::glib::types::StaticType for $name { |
819 | #[inline] |
            fn static_type() -> $crate::glib::types::Type {
821 | $ref_name::static_type() |
822 | } |
823 | } |
824 | |
825 | impl $crate::glib::types::StaticType for $ref_name { |
826 | #[inline] |
827 | fn static_type() -> $crate::glib::types::Type { |
828 | unsafe { $crate::glib::translate::from_glib($crate::ffi::gst_memory_get_type()) } |
829 | } |
830 | } |
831 | |
832 | impl $crate::glib::value::ValueType for $name { |
833 | type Type = Self; |
834 | } |
835 | |
836 | unsafe impl<'a> $crate::glib::value::FromValue<'a> for $name { |
837 | type Checker = $crate::memory::MemoryTypeValueTypeChecker<Self>; |
838 | |
839 | unsafe fn from_value(value: &'a $crate::glib::Value) -> Self { |
840 | skip_assert_initialized!(); |
841 | $crate::glib::translate::from_glib_none($crate::glib::gobject_ffi::g_value_get_boxed( |
842 | $crate::glib::translate::ToGlibPtr::to_glib_none(value).0, |
843 | ) as *mut $ffi_name) |
844 | } |
845 | } |
846 | |
847 | unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $name { |
848 | type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>; |
849 | |
850 | unsafe fn from_value(value: &'a $crate::glib::Value) -> Self { |
851 | skip_assert_initialized!(); |
852 | assert_eq!( |
853 | std::mem::size_of::<$name>(), |
854 | std::mem::size_of::<$crate::glib::ffi::gpointer>() |
855 | ); |
856 | let value = &*(value as *const $crate::glib::Value as *const $crate::glib::gobject_ffi::GValue); |
857 | let ptr = &value.data[0].v_pointer as *const $crate::glib::ffi::gpointer |
858 | as *const *const $ffi_name; |
859 | debug_assert!(!(*ptr).is_null()); |
860 | &*(ptr as *const $name) |
861 | } |
862 | } |
863 | |
864 | impl $crate::glib::value::ToValue for $name { |
865 | fn to_value(&self) -> $crate::glib::Value { |
866 | let mut value = $crate::glib::Value::for_value_type::<Self>(); |
867 | unsafe { |
868 | $crate::glib::gobject_ffi::g_value_set_boxed( |
869 | $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0, |
870 | $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(self).0 |
871 | as *mut _, |
872 | ) |
873 | } |
874 | value |
875 | } |
876 | |
            fn value_type(&self) -> $crate::glib::Type {
878 | <Self as $crate::glib::prelude::StaticType>::static_type() |
879 | } |
880 | } |
881 | |
882 | impl $crate::glib::value::ToValueOptional for $name { |
883 | fn to_value_optional(s: Option<&Self>) -> $crate::glib::Value { |
884 | skip_assert_initialized!(); |
885 | let mut value = $crate::glib::Value::for_value_type::<Self>(); |
886 | unsafe { |
887 | $crate::glib::gobject_ffi::g_value_set_boxed( |
888 | $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0, |
889 | $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(&s).0 |
890 | as *mut _, |
891 | ) |
892 | } |
893 | value |
894 | } |
895 | } |
896 | |
897 | impl From<$name> for $crate::glib::Value { |
898 | fn from(v: $name) -> $crate::glib::Value { |
899 | skip_assert_initialized!(); |
900 | let mut value = $crate::glib::Value::for_value_type::<$name>(); |
901 | unsafe { |
902 | $crate::glib::gobject_ffi::g_value_take_boxed( |
903 | $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0, |
904 | $crate::glib::translate::IntoGlibPtr::<*mut $ffi_name>::into_glib_ptr(v) as *mut _, |
905 | ) |
906 | } |
907 | value |
908 | } |
909 | } |
910 | |
911 | unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $ref_name { |
912 | type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>; |
913 | |
            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
915 | skip_assert_initialized!(); |
916 | &*($crate::glib::gobject_ffi::g_value_get_boxed($crate::glib::translate::ToGlibPtr::to_glib_none(value).0) |
917 | as *const $ref_name) |
918 | } |
919 | } |
920 | |
921 | // Can't have SetValue/SetValueOptional impls as otherwise one could use it to get |
922 | // immutable references from a mutable reference without borrowing via the value |
923 | }; |
924 | ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path, $($parent_parent_memory_type:path, $parent_parent_memory_ref_type:path),*) => { |
925 | $crate::memory_object_wrapper!($name, $ref_name, $ffi_name, $mem_type_check, $parent_memory_type, $parent_memory_ref_type); |
926 | |
927 | $( |
928 | impl AsRef<$parent_parent_memory_type> for $name { |
929 | #[inline] |
930 | fn as_ref(&self) -> &$parent_parent_memory_type { |
931 | unsafe { &*(self as *const _ as *const $parent_parent_memory_type) } |
932 | } |
933 | } |
934 | |
935 | impl AsRef<$parent_parent_memory_ref_type> for $ref_name { |
936 | #[inline] |
937 | fn as_ref(&self) -> &$parent_parent_memory_ref_type { |
938 | self |
939 | } |
940 | } |
941 | |
942 | impl AsMut<$parent_parent_memory_ref_type> for $ref_name { |
943 | #[inline] |
944 | fn as_mut(&mut self) -> &mut $parent_parent_memory_ref_type { |
945 | &mut *self |
946 | } |
947 | } |
948 | )* |
949 | }; |
950 | } |
951 | |
#[cfg(test)]
953 | mod tests { |
    #[test]
955 | fn test_map() { |
956 | crate::init().unwrap(); |
957 | |
958 | let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]); |
959 | let map = mem.map_readable().unwrap(); |
960 | assert_eq!(map.as_slice(), &[1, 2, 3, 4]); |
961 | drop(map); |
962 | |
963 | let mem = mem.into_mapped_memory_readable().unwrap(); |
964 | assert_eq!(mem.as_slice(), &[1, 2, 3, 4]); |
965 | |
966 | let mem = mem.into_memory(); |
967 | let map = mem.map_readable().unwrap(); |
968 | assert_eq!(map.as_slice(), &[1, 2, 3, 4]); |
969 | } |
970 | |
    #[test]
972 | fn test_share() { |
973 | crate::init().unwrap(); |
974 | |
975 | let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]); |
976 | let sub = mem.share(1..=2); // [2, 3] |
977 | let sub_sub1 = sub.share(1..=1); // [3] |
978 | let sub_sub2 = sub.share_maxsize(0..4); // [1, 2, 3, 4] |
979 | |
980 | let map = mem.map_readable().unwrap(); |
981 | assert_eq!(map.as_slice(), &[1, 2, 3, 4]); |
982 | drop(map); |
983 | |
984 | let map = sub.map_readable().unwrap(); |
985 | assert_eq!(map.as_slice(), &[2, 3]); |
986 | drop(map); |
987 | |
988 | let map = sub_sub1.map_readable().unwrap(); |
989 | assert_eq!(map.as_slice(), &[3]); |
990 | drop(map); |
991 | |
992 | let map = sub_sub2.map_readable().unwrap(); |
993 | assert_eq!(map.as_slice(), &[1, 2, 3, 4]); |
994 | drop(map); |
995 | } |
996 | |
    #[test]
998 | fn test_dump() { |
999 | use std::fmt::Write; |
1000 | |
1001 | crate::init().unwrap(); |
1002 | |
1003 | let mut s = String::new(); |
1004 | let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]); |
1005 | write!(&mut s, "{:?}" , mem.dump()).unwrap(); |
1006 | assert_eq!( |
1007 | s, |
1008 | "0000: 01 02 03 04 ...." |
1009 | ); |
1010 | s.clear(); |
1011 | write!(&mut s, "{}" , mem.dump()).unwrap(); |
1012 | assert_eq!(s, "01 02 03 04" ); |
1013 | s.clear(); |
1014 | |
1015 | let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]); |
1016 | write!(&mut s, "{:?}" , mem.dump_range(..)).unwrap(); |
1017 | assert_eq!( |
1018 | s, |
1019 | "0000: 01 02 03 04 ...." |
1020 | ); |
1021 | s.clear(); |
1022 | write!(&mut s, "{:?}" , mem.dump_range(..2)).unwrap(); |
1023 | assert_eq!( |
1024 | s, |
1025 | "0000: 01 02 .." |
1026 | ); |
1027 | s.clear(); |
1028 | write!(&mut s, "{:?}" , mem.dump_range(2..=3)).unwrap(); |
1029 | assert_eq!( |
1030 | s, |
1031 | "0002: 03 04 .." |
1032 | ); |
1033 | s.clear(); |
1034 | write!(&mut s, "{:?}" , mem.dump_range(..100)).unwrap(); |
1035 | assert_eq!(s, "<end out of range>" ,); |
1036 | s.clear(); |
1037 | write!(&mut s, "{:?}" , mem.dump_range(90..100)).unwrap(); |
1038 | assert_eq!(s, "<start out of range>" ,); |
1039 | s.clear(); |
1040 | |
1041 | let mem = crate::Memory::from_slice(vec![0; 19]); |
1042 | write!(&mut s, "{:?}" , mem.dump()).unwrap(); |
1043 | assert_eq!( |
1044 | s, |
1045 | "0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................ \n\ |
1046 | 0010: 00 00 00 ..." |
1047 | ); |
1048 | s.clear(); |
1049 | } |
1050 | |
    #[test]
1052 | fn test_value() { |
1053 | use glib::prelude::*; |
1054 | |
1055 | crate::init().unwrap(); |
1056 | |
1057 | let v = None::<&crate::Memory>.to_value(); |
1058 | assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(None))); |
1059 | |
1060 | let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]); |
1061 | let v = mem.to_value(); |
1062 | assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(Some(_)))); |
1063 | assert!(v.get::<crate::Memory>().is_ok()); |
1064 | } |
1065 | } |
1066 | |