1 | // Take a look at the license at the top of the repository in the LICENSE file. |
2 | |
3 | use std::{ |
4 | fmt, |
5 | marker::PhantomData, |
6 | mem, |
7 | ops::{Deref, DerefMut}, |
8 | ptr, slice, |
9 | }; |
10 | |
11 | use glib::translate::*; |
12 | |
13 | use crate::{AllocationParams, Allocator, MemoryFlags}; |
14 | |
// Generates the refcounted `Memory` wrapper and the borrowed `MemoryRef` form
// around `ffi::GstMemory`, registered with the given GType function.
mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || {
    ffi::gst_memory_get_type()
});
18 | |
// RAII guard over a mapped `GstMemory`: holds the borrowed memory plus the
// `GstMapInfo` returned by `gst_memory_map`, and unmaps on drop. The marker
// type `T` (`Readable` or `Writable`) selects which slice accessors exist.
pub struct MemoryMap<'a, T> {
    memory: &'a MemoryRef,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
24 | |
// Like `MemoryMap`, but owns the `Memory` it maps. Created by the
// `Memory::into_mapped_memory_*` methods; `into_memory` unmaps and gives the
// owned memory back.
pub struct MappedMemory<T> {
    memory: Memory,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
30 | |
31 | impl fmt::Debug for Memory { |
32 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
33 | MemoryRef::fmt(self, f) |
34 | } |
35 | } |
36 | |
37 | impl fmt::Debug for MemoryRef { |
38 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
39 | f&mut DebugStruct<'_, '_>.debug_struct("Memory" ) |
40 | .field("ptr" , &self.as_ptr()) |
41 | .field("allocator" , &self.allocator()) |
42 | .field("parent" , &self.parent()) |
43 | .field("maxsize" , &self.maxsize()) |
44 | .field("align" , &self.align()) |
45 | .field("offset" , &self.offset()) |
46 | .field("size" , &self.size()) |
47 | .field(name:"flags" , &self.flags()) |
48 | .finish() |
49 | } |
50 | } |
51 | |
// Uninhabited marker types used as the `T` parameter of `MemoryMap` /
// `MappedMemory` to encode the mapping mode at the type level: `Writable`
// maps unlock the mutable slice accessors, `Readable` maps do not.
pub enum Readable {}
pub enum Writable {}
54 | |
impl Memory {
    // Destroy-notify callback handed to `gst_memory_new_wrapped`: reclaims the
    // boxed Rust value that backs a wrapped memory when GStreamer frees it.
    unsafe extern "C" fn drop_box<T>(vec: glib::ffi::gpointer) {
        let slice: Box<T> = Box::from_raw(vec as *mut T);
        drop(slice);
    }

    /// Allocates a new memory block of `size` bytes using the default
    /// allocator and default allocation parameters.
    pub fn with_size(size: usize) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
        }
    }

    /// Allocates a new memory block of `size` bytes using the default
    /// allocator and the given allocation `params`.
    pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                params.as_ptr() as *mut _,
            ))
        }
    }

    /// Wraps `slice` in a read-only `Memory` without copying.
    ///
    /// The value is boxed and kept alive for the lifetime of the memory;
    /// `drop_box::<T>` releases it when GStreamer frees the block.
    pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            let b = Box::new(slice);
            // Take len/ptr from the boxed value so the pointer stays valid
            // after `b` is leaked below.
            let (size, data) = {
                let slice = (*b).as_ref();
                (slice.len(), slice.as_ptr())
            };
            let user_data = Box::into_raw(b);
            from_glib_full(ffi::gst_memory_new_wrapped(
                ffi::GST_MEMORY_FLAG_READONLY,
                data as glib::ffi::gpointer,
                size,
                0,
                size,
                user_data as glib::ffi::gpointer,
                Some(Self::drop_box::<T>),
            ))
        }
    }

    /// Wraps `slice` in a writable `Memory` without copying.
    ///
    /// Same ownership scheme as [`Self::from_slice`], but the memory is
    /// created without the read-only flag (flags argument `0`).
    pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        unsafe {
            let mut b = Box::new(slice);
            let (size, data) = {
                let slice = (*b).as_mut();
                (slice.len(), slice.as_mut_ptr())
            };
            let user_data = Box::into_raw(b);
            from_glib_full(ffi::gst_memory_new_wrapped(
                0,
                data as glib::ffi::gpointer,
                size,
                0,
                size,
                user_data as glib::ffi::gpointer,
                Some(Self::drop_box::<T>),
            ))
        }
    }

    /// Maps the memory for reading, consuming `self`.
    ///
    /// Returns the owned `MappedMemory` guard on success, or gives `self`
    /// back unchanged if mapping fails.
    #[inline]
    pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedMemory {
                    memory: self,
                    // map_info was filled in by gst_memory_map on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    /// Maps the memory for reading and writing, consuming `self`.
    ///
    /// Returns `self` back unchanged if mapping fails (e.g. the memory is
    /// read-only or not writable without copying).
    #[inline]
    pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedMemory {
                    memory: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }
}
168 | |
impl MemoryRef {
    /// Returns the allocator that created this memory, if any.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                // Reinterpret the field's address as an `&Allocator` wrapper
                // (both are a single pointer) without touching the refcount.
                Some(&*(&self.0.allocator as *const *mut ffi::GstAllocator as *const Allocator))
            }
        }
    }

    /// Returns the parent memory this one was sub-allocated/shared from.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    /// Maximum size this memory can be resized to.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    /// Alignment of the memory data.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    /// Offset of the usable region inside the underlying allocation.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    /// Size of the usable region.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    /// Memory flags (readonly, no-share, zero-prefixed/padded, …).
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    /// Copies `size` bytes (or everything from `offset` on, if `None`) into a
    /// newly allocated `Memory`. A negative `offset` moves the start left,
    /// which must stay within this memory's prefix.
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_part(&self, offset: isize, size: Option<usize>) -> Memory {
        let new_offset = if offset < 0 {
            assert!((-offset) as usize >= self.offset());
            self.offset() - (-offset as usize)
        } else {
            self.offset()
                .checked_add(offset as usize)
                .expect("Too large offset")
        };

        // NOTE(review): bound is exclusive (`<`) — a region ending exactly at
        // maxsize is rejected; confirm this is intended rather than `<=`.
        assert!(new_offset + size.unwrap_or(0) < self.maxsize());

        unsafe {
            from_glib_full(ffi::gst_memory_copy(
                self.as_mut_ptr(),
                offset,
                // -1 tells GStreamer "until the end".
                match size {
                    Some(val) => val as isize,
                    None => -1,
                },
            ))
        }
    }

    /// If `mem2` directly follows this memory in the same parent allocation,
    /// returns the offset of this memory within the parent; otherwise `None`.
    #[doc(alias = "gst_memory_is_span")]
    pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
        unsafe {
            let mut offset = mem::MaybeUninit::uninit();
            let res = from_glib(ffi::gst_memory_is_span(
                self.as_mut_ptr(),
                mem2.as_mut_ptr(),
                offset.as_mut_ptr(),
            ));
            if res {
                Some(offset.assume_init())
            } else {
                None
            }
        }
    }

    /// Checks whether this memory was allocated by an allocator of type
    /// `mem_type`.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: &str) -> bool {
        unsafe {
            from_glib(ffi::gst_memory_is_type(
                self.as_mut_ptr(),
                mem_type.to_glib_none().0,
            ))
        }
    }

    /// Maps the memory for reading; the returned guard unmaps on drop.
    #[inline]
    pub fn map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    memory: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory readable"))
            }
        }
    }

    /// Maps the memory for reading and writing; requires `&mut self` so no
    /// other borrow can observe the data while it may be mutated.
    #[inline]
    pub fn map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    memory: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory writable"))
            }
        }
    }

    /// Creates a new memory sharing (not copying) a sub-region of this one.
    /// Offset/size semantics match [`Self::copy_part`].
    #[doc(alias = "gst_memory_share")]
    pub fn share(&self, offset: isize, size: Option<usize>) -> Memory {
        let new_offset = if offset < 0 {
            assert!((-offset) as usize >= self.offset());
            self.offset() - (-offset as usize)
        } else {
            self.offset()
                .checked_add(offset as usize)
                .expect("Too large offset")
        };

        // NOTE(review): exclusive bound, same question as in `copy_part`.
        assert!(new_offset + size.unwrap_or(0) < self.maxsize());

        unsafe {
            from_glib_full(ffi::gst_memory_share(
                self.as_ptr() as *mut _,
                offset,
                match size {
                    Some(val) => val as isize,
                    None => -1,
                },
            ))
        }
    }

    /// Adjusts offset and size of this memory in place, within `maxsize`.
    #[doc(alias = "gst_memory_resize")]
    pub fn resize(&mut self, offset: isize, size: usize) {
        let new_offset = if offset < 0 {
            assert!((-offset) as usize >= self.offset());
            self.offset() - (-offset as usize)
        } else {
            self.offset()
                .checked_add(offset as usize)
                .expect("Too large offset")
        };

        assert!(new_offset + size < self.maxsize());

        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size) }
    }

    /// Returns a hex-dump helper for the first `size` bytes (or all of them
    /// if `None`); see [`Dump`]'s `Display`/`Debug` impls.
    pub fn dump(&self, size: Option<usize>) -> Dump {
        Dump { memory: self, size }
    }
}
360 | |
impl<'a, T> MemoryMap<'a, T> {
    /// Size of the mapped region in bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// The memory this map borrows from.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        self.memory
    }

    /// The mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            // `map_info.data` may be NULL for an empty mapping; don't build a
            // slice from it.
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }
}
382 | |
impl<'a> MemoryMap<'a, Writable> {
    /// The mapped bytes as a mutable slice; only available for writable maps.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        if self.map_info.size == 0 {
            // Avoid creating a slice from a possibly-NULL pointer.
            return &mut [];
        }
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
392 | |
393 | impl<'a, T> AsRef<[u8]> for MemoryMap<'a, T> { |
394 | #[inline ] |
395 | fn as_ref(&self) -> &[u8] { |
396 | self.as_slice() |
397 | } |
398 | } |
399 | |
400 | impl<'a> AsMut<[u8]> for MemoryMap<'a, Writable> { |
401 | #[inline ] |
402 | fn as_mut(&mut self) -> &mut [u8] { |
403 | self.as_mut_slice() |
404 | } |
405 | } |
406 | |
407 | impl<'a, T> Deref for MemoryMap<'a, T> { |
408 | type Target = [u8]; |
409 | |
410 | #[inline ] |
411 | fn deref(&self) -> &[u8] { |
412 | self.as_slice() |
413 | } |
414 | } |
415 | |
416 | impl<'a> DerefMut for MemoryMap<'a, Writable> { |
417 | #[inline ] |
418 | fn deref_mut(&mut self) -> &mut [u8] { |
419 | self.as_mut_slice() |
420 | } |
421 | } |
422 | |
423 | impl<'a, T> fmt::Debug for MemoryMap<'a, T> { |
424 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
425 | f.debug_tuple(name:"MemoryMap" ).field(&self.memory()).finish() |
426 | } |
427 | } |
428 | |
// Equality compares the mapped bytes, not the identity of the memory.
impl<'a, T> PartialEq for MemoryMap<'a, T> {
    fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<'a, T> Eq for MemoryMap<'a, T> {}

impl<'a, T> Drop for MemoryMap<'a, T> {
    #[inline]
    fn drop(&mut self) {
        // Balance the gst_memory_map() that created this guard.
        unsafe {
            ffi::gst_memory_unmap(self.memory.as_mut_ptr(), &mut self.map_info);
        }
    }
}

// SAFETY: presumably sound because GstMemory and its map API are usable from
// any thread — NOTE(review): confirm against the GStreamer threading docs.
unsafe impl<'a, T> Send for MemoryMap<'a, T> {}
unsafe impl<'a, T> Sync for MemoryMap<'a, T> {}
448 | |
impl<T> MappedMemory<T> {
    /// The mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            // `map_info.data` may be NULL for an empty mapping.
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    /// Size of the mapped region in bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Borrow the underlying memory.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        self.memory.as_ref()
    }

    /// Unmaps and returns the owned `Memory`.
    #[inline]
    pub fn into_memory(self) -> Memory {
        // ManuallyDrop prevents `Drop for MappedMemory` from running (which
        // would unmap a second time); the memory is moved out with ptr::read
        // and unmapped exactly once here.
        let mut s = mem::ManuallyDrop::new(self);
        let memory = unsafe { ptr::read(&s.memory) };
        unsafe {
            ffi::gst_memory_unmap(memory.as_mut_ptr(), &mut s.map_info);
        }

        memory
    }
}
481 | |
impl MappedMemory<Writable> {
    /// The mapped bytes as a mutable slice; only available for writable maps.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        if self.map_info.size == 0 {
            // Avoid creating a slice from a possibly-NULL pointer.
            return &mut [];
        }
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
491 | |
// Slice-like conveniences, forwarding to the inherent accessors.

impl<T> AsRef<[u8]> for MappedMemory<T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl AsMut<[u8]> for MappedMemory<Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<T> Deref for MappedMemory<T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl DerefMut for MappedMemory<Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<T> Drop for MappedMemory<T> {
    #[inline]
    fn drop(&mut self) {
        // Balance the gst_memory_map() performed at construction time.
        unsafe {
            ffi::gst_memory_unmap(self.memory.as_mut_ptr(), &mut self.map_info);
        }
    }
}
530 | |
531 | impl<T> fmt::Debug for MappedMemory<T> { |
532 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
533 | f.debug_tuple(name:"MappedMemory" ).field(&self.memory()).finish() |
534 | } |
535 | } |
536 | |
// Equality compares the mapped bytes, not the identity of the memory.
impl<T> PartialEq for MappedMemory<T> {
    fn eq(&self, other: &MappedMemory<T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T> Eq for MappedMemory<T> {}

// SAFETY: presumably sound because GstMemory and its map API are usable from
// any thread — NOTE(review): confirm against the GStreamer threading docs.
unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}
547 | |
// Lazy hex-dump of (a prefix of) a memory's content, produced by
// `MemoryRef::dump`. Formatting happens in its Display/Debug impls.
pub struct Dump<'a> {
    memory: &'a MemoryRef,
    // Number of bytes to dump; `None` means the memory's full size.
    size: Option<usize>,
}
552 | |
553 | impl<'a> Dump<'a> { |
554 | fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result { |
555 | use pretty_hex::*; |
556 | |
557 | let map: MemoryMap<'_, Readable> = self.memory.map_readable().expect(msg:"Failed to map memory" ); |
558 | let data: &[u8] = map.as_slice(); |
559 | let size: usize = self.size.unwrap_or_else(|| self.memory.size()); |
560 | let data: &[u8] = &data[0..size]; |
561 | |
562 | if debug { |
563 | write!(f, " {:?}" , data.hex_dump()) |
564 | } else { |
565 | write!(f, " {}" , data.hex_dump()) |
566 | } |
567 | } |
568 | } |
569 | |
570 | impl<'a> fmt::Display for Dump<'a> { |
571 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
572 | self.fmt(f, debug:false) |
573 | } |
574 | } |
575 | |
576 | impl<'a> fmt::Debug for Dump<'a> { |
577 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
578 | self.fmt(f, debug:true) |
579 | } |
580 | } |
581 | |
/// Trait implemented by all memory subtypes (via `memory_object_wrapper!`).
///
/// # Safety
///
/// Implementors must guarantee that `check_memory_type` only returns `true`
/// for memories that really are of the implementing type, since downcasts
/// rely on it for pointer reinterpretation.
pub unsafe trait MemoryType: crate::prelude::IsMiniObject + AsRef<Memory>
where
    <Self as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    // Returns true if `mem` is an instance of this memory type.
    fn check_memory_type(mem: &MemoryRef) -> bool;
}
588 | |
// Error type used by `MemoryTypeValueTypeChecker`: either the GValue holds the
// wrong boxed type entirely, or it holds a Memory of the wrong subtype.
#[derive(Debug, thiserror::Error)]
pub enum MemoryTypeMismatchError {
    #[error(transparent)]
    ValueTypeMismatch(#[from] glib::value::ValueTypeMismatchError),
    #[error("the memory is not of the requested type {requested}")]
    MemoryTypeMismatch { requested: &'static str },
}
596 | |
// Zero-sized `glib::value::ValueTypeChecker` that additionally validates the
// memory subtype `M` when extracting from a `glib::Value`.
pub struct MemoryTypeValueTypeChecker<M>(PhantomData<M>);
598 | |
unsafe impl<M> glib::value::ValueTypeChecker for MemoryTypeValueTypeChecker<M>
where
    M: MemoryType + glib::StaticType,
    <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    type Error = glib::value::ValueTypeMismatchOrNoneError<MemoryTypeMismatchError>;

    // Two-stage check: first that the value holds a Memory at all, then that
    // the memory is of the requested subtype `M`.
    fn check(value: &glib::Value) -> Result<(), Self::Error> {
        skip_assert_initialized!();
        // Re-wrap the generic mismatch error into our richer error type.
        let mem = value.get::<&Memory>().map_err(|err| match err {
            glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone => {
                glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone
            }
            glib::value::ValueTypeMismatchOrNoneError::WrongValueType(err) => {
                glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                    MemoryTypeMismatchError::ValueTypeMismatch(err),
                )
            }
        })?;

        if mem.is_memory_type::<M>() {
            Ok(())
        } else {
            Err(glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                MemoryTypeMismatchError::MemoryTypeMismatch {
                    requested: std::any::type_name::<M>(),
                },
            ))
        }
    }
}
630 | |
// Identity conversions so that `Memory`/`MemoryRef` satisfy the bounds of
// `MemoryType` and the downcast/upcast helpers.

impl AsRef<MemoryRef> for MemoryRef {
    #[inline]
    fn as_ref(&self) -> &MemoryRef {
        self
    }
}

impl AsMut<MemoryRef> for MemoryRef {
    #[inline]
    fn as_mut(&mut self) -> &mut MemoryRef {
        self
    }
}

impl AsRef<Memory> for Memory {
    #[inline]
    fn as_ref(&self) -> &Memory {
        self
    }
}

// The root memory type: every GstMemory is a `Memory`, so the check is
// unconditionally true.
unsafe impl MemoryType for Memory {
    #[inline]
    fn check_memory_type(_mem: &MemoryRef) -> bool {
        skip_assert_initialized!();
        true
    }
}
659 | |
impl Memory {
    /// Downcasts this memory to the subtype `M`, returning `self` unchanged
    /// if it is not of that type.
    #[inline]
    pub fn downcast_memory<M: MemoryType>(self) -> Result<M, Self>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(&self) {
            // The check above guarantees the pointer reinterpretation is valid.
            unsafe { Ok(from_glib_full(self.into_glib_ptr() as *mut M::FfiType)) }
        } else {
            Err(self)
        }
    }
}
673 | |
impl MemoryRef {
    /// Returns true if this memory is of subtype `M`.
    #[inline]
    pub fn is_memory_type<M: MemoryType>(&self) -> bool
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        M::check_memory_type(self)
    }

    /// Downcasts a borrowed memory to the ref type of subtype `M`.
    #[inline]
    pub fn downcast_memory_ref<M: MemoryType>(&self) -> Option<&M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // Valid because the type check passed and ref types are
            // transparent wrappers over the FFI struct.
            unsafe { Some(&*(self as *const Self as *const M::RefType)) }
        } else {
            None
        }
    }

    /// Mutable variant of [`Self::downcast_memory_ref`].
    #[inline]
    pub fn downcast_memory_mut<M: MemoryType>(&mut self) -> Option<&mut M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            unsafe { Some(&mut *(self as *mut Self as *mut M::RefType)) }
        } else {
            None
        }
    }
}
707 | |
/// Generates a full memory-subtype wrapper (`$name` / `$ref_name`) around the
/// FFI type `$ffi_name`: the mini-object wrapper itself, the `MemoryType`
/// impl driven by `$mem_type_check`, down/upcast helpers, `Deref` to the
/// parent ref type, and the glib `Value` integration. The second macro arm
/// additionally emits `AsRef`/`AsMut` impls for grandparent types.
#[macro_export]
macro_rules! memory_object_wrapper {
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path) => {
        $crate::mini_object_wrapper!($name, $ref_name, $ffi_name);

        unsafe impl $crate::memory::MemoryType for $name {
            #[inline]
            fn check_memory_type(mem: &$crate::MemoryRef) -> bool {
                skip_assert_initialized!();
                $mem_type_check(mem)
            }
        }

        impl $name {
            // Downcast to a further subtype M, or give self back on mismatch.
            #[inline]
            pub fn downcast_memory<M: $crate::memory::MemoryType>(self) -> Result<M, Self>
            where
                <M as $crate::miniobject::IsMiniObject>::RefType: AsRef<$crate::MemoryRef>
                    + AsMut<$crate::MemoryRef>
                    + AsRef<$ref_name>
                    + AsMut<$ref_name>,
            {
                if M::check_memory_type(&self) {
                    unsafe {
                        Ok($crate::glib::translate::from_glib_full(
                            self.into_glib_ptr() as *mut M::FfiType
                        ))
                    }
                } else {
                    Err(self)
                }
            }

            // Infallible upcast to a parent memory type (checked via the
            // `Self: AsRef<M>` bound).
            #[inline]
            pub fn upcast_memory<M>(self) -> M
            where
                M: $crate::memory::MemoryType
                    + $crate::glib::translate::FromGlibPtrFull<
                        *const <M as $crate::miniobject::IsMiniObject>::FfiType,
                    >,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M>,
            {
                unsafe {
                    $crate::glib::translate::from_glib_full(
                        self.into_glib_ptr() as *const <M as $crate::miniobject::IsMiniObject>::FfiType
                    )
                }
            }
        }

        impl $ref_name {
            // Borrowed upcasts, statically checked through the AsRef/AsMut bounds.
            #[inline]
            pub fn upcast_memory_ref<M>(&self) -> &M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_ref()
            }

            #[inline]
            pub fn upcast_memory_mut<M>(&mut self) -> &mut M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_mut()
            }
        }

        // Ref types are transparent wrappers, so deref to the parent ref type
        // is a plain pointer reinterpretation.
        impl std::ops::Deref for $ref_name {
            type Target = $parent_memory_ref_type;

            #[inline]
            fn deref(&self) -> &Self::Target {
                unsafe { &*(self as *const _ as *const Self::Target) }
            }
        }

        impl std::ops::DerefMut for $ref_name {
            #[inline]
            fn deref_mut(&mut self) -> &mut Self::Target {
                unsafe { &mut *(self as *mut _ as *mut Self::Target) }
            }
        }

        impl AsRef<$parent_memory_type> for $name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_type {
                unsafe { &*(self as *const _ as *const $parent_memory_type) }
            }
        }

        impl AsRef<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_ref_type {
                self
            }
        }

        impl AsMut<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_mut(&mut self) -> &mut $parent_memory_ref_type {
                &mut *self
            }
        }

        impl $crate::glib::types::StaticType for $name {
            #[inline]
            fn static_type() -> glib::types::Type {
                $ref_name::static_type()
            }
        }

        impl $crate::glib::types::StaticType for $ref_name {
            #[inline]
            fn static_type() -> $crate::glib::types::Type {
                unsafe { $crate::glib::translate::from_glib($crate::ffi::gst_memory_get_type()) }
            }
        }

        impl $crate::glib::value::ValueType for $name {
            type Type = Self;
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<Self>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                $crate::glib::translate::from_glib_none($crate::glib::gobject_ffi::g_value_get_boxed(
                    $crate::glib::translate::ToGlibPtr::to_glib_none(value).0,
                ) as *mut $ffi_name)
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                // The wrapper must be pointer-sized for the reinterpretation
                // of the GValue's payload below to be valid.
                assert_eq!(
                    std::mem::size_of::<$name>(),
                    std::mem::size_of::<$crate::glib::ffi::gpointer>()
                );
                let value = &*(value as *const $crate::glib::Value as *const $crate::glib::gobject_ffi::GValue);
                let ptr = &value.data[0].v_pointer as *const $crate::glib::ffi::gpointer
                    as *const *const $ffi_name;
                debug_assert!(!(*ptr).is_null());
                &*(ptr as *const $name)
            }
        }

        impl $crate::glib::value::ToValue for $name {
            fn to_value(&self) -> $crate::glib::Value {
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(self).0
                            as *mut _,
                    )
                }
                value
            }

            fn value_type(&self) -> glib::Type {
                <Self as glib::StaticType>::static_type()
            }
        }

        impl $crate::glib::value::ToValueOptional for $name {
            fn to_value_optional(s: Option<&Self>) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(&s).0
                            as *mut _,
                    )
                }
                value
            }
        }

        impl From<$name> for $crate::glib::Value {
            fn from(v: $name) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<$name>();
                unsafe {
                    // take_boxed transfers ownership, so consume `v` here.
                    $crate::glib::gobject_ffi::g_value_take_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::IntoGlibPtr::<*mut $ffi_name>::into_glib_ptr(v) as *mut _,
                    )
                }
                value
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $ref_name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a glib::Value) -> Self {
                skip_assert_initialized!();
                &*($crate::glib::gobject_ffi::g_value_get_boxed($crate::glib::translate::ToGlibPtr::to_glib_none(value).0)
                    as *const $ref_name)
            }
        }

        // Can't have SetValue/SetValueOptional impls as otherwise one could use it to get
        // immutable references from a mutable reference without borrowing via the value
    };
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path, $($parent_parent_memory_type:path, $parent_parent_memory_ref_type:path),*) => {
        $crate::memory_object_wrapper!($name, $ref_name, $ffi_name, $mem_type_check, $parent_memory_type, $parent_memory_ref_type);

        $(
            impl AsRef<$parent_parent_memory_type> for $name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_type {
                    unsafe { &*(self as *const _ as *const $parent_parent_memory_type) }
                }
            }

            impl AsRef<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_ref_type {
                    self
                }
            }

            impl AsMut<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_mut(&mut self) -> &mut $parent_parent_memory_ref_type {
                    &mut *self
                }
            }
        )*
    };
}
954 | |
#[cfg(test)]
mod tests {
    // Exercises both mapping styles (borrowed map and owned MappedMemory)
    // and the round-trip back to Memory.
    #[test]
    fn test_map() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        let mem = mem.into_mapped_memory_readable().unwrap();
        assert_eq!(mem.as_slice(), &[1, 2, 3, 4]);

        let mem = mem.into_memory();
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
    }

    // Smoke-tests Display/Debug formatting of Dump with explicit and default
    // sizes; only checks that formatting doesn't panic.
    #[test]
    fn test_dump() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        println!(" {}", mem.dump(Some(mem.size())));

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        println!(" {:?}", mem.dump(Some(2)));

        let mem = crate::Memory::from_slice(vec![0; 64]);
        dbg!(mem.dump(None));
    }

    // Round-trips Memory through glib::Value, including the None case.
    #[test]
    fn test_value() {
        use glib::prelude::*;

        crate::init().unwrap();

        let v = None::<&crate::Memory>.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(None)));

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let v = mem.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(Some(_))));
        assert!(v.get::<crate::Memory>().is_ok());
    }
}
1003 | |