| 1 | use crate::prelude::*; |
| 2 | use skia_bindings::{self as sb, SkDataTable, SkRefCntBase}; |
| 3 | use std::{ |
| 4 | ffi::{c_void, CStr}, |
| 5 | fmt, mem, |
| 6 | ops::Index, |
| 7 | }; |
| 8 | |
// Reference-counted wrapper over Skia's `SkDataTable`.
pub type DataTable = RCHandle<SkDataTable>;
// NOTE(review): asserts Send/Sync for the handle — presumably sound because
// SkDataTable is immutable after construction; confirm against Skia docs.
unsafe_send_sync!(DataTable);
// Compile-time check that SkDataTable still derives directly from SkRefCnt,
// so the RCHandle base-type assumption below stays valid across binding updates.
require_type_equality!(sb::SkDataTable_INHERITED, sb::SkRefCnt);
| 12 | |
// Wires SkDataTable into the crate's ref-counting machinery: RCHandle
// manipulates the refcount through the SkRefCntBase base object.
impl NativeRefCountedBase for SkDataTable {
    type Base = SkRefCntBase;
}
| 16 | |
| 17 | impl Index<usize> for DataTable { |
| 18 | type Output = [u8]; |
| 19 | |
| 20 | fn index(&self, index: usize) -> &Self::Output { |
| 21 | self.at(index) |
| 22 | } |
| 23 | } |
| 24 | |
| 25 | impl fmt::Debug for DataTable { |
| 26 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
| 27 | f&mut DebugStruct<'_, '_>.debug_struct("DataTable" ) |
| 28 | .field(name:"count" , &self.count()) |
| 29 | .finish() |
| 30 | } |
| 31 | } |
| 32 | |
| 33 | impl DataTable { |
| 34 | pub fn is_empty(&self) -> bool { |
| 35 | self.count() == 0 |
| 36 | } |
| 37 | |
| 38 | pub fn count(&self) -> usize { |
| 39 | unsafe { sb::C_SkDataTable_count(self.native()) } |
| 40 | .try_into() |
| 41 | .unwrap() |
| 42 | } |
| 43 | |
| 44 | pub fn at_size(&self, index: usize) -> usize { |
| 45 | assert!(index < self.count()); |
| 46 | unsafe { self.native().atSize(index.try_into().unwrap()) } |
| 47 | } |
| 48 | |
| 49 | pub fn at(&self, index: usize) -> &[u8] { |
| 50 | unsafe { self.at_t(index) } |
| 51 | } |
| 52 | |
| 53 | #[allow (clippy::missing_safety_doc)] |
| 54 | pub unsafe fn at_t<T: Copy>(&self, index: usize) -> &[T] { |
| 55 | assert!(index < self.count()); |
| 56 | let mut size = usize::default(); |
| 57 | let ptr = self.native().at(index.try_into().unwrap(), &mut size); |
| 58 | let element_size = mem::size_of::<T>(); |
| 59 | assert_eq!(size % element_size, 0); |
| 60 | let elements = size / element_size; |
| 61 | safer::from_raw_parts(ptr as _, elements) |
| 62 | } |
| 63 | |
| 64 | pub fn at_str(&self, index: usize) -> &CStr { |
| 65 | let bytes = self.at(index); |
| 66 | CStr::from_bytes_with_nul(bytes).unwrap() |
| 67 | } |
| 68 | |
| 69 | pub fn new_empty() -> Self { |
| 70 | DataTable::from_ptr(unsafe { sb::C_SkDataTable_MakeEmpty() }).unwrap() |
| 71 | } |
| 72 | |
| 73 | pub fn from_slices(slices: &[&[u8]]) -> Self { |
| 74 | let ptrs: Vec<*const c_void> = slices.iter().map(|s| s.as_ptr() as _).collect(); |
| 75 | let sizes: Vec<usize> = slices.iter().map(|s| s.len()).collect(); |
| 76 | unsafe { |
| 77 | DataTable::from_ptr(sb::C_SkDataTable_MakeCopyArrays( |
| 78 | ptrs.as_ptr(), |
| 79 | sizes.as_ptr(), |
| 80 | slices.len().try_into().unwrap(), |
| 81 | )) |
| 82 | .unwrap() |
| 83 | } |
| 84 | } |
| 85 | |
| 86 | pub fn from_slice<T: Copy>(slice: &[T]) -> Self { |
| 87 | unsafe { |
| 88 | DataTable::from_ptr(sb::C_SkDataTable_MakeCopyArray( |
| 89 | slice.as_ptr() as _, |
| 90 | mem::size_of::<T>(), |
| 91 | slice.len().try_into().unwrap(), |
| 92 | )) |
| 93 | .unwrap() |
| 94 | } |
| 95 | } |
| 96 | |
| 97 | // TODO: wrap MakeArrayProc() |
| 98 | |
| 99 | pub fn iter(&self) -> Iter { |
| 100 | Iter { |
| 101 | table: self, |
| 102 | count: self.count(), |
| 103 | current: 0, |
| 104 | } |
| 105 | } |
| 106 | } |
| 107 | |
/// Iterator over the entries of a [`DataTable`], yielding each entry as a
/// byte slice. Created by [`DataTable::iter`].
#[derive (Debug)]
pub struct Iter<'a> {
    // The table being iterated; entries borrow from it.
    table: &'a DataTable,
    // Entry count snapshotted at construction time.
    count: usize,
    // Index of the next entry to yield.
    current: usize,
}
| 114 | |
| 115 | impl<'a> Iterator for Iter<'a> { |
| 116 | type Item = &'a [u8]; |
| 117 | |
| 118 | fn next(&mut self) -> Option<Self::Item> { |
| 119 | if self.current < self.count { |
| 120 | let r: Option<&[u8]> = Some(self.table.at(self.current)); |
| 121 | self.current += 1; |
| 122 | r |
| 123 | } else { |
| 124 | None |
| 125 | } |
| 126 | } |
| 127 | } |
| 128 | |