1 | use super::*; |
// Internal parse result: every failure collapses to `()` and surfaces to
// callers of `File::new` as `None` (see the `.ok()` call there).
type Result<T> = std::result::Result<T, ()>;
3 | |
/// A parsed winmd (ECMA-335 metadata) file image.
pub struct File {
    // Back-pointer to the owning `Reader`. NOTE(review): `File::ok` leaves
    // this null — presumably patched in by the reader after construction;
    // confirm at the construction site (outside this file).
    pub reader: *const Reader,
    // The raw file image; every offset stored below indexes into this buffer.
    pub bytes: Vec<u8>,
    // Byte offset of the `#Strings` heap within `bytes` (0 until parsed).
    pub strings: usize,
    // Byte offset of the `#Blob` heap within `bytes` (0 until parsed).
    pub blobs: usize,
    // Row count, column widths, and data offset for each metadata table the
    // reader models.
    pub tables: [Table; 17],
}
11 | |
12 | impl std::fmt::Debug for File { |
13 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { |
14 | std::write!(f, " {:?}" , self.bytes.as_ptr()) |
15 | } |
16 | } |
17 | |
18 | impl std::hash::Hash for File { |
19 | fn hash<H: std::hash::Hasher>(&self, state: &mut H) { |
20 | self.bytes.as_ptr().hash(state); |
21 | } |
22 | } |
23 | |
24 | impl PartialEq for File { |
25 | fn eq(&self, other: &Self) -> bool { |
26 | self.bytes.as_ptr() == other.bytes.as_ptr() |
27 | } |
28 | } |
29 | |
// Pointer equality is a full equivalence relation, so `Eq` is sound.
impl Eq for File {}
31 | |
32 | impl Ord for File { |
33 | fn cmp(&self, other: &Self) -> std::cmp::Ordering { |
34 | self.bytes.as_ptr().cmp(&other.bytes.as_ptr()) |
35 | } |
36 | } |
37 | |
38 | impl PartialOrd for File { |
39 | fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { |
40 | Some(self.cmp(other)) |
41 | } |
42 | } |
43 | |
// SAFETY: `File` is `!Sync` by default only because of the raw `reader`
// pointer. NOTE(review): this impl asserts that sharing the pointer across
// threads is sound — presumably because `File` is read-only after
// construction and `Reader` access is itself thread-safe; confirm at the
// use sites (outside this file).
unsafe impl Sync for File {}
45 | |
impl File {
    /// Parses a winmd file image, returning `None` if the bytes are not a
    /// well-formed ECMA-335 metadata file.
    pub fn new(bytes: Vec<u8>) -> Option<Self> {
        Self::ok(bytes).ok()
    }

    /// Walks the image from the DOS header down to the CLR metadata tables,
    /// recording the `#Strings`/`#Blob` heap offsets and the row count,
    /// column widths, and data offset of every table the reader uses.
    /// Any malformed or truncated structure aborts with `Err(())`.
    fn ok(bytes: Vec<u8>) -> Result<Self> {
        let mut result = File { bytes, reader: std::ptr::null(), strings: 0, blobs: 0, tables: Default::default() };

        let dos = result.bytes.view_as::<IMAGE_DOS_HEADER>(0)?;

        // The DOS header must carry the MZ signature, and `e_lfanew` must
        // point at the PE ("NT") signature.
        if dos.e_magic != IMAGE_DOS_SIGNATURE || result.bytes.copy_as::<u32>(dos.e_lfanew as usize)? != IMAGE_NT_SIGNATURE {
            return Err(());
        }

        // The file header sits directly after the 4-byte PE signature.
        let file_offset = dos.e_lfanew as usize + std::mem::size_of::<u32>();
        let file = result.bytes.view_as::<IMAGE_FILE_HEADER>(file_offset)?;

        let optional_offset = file_offset + std::mem::size_of::<IMAGE_FILE_HEADER>();

        // The optional header's magic distinguishes PE32 from PE32+; either
        // way we need the COM (CLR) descriptor's RVA plus the section table
        // that immediately follows the optional header.
        let (com_virtual_address, sections) = match result.bytes.copy_as::<u16>(optional_offset)? {
            IMAGE_NT_OPTIONAL_HDR32_MAGIC => {
                let optional = result.bytes.view_as::<IMAGE_OPTIONAL_HEADER32>(optional_offset)?;
                (optional.DataDirectory[IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR as usize].VirtualAddress, result.bytes.view_as_slice_of::<IMAGE_SECTION_HEADER>(optional_offset + std::mem::size_of::<IMAGE_OPTIONAL_HEADER32>(), file.NumberOfSections as usize)?)
            }
            IMAGE_NT_OPTIONAL_HDR64_MAGIC => {
                let optional = result.bytes.view_as::<IMAGE_OPTIONAL_HEADER64>(optional_offset)?;
                (optional.DataDirectory[IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR as usize].VirtualAddress, result.bytes.view_as_slice_of::<IMAGE_SECTION_HEADER>(optional_offset + std::mem::size_of::<IMAGE_OPTIONAL_HEADER64>(), file.NumberOfSections as usize)?)
            }
            _ => return Err(()),
        };

        // Resolve the CLR header through the section table (RVA -> file offset).
        let clr = result.bytes.view_as::<IMAGE_COR20_HEADER>(offset_from_rva(section_from_rva(sections, com_virtual_address)?, com_virtual_address))?;

        if clr.cb != std::mem::size_of::<IMAGE_COR20_HEADER>() as u32 {
            return Err(());
        }

        let metadata_offset = offset_from_rva(section_from_rva(sections, clr.MetaData.VirtualAddress)?, clr.MetaData.VirtualAddress);
        let metadata = result.bytes.view_as::<METADATA_HEADER>(metadata_offset)?;

        if metadata.signature != METADATA_SIGNATURE {
            return Err(());
        }

        // The METADATA_HEADER struct is not a fixed size so have to offset a little more carefully.
        // The u16 stream count sits 18 bytes past the version string and the
        // stream headers begin at 20; each header is (u32 offset, u32 size,
        // null-terminated name padded to a 4-byte boundary).
        let mut view = metadata_offset + metadata.length as usize + 20;
        let mut tables_data: (usize, usize) = (0, 0);

        for _ in 0..result.bytes.copy_as::<u16>(metadata_offset + metadata.length as usize + 18)? {
            let stream_offset = result.bytes.copy_as::<u32>(view)? as usize;
            let stream_len = result.bytes.copy_as::<u32>(view + 4)? as usize;
            let stream_name = result.bytes.view_as_str(view + 8)?;
            match stream_name {
                b"#Strings" => result.strings = metadata_offset + stream_offset,
                b"#Blob" => result.blobs = metadata_offset + stream_offset,
                b"#~" => tables_data = (metadata_offset + stream_offset, stream_len),
                // The GUID and user-string heaps are not used by this reader.
                b"#GUID" => {}
                b"#US" => {}
                rest => unimplemented!(" {rest:?}"),
            }
            // Advance past the 8-byte header plus the padded name. A name
            // whose length is already a multiple of 4 still consumes a full
            // extra 4 bytes (terminator plus padding).
            let mut padding = 4 - stream_name.len() % 4;
            if padding == 0 {
                padding = 4;
            }
            view += 8 + stream_name.len() + padding;
        }

        // `#~` header: byte 6 holds the HeapSizes flags — bits 0, 1 and 2
        // widen the string, GUID and blob heap indexes from 2 to 4 bytes.
        // NOTE(review): if no `#~` stream was present, `tables_data` is (0, 0)
        // and these reads come from the start of the file — confirm such
        // images cannot reach this point, or fail earlier.
        let heap_sizes = result.bytes.copy_as::<u8>(tables_data.0 + 6)?;
        let string_index_size = if (heap_sizes & 1) == 1 { 4 } else { 2 };
        let guid_index_size = if (heap_sizes >> 1 & 1) == 1 { 4 } else { 2 };
        let blob_index_size = if (heap_sizes >> 2 & 1) == 1 { 4 } else { 2 };
        // Bitmask of which tables are present; one u32 row count per set bit
        // follows the 24-byte `#~` header, in table-number order.
        let valid_bits = result.bytes.copy_as::<u64>(tables_data.0 + 8)?;
        view = tables_data.0 + 24;

        // These tables are unused by the reader, but needed temporarily to calculate sizes and offsets for subsequent tables.
        let unused_empty = Table::default();
        let mut unused_assembly = Table::default();
        let mut unused_assembly_os = Table::default();
        let mut unused_assembly_processor = Table::default();
        let mut unused_assembly_ref_os = Table::default();
        let mut unused_assembly_ref_processor = Table::default();
        let mut unused_decl_security = Table::default();
        let mut unused_event = Table::default();
        let mut unused_event_map = Table::default();
        let mut unused_exported_type = Table::default();
        let mut unused_field_layout = Table::default();
        let mut unused_field_marshal = Table::default();
        let mut unused_field_rva = Table::default();
        let mut unused_file = Table::default();
        let mut unused_generic_param_constraint = Table::default();
        let mut unused_manifest_resource = Table::default();
        let mut unused_method_impl = Table::default();
        let mut unused_method_semantics = Table::default();
        let mut unused_method_spec = Table::default();
        let mut unused_property = Table::default();
        let mut unused_property_map = Table::default();
        let mut unused_standalone_sig = Table::default();

        // Read the row count of every table flagged in `valid_bits`; the
        // table numbers match ECMA-335 §II.22.
        for i in 0..64 {
            if (valid_bits >> i & 1) == 0 {
                continue;
            }

            let len = result.bytes.copy_as::<u32>(view)? as usize;
            view += 4;

            match i {
                0x00 => result.tables[Module::TABLE].len = len,
                0x01 => result.tables[TypeRef::TABLE].len = len,
                0x02 => result.tables[TypeDef::TABLE].len = len,
                0x04 => result.tables[Field::TABLE].len = len,
                0x06 => result.tables[MethodDef::TABLE].len = len,
                0x08 => result.tables[Param::TABLE].len = len,
                0x09 => result.tables[InterfaceImpl::TABLE].len = len,
                0x0a => result.tables[MemberRef::TABLE].len = len,
                0x0b => result.tables[Constant::TABLE].len = len,
                0x0c => result.tables[Attribute::TABLE].len = len,
                0x0d => unused_field_marshal.len = len,
                0x0e => unused_decl_security.len = len,
                0x0f => result.tables[ClassLayout::TABLE].len = len,
                0x10 => unused_field_layout.len = len,
                0x11 => unused_standalone_sig.len = len,
                0x12 => unused_event_map.len = len,
                0x14 => unused_event.len = len,
                0x15 => unused_property_map.len = len,
                0x17 => unused_property.len = len,
                0x18 => unused_method_semantics.len = len,
                0x19 => unused_method_impl.len = len,
                0x1a => result.tables[ModuleRef::TABLE].len = len,
                0x1b => result.tables[TypeSpec::TABLE].len = len,
                0x1c => result.tables[ImplMap::TABLE].len = len,
                0x1d => unused_field_rva.len = len,
                0x20 => unused_assembly.len = len,
                0x21 => unused_assembly_processor.len = len,
                0x22 => unused_assembly_os.len = len,
                0x23 => result.tables[AssemblyRef::TABLE].len = len,
                0x24 => unused_assembly_ref_processor.len = len,
                0x25 => unused_assembly_ref_os.len = len,
                0x26 => unused_file.len = len,
                0x27 => unused_exported_type.len = len,
                0x28 => unused_manifest_resource.len = len,
                0x29 => result.tables[NestedClass::TABLE].len = len,
                0x2a => result.tables[GenericParam::TABLE].len = len,
                0x2b => unused_method_spec.len = len,
                0x2c => unused_generic_param_constraint.len = len,
                // A table number not modeled above means an image this
                // reader does not support — this panics rather than erring.
                _ => unreachable!(),
            };
        }

        // Coded index widths: 2 bytes unless one of the target tables is
        // large enough to require 4 (decided by `coded_index_size`, defined
        // elsewhere).
        let tables = &result.tables;
        let type_def_or_ref = coded_index_size(&[tables[TypeDef::TABLE].len, tables[TypeRef::TABLE].len, tables[TypeSpec::TABLE].len]);
        let has_constant = coded_index_size(&[tables[Field::TABLE].len, tables[Param::TABLE].len, unused_property.len]);
        let has_field_marshal = coded_index_size(&[tables[Field::TABLE].len, tables[Param::TABLE].len]);
        let has_decl_security = coded_index_size(&[tables[TypeDef::TABLE].len, tables[MethodDef::TABLE].len, unused_assembly.len]);
        let member_ref_parent = coded_index_size(&[tables[TypeDef::TABLE].len, tables[TypeRef::TABLE].len, tables[ModuleRef::TABLE].len, tables[MethodDef::TABLE].len, tables[TypeSpec::TABLE].len]);
        let has_semantics = coded_index_size(&[unused_event.len, unused_property.len]);
        let method_def_or_ref = coded_index_size(&[tables[MethodDef::TABLE].len, tables[MemberRef::TABLE].len]);
        let member_forwarded = coded_index_size(&[tables[Field::TABLE].len, tables[MethodDef::TABLE].len]);
        let implementation = coded_index_size(&[unused_file.len, tables[AssemblyRef::TABLE].len, unused_exported_type.len]);
        let custom_attribute_type = coded_index_size(&[tables[MethodDef::TABLE].len, tables[MemberRef::TABLE].len, unused_empty.len, unused_empty.len, unused_empty.len]);
        let resolution_scope = coded_index_size(&[tables[Module::TABLE].len, tables[ModuleRef::TABLE].len, tables[AssemblyRef::TABLE].len, tables[TypeRef::TABLE].len]);
        let type_or_method_def = coded_index_size(&[tables[TypeDef::TABLE].len, tables[MethodDef::TABLE].len]);

        let has_custom_attribute = coded_index_size(&[
            tables[MethodDef::TABLE].len,
            tables[Field::TABLE].len,
            tables[TypeRef::TABLE].len,
            tables[TypeDef::TABLE].len,
            tables[Param::TABLE].len,
            tables[InterfaceImpl::TABLE].len,
            tables[MemberRef::TABLE].len,
            tables[Module::TABLE].len,
            unused_property.len,
            unused_event.len,
            unused_standalone_sig.len,
            tables[ModuleRef::TABLE].len,
            tables[TypeSpec::TABLE].len,
            unused_assembly.len,
            tables[AssemblyRef::TABLE].len,
            unused_file.len,
            unused_exported_type.len,
            unused_manifest_resource.len,
            tables[GenericParam::TABLE].len,
            unused_generic_param_constraint.len,
            unused_method_spec.len,
        ]);

        // Column widths (in bytes) for every table, in ECMA-335 column
        // order; trailing zeros are unused column slots.
        unused_assembly.set_columns(4, 8, 4, blob_index_size, string_index_size, string_index_size);
        unused_assembly_os.set_columns(4, 4, 4, 0, 0, 0);
        unused_assembly_processor.set_columns(4, 0, 0, 0, 0, 0);
        result.tables[AssemblyRef::TABLE].set_columns(8, 4, blob_index_size, string_index_size, string_index_size, blob_index_size);
        unused_assembly_ref_os.set_columns(4, 4, 4, result.tables[AssemblyRef::TABLE].index_width(), 0, 0);
        unused_assembly_ref_processor.set_columns(4, result.tables[AssemblyRef::TABLE].index_width(), 0, 0, 0, 0);
        result.tables[ClassLayout::TABLE].set_columns(2, 4, result.tables[TypeDef::TABLE].index_width(), 0, 0, 0);
        result.tables[Constant::TABLE].set_columns(2, has_constant, blob_index_size, 0, 0, 0);
        result.tables[Attribute::TABLE].set_columns(has_custom_attribute, custom_attribute_type, blob_index_size, 0, 0, 0);
        unused_decl_security.set_columns(2, has_decl_security, blob_index_size, 0, 0, 0);
        unused_event_map.set_columns(result.tables[TypeDef::TABLE].index_width(), unused_event.index_width(), 0, 0, 0, 0);
        unused_event.set_columns(2, string_index_size, type_def_or_ref, 0, 0, 0);
        unused_exported_type.set_columns(4, 4, string_index_size, string_index_size, implementation, 0);
        result.tables[Field::TABLE].set_columns(2, string_index_size, blob_index_size, 0, 0, 0);
        unused_field_layout.set_columns(4, result.tables[Field::TABLE].index_width(), 0, 0, 0, 0);
        unused_field_marshal.set_columns(has_field_marshal, blob_index_size, 0, 0, 0, 0);
        unused_field_rva.set_columns(4, result.tables[Field::TABLE].index_width(), 0, 0, 0, 0);
        unused_file.set_columns(4, string_index_size, blob_index_size, 0, 0, 0);
        result.tables[GenericParam::TABLE].set_columns(2, 2, type_or_method_def, string_index_size, 0, 0);
        unused_generic_param_constraint.set_columns(result.tables[GenericParam::TABLE].index_width(), type_def_or_ref, 0, 0, 0, 0);
        result.tables[ImplMap::TABLE].set_columns(2, member_forwarded, string_index_size, result.tables[ModuleRef::TABLE].index_width(), 0, 0);
        result.tables[InterfaceImpl::TABLE].set_columns(result.tables[TypeDef::TABLE].index_width(), type_def_or_ref, 0, 0, 0, 0);
        unused_manifest_resource.set_columns(4, 4, string_index_size, implementation, 0, 0);
        result.tables[MemberRef::TABLE].set_columns(member_ref_parent, string_index_size, blob_index_size, 0, 0, 0);
        result.tables[MethodDef::TABLE].set_columns(4, 2, 2, string_index_size, blob_index_size, result.tables[Param::TABLE].index_width());
        unused_method_impl.set_columns(result.tables[TypeDef::TABLE].index_width(), method_def_or_ref, method_def_or_ref, 0, 0, 0);
        unused_method_semantics.set_columns(2, result.tables[MethodDef::TABLE].index_width(), has_semantics, 0, 0, 0);
        unused_method_spec.set_columns(method_def_or_ref, blob_index_size, 0, 0, 0, 0);
        result.tables[Module::TABLE].set_columns(2, string_index_size, guid_index_size, guid_index_size, guid_index_size, 0);
        result.tables[ModuleRef::TABLE].set_columns(string_index_size, 0, 0, 0, 0, 0);
        result.tables[NestedClass::TABLE].set_columns(result.tables[TypeDef::TABLE].index_width(), result.tables[TypeDef::TABLE].index_width(), 0, 0, 0, 0);
        result.tables[Param::TABLE].set_columns(2, 2, string_index_size, 0, 0, 0);
        unused_property.set_columns(2, string_index_size, blob_index_size, 0, 0, 0);
        unused_property_map.set_columns(result.tables[TypeDef::TABLE].index_width(), unused_property.index_width(), 0, 0, 0, 0);
        unused_standalone_sig.set_columns(blob_index_size, 0, 0, 0, 0, 0);
        result.tables[TypeDef::TABLE].set_columns(4, string_index_size, string_index_size, type_def_or_ref, result.tables[Field::TABLE].index_width(), result.tables[MethodDef::TABLE].index_width());
        result.tables[TypeRef::TABLE].set_columns(resolution_scope, string_index_size, string_index_size, 0, 0, 0);
        result.tables[TypeSpec::TABLE].set_columns(blob_index_size, 0, 0, 0, 0, 0);

        // Assign each table's data offset in the physical order the tables
        // are stored in the `#~` stream. `set_data` presumably advances
        // `view` past the table's rows (defined elsewhere) — the unused
        // tables must still participate so subsequent offsets line up.
        result.tables[Module::TABLE].set_data(&mut view);
        result.tables[TypeRef::TABLE].set_data(&mut view);
        result.tables[TypeDef::TABLE].set_data(&mut view);
        result.tables[Field::TABLE].set_data(&mut view);
        result.tables[MethodDef::TABLE].set_data(&mut view);
        result.tables[Param::TABLE].set_data(&mut view);
        result.tables[InterfaceImpl::TABLE].set_data(&mut view);
        result.tables[MemberRef::TABLE].set_data(&mut view);
        result.tables[Constant::TABLE].set_data(&mut view);
        result.tables[Attribute::TABLE].set_data(&mut view);
        unused_field_marshal.set_data(&mut view);
        unused_decl_security.set_data(&mut view);
        result.tables[ClassLayout::TABLE].set_data(&mut view);
        unused_field_layout.set_data(&mut view);
        unused_standalone_sig.set_data(&mut view);
        unused_event_map.set_data(&mut view);
        unused_event.set_data(&mut view);
        unused_property_map.set_data(&mut view);
        unused_property.set_data(&mut view);
        unused_method_semantics.set_data(&mut view);
        unused_method_impl.set_data(&mut view);
        result.tables[ModuleRef::TABLE].set_data(&mut view);
        result.tables[TypeSpec::TABLE].set_data(&mut view);
        result.tables[ImplMap::TABLE].set_data(&mut view);
        unused_field_rva.set_data(&mut view);
        unused_assembly.set_data(&mut view);
        unused_assembly_processor.set_data(&mut view);
        unused_assembly_os.set_data(&mut view);
        result.tables[AssemblyRef::TABLE].set_data(&mut view);
        unused_assembly_ref_processor.set_data(&mut view);
        unused_assembly_ref_os.set_data(&mut view);
        unused_file.set_data(&mut view);
        unused_exported_type.set_data(&mut view);
        unused_manifest_resource.set_data(&mut view);
        result.tables[NestedClass::TABLE].set_data(&mut view);
        result.tables[GenericParam::TABLE].set_data(&mut view);

        Ok(result)
    }

    /// Reads the cell at (`row`, `column`) of `table`, widening its 1-, 2-,
    /// 4- or 8-byte physical value to `usize`.
    /// An out-of-bounds read quietly yields 0 rather than failing.
    pub fn usize(&self, row: usize, table: usize, column: usize) -> usize {
        let table = &self.tables[table];
        let column = &table.columns[column];
        let offset = table.offset + row * table.width + column.offset;
        match column.width {
            1 => self.bytes.copy_as::<u8>(offset).map_or(0, |v| v as usize),
            2 => self.bytes.copy_as::<u16>(offset).map_or(0, |v| v as usize),
            4 => self.bytes.copy_as::<u32>(offset).map_or(0, |v| v as usize),
            // Any other width is treated as 8 bytes.
            _ => self.bytes.copy_as::<u64>(offset).map_or(0, |v| v as usize),
        }
    }

    /// Binary search: returns the first row in `first..last` whose value in
    /// `column` is not less than `value`. Assumes the rows are sorted on
    /// that column over the searched range.
    pub fn lower_bound_of(&self, table: usize, mut first: usize, last: usize, column: usize, value: usize) -> usize {
        let mut count = last - first;
        while count > 0 {
            let count2 = count / 2;
            let middle = first + count2;
            if self.usize(middle, table, column) < value {
                first = middle + 1;
                count -= count2 + 1;
            } else {
                count = count2;
            }
        }
        first
    }

    /// Binary search: returns the first row in `first..last` whose value in
    /// `column` is greater than `value`. Assumes the rows are sorted on
    /// that column over the searched range.
    pub fn upper_bound_of(&self, table: usize, mut first: usize, last: usize, column: usize, value: usize) -> usize {
        let mut count = last - first;
        while count > 0 {
            let count2 = count / 2;
            let middle = first + count2;
            if value < self.usize(middle, table, column) {
                count = count2
            } else {
                first = middle + 1;
                count -= count2 + 1;
            }
        }
        first
    }

    /// Iterates every row of `R`'s table. Requires `&'static self` because
    /// the rows produced by `RowIterator` hold on to the file.
    pub fn table<R: AsRow>(&'static self) -> RowIterator<R> {
        RowIterator::new(self, 0..self.tables[R::TABLE].len)
    }
}
358 | |
359 | fn section_from_rva(sections: &[IMAGE_SECTION_HEADER], rva: u32) -> Result<&IMAGE_SECTION_HEADER> { |
360 | sections.iter().find(|&s| rva >= s.VirtualAddress && rva < s.VirtualAddress + unsafe { s.Misc.VirtualSize }).ok_or(()) |
361 | } |
362 | |
363 | fn offset_from_rva(section: &IMAGE_SECTION_HEADER, rva: u32) -> usize { |
364 | (rva - section.VirtualAddress + section.PointerToRawData) as usize |
365 | } |
366 | |
/// Bounds-checked raw access into a byte buffer.
///
/// Every method returns `Err(())` instead of panicking when the requested
/// span falls outside the buffer or is misaligned for `T`.
trait View {
    fn view_as<T>(&self, offset: usize) -> std::result::Result<&T, ()>;
    fn view_as_slice_of<T>(&self, offset: usize, len: usize) -> std::result::Result<&[T], ()>;
    fn copy_as<T: Copy>(&self, offset: usize) -> std::result::Result<T, ()>;
    fn view_as_str(&self, offset: usize) -> std::result::Result<&[u8], ()>;
    fn is_proper_length<T>(&self, offset: usize) -> std::result::Result<(), ()>;
    fn is_proper_length_and_alignment<T>(&self, offset: usize, count: usize) -> std::result::Result<*const T, ()>;
}

impl View for [u8] {
    /// Borrows the bytes at `offset` as a `&T`.
    fn view_as<T>(&self, offset: usize) -> std::result::Result<&T, ()> {
        // SAFETY: the pointer was verified in-bounds and aligned for `T`.
        unsafe { Ok(&*self.is_proper_length_and_alignment(offset, 1)?) }
    }

    /// Borrows `len` consecutive `T`s starting at byte `offset`.
    fn view_as_slice_of<T>(&self, offset: usize, len: usize) -> std::result::Result<&[T], ()> {
        let ptr = self.is_proper_length_and_alignment(offset, len)?;
        // SAFETY: all `len` elements starting at `ptr` were verified
        // in-bounds and `ptr` is aligned for `T`.
        unsafe { Ok(std::slice::from_raw_parts(ptr, len)) }
    }

    /// Copies the bytes at `offset` out as a `T` (no alignment required).
    fn copy_as<T: Copy>(&self, offset: usize) -> std::result::Result<T, ()> {
        self.is_proper_length::<T>(offset)?;

        // Write into a MaybeUninit and only then assume_init, rather than
        // materializing a zeroed `T` up front.
        let mut data = std::mem::MaybeUninit::<T>::uninit();
        // SAFETY: `offset..offset + size_of::<T>()` was verified in-bounds
        // above, and `data` is fully written before `assume_init`. `T: Copy`
        // restricts this to plain data; this file only uses integer types,
        // for which any bit pattern is valid.
        unsafe {
            std::ptr::copy_nonoverlapping(self.as_ptr().add(offset), data.as_mut_ptr() as *mut u8, std::mem::size_of::<T>());
            Ok(data.assume_init())
        }
    }

    /// Returns the null-terminated byte string starting at `offset`
    /// (terminator excluded). Errs if `offset` is out of range or no
    /// terminator is found, instead of panicking on the slice.
    fn view_as_str(&self, offset: usize) -> std::result::Result<&[u8], ()> {
        let buffer = self.get(offset..).ok_or(())?;
        let index = buffer.iter().position(|c| *c == b'\0').ok_or(())?;
        Ok(&buffer[..index])
    }

    /// Checks that one `T` starting at byte `offset` fits in the buffer.
    fn is_proper_length<T>(&self, offset: usize) -> std::result::Result<(), ()> {
        // checked_add guards against wraparound for absurd offsets.
        match offset.checked_add(std::mem::size_of::<T>()) {
            Some(end) if end <= self.len() => Ok(()),
            _ => Err(()),
        }
    }

    /// Checks that `count` consecutive `T`s starting at byte `offset` fit in
    /// the buffer and that the start is aligned for `T`.
    fn is_proper_length_and_alignment<T>(&self, offset: usize, count: usize) -> std::result::Result<*const T, ()> {
        // Fix: the span occupied is `offset .. offset + count * size_of::<T>()`.
        // The previous check validated `offset * count`, which for a slice
        // (`count > 1`) only accounted for one element and allowed
        // `view_as_slice_of` to hand out a slice extending past the buffer.
        let total = std::mem::size_of::<T>().checked_mul(count).ok_or(())?;
        let end = offset.checked_add(total).ok_or(())?;
        if end > self.len() {
            return Err(());
        }

        // `offset <= self.len()` holds here, so this slice cannot panic.
        let ptr = self[offset..].as_ptr() as *const T;

        if ptr.align_offset(std::mem::align_of::<T>()) == 0 {
            Ok(ptr)
        } else {
            Err(())
        }
    }
}
420 | |