1 | /* Copyright 2019 Mozilla Foundation |
2 | * |
3 | * Licensed under the Apache License, Version 2.0 (the "License"); |
4 | * you may not use this file except in compliance with the License. |
5 | * You may obtain a copy of the License at |
6 | * |
7 | * http://www.apache.org/licenses/LICENSE-2.0 |
8 | * |
9 | * Unless required by applicable law or agreed to in writing, software |
10 | * distributed under the License is distributed on an "AS IS" BASIS, |
11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
12 | * See the License for the specific language governing permissions and |
13 | * limitations under the License. |
14 | */ |
15 | |
16 | // The basic validation algorithm here is copied from the "Validation |
17 | // Algorithm" section of the WebAssembly specification - |
18 | // https://webassembly.github.io/spec/core/appendix/algorithm.html. |
19 | // |
20 | // That algorithm is followed pretty closely here, namely `push_operand`, |
21 | // `pop_operand`, `push_ctrl`, and `pop_ctrl`. If anything here is a bit |
22 | // confusing it's recommended to read over that section to see how it maps to |
23 | // the various methods here. |
24 | |
25 | #[cfg (feature = "simd" )] |
26 | use crate::VisitSimdOperator; |
27 | use crate::{ |
28 | limits::MAX_WASM_FUNCTION_LOCALS, AbstractHeapType, BinaryReaderError, BlockType, BrTable, |
29 | Catch, ContType, FieldType, FrameKind, FuncType, GlobalType, Handle, HeapType, Ieee32, Ieee64, |
30 | MemArg, ModuleArity, RefType, Result, ResumeTable, StorageType, StructType, SubType, TableType, |
31 | TryTable, UnpackedIndex, ValType, VisitOperator, WasmFeatures, WasmModuleResources, |
32 | }; |
33 | use crate::{prelude::*, CompositeInnerType, Ordering}; |
34 | use core::ops::{Deref, DerefMut}; |
35 | |
36 | #[cfg (feature = "simd" )] |
37 | mod simd; |
38 | |
/// Per-function operator validator state.
///
/// Tracks the operand type stack and control-frame stack as described by the
/// "Validation Algorithm" appendix of the WebAssembly specification.
pub(crate) struct OperatorValidator {
    /// Types of all locals (parameters plus declared locals) in the function.
    pub(super) locals: Locals,
    /// Tracks the initialization state of non-defaultable locals.
    local_inits: LocalInits,

    // This is a list of flags for wasm features which are used to gate various
    // instructions.
    pub(crate) features: WasmFeatures,

    // Temporary storage used during `match_stack_operands`
    popped_types_tmp: Vec<MaybeType>,

    /// The `control` list is the list of blocks that we're currently in.
    control: Vec<Frame>,
    /// The `operands` is the current type stack.
    operands: Vec<MaybeType>,

    /// Offset of the `end` instruction which emptied the `control` stack, which
    /// must be the end of the function.
    end_which_emptied_control: Option<usize>,

    /// Whether validation is happening in a shared context.
    shared: bool,

    /// Debug-only counters of (pops, pushes) performed on the operand stack.
    #[cfg(debug_assertions)]
    pub(crate) pop_push_count: (u32, u32),
}
65 | |
/// Captures the initialization of non-defaultable locals.
struct LocalInits {
    /// Records if a local is already initialized.
    local_inits: Vec<bool>,
    /// When `local_inits` is modified, the relevant `index` is recorded
    /// here to be undone when control pops.
    inits: Vec<u32>,
    /// The index of the first non-defaultable local.
    ///
    /// # Note
    ///
    /// This is an optimization so that we only have to perform expensive
    /// look-ups for locals that have a local index equal to or higher than
    /// this. A value of `u32::MAX` means no non-defaultable local has been
    /// defined yet (see `define_locals`).
    first_non_default_local: u32,
}
81 | |
82 | impl Default for LocalInits { |
83 | fn default() -> Self { |
84 | Self { |
85 | local_inits: Vec::default(), |
86 | inits: Vec::default(), |
87 | first_non_default_local: u32::MAX, |
88 | } |
89 | } |
90 | } |
91 | |
92 | impl LocalInits { |
93 | /// Defines new function local parameters. |
94 | pub fn define_params(&mut self, count: usize) { |
95 | let Some(new_len) = self.local_inits.len().checked_add(count) else { |
96 | panic!("tried to define too many function locals as parameters: {count}" ); |
97 | }; |
98 | self.local_inits.resize(new_len, true); |
99 | } |
100 | |
101 | /// Defines `count` function locals of type `ty`. |
102 | pub fn define_locals(&mut self, count: u32, ty: ValType) { |
103 | let Ok(count) = usize::try_from(count) else { |
104 | panic!("tried to define too many function locals: {count}" ); |
105 | }; |
106 | let len = self.local_inits.len(); |
107 | let Some(new_len) = len.checked_add(count) else { |
108 | panic!("tried to define too many function locals: {count}" ); |
109 | }; |
110 | let is_defaultable = ty.is_defaultable(); |
111 | if !is_defaultable && self.first_non_default_local == u32::MAX { |
112 | self.first_non_default_local = len as u32; |
113 | } |
114 | self.local_inits.resize(new_len, is_defaultable); |
115 | } |
116 | |
117 | /// Returns `true` if the local at `local_index` has already been initialized. |
118 | #[inline ] |
119 | pub fn is_uninit(&self, local_index: u32) -> bool { |
120 | if local_index < self.first_non_default_local { |
121 | return false; |
122 | } |
123 | !self.local_inits[local_index as usize] |
124 | } |
125 | |
126 | /// Marks the local at `local_index` as initialized. |
127 | #[inline ] |
128 | pub fn set_init(&mut self, local_index: u32) { |
129 | if self.is_uninit(local_index) { |
130 | self.local_inits[local_index as usize] = true; |
131 | self.inits.push(local_index); |
132 | } |
133 | } |
134 | |
135 | /// Registers a new control frame and returns its `height`. |
136 | pub fn push_ctrl(&mut self) -> usize { |
137 | self.inits.len() |
138 | } |
139 | |
140 | /// Pops a control frame via its `height`. |
141 | /// |
142 | /// This uninitializes all locals that have been initialized within it. |
143 | pub fn pop_ctrl(&mut self, height: usize) { |
144 | for local_index in self.inits.split_off(height) { |
145 | self.local_inits[local_index as usize] = false; |
146 | } |
147 | } |
148 | |
149 | /// Clears the [`LocalInits`]. |
150 | /// |
151 | /// After this operation `self` will be empty and ready for reuse. |
152 | pub fn clear(&mut self) { |
153 | self.local_inits.clear(); |
154 | self.inits.clear(); |
155 | self.first_non_default_local = u32::MAX; |
156 | } |
157 | |
158 | /// Returns `true` if `self` is empty. |
159 | pub fn is_empty(&self) -> bool { |
160 | self.local_inits.is_empty() |
161 | } |
162 | } |
163 | |
// No science was performed in the creation of this number, feel free to change
// it if you so like.
const MAX_LOCALS_TO_TRACK: usize = 50;

/// Storage for the types of a function's locals, optimized for fast lookup.
pub(super) struct Locals {
    // Total number of locals in the function.
    num_locals: u32,

    // The first MAX_LOCALS_TO_TRACK locals in a function. This is used to
    // optimize the theoretically common case where most functions don't have
    // many locals and don't need a full binary search in the entire local space
    // below.
    first: Vec<ValType>,

    // This is a "compressed" list of locals for this function. The list of
    // locals are represented as a list of tuples. The second element is the
    // type of the local, and the first element is monotonically increasing as
    // you visit elements of this list. The first element is the maximum index
    // of the local, after the previous index, of the type specified.
    //
    // This allows us to do a binary search on the list for a local's index for
    // `local.{get,set,tee}`. We do a binary search for the index desired, and
    // it either lies in a "hole" where the maximum index is specified later,
    // or it's at the end of the list meaning it's out of bounds.
    all: Vec<(u32, ValType)>,
}
190 | |
/// A Wasm control flow block on the control flow stack during Wasm validation.
//
// # Dev. Note
//
// This structure corresponds to `ctrl_frame` as specified in the validation
// appendix of the wasm spec
#[derive(Debug, Copy, Clone)]
pub struct Frame {
    /// Indicator for what kind of instruction pushed this frame.
    pub kind: FrameKind,
    /// The type signature of this frame, represented as a singular return type
    /// or a type index pointing into the module's types.
    pub block_type: BlockType,
    /// The index, below which, this frame cannot modify the operand stack.
    pub height: usize,
    /// Whether this frame is unreachable so far.
    pub unreachable: bool,
    /// The number of initializations in the stack at the time of its creation
    pub init_height: usize,
}
211 | |
/// A temporary pairing of an [`OperatorValidator`] with the module `resources`
/// and byte `offset` needed to validate a single operator.
struct OperatorValidatorTemp<'validator, 'resources, T> {
    /// Byte offset of the operator currently being validated, used in errors.
    offset: usize,
    /// The underlying validator state being driven.
    inner: &'validator mut OperatorValidator,
    /// Module-level type and resource information.
    resources: &'resources T,
}
217 | |
/// Buffers that can be reused across validations of multiple functions to
/// avoid repeated allocation; recovered via [`OperatorValidator::into_allocations`].
#[derive(Default)]
pub struct OperatorValidatorAllocations {
    // Scratch buffer reused by `match_stack_operands`.
    popped_types_tmp: Vec<MaybeType>,
    // Control-frame stack storage.
    control: Vec<Frame>,
    // Operand type-stack storage.
    operands: Vec<MaybeType>,
    // Local-initialization tracking storage.
    local_inits: LocalInits,
    // Storage for the first `MAX_LOCALS_TO_TRACK` locals.
    locals_first: Vec<ValType>,
    // Storage for the compressed list of all locals.
    locals_all: Vec<(u32, ValType)>,
}
227 | |
/// Type storage within the validator.
///
/// When managing the operand stack in unreachable code, the validator may not
/// fully know an operand's type. This unknown state is known as the `bottom`
/// type in the WebAssembly specification. Validating further instructions may
/// give us more information; either partial (`PartialRef`) or fully known.
#[derive(Debug, Copy, Clone)]
enum MaybeType<T = ValType> {
    /// The operand has no available type information due to unreachable code.
    ///
    /// This state represents "unknown" and corresponds to the `bottom` type in
    /// the WebAssembly specification. There are no constraints on what this
    /// type may be and it can match any other type during validation.
    Bottom,
    /// The operand is known to be a reference and we may know its abstract
    /// type.
    ///
    /// This state is not fully `Known`, however, because its type can be
    /// interpreted as either:
    /// - `shared` or not-`shared`
    /// - nullable or not nullable
    ///
    /// No further refinements are required for WebAssembly instructions today
    /// but this may grow in the future.
    UnknownRef(Option<AbstractHeapType>),
    /// The operand is known to have type `T`.
    Known(T),
}
256 | |
// The validator is pretty performance-sensitive and `MaybeType` is the main
// unit of storage, so assert that it doesn't exceed 4 bytes which is the
// current expected size.
#[test]
fn assert_maybe_type_small() {
    // `assert_eq!` reports the actual size on failure, unlike `assert!`.
    assert_eq!(core::mem::size_of::<MaybeType>(), 4);
}
264 | |
265 | impl core::fmt::Display for MaybeType { |
266 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { |
267 | match self { |
268 | MaybeType::Bottom => write!(f, "bot" ), |
269 | MaybeType::UnknownRef(ty: &Option) => { |
270 | write!(f, "(ref shared? " )?; |
271 | match ty { |
272 | Some(ty: &AbstractHeapType) => write!(f, " {}bot" , ty.as_str(true))?, |
273 | None => write!(f, "bot" )?, |
274 | } |
275 | write!(f, ")" ) |
276 | } |
277 | MaybeType::Known(ty: &ValType) => core::fmt::Display::fmt(self:ty, f), |
278 | } |
279 | } |
280 | } |
281 | |
282 | impl From<ValType> for MaybeType { |
283 | fn from(ty: ValType) -> MaybeType { |
284 | MaybeType::Known(ty) |
285 | } |
286 | } |
287 | |
288 | impl From<RefType> for MaybeType { |
289 | fn from(ty: RefType) -> MaybeType { |
290 | let ty: ValType = ty.into(); |
291 | ty.into() |
292 | } |
293 | } |
294 | impl From<MaybeType<RefType>> for MaybeType<ValType> { |
295 | fn from(ty: MaybeType<RefType>) -> MaybeType<ValType> { |
296 | match ty { |
297 | MaybeType::Bottom => MaybeType::Bottom, |
298 | MaybeType::UnknownRef(ty: Option) => MaybeType::UnknownRef(ty), |
299 | MaybeType::Known(t: RefType) => MaybeType::Known(t.into()), |
300 | } |
301 | } |
302 | } |
303 | |
304 | impl MaybeType<RefType> { |
305 | fn as_non_null(&self) -> MaybeType<RefType> { |
306 | match self { |
307 | MaybeType::Bottom => MaybeType::Bottom, |
308 | MaybeType::UnknownRef(ty: &Option) => MaybeType::UnknownRef(*ty), |
309 | MaybeType::Known(ty: &RefType) => MaybeType::Known(ty.as_non_null()), |
310 | } |
311 | } |
312 | |
313 | fn is_maybe_shared(&self, resources: &impl WasmModuleResources) -> Option<bool> { |
314 | match self { |
315 | MaybeType::Bottom => None, |
316 | MaybeType::UnknownRef(_) => None, |
317 | MaybeType::Known(ty: &RefType) => Some(resources.is_shared(*ty)), |
318 | } |
319 | } |
320 | } |
321 | |
322 | impl OperatorValidator { |
323 | fn new(features: &WasmFeatures, allocs: OperatorValidatorAllocations) -> Self { |
324 | let OperatorValidatorAllocations { |
325 | popped_types_tmp, |
326 | control, |
327 | operands, |
328 | local_inits, |
329 | locals_first, |
330 | locals_all, |
331 | } = allocs; |
332 | debug_assert!(popped_types_tmp.is_empty()); |
333 | debug_assert!(control.is_empty()); |
334 | debug_assert!(operands.is_empty()); |
335 | debug_assert!(local_inits.is_empty()); |
336 | debug_assert!(local_inits.is_empty()); |
337 | debug_assert!(locals_first.is_empty()); |
338 | debug_assert!(locals_all.is_empty()); |
339 | OperatorValidator { |
340 | locals: Locals { |
341 | num_locals: 0, |
342 | first: locals_first, |
343 | all: locals_all, |
344 | }, |
345 | local_inits, |
346 | features: *features, |
347 | popped_types_tmp, |
348 | operands, |
349 | control, |
350 | end_which_emptied_control: None, |
351 | shared: false, |
352 | #[cfg (debug_assertions)] |
353 | pop_push_count: (0, 0), |
354 | } |
355 | } |
356 | |
357 | /// Creates a new operator validator which will be used to validate a |
358 | /// function whose type is the `ty` index specified. |
359 | /// |
360 | /// The `resources` are used to learn about the function type underlying |
361 | /// `ty`. |
362 | pub fn new_func<T>( |
363 | ty: u32, |
364 | offset: usize, |
365 | features: &WasmFeatures, |
366 | resources: &T, |
367 | allocs: OperatorValidatorAllocations, |
368 | ) -> Result<Self> |
369 | where |
370 | T: WasmModuleResources, |
371 | { |
372 | let mut ret = OperatorValidator::new(features, allocs); |
373 | ret.control.push(Frame { |
374 | kind: FrameKind::Block, |
375 | block_type: BlockType::FuncType(ty), |
376 | height: 0, |
377 | unreachable: false, |
378 | init_height: 0, |
379 | }); |
380 | |
381 | // Retrieve the function's type via index (`ty`); the `offset` is |
382 | // necessary due to `sub_type_at`'s error messaging. |
383 | let sub_ty = OperatorValidatorTemp { |
384 | offset, |
385 | inner: &mut ret, |
386 | resources, |
387 | } |
388 | .sub_type_at(ty)?; |
389 | |
390 | // Set up the function's locals. |
391 | if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner { |
392 | for ty in func_ty.params() { |
393 | ret.locals.define(1, *ty); |
394 | } |
395 | ret.local_inits.define_params(func_ty.params().len()); |
396 | } else { |
397 | bail!(offset, "expected func type at index {ty}, found {sub_ty}" ) |
398 | } |
399 | |
400 | // If we're in a shared function, ensure we do not access unshared |
401 | // objects. |
402 | if sub_ty.composite_type.shared { |
403 | ret.shared = true; |
404 | } |
405 | Ok(ret) |
406 | } |
407 | |
408 | /// Creates a new operator validator which will be used to validate an |
409 | /// `init_expr` constant expression which should result in the `ty` |
410 | /// specified. |
411 | pub fn new_const_expr( |
412 | features: &WasmFeatures, |
413 | ty: ValType, |
414 | allocs: OperatorValidatorAllocations, |
415 | ) -> Self { |
416 | let mut ret = OperatorValidator::new(features, allocs); |
417 | ret.control.push(Frame { |
418 | kind: FrameKind::Block, |
419 | block_type: BlockType::Type(ty), |
420 | height: 0, |
421 | unreachable: false, |
422 | init_height: 0, |
423 | }); |
424 | ret |
425 | } |
426 | |
427 | pub fn define_locals( |
428 | &mut self, |
429 | offset: usize, |
430 | count: u32, |
431 | mut ty: ValType, |
432 | resources: &impl WasmModuleResources, |
433 | ) -> Result<()> { |
434 | resources.check_value_type(&mut ty, &self.features, offset)?; |
435 | if count == 0 { |
436 | return Ok(()); |
437 | } |
438 | if !self.locals.define(count, ty) { |
439 | return Err(BinaryReaderError::new( |
440 | "too many locals: locals exceed maximum" , |
441 | offset, |
442 | )); |
443 | } |
444 | self.local_inits.define_locals(count, ty); |
445 | Ok(()) |
446 | } |
447 | |
448 | /// Returns the current operands stack height. |
449 | pub fn operand_stack_height(&self) -> usize { |
450 | self.operands.len() |
451 | } |
452 | |
453 | /// Returns the optional value type of the value operand at the given |
454 | /// `depth` from the top of the operand stack. |
455 | /// |
456 | /// - Returns `None` if the `depth` is out of bounds. |
457 | /// - Returns `Some(None)` if there is a value with unknown type |
458 | /// at the given `depth`. |
459 | /// |
460 | /// # Note |
461 | /// |
462 | /// A `depth` of 0 will refer to the last operand on the stack. |
463 | pub fn peek_operand_at(&self, depth: usize) -> Option<Option<ValType>> { |
464 | Some(match self.operands.iter().rev().nth(depth)? { |
465 | MaybeType::Known(t) => Some(*t), |
466 | MaybeType::Bottom | MaybeType::UnknownRef(..) => None, |
467 | }) |
468 | } |
469 | |
470 | /// Returns the number of frames on the control flow stack. |
471 | pub fn control_stack_height(&self) -> usize { |
472 | self.control.len() |
473 | } |
474 | |
475 | pub fn get_frame(&self, depth: usize) -> Option<&Frame> { |
476 | self.control.iter().rev().nth(depth) |
477 | } |
478 | |
479 | /// Create a temporary [`OperatorValidatorTemp`] for validation. |
480 | pub fn with_resources<'a, 'validator, 'resources, T>( |
481 | &'validator mut self, |
482 | resources: &'resources T, |
483 | offset: usize, |
484 | ) -> impl VisitOperator<'a, Output = Result<()>> + ModuleArity + 'validator |
485 | where |
486 | T: WasmModuleResources, |
487 | 'resources: 'validator, |
488 | { |
489 | WasmProposalValidator(OperatorValidatorTemp { |
490 | offset, |
491 | inner: self, |
492 | resources, |
493 | }) |
494 | } |
495 | |
496 | /// Same as `with_resources` above but guarantees it's able to visit simd |
497 | /// operators as well. |
498 | #[cfg (feature = "simd" )] |
499 | pub fn with_resources_simd<'a, 'validator, 'resources, T>( |
500 | &'validator mut self, |
501 | resources: &'resources T, |
502 | offset: usize, |
503 | ) -> impl VisitSimdOperator<'a, Output = Result<()>> + ModuleArity + 'validator |
504 | where |
505 | T: WasmModuleResources, |
506 | 'resources: 'validator, |
507 | { |
508 | WasmProposalValidator(OperatorValidatorTemp { |
509 | offset, |
510 | inner: self, |
511 | resources, |
512 | }) |
513 | } |
514 | |
515 | pub fn finish(&mut self, offset: usize) -> Result<()> { |
516 | if self.control.last().is_some() { |
517 | bail!( |
518 | offset, |
519 | "control frames remain at end of function: END opcode expected" |
520 | ); |
521 | } |
522 | |
523 | // The `end` opcode is one byte which means that the `offset` here |
524 | // should point just beyond the `end` opcode which emptied the control |
525 | // stack. If not that means more instructions were present after the |
526 | // control stack was emptied. |
527 | if offset != self.end_which_emptied_control.unwrap() + 1 { |
528 | return Err(self.err_beyond_end(offset)); |
529 | } |
530 | Ok(()) |
531 | } |
532 | |
533 | fn err_beyond_end(&self, offset: usize) -> BinaryReaderError { |
534 | format_err!(offset, "operators remaining after end of function" ) |
535 | } |
536 | |
537 | pub fn into_allocations(mut self) -> OperatorValidatorAllocations { |
538 | fn clear<T>(mut tmp: Vec<T>) -> Vec<T> { |
539 | tmp.clear(); |
540 | tmp |
541 | } |
542 | OperatorValidatorAllocations { |
543 | popped_types_tmp: clear(self.popped_types_tmp), |
544 | control: clear(self.control), |
545 | operands: clear(self.operands), |
546 | local_inits: { |
547 | self.local_inits.clear(); |
548 | self.local_inits |
549 | }, |
550 | locals_first: clear(self.locals.first), |
551 | locals_all: clear(self.locals.all), |
552 | } |
553 | } |
554 | |
555 | fn record_pop(&mut self) { |
556 | #[cfg (debug_assertions)] |
557 | { |
558 | self.pop_push_count.0 += 1; |
559 | } |
560 | } |
561 | |
562 | fn record_push(&mut self) { |
563 | #[cfg (debug_assertions)] |
564 | { |
565 | self.pop_push_count.1 += 1; |
566 | } |
567 | } |
568 | } |
569 | |
570 | impl<R> Deref for OperatorValidatorTemp<'_, '_, R> { |
571 | type Target = OperatorValidator; |
572 | fn deref(&self) -> &OperatorValidator { |
573 | self.inner |
574 | } |
575 | } |
576 | |
577 | impl<R> DerefMut for OperatorValidatorTemp<'_, '_, R> { |
578 | fn deref_mut(&mut self) -> &mut OperatorValidator { |
579 | self.inner |
580 | } |
581 | } |
582 | |
583 | impl<'resources, R> OperatorValidatorTemp<'_, 'resources, R> |
584 | where |
585 | R: WasmModuleResources, |
586 | { |
587 | /// Pushes a type onto the operand stack. |
588 | /// |
589 | /// This is used by instructions to represent a value that is pushed to the |
590 | /// operand stack. This can fail, but only if `Type` is feature gated. |
591 | /// Otherwise the push operation always succeeds. |
592 | fn push_operand<T>(&mut self, ty: T) -> Result<()> |
593 | where |
594 | T: Into<MaybeType>, |
595 | { |
596 | let maybe_ty = ty.into(); |
597 | |
598 | if cfg!(debug_assertions) { |
599 | match maybe_ty { |
600 | MaybeType::Known(ValType::Ref(r)) => match r.heap_type() { |
601 | HeapType::Concrete(index) => { |
602 | debug_assert!( |
603 | matches!(index, UnpackedIndex::Id(_)), |
604 | "only ref types referencing `CoreTypeId`s can \ |
605 | be pushed to the operand stack" |
606 | ); |
607 | } |
608 | _ => {} |
609 | }, |
610 | _ => {} |
611 | } |
612 | } |
613 | |
614 | self.operands.push(maybe_ty); |
615 | self.record_push(); |
616 | Ok(()) |
617 | } |
618 | |
619 | fn push_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<()> { |
620 | let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index)); |
621 | |
622 | // Canonicalize the module index into an id. |
623 | self.resources.check_heap_type(&mut heap_ty, self.offset)?; |
624 | debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_)))); |
625 | |
626 | let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| { |
627 | format_err!(self.offset, "implementation limit: type index too large" ) |
628 | })?; |
629 | |
630 | self.push_operand(ref_ty) |
631 | } |
632 | |
633 | fn pop_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<MaybeType> { |
634 | let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index)); |
635 | |
636 | // Canonicalize the module index into an id. |
637 | self.resources.check_heap_type(&mut heap_ty, self.offset)?; |
638 | debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_)))); |
639 | |
640 | let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| { |
641 | format_err!(self.offset, "implementation limit: type index too large" ) |
642 | })?; |
643 | |
644 | self.pop_operand(Some(ref_ty.into())) |
645 | } |
646 | |
647 | /// Pop the given label types, checking that they are indeed present on the |
648 | /// stack, and then push them back on again. |
649 | fn pop_push_label_types( |
650 | &mut self, |
651 | label_types: impl PreciseIterator<Item = ValType>, |
652 | ) -> Result<()> { |
653 | for ty in label_types.clone().rev() { |
654 | self.pop_operand(Some(ty))?; |
655 | } |
656 | for ty in label_types { |
657 | self.push_operand(ty)?; |
658 | } |
659 | Ok(()) |
660 | } |
661 | |
    /// Attempts to pop a type from the operand stack.
    ///
    /// This function is used to remove types from the operand stack. The
    /// `expected` argument can be used to indicate that a type is required, or
    /// simply that something is needed to be popped.
    ///
    /// If `expected` is `Some(T)` then this will be guaranteed to return
    /// `T`, and it will only return success if the current block is
    /// unreachable or if `T` was found at the top of the operand stack.
    ///
    /// If `expected` is `None` then it indicates that something must be on the
    /// operand stack, but it doesn't matter what's on the operand stack. This
    /// is useful for polymorphic instructions like `select`.
    ///
    /// If `Some(T)` is returned then `T` was popped from the operand stack and
    /// matches `expected`. If `None` is returned then it means that `None` was
    /// expected and a type was successfully popped, but its exact type is
    /// indeterminate because the current block is unreachable.
    fn pop_operand(&mut self, expected: Option<ValType>) -> Result<MaybeType> {
        // This method is one of the hottest methods in the validator so to
        // improve codegen this method contains a fast-path success case where
        // if the top operand on the stack is as expected it's returned
        // immediately. This is the most common case where the stack will indeed
        // have the expected type and all we need to do is pop it off.
        //
        // Note that this still has to be careful to be correct, though. For
        // efficiency an operand is unconditionally popped and on success it is
        // matched against the state of the world to see if we could actually
        // pop it. If we shouldn't have popped it then it's passed to the slow
        // path to get pushed back onto the stack.
        let popped = match self.operands.pop() {
            Some(MaybeType::Known(actual_ty)) => {
                if Some(actual_ty) == expected {
                    if let Some(control) = self.control.last() {
                        // Only take the fast path when popping doesn't dip
                        // below the current control frame's base height.
                        if self.operands.len() >= control.height {
                            self.record_pop();
                            return Ok(MaybeType::Known(actual_ty));
                        }
                    }
                }
                Some(MaybeType::Known(actual_ty))
            }
            other => other,
        };

        self._pop_operand(expected, popped)
    }
709 | |
    // This is the "real" implementation of `pop_operand` which is 100%
    // spec-compliant with little attention paid to efficiency since this is the
    // slow-path from the actual `pop_operand` function above.
    //
    // `popped` is the operand speculatively removed by the fast path (if any);
    // it is pushed back first so the stack is restored to its pre-pop state.
    #[cold]
    fn _pop_operand(
        &mut self,
        expected: Option<ValType>,
        popped: Option<MaybeType>,
    ) -> Result<MaybeType> {
        // Restore the operand the fast path speculatively popped.
        self.operands.extend(popped);
        let control = match self.control.last() {
            Some(c) => c,
            None => return Err(self.err_beyond_end(self.offset)),
        };
        // At the frame's base height in unreachable code the stack is
        // polymorphic: produce the `bottom` type.
        let actual = if self.operands.len() == control.height && control.unreachable {
            MaybeType::Bottom
        } else {
            if self.operands.len() == control.height {
                let desc = match expected {
                    Some(ty) => ty_to_str(ty),
                    None => "a type".into(),
                };
                bail!(
                    self.offset,
                    "type mismatch: expected {desc} but nothing on stack"
                )
            } else {
                self.operands.pop().unwrap()
            }
        };
        if let Some(expected) = expected {
            match (actual, expected) {
                // The bottom type matches all expectations
                (MaybeType::Bottom, _) => {}

                // The "heap bottom" type only matches other reference types,
                // but not any integer types. Note that if the heap bottom is
                // known to have a specific abstract heap type then a subtype
                // check is performed against the expected type.
                (MaybeType::UnknownRef(actual_ty), ValType::Ref(expected)) => {
                    if let Some(actual) = actual_ty {
                        let expected_shared = self.resources.is_shared(expected);
                        let actual = RefType::new(
                            false,
                            HeapType::Abstract {
                                shared: expected_shared,
                                ty: actual,
                            },
                        )
                        .unwrap();
                        if !self.resources.is_subtype(actual.into(), expected.into()) {
                            bail!(
                                self.offset,
                                "type mismatch: expected {}, found {}",
                                ty_to_str(expected.into()),
                                ty_to_str(actual.into())
                            );
                        }
                    }
                }

                // Use the `is_subtype` predicate to test if a found type matches
                // the expectation.
                (MaybeType::Known(actual), expected) => {
                    if !self.resources.is_subtype(actual, expected) {
                        bail!(
                            self.offset,
                            "type mismatch: expected {}, found {}",
                            ty_to_str(expected),
                            ty_to_str(actual)
                        );
                    }
                }

                // A "heap bottom" type cannot match any numeric types.
                (
                    MaybeType::UnknownRef(..),
                    ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128,
                ) => {
                    bail!(
                        self.offset,
                        "type mismatch: expected {}, found heap type",
                        ty_to_str(expected)
                    )
                }
            }
        }
        self.record_pop();
        Ok(actual)
    }
800 | |
    /// Match expected vs. actual operand.
    ///
    /// Implemented as a push of `actual` followed by a pop requiring
    /// `expected`, reusing `pop_operand`'s subtype checking. The debug-only
    /// pop/push counters are restored so this check is invisible to them.
    fn match_operand(
        &mut self,
        actual: ValType,
        expected: ValType,
    ) -> Result<(), BinaryReaderError> {
        #[cfg(debug_assertions)]
        let tmp = self.pop_push_count;
        self.push_operand(actual)?;
        self.pop_operand(Some(expected))?;
        #[cfg(debug_assertions)]
        {
            self.pop_push_count = tmp;
        }
        Ok(())
    }
817 | |
    /// Match a type sequence to the top of the stack.
    ///
    /// Pops each of `expected_tys` (checking subtyping along the way) and then
    /// pushes the popped types back, leaving the stack unchanged on success.
    /// Uses `popped_types_tmp` as scratch space; the debug-only pop/push
    /// counters are restored afterwards so the check doesn't perturb them.
    fn match_stack_operands(
        &mut self,
        expected_tys: impl PreciseIterator<Item = ValType> + 'resources,
    ) -> Result<()> {
        debug_assert!(self.popped_types_tmp.is_empty());
        self.popped_types_tmp.reserve(expected_tys.len());
        #[cfg(debug_assertions)]
        let tmp = self.pop_push_count;
        for expected_ty in expected_tys.rev() {
            let actual_ty = self.pop_operand(Some(expected_ty))?;
            self.popped_types_tmp.push(actual_ty);
        }
        // Access fields through `self.inner` directly to split the borrow
        // between the scratch buffer and the operand stack.
        for ty in self.inner.popped_types_tmp.drain(..).rev() {
            self.inner.operands.push(ty.into());
        }
        #[cfg(debug_assertions)]
        {
            self.pop_push_count = tmp;
        }
        Ok(())
    }
840 | |
841 | /// Pop a reference type from the operand stack. |
842 | fn pop_ref(&mut self, expected: Option<RefType>) -> Result<MaybeType<RefType>> { |
843 | match self.pop_operand(expected.map(|t| t.into()))? { |
844 | MaybeType::Bottom => Ok(MaybeType::UnknownRef(None)), |
845 | MaybeType::UnknownRef(ty) => Ok(MaybeType::UnknownRef(ty)), |
846 | MaybeType::Known(ValType::Ref(rt)) => Ok(MaybeType::Known(rt)), |
847 | MaybeType::Known(ty) => bail!( |
848 | self.offset, |
849 | "type mismatch: expected ref but found {}" , |
850 | ty_to_str(ty) |
851 | ), |
852 | } |
853 | } |
854 | |
    /// Pop a reference type from the operand stack, checking if it is a subtype
    /// of a nullable type of `expected` or the shared version of `expected`.
    ///
    /// This function returns the popped reference type and its `shared`-ness,
    /// saving extra lookups for concrete types.
    fn pop_maybe_shared_ref(&mut self, expected: AbstractHeapType) -> Result<MaybeType<RefType>> {
        let actual = match self.pop_ref(None)? {
            // Nothing further can be checked against fully-unknown operands.
            MaybeType::Bottom => return Ok(MaybeType::Bottom),
            MaybeType::UnknownRef(None) => return Ok(MaybeType::UnknownRef(None)),
            // With a known abstract heap type a direct subtype check suffices.
            MaybeType::UnknownRef(Some(actual)) => {
                if !actual.is_subtype_of(expected) {
                    bail!(
                        self.offset,
                        "type mismatch: expected subtype of {}, found {}",
                        expected.as_str(false),
                        actual.as_str(false),
                    )
                }
                return Ok(MaybeType::UnknownRef(Some(actual)));
            }
            MaybeType::Known(ty) => ty,
        };
        // Change our expectation based on whether we're dealing with an actual
        // shared or unshared type.
        let is_actual_shared = self.resources.is_shared(actual);
        let expected = RefType::new(
            true,
            HeapType::Abstract {
                shared: is_actual_shared,
                ty: expected,
            },
        )
        .unwrap();

        // Check (again) that the actual type is a subtype of the expected type.
        // Note that `_pop_operand` already does this kind of thing but we leave
        // that for a future refactoring (TODO).
        if !self.resources.is_subtype(actual.into(), expected.into()) {
            bail!(
                self.offset,
                "type mismatch: expected subtype of {expected}, found {actual}",
            )
        }
        Ok(MaybeType::Known(actual))
    }
900 | |
901 | /// Fetches the type for the local at `idx`, returning an error if it's out |
902 | /// of bounds. |
903 | fn local(&self, idx: u32) -> Result<ValType> { |
904 | match self.locals.get(idx) { |
905 | Some(ty) => Ok(ty), |
906 | None => bail!( |
907 | self.offset, |
908 | "unknown local {}: local index out of bounds" , |
909 | idx |
910 | ), |
911 | } |
912 | } |
913 | |
914 | /// Flags the current control frame as unreachable, additionally truncating |
915 | /// the currently active operand stack. |
916 | fn unreachable(&mut self) -> Result<()> { |
917 | let control = match self.control.last_mut() { |
918 | Some(frame) => frame, |
919 | None => return Err(self.err_beyond_end(self.offset)), |
920 | }; |
921 | control.unreachable = true; |
922 | let new_height = control.height; |
923 | self.operands.truncate(new_height); |
924 | Ok(()) |
925 | } |
926 | |
    /// Pushes a new frame onto the control stack.
    ///
    /// This operation is used when entering a new block such as an if, loop,
    /// or block itself. The `kind` of block is specified which indicates how
    /// breaks interact with this block's type. Additionally the type signature
    /// of the block is specified by `ty`.
    fn push_ctrl(&mut self, kind: FrameKind, ty: BlockType) -> Result<()> {
        // Push a new frame which has a snapshot of the height of the current
        // operand stack.
        let height = self.operands.len();
        // Snapshot the local-initialization tracking too; `pop_ctrl` restores
        // it to this point when the frame exits.
        let init_height = self.local_inits.push_ctrl();
        self.control.push(Frame {
            kind,
            block_type: ty,
            height,
            unreachable: false,
            init_height,
        });
        // All of the parameters are now also available in this control frame,
        // so we push them here in order.
        for ty in self.params(ty)? {
            self.push_operand(ty)?;
        }
        Ok(())
    }
952 | |
    /// Pops a frame from the control stack.
    ///
    /// This function is used when exiting a block and leaves a block scope.
    /// Internally this will validate that blocks have the correct result type.
    fn pop_ctrl(&mut self) -> Result<Frame> {
        // Read the expected type and expected height of the operand stack the
        // end of the frame.
        let frame = match self.control.last() {
            Some(f) => f,
            None => return Err(self.err_beyond_end(self.offset)),
        };
        let ty = frame.block_type;
        let height = frame.height;
        let init_height = frame.init_height;

        // `reset_locals` in the spec: locals initialized inside this block are
        // no longer guaranteed initialized after it exits.
        self.local_inits.pop_ctrl(init_height);

        // Pop all the result types, in reverse order, from the operand stack.
        // These types will, possibly, be transferred to the next frame.
        for ty in self.results(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }

        // Make sure that the operand stack has returned to its original
        // height...
        if self.operands.len() != height {
            bail!(
                self.offset,
                "type mismatch: values remaining on stack at end of block"
            );
        }

        // And then we can remove it!
        Ok(self.control.pop().unwrap())
    }
989 | |
990 | /// Validates a relative jump to the `depth` specified. |
991 | /// |
992 | /// Returns the type signature of the block that we're jumping to as well |
993 | /// as the kind of block if the jump is valid. Otherwise returns an error. |
994 | fn jump(&self, depth: u32) -> Result<(BlockType, FrameKind)> { |
995 | if self.control.is_empty() { |
996 | return Err(self.err_beyond_end(self.offset)); |
997 | } |
998 | match (self.control.len() - 1).checked_sub(depth as usize) { |
999 | Some(i) => { |
1000 | let frame = &self.control[i]; |
1001 | Ok((frame.block_type, frame.kind)) |
1002 | } |
1003 | None => bail!(self.offset, "unknown label: branch depth too large" ), |
1004 | } |
1005 | } |
1006 | |
1007 | /// Validates that `memory_index` is valid in this module, and returns the |
1008 | /// type of address used to index the memory specified. |
1009 | fn check_memory_index(&self, memory_index: u32) -> Result<ValType> { |
1010 | match self.resources.memory_at(memory_index) { |
1011 | Some(mem) => Ok(mem.index_type()), |
1012 | None => bail!(self.offset, "unknown memory {}" , memory_index), |
1013 | } |
1014 | } |
1015 | |
1016 | /// Validates a `memarg for alignment and such (also the memory it |
1017 | /// references), and returns the type of index used to address the memory. |
1018 | fn check_memarg(&self, memarg: MemArg) -> Result<ValType> { |
1019 | let index_ty = self.check_memory_index(memarg.memory)?; |
1020 | if memarg.align > memarg.max_align { |
1021 | bail!( |
1022 | self.offset, |
1023 | "malformed memop alignment: alignment must not be larger than natural" |
1024 | ); |
1025 | } |
1026 | if index_ty == ValType::I32 && memarg.offset > u64::from(u32::MAX) { |
1027 | bail!(self.offset, "offset out of range: must be <= 2**32" ); |
1028 | } |
1029 | Ok(index_ty) |
1030 | } |
1031 | |
1032 | fn check_floats_enabled(&self) -> Result<()> { |
1033 | if !self.features.floats() { |
1034 | bail!(self.offset, "floating-point instruction disallowed" ); |
1035 | } |
1036 | Ok(()) |
1037 | } |
1038 | |
1039 | fn check_shared_memarg(&self, memarg: MemArg) -> Result<ValType> { |
1040 | if memarg.align != memarg.max_align { |
1041 | bail!( |
1042 | self.offset, |
1043 | "atomic instructions must always specify maximum alignment" |
1044 | ); |
1045 | } |
1046 | self.check_memory_index(memarg.memory) |
1047 | } |
1048 | |
    /// Validates a block type, primarily with various in-flight proposals.
    fn check_block_type(&self, ty: &mut BlockType) -> Result<()> {
        match ty {
            BlockType::Empty => Ok(()),
            // Single-result blocks: validate the value type itself.
            BlockType::Type(t) => self
                .resources
                .check_value_type(t, &self.features, self.offset),
            // Multi-value blocks are expressed via a function type index, so
            // they're gated on the multi-value feature.
            BlockType::FuncType(idx) => {
                if !self.features.multi_value() {
                    // NOTE(review): this message's wording reads inverted;
                    // confirm it matches spec-test expectations before editing.
                    bail!(
                        self.offset,
                        "blocks, loops, and ifs may only produce a resulttype \
                         when multi-value is not enabled" ,
                    );
                }
                self.func_type_at(*idx)?;
                Ok(())
            }
        }
    }
1069 | |
1070 | /// Returns the corresponding function type for the `func` item located at |
1071 | /// `function_index`. |
1072 | fn type_of_function(&self, function_index: u32) -> Result<&'resources FuncType> { |
1073 | if let Some(type_index) = self.resources.type_index_of_function(function_index) { |
1074 | self.func_type_at(type_index) |
1075 | } else { |
1076 | bail!( |
1077 | self.offset, |
1078 | "unknown function {function_index}: function index out of bounds" , |
1079 | ) |
1080 | } |
1081 | } |
1082 | |
1083 | /// Checks a call-style instruction which will be invoking the function `ty` |
1084 | /// specified. |
1085 | /// |
1086 | /// This will pop parameters from the operand stack for the function's |
1087 | /// parameters and then push the results of the function on the stack. |
1088 | fn check_call_ty(&mut self, ty: &FuncType) -> Result<()> { |
1089 | for &ty in ty.params().iter().rev() { |
1090 | debug_assert_type_indices_are_ids(ty); |
1091 | self.pop_operand(Some(ty))?; |
1092 | } |
1093 | for &ty in ty.results() { |
1094 | debug_assert_type_indices_are_ids(ty); |
1095 | self.push_operand(ty)?; |
1096 | } |
1097 | Ok(()) |
1098 | } |
1099 | |
    /// Similar to `check_call_ty` except used for tail-call instructions.
    fn check_return_call_ty(&mut self, ty: &FuncType) -> Result<()> {
        // A tail call replaces this frame, so the callee's results must match
        // the current function's results.
        self.check_func_type_same_results(ty)?;
        for &ty in ty.params().iter().rev() {
            debug_assert_type_indices_are_ids(ty);
            self.pop_operand(Some(ty))?;
        }

        // Match the results with this function's, but don't include in pop/push counts.
        #[cfg (debug_assertions)]
        let tmp = self.pop_push_count;
        for &ty in ty.results() {
            debug_assert_type_indices_are_ids(ty);
            self.push_operand(ty)?;
        }
        // Tail calls leave the function, so validate like a `return`.
        self.check_return()?;
        #[cfg (debug_assertions)]
        {
            // Restore the saved counts: the pushes above are bookkeeping only
            // and shouldn't be attributed to this instruction.
            self.pop_push_count = tmp;
        }

        Ok(())
    }
1123 | |
    /// Checks the immediate `type_index` of a `call_ref`-style instruction
    /// (also `return_call_ref`).
    ///
    /// This will validate that the value on the stack is a `(ref type_index)`
    /// or a subtype. This will then return the corresponding function type used
    /// for this call (to be used with `check_call_ty` or
    /// `check_return_call_ty`).
    fn check_call_ref_ty(&mut self, type_index: u32) -> Result<&'resources FuncType> {
        let unpacked_index = UnpackedIndex::Module(type_index);
        let mut hty = HeapType::Concrete(unpacked_index);
        // Canonicalizes the module-relative index (and validates it).
        self.resources.check_heap_type(&mut hty, self.offset)?;
        // The expectation is the nullable form of the concrete reference.
        let expected = RefType::new(true, hty).expect("hty should be previously validated" );
        self.pop_ref(Some(expected))?;
        self.func_type_at(type_index)
    }
1139 | |
1140 | /// Validates the immediate operands of a `call_indirect` or |
1141 | /// `return_call_indirect` instruction. |
1142 | /// |
1143 | /// This will validate that `table_index` is valid and a funcref table. It |
1144 | /// will additionally pop the index argument which is used to index into the |
1145 | /// table. |
1146 | /// |
1147 | /// The return value of this function is the function type behind |
1148 | /// `type_index` which must then be passed to `check_{call,return_call}_ty`. |
1149 | fn check_call_indirect_ty( |
1150 | &mut self, |
1151 | type_index: u32, |
1152 | table_index: u32, |
1153 | ) -> Result<&'resources FuncType> { |
1154 | let tab = self.table_type_at(table_index)?; |
1155 | if !self |
1156 | .resources |
1157 | .is_subtype(ValType::Ref(tab.element_type), ValType::FUNCREF) |
1158 | { |
1159 | bail!( |
1160 | self.offset, |
1161 | "type mismatch: indirect calls must go through a table with type <= funcref" , |
1162 | ); |
1163 | } |
1164 | self.pop_operand(Some(tab.index_type()))?; |
1165 | self.func_type_at(type_index) |
1166 | } |
1167 | |
1168 | /// Validates a `return` instruction, popping types from the operand |
1169 | /// stack that the function needs. |
1170 | fn check_return(&mut self) -> Result<()> { |
1171 | if self.control.is_empty() { |
1172 | return Err(self.err_beyond_end(self.offset)); |
1173 | } |
1174 | for ty in self.results(self.control[0].block_type)?.rev() { |
1175 | self.pop_operand(Some(ty))?; |
1176 | } |
1177 | self.unreachable()?; |
1178 | Ok(()) |
1179 | } |
1180 | |
    /// Check that the given type has the same result types as the current
    /// function's results.
    ///
    /// Each callee result must be a subtype of the corresponding result of the
    /// current function (used for tail-call validation).
    fn check_func_type_same_results(&self, callee_ty: &FuncType) -> Result<()> {
        if self.control.is_empty() {
            return Err(self.err_beyond_end(self.offset));
        }
        let caller_rets = self.results(self.control[0].block_type)?;
        if callee_ty.results().len() != caller_rets.len()
            || !caller_rets
                .zip(callee_ty.results())
                .all(|(caller_ty, callee_ty)| self.resources.is_subtype(*callee_ty, caller_ty))
        {
            // Mismatch: re-create the iterator (it was consumed above) to
            // render both result lists for the error message.
            let caller_rets = self
                .results(self.control[0].block_type)?
                .map(|ty| format!(" {ty}" ))
                .collect::<Vec<_>>()
                .join(" " );
            let callee_rets = callee_ty
                .results()
                .iter()
                .map(|ty| format!(" {ty}" ))
                .collect::<Vec<_>>()
                .join(" " );
            bail!(
                self.offset,
                "type mismatch: current function requires result type \
                 [ {caller_rets}] but callee returns [ {callee_rets}]"
            );
        }
        Ok(())
    }
1212 | |
1213 | /// Checks the validity of a common comparison operator. |
1214 | fn check_cmp_op(&mut self, ty: ValType) -> Result<()> { |
1215 | self.pop_operand(Some(ty))?; |
1216 | self.pop_operand(Some(ty))?; |
1217 | self.push_operand(ValType::I32)?; |
1218 | Ok(()) |
1219 | } |
1220 | |
1221 | /// Checks the validity of a common float comparison operator. |
1222 | fn check_fcmp_op(&mut self, ty: ValType) -> Result<()> { |
1223 | debug_assert!(matches!(ty, ValType::F32 | ValType::F64)); |
1224 | self.check_floats_enabled()?; |
1225 | self.check_cmp_op(ty) |
1226 | } |
1227 | |
1228 | /// Checks the validity of a common unary operator. |
1229 | fn check_unary_op(&mut self, ty: ValType) -> Result<()> { |
1230 | self.pop_operand(Some(ty))?; |
1231 | self.push_operand(ty)?; |
1232 | Ok(()) |
1233 | } |
1234 | |
1235 | /// Checks the validity of a common unary float operator. |
1236 | fn check_funary_op(&mut self, ty: ValType) -> Result<()> { |
1237 | debug_assert!(matches!(ty, ValType::F32 | ValType::F64)); |
1238 | self.check_floats_enabled()?; |
1239 | self.check_unary_op(ty) |
1240 | } |
1241 | |
1242 | /// Checks the validity of a common conversion operator. |
1243 | fn check_conversion_op(&mut self, into: ValType, from: ValType) -> Result<()> { |
1244 | self.pop_operand(Some(from))?; |
1245 | self.push_operand(into)?; |
1246 | Ok(()) |
1247 | } |
1248 | |
1249 | /// Checks the validity of a common float conversion operator. |
1250 | fn check_fconversion_op(&mut self, into: ValType, from: ValType) -> Result<()> { |
1251 | debug_assert!(matches!(into, ValType::F32 | ValType::F64)); |
1252 | self.check_floats_enabled()?; |
1253 | self.check_conversion_op(into, from) |
1254 | } |
1255 | |
1256 | /// Checks the validity of a common binary operator. |
1257 | fn check_binary_op(&mut self, ty: ValType) -> Result<()> { |
1258 | self.pop_operand(Some(ty))?; |
1259 | self.pop_operand(Some(ty))?; |
1260 | self.push_operand(ty)?; |
1261 | Ok(()) |
1262 | } |
1263 | |
1264 | /// Checks the validity of a common binary float operator. |
1265 | fn check_fbinary_op(&mut self, ty: ValType) -> Result<()> { |
1266 | debug_assert!(matches!(ty, ValType::F32 | ValType::F64)); |
1267 | self.check_floats_enabled()?; |
1268 | self.check_binary_op(ty) |
1269 | } |
1270 | |
1271 | /// Checks the validity of an atomic load operator. |
1272 | fn check_atomic_load(&mut self, memarg: MemArg, load_ty: ValType) -> Result<()> { |
1273 | let ty = self.check_shared_memarg(memarg)?; |
1274 | self.pop_operand(Some(ty))?; |
1275 | self.push_operand(load_ty)?; |
1276 | Ok(()) |
1277 | } |
1278 | |
1279 | /// Checks the validity of an atomic store operator. |
1280 | fn check_atomic_store(&mut self, memarg: MemArg, store_ty: ValType) -> Result<()> { |
1281 | let ty = self.check_shared_memarg(memarg)?; |
1282 | self.pop_operand(Some(store_ty))?; |
1283 | self.pop_operand(Some(ty))?; |
1284 | Ok(()) |
1285 | } |
1286 | |
1287 | /// Checks the validity of atomic binary operator on memory. |
1288 | fn check_atomic_binary_memory_op(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> { |
1289 | let ty = self.check_shared_memarg(memarg)?; |
1290 | self.pop_operand(Some(op_ty))?; |
1291 | self.pop_operand(Some(ty))?; |
1292 | self.push_operand(op_ty)?; |
1293 | Ok(()) |
1294 | } |
1295 | |
1296 | /// Checks the validity of an atomic compare exchange operator on memories. |
1297 | fn check_atomic_binary_memory_cmpxchg(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> { |
1298 | let ty = self.check_shared_memarg(memarg)?; |
1299 | self.pop_operand(Some(op_ty))?; |
1300 | self.pop_operand(Some(op_ty))?; |
1301 | self.pop_operand(Some(ty))?; |
1302 | self.push_operand(op_ty)?; |
1303 | Ok(()) |
1304 | } |
1305 | |
    /// Common helper for `ref.test` and `ref.cast` downcasting/checking
    /// instructions. Returns the given `heap_type` as a `ValType`.
    fn check_downcast(&mut self, nullable: bool, mut heap_type: HeapType) -> Result<RefType> {
        self.resources
            .check_heap_type(&mut heap_type, self.offset)?;

        // The target (sub) type of the downcast.
        let sub_ty = RefType::new(nullable, heap_type).ok_or_else(|| {
            BinaryReaderError::new("implementation limit: type index too large" , self.offset)
        })?;
        // The operand only needs to live in the same type hierarchy, i.e. be a
        // subtype of the target's nullable top type.
        let sup_ty = RefType::new(true, self.resources.top_type(&heap_type))
            .expect("can't panic with non-concrete heap types" );

        self.pop_ref(Some(sup_ty))?;
        Ok(sub_ty)
    }
1321 | |
1322 | /// Common helper for both nullable and non-nullable variants of `ref.test` |
1323 | /// instructions. |
1324 | fn check_ref_test(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> { |
1325 | self.check_downcast(nullable, heap_type)?; |
1326 | self.push_operand(ValType::I32) |
1327 | } |
1328 | |
1329 | /// Common helper for both nullable and non-nullable variants of `ref.cast` |
1330 | /// instructions. |
1331 | fn check_ref_cast(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> { |
1332 | let sub_ty = self.check_downcast(nullable, heap_type)?; |
1333 | self.push_operand(sub_ty) |
1334 | } |
1335 | |
1336 | /// Common helper for checking the types of globals accessed with atomic RMW |
1337 | /// instructions, which only allow `i32` and `i64`. |
1338 | fn check_atomic_global_rmw_ty(&self, global_index: u32) -> Result<ValType> { |
1339 | let ty = self.global_type_at(global_index)?.content_type; |
1340 | if !(ty == ValType::I32 || ty == ValType::I64) { |
1341 | bail!( |
1342 | self.offset, |
1343 | "invalid type: `global.atomic.rmw.*` only allows `i32` and `i64`" |
1344 | ); |
1345 | } |
1346 | Ok(ty) |
1347 | } |
1348 | |
    /// Common helper for checking the types of structs accessed with atomic RMW
    /// instructions, which only allow `i32` and `i64` types.
    fn check_struct_atomic_rmw(
        &mut self,
        op: &'static str,
        struct_type_index: u32,
        field_index: u32,
    ) -> Result<()> {
        // The field must be mutable and hold exactly an i32 or i64 (no packed
        // storage types, no refs).
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let field_ty = match field.element_type {
            StorageType::Val(ValType::I32) => ValType::I32,
            StorageType::Val(ValType::I64) => ValType::I64,
            _ => bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw. {}` only allows `i32` and `i64`" ,
                op
            ),
        };
        // Pops the value operand and the struct reference; pushes a result of
        // the field's type.
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
1372 | |
    /// Common helper for checking the types of arrays accessed with atomic RMW
    /// instructions, which only allow `i32` and `i64`.
    fn check_array_atomic_rmw(&mut self, op: &'static str, type_index: u32) -> Result<()> {
        // The element must be mutable and hold exactly an i32 or i64.
        let field = self.mutable_array_type_at(type_index)?;
        let elem_ty = match field.element_type {
            StorageType::Val(ValType::I32) => ValType::I32,
            StorageType::Val(ValType::I64) => ValType::I64,
            _ => bail!(
                self.offset,
                "invalid type: `array.atomic.rmw. {}` only allows `i32` and `i64`" ,
                op
            ),
        };
        // Pops the value operand, the i32 element index, and the array
        // reference; pushes a result of the element's type.
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
1392 | |
1393 | fn element_type_at(&self, elem_index: u32) -> Result<RefType> { |
1394 | match self.resources.element_type_at(elem_index) { |
1395 | Some(ty) => Ok(ty), |
1396 | None => bail!( |
1397 | self.offset, |
1398 | "unknown elem segment {}: segment index out of bounds" , |
1399 | elem_index |
1400 | ), |
1401 | } |
1402 | } |
1403 | |
1404 | fn sub_type_at(&self, at: u32) -> Result<&'resources SubType> { |
1405 | self.resources |
1406 | .sub_type_at(at) |
1407 | .ok_or_else(|| format_err!(self.offset, "unknown type: type index out of bounds" )) |
1408 | } |
1409 | |
    /// Returns the struct type at index `at`.
    ///
    /// Errors if the index doesn't name a struct, or if a shared function
    /// would gain access to an unshared struct.
    fn struct_type_at(&self, at: u32) -> Result<&'resources StructType> {
        let sub_ty = self.sub_type_at(at)?;
        if let CompositeInnerType::Struct(struct_ty) = &sub_ty.composite_type.inner {
            if self.inner.shared && !sub_ty.composite_type.shared {
                bail!(
                    self.offset,
                    "shared functions cannot access unshared structs" ,
                );
            }
            Ok(struct_ty)
        } else {
            bail!(
                self.offset,
                "expected struct type at index {at}, found {sub_ty}"
            )
        }
    }
1427 | |
1428 | fn struct_field_at(&self, struct_type_index: u32, field_index: u32) -> Result<FieldType> { |
1429 | let field_index = usize::try_from(field_index).map_err(|_| { |
1430 | BinaryReaderError::new("unknown field: field index out of bounds" , self.offset) |
1431 | })?; |
1432 | self.struct_type_at(struct_type_index)? |
1433 | .fields |
1434 | .get(field_index) |
1435 | .copied() |
1436 | .ok_or_else(|| { |
1437 | BinaryReaderError::new("unknown field: field index out of bounds" , self.offset) |
1438 | }) |
1439 | } |
1440 | |
1441 | fn mutable_struct_field_at( |
1442 | &self, |
1443 | struct_type_index: u32, |
1444 | field_index: u32, |
1445 | ) -> Result<FieldType> { |
1446 | let field = self.struct_field_at(struct_type_index, field_index)?; |
1447 | if !field.mutable { |
1448 | bail!( |
1449 | self.offset, |
1450 | "invalid struct modification: struct field is immutable" |
1451 | ) |
1452 | } |
1453 | Ok(field) |
1454 | } |
1455 | |
    /// Returns the element field type of the array type at index `at`.
    ///
    /// Errors if the index doesn't name an array, or if a shared function
    /// would gain access to an unshared array.
    fn array_type_at(&self, at: u32) -> Result<FieldType> {
        let sub_ty = self.sub_type_at(at)?;
        if let CompositeInnerType::Array(array_ty) = &sub_ty.composite_type.inner {
            if self.inner.shared && !sub_ty.composite_type.shared {
                bail!(
                    self.offset,
                    "shared functions cannot access unshared arrays" ,
                );
            }
            Ok(array_ty.0)
        } else {
            bail!(
                self.offset,
                "expected array type at index {at}, found {sub_ty}"
            )
        }
    }
1473 | |
1474 | fn mutable_array_type_at(&self, at: u32) -> Result<FieldType> { |
1475 | let field = self.array_type_at(at)?; |
1476 | if !field.mutable { |
1477 | bail!( |
1478 | self.offset, |
1479 | "invalid array modification: array is immutable" |
1480 | ) |
1481 | } |
1482 | Ok(field) |
1483 | } |
1484 | |
    /// Returns the function type at index `at`.
    ///
    /// Errors if the index doesn't name a function type, or if a shared
    /// function would gain access to an unshared function.
    fn func_type_at(&self, at: u32) -> Result<&'resources FuncType> {
        let sub_ty = self.sub_type_at(at)?;
        if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner {
            if self.inner.shared && !sub_ty.composite_type.shared {
                bail!(
                    self.offset,
                    "shared functions cannot access unshared functions" ,
                );
            }
            Ok(func_ty)
        } else {
            bail!(
                self.offset,
                "expected func type at index {at}, found {sub_ty}"
            )
        }
    }
1502 | |
    /// Returns the continuation type at index `at`.
    ///
    /// Errors if the index doesn't name a continuation type, or on a
    /// shared/unshared mismatch.
    fn cont_type_at(&self, at: u32) -> Result<&ContType> {
        let sub_ty = self.sub_type_at(at)?;
        if let CompositeInnerType::Cont(cont_ty) = &sub_ty.composite_type.inner {
            if self.inner.shared && !sub_ty.composite_type.shared {
                bail!(
                    self.offset,
                    "shared continuations cannot access unshared continuations" ,
                );
            }
            Ok(cont_ty)
        } else {
            bail!(self.offset, "non-continuation type {at}" ,)
        }
    }
1517 | |
    /// Returns the function type that the continuation type `cont_ty` refers
    /// to.
    ///
    /// The continuation's inner index is expected to be canonicalized already,
    /// hence the `expect`/`unwrap_func` rather than error returns.
    fn func_type_of_cont_type(&self, cont_ty: &ContType) -> &'resources FuncType {
        let func_id = cont_ty.0.as_core_type_id().expect("valid core type id" );
        self.resources.sub_type_at_id(func_id).unwrap_func()
    }
1522 | |
1523 | fn tag_at(&self, at: u32) -> Result<&'resources FuncType> { |
1524 | self.resources |
1525 | .tag_at(at) |
1526 | .ok_or_else(|| format_err!(self.offset, "unknown tag {}: tag index out of bounds" , at)) |
1527 | } |
1528 | |
1529 | // Similar to `tag_at`, but checks that the result type is |
1530 | // empty. This is necessary when enabling the stack switching |
1531 | // feature as it allows non-empty result types on tags. |
1532 | fn exception_tag_at(&self, at: u32) -> Result<&'resources FuncType> { |
1533 | let func_ty = self.tag_at(at)?; |
1534 | if func_ty.results().len() != 0 { |
1535 | bail!( |
1536 | self.offset, |
1537 | "invalid exception type: non-empty tag result type" |
1538 | ); |
1539 | } |
1540 | Ok(func_ty) |
1541 | } |
1542 | |
    /// Returns the type of the global at index `at`.
    ///
    /// Errors if the index is out of bounds, or if a shared function would
    /// gain access to an unshared global.
    fn global_type_at(&self, at: u32) -> Result<GlobalType> {
        if let Some(ty) = self.resources.global_at(at) {
            if self.inner.shared && !ty.shared {
                bail!(
                    self.offset,
                    "shared functions cannot access unshared globals" ,
                );
            }
            Ok(ty)
        } else {
            bail!(self.offset, "unknown global: global index out of bounds" );
        }
    }
1556 | |
    /// Validates that the `table` is valid and returns the type it points to.
    ///
    /// Also enforces that shared functions only access shared tables.
    fn table_type_at(&self, table: u32) -> Result<TableType> {
        match self.resources.table_at(table) {
            Some(ty) => {
                if self.inner.shared && !ty.shared {
                    bail!(
                        self.offset,
                        "shared functions cannot access unshared tables" ,
                    );
                }
                Ok(ty)
            }
            None => bail!(
                self.offset,
                "unknown table {table}: table index out of bounds"
            ),
        }
    }
1575 | |
    /// Returns the parameter types of block type `ty` as an exact-size
    /// iterator.
    ///
    /// Empty and single-result block types take no parameters; only
    /// function-typed blocks do.
    fn params(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
        Ok(match ty {
            BlockType::Empty | BlockType::Type(_) => Either::B(None.into_iter()),
            BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.params().iter().copied()),
        })
    }
1582 | |
    /// Returns the result types of block type `ty` as an exact-size iterator.
    fn results(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
        Ok(match ty {
            BlockType::Empty => Either::B(None.into_iter()),
            // A bare value type is a single result.
            BlockType::Type(t) => Either::B(Some(t).into_iter()),
            BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.results().iter().copied()),
        })
    }
1590 | |
    /// Returns the types a branch to a frame of the given `kind` and block
    /// type `ty` must supply.
    ///
    /// Branches to a `loop` target the block's start (its parameters); all
    /// other branches target the block's end (its results).
    fn label_types(
        &self,
        ty: BlockType,
        kind: FrameKind,
    ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
        Ok(match kind {
            FrameKind::Loop => Either::A(self.params(ty)?),
            _ => Either::B(self.results(ty)?),
        })
    }
1601 | |
1602 | fn check_data_segment(&self, data_index: u32) -> Result<()> { |
1603 | match self.resources.data_count() { |
1604 | None => bail!(self.offset, "data count section required" ), |
1605 | Some(count) if data_index < count => Ok(()), |
1606 | Some(_) => bail!(self.offset, "unknown data segment {data_index}" ), |
1607 | } |
1608 | } |
1609 | |
    /// Validates the handler `table` attached to a `resume`-style instruction
    /// annotated with continuation type `type_index`.
    ///
    /// Returns the function type underlying that continuation type.
    fn check_resume_table(
        &mut self,
        table: ResumeTable,
        type_index: u32, // The type index annotation on the `resume` instruction, which `table` appears on.
    ) -> Result<&'resources FuncType> {
        let cont_ty = self.cont_type_at(type_index)?;
        // ts1 -> ts2
        let old_func_ty = self.func_type_of_cont_type(cont_ty);
        for handle in table.handlers {
            match handle {
                Handle::OnLabel { tag, label } => {
                    // ts1' -> ts2'
                    let tag_ty = self.tag_at(tag)?;
                    // ts1'' (ref (cont $ft))
                    let block = self.jump(label)?;
                    // Pop the continuation reference.
                    match self.label_types(block.0, block.1)?.last() {
                        // The label's final result must be a concrete
                        // reference naming a continuation type.
                        Some(ValType::Ref(rt)) if rt.is_concrete_type_ref() => {
                            let sub_ty = self.resources.sub_type_at_id(rt.type_index().unwrap().as_core_type_id().expect("canonicalized index" ));
                            let new_cont =
                                if let CompositeInnerType::Cont(cont) = &sub_ty.composite_type.inner {
                                    cont
                                } else {
                                    bail!(self.offset, "non-continuation type" );
                                };
                            let new_func_ty = self.func_type_of_cont_type(&new_cont);
                            // Check that (ts2' -> ts2) <: $ft
                            if new_func_ty.params().len() != tag_ty.results().len() || !self.is_subtype_many(new_func_ty.params(), tag_ty.results())
                                || old_func_ty.results().len() != new_func_ty.results().len() || !self.is_subtype_many(old_func_ty.results(), new_func_ty.results()) {
                                bail!(self.offset, "type mismatch in continuation type" )
                            }
                            // The label carries the tag's parameters plus the
                            // trailing continuation reference itself.
                            let expected_nargs = tag_ty.params().len() + 1;
                            let actual_nargs = self
                                .label_types(block.0, block.1)?
                                .len();
                            if actual_nargs != expected_nargs {
                                bail!(self.offset, "type mismatch: expected {expected_nargs} label result(s), but label is annotated with {actual_nargs} results" )
                            }

                            let labeltys = self
                                .label_types(block.0, block.1)?
                                .take(expected_nargs - 1);

                            // Check that ts1'' <: ts1'.
                            for (tagty, &lblty) in labeltys.zip(tag_ty.params()) {
                                if !self.resources.is_subtype(lblty, tagty) {
                                    bail!(self.offset, "type mismatch between tag type and label type" )
                                }
                            }
                        }
                        Some(ty) => {
                            bail!(self.offset, "type mismatch: {}" , ty_to_str(ty))
                        }
                        _ => bail!(self.offset,
                            "type mismatch: instruction requires continuation reference type but label has none" )
                    }
                }
                Handle::OnSwitch { tag } => {
                    // `on switch` handlers don't receive payloads, so the tag
                    // must have no parameters.
                    let tag_ty = self.tag_at(tag)?;
                    if tag_ty.params().len() != 0 {
                        bail!(self.offset, "type mismatch: non-empty tag parameter type" )
                    }
                }
            }
        }
        Ok(old_func_ty)
    }
1677 | |
1678 | /// Applies `is_subtype` pointwise two equally sized collections |
1679 | /// (i.e. equally sized after skipped elements). |
1680 | fn is_subtype_many(&mut self, ts1: &[ValType], ts2: &[ValType]) -> bool { |
1681 | debug_assert!(ts1.len() == ts2.len()); |
1682 | ts1.iter() |
1683 | .zip(ts2.iter()) |
1684 | .all(|(ty1, ty2)| self.resources.is_subtype(*ty1, *ty2)) |
1685 | } |
1686 | |
1687 | fn check_binop128(&mut self) -> Result<()> { |
1688 | self.pop_operand(Some(ValType::I64))?; |
1689 | self.pop_operand(Some(ValType::I64))?; |
1690 | self.pop_operand(Some(ValType::I64))?; |
1691 | self.pop_operand(Some(ValType::I64))?; |
1692 | self.push_operand(ValType::I64)?; |
1693 | self.push_operand(ValType::I64)?; |
1694 | Ok(()) |
1695 | } |
1696 | |
1697 | fn check_i64_mul_wide(&mut self) -> Result<()> { |
1698 | self.pop_operand(Some(ValType::I64))?; |
1699 | self.pop_operand(Some(ValType::I64))?; |
1700 | self.push_operand(ValType::I64)?; |
1701 | self.push_operand(ValType::I64)?; |
1702 | Ok(()) |
1703 | } |
1704 | } |
1705 | |
1706 | pub fn ty_to_str(ty: ValType) -> &'static str { |
1707 | match ty { |
1708 | ValType::I32 => "i32" , |
1709 | ValType::I64 => "i64" , |
1710 | ValType::F32 => "f32" , |
1711 | ValType::F64 => "f64" , |
1712 | ValType::V128 => "v128" , |
1713 | ValType::Ref(r: RefType) => r.wat(), |
1714 | } |
1715 | } |
1716 | |
/// A wrapper "visitor" around the real operator validator internally which
/// exists to check that the required wasm feature is enabled to proceed with
/// validation.
///
/// This validator is macro-generated to ensure that the proposal listed in this
/// crate's macro matches the one that's validated here. Each instruction's
/// visit method validates the specified proposal is enabled and then delegates
/// to `OperatorValidatorTemp` to perform the actual opcode validation.
// Newtype wrapper: all state lives in the wrapped `OperatorValidatorTemp`.
struct WasmProposalValidator<'validator, 'resources, T>(
    OperatorValidatorTemp<'validator, 'resources, T>,
);
1728 | |
1729 | impl<T> WasmProposalValidator<'_, '_, T> { |
1730 | fn check_enabled(&self, flag: bool, desc: &str) -> Result<()> { |
1731 | if flag { |
1732 | return Ok(()); |
1733 | } |
1734 | bail!(self.0.offset, " {desc} support is not enabled" ); |
1735 | } |
1736 | } |
1737 | |
// Generates the `visit_*` methods of `WasmProposalValidator`: each generated
// method first checks that the wasm proposal gating the instruction is
// enabled (via `check_enabled`) and then forwards to the inner
// `OperatorValidatorTemp` for the actual opcode validation.
macro_rules! validate_proposal {
    ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {
        $(
            fn $visit(&mut self $($(,$arg: $argty)*)?) -> Result<()> {
                // Gate on the proposal listed for this opcode, then delegate.
                validate_proposal!(validate self $proposal);
                self.0.$visit($( $($arg),* )?)
            }
        )*
    };

    // MVP instructions are always available: no feature check is emitted.
    (validate self mvp) => {};
    (validate $self:ident $proposal:ident) => {
        $self.check_enabled($self.0.features.$proposal(), validate_proposal!(desc $proposal))?
    };

    // Human-readable proposal names used in "support is not enabled" errors.
    (desc simd) => ("SIMD");
    (desc relaxed_simd) => ("relaxed SIMD");
    (desc threads) => ("threads");
    (desc shared_everything_threads) => ("shared-everything-threads");
    (desc saturating_float_to_int) => ("saturating float to int conversions");
    (desc reference_types) => ("reference types");
    (desc bulk_memory) => ("bulk memory");
    (desc sign_extension) => ("sign extension operations");
    (desc exceptions) => ("exceptions");
    (desc tail_call) => ("tail calls");
    (desc function_references) => ("function references");
    (desc memory_control) => ("memory control");
    (desc gc) => ("gc");
    (desc legacy_exceptions) => ("legacy exceptions");
    (desc stack_switching) => ("stack switching");
    (desc wide_arithmetic) => ("wide arithmetic");
}
1770 | |
impl<'a, T> VisitOperator<'a> for WasmProposalValidator<'_, '_, T>
where
    T: WasmModuleResources,
{
    type Output = Result<()>;

    // SIMD opcodes are routed back to this same validator so they get the
    // same feature-gating treatment.
    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        Some(self)
    }

    // Expands to one feature-gating `visit_*` method per core operator.
    crate::for_each_visit_operator!(validate_proposal);
}
1784 | |
#[cfg(feature = "simd")]
impl<'a, T> VisitSimdOperator<'a> for WasmProposalValidator<'_, '_, T>
where
    T: WasmModuleResources,
{
    // Expands to one feature-gating `visit_*` method per SIMD operator.
    crate::for_each_visit_simd_operator!(validate_proposal);
}
1792 | |
1793 | #[track_caller ] |
1794 | #[inline ] |
1795 | fn debug_assert_type_indices_are_ids(ty: ValType) { |
1796 | if cfg!(debug_assertions) { |
1797 | if let ValType::Ref(r: RefType) = ty { |
1798 | if let HeapType::Concrete(idx: UnpackedIndex) = r.heap_type() { |
1799 | debug_assert!( |
1800 | matches!(idx, UnpackedIndex::Id(_)), |
1801 | "type reference should be a `CoreTypeId`, found {idx:?}" |
1802 | ); |
1803 | } |
1804 | } |
1805 | } |
1806 | } |
1807 | |
1808 | impl<'a, T> VisitOperator<'a> for OperatorValidatorTemp<'_, '_, T> |
1809 | where |
1810 | T: WasmModuleResources, |
1811 | { |
1812 | type Output = Result<()>; |
1813 | |
    // SIMD opcodes are validated by this same type.
    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        Some(self)
    }
1818 | |
    // `nop` has no operands and no results.
    fn visit_nop(&mut self) -> Self::Output {
        Ok(())
    }
    // `unreachable` marks the rest of the current frame as unreachable,
    // relaxing subsequent type-checking.
    fn visit_unreachable(&mut self) -> Self::Output {
        self.unreachable()?;
        Ok(())
    }
    // `block`: pop the block's parameter types (in reverse) and push a new
    // control frame.
    fn visit_block(&mut self, mut ty: BlockType) -> Self::Output {
        self.check_block_type(&mut ty)?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::Block, ty)?;
        Ok(())
    }
    // `loop`: identical to `block` except the frame kind (branches to a
    // loop target the loop's parameters, not its results).
    fn visit_loop(&mut self, mut ty: BlockType) -> Self::Output {
        self.check_block_type(&mut ty)?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::Loop, ty)?;
        Ok(())
    }
    // `if`: pops the `i32` condition first, then the block parameters.
    fn visit_if(&mut self, mut ty: BlockType) -> Self::Output {
        self.check_block_type(&mut ty)?;
        self.pop_operand(Some(ValType::I32))?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::If, ty)?;
        Ok(())
    }
    // `else`: closes the `if` arm and opens an `else` frame with the same
    // block type.
    fn visit_else(&mut self) -> Self::Output {
        let frame = self.pop_ctrl()?;
        if frame.kind != FrameKind::If {
            bail!(self.offset, "else found outside of an `if` block");
        }
        self.push_ctrl(FrameKind::Else, frame.block_type)?;
        Ok(())
    }
    // `try_table` (exception-handling proposal): validates the block type
    // and then each catch clause's tag/label compatibility before pushing
    // the control frame.
    fn visit_try_table(&mut self, mut ty: TryTable) -> Self::Output {
        self.check_block_type(&mut ty.ty)?;
        for ty in self.params(ty.ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        let exn_type = ValType::from(RefType::EXN);
        for catch in ty.catches {
            match catch {
                // `catch`: label types must match the tag's parameters
                // exactly (pointwise subtype check).
                Catch::One { tag, label } => {
                    let tag = self.exception_tag_at(tag)?;
                    let (ty, kind) = self.jump(label)?;
                    let params = tag.params();
                    let types = self.label_types(ty, kind)?;
                    if params.len() != types.len() {
                        bail!(
                            self.offset,
                            "type mismatch: catch label must have same number of types as tag"
                        );
                    }
                    for (expected, actual) in types.zip(params) {
                        self.match_operand(*actual, expected)?;
                    }
                }
                // `catch_ref`: like `catch` but the label carries one extra
                // trailing `exnref` value.
                Catch::OneRef { tag, label } => {
                    let tag = self.exception_tag_at(tag)?;
                    let (ty, kind) = self.jump(label)?;
                    let tag_params = tag.params().iter().copied();
                    let label_types = self.label_types(ty, kind)?;
                    if tag_params.len() + 1 != label_types.len() {
                        bail!(
                            self.offset,
                            "type mismatch: catch_ref label must have one \
                             more type than tag types",
                        );
                    }
                    for (expected_label_type, actual_tag_param) in
                        label_types.zip(tag_params.chain([exn_type]))
                    {
                        self.match_operand(actual_tag_param, expected_label_type)?;
                    }
                }

                // `catch_all`: label must take no values.
                Catch::All { label } => {
                    let (ty, kind) = self.jump(label)?;
                    if self.label_types(ty, kind)?.len() != 0 {
                        bail!(
                            self.offset,
                            "type mismatch: catch_all label must have no result types"
                        );
                    }
                }

                // `catch_all_ref`: label must take exactly one value which
                // `exnref` must be a subtype of.
                Catch::AllRef { label } => {
                    let (ty, kind) = self.jump(label)?;
                    let mut types = self.label_types(ty, kind)?;
                    let ty = match (types.next(), types.next()) {
                        (Some(ty), None) => ty,
                        _ => {
                            bail!(
                                self.offset,
                                "type mismatch: catch_all_ref label must have \
                                 exactly one result type"
                            );
                        }
                    };
                    if !self.resources.is_subtype(exn_type, ty) {
                        bail!(
                            self.offset,
                            "type mismatch: catch_all_ref label must a \
                             subtype of (ref exn)"
                        );
                    }
                }
            }
        }
        self.push_ctrl(FrameKind::TryTable, ty.ty)?;
        Ok(())
    }
    // `throw`: pops the tag's parameter values and makes the rest of the
    // frame unreachable.
    fn visit_throw(&mut self, index: u32) -> Self::Output {
        // Check values associated with the exception.
        let ty = self.exception_tag_at(index)?;
        // Clone to release the borrow of `self` before popping operands.
        for ty in ty.clone().params().iter().rev() {
            self.pop_operand(Some(*ty))?;
        }
        // this should be validated when the tag was defined in the module
        debug_assert!(ty.results().is_empty());
        self.unreachable()?;
        Ok(())
    }
    // `throw_ref`: consumes an `exnref` and makes the frame unreachable.
    fn visit_throw_ref(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::EXNREF))?;
        self.unreachable()?;
        Ok(())
    }
    // `end`: closes the current control frame and pushes its result types.
    fn visit_end(&mut self) -> Self::Output {
        let mut frame = self.pop_ctrl()?;

        // Note that this `if` isn't included in the appendix right
        // now, but it's used to allow for `if` statements that are
        // missing an `else` block which have the same parameter/return
        // types on the block (since that's valid).
        if frame.kind == FrameKind::If {
            self.push_ctrl(FrameKind::Else, frame.block_type)?;
            frame = self.pop_ctrl()?;
        }
        for ty in self.results(frame.block_type)? {
            self.push_operand(ty)?;
        }

        // Record the offset of the `end` which emptied the control stack,
        // i.e. the end of the function body, for later diagnostics.
        if self.control.is_empty() && self.end_which_emptied_control.is_none() {
            assert_ne!(self.offset, 0);
            self.end_which_emptied_control = Some(self.offset);
        }
        Ok(())
    }
    // `br`: pops the target label's types and marks the frame unreachable.
    fn visit_br(&mut self, relative_depth: u32) -> Self::Output {
        let (ty, kind) = self.jump(relative_depth)?;
        for ty in self.label_types(ty, kind)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
    // `br_if`: conditional branch; label types are popped and re-pushed
    // since fall-through keeps them on the stack.
    fn visit_br_if(&mut self, relative_depth: u32) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let (ty, kind) = self.jump(relative_depth)?;
        let label_types = self.label_types(ty, kind)?;
        self.pop_push_label_types(label_types)?;
        Ok(())
    }
    // `br_table`: every target label must agree in arity with the default
    // label, and the stack must match each target's types.
    fn visit_br_table(&mut self, table: BrTable) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let default = self.jump(table.default())?;
        let default_types = self.label_types(default.0, default.1)?;
        for element in table.targets() {
            let relative_depth = element?;
            let block = self.jump(relative_depth)?;
            let label_tys = self.label_types(block.0, block.1)?;
            if label_tys.len() != default_types.len() {
                bail!(
                    self.offset,
                    "type mismatch: br_table target labels have different number of types"
                );
            }
            // Check the operand stack against this target without popping.
            self.match_stack_operands(label_tys)?;
        }
        for ty in default_types.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
    // `return`: equivalent to `br` to the outermost frame.
    fn visit_return(&mut self) -> Self::Output {
        self.check_return()?;
        Ok(())
    }
    // Direct call: look up the function's type and check params/results.
    fn visit_call(&mut self, function_index: u32) -> Self::Output {
        let ty = self.type_of_function(function_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    // Tail-call variant of `call`.
    fn visit_return_call(&mut self, function_index: u32) -> Self::Output {
        let ty = self.type_of_function(function_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    // `call_ref` (function-references proposal): callee comes from a typed
    // function reference on the stack.
    fn visit_call_ref(&mut self, type_index: u32) -> Self::Output {
        let ty = self.check_call_ref_ty(type_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    // Tail-call variant of `call_ref`.
    fn visit_return_call_ref(&mut self, type_index: u32) -> Self::Output {
        let ty = self.check_call_ref_ty(type_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    // Indirect call through a table of function references.
    fn visit_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        let ty = self.check_call_indirect_ty(type_index, table_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    // Tail-call variant of `call_indirect`.
    fn visit_return_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        let ty = self.check_call_indirect_ty(type_index, table_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    // `drop`: discards one operand of any type.
    fn visit_drop(&mut self) -> Self::Output {
        self.pop_operand(None)?;
        Ok(())
    }
    // Untyped `select`: only numeric/vector types are allowed; the two
    // value operands must have matching types.
    fn visit_select(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let ty1 = self.pop_operand(None)?;
        let ty2 = self.pop_operand(None)?;

        let ty = match (ty1, ty2) {
            // All heap-related types aren't allowed with the `select`
            // instruction
            (MaybeType::UnknownRef(..), _)
            | (_, MaybeType::UnknownRef(..))
            | (MaybeType::Known(ValType::Ref(_)), _)
            | (_, MaybeType::Known(ValType::Ref(_))) => {
                bail!(
                    self.offset,
                    "type mismatch: select only takes integral types"
                )
            }

            // If one operand is the "bottom" type then whatever the other
            // operand is is the result of the `select`
            (MaybeType::Bottom, t) | (t, MaybeType::Bottom) => t,

            // Otherwise these are two integral types and they must match for
            // `select` to typecheck.
            (t @ MaybeType::Known(t1), MaybeType::Known(t2)) => {
                if t1 != t2 {
                    bail!(
                        self.offset,
                        "type mismatch: select operands have different types"
                    );
                }
                t
            }
        };
        self.push_operand(ty)?;
        Ok(())
    }
    // Typed `select t`: the annotation is validated as a value type and
    // both value operands must be subtypes of it.
    fn visit_typed_select(&mut self, mut ty: ValType) -> Self::Output {
        self.resources
            .check_value_type(&mut ty, &self.features, self.offset)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ty)?;
        Ok(())
    }
    // `local.get`: reading a non-defaultable local before it's initialized
    // is an error (function-references proposal init tracking).
    fn visit_local_get(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        debug_assert_type_indices_are_ids(ty);
        if self.local_inits.is_uninit(local_index) {
            bail!(self.offset, "uninitialized local: {}", local_index);
        }
        self.push_operand(ty)?;
        Ok(())
    }
    // `local.set`: pops a value of the local's type and marks it
    // initialized.
    fn visit_local_set(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        self.pop_operand(Some(ty))?;
        self.local_inits.set_init(local_index);
        Ok(())
    }
    // `local.tee`: like `local.set` but leaves the value on the stack.
    fn visit_local_tee(&mut self, local_index: u32) -> Self::Output {
        let expected_ty = self.local(local_index)?;
        self.pop_operand(Some(expected_ty))?;
        self.local_inits.set_init(local_index);
        self.push_operand(expected_ty)?;
        Ok(())
    }
    // `global.get`: pushes the global's content type.
    fn visit_global_get(&mut self, global_index: u32) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        debug_assert_type_indices_are_ids(ty);
        self.push_operand(ty)?;
        Ok(())
    }
    fn visit_global_atomic_get(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
        self.visit_global_get(global_index)?;
        // No validation of `ordering` is needed because `global.atomic.get` can
        // be used on both shared and unshared globals. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.global_type_at(global_index)?.content_type;
        let supertype = RefType::ANYREF.into();
        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
            bail!(self.offset, "invalid type: `global.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`");
        }
        Ok(())
    }
    // `global.set`: only mutable globals may be written.
    fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
        let ty = self.global_type_at(global_index)?;
        if !ty.mutable {
            bail!(
                self.offset,
                "global is immutable: cannot modify it with `global.set`"
            );
        }
        self.pop_operand(Some(ty.content_type))?;
        Ok(())
    }
    fn visit_global_atomic_set(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
        self.visit_global_set(global_index)?;
        // No validation of `ordering` is needed because `global.atomic.set` can
        // be used on both shared and unshared globals.
        let ty = self.global_type_at(global_index)?.content_type;
        let supertype = RefType::ANYREF.into();
        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
            bail!(self.offset, "invalid type: `global.atomic.set` only allows `i32`, `i64` and subtypes of `anyref`");
        }
        Ok(())
    }
    // Atomic RMW arithmetic/bitwise ops on globals: the shared type check
    // (`i32`/`i64` only) lives in `check_atomic_global_rmw_ty`; each op then
    // validates as a unary op (pops old value's operand, pushes old value).
    fn visit_global_atomic_rmw_add(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_sub(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_and(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_or(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_xor(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    // `xchg` additionally permits subtypes of `anyref` (swap of references).
    fn visit_global_atomic_rmw_xchg(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        if !(ty == ValType::I32
            || ty == ValType::I64
            || self.resources.is_subtype(ty, RefType::ANYREF.into()))
        {
            bail!(self.offset, "invalid type: `global.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`");
        }
        self.check_unary_op(ty)
    }
    // `cmpxchg` requires comparability, hence `eqref` rather than `anyref`;
    // it consumes two operands (expected + replacement).
    fn visit_global_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        if !(ty == ValType::I32
            || ty == ValType::I64
            || self.resources.is_subtype(ty, RefType::EQREF.into()))
        {
            bail!(self.offset, "invalid type: `global.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`");
        }
        self.check_binary_op(ty)
    }
2225 | |
    // Memory loads: `check_memarg` validates the memory index/alignment and
    // returns the memory's index type (`i32` or `i64` for memory64), which
    // is popped as the address; the loaded value's type is then pushed.
    fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    // Float loads additionally require float support to be enabled.
    fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    // Narrow loads: the signed/unsigned variants validate identically, so
    // `_u` delegates to `_s`.
    fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load8_s(memarg)
    }
    fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load16_s(memarg)
    }
    fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load8_s(memarg)
    }
    fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load16_s(memarg)
    }
    fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load32_s(memarg)
    }
    // Memory stores: pop the value first, then the address (whose type is
    // the memory's index type returned from `check_memarg`).
    fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // Float stores additionally require float support to be enabled.
    fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // Narrow stores: the stored operand is still a full `i32`/`i64`.
    fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // `memory.size`: result type is the memory's index type (`i32`, or
    // `i64` for memory64).
    fn visit_memory_size(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    // `memory.grow`: delta and result both use the memory's index type.
    fn visit_memory_grow(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(index_ty))?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    // Constants: the immediate value itself needs no validation, only the
    // result type is pushed.
    fn visit_i32_const(&mut self, _value: i32) -> Self::Output {
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_const(&mut self, _value: i64) -> Self::Output {
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_const(&mut self, _value: Ieee32) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_const(&mut self, _value: Ieee64) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    // Integer tests and comparisons: `eqz` pops one operand and pushes an
    // `i32`; the binary comparisons delegate to `check_cmp_op`, which pops
    // two operands of the given type and pushes an `i32` result.
    fn visit_i32_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i64_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I64))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    // Float comparisons: `check_fcmp_op` additionally verifies float
    // support is enabled, then pops two operands and pushes an `i32`.
    fn visit_f32_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f64_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    // `i32` arithmetic/bitwise ops: unary ops pop one `i32` and push one;
    // binary ops pop two and push one.
    fn visit_i32_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_and(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_or(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_xor(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotr(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    // `i64` arithmetic/bitwise ops: same shapes as the `i32` variants.
    fn visit_i64_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_div_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_div_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rem_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rem_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_and(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_or(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_xor(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shr_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shr_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rotl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rotr(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
// Floating-point operators. `check_funary_op`/`check_fbinary_op` are the
// float-specific variants of the unary/binary stack checks ([ty] -> [ty]
// and [ty, ty] -> [ty] respectively); see their definitions for any
// additional float-related feature gating they perform.
fn visit_f32_abs(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F32)
}
fn visit_f32_neg(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F32)
}
fn visit_f32_ceil(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F32)
}
fn visit_f32_floor(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F32)
}
fn visit_f32_trunc(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F32)
}
fn visit_f32_nearest(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F32)
}
fn visit_f32_sqrt(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F32)
}
fn visit_f32_add(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F32)
}
fn visit_f32_sub(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F32)
}
fn visit_f32_mul(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F32)
}
fn visit_f32_div(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F32)
}
fn visit_f32_min(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F32)
}
fn visit_f32_max(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F32)
}
fn visit_f32_copysign(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F32)
}
fn visit_f64_abs(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F64)
}
fn visit_f64_neg(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F64)
}
fn visit_f64_ceil(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F64)
}
fn visit_f64_floor(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F64)
}
fn visit_f64_trunc(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F64)
}
fn visit_f64_nearest(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F64)
}
fn visit_f64_sqrt(&mut self) -> Self::Output {
    self.check_funary_op(ValType::F64)
}
fn visit_f64_add(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F64)
}
fn visit_f64_sub(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F64)
}
fn visit_f64_mul(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F64)
}
fn visit_f64_div(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F64)
}
fn visit_f64_min(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F64)
}
fn visit_f64_max(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F64)
}
fn visit_f64_copysign(&mut self) -> Self::Output {
    self.check_fbinary_op(ValType::F64)
}
// Numeric conversion operators. `check_conversion_op(to, from)` validates
// `[from] -> [to]`; `check_fconversion_op` is the variant used when the
// result is a float type (see its definition for extra gating).
fn visit_i32_wrap_i64(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::I64)
}
fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::F32)
}
fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::F32)
}
fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::F64)
}
fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::F64)
}
fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::I32)
}
fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::I32)
}
fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::F32)
}
fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::F32)
}
fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::F64)
}
fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::F64)
}
fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F32, ValType::I32)
}
fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F32, ValType::I32)
}
fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F32, ValType::I64)
}
fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F32, ValType::I64)
}
fn visit_f32_demote_f64(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F32, ValType::F64)
}
fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F64, ValType::I32)
}
fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F64, ValType::I32)
}
fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F64, ValType::I64)
}
fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F64, ValType::I64)
}
fn visit_f64_promote_f32(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F64, ValType::F32)
}
// Reinterpretations are bit-casts; validated as plain conversions.
fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::F32)
}
fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::F64)
}
fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F32, ValType::I32)
}
fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
    self.check_fconversion_op(ValType::F64, ValType::I64)
}
// Saturating (non-trapping) float-to-int truncations; same stack shape
// as the trapping `trunc_*` forms above.
fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::F32)
}
fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::F32)
}
fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::F64)
}
fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I32, ValType::F64)
}
fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::F32)
}
fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::F32)
}
fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::F64)
}
fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
    self.check_conversion_op(ValType::I64, ValType::F64)
}
// In-place sign extensions (sign-extension operators proposal);
// validated as unary ops `[ty] -> [ty]`.
fn visit_i32_extend8_s(&mut self) -> Self::Output {
    self.check_unary_op(ValType::I32)
}
fn visit_i32_extend16_s(&mut self) -> Self::Output {
    self.check_unary_op(ValType::I32)
}
fn visit_i64_extend8_s(&mut self) -> Self::Output {
    self.check_unary_op(ValType::I64)
}
fn visit_i64_extend16_s(&mut self) -> Self::Output {
    self.check_unary_op(ValType::I64)
}
fn visit_i64_extend32_s(&mut self) -> Self::Output {
    self.check_unary_op(ValType::I64)
}
// Atomic loads and stores (threads proposal). The helpers validate the
// `memarg` against the memory's type and check the address/value
// operands; the narrow variants (`load8_u`, `store16`, ...) still
// produce/consume the full-width `ValType` given here.
fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_load(memarg, ValType::I32)
}
fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_load(memarg, ValType::I32)
}
fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_load(memarg, ValType::I32)
}
fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_load(memarg, ValType::I64)
}
fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_load(memarg, ValType::I64)
}
fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_load(memarg, ValType::I64)
}
fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_load(memarg, ValType::I64)
}
fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_store(memarg, ValType::I32)
}
fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_store(memarg, ValType::I32)
}
fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_store(memarg, ValType::I32)
}
fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_store(memarg, ValType::I64)
}
fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_store(memarg, ValType::I64)
}
fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_store(memarg, ValType::I64)
}
fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_store(memarg, ValType::I64)
}
// Atomic read-modify-write binary operators (add/sub/and/or/xor), in
// full-width and narrow (8/16/32-bit) unsigned forms. All share the
// stack shape `[addr, operand:ty] -> [ty]`, validated by
// `check_atomic_binary_memory_op`.
fn visit_i32_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i64_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw32_add_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw32_sub_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw32_and_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw32_or_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw32_xor_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
// Atomic exchange and compare-exchange. `xchg` has the same
// `[addr, value:ty] -> [ty]` shape as the RMW binary ops above;
// `cmpxchg` takes an extra `expected` operand and is validated by the
// dedicated `check_atomic_binary_memory_cmpxchg` helper.
fn visit_i32_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
}
fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
}
fn visit_i64_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw32_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
}
fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
}
// `memory.atomic.notify` has the same `[addr, count:i32] -> [i32]`
// shape as an i32 RMW binary op, so it reuses that helper.
fn visit_memory_atomic_notify(&mut self, memarg: MemArg) -> Self::Output {
    self.check_atomic_binary_memory_op(memarg, ValType::I32)
}
fn visit_memory_atomic_wait32(&mut self) is replaced below; see body.
fn visit_memory_atomic_wait64(&mut self, memarg: MemArg) -> Self::Output {
    // `memory.atomic.wait64`: [addr, expected:i64, timeout:i64] -> [i32].
    // `check_shared_memarg` yields the type used for the address operand
    // (presumably the memory's index type -- confirm in its definition).
    let ty = self.check_shared_memarg(memarg)?;
    // Operands popped in reverse: timeout, expected value, address.
    self.pop_operand(Some(ValType::I64))?;
    self.pop_operand(Some(ValType::I64))?;
    self.pop_operand(Some(ty))?;
    self.push_operand(ValType::I32)?;
    Ok(())
}
fn visit_atomic_fence(&mut self) -> Self::Output {
    // `atomic.fence` has no operands, results, or immediates that need
    // validation here.
    Ok(())
}
fn visit_ref_null(&mut self, mut heap_type: HeapType) -> Self::Output {
    // Feature-gate the nullable reference type *before* canonicalizing
    // the heap type below. If the heap type cannot be packed into a
    // `RefType` at this point the gate is skipped and any problem is
    // reported by `check_heap_type` instead.
    if let Some(ty) = RefType::new(true, heap_type) {
        self.features
            .check_ref_type(ty)
            .map_err(|e| BinaryReaderError::new(e, self.offset))?;
    }
    // Validate the heap type and canonicalize any type index in place.
    self.resources
        .check_heap_type(&mut heap_type, self.offset)?;
    // After validation the heap type is known to be packable, so this
    // `expect` cannot fire for well-formed input.
    let ty = ValType::Ref(
        RefType::new(true, heap_type).expect("existing heap types should be within our limits" ),
    );
    self.push_operand(ty)?;
    Ok(())
}
3019 | |
3020 | fn visit_ref_as_non_null(&mut self) -> Self::Output { |
3021 | let ty = self.pop_ref(None)?.as_non_null(); |
3022 | self.push_operand(ty)?; |
3023 | Ok(()) |
3024 | } |
3025 | fn visit_br_on_null(&mut self, relative_depth: u32) -> Self::Output { |
3026 | let ref_ty = self.pop_ref(None)?.as_non_null(); |
3027 | let (ft, kind) = self.jump(relative_depth)?; |
3028 | let label_types = self.label_types(ft, kind)?; |
3029 | self.pop_push_label_types(label_types)?; |
3030 | self.push_operand(ref_ty)?; |
3031 | Ok(()) |
3032 | } |
fn visit_br_on_non_null(&mut self, relative_depth: u32) -> Self::Output {
    // `br_on_non_null`: the branch target's last label type must be a
    // reference; the operand is that reference's nullable form, and on
    // fall-through the reference is *not* pushed back (the branch
    // consumes it).
    let (ft, kind) = self.jump(relative_depth)?;

    // Split off the trailing label type, which must be a reference.
    let mut label_types = self.label_types(ft, kind)?;
    let expected = match label_types.next_back() {
        None => bail!(
            self.offset,
            "type mismatch: br_on_non_null target has no label types" ,
        ),
        Some(ValType::Ref(ty)) => ty,
        Some(_) => bail!(
            self.offset,
            "type mismatch: br_on_non_null target does not end with heap type" ,
        ),
    };
    // The operand may be nullable even though the target receives the
    // non-null form.
    self.pop_ref(Some(expected.nullable()))?;

    // Check the remaining (non-reference) label types.
    self.pop_push_label_types(label_types)?;
    Ok(())
}
3053 | fn visit_ref_is_null(&mut self) -> Self::Output { |
3054 | self.pop_ref(None)?; |
3055 | self.push_operand(ValType::I32)?; |
3056 | Ok(()) |
3057 | } |
fn visit_ref_func(&mut self, function_index: u32) -> Self::Output {
    // `ref.func`: pushes a non-null reference to the function's exact
    // concrete type. The function must exist and must have been
    // declared referenceable (e.g. via an element segment or export).
    let type_id = match self.resources.type_id_of_function(function_index) {
        Some(id) => id,
        None => bail!(
            self.offset,
            "unknown function {}: function index out of bounds" ,
            function_index,
        ),
    };
    if !self.resources.is_function_referenced(function_index) {
        bail!(self.offset, "undeclared function reference" );
    }

    // Build `(ref $f)` from the canonical type id; packing can fail if
    // the id exceeds `RefType`'s index representation limit.
    let index = UnpackedIndex::Id(type_id);
    let ty = ValType::Ref(
        RefType::new(false, HeapType::Concrete(index)).ok_or_else(|| {
            BinaryReaderError::new("implementation limit: type index too large" , self.offset)
        })?,
    );
    self.push_operand(ty)?;
    Ok(())
}
fn visit_ref_eq(&mut self) -> Self::Output {
    // `ref.eq`: [eqref, eqref] -> [i32]. Both operands must be (maybe
    // shared) subtypes of `eq`, and when both shared-ness values are
    // known they must agree.
    let a = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
    let b = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
    let a_is_shared = a.is_maybe_shared(&self.resources);
    let b_is_shared = b.is_maybe_shared(&self.resources);
    match (a_is_shared, b_is_shared) {
        // One or both of the types are from unreachable code; assume
        // the shared-ness matches.
        (None, Some(_)) | (Some(_), None) | (None, None) => {}

        (Some(is_a_shared), Some(is_b_shared)) => {
            if is_a_shared != is_b_shared {
                bail!(
                    self.offset,
                    "type mismatch: expected `ref.eq` types to match `shared`-ness"
                );
            }
        }
    }
    self.push_operand(ValType::I32)
}
fn visit_memory_init(&mut self, segment: u32, mem: u32) -> Self::Output {
    // `memory.init`: [dst, src_offset:i32, len:i32] -> []. The
    // destination address uses the memory's index type (`ty`); source
    // offset and length are always i32 since they index a data segment.
    let ty = self.check_memory_index(mem)?;
    self.check_data_segment(segment)?;
    // Operands popped in reverse: len, src_offset, dst.
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ty))?;
    Ok(())
}
fn visit_data_drop(&mut self, segment: u32) -> Self::Output {
    // `data.drop` has no operands; only the segment index needs
    // validating.
    self.check_data_segment(segment)?;
    Ok(())
}
fn visit_memory_copy(&mut self, dst: u32, src: u32) -> Self::Output {
    // `memory.copy`: [dst_addr, src_addr, len] -> []. With memory64 the
    // two memories may have different index types, so each address uses
    // its own memory's index type and the length uses the smaller.
    let dst_ty = self.check_memory_index(dst)?;
    let src_ty = self.check_memory_index(src)?;

    // The length operand here is the smaller of src/dst, which is
    // i32 if one is i32
    self.pop_operand(Some(match src_ty {
        ValType::I32 => ValType::I32,
        _ => dst_ty,
    }))?;

    // ... and the offset into each memory is required to be
    // whatever the indexing type is for that memory
    self.pop_operand(Some(src_ty))?;
    self.pop_operand(Some(dst_ty))?;
    Ok(())
}
fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
    // `memory.fill`: [dst, value:i32, len] -> [] where dst and len use
    // the memory's index type. Popped in reverse order below.
    let ty = self.check_memory_index(mem)?;
    self.pop_operand(Some(ty))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ty))?;
    Ok(())
}
fn visit_memory_discard(&mut self, mem: u32) -> Self::Output {
    // `memory.discard`: both operands use the memory's index type --
    // presumably [dst, len] as with `memory.fill` minus the value;
    // confirm against the proposal text.
    let ty = self.check_memory_index(mem)?;
    self.pop_operand(Some(ty))?;
    self.pop_operand(Some(ty))?;
    Ok(())
}
fn visit_table_init(&mut self, segment: u32, table: u32) -> Self::Output {
    // `table.init`: [dst, src_offset:i32, len:i32] -> []. The element
    // segment's type must be a subtype of the table's element type, and
    // the destination index uses the table's index type.
    let table = self.table_type_at(table)?;
    let segment_ty = self.element_type_at(segment)?;
    if !self
        .resources
        .is_subtype(ValType::Ref(segment_ty), ValType::Ref(table.element_type))
    {
        bail!(self.offset, "type mismatch" );
    }
    // Operands popped in reverse: len, src_offset, dst.
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(table.index_type()))?;
    Ok(())
}
fn visit_elem_drop(&mut self, segment: u32) -> Self::Output {
    // `elem.drop` has no operands; looking up the segment's element
    // type validates the index (the type itself is unused here).
    self.element_type_at(segment)?;
    Ok(())
}
fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output {
    // `table.copy`: [dst, src, len] -> []. The source element type must
    // be a subtype of the destination's; with table64 the two tables
    // may have different index types.
    let src = self.table_type_at(src_table)?;
    let dst = self.table_type_at(dst_table)?;
    if !self.resources.is_subtype(
        ValType::Ref(src.element_type),
        ValType::Ref(dst.element_type),
    ) {
        bail!(self.offset, "type mismatch" );
    }

    // The length operand here is the smaller of src/dst, which is
    // i32 if one is i32
    self.pop_operand(Some(match src.index_type() {
        ValType::I32 => ValType::I32,
        _ => dst.index_type(),
    }))?;

    // ... and the offset into each table is required to be
    // whatever the indexing type is for that table
    self.pop_operand(Some(src.index_type()))?;
    self.pop_operand(Some(dst.index_type()))?;
    Ok(())
}
3184 | fn visit_table_get(&mut self, table: u32) -> Self::Output { |
3185 | let table = self.table_type_at(table)?; |
3186 | debug_assert_type_indices_are_ids(table.element_type.into()); |
3187 | self.pop_operand(Some(table.index_type()))?; |
3188 | self.push_operand(table.element_type)?; |
3189 | Ok(()) |
3190 | } |
fn visit_table_atomic_get(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
    // `table.atomic.get`: same stack shape as `table.get`, plus an
    // extra restriction on which element types are allowed.
    self.visit_table_get(table)?;
    // No validation of `ordering` is needed because `table.atomic.get` can
    // be used on both shared and unshared tables. But we do need to limit
    // which types can be used with this instruction.
    let ty = self.table_type_at(table)?.element_type;
    let supertype = RefType::ANYREF.shared().unwrap();
    if !self.resources.is_subtype(ty.into(), supertype.into()) {
        bail!(
            self.offset,
            "invalid type: `table.atomic.get` only allows subtypes of `anyref`"
        );
    }
    Ok(())
}
fn visit_table_set(&mut self, table: u32) -> Self::Output {
    // `table.set`: [index, value] -> []; popped in reverse order
    // (value first, then the index in the table's index type).
    let table = self.table_type_at(table)?;
    debug_assert_type_indices_are_ids(table.element_type.into());
    self.pop_operand(Some(table.element_type.into()))?;
    self.pop_operand(Some(table.index_type()))?;
    Ok(())
}
fn visit_table_atomic_set(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
    // `table.atomic.set`: same stack shape as `table.set`, plus an
    // extra restriction on which element types are allowed.
    self.visit_table_set(table)?;
    // No validation of `ordering` is needed because `table.atomic.set` can
    // be used on both shared and unshared tables. But we do need to limit
    // which types can be used with this instruction.
    let ty = self.table_type_at(table)?.element_type;
    let supertype = RefType::ANYREF.shared().unwrap();
    if !self.resources.is_subtype(ty.into(), supertype.into()) {
        bail!(
            self.offset,
            "invalid type: `table.atomic.set` only allows subtypes of `anyref`"
        );
    }
    Ok(())
}
3228 | fn visit_table_grow(&mut self, table: u32) -> Self::Output { |
3229 | let table = self.table_type_at(table)?; |
3230 | debug_assert_type_indices_are_ids(table.element_type.into()); |
3231 | self.pop_operand(Some(table.index_type()))?; |
3232 | self.pop_operand(Some(table.element_type.into()))?; |
3233 | self.push_operand(table.index_type())?; |
3234 | Ok(()) |
3235 | } |
3236 | fn visit_table_size(&mut self, table: u32) -> Self::Output { |
3237 | let table = self.table_type_at(table)?; |
3238 | self.push_operand(table.index_type())?; |
3239 | Ok(()) |
3240 | } |
fn visit_table_fill(&mut self, table: u32) -> Self::Output {
    // `table.fill`: [dst, value, len] -> []; popped in reverse order
    // (len in the index type, the fill value, then the destination).
    let table = self.table_type_at(table)?;
    debug_assert_type_indices_are_ids(table.element_type.into());
    self.pop_operand(Some(table.index_type()))?;
    self.pop_operand(Some(table.element_type.into()))?;
    self.pop_operand(Some(table.index_type()))?;
    Ok(())
}
fn visit_table_atomic_rmw_xchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
    // `table.atomic.rmw.xchg`: [index, new_value] -> [old_value]. The
    // element type must be a subtype of shared `anyref`; `ordering`
    // needs no validation here.
    let table = self.table_type_at(table)?;
    let elem_ty = table.element_type.into();
    debug_assert_type_indices_are_ids(elem_ty);
    let supertype = RefType::ANYREF.shared().unwrap();
    if !self.resources.is_subtype(elem_ty, supertype.into()) {
        bail!(
            self.offset,
            "invalid type: `table.atomic.rmw.xchg` only allows subtypes of `anyref`"
        );
    }
    // Pop in reverse: new value, then the index in the index type.
    self.pop_operand(Some(elem_ty))?;
    self.pop_operand(Some(table.index_type()))?;
    self.push_operand(elem_ty)?;
    Ok(())
}
fn visit_table_atomic_rmw_cmpxchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
    // `table.atomic.rmw.cmpxchg`: [index, expected, new_value] ->
    // [old_value]. Comparison requires `eq`-comparable elements, so the
    // element type must be a subtype of shared `eqref` (stricter than
    // the `anyref` bound used by `xchg` above).
    let table = self.table_type_at(table)?;
    let elem_ty = table.element_type.into();
    debug_assert_type_indices_are_ids(elem_ty);
    let supertype = RefType::EQREF.shared().unwrap();
    if !self.resources.is_subtype(elem_ty, supertype.into()) {
        bail!(
            self.offset,
            "invalid type: `table.atomic.rmw.cmpxchg` only allows subtypes of `eqref`"
        );
    }
    // Pop in reverse: new value, expected value, then the index.
    self.pop_operand(Some(elem_ty))?;
    self.pop_operand(Some(elem_ty))?;
    self.pop_operand(Some(table.index_type()))?;
    self.push_operand(elem_ty)?;
    Ok(())
}
fn visit_struct_new(&mut self, struct_type_index: u32) -> Self::Output {
    // `struct.new`: pops one operand per field (in reverse declaration
    // order, unpacking i8/i16 storage to i32) and pushes a non-null
    // reference to the concrete struct type.
    let struct_ty = self.struct_type_at(struct_type_index)?;
    for ty in struct_ty.fields.iter().rev() {
        self.pop_operand(Some(ty.element_type.unpack()))?;
    }
    self.push_concrete_ref(false, struct_type_index)?;
    Ok(())
}
fn visit_struct_new_default(&mut self, type_index: u32) -> Self::Output {
    // `struct.new_default`: no operands, but every field must have a
    // defaultable type (e.g. non-null references are not defaultable).
    let ty = self.struct_type_at(type_index)?;
    for field in ty.fields.iter() {
        let val_ty = field.element_type.unpack();
        if !val_ty.is_defaultable() {
            bail!(
                self.offset,
                "invalid `struct.new_default`: {val_ty} field is not defaultable"
            );
        }
    }
    // Pushes a non-null reference to the concrete struct type.
    self.push_concrete_ref(false, type_index)?;
    Ok(())
}
fn visit_struct_get(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
    // `struct.get`: [(ref null $struct)] -> [field type]. Packed (i8 or
    // i16) fields must use `struct.get_s`/`struct.get_u` instead.
    let field_ty = self.struct_field_at(struct_type_index, field_index)?;
    if field_ty.element_type.is_packed() {
        bail!(
            self.offset,
            "can only use struct `get` with non-packed storage types"
        )
    }
    // A nullable reference is accepted; null traps at runtime.
    self.pop_concrete_ref(true, struct_type_index)?;
    self.push_operand(field_ty.element_type.unpack())
}
/// Validates `struct.atomic.get`.
///
/// Delegates to the non-atomic `struct.get` validation, then further
/// restricts the field type: only `i32`, `i64`, and subtypes of shared
/// `anyref` are allowed atomically.
fn visit_struct_atomic_get(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    self.visit_struct_get(struct_type_index, field_index)?;
    // The `atomic` version has some additional type restrictions.
    let ty = self
        .struct_field_at(struct_type_index, field_index)?
        .element_type;
    let is_valid_type = match ty {
        StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
        // Other value types are allowed only if they are shared-`anyref`
        // subtypes.
        StorageType::Val(v) => self
            .resources
            .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        // Packed storage types were already rejected by `visit_struct_get`.
        _ => false,
    };
    if !is_valid_type {
        bail!(
            self.offset,
            "invalid type: `struct.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
        );
    }
    Ok(())
}
/// Validates `struct.get_s` (sign-extending get of a packed field):
/// pops a reference to the concrete struct type and pushes the field's
/// unpacked value type.
///
/// Only packed (i8/i16) fields are allowed; non-packed fields must use
/// plain `struct.get`.
fn visit_struct_get_s(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
    let field_ty = self.struct_field_at(struct_type_index, field_index)?;
    if !field_ty.element_type.is_packed() {
        bail!(
            self.offset,
            "cannot use struct.get_s with non-packed storage types"
        )
    }
    self.pop_concrete_ref(true, struct_type_index)?;
    self.push_operand(field_ty.element_type.unpack())
}
/// Validates `struct.atomic.get_s`.
///
/// Delegates entirely to `struct.get_s` validation; the atomic form adds
/// no extra type restrictions beyond requiring a packed field, which the
/// non-atomic check already enforces.
fn visit_struct_atomic_get_s(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    self.visit_struct_get_s(struct_type_index, field_index)?;
    // This instruction has the same type restrictions as the non-`atomic` version.
    debug_assert!(matches!(
        self.struct_field_at(struct_type_index, field_index)?
            .element_type,
        StorageType::I8 | StorageType::I16
    ));
    Ok(())
}
/// Validates `struct.get_u` (zero-extending get of a packed field):
/// pops a reference to the concrete struct type and pushes the field's
/// unpacked value type.
///
/// Only packed (i8/i16) fields are allowed; non-packed fields must use
/// plain `struct.get`.
fn visit_struct_get_u(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
    let field_ty = self.struct_field_at(struct_type_index, field_index)?;
    if !field_ty.element_type.is_packed() {
        bail!(
            self.offset,
            "cannot use struct.get_u with non-packed storage types"
        )
    }
    self.pop_concrete_ref(true, struct_type_index)?;
    self.push_operand(field_ty.element_type.unpack())
}
3378 | fn visit_struct_atomic_get_u( |
3379 | &mut self, |
3380 | _ordering: Ordering, |
3381 | struct_type_index: u32, |
3382 | field_index: u32, |
3383 | ) -> Self::Output { |
3384 | self.visit_struct_get_s(struct_type_index, field_index)?; |
3385 | // This instruction has the same type restrictions as the non-`atomic` version. |
3386 | debug_assert!(matches!( |
3387 | self.struct_field_at(struct_type_index, field_index)? |
3388 | .element_type, |
3389 | StorageType::I8 | StorageType::I16 |
3390 | )); |
3391 | Ok(()) |
3392 | } |
3393 | fn visit_struct_set(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output { |
3394 | let field_ty = self.mutable_struct_field_at(struct_type_index, field_index)?; |
3395 | self.pop_operand(Some(field_ty.element_type.unpack()))?; |
3396 | self.pop_concrete_ref(true, struct_type_index)?; |
3397 | Ok(()) |
3398 | } |
/// Validates `struct.atomic.set`.
///
/// Delegates to the non-atomic `struct.set` validation, then further
/// restricts the field type: `i8`, `i16`, `i32`, `i64`, and subtypes of
/// shared `anyref` are allowed atomically.
fn visit_struct_atomic_set(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    self.visit_struct_set(struct_type_index, field_index)?;
    // The `atomic` version has some additional type restrictions.
    let ty = self
        .struct_field_at(struct_type_index, field_index)?
        .element_type;
    // Unlike `struct.atomic.get`, packed fields are allowed here, so
    // this match is exhaustive without a catch-all arm.
    let is_valid_type = match ty {
        StorageType::I8 | StorageType::I16 => true,
        StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
        StorageType::Val(v) => self
            .resources
            .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
    };
    if !is_valid_type {
        bail!(
            self.offset,
            "invalid type: `struct.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
        );
    }
    Ok(())
}
/// Validates `struct.atomic.rmw.add`; shared logic lives in
/// `check_struct_atomic_rmw`.
fn visit_struct_atomic_rmw_add(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    self.check_struct_atomic_rmw("add", struct_type_index, field_index)
}
/// Validates `struct.atomic.rmw.sub`; shared logic lives in
/// `check_struct_atomic_rmw`.
fn visit_struct_atomic_rmw_sub(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    self.check_struct_atomic_rmw("sub", struct_type_index, field_index)
}
/// Validates `struct.atomic.rmw.and`; shared logic lives in
/// `check_struct_atomic_rmw`.
fn visit_struct_atomic_rmw_and(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    self.check_struct_atomic_rmw("and", struct_type_index, field_index)
}
/// Validates `struct.atomic.rmw.or`; shared logic lives in
/// `check_struct_atomic_rmw`.
fn visit_struct_atomic_rmw_or(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    self.check_struct_atomic_rmw("or", struct_type_index, field_index)
}
/// Validates `struct.atomic.rmw.xor`; shared logic lives in
/// `check_struct_atomic_rmw`.
fn visit_struct_atomic_rmw_xor(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    self.check_struct_atomic_rmw("xor", struct_type_index, field_index)
}
/// Validates `struct.atomic.rmw.xchg`: pops the new value and a
/// reference to the concrete struct type, and pushes the field's
/// unpacked type (the previously-stored value).
///
/// The (mutable) field must be `i32`, `i64`, or a subtype of shared
/// `anyref`; packed fields are rejected.
fn visit_struct_atomic_rmw_xchg(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
    let is_valid_type = match field.element_type {
        StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
        StorageType::Val(v) => self
            .resources
            .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        // Packed (i8/i16) fields are not exchangeable here.
        _ => false,
    };
    if !is_valid_type {
        bail!(
            self.offset,
            "invalid type: `struct.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
        );
    }
    let field_ty = field.element_type.unpack();
    self.pop_operand(Some(field_ty))?;
    self.pop_concrete_ref(true, struct_type_index)?;
    self.push_operand(field_ty)?;
    Ok(())
}
/// Validates `struct.atomic.rmw.cmpxchg`: pops a replacement value, an
/// expected value, and a reference to the concrete struct type, then
/// pushes the field's unpacked type (the previously-stored value).
///
/// The (mutable) field must be `i32`, `i64`, or a subtype of shared
/// `eqref` — stricter than `xchg`'s `anyref` bound because `cmpxchg`
/// compares values.
fn visit_struct_atomic_rmw_cmpxchg(
    &mut self,
    _ordering: Ordering,
    struct_type_index: u32,
    field_index: u32,
) -> Self::Output {
    let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
    let is_valid_type = match field.element_type {
        StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
        StorageType::Val(v) => self
            .resources
            .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
        // Packed (i8/i16) fields are rejected.
        _ => false,
    };
    if !is_valid_type {
        bail!(
            self.offset,
            "invalid type: `struct.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
        );
    }
    let field_ty = field.element_type.unpack();
    // Replacement and expected values, then the struct reference.
    self.pop_operand(Some(field_ty))?;
    self.pop_operand(Some(field_ty))?;
    self.pop_concrete_ref(true, struct_type_index)?;
    self.push_operand(field_ty)?;
    Ok(())
}
3518 | fn visit_array_new(&mut self, type_index: u32) -> Self::Output { |
3519 | let array_ty = self.array_type_at(type_index)?; |
3520 | self.pop_operand(Some(ValType::I32))?; |
3521 | self.pop_operand(Some(array_ty.element_type.unpack()))?; |
3522 | self.push_concrete_ref(false, type_index) |
3523 | } |
/// Validates `array.new_default`: pops an `i32` length and pushes a
/// reference to the concrete array type, provided the element type is
/// defaultable.
fn visit_array_new_default(&mut self, type_index: u32) -> Self::Output {
    let ty = self.array_type_at(type_index)?;
    let val_ty = ty.element_type.unpack();
    // A non-defaultable element type (e.g. a non-nullable reference)
    // has no implicit initial value.
    if !val_ty.is_defaultable() {
        bail!(
            self.offset,
            "invalid `array.new_default`: {val_ty} field is not defaultable"
        );
    }
    self.pop_operand(Some(ValType::I32))?;
    self.push_concrete_ref(false, type_index)
}
3536 | fn visit_array_new_fixed(&mut self, type_index: u32, n: u32) -> Self::Output { |
3537 | let array_ty = self.array_type_at(type_index)?; |
3538 | let elem_ty = array_ty.element_type.unpack(); |
3539 | for _ in 0..n { |
3540 | self.pop_operand(Some(elem_ty))?; |
3541 | } |
3542 | self.push_concrete_ref(false, type_index) |
3543 | } |
/// Validates `array.new_data`: creates an array from a data segment.
/// Pops two `i32`s (segment offset and length per the proposal —
/// TODO confirm operand meaning against the spec) and pushes a
/// reference to the concrete array type.
///
/// The element type must be numeric or `v128`; reference elements
/// cannot be materialized from raw data bytes.
fn visit_array_new_data(&mut self, type_index: u32, data_index: u32) -> Self::Output {
    let array_ty = self.array_type_at(type_index)?;
    let elem_ty = array_ty.element_type.unpack();
    match elem_ty {
        ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
        ValType::Ref(_) => bail!(
            self.offset,
            "type mismatch: array.new_data can only create arrays with numeric and vector elements"
        ),
    }
    // The data segment index itself must also be in bounds.
    self.check_data_segment(data_index)?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ValType::I32))?;
    self.push_concrete_ref(false, type_index)
}
/// Validates `array.new_elem`: creates an array from an element
/// segment. Pops two `i32`s and pushes a reference to the concrete
/// array type.
///
/// The array's element type must be a reference type, and the element
/// segment's type must be a subtype of it.
fn visit_array_new_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
    let array_ty = self.array_type_at(type_index)?;
    let array_ref_ty = match array_ty.element_type.unpack() {
        ValType::Ref(rt) => rt,
        ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
            self.offset,
            "type mismatch: array.new_elem can only create arrays with reference elements"
        ),
    };
    let elem_ref_ty = self.element_type_at(elem_index)?;
    // Covariance check: every element in the segment must be usable as
    // an array element.
    if !self
        .resources
        .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
    {
        bail!(
            self.offset,
            "invalid array.new_elem instruction: element segment {elem_index} type mismatch: \
             expected {array_ref_ty}, found {elem_ref_ty}"
        )
    }
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ValType::I32))?;
    self.push_concrete_ref(false, type_index)
}
/// Validates `array.get`: pops an `i32` index and a reference to the
/// concrete array type, then pushes the element's (unpacked) value
/// type.
///
/// Packed (i8/i16) element types are rejected — the sign-aware
/// `array.get_s`/`array.get_u` forms must be used for those.
fn visit_array_get(&mut self, type_index: u32) -> Self::Output {
    let array_ty = self.array_type_at(type_index)?;
    let elem_ty = array_ty.element_type;
    if elem_ty.is_packed() {
        bail!(
            self.offset,
            "cannot use array.get with packed storage types"
        )
    }
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, type_index)?;
    self.push_operand(elem_ty.unpack())
}
/// Validates `array.atomic.get`.
///
/// Delegates to the non-atomic `array.get` validation, then further
/// restricts the element type: only `i32`, `i64`, and subtypes of
/// shared `anyref` are allowed atomically.
fn visit_array_atomic_get(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
    self.visit_array_get(type_index)?;
    // The `atomic` version has some additional type restrictions.
    let elem_ty = self.array_type_at(type_index)?.element_type;
    let is_valid_type = match elem_ty {
        StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
        // Reference elements are allowed only if they are shared-`anyref`
        // subtypes.
        StorageType::Val(v) => self
            .resources
            .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        // Packed storage types were already rejected by `visit_array_get`.
        _ => false,
    };
    if !is_valid_type {
        bail!(
            self.offset,
            "invalid type: `array.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
        );
    }
    Ok(())
}
/// Validates `array.get_s` (sign-extending get of a packed element):
/// pops an `i32` index and a reference to the concrete array type, then
/// pushes the element's unpacked value type.
///
/// Only packed (i8/i16) element types are allowed; others must use
/// plain `array.get`.
fn visit_array_get_s(&mut self, type_index: u32) -> Self::Output {
    let array_ty = self.array_type_at(type_index)?;
    let elem_ty = array_ty.element_type;
    if !elem_ty.is_packed() {
        bail!(
            self.offset,
            "cannot use array.get_s with non-packed storage types"
        )
    }
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, type_index)?;
    self.push_operand(elem_ty.unpack())
}
/// Validates `array.atomic.get_s`.
///
/// Delegates entirely to `array.get_s` validation; the atomic form adds
/// no extra type restrictions beyond requiring a packed element, which
/// the non-atomic check already enforces.
fn visit_array_atomic_get_s(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
    self.visit_array_get_s(type_index)?;
    // This instruction has the same type restrictions as the non-`atomic` version.
    debug_assert!(matches!(
        self.array_type_at(type_index)?.element_type,
        StorageType::I8 | StorageType::I16
    ));
    Ok(())
}
/// Validates `array.get_u` (zero-extending get of a packed element):
/// pops an `i32` index and a reference to the concrete array type, then
/// pushes the element's unpacked value type.
///
/// Only packed (i8/i16) element types are allowed; others must use
/// plain `array.get`.
fn visit_array_get_u(&mut self, type_index: u32) -> Self::Output {
    let array_ty = self.array_type_at(type_index)?;
    let elem_ty = array_ty.element_type;
    if !elem_ty.is_packed() {
        bail!(
            self.offset,
            "cannot use array.get_u with non-packed storage types"
        )
    }
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, type_index)?;
    self.push_operand(elem_ty.unpack())
}
/// Validates `array.atomic.get_u`.
///
/// Delegates entirely to `array.get_u` validation; the atomic form adds
/// no extra type restrictions beyond requiring a packed element, which
/// the non-atomic check already enforces.
fn visit_array_atomic_get_u(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
    self.visit_array_get_u(type_index)?;
    // This instruction has the same type restrictions as the non-`atomic` version.
    debug_assert!(matches!(
        self.array_type_at(type_index)?.element_type,
        StorageType::I8 | StorageType::I16
    ));
    Ok(())
}
3659 | fn visit_array_set(&mut self, type_index: u32) -> Self::Output { |
3660 | let array_ty = self.mutable_array_type_at(type_index)?; |
3661 | self.pop_operand(Some(array_ty.element_type.unpack()))?; |
3662 | self.pop_operand(Some(ValType::I32))?; |
3663 | self.pop_concrete_ref(true, type_index)?; |
3664 | Ok(()) |
3665 | } |
/// Validates `array.atomic.set`.
///
/// Delegates to the non-atomic `array.set` validation, then further
/// restricts the element type: `i8`, `i16`, `i32`, `i64`, and subtypes
/// of shared `anyref` are allowed atomically.
fn visit_array_atomic_set(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
    self.visit_array_set(type_index)?;
    // The `atomic` version has some additional type restrictions.
    let elem_ty = self.array_type_at(type_index)?.element_type;
    // Unlike `array.atomic.get`, packed elements are allowed here, so
    // this match is exhaustive without a catch-all arm.
    let is_valid_type = match elem_ty {
        StorageType::I8 | StorageType::I16 => true,
        StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
        StorageType::Val(v) => self
            .resources
            .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
    };
    if !is_valid_type {
        bail!(
            self.offset,
            "invalid type: `array.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
        );
    }
    Ok(())
}
/// Validates `array.len`: pops any (possibly shared) `arrayref` and
/// pushes the `i32` length.
fn visit_array_len(&mut self) -> Self::Output {
    self.pop_maybe_shared_ref(AbstractHeapType::Array)?;
    self.push_operand(ValType::I32)
}
/// Validates `array.fill`: pops an `i32` count, a fill value of the
/// element type, an `i32` start index, and a reference to the concrete
/// (mutable) array type. Produces no results.
fn visit_array_fill(&mut self, array_type_index: u32) -> Self::Output {
    let array_ty = self.mutable_array_type_at(array_type_index)?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(array_ty.element_type.unpack()))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, array_type_index)?;
    Ok(())
}
/// Validates `array.copy`: pops an `i32` length, an `i32` source index,
/// a source array reference, an `i32` destination index, and a
/// destination array reference. Produces no results.
///
/// The destination's element type must be mutable, and the source's
/// element type must be compatible: identical for packed storage, or a
/// subtype for value storage.
fn visit_array_copy(&mut self, type_index_dst: u32, type_index_src: u32) -> Self::Output {
    let array_ty_dst = self.mutable_array_type_at(type_index_dst)?;
    let array_ty_src = self.array_type_at(type_index_src)?;
    match (array_ty_dst.element_type, array_ty_src.element_type) {
        // Packed storage must match exactly.
        (StorageType::I8, StorageType::I8) => {}
        (StorageType::I8, ty) => bail!(
            self.offset,
            "array types do not match: expected i8, found {ty}"
        ),
        (StorageType::I16, StorageType::I16) => {}
        (StorageType::I16, ty) => bail!(
            self.offset,
            "array types do not match: expected i16, found {ty}"
        ),
        // Value storage only needs `src <: dst` (covariant copy).
        (StorageType::Val(dst), StorageType::Val(src)) => {
            if !self.resources.is_subtype(src, dst) {
                bail!(
                    self.offset,
                    "array types do not match: expected {dst}, found {src}"
                )
            }
        }
        // Value destination with packed source is never compatible.
        (StorageType::Val(dst), src) => {
            bail!(
                self.offset,
                "array types do not match: expected {dst}, found {src}"
            )
        }
    }
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, type_index_src)?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, type_index_dst)?;
    Ok(())
}
/// Validates `array.init_data`: initializes a slice of an array from a
/// data segment. Pops three `i32`s and a reference to the concrete
/// (mutable) array type. Produces no results.
///
/// Like `array.new_data`, the element type must be numeric or `v128` —
/// references cannot be materialized from raw data bytes.
fn visit_array_init_data(
    &mut self,
    array_type_index: u32,
    array_data_index: u32,
) -> Self::Output {
    let array_ty = self.mutable_array_type_at(array_type_index)?;
    let val_ty = array_ty.element_type.unpack();
    match val_ty {
        ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
        ValType::Ref(_) => bail!(
            self.offset,
            "invalid array.init_data: array type is not numeric or vector"
        ),
    }
    // The data segment index itself must also be in bounds.
    self.check_data_segment(array_data_index)?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, array_type_index)?;
    Ok(())
}
/// Validates `array.init_elem`: initializes a slice of an array from an
/// element segment. Pops three `i32`s and a reference to the concrete
/// (mutable) array type. Produces no results.
///
/// The array's element type must be a reference type and the element
/// segment's type must be a subtype of it (mirrors `array.new_elem`).
fn visit_array_init_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
    let array_ty = self.mutable_array_type_at(type_index)?;
    let array_ref_ty = match array_ty.element_type.unpack() {
        ValType::Ref(rt) => rt,
        ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
            self.offset,
            "type mismatch: array.init_elem can only create arrays with reference elements"
        ),
    };
    let elem_ref_ty = self.element_type_at(elem_index)?;
    // Covariance check: segment elements must be usable as array
    // elements.
    if !self
        .resources
        .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
    {
        bail!(
            self.offset,
            "invalid array.init_elem instruction: element segment {elem_index} type mismatch: \
             expected {array_ref_ty}, found {elem_ref_ty}"
        )
    }
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, type_index)?;
    Ok(())
}
/// Validates `array.atomic.rmw.add`; shared logic lives in
/// `check_array_atomic_rmw`.
fn visit_array_atomic_rmw_add(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
    self.check_array_atomic_rmw("add", type_index)
}
/// Validates `array.atomic.rmw.sub`; shared logic lives in
/// `check_array_atomic_rmw`.
fn visit_array_atomic_rmw_sub(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
    self.check_array_atomic_rmw("sub", type_index)
}
/// Validates `array.atomic.rmw.and`; shared logic lives in
/// `check_array_atomic_rmw`.
fn visit_array_atomic_rmw_and(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
    self.check_array_atomic_rmw("and", type_index)
}
/// Validates `array.atomic.rmw.or`; shared logic lives in
/// `check_array_atomic_rmw`.
fn visit_array_atomic_rmw_or(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
    self.check_array_atomic_rmw("or", type_index)
}
/// Validates `array.atomic.rmw.xor`; shared logic lives in
/// `check_array_atomic_rmw`.
fn visit_array_atomic_rmw_xor(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
    self.check_array_atomic_rmw("xor", type_index)
}
/// Validates `array.atomic.rmw.xchg`: pops the new element value, an
/// `i32` index, and a reference to the concrete (mutable) array type,
/// then pushes the element's unpacked type (the previously-stored
/// value).
///
/// The element type must be `i32`, `i64`, or a subtype of shared
/// `anyref`; packed elements are rejected.
fn visit_array_atomic_rmw_xchg(
    &mut self,
    _ordering: Ordering,
    type_index: u32,
) -> Self::Output {
    let field = self.mutable_array_type_at(type_index)?;
    let is_valid_type = match field.element_type {
        StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
        StorageType::Val(v) => self
            .resources
            .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        // Packed (i8/i16) elements are not exchangeable here.
        _ => false,
    };
    if !is_valid_type {
        bail!(
            self.offset,
            "invalid type: `array.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
        );
    }
    let elem_ty = field.element_type.unpack();
    self.pop_operand(Some(elem_ty))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, type_index)?;
    self.push_operand(elem_ty)?;
    Ok(())
}
/// Validates `array.atomic.rmw.cmpxchg`: pops a replacement value, an
/// expected value, an `i32` index, and a reference to the concrete
/// (mutable) array type, then pushes the element's unpacked type (the
/// previously-stored value).
///
/// The element type must be `i32`, `i64`, or a subtype of shared
/// `eqref` — stricter than `xchg`'s `anyref` bound because `cmpxchg`
/// compares values.
fn visit_array_atomic_rmw_cmpxchg(
    &mut self,
    _ordering: Ordering,
    type_index: u32,
) -> Self::Output {
    let field = self.mutable_array_type_at(type_index)?;
    let is_valid_type = match field.element_type {
        StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
        StorageType::Val(v) => self
            .resources
            .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
        // Packed (i8/i16) elements are rejected.
        _ => false,
    };
    if !is_valid_type {
        bail!(
            self.offset,
            "invalid type: `array.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
        );
    }
    let elem_ty = field.element_type.unpack();
    // Replacement and expected values, index, then the array reference.
    self.pop_operand(Some(elem_ty))?;
    self.pop_operand(Some(elem_ty))?;
    self.pop_operand(Some(ValType::I32))?;
    self.pop_concrete_ref(true, type_index)?;
    self.push_operand(elem_ty)?;
    Ok(())
}
/// Validates `any.convert_extern`: pops a (possibly shared) `externref`
/// and pushes a reference into the `any` hierarchy, preserving the
/// popped reference's nullability and sharedness when it is known.
fn visit_any_convert_extern(&mut self) -> Self::Output {
    let any_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Extern)? {
        // With no concrete type to inspect (unreachable code or an
        // unknown ref), the best-known result is "some `any` ref".
        MaybeType::Bottom | MaybeType::UnknownRef(_) => {
            MaybeType::UnknownRef(Some(AbstractHeapType::Any))
        }
        MaybeType::Known(ty) => {
            // Carry the input's sharedness and nullability over to the
            // resulting `any` reference.
            let shared = self.resources.is_shared(ty);
            let heap_type = HeapType::Abstract {
                shared,
                ty: AbstractHeapType::Any,
            };
            let any_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
            MaybeType::Known(any_ref)
        }
    };
    self.push_operand(any_ref)
}
/// Validates `extern.convert_any`: pops a (possibly shared) `anyref`
/// and pushes a reference into the `extern` hierarchy, preserving the
/// popped reference's nullability and sharedness when it is known.
fn visit_extern_convert_any(&mut self) -> Self::Output {
    let extern_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Any)? {
        // With no concrete type to inspect, the best-known result is
        // "some `extern` ref".
        MaybeType::Bottom | MaybeType::UnknownRef(_) => {
            MaybeType::UnknownRef(Some(AbstractHeapType::Extern))
        }
        MaybeType::Known(ty) => {
            // Carry the input's sharedness and nullability over to the
            // resulting `extern` reference.
            let shared = self.resources.is_shared(ty);
            let heap_type = HeapType::Abstract {
                shared,
                ty: AbstractHeapType::Extern,
            };
            let extern_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
            MaybeType::Known(extern_ref)
        }
    };
    self.push_operand(extern_ref)
}
/// Validates `ref.test` with a non-nullable target; see `check_ref_test`.
fn visit_ref_test_non_null(&mut self, heap_type: HeapType) -> Self::Output {
    self.check_ref_test(false, heap_type)
}
/// Validates `ref.test` with a nullable target; see `check_ref_test`.
fn visit_ref_test_nullable(&mut self, heap_type: HeapType) -> Self::Output {
    self.check_ref_test(true, heap_type)
}
/// Validates `ref.cast` with a non-nullable target; see `check_ref_cast`.
fn visit_ref_cast_non_null(&mut self, heap_type: HeapType) -> Self::Output {
    self.check_ref_cast(false, heap_type)
}
/// Validates `ref.cast` with a nullable target; see `check_ref_cast`.
fn visit_ref_cast_nullable(&mut self, heap_type: HeapType) -> Self::Output {
    self.check_ref_cast(true, heap_type)
}
/// Validates `br_on_cast`: branches to `relative_depth` if the operand
/// casts to `to_ref_type`, otherwise falls through with the operand
/// narrowed to `from \ to` (the difference type).
fn visit_br_on_cast(
    &mut self,
    relative_depth: u32,
    mut from_ref_type: RefType,
    mut to_ref_type: RefType,
) -> Self::Output {
    // Canonicalize/validate both immediates before any subtype checks.
    self.resources
        .check_ref_type(&mut from_ref_type, self.offset)?;
    self.resources
        .check_ref_type(&mut to_ref_type, self.offset)?;

    // The target type must refine the source type.
    if !self
        .resources
        .is_subtype(to_ref_type.into(), from_ref_type.into())
    {
        bail!(
            self.offset,
            "type mismatch: expected {from_ref_type}, found {to_ref_type}"
        );
    }

    let (block_ty, frame_kind) = self.jump(relative_depth)?;
    let mut label_types = self.label_types(block_ty, frame_kind)?;

    // On a taken branch the cast value replaces the topmost label
    // result, so `to_ref_type` must be a subtype of that result.
    match label_types.next_back() {
        Some(label_ty) if self.resources.is_subtype(to_ref_type.into(), label_ty) => {
            self.pop_operand(Some(from_ref_type.into()))?;
        }
        Some(label_ty) => bail!(
            self.offset,
            "type mismatch: casting to type {to_ref_type}, but it does not match \
             label result type {label_ty}"
        ),
        None => bail!(
            self.offset,
            "type mismatch: br_on_cast to label with empty types, must have a reference type"
        ),
    };

    // The remaining label results must also be present on the stack.
    self.pop_push_label_types(label_types)?;
    // Fall-through value: the source type minus the cast-to type.
    let diff_ty = RefType::difference(from_ref_type, to_ref_type);
    self.push_operand(diff_ty)?;
    Ok(())
}
/// Validates `br_on_cast_fail`: branches to `relative_depth` if the
/// operand does NOT cast to `to_ref_type` (carrying the difference type
/// `from \ to`), otherwise falls through with the operand narrowed to
/// `to_ref_type`.
fn visit_br_on_cast_fail(
    &mut self,
    relative_depth: u32,
    mut from_ref_type: RefType,
    mut to_ref_type: RefType,
) -> Self::Output {
    // Canonicalize/validate both immediates before any subtype checks.
    self.resources
        .check_ref_type(&mut from_ref_type, self.offset)?;
    self.resources
        .check_ref_type(&mut to_ref_type, self.offset)?;

    // The target type must refine the source type.
    if !self
        .resources
        .is_subtype(to_ref_type.into(), from_ref_type.into())
    {
        bail!(
            self.offset,
            "type mismatch: expected {from_ref_type}, found {to_ref_type}"
        );
    }

    let (block_ty, frame_kind) = self.jump(relative_depth)?;
    let mut label_tys = self.label_types(block_ty, frame_kind)?;

    // On a taken branch the un-cast value (difference type) replaces
    // the topmost label result, so it must be a subtype of that result.
    let diff_ty = RefType::difference(from_ref_type, to_ref_type);
    match label_tys.next_back() {
        Some(label_ty) if self.resources.is_subtype(diff_ty.into(), label_ty) => {
            self.pop_operand(Some(from_ref_type.into()))?;
        }
        Some(label_ty) => bail!(
            self.offset,
            "type mismatch: expected label result type {label_ty}, found {diff_ty}"
        ),
        None => bail!(
            self.offset,
            "type mismatch: expected a reference type, found nothing"
        ),
    }

    // The remaining label results must also be present on the stack.
    self.pop_push_label_types(label_tys)?;
    // Fall-through value: the successfully-cast type.
    self.push_operand(to_ref_type)?;
    Ok(())
}
/// Validates `ref.i31`: pops an `i32` and pushes an `i31ref`.
fn visit_ref_i31(&mut self) -> Self::Output {
    self.pop_operand(Some(ValType::I32))?;
    self.push_operand(ValType::Ref(RefType::I31))
}
/// Validates `ref.i31_shared`: pops an `i32` and pushes a shared
/// `i31ref`.
fn visit_ref_i31_shared(&mut self) -> Self::Output {
    self.pop_operand(Some(ValType::I32))?;
    self.push_operand(ValType::Ref(
        // Abstract heap types always have a shared variant.
        RefType::I31.shared().expect("i31 is abstract"),
    ))
}
/// Validates `i31.get_s`: pops a (possibly shared) `i31ref` and pushes
/// the sign-extended `i32`.
fn visit_i31_get_s(&mut self) -> Self::Output {
    self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
    self.push_operand(ValType::I32)
}
/// Validates `i31.get_u`: pops a (possibly shared) `i31ref` and pushes
/// the zero-extended `i32`.
fn visit_i31_get_u(&mut self) -> Self::Output {
    self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
    self.push_operand(ValType::I32)
}
/// Validates the legacy exception-handling `try` instruction: pops the
/// block's parameter types and pushes a new `LegacyTry` control frame.
fn visit_try(&mut self, mut ty: BlockType) -> Self::Output {
    self.check_block_type(&mut ty)?;
    // Block parameters are consumed from the stack in reverse order.
    for ty in self.params(ty)?.rev() {
        self.pop_operand(Some(ty))?;
    }
    self.push_ctrl(FrameKind::LegacyTry, ty)?;
    Ok(())
}
4007 | fn visit_catch(&mut self, index: u32) -> Self::Output { |
4008 | let frame = self.pop_ctrl()?; |
4009 | if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch { |
4010 | bail!(self.offset, "catch found outside of an `try` block" ); |
4011 | } |
4012 | // Start a new frame and push `exnref` value. |
4013 | let height = self.operands.len(); |
4014 | let init_height = self.local_inits.push_ctrl(); |
4015 | self.control.push(Frame { |
4016 | kind: FrameKind::LegacyCatch, |
4017 | block_type: frame.block_type, |
4018 | height, |
4019 | unreachable: false, |
4020 | init_height, |
4021 | }); |
4022 | // Push exception argument types. |
4023 | let ty = self.exception_tag_at(index)?; |
4024 | for ty in ty.params() { |
4025 | self.push_operand(*ty)?; |
4026 | } |
4027 | Ok(()) |
4028 | } |
/// Validates the legacy `rethrow` instruction: the depth must name a
/// `catch`/`catch_all` frame (the source of the exception), and the
/// remainder of the block becomes unreachable.
fn visit_rethrow(&mut self, relative_depth: u32) -> Self::Output {
    // This is not a jump, but we need to check that the `rethrow`
    // targets an actual `catch` to get the exception.
    let (_, kind) = self.jump(relative_depth)?;
    if kind != FrameKind::LegacyCatch && kind != FrameKind::LegacyCatchAll {
        bail!(
            self.offset,
            "invalid rethrow label: target was not a `catch` block"
        );
    }
    // `rethrow` never falls through.
    self.unreachable()?;
    Ok(())
}
4042 | fn visit_delegate(&mut self, relative_depth: u32) -> Self::Output { |
4043 | let frame = self.pop_ctrl()?; |
4044 | if frame.kind != FrameKind::LegacyTry { |
4045 | bail!(self.offset, "delegate found outside of an `try` block" ); |
4046 | } |
4047 | // This operation is not a jump, but we need to check the |
4048 | // depth for validity |
4049 | let _ = self.jump(relative_depth)?; |
4050 | for ty in self.results(frame.block_type)? { |
4051 | self.push_operand(ty)?; |
4052 | } |
4053 | Ok(()) |
4054 | } |
/// Validates the legacy `catch_all` instruction: ends the current
/// `try`/`catch` frame and starts a `LegacyCatchAll` frame with the
/// same block type. Unlike `catch`, no exception payload is pushed.
fn visit_catch_all(&mut self) -> Self::Output {
    let frame = self.pop_ctrl()?;
    // A second `catch_all` in the same `try` is rejected, as is a
    // `catch_all` outside `try`/`catch`.
    if frame.kind == FrameKind::LegacyCatchAll {
        bail!(self.offset, "only one catch_all allowed per `try` block");
    } else if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch {
        bail!(self.offset, "catch_all found outside of a `try` block");
    }
    let height = self.operands.len();
    let init_height = self.local_inits.push_ctrl();
    self.control.push(Frame {
        kind: FrameKind::LegacyCatchAll,
        block_type: frame.block_type,
        height,
        unreachable: false,
        init_height,
    });
    Ok(())
}
/// Validates `cont.new` (stack switching): pops a nullable function
/// reference of the continuation's underlying function type and pushes
/// a reference to the concrete continuation type.
fn visit_cont_new(&mut self, type_index: u32) -> Self::Output {
    let cont_ty = self.cont_type_at(type_index)?;
    // `cont_ty.0` is the continuation's underlying function type id.
    let rt = RefType::concrete(true, cont_ty.0);
    self.pop_ref(Some(rt))?;
    self.push_concrete_ref(false, type_index)?;
    Ok(())
}
/// Validates `cont.bind` (stack switching): partially applies a
/// continuation. Given an argument continuation typed
/// `[ts1 ts1'] -> [ts2]` and a result continuation typed
/// `[ts1''] -> [ts2']`, pops the continuation reference plus the `ts1`
/// argument prefix, and pushes a reference to the result continuation
/// type, after checking `[ts1'] -> [ts2] <: [ts1''] -> [ts2']`.
fn visit_cont_bind(&mut self, argument_index: u32, result_index: u32) -> Self::Output {
    // [ts1 ts1'] -> [ts2]
    let arg_cont = self.cont_type_at(argument_index)?;
    let arg_func = self.func_type_of_cont_type(arg_cont);
    // [ts1''] -> [ts2']
    let res_cont = self.cont_type_at(result_index)?;
    let res_func = self.func_type_of_cont_type(res_cont);

    // Verify that the argument's domain is at least as large as the
    // result's domain.
    if arg_func.params().len() < res_func.params().len() {
        bail!(self.offset, "type mismatch in continuation arguments");
    }

    // Number of arguments bound now (the `ts1` prefix).
    let argcnt = arg_func.params().len() - res_func.params().len();

    // Check that [ts1'] -> [ts2] <: [ts1''] -> [ts2']
    // (params contravariant, results covariant, arities equal).
    if !self.is_subtype_many(res_func.params(), &arg_func.params()[argcnt..])
        || arg_func.results().len() != res_func.results().len()
        || !self.is_subtype_many(arg_func.results(), res_func.results())
    {
        bail!(self.offset, "type mismatch in continuation types");
    }

    // Check that the continuation is available on the stack.
    self.pop_concrete_ref(true, argument_index)?;

    // Check that the argument prefix is available on the stack.
    for &ty in arg_func.params().iter().take(argcnt).rev() {
        self.pop_operand(Some(ty))?;
    }

    // Construct the result type.
    self.push_concrete_ref(false, result_index)?;

    Ok(())
}
4117 | fn visit_suspend(&mut self, tag_index: u32) -> Self::Output { |
4118 | let ft = &self.tag_at(tag_index)?; |
4119 | for &ty in ft.params().iter().rev() { |
4120 | self.pop_operand(Some(ty))?; |
4121 | } |
4122 | for &ty in ft.results() { |
4123 | self.push_operand(ty)?; |
4124 | } |
4125 | Ok(()) |
4126 | } |
4127 | fn visit_resume(&mut self, type_index: u32, table: ResumeTable) -> Self::Output { |
4128 | // [ts1] -> [ts2] |
4129 | let ft = self.check_resume_table(table, type_index)?; |
4130 | self.pop_concrete_ref(true, type_index)?; |
4131 | // Check that ts1 are available on the stack. |
4132 | for &ty in ft.params().iter().rev() { |
4133 | self.pop_operand(Some(ty))?; |
4134 | } |
4135 | |
4136 | // Make ts2 available on the stack. |
4137 | for &ty in ft.results() { |
4138 | self.push_operand(ty)?; |
4139 | } |
4140 | Ok(()) |
4141 | } |
4142 | fn visit_resume_throw( |
4143 | &mut self, |
4144 | type_index: u32, |
4145 | tag_index: u32, |
4146 | table: ResumeTable, |
4147 | ) -> Self::Output { |
4148 | // [ts1] -> [ts2] |
4149 | let ft = self.check_resume_table(table, type_index)?; |
4150 | // [ts1'] -> [] |
4151 | let tag_ty = self.exception_tag_at(tag_index)?; |
4152 | if tag_ty.results().len() != 0 { |
4153 | bail!(self.offset, "type mismatch: non-empty tag result type" ) |
4154 | } |
4155 | self.pop_concrete_ref(true, type_index)?; |
4156 | // Check that ts1' are available on the stack. |
4157 | for &ty in tag_ty.params().iter().rev() { |
4158 | self.pop_operand(Some(ty))?; |
4159 | } |
4160 | |
4161 | // Make ts2 available on the stack. |
4162 | for &ty in ft.results() { |
4163 | self.push_operand(ty)?; |
4164 | } |
4165 | Ok(()) |
4166 | } |
/// Validates `switch`, which transfers control directly to another
/// continuation: [t1* (ref null $ct2)] -> [te1*].
///
/// `type_index` names the continuation type being switched away from and
/// `tag_index` names the tag marking the switch point. The last parameter
/// of the switched-to continuation's function type must itself be a
/// continuation reference (the "other" continuation handed back to us).
fn visit_switch(&mut self, type_index: u32, tag_index: u32) -> Self::Output {
    // [t1* (ref null $ct2)] -> [te1*]
    let cont_ty = self.cont_type_at(type_index)?;
    let func_ty = self.func_type_of_cont_type(cont_ty);
    // The tag must carry no payload: [] -> [t*]
    let tag_ty = self.tag_at(tag_index)?;
    if tag_ty.params().len() != 0 {
        bail!(self.offset, "type mismatch: non-empty tag parameter type")
    }
    // Extract the other continuation reference, which by construction is
    // the final parameter of the continuation's function type.
    match func_ty.params().last() {
        Some(ValType::Ref(rt)) if rt.is_concrete_type_ref() => {
            // The reference is concrete, so its index must already have
            // been canonicalized to a core type id by validation.
            let other_cont_id = rt
                .type_index()
                .unwrap()
                .unpack()
                .as_core_type_id()
                .expect("expected canonicalized index");
            let sub_ty = self.resources.sub_type_at_id(other_cont_id);
            let other_cont_ty =
                if let CompositeInnerType::Cont(cont) = &sub_ty.composite_type.inner {
                    cont
                } else {
                    bail!(self.offset, "non-continuation type");
                };
            let other_func_ty = self.func_type_of_cont_type(&other_cont_ty);
            // Both continuations' result types must agree with the tag's
            // result types: ours covariantly into the tag's, and the
            // tag's covariantly into the other continuation's.
            if func_ty.results().len() != tag_ty.results().len()
                || !self.is_subtype_many(func_ty.results(), tag_ty.results())
                || other_func_ty.results().len() != tag_ty.results().len()
                || !self.is_subtype_many(tag_ty.results(), other_func_ty.results())
            {
                bail!(self.offset, "type mismatch in continuation types")
            }

            // Pop the continuation reference.
            self.pop_concrete_ref(true, type_index)?;

            // Check that the arguments t1* are available on the
            // stack (skipping the trailing continuation reference,
            // which was popped above).
            for &ty in func_ty.params().iter().rev().skip(1) {
                self.pop_operand(Some(ty))?;
            }

            // Make the results t2* (the other continuation's parameter
            // types) available on the stack.
            for &ty in other_func_ty.params() {
                self.push_operand(ty)?;
            }
        }
        Some(ty) => bail!(
            self.offset,
            "type mismatch: expected a continuation reference, found {}",
            ty_to_str(*ty)
        ),
        None => bail!(
            self.offset,
            "type mismatch: instruction requires a continuation reference"
        ),
    }
    Ok(())
}
/// `i64.add128` (wide-arithmetic proposal); operand/result typing is
/// delegated to the shared 128-bit binary-op check.
fn visit_i64_add128(&mut self) -> Result<()> {
    self.check_binop128()
}
/// `i64.sub128`; typed identically to `i64.add128`.
fn visit_i64_sub128(&mut self) -> Result<()> {
    self.check_binop128()
}
/// `i64.mul_wide_s` (signed widening multiply); typing is delegated to
/// the shared widening-multiply check.
fn visit_i64_mul_wide_s(&mut self) -> Result<()> {
    self.check_i64_mul_wide()
}
/// `i64.mul_wide_u`; typed identically to `i64.mul_wide_s`.
fn visit_i64_mul_wide_u(&mut self) -> Result<()> {
    self.check_i64_mul_wide()
}
4239 | } |
4240 | |
/// A sum of two iterator types that is itself an iterator, forwarding
/// every call to whichever variant is present. Used to return one of two
/// differently-typed iterators from a single function without boxing.
#[derive(Clone, Debug)]
enum Either<A, B> {
    A(A),
    B(B),
}

impl<A, B> Iterator for Either<A, B>
where
    A: Iterator,
    B: Iterator<Item = A::Item>,
{
    type Item = A::Item;

    // Note: the previous revision had invalid `a: &mut A` type
    // ascriptions inside the match patterns (pasted editor inlay hints),
    // which is not legal Rust; plain bindings are used here.
    fn next(&mut self) -> Option<A::Item> {
        match self {
            Either::A(a) => a.next(),
            Either::B(b) => b.next(),
        }
    }
}

impl<A, B> DoubleEndedIterator for Either<A, B>
where
    A: DoubleEndedIterator,
    B: DoubleEndedIterator<Item = A::Item>,
{
    fn next_back(&mut self) -> Option<A::Item> {
        match self {
            Either::A(a) => a.next_back(),
            Either::B(b) => b.next_back(),
        }
    }
}

impl<A, B> ExactSizeIterator for Either<A, B>
where
    A: ExactSizeIterator,
    B: ExactSizeIterator<Item = A::Item>,
{
    fn len(&self) -> usize {
        match self {
            Either::A(a) => a.len(),
            Either::B(b) => b.len(),
        }
    }
}
4286 | |
/// Shorthand for an iterator that knows its exact length, can be walked
/// from both ends, and is `Clone` + `Debug`. Blanket-implemented for every
/// iterator satisfying those bounds, so it acts as a trait alias.
trait PreciseIterator: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug {}
impl<T: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug> PreciseIterator for T {}
4289 | |
4290 | impl Locals { |
4291 | /// Defines another group of `count` local variables of type `ty`. |
4292 | /// |
4293 | /// Returns `true` if the definition was successful. Local variable |
4294 | /// definition is unsuccessful in case the amount of total variables |
4295 | /// after definition exceeds the allowed maximum number. |
4296 | fn define(&mut self, count: u32, ty: ValType) -> bool { |
4297 | match self.num_locals.checked_add(count) { |
4298 | Some(n) => self.num_locals = n, |
4299 | None => return false, |
4300 | } |
4301 | if self.num_locals > (MAX_WASM_FUNCTION_LOCALS as u32) { |
4302 | return false; |
4303 | } |
4304 | for _ in 0..count { |
4305 | if self.first.len() >= MAX_LOCALS_TO_TRACK { |
4306 | break; |
4307 | } |
4308 | self.first.push(ty); |
4309 | } |
4310 | self.all.push((self.num_locals - 1, ty)); |
4311 | true |
4312 | } |
4313 | |
4314 | /// Returns the number of defined local variables. |
4315 | pub(super) fn len_locals(&self) -> u32 { |
4316 | self.num_locals |
4317 | } |
4318 | |
4319 | /// Returns the type of the local variable at the given index if any. |
4320 | #[inline ] |
4321 | pub(super) fn get(&self, idx: u32) -> Option<ValType> { |
4322 | match self.first.get(idx as usize) { |
4323 | Some(ty) => Some(*ty), |
4324 | None => self.get_bsearch(idx), |
4325 | } |
4326 | } |
4327 | |
4328 | fn get_bsearch(&self, idx: u32) -> Option<ValType> { |
4329 | match self.all.binary_search_by_key(&idx, |(idx, _)| *idx) { |
4330 | // If this index would be inserted at the end of the list, then the |
4331 | // index is out of bounds and we return an error. |
4332 | Err(i) if i == self.all.len() => None, |
4333 | |
4334 | // If `Ok` is returned we found the index exactly, or if `Err` is |
4335 | // returned the position is the one which is the least index |
4336 | // greater that `idx`, which is still the type of `idx` according |
4337 | // to our "compressed" representation. In both cases we access the |
4338 | // list at index `i`. |
4339 | Ok(i) | Err(i) => Some(self.all[i].1), |
4340 | } |
4341 | } |
4342 | } |
4343 | |
4344 | impl<R> ModuleArity for WasmProposalValidator<'_, '_, R> |
4345 | where |
4346 | R: WasmModuleResources, |
4347 | { |
4348 | fn tag_type_arity(&self, at: u32) -> Option<(u32, u32)> { |
4349 | self.0 |
4350 | .resources |
4351 | .tag_at(at) |
4352 | .map(|x| (x.params().len() as u32, x.results().len() as u32)) |
4353 | } |
4354 | |
4355 | fn type_index_of_function(&self, function_idx: u32) -> Option<u32> { |
4356 | self.0.resources.type_index_of_function(function_idx) |
4357 | } |
4358 | |
4359 | fn sub_type_at(&self, type_idx: u32) -> Option<&SubType> { |
4360 | Some(self.0.sub_type_at(type_idx).ok()?) |
4361 | } |
4362 | |
4363 | fn func_type_of_cont_type(&self, c: &ContType) -> Option<&FuncType> { |
4364 | Some(self.0.func_type_of_cont_type(c)) |
4365 | } |
4366 | |
4367 | fn sub_type_of_ref_type(&self, rt: &RefType) -> Option<&SubType> { |
4368 | let id = rt.type_index()?.as_core_type_id()?; |
4369 | Some(self.0.resources.sub_type_at_id(id)) |
4370 | } |
4371 | |
4372 | fn control_stack_height(&self) -> u32 { |
4373 | self.0.control.len() as u32 |
4374 | } |
4375 | |
4376 | fn label_block(&self, depth: u32) -> Option<(BlockType, FrameKind)> { |
4377 | self.0.jump(depth).ok() |
4378 | } |
4379 | } |
4380 | |