1#[cfg(feature = "simd")]
26use crate::VisitSimdOperator;
27use crate::{
28 AbstractHeapType, BinaryReaderError, BlockType, BrTable, Catch, ContType, FieldType, FrameKind,
29 FrameStack, FuncType, GlobalType, Handle, HeapType, Ieee32, Ieee64, MemArg, ModuleArity,
30 RefType, Result, ResumeTable, StorageType, StructType, SubType, TableType, TryTable,
31 UnpackedIndex, ValType, VisitOperator, WasmFeatures, WasmModuleResources,
32 limits::MAX_WASM_FUNCTION_LOCALS,
33};
34use crate::{CompositeInnerType, Ordering, prelude::*};
35use core::ops::{Deref, DerefMut};
36use core::{cmp, iter, mem};
37
38#[cfg(feature = "simd")]
39mod simd;
40
/// Core type-checking state for validating a single function body or
/// constant expression, operator by operator.
pub(crate) struct OperatorValidator {
    /// Types of this function's locals (parameters included).
    pub(super) locals: Locals,
    /// Initialization tracking for non-defaultable locals.
    local_inits: LocalInits,

    /// The WebAssembly proposals enabled for this validation.
    pub(crate) features: WasmFeatures,

    /// Scratch buffer reused by `match_stack_operands` to avoid a
    /// per-call allocation.
    popped_types_tmp: Vec<MaybeType>,

    /// Stack of open control-flow frames (function body, blocks, loops, ...).
    control: Vec<Frame>,
    /// The operand type stack.
    operands: Vec<MaybeType>,

    /// Whether the function being validated is declared `shared`
    /// (set in `new_func` from the function's composite type).
    shared: bool,

    /// Debug-only log of stack operations: `false` = pop, `true` = push.
    #[cfg(debug_assertions)]
    pub(crate) pop_push_log: Vec<bool>,
}
67
/// Tracks which locals are initialized, with an undo log so that
/// initializations performed inside a control frame can be rolled back
/// when the frame exits.
struct LocalInits {
    /// Per-local flag: `true` once the local at that index is initialized.
    local_inits: Vec<bool>,
    /// Undo log of local indices initialized inside currently-open frames.
    inits: Vec<u32>,
    /// Index of the first non-defaultable local, or `u32::MAX` when every
    /// local is defaultable (lets `is_uninit` fast-path early indices).
    first_non_default_local: u32,
}
83
84impl Default for LocalInits {
85 fn default() -> Self {
86 Self {
87 local_inits: Vec::default(),
88 inits: Vec::default(),
89 first_non_default_local: u32::MAX,
90 }
91 }
92}
93
94impl LocalInits {
95 pub fn define_params(&mut self, count: usize) {
97 let Some(new_len) = self.local_inits.len().checked_add(count) else {
98 panic!("tried to define too many function locals as parameters: {count}");
99 };
100 self.local_inits.resize(new_len, true);
101 }
102
103 pub fn define_locals(&mut self, count: u32, ty: ValType) {
105 let Ok(count) = usize::try_from(count) else {
106 panic!("tried to define too many function locals: {count}");
107 };
108 let len = self.local_inits.len();
109 let Some(new_len) = len.checked_add(count) else {
110 panic!("tried to define too many function locals: {count}");
111 };
112 let is_defaultable = ty.is_defaultable();
113 if !is_defaultable && self.first_non_default_local == u32::MAX {
114 self.first_non_default_local = len as u32;
115 }
116 self.local_inits.resize(new_len, is_defaultable);
117 }
118
119 #[inline]
121 pub fn is_uninit(&self, local_index: u32) -> bool {
122 if local_index < self.first_non_default_local {
123 return false;
124 }
125 !self.local_inits[local_index as usize]
126 }
127
128 #[inline]
130 pub fn set_init(&mut self, local_index: u32) {
131 if self.is_uninit(local_index) {
132 self.local_inits[local_index as usize] = true;
133 self.inits.push(local_index);
134 }
135 }
136
137 pub fn push_ctrl(&mut self) -> usize {
139 self.inits.len()
140 }
141
142 pub fn pop_ctrl(&mut self, height: usize) {
146 for local_index in self.inits.split_off(height) {
147 self.local_inits[local_index as usize] = false;
148 }
149 }
150
151 pub fn clear(&mut self) {
155 self.local_inits.clear();
156 self.inits.clear();
157 self.first_non_default_local = u32::MAX;
158 }
159
160 pub fn is_empty(&self) -> bool {
162 self.local_inits.is_empty()
163 }
164}
165
/// Cap on the number of locals kept in a directly-indexed cache.
/// NOTE(review): not referenced in this chunk; presumably consumed by
/// `Locals::define` further down the file — confirm there.
const MAX_LOCALS_TO_TRACK: u32 = 50;
169
/// Storage for the types of a function's locals.
pub(super) struct Locals {
    /// Total number of locals defined so far (parameters included; see
    /// `OperatorValidator::new_func`, which defines params through `self`).
    num_locals: u32,

    /// Types of the first locals, indexable directly by local index.
    /// NOTE(review): likely capped at `MAX_LOCALS_TO_TRACK` entries —
    /// confirm in `Locals::define`/`Locals::get` (not visible in this chunk).
    first: Vec<ValType>,

    /// Remaining locals stored as `(index, type)` pairs; presumably looked
    /// up by (binary) search over the sorted indices — TODO confirm in
    /// `Locals::get`.
    uncached: Vec<(u32, ValType)>,
}
192
/// A single frame on the control-flow stack.
#[derive(Debug, Copy, Clone)]
pub struct Frame {
    /// The kind of construct that opened this frame (block, loop, if, ...).
    pub kind: FrameKind,
    /// The frame's block type, which determines its parameter/result types.
    pub block_type: BlockType,
    /// Operand-stack height at the time this frame was entered.
    pub height: usize,
    /// Whether the rest of this frame's code is unreachable.
    pub unreachable: bool,
    /// Height of the local-initialization undo log when this frame was
    /// entered (restored by `pop_ctrl`).
    pub init_height: usize,
}
213
/// Temporarily pairs an [`OperatorValidator`] with module resources and the
/// current byte offset; most per-operator checks live on this type.
struct OperatorValidatorTemp<'validator, 'resources, T> {
    /// Offset in the binary of the operator being validated, used in errors.
    offset: usize,
    /// The wrapped validator (also reachable through `Deref`/`DerefMut`).
    inner: &'validator mut OperatorValidator,
    /// Type and index-space information for the enclosing module.
    resources: &'resources T,
}
219
/// Heap allocations recycled between validations of different functions.
///
/// Produced by [`OperatorValidator::into_allocations`] and handed back to
/// `OperatorValidator::new_func`/`new_const_expr` to avoid re-allocating.
#[derive(Default)]
pub struct OperatorValidatorAllocations {
    popped_types_tmp: Vec<MaybeType>,
    control: Vec<Frame>,
    operands: Vec<MaybeType>,
    local_inits: LocalInits,
    locals_first: Vec<ValType>,
    locals_uncached: Vec<(u32, ValType)>,
}
229
/// A type on the operand stack that may be only partially known.
#[derive(Debug, Copy, Clone)]
enum MaybeType<T = ValType> {
    /// The bottom type: a subtype of everything (arises in unreachable code).
    Bottom,
    /// A reference whose nullability/sharedness is unknown; `Some` carries
    /// the abstract heap type when that much is known.
    UnknownRef(Option<AbstractHeapType>),
    /// A fully known type.
    Known(T),
}
258
/// `MaybeType` lives on the hot operand stack, so it must stay small;
/// `assert_eq!` (rather than `assert!(.. == ..)`) reports the actual size
/// when this regresses.
#[test]
fn assert_maybe_type_small() {
    assert_eq!(core::mem::size_of::<MaybeType>(), 4);
}
266
267impl core::fmt::Display for MaybeType {
268 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
269 match self {
270 MaybeType::Bottom => write!(f, "bot"),
271 MaybeType::UnknownRef(ty) => {
272 write!(f, "(ref shared? ")?;
273 match ty {
274 Some(ty) => write!(f, "{}bot", ty.as_str(true))?,
275 None => write!(f, "bot")?,
276 }
277 write!(f, ")")
278 }
279 MaybeType::Known(ty) => core::fmt::Display::fmt(ty, f),
280 }
281 }
282}
283
284impl From<ValType> for MaybeType {
285 fn from(ty: ValType) -> MaybeType {
286 MaybeType::Known(ty)
287 }
288}
289
290impl From<RefType> for MaybeType {
291 fn from(ty: RefType) -> MaybeType {
292 let ty: ValType = ty.into();
293 ty.into()
294 }
295}
296impl From<MaybeType<RefType>> for MaybeType<ValType> {
297 fn from(ty: MaybeType<RefType>) -> MaybeType<ValType> {
298 match ty {
299 MaybeType::Bottom => MaybeType::Bottom,
300 MaybeType::UnknownRef(ty) => MaybeType::UnknownRef(ty),
301 MaybeType::Known(t) => MaybeType::Known(t.into()),
302 }
303 }
304}
305
306impl MaybeType<RefType> {
307 fn as_non_null(&self) -> MaybeType<RefType> {
308 match self {
309 MaybeType::Bottom => MaybeType::Bottom,
310 MaybeType::UnknownRef(ty) => MaybeType::UnknownRef(*ty),
311 MaybeType::Known(ty) => MaybeType::Known(ty.as_non_null()),
312 }
313 }
314
315 fn is_maybe_shared(&self, resources: &impl WasmModuleResources) -> Option<bool> {
316 match self {
317 MaybeType::Bottom => None,
318 MaybeType::UnknownRef(_) => None,
319 MaybeType::Known(ty) => Some(resources.is_shared(*ty)),
320 }
321 }
322}
323
324impl OperatorValidator {
325 fn new(features: &WasmFeatures, allocs: OperatorValidatorAllocations) -> Self {
326 let OperatorValidatorAllocations {
327 popped_types_tmp,
328 control,
329 operands,
330 local_inits,
331 locals_first,
332 locals_uncached,
333 } = allocs;
334 debug_assert!(popped_types_tmp.is_empty());
335 debug_assert!(control.is_empty());
336 debug_assert!(operands.is_empty());
337 debug_assert!(local_inits.is_empty());
338 debug_assert!(local_inits.is_empty());
339 debug_assert!(locals_first.is_empty());
340 debug_assert!(locals_uncached.is_empty());
341 OperatorValidator {
342 locals: Locals {
343 num_locals: 0,
344 first: locals_first,
345 uncached: locals_uncached,
346 },
347 local_inits,
348 features: *features,
349 popped_types_tmp,
350 operands,
351 control,
352 shared: false,
353 #[cfg(debug_assertions)]
354 pop_push_log: vec![],
355 }
356 }
357
358 pub fn new_func<T>(
364 ty: u32,
365 offset: usize,
366 features: &WasmFeatures,
367 resources: &T,
368 allocs: OperatorValidatorAllocations,
369 ) -> Result<Self>
370 where
371 T: WasmModuleResources,
372 {
373 let mut ret = OperatorValidator::new(features, allocs);
374 ret.control.push(Frame {
375 kind: FrameKind::Block,
376 block_type: BlockType::FuncType(ty),
377 height: 0,
378 unreachable: false,
379 init_height: 0,
380 });
381
382 let sub_ty = OperatorValidatorTemp {
385 offset,
386 inner: &mut ret,
387 resources,
388 }
389 .sub_type_at(ty)?;
390
391 if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner {
393 for ty in func_ty.params() {
394 ret.locals.define(1, *ty);
395 }
396 ret.local_inits.define_params(func_ty.params().len());
397 } else {
398 bail!(offset, "expected func type at index {ty}, found {sub_ty}")
399 }
400
401 if sub_ty.composite_type.shared {
404 ret.shared = true;
405 }
406 Ok(ret)
407 }
408
409 pub fn new_const_expr(
413 features: &WasmFeatures,
414 ty: ValType,
415 allocs: OperatorValidatorAllocations,
416 ) -> Self {
417 let mut ret = OperatorValidator::new(features, allocs);
418 ret.control.push(Frame {
419 kind: FrameKind::Block,
420 block_type: BlockType::Type(ty),
421 height: 0,
422 unreachable: false,
423 init_height: 0,
424 });
425 ret
426 }
427
428 pub fn define_locals(
429 &mut self,
430 offset: usize,
431 count: u32,
432 mut ty: ValType,
433 resources: &impl WasmModuleResources,
434 ) -> Result<()> {
435 resources.check_value_type(&mut ty, &self.features, offset)?;
436 if count == 0 {
437 return Ok(());
438 }
439 if !self.locals.define(count, ty) {
440 return Err(BinaryReaderError::new(
441 "too many locals: locals exceed maximum",
442 offset,
443 ));
444 }
445 self.local_inits.define_locals(count, ty);
446 Ok(())
447 }
448
449 pub fn operand_stack_height(&self) -> usize {
451 self.operands.len()
452 }
453
454 pub fn peek_operand_at(&self, depth: usize) -> Option<Option<ValType>> {
465 Some(match self.operands.iter().rev().nth(depth)? {
466 MaybeType::Known(t) => Some(*t),
467 MaybeType::Bottom | MaybeType::UnknownRef(..) => None,
468 })
469 }
470
471 pub fn control_stack_height(&self) -> usize {
473 self.control.len()
474 }
475
476 pub(crate) fn jump(&self, depth: u32) -> Option<(BlockType, FrameKind)> {
481 assert!(!self.control.is_empty());
482 let i = (self.control.len() - 1).checked_sub(depth as usize)?;
483 let frame = &self.control[i];
484 Some((frame.block_type, frame.kind))
485 }
486
487 pub fn get_frame(&self, depth: usize) -> Option<&Frame> {
488 self.control.iter().rev().nth(depth)
489 }
490
491 pub fn with_resources<'a, 'validator, 'resources, T>(
493 &'validator mut self,
494 resources: &'resources T,
495 offset: usize,
496 ) -> impl VisitOperator<'a, Output = Result<()>> + ModuleArity + FrameStack + 'validator
497 where
498 T: WasmModuleResources,
499 'resources: 'validator,
500 {
501 WasmProposalValidator(OperatorValidatorTemp {
502 offset,
503 inner: self,
504 resources,
505 })
506 }
507
508 #[cfg(feature = "simd")]
511 pub fn with_resources_simd<'a, 'validator, 'resources, T>(
512 &'validator mut self,
513 resources: &'resources T,
514 offset: usize,
515 ) -> impl VisitSimdOperator<'a, Output = Result<()>> + ModuleArity + 'validator
516 where
517 T: WasmModuleResources,
518 'resources: 'validator,
519 {
520 WasmProposalValidator(OperatorValidatorTemp {
521 offset,
522 inner: self,
523 resources,
524 })
525 }
526
527 pub fn into_allocations(mut self) -> OperatorValidatorAllocations {
528 fn clear<T>(mut tmp: Vec<T>) -> Vec<T> {
529 tmp.clear();
530 tmp
531 }
532 OperatorValidatorAllocations {
533 popped_types_tmp: clear(self.popped_types_tmp),
534 control: clear(self.control),
535 operands: clear(self.operands),
536 local_inits: {
537 self.local_inits.clear();
538 self.local_inits
539 },
540 locals_first: clear(self.locals.first),
541 locals_uncached: clear(self.locals.uncached),
542 }
543 }
544
545 fn record_pop(&mut self) {
546 #[cfg(debug_assertions)]
547 {
548 self.pop_push_log.push(false);
549 }
550 }
551
552 fn record_push(&mut self) {
553 #[cfg(debug_assertions)]
554 {
555 self.pop_push_log.push(true);
556 }
557 }
558}
559
/// Lets the temp wrapper be used anywhere a shared `OperatorValidator`
/// reference is expected.
impl<R> Deref for OperatorValidatorTemp<'_, '_, R> {
    type Target = OperatorValidator;
    fn deref(&self) -> &OperatorValidator {
        self.inner
    }
}
566
/// Mutable counterpart of the `Deref` impl above.
impl<R> DerefMut for OperatorValidatorTemp<'_, '_, R> {
    fn deref_mut(&mut self) -> &mut OperatorValidator {
        self.inner
    }
}
572
573impl<'resources, R> OperatorValidatorTemp<'_, 'resources, R>
574where
575 R: WasmModuleResources,
576{
577 fn push_operand<T>(&mut self, ty: T) -> Result<()>
583 where
584 T: Into<MaybeType>,
585 {
586 let maybe_ty = ty.into();
587
588 if cfg!(debug_assertions) {
589 match maybe_ty {
590 MaybeType::Known(ValType::Ref(r)) => match r.heap_type() {
591 HeapType::Concrete(index) => {
592 debug_assert!(
593 matches!(index, UnpackedIndex::Id(_)),
594 "only ref types referencing `CoreTypeId`s can \
595 be pushed to the operand stack"
596 );
597 }
598 _ => {}
599 },
600 _ => {}
601 }
602 }
603
604 self.operands.push(maybe_ty);
605 self.record_push();
606 Ok(())
607 }
608
    /// Pushes a (possibly nullable) reference to the concrete type at
    /// module index `type_index`, canonicalizing the heap type first.
    fn push_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<()> {
        let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index));

        // Canonicalizes the module-relative index into a `CoreTypeId`.
        self.resources.check_heap_type(&mut heap_ty, self.offset)?;
        debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_))));

        let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| {
            format_err!(self.offset, "implementation limit: type index too large")
        })?;

        self.push_operand(ref_ty)
    }
622
    /// Pops a (possibly nullable) reference to the concrete type at module
    /// index `type_index`, canonicalizing the heap type first.
    fn pop_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<MaybeType> {
        let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index));

        // Canonicalizes the module-relative index into a `CoreTypeId`.
        self.resources.check_heap_type(&mut heap_ty, self.offset)?;
        debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_))));

        let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| {
            format_err!(self.offset, "implementation limit: type index too large")
        })?;

        self.pop_operand(Some(ref_ty.into()))
    }
636
637 fn pop_push_label_types(
640 &mut self,
641 label_types: impl PreciseIterator<Item = ValType>,
642 ) -> Result<()> {
643 for ty in label_types.clone().rev() {
644 self.pop_operand(Some(ty))?;
645 }
646 for ty in label_types {
647 self.push_operand(ty)?;
648 }
649 Ok(())
650 }
651
    /// Pops one operand, checking it against `expected` when given.
    ///
    /// The inlined fast path covers the common case: an exactly-matching
    /// known type popped from above the current frame's base height.
    /// Everything else (subtyping, bottom types, underflow diagnostics)
    /// defers to the cold `_pop_operand` slow path.
    fn pop_operand(&mut self, expected: Option<ValType>) -> Result<MaybeType> {
        let popped = match self.operands.pop() {
            Some(MaybeType::Known(actual_ty)) => {
                if Some(actual_ty) == expected {
                    if let Some(control) = self.control.last() {
                        // Only pops that stay at or above the enclosing
                        // frame's base height may take the fast path.
                        if self.operands.len() >= control.height {
                            self.record_pop();
                            return Ok(MaybeType::Known(actual_ty));
                        }
                    }
                }
                Some(MaybeType::Known(actual_ty))
            }
            other => other,
        };

        // Slow path: hand back whatever we speculatively popped.
        self._pop_operand(expected, popped)
    }
699
    /// Slow path of [`Self::pop_operand`]: full subtype checking plus
    /// handling of bottom types, unknown refs, and frame-height underflow.
    ///
    /// `popped` is the operand the fast path speculatively removed; it is
    /// pushed back first so the height checks below see a consistent stack.
    #[cold]
    fn _pop_operand(
        &mut self,
        expected: Option<ValType>,
        popped: Option<MaybeType>,
    ) -> Result<MaybeType> {
        // Undo the fast path's speculative pop (no-op when `None`).
        self.operands.extend(popped);
        let control = self.control.last().unwrap();
        let actual = if self.operands.len() == control.height && control.unreachable {
            // Unreachable code may conjure operands of the bottom type.
            MaybeType::Bottom
        } else {
            if self.operands.len() == control.height {
                // Popping below the current frame's base height is reported
                // as a type error, even if outer frames still have operands.
                let desc = match expected {
                    Some(ty) => ty_to_str(ty),
                    None => "a type".into(),
                };
                bail!(
                    self.offset,
                    "type mismatch: expected {desc} but nothing on stack"
                )
            } else {
                self.operands.pop().unwrap()
            }
        };
        if let Some(expected) = expected {
            match (actual, expected) {
                // Bottom is a subtype of everything.
                (MaybeType::Bottom, _) => {}

                (MaybeType::UnknownRef(actual_ty), ValType::Ref(expected)) => {
                    if let Some(actual) = actual_ty {
                        // The unknown ref's sharedness is undetermined, so
                        // borrow `expected`'s to build a comparable type.
                        let expected_shared = self.resources.is_shared(expected);
                        let actual = RefType::new(
                            false,
                            HeapType::Abstract {
                                shared: expected_shared,
                                ty: actual,
                            },
                        )
                        .unwrap();
                        if !self.resources.is_subtype(actual.into(), expected.into()) {
                            bail!(
                                self.offset,
                                "type mismatch: expected {}, found {}",
                                ty_to_str(expected.into()),
                                ty_to_str(actual.into())
                            );
                        }
                    }
                }

                (MaybeType::Known(actual), expected) => {
                    if !self.resources.is_subtype(actual, expected) {
                        bail!(
                            self.offset,
                            "type mismatch: expected {}, found {}",
                            ty_to_str(expected),
                            ty_to_str(actual)
                        );
                    }
                }

                // An unknown ref can never satisfy a numeric/vector type.
                (
                    MaybeType::UnknownRef(..),
                    ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128,
                ) => {
                    bail!(
                        self.offset,
                        "type mismatch: expected {}, found heap type",
                        ty_to_str(expected)
                    )
                }
            }
        }
        self.record_pop();
        Ok(actual)
    }
787
788 fn match_operand(
790 &mut self,
791 actual: ValType,
792 expected: ValType,
793 ) -> Result<(), BinaryReaderError> {
794 self.push_operand(actual)?;
795 self.pop_operand(Some(expected))?;
796 Ok(())
797 }
798
    /// Checks that the top of the operand stack matches `expected_tys`
    /// without (net) changing the stack: each expected type is popped
    /// (performing the subtype check) and the actual types are re-pushed in
    /// their original order.
    fn match_stack_operands(
        &mut self,
        expected_tys: impl PreciseIterator<Item = ValType> + 'resources,
    ) -> Result<()> {
        // Move the scratch buffer out of `self` so it can be filled while
        // `self` is mutably borrowed by the pop/push calls below.
        let mut popped_types_tmp = mem::take(&mut self.popped_types_tmp);
        debug_assert!(popped_types_tmp.is_empty());
        popped_types_tmp.reserve(expected_tys.len());

        for expected_ty in expected_tys.rev() {
            let actual_ty = self.pop_operand(Some(expected_ty))?;
            popped_types_tmp.push(actual_ty);
        }
        for ty in popped_types_tmp.drain(..).rev() {
            self.push_operand(ty)?;
        }

        // Restore the (drained) scratch buffer so its capacity is reused.
        debug_assert!(self.popped_types_tmp.is_empty());
        self.popped_types_tmp = popped_types_tmp;
        Ok(())
    }
820
821 fn pop_ref(&mut self, expected: Option<RefType>) -> Result<MaybeType<RefType>> {
823 match self.pop_operand(expected.map(|t| t.into()))? {
824 MaybeType::Bottom => Ok(MaybeType::UnknownRef(None)),
825 MaybeType::UnknownRef(ty) => Ok(MaybeType::UnknownRef(ty)),
826 MaybeType::Known(ValType::Ref(rt)) => Ok(MaybeType::Known(rt)),
827 MaybeType::Known(ty) => bail!(
828 self.offset,
829 "type mismatch: expected ref but found {}",
830 ty_to_str(ty)
831 ),
832 }
833 }
834
    /// Pops a reference whose heap type must be a subtype of the abstract
    /// heap type `expected`, accepting both shared and unshared references:
    /// the expected type's sharedness is taken from the popped value so one
    /// check covers both flavors.
    fn pop_maybe_shared_ref(&mut self, expected: AbstractHeapType) -> Result<MaybeType<RefType>> {
        let actual = match self.pop_ref(None)? {
            MaybeType::Bottom => return Ok(MaybeType::Bottom),
            MaybeType::UnknownRef(None) => return Ok(MaybeType::UnknownRef(None)),
            MaybeType::UnknownRef(Some(actual)) => {
                // Sharedness is unknown here, so only the abstract
                // heap-type hierarchy can be compared.
                if !actual.is_subtype_of(expected) {
                    bail!(
                        self.offset,
                        "type mismatch: expected subtype of {}, found {}",
                        expected.as_str(false),
                        actual.as_str(false),
                    )
                }
                return Ok(MaybeType::UnknownRef(Some(actual)));
            }
            MaybeType::Known(ty) => ty,
        };
        // Mirror the actual type's sharedness when building the expected
        // reference type for the subtype check.
        let is_actual_shared = self.resources.is_shared(actual);
        let expected = RefType::new(
            true,
            HeapType::Abstract {
                shared: is_actual_shared,
                ty: expected,
            },
        )
        .unwrap();

        if !self.resources.is_subtype(actual.into(), expected.into()) {
            bail!(
                self.offset,
                "type mismatch: expected subtype of {expected}, found {actual}",
            )
        }
        Ok(MaybeType::Known(actual))
    }
880
881 fn local(&self, idx: u32) -> Result<ValType> {
884 match self.locals.get(idx) {
885 Some(ty) => Ok(ty),
886 None => bail!(
887 self.offset,
888 "unknown local {}: local index out of bounds",
889 idx
890 ),
891 }
892 }
893
894 fn unreachable(&mut self) -> Result<()> {
897 let control = self.control.last_mut().unwrap();
898 control.unreachable = true;
899 let new_height = control.height;
900 self.operands.truncate(new_height);
901 Ok(())
902 }
903
904 fn push_ctrl(&mut self, kind: FrameKind, ty: BlockType) -> Result<()> {
911 let height = self.operands.len();
914 let init_height = self.local_inits.push_ctrl();
915 self.control.push(Frame {
916 kind,
917 block_type: ty,
918 height,
919 unreachable: false,
920 init_height,
921 });
922 for ty in self.params(ty)? {
925 self.push_operand(ty)?;
926 }
927 Ok(())
928 }
929
    /// Closes the current control frame: rolls back local initializations,
    /// pops the frame's result types, and verifies the operand stack is
    /// back at the frame's entry height.
    fn pop_ctrl(&mut self) -> Result<Frame> {
        // Read what we need up front; the frame itself is popped last so
        // `pop_operand` still sees it as the innermost frame.
        let frame = self.control.last().unwrap();
        let ty = frame.block_type;
        let height = frame.height;
        let init_height = frame.init_height;

        // Locals initialized inside this frame become uninitialized again.
        self.local_inits.pop_ctrl(init_height);

        for ty in self.results(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }

        // After popping the results, nothing extra may remain on the stack.
        if self.operands.len() != height {
            bail!(
                self.offset,
                "type mismatch: values remaining on stack at end of block"
            );
        }

        Ok(self.control.pop().unwrap())
    }
963
964 fn jump(&self, depth: u32) -> Result<(BlockType, FrameKind)> {
969 match self.inner.jump(depth) {
970 Some(tup) => Ok(tup),
971 None => bail!(self.offset, "unknown label: branch depth too large"),
972 }
973 }
974
975 fn check_memory_index(&self, memory_index: u32) -> Result<ValType> {
978 match self.resources.memory_at(memory_index) {
979 Some(mem) => Ok(mem.index_type()),
980 None => bail!(self.offset, "unknown memory {}", memory_index),
981 }
982 }
983
    /// Validates a memory immediate and returns the memory's index type.
    fn check_memarg(&self, memarg: MemArg) -> Result<ValType> {
        let index_ty = self.check_memory_index(memarg.memory)?;
        // Alignment may not exceed the access's natural alignment.
        if memarg.align > memarg.max_align {
            bail!(
                self.offset,
                "malformed memop alignment: alignment must not be larger than natural"
            );
        }
        // 32-bit memories cannot encode offsets beyond 2**32.
        if index_ty == ValType::I32 && memarg.offset > u64::from(u32::MAX) {
            bail!(self.offset, "offset out of range: must be <= 2**32");
        }
        Ok(index_ty)
    }
999
1000 fn check_floats_enabled(&self) -> Result<()> {
1001 if !self.features.floats() {
1002 bail!(self.offset, "floating-point instruction disallowed");
1003 }
1004 Ok(())
1005 }
1006
1007 fn check_shared_memarg(&self, memarg: MemArg) -> Result<ValType> {
1008 if memarg.align != memarg.max_align {
1009 bail!(
1010 self.offset,
1011 "atomic instructions must always specify maximum alignment"
1012 );
1013 }
1014 self.check_memory_index(memarg.memory)
1015 }
1016
    /// Validates (and canonicalizes in place) a block type immediate.
    fn check_block_type(&self, ty: &mut BlockType) -> Result<()> {
        match ty {
            BlockType::Empty => Ok(()),
            BlockType::Type(t) => self
                .resources
                .check_value_type(t, &self.features, self.offset),
            // Function-type block types require the multi-value proposal.
            BlockType::FuncType(idx) => {
                if !self.features.multi_value() {
                    bail!(
                        self.offset,
                        "blocks, loops, and ifs may only produce a resulttype \
                         when multi-value is not enabled",
                    );
                }
                self.func_type_at(*idx)?;
                Ok(())
            }
        }
    }
1037
1038 fn type_of_function(&self, function_index: u32) -> Result<&'resources FuncType> {
1041 if let Some(type_index) = self.resources.type_index_of_function(function_index) {
1042 self.func_type_at(type_index)
1043 } else {
1044 bail!(
1045 self.offset,
1046 "unknown function {function_index}: function index out of bounds",
1047 )
1048 }
1049 }
1050
1051 fn check_call_ty(&mut self, ty: &FuncType) -> Result<()> {
1057 for &ty in ty.params().iter().rev() {
1058 debug_assert_type_indices_are_ids(ty);
1059 self.pop_operand(Some(ty))?;
1060 }
1061 for &ty in ty.results() {
1062 debug_assert_type_indices_are_ids(ty);
1063 self.push_operand(ty)?;
1064 }
1065 Ok(())
1066 }
1067
    /// Type-checks a `return_call`-style instruction against callee `ty`.
    ///
    /// The callee's results are pushed after the arguments are popped so
    /// that `check_return` can pop them against this function's result
    /// types; afterwards the remaining code is unreachable.
    fn check_return_call_ty(&mut self, ty: &FuncType) -> Result<()> {
        // The callee's results must exactly satisfy the caller's.
        self.check_func_type_same_results(ty)?;
        for &ty in ty.params().iter().rev() {
            debug_assert_type_indices_are_ids(ty);
            self.pop_operand(Some(ty))?;
        }

        for &ty in ty.results() {
            debug_assert_type_indices_are_ids(ty);
            self.push_operand(ty)?;
        }
        self.check_return()?;

        Ok(())
    }
1085
1086 fn check_call_ref_ty(&mut self, type_index: u32) -> Result<&'resources FuncType> {
1094 let unpacked_index = UnpackedIndex::Module(type_index);
1095 let mut hty = HeapType::Concrete(unpacked_index);
1096 self.resources.check_heap_type(&mut hty, self.offset)?;
1097 let expected = RefType::new(true, hty).expect("hty should be previously validated");
1098 self.pop_ref(Some(expected))?;
1099 self.func_type_at(type_index)
1100 }
1101
1102 fn check_call_indirect_ty(
1112 &mut self,
1113 type_index: u32,
1114 table_index: u32,
1115 ) -> Result<&'resources FuncType> {
1116 let tab = self.table_type_at(table_index)?;
1117 if !self
1118 .resources
1119 .is_subtype(ValType::Ref(tab.element_type), ValType::FUNCREF)
1120 {
1121 bail!(
1122 self.offset,
1123 "type mismatch: indirect calls must go through a table with type <= funcref",
1124 );
1125 }
1126 self.pop_operand(Some(tab.index_type()))?;
1127 self.func_type_at(type_index)
1128 }
1129
1130 fn check_return(&mut self) -> Result<()> {
1133 assert!(!self.control.is_empty());
1134 for ty in self.results(self.control[0].block_type)?.rev() {
1135 self.pop_operand(Some(ty))?;
1136 }
1137 self.unreachable()?;
1138 Ok(())
1139 }
1140
    /// Checks that `callee_ty`'s results can satisfy this function's result
    /// types (same count, each callee result a subtype of the caller's),
    /// as required by tail calls.
    fn check_func_type_same_results(&self, callee_ty: &FuncType) -> Result<()> {
        assert!(!self.control.is_empty());
        // Frame 0 is the function body, so its block type carries the
        // function's own result types.
        let caller_rets = self.results(self.control[0].block_type)?;
        if callee_ty.results().len() != caller_rets.len()
            || !caller_rets
                .zip(callee_ty.results())
                .all(|(caller_ty, callee_ty)| self.resources.is_subtype(*callee_ty, caller_ty))
        {
            // Build readable space-separated type lists only on the error
            // path.
            let caller_rets = self
                .results(self.control[0].block_type)?
                .map(|ty| format!("{ty}"))
                .collect::<Vec<_>>()
                .join(" ");
            let callee_rets = callee_ty
                .results()
                .iter()
                .map(|ty| format!("{ty}"))
                .collect::<Vec<_>>()
                .join(" ");
            bail!(
                self.offset,
                "type mismatch: current function requires result type \
                 [{caller_rets}] but callee returns [{callee_rets}]"
            );
        }
        Ok(())
    }
1170
1171 fn check_cmp_op(&mut self, ty: ValType) -> Result<()> {
1173 self.pop_operand(Some(ty))?;
1174 self.pop_operand(Some(ty))?;
1175 self.push_operand(ValType::I32)?;
1176 Ok(())
1177 }
1178
    /// Like `check_cmp_op`, but additionally requires float support.
    fn check_fcmp_op(&mut self, ty: ValType) -> Result<()> {
        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
        self.check_floats_enabled()?;
        self.check_cmp_op(ty)
    }
1185
1186 fn check_unary_op(&mut self, ty: ValType) -> Result<()> {
1188 self.pop_operand(Some(ty))?;
1189 self.push_operand(ty)?;
1190 Ok(())
1191 }
1192
    /// Like `check_unary_op`, but additionally requires float support.
    fn check_funary_op(&mut self, ty: ValType) -> Result<()> {
        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
        self.check_floats_enabled()?;
        self.check_unary_op(ty)
    }
1199
1200 fn check_conversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
1202 self.pop_operand(Some(from))?;
1203 self.push_operand(into)?;
1204 Ok(())
1205 }
1206
    /// Like `check_conversion_op`, but additionally requires float support.
    fn check_fconversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
        debug_assert!(matches!(into, ValType::F32 | ValType::F64));
        self.check_floats_enabled()?;
        self.check_conversion_op(into, from)
    }
1213
1214 fn check_binary_op(&mut self, ty: ValType) -> Result<()> {
1216 self.pop_operand(Some(ty))?;
1217 self.pop_operand(Some(ty))?;
1218 self.push_operand(ty)?;
1219 Ok(())
1220 }
1221
    /// Like `check_binary_op`, but additionally requires float support.
    fn check_fbinary_op(&mut self, ty: ValType) -> Result<()> {
        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
        self.check_floats_enabled()?;
        self.check_binary_op(ty)
    }
1228
1229 fn check_atomic_load(&mut self, memarg: MemArg, load_ty: ValType) -> Result<()> {
1231 let ty = self.check_shared_memarg(memarg)?;
1232 self.pop_operand(Some(ty))?;
1233 self.push_operand(load_ty)?;
1234 Ok(())
1235 }
1236
1237 fn check_atomic_store(&mut self, memarg: MemArg, store_ty: ValType) -> Result<()> {
1239 let ty = self.check_shared_memarg(memarg)?;
1240 self.pop_operand(Some(store_ty))?;
1241 self.pop_operand(Some(ty))?;
1242 Ok(())
1243 }
1244
1245 fn check_atomic_binary_memory_op(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
1247 let ty = self.check_shared_memarg(memarg)?;
1248 self.pop_operand(Some(op_ty))?;
1249 self.pop_operand(Some(ty))?;
1250 self.push_operand(op_ty)?;
1251 Ok(())
1252 }
1253
1254 fn check_atomic_binary_memory_cmpxchg(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
1256 let ty = self.check_shared_memarg(memarg)?;
1257 self.pop_operand(Some(op_ty))?;
1258 self.pop_operand(Some(op_ty))?;
1259 self.pop_operand(Some(ty))?;
1260 self.push_operand(op_ty)?;
1261 Ok(())
1262 }
1263
1264 fn check_downcast(&mut self, nullable: bool, mut heap_type: HeapType) -> Result<RefType> {
1267 self.resources
1268 .check_heap_type(&mut heap_type, self.offset)?;
1269
1270 let sub_ty = RefType::new(nullable, heap_type).ok_or_else(|| {
1271 BinaryReaderError::new("implementation limit: type index too large", self.offset)
1272 })?;
1273 let sup_ty = RefType::new(true, self.resources.top_type(&heap_type))
1274 .expect("can't panic with non-concrete heap types");
1275
1276 self.pop_ref(Some(sup_ty))?;
1277 Ok(sub_ty)
1278 }
1279
1280 fn check_ref_test(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> {
1283 self.check_downcast(nullable, heap_type)?;
1284 self.push_operand(ValType::I32)
1285 }
1286
1287 fn check_ref_cast(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> {
1290 let sub_ty = self.check_downcast(nullable, heap_type)?;
1291 self.push_operand(sub_ty)
1292 }
1293
1294 fn check_atomic_global_rmw_ty(&self, global_index: u32) -> Result<ValType> {
1297 let ty = self.global_type_at(global_index)?.content_type;
1298 if !(ty == ValType::I32 || ty == ValType::I64) {
1299 bail!(
1300 self.offset,
1301 "invalid type: `global.atomic.rmw.*` only allows `i32` and `i64`"
1302 );
1303 }
1304 Ok(ty)
1305 }
1306
    /// Type-checks a `struct.atomic.rmw.*` instruction on a mutable
    /// `i32`/`i64` field: pops the operand and the struct reference, then
    /// pushes the old field value.
    fn check_struct_atomic_rmw(
        &mut self,
        op: &'static str,
        struct_type_index: u32,
        field_index: u32,
    ) -> Result<()> {
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        // Atomic RMW is restricted to plain i32/i64 fields.
        let field_ty = match field.element_type {
            StorageType::Val(ValType::I32) => ValType::I32,
            StorageType::Val(ValType::I64) => ValType::I64,
            _ => bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.{}` only allows `i32` and `i64`",
                op
            ),
        };
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
1330
    /// Type-checks an `array.atomic.rmw.*` instruction on a mutable
    /// `i32`/`i64` array: pops the operand, the i32 element index, and the
    /// array reference, then pushes the old element value.
    fn check_array_atomic_rmw(&mut self, op: &'static str, type_index: u32) -> Result<()> {
        let field = self.mutable_array_type_at(type_index)?;
        // Atomic RMW is restricted to plain i32/i64 elements.
        let elem_ty = match field.element_type {
            StorageType::Val(ValType::I32) => ValType::I32,
            StorageType::Val(ValType::I64) => ValType::I64,
            _ => bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.{}` only allows `i32` and `i64`",
                op
            ),
        };
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
1350
1351 fn element_type_at(&self, elem_index: u32) -> Result<RefType> {
1352 match self.resources.element_type_at(elem_index) {
1353 Some(ty) => Ok(ty),
1354 None => bail!(
1355 self.offset,
1356 "unknown elem segment {}: segment index out of bounds",
1357 elem_index
1358 ),
1359 }
1360 }
1361
1362 fn sub_type_at(&self, at: u32) -> Result<&'resources SubType> {
1363 self.resources
1364 .sub_type_at(at)
1365 .ok_or_else(|| format_err!(self.offset, "unknown type: type index out of bounds"))
1366 }
1367
1368 fn struct_type_at(&self, at: u32) -> Result<&'resources StructType> {
1369 let sub_ty = self.sub_type_at(at)?;
1370 if let CompositeInnerType::Struct(struct_ty) = &sub_ty.composite_type.inner {
1371 if self.inner.shared && !sub_ty.composite_type.shared {
1372 bail!(
1373 self.offset,
1374 "shared functions cannot access unshared structs",
1375 );
1376 }
1377 Ok(struct_ty)
1378 } else {
1379 bail!(
1380 self.offset,
1381 "expected struct type at index {at}, found {sub_ty}"
1382 )
1383 }
1384 }
1385
1386 fn struct_field_at(&self, struct_type_index: u32, field_index: u32) -> Result<FieldType> {
1387 let field_index = usize::try_from(field_index).map_err(|_| {
1388 BinaryReaderError::new("unknown field: field index out of bounds", self.offset)
1389 })?;
1390 self.struct_type_at(struct_type_index)?
1391 .fields
1392 .get(field_index)
1393 .copied()
1394 .ok_or_else(|| {
1395 BinaryReaderError::new("unknown field: field index out of bounds", self.offset)
1396 })
1397 }
1398
1399 fn mutable_struct_field_at(
1400 &self,
1401 struct_type_index: u32,
1402 field_index: u32,
1403 ) -> Result<FieldType> {
1404 let field = self.struct_field_at(struct_type_index, field_index)?;
1405 if !field.mutable {
1406 bail!(
1407 self.offset,
1408 "invalid struct modification: struct field is immutable"
1409 )
1410 }
1411 Ok(field)
1412 }
1413
1414 fn array_type_at(&self, at: u32) -> Result<FieldType> {
1415 let sub_ty = self.sub_type_at(at)?;
1416 if let CompositeInnerType::Array(array_ty) = &sub_ty.composite_type.inner {
1417 if self.inner.shared && !sub_ty.composite_type.shared {
1418 bail!(
1419 self.offset,
1420 "shared functions cannot access unshared arrays",
1421 );
1422 }
1423 Ok(array_ty.0)
1424 } else {
1425 bail!(
1426 self.offset,
1427 "expected array type at index {at}, found {sub_ty}"
1428 )
1429 }
1430 }
1431
1432 fn mutable_array_type_at(&self, at: u32) -> Result<FieldType> {
1433 let field = self.array_type_at(at)?;
1434 if !field.mutable {
1435 bail!(
1436 self.offset,
1437 "invalid array modification: array is immutable"
1438 )
1439 }
1440 Ok(field)
1441 }
1442
1443 fn func_type_at(&self, at: u32) -> Result<&'resources FuncType> {
1444 let sub_ty = self.sub_type_at(at)?;
1445 if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner {
1446 if self.inner.shared && !sub_ty.composite_type.shared {
1447 bail!(
1448 self.offset,
1449 "shared functions cannot access unshared functions",
1450 );
1451 }
1452 Ok(func_ty)
1453 } else {
1454 bail!(
1455 self.offset,
1456 "expected func type at index {at}, found {sub_ty}"
1457 )
1458 }
1459 }
1460
    /// Resolves the type at index `at`, requiring it to be a continuation
    /// type; rejects unshared continuations reached from a shared function.
    fn cont_type_at(&self, at: u32) -> Result<&ContType> {
        let sub_ty = self.sub_type_at(at)?;
        if let CompositeInnerType::Cont(cont_ty) = &sub_ty.composite_type.inner {
            // `self.inner.shared` is set when validating a *shared function*
            // (see the sibling accessors such as `func_type_at`).
            // NOTE(review): the sibling accessors phrase this error as
            // "shared functions cannot access unshared …"; this one says
            // "shared continuations", which looks inconsistent — confirm
            // intended wording before changing the message.
            if self.inner.shared && !sub_ty.composite_type.shared {
                bail!(
                    self.offset,
                    "shared continuations cannot access unshared continuations",
                );
            }
            Ok(cont_ty)
        } else {
            bail!(self.offset, "non-continuation type {at}",)
        }
    }
1475
1476 fn func_type_of_cont_type(&self, cont_ty: &ContType) -> &'resources FuncType {
1477 let func_id = cont_ty.0.as_core_type_id().expect("valid core type id");
1478 self.resources.sub_type_at_id(func_id).unwrap_func()
1479 }
1480
1481 fn tag_at(&self, at: u32) -> Result<&'resources FuncType> {
1482 self.resources
1483 .tag_at(at)
1484 .ok_or_else(|| format_err!(self.offset, "unknown tag {}: tag index out of bounds", at))
1485 }
1486
1487 fn exception_tag_at(&self, at: u32) -> Result<&'resources FuncType> {
1491 let func_ty = self.tag_at(at)?;
1492 if func_ty.results().len() != 0 {
1493 bail!(
1494 self.offset,
1495 "invalid exception type: non-empty tag result type"
1496 );
1497 }
1498 Ok(func_ty)
1499 }
1500
1501 fn global_type_at(&self, at: u32) -> Result<GlobalType> {
1502 if let Some(ty) = self.resources.global_at(at) {
1503 if self.inner.shared && !ty.shared {
1504 bail!(
1505 self.offset,
1506 "shared functions cannot access unshared globals",
1507 );
1508 }
1509 Ok(ty)
1510 } else {
1511 bail!(self.offset, "unknown global: global index out of bounds");
1512 }
1513 }
1514
1515 fn table_type_at(&self, table: u32) -> Result<TableType> {
1517 match self.resources.table_at(table) {
1518 Some(ty) => {
1519 if self.inner.shared && !ty.shared {
1520 bail!(
1521 self.offset,
1522 "shared functions cannot access unshared tables",
1523 );
1524 }
1525 Ok(ty)
1526 }
1527 None => bail!(
1528 self.offset,
1529 "unknown table {table}: table index out of bounds"
1530 ),
1531 }
1532 }
1533
1534 fn params(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1535 Ok(match ty {
1536 BlockType::Empty | BlockType::Type(_) => Either::B(None.into_iter()),
1537 BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.params().iter().copied()),
1538 })
1539 }
1540
1541 fn results(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1542 Ok(match ty {
1543 BlockType::Empty => Either::B(None.into_iter()),
1544 BlockType::Type(t) => Either::B(Some(t).into_iter()),
1545 BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.results().iter().copied()),
1546 })
1547 }
1548
1549 fn label_types(
1550 &self,
1551 ty: BlockType,
1552 kind: FrameKind,
1553 ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1554 Ok(match kind {
1555 FrameKind::Loop => Either::A(self.params(ty)?),
1556 _ => Either::B(self.results(ty)?),
1557 })
1558 }
1559
1560 fn check_data_segment(&self, data_index: u32) -> Result<()> {
1561 match self.resources.data_count() {
1562 None => bail!(self.offset, "data count section required"),
1563 Some(count) if data_index < count => Ok(()),
1564 Some(_) => bail!(self.offset, "unknown data segment {data_index}"),
1565 }
1566 }
1567
    /// Validates a `resume`/`resume_throw` handler table against the
    /// continuation type at `type_index`, returning that continuation's
    /// underlying function type.
    fn check_resume_table(
        &mut self,
        table: ResumeTable,
        type_index: u32, ) -> Result<&'resources FuncType> {
        let cont_ty = self.cont_type_at(type_index)?;
        // Function type of the continuation being resumed.
        let old_func_ty = self.func_type_of_cont_type(cont_ty);
        for handle in table.handlers {
            match handle {
                // `(on $tag $label)`: a suspension with `tag` branches to
                // `label`, which must end in a continuation reference whose
                // type is compatible with the tag and the resumed
                // continuation.
                Handle::OnLabel { tag, label } => {
                    let tag_ty = self.tag_at(tag)?;
                    let block = self.jump(label)?;
                    // The last label type must be a concrete (typed)
                    // continuation reference.
                    match self.label_types(block.0, block.1)?.last() {
                        Some(ValType::Ref(rt)) if rt.is_concrete_type_ref() => {
                            let sub_ty = self.resources.sub_type_at_id(
                                rt.type_index()
                                    .unwrap()
                                    .as_core_type_id()
                                    .expect("canonicalized index"),
                            );
                            let new_cont = if let CompositeInnerType::Cont(cont) =
                                &sub_ty.composite_type.inner
                            {
                                cont
                            } else {
                                bail!(self.offset, "non-continuation type");
                            };
                            let new_func_ty = self.func_type_of_cont_type(&new_cont);
                            // The new continuation must accept the tag's
                            // results as parameters, and produce the same
                            // results as the resumed continuation.
                            if new_func_ty.params().len() != tag_ty.results().len()
                                || !self.is_subtype_many(new_func_ty.params(), tag_ty.results())
                                || old_func_ty.results().len() != new_func_ty.results().len()
                                || !self
                                    .is_subtype_many(old_func_ty.results(), new_func_ty.results())
                            {
                                bail!(self.offset, "type mismatch in continuation type")
                            }
                            // Label carries the tag's params plus the
                            // trailing continuation reference.
                            let expected_nargs = tag_ty.params().len() + 1;
                            let actual_nargs = self.label_types(block.0, block.1)?.len();
                            if actual_nargs != expected_nargs {
                                bail!(
                                    self.offset,
                                    "type mismatch: expected {expected_nargs} label result(s), but label is annotated with {actual_nargs} results"
                                )
                            }

                            let labeltys =
                                self.label_types(block.0, block.1)?.take(expected_nargs - 1);

                            // NOTE(review): the binding names look swapped
                            // (`tagty` comes from the label iterator,
                            // `lblty` from the tag params), but the check
                            // below is in the correct direction: each tag
                            // param must be a subtype of its label type.
                            for (tagty, &lblty) in labeltys.zip(tag_ty.params()) {
                                if !self.resources.is_subtype(lblty, tagty) {
                                    bail!(
                                        self.offset,
                                        "type mismatch between tag type and label type"
                                    )
                                }
                            }
                        }
                        Some(ty) => {
                            bail!(self.offset, "type mismatch: {}", ty_to_str(ty))
                        }
                        _ => bail!(
                            self.offset,
                            "type mismatch: instruction requires continuation reference type but label has none"
                        ),
                    }
                }
                // `(on $tag switch)`: switch tags must have no parameters.
                Handle::OnSwitch { tag } => {
                    let tag_ty = self.tag_at(tag)?;
                    if tag_ty.params().len() != 0 {
                        bail!(self.offset, "type mismatch: non-empty tag parameter type")
                    }
                }
            }
        }
        Ok(old_func_ty)
    }
1650
1651 fn is_subtype_many(&mut self, ts1: &[ValType], ts2: &[ValType]) -> bool {
1654 debug_assert!(ts1.len() == ts2.len());
1655 ts1.iter()
1656 .zip(ts2.iter())
1657 .all(|(ty1, ty2)| self.resources.is_subtype(*ty1, *ty2))
1658 }
1659
1660 fn check_binop128(&mut self) -> Result<()> {
1661 self.pop_operand(Some(ValType::I64))?;
1662 self.pop_operand(Some(ValType::I64))?;
1663 self.pop_operand(Some(ValType::I64))?;
1664 self.pop_operand(Some(ValType::I64))?;
1665 self.push_operand(ValType::I64)?;
1666 self.push_operand(ValType::I64)?;
1667 Ok(())
1668 }
1669
1670 fn check_i64_mul_wide(&mut self) -> Result<()> {
1671 self.pop_operand(Some(ValType::I64))?;
1672 self.pop_operand(Some(ValType::I64))?;
1673 self.push_operand(ValType::I64)?;
1674 self.push_operand(ValType::I64)?;
1675 Ok(())
1676 }
1677
1678 fn check_enabled(&self, flag: bool, desc: &str) -> Result<()> {
1679 if flag {
1680 return Ok(());
1681 }
1682 bail!(self.offset, "{desc} support is not enabled");
1683 }
1684}
1685
1686pub fn ty_to_str(ty: ValType) -> &'static str {
1687 match ty {
1688 ValType::I32 => "i32",
1689 ValType::I64 => "i64",
1690 ValType::F32 => "f32",
1691 ValType::F64 => "f64",
1692 ValType::V128 => "v128",
1693 ValType::Ref(r) => r.wat(),
1694 }
1695}
1696
/// Wrapper around [`OperatorValidatorTemp`] whose `VisitOperator` impl
/// first checks that the Wasm proposal gating each opcode is enabled in
/// `WasmFeatures`, then delegates to the inner validator.
struct WasmProposalValidator<'validator, 'resources, T>(
    OperatorValidatorTemp<'validator, 'resources, T>,
);
1708
// Generates one visitor method per operator for `WasmProposalValidator`:
// each method enforces the operator's proposal feature flag and then
// forwards to the wrapped validator. Without the `simd` feature some
// rules are never expanded, hence the lint allowance.
#[cfg_attr(not(feature = "simd"), allow(unused_macro_rules))]
macro_rules! validate_proposal {
    ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {
        $(
            fn $visit(&mut self $($(,$arg: $argty)*)?) -> Result<()> {
                validate_proposal!(validate self $proposal / $op);
                self.0.$visit($( $($arg),* )?)
            }
        )*
    };

    // MVP operators are always allowed — no feature check.
    (validate self mvp / $op:ident) => {};

    // memory.fill/copy get no up-front gate here; their validation is
    // handled by the inner validator.
    (validate self $proposal:ident / MemoryFill) => {};
    (validate self $proposal:ident / MemoryCopy) => {};

    // Everything else: require the proposal's feature flag.
    (validate $self:ident $proposal:ident / $op:ident) => {
        $self.0.check_enabled($self.0.features.$proposal(), validate_proposal!(desc $proposal))?
    };

    // Human-readable proposal names for error messages.
    (desc simd) => ("SIMD");
    (desc relaxed_simd) => ("relaxed SIMD");
    (desc threads) => ("threads");
    (desc shared_everything_threads) => ("shared-everything-threads");
    (desc saturating_float_to_int) => ("saturating float to int conversions");
    (desc reference_types) => ("reference types");
    (desc bulk_memory) => ("bulk memory");
    (desc sign_extension) => ("sign extension operations");
    (desc exceptions) => ("exceptions");
    (desc tail_call) => ("tail calls");
    (desc function_references) => ("function references");
    (desc memory_control) => ("memory control");
    (desc gc) => ("gc");
    (desc legacy_exceptions) => ("legacy exceptions");
    (desc stack_switching) => ("stack switching");
    (desc wide_arithmetic) => ("wide arithmetic");
}
1749
impl<'a, T> VisitOperator<'a> for WasmProposalValidator<'_, '_, T>
where
    T: WasmModuleResources,
{
    type Output = Result<()>;

    // SIMD opcodes are routed through the same proposal-gating wrapper.
    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        Some(self)
    }

    // Expands one feature-gated forwarding method per operator.
    crate::for_each_visit_operator!(validate_proposal);
}
1763
// Same feature-gated forwarding, for the SIMD operator set.
#[cfg(feature = "simd")]
impl<'a, T> VisitSimdOperator<'a> for WasmProposalValidator<'_, '_, T>
where
    T: WasmModuleResources,
{
    crate::for_each_visit_simd_operator!(validate_proposal);
}
1771
1772#[track_caller]
1773#[inline]
1774fn debug_assert_type_indices_are_ids(ty: ValType) {
1775 if cfg!(debug_assertions) {
1776 if let ValType::Ref(r) = ty {
1777 if let HeapType::Concrete(idx) = r.heap_type() {
1778 debug_assert!(
1779 matches!(idx, UnpackedIndex::Id(_)),
1780 "type reference should be a `CoreTypeId`, found {idx:?}"
1781 );
1782 }
1783 }
1784 }
1785}
1786
1787impl<'a, T> VisitOperator<'a> for OperatorValidatorTemp<'_, '_, T>
1788where
1789 T: WasmModuleResources,
1790{
1791 type Output = Result<()>;
1792
    // The temp validator handles SIMD opcodes itself.
    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        Some(self)
    }

    // `nop` has no stack effect.
    fn visit_nop(&mut self) -> Self::Output {
        Ok(())
    }
    // `unreachable` makes the rest of the frame polymorphic.
    fn visit_unreachable(&mut self) -> Self::Output {
        self.unreachable()?;
        Ok(())
    }
    // Structured-control entry: canonicalize the block type, pop the
    // block's parameters (in reverse), then push a new control frame.
    fn visit_block(&mut self, mut ty: BlockType) -> Self::Output {
        self.check_block_type(&mut ty)?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::Block, ty)?;
        Ok(())
    }
    fn visit_loop(&mut self, mut ty: BlockType) -> Self::Output {
        self.check_block_type(&mut ty)?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::Loop, ty)?;
        Ok(())
    }
    // `if` additionally pops the i32 condition before the parameters.
    fn visit_if(&mut self, mut ty: BlockType) -> Self::Output {
        self.check_block_type(&mut ty)?;
        self.pop_operand(Some(ValType::I32))?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::If, ty)?;
        Ok(())
    }
1830 fn visit_else(&mut self) -> Self::Output {
1831 let frame = self.pop_ctrl()?;
1832 if frame.kind != FrameKind::If {
1833 bail!(self.offset, "else found outside of an `if` block");
1834 }
1835 self.push_ctrl(FrameKind::Else, frame.block_type)?;
1836 Ok(())
1837 }
    /// Validates `try_table`: the block type itself, plus every catch
    /// clause's tag/label compatibility, before pushing the frame.
    fn visit_try_table(&mut self, mut ty: TryTable) -> Self::Output {
        self.check_block_type(&mut ty.ty)?;
        for ty in self.params(ty.ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        let exn_type = ValType::from(RefType::EXN);
        for catch in ty.catches {
            match catch {
                // `catch`: label receives exactly the tag's payload types.
                Catch::One { tag, label } => {
                    let tag = self.exception_tag_at(tag)?;
                    let (ty, kind) = self.jump(label)?;
                    let params = tag.params();
                    let types = self.label_types(ty, kind)?;
                    if params.len() != types.len() {
                        bail!(
                            self.offset,
                            "type mismatch: catch label must have same number of types as tag"
                        );
                    }
                    for (expected, actual) in types.zip(params) {
                        self.match_operand(*actual, expected)?;
                    }
                }
                // `catch_ref`: payload types plus a trailing exnref.
                Catch::OneRef { tag, label } => {
                    let tag = self.exception_tag_at(tag)?;
                    let (ty, kind) = self.jump(label)?;
                    let tag_params = tag.params().iter().copied();
                    let label_types = self.label_types(ty, kind)?;
                    if tag_params.len() + 1 != label_types.len() {
                        bail!(
                            self.offset,
                            "type mismatch: catch_ref label must have one \
                            more type than tag types",
                        );
                    }
                    for (expected_label_type, actual_tag_param) in
                        label_types.zip(tag_params.chain([exn_type]))
                    {
                        self.match_operand(actual_tag_param, expected_label_type)?;
                    }
                }

                // `catch_all`: label must take nothing.
                Catch::All { label } => {
                    let (ty, kind) = self.jump(label)?;
                    if self.label_types(ty, kind)?.len() != 0 {
                        bail!(
                            self.offset,
                            "type mismatch: catch_all label must have no result types"
                        );
                    }
                }

                // `catch_all_ref`: label takes exactly one exnref.
                Catch::AllRef { label } => {
                    let (ty, kind) = self.jump(label)?;
                    let mut types = self.label_types(ty, kind)?;
                    let ty = match (types.next(), types.next()) {
                        (Some(ty), None) => ty,
                        _ => {
                            bail!(
                                self.offset,
                                "type mismatch: catch_all_ref label must have \
                                exactly one result type"
                            );
                        }
                    };
                    // NOTE(review): message reads "must a subtype" —
                    // looks like a missing "be"; left untouched since it
                    // is a runtime string that tests may pin.
                    if !self.resources.is_subtype(exn_type, ty) {
                        bail!(
                            self.offset,
                            "type mismatch: catch_all_ref label must a \
                            subtype of (ref exn)"
                        );
                    }
                }
            }
        }
        self.push_ctrl(FrameKind::TryTable, ty.ty)?;
        Ok(())
    }
1916 fn visit_throw(&mut self, index: u32) -> Self::Output {
1917 let ty = self.exception_tag_at(index)?;
1919 for ty in ty.clone().params().iter().rev() {
1920 self.pop_operand(Some(*ty))?;
1921 }
1922 debug_assert!(ty.results().is_empty());
1924 self.unreachable()?;
1925 Ok(())
1926 }
    // `throw_ref` consumes an exnref and never falls through.
    fn visit_throw_ref(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::EXNREF))?;
        self.unreachable()?;
        Ok(())
    }
    fn visit_end(&mut self) -> Self::Output {
        let mut frame = self.pop_ctrl()?;

        // An `if` with no `else` gets an implicit empty else arm, which
        // must itself type-check (params must equal results).
        if frame.kind == FrameKind::If {
            self.push_ctrl(FrameKind::Else, frame.block_type)?;
            frame = self.pop_ctrl()?;
        }
        // The frame's results become operands of the enclosing frame.
        for ty in self.results(frame.block_type)? {
            self.push_operand(ty)?;
        }
        if self.control.is_empty() {
            assert_ne!(self.offset, 0);
        }
        Ok(())
    }
    // Unconditional branch: pop the target label's types, then the rest
    // of the frame is unreachable.
    fn visit_br(&mut self, relative_depth: u32) -> Self::Output {
        let (ty, kind) = self.jump(relative_depth)?;
        for ty in self.label_types(ty, kind)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
    // Conditional branch: the label types are popped and re-pushed since
    // execution may fall through.
    fn visit_br_if(&mut self, relative_depth: u32) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let (ty, kind) = self.jump(relative_depth)?;
        let label_types = self.label_types(ty, kind)?;
        self.pop_push_label_types(label_types)?;
        Ok(())
    }
    // Multi-way branch: every target's label types must match the stack,
    // and all targets must have the same arity as the default.
    fn visit_br_table(&mut self, table: BrTable) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let default = self.jump(table.default())?;
        let default_types = self.label_types(default.0, default.1)?;
        for element in table.targets() {
            let relative_depth = element?;
            let block = self.jump(relative_depth)?;
            let label_tys = self.label_types(block.0, block.1)?;
            if label_tys.len() != default_types.len() {
                bail!(
                    self.offset,
                    "type mismatch: br_table target labels have different number of types"
                );
            }
            self.match_stack_operands(label_tys)?;
        }
        for ty in default_types.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
    // Return and the call family: each resolves a function type and
    // defers to the shared (return-)call type-checking helpers.
    fn visit_return(&mut self) -> Self::Output {
        self.check_return()?;
        Ok(())
    }
    fn visit_call(&mut self, function_index: u32) -> Self::Output {
        let ty = self.type_of_function(function_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    fn visit_return_call(&mut self, function_index: u32) -> Self::Output {
        let ty = self.type_of_function(function_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    fn visit_call_ref(&mut self, type_index: u32) -> Self::Output {
        let ty = self.check_call_ref_ty(type_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    fn visit_return_call_ref(&mut self, type_index: u32) -> Self::Output {
        let ty = self.check_call_ref_ty(type_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    fn visit_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        let ty = self.check_call_indirect_ty(type_index, table_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    fn visit_return_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        let ty = self.check_call_indirect_ty(type_index, table_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    fn visit_drop(&mut self) -> Self::Output {
        self.pop_operand(None)?;
        Ok(())
    }
    // Untyped `select`: only numeric/vector operands are allowed; any
    // reference type requires the typed `select (result t)` form.
    fn visit_select(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let ty1 = self.pop_operand(None)?;
        let ty2 = self.pop_operand(None)?;

        let ty = match (ty1, ty2) {
            // Any reference operand (known or not-yet-resolved) is
            // rejected for the untyped form.
            (MaybeType::UnknownRef(..), _)
            | (_, MaybeType::UnknownRef(..))
            | (MaybeType::Known(ValType::Ref(_)), _)
            | (_, MaybeType::Known(ValType::Ref(_))) => {
                bail!(
                    self.offset,
                    "type mismatch: select only takes integral types"
                )
            }

            // Bottom (unreachable) unifies with anything.
            (MaybeType::Bottom, t) | (t, MaybeType::Bottom) => t,

            // Both known: they must be identical.
            (t @ MaybeType::Known(t1), MaybeType::Known(t2)) => {
                if t1 != t2 {
                    bail!(
                        self.offset,
                        "type mismatch: select operands have different types"
                    );
                }
                t
            }
        };
        self.push_operand(ty)?;
        Ok(())
    }
    // Typed `select (result t)`: canonicalize the annotation, then pop
    // condition and two operands of that type.
    fn visit_typed_select(&mut self, mut ty: ValType) -> Self::Output {
        self.resources
            .check_value_type(&mut ty, &self.features, self.offset)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ty)?;
        Ok(())
    }
    // `select` with a multi-value annotation is syntactically possible
    // but always invalid; the single-type case is routed to
    // `visit_typed_select` by the parser.
    fn visit_typed_select_multi(&mut self, tys: Vec<ValType>) -> Self::Output {
        debug_assert!(tys.len() != 1);
        bail!(self.offset, "invalid result arity");
    }
    // `local.get` requires the local to be initialized (non-defaultable
    // locals must be set before first read).
    fn visit_local_get(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        debug_assert_type_indices_are_ids(ty);
        if self.local_inits.is_uninit(local_index) {
            bail!(self.offset, "uninitialized local: {}", local_index);
        }
        self.push_operand(ty)?;
        Ok(())
    }
    // `local.set` marks the local initialized.
    fn visit_local_set(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        self.pop_operand(Some(ty))?;
        self.local_inits.set_init(local_index);
        Ok(())
    }
    // `local.tee` = set + get: initializes and leaves the value on the
    // stack.
    fn visit_local_tee(&mut self, local_index: u32) -> Self::Output {
        let expected_ty = self.local(local_index)?;
        self.pop_operand(Some(expected_ty))?;
        self.local_inits.set_init(local_index);
        self.push_operand(expected_ty)?;
        Ok(())
    }
    fn visit_global_get(&mut self, global_index: u32) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        debug_assert_type_indices_are_ids(ty);
        self.push_operand(ty)?;
        Ok(())
    }
    // Atomic variant: same as `global.get` plus a type restriction.
    // NOTE(review): `global_type_at` is looked up twice (once inside
    // `visit_global_get`) — presumably kept for simplicity; confirm
    // before deduplicating.
    fn visit_global_atomic_get(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
        self.visit_global_get(global_index)?;
        let ty = self.global_type_at(global_index)?.content_type;
        let supertype = RefType::ANYREF.into();
        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
            bail!(
                self.offset,
                "invalid type: `global.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
        let ty = self.global_type_at(global_index)?;
        if !ty.mutable {
            bail!(
                self.offset,
                "global is immutable: cannot modify it with `global.set`"
            );
        }
        self.pop_operand(Some(ty.content_type))?;
        Ok(())
    }
    // Atomic variant: same as `global.set` plus a type restriction.
    fn visit_global_atomic_set(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
        self.visit_global_set(global_index)?;
        let ty = self.global_type_at(global_index)?.content_type;
        let supertype = RefType::ANYREF.into();
        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
            bail!(
                self.offset,
                "invalid type: `global.atomic.set` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // Arithmetic/logical global RMW ops share one numeric-only type
    // check (`check_atomic_global_rmw_ty`) and behave like unary ops on
    // the operand stack (pop value, push old value).
    fn visit_global_atomic_rmw_add(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_sub(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_and(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_or(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_xor(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    // `xchg` additionally permits `anyref` subtypes (a plain swap needs
    // no arithmetic on the value).
    fn visit_global_atomic_rmw_xchg(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        if !(ty == ValType::I32
            || ty == ValType::I64
            || self.resources.is_subtype(ty, RefType::ANYREF.into()))
        {
            bail!(
                self.offset,
                "invalid type: `global.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        self.check_unary_op(ty)
    }
    // `cmpxchg` needs comparison, so references are restricted to
    // `eqref` subtypes; it consumes two operands (expected + replacement).
    fn visit_global_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        if !(ty == ValType::I32
            || ty == ValType::I64
            || self.resources.is_subtype(ty, RefType::EQREF.into()))
        {
            bail!(
                self.offset,
                "invalid type: `global.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        self.check_binary_op(ty)
    }
2218
    // Memory loads: `check_memarg` validates the memory index/alignment
    // and returns the memory's index type (i32 or i64 for memory64),
    // which is popped as the address; the loaded value is pushed.
    fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    // Narrow loads: signed/unsigned variants type identically, so the
    // `_u` forms delegate to their `_s` counterparts.
    fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load8_s(memarg)
    }
    fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load16_s(memarg)
    }
    fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load8_s(memarg)
    }
    fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load16_s(memarg)
    }
    fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load32_s(memarg)
    }
    // Memory stores: pop value then address (reverse of push order).
    fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // memory.size/grow operate in the memory's index type (i32, or i64
    // for memory64).
    fn visit_memory_size(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    fn visit_memory_grow(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(index_ty))?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    // Constants just push their type; float constants are gated on the
    // floats feature.
    fn visit_i32_const(&mut self, _value: i32) -> Self::Output {
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_const(&mut self, _value: i64) -> Self::Output {
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_const(&mut self, _value: Ieee32) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_const(&mut self, _value: Ieee64) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    // Numeric operators. Shared shapes:
    //   check_cmp_op(T):  pop T, pop T, push i32
    //   check_fcmp_op(T): as above, with the floats feature check
    //   check_unary_op(T):  pop T, push T
    //   check_binary_op(T): pop T, pop T, push T
    fn visit_i32_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i64_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I64))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_f32_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f64_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_i32_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_and(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_or(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_xor(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotr(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i64_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
2553 fn visit_i64_div_s(&mut self) -> Self::Output {
2554 self.check_binary_op(ValType::I64)
2555 }
2556 fn visit_i64_div_u(&mut self) -> Self::Output {
2557 self.check_binary_op(ValType::I64)
2558 }
2559 fn visit_i64_rem_s(&mut self) -> Self::Output {
2560 self.check_binary_op(ValType::I64)
2561 }
2562 fn visit_i64_rem_u(&mut self) -> Self::Output {
2563 self.check_binary_op(ValType::I64)
2564 }
2565 fn visit_i64_and(&mut self) -> Self::Output {
2566 self.check_binary_op(ValType::I64)
2567 }
2568 fn visit_i64_or(&mut self) -> Self::Output {
2569 self.check_binary_op(ValType::I64)
2570 }
2571 fn visit_i64_xor(&mut self) -> Self::Output {
2572 self.check_binary_op(ValType::I64)
2573 }
2574 fn visit_i64_shl(&mut self) -> Self::Output {
2575 self.check_binary_op(ValType::I64)
2576 }
2577 fn visit_i64_shr_s(&mut self) -> Self::Output {
2578 self.check_binary_op(ValType::I64)
2579 }
2580 fn visit_i64_shr_u(&mut self) -> Self::Output {
2581 self.check_binary_op(ValType::I64)
2582 }
2583 fn visit_i64_rotl(&mut self) -> Self::Output {
2584 self.check_binary_op(ValType::I64)
2585 }
2586 fn visit_i64_rotr(&mut self) -> Self::Output {
2587 self.check_binary_op(ValType::I64)
2588 }
    // Floating-point unary (`check_funary_op`) and binary (`check_fbinary_op`)
    // operators, parameterized over the operand type.
    fn visit_f32_abs(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_neg(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_ceil(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_floor(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_trunc(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_nearest(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_sqrt(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    // Same set of operators over `f64`.
    fn visit_f64_abs(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_neg(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_ceil(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_floor(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_trunc(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_nearest(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_sqrt(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    // Conversion operators: `check_conversion_op(result, input)` /
    // `check_fconversion_op(result, input)` validate popping the input type
    // and pushing the result type (the float variant differs only in its
    // float-specific checks).
    fn visit_i32_wrap_i64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::I64)
    }
    fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_demote_f64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::F64)
    }
    fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_promote_f32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::F32)
    }
    // Reinterpretations are type-level conversions as well: same pop/push
    // shape as the numeric conversions above.
    fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    // Saturating truncations type-check identically to the trapping ones.
    fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    // Sign-extension operators keep the same value type, so they validate as
    // plain unary operators.
    fn visit_i32_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i64_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend32_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    // Atomic loads/stores: `check_atomic_load`/`check_atomic_store` validate
    // the memarg against the memory and the given result/operand type. The
    // narrow widths (8/16/32) still produce or consume the full `i32`/`i64`.
    fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I32)
    }
    fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I32)
    }
    fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I32)
    }
    fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I32)
    }
    fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I32)
    }
    fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I32)
    }
    fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    // Atomic read-modify-write operators: `check_atomic_binary_memory_op`
    // validates an (address, operand) pair against the memory and pushes a
    // result of the given type; `check_atomic_binary_memory_cmpxchg`
    // additionally takes the `expected` comparison operand.
    fn visit_i32_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i64_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    // Exchange operators have the same (address, value) -> value shape as the
    // arithmetic RMW operators above.
    fn visit_i32_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i64_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    // `memory.atomic.notify` takes (address, count: i32) and returns an i32,
    // which is the same stack shape as a binary RMW over i32.
    fn visit_memory_atomic_notify(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    /// Validates `memory.atomic.wait32`: pops (in stack order) an `i64`
    /// timeout, an `i32` expected value, and an address typed by the memory's
    /// index type, then pushes an `i32` result.
    fn visit_memory_atomic_wait32(&mut self, memarg: MemArg) -> Self::Output {
        // `check_shared_memarg` validates the memory/alignment and yields the
        // memory's index type used for the address operand below.
        let ty = self.check_shared_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?; // timeout
        self.pop_operand(Some(ValType::I32))?; // expected value
        self.pop_operand(Some(ty))?; // address
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    /// Validates `memory.atomic.wait64`; identical to `wait32` except the
    /// expected value is an `i64`.
    fn visit_memory_atomic_wait64(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_shared_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?; // timeout
        self.pop_operand(Some(ValType::I64))?; // expected value
        self.pop_operand(Some(ty))?; // address
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    /// `atomic.fence` has no operands and no results, so there is nothing to
    /// type-check here.
    fn visit_atomic_fence(&mut self) -> Self::Output {
        Ok(())
    }
    /// Validates `ref.null ht`, pushing a nullable reference to `ht`.
    fn visit_ref_null(&mut self, mut heap_type: HeapType) -> Self::Output {
        // If a nullable reference to this heap type is representable, first
        // verify that the required features for that reference type are
        // enabled.
        if let Some(ty) = RefType::new(true, heap_type) {
            self.features
                .check_ref_type(ty)
                .map_err(|e| BinaryReaderError::new(e, self.offset))?;
        }
        // Validate the heap type against the module's resources; this takes
        // `&mut` so the heap type may be canonicalized in place.
        self.resources
            .check_heap_type(&mut heap_type, self.offset)?;
        let ty = ValType::Ref(
            RefType::new(true, heap_type).expect("existing heap types should be within our limits"),
        );
        self.push_operand(ty)?;
        Ok(())
    }
3012
3013 fn visit_ref_as_non_null(&mut self) -> Self::Output {
3014 let ty = self.pop_ref(None)?.as_non_null();
3015 self.push_operand(ty)?;
3016 Ok(())
3017 }
    /// Validates `br_on_null`: pops a reference, checks the branch target's
    /// label types against the remaining stack, then pushes the reference
    /// back with its nullability removed (the fall-through value).
    fn visit_br_on_null(&mut self, relative_depth: u32) -> Self::Output {
        let ref_ty = self.pop_ref(None)?.as_non_null();
        let (ft, kind) = self.jump(relative_depth)?;
        let label_types = self.label_types(ft, kind)?;
        // The branch target's expected types must be present beneath the
        // popped reference; pop and re-push them to check.
        self.pop_push_label_types(label_types)?;
        self.push_operand(ref_ty)?;
        Ok(())
    }
    /// Validates `br_on_non_null`: the branch target's final label type must
    /// be a reference type; the operand is checked against its nullable form
    /// and is consumed on the fall-through path.
    fn visit_br_on_non_null(&mut self, relative_depth: u32) -> Self::Output {
        let (ft, kind) = self.jump(relative_depth)?;

        // The last label type is the reference sent to the target; the rest
        // must already be on the stack beneath the operand.
        let mut label_types = self.label_types(ft, kind)?;
        let expected = match label_types.next_back() {
            None => bail!(
                self.offset,
                "type mismatch: br_on_non_null target has no label types",
            ),
            Some(ValType::Ref(ty)) => ty,
            Some(_) => bail!(
                self.offset,
                "type mismatch: br_on_non_null target does not end with heap type",
            ),
        };
        // The operand may be nullable here; only a non-null value actually
        // branches.
        self.pop_ref(Some(expected.nullable()))?;

        // Check the remaining (non-reference) label types.
        self.pop_push_label_types(label_types)?;
        Ok(())
    }
3046 fn visit_ref_is_null(&mut self) -> Self::Output {
3047 self.pop_ref(None)?;
3048 self.push_operand(ValType::I32)?;
3049 Ok(())
3050 }
    /// Validates `ref.func f`: `f` must be a valid, *declared* function index;
    /// pushes a non-nullable reference to `f`'s concrete type.
    fn visit_ref_func(&mut self, function_index: u32) -> Self::Output {
        let type_id = match self.resources.type_id_of_function(function_index) {
            Some(id) => id,
            None => bail!(
                self.offset,
                "unknown function {}: function index out of bounds",
                function_index,
            ),
        };
        // `ref.func` is only valid for functions declared referenceable
        // elsewhere in the module (e.g. element segments or exports).
        if !self.resources.is_function_referenced(function_index) {
            bail!(self.offset, "undeclared function reference");
        }

        let index = UnpackedIndex::Id(type_id);
        let ty = ValType::Ref(
            RefType::new(false, HeapType::Concrete(index)).ok_or_else(|| {
                BinaryReaderError::new("implementation limit: type index too large", self.offset)
            })?,
        );
        self.push_operand(ty)?;
        Ok(())
    }
    /// Validates `ref.eq`: pops two `eq`-typed references and pushes an `i32`.
    /// When both operands' `shared`-ness is known, they must agree.
    fn visit_ref_eq(&mut self) -> Self::Output {
        let a = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
        let b = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
        let a_is_shared = a.is_maybe_shared(&self.resources);
        let b_is_shared = b.is_maybe_shared(&self.resources);
        match (a_is_shared, b_is_shared) {
            // If either side's shared-ness is unknown (e.g. a bottom type),
            // there is nothing further to check.
            (None, Some(_)) | (Some(_), None) | (None, None) => {}

            (Some(is_a_shared), Some(is_b_shared)) => {
                if is_a_shared != is_b_shared {
                    bail!(
                        self.offset,
                        "type mismatch: expected `ref.eq` types to match `shared`-ness"
                    );
                }
            }
        }
        self.push_operand(ValType::I32)
    }
    /// Validates `memory.init`: pops a length (`i32`), a source offset into
    /// the data segment (`i32`), and a destination address typed by the
    /// memory's index type.
    fn visit_memory_init(&mut self, segment: u32, mem: u32) -> Self::Output {
        let ty = self.check_memory_index(mem)?;
        self.check_data_segment(segment)?;
        self.pop_operand(Some(ValType::I32))?; // length
        self.pop_operand(Some(ValType::I32))?; // source offset
        self.pop_operand(Some(ty))?; // destination address
        Ok(())
    }
3102 fn visit_data_drop(&mut self, segment: u32) -> Self::Output {
3103 self.check_data_segment(segment)?;
3104 Ok(())
3105 }
    /// Validates `memory.copy` between two (possibly distinct) memories. The
    /// length operand's type is `i32` if either memory is 32-bit indexed,
    /// otherwise both memories' (64-bit) index type.
    fn visit_memory_copy(&mut self, dst: u32, src: u32) -> Self::Output {
        self.check_enabled(self.features.bulk_memory_opt(), "bulk memory")?;
        let dst_ty = self.check_memory_index(dst)?;
        let src_ty = self.check_memory_index(src)?;

        // Length operand: the "smaller" of the two index types — `i32` wins.
        self.pop_operand(Some(match src_ty {
            ValType::I32 => ValType::I32,
            _ => dst_ty,
        }))?;

        self.pop_operand(Some(src_ty))?; // source address
        self.pop_operand(Some(dst_ty))?; // destination address
        Ok(())
    }
    /// Validates `memory.fill`: pops a length (index-typed), an `i32` fill
    /// value, and a destination address (index-typed).
    fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
        self.check_enabled(self.features.bulk_memory_opt(), "bulk memory")?;
        let ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(ty))?; // length
        self.pop_operand(Some(ValType::I32))?; // fill value
        self.pop_operand(Some(ty))?; // destination address
        Ok(())
    }
    /// Validates `memory.discard`: pops two operands, both typed by the
    /// memory's index type.
    fn visit_memory_discard(&mut self, mem: u32) -> Self::Output {
        let ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(ty))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    /// Validates `table.init`: the element segment's type must be a subtype
    /// of the table's element type; pops a length (`i32`), a source offset
    /// (`i32`), and a destination index typed by the table's index type.
    fn visit_table_init(&mut self, segment: u32, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let segment_ty = self.element_type_at(segment)?;
        if !self
            .resources
            .is_subtype(ValType::Ref(segment_ty), ValType::Ref(table.element_type))
        {
            bail!(self.offset, "type mismatch");
        }
        self.pop_operand(Some(ValType::I32))?; // length
        self.pop_operand(Some(ValType::I32))?; // source offset
        self.pop_operand(Some(table.index_type()))?; // destination index
        Ok(())
    }
3152 fn visit_elem_drop(&mut self, segment: u32) -> Self::Output {
3153 self.element_type_at(segment)?;
3154 Ok(())
3155 }
    /// Validates `table.copy` between two (possibly distinct) tables. The
    /// source table's element type must be a subtype of the destination's,
    /// and the length operand's type is `i32` if either table is 32-bit
    /// indexed, otherwise both tables' (64-bit) index type.
    fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output {
        let src = self.table_type_at(src_table)?;
        let dst = self.table_type_at(dst_table)?;
        if !self.resources.is_subtype(
            ValType::Ref(src.element_type),
            ValType::Ref(dst.element_type),
        ) {
            bail!(self.offset, "type mismatch");
        }

        // Length operand: the "smaller" of the two index types — `i32` wins.
        self.pop_operand(Some(match src.index_type() {
            ValType::I32 => ValType::I32,
            _ => dst.index_type(),
        }))?;

        self.pop_operand(Some(src.index_type()))?; // source index
        self.pop_operand(Some(dst.index_type()))?; // destination index
        Ok(())
    }
    /// Validates `table.get`: pops an index (typed by the table's index type)
    /// and pushes a value of the table's element type.
    fn visit_table_get(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?;
        self.push_operand(table.element_type)?;
        Ok(())
    }
    /// Validates `table.atomic.get`: same stack shape as `table.get`, but the
    /// element type is additionally restricted to subtypes of shared `anyref`.
    fn visit_table_atomic_get(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        self.visit_table_get(table)?;
        let ty = self.table_type_at(table)?.element_type;
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(ty.into(), supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.get` only allows subtypes of `anyref`"
            );
        }
        Ok(())
    }
    /// Validates `table.set`: pops a value of the table's element type and an
    /// index typed by the table's index type.
    fn visit_table_set(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.element_type.into()))?;
        self.pop_operand(Some(table.index_type()))?;
        Ok(())
    }
    /// Validates `table.atomic.set`: same stack shape as `table.set`, but the
    /// element type is additionally restricted to subtypes of shared `anyref`.
    fn visit_table_atomic_set(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        self.visit_table_set(table)?;
        let ty = self.table_type_at(table)?.element_type;
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(ty.into(), supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.set` only allows subtypes of `anyref`"
            );
        }
        Ok(())
    }
    /// Validates `table.grow`: pops a delta (typed by the table's index type)
    /// and an initialization value of the element type, and pushes the
    /// previous size (index-typed).
    fn visit_table_grow(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?; // delta
        self.pop_operand(Some(table.element_type.into()))?; // init value
        self.push_operand(table.index_type())?;
        Ok(())
    }
3231 fn visit_table_size(&mut self, table: u32) -> Self::Output {
3232 let table = self.table_type_at(table)?;
3233 self.push_operand(table.index_type())?;
3234 Ok(())
3235 }
    /// Validates `table.fill`: pops a length (index-typed), a fill value of
    /// the element type, and a starting index (index-typed).
    fn visit_table_fill(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?; // length
        self.pop_operand(Some(table.element_type.into()))?; // fill value
        self.pop_operand(Some(table.index_type()))?; // starting index
        Ok(())
    }
    /// Validates `table.atomic.rmw.xchg`: the element type must be a subtype
    /// of shared `anyref`; pops a new value and an index, pushes the old value.
    fn visit_table_atomic_rmw_xchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let elem_ty = table.element_type.into();
        debug_assert_type_indices_are_ids(elem_ty);
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(elem_ty, supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.rmw.xchg` only allows subtypes of `anyref`"
            );
        }
        self.pop_operand(Some(elem_ty))?; // replacement value
        self.pop_operand(Some(table.index_type()))?; // index
        self.push_operand(elem_ty)?;
        Ok(())
    }
    /// Validates `table.atomic.rmw.cmpxchg`: the element type must be a
    /// subtype of shared `eqref` (comparison requires `eq`-ness); pops a
    /// replacement value, an expected value, and an index, pushes the old
    /// value.
    fn visit_table_atomic_rmw_cmpxchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let elem_ty = table.element_type.into();
        debug_assert_type_indices_are_ids(elem_ty);
        let supertype = RefType::EQREF.shared().unwrap();
        if !self.resources.is_subtype(elem_ty, supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.rmw.cmpxchg` only allows subtypes of `eqref`"
            );
        }
        self.pop_operand(Some(elem_ty))?; // replacement value
        self.pop_operand(Some(elem_ty))?; // expected value
        self.pop_operand(Some(table.index_type()))?; // index
        self.push_operand(elem_ty)?;
        Ok(())
    }
    /// Validates `struct.new`: pops one operand per field (in reverse field
    /// order, unpacked to value types) and pushes a non-nullable reference to
    /// the concrete struct type.
    fn visit_struct_new(&mut self, struct_type_index: u32) -> Self::Output {
        let struct_ty = self.struct_type_at(struct_type_index)?;
        for ty in struct_ty.fields.iter().rev() {
            self.pop_operand(Some(ty.element_type.unpack()))?;
        }
        self.push_concrete_ref(false, struct_type_index)?;
        Ok(())
    }
    /// Validates `struct.new_default`: every field's (unpacked) type must be
    /// defaultable; pushes a non-nullable reference to the struct type
    /// without consuming any operands.
    fn visit_struct_new_default(&mut self, type_index: u32) -> Self::Output {
        let ty = self.struct_type_at(type_index)?;
        for field in ty.fields.iter() {
            let val_ty = field.element_type.unpack();
            if !val_ty.is_defaultable() {
                bail!(
                    self.offset,
                    "invalid `struct.new_default`: {val_ty} field is not defaultable"
                );
            }
        }
        self.push_concrete_ref(false, type_index)?;
        Ok(())
    }
    /// Validates `struct.get`: only valid for non-packed fields (packed
    /// fields require `struct.get_s`/`struct.get_u`); pops a (nullable)
    /// reference to the struct type and pushes the field's value type.
    fn visit_struct_get(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
        if field_ty.element_type.is_packed() {
            bail!(
                self.offset,
                "can only use struct `get` with non-packed storage types"
            )
        }
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty.element_type.unpack())
    }
    /// Validates `struct.atomic.get`: same stack shape as `struct.get`, with
    /// the field type additionally restricted to `i32`, `i64`, or a subtype
    /// of shared `anyref`.
    fn visit_struct_atomic_get(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.visit_struct_get(struct_type_index, field_index)?;
        let ty = self
            .struct_field_at(struct_type_index, field_index)?
            .element_type;
        let is_valid_type = match ty {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed fields were already rejected by `visit_struct_get`.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
3336 fn visit_struct_get_s(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
3337 let field_ty = self.struct_field_at(struct_type_index, field_index)?;
3338 if !field_ty.element_type.is_packed() {
3339 bail!(
3340 self.offset,
3341 "cannot use struct.get_s with non-packed storage types"
3342 )
3343 }
3344 self.pop_concrete_ref(true, struct_type_index)?;
3345 self.push_operand(field_ty.element_type.unpack())
3346 }
3347 fn visit_struct_atomic_get_s(
3348 &mut self,
3349 _ordering: Ordering,
3350 struct_type_index: u32,
3351 field_index: u32,
3352 ) -> Self::Output {
3353 self.visit_struct_get_s(struct_type_index, field_index)?;
3354 debug_assert!(matches!(
3356 self.struct_field_at(struct_type_index, field_index)?
3357 .element_type,
3358 StorageType::I8 | StorageType::I16
3359 ));
3360 Ok(())
3361 }
3362 fn visit_struct_get_u(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
3363 let field_ty = self.struct_field_at(struct_type_index, field_index)?;
3364 if !field_ty.element_type.is_packed() {
3365 bail!(
3366 self.offset,
3367 "cannot use struct.get_u with non-packed storage types"
3368 )
3369 }
3370 self.pop_concrete_ref(true, struct_type_index)?;
3371 self.push_operand(field_ty.element_type.unpack())
3372 }
3373 fn visit_struct_atomic_get_u(
3374 &mut self,
3375 _ordering: Ordering,
3376 struct_type_index: u32,
3377 field_index: u32,
3378 ) -> Self::Output {
3379 self.visit_struct_get_s(struct_type_index, field_index)?;
3380 debug_assert!(matches!(
3382 self.struct_field_at(struct_type_index, field_index)?
3383 .element_type,
3384 StorageType::I8 | StorageType::I16
3385 ));
3386 Ok(())
3387 }
    // Validates `struct.set`: the field must be mutable; pops the new value
    // and then the (nullable) struct reference.
    fn visit_struct_set(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.mutable_struct_field_at(struct_type_index, field_index)?;
        self.pop_operand(Some(field_ty.element_type.unpack()))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        Ok(())
    }
    // Validates `struct.atomic.set`: same stack behavior as `struct.set`, but
    // the field type is restricted to `i8`, `i16`, `i32`, `i64`, or a subtype
    // of shared `anyref`. The match covers every `StorageType` variant, so no
    // catch-all arm is needed.
    fn visit_struct_atomic_set(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.visit_struct_set(struct_type_index, field_index)?;
        let ty = self
            .struct_field_at(struct_type_index, field_index)?
            .element_type;
        let is_valid_type = match ty {
            StorageType::I8 | StorageType::I16 => true,
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // The arithmetic/bitwise `struct.atomic.rmw.*` instructions share their
    // validation in `check_struct_atomic_rmw`; each visitor only supplies the
    // operation name used in error messages.
    fn visit_struct_atomic_rmw_add(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("add", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_sub(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("sub", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_and(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("and", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_or(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("or", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_xor(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("xor", struct_type_index, field_index)
    }
    // Validates `struct.atomic.rmw.xchg`: the field must be mutable and of
    // type `i32`, `i64`, or a subtype of shared `anyref`. Pops the new value
    // and the struct reference, pushes the old value.
    fn visit_struct_atomic_rmw_xchg(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        let field_ty = field.element_type.unpack();
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
    // Validates `struct.atomic.rmw.cmpxchg`: like `xchg`, but reference fields
    // are restricted to subtypes of shared `eqref` (comparison requires
    // equality). Pops replacement value, expected value, and the struct
    // reference; pushes the old value.
    fn visit_struct_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        let field_ty = field.element_type.unpack();
        self.pop_operand(Some(field_ty))?;
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
    // Validates `array.new`: pops the length (i32) then the initial element
    // value, pushes a non-null reference to the concrete array type.
    fn visit_array_new(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(array_ty.element_type.unpack()))?;
        self.push_concrete_ref(false, type_index)
    }
    // Validates `array.new_default`: the element type must be defaultable
    // since no initial value is popped; only the length (i32) is consumed.
    fn visit_array_new_default(&mut self, type_index: u32) -> Self::Output {
        let ty = self.array_type_at(type_index)?;
        let val_ty = ty.element_type.unpack();
        if !val_ty.is_defaultable() {
            bail!(
                self.offset,
                "invalid `array.new_default`: {val_ty} field is not defaultable"
            );
        }
        self.pop_operand(Some(ValType::I32))?;
        self.push_concrete_ref(false, type_index)
    }
    // Validates `array.new_fixed`: pops `n` element operands (statically
    // known count encoded in the instruction).
    fn visit_array_new_fixed(&mut self, type_index: u32, n: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type.unpack();
        for _ in 0..n {
            self.pop_operand(Some(elem_ty))?;
        }
        self.push_concrete_ref(false, type_index)
    }
    // Validates `array.new_data`: only numeric/vector element types can be
    // initialized from a data segment. Pops length then segment offset (both
    // i32).
    fn visit_array_new_data(&mut self, type_index: u32, data_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type.unpack();
        match elem_ty {
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
            ValType::Ref(_) => bail!(
                self.offset,
                "type mismatch: array.new_data can only create arrays with numeric and vector elements"
            ),
        }
        self.check_data_segment(data_index)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.push_concrete_ref(false, type_index)
    }
    // Validates `array.new_elem`: only reference element types can be
    // initialized from an element segment, and the segment's element type
    // must be a subtype of the array's element type.
    fn visit_array_new_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let array_ref_ty = match array_ty.element_type.unpack() {
            ValType::Ref(rt) => rt,
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
                self.offset,
                "type mismatch: array.new_elem can only create arrays with reference elements"
            ),
        };
        let elem_ref_ty = self.element_type_at(elem_index)?;
        if !self
            .resources
            .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
        {
            bail!(
                self.offset,
                "invalid array.new_elem instruction: element segment {elem_index} type mismatch: \
                 expected {array_ref_ty}, found {elem_ref_ty}"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.push_concrete_ref(false, type_index)
    }
    // Validates `array.get`: only legal on non-packed element types; packed
    // (i8/i16) arrays must use `array.get_s`/`array.get_u`. Pops the index
    // (i32) and the (nullable) array reference, pushes the unpacked element.
    fn visit_array_get(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type;
        if elem_ty.is_packed() {
            bail!(
                self.offset,
                "cannot use array.get with packed storage types"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty.unpack())
    }
    // Validates `array.atomic.get`: same stack behavior as `array.get`, but
    // the element type is restricted to `i32`, `i64`, or a subtype of shared
    // `anyref`.
    fn visit_array_atomic_get(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_get(type_index)?;
        let elem_ty = self.array_type_at(type_index)?.element_type;
        let is_valid_type = match elem_ty {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed types were rejected by `visit_array_get` above.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // Validates `array.get_s`: sign-extending read of a packed element.
    fn visit_array_get_s(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type;
        if !elem_ty.is_packed() {
            bail!(
                self.offset,
                "cannot use array.get_s with non-packed storage types"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty.unpack())
    }
    // Validates `array.atomic.get_s`: delegation guarantees a packed element
    // type, hence the debug assertion.
    fn visit_array_atomic_get_s(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_get_s(type_index)?;
        debug_assert!(matches!(
            self.array_type_at(type_index)?.element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
    // Validates `array.get_u`: zero-extending read of a packed element.
    fn visit_array_get_u(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type;
        if !elem_ty.is_packed() {
            bail!(
                self.offset,
                "cannot use array.get_u with non-packed storage types"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty.unpack())
    }
    // Validates `array.atomic.get_u`: delegation guarantees a packed element
    // type, hence the debug assertion.
    fn visit_array_atomic_get_u(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_get_u(type_index)?;
        debug_assert!(matches!(
            self.array_type_at(type_index)?.element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
    // Validates `array.set`: the array type must be mutable; pops the new
    // value, the index (i32), and the (nullable) array reference.
    fn visit_array_set(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.mutable_array_type_at(type_index)?;
        self.pop_operand(Some(array_ty.element_type.unpack()))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        Ok(())
    }
    // Validates `array.atomic.set`: same stack behavior as `array.set`, but
    // the element type is restricted to `i8`, `i16`, `i32`, `i64`, or a
    // subtype of shared `anyref`. The match covers every `StorageType`
    // variant, so no catch-all arm is needed.
    fn visit_array_atomic_set(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_set(type_index)?;
        let elem_ty = self.array_type_at(type_index)?.element_type;
        let is_valid_type = match elem_ty {
            StorageType::I8 | StorageType::I16 => true,
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // Validates `array.len`: pops any (possibly shared) `arrayref` and pushes
    // the length as i32.
    fn visit_array_len(&mut self) -> Self::Output {
        self.pop_maybe_shared_ref(AbstractHeapType::Array)?;
        self.push_operand(ValType::I32)
    }
    // Validates `array.fill`: pops count (i32), fill value, start index
    // (i32), and the (nullable) mutable array reference.
    fn visit_array_fill(&mut self, array_type_index: u32) -> Self::Output {
        let array_ty = self.mutable_array_type_at(array_type_index)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(array_ty.element_type.unpack()))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, array_type_index)?;
        Ok(())
    }
    // Validates `array.copy`: the destination must be mutable and the source
    // element type must match (packed types exactly, value types via
    // subtyping). Pops count (i32), source index (i32), source ref,
    // destination index (i32), destination ref.
    fn visit_array_copy(&mut self, type_index_dst: u32, type_index_src: u32) -> Self::Output {
        let array_ty_dst = self.mutable_array_type_at(type_index_dst)?;
        let array_ty_src = self.array_type_at(type_index_src)?;
        match (array_ty_dst.element_type, array_ty_src.element_type) {
            (StorageType::I8, StorageType::I8) => {}
            (StorageType::I8, ty) => bail!(
                self.offset,
                "array types do not match: expected i8, found {ty}"
            ),
            (StorageType::I16, StorageType::I16) => {}
            (StorageType::I16, ty) => bail!(
                self.offset,
                "array types do not match: expected i16, found {ty}"
            ),
            (StorageType::Val(dst), StorageType::Val(src)) => {
                // Copying is covariant: the source element type must be a
                // subtype of the destination element type.
                if !self.resources.is_subtype(src, dst) {
                    bail!(
                        self.offset,
                        "array types do not match: expected {dst}, found {src}"
                    )
                }
            }
            (StorageType::Val(dst), src) => {
                bail!(
                    self.offset,
                    "array types do not match: expected {dst}, found {src}"
                )
            }
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index_src)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index_dst)?;
        Ok(())
    }
    // Validates `array.init_data`: the (mutable) array's element type must be
    // numeric or vector to be filled from a data segment. Pops length (i32),
    // segment offset (i32), destination index (i32), and the array reference.
    fn visit_array_init_data(
        &mut self,
        array_type_index: u32,
        array_data_index: u32,
    ) -> Self::Output {
        let array_ty = self.mutable_array_type_at(array_type_index)?;
        let val_ty = array_ty.element_type.unpack();
        match val_ty {
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
            ValType::Ref(_) => bail!(
                self.offset,
                "invalid array.init_data: array type is not numeric or vector"
            ),
        }
        self.check_data_segment(array_data_index)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, array_type_index)?;
        Ok(())
    }
    // Validates `array.init_elem`: the (mutable) array's element type must be
    // a reference type, and the element segment's type must be a subtype of
    // it. Same operand layout as `array.init_data`.
    fn visit_array_init_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
        let array_ty = self.mutable_array_type_at(type_index)?;
        let array_ref_ty = match array_ty.element_type.unpack() {
            ValType::Ref(rt) => rt,
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
                self.offset,
                "type mismatch: array.init_elem can only create arrays with reference elements"
            ),
        };
        let elem_ref_ty = self.element_type_at(elem_index)?;
        if !self
            .resources
            .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
        {
            bail!(
                self.offset,
                "invalid array.init_elem instruction: element segment {elem_index} type mismatch: \
                 expected {array_ref_ty}, found {elem_ref_ty}"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        Ok(())
    }
    // The arithmetic/bitwise `array.atomic.rmw.*` instructions share their
    // validation in `check_array_atomic_rmw`; each visitor only supplies the
    // operation name used in error messages.
    fn visit_array_atomic_rmw_add(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("add", type_index)
    }
    fn visit_array_atomic_rmw_sub(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("sub", type_index)
    }
    fn visit_array_atomic_rmw_and(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("and", type_index)
    }
    fn visit_array_atomic_rmw_or(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("or", type_index)
    }
    fn visit_array_atomic_rmw_xor(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("xor", type_index)
    }
    // Validates `array.atomic.rmw.xchg`: the (mutable) element type must be
    // `i32`, `i64`, or a subtype of shared `anyref`. Pops the new value, the
    // index (i32), and the array reference; pushes the old value.
    fn visit_array_atomic_rmw_xchg(
        &mut self,
        _ordering: Ordering,
        type_index: u32,
    ) -> Self::Output {
        let field = self.mutable_array_type_at(type_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        let elem_ty = field.element_type.unpack();
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    // Validates `array.atomic.rmw.cmpxchg`: like `xchg`, but reference
    // elements are restricted to subtypes of shared `eqref` (comparison
    // requires equality). Pops replacement, expected value, index (i32), and
    // the array reference; pushes the old value.
    fn visit_array_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: Ordering,
        type_index: u32,
    ) -> Self::Output {
        let field = self.mutable_array_type_at(type_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        let elem_ty = field.element_type.unpack();
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    // Validates `any.convert_extern`: pops an (optionally shared) externref
    // and pushes an anyref preserving the input's nullability and sharedness.
    // Unknown/bottom inputs remain unknown, tagged with the `Any` heap type.
    fn visit_any_convert_extern(&mut self) -> Self::Output {
        let any_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Extern)? {
            MaybeType::Bottom | MaybeType::UnknownRef(_) => {
                MaybeType::UnknownRef(Some(AbstractHeapType::Any))
            }
            MaybeType::Known(ty) => {
                let shared = self.resources.is_shared(ty);
                let heap_type = HeapType::Abstract {
                    shared,
                    ty: AbstractHeapType::Any,
                };
                let any_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
                MaybeType::Known(any_ref)
            }
        };
        self.push_operand(any_ref)
    }
    // Validates `extern.convert_any`: the mirror image of the above, turning
    // an anyref into an externref with the same nullability and sharedness.
    fn visit_extern_convert_any(&mut self) -> Self::Output {
        let extern_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Any)? {
            MaybeType::Bottom | MaybeType::UnknownRef(_) => {
                MaybeType::UnknownRef(Some(AbstractHeapType::Extern))
            }
            MaybeType::Known(ty) => {
                let shared = self.resources.is_shared(ty);
                let heap_type = HeapType::Abstract {
                    shared,
                    ty: AbstractHeapType::Extern,
                };
                let extern_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
                MaybeType::Known(extern_ref)
            }
        };
        self.push_operand(extern_ref)
    }
    // The `ref.test`/`ref.cast` instructions share their validation in
    // `check_ref_test`/`check_ref_cast`; the boolean indicates whether the
    // target reference type is nullable.
    fn visit_ref_test_non_null(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_test(false, heap_type)
    }
    fn visit_ref_test_nullable(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_test(true, heap_type)
    }
    fn visit_ref_cast_non_null(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_cast(false, heap_type)
    }
    fn visit_ref_cast_nullable(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_cast(true, heap_type)
    }
    // Validates `br_on_cast`: branches to `relative_depth` when the cast to
    // `to_ref_type` succeeds. The target type must be a subtype of the source
    // type, and the branch label's last result type must accept the cast
    // target. On fall-through the operand becomes the type difference
    // `from \ to` (the cast definitely failed).
    fn visit_br_on_cast(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        if !self
            .resources
            .is_subtype(to_ref_type.into(), from_ref_type.into())
        {
            bail!(
                self.offset,
                "type mismatch: expected {from_ref_type}, found {to_ref_type}"
            );
        }

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_types = self.label_types(block_ty, frame_kind)?;

        // The cast operand is the last label result; the remaining label
        // types are checked (popped and re-pushed) below.
        match label_types.next_back() {
            Some(label_ty) if self.resources.is_subtype(to_ref_type.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: casting to type {to_ref_type}, but it does not match \
                 label result type {label_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: br_on_cast to label with empty types, must have a reference type"
            ),
        };

        self.pop_push_label_types(label_types)?;
        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        self.push_operand(diff_ty)?;
        Ok(())
    }
    // Validates `br_on_cast_fail`: branches when the cast to `to_ref_type`
    // FAILS, so the branch label's last result type must accept the type
    // difference `from \ to`. On fall-through the operand is the (successful)
    // cast target type.
    fn visit_br_on_cast_fail(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        if !self
            .resources
            .is_subtype(to_ref_type.into(), from_ref_type.into())
        {
            bail!(
                self.offset,
                "type mismatch: expected {from_ref_type}, found {to_ref_type}"
            );
        }

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_tys = self.label_types(block_ty, frame_kind)?;

        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        match label_tys.next_back() {
            Some(label_ty) if self.resources.is_subtype(diff_ty.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: expected label result type {label_ty}, found {diff_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: expected a reference type, found nothing"
            ),
        }

        self.pop_push_label_types(label_tys)?;
        self.push_operand(to_ref_type)?;
        Ok(())
    }
    // Validates `ref.i31`: wraps an i32 into a (non-shared, non-null) i31ref.
    fn visit_ref_i31(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::Ref(RefType::I31))
    }
    // Validates `ref.i31_shared`: same, but the result is a shared i31ref.
    fn visit_ref_i31_shared(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::Ref(
            RefType::I31.shared().expect("i31 is abstract"),
        ))
    }
    // Validates `i31.get_s`/`i31.get_u`: pop an (optionally shared) i31ref,
    // push the extracted i32.
    fn visit_i31_get_s(&mut self) -> Self::Output {
        self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
        self.push_operand(ValType::I32)
    }
    fn visit_i31_get_u(&mut self) -> Self::Output {
        self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
        self.push_operand(ValType::I32)
    }
    // Validates legacy `try` (pre-exnref exception handling): pops the block
    // parameters and pushes a `LegacyTry` control frame.
    fn visit_try(&mut self, mut ty: BlockType) -> Self::Output {
        self.check_block_type(&mut ty)?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::LegacyTry, ty)?;
        Ok(())
    }
    // Validates legacy `catch`: pops the current try/catch frame and replaces
    // it with a `LegacyCatch` frame of the same block type, then pushes the
    // tag's parameter types as the exception payload.
    fn visit_catch(&mut self, index: u32) -> Self::Output {
        let frame = self.pop_ctrl()?;
        if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch {
            bail!(self.offset, "catch found outside of an `try` block");
        }
        let height = self.operands.len();
        let init_height = self.local_inits.push_ctrl();
        self.control.push(Frame {
            kind: FrameKind::LegacyCatch,
            block_type: frame.block_type,
            height,
            unreachable: false,
            init_height,
        });
        let ty = self.exception_tag_at(index)?;
        for ty in ty.params() {
            self.push_operand(*ty)?;
        }
        Ok(())
    }
    // Validates legacy `rethrow`: the target label must be a `catch` or
    // `catch_all` frame; control does not continue past a rethrow, so the
    // remainder of the block is unreachable.
    fn visit_rethrow(&mut self, relative_depth: u32) -> Self::Output {
        let (_, kind) = self.jump(relative_depth)?;
        if kind != FrameKind::LegacyCatch && kind != FrameKind::LegacyCatchAll {
            bail!(
                self.offset,
                "invalid rethrow label: target was not a `catch` block"
            );
        }
        self.unreachable()?;
        Ok(())
    }
    // Validates legacy `delegate`: ends a `try` block (without catches),
    // validating the delegate target label and pushing the block's results.
    fn visit_delegate(&mut self, relative_depth: u32) -> Self::Output {
        let frame = self.pop_ctrl()?;
        if frame.kind != FrameKind::LegacyTry {
            bail!(self.offset, "delegate found outside of an `try` block");
        }
        let _ = self.jump(relative_depth)?;
        for ty in self.results(frame.block_type)? {
            self.push_operand(ty)?;
        }
        Ok(())
    }
    // Validates legacy `catch_all`: only one per `try`, must follow a `try`
    // or `catch`; no exception payload is pushed.
    fn visit_catch_all(&mut self) -> Self::Output {
        let frame = self.pop_ctrl()?;
        if frame.kind == FrameKind::LegacyCatchAll {
            bail!(self.offset, "only one catch_all allowed per `try` block");
        } else if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch {
            bail!(self.offset, "catch_all found outside of a `try` block");
        }
        let height = self.operands.len();
        let init_height = self.local_inits.push_ctrl();
        self.control.push(Frame {
            kind: FrameKind::LegacyCatchAll,
            block_type: frame.block_type,
            height,
            unreachable: false,
            init_height,
        });
        Ok(())
    }
    // Validates `cont.new`: pops a nullable reference to the continuation's
    // underlying function type and pushes a non-null reference to the
    // concrete continuation type.
    fn visit_cont_new(&mut self, type_index: u32) -> Self::Output {
        let cont_ty = self.cont_type_at(type_index)?;
        let rt = RefType::concrete(true, cont_ty.0);
        self.pop_ref(Some(rt))?;
        self.push_concrete_ref(false, type_index)?;
        Ok(())
    }
    // Validates `cont.bind`: partially applies the first `argcnt` parameters
    // of the argument continuation, producing the result continuation. The
    // remaining parameters must be supertypes of the result's parameters and
    // the result types must correspond (results are checked in the argument
    // continuation's direction).
    fn visit_cont_bind(&mut self, argument_index: u32, result_index: u32) -> Self::Output {
        let arg_cont = self.cont_type_at(argument_index)?;
        let arg_func = self.func_type_of_cont_type(arg_cont);
        let res_cont = self.cont_type_at(result_index)?;
        let res_func = self.func_type_of_cont_type(res_cont);

        // The argument continuation must take at least as many parameters as
        // the result continuation; the excess is what gets bound here.
        if arg_func.params().len() < res_func.params().len() {
            bail!(self.offset, "type mismatch in continuation arguments");
        }

        let argcnt = arg_func.params().len() - res_func.params().len();

        if !self.is_subtype_many(res_func.params(), &arg_func.params()[argcnt..])
            || arg_func.results().len() != res_func.results().len()
            || !self.is_subtype_many(arg_func.results(), res_func.results())
        {
            bail!(self.offset, "type mismatch in continuation types");
        }

        self.pop_concrete_ref(true, argument_index)?;

        // Pop the bound arguments (the first `argcnt` parameters), in
        // reverse order to match the stack layout.
        for &ty in arg_func.params().iter().take(argcnt).rev() {
            self.pop_operand(Some(ty))?;
        }

        self.push_concrete_ref(false, result_index)?;

        Ok(())
    }
    // Validates `suspend`: pops the tag's parameters and pushes the tag's
    // results (the values delivered when the suspension is resumed).
    fn visit_suspend(&mut self, tag_index: u32) -> Self::Output {
        let ft = &self.tag_at(tag_index)?;
        for &ty in ft.params().iter().rev() {
            self.pop_operand(Some(ty))?;
        }
        for &ty in ft.results() {
            self.push_operand(ty)?;
        }
        Ok(())
    }
    // Validates `resume`: the resume table's handlers are checked against the
    // continuation type, then the continuation reference and its parameters
    // are popped and its results pushed.
    fn visit_resume(&mut self, type_index: u32, table: ResumeTable) -> Self::Output {
        let ft = self.check_resume_table(table, type_index)?;
        self.pop_concrete_ref(true, type_index)?;
        for &ty in ft.params().iter().rev() {
            self.pop_operand(Some(ty))?;
        }

        for &ty in ft.results() {
            self.push_operand(ty)?;
        }
        Ok(())
    }
    // Validates `resume_throw`: resumes a continuation by raising an
    // exception inside it. The tag must be an exception tag (no results);
    // the tag's parameters (the exception payload) are popped along with the
    // continuation reference, and the continuation's results are pushed.
    fn visit_resume_throw(
        &mut self,
        type_index: u32,
        tag_index: u32,
        table: ResumeTable,
    ) -> Self::Output {
        let ft = self.check_resume_table(table, type_index)?;
        let tag_ty = self.exception_tag_at(tag_index)?;
        if tag_ty.results().len() != 0 {
            bail!(self.offset, "type mismatch: non-empty tag result type")
        }
        self.pop_concrete_ref(true, type_index)?;
        for &ty in tag_ty.params().iter().rev() {
            self.pop_operand(Some(ty))?;
        }

        for &ty in ft.results() {
            self.push_operand(ty)?;
        }
        Ok(())
    }
    // Validates `switch`: a direct transfer to another continuation. The tag
    // must have no parameters; the last parameter of the target
    // continuation's function type must itself be a concrete continuation
    // reference (the "return" continuation handed to the switched-to
    // computation), and the result types of both continuations must agree
    // with the tag's result types.
    fn visit_switch(&mut self, type_index: u32, tag_index: u32) -> Self::Output {
        let cont_ty = self.cont_type_at(type_index)?;
        let func_ty = self.func_type_of_cont_type(cont_ty);
        let tag_ty = self.tag_at(tag_index)?;
        if tag_ty.params().len() != 0 {
            bail!(self.offset, "type mismatch: non-empty tag parameter type")
        }
        match func_ty.params().last() {
            Some(ValType::Ref(rt)) if rt.is_concrete_type_ref() => {
                let other_cont_id = rt
                    .type_index()
                    .unwrap()
                    .unpack()
                    .as_core_type_id()
                    .expect("expected canonicalized index");
                let sub_ty = self.resources.sub_type_at_id(other_cont_id);
                let other_cont_ty =
                    if let CompositeInnerType::Cont(cont) = &sub_ty.composite_type.inner {
                        cont
                    } else {
                        bail!(self.offset, "non-continuation type");
                    };
                let other_func_ty = self.func_type_of_cont_type(&other_cont_ty);
                if func_ty.results().len() != tag_ty.results().len()
                    || !self.is_subtype_many(func_ty.results(), tag_ty.results())
                    || other_func_ty.results().len() != tag_ty.results().len()
                    || !self.is_subtype_many(tag_ty.results(), other_func_ty.results())
                {
                    bail!(self.offset, "type mismatch in continuation types")
                }

                self.pop_concrete_ref(true, type_index)?;

                // Pop the target continuation's parameters, except the
                // trailing return-continuation parameter (skipped below).
                for &ty in func_ty.params().iter().rev().skip(1) {
                    self.pop_operand(Some(ty))?;
                }

                // When control comes back, the return continuation's
                // parameters are what this instruction produces.
                for &ty in other_func_ty.params() {
                    self.push_operand(ty)?;
                }
            }
            Some(ty) => bail!(
                self.offset,
                "type mismatch: expected a continuation reference, found {}",
                ty_to_str(*ty)
            ),
            None => bail!(
                self.offset,
                "type mismatch: instruction requires a continuation reference"
            ),
        }
        Ok(())
    }
    // The wide-arithmetic instructions delegate to shared checkers:
    // `check_binop128` for 128-bit add/sub and `check_i64_mul_wide` for the
    // widening multiplies.
    fn visit_i64_add128(&mut self) -> Result<()> {
        self.check_binop128()
    }
    fn visit_i64_sub128(&mut self) -> Result<()> {
        self.check_binop128()
    }
    fn visit_i64_mul_wide_s(&mut self) -> Result<()> {
        self.check_i64_mul_wide()
    }
    fn visit_i64_mul_wide_u(&mut self) -> Result<()> {
        self.check_i64_mul_wide()
    }
4234}
4235
// A minimal either-of-two-iterators type, used to unify two different
// iterator implementations behind a single return type.
#[derive(Clone, Debug)]
enum Either<A, B> {
    A(A),
    B(B),
}
4241
4242impl<A, B> Iterator for Either<A, B>
4243where
4244 A: Iterator,
4245 B: Iterator<Item = A::Item>,
4246{
4247 type Item = A::Item;
4248 fn next(&mut self) -> Option<A::Item> {
4249 match self {
4250 Either::A(a) => a.next(),
4251 Either::B(b) => b.next(),
4252 }
4253 }
4254}
4255
// Back-to-front iteration likewise delegates to the held variant; needed so
// `Either` satisfies the `PreciseIterator` bound below.
impl<A, B> DoubleEndedIterator for Either<A, B>
where
    A: DoubleEndedIterator,
    B: DoubleEndedIterator<Item = A::Item>,
{
    fn next_back(&mut self) -> Option<A::Item> {
        match self {
            Either::A(a) => a.next_back(),
            Either::B(b) => b.next_back(),
        }
    }
}
4268
// Exact length delegates to the held variant (overriding `len` directly, so
// it does not depend on `size_hint`).
impl<A, B> ExactSizeIterator for Either<A, B>
where
    A: ExactSizeIterator,
    B: ExactSizeIterator<Item = A::Item>,
{
    fn len(&self) -> usize {
        match self {
            Either::A(a) => a.len(),
            Either::B(b) => b.len(),
        }
    }
}
4281
// Alias-style trait bundling the iterator capabilities the validator needs
// (exact length, reversibility, clonability, debuggability), with a blanket
// impl so any qualifying iterator satisfies it automatically.
trait PreciseIterator: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug {}
impl<T: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug> PreciseIterator for T {}
4284
impl Locals {
    // Defines `count` new locals of type `ty`. Returns `false` when doing so
    // would exceed `MAX_WASM_FUNCTION_LOCALS` (or overflow), leaving further
    // error reporting to the caller.
    //
    // Locals are stored in two tiers: the first `MAX_LOCALS_TO_TRACK` types
    // go into the flat `first` vector for O(1) lookup; any remainder is
    // recorded as a run in `uncached` keyed by the run's maximum local index.
    fn define(&mut self, count: u32, ty: ValType) -> bool {
        if count == 0 {
            return true;
        }
        // Remaining capacity in the fast, flat cache.
        let vacant_first = MAX_LOCALS_TO_TRACK.saturating_sub(self.num_locals);
        match self.num_locals.checked_add(count) {
            Some(num_locals) if num_locals > MAX_WASM_FUNCTION_LOCALS => return false,
            None => return false,
            Some(num_locals) => self.num_locals = num_locals,
        };
        let push_to_first = cmp::min(vacant_first, count);
        self.first
            .extend(iter::repeat(ty).take(push_to_first as usize));
        // Whatever didn't fit in `first` becomes one run-length entry,
        // keyed by the largest local index the run covers.
        let num_uncached = count - push_to_first;
        if num_uncached > 0 {
            let max_uncached_idx = self.num_locals - 1;
            self.uncached.push((max_uncached_idx, ty));
        }
        true
    }

    // Total number of locals defined so far (params + explicit locals).
    pub(super) fn len_locals(&self) -> u32 {
        self.num_locals
    }

    // Returns the type of local `idx`, or `None` if it is out of range.
    // Fast path: direct index into the flat cache; slow path: binary search
    // over the run-length-encoded tail.
    #[inline]
    pub(super) fn get(&self, idx: u32) -> Option<ValType> {
        match self.first.get(idx as usize) {
            Some(ty) => Some(*ty),
            None => self.get_bsearch(idx),
        }
    }

    // Looks `idx` up in `uncached`, whose entries are sorted by the maximum
    // index each run covers.
    fn get_bsearch(&self, idx: u32) -> Option<ValType> {
        match self.uncached.binary_search_by_key(&idx, |(idx, _)| *idx) {
            // Past the last run's maximum index: no such local.
            Err(i) if i == self.uncached.len() => None,

            // Either an exact hit on a run's maximum index, or the first run
            // whose maximum index exceeds `idx` — both cover `idx`.
            Ok(i) | Err(i) => Some(self.uncached[i].1),
        }
    }
}
4341
4342impl<R> ModuleArity for WasmProposalValidator<'_, '_, R>
4343where
4344 R: WasmModuleResources,
4345{
4346 fn tag_type_arity(&self, at: u32) -> Option<(u32, u32)> {
4347 self.0
4348 .resources
4349 .tag_at(at)
4350 .map(|x| (x.params().len() as u32, x.results().len() as u32))
4351 }
4352
4353 fn type_index_of_function(&self, function_idx: u32) -> Option<u32> {
4354 self.0.resources.type_index_of_function(function_idx)
4355 }
4356
4357 fn sub_type_at(&self, type_idx: u32) -> Option<&SubType> {
4358 Some(self.0.sub_type_at(type_idx).ok()?)
4359 }
4360
4361 fn func_type_of_cont_type(&self, c: &ContType) -> Option<&FuncType> {
4362 Some(self.0.func_type_of_cont_type(c))
4363 }
4364
4365 fn sub_type_of_ref_type(&self, rt: &RefType) -> Option<&SubType> {
4366 let id = rt.type_index()?.as_core_type_id()?;
4367 Some(self.0.resources.sub_type_at_id(id))
4368 }
4369
4370 fn control_stack_height(&self) -> u32 {
4371 self.0.control.len() as u32
4372 }
4373
4374 fn label_block(&self, depth: u32) -> Option<(BlockType, FrameKind)> {
4375 self.0.jump(depth).ok()
4376 }
4377}
4378
4379impl<R> FrameStack for WasmProposalValidator<'_, '_, R>
4380where
4381 R: WasmModuleResources,
4382{
4383 fn current_frame(&self) -> Option<FrameKind> {
4384 Some(self.0.control.last()?.kind)
4385 }
4386}