wasmtime/runtime/vm/gc/enabled/
exnref.rs

1use super::structref::{initialize_field_impl, read_field_impl};
2use crate::{
3    StorageType, Val,
4    prelude::*,
5    runtime::vm::{GcHeap, GcStore, VMGcRef},
6    store::{AutoAssertNoGc, InstanceId},
7};
8use core::fmt;
9use wasmtime_environ::{DefinedTagIndex, GcStructLayout, VMGcKind};
10
/// A `VMGcRef` that we know points to an `exn`.
///
/// Create a `VMExnRef` via `VMGcRef::into_exnref` and
/// `VMGcRef::as_exnref`, or their unchecked equivalents
/// `VMGcRef::into_exnref_unchecked` and `VMGcRef::as_exnref_unchecked`.
///
/// Note: This is not a `TypedGcRef<_>` because each collector can have a
/// different concrete representation of `exnref` that they allocate inside
/// their heaps.
#[derive(Debug, PartialEq, Eq, Hash)]
// `repr(transparent)` guarantees this wrapper has exactly the same in-memory
// representation as the wrapped `VMGcRef`, which the raw pointer cast in
// `VMGcRef::as_exnref_unchecked` relies upon.
#[repr(transparent)]
pub struct VMExnRef(VMGcRef);
23
24impl fmt::Pointer for VMExnRef {
25    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
26        fmt::Pointer::fmt(&self.0, f)
27    }
28}
29
30impl From<VMExnRef> for VMGcRef {
31    #[inline]
32    fn from(x: VMExnRef) -> Self {
33        x.0
34    }
35}
36
37impl VMGcRef {
38    /// Is this `VMGcRef` pointing to an `exn`?
39    pub fn is_exnref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> bool {
40        if self.is_i31() {
41            return false;
42        }
43
44        let header = gc_heap.header(&self);
45        header.kind().matches(VMGcKind::ExnRef)
46    }
47
48    /// Create a new `VMExnRef` from the given `gc_ref`.
49    ///
50    /// If this is not a GC reference to an `exnref`, `Err(self)` is
51    /// returned.
52    pub fn into_exnref(self, gc_heap: &(impl GcHeap + ?Sized)) -> Result<VMExnRef, VMGcRef> {
53        if self.is_exnref(gc_heap) {
54            Ok(self.into_exnref_unchecked())
55        } else {
56            Err(self)
57        }
58    }
59
60    /// Create a new `VMExnRef` from `self` without actually checking that
61    /// `self` is an `exnref`.
62    ///
63    /// This method does not check that `self` is actually an `exnref`, but
64    /// it should be. Failure to uphold this invariant is memory safe but will
65    /// result in general incorrectness down the line such as panics or wrong
66    /// results.
67    #[inline]
68    pub fn into_exnref_unchecked(self) -> VMExnRef {
69        debug_assert!(!self.is_i31());
70        VMExnRef(self)
71    }
72
73    /// Get this GC reference as an `exnref` reference, if it actually is an
74    /// `exnref` reference.
75    pub fn as_exnref(&self, gc_heap: &(impl GcHeap + ?Sized)) -> Option<&VMExnRef> {
76        if self.is_exnref(gc_heap) {
77            Some(self.as_exnref_unchecked())
78        } else {
79            None
80        }
81    }
82
83    /// Get this GC reference as an `exnref` reference without checking if it
84    /// actually is an `exnref` reference.
85    ///
86    /// Calling this method on a non-`exnref` reference is memory safe, but
87    /// will lead to general incorrectness like panics and wrong results.
88    pub fn as_exnref_unchecked(&self) -> &VMExnRef {
89        debug_assert!(!self.is_i31());
90        let ptr = self as *const VMGcRef;
91        let ret = unsafe { &*ptr.cast() };
92        assert!(matches!(ret, VMExnRef(VMGcRef { .. })));
93        ret
94    }
95}
96
97impl VMExnRef {
98    /// Get the underlying `VMGcRef`.
99    pub fn as_gc_ref(&self) -> &VMGcRef {
100        &self.0
101    }
102
103    /// Get a mutable borrow on the underlying `VMGcRef`.
104    ///
105    /// Requires that the mutation retains the reference's invariants,
106    /// namely: not null, and pointing to a valid exnref object. Doing
107    /// otherwise is memory safe, but will lead to general
108    /// incorrectness.
109    pub fn as_gc_ref_mut(&mut self) -> &mut VMGcRef {
110        &mut self.0
111    }
112
113    /// Clone this `VMExnRef`, running any GC barriers as necessary.
114    pub fn clone(&self, gc_store: &mut GcStore) -> Self {
115        Self(gc_store.clone_gc_ref(&self.0))
116    }
117
118    /// Explicitly drop this `exnref`, running GC drop barriers as necessary.
119    pub fn drop(self, gc_store: &mut GcStore) {
120        gc_store.drop_gc_ref(self.0);
121    }
122
123    /// Copy this `VMExnRef` without running the GC's clone barriers.
124    ///
125    /// Prefer calling `clone(&mut GcStore)` instead! This is mostly an internal
126    /// escape hatch for collector implementations.
127    ///
128    /// Failure to run GC barriers when they would otherwise be necessary can
129    /// lead to leaks, panics, and wrong results. It cannot lead to memory
130    /// unsafety, however.
131    pub fn unchecked_copy(&self) -> Self {
132        Self(self.0.unchecked_copy())
133    }
134
135    /// Read a field of the given `StorageType` into a `Val`.
136    ///
137    /// `i8` and `i16` fields are zero-extended into `Val::I32(_)`s.
138    ///
139    /// Does not check that the field is actually of type `ty`. That is the
140    /// caller's responsibility. Failure to do so is memory safe, but will lead
141    /// to general incorrectness such as panics and wrong results.
142    ///
143    /// Panics on out-of-bounds accesses.
144    pub fn read_field(
145        &self,
146        store: &mut AutoAssertNoGc,
147        layout: &GcStructLayout,
148        ty: &StorageType,
149        field: usize,
150    ) -> Val {
151        let offset = layout.fields[field].offset;
152        read_field_impl(self.as_gc_ref(), store, ty, offset)
153    }
154
155    /// Initialize a field in this exnref that is currently uninitialized.
156    ///
157    /// Calling this method on an exnref that has already had the
158    /// associated field initialized will result in GC bugs. These are
159    /// memory safe but will lead to generally incorrect behavior such
160    /// as panics, leaks, and incorrect results.
161    ///
162    /// Does not check that `val` matches `ty`, nor that the field is actually
163    /// of type `ty`. Checking those things is the caller's responsibility.
164    /// Failure to do so is memory safe, but will lead to general incorrectness
165    /// such as panics and wrong results.
166    ///
167    /// Returns an error if `val` is a GC reference that has since been
168    /// unrooted.
169    ///
170    /// Panics on out-of-bounds accesses.
171    pub fn initialize_field(
172        &self,
173        store: &mut AutoAssertNoGc,
174        layout: &GcStructLayout,
175        ty: &StorageType,
176        field: usize,
177        val: Val,
178    ) -> Result<()> {
179        debug_assert!(val._matches_ty(&store, &ty.unpack())?);
180        let offset = layout.fields[field].offset;
181        initialize_field_impl(self.as_gc_ref(), store, ty, offset, val)
182    }
183
184    /// Initialize the tag referenced by this exception object.
185    pub fn initialize_tag(
186        &self,
187        store: &mut AutoAssertNoGc,
188        instance: InstanceId,
189        tag: DefinedTagIndex,
190    ) -> Result<()> {
191        let layouts = store.engine().gc_runtime().unwrap().layouts();
192        let instance_offset = layouts.exception_tag_instance_offset();
193        let tag_offset = layouts.exception_tag_defined_offset();
194        let store = store.require_gc_store_mut()?;
195        store
196            .gc_object_data(&self.0)
197            .write_u32(instance_offset, instance.as_u32());
198        store
199            .gc_object_data(&self.0)
200            .write_u32(tag_offset, tag.as_u32());
201        Ok(())
202    }
203
204    /// Get the tag referenced by this exception object.
205    pub fn tag(&self, store: &mut AutoAssertNoGc) -> Result<(InstanceId, DefinedTagIndex)> {
206        let layouts = store.engine().gc_runtime().unwrap().layouts();
207        let instance_offset = layouts.exception_tag_instance_offset();
208        let tag_offset = layouts.exception_tag_defined_offset();
209        let instance = store
210            .require_gc_store_mut()?
211            .gc_object_data(&self.0)
212            .read_u32(instance_offset);
213        let instance = InstanceId::from_u32(instance);
214        let store = store.require_gc_store_mut()?;
215        let tag = store.gc_object_data(&self.0).read_u32(tag_offset);
216        let tag = DefinedTagIndex::from_u32(tag);
217        Ok((instance, tag))
218    }
219}