wasmtime/runtime/vm/gc/enabled/drc.rs

//! The deferred reference-counting (DRC) collector.
//!
//! Warning: this ref-counting collector does not have a tracing cycle
//! collector, and therefore cannot collect cycles between GC objects!
//!
//! For host VM code, we use plain reference counting, where cloning increments
//! the reference count, and dropping decrements it. We can avoid many of the
//! on-stack increment/decrement operations that typically plague the
//! performance of reference counting via Rust's ownership and borrowing system.
//! Moving a `VMGcRef` avoids mutating its reference count, and borrowing it
//! either avoids the reference count increment entirely or defers it until the
//! `VMGcRef` is actually cloned, if ever.
//!
//! When passing a `VMGcRef` into compiled Wasm code, we don't want to do
//! reference count mutations for every compiled `local.{get,set}`, nor for
//! every function call. Therefore, we use a variation of **deferred reference
//! counting**, where we only mutate reference counts when storing `VMGcRef`s
//! somewhere that outlives the Wasm activation: into a global or
//! table. Simultaneously, we over-approximate the set of `VMGcRef`s that are
//! inside Wasm function activations. Periodically, we walk the stack at GC safe
//! points, and use stack map information to precisely identify the set of
//! `VMGcRef`s inside Wasm activations. Then we take the difference between this
//! precise set and our over-approximation, and decrement the reference count
//! for each of the `VMGcRef`s that are in our over-approximation but not in the
//! precise set. Finally, the over-approximation is reset to the precise set.
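//!
//! As a minimal sketch (illustrative pseudocode only; these names are not this
//! module's actual API), each GC safe point logically performs:
//!
//! ```ignore
//! for gc_ref in over_approximation {
//!     if !precise_set.contains(&gc_ref) {
//!         // No longer on the Wasm stack: drop the reference count that the
//!         // over-approximation was holding, which may deallocate the object.
//!         dec_ref(gc_ref);
//!     }
//! }
//! over_approximation = precise_set;
//! ```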
//!
//! An intrusive, singly-linked list in the object header implements the
//! over-approximated set of `VMGcRef`s referenced by Wasm activations. Calling
//! a Wasm function and passing it a `VMGcRef` inserts the `VMGcRef` into that
//! list if it is not already present, and the compiled Wasm function logically
//! "borrows" the `VMGcRef` from the list. Similarly, `global.get` and
//! `table.get` operations logically clone the gotten `VMGcRef` into that list
//! and then "borrow" the reference out of the list.
//!
//! When a `VMGcRef` is returned to host code from a Wasm function, the host
//! increments the reference count (because the reference is logically
//! "borrowed" from the over-approximated-stack-roots list, and the count owned
//! by that list may be dropped at the next GC).
//!
//! The precise set of stack roots is implemented with a mark bit in the object
//! header. See the `trace` and `sweep` methods for more details.
//!
//! For more general information on deferred reference counting, see *An
//! Examination of Deferred Reference Counting and Cycle Detection* by Quinane:
//! <https://openresearch-repository.anu.edu.au/bitstream/1885/42030/2/hon-thesis.pdf>

use super::VMArrayRef;
use super::free_list::FreeList;
use crate::hash_map::HashMap;
use crate::hash_set::HashSet;
use crate::runtime::vm::{
    ExternRefHostDataId, ExternRefHostDataTable, GarbageCollection, GcHeap, GcHeapObject,
    GcProgress, GcRootsIter, GcRuntime, TypedGcRef, VMExternRef, VMGcHeader, VMGcRef,
};
use crate::vm::VMMemoryDefinition;
use crate::{Engine, EngineWeak, prelude::*};
use core::sync::atomic::AtomicUsize;
use core::{
    alloc::Layout,
    any::Any,
    mem,
    ops::{Deref, DerefMut},
    ptr::NonNull,
};
use wasmtime_environ::drc::{ARRAY_LENGTH_OFFSET, DrcTypeLayouts};
use wasmtime_environ::{
    GcArrayLayout, GcLayout, GcStructLayout, GcTypeLayouts, VMGcKind, VMSharedTypeIndex,
};

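/// The offset of the first element within arrays whose elements are GC
/// references: such elements begin immediately after the `u32` array length
/// field.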
#[expect(clippy::cast_possible_truncation, reason = "known to not overflow")]
const GC_REF_ARRAY_ELEMS_OFFSET: u32 = ARRAY_LENGTH_OFFSET + (mem::size_of::<u32>() as u32);

/// The deferred reference-counting (DRC) collector.
///
/// This reference-counting collector does not have a cycle collector, and so it
/// will not be able to reclaim garbage cycles.
///
/// This is not a moving collector; it doesn't have a nursery or do any
/// compaction.
#[derive(Default)]
pub struct DrcCollector {
    layouts: DrcTypeLayouts,
}

unsafe impl GcRuntime for DrcCollector {
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

    fn new_gc_heap(&self, engine: &Engine) -> Result<Box<dyn GcHeap>> {
        let heap = DrcHeap::new(engine)?;
        Ok(Box::new(heap) as _)
    }
}

/// How to trace a GC object.
enum TraceInfo {
    /// How to trace an array.
    Array {
        /// Whether this array type's elements are GC references, and need
        /// tracing.
        gc_ref_elems: bool,
    },

    /// How to trace a struct.
    Struct {
        /// The offsets of each GC reference field that needs tracing in
        /// instances of this struct type.
        gc_ref_offsets: Box<[u32]>,
    },
}

/// A deferred reference-counting (DRC) heap.
struct DrcHeap {
    engine: EngineWeak,

    /// For every type that we have allocated in this heap, how do we trace it?
    trace_infos: HashMap<VMSharedTypeIndex, TraceInfo>,

    /// Count of how many no-gc scopes we are currently within.
    no_gc_count: u64,

    /// The head of the over-approximated-stack-roots list.
    ///
    /// Note that this is exposed directly to compiled Wasm code through the
    /// vmctx, so must not move.
    over_approximated_stack_roots: Box<Option<VMGcRef>>,

    /// The storage for the GC heap itself.
    memory: Option<crate::vm::Memory>,

    /// The cached `VMMemoryDefinition` for `self.memory` so that we don't have
    /// to make indirect calls through a `dyn RuntimeLinearMemory` object.
    ///
    /// Must be updated and kept in sync with `self.memory`, cleared when the
    /// memory is taken and updated when the memory is replaced.
    vmmemory: Option<VMMemoryDefinition>,

    /// A free list describing which ranges of the heap are available for use.
    free_list: Option<FreeList>,

    /// An explicit stack to avoid recursion when deallocating one object
    /// requires dec-ref'ing another object, which can then be deallocated and
    /// dec-ref yet another object, and so on.
    ///
    /// We store this stack here to reuse the storage and avoid repeated
    /// allocations.
    ///
    /// Note that the `Option` is perhaps technically unnecessary (we could
    /// remove the `Option` and, when we take the stack out of `self`, leave
    /// behind an empty vec instead of `None`) but we keep it because it will
    /// help us catch unexpected re-entry, similar to how a `RefCell` would.
    dec_ref_stack: Option<Vec<VMGcRef>>,
}

impl DrcHeap {
    /// Construct a new, default DRC heap.
    fn new(engine: &Engine) -> Result<Self> {
        log::trace!("allocating new DRC heap");
        Ok(Self {
            engine: engine.weak(),
            trace_infos: HashMap::with_capacity(1),
            no_gc_count: 0,
            over_approximated_stack_roots: Box::new(None),
            memory: None,
            vmmemory: None,
            free_list: None,
            dec_ref_stack: Some(Vec::with_capacity(1)),
        })
    }

    fn engine(&self) -> Engine {
        self.engine.upgrade().unwrap()
    }

    fn dealloc(&mut self, gc_ref: VMGcRef) {
        let drc_ref = drc_ref(&gc_ref);
        let size = self.index(drc_ref).object_size();
        let layout = FreeList::layout(size);
        self.free_list
            .as_mut()
            .unwrap()
            .dealloc(gc_ref.as_heap_index().unwrap(), layout);
    }

    /// Increment the ref count for the associated object.
    fn inc_ref(&mut self, gc_ref: &VMGcRef) {
        if gc_ref.is_i31() {
            return;
        }

        let drc_ref = drc_ref(gc_ref);
        let header = self.index_mut(drc_ref);
        debug_assert_ne!(
            header.ref_count, 0,
            "{:#p} is supposedly live; should have nonzero ref count",
            *gc_ref
        );
        header.ref_count += 1;
        log::trace!("increment {:#p} ref count -> {}", *gc_ref, header.ref_count);
    }

    /// Decrement the ref count for the associated object.
    ///
    /// Returns `true` if the ref count reached zero and the object should be
    /// deallocated.
    fn dec_ref(&mut self, gc_ref: &VMGcRef) -> bool {
        if gc_ref.is_i31() {
            return false;
        }

        let drc_ref = drc_ref(gc_ref);
        let header = self.index_mut(drc_ref);
        debug_assert_ne!(
            header.ref_count, 0,
            "{:#p} is supposedly live; should have nonzero ref count",
            *gc_ref
        );
        header.ref_count -= 1;
        log::trace!("decrement {:#p} ref count -> {}", *gc_ref, header.ref_count);
        header.ref_count == 0
    }

    /// Decrement the ref count for the associated object.
    ///
    /// If the ref count reached zero, then deallocate the object and remove its
    /// associated entry from the `host_data_table` if necessary.
    ///
    /// This uses an explicit stack, rather than recursion, for the scenario
    /// where dropping one object means that the ref count for another object
    /// that it referenced reaches zero.
    fn dec_ref_and_maybe_dealloc(
        &mut self,
        host_data_table: &mut ExternRefHostDataTable,
        gc_ref: &VMGcRef,
    ) {
        let mut stack = self.dec_ref_stack.take().unwrap();
        debug_assert!(stack.is_empty());
        stack.push(gc_ref.unchecked_copy());

        while let Some(gc_ref) = stack.pop() {
            if self.dec_ref(&gc_ref) {
                // The object's reference count reached zero.
                //
                // Enqueue any other objects it references for dec-ref'ing.
                self.trace_gc_ref(&gc_ref, &mut stack);

                // If this object was an `externref`, remove its associated
                // entry from the host-data table.
                if let Some(externref) = gc_ref.as_typed::<VMDrcExternRef>(self) {
                    let host_data_id = self.index(externref).host_data;
                    host_data_table.dealloc(host_data_id);
                }

                // Deallocate this GC object!
                self.dealloc(gc_ref.unchecked_copy());
            }
        }

        debug_assert!(stack.is_empty());
        debug_assert!(self.dec_ref_stack.is_none());
        self.dec_ref_stack = Some(stack);
    }

    /// Ensure that we have tracing information for the given type.
    fn ensure_trace_info(&mut self, ty: VMSharedTypeIndex) {
        if self.trace_infos.contains_key(&ty) {
            return;
        }

        self.insert_new_trace_info(ty);
    }

    fn insert_new_trace_info(&mut self, ty: VMSharedTypeIndex) {
        debug_assert!(!self.trace_infos.contains_key(&ty));

        let engine = self.engine();
        let gc_layout = engine
            .signatures()
            .layout(ty)
            .unwrap_or_else(|| panic!("should have a GC layout for {ty:?}"));

        let info = match gc_layout {
            GcLayout::Array(l) => {
                if l.elems_are_gc_refs {
                    debug_assert_eq!(l.elem_offset(0), GC_REF_ARRAY_ELEMS_OFFSET);
                }
                TraceInfo::Array {
                    gc_ref_elems: l.elems_are_gc_refs,
                }
            }
            GcLayout::Struct(l) => TraceInfo::Struct {
                gc_ref_offsets: l
                    .fields
                    .iter()
                    .filter_map(|f| if f.is_gc_ref { Some(f.offset) } else { None })
                    .collect(),
            },
        };

        let old_entry = self.trace_infos.insert(ty, info);
        debug_assert!(old_entry.is_none());
    }

    /// Enumerate all of the given `VMGcRef`'s outgoing edges.
    fn trace_gc_ref(&self, gc_ref: &VMGcRef, stack: &mut Vec<VMGcRef>) {
        debug_assert!(!gc_ref.is_i31());

        let header = self.header(gc_ref);
        let Some(ty) = header.ty() else {
            debug_assert!(header.kind().matches(VMGcKind::ExternRef));
            return;
        };
        match self
            .trace_infos
            .get(&ty)
            .expect("should have inserted trace info for every GC type allocated in this heap")
        {
            TraceInfo::Struct { gc_ref_offsets } => {
                stack.reserve(gc_ref_offsets.len());
                let data = self.gc_object_data(gc_ref);
                for offset in gc_ref_offsets {
                    let raw = data.read_u32(*offset);
                    if let Some(gc_ref) = VMGcRef::from_raw_u32(raw) {
                        stack.push(gc_ref);
                    }
                }
            }
            TraceInfo::Array { gc_ref_elems } => {
                if !*gc_ref_elems {
                    return;
                }

                let data = self.gc_object_data(gc_ref);
                let len = self.array_len(gc_ref.as_arrayref_unchecked());
                stack.reserve(usize::try_from(len).unwrap());
                for i in 0..len {
                    let elem_offset = GC_REF_ARRAY_ELEMS_OFFSET
                        + i * u32::try_from(mem::size_of::<u32>()).unwrap();
                    let raw = data.read_u32(elem_offset);
                    if let Some(gc_ref) = VMGcRef::from_raw_u32(raw) {
                        stack.push(gc_ref);
                    }
                }
            }
        }
    }

    /// Iterate over the over-approximated-stack-roots list.
    fn iter_over_approximated_stack_roots(&self) -> impl Iterator<Item = VMGcRef> + '_ {
        let mut link = (*self.over_approximated_stack_roots)
            .as_ref()
            .map(|r| r.unchecked_copy());

        core::iter::from_fn(move || {
            let r = link.as_ref()?.unchecked_copy();
            link = self.index(drc_ref(&r)).next_over_approximated_stack_root();
            Some(r)
        })
    }

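    /// Walk the GC roots on the Wasm stack and mark each one, recording the
    /// precise set of stack roots via the mark bits in the object headers.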
    fn trace(&mut self, roots: &mut GcRootsIter<'_>) {
        // The `over_approx_set` is used for `debug_assert!`s checking that
        // every reference we read out from the stack via stack maps is actually
        // in the over-approximated-stack-roots list. If that weren't true, then
        // either we forgot to insert a reference into the list when passing it
        // into Wasm (a bug) or we are reading invalid references from the stack
        // (another bug).
        let mut over_approx_set: DebugOnly<HashSet<_>> = Default::default();
        if cfg!(debug_assertions) {
            over_approx_set.extend(self.iter_over_approximated_stack_roots());
        }

        for root in roots {
            if !root.is_on_wasm_stack() {
                // We only trace on-Wasm-stack GC roots. These are the
                // GC references that we do deferred ref counting for
                // and that get inserted into our over-approximated-
                // stack-roots list. Other GC roots are managed purely
                // with naive ref counting.
                continue;
            }

            let gc_ref = root.get();

            if gc_ref.is_i31() {
                continue;
            }

            log::trace!("Found GC reference on the stack: {gc_ref:#p}");

            debug_assert!(
                over_approx_set.contains(&gc_ref),
                "every on-stack gc ref inside a Wasm frame should \
                 have been in our over-approximated stack roots set, \
                 but {gc_ref:#p} is not in the set",
            );
            debug_assert!(
                self.index(drc_ref(&gc_ref))
                    .is_in_over_approximated_stack_roots(),
                "every on-stack gc ref inside a Wasm frame should have \
                 its in-the-over-approximated-stack-roots-list bit set",
            );
            debug_assert_ne!(
                self.index_mut(drc_ref(&gc_ref)).ref_count,
                0,
                "{gc_ref:#p} is on the Wasm stack and therefore should be held \
                 alive by the over-approximated-stack-roots set; should have \
                 nonzero ref count",
            );

            self.index_mut(drc_ref(&gc_ref)).set_marked();
        }
    }

    #[inline(never)]
    #[cold]
    fn log_gc_ref_set(prefix: &str, items: impl Iterator<Item = VMGcRef>) {
        assert!(log::log_enabled!(log::Level::Trace));
        let mut set = "{".to_string();
        let mut any = false;
        for gc_ref in items {
            any = true;
            set += &format!("\n  {gc_ref:#p},");
        }
        if any {
            set.push('\n');
        }
        set.push('}');
        log::trace!("{prefix}: {set}");
    }

    /// Sweep the over-approximated-stack-roots list after we've discovered our
    /// precise stack roots.
    fn sweep(&mut self, host_data_table: &mut ExternRefHostDataTable) {
        if log::log_enabled!(log::Level::Trace) {
            Self::log_gc_ref_set(
                "over-approximated-stack-roots set before sweeping",
                self.iter_over_approximated_stack_roots(),
            );
        }

        // Logically, we are taking the difference between the
        // over-approximated-stack-roots set and the precise-stack-roots set,
        // decrementing the ref count for each object in that difference
        // (because they are no longer live on the stack), and then resetting
        // the over-approximated-stack-roots set to the precise set. In our
        // actual implementation, the over-approximated-stack-roots set is
        // implemented as an intrusive, singly-linked list in the object
        // headers, and the precise-stack-roots set is implemented via the mark
        // bits in the object headers. Therefore, we walk the
        // over-approximated-stack-roots list, checking whether each object has
        // its mark bit set.
        //
        // * If the mark bit is set, then it is in the precise-stack-roots set
        //   and is still on the stack, so we keep it in the
        //   over-approximated-stack-roots list and do not modify its ref count.
        //
        // * If the mark bit is not set, then it is not in the
        //   precise-stack-roots set and is no longer on the stack, so we remove
        //   it from the over-approximated-stack-roots set and decrement its ref
        //   count.
        //
        // We also clear the mark bits as we do this traversal.
        //
        // Finally, note that decrementing ref counts may run `Drop`
        // implementations, which may run arbitrary user code. However, because
        // of our `&mut` borrow on this heap (which ultimately comes from a
        // `&mut Store`) we're guaranteed that nothing will reentrantly touch
        // this heap or run Wasm code in this store.
        log::trace!("Begin sweeping");

        // The `VMGcRef` of the previous object in the
        // over-approximated-stack-roots list, if any.
        let mut prev = None;

        // The `VMGcRef` of the next object in the over-approximated-stack-roots
        // list, if any.
        let mut next = (*self.over_approximated_stack_roots)
            .as_ref()
            .map(|r| r.unchecked_copy());

        while let Some(gc_ref) = next {
            log::trace!("sweeping gc ref: {gc_ref:#p}");

            let header = self.index_mut(drc_ref(&gc_ref));
            debug_assert!(header.is_in_over_approximated_stack_roots());

            if header.clear_marked() {
                // This GC ref was marked, meaning it is still on the stack, so
                // keep it in the over-approximated-stack-roots list and move on
                // to the next object in the list.
                log::trace!(
                    "  -> {gc_ref:#p} is marked, leaving it in the over-approximated-\
                     stack-roots list"
                );
                next = header.next_over_approximated_stack_root();
                prev = Some(gc_ref);
                continue;
            }

            // This GC ref was not marked, meaning it is no longer on the stack,
            // so remove it from the over-approximated-stack-roots list and
            // decrement its reference count.
            log::trace!(
                "  -> {gc_ref:#p} is not marked, removing it from over-approximated-\
                 stack-roots list and decrementing its ref count"
            );
            next = header.next_over_approximated_stack_root();
            let prev_next = header.next_over_approximated_stack_root();
            header.set_in_over_approximated_stack_roots_bit(false);
            match &prev {
                None => *self.over_approximated_stack_roots = prev_next,
                Some(prev) => self
                    .index_mut(drc_ref(prev))
                    .set_next_over_approximated_stack_root(prev_next),
            }
            self.dec_ref_and_maybe_dealloc(host_data_table, &gc_ref);
        }

        log::trace!("Done sweeping");

        if log::log_enabled!(log::Level::Trace) {
            Self::log_gc_ref_set(
                "over-approximated-stack-roots set after sweeping",
                self.iter_over_approximated_stack_roots(),
            );
        }
    }
}

/// Convert the given GC reference into a typed GC reference pointing to a
/// `VMDrcHeader`.
fn drc_ref(gc_ref: &VMGcRef) -> &TypedGcRef<VMDrcHeader> {
    debug_assert!(!gc_ref.is_i31());
    gc_ref.as_typed_unchecked()
}

/// Convert a generic `externref` to a typed reference to our concrete
/// `externref` type.
fn externref_to_drc(externref: &VMExternRef) -> &TypedGcRef<VMDrcExternRef> {
    let gc_ref = externref.as_gc_ref();
    debug_assert!(!gc_ref.is_i31());
    gc_ref.as_typed_unchecked()
}

/// The common header for all objects in the DRC collector.
///
/// This adds a ref count on top of the collector-agnostic `VMGcHeader`.
///
/// This is accessed by JIT code.
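///
/// Fields, in order: the collector-agnostic `VMGcHeader`, this object's
/// reference count, the intrusive link to the next object in the
/// over-approximated-stack-roots list (if any), and this object's total size
/// in bytes.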
#[repr(C)]
struct VMDrcHeader {
    header: VMGcHeader,
    ref_count: u64,
    next_over_approximated_stack_root: Option<VMGcRef>,
    object_size: u32,
}

unsafe impl GcHeapObject for VMDrcHeader {
    #[inline]
    fn is(_header: &VMGcHeader) -> bool {
        // All DRC objects have a DRC header.
        true
    }
}

impl VMDrcHeader {
    /// The size of this header's object.
    #[inline]
    fn object_size(&self) -> usize {
        usize::try_from(self.object_size).unwrap()
    }

    /// Is this object in the over-approximated stack roots list?
    #[inline]
    fn is_in_over_approximated_stack_roots(&self) -> bool {
        self.header.reserved_u26() & wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT != 0
    }

    /// Set whether this object is in the over-approximated stack roots list.
    #[inline]
    fn set_in_over_approximated_stack_roots_bit(&mut self, bit: bool) {
        let reserved = self.header.reserved_u26();
        let new_reserved = if bit {
            reserved | wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT
        } else {
            reserved & !wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT
        };
        self.header.set_reserved_u26(new_reserved);
    }

    /// Get the next object after this one in the over-approximated-stack-roots
    /// list, if any.
    #[inline]
    fn next_over_approximated_stack_root(&self) -> Option<VMGcRef> {
        debug_assert!(self.is_in_over_approximated_stack_roots());
        self.next_over_approximated_stack_root
            .as_ref()
            .map(|r| r.unchecked_copy())
    }

    /// Set the next object after this one in the over-approximated-stack-roots
    /// list.
    #[inline]
    fn set_next_over_approximated_stack_root(&mut self, next: Option<VMGcRef>) {
        debug_assert!(self.is_in_over_approximated_stack_roots());
        self.next_over_approximated_stack_root = next;
    }

    /// Is this object marked?
    #[inline]
    fn is_marked(&self) -> bool {
        self.header.reserved_u26() & wasmtime_environ::drc::HEADER_MARK_BIT != 0
    }

    /// Mark this object, setting its mark bit whether or not it was already
    /// set.
    #[inline]
    fn set_marked(&mut self) {
        let reserved = self.header.reserved_u26();
        self.header
            .set_reserved_u26(reserved | wasmtime_environ::drc::HEADER_MARK_BIT);
    }

    /// Clear the mark bit for this object.
    ///
    /// Returns `true` if this object was marked before the mark bit was
    /// cleared.
    #[inline]
    fn clear_marked(&mut self) -> bool {
        if self.is_marked() {
            let reserved = self.header.reserved_u26();
            self.header
                .set_reserved_u26(reserved & !wasmtime_environ::drc::HEADER_MARK_BIT);
            debug_assert!(!self.is_marked());
            true
        } else {
            false
        }
    }
}

/// The common header for all arrays in the DRC collector.
#[repr(C)]
struct VMDrcArrayHeader {
    header: VMDrcHeader,
    length: u32,
}

unsafe impl GcHeapObject for VMDrcArrayHeader {
    #[inline]
    fn is(header: &VMGcHeader) -> bool {
        header.kind() == VMGcKind::ArrayRef
    }
}

/// The representation of an `externref` in the DRC collector.
#[repr(C)]
struct VMDrcExternRef {
    header: VMDrcHeader,
    host_data: ExternRefHostDataId,
}

unsafe impl GcHeapObject for VMDrcExternRef {
    #[inline]
    fn is(header: &VMGcHeader) -> bool {
        header.kind() == VMGcKind::ExternRef
    }
}

unsafe impl GcHeap for DrcHeap {
    fn is_attached(&self) -> bool {
        debug_assert_eq!(self.memory.is_some(), self.free_list.is_some());
        debug_assert_eq!(self.memory.is_some(), self.vmmemory.is_some());
        self.memory.is_some()
    }

    fn attach(&mut self, memory: crate::vm::Memory) {
        assert!(!self.is_attached());
        assert!(!memory.is_shared_memory());
        debug_assert!(self.over_approximated_stack_roots.is_none());
        let len = memory.vmmemory().current_length();
        self.free_list = Some(FreeList::new(len));
        self.vmmemory = Some(memory.vmmemory());
        self.memory = Some(memory);
    }

    fn detach(&mut self) -> crate::vm::Memory {
        assert!(self.is_attached());

        let DrcHeap {
            engine: _,
            no_gc_count,
            over_approximated_stack_roots,
            free_list,
            dec_ref_stack,
            memory,
            vmmemory,

            // NB: we will only ever be reused with the same engine, so no need
            // to clear out our tracing info just to fill it back in with the
            // same exact stuff.
            trace_infos: _,
        } = self;

        *no_gc_count = 0;
        **over_approximated_stack_roots = None;
        *free_list = None;
        *vmmemory = None;
        debug_assert!(dec_ref_stack.as_ref().is_some_and(|s| s.is_empty()));

        memory.take().unwrap()
    }

    fn as_any(&self) -> &dyn Any {
        self as _
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        self as _
    }

    fn enter_no_gc_scope(&mut self) {
        self.no_gc_count += 1;
    }

    fn exit_no_gc_scope(&mut self) {
        self.no_gc_count -= 1;
    }

    fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        self.inc_ref(gc_ref);
        gc_ref.unchecked_copy()
    }

    fn write_gc_ref(
        &mut self,
        host_data_table: &mut ExternRefHostDataTable,
        destination: &mut Option<VMGcRef>,
        source: Option<&VMGcRef>,
    ) {
        // Increment the ref count of the object being written into the slot.
        if let Some(src) = source {
            self.inc_ref(src);
        }

        // Decrement the ref count of the value being overwritten and, if
        // necessary, deallocate the GC object.
        if let Some(dest) = destination {
            self.dec_ref_and_maybe_dealloc(host_data_table, dest);
        }

        // Do the actual write.
        *destination = source.map(|s| s.unchecked_copy());
    }

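    /// Expose the given GC reference to Wasm: push it onto the head of the
    /// over-approximated-stack-roots list (unless it is already there) so that
    /// the list keeps the object alive while Wasm logically borrows it.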
    fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) {
        let header = self.index_mut(drc_ref(&gc_ref));
        if header.is_in_over_approximated_stack_roots() {
            // Already in the over-approximated-stack-roots list, nothing more
            // to do here.
            return;
        }

        // Push this object onto the head of the over-approximated-stack-roots
        // list.
        header.set_in_over_approximated_stack_roots_bit(true);
        let next = (*self.over_approximated_stack_roots)
            .as_ref()
            .map(|r| r.unchecked_copy());
        self.index_mut(drc_ref(&gc_ref))
            .set_next_over_approximated_stack_root(next);
        *self.over_approximated_stack_roots = Some(gc_ref);
    }

    fn alloc_externref(
        &mut self,
        host_data: ExternRefHostDataId,
    ) -> Result<Result<VMExternRef, u64>> {
        let gc_ref =
            match self.alloc_raw(VMGcHeader::externref(), Layout::new::<VMDrcExternRef>())? {
                Err(n) => return Ok(Err(n)),
                Ok(gc_ref) => gc_ref,
            };
        self.index_mut::<VMDrcExternRef>(gc_ref.as_typed_unchecked())
            .host_data = host_data;
        Ok(Ok(gc_ref.into_externref_unchecked()))
    }

    fn externref_host_data(&self, externref: &VMExternRef) -> ExternRefHostDataId {
        let typed_ref = externref_to_drc(externref);
        self.index(typed_ref).host_data
    }

    fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        self.index(gc_ref.as_typed_unchecked())
    }

    fn header_mut(&mut self, gc_ref: &VMGcRef) -> &mut VMGcHeader {
        self.index_mut(gc_ref.as_typed_unchecked())
    }

    fn object_size(&self, gc_ref: &VMGcRef) -> usize {
        self.index(drc_ref(gc_ref)).object_size()
    }

    fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result<Result<VMGcRef, u64>> {
        debug_assert!(layout.size() >= core::mem::size_of::<VMDrcHeader>());
        debug_assert!(layout.align() >= core::mem::align_of::<VMDrcHeader>());
        debug_assert_eq!(header.reserved_u26(), 0);

        // We must have trace info for every GC type that we allocate in this
        // heap. The only kinds of GC objects we allocate that do not have an
        // associated `VMSharedTypeIndex` are `externref`s, and they don't have
        // any GC edges.
        if let Some(ty) = header.ty() {
            self.ensure_trace_info(ty);
        } else {
            debug_assert_eq!(header.kind(), VMGcKind::ExternRef);
        }

        let object_size = u32::try_from(layout.size()).unwrap();

        let gc_ref = match self.free_list.as_mut().unwrap().alloc(layout)? {
            None => return Ok(Err(u64::try_from(layout.size()).unwrap())),
            Some(index) => VMGcRef::from_heap_index(index).unwrap(),
        };

        *self.index_mut(drc_ref(&gc_ref)) = VMDrcHeader {
            header,
            ref_count: 1,
            next_over_approximated_stack_root: None,
            object_size,
        };
        log::trace!("new object: increment {gc_ref:#p} ref count -> 1");
        Ok(Ok(gc_ref))
    }

    fn alloc_uninit_struct_or_exn(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMGcRef, u64>> {
        let kind = if layout.is_exception {
            VMGcKind::ExnRef
        } else {
            VMGcKind::StructRef
        };
        let gc_ref =
            match self.alloc_raw(VMGcHeader::from_kind_and_index(kind, ty), layout.layout())? {
                Err(n) => return Ok(Err(n)),
                Ok(gc_ref) => gc_ref,
            };

        Ok(Ok(gc_ref))
    }

    fn dealloc_uninit_struct_or_exn(&mut self, gcref: VMGcRef) {
        self.dealloc(gcref);
    }

    fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        length: u32,
        layout: &GcArrayLayout,
    ) -> Result<Result<VMArrayRef, u64>> {
        let gc_ref = match self.alloc_raw(
            VMGcHeader::from_kind_and_index(VMGcKind::ArrayRef, ty),
            layout.layout(length),
        )? {
            Err(n) => return Ok(Err(n)),
            Ok(gc_ref) => gc_ref,
        };

        self.index_mut(gc_ref.as_typed_unchecked::<VMDrcArrayHeader>())
            .length = length;

        Ok(Ok(gc_ref.into_arrayref_unchecked()))
    }

    fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
        self.dealloc(arrayref.into())
    }

    fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        debug_assert!(arrayref.as_gc_ref().is_typed::<VMDrcArrayHeader>(self));
        self.index::<VMDrcArrayHeader>(arrayref.as_gc_ref().as_typed_unchecked())
            .length
    }

    fn gc<'a>(
        &'a mut self,
        roots: GcRootsIter<'a>,
        host_data_table: &'a mut ExternRefHostDataTable,
    ) -> Box<dyn GarbageCollection<'a> + 'a> {
        assert_eq!(self.no_gc_count, 0, "Cannot GC inside a no-GC scope!");
        Box::new(DrcCollection {
            roots,
            host_data_table,
            heap: self,
            phase: DrcCollectionPhase::Trace,
        })
    }

    unsafe fn vmctx_gc_heap_data(&self) -> NonNull<u8> {
        let ptr: NonNull<Option<VMGcRef>> = NonNull::from(&*self.over_approximated_stack_roots);
        ptr.cast()
    }

    fn take_memory(&mut self) -> crate::vm::Memory {
        debug_assert!(self.is_attached());
        self.vmmemory.take();
        self.memory.take().unwrap()
    }

    unsafe fn replace_memory(&mut self, memory: crate::vm::Memory, delta_bytes_grown: u64) {
        debug_assert!(self.memory.is_none());
        debug_assert!(!memory.is_shared_memory());
        self.vmmemory = Some(memory.vmmemory());
        self.memory = Some(memory);

        self.free_list
            .as_mut()
            .unwrap()
            .add_capacity(usize::try_from(delta_bytes_grown).unwrap())
    }

    #[inline]
    fn vmmemory(&self) -> VMMemoryDefinition {
        debug_assert!(self.is_attached());
        debug_assert!(!self.memory.as_ref().unwrap().is_shared_memory());
        let vmmemory = self.vmmemory.as_ref().unwrap();
        VMMemoryDefinition {
            base: vmmemory.base,
            current_length: AtomicUsize::new(vmmemory.current_length()),
        }
    }
}

struct DrcCollection<'a> {
    roots: GcRootsIter<'a>,
    host_data_table: &'a mut ExternRefHostDataTable,
    heap: &'a mut DrcHeap,
    phase: DrcCollectionPhase,
}

enum DrcCollectionPhase {
    Trace,
    Sweep,
    Done,
}

impl<'a> GarbageCollection<'a> for DrcCollection<'a> {
    fn collect_increment(&mut self) -> GcProgress {
        match self.phase {
            DrcCollectionPhase::Trace => {
                log::trace!("Begin DRC trace");
                self.heap.trace(&mut self.roots);
                log::trace!("End DRC trace");
                self.phase = DrcCollectionPhase::Sweep;
                GcProgress::Continue
            }
            DrcCollectionPhase::Sweep => {
                log::trace!("Begin DRC sweep");
                self.heap.sweep(self.host_data_table);
                log::trace!("End DRC sweep");
                self.phase = DrcCollectionPhase::Done;
                GcProgress::Complete
            }
            DrcCollectionPhase::Done => GcProgress::Complete,
        }
    }
}

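/// A wrapper around a `T` that may only be dereferenced when
/// `debug_assertions` are enabled, and panics otherwise.
///
/// This keeps debug-only bookkeeping, like the `over_approx_set` built in
/// `trace`, from accidentally being relied upon in release builds.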
#[derive(Debug, Default)]
struct DebugOnly<T> {
    inner: T,
}

impl<T> Deref for DebugOnly<T> {
    type Target = T;

    fn deref(&self) -> &T {
        if cfg!(debug_assertions) {
            &self.inner
        } else {
            panic!(
                "only deref `DebugOnly` when `cfg(debug_assertions)` or \
                 inside a `debug_assert!(..)`"
            )
        }
    }
}

impl<T> DerefMut for DebugOnly<T> {
    fn deref_mut(&mut self) -> &mut T {
        if cfg!(debug_assertions) {
            &mut self.inner
        } else {
            panic!(
                "only deref `DebugOnly` when `cfg(debug_assertions)` or \
                 inside a `debug_assert!(..)`"
            )
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use wasmtime_environ::HostPtr;

    #[test]
    fn vm_drc_header_size_align() {
        assert_eq!(
            (wasmtime_environ::drc::HEADER_SIZE as usize),
            core::mem::size_of::<VMDrcHeader>()
        );
        assert_eq!(
            (wasmtime_environ::drc::HEADER_ALIGN as usize),
            core::mem::align_of::<VMDrcHeader>()
        );
    }

    #[test]
    fn vm_drc_array_header_length_offset() {
        assert_eq!(
            wasmtime_environ::drc::ARRAY_LENGTH_OFFSET,
            u32::try_from(core::mem::offset_of!(VMDrcArrayHeader, length)).unwrap(),
        );
    }

    #[test]
    fn ref_count_is_at_correct_offset() {
        let extern_data = VMDrcHeader {
            header: VMGcHeader::externref(),
            ref_count: 0,
            next_over_approximated_stack_root: None,
            object_size: 0,
        };

        let extern_data_ptr = &extern_data as *const _;
        let ref_count_ptr = &extern_data.ref_count as *const _;

        let actual_offset = (ref_count_ptr as usize) - (extern_data_ptr as usize);

        let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields {
            ptr: HostPtr,
            num_imported_functions: 0,
            num_imported_tables: 0,
            num_imported_memories: 0,
            num_imported_globals: 0,
            num_imported_tags: 0,
            num_defined_tables: 0,
            num_defined_memories: 0,
            num_owned_memories: 0,
            num_defined_globals: 0,
            num_defined_tags: 0,
            num_escaped_funcs: 0,
        });

        assert_eq!(
            offsets.vm_drc_header_ref_count(),
            u32::try_from(actual_offset).unwrap(),
        );
    }
}