use super::VMArrayRef;
use super::free_list::FreeList;
use crate::hash_map::HashMap;
use crate::hash_set::HashSet;
use crate::runtime::vm::{
    ExternRefHostDataId, ExternRefHostDataTable, GarbageCollection, GcHeap, GcHeapObject,
    GcProgress, GcRootsIter, GcRuntime, TypedGcRef, VMExternRef, VMGcHeader, VMGcRef,
};
use crate::vm::VMMemoryDefinition;
use crate::{Engine, EngineWeak, prelude::*};
use core::sync::atomic::AtomicUsize;
use core::{
    alloc::Layout,
    any::Any,
    mem,
    ops::{Deref, DerefMut},
    ptr::NonNull,
};
use wasmtime_environ::drc::{ARRAY_LENGTH_OFFSET, DrcTypeLayouts};
use wasmtime_environ::{
    GcArrayLayout, GcLayout, GcStructLayout, GcTypeLayouts, VMGcKind, VMSharedTypeIndex,
};

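/// The offset of a GC-ref array's elements from the start of the object:
/// the length field's offset plus the length field's size.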
#[expect(clippy::cast_possible_truncation, reason = "known to not overflow")]
const GC_REF_ARRAY_ELEMS_OFFSET: u32 = ARRAY_LENGTH_OFFSET + (mem::size_of::<u32>() as u32);

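/// The deferred reference-counting (DRC) collector.
///
/// Objects are reference counted, and an over-approximated set of stack roots
/// keeps objects that might be on the Wasm stack alive between collections.
///
/// A minimal usage sketch, assuming the public `Config::collector` API and its
/// `Collector::DeferredReferenceCounting` option (names not defined in this
/// module):
///
/// ```ignore
/// let mut config = wasmtime::Config::new();
/// config.collector(wasmtime::Collector::DeferredReferenceCounting);
/// let engine = wasmtime::Engine::new(&config)?;
/// ```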
#[derive(Default)]
pub struct DrcCollector {
    layouts: DrcTypeLayouts,
}

unsafe impl GcRuntime for DrcCollector {
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

    fn new_gc_heap(&self, engine: &Engine) -> Result<Box<dyn GcHeap>> {
        let heap = DrcHeap::new(engine)?;
        Ok(Box::new(heap) as _)
    }
}

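/// Trace info for a GC type: a precomputed description of where the GC
/// references inside objects of that type are located.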
enum TraceInfo {
    /// The type is an array.
    Array {
        /// Whether this array's elements are GC references.
        gc_ref_elems: bool,
    },

    /// The type is a struct.
    Struct {
        /// The offsets of this struct's GC reference fields.
        gc_ref_offsets: Box<[u32]>,
    },
}

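/// A deferred reference-counting (DRC) heap.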
struct DrcHeap {
    /// A weak handle to this heap's associated engine.
    engine: EngineWeak,

    /// The trace info for each GC type we have allocated in this heap.
    trace_infos: HashMap<VMSharedTypeIndex, TraceInfo>,

    /// The number of no-GC scopes we are currently within.
    no_gc_count: u64,

    /// The head of the intrusive linked list of over-approximated stack
    /// roots. Boxed so that it has a stable address we can hand out to
    /// compiled code via `vmctx_gc_heap_data`.
    over_approximated_stack_roots: Box<Option<VMGcRef>>,

    /// This heap's backing storage, when attached.
    memory: Option<crate::vm::Memory>,

    /// A cached `VMMemoryDefinition` for `self.memory`, when attached.
    vmmemory: Option<VMMemoryDefinition>,

    /// The free list that manages allocation within this heap, when attached.
    free_list: Option<FreeList>,

    /// A reusable work stack for `dec_ref_and_maybe_dealloc`, kept around to
    /// avoid repeated allocation. Always `Some` except while that method is
    /// running.
    dec_ref_stack: Option<Vec<VMGcRef>>,
}

impl DrcHeap {
    /// Construct a new, empty DRC heap.
    fn new(engine: &Engine) -> Result<Self> {
        log::trace!("allocating new DRC heap");
        Ok(Self {
            engine: engine.weak(),
            trace_infos: HashMap::with_capacity(1),
            no_gc_count: 0,
            over_approximated_stack_roots: Box::new(None),
            memory: None,
            vmmemory: None,
            free_list: None,
            dec_ref_stack: Some(Vec::with_capacity(1)),
        })
    }

    /// Get a strong handle to this heap's associated engine.
    fn engine(&self) -> Engine {
        self.engine.upgrade().unwrap()
    }

    /// Deallocate the given (already dead) object from the free list.
    fn dealloc(&mut self, gc_ref: VMGcRef) {
        let drc_ref = drc_ref(&gc_ref);
        let size = self.index(drc_ref).object_size();
        let layout = FreeList::layout(size);
        self.free_list
            .as_mut()
            .unwrap()
            .dealloc(gc_ref.as_heap_index().unwrap(), layout);
    }

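    /// Increment the ref count of the object that `gc_ref` points to.
    ///
    /// `i31` references are values, not heap objects, and are ignored.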
    fn inc_ref(&mut self, gc_ref: &VMGcRef) {
        if gc_ref.is_i31() {
            return;
        }

        let drc_ref = drc_ref(gc_ref);
        let header = self.index_mut(drc_ref);
        debug_assert_ne!(
            header.ref_count, 0,
            "{:#p} is supposedly live; should have nonzero ref count",
            *gc_ref
        );
        header.ref_count += 1;
        log::trace!("increment {:#p} ref count -> {}", *gc_ref, header.ref_count);
    }

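    /// Decrement the ref count of the object that `gc_ref` points to.
    ///
    /// Returns `true` if the ref count reached zero and the object is now
    /// dead; returns `false` otherwise. Does not trace the object's children
    /// or deallocate anything.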
    fn dec_ref(&mut self, gc_ref: &VMGcRef) -> bool {
        if gc_ref.is_i31() {
            return false;
        }

        let drc_ref = drc_ref(gc_ref);
        let header = self.index_mut(drc_ref);
        debug_assert_ne!(
            header.ref_count, 0,
            "{:#p} is supposedly live; should have nonzero ref count",
            *gc_ref
        );
        header.ref_count -= 1;
        log::trace!("decrement {:#p} ref count -> {}", *gc_ref, header.ref_count);
        header.ref_count == 0
    }

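    /// Decrement the ref count of the object that `gc_ref` points to and, if
    /// it reaches zero, deallocate the object along with everything it
    /// transitively made unreachable.
    ///
    /// Uses an explicit work stack rather than recursion, so that dropping a
    /// long chain of objects cannot overflow the native stack.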
    fn dec_ref_and_maybe_dealloc(
        &mut self,
        host_data_table: &mut ExternRefHostDataTable,
        gc_ref: &VMGcRef,
    ) {
        let mut stack = self.dec_ref_stack.take().unwrap();
        debug_assert!(stack.is_empty());
        stack.push(gc_ref.unchecked_copy());

        while let Some(gc_ref) = stack.pop() {
            if self.dec_ref(&gc_ref) {
                // The object is newly dead: enqueue a decrement for each of
                // its outgoing edges.
                self.trace_gc_ref(&gc_ref, &mut stack);

                // If this is an `externref`, deallocate its associated host
                // data as well.
                if let Some(externref) = gc_ref.as_typed::<VMDrcExternRef>(self) {
                    let host_data_id = self.index(externref).host_data;
                    host_data_table.dealloc(host_data_id);
                }

                // Finally, deallocate the object itself.
                self.dealloc(gc_ref.unchecked_copy());
            }
        }

        debug_assert!(stack.is_empty());
        debug_assert!(self.dec_ref_stack.is_none());
        self.dec_ref_stack = Some(stack);
    }

    /// Ensure that we have trace info for objects of the given type.
    fn ensure_trace_info(&mut self, ty: VMSharedTypeIndex) {
        if self.trace_infos.contains_key(&ty) {
            return;
        }

        self.insert_new_trace_info(ty);
    }

    fn insert_new_trace_info(&mut self, ty: VMSharedTypeIndex) {
        debug_assert!(!self.trace_infos.contains_key(&ty));

        let engine = self.engine();
        let gc_layout = engine
            .signatures()
            .layout(ty)
            .unwrap_or_else(|| panic!("should have a GC layout for {ty:?}"));

        let info = match gc_layout {
            GcLayout::Array(l) => {
                if l.elems_are_gc_refs {
                    debug_assert_eq!(l.elem_offset(0), GC_REF_ARRAY_ELEMS_OFFSET);
                }
                TraceInfo::Array {
                    gc_ref_elems: l.elems_are_gc_refs,
                }
            }
            GcLayout::Struct(l) => TraceInfo::Struct {
                gc_ref_offsets: l
                    .fields
                    .iter()
                    .filter_map(|f| if f.is_gc_ref { Some(f.offset) } else { None })
                    .collect(),
            },
        };

        let old_entry = self.trace_infos.insert(ty, info);
        debug_assert!(old_entry.is_none());
    }

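    /// Push every GC reference that the given object holds onto the work
    /// stack.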
    fn trace_gc_ref(&self, gc_ref: &VMGcRef, stack: &mut Vec<VMGcRef>) {
        debug_assert!(!gc_ref.is_i31());

        let header = self.header(gc_ref);
        let Some(ty) = header.ty() else {
            debug_assert!(header.kind().matches(VMGcKind::ExternRef));
            return;
        };
        match self
            .trace_infos
            .get(&ty)
            .expect("should have inserted trace info for every GC type allocated in this heap")
        {
            TraceInfo::Struct { gc_ref_offsets } => {
                stack.reserve(gc_ref_offsets.len());
                let data = self.gc_object_data(gc_ref);
                for offset in gc_ref_offsets {
                    let raw = data.read_u32(*offset);
                    if let Some(gc_ref) = VMGcRef::from_raw_u32(raw) {
                        stack.push(gc_ref);
                    }
                }
            }
            TraceInfo::Array { gc_ref_elems } => {
                if !*gc_ref_elems {
                    return;
                }

                let data = self.gc_object_data(gc_ref);
                let len = self.array_len(gc_ref.as_arrayref_unchecked());
                stack.reserve(usize::try_from(len).unwrap());
                for i in 0..len {
                    let elem_offset = GC_REF_ARRAY_ELEMS_OFFSET
                        + i * u32::try_from(mem::size_of::<u32>()).unwrap();
                    let raw = data.read_u32(elem_offset);
                    if let Some(gc_ref) = VMGcRef::from_raw_u32(raw) {
                        stack.push(gc_ref);
                    }
                }
            }
        }
    }

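    /// Iterate over the intrusive linked list of over-approximated stack
    /// roots.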
    fn iter_over_approximated_stack_roots(&self) -> impl Iterator<Item = VMGcRef> + '_ {
        let mut link = (*self.over_approximated_stack_roots)
            .as_ref()
            .map(|r| r.unchecked_copy());

        core::iter::from_fn(move || {
            let r = link.as_ref()?.unchecked_copy();
            link = self.index(drc_ref(&r)).next_over_approximated_stack_root();
            Some(r)
        })
    }

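    /// The mark phase: mark every member of the over-approximated-stack-roots
    /// set that is actually found on the Wasm stack, so that `sweep` can
    /// remove the rest.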
    fn trace(&mut self, roots: &mut GcRootsIter<'_>) {
        // In debug builds, gather the over-approximated-stack-roots set up
        // front so we can double-check its invariants below.
        let mut over_approx_set: DebugOnly<HashSet<_>> = Default::default();
        if cfg!(debug_assertions) {
            over_approx_set.extend(self.iter_over_approximated_stack_roots());
        }

        for root in roots {
            if !root.is_on_wasm_stack() {
                // Non-stack roots hold their own ref counts, keeping their
                // objects alive without any need for marking.
                continue;
            }

            let gc_ref = root.get();

            if gc_ref.is_i31() {
                continue;
            }

            log::trace!("Found GC reference on the stack: {gc_ref:#p}");

            debug_assert!(
                over_approx_set.contains(&gc_ref),
                "every on-stack gc ref inside a Wasm frame should \
                 have been in our over-approximated stack roots set, \
                 but {gc_ref:#p} is not in the set",
            );
            debug_assert!(
                self.index(drc_ref(&gc_ref))
                    .is_in_over_approximated_stack_roots(),
                "every on-stack gc ref inside a Wasm frame should have \
                 its in-the-over-approximated-stack-roots-list bit set",
            );
            debug_assert_ne!(
                self.index_mut(drc_ref(&gc_ref)).ref_count,
                0,
                "{gc_ref:#p} is on the Wasm stack and therefore should be held \
                 alive by the over-approximated-stack-roots set; should have \
                 nonzero ref count",
            );

            self.index_mut(drc_ref(&gc_ref)).set_marked();
        }
    }

    #[inline(never)]
    #[cold]
    fn log_gc_ref_set(prefix: &str, items: impl Iterator<Item = VMGcRef>) {
        assert!(log::log_enabled!(log::Level::Trace));
        let mut set = "{".to_string();
        let mut any = false;
        for gc_ref in items {
            any = true;
            set += &format!("\n {gc_ref:#p},");
        }
        if any {
            set.push('\n');
        }
        set.push('}');
        log::trace!("{prefix}: {set}");
    }

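    /// The sweep phase: walk the over-approximated-stack-roots list, unmark
    /// every object that `trace` marked, and unlink every unmarked object,
    /// decrementing its ref count (and deallocating it if the count reaches
    /// zero), since it is no longer on the Wasm stack.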
    fn sweep(&mut self, host_data_table: &mut ExternRefHostDataTable) {
        if log::log_enabled!(log::Level::Trace) {
            Self::log_gc_ref_set(
                "over-approximated-stack-roots set before sweeping",
                self.iter_over_approximated_stack_roots(),
            );
        }

        log::trace!("Begin sweeping");

        // The previous list entry, if any, so that we can unlink entries.
        let mut prev = None;

        // The next list entry to process.
        let mut next = (*self.over_approximated_stack_roots)
            .as_ref()
            .map(|r| r.unchecked_copy());

        while let Some(gc_ref) = next {
            log::trace!("sweeping gc ref: {gc_ref:#p}");

            let header = self.index_mut(drc_ref(&gc_ref));
            debug_assert!(header.is_in_over_approximated_stack_roots());

            if header.clear_marked() {
                log::trace!(
                    " -> {gc_ref:#p} is marked, leaving it in the over-approximated-\
                     stack-roots list"
                );
                next = header.next_over_approximated_stack_root();
                prev = Some(gc_ref);
                continue;
            }

            log::trace!(
                " -> {gc_ref:#p} is not marked, removing it from over-approximated-\
                 stack-roots list and decrementing its ref count"
            );
            next = header.next_over_approximated_stack_root();
            let prev_next = header.next_over_approximated_stack_root();
            header.set_in_over_approximated_stack_roots_bit(false);
            // Unlink this entry from the list.
            match &prev {
                None => *self.over_approximated_stack_roots = prev_next,
                Some(prev) => self
                    .index_mut(drc_ref(prev))
                    .set_next_over_approximated_stack_root(prev_next),
            }
            self.dec_ref_and_maybe_dealloc(host_data_table, &gc_ref);
        }

        log::trace!("Done sweeping");

        if log::log_enabled!(log::Level::Trace) {
            Self::log_gc_ref_set(
                "over-approximated-stack-roots set after sweeping",
                self.iter_over_approximated_stack_roots(),
            );
        }
    }
}

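/// Convert the given GC reference into a typed reference to our DRC header.
///
/// The reference must not be an `i31`, since `i31`s are not heap objects and
/// have no header.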
fn drc_ref(gc_ref: &VMGcRef) -> &TypedGcRef<VMDrcHeader> {
    debug_assert!(!gc_ref.is_i31());
    gc_ref.as_typed_unchecked()
}

/// Convert a `VMExternRef` into a typed reference to its DRC representation.
fn externref_to_drc(externref: &VMExternRef) -> &TypedGcRef<VMDrcExternRef> {
    let gc_ref = externref.as_gc_ref();
    debug_assert!(!gc_ref.is_i31());
    gc_ref.as_typed_unchecked()
}

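/// The header that precedes every object in a DRC heap.
///
/// Its size, alignment, and the offset of `ref_count` must match the
/// `wasmtime_environ::drc` constants that compiled Wasm code relies upon; the
/// tests at the bottom of this module check those invariants.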
#[repr(C)]
struct VMDrcHeader {
    header: VMGcHeader,
    ref_count: u64,
    next_over_approximated_stack_root: Option<VMGcRef>,
    object_size: u32,
}

unsafe impl GcHeapObject for VMDrcHeader {
    #[inline]
    fn is(_header: &VMGcHeader) -> bool {
        // All objects in a DRC heap have a `VMDrcHeader`.
        true
    }
}

impl VMDrcHeader {
    /// The size of this header's object, in bytes.
    #[inline]
    fn object_size(&self) -> usize {
        usize::try_from(self.object_size).unwrap()
    }

    /// Is this object a member of the over-approximated-stack-roots list?
    #[inline]
    fn is_in_over_approximated_stack_roots(&self) -> bool {
        self.header.reserved_u26() & wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT != 0
    }

    /// Set or clear this object's list-membership bit.
    #[inline]
    fn set_in_over_approximated_stack_roots_bit(&mut self, bit: bool) {
        let reserved = self.header.reserved_u26();
        let new_reserved = if bit {
            reserved | wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT
        } else {
            reserved & !wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT
        };
        self.header.set_reserved_u26(new_reserved);
    }

    /// Get the next entry in the over-approximated-stack-roots list.
    #[inline]
    fn next_over_approximated_stack_root(&self) -> Option<VMGcRef> {
        debug_assert!(self.is_in_over_approximated_stack_roots());
        self.next_over_approximated_stack_root
            .as_ref()
            .map(|r| r.unchecked_copy())
    }

    /// Set the next entry in the over-approximated-stack-roots list.
    #[inline]
    fn set_next_over_approximated_stack_root(&mut self, next: Option<VMGcRef>) {
        debug_assert!(self.is_in_over_approximated_stack_roots());
        self.next_over_approximated_stack_root = next;
    }

    /// Is this object's mark bit set?
    #[inline]
    fn is_marked(&self) -> bool {
        self.header.reserved_u26() & wasmtime_environ::drc::HEADER_MARK_BIT != 0
    }

    /// Set this object's mark bit.
    #[inline]
    fn set_marked(&mut self) {
        let reserved = self.header.reserved_u26();
        self.header
            .set_reserved_u26(reserved | wasmtime_environ::drc::HEADER_MARK_BIT);
    }

    /// Clear this object's mark bit, returning whether it was previously set.
    #[inline]
    fn clear_marked(&mut self) -> bool {
        if self.is_marked() {
            let reserved = self.header.reserved_u26();
            self.header
                .set_reserved_u26(reserved & !wasmtime_environ::drc::HEADER_MARK_BIT);
            debug_assert!(!self.is_marked());
            true
        } else {
            false
        }
    }
}

/// The common header for all DRC array objects.
#[repr(C)]
struct VMDrcArrayHeader {
    header: VMDrcHeader,
    length: u32,
}

unsafe impl GcHeapObject for VMDrcArrayHeader {
    #[inline]
    fn is(header: &VMGcHeader) -> bool {
        header.kind() == VMGcKind::ArrayRef
    }
}

/// The representation of an `externref` in a DRC heap.
#[repr(C)]
struct VMDrcExternRef {
    header: VMDrcHeader,
    host_data: ExternRefHostDataId,
}

unsafe impl GcHeapObject for VMDrcExternRef {
    #[inline]
    fn is(header: &VMGcHeader) -> bool {
        header.kind() == VMGcKind::ExternRef
    }
}

unsafe impl GcHeap for DrcHeap {
    fn is_attached(&self) -> bool {
        debug_assert_eq!(self.memory.is_some(), self.free_list.is_some());
        debug_assert_eq!(self.memory.is_some(), self.vmmemory.is_some());
        self.memory.is_some()
    }

    fn attach(&mut self, memory: crate::vm::Memory) {
        assert!(!self.is_attached());
        assert!(!memory.is_shared_memory());
        debug_assert!(self.over_approximated_stack_roots.is_none());
        let len = memory.vmmemory().current_length();
        self.free_list = Some(FreeList::new(len));
        self.vmmemory = Some(memory.vmmemory());
        self.memory = Some(memory);
    }

    fn detach(&mut self) -> crate::vm::Memory {
        assert!(self.is_attached());

        let DrcHeap {
            engine: _,
            no_gc_count,
            over_approximated_stack_roots,
            free_list,
            dec_ref_stack,
            memory,
            vmmemory,
            trace_infos: _,
        } = self;

        *no_gc_count = 0;
        **over_approximated_stack_roots = None;
        *free_list = None;
        *vmmemory = None;
        debug_assert!(dec_ref_stack.as_ref().is_some_and(|s| s.is_empty()));

        memory.take().unwrap()
    }

    fn as_any(&self) -> &dyn Any {
        self as _
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        self as _
    }

    fn enter_no_gc_scope(&mut self) {
        self.no_gc_count += 1;
    }

    fn exit_no_gc_scope(&mut self) {
        self.no_gc_count -= 1;
    }

    fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
        self.inc_ref(gc_ref);
        gc_ref.unchecked_copy()
    }

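    // Note the ordering below: the source's ref count is incremented before
    // the destination's is decremented, so that writing a reference over
    // itself does not transiently drop its count to zero and free a
    // still-live object.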
    fn write_gc_ref(
        &mut self,
        host_data_table: &mut ExternRefHostDataTable,
        destination: &mut Option<VMGcRef>,
        source: Option<&VMGcRef>,
    ) {
        // Increment the ref count of the object being written into the slot.
        if let Some(src) = source {
            self.inc_ref(src);
        }

        // Decrement the ref count of the object being overwritten, possibly
        // deallocating it.
        if let Some(dest) = destination {
            self.dec_ref_and_maybe_dealloc(host_data_table, dest);
        }

        *destination = source.map(|s| s.unchecked_copy());
    }

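    // Insert the given GC reference into the over-approximated-stack-roots
    // set, since it is about to become visible to Wasm and may end up on the
    // Wasm stack.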
    fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) {
        let header = self.index_mut(drc_ref(&gc_ref));
        if header.is_in_over_approximated_stack_roots() {
            // Already a member of the set; nothing to do.
            return;
        }

        // Push the object onto the front of the list.
        header.set_in_over_approximated_stack_roots_bit(true);
        let next = (*self.over_approximated_stack_roots)
            .as_ref()
            .map(|r| r.unchecked_copy());
        self.index_mut(drc_ref(&gc_ref))
            .set_next_over_approximated_stack_root(next);
        *self.over_approximated_stack_roots = Some(gc_ref);
    }

    fn alloc_externref(
        &mut self,
        host_data: ExternRefHostDataId,
    ) -> Result<Result<VMExternRef, u64>> {
        let gc_ref =
            match self.alloc_raw(VMGcHeader::externref(), Layout::new::<VMDrcExternRef>())? {
                Err(n) => return Ok(Err(n)),
                Ok(gc_ref) => gc_ref,
            };
        self.index_mut::<VMDrcExternRef>(gc_ref.as_typed_unchecked())
            .host_data = host_data;
        Ok(Ok(gc_ref.into_externref_unchecked()))
    }

    fn externref_host_data(&self, externref: &VMExternRef) -> ExternRefHostDataId {
        let typed_ref = externref_to_drc(externref);
        self.index(typed_ref).host_data
    }

    fn header(&self, gc_ref: &VMGcRef) -> &VMGcHeader {
        self.index(gc_ref.as_typed_unchecked())
    }

    fn header_mut(&mut self, gc_ref: &VMGcRef) -> &mut VMGcHeader {
        self.index_mut(gc_ref.as_typed_unchecked())
    }

    fn object_size(&self, gc_ref: &VMGcRef) -> usize {
        self.index(drc_ref(gc_ref)).object_size()
    }

    fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result<Result<VMGcRef, u64>> {
        debug_assert!(layout.size() >= core::mem::size_of::<VMDrcHeader>());
        debug_assert!(layout.align() >= core::mem::align_of::<VMDrcHeader>());
        debug_assert_eq!(header.reserved_u26(), 0);

        // Make sure we have trace info for this object's type before we
        // allocate any objects of that type.
        if let Some(ty) = header.ty() {
            self.ensure_trace_info(ty);
        } else {
            debug_assert_eq!(header.kind(), VMGcKind::ExternRef);
        }

        let object_size = u32::try_from(layout.size()).unwrap();

        let gc_ref = match self.free_list.as_mut().unwrap().alloc(layout)? {
            None => return Ok(Err(u64::try_from(layout.size()).unwrap())),
            Some(index) => VMGcRef::from_heap_index(index).unwrap(),
        };

        *self.index_mut(drc_ref(&gc_ref)) = VMDrcHeader {
            header,
            ref_count: 1,
            next_over_approximated_stack_root: None,
            object_size,
        };
        log::trace!("new object: increment {gc_ref:#p} ref count -> 1");
        Ok(Ok(gc_ref))
    }

    fn alloc_uninit_struct_or_exn(
        &mut self,
        ty: VMSharedTypeIndex,
        layout: &GcStructLayout,
    ) -> Result<Result<VMGcRef, u64>> {
        let kind = if layout.is_exception {
            VMGcKind::ExnRef
        } else {
            VMGcKind::StructRef
        };
        let gc_ref =
            match self.alloc_raw(VMGcHeader::from_kind_and_index(kind, ty), layout.layout())? {
                Err(n) => return Ok(Err(n)),
                Ok(gc_ref) => gc_ref,
            };

        Ok(Ok(gc_ref))
    }

    fn dealloc_uninit_struct_or_exn(&mut self, gcref: VMGcRef) {
        self.dealloc(gcref);
    }

    fn alloc_uninit_array(
        &mut self,
        ty: VMSharedTypeIndex,
        length: u32,
        layout: &GcArrayLayout,
    ) -> Result<Result<VMArrayRef, u64>> {
        let gc_ref = match self.alloc_raw(
            VMGcHeader::from_kind_and_index(VMGcKind::ArrayRef, ty),
            layout.layout(length),
        )? {
            Err(n) => return Ok(Err(n)),
            Ok(gc_ref) => gc_ref,
        };

        self.index_mut(gc_ref.as_typed_unchecked::<VMDrcArrayHeader>())
            .length = length;

        Ok(Ok(gc_ref.into_arrayref_unchecked()))
    }

    fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) {
        self.dealloc(arrayref.into())
    }

    fn array_len(&self, arrayref: &VMArrayRef) -> u32 {
        debug_assert!(arrayref.as_gc_ref().is_typed::<VMDrcArrayHeader>(self));
        self.index::<VMDrcArrayHeader>(arrayref.as_gc_ref().as_typed_unchecked())
            .length
    }

    fn gc<'a>(
        &'a mut self,
        roots: GcRootsIter<'a>,
        host_data_table: &'a mut ExternRefHostDataTable,
    ) -> Box<dyn GarbageCollection<'a> + 'a> {
        assert_eq!(self.no_gc_count, 0, "Cannot GC inside a no-GC scope!");
        Box::new(DrcCollection {
            roots,
            host_data_table,
            heap: self,
            phase: DrcCollectionPhase::Trace,
        })
    }

    unsafe fn vmctx_gc_heap_data(&self) -> NonNull<u8> {
        let ptr: NonNull<Option<VMGcRef>> = NonNull::from(&*self.over_approximated_stack_roots);
        ptr.cast()
    }

    fn take_memory(&mut self) -> crate::vm::Memory {
        debug_assert!(self.is_attached());
        self.vmmemory.take();
        self.memory.take().unwrap()
    }

    unsafe fn replace_memory(&mut self, memory: crate::vm::Memory, delta_bytes_grown: u64) {
        debug_assert!(self.memory.is_none());
        debug_assert!(!memory.is_shared_memory());
        self.vmmemory = Some(memory.vmmemory());
        self.memory = Some(memory);

        self.free_list
            .as_mut()
            .unwrap()
            .add_capacity(usize::try_from(delta_bytes_grown).unwrap())
    }

    #[inline]
    fn vmmemory(&self) -> VMMemoryDefinition {
        debug_assert!(self.is_attached());
        debug_assert!(!self.memory.as_ref().unwrap().is_shared_memory());
        let vmmemory = self.vmmemory.as_ref().unwrap();
        VMMemoryDefinition {
            base: vmmemory.base,
            current_length: AtomicUsize::new(vmmemory.current_length()),
        }
    }
}

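/// An in-progress garbage collection of a `DrcHeap`, performed incrementally
/// in two phases: trace, then sweep.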
struct DrcCollection<'a> {
    roots: GcRootsIter<'a>,
    host_data_table: &'a mut ExternRefHostDataTable,
    heap: &'a mut DrcHeap,
    phase: DrcCollectionPhase,
}

enum DrcCollectionPhase {
    Trace,
    Sweep,
    Done,
}

impl<'a> GarbageCollection<'a> for DrcCollection<'a> {
    fn collect_increment(&mut self) -> GcProgress {
        match self.phase {
            DrcCollectionPhase::Trace => {
                log::trace!("Begin DRC trace");
                self.heap.trace(&mut self.roots);
                log::trace!("End DRC trace");
                self.phase = DrcCollectionPhase::Sweep;
                GcProgress::Continue
            }
            DrcCollectionPhase::Sweep => {
                log::trace!("Begin DRC sweep");
                self.heap.sweep(self.host_data_table);
                log::trace!("End DRC sweep");
                self.phase = DrcCollectionPhase::Done;
                GcProgress::Complete
            }
            DrcCollectionPhase::Done => GcProgress::Complete,
        }
    }
}

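/// A wrapper around `T` that panics if dereferenced outside of debug builds,
/// so that debug-only state cannot silently leak into release-mode logic.
///
/// A minimal usage sketch (the `HashSet` use mirrors `DrcHeap::trace` above):
///
/// ```ignore
/// let mut seen: DebugOnly<HashSet<u32>> = Default::default();
/// if cfg!(debug_assertions) {
///     seen.insert(42);
/// }
/// debug_assert!(seen.contains(&42));
/// ```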
#[derive(Debug, Default)]
struct DebugOnly<T> {
    inner: T,
}

impl<T> Deref for DebugOnly<T> {
    type Target = T;

    fn deref(&self) -> &T {
        if cfg!(debug_assertions) {
            &self.inner
        } else {
            panic!(
                "only deref `DebugOnly` when `cfg(debug_assertions)` or \
                 inside a `debug_assert!(..)`"
            )
        }
    }
}

impl<T> DerefMut for DebugOnly<T> {
    fn deref_mut(&mut self) -> &mut T {
        if cfg!(debug_assertions) {
            &mut self.inner
        } else {
            panic!(
                "only deref `DebugOnly` when `cfg(debug_assertions)` or \
                 inside a `debug_assert!(..)`"
            )
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use wasmtime_environ::HostPtr;

    #[test]
    fn vm_drc_header_size_align() {
        assert_eq!(
            (wasmtime_environ::drc::HEADER_SIZE as usize),
            core::mem::size_of::<VMDrcHeader>()
        );
        assert_eq!(
            (wasmtime_environ::drc::HEADER_ALIGN as usize),
            core::mem::align_of::<VMDrcHeader>()
        );
    }

    #[test]
    fn vm_drc_array_header_length_offset() {
        assert_eq!(
            wasmtime_environ::drc::ARRAY_LENGTH_OFFSET,
            u32::try_from(core::mem::offset_of!(VMDrcArrayHeader, length)).unwrap(),
        );
    }

    #[test]
    fn ref_count_is_at_correct_offset() {
        let extern_data = VMDrcHeader {
            header: VMGcHeader::externref(),
            ref_count: 0,
            next_over_approximated_stack_root: None,
            object_size: 0,
        };

        let extern_data_ptr = &extern_data as *const _;
        let ref_count_ptr = &extern_data.ref_count as *const _;

        let actual_offset = (ref_count_ptr as usize) - (extern_data_ptr as usize);

        let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields {
            ptr: HostPtr,
            num_imported_functions: 0,
            num_imported_tables: 0,
            num_imported_memories: 0,
            num_imported_globals: 0,
            num_imported_tags: 0,
            num_defined_tables: 0,
            num_defined_memories: 0,
            num_owned_memories: 0,
            num_defined_globals: 0,
            num_defined_tags: 0,
            num_escaped_funcs: 0,
        });

        assert_eq!(
            offsets.vm_drc_header_ref_count(),
            u32::try_from(actual_offset).unwrap(),
        );
    }
}
1070}