use super::*;
use crate::translate::TargetEnvironment;
use crate::{TRAP_INTERNAL_ASSERT, func_environ::FuncEnvironment};
use cranelift_codegen::ir::condcodes::IntCC;
use cranelift_codegen::ir::{self, InstBuilder};
use cranelift_frontend::FunctionBuilder;
use smallvec::SmallVec;
use wasmtime_environ::drc::{EXCEPTION_TAG_DEFINED_OFFSET, EXCEPTION_TAG_INSTANCE_OFFSET};
use wasmtime_environ::{
    GcTypeLayouts, ModuleInternedTypeIndex, PtrSize, TagIndex, TypeIndex, VMGcKind,
    WasmHeapTopType, WasmHeapType, WasmRefType, WasmResult, WasmStorageType, WasmValType,
    drc::DrcTypeLayouts,
};

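/// A GC compiler implementation for the deferred reference-counting (DRC)
/// collector.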
#[derive(Default)]
pub struct DrcCompiler {
    layouts: DrcTypeLayouts,
}

impl DrcCompiler {
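    /// Generate code to load the given GC reference's ref count field.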
    fn load_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
    ) -> ir::Value {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset,
                access_size: u8::try_from(ir::types::I64.bytes()).unwrap(),
            },
        );
        builder
            .ins()
            .load(ir::types::I64, ir::MemFlags::trusted(), pointer, 0)
    }

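    /// Generate code to store a new ref count into the given GC reference's
    /// ref count field.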
    fn store_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        new_ref_count: ir::Value,
    ) {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset,
                access_size: u8::try_from(ir::types::I64.bytes()).unwrap(),
            },
        );
        builder
            .ins()
            .store(ir::MemFlags::trusted(), new_ref_count, pointer, 0);
    }

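    /// Generate code to increment or decrement the given GC reference's ref
    /// count by `delta` (which must be `1` or `-1`), returning the new count.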
    fn mutate_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        delta: i64,
    ) -> ir::Value {
        debug_assert!(delta == -1 || delta == 1);
        let old_ref_count = self.load_ref_count(func_env, builder, gc_ref);
        let new_ref_count = builder.ins().iadd_imm(old_ref_count, delta);
        self.store_ref_count(func_env, builder, gc_ref, new_ref_count);
        new_ref_count
    }

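    /// Push `gc_ref` onto the front of the over-approximated-stack-roots
    /// list, setting its in-list header bit and incrementing its ref count to
    /// account for the list's reference to the object.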
    fn push_onto_over_approximated_stack_roots(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        reserved: ir::Value,
    ) {
        debug_assert_eq!(builder.func.dfg.value_type(gc_ref), ir::types::I32);
        debug_assert_eq!(builder.func.dfg.value_type(reserved), ir::types::I32);

        let head = self.load_over_approximated_stack_roots_head(func_env, builder);

        // Load the current first element of the list; it will become this
        // object's next element.
        let next = builder
            .ins()
            .load(ir::types::I32, ir::MemFlags::trusted(), head, 0);

        self.set_next_over_approximated_stack_root(func_env, builder, gc_ref, next);
        self.set_in_over_approximated_stack_roots_bit(func_env, builder, gc_ref, reserved);

        // The list now holds a reference to the object, so bump its count.
        self.mutate_ref_count(func_env, builder, gc_ref, 1);

        // Commit this object as the new head of the list.
        builder
            .ins()
            .store(ir::MemFlags::trusted(), gc_ref, head, 0);
    }

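    /// Load a pointer to the head of the DRC heap's over-approximated list of
    /// stack roots out of the `VMContext`'s GC heap data.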
    fn load_over_approximated_stack_roots_head(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
    ) -> ir::Value {
        let ptr_ty = func_env.pointer_type();
        let vmctx = func_env.vmctx(&mut builder.func);
        let vmctx = builder.ins().global_value(ptr_ty, vmctx);
        builder.ins().load(
            ptr_ty,
            ir::MemFlags::trusted().with_readonly(),
            vmctx,
            i32::from(func_env.offsets.ptr.vmctx_gc_heap_data()),
        )
    }

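    /// Set the next-element link in `gc_ref`'s DRC header for the
    /// over-approximated-stack-roots list.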
    fn set_next_over_approximated_stack_root(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        next: ir::Value,
    ) {
        debug_assert_eq!(builder.func.dfg.value_type(gc_ref), ir::types::I32);
        debug_assert_eq!(builder.func.dfg.value_type(next), ir::types::I32);
        let ptr = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset: func_env
                    .offsets
                    .vm_drc_header_next_over_approximated_stack_root(),
                access_size: u8::try_from(ir::types::I32.bytes()).unwrap(),
            },
        );
        builder.ins().store(ir::MemFlags::trusted(), next, ptr, 0);
    }

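    /// Set the header bit marking `gc_ref` as a member of the
    /// over-approximated-stack-roots list, given its current reserved bits.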
    fn set_in_over_approximated_stack_roots_bit(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        old_reserved_bits: ir::Value,
    ) {
        let in_set_bit = builder.ins().iconst(
            ir::types::I32,
            i64::from(wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT),
        );
        let new_reserved = builder.ins().bor(old_reserved_bits, in_set_bit);
        self.set_reserved_bits(func_env, builder, gc_ref, new_reserved);
    }

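    /// Overwrite the reserved bits in the given GC reference's object header.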
    fn set_reserved_bits(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        new_reserved: ir::Value,
    ) {
        let ptr = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset: func_env.offsets.vm_gc_header_reserved_bits(),
                access_size: u8::try_from(ir::types::I32.bytes()).unwrap(),
            },
        );
        builder
            .ins()
            .store(ir::MemFlags::trusted(), new_reserved, ptr, 0);
    }

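    /// Initialize the field at `field_addr` with `val`, using the
    /// initialization barrier appropriate for the field's storage type.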
    fn init_field(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        field_addr: ir::Value,
        ty: WasmStorageType,
        val: ir::Value,
    ) -> WasmResult<()> {
        // Data inside GC objects is always little endian.
        let flags = ir::MemFlags::trusted().with_endianness(ir::Endianness::Little);

        match ty {
            WasmStorageType::Val(WasmValType::Ref(r))
                if r.heap_type.top() == WasmHeapTopType::Func =>
            {
                write_func_ref_at_addr(func_env, builder, r, flags, field_addr, val)?;
            }
            WasmStorageType::Val(WasmValType::Ref(r)) => {
                self.translate_init_gc_reference(func_env, builder, r, field_addr, val, flags)?;
            }
            WasmStorageType::I8 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore8(flags, val, field_addr, 0);
            }
            WasmStorageType::I16 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore16(flags, val, field_addr, 0);
            }
            WasmStorageType::Val(_) => {
                let size_of_access = wasmtime_environ::byte_size_of_wasm_ty_in_gc_heap(&ty);
                assert_eq!(builder.func.dfg.value_type(val).bytes(), size_of_access);
                builder.ins().store(flags, val, field_addr, 0);
            }
        }

        Ok(())
    }

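    /// Initialization barrier for writing a GC reference into an
    /// uninitialized slot: increments the new value's ref count (when it is a
    /// non-null, non-i31 reference) and stores it. There is no old value to
    /// decrement, so this is cheaper than a full write barrier.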
    fn translate_init_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        if let WasmHeapType::None = ty.heap_type {
            // Null is the only value of type `(ref null none)`, and
            // non-nullable `(ref none)` is uninhabited, so that arm must be
            // unreachable: trap unconditionally.
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

        if let WasmHeapType::I31 = ty.heap_type {
            // `i31ref`s are not managed by the GC, so they need no barriers.
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

        let current_block = builder.current_block().unwrap();
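        // The initialization barrier: if the new value is a non-null, non-i31
        // GC reference, increment its ref count before storing it into the
        // field. Unlike the full write barrier in
        // `translate_write_gc_reference`, there is no old value to decrement.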
        let inc_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(continue_block, inc_ref_block);

        log::trace!("DRC initialization barrier: check if the value is null or i31");
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            continue_block,
            &[],
            inc_ref_block,
            &[],
        );

        builder.switch_to_block(inc_ref_block);
        builder.seal_block(inc_ref_block);
        log::trace!("DRC initialization barrier: increment the ref count of the initial value");
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!(
            "DRC initialization barrier: finally, store into {dst:?} to initialize the field"
        );
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;

        Ok(())
    }
}

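/// Emit a call to the `gc_alloc_raw` builtin to allocate a raw, uninitialized
/// GC object of the given kind, type, size, and alignment, returning its GC
/// reference (which is declared as needing a stack map entry).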
fn emit_gc_raw_alloc(
    func_env: &mut FuncEnvironment<'_>,
    builder: &mut FunctionBuilder<'_>,
    kind: VMGcKind,
    ty: ModuleInternedTypeIndex,
    size: ir::Value,
    align: u32,
) -> ir::Value {
    let gc_alloc_raw_builtin = func_env.builtin_functions.gc_alloc_raw(builder.func);
    let vmctx = func_env.vmctx_val(&mut builder.cursor());

    let kind = builder
        .ins()
        .iconst(ir::types::I32, i64::from(kind.as_u32()));

    let ty = builder.ins().iconst(ir::types::I32, i64::from(ty.as_u32()));

    assert!(align.is_power_of_two());
    let align = builder.ins().iconst(ir::types::I32, i64::from(align));

    let call_inst = builder
        .ins()
        .call(gc_alloc_raw_builtin, &[vmctx, kind, ty, size, align]);

    let gc_ref = builder.func.dfg.first_result(call_inst);
    builder.declare_value_needs_stack_map(gc_ref);
    gc_ref
}

impl GcCompiler for DrcCompiler {
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

    fn alloc_array(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        array_type_index: TypeIndex,
        init: super::ArrayInit<'_>,
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[array_type_index].unwrap_module_type_index();
        let ptr_ty = func_env.pointer_type();

        let len_offset = gc_compiler(func_env)?.layouts().array_length_field_offset();
        let array_layout = func_env.array_layout(interned_type_index).clone();
        let base_size = array_layout.base_size;
        let align = array_layout.align;
        let len_to_elems_delta = base_size.checked_sub(len_offset).unwrap();

        // First, compute the array's total size from its layout and length.
        let len = init.len(&mut builder.cursor());
        let size = emit_array_size(func_env, builder, &array_layout, len);

        // Next, allocate the raw, uninitialized array object.
        let array_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::ArrayRef,
            interned_type_index,
            size,
            align,
        );

        // Write the array's length into its length field.
        let base = func_env.get_gc_heap_base(builder);
        let extended_array_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), array_ref);
        let object_addr = builder.ins().iadd(base, extended_array_ref);
        let len_addr = builder.ins().iadd_imm(object_addr, i64::from(len_offset));
        let len = init.len(&mut builder.cursor());
        builder
            .ins()
            .store(ir::MemFlags::trusted(), len, len_addr, 0);

        // Finally, initialize each of the array's elements.
        let len_to_elems_delta = builder.ins().iconst(ptr_ty, i64::from(len_to_elems_delta));
        let elems_addr = builder.ins().iadd(len_addr, len_to_elems_delta);
        init.initialize(
            func_env,
            builder,
            interned_type_index,
            base_size,
            size,
            elems_addr,
            |func_env, builder, elem_ty, elem_addr, val| {
                self.init_field(func_env, builder, elem_addr, elem_ty, val)
            },
        )?;
        Ok(array_ref)
    }

    fn alloc_struct(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        struct_type_index: TypeIndex,
        field_vals: &[ir::Value],
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[struct_type_index].unwrap_module_type_index();
        let struct_layout = func_env.struct_or_exn_layout(interned_type_index);

        // Copy the struct's layout out to local variables to avoid borrowing
        // issues further below.
        let struct_size = struct_layout.size;
        let struct_align = struct_layout.align;
        let field_offsets: SmallVec<[_; 8]> = struct_layout.fields.iter().copied().collect();
        assert_eq!(field_vals.len(), field_offsets.len());

        let struct_size_val = builder.ins().iconst(ir::types::I32, i64::from(struct_size));

        let struct_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::StructRef,
            interned_type_index,
            struct_size_val,
            struct_align,
        );

        // Initialize the newly allocated struct's fields.
        let base = func_env.get_gc_heap_base(builder);
        let extended_struct_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), struct_ref);
        let raw_ptr_to_struct = builder.ins().iadd(base, extended_struct_ref);
        initialize_struct_fields(
            func_env,
            builder,
            interned_type_index,
            raw_ptr_to_struct,
            field_vals,
            |func_env, builder, ty, field_addr, val| {
                self.init_field(func_env, builder, field_addr, ty, val)
            },
        )?;

        Ok(struct_ref)
    }

    fn alloc_exn(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        tag_index: TagIndex,
        field_vals: &[ir::Value],
        instance_id: ir::Value,
        tag: ir::Value,
    ) -> WasmResult<ir::Value> {
        let interned_type_index = func_env.module.tags[tag_index]
            .exception
            .unwrap_module_type_index();
        let exn_layout = func_env.struct_or_exn_layout(interned_type_index);

        // Copy the exception object's layout out to local variables to avoid
        // borrowing issues further below.
        let exn_size = exn_layout.size;
        let exn_align = exn_layout.align;
        let field_offsets: SmallVec<[_; 8]> = exn_layout.fields.iter().copied().collect();
        assert_eq!(field_vals.len(), field_offsets.len());

        let exn_size_val = builder.ins().iconst(ir::types::I32, i64::from(exn_size));

        let exn_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::ExnRef,
            interned_type_index,
            exn_size_val,
            exn_align,
        );

        // Initialize the newly allocated exception object's payload fields.
        let base = func_env.get_gc_heap_base(builder);
        let extended_exn_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), exn_ref);
        let raw_ptr_to_exn = builder.ins().iadd(base, extended_exn_ref);
        initialize_struct_fields(
            func_env,
            builder,
            interned_type_index,
            raw_ptr_to_exn,
            field_vals,
            |func_env, builder, ty, field_addr, val| {
                self.init_field(func_env, builder, field_addr, ty, val)
            },
        )?;

        // Record the tag this exception was created with: the defining
        // instance's ID and the tag within that instance.
        let instance_id_addr = builder
            .ins()
            .iadd_imm(raw_ptr_to_exn, i64::from(EXCEPTION_TAG_INSTANCE_OFFSET));
        self.init_field(
            func_env,
            builder,
            instance_id_addr,
            WasmStorageType::Val(WasmValType::I32),
            instance_id,
        )?;
        let tag_addr = builder
            .ins()
            .iadd_imm(raw_ptr_to_exn, i64::from(EXCEPTION_TAG_DEFINED_OFFSET));
        self.init_field(
            func_env,
            builder,
            tag_addr,
            WasmStorageType::Val(WasmValType::I32),
            tag,
        )?;

        Ok(exn_ref)
    }

    fn translate_read_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        src: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<ir::Value> {
        log::trace!("translate_read_gc_reference({ty:?}, {src:?}, {flags:?})");

        assert!(ty.is_vmgcref_type());

        let (reference_type, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        if let WasmHeapType::None = ty.heap_type {
            // Null is the only value a read of `none`-typed storage can
            // produce.
            let null = builder.ins().iconst(reference_type, 0);

            // If the flags mark this access as trapping, emit the load anyway
            // for its side effect of potentially trapping, even though the
            // loaded value itself is unused.
            if flags.trap_code().is_some() {
                let _ = builder.ins().load(reference_type, flags, src, 0);
            }

            if !ty.nullable {
                // Non-nullable `(ref none)` is uninhabited, so this code must
                // be unreachable: trap unconditionally via `trapz` on a
                // constant zero.
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }

            return Ok(null);
        };

        if let WasmHeapType::I31 = ty.heap_type {
            // `i31ref`s are not managed by the GC, so they need no barriers.
            return unbarriered_load_gc_ref(builder, ty.heap_type, src, flags);
        }

        let current_block = builder.current_block().unwrap();
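        // The DRC read barrier: when a non-null, non-i31 GC reference is read
        // out of the heap, it may afterwards live only in registers or on the
        // stack, so it must be recorded in the collector's over-approximated
        // list of stack roots to be treated as live. If the object's header
        // bit says it is already in that list, the insertion is skipped.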
        let non_null_gc_ref_block = builder.create_block();
        let insert_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.insert_block_after(non_null_gc_ref_block, current_block);
        builder.insert_block_after(insert_block, non_null_gc_ref_block);
        builder.insert_block_after(continue_block, insert_block);

        log::trace!("DRC read barrier: load the gc reference and check for null or i31");
        let gc_ref = unbarriered_load_gc_ref(builder, ty.heap_type, src, flags)?;
        let gc_ref_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, gc_ref);
        builder.ins().brif(
            gc_ref_is_null_or_i31,
            continue_block,
            &[],
            non_null_gc_ref_block,
            &[],
        );

        builder.switch_to_block(non_null_gc_ref_block);
        builder.seal_block(non_null_gc_ref_block);
        log::trace!(
            "DRC read barrier: check whether this object is already in the \
             over-approximated-stack-roots list"
        );
        let ptr = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset: func_env.offsets.vm_gc_header_reserved_bits(),
                access_size: u8::try_from(ir::types::I32.bytes()).unwrap(),
            },
        );
        let reserved = builder
            .ins()
            .load(ir::types::I32, ir::MemFlags::trusted(), ptr, 0);
        let in_set_bit = builder.ins().iconst(
            ir::types::I32,
            i64::from(wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT),
        );
        let in_set = builder.ins().band(reserved, in_set_bit);
        builder
            .ins()
            .brif(in_set, continue_block, &[], insert_block, &[]);

        builder.switch_to_block(insert_block);
        builder.seal_block(insert_block);
        log::trace!(
            "DRC read barrier: push the object onto the over-approximated-stack-roots list"
        );
        self.push_onto_over_approximated_stack_roots(func_env, builder, gc_ref, reserved);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("translate_read_gc_reference(..) -> {gc_ref:?}");
        Ok(gc_ref)
    }

    fn translate_write_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        assert!(ty.is_vmgcref_type());

        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        if let WasmHeapType::None = ty.heap_type {
            // Null is the only value of type `(ref null none)`, and
            // non-nullable `(ref none)` is uninhabited, so that arm must be
            // unreachable: trap unconditionally.
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

        if let WasmHeapType::I31 = ty.heap_type {
            // `i31ref`s are not managed by the GC, so they need no barriers.
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

        let current_block = builder.current_block().unwrap();
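        // The DRC write barrier: increment the new value's ref count (unless
        // it is null or i31), store it into the field, then decrement the old
        // value's ref count (unless it was null or i31). If the old value's
        // count reaches zero, call out to the `drop_gc_ref` libcall to drop
        // the object; otherwise just write the decremented count back.
        // Incrementing before decrementing keeps the count from spuriously
        // hitting zero when the old and new values are the same object.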
        let inc_ref_block = builder.create_block();
        let check_old_val_block = builder.create_block();
        let dec_ref_block = builder.create_block();
        let drop_old_val_block = builder.create_block();
        let store_dec_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.set_cold_block(drop_old_val_block);

        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(check_old_val_block, inc_ref_block);
        builder.insert_block_after(dec_ref_block, check_old_val_block);
        builder.insert_block_after(drop_old_val_block, dec_ref_block);
        builder.insert_block_after(store_dec_ref_block, drop_old_val_block);
        builder.insert_block_after(continue_block, store_dec_ref_block);

        log::trace!("DRC write barrier: load old ref; check if new ref is null or i31");
        let old_val = unbarriered_load_gc_ref(builder, ty.heap_type, dst, flags)?;
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            check_old_val_block,
            &[],
            inc_ref_block,
            &[],
        );

        builder.switch_to_block(inc_ref_block);
        log::trace!("DRC write barrier: increment new ref's ref count");
        builder.seal_block(inc_ref_block);
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(check_old_val_block, &[]);

        builder.switch_to_block(check_old_val_block);
        builder.seal_block(check_old_val_block);
        log::trace!("DRC write barrier: store new ref into field; check if old ref is null or i31");
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;
        let old_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, old_val);
        builder.ins().brif(
            old_val_is_null_or_i31,
            continue_block,
            &[],
            dec_ref_block,
            &[],
        );

        builder.switch_to_block(dec_ref_block);
        builder.seal_block(dec_ref_block);
        log::trace!(
            "DRC write barrier: decrement old ref's ref count and check for zero ref count"
        );
        let ref_count = self.load_ref_count(func_env, builder, old_val);
        let new_ref_count = builder.ins().iadd_imm(ref_count, -1);
        let old_val_needs_drop = builder.ins().icmp_imm(IntCC::Equal, new_ref_count, 0);
        builder.ins().brif(
            old_val_needs_drop,
            drop_old_val_block,
            &[],
            store_dec_ref_block,
            &[],
        );

        builder.switch_to_block(drop_old_val_block);
        builder.seal_block(drop_old_val_block);
        log::trace!("DRC write barrier: drop old ref with a ref count of zero");
        let drop_gc_ref_libcall = func_env.builtin_functions.drop_gc_ref(builder.func);
        let vmctx = func_env.vmctx_val(&mut builder.cursor());
        builder.ins().call(drop_gc_ref_libcall, &[vmctx, old_val]);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(store_dec_ref_block);
        builder.seal_block(store_dec_ref_block);
        log::trace!("DRC write barrier: store decremented ref count into old ref");
        self.store_ref_count(func_env, builder, old_val, new_ref_count);
        builder.ins().jump(continue_block, &[]);

        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("DRC write barrier: finished");
        Ok(())
    }
}