// wasmtime_internal_cranelift/compiler.rs

use crate::TRAP_INTERNAL_ASSERT;
use crate::debug::DwarfSectionRelocTarget;
use crate::func_environ::FuncEnvironment;
use crate::translate::FuncTranslator;
use crate::{BuiltinFunctionSignatures, builder::LinkOptions, wasm_call_signature};
use crate::{CompiledFunction, ModuleTextBuilder, array_call_signature};
use anyhow::{Context as _, Result};
use cranelift_codegen::binemit::CodeOffset;
use cranelift_codegen::inline::InlineCommand;
use cranelift_codegen::ir::condcodes::IntCC;
use cranelift_codegen::ir::{self, InstBuilder, MemFlags, UserExternalName, UserFuncName, Value};
use cranelift_codegen::isa::{
    OwnedTargetIsa, TargetIsa,
    unwind::{UnwindInfo, UnwindInfoKind},
};
use cranelift_codegen::print_errors::pretty_error;
use cranelift_codegen::{CompiledCode, Context, FinalizedMachCallSite};
use cranelift_entity::PrimaryMap;
use cranelift_frontend::FunctionBuilder;
use object::write::{Object, StandardSegment, SymbolId};
use object::{RelocationEncoding, RelocationFlags, RelocationKind, SectionKind};
use std::any::Any;
use std::borrow::Cow;
use std::cmp;
use std::collections::HashMap;
use std::mem;
use std::ops::Range;
use std::path;
use std::sync::{Arc, Mutex};
use wasmparser::{FuncValidatorAllocations, FunctionBody};
use wasmtime_environ::obj::ELF_WASMTIME_EXCEPTIONS;
use wasmtime_environ::{
    Abi, AddressMapSection, BuiltinFunctionIndex, CacheStore, CompileError, CompiledFunctionBody,
    DefinedFuncIndex, FlagValue, FuncKey, FunctionBodyData, FunctionLoc, HostCall,
    InliningCompiler, ModuleInternedTypeIndex, ModuleTranslation, ModuleTypesBuilder, PtrSize,
    StackMapSection, StaticModuleIndex, TrapEncodingBuilder, TrapSentinel, TripleExt, Tunables,
    WasmFuncType, WasmValType,
};
use wasmtime_unwinder::ExceptionTableBuilder;

#[cfg(feature = "component-model")]
mod component;

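/// State for Cranelift's incremental-compilation cache, plus hit/miss
/// statistics that are aggregated and logged when the owning `Compiler`
/// is dropped.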
struct IncrementalCacheContext {
    #[cfg(feature = "incremental-cache")]
    cache_store: Arc<dyn CacheStore>,
    num_hits: usize,
    num_cached: usize,
}

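/// Per-compilation state that is pooled in `Compiler::contexts` and reused
/// across function compilations to amortize allocations.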
struct CompilerContext {
    func_translator: FuncTranslator,
    codegen_context: Context,
    incremental_cache_ctx: Option<IncrementalCacheContext>,
    validator_allocations: FuncValidatorAllocations,
    abi: Option<Abi>,
}

impl Default for CompilerContext {
    fn default() -> Self {
        Self {
            func_translator: FuncTranslator::new(),
            codegen_context: Context::new(),
            incremental_cache_ctx: None,
            validator_allocations: Default::default(),
            abi: None,
        }
    }
}

/// A compiler that compiles a WebAssembly module with Cranelift, translating
/// the Wasm to Cranelift IR, optimizing it, and then translating it to
/// assembly.
pub struct Compiler {
    tunables: Tunables,
    contexts: Mutex<Vec<CompilerContext>>,
    isa: OwnedTargetIsa,
    emit_debug_checks: bool,
    linkopts: LinkOptions,
    cache_store: Option<Arc<dyn CacheStore>>,
    clif_dir: Option<path::PathBuf>,
    #[cfg(feature = "wmemcheck")]
    pub(crate) wmemcheck: bool,
}

impl Drop for Compiler {
    fn drop(&mut self) {
        if self.cache_store.is_none() {
            return;
        }

        let mut num_hits = 0;
        let mut num_cached = 0;
        for ctx in self.contexts.lock().unwrap().iter() {
            if let Some(ref cache_ctx) = ctx.incremental_cache_ctx {
                num_hits += cache_ctx.num_hits;
                num_cached += cache_ctx.num_cached;
            }
        }

        let total = num_hits + num_cached;
        if total > 0 {
            log::trace!(
                "Incremental compilation cache stats: {}/{} = {}% (hits/lookup)\ncached: {}",
                num_hits,
                total,
                (num_hits as f32) / (total as f32) * 100.0,
                num_cached
            );
        }
    }
}

impl Compiler {
    pub fn new(
        tunables: Tunables,
        isa: OwnedTargetIsa,
        cache_store: Option<Arc<dyn CacheStore>>,
        emit_debug_checks: bool,
        linkopts: LinkOptions,
        clif_dir: Option<path::PathBuf>,
        wmemcheck: bool,
    ) -> Compiler {
        // Silence the unused-variable warning when the `wmemcheck` feature is
        // disabled.
        let _ = wmemcheck;
        Compiler {
            contexts: Default::default(),
            tunables,
            isa,
            emit_debug_checks,
            linkopts,
            cache_store,
            clif_dir,
            #[cfg(feature = "wmemcheck")]
            wmemcheck,
        }
    }

    /// Perform an indirect call from Cranelift-generated code to native code in
    /// Wasmtime itself.
    ///
    /// For native platforms this is a simple `call_indirect` instruction, but
    /// for the Pulley backend this is special as it's transitioning from
    /// Cranelift-generated bytecode to native code on the host. That requires a
    /// special opcode in the interpreter and is modeled slightly differently in
    /// Cranelift IR.
    fn call_indirect_host(
        &self,
        builder: &mut FunctionBuilder<'_>,
        hostcall: impl Into<HostCall>,
        sig: ir::SigRef,
        addr: Value,
        args: &[Value],
    ) -> ir::Inst {
        let signature = &builder.func.dfg.signatures[sig];

        // When calling the host we should always be using the platform's
        // default calling convention since it'll be calling Rust code in
        // Wasmtime itself.
        assert_eq!(signature.call_conv, self.isa.default_call_conv());

        // If this target is actually pulley then the goal is to emit the custom
        // `call_indirect_host` pulley opcode. That's encoded in Cranelift as a
        // `call` instruction where the name is `colocated: false`. This will
        // force a pulley-specific relocation to get emitted in addition to
        // using the `call_indirect_host` instruction.
        if self.isa.triple().is_pulley() {
            let mut new_signature = signature.clone();
            new_signature
                .params
                .insert(0, ir::AbiParam::new(self.isa.pointer_type()));
            let new_sig = builder.func.import_signature(new_signature);
            let key = FuncKey::PulleyHostCall(hostcall.into());
            let (namespace, index) = key.into_raw_parts();
            let name = ir::ExternalName::User(
                builder
                    .func
                    .declare_imported_user_function(ir::UserExternalName { namespace, index }),
            );
            let func = builder.func.import_function(ir::ExtFuncData {
                name,
                signature: new_sig,
                // This is the signal that a special `call_indirect_host`
                // opcode is used to jump from pulley to the host.
                colocated: false,
            });
            let mut raw_args = vec![addr];
            raw_args.extend_from_slice(args);
            return builder.ins().call(func, &raw_args);
        }

        builder.ins().call_indirect(sig, addr, args)
    }
}

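// Helpers for type-erasing compilation results as `Box<dyn Any + Send + Sync>`.
// The `debug_assert!`s record which concrete type is behind each erased box:
// `CompiledFunctionBody::code` holds an `Option<CompilerContext>` (translated
// CLIF awaiting codegen) until `finish_compiling` replaces it with a
// `CompiledFunction`.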
fn box_dyn_any_compiled_function(f: CompiledFunction) -> Box<dyn Any + Send + Sync> {
    let b = box_dyn_any(f);
    debug_assert!(b.is::<CompiledFunction>());
    b
}

fn box_dyn_any_compiler_context(ctx: Option<CompilerContext>) -> Box<dyn Any + Send + Sync> {
    let b = box_dyn_any(ctx);
    debug_assert!(b.is::<Option<CompilerContext>>());
    b
}

fn box_dyn_any(x: impl Any + Send + Sync) -> Box<dyn Any + Send + Sync> {
    log::trace!(
        "making Box<dyn Any + Send + Sync> of {}",
        std::any::type_name_of_val(&x)
    );
    let b = Box::new(x);
    let r: &(dyn Any + Sync + Send) = &*b;
    log::trace!("  --> {r:#p}");
    b
}

impl wasmtime_environ::Compiler for Compiler {
    fn inlining_compiler(&self) -> Option<&dyn wasmtime_environ::InliningCompiler> {
        Some(self)
    }

    fn compile_function(
        &self,
        translation: &ModuleTranslation<'_>,
        key: FuncKey,
        input: FunctionBodyData<'_>,
        types: &ModuleTypesBuilder,
        symbol: &str,
    ) -> Result<CompiledFunctionBody, CompileError> {
        log::trace!("compiling Wasm function: {key:?} = {symbol:?}");

        let isa = &*self.isa;
        let module = &translation.module;

        let (module_index, def_func_index) = key.unwrap_defined_wasm_function();
        debug_assert_eq!(translation.module_index(), module_index);

        let func_index = module.func_index(def_func_index);
        let sig = translation.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        let wasm_func_ty = types[sig].unwrap_func();

        let mut compiler = self.function_compiler();

        let context = &mut compiler.cx.codegen_context;
        context.func.signature = wasm_call_signature(isa, wasm_func_ty, &self.tunables);
        let (namespace, index) = key.into_raw_parts();
        context.func.name = UserFuncName::User(UserExternalName { namespace, index });

        if self.tunables.generate_native_debuginfo {
            context.func.collect_debug_info();
        }

        let mut func_env = FuncEnvironment::new(self, translation, types, wasm_func_ty);

        // The `stack_limit` global value below is the implementation of stack
        // overflow checks in Wasmtime.
        //
        // The Wasm spec defines that stack overflows will raise a trap, and
        // there's also an added constraint in that, as an embedder, you are
        // frequently running host-provided code called from wasm. WebAssembly
        // and native code currently share the same call stack, so Wasmtime
        // needs to make sure that host-provided code will have enough call
        // stack available to it.
        //
        // The way that stack overflow is handled here is by adding a prologue
        // check to all functions for how much native stack is remaining. The
        // `VMContext` pointer is the first argument to all functions, and the
        // first field of this structure is `*const VMStoreContext` and the
        // third field of that is the stack limit. Note that the stack limit in
        // this case means "if the stack pointer goes below this, trap". Each
        // function which consumes stack space or isn't a leaf function starts
        // off by loading the stack limit, checking it against the stack
        // pointer, and optionally trapping.
        //
        // This manual check allows the embedder to give wasm a relatively
        // precise amount of stack allocation. Using this scheme we reserve a
        // chunk of stack for wasm code relative to where wasm code was
        // called. This ensures that native code called by wasm should have
        // native stack space to run, and the amounts of stack space here
        // should all be configurable for various embeddings.
        //
        // Note that this check is independent of each thread's stack guard page
        // here. If the stack guard page is reached that's still considered an
        // abort for the whole program since the runtime limits configured by
        // the embedder should cause wasm to trap before it reaches that
        // (ensuring the host has enough space as well for its functionality).
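        //
        // As an illustrative sketch (not the exact emitted IR), the global
        // values constructed below amount to CLIF along these lines on a
        // 64-bit target:
        //
        //   gv0 = vmctx
        //   gv1 = load.i64 notrap aligned readonly gv0+<vmctx_store_context>
        //   gv2 = load.i64 notrap aligned gv1+<stack_limit>
        //   ;; prologue: trap with a stack-overflow code if sp < gv2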
        if !isa.triple().is_pulley() {
            let vmctx = context
                .func
                .create_global_value(ir::GlobalValueData::VMContext);
            let interrupts_ptr = context.func.create_global_value(ir::GlobalValueData::Load {
                base: vmctx,
                offset: i32::from(func_env.offsets.ptr.vmctx_store_context()).into(),
                global_type: isa.pointer_type(),
                flags: MemFlags::trusted().with_readonly(),
            });
            let stack_limit = context.func.create_global_value(ir::GlobalValueData::Load {
                base: interrupts_ptr,
                offset: i32::from(func_env.offsets.ptr.vmstore_context_stack_limit()).into(),
                global_type: isa.pointer_type(),
                flags: MemFlags::trusted(),
            });
            if self.tunables.signals_based_traps {
                context.func.stack_limit = Some(stack_limit);
            } else {
                func_env.stack_limit_at_function_entry = Some(stack_limit);
            }
        }
        let FunctionBodyData { validator, body } = input;
        let mut validator =
            validator.into_validator(mem::take(&mut compiler.cx.validator_allocations));
        compiler.cx.func_translator.translate_body(
            &mut validator,
            body.clone(),
            &mut context.func,
            &mut func_env,
        )?;

        if self.tunables.inlining {
            compiler
                .cx
                .codegen_context
                .legalize(isa)
                .map_err(|e| CompileError::Codegen(e.to_string()))?;
        }

        let timing = cranelift_codegen::timing::take_current();
        log::debug!("`{symbol}` translated to CLIF in {:?}", timing.total());
        log::trace!("`{symbol}` timing info\n{timing}");

        Ok(CompiledFunctionBody {
            code: box_dyn_any_compiler_context(Some(compiler.cx)),
            needs_gc_heap: func_env.needs_gc_heap(),
        })
    }

    fn compile_array_to_wasm_trampoline(
        &self,
        translation: &ModuleTranslation<'_>,
        types: &ModuleTypesBuilder,
        key: FuncKey,
        symbol: &str,
    ) -> Result<CompiledFunctionBody, CompileError> {
        let (module_index, def_func_index) = key.unwrap_array_to_wasm_trampoline();
        let func_index = translation.module.func_index(def_func_index);
        let sig = translation.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        self.array_to_wasm_trampoline(
            key,
            FuncKey::DefinedWasmFunction(module_index, def_func_index),
            types,
            sig,
            symbol,
            self.isa.pointer_bytes().vmctx_store_context().into(),
            wasmtime_environ::VMCONTEXT_MAGIC,
        )
    }

    fn compile_wasm_to_array_trampoline(
        &self,
        wasm_func_ty: &WasmFuncType,
        key: FuncKey,
        symbol: &str,
    ) -> Result<CompiledFunctionBody, CompileError> {
        log::trace!("compiling wasm-to-array trampoline: {key:?} = {symbol:?}");

        let isa = &*self.isa;
        let pointer_type = isa.pointer_type();
        let wasm_call_sig = wasm_call_signature(isa, wasm_func_ty, &self.tunables);
        let array_call_sig = array_call_signature(isa);

        let mut compiler = self.function_compiler();
        let func = ir::Function::with_name_signature(key_to_name(key), wasm_call_sig);
        let (mut builder, block0) = compiler.builder(func);

        let args = builder.func.dfg.block_params(block0).to_vec();
        let callee_vmctx = args[0];
        let caller_vmctx = args[1];

        // We are exiting Wasm, so save our PC and FP.
        //
        // Assert that the caller vmctx really is a core Wasm vmctx, since
        // that's what we are assuming with our offsets below.
        self.debug_assert_vmctx_kind(
            &mut builder,
            caller_vmctx,
            wasmtime_environ::VMCONTEXT_MAGIC,
        );
        let ptr = isa.pointer_bytes();
        let vm_store_context = builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            caller_vmctx,
            i32::from(ptr.vmcontext_store_context()),
        );
        save_last_wasm_exit_fp_and_pc(&mut builder, pointer_type, &ptr, vm_store_context);

        // Spill all wasm arguments to the stack in `ValRaw` slots.
        let (args_base, args_len) =
            self.allocate_stack_array_and_spill_args(wasm_func_ty, &mut builder, &args[2..]);
        let args_len = builder.ins().iconst(pointer_type, i64::from(args_len));

        // Load the actual callee out of the
        // `VMArrayCallHostFuncContext::host_func`.
        let ptr_size = isa.pointer_bytes();
        let callee = builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            callee_vmctx,
            ptr_size.vmarray_call_host_func_context_func_ref() + ptr_size.vm_func_ref_array_call(),
        );

        // Do an indirect call to the callee.
        let callee_signature = builder.func.import_signature(array_call_sig);
        let call = self.call_indirect_host(
            &mut builder,
            HostCall::ArrayCall,
            callee_signature,
            callee,
            &[callee_vmctx, caller_vmctx, args_base, args_len],
        );
        let succeeded = builder.func.dfg.inst_results(call)[0];
        self.raise_if_host_trapped(&mut builder, caller_vmctx, succeeded);
        let results =
            self.load_values_from_array(wasm_func_ty.returns(), &mut builder, args_base, args_len);
        builder.ins().return_(&results);
        builder.finalize();

        Ok(CompiledFunctionBody {
            code: box_dyn_any_compiler_context(Some(compiler.cx)),
            needs_gc_heap: false,
        })
    }

    fn append_code(
        &self,
        obj: &mut Object<'static>,
        funcs: &[(String, Box<dyn Any + Send + Sync>)],
        resolve_reloc: &dyn Fn(usize, FuncKey) -> usize,
    ) -> Result<Vec<(SymbolId, FunctionLoc)>> {
        log::trace!(
            "appending functions to object file: {:#?}",
            funcs.iter().map(|(sym, _)| sym).collect::<Vec<_>>()
        );

        let mut builder =
            ModuleTextBuilder::new(obj, self, self.isa.text_section_builder(funcs.len()));
        if self.linkopts.force_jump_veneers {
            builder.force_veneers();
        }
        let mut addrs = AddressMapSection::default();
        let mut traps = TrapEncodingBuilder::default();
        let mut stack_maps = StackMapSection::default();
        let mut exception_tables = ExceptionTableBuilder::default();

        let mut ret = Vec::with_capacity(funcs.len());
        for (i, (sym, func)) in funcs.iter().enumerate() {
            debug_assert!(!func.is::<Option<CompilerContext>>());
            debug_assert!(func.is::<CompiledFunction>());
            let func = func.downcast_ref::<CompiledFunction>().unwrap();

            let (sym_id, range) = builder.append_func(&sym, func, |idx| resolve_reloc(i, idx));
            log::trace!("symbol id {sym_id:?} = {sym:?}");

            if self.tunables.generate_address_map {
                let addr = func.address_map();
                addrs.push(range.clone(), &addr.instructions);
            }

            clif_to_env_stack_maps(
                &mut stack_maps,
                range.clone(),
                func.buffer.user_stack_maps(),
            );

            traps.push(range.clone(), &func.traps().collect::<Vec<_>>());
            clif_to_env_exception_tables(
                &mut exception_tables,
                range.clone(),
                func.buffer.call_sites(),
            )?;
            builder.append_padding(self.linkopts.padding_between_functions);

            let info = FunctionLoc {
                start: u32::try_from(range.start).unwrap(),
                length: u32::try_from(range.end - range.start).unwrap(),
            };
            ret.push((sym_id, info));
        }

        builder.finish();

        if self.tunables.generate_address_map {
            addrs.append_to(obj);
        }
        stack_maps.append_to(obj);
        traps.append_to(obj);

        let exception_section = obj.add_section(
            obj.segment_name(StandardSegment::Data).to_vec(),
            ELF_WASMTIME_EXCEPTIONS.as_bytes().to_vec(),
            SectionKind::ReadOnlyData,
        );
        exception_tables.serialize(|bytes| {
            obj.append_section_data(exception_section, bytes, 1);
        });

        Ok(ret)
    }

    fn triple(&self) -> &target_lexicon::Triple {
        self.isa.triple()
    }

    fn flags(&self) -> Vec<(&'static str, FlagValue<'static>)> {
        crate::clif_flags_to_wasmtime(self.isa.flags().iter())
    }

    fn isa_flags(&self) -> Vec<(&'static str, FlagValue<'static>)> {
        crate::clif_flags_to_wasmtime(self.isa.isa_flags())
    }

    fn is_branch_protection_enabled(&self) -> bool {
        self.isa.is_branch_protection_enabled()
    }

    #[cfg(feature = "component-model")]
    fn component_compiler(&self) -> &dyn wasmtime_environ::component::ComponentCompiler {
        self
    }

    fn append_dwarf<'a>(
        &self,
        obj: &mut Object<'_>,
        translations: &'a PrimaryMap<StaticModuleIndex, ModuleTranslation<'a>>,
        get_func: &'a dyn Fn(
            StaticModuleIndex,
            DefinedFuncIndex,
        ) -> (SymbolId, &'a (dyn Any + Send + Sync)),
        dwarf_package_bytes: Option<&'a [u8]>,
        tunables: &'a Tunables,
    ) -> Result<()> {
        log::trace!("appending DWARF debug info");

        let get_func = move |m, f| {
            let (sym, any) = get_func(m, f);
            log::trace!("get_func({m:?}, {f:?}) -> ({sym:?}, {any:#p})");
            debug_assert!(!any.is::<Option<CompilerContext>>());
            debug_assert!(any.is::<CompiledFunction>());
            (
                sym,
                any.downcast_ref::<CompiledFunction>().unwrap().metadata(),
            )
        };

        let mut compilation = crate::debug::Compilation::new(
            &*self.isa,
            translations,
            &get_func,
            dwarf_package_bytes,
            tunables,
        );
        let dwarf_sections = crate::debug::emit_dwarf(&*self.isa, &mut compilation)
            .with_context(|| "failed to emit DWARF debug information")?;

        let (debug_bodies, debug_relocs): (Vec<_>, Vec<_>) = dwarf_sections
            .iter()
            .map(|s| ((s.name, &s.body), (s.name, &s.relocs)))
            .unzip();
        let mut dwarf_sections_ids = HashMap::new();
        for (name, body) in debug_bodies {
            let segment = obj.segment_name(StandardSegment::Debug).to_vec();
            let section_id = obj.add_section(segment, name.as_bytes().to_vec(), SectionKind::Debug);
            dwarf_sections_ids.insert(name, section_id);
            obj.append_section_data(section_id, &body, 1);
        }

        // Write all debug data relocations.
        for (name, relocs) in debug_relocs {
            let section_id = *dwarf_sections_ids.get(name).unwrap();
            for reloc in relocs {
                let target_symbol = match reloc.target {
                    DwarfSectionRelocTarget::Func(id) => compilation.symbol_id(id),
                    DwarfSectionRelocTarget::Section(name) => {
                        obj.section_symbol(dwarf_sections_ids[name])
                    }
                };
                obj.add_relocation(
                    section_id,
                    object::write::Relocation {
                        offset: u64::from(reloc.offset),
                        symbol: target_symbol,
                        addend: i64::from(reloc.addend),
                        flags: RelocationFlags::Generic {
                            size: reloc.size << 3,
                            kind: RelocationKind::Absolute,
                            encoding: RelocationEncoding::Generic,
                        },
                    },
                )?;
            }
        }

        Ok(())
    }

    fn create_systemv_cie(&self) -> Option<gimli::write::CommonInformationEntry> {
        self.isa.create_systemv_cie()
    }

    fn compile_wasm_to_builtin(
        &self,
        key: FuncKey,
        symbol: &str,
    ) -> Result<CompiledFunctionBody, CompileError> {
        log::trace!("compiling wasm-to-builtin trampoline: {key:?} = {symbol:?}");

        let isa = &*self.isa;
        let ptr_size = isa.pointer_bytes();
        let pointer_type = isa.pointer_type();
        let sigs = BuiltinFunctionSignatures::new(self);
        let builtin_func_index = key.unwrap_wasm_to_builtin_trampoline();
        let wasm_sig = sigs.wasm_signature(builtin_func_index);
        let host_sig = sigs.host_signature(builtin_func_index);

        let mut compiler = self.function_compiler();
        let func = ir::Function::with_name_signature(key_to_name(key), wasm_sig.clone());
        let (mut builder, block0) = compiler.builder(func);
        let vmctx = builder.block_params(block0)[0];

        // Debug-assert that this is the right kind of vmctx, and then
        // additionally perform the "routine of the exit trampoline" of saving
        // fp/pc/etc.
        self.debug_assert_vmctx_kind(&mut builder, vmctx, wasmtime_environ::VMCONTEXT_MAGIC);
        let vm_store_context = builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            vmctx,
            ptr_size.vmcontext_store_context(),
        );
        save_last_wasm_exit_fp_and_pc(&mut builder, pointer_type, &ptr_size, vm_store_context);

        // Now it's time to delegate to the actual builtin. Forward all our own
        // arguments to the libcall itself.
        let args = builder.block_params(block0).to_vec();
        let call = self.call_builtin(&mut builder, vmctx, &args, builtin_func_index, host_sig);
        let results = builder.func.dfg.inst_results(call).to_vec();

        // Libcalls do not explicitly jump/raise on traps but instead return a
        // code indicating whether they trapped or not. This means that it's the
        // responsibility of the trampoline to check for a trapping return
        // value and raise a trap as appropriate. Using the `results` above,
        // match on this builtin's trap sentinel and, for each libcall that has
        // a trapping return value, process it here.
        match builtin_func_index.trap_sentinel() {
            Some(TrapSentinel::Falsy) => {
                self.raise_if_host_trapped(&mut builder, vmctx, results[0]);
            }
            Some(TrapSentinel::NegativeTwo) => {
                let ty = builder.func.dfg.value_type(results[0]);
                let trapped = builder.ins().iconst(ty, -2);
                let succeeded = builder.ins().icmp(IntCC::NotEqual, results[0], trapped);
                self.raise_if_host_trapped(&mut builder, vmctx, succeeded);
            }
            Some(TrapSentinel::Negative) => {
                let ty = builder.func.dfg.value_type(results[0]);
                let zero = builder.ins().iconst(ty, 0);
                let succeeded =
                    builder
                        .ins()
                        .icmp(IntCC::SignedGreaterThanOrEqual, results[0], zero);
                self.raise_if_host_trapped(&mut builder, vmctx, succeeded);
            }
            Some(TrapSentinel::NegativeOne) => {
                let ty = builder.func.dfg.value_type(results[0]);
                let minus_one = builder.ins().iconst(ty, -1);
                let succeeded = builder.ins().icmp(IntCC::NotEqual, results[0], minus_one);
                self.raise_if_host_trapped(&mut builder, vmctx, succeeded);
            }
            None => {}
        }

        // And finally, return all the results of this libcall.
        builder.ins().return_(&results);
        builder.finalize();

        Ok(CompiledFunctionBody {
            code: box_dyn_any_compiler_context(Some(compiler.cx)),
            needs_gc_heap: false,
        })
    }

    fn compiled_function_relocation_targets<'a>(
        &'a self,
        func: &'a dyn Any,
    ) -> Box<dyn Iterator<Item = FuncKey> + 'a> {
        debug_assert!(!func.is::<Option<CompilerContext>>());
        debug_assert!(func.is::<CompiledFunction>());
        let func = func.downcast_ref::<CompiledFunction>().unwrap();
        Box::new(func.relocations().map(|r| r.reloc_target))
    }
}

impl InliningCompiler for Compiler {
    fn calls(
        &self,
        func_body: &CompiledFunctionBody,
        calls: &mut wasmtime_environ::prelude::IndexSet<FuncKey>,
    ) -> Result<()> {
        debug_assert!(!func_body.code.is::<CompiledFunction>());
        debug_assert!(func_body.code.is::<Option<CompilerContext>>());
        let cx = func_body
            .code
            .downcast_ref::<Option<CompilerContext>>()
            .unwrap()
            .as_ref()
            .unwrap();
        let func = &cx.codegen_context.func;
        calls.extend(
            func.params
                .user_named_funcs()
                .values()
                .map(|name| FuncKey::from_raw_parts(name.namespace, name.index))
                .filter(|key| matches!(key, FuncKey::DefinedWasmFunction(_, _))),
        );
        Ok(())
    }

    fn size(&self, func_body: &CompiledFunctionBody) -> u32 {
        debug_assert!(!func_body.code.is::<CompiledFunction>());
        debug_assert!(func_body.code.is::<Option<CompilerContext>>());
        let cx = func_body
            .code
            .downcast_ref::<Option<CompilerContext>>()
            .unwrap()
            .as_ref()
            .unwrap();
        let func = &cx.codegen_context.func;
        let size = func.dfg.values().len();
        u32::try_from(size).unwrap()
    }

    fn inline<'a>(
        &self,
        func_body: &mut CompiledFunctionBody,
        get_callee: &'a mut dyn FnMut(FuncKey) -> Option<&'a CompiledFunctionBody>,
    ) -> Result<()> {
        debug_assert!(!func_body.code.is::<CompiledFunction>());
        debug_assert!(func_body.code.is::<Option<CompilerContext>>());
        let code = func_body
            .code
            .downcast_mut::<Option<CompilerContext>>()
            .unwrap();
        let cx = code.as_mut().unwrap();

        cx.codegen_context.inline(Inliner(get_callee))?;
        return Ok(());

        struct Inliner<'a>(&'a mut dyn FnMut(FuncKey) -> Option<&'a CompiledFunctionBody>);

        impl cranelift_codegen::inline::Inline for Inliner<'_> {
            fn inline(
                &mut self,
                caller: &ir::Function,
                _call_inst: ir::Inst,
                _call_opcode: ir::Opcode,
                callee: ir::FuncRef,
                _call_args: &[ir::Value],
            ) -> InlineCommand<'_> {
                let callee = &caller.dfg.ext_funcs[callee].name;
                let callee = match callee {
                    ir::ExternalName::User(callee) => *callee,
                    ir::ExternalName::TestCase(_)
                    | ir::ExternalName::LibCall(_)
                    | ir::ExternalName::KnownSymbol(_) => return InlineCommand::KeepCall,
                };
                let callee = &caller.params.user_named_funcs()[callee];
                let callee = FuncKey::from_raw_parts(callee.namespace, callee.index);
                match callee {
                    FuncKey::DefinedWasmFunction(_, _) => match (self.0)(callee) {
                        None => InlineCommand::KeepCall,
                        Some(func_body) => {
                            debug_assert!(!func_body.code.is::<CompiledFunction>());
                            debug_assert!(func_body.code.is::<Option<CompilerContext>>());
                            let cx = func_body
                                .code
                                .downcast_ref::<Option<CompilerContext>>()
                                .unwrap();
                            InlineCommand::Inline {
                                callee: Cow::Borrowed(&cx.as_ref().unwrap().codegen_context.func),
                                // We've already visited the callee for inlining
                                // due to our bottom-up approach, no need to
                                // visit it again.
                                visit_callee: false,
                            }
                        }
                    },
                    _ => InlineCommand::KeepCall,
                }
            }
        }
    }

    fn finish_compiling(
        &self,
        func_body: &mut CompiledFunctionBody,
        input: Option<wasmparser::FunctionBody<'_>>,
        symbol: &str,
    ) -> Result<()> {
        log::trace!("finish compiling {symbol:?}");
        debug_assert!(!func_body.code.is::<CompiledFunction>());
        debug_assert!(func_body.code.is::<Option<CompilerContext>>());
        let cx = func_body
            .code
            .downcast_mut::<Option<CompilerContext>>()
            .unwrap()
            .take()
            .unwrap();
        let compiler = FunctionCompiler { compiler: self, cx };

        let symbol = match compiler.cx.abi {
            None => Cow::Borrowed(symbol),
            Some(Abi::Wasm) => Cow::Owned(format!("{symbol}_wasm_call")),
            Some(Abi::Array) => Cow::Owned(format!("{symbol}_array_call")),
        };

        let compiled_func = if let Some(input) = input {
            compiler.finish_with_info(Some((&input, &self.tunables)), &symbol)?
        } else {
            compiler.finish(&symbol)?
        };

        let timing = cranelift_codegen::timing::take_current();
        log::debug!("`{symbol}` compiled in {:?}", timing.total());
        log::trace!("`{symbol}` timing info\n{timing}");

        func_body.code = box_dyn_any_compiled_function(compiled_func);
        Ok(())
    }
}

#[cfg(feature = "incremental-cache")]
mod incremental_cache {
    use super::*;

    struct CraneliftCacheStore(Arc<dyn CacheStore>);

    impl cranelift_codegen::incremental_cache::CacheKvStore for CraneliftCacheStore {
        fn get(&self, key: &[u8]) -> Option<std::borrow::Cow<'_, [u8]>> {
            self.0.get(key)
        }
        fn insert(&mut self, key: &[u8], val: Vec<u8>) {
            self.0.insert(key, val);
        }
    }

    /// Compile `context`, consulting (and updating) the incremental-compilation
    /// cache when one is configured.
    pub(super) fn compile_maybe_cached(
        context: &mut Context,
        isa: &dyn TargetIsa,
        cache_ctx: Option<&mut IncrementalCacheContext>,
    ) -> Result<CompiledCode, CompileError> {
        let cache_ctx = match cache_ctx {
            Some(ctx) => ctx,
            None => return compile_uncached(context, isa),
        };

        let mut cache_store = CraneliftCacheStore(cache_ctx.cache_store.clone());
        let (_compiled_code, from_cache) = context
            .compile_with_cache(isa, &mut cache_store, &mut Default::default())
            .map_err(|error| CompileError::Codegen(pretty_error(&error.func, error.inner)))?;

        if from_cache {
            cache_ctx.num_hits += 1;
        } else {
            cache_ctx.num_cached += 1;
        }

        Ok(context.take_compiled_code().unwrap())
    }
}

#[cfg(feature = "incremental-cache")]
use incremental_cache::*;

#[cfg(not(feature = "incremental-cache"))]
fn compile_maybe_cached(
    context: &mut Context,
    isa: &dyn TargetIsa,
    _cache_ctx: Option<&mut IncrementalCacheContext>,
) -> Result<CompiledCode, CompileError> {
    compile_uncached(context, isa)
}

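/// Compile `context` directly, without consulting any incremental-compilation
/// cache.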
fn compile_uncached(
    context: &mut Context,
    isa: &dyn TargetIsa,
) -> Result<CompiledCode, CompileError> {
    context
        .compile(isa, &mut Default::default())
        .map_err(|error| CompileError::Codegen(pretty_error(&error.func, error.inner)))?;
    Ok(context.take_compiled_code().unwrap())
}

impl Compiler {
    /// This function will allocate a stack slot suitable for storing both the
    /// arguments and return values of the function, and then store the
    /// arguments into that slot.
    ///
    /// The provided `builder` must be positioned in the entry block of the
    /// trampoline, and `ty` must be the Wasm function type of the trampoline.
    ///
    /// The stack slot pointer is returned in addition to the size, in units of
    /// `ValRaw`, of the stack slot.
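    ///
    /// As an illustrative example: for a Wasm signature `(param i32 i64)
    /// (result f64)` the slot must hold `max(2, 1) = 2` `ValRaw` slots, i.e.
    /// `2 * 16 = 32` bytes, and the returned length is `2`.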
    fn allocate_stack_array_and_spill_args(
        &self,
        ty: &WasmFuncType,
        builder: &mut FunctionBuilder,
        args: &[ir::Value],
    ) -> (Value, u32) {
        let isa = &*self.isa;
        let pointer_type = isa.pointer_type();

        // Compute the size of the values vector.
        let value_size = mem::size_of::<u128>();
        let values_vec_len = cmp::max(ty.params().len(), ty.returns().len());
        let values_vec_byte_size = u32::try_from(value_size * values_vec_len).unwrap();
        let values_vec_len = u32::try_from(values_vec_len).unwrap();

        let slot = builder.func.create_sized_stack_slot(ir::StackSlotData::new(
            ir::StackSlotKind::ExplicitSlot,
            values_vec_byte_size,
            // Align the slot to 2^4 = 16 bytes, the size of a `ValRaw`.
            4,
        ));
        let values_vec_ptr = builder.ins().stack_addr(pointer_type, slot, 0);

        {
            let values_vec_len = builder
                .ins()
                .iconst(ir::types::I32, i64::from(values_vec_len));
            self.store_values_to_array(builder, ty.params(), args, values_vec_ptr, values_vec_len);
        }

        (values_vec_ptr, values_vec_len)
    }

    /// Store values to an array in the array calling convention.
    ///
    /// Used either to store arguments to the array when calling a function
    /// using the array calling convention, or to store results to the
    /// array when implementing a function that exposes the array calling
    /// convention.
    fn store_values_to_array(
        &self,
        builder: &mut FunctionBuilder,
        types: &[WasmValType],
        values: &[Value],
        values_vec_ptr: Value,
        values_vec_capacity: Value,
    ) {
        debug_assert_eq!(types.len(), values.len());
        self.debug_assert_enough_capacity_for_length(builder, types.len(), values_vec_capacity);

        // Note that loads and stores are unconditionally done in the
        // little-endian format rather than the host's native endianness,
        // despite this load/store being unrelated to execution in wasm itself.
        // For more details on this see the `ValRaw` type in
        // `wasmtime::runtime::vm`.
        let flags = ir::MemFlags::new()
            .with_notrap()
            .with_endianness(ir::Endianness::Little);

        let value_size = mem::size_of::<u128>();
        for (i, val) in values.iter().copied().enumerate() {
            crate::unbarriered_store_type_at_offset(
                &mut builder.cursor(),
                flags,
                values_vec_ptr,
                i32::try_from(i * value_size).unwrap(),
                val,
            );
        }
    }

    /// Used for loading the values of an array-call host function's value
    /// array.
    ///
    /// This can be used to load arguments out of the array if the trampoline we
    /// are building exposes the array calling convention, or it can be used to
    /// load results out of the array if the trampoline we are building calls a
    /// function that uses the array calling convention.
    fn load_values_from_array(
        &self,
        types: &[WasmValType],
        builder: &mut FunctionBuilder,
        values_vec_ptr: Value,
        values_vec_capacity: Value,
    ) -> Vec<ir::Value> {
        let isa = &*self.isa;
        let value_size = mem::size_of::<u128>();

        self.debug_assert_enough_capacity_for_length(builder, types.len(), values_vec_capacity);

        // Note that this is little-endian like `store_values_to_array` above;
        // see the notes there for more information.
        let flags = MemFlags::new()
            .with_notrap()
            .with_endianness(ir::Endianness::Little);

        let mut results = Vec::new();
        for (i, ty) in types.iter().enumerate() {
            results.push(crate::unbarriered_load_type_at_offset(
                isa,
                &mut builder.cursor(),
                *ty,
                flags,
                values_vec_ptr,
                i32::try_from(i * value_size).unwrap(),
            ));
        }
        results
    }

    fn function_compiler(&self) -> FunctionCompiler<'_> {
        let saved_context = self.contexts.lock().unwrap().pop();
        FunctionCompiler {
            compiler: self,
            cx: saved_context
                .map(|mut ctx| {
                    ctx.codegen_context.clear();
                    ctx
                })
                .unwrap_or_else(|| CompilerContext {
                    #[cfg(feature = "incremental-cache")]
                    incremental_cache_ctx: self.cache_store.as_ref().map(|cache_store| {
                        IncrementalCacheContext {
                            cache_store: cache_store.clone(),
                            num_hits: 0,
                            num_cached: 0,
                        }
                    }),
                    ..Default::default()
                }),
        }
    }

    /// Invokes the `raise` libcall in `vmctx` if the `succeeded` value
    /// indicates that a trap happened.
    ///
    /// This helper is used when the host returns back to WebAssembly. The host
    /// returns a `bool` indicating whether the call succeeded. If the call
    /// failed then Cranelift needs to unwind back to the original invocation
    /// point. The unwind right now is implemented in Wasmtime with an
    /// exceptional resume; one day this might be implemented differently with
    /// an unwind inside of Cranelift.
    ///
    /// Additionally, in the future for Pulley this will emit a special trap
    /// opcode for Pulley itself to cease interpretation and exit the
    /// interpreter.
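    ///
    /// The generated control flow is roughly the following (an illustrative
    /// sketch, not exact CLIF output):
    ///
    /// ```text
    ///     brif succeeded, continuation_block, trapped_block
    ///
    ///   trapped_block (cold):
    ///     call raise_builtin(vmctx)  ;; unwinds, never returns
    ///     trap                       ;; never reached
    ///
    ///   continuation_block:
    ///     ;; normal execution continues here
    /// ```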
    pub fn raise_if_host_trapped(
        &self,
        builder: &mut FunctionBuilder<'_>,
        vmctx: ir::Value,
        succeeded: ir::Value,
    ) {
        let trapped_block = builder.create_block();
        let continuation_block = builder.create_block();
        builder.set_cold_block(trapped_block);
        builder
            .ins()
            .brif(succeeded, continuation_block, &[], trapped_block, &[]);

        builder.seal_block(trapped_block);
        builder.seal_block(continuation_block);

        builder.switch_to_block(trapped_block);
        let sigs = BuiltinFunctionSignatures::new(self);
        let sig = sigs.host_signature(BuiltinFunctionIndex::raise());
        self.call_builtin(builder, vmctx, &[vmctx], BuiltinFunctionIndex::raise(), sig);
        builder.ins().trap(TRAP_INTERNAL_ASSERT);

        builder.switch_to_block(continuation_block);
    }

    /// Helper to load the core `builtin` from `vmctx` and invoke it with
    /// `args`.
    fn call_builtin(
        &self,
        builder: &mut FunctionBuilder<'_>,
        vmctx: ir::Value,
        args: &[ir::Value],
        builtin: BuiltinFunctionIndex,
        sig: ir::Signature,
    ) -> ir::Inst {
        let isa = &*self.isa;
        let ptr_size = isa.pointer_bytes();
        let pointer_type = isa.pointer_type();

        // Builtins are stored in an array in all `VMContext`s. First load the
        // base pointer of the array and then load the entry of the array that
        // corresponds to this builtin.
        let mem_flags = ir::MemFlags::trusted().with_readonly();
        let array_addr = builder.ins().load(
            pointer_type,
            mem_flags,
            vmctx,
            i32::from(ptr_size.vmcontext_builtin_functions()),
        );
        let body_offset = i32::try_from(builtin.index() * pointer_type.bytes()).unwrap();
        let func_addr = builder
            .ins()
            .load(pointer_type, mem_flags, array_addr, body_offset);

        let sig = builder.func.import_signature(sig);
        self.call_indirect_host(builder, builtin, sig, func_addr, args)
    }

    pub fn isa(&self) -> &dyn TargetIsa {
        &*self.isa
    }

    pub fn tunables(&self) -> &Tunables {
        &self.tunables
    }

    fn debug_assert_enough_capacity_for_length(
        &self,
        builder: &mut FunctionBuilder,
        length: usize,
        capacity: ir::Value,
    ) {
        if !self.emit_debug_checks {
            return;
        }
        let enough_capacity = builder.ins().icmp_imm(
            ir::condcodes::IntCC::UnsignedGreaterThanOrEqual,
            capacity,
            ir::immediates::Imm64::new(length.try_into().unwrap()),
        );
        builder.ins().trapz(enough_capacity, TRAP_INTERNAL_ASSERT);
    }

    fn debug_assert_vmctx_kind(
        &self,
        builder: &mut FunctionBuilder,
        vmctx: ir::Value,
        expected_vmctx_magic: u32,
    ) {
        if !self.emit_debug_checks {
            return;
        }
        let magic = builder.ins().load(
            ir::types::I32,
            MemFlags::trusted().with_endianness(self.isa.endianness()),
            vmctx,
            0,
        );
        let is_expected_vmctx = builder.ins().icmp_imm(
            ir::condcodes::IntCC::Equal,
            magic,
            i64::from(expected_vmctx_magic),
        );
        builder.ins().trapz(is_expected_vmctx, TRAP_INTERNAL_ASSERT);
    }

    fn array_to_wasm_trampoline(
        &self,
        trampoline_key: FuncKey,
        callee_key: FuncKey,
        types: &ModuleTypesBuilder,
        callee_sig: ModuleInternedTypeIndex,
        symbol: &str,
        vm_store_context_offset: u32,
        expected_vmctx_magic: u32,
    ) -> Result<CompiledFunctionBody, CompileError> {
        log::trace!("compiling array-to-wasm trampoline: {trampoline_key:?} = {symbol:?}");

        let wasm_func_ty = types[callee_sig].unwrap_func();

        let isa = &*self.isa;
        let pointer_type = isa.pointer_type();
        let wasm_call_sig = wasm_call_signature(isa, wasm_func_ty, &self.tunables);
        let array_call_sig = array_call_signature(isa);

        let mut compiler = self.function_compiler();
        let func = ir::Function::with_name_signature(key_to_name(trampoline_key), array_call_sig);
        let (mut builder, block0) = compiler.builder(func);

        let try_call_block = builder.create_block();
        builder.ins().jump(try_call_block, []);
        builder.switch_to_block(try_call_block);

        let (vmctx, caller_vmctx, values_vec_ptr, values_vec_len) = {
            let params = builder.func.dfg.block_params(block0);
            (params[0], params[1], params[2], params[3])
        };

        // First load the actual arguments out of the array.
        let mut args = self.load_values_from_array(
            wasm_func_ty.params(),
            &mut builder,
            values_vec_ptr,
            values_vec_len,
        );
        args.insert(0, caller_vmctx);
        args.insert(0, vmctx);

        // Just before we enter Wasm, save our context information.
        //
        // Assert that we were really given a core Wasm vmctx, since that's
        // what we are assuming with our offsets below.
        self.debug_assert_vmctx_kind(&mut builder, vmctx, expected_vmctx_magic);
        save_last_wasm_entry_context(
            &mut builder,
            pointer_type,
            &self.isa.pointer_bytes(),
            vm_store_context_offset,
            vmctx,
            try_call_block,
        );

        // Create the invocation of wasm, which is notably done with a
        // `try_call` with an exception handler that's used to handle traps.
        let normal_return = builder.create_block();
        let exceptional_return = builder.create_block();
        let normal_return_values = wasm_call_sig
            .returns
            .iter()
            .map(|ty| {
                builder
                    .func
                    .dfg
                    .append_block_param(normal_return, ty.value_type)
            })
            .collect::<Vec<_>>();

        // Then call the Wasm function with those arguments.
        let signature = builder.func.import_signature(wasm_call_sig.clone());
        let callee = {
            let (namespace, index) = callee_key.into_raw_parts();
            let name = ir::ExternalName::User(
                builder
                    .func
                    .declare_imported_user_function(ir::UserExternalName { namespace, index }),
            );
            builder.func.dfg.ext_funcs.push(ir::ExtFuncData {
                name,
                signature,
                colocated: true,
            })
        };

        let dfg = &mut builder.func.dfg;
        let exception_table = dfg.exception_tables.push(ir::ExceptionTableData::new(
            signature,
            ir::BlockCall::new(
                normal_return,
                (0..wasm_call_sig.returns.len())
                    .map(|i| ir::BlockArg::TryCallRet(i.try_into().unwrap())),
                &mut dfg.value_lists,
            ),
            [ir::ExceptionTableItem::Default(ir::BlockCall::new(
                exceptional_return,
                None,
                &mut dfg.value_lists,
            ))],
        ));
        builder.ins().try_call(callee, &args, exception_table);

        builder.seal_block(try_call_block);
        builder.seal_block(normal_return);
        builder.seal_block(exceptional_return);

        // On the normal return path store all the results in the array we were
        // provided and return "true" for "returned successfully".
        builder.switch_to_block(normal_return);
        self.store_values_to_array(
            &mut builder,
            wasm_func_ty.returns(),
            &normal_return_values,
            values_vec_ptr,
            values_vec_len,
        );
        let true_return = builder.ins().iconst(ir::types::I8, 1);
        builder.ins().return_(&[true_return]);

        // On the exceptional return path just return "false" for "did not
        // succeed". Note that register restoration is part of the `try_call`
        // and handler implementation.
        builder.switch_to_block(exceptional_return);
        let false_return = builder.ins().iconst(ir::types::I8, 0);
        builder.ins().return_(&[false_return]);

        builder.finalize();

        Ok(CompiledFunctionBody {
            code: box_dyn_any_compiler_context(Some(compiler.cx)),
            needs_gc_heap: false,
        })
    }
}

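/// A single in-flight function compilation: pairs the parent `Compiler` with a
/// pooled `CompilerContext` that is returned to the pool once compilation
/// finishes.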
struct FunctionCompiler<'a> {
    compiler: &'a Compiler,
    cx: CompilerContext,
}

impl FunctionCompiler<'_> {
    fn builder(&mut self, func: ir::Function) -> (FunctionBuilder<'_>, ir::Block) {
        self.cx.codegen_context.func = func;
        let mut builder = FunctionBuilder::new(
            &mut self.cx.codegen_context.func,
            self.cx.func_translator.context(),
        );

        let block0 = builder.create_block();
        builder.append_block_params_for_function_params(block0);
        builder.switch_to_block(block0);
        builder.seal_block(block0);
        (builder, block0)
    }

    fn finish(self, symbol: &str) -> Result<CompiledFunction, CompileError> {
        self.finish_with_info(None, symbol)
    }

    fn finish_with_info(
        mut self,
        body_and_tunables: Option<(&FunctionBody<'_>, &Tunables)>,
        symbol: &str,
    ) -> Result<CompiledFunction, CompileError> {
        let context = &mut self.cx.codegen_context;
        let isa = &*self.compiler.isa;

        // Run compilation, but don't propagate the error just yet. This'll
        // mutate `context` and the IR contained within (optionally), but it may
        // fail if the backend has a bug in it. Use `context` after this
        // finishes to optionally emit CLIF, and then after that's done actually
        // propagate the error if one happened.
        let compilation_result =
            compile_maybe_cached(context, isa, self.cx.incremental_cache_ctx.as_mut());

        if let Some(path) = &self.compiler.clif_dir {
            use std::io::Write;

            let mut path = path.join(symbol.replace(":", "-"));
            path.set_extension("clif");

            let mut output = std::fs::File::create(path).unwrap();
            writeln!(
                output,
                ";; Intermediate Representation of function <{symbol}>:",
            )
            .unwrap();
            write!(output, "{}", context.func.display()).unwrap();
        }

        let compiled_code = compilation_result?;

        // Give wasm functions, i.e. user-defined code, a "preferred" alignment
        // instead of the minimum alignment, as this can help perf in niche
        // situations.
        let preferred_alignment = if body_and_tunables.is_some() {
            self.compiler.isa.function_alignment().preferred
        } else {
            1
        };

        let alignment = compiled_code.buffer.alignment.max(preferred_alignment);
        let mut compiled_function = CompiledFunction::new(
            compiled_code.buffer.clone(),
            context.func.params.user_named_funcs().clone(),
            alignment,
        );

        if let Some((body, tunables)) = body_and_tunables {
            let data = body.get_binary_reader();
            let offset = data.original_position();
            let len = data.bytes_remaining();
            compiled_function.set_address_map(
                offset.try_into().unwrap(),
                len.try_into().unwrap(),
                tunables.generate_address_map,
            );
        }

        if isa.flags().unwind_info() {
            let unwind = compiled_code
                .create_unwind_info(isa)
                .map_err(|error| CompileError::Codegen(pretty_error(&context.func, error)))?;

            if let Some(unwind_info) = unwind {
                compiled_function.set_unwind_info(unwind_info);
            }
        }

        if body_and_tunables
            .map(|(_, t)| t.generate_native_debuginfo)
            .unwrap_or(false)
        {
            compiled_function.set_value_labels_ranges(compiled_code.value_labels_ranges.clone());

            // DWARF debugging needs the CFA-based unwind information even on Windows.
            if !matches!(
                compiled_function.metadata().unwind_info,
                Some(UnwindInfo::SystemV(_))
            ) {
                let cfa_unwind = compiled_code
                    .create_unwind_info_of_kind(isa, UnwindInfoKind::SystemV)
                    .map_err(|error| CompileError::Codegen(pretty_error(&context.func, error)))?;

                if let Some(UnwindInfo::SystemV(cfa_unwind_info)) = cfa_unwind {
                    compiled_function.set_cfa_unwind_info(cfa_unwind_info);
                }
            }
        }

        compiled_function
            .set_sized_stack_slots(std::mem::take(&mut context.func.sized_stack_slots));
        self.compiler.contexts.lock().unwrap().push(self.cx);

        Ok(compiled_function)
    }
}

/// Convert from Cranelift's representation of a stack map to Wasmtime's
/// compiler-agnostic representation.
///
/// Here `section` is the wasmtime data section being created and `range` is the
/// range of the function being added. The `clif_stack_maps` entry is the raw
/// listing of stack maps from Cranelift.
fn clif_to_env_stack_maps(
    section: &mut StackMapSection,
    range: Range<u64>,
    clif_stack_maps: &[(CodeOffset, u32, ir::UserStackMap)],
) {
    for (offset, frame_size, stack_map) in clif_stack_maps {
        let mut frame_offsets = Vec::new();
        for (ty, frame_offset) in stack_map.entries() {
            assert_eq!(ty, ir::types::I32);
            frame_offsets.push(frame_offset);
        }
        let code_offset = range.start + u64::from(*offset);
        assert!(code_offset < range.end);
        section.push(code_offset, *frame_size, frame_offsets.into_iter());
    }
}

/// Convert from Cranelift's representation of exception handler
/// metadata to Wasmtime's compiler-agnostic representation.
///
/// Here `builder` is the wasmtime-unwinder exception section being
/// created and `range` is the range of the function being added. The
/// `call_sites` iterator is the raw iterator over callsite metadata
/// (including exception handlers) from Cranelift.
fn clif_to_env_exception_tables<'a>(
    builder: &mut ExceptionTableBuilder,
    range: Range<u64>,
    call_sites: impl Iterator<Item = FinalizedMachCallSite<'a>>,
) -> anyhow::Result<()> {
    builder.add_func(CodeOffset::try_from(range.start).unwrap(), call_sites)
}

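/// Save the entry trampoline's frame pointer, stack pointer, and trap-handler
/// address into the `VMStoreContext` just before entering Wasm, so that traps
/// and stack walking can find the entry frame later.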
fn save_last_wasm_entry_context(
    builder: &mut FunctionBuilder,
    pointer_type: ir::Type,
    ptr_size: &dyn PtrSize,
    vm_store_context_offset: u32,
    vmctx: Value,
    block: ir::Block,
) {
    // First we need to get the `VMStoreContext`.
    let vm_store_context = builder.ins().load(
        pointer_type,
        MemFlags::trusted(),
        vmctx,
        i32::try_from(vm_store_context_offset).unwrap(),
    );

    // Save the current fp/sp of the entry trampoline into the `VMStoreContext`.
    let fp = builder.ins().get_frame_pointer(pointer_type);
    builder.ins().store(
        MemFlags::trusted(),
        fp,
        vm_store_context,
        ptr_size.vmstore_context_last_wasm_entry_fp(),
    );
    let sp = builder.ins().get_stack_pointer(pointer_type);
    builder.ins().store(
        MemFlags::trusted(),
        sp,
        vm_store_context,
        ptr_size.vmstore_context_last_wasm_entry_sp(),
    );

    // Also save the address of this function's exception handler. This is used
    // as a resumption point for traps, for example.
    let trap_handler = builder
        .ins()
        .get_exception_handler_address(pointer_type, block, 0);
    builder.ins().store(
        MemFlags::trusted(),
        trap_handler,
        vm_store_context,
        ptr_size.vmstore_context_last_wasm_entry_trap_handler(),
    );
}

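/// Save the exit trampoline's frame pointer and the Wasm return address into
/// the `VMStoreContext` (`limits`) just before calling out of Wasm, for use by
/// stack walking and exception unwind.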
fn save_last_wasm_exit_fp_and_pc(
    builder: &mut FunctionBuilder,
    pointer_type: ir::Type,
    ptr: &impl PtrSize,
    limits: Value,
) {
    // Save the trampoline FP to the limits. Exception unwind needs
    // this so that it can know the SP (bottom of frame) for the very
    // last Wasm frame.
    let trampoline_fp = builder.ins().get_frame_pointer(pointer_type);
    builder.ins().store(
        MemFlags::trusted(),
        trampoline_fp,
        limits,
        ptr.vmstore_context_last_wasm_exit_trampoline_fp(),
    );

    // Finally save the Wasm return address to the limits.
    let wasm_pc = builder.ins().get_return_address(pointer_type);
    builder.ins().store(
        MemFlags::trusted(),
        wasm_pc,
        limits,
        ptr.vmstore_context_last_wasm_exit_pc(),
    );
}

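/// Encode a `FuncKey` as a Cranelift `UserFuncName` via the key's raw
/// `(namespace, index)` parts.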
fn key_to_name(key: FuncKey) -> ir::UserFuncName {
    let (namespace, index) = key.into_raw_parts();
    ir::UserFuncName::User(ir::UserExternalName { namespace, index })
}