//! Compilation of component model trampolines into Cranelift IR.

use crate::{compiler::Compiler, TRAP_ALWAYS, TRAP_CANNOT_ENTER, TRAP_INTERNAL_ASSERT};
use anyhow::Result;
use cranelift_codegen::ir::condcodes::IntCC;
use cranelift_codegen::ir::{self, InstBuilder, MemFlags};
use cranelift_codegen::isa::{CallConv, TargetIsa};
use cranelift_frontend::FunctionBuilder;
use std::any::Any;
use wasmtime_environ::component::*;
use wasmtime_environ::{HostCall, ModuleInternedTypeIndex, PtrSize, Tunables, WasmValType};
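
/// Compilation state for a single component trampoline.
///
/// One of these is created per `TrampolineIndex` and per `Abi`, and drives the
/// translation of that trampoline into Cranelift IR via `translate` below.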
struct TrampolineCompiler<'a> {
compiler: &'a Compiler,
isa: &'a (dyn TargetIsa + 'static),
builder: FunctionBuilder<'a>,
component: &'a Component,
types: &'a ComponentTypesBuilder,
offsets: VMComponentOffsets<u8>,
abi: Abi,
block0: ir::Block,
signature: ModuleInternedTypeIndex,
tunables: &'a Tunables,
}

/// The calling convention a generated trampoline uses.
#[derive(Debug, Copy, Clone)]
enum Abi {
/// Wasm calling convention (`crate::wasm_call_signature`): called directly from compiled wasm code.
Wasm,
/// Array calling convention (`crate::array_call_signature`): arguments and results are passed through a raw pointer/length pair.
Array,
}
impl<'a> TrampolineCompiler<'a> {
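/// Creates a new trampoline compiler for the trampoline with the given
/// `index` and `abi`, allocating a fresh Cranelift function with the
/// signature appropriate for that ABI.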
fn new(
compiler: &'a Compiler,
func_compiler: &'a mut super::FunctionCompiler<'_>,
component: &'a Component,
types: &'a ComponentTypesBuilder,
index: TrampolineIndex,
abi: Abi,
tunables: &'a Tunables,
) -> TrampolineCompiler<'a> {
let isa = &*compiler.isa;
let signature = component.trampolines[index];
let ty = types[signature].unwrap_func();
let func = ir::Function::with_name_signature(
ir::UserFuncName::user(0, 0),
match abi {
Abi::Wasm => crate::wasm_call_signature(isa, ty, &compiler.tunables),
Abi::Array => crate::array_call_signature(isa),
},
);
let (builder, block0) = func_compiler.builder(func);
TrampolineCompiler {
compiler,
isa,
builder,
component,
types,
offsets: VMComponentOffsets::new(isa.pointer_bytes(), component),
abi,
block0,
signature,
tunables,
}
}
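
/// Translates `trampoline` into the function this compiler was created with.
///
/// Note that many of the async/stream/future/error-context trampolines are
/// not implemented yet and currently fall through to `todo!()`.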
fn translate(&mut self, trampoline: &Trampoline) {
match trampoline {
Trampoline::Transcoder {
op,
from,
from64,
to,
to64,
} => {
match self.abi {
Abi::Wasm => {
self.translate_transcode(*op, *from, *from64, *to, *to64);
}
Abi::Array => {
self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
}
}
}
Trampoline::LowerImport {
index,
options,
lower_ty,
} => {
self.translate_lower_import(*index, options, *lower_ty);
}
Trampoline::AlwaysTrap => {
self.translate_always_trap();
}
Trampoline::TaskBackpressure { instance } => {
_ = instance;
todo!()
}
Trampoline::TaskReturn => todo!(),
Trampoline::TaskWait {
instance,
async_,
memory,
} => {
_ = (instance, async_, memory);
todo!()
}
Trampoline::TaskPoll {
instance,
async_,
memory,
} => {
_ = (instance, async_, memory);
todo!()
}
Trampoline::TaskYield { async_ } => {
_ = async_;
todo!()
}
Trampoline::SubtaskDrop { instance } => {
_ = instance;
todo!()
}
Trampoline::StreamNew { ty } => {
_ = ty;
todo!()
}
Trampoline::StreamRead { ty, options } => {
_ = (ty, options);
todo!()
}
Trampoline::StreamWrite { ty, options } => {
_ = (ty, options);
todo!()
}
Trampoline::StreamCancelRead { ty, async_ } => {
_ = (ty, async_);
todo!()
}
Trampoline::StreamCancelWrite { ty, async_ } => {
_ = (ty, async_);
todo!()
}
Trampoline::StreamCloseReadable { ty } => {
_ = ty;
todo!()
}
Trampoline::StreamCloseWritable { ty } => {
_ = ty;
todo!()
}
Trampoline::FutureNew { ty } => {
_ = ty;
todo!()
}
Trampoline::FutureRead { ty, options } => {
_ = (ty, options);
todo!()
}
Trampoline::FutureWrite { ty, options } => {
_ = (ty, options);
todo!()
}
Trampoline::FutureCancelRead { ty, async_ } => {
_ = (ty, async_);
todo!()
}
Trampoline::FutureCancelWrite { ty, async_ } => {
_ = (ty, async_);
todo!()
}
Trampoline::FutureCloseReadable { ty } => {
_ = ty;
todo!()
}
Trampoline::FutureCloseWritable { ty } => {
_ = ty;
todo!()
}
Trampoline::ErrorContextNew { ty, options } => {
_ = (ty, options);
todo!()
}
Trampoline::ErrorContextDebugMessage { ty, options } => {
_ = (ty, options);
todo!()
}
Trampoline::ErrorContextDrop { ty } => {
_ = ty;
todo!()
}
Trampoline::ResourceNew(ty) => self.translate_resource_new(*ty),
Trampoline::ResourceRep(ty) => self.translate_resource_rep(*ty),
Trampoline::ResourceDrop(ty) => self.translate_resource_drop(*ty),
Trampoline::ResourceTransferOwn => {
self.translate_resource_libcall(host::resource_transfer_own, |me, rets| {
rets[0] = me.raise_if_resource_trapped(rets[0]);
})
}
Trampoline::ResourceTransferBorrow => {
self.translate_resource_libcall(host::resource_transfer_borrow, |me, rets| {
rets[0] = me.raise_if_resource_trapped(rets[0]);
})
}
Trampoline::ResourceEnterCall => {
self.translate_resource_libcall(host::resource_enter_call, |_, _| {})
}
Trampoline::ResourceExitCall => {
self.translate_resource_libcall(host::resource_exit_call, |me, rets| {
me.raise_if_host_trapped(rets.pop().unwrap());
})
}
Trampoline::AsyncEnterCall => todo!(),
Trampoline::AsyncExitCall {
callback,
post_return,
} => {
_ = (callback, post_return);
todo!()
}
Trampoline::FutureTransfer => {
_ = host::future_transfer;
todo!()
}
Trampoline::StreamTransfer => {
_ = host::stream_transfer;
todo!()
}
Trampoline::ErrorContextTransfer => {
_ = host::error_context_transfer;
todo!()
}
}
}
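
/// Translates a trampoline which calls the host function that lowers a
/// component import: arguments are spilled to (or received in) a raw value
/// array, the lowering callee stored in the `VMComponentContext` is invoked
/// indirectly, and its i8 return value indicates whether a trap must be
/// raised.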
fn translate_lower_import(
&mut self,
index: LoweredIndex,
options: &CanonicalOptions,
lower_ty: TypeFuncIndex,
) {
let pointer_type = self.isa.pointer_type();
let args = self.builder.func.dfg.block_params(self.block0).to_vec();
let vmctx = args[0];
let wasm_func_ty = self.types[self.signature].unwrap_func();
let (values_vec_ptr, values_vec_len) = match self.abi {
Abi::Wasm => {
let (ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
wasm_func_ty,
&mut self.builder,
&args[2..],
);
let len = self.builder.ins().iconst(pointer_type, i64::from(len));
(ptr, len)
}
Abi::Array => {
let params = self.builder.func.dfg.block_params(self.block0);
(params[2], params[3])
}
};
let mut callee_args = Vec::new();
let mut host_sig = ir::Signature::new(CallConv::triple_default(self.isa.triple()));
let CanonicalOptions {
instance,
memory,
realloc,
post_return,
string_encoding,
callback: _,
async_,
} = *options;
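
// The host lowering function is passed: the component vmctx, this
// lowering's host data pointer, the type index being lowered, a pointer to
// the instance's flags, optional memory and realloc pointers (null when
// absent), the string encoding, the async flag, and finally the
// pointer/length pair of the value storage array. It returns an i8
// indicating whether the call succeeded.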
host_sig.params.push(ir::AbiParam::new(pointer_type));
callee_args.push(vmctx);
host_sig.params.push(ir::AbiParam::new(pointer_type));
callee_args.push(self.builder.ins().load(
pointer_type,
MemFlags::trusted(),
vmctx,
i32::try_from(self.offsets.lowering_data(index)).unwrap(),
));
host_sig.params.push(ir::AbiParam::new(ir::types::I32));
callee_args.push(
self.builder
.ins()
.iconst(ir::types::I32, i64::from(lower_ty.as_u32())),
);
host_sig.params.push(ir::AbiParam::new(pointer_type));
callee_args.push(
self.builder
.ins()
.iadd_imm(vmctx, i64::from(self.offsets.instance_flags(instance))),
);
host_sig.params.push(ir::AbiParam::new(pointer_type));
callee_args.push(match memory {
Some(idx) => self.builder.ins().load(
pointer_type,
MemFlags::trusted(),
vmctx,
i32::try_from(self.offsets.runtime_memory(idx)).unwrap(),
),
None => self.builder.ins().iconst(pointer_type, 0),
});
host_sig.params.push(ir::AbiParam::new(pointer_type));
callee_args.push(match realloc {
Some(idx) => self.builder.ins().load(
pointer_type,
MemFlags::trusted(),
vmctx,
i32::try_from(self.offsets.runtime_realloc(idx)).unwrap(),
),
None => self.builder.ins().iconst(pointer_type, 0),
});
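
// Lowered imports never carry a `post-return` function (that option only
// applies to lifted functions), so nothing is passed for it here.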
assert!(post_return.is_none());
host_sig.params.push(ir::AbiParam::new(ir::types::I8));
callee_args.push(
self.builder
.ins()
.iconst(ir::types::I8, i64::from(string_encoding as u8)),
);
host_sig.params.push(ir::AbiParam::new(ir::types::I8));
callee_args.push(
self.builder
.ins()
.iconst(ir::types::I8, if async_ { 1 } else { 0 }),
);
host_sig.params.push(ir::AbiParam::new(pointer_type));
callee_args.push(values_vec_ptr);
host_sig.params.push(ir::AbiParam::new(pointer_type));
callee_args.push(values_vec_len);
host_sig.returns.push(ir::AbiParam::new(ir::types::I8));
let host_fn = self.builder.ins().load(
pointer_type,
MemFlags::trusted(),
vmctx,
i32::try_from(self.offsets.lowering_callee(index)).unwrap(),
);
let host_sig = self.builder.import_signature(host_sig);
let call = self.compiler.call_indirect_host(
&mut self.builder,
HostCall::ComponentLowerImport,
host_sig,
host_fn,
&callee_args,
);
let succeeded = self.builder.func.dfg.inst_results(call)[0];
match self.abi {
Abi::Wasm => {
self.raise_if_host_trapped(succeeded);
let results = self.compiler.load_values_from_array(
wasm_func_ty.returns(),
&mut self.builder,
values_vec_ptr,
values_vec_len,
);
self.builder.ins().return_(&results);
}
Abi::Array => {
self.builder.ins().return_(&[succeeded]);
}
}
}
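
/// Translates the `always-trap` trampoline: with signals-based traps this is
/// a single trap instruction, otherwise the `trap` host libcall is invoked
/// with `Trap::AlwaysTrapAdapter` and the function ends in an unreachable
/// internal-assert trap.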
fn translate_always_trap(&mut self) {
if self.tunables.signals_based_traps {
self.builder.ins().trap(TRAP_ALWAYS);
return;
}
let args = self.abi_load_params();
let vmctx = args[0];
let (host_sig, index) = host::trap(self.isa, &mut self.builder.func);
let host_fn = self.load_libcall(vmctx, index);
let code = self.builder.ins().iconst(
ir::types::I8,
i64::from(wasmtime_environ::Trap::AlwaysTrapAdapter as u8),
);
self.compiler.call_indirect_host(
&mut self.builder,
index,
host_sig,
host_fn,
&[vmctx, code],
);
let succeeded = self.builder.ins().iconst(ir::types::I8, 0);
self.raise_if_host_trapped(succeeded);
self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
}
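
/// Translates a `resource.new` intrinsic which forwards the 32-bit
/// representation to the `resource_new32` host libcall and returns the
/// resulting handle.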
fn translate_resource_new(&mut self, resource: TypeResourceTableIndex) {
let args = self.abi_load_params();
let vmctx = args[0];
let mut host_args = Vec::new();
host_args.push(vmctx);
host_args.push(
self.builder
.ins()
.iconst(ir::types::I32, i64::from(resource.as_u32())),
);
host_args.push(args[2]);
assert_eq!(
self.types[self.signature].unwrap_func().params()[0],
WasmValType::I32
);
let call = self.call_libcall(vmctx, host::resource_new32, &host_args);
let result = self.builder.func.dfg.inst_results(call)[0];
let result = self.raise_if_resource_trapped(result);
self.abi_store_results(&[result]);
}
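
/// Translates a `resource.rep` intrinsic which calls the `resource_rep32`
/// host libcall to recover the 32-bit representation behind a resource
/// handle.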
fn translate_resource_rep(&mut self, resource: TypeResourceTableIndex) {
let args = self.abi_load_params();
let vmctx = args[0];
let mut host_args = Vec::new();
host_args.push(vmctx);
host_args.push(
self.builder
.ins()
.iconst(ir::types::I32, i64::from(resource.as_u32())),
);
host_args.push(args[2]);
assert_eq!(
self.types[self.signature].unwrap_func().returns()[0],
WasmValType::I32
);
let call = self.call_libcall(vmctx, host::resource_rep32, &host_args);
let result = self.builder.func.dfg.inst_results(call)[0];
let result = self.raise_if_resource_trapped(result);
self.abi_store_results(&[result]);
}
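
/// Translates a `resource.drop` intrinsic which calls the `resource_drop`
/// host libcall and then, if required, invokes the resource's destructor
/// directly from this trampoline.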
fn translate_resource_drop(&mut self, resource: TypeResourceTableIndex) {
let args = self.abi_load_params();
let vmctx = args[0];
let caller_vmctx = args[1];
let pointer_type = self.isa.pointer_type();
let mut host_args = Vec::new();
host_args.push(vmctx);
host_args.push(
self.builder
.ins()
.iconst(ir::types::I32, i64::from(resource.as_u32())),
);
host_args.push(args[2]);
let call = self.call_libcall(vmctx, host::resource_drop, &host_args);
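
// The 64-bit return value of `resource_drop` is interpreted as follows: -1
// means the host raised a trap, 0 means no destructor needs to run, and any
// other value carries the 32-bit representation in its upper bits (recovered
// below with a right shift by one) to pass to the destructor.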
let should_run_destructor = self.builder.func.dfg.inst_results(call)[0];
let minus_one = self.builder.ins().iconst(ir::types::I64, -1);
let succeeded = self
.builder
.ins()
.icmp(IntCC::NotEqual, should_run_destructor, minus_one);
self.raise_if_host_trapped(succeeded);
let resource_ty = self.types[resource].ty;
let resource_def = self
.component
.defined_resource_index(resource_ty)
.map(|idx| {
self.component
.initializers
.iter()
.filter_map(|i| match i {
GlobalInitializer::Resource(r) if r.index == idx => Some(r),
_ => None,
})
.next()
.unwrap()
});
let has_destructor = match resource_def {
Some(def) => def.dtor.is_some(),
None => true,
};
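
// Lay out the control flow: branch to `run_destructor_block` when a
// destructor (and possibly a reentrance check) must run, otherwise jump
// straight to `return_block`.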
self.builder.ensure_inserted_block();
let current_block = self.builder.current_block().unwrap();
let run_destructor_block = self.builder.create_block();
self.builder
.insert_block_after(run_destructor_block, current_block);
let return_block = self.builder.create_block();
self.builder
.insert_block_after(return_block, run_destructor_block);
self.builder.ins().brif(
should_run_destructor,
run_destructor_block,
&[],
return_block,
&[],
);
let trusted = ir::MemFlags::trusted().with_readonly();
self.builder.switch_to_block(run_destructor_block);
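
// When the resource is defined by a different component instance than the
// one owning this handle table, that instance's `may_enter` flag must be
// set before its destructor can be entered; otherwise trap with
// `TRAP_CANNOT_ENTER`.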
if let Some(def) = resource_def {
if self.types[resource].instance != def.instance {
let flags = self.builder.ins().load(
ir::types::I32,
trusted,
vmctx,
i32::try_from(self.offsets.instance_flags(def.instance)).unwrap(),
);
let masked = self
.builder
.ins()
.band_imm(flags, i64::from(FLAG_MAY_ENTER));
self.builder.ins().trapz(masked, TRAP_CANNOT_ENTER);
}
}
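
// If a destructor is statically known to exist, recover the 32-bit
// representation from the libcall's return value and call the destructor
// through its `VMFuncRef` using the wasm calling convention.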
if has_destructor {
let rep = self.builder.ins().ushr_imm(should_run_destructor, 1);
let rep = self.builder.ins().ireduce(ir::types::I32, rep);
let index = self.types[resource].ty;
let dtor_func_ref = self.builder.ins().load(
pointer_type,
trusted,
vmctx,
i32::try_from(self.offsets.resource_destructor(index)).unwrap(),
);
if cfg!(debug_assertions) {
self.builder
.ins()
.trapz(dtor_func_ref, TRAP_INTERNAL_ASSERT);
}
let func_addr = self.builder.ins().load(
pointer_type,
trusted,
dtor_func_ref,
i32::from(self.offsets.ptr.vm_func_ref_wasm_call()),
);
let callee_vmctx = self.builder.ins().load(
pointer_type,
trusted,
dtor_func_ref,
i32::from(self.offsets.ptr.vm_func_ref_vmctx()),
);
let sig = crate::wasm_call_signature(
self.isa,
&self.types[self.signature].unwrap_func(),
&self.compiler.tunables,
);
let sig_ref = self.builder.import_signature(sig);
self.builder.ins().call_indirect(
sig_ref,
func_addr,
&[callee_vmctx, caller_vmctx, rep],
);
}
self.builder.ins().jump(return_block, &[]);
self.builder.seal_block(run_destructor_block);
self.builder.switch_to_block(return_block);
self.builder.seal_block(return_block);
self.abi_store_results(&[]);
}
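
/// Translates a trampoline which forwards its wasm arguments directly to one
/// of the resource-related host libcalls, letting `handle_results`
/// post-process the libcall's return values (e.g. raising a trap). Only the
/// wasm ABI is supported; the array ABI variant traps immediately.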
fn translate_resource_libcall(
&mut self,
get_libcall: fn(
&dyn TargetIsa,
&mut ir::Function,
) -> (ir::SigRef, ComponentBuiltinFunctionIndex),
handle_results: fn(&mut Self, &mut Vec<ir::Value>),
) {
match self.abi {
Abi::Wasm => {}
Abi::Array => {
self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
return;
}
}
let args = self.builder.func.dfg.block_params(self.block0).to_vec();
let vmctx = args[0];
let mut host_args = vec![vmctx];
host_args.extend(args[2..].iter().copied());
let call = self.call_libcall(vmctx, get_libcall, &host_args);
let mut results = self.builder.func.dfg.inst_results(call).to_vec();
handle_results(self, &mut results);
self.builder.ins().return_(&results);
}
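
/// Loads the host function pointer for the builtin `index` out of the
/// builtins array stored in the `VMComponentContext`.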
fn load_libcall(
&mut self,
vmctx: ir::Value,
index: ComponentBuiltinFunctionIndex,
) -> ir::Value {
let pointer_type = self.isa.pointer_type();
let builtins_array = self.builder.ins().load(
pointer_type,
MemFlags::trusted().with_readonly(),
vmctx,
i32::try_from(self.offsets.builtins()).unwrap(),
);
self.builder.ins().load(
pointer_type,
MemFlags::trusted().with_readonly(),
builtins_array,
i32::try_from(index.index() * u32::from(self.offsets.ptr.size())).unwrap(),
)
}
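
/// Loads the wasm-level parameters of this trampoline: the block parameters
/// directly for the wasm ABI, or the vmctx/caller-vmctx pair followed by
/// values loaded out of the caller-provided pointer/length array for the
/// array ABI.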
fn abi_load_params(&mut self) -> Vec<ir::Value> {
let mut block0_params = self.builder.func.dfg.block_params(self.block0).to_vec();
match self.abi {
Abi::Wasm => block0_params,
Abi::Array => {
let results = self.compiler.load_values_from_array(
self.types[self.signature].unwrap_func().params(),
&mut self.builder,
block0_params[2],
block0_params[3],
);
block0_params.truncate(2);
block0_params.extend(results);
block0_params
}
}
}
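
/// Stores the wasm-level results and returns from the trampoline: a plain
/// `return` for the wasm ABI, or a spill into the caller-provided array plus
/// an i8 "succeeded" flag for the array ABI.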
fn abi_store_results(&mut self, results: &[ir::Value]) {
match self.abi {
Abi::Wasm => {
self.builder.ins().return_(results);
}
Abi::Array => {
let block0_params = self.builder.func.dfg.block_params(self.block0);
let (ptr, len) = (block0_params[2], block0_params[3]);
self.compiler.store_values_to_array(
&mut self.builder,
self.types[self.signature].unwrap_func().returns(),
results,
ptr,
len,
);
let true_value = self.builder.ins().iconst(ir::types::I8, 1);
self.builder.ins().return_(&[true_value]);
}
}
}
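
/// Loads the caller's vmctx from the second block parameter and delegates to
/// the shared compiler helper which raises a trap when `succeeded` (an i8
/// boolean returned from a host call) indicates failure.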
fn raise_if_host_trapped(&mut self, succeeded: ir::Value) {
let caller_vmctx = self.builder.func.dfg.block_params(self.block0)[1];
self.compiler
.raise_if_host_trapped(&mut self.builder, caller_vmctx, succeeded);
}
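
/// Raises a trap if a transcoder libcall reported failure by returning a
/// pointer-sized -1 as its amount-copied result.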
fn raise_if_transcode_trapped(&mut self, amount_copied: ir::Value) {
let pointer_type = self.isa.pointer_type();
let minus_one = self.builder.ins().iconst(pointer_type, -1);
let succeeded = self
.builder
.ins()
.icmp(IntCC::NotEqual, amount_copied, minus_one);
self.raise_if_host_trapped(succeeded);
}
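
/// Raises a trap if a resource libcall returned -1, otherwise narrows the
/// 64-bit return value to the 32-bit result expected by wasm.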
fn raise_if_resource_trapped(&mut self, ret: ir::Value) -> ir::Value {
let minus_one = self.builder.ins().iconst(ir::types::I64, -1);
let succeeded = self.builder.ins().icmp(IntCC::NotEqual, ret, minus_one);
self.raise_if_host_trapped(succeeded);
self.builder.ins().ireduce(ir::types::I32, ret)
}
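
/// Loads the host libcall identified by `get_libcall` and calls it
/// indirectly with `args`.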
fn call_libcall(
&mut self,
vmctx: ir::Value,
get_libcall: fn(
&dyn TargetIsa,
&mut ir::Function,
) -> (ir::SigRef, ComponentBuiltinFunctionIndex),
args: &[ir::Value],
) -> ir::Inst {
let (host_sig, index) = get_libcall(self.isa, &mut self.builder.func);
let host_fn = self.load_libcall(vmctx, index);
self.compiler
.call_indirect_host(&mut self.builder, index, host_sig, host_fn, args)
}
}
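
// Entry point used by the rest of the compiler: each component trampoline is
// compiled twice, once per `Abi`, and both variants are packaged up in an
// `AllCallFunc`. The generated prologue asserts the vmctx magic (via
// `debug_assert_vmctx_kind`) and, for the wasm ABI, records the exit FP/PC in
// the store before translating the trampoline body.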
impl ComponentCompiler for Compiler {
fn compile_trampoline(
&self,
component: &ComponentTranslation,
types: &ComponentTypesBuilder,
index: TrampolineIndex,
tunables: &Tunables,
) -> Result<AllCallFunc<Box<dyn Any + Send>>> {
let compile = |abi: Abi| -> Result<_> {
let mut compiler = self.function_compiler();
let mut c = TrampolineCompiler::new(
self,
&mut compiler,
&component.component,
types,
index,
abi,
tunables,
);
let vmctx = c.builder.block_params(c.block0)[0];
let pointer_type = self.isa.pointer_type();
super::debug_assert_vmctx_kind(
&*self.isa,
&mut c.builder,
vmctx,
wasmtime_environ::component::VMCOMPONENT_MAGIC,
);
if let Abi::Wasm = abi {
let vm_store_context = c.builder.ins().load(
pointer_type,
MemFlags::trusted(),
vmctx,
i32::try_from(c.offsets.vm_store_context()).unwrap(),
);
super::save_last_wasm_exit_fp_and_pc(
&mut c.builder,
pointer_type,
&c.offsets.ptr,
vm_store_context,
);
}
c.translate(&component.trampolines[index]);
c.builder.finalize();
Ok(Box::new(compiler.finish(&format!(
"component_trampoline_{}_{abi:?}",
index.as_u32(),
))?))
};
Ok(AllCallFunc {
wasm_call: compile(Abi::Wasm)?,
array_call: compile(Abi::Array)?,
})
}
}
impl TrampolineCompiler<'_> {
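/// Translates a string-transcoding trampoline: the source and destination
/// memory bases are loaded from the `VMComponentContext`, pointers and
/// lengths are converted to the host pointer width, and the appropriate
/// transcoder libcall is invoked, with its results checked for traps and
/// narrowed back to the wasm-level representation.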
fn translate_transcode(
&mut self,
op: Transcode,
from: RuntimeMemoryIndex,
from64: bool,
to: RuntimeMemoryIndex,
to64: bool,
) {
let pointer_type = self.isa.pointer_type();
let vmctx = self.builder.func.dfg.block_params(self.block0)[0];
let get_libcall = match op {
Transcode::Copy(FixedEncoding::Utf8) => host::utf8_to_utf8,
Transcode::Copy(FixedEncoding::Utf16) => host::utf16_to_utf16,
Transcode::Copy(FixedEncoding::Latin1) => host::latin1_to_latin1,
Transcode::Latin1ToUtf16 => host::latin1_to_utf16,
Transcode::Latin1ToUtf8 => host::latin1_to_utf8,
Transcode::Utf16ToCompactProbablyUtf16 => host::utf16_to_compact_probably_utf16,
Transcode::Utf16ToCompactUtf16 => host::utf16_to_compact_utf16,
Transcode::Utf16ToLatin1 => host::utf16_to_latin1,
Transcode::Utf16ToUtf8 => host::utf16_to_utf8,
Transcode::Utf8ToCompactUtf16 => host::utf8_to_compact_utf16,
Transcode::Utf8ToLatin1 => host::utf8_to_latin1,
Transcode::Utf8ToUtf16 => host::utf8_to_utf16,
};
let from_base = self.load_runtime_memory_base(vmctx, from);
let to_base = self.load_runtime_memory_base(vmctx, to);
let mut args = Vec::new();
let uses_retptr = match op {
Transcode::Utf16ToUtf8
| Transcode::Latin1ToUtf8
| Transcode::Utf8ToLatin1
| Transcode::Utf16ToLatin1 => true,
_ => false,
};
match op {
Transcode::Copy(_)
| Transcode::Latin1ToUtf16
| Transcode::Utf16ToCompactProbablyUtf16
| Transcode::Utf8ToLatin1
| Transcode::Utf16ToLatin1
| Transcode::Utf8ToUtf16 => {
args.push(self.ptr_param(0, from64, from_base));
args.push(self.len_param(1, from64));
args.push(self.ptr_param(2, to64, to_base));
}
Transcode::Utf16ToUtf8 | Transcode::Latin1ToUtf8 => {
args.push(self.ptr_param(0, from64, from_base));
args.push(self.len_param(1, from64));
args.push(self.ptr_param(2, to64, to_base));
args.push(self.len_param(3, to64));
}
Transcode::Utf8ToCompactUtf16 | Transcode::Utf16ToCompactUtf16 => {
args.push(self.ptr_param(0, from64, from_base));
args.push(self.len_param(1, from64));
args.push(self.ptr_param(2, to64, to_base));
args.push(self.len_param(3, to64));
args.push(self.len_param(4, to64));
}
};
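
// Transcoders which produce two results return the second one through a
// pointer-sized stack slot passed as their final argument.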
if uses_retptr {
let slot = self
.builder
.func
.create_sized_stack_slot(ir::StackSlotData::new(
ir::StackSlotKind::ExplicitSlot,
pointer_type.bytes(),
0,
));
args.push(self.builder.ins().stack_addr(pointer_type, slot, 0));
}
let call = self.call_libcall(vmctx, get_libcall, &args);
let mut results = self.builder.func.dfg.inst_results(call).to_vec();
if uses_retptr {
results.push(self.builder.ins().load(
pointer_type,
ir::MemFlags::trusted(),
*args.last().unwrap(),
0,
));
}
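
// Check the libcall's results for traps and convert them from host pointer
// width back to the wasm-level 32/64-bit representation.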
let mut raw_results = Vec::new();
match op {
Transcode::Copy(_) | Transcode::Latin1ToUtf16 => {
self.raise_if_host_trapped(results[0]);
}
Transcode::Utf8ToUtf16
| Transcode::Utf16ToCompactProbablyUtf16
| Transcode::Utf8ToCompactUtf16
| Transcode::Utf16ToCompactUtf16 => {
self.raise_if_transcode_trapped(results[0]);
raw_results.push(self.cast_from_pointer(results[0], to64));
}
Transcode::Latin1ToUtf8
| Transcode::Utf16ToUtf8
| Transcode::Utf8ToLatin1
| Transcode::Utf16ToLatin1 => {
self.raise_if_transcode_trapped(results[0]);
raw_results.push(self.cast_from_pointer(results[0], from64));
raw_results.push(self.cast_from_pointer(results[1], to64));
}
};
self.builder.ins().return_(&raw_results);
}
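
/// Loads the `param`th wasm-level argument as a length, converting it to the
/// host pointer width if necessary.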
fn len_param(&mut self, param: usize, is64: bool) -> ir::Value {
let val = self.builder.func.dfg.block_params(self.block0)[2 + param];
self.cast_to_pointer(val, is64)
}
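
/// Loads the `param`th wasm-level argument as an offset and adds it to
/// `base` to produce a host pointer into linear memory.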
fn ptr_param(&mut self, param: usize, is64: bool, base: ir::Value) -> ir::Value {
let val = self.len_param(param, is64);
self.builder.ins().iadd(base, val)
}
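
/// Extends or truncates a wasm-level 32/64-bit value to the host pointer
/// type.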
fn cast_to_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
let pointer_type = self.isa.pointer_type();
let host64 = pointer_type == ir::types::I64;
if is64 == host64 {
val
} else if !is64 {
assert!(host64);
self.builder.ins().uextend(pointer_type, val)
} else {
assert!(!host64);
self.builder.ins().ireduce(pointer_type, val)
}
}
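
/// Truncates or extends a host-pointer-sized value back to the wasm-level
/// 32/64-bit representation.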
fn cast_from_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
let host64 = self.isa.pointer_type() == ir::types::I64;
if is64 == host64 {
val
} else if !is64 {
assert!(host64);
self.builder.ins().ireduce(ir::types::I32, val)
} else {
assert!(!host64);
self.builder.ins().uextend(ir::types::I64, val)
}
}
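
/// Loads the base address of the runtime linear memory `mem` by first
/// loading its `VMMemoryDefinition` pointer from the `VMComponentContext`.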
fn load_runtime_memory_base(&mut self, vmctx: ir::Value, mem: RuntimeMemoryIndex) -> ir::Value {
let pointer_type = self.isa.pointer_type();
let from_vmmemory_definition = self.builder.ins().load(
pointer_type,
MemFlags::trusted(),
vmctx,
i32::try_from(self.offsets.runtime_memory(mem)).unwrap(),
);
self.builder.ins().load(
pointer_type,
MemFlags::trusted(),
from_vmmemory_definition,
i32::from(self.offsets.ptr.vmmemory_definition_base()),
)
}
}
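
/// Host libcall signatures used by the trampolines above.
///
/// The `define!` macro generates, for each builtin component function, a
/// helper that imports the corresponding Cranelift signature and returns it
/// alongside the builtin's `ComponentBuiltinFunctionIndex`.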
mod host {
use cranelift_codegen::ir::{self, AbiParam};
use cranelift_codegen::isa::{CallConv, TargetIsa};
use wasmtime_environ::component::ComponentBuiltinFunctionIndex;
macro_rules! define {
(
$(
$( #[$attr:meta] )*
$name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
)*
) => {
$(
pub(super) fn $name(isa: &dyn TargetIsa, func: &mut ir::Function) -> (ir::SigRef, ComponentBuiltinFunctionIndex) {
let pointer_type = isa.pointer_type();
let params = vec![
$( AbiParam::new(define!(@ty pointer_type $param)) ),*
];
let returns = vec![
$( AbiParam::new(define!(@ty pointer_type $result)) )?
];
let sig = func.import_signature(ir::Signature {
params,
returns,
call_conv: CallConv::triple_default(isa.triple()),
});
(sig, ComponentBuiltinFunctionIndex::$name())
}
)*
};
(@ty $ptr:ident size) => ($ptr);
(@ty $ptr:ident ptr_u8) => ($ptr);
(@ty $ptr:ident ptr_u16) => ($ptr);
(@ty $ptr:ident ptr_size) => ($ptr);
(@ty $ptr:ident bool) => (ir::types::I8);
(@ty $ptr:ident u8) => (ir::types::I8);
(@ty $ptr:ident u32) => (ir::types::I32);
(@ty $ptr:ident u64) => (ir::types::I64);
(@ty $ptr:ident vmctx) => ($ptr);
}
wasmtime_environ::foreach_builtin_component_function!(define);
}