use crate::prelude::*;
use crate::runtime::vm::table::{Table, TableElementType};
use crate::runtime::vm::vmcontext::VMFuncRef;
#[cfg(feature = "gc")]
use crate::runtime::vm::VMGcRef;
use crate::runtime::vm::{HostResultHasUnwindSentinel, Instance, TrapReason, VMStore};
use core::convert::Infallible;
use core::ptr::NonNull;
#[cfg(feature = "threads")]
use core::time::Duration;
use wasmtime_environ::{DataIndex, ElemIndex, FuncIndex, MemoryIndex, TableIndex, Trap};
#[cfg(feature = "wmemcheck")]
use wasmtime_wmemcheck::AccessError::{
DoubleMalloc, InvalidFree, InvalidRead, InvalidWrite, OutOfBounds,
};
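/// Raw `unsafe extern "C"` entry points for the builtin libcalls.
///
/// These are the symbols that compiled Wasm code actually calls. Each shim
/// catches panics and traps via `catch_unwind_and_record_trap` and forwards
/// to the corresponding safe implementation in the parent module.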
pub mod raw {
#![allow(unused_doc_comments, unused_attributes)]
use crate::runtime::vm::{InstanceAndStore, VMContext};
use core::ptr::NonNull;
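/// Generates one `extern "C"` shim per builtin declared by
/// `wasmtime_environ::foreach_builtin_function!`.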
macro_rules! libcall {
(
$(
$( #[cfg($attr:meta)] )?
$name:ident( vmctx: vmctx $(, $pname:ident: $param:ident )* ) $(-> $result:ident)?;
)*
) => {
$(
#[allow(unused_variables, missing_docs)]
pub unsafe extern "C" fn $name(
vmctx: NonNull<VMContext>,
$( $pname : libcall!(@ty $param), )*
) $(-> libcall!(@ty $result))? {
$(#[cfg($attr)])?
{
crate::runtime::vm::traphandlers::catch_unwind_and_record_trap(|| {
InstanceAndStore::from_vmctx(vmctx, |pair| {
let (instance, store) = pair.unpack_mut();
super::$name(store, instance, $($pname),*)
})
})
}
$(
#[cfg(not($attr))]
unreachable!();
)?
}
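// Referencing each generated function from a `#[used]` static helps ensure
// the symbol is not stripped (e.g. under LTO) and spells out its signature.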
#[allow(non_upper_case_globals)]
const _: () = {
#[used]
static I_AM_USED: unsafe extern "C" fn(
NonNull<VMContext>,
$( $pname : libcall!(@ty $param), )*
) $( -> libcall!(@ty $result))? = $name;
};
)*
};
(@ty u32) => (u32);
(@ty u64) => (u64);
(@ty u8) => (u8);
(@ty bool) => (bool);
(@ty pointer) => (*mut u8);
}
wasmtime_environ::foreach_builtin_function!(libcall);
}
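/// Implementation of the `memory.grow` builtin.
///
/// Returns the memory's previous size in pages, or `None` if growth failed.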
fn memory32_grow(
store: &mut dyn VMStore,
instance: &mut Instance,
delta: u64,
memory_index: u32,
) -> Result<Option<AllocationSize>, TrapReason> {
let memory_index = MemoryIndex::from_u32(memory_index);
let result = instance
.memory_grow(store, memory_index, delta)?
.map(|size_in_bytes| {
AllocationSize(size_in_bytes / instance.memory_page_size(memory_index))
});
Ok(result)
}
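/// A growth result measured in pages (for memories) or elements (for tables),
/// passed back to Wasm as a pointer-sized value: `usize::MAX` encodes a failed
/// growth (`-1` in Wasm) and `usize::MAX - 1` is reserved as the unwind
/// sentinel, so every real size must be smaller than both.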
struct AllocationSize(usize);
unsafe impl HostResultHasUnwindSentinel for Option<AllocationSize> {
type Abi = *mut u8;
const SENTINEL: *mut u8 = (usize::MAX - 1) as *mut u8;
fn into_abi(self) -> *mut u8 {
match self {
Some(size) => {
debug_assert!(size.0 < (usize::MAX - 1));
size.0 as *mut u8
}
None => usize::MAX as *mut u8,
}
}
}
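/// Implementation of `table.grow` for `funcref` tables.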
unsafe fn table_grow_func_ref(
store: &mut dyn VMStore,
instance: &mut Instance,
table_index: u32,
delta: u64,
init_value: *mut u8,
) -> Result<Option<AllocationSize>> {
let table_index = TableIndex::from_u32(table_index);
let element = match instance.table_element_type(table_index) {
TableElementType::Func => NonNull::new(init_value.cast::<VMFuncRef>()).into(),
TableElementType::GcRef => unreachable!(),
};
let result = instance
.table_grow(store, table_index, delta, element)?
.map(AllocationSize);
Ok(result)
}
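/// Implementation of `table.grow` for tables of GC references.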
#[cfg(feature = "gc")]
unsafe fn table_grow_gc_ref(
store: &mut dyn VMStore,
instance: &mut Instance,
table_index: u32,
delta: u64,
init_value: u32,
) -> Result<Option<AllocationSize>> {
let table_index = TableIndex::from_u32(table_index);
let element = match instance.table_element_type(table_index) {
TableElementType::Func => unreachable!(),
TableElementType::GcRef => VMGcRef::from_raw_u32(init_value)
.map(|r| {
store
.store_opaque_mut()
.unwrap_gc_store_mut()
.clone_gc_ref(&r)
})
.into(),
};
let result = instance
.table_grow(store, table_index, delta, element)?
.map(AllocationSize);
Ok(result)
}
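/// Implementation of `table.fill` for `funcref` tables.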
unsafe fn table_fill_func_ref(
store: &mut dyn VMStore,
instance: &mut Instance,
table_index: u32,
dst: u64,
val: *mut u8,
len: u64,
) -> Result<()> {
let table_index = TableIndex::from_u32(table_index);
let table = &mut *instance.get_table(table_index);
match table.element_type() {
TableElementType::Func => {
let val = NonNull::new(val.cast::<VMFuncRef>());
table.fill(store.optional_gc_store_mut()?, dst, val.into(), len)?;
Ok(())
}
TableElementType::GcRef => unreachable!(),
}
}
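/// Implementation of `table.fill` for tables of GC references.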
#[cfg(feature = "gc")]
unsafe fn table_fill_gc_ref(
store: &mut dyn VMStore,
instance: &mut Instance,
table_index: u32,
dst: u64,
val: u32,
len: u64,
) -> Result<()> {
let table_index = TableIndex::from_u32(table_index);
let table = &mut *instance.get_table(table_index);
match table.element_type() {
TableElementType::Func => unreachable!(),
TableElementType::GcRef => {
let gc_store = store.store_opaque_mut().unwrap_gc_store_mut();
let gc_ref = VMGcRef::from_raw_u32(val);
let gc_ref = gc_ref.map(|r| gc_store.clone_gc_ref(&r));
table.fill(Some(gc_store), dst, gc_ref.into(), len)?;
Ok(())
}
}
}
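/// Implementation of `table.copy`, copying `len` elements between two tables
/// after lazily initializing the source range if necessary.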
unsafe fn table_copy(
store: &mut dyn VMStore,
instance: &mut Instance,
dst_table_index: u32,
src_table_index: u32,
dst: u64,
src: u64,
len: u64,
) -> Result<()> {
let dst_table_index = TableIndex::from_u32(dst_table_index);
let src_table_index = TableIndex::from_u32(src_table_index);
let store = store.store_opaque_mut();
let dst_table = instance.get_table(dst_table_index);
let src_range = src..(src.checked_add(len).unwrap_or(u64::MAX));
let src_table = instance.get_table_with_lazy_init(src_table_index, src_range);
let gc_store = store.optional_gc_store_mut()?;
Table::copy(gc_store, dst_table, src_table, dst, src, len)?;
Ok(())
}
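/// Implementation of `table.init`, copying from a passive element segment into
/// a table.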
fn table_init(
store: &mut dyn VMStore,
instance: &mut Instance,
table_index: u32,
elem_index: u32,
dst: u64,
src: u64,
len: u64,
) -> Result<(), Trap> {
let table_index = TableIndex::from_u32(table_index);
let elem_index = ElemIndex::from_u32(elem_index);
instance.table_init(
store.store_opaque_mut(),
table_index,
elem_index,
dst,
src,
len,
)
}
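/// Implementation of `elem.drop`.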
fn elem_drop(_store: &mut dyn VMStore, instance: &mut Instance, elem_index: u32) {
let elem_index = ElemIndex::from_u32(elem_index);
instance.elem_drop(elem_index)
}
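/// Implementation of `memory.copy`.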
fn memory_copy(
_store: &mut dyn VMStore,
instance: &mut Instance,
dst_index: u32,
dst: u64,
src_index: u32,
src: u64,
len: u64,
) -> Result<(), Trap> {
let src_index = MemoryIndex::from_u32(src_index);
let dst_index = MemoryIndex::from_u32(dst_index);
instance.memory_copy(dst_index, dst, src_index, src, len)
}
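/// Implementation of `memory.fill`; only the low byte of `val` is used.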
fn memory_fill(
_store: &mut dyn VMStore,
instance: &mut Instance,
memory_index: u32,
dst: u64,
val: u32,
len: u64,
) -> Result<(), Trap> {
let memory_index = MemoryIndex::from_u32(memory_index);
#[allow(clippy::cast_possible_truncation)]
instance.memory_fill(memory_index, dst, val as u8, len)
}
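/// Implementation of `memory.init`, copying from a passive data segment into
/// a memory.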
fn memory_init(
_store: &mut dyn VMStore,
instance: &mut Instance,
memory_index: u32,
data_index: u32,
dst: u64,
src: u32,
len: u32,
) -> Result<(), Trap> {
let memory_index = MemoryIndex::from_u32(memory_index);
let data_index = DataIndex::from_u32(data_index);
instance.memory_init(memory_index, data_index, dst, src, len)
}
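/// Implementation of `ref.func`: returns the `VMFuncRef` for the given
/// function index.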
fn ref_func(_store: &mut dyn VMStore, instance: &mut Instance, func_index: u32) -> NonNull<u8> {
instance
.get_func_ref(FuncIndex::from_u32(func_index))
.expect("ref_func: funcref should always be available for given func index")
.cast()
}
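/// Implementation of `data.drop`.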
fn data_drop(_store: &mut dyn VMStore, instance: &mut Instance, data_index: u32) {
let data_index = DataIndex::from_u32(data_index);
instance.data_drop(data_index)
}
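/// Returns a `funcref` table element, lazily initializing it first if needed.
/// A null pointer is returned for a null element.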
unsafe fn table_get_lazy_init_func_ref(
_store: &mut dyn VMStore,
instance: &mut Instance,
table_index: u32,
index: u64,
) -> *mut u8 {
let table_index = TableIndex::from_u32(table_index);
let table = instance.get_table_with_lazy_init(table_index, core::iter::once(index));
let elem = (*table)
.get(None, index)
.expect("table access already bounds-checked");
match elem.into_func_ref_asserting_initialized() {
Some(ptr) => ptr.as_ptr().cast(),
None => core::ptr::null_mut(),
}
}
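/// Drops a GC reference that compiled code no longer needs (deferred
/// reference-counting collector only).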
#[cfg(feature = "gc-drc")]
unsafe fn drop_gc_ref(store: &mut dyn VMStore, _instance: &mut Instance, gc_ref: u32) {
log::trace!("libcalls::drop_gc_ref({gc_ref:#x})");
let gc_ref = VMGcRef::from_raw_u32(gc_ref).expect("non-null VMGcRef");
store
.store_opaque_mut()
.unwrap_gc_store_mut()
.drop_gc_ref(gc_ref);
}
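/// Triggers a garbage collection, keeping `gc_ref` (if non-null) rooted across
/// the collection, and returns its possibly-updated raw value.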
#[cfg(feature = "gc-drc")]
unsafe fn gc(store: &mut dyn VMStore, _instance: &mut Instance, gc_ref: u32) -> Result<u32> {
let gc_ref = VMGcRef::from_raw_u32(gc_ref);
let gc_ref = gc_ref.map(|r| {
store
.store_opaque_mut()
.unwrap_gc_store_mut()
.clone_gc_ref(&r)
});
if let Some(gc_ref) = &gc_ref {
let gc_store = store.store_opaque_mut().unwrap_gc_store_mut();
let gc_ref = gc_store.clone_gc_ref(gc_ref);
gc_store.expose_gc_ref_to_wasm(gc_ref);
}
match store.maybe_async_gc(gc_ref)? {
None => Ok(0),
Some(r) => {
let raw = r.as_raw_u32();
store
.store_opaque_mut()
.unwrap_gc_store_mut()
.expose_gc_ref_to_wasm(r);
Ok(raw)
}
}
}
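/// Allocates a raw, uninitialized GC object with the given kind, type, size,
/// and alignment, performing a collection and retrying once if the heap is out
/// of memory.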
#[cfg(feature = "gc-drc")]
unsafe fn gc_alloc_raw(
store: &mut dyn VMStore,
instance: &mut Instance,
kind: u32,
module_interned_type_index: u32,
size: u32,
align: u32,
) -> Result<u32> {
use crate::{vm::VMGcHeader, GcHeapOutOfMemory};
use core::alloc::Layout;
use wasmtime_environ::{ModuleInternedTypeIndex, VMGcKind};
let kind = VMGcKind::from_high_bits_of_u32(kind);
log::trace!("gc_alloc_raw(kind={kind:?}, size={size}, align={align})",);
let module = instance
.runtime_module()
.expect("should never allocate GC types defined in a dummy module");
let module_interned_type_index = ModuleInternedTypeIndex::from_u32(module_interned_type_index);
let shared_type_index = module
.signatures()
.shared_type(module_interned_type_index)
.expect("should have engine type index for module type index");
let header = VMGcHeader::from_kind_and_index(kind, shared_type_index);
let size = usize::try_from(size).unwrap();
let align = usize::try_from(align).unwrap();
let layout = Layout::from_size_align(size, align).unwrap();
let gc_ref = match store
.store_opaque_mut()
.unwrap_gc_store_mut()
.alloc_raw(header, layout)?
{
Some(r) => r,
None => {
store.maybe_async_gc(None)?;
store
.unwrap_gc_store_mut()
.alloc_raw(header, layout)?
.ok_or_else(|| GcHeapOutOfMemory::new(()))?
}
};
let raw = gc_ref.as_raw_u32();
store
.store_opaque_mut()
.unwrap_gc_store_mut()
.expose_gc_ref_to_wasm(gc_ref);
Ok(raw)
}
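/// Interns a `VMFuncRef` into the GC heap's function-reference table and
/// returns its identifier, so the funcref can be stored inside GC objects.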
#[cfg(feature = "gc")]
unsafe fn intern_func_ref_for_gc_heap(
store: &mut dyn VMStore,
_instance: &mut Instance,
func_ref: *mut u8,
) -> Result<u32> {
use crate::{store::AutoAssertNoGc, vm::SendSyncPtr};
use core::ptr::NonNull;
let mut store = AutoAssertNoGc::new(store.store_opaque_mut());
let func_ref = func_ref.cast::<VMFuncRef>();
let func_ref = NonNull::new(func_ref).map(SendSyncPtr::new);
let func_ref_id = store.gc_store_mut()?.func_ref_table.intern(func_ref);
Ok(func_ref_id.into_raw())
}
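/// Looks up a previously interned funcref by id, type-checking it against the
/// given module type index unless that index is the reserved "untyped" value.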
#[cfg(feature = "gc")]
unsafe fn get_interned_func_ref(
store: &mut dyn VMStore,
instance: &mut Instance,
func_ref_id: u32,
module_interned_type_index: u32,
) -> *mut u8 {
use super::FuncRefTableId;
use crate::store::AutoAssertNoGc;
use wasmtime_environ::{packed_option::ReservedValue, ModuleInternedTypeIndex};
let store = AutoAssertNoGc::new(store.store_opaque_mut());
let func_ref_id = FuncRefTableId::from_raw(func_ref_id);
let module_interned_type_index = ModuleInternedTypeIndex::from_bits(module_interned_type_index);
let func_ref = if module_interned_type_index.is_reserved_value() {
store
.unwrap_gc_store()
.func_ref_table
.get_untyped(func_ref_id)
} else {
let types = store.engine().signatures();
let engine_ty = instance.engine_type_index(module_interned_type_index);
store
.unwrap_gc_store()
.func_ref_table
.get_typed(types, func_ref_id, engine_ty)
};
func_ref.map_or(core::ptr::null_mut(), |f| f.as_ptr().cast())
}
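/// Implementation of `array.new_data`: allocates an array and initializes it
/// from a passive data segment.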
#[cfg(feature = "gc")]
unsafe fn array_new_data(
store: &mut dyn VMStore,
instance: &mut Instance,
array_type_index: u32,
data_index: u32,
src: u32,
len: u32,
) -> Result<u32> {
use crate::{ArrayType, GcHeapOutOfMemory};
use wasmtime_environ::ModuleInternedTypeIndex;
let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
let data_index = DataIndex::from_u32(data_index);
let data_range = instance.wasm_data_range(data_index);
let shared_ty = instance.engine_type_index(array_type_index);
let array_ty = ArrayType::from_shared_type_index(store.store_opaque_mut().engine(), shared_ty);
let one_elem_size = array_ty
.element_type()
.data_byte_size()
.expect("Wasm validation ensures that this type have a defined byte size");
let byte_len = len
.checked_mul(one_elem_size)
.and_then(|x| usize::try_from(x).ok())
.ok_or_else(|| Trap::MemoryOutOfBounds)?;
let src = usize::try_from(src).map_err(|_| Trap::MemoryOutOfBounds)?;
let data = instance
.wasm_data(data_range)
.get(src..)
.and_then(|d| d.get(..byte_len))
.ok_or_else(|| Trap::MemoryOutOfBounds)?;
let gc_layout = store
.store_opaque_mut()
.engine()
.signatures()
.layout(shared_ty)
.expect("array types have GC layouts");
let array_layout = gc_layout.unwrap_array();
let array_ref = match store
.store_opaque_mut()
.unwrap_gc_store_mut()
.alloc_uninit_array(shared_ty, len, &array_layout)?
{
Some(a) => a,
None => {
store.maybe_async_gc(None)?;
store
.store_opaque_mut()
.unwrap_gc_store_mut()
.alloc_uninit_array(shared_ty, u32::try_from(byte_len).unwrap(), &array_layout)?
.ok_or_else(|| GcHeapOutOfMemory::new(()))?
}
};
store
.store_opaque_mut()
.unwrap_gc_store_mut()
.gc_object_data(array_ref.as_gc_ref())
.copy_from_slice(array_layout.base_size, data);
let raw = array_ref.as_gc_ref().as_raw_u32();
store
.store_opaque_mut()
.unwrap_gc_store_mut()
.expose_gc_ref_to_wasm(array_ref.into());
Ok(raw)
}
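/// Implementation of `array.init_data`: copies bytes from a passive data
/// segment into an existing array.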
#[cfg(feature = "gc")]
unsafe fn array_init_data(
store: &mut dyn VMStore,
instance: &mut Instance,
array_type_index: u32,
array: u32,
dst: u32,
data_index: u32,
src: u32,
len: u32,
) -> Result<()> {
use crate::ArrayType;
use wasmtime_environ::ModuleInternedTypeIndex;
let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
let data_index = DataIndex::from_u32(data_index);
log::trace!(
"array.init_data(array={array:#x}, dst={dst}, data_index={data_index:?}, src={src}, len={len})",
);
let gc_ref = VMGcRef::from_raw_u32(array).ok_or_else(|| Trap::NullReference)?;
let array = gc_ref
.into_arrayref(&*store.unwrap_gc_store().gc_heap)
.expect("gc ref should be an array");
let dst = usize::try_from(dst).map_err(|_| Trap::MemoryOutOfBounds)?;
let src = usize::try_from(src).map_err(|_| Trap::MemoryOutOfBounds)?;
let len = usize::try_from(len).map_err(|_| Trap::MemoryOutOfBounds)?;
let array_len = array.len(store.store_opaque());
let array_len = usize::try_from(array_len).map_err(|_| Trap::ArrayOutOfBounds)?;
if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > array_len {
return Err(Trap::ArrayOutOfBounds.into());
}
let shared_ty = instance.engine_type_index(array_type_index);
let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty);
let one_elem_size = array_ty
.element_type()
.data_byte_size()
.expect("Wasm validation ensures that this type have a defined byte size");
let data_len = len
.checked_mul(usize::try_from(one_elem_size).unwrap())
.ok_or_else(|| Trap::MemoryOutOfBounds)?;
let data_range = instance.wasm_data_range(data_index);
let data = instance
.wasm_data(data_range)
.get(src..)
.and_then(|d| d.get(..data_len))
.ok_or_else(|| Trap::MemoryOutOfBounds)?;
let dst_offset = u32::try_from(dst)
.unwrap()
.checked_mul(one_elem_size)
.unwrap();
let array_layout = store
.engine()
.signatures()
.layout(shared_ty)
.expect("array types have GC layouts");
let array_layout = array_layout.unwrap_array();
let obj_offset = array_layout.base_size.checked_add(dst_offset).unwrap();
store
.unwrap_gc_store_mut()
.gc_object_data(array.as_gc_ref())
.copy_from_slice(obj_offset, data);
Ok(())
}
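/// Implementation of `array.new_elem`: allocates an array and initializes it
/// from a passive element segment.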
#[cfg(feature = "gc")]
unsafe fn array_new_elem(
store: &mut dyn VMStore,
instance: &mut Instance,
array_type_index: u32,
elem_index: u32,
src: u32,
len: u32,
) -> Result<u32> {
use crate::{
store::AutoAssertNoGc,
vm::const_expr::{ConstEvalContext, ConstExprEvaluator},
ArrayRef, ArrayRefPre, ArrayType, Func, GcHeapOutOfMemory, RootSet, RootedGcRefImpl, Val,
};
use wasmtime_environ::{ModuleInternedTypeIndex, TableSegmentElements};
let array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
let elem_index = ElemIndex::from_u32(elem_index);
let mut storage = None;
let elements = instance.passive_element_segment(&mut storage, elem_index);
let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;
let shared_ty = instance.engine_type_index(array_type_index);
let array_ty = ArrayType::from_shared_type_index(store.engine(), shared_ty);
let elem_ty = array_ty.element_type();
let pre = ArrayRefPre::_new(store, array_ty);
RootSet::with_lifo_scope(store, |store| {
let mut vals = Vec::with_capacity(usize::try_from(elements.len()).unwrap());
match elements {
TableSegmentElements::Functions(fs) => {
vals.extend(
fs.get(src..)
.and_then(|s| s.get(..len))
.ok_or_else(|| Trap::TableOutOfBounds)?
.iter()
.map(|f| {
let raw_func_ref = instance.get_func_ref(*f);
let func = raw_func_ref.map(|p| Func::from_vm_func_ref(store, p));
Val::FuncRef(func)
}),
);
}
TableSegmentElements::Expressions(xs) => {
let xs = xs
.get(src..)
.and_then(|s| s.get(..len))
.ok_or_else(|| Trap::TableOutOfBounds)?;
let mut const_context = ConstEvalContext::new(instance);
let mut const_evaluator = ConstExprEvaluator::default();
vals.extend(xs.iter().map(|x| unsafe {
let raw = const_evaluator
.eval(store, &mut const_context, x)
.expect("const expr should be valid");
let mut store = AutoAssertNoGc::new(store);
Val::_from_raw(&mut store, raw, elem_ty.unwrap_val_type())
}));
}
}
let array = match ArrayRef::_new_fixed(store, &pre, &vals) {
Ok(a) => a,
Err(e) if e.is::<GcHeapOutOfMemory<()>>() => {
store.maybe_async_gc(None)?;
ArrayRef::_new_fixed(store, &pre, &vals)?
}
Err(e) => return Err(e),
};
let mut store = AutoAssertNoGc::new(store);
let gc_ref = array.try_clone_gc_ref(&mut store)?;
let raw = gc_ref.as_raw_u32();
store.unwrap_gc_store_mut().expose_gc_ref_to_wasm(gc_ref);
Ok(raw)
})
}
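/// Implementation of `array.init_elem`: copies values from a passive element
/// segment into an existing array.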
#[cfg(feature = "gc")]
unsafe fn array_init_elem(
store: &mut dyn VMStore,
instance: &mut Instance,
array_type_index: u32,
array: u32,
dst: u32,
elem_index: u32,
src: u32,
len: u32,
) -> Result<()> {
use crate::{
store::AutoAssertNoGc,
vm::const_expr::{ConstEvalContext, ConstExprEvaluator},
ArrayRef, Func, OpaqueRootScope, Val,
};
use wasmtime_environ::{ModuleInternedTypeIndex, TableSegmentElements};
let mut store = OpaqueRootScope::new(store.store_opaque_mut());
let _array_type_index = ModuleInternedTypeIndex::from_u32(array_type_index);
let elem_index = ElemIndex::from_u32(elem_index);
log::trace!(
"array.init_elem(array={array:#x}, dst={dst}, elem_index={elem_index:?}, src={src}, len={len})",
);
let array = VMGcRef::from_raw_u32(array).ok_or_else(|| Trap::NullReference)?;
let array = store.unwrap_gc_store_mut().clone_gc_ref(&array);
let array = {
let mut no_gc = AutoAssertNoGc::new(&mut store);
ArrayRef::from_cloned_gc_ref(&mut no_gc, array)
};
let array_len = array._len(&store)?;
log::trace!("array_len = {array_len}");
if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > array_len {
return Err(Trap::ArrayOutOfBounds.into());
}
let mut storage = None;
let elements = instance.passive_element_segment(&mut storage, elem_index);
let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;
let vals = match elements {
TableSegmentElements::Functions(fs) => fs
.get(src..)
.and_then(|s| s.get(..len))
.ok_or_else(|| Trap::TableOutOfBounds)?
.iter()
.map(|f| {
let raw_func_ref = instance.get_func_ref(*f);
let func = raw_func_ref.map(|p| Func::from_vm_func_ref(&mut store, p));
Val::FuncRef(func)
})
.collect::<Vec<_>>(),
TableSegmentElements::Expressions(xs) => {
let elem_ty = array._ty(&store)?.element_type();
let elem_ty = elem_ty.unwrap_val_type();
let mut const_context = ConstEvalContext::new(instance);
let mut const_evaluator = ConstExprEvaluator::default();
xs.get(src..)
.and_then(|s| s.get(..len))
.ok_or_else(|| Trap::TableOutOfBounds)?
.iter()
.map(|x| unsafe {
let raw = const_evaluator
.eval(&mut store, &mut const_context, x)
.expect("const expr should be valid");
let mut store = AutoAssertNoGc::new(&mut store);
Val::_from_raw(&mut store, raw, elem_ty)
})
.collect::<Vec<_>>()
}
};
for (i, val) in vals.into_iter().enumerate() {
let i = u32::try_from(i).unwrap();
let j = dst.checked_add(i).unwrap();
array._set(&mut store, j, val)?;
}
Ok(())
}
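/// Implementation of `array.copy`, choosing the copy direction so that
/// overlapping source and destination ranges are handled correctly.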
#[cfg(feature = "gc")]
unsafe fn array_copy(
store: &mut dyn VMStore,
_instance: &mut Instance,
dst_array: u32,
dst: u32,
src_array: u32,
src: u32,
len: u32,
) -> Result<()> {
use crate::{store::AutoAssertNoGc, ArrayRef, OpaqueRootScope};
log::trace!(
"array.copy(dst_array={dst_array:#x}, dst_index={dst}, src_array={src_array:#x}, src_index={src}, len={len})",
);
let mut store = OpaqueRootScope::new(store.store_opaque_mut());
let mut store = AutoAssertNoGc::new(&mut store);
let dst_array = VMGcRef::from_raw_u32(dst_array).ok_or_else(|| Trap::NullReference)?;
let dst_array = store.unwrap_gc_store_mut().clone_gc_ref(&dst_array);
let dst_array = ArrayRef::from_cloned_gc_ref(&mut store, dst_array);
let src_array = VMGcRef::from_raw_u32(src_array).ok_or_else(|| Trap::NullReference)?;
let src_array = store.unwrap_gc_store_mut().clone_gc_ref(&src_array);
let src_array = ArrayRef::from_cloned_gc_ref(&mut store, src_array);
let dst_array_len = dst_array._len(&store)?;
if dst.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > dst_array_len {
return Err(Trap::ArrayOutOfBounds.into());
}
let src_array_len = src_array._len(&store)?;
if src.checked_add(len).ok_or_else(|| Trap::ArrayOutOfBounds)? > src_array_len {
return Err(Trap::ArrayOutOfBounds.into());
}
let mut store = AutoAssertNoGc::new(&mut store);
if src > dst {
for i in 0..len {
let src_elem = src_array._get(&mut store, src + i)?;
let dst_i = dst + i;
dst_array._set(&mut store, dst_i, src_elem)?;
}
} else {
for i in (0..len).rev() {
let src_elem = src_array._get(&mut store, src + i)?;
let dst_i = dst + i;
dst_array._set(&mut store, dst_i, src_elem)?;
}
}
Ok(())
}
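/// Returns 1 if `actual_engine_type` is a subtype of `expected_engine_type`,
/// and 0 otherwise.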
#[cfg(feature = "gc")]
unsafe fn is_subtype(
store: &mut dyn VMStore,
_instance: &mut Instance,
actual_engine_type: u32,
expected_engine_type: u32,
) -> u32 {
use wasmtime_environ::VMSharedTypeIndex;
let actual = VMSharedTypeIndex::from_u32(actual_engine_type);
let expected = VMSharedTypeIndex::from_u32(expected_engine_type);
let is_subtype: bool = store
.engine()
.signatures()
.is_subtype(actual, expected)
.into();
log::trace!("is_subtype(actual={actual:?}, expected={expected:?}) -> {is_subtype}",);
is_subtype as u32
}
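/// Implementation of `memory.atomic.notify`.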
#[cfg(feature = "threads")]
fn memory_atomic_notify(
_store: &mut dyn VMStore,
instance: &mut Instance,
memory_index: u32,
addr_index: u64,
count: u32,
) -> Result<u32, Trap> {
let memory = MemoryIndex::from_u32(memory_index);
instance
.get_runtime_memory(memory)
.atomic_notify(addr_index, count)
}
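/// Implementation of `memory.atomic.wait32`. A negative `timeout`
/// (interpreted as `i64`) means wait forever; otherwise it is a timeout in
/// nanoseconds.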
#[cfg(feature = "threads")]
fn memory_atomic_wait32(
_store: &mut dyn VMStore,
instance: &mut Instance,
memory_index: u32,
addr_index: u64,
expected: u32,
timeout: u64,
) -> Result<u32, Trap> {
let timeout = (timeout as i64 >= 0).then(|| Duration::from_nanos(timeout));
let memory = MemoryIndex::from_u32(memory_index);
Ok(instance
.get_runtime_memory(memory)
.atomic_wait32(addr_index, expected, timeout)? as u32)
}
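/// Implementation of `memory.atomic.wait64`, with the same timeout encoding as
/// `memory_atomic_wait32`.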
#[cfg(feature = "threads")]
fn memory_atomic_wait64(
_store: &mut dyn VMStore,
instance: &mut Instance,
memory_index: u32,
addr_index: u64,
expected: u64,
timeout: u64,
) -> Result<u32, Trap> {
let timeout = (timeout as i64 >= 0).then(|| Duration::from_nanos(timeout));
let memory = MemoryIndex::from_u32(memory_index);
Ok(instance
.get_runtime_memory(memory)
.atomic_wait64(addr_index, expected, timeout)? as u32)
}
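/// Invoked when the store runs out of fuel.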
fn out_of_gas(store: &mut dyn VMStore, _instance: &mut Instance) -> Result<()> {
store.out_of_gas()
}
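/// Invoked when the current epoch deadline is reached, returning the next
/// epoch deadline.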
#[cfg(target_has_atomic = "64")]
fn new_epoch(store: &mut dyn VMStore, _instance: &mut Instance) -> Result<NextEpoch> {
store.new_epoch().map(NextEpoch)
}
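/// Wrapper giving the value returned by `new_epoch` an unwind sentinel
/// (`u64::MAX`) that is distinct from any valid deadline.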
struct NextEpoch(u64);
unsafe impl HostResultHasUnwindSentinel for NextEpoch {
type Abi = u64;
const SENTINEL: u64 = u64::MAX;
fn into_abi(self) -> u64 {
self.0
}
}
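/// wmemcheck hook: records a guest `malloc` of `len` bytes at `addr` and
/// reports double mallocs or out-of-bounds allocations.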
#[cfg(feature = "wmemcheck")]
unsafe fn check_malloc(
_store: &mut dyn VMStore,
instance: &mut Instance,
addr: u32,
len: u32,
) -> Result<()> {
if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
let result = wmemcheck_state.malloc(addr as usize, len as usize);
wmemcheck_state.memcheck_on();
match result {
Ok(()) => {}
Err(DoubleMalloc { addr, len }) => {
bail!("Double malloc at addr {:#x} of size {}", addr, len)
}
Err(OutOfBounds { addr, len }) => {
bail!("Malloc out of bounds at addr {:#x} of size {}", addr, len);
}
_ => {
panic!("unreachable")
}
}
}
Ok(())
}
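/// wmemcheck hook: records a guest `free` at `addr` and reports invalid frees.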
#[cfg(feature = "wmemcheck")]
unsafe fn check_free(_store: &mut dyn VMStore, instance: &mut Instance, addr: u32) -> Result<()> {
if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
let result = wmemcheck_state.free(addr as usize);
wmemcheck_state.memcheck_on();
match result {
Ok(()) => {}
Err(InvalidFree { addr }) => {
bail!("Invalid free at addr {:#x}", addr)
}
_ => {
panic!("unreachable")
}
}
}
Ok(())
}
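/// wmemcheck hook: validates a `num_bytes`-wide load at `addr + offset`.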
#[cfg(feature = "wmemcheck")]
fn check_load(
_store: &mut dyn VMStore,
instance: &mut Instance,
num_bytes: u32,
addr: u32,
offset: u32,
) -> Result<()> {
if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
let result = wmemcheck_state.read(addr as usize + offset as usize, num_bytes as usize);
match result {
Ok(()) => {}
Err(InvalidRead { addr, len }) => {
bail!("Invalid load at addr {:#x} of size {}", addr, len);
}
Err(OutOfBounds { addr, len }) => {
bail!("Load out of bounds at addr {:#x} of size {}", addr, len);
}
_ => {
panic!("unreachable")
}
}
}
Ok(())
}
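/// wmemcheck hook: validates a `num_bytes`-wide store at `addr + offset`.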
#[cfg(feature = "wmemcheck")]
fn check_store(
_store: &mut dyn VMStore,
instance: &mut Instance,
num_bytes: u32,
addr: u32,
offset: u32,
) -> Result<()> {
if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
let result = wmemcheck_state.write(addr as usize + offset as usize, num_bytes as usize);
match result {
Ok(()) => {}
Err(InvalidWrite { addr, len }) => {
bail!("Invalid store at addr {:#x} of size {}", addr, len)
}
Err(OutOfBounds { addr, len }) => {
bail!("Store out of bounds at addr {:#x} of size {}", addr, len)
}
_ => {
panic!("unreachable")
}
}
}
Ok(())
}
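/// wmemcheck hook: disables access checking while the guest allocator runs
/// inside `malloc`.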
#[cfg(feature = "wmemcheck")]
fn malloc_start(_store: &mut dyn VMStore, instance: &mut Instance) {
if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
wmemcheck_state.memcheck_off();
}
}
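/// wmemcheck hook: disables access checking while the guest allocator runs
/// inside `free`.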
#[cfg(feature = "wmemcheck")]
fn free_start(_store: &mut dyn VMStore, instance: &mut Instance) {
if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
wmemcheck_state.memcheck_off();
}
}
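/// wmemcheck hook for stack-pointer updates; currently a no-op.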
#[cfg(feature = "wmemcheck")]
fn update_stack_pointer(_store: &mut dyn VMStore, _instance: &mut Instance, _value: u32) {
}
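/// wmemcheck hook: informs the checker of the new memory size after growth,
/// given in 64 KiB Wasm pages.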
#[cfg(feature = "wmemcheck")]
fn update_mem_size(_store: &mut dyn VMStore, instance: &mut Instance, num_pages: u32) {
if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
const KIB: usize = 1024;
let num_bytes = num_pages as usize * 64 * KIB;
wmemcheck_state.update_mem_size(num_bytes);
}
}
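/// Raises an unconditional trap with the given trap code.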
fn trap(
_store: &mut dyn VMStore,
_instance: &mut Instance,
code: u8,
) -> Result<Infallible, TrapReason> {
Err(TrapReason::Wasm(
wasmtime_environ::Trap::from_u8(code).unwrap(),
))
}
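/// Resumes unwinding for a previously-recorded trap, jumping back out of Wasm
/// code. Only reachable when a host compiler backend is present.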
fn raise(_store: &mut dyn VMStore, _instance: &mut Instance) {
#[cfg(has_host_compiler_backend)]
unsafe {
crate::runtime::vm::traphandlers::raise_preexisting_trap()
}
#[cfg(not(has_host_compiler_backend))]
unreachable!()
}
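/// Helper functions referenced via relocations in compiled code rather than
/// through the builtin libcall table: float rounding, fused multiply-add, and
/// an x86 `pshufb` fallback.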
#[allow(missing_docs)]
pub mod relocs {
pub extern "C" fn floorf32(f: f32) -> f32 {
wasmtime_math::WasmFloat::wasm_floor(f)
}
pub extern "C" fn floorf64(f: f64) -> f64 {
wasmtime_math::WasmFloat::wasm_floor(f)
}
pub extern "C" fn ceilf32(f: f32) -> f32 {
wasmtime_math::WasmFloat::wasm_ceil(f)
}
pub extern "C" fn ceilf64(f: f64) -> f64 {
wasmtime_math::WasmFloat::wasm_ceil(f)
}
pub extern "C" fn truncf32(f: f32) -> f32 {
wasmtime_math::WasmFloat::wasm_trunc(f)
}
pub extern "C" fn truncf64(f: f64) -> f64 {
wasmtime_math::WasmFloat::wasm_trunc(f)
}
pub extern "C" fn nearestf32(x: f32) -> f32 {
wasmtime_math::WasmFloat::wasm_nearest(x)
}
pub extern "C" fn nearestf64(x: f64) -> f64 {
wasmtime_math::WasmFloat::wasm_nearest(x)
}
pub extern "C" fn fmaf32(a: f32, b: f32, c: f32) -> f32 {
wasmtime_math::WasmFloat::wasm_mul_add(a, b, c)
}
pub extern "C" fn fmaf64(a: f64, b: f64, c: f64) -> f64 {
wasmtime_math::WasmFloat::wasm_mul_add(a, b, c)
}
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::__m128i;
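/// Software implementation of the x86 `pshufb` byte shuffle: each result byte
/// selects `a[b[i] & 0xf]`, or zero when the high bit of `b[i]` is set.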
#[cfg(target_arch = "x86_64")]
#[target_feature(enable = "sse")]
#[allow(improper_ctypes_definitions)]
pub unsafe extern "C" fn x86_pshufb(a: __m128i, b: __m128i) -> __m128i {
union U {
reg: __m128i,
mem: [u8; 16],
}
unsafe {
let a = U { reg: a }.mem;
let b = U { reg: b }.mem;
let select = |arr: &[u8; 16], byte: u8| {
if byte & 0x80 != 0 {
0x00
} else {
arr[(byte & 0xf) as usize]
}
};
U {
mem: [
select(&a, b[0]),
select(&a, b[1]),
select(&a, b[2]),
select(&a, b[3]),
select(&a, b[4]),
select(&a, b[5]),
select(&a, b[6]),
select(&a, b[7]),
select(&a, b[8]),
select(&a, b[9]),
select(&a, b[10]),
select(&a, b[11]),
select(&a, b[12]),
select(&a, b[13]),
select(&a, b[14]),
select(&a, b[15]),
],
}
.reg
}
}
}