wasmtime/runtime/vm/component.rs
//! Runtime support for the component model in Wasmtime
//!
//! Currently this runtime support includes a `VMComponentContext` which is
//! similar in purpose to `VMContext`. The context is read from
//! cranelift-generated trampolines when entering the host from a wasm module.
//! Eventually it's intended that module-to-module calls, which would be
//! cranelift-compiled adapters, will use this `VMComponentContext` as well.

use crate::component::{Component, Instance, InstancePre, ResourceType, RuntimeImport};
use crate::runtime::component::ComponentInstanceId;
use crate::runtime::vm::instance::{InstanceLayout, OwnedInstance, OwnedVMContext};
use crate::runtime::vm::vmcontext::VMFunctionBody;
use crate::runtime::vm::{
    HostResult, SendSyncPtr, VMArrayCallFunction, VMFuncRef, VMGlobalDefinition,
    VMMemoryDefinition, VMOpaqueContext, VMStore, VMStoreRawPtr, VMTableImport, VMWasmCallFunction,
    ValRaw, VmPtr, VmSafe, catch_unwind_and_record_trap,
};
use crate::store::InstanceId;
use alloc::alloc::Layout;
use alloc::sync::Arc;
use core::mem;
use core::mem::offset_of;
use core::pin::Pin;
use core::ptr::NonNull;
use wasmtime_environ::component::*;
use wasmtime_environ::{HostPtr, PrimaryMap, VMSharedTypeIndex};

#[allow(
    clippy::cast_possible_truncation,
    reason = "it's intended this is truncated on 32-bit platforms"
)]
const INVALID_PTR: usize = 0xdead_dead_beef_beef_u64 as usize;

mod handle_table;
mod libcalls;
mod resources;

pub use self::handle_table::{HandleTable, RemovedResource};
#[cfg(feature = "component-model-async")]
pub use self::handle_table::{TransmitLocalState, Waitable};
#[cfg(feature = "component-model-async")]
pub use self::resources::CallContext;
pub use self::resources::{CallContexts, ResourceTables, TypedResource, TypedResourceIndex};

#[cfg(feature = "component-model-async")]
use crate::component::concurrent;

/// Runtime representation of a component instance and all state necessary for
/// the instance itself.
///
/// This type never exists by-value, but rather it's always behind a pointer.
/// The size of the allocation for `ComponentInstance` includes the trailing
/// `VMComponentContext` which is variably sized based on the `offsets`
/// contained within.
///
/// # Pin
///
/// Note that this type is mutated through `Pin<&mut ComponentInstance>` in the
/// same manner as `vm::Instance` for core modules; see the documentation there
/// for more information and rationale.
#[repr(C)]
pub struct ComponentInstance {
    /// The index within the store of where to find this component instance.
    id: ComponentInstanceId,

    /// Size and offset information for the trailing `VMComponentContext`.
    offsets: VMComponentOffsets<HostPtr>,

    /// The component that this instance was created from.
    //
    // NB: in the future if necessary it would be possible to avoid storing an
    // entire `Component` here and instead storing only information such as:
    //
    // * Some reference to `Arc<ComponentTypes>`
    // * Necessary references to closed-over modules which are exported from the
    //   component itself.
    //
    // Otherwise the full guts of this component should only ever be used during
    // the instantiation of this instance, meaning that after instantiation much
    // of the component can be thrown away (theoretically).
    component: Component,

    /// State of handles (e.g. resources, waitables, etc.) for this component.
    ///
    /// For resource handles, this is paired with other information to create a
    /// `ResourceTables` and manipulated through that. For other handles, this
    /// is used directly to translate guest handles to host representations and
    /// vice-versa.
    instance_handle_tables: PrimaryMap<RuntimeComponentInstanceIndex, HandleTable>,

    /// State related to async for this component, e.g. futures, streams, tasks,
    /// etc.
    #[cfg(feature = "component-model-async")]
    concurrent_state: concurrent::ConcurrentState,

    /// What all compile-time-identified core instances are mapped to within the
    /// `Store` that this component belongs to.
    instances: PrimaryMap<RuntimeInstanceIndex, InstanceId>,

    /// Storage for the type information about resources within this component
    /// instance.
    resource_types: Arc<PrimaryMap<ResourceIndex, ResourceType>>,

    /// Arguments that were used to instantiate this instance.
    ///
    /// Strong references are stored to these arguments since pointers are saved
    /// into structures such as the functions within the
    /// `OwnedComponentInstance`, but it's our job to keep them alive.
    ///
    /// One purpose of this storage is to enable embedders to drop a `Linker`,
    /// for example, after a component is instantiated. In that situation if the
    /// arguments weren't held here then they might be dropped, and structures
    /// such as `.lowering()` which point back into the original function would
    /// become stale and cause use-after-free conditions when used. By
    /// preserving the entire list here though we're guaranteed that nothing is
    /// lost for the duration of the lifetime of this instance.
    imports: Arc<PrimaryMap<RuntimeImportIndex, RuntimeImport>>,

    /// Self-pointer back to `Store<T>` and its functions.
    store: VMStoreRawPtr,

    /// Cached ABI return value from the last-invoked function call along with
    /// the function index that was invoked.
    ///
    /// Used in `post_return_arg_set` and `post_return_arg_take` below.
    post_return_arg: Option<(ExportIndex, ValRaw)>,

    /// Required by `InstanceLayout`, also required to be the last field (with
    /// repr(C)).
    vmctx: OwnedVMContext<VMComponentContext>,
}

/// Type signature for host-defined trampolines that are called from
/// WebAssembly.
///
/// This function signature is invoked from a cranelift-compiled trampoline that
/// adapts from the core wasm System-V ABI into the ABI provided here:
///
/// * `vmctx` - this is the first argument to the wasm import, and should always
///   end up being a `VMComponentContext`.
/// * `data` - this is the data pointer associated with the `VMLowering` for
///   which this function pointer was registered.
/// * `ty` - the type index, relative to the tables in `vmctx`, that is the
///   type of the function being called.
/// * `options` - the `OptionsIndex` which indicates the canonical ABI options
///   in use for this call.
/// * `args_and_results` - pointer to stack-allocated space in the caller where
///   all the arguments are stored as well as where the results will be written
///   to. The size and initialized bytes of this depend on the core wasm type
///   signature that this callee corresponds to.
/// * `nargs_and_results` - the size, in units of `ValRaw`, of
///   `args_and_results`.
///
/// This function returns a `bool` which indicates whether the call succeeded
/// or not. On failure this function records trap information in TLS which
/// should be suitable for reading later.
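///
/// # Example
///
/// A minimal sketch of a conforming callee; the `example_callee` function here
/// is hypothetical and for illustration only, real callees are registered by
/// the host through a `VMLowering`:
///
/// ```ignore
/// extern "C" fn example_callee(
///     _vmctx: NonNull<VMOpaqueContext>,
///     _data: NonNull<u8>,
///     _ty: u32,
///     _options: u32,
///     _args_and_results: NonNull<mem::MaybeUninit<ValRaw>>,
///     _nargs_and_results: usize,
/// ) -> bool {
///     // A real callee would lift its arguments, run host logic, then lower
///     // results back into `args_and_results`; `true` reports success.
///     true
/// }
///
/// let callee: VMLoweringCallee = example_callee;
/// ```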
pub type VMLoweringCallee = extern "C" fn(
    vmctx: NonNull<VMOpaqueContext>,
    data: NonNull<u8>,
    ty: u32,
    options: u32,
    args_and_results: NonNull<mem::MaybeUninit<ValRaw>>,
    nargs_and_results: usize,
) -> bool;

/// An opaque function pointer which is a `VMLoweringCallee` under the hood
/// but this is stored as `VmPtr<VMLoweringFunction>` within `VMLowering` below
/// to handle provenance correctly when using Pulley.
#[repr(transparent)]
pub struct VMLoweringFunction(VMFunctionBody);

/// Structure describing a lowered host function stored within a
/// `VMComponentContext` per-lowering.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct VMLowering {
    /// The host function pointer that is invoked when this lowering is
    /// invoked.
    pub callee: VmPtr<VMLoweringFunction>,
    /// The host data pointer (think void* pointer) to get passed to `callee`.
    pub data: VmPtr<u8>,
}

// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMLowering {}

/// This is a marker type to represent the underlying allocation of a
/// `VMComponentContext`.
///
/// This type is similar to `VMContext` for core wasm and is allocated once per
/// component instance in Wasmtime. While the static size of this type is 0, the
/// actual runtime size is variable depending on the shape of the component that
/// this corresponds to. This structure always trails a `ComponentInstance`
/// allocation and the allocation/lifetime of this allocation is managed by
/// `ComponentInstance`.
#[repr(C)]
// Set an appropriate alignment for this structure where the most-aligned value
// internally right now is `VMGlobalDefinition`, which has an alignment of 16
// bytes.
#[repr(align(16))]
pub struct VMComponentContext;

impl ComponentInstance {
    /// Converts the `vmctx` provided into a `ComponentInstance` and runs the
    /// provided closure with that instance.
    ///
    /// This function will also catch any failures that `f` produces and return
    /// an appropriate ABI value to return to wasm. This includes normal errors
    /// such as traps as well as Rust-side panics which require wasm to unwind.
    ///
    /// # Unsafety
    ///
    /// This is `unsafe` because `vmctx` cannot be guaranteed to be a valid
    /// pointer and it cannot be proven statically that it's safe to get a
    /// mutable reference at this time to the instance from `vmctx`. Note that
    /// it must also be safe to borrow the store mutably, meaning it can't
    /// already be in use elsewhere.
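    ///
    /// # Example
    ///
    /// A hedged sketch of how a cranelift-called entrypoint might use this
    /// helper; the `example_entry` name, its signature, and its body are
    /// hypothetical, not an API defined in this module:
    ///
    /// ```ignore
    /// unsafe extern "C" fn example_entry(vmctx: NonNull<VMComponentContext>) -> bool {
    ///     // SAFETY: hypothetically, `vmctx` is valid here and the store is
    ///     // not otherwise borrowed at this point.
    ///     unsafe {
    ///         ComponentInstance::enter_host_from_wasm(vmctx, |store, instance| {
    ///             // ... host logic with `&mut dyn VMStore` access ...
    ///             let _ = (store, instance);
    ///             Ok::<(), anyhow::Error>(())
    ///         })
    ///     }
    /// }
    /// ```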
    pub unsafe fn enter_host_from_wasm<R>(
        vmctx: NonNull<VMComponentContext>,
        f: impl FnOnce(&mut dyn VMStore, Instance) -> R,
    ) -> R::Abi
    where
        R: HostResult,
    {
        // SAFETY: it's a contract of this function that `vmctx` is a valid
        // allocation which can go backwards to a `ComponentInstance`.
        let mut ptr = unsafe { Self::from_vmctx(vmctx) };

        // SAFETY: it's a contract of this function that it's safe to use `ptr`
        // as a mutable reference.
        let reference = unsafe { ptr.as_mut() };

        // SAFETY: it's a contract of this function that it's safe to use the
        // store mutably at this time.
        let store = unsafe { &mut *reference.store.0.as_ptr() };

        let instance = Instance::from_wasmtime(store, reference.id);
        catch_unwind_and_record_trap(store, |store| f(store, instance))
    }

    /// Returns the `ComponentInstance` pointer associated with the `vmctx`
    /// provided.
    ///
    /// # Safety
    ///
    /// The `vmctx` pointer must be a valid pointer and allocation within a
    /// `ComponentInstance`. See `Instance::from_vmctx` for some more
    /// information.
    unsafe fn from_vmctx(vmctx: NonNull<VMComponentContext>) -> NonNull<ComponentInstance> {
        // SAFETY: it's a contract of this function that `vmctx` is a valid
        // pointer to do this pointer arithmetic on.
        unsafe {
            vmctx
                .byte_sub(mem::size_of::<ComponentInstance>())
                .cast::<ComponentInstance>()
        }
    }

    /// Returns the `ComponentInstanceId` associated with the `vmctx` provided.
    ///
    /// # Safety
    ///
    /// The `vmctx` pointer must be a valid pointer to read the
    /// `ComponentInstanceId` from.
    pub(crate) unsafe fn vmctx_instance_id(
        vmctx: NonNull<VMComponentContext>,
    ) -> ComponentInstanceId {
        // SAFETY: it's a contract of this function that `vmctx` is a valid
        // pointer with a `ComponentInstance` in front which can be read.
        unsafe { Self::from_vmctx(vmctx).as_ref().id }
    }

    /// Returns the layout corresponding to what would be an allocation of a
    /// `ComponentInstance` for the `offsets` provided.
    ///
    /// The returned layout has space for both the `ComponentInstance` and the
    /// trailing `VMComponentContext`.
    fn alloc_layout(offsets: &VMComponentOffsets<HostPtr>) -> Layout {
        let size = mem::size_of::<Self>()
            .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
            .unwrap();
        let align = mem::align_of::<Self>();
        Layout::from_size_align(size, align).unwrap()
    }

    /// Allocates a new `ComponentInstance + VMComponentContext` pair on the
    /// heap with `malloc` and configures it for the `component` specified.
    pub(crate) fn new(
        id: ComponentInstanceId,
        component: &Component,
        resource_types: Arc<PrimaryMap<ResourceIndex, ResourceType>>,
        imports: &Arc<PrimaryMap<RuntimeImportIndex, RuntimeImport>>,
        store: NonNull<dyn VMStore>,
    ) -> OwnedComponentInstance {
        let offsets = VMComponentOffsets::new(HostPtr, component.env_component());
        let num_instances = component.env_component().num_runtime_component_instances;
        let mut instance_handle_tables =
            PrimaryMap::with_capacity(num_instances.try_into().unwrap());
        for _ in 0..num_instances {
            instance_handle_tables.push(HandleTable::default());
        }

        let mut ret = OwnedInstance::new(ComponentInstance {
            id,
            offsets,
            instance_handle_tables,
            instances: PrimaryMap::with_capacity(
                component
                    .env_component()
                    .num_runtime_instances
                    .try_into()
                    .unwrap(),
            ),
            component: component.clone(),
            resource_types,
            imports: imports.clone(),
            store: VMStoreRawPtr(store),
            post_return_arg: None,
            #[cfg(feature = "component-model-async")]
            concurrent_state: concurrent::ConcurrentState::new(component),
            vmctx: OwnedVMContext::new(),
        });
        unsafe {
            ret.get_mut().initialize_vmctx();
        }
        ret
    }

    #[inline]
    pub fn vmctx(&self) -> NonNull<VMComponentContext> {
        InstanceLayout::vmctx(self)
    }

    /// Returns a pointer to the "may leave" flag for this instance specified
    /// for canonical lowering and lifting operations.
    #[inline]
    pub fn instance_flags(&self, instance: RuntimeComponentInstanceIndex) -> InstanceFlags {
        unsafe {
            let ptr = self
                .vmctx_plus_offset_raw::<VMGlobalDefinition>(self.offsets.instance_flags(instance));
            InstanceFlags(SendSyncPtr::new(ptr))
        }
    }

    /// Returns the runtime memory definition corresponding to the index of the
    /// memory provided.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn runtime_memory(&self, idx: RuntimeMemoryIndex) -> *mut VMMemoryDefinition {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_memory(idx));
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_ptr()
        }
    }

    /// Returns the runtime table definition and associated instance `VMContext`
    /// corresponding to the index of the table provided.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn runtime_table(&self, idx: RuntimeTableIndex) -> VMTableImport {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VMTableImport>(self.offsets.runtime_table(idx));
            debug_assert!(ret.from.as_ptr() as usize != INVALID_PTR);
            debug_assert!(ret.vmctx.as_ptr() as usize != INVALID_PTR);
            ret
        }
    }

    /// Returns the realloc pointer corresponding to the index provided.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_realloc(idx));
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_non_null()
        }
    }

    /// Returns the async callback pointer corresponding to the index provided.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn runtime_callback(&self, idx: RuntimeCallbackIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_callback(idx));
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_non_null()
        }
    }

    /// Returns the post-return pointer corresponding to the index provided.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn runtime_post_return(&self, idx: RuntimePostReturnIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_post_return(idx));
            debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
            ret.as_non_null()
        }
    }

    /// Returns the host information for the lowered function at the index
    /// specified.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn lowering(&self, idx: LoweredIndex) -> VMLowering {
        unsafe {
            let ret = *self.vmctx_plus_offset::<VMLowering>(self.offsets.lowering(idx));
            debug_assert!(ret.callee.as_ptr() as usize != INVALID_PTR);
            debug_assert!(ret.data.as_ptr() as usize != INVALID_PTR);
            ret
        }
    }

    /// Returns the core wasm `funcref` corresponding to the trampoline
    /// specified.
    ///
    /// The returned function is suitable to pass directly to a wasm module
    /// instantiation and the function contains cranelift-compiled trampolines.
    ///
    /// This can only be called after `idx` has been initialized at runtime
    /// during the instantiation process of a component.
    pub fn trampoline_func_ref(&self, idx: TrampolineIndex) -> NonNull<VMFuncRef> {
        unsafe {
            let offset = self.offsets.trampoline_func_ref(idx);
            let ret = self.vmctx_plus_offset_raw::<VMFuncRef>(offset);
            debug_assert!(
                mem::transmute::<Option<VmPtr<VMWasmCallFunction>>, usize>(ret.as_ref().wasm_call)
                    != INVALID_PTR
            );
            debug_assert!(ret.as_ref().vmctx.as_ptr() as usize != INVALID_PTR);
            ret
        }
    }

    /// Stores the runtime memory pointer at the index specified.
    ///
    /// This is intended to be called during the instantiation process of a
    /// component once a memory is available, which may not be until part-way
    /// through component instantiation.
    ///
    /// Note that it should be a property of the component model that the `ptr`
    /// here is never needed prior to it being configured here in the instance.
    pub fn set_runtime_memory(
        self: Pin<&mut Self>,
        idx: RuntimeMemoryIndex,
        ptr: NonNull<VMMemoryDefinition>,
    ) {
        unsafe {
            let offset = self.offsets.runtime_memory(idx);
            let storage = self.vmctx_plus_offset_mut::<VmPtr<VMMemoryDefinition>>(offset);
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    /// Same as `set_runtime_memory` but for realloc function pointers.
    pub fn set_runtime_realloc(
        self: Pin<&mut Self>,
        idx: RuntimeReallocIndex,
        ptr: NonNull<VMFuncRef>,
    ) {
        unsafe {
            let offset = self.offsets.runtime_realloc(idx);
            let storage = self.vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(offset);
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    /// Same as `set_runtime_memory` but for async callback function pointers.
    pub fn set_runtime_callback(
        self: Pin<&mut Self>,
        idx: RuntimeCallbackIndex,
        ptr: NonNull<VMFuncRef>,
    ) {
        unsafe {
            let offset = self.offsets.runtime_callback(idx);
            let storage = self.vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(offset);
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    /// Same as `set_runtime_memory` but for post-return function pointers.
    pub fn set_runtime_post_return(
        self: Pin<&mut Self>,
        idx: RuntimePostReturnIndex,
        ptr: NonNull<VMFuncRef>,
    ) {
        unsafe {
            let offset = self.offsets.runtime_post_return(idx);
            let storage = self.vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(offset);
            debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
            *storage = ptr.into();
        }
    }

    /// Stores the runtime table pointer at the index specified.
    ///
    /// This is intended to be called during the instantiation process of a
    /// component once a table is available, which may not be until part-way
    /// through component instantiation.
    ///
    /// Note that it should be a property of the component model that the
    /// `import` here is never needed prior to it being configured here in the
    /// instance.
    pub fn set_runtime_table(self: Pin<&mut Self>, idx: RuntimeTableIndex, import: VMTableImport) {
        unsafe {
            let offset = self.offsets.runtime_table(idx);
            let storage = self.vmctx_plus_offset_mut::<VMTableImport>(offset);
            debug_assert!((*storage).vmctx.as_ptr() as usize == INVALID_PTR);
            debug_assert!((*storage).from.as_ptr() as usize == INVALID_PTR);
            *storage = import;
        }
    }

    /// Configures host runtime lowering information associated with imported
    /// functions for the `idx` specified.
    pub fn set_lowering(self: Pin<&mut Self>, idx: LoweredIndex, lowering: VMLowering) {
        unsafe {
            let callee = self.offsets.lowering_callee(idx);
            debug_assert!(*self.vmctx_plus_offset::<usize>(callee) == INVALID_PTR);
            let data = self.offsets.lowering_data(idx);
            debug_assert!(*self.vmctx_plus_offset::<usize>(data) == INVALID_PTR);
            let offset = self.offsets.lowering(idx);
            *self.vmctx_plus_offset_mut(offset) = lowering;
        }
    }

    /// Same as `set_lowering` but for the `resource.drop` functions.
    pub fn set_trampoline(
        self: Pin<&mut Self>,
        idx: TrampolineIndex,
        wasm_call: NonNull<VMWasmCallFunction>,
        array_call: NonNull<VMArrayCallFunction>,
        type_index: VMSharedTypeIndex,
    ) {
        unsafe {
            let offset = self.offsets.trampoline_func_ref(idx);
            debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
            let vmctx = VMOpaqueContext::from_vmcomponent(self.vmctx());
            *self.vmctx_plus_offset_mut(offset) = VMFuncRef {
                wasm_call: Some(wasm_call.into()),
                array_call: array_call.into(),
                type_index,
                vmctx: vmctx.into(),
            };
        }
    }

    /// Configures the destructor for a resource at the `idx` specified.
    ///
    /// This is required to be called for each resource as it's defined within a
    /// component during the instantiation process.
    pub fn set_resource_destructor(
        self: Pin<&mut Self>,
        idx: ResourceIndex,
        dtor: Option<NonNull<VMFuncRef>>,
    ) {
        unsafe {
            let offset = self.offsets.resource_destructor(idx);
            debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
            *self.vmctx_plus_offset_mut(offset) = dtor.map(VmPtr::from);
        }
    }

    /// Returns the destructor, if any, for `idx`.
    ///
    /// This is only valid to call after `set_resource_destructor`, or typically
    /// after instantiation.
    pub fn resource_destructor(&self, idx: ResourceIndex) -> Option<NonNull<VMFuncRef>> {
        unsafe {
            let offset = self.offsets.resource_destructor(idx);
            debug_assert!(*self.vmctx_plus_offset::<usize>(offset) != INVALID_PTR);
            (*self.vmctx_plus_offset::<Option<VmPtr<VMFuncRef>>>(offset)).map(|p| p.as_non_null())
        }
    }

    unsafe fn initialize_vmctx(mut self: Pin<&mut Self>) {
        let offset = self.offsets.magic();
        // SAFETY: it's safe to write the magic value during initialization and
        // this is also the right type of value to write.
        unsafe {
            *self.as_mut().vmctx_plus_offset_mut(offset) = VMCOMPONENT_MAGIC;
        }

        // Initialize the built-in functions
        //
        // SAFETY: it's safe to initialize the vmctx in this function and this
        // is also the right type of value to store in the vmctx.
        static BUILTINS: libcalls::VMComponentBuiltins = libcalls::VMComponentBuiltins::INIT;
        let ptr = BUILTINS.expose_provenance();
        let offset = self.offsets.builtins();
        unsafe {
            *self.as_mut().vmctx_plus_offset_mut(offset) = VmPtr::from(ptr);
        }

        // SAFETY: it's safe to initialize the vmctx in this function and this
        // is also the right type of value to store in the vmctx.
        let offset = self.offsets.vm_store_context();
        unsafe {
            *self.as_mut().vmctx_plus_offset_mut(offset) =
                VmPtr::from(self.store.0.as_ref().vm_store_context_ptr());
        }

        for i in 0..self.offsets.num_runtime_component_instances {
            let i = RuntimeComponentInstanceIndex::from_u32(i);
            let mut def = VMGlobalDefinition::new();
            // SAFETY: this is a valid initialization of all globals which are
            // 32-bit values.
            unsafe {
                *def.as_i32_mut() = FLAG_MAY_ENTER | FLAG_MAY_LEAVE;
                self.instance_flags(i).as_raw().write(def);
            }
        }

        // In debug mode set non-null bad values to all "pointer looking" bits
        // and pieces related to lowering and such. This'll help detect any
        // erroneous usage and enable debug assertions above as well to prevent
        // loading these before they're configured or setting them twice.
        //
        // SAFETY: it's valid to write a garbage pointer during initialization
        // when this is otherwise uninitialized memory.
        if cfg!(debug_assertions) {
            for i in 0..self.offsets.num_lowerings {
                let i = LoweredIndex::from_u32(i);
                let offset = self.offsets.lowering_callee(i);
                // SAFETY: see above
                unsafe {
                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
                }
                let offset = self.offsets.lowering_data(i);
                // SAFETY: see above
                unsafe {
                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
                }
            }
            for i in 0..self.offsets.num_trampolines {
                let i = TrampolineIndex::from_u32(i);
                let offset = self.offsets.trampoline_func_ref(i);
                // SAFETY: see above
                unsafe {
                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
                }
            }
            for i in 0..self.offsets.num_runtime_memories {
                let i = RuntimeMemoryIndex::from_u32(i);
                let offset = self.offsets.runtime_memory(i);
                // SAFETY: see above
                unsafe {
                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
                }
            }
            for i in 0..self.offsets.num_runtime_reallocs {
                let i = RuntimeReallocIndex::from_u32(i);
                let offset = self.offsets.runtime_realloc(i);
                // SAFETY: see above
                unsafe {
                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
                }
            }
            for i in 0..self.offsets.num_runtime_callbacks {
                let i = RuntimeCallbackIndex::from_u32(i);
                let offset = self.offsets.runtime_callback(i);
                // SAFETY: see above
                unsafe {
                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
                }
            }
            for i in 0..self.offsets.num_runtime_post_returns {
                let i = RuntimePostReturnIndex::from_u32(i);
                let offset = self.offsets.runtime_post_return(i);
                // SAFETY: see above
                unsafe {
                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
                }
            }
            for i in 0..self.offsets.num_resources {
                let i = ResourceIndex::from_u32(i);
                let offset = self.offsets.resource_destructor(i);
                // SAFETY: see above
                unsafe {
                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
                }
            }
            for i in 0..self.offsets.num_runtime_tables {
                let i = RuntimeTableIndex::from_u32(i);
                let offset = self.offsets.runtime_table(i);
                // SAFETY: see above
                unsafe {
                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
                }
            }
        }
    }

    /// Returns a reference to the component type information for this
    /// instance.
    pub fn component(&self) -> &Component {
        &self.component
    }

    /// Same as [`Self::component`] but additionally returns the
    /// `Pin<&mut Self>` with the same original lifetime.
    pub fn component_and_self(self: Pin<&mut Self>) -> (&Component, Pin<&mut Self>) {
        // SAFETY: this function is projecting both `&Component` and the same
        // pointer both connected to the same lifetime. This is safe because
        // it's a contract of `Pin<&mut Self>` that the `Component` field is
        // never written, meaning it's effectively unsafe to have `&mut
        // Component` projected from `Pin<&mut Self>`. Consequently it's safe to
        // have a read-only view of the field while still retaining mutable
        // access to all other fields.
        let component = unsafe { &*(&raw const self.component) };
        (component, self)
    }

    /// Returns a reference to the resource type information.
    pub fn resource_types(&self) -> &Arc<PrimaryMap<ResourceIndex, ResourceType>> {
        &self.resource_types
    }

    /// Returns a mutable reference to the resource type information.
    pub fn resource_types_mut(
        self: Pin<&mut Self>,
    ) -> &mut Arc<PrimaryMap<ResourceIndex, ResourceType>> {
        // SAFETY: we've chosen the `Pin` guarantee of `Self` to not apply to
        // the map returned.
        unsafe { &mut self.get_unchecked_mut().resource_types }
    }

    /// Returns whether the resource that `ty` points to is owned by the
    /// instance that `ty` corresponds to.
    ///
    /// This is used when lowering borrows to skip table management and instead
    /// thread through the underlying representation directly.
    pub fn resource_owned_by_own_instance(&self, ty: TypeResourceTableIndex) -> bool {
        let resource = &self.component.types()[ty];
        let component = self.component.env_component();
        let idx = match component.defined_resource_index(resource.ty) {
            Some(idx) => idx,
            None => return false,
        };
        resource.instance == component.defined_resource_instances[idx]
    }

    /// Returns the runtime state of resources associated with this component.
    #[inline]
    pub fn guest_tables(
        self: Pin<&mut Self>,
    ) -> (
        &mut PrimaryMap<RuntimeComponentInstanceIndex, HandleTable>,
        &ComponentTypes,
    ) {
        // SAFETY: we've chosen the `Pin` guarantee of `Self` to not apply to
        // the map returned.
        unsafe {
            let me = self.get_unchecked_mut();
            (&mut me.instance_handle_tables, me.component.types())
        }
    }

    /// Returns the destructor and instance flags for the specified resource
    /// table type.
    ///
    /// This will look up the original definition of the `ty` table and return
    /// the destructor/flags for that.
    pub fn dtor_and_flags(
        &self,
        ty: TypeResourceTableIndex,
    ) -> (Option<NonNull<VMFuncRef>>, Option<InstanceFlags>) {
        let resource = self.component.types()[ty].ty;
        let dtor = self.resource_destructor(resource);
        let component = self.component.env_component();
        let flags = component.defined_resource_index(resource).map(|i| {
            let instance = component.defined_resource_instances[i];
            self.instance_flags(instance)
        });
        (dtor, flags)
    }

    /// Returns the store-local id that points to this component.
    pub fn id(&self) -> ComponentInstanceId {
        self.id
    }

    /// Pushes a new runtime instance that's been created into
    /// `self.instances`.
    pub fn push_instance_id(self: Pin<&mut Self>, id: InstanceId) -> RuntimeInstanceIndex {
        self.instances_mut().push(id)
    }

    /// Returns the [`InstanceId`] previously pushed by `push_instance_id`
    /// above.
    ///
    /// # Panics
    ///
    /// Panics if `idx` hasn't been initialized yet.
    pub fn instance(&self, idx: RuntimeInstanceIndex) -> InstanceId {
        self.instances[idx]
    }

    fn instances_mut(self: Pin<&mut Self>) -> &mut PrimaryMap<RuntimeInstanceIndex, InstanceId> {
        // SAFETY: we've chosen the `Pin` guarantee of `Self` to not apply to
        // the map returned.
        unsafe { &mut self.get_unchecked_mut().instances }
    }

    /// Looks up the value used for `import` at runtime.
    ///
    /// # Panics
    ///
    /// Panics if `import` is out of bounds for this component.
    pub(crate) fn runtime_import(&self, import: RuntimeImportIndex) -> &RuntimeImport {
        &self.imports[import]
    }

    /// Returns an `InstancePre<T>` which can be used to re-instantiate this
    /// component if desired.
    ///
    /// # Safety
    ///
    /// This function places no bounds on `T` so it's up to the caller to match
    /// that up appropriately with the store that this instance resides within.
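    ///
    /// # Example
    ///
    /// A hedged sketch of re-instantiation, assuming a hypothetical host state
    /// type `MyState` whose store this instance actually resides in:
    ///
    /// ```ignore
    /// // SAFETY: `MyState` matches the `T` of the store owning this instance.
    /// let pre: InstancePre<MyState> = unsafe { instance.instance_pre::<MyState>() };
    /// let another_instance = pre.instantiate(&mut store)?;
    /// ```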
    pub unsafe fn instance_pre<T>(&self) -> InstancePre<T> {
        // SAFETY: The `T` part of `new_unchecked` is forwarded as a contract of
        // this function, and otherwise the validity of the components of the
        // InstancePre should be guaranteed as it's what we were built with
        // ourselves.
        unsafe {
            InstancePre::new_unchecked(
                self.component.clone(),
                self.imports.clone(),
                self.resource_types.clone(),
            )
        }
    }

    /// Sets the cached argument for the canonical ABI option `post-return` to
    /// the `arg` specified.
    ///
    /// This function is used in conjunction with function calls to record,
    /// after a function call completes, the optional ABI return value. This
    /// return value is cached within this instance for future use when the
    /// `post_return` Rust-API-level function is invoked.
    ///
    /// Note that `index` here is the index of the export that was just
    /// invoked, and this is used to ensure that `post_return` is called on the
    /// same function afterwards. This restriction technically isn't necessary
    /// though and may be one we want to lift in the future.
    ///
    /// # Panics
    ///
    /// This function will panic if `post_return_arg` is already set to `Some`.
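    ///
    /// # Example
    ///
    /// A hedged sketch of the intended pairing with `post_return_arg_take`;
    /// the `export` value here is hypothetical:
    ///
    /// ```ignore
    /// instance.as_mut().post_return_arg_set(export, ValRaw::i32(7));
    /// // Later, when `post_return` runs for the same export, the cached
    /// // argument is recovered exactly once.
    /// let arg = instance.as_mut().post_return_arg_take(export);
    /// assert!(arg.is_some());
    /// ```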
    pub fn post_return_arg_set(self: Pin<&mut Self>, index: ExportIndex, arg: ValRaw) {
        assert!(self.post_return_arg.is_none());
        *self.post_return_arg_mut() = Some((index, arg));
    }

    /// Re-acquires the value originally saved via `post_return_arg_set`.
    ///
    /// This function will take a function `index` that's having its
    /// `post_return` function called. If an argument was previously stored and
    /// `index` matches the index that was stored then `Some(arg)` is returned.
    /// Otherwise `None` is returned.
    pub fn post_return_arg_take(self: Pin<&mut Self>, index: ExportIndex) -> Option<ValRaw> {
        let post_return_arg = self.post_return_arg_mut();
        let (expected_index, arg) = post_return_arg.take()?;
        if index != expected_index {
            *post_return_arg = Some((expected_index, arg));
            None
        } else {
            Some(arg)
        }
    }

    fn post_return_arg_mut(self: Pin<&mut Self>) -> &mut Option<(ExportIndex, ValRaw)> {
        // SAFETY: we've chosen the `Pin` guarantee of `Self` to not apply to
        // the value returned.
        unsafe { &mut self.get_unchecked_mut().post_return_arg }
    }

    #[cfg(feature = "component-model-async")]
    pub(crate) fn concurrent_state_mut(self: Pin<&mut Self>) -> &mut concurrent::ConcurrentState {
        // SAFETY: we've chosen the `Pin` guarantee of `Self` to not apply to
        // the state returned.
        unsafe { &mut self.get_unchecked_mut().concurrent_state }
    }

    pub(crate) fn check_may_leave(
        &self,
        instance: RuntimeComponentInstanceIndex,
    ) -> anyhow::Result<()> {
        let flags = self.instance_flags(instance);
        if unsafe { flags.may_leave() } {
            Ok(())
        } else {
            Err(anyhow::anyhow!(crate::Trap::CannotLeaveComponent))
        }
    }
}

// SAFETY: `layout` should describe this accurately and `OwnedVMContext` is the
// last field of `ComponentInstance`.
unsafe impl InstanceLayout for ComponentInstance {
    /// Technically it is not required to `alloc_zeroed` here. The primary
    /// reason for doing this is because a component context starts in a
    /// "partly initialized" state where pointers and such are configured as
    /// the instantiation process continues. The component model should
    /// guarantee that we never access uninitialized memory in the context, but
    /// to help protect against possible bugs a zeroed allocation is done here
    /// to try to contain use-before-initialized issues.
    const INIT_ZEROED: bool = true;

    type VMContext = VMComponentContext;

    fn layout(&self) -> Layout {
        ComponentInstance::alloc_layout(&self.offsets)
    }

    fn owned_vmctx(&self) -> &OwnedVMContext<VMComponentContext> {
        &self.vmctx
    }

    fn owned_vmctx_mut(&mut self) -> &mut OwnedVMContext<VMComponentContext> {
        &mut self.vmctx
    }
}

pub type OwnedComponentInstance = OwnedInstance<ComponentInstance>;

impl VMComponentContext {
    /// Moves the `self` pointer backwards to the `ComponentInstance` pointer
    /// that this `VMComponentContext` trails.
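    ///
    /// # Example
    ///
    /// A hedged sketch of the round trip through the trailing-allocation
    /// layout; the variable names here are illustrative:
    ///
    /// ```ignore
    /// let vmctx: NonNull<VMComponentContext> = component_instance.vmctx();
    /// // Because the context trails its `ComponentInstance`, we can walk
    /// // backwards to recover the original instance pointer.
    /// let recovered: *mut ComponentInstance = unsafe { vmctx.as_ref().instance() };
    /// ```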
    pub fn instance(&self) -> *mut ComponentInstance {
        unsafe {
            (self as *const Self as *mut u8)
                .offset(-(offset_of!(ComponentInstance, vmctx) as isize))
                as *mut ComponentInstance
        }
    }

    /// Helper function to cast between context types using a debug assertion to
    /// protect against some mistakes.
    ///
    /// # Safety
    ///
    /// The `opaque` value must be a valid pointer where it's safe to read its
    /// "magic" value.
    #[inline]
    pub unsafe fn from_opaque(opaque: NonNull<VMOpaqueContext>) -> NonNull<VMComponentContext> {
        // See comments in `VMContext::from_opaque` for this debug assert
        //
        // SAFETY: it's a contract of this function that it's safe to read
        // `opaque`.
        unsafe {
            debug_assert_eq!(opaque.as_ref().magic, VMCOMPONENT_MAGIC);
        }
        opaque.cast()
    }
}

impl VMOpaqueContext {
    /// Helper function to clearly indicate the cast desired
    #[inline]
    pub fn from_vmcomponent(ptr: NonNull<VMComponentContext>) -> NonNull<VMOpaqueContext> {
        ptr.cast()
    }
}

#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct InstanceFlags(SendSyncPtr<VMGlobalDefinition>);

impl InstanceFlags {
    /// Wraps the given pointer as an `InstanceFlags`
    ///
    /// # Unsafety
    ///
    /// This is a raw pointer argument which needs to be valid for the lifetime
    /// that `InstanceFlags` is used.
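    ///
    /// # Example
    ///
    /// A hedged sketch of wrapping and toggling flags; obtaining the pointer
    /// through `ComponentInstance::instance_flags` is the typical path, and
    /// `ptr` here is assumed to be such a valid flags global:
    ///
    /// ```ignore
    /// let mut flags = unsafe { InstanceFlags::from_raw(ptr) };
    /// unsafe {
    ///     flags.set_may_enter(false);
    ///     debug_assert!(!flags.may_enter());
    /// }
    /// ```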
    pub unsafe fn from_raw(ptr: NonNull<VMGlobalDefinition>) -> InstanceFlags {
        InstanceFlags(SendSyncPtr::from(ptr))
    }

    #[inline]
    pub unsafe fn may_leave(&self) -> bool {
        unsafe { *self.as_raw().as_ref().as_i32() & FLAG_MAY_LEAVE != 0 }
    }

    #[inline]
    pub unsafe fn set_may_leave(&mut self, val: bool) {
        unsafe {
            if val {
                *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_LEAVE;
            } else {
                *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_LEAVE;
            }
        }
    }

    #[inline]
    pub unsafe fn may_enter(&self) -> bool {
        unsafe { *self.as_raw().as_ref().as_i32() & FLAG_MAY_ENTER != 0 }
    }

    #[inline]
    pub unsafe fn set_may_enter(&mut self, val: bool) {
        unsafe {
            if val {
                *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_ENTER;
            } else {
                *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_ENTER;
            }
        }
    }

    #[inline]
    pub unsafe fn needs_post_return(&self) -> bool {
        unsafe { *self.as_raw().as_ref().as_i32() & FLAG_NEEDS_POST_RETURN != 0 }
    }

    #[inline]
    pub unsafe fn set_needs_post_return(&mut self, val: bool) {
        unsafe {
            if val {
                *self.as_raw().as_mut().as_i32_mut() |= FLAG_NEEDS_POST_RETURN;
            } else {
                *self.as_raw().as_mut().as_i32_mut() &= !FLAG_NEEDS_POST_RETURN;
            }
        }
    }

    #[inline]
    pub fn as_raw(&self) -> NonNull<VMGlobalDefinition> {
        self.0.as_non_null()
    }
}