use crate::prelude::*;
use crate::runtime::vm::const_expr::{ConstEvalContext, ConstExprEvaluator};
use crate::runtime::vm::imports::Imports;
use crate::runtime::vm::instance::{Instance, InstanceHandle};
use crate::runtime::vm::memory::Memory;
use crate::runtime::vm::mpk::ProtectionKey;
use crate::runtime::vm::table::Table;
use crate::runtime::vm::{CompiledModuleId, ModuleRuntimeInfo};
use crate::store::{InstanceId, StoreOpaque, StoreResourceLimiter};
use crate::{OpaqueRootScope, Val};
use core::{mem, ptr};
use wasmtime_environ::{
    DefinedMemoryIndex, DefinedTableIndex, HostPtr, InitMemory, MemoryInitialization,
    MemoryInitializer, Module, PrimaryMap, SizeOverflow, TableInitialValue, Trap, VMOffsets,
};

#[cfg(feature = "gc")]
use crate::runtime::vm::{GcHeap, GcRuntime};

#[cfg(feature = "component-model")]
use wasmtime_environ::{
    StaticModuleIndex,
    component::{Component, VMComponentOffsets},
};

mod on_demand;
pub use self::on_demand::OnDemandInstanceAllocator;

#[cfg(feature = "pooling-allocator")]
mod pooling;
#[cfg(feature = "pooling-allocator")]
pub use self::pooling::{
    InstanceLimits, PoolConcurrencyLimitError, PoolingAllocatorMetrics, PoolingInstanceAllocator,
    PoolingInstanceAllocatorConfig,
};

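/// A request to allocate the resources backing a single module instance.
///
/// Bundles together everything an `InstanceAllocator` needs when creating the
/// memories and tables for an instance: the instance's store-local `id`, the
/// compiled module's `runtime_info`, the resolved `imports`, the owning
/// `store`, and an optional resource `limiter` consulted during allocation.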
pub struct InstanceAllocationRequest<'a, 'b> {
    pub id: InstanceId,
    pub runtime_info: &'a ModuleRuntimeInfo,
    pub imports: Imports<'a>,
    pub store: &'a StoreOpaque,
    pub limiter: Option<&'a mut StoreResourceLimiter<'b>>,
}

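/// Index of a linear-memory allocation within an allocator's backing storage.
///
/// The pooling allocator uses this to identify which slot a `Memory` came
/// from so the slot can be returned on deallocation. The `Default` value
/// (`u32::MAX`) is a placeholder used by allocators, such as the on-demand
/// allocator, that do not track slot indices.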
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct MemoryAllocationIndex(u32);

impl Default for MemoryAllocationIndex {
    fn default() -> Self {
        MemoryAllocationIndex(u32::MAX)
    }
}

impl MemoryAllocationIndex {
    #[cfg(feature = "pooling-allocator")]
    pub fn index(&self) -> usize {
        self.0 as usize
    }
}

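/// Index of a table allocation within an allocator's backing storage,
/// analogous to [`MemoryAllocationIndex`] but for tables.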
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct TableAllocationIndex(u32);

impl Default for TableAllocationIndex {
    fn default() -> Self {
        TableAllocationIndex(u32::MAX)
    }
}

impl TableAllocationIndex {
    #[cfg(feature = "pooling-allocator")]
    pub fn index(&self) -> usize {
        self.0 as usize
    }
}

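/// Index of a GC heap allocation within an allocator's backing storage,
/// analogous to [`MemoryAllocationIndex`] but for GC heaps.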
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct GcHeapAllocationIndex(u32);

impl Default for GcHeapAllocationIndex {
    fn default() -> Self {
        GcHeapAllocationIndex(u32::MAX)
    }
}

impl GcHeapAllocationIndex {
    pub fn index(&self) -> usize {
        self.0 as usize
    }
}

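/// Trait implemented by runtime instance allocators (on-demand and pooling).
///
/// An allocator hands out the raw resources backing an instance: linear
/// memories, tables, fiber stacks, and GC heaps, and keeps count of live core
/// and component instances.
///
/// # Unsafety
///
/// The trait is `unsafe` because implementations and callers must uphold an
/// allocate/deallocate pairing: each `deallocate_*` method must only be given
/// values previously returned from the corresponding `allocate_*` method of
/// this same allocator, and each allocation must be deallocated at most once.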
#[async_trait::async_trait]
pub unsafe trait InstanceAllocator: Send + Sync {
    #[cfg(feature = "component-model")]
    fn validate_component<'a>(
        &self,
        component: &Component,
        offsets: &VMComponentOffsets<HostPtr>,
        get_module: &'a dyn Fn(StaticModuleIndex) -> &'a Module,
    ) -> Result<()>;

    fn validate_module(&self, module: &Module, offsets: &VMOffsets<HostPtr>) -> Result<()>;

    #[cfg(feature = "gc")]
    fn validate_memory(&self, memory: &wasmtime_environ::Memory) -> Result<()>;

    #[cfg(feature = "component-model")]
    fn increment_component_instance_count(&self) -> Result<()>;

    #[cfg(feature = "component-model")]
    fn decrement_component_instance_count(&self);

    fn increment_core_instance_count(&self) -> Result<()>;

    fn decrement_core_instance_count(&self);

    async fn allocate_memory(
        &self,
        request: &mut InstanceAllocationRequest<'_, '_>,
        ty: &wasmtime_environ::Memory,
        memory_index: Option<DefinedMemoryIndex>,
    ) -> Result<(MemoryAllocationIndex, Memory)>;

    unsafe fn deallocate_memory(
        &self,
        memory_index: Option<DefinedMemoryIndex>,
        allocation_index: MemoryAllocationIndex,
        memory: Memory,
    );

    async fn allocate_table(
        &self,
        req: &mut InstanceAllocationRequest<'_, '_>,
        table: &wasmtime_environ::Table,
        table_index: DefinedTableIndex,
    ) -> Result<(TableAllocationIndex, Table)>;

    unsafe fn deallocate_table(
        &self,
        table_index: DefinedTableIndex,
        allocation_index: TableAllocationIndex,
        table: Table,
    );

    #[cfg(feature = "async")]
    fn allocate_fiber_stack(&self) -> Result<wasmtime_fiber::FiberStack>;

    #[cfg(feature = "async")]
    unsafe fn deallocate_fiber_stack(&self, stack: wasmtime_fiber::FiberStack);

    #[cfg(feature = "gc")]
    fn allocate_gc_heap(
        &self,
        engine: &crate::Engine,
        gc_runtime: &dyn GcRuntime,
        memory_alloc_index: MemoryAllocationIndex,
        memory: Memory,
    ) -> Result<(GcHeapAllocationIndex, Box<dyn GcHeap>)>;

    #[cfg(feature = "gc")]
    #[must_use = "it is the caller's responsibility to deallocate the GC heap's underlying memory \
                  storage after the GC heap is deallocated"]
    fn deallocate_gc_heap(
        &self,
        allocation_index: GcHeapAllocationIndex,
        gc_heap: Box<dyn GcHeap>,
    ) -> (MemoryAllocationIndex, Memory);

    fn purge_module(&self, module: CompiledModuleId);

    fn next_available_pkey(&self) -> Option<ProtectionKey>;

    fn restrict_to_pkey(&self, pkey: ProtectionKey);

    fn allow_all_pkeys(&self);

    #[cfg(feature = "pooling-allocator")]
    fn as_pooling(&self) -> Option<&PoolingInstanceAllocator> {
        None
    }
}

impl dyn InstanceAllocator + '_ {
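    /// Allocates everything needed to instantiate `request`'s module.
    ///
    /// Validates the module (in debug builds), bumps the core instance count,
    /// and allocates all defined memories and tables before handing them to
    /// `Instance::new`. If any allocation fails, the resources allocated so
    /// far are released again via the `DeallocateOnDrop` guard below.
    ///
    /// # Unsafety
    ///
    /// The returned handle must eventually be passed back to this allocator's
    /// `deallocate_module`.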
    pub(crate) async unsafe fn allocate_module(
        &self,
        mut request: InstanceAllocationRequest<'_, '_>,
    ) -> Result<InstanceHandle> {
        let module = request.runtime_info.env_module();

        if cfg!(debug_assertions) {
            InstanceAllocator::validate_module(self, module, request.runtime_info.offsets())
                .expect("module should have already been validated before allocation");
        }

        self.increment_core_instance_count()?;

        let num_defined_memories = module.num_defined_memories();
        let num_defined_tables = module.num_defined_tables();

        let mut guard = DeallocateOnDrop {
            run_deallocate: true,
            memories: PrimaryMap::with_capacity(num_defined_memories),
            tables: PrimaryMap::with_capacity(num_defined_tables),
            allocator: self,
        };

        self.allocate_memories(&mut request, &mut guard.memories)
            .await?;
        self.allocate_tables(&mut request, &mut guard.tables)
            .await?;
        // Allocation succeeded: disarm the cleanup guard before ownership of
        // the memories and tables is transferred to the instance.
        guard.run_deallocate = false;
        return unsafe {
            Ok(Instance::new(
                request,
                mem::take(&mut guard.memories),
                mem::take(&mut guard.tables),
                &module.memories,
            ))
        };

        // Deallocates any partially allocated resources if instantiation
        // fails before `Instance::new` takes ownership of them.
        struct DeallocateOnDrop<'a> {
            run_deallocate: bool,
            memories: PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
            tables: PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
            allocator: &'a (dyn InstanceAllocator + 'a),
        }

        impl Drop for DeallocateOnDrop<'_> {
            fn drop(&mut self) {
                if !self.run_deallocate {
                    return;
                }
                unsafe {
                    self.allocator.deallocate_memories(&mut self.memories);
                    self.allocator.deallocate_tables(&mut self.tables);
                }
                self.allocator.decrement_core_instance_count();
            }
        }
    }

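    /// Deallocates the memories and tables of a previously allocated instance
    /// and decrements the core instance count.
    ///
    /// # Unsafety
    ///
    /// `handle` must have been produced by this allocator's `allocate_module`
    /// and must not be used after this call.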
    pub(crate) unsafe fn deallocate_module(&self, handle: &mut InstanceHandle) {
        unsafe {
            self.deallocate_memories(handle.get_mut().memories_mut());
            self.deallocate_tables(handle.get_mut().tables_mut());
        }

        self.decrement_core_instance_count();
    }

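    /// Allocates a memory for every defined (non-imported) memory in the
    /// module, pushing the results into `memories`.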
    async fn allocate_memories(
        &self,
        request: &mut InstanceAllocationRequest<'_, '_>,
        memories: &mut PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
    ) -> Result<()> {
        let module = request.runtime_info.env_module();

        if cfg!(debug_assertions) {
            InstanceAllocator::validate_module(self, module, request.runtime_info.offsets())
                .expect("module should have already been validated before allocation");
        }

        for (memory_index, ty) in module.memories.iter().skip(module.num_imported_memories) {
            let memory_index = module
                .defined_memory_index(memory_index)
                .expect("should be a defined memory since we skipped imported ones");

            let memory = self
                .allocate_memory(request, ty, Some(memory_index))
                .await?;
            memories.push(memory);
        }

        Ok(())
    }

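    /// Deallocates every memory in `memories`, leaving the map empty.
    ///
    /// # Unsafety
    ///
    /// The entries must have come from `allocate_memories` on this allocator.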
    unsafe fn deallocate_memories(
        &self,
        memories: &mut PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
    ) {
        for (memory_index, (allocation_index, memory)) in mem::take(memories) {
            unsafe {
                self.deallocate_memory(Some(memory_index), allocation_index, memory);
            }
        }
    }

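    /// Allocates a table for every defined (non-imported) table in the
    /// module, pushing the results into `tables`.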
    async fn allocate_tables(
        &self,
        request: &mut InstanceAllocationRequest<'_, '_>,
        tables: &mut PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
    ) -> Result<()> {
        let module = request.runtime_info.env_module();

        if cfg!(debug_assertions) {
            InstanceAllocator::validate_module(self, module, request.runtime_info.offsets())
                .expect("module should have already been validated before allocation");
        }

        for (index, table) in module.tables.iter().skip(module.num_imported_tables) {
            let def_index = module
                .defined_table_index(index)
                .expect("should be a defined table since we skipped imported ones");

            let table = self.allocate_table(request, table, def_index).await?;
            tables.push(table);
        }

        Ok(())
    }

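    /// Deallocates every table in `tables`, leaving the map empty.
    ///
    /// # Unsafety
    ///
    /// The entries must have come from `allocate_tables` on this allocator.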
    unsafe fn deallocate_tables(
        &self,
        tables: &mut PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
    ) {
        for (table_index, (allocation_index, table)) in mem::take(tables) {
            unsafe {
                self.deallocate_table(table_index, allocation_index, table);
            }
        }
    }
}

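/// Verifies that every active element segment in `module` fits within the
/// current size of its target table, returning an error otherwise.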
fn check_table_init_bounds(
    store: &mut StoreOpaque,
    instance: InstanceId,
    module: &Module,
) -> Result<()> {
    let mut const_evaluator = ConstExprEvaluator::default();
    let mut store = OpaqueRootScope::new(store);

    for segment in module.table_initialization.segments.iter() {
        let mut context = ConstEvalContext::new(instance);
        let start = const_evaluator
            .eval_int(&mut store, &mut context, &segment.offset)
            .expect("const expression should be valid");
        let start = usize::try_from(start.unwrap_i32().cast_unsigned()).unwrap();
        let end = start.checked_add(usize::try_from(segment.elements.len()).unwrap());

        let table = store.instance_mut(instance).get_table(segment.table_index);
        match end {
            // The segment lies entirely within the table's current size.
            Some(end) if end <= table.size() => {}
            _ => {
                bail!("table out of bounds: elements segment does not fit")
            }
        }
    }

    Ok(())
}

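/// Runs the table-initialization steps for a freshly allocated instance:
/// first fills tables that have a constant initial value, then applies each
/// active element segment at its evaluated offset.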
async fn initialize_tables(
    store: &mut StoreOpaque,
    mut limiter: Option<&mut StoreResourceLimiter<'_>>,
    context: &mut ConstEvalContext,
    const_evaluator: &mut ConstExprEvaluator,
    module: &Module,
) -> Result<()> {
    let mut store = OpaqueRootScope::new(store);
    for (table, init) in module.table_initialization.initial_values.iter() {
        match init {
            TableInitialValue::Null { precomputed: _ } => {}

            TableInitialValue::Expr(expr) => {
                let init = const_evaluator
                    .eval(&mut store, limiter.as_deref_mut(), context, expr)
                    .await?;
                let idx = module.table_index(table);
                let id = store.id();
                let table = store
                    .instance_mut(context.instance)
                    .get_exported_table(id, idx);
                let size = table._size(&store);
                table._fill(&mut store, 0, init.ref_().unwrap(), size)?;
            }
        }
    }

    for segment in module.table_initialization.segments.iter() {
        let start = const_evaluator
            .eval_int(&mut store, context, &segment.offset)
            .expect("const expression should be valid");
        let start = get_index(
            start,
            store.instance(context.instance).env_module().tables[segment.table_index].idx_type,
        );
        Instance::table_init_segment(
            &mut store,
            limiter.as_deref_mut(),
            context.instance,
            const_evaluator,
            segment.table_index,
            &segment.elements,
            start,
            0,
            segment.elements.len(),
        )
        .await?;
    }

    Ok(())
}

/// Converts an evaluated offset `val` into a `u64` according to the index
/// type (32- or 64-bit) of the table or memory being initialized.
fn get_index(val: &Val, ty: wasmtime_environ::IndexType) -> u64 {
    match ty {
        wasmtime_environ::IndexType::I32 => val.unwrap_i32().cast_unsigned().into(),
        wasmtime_environ::IndexType::I64 => val.unwrap_i64().cast_unsigned(),
    }
}

fn get_memory_init_start(
    store: &mut StoreOpaque,
    init: &MemoryInitializer,
    instance: InstanceId,
) -> Result<u64> {
    let mut context = ConstEvalContext::new(instance);
    let mut const_evaluator = ConstExprEvaluator::default();
    let mut store = OpaqueRootScope::new(store);
    const_evaluator
        .eval_int(&mut store, &mut context, &init.offset)
        .map(|v| {
            get_index(
                v,
                store.instance(instance).env_module().memories[init.memory_index].idx_type,
            )
        })
}

fn check_memory_init_bounds(
    store: &mut StoreOpaque,
    instance: InstanceId,
    initializers: &[MemoryInitializer],
) -> Result<()> {
    for init in initializers {
        let memory = store.instance_mut(instance).get_memory(init.memory_index);
        let start = get_memory_init_start(store, init, instance)?;
        let end = usize::try_from(start)
            .ok()
            .and_then(|start| start.checked_add(init.data.len()));

        match end {
            // The data segment lies entirely within the memory's current length.
            Some(end) if end <= memory.current_length() => {}
            _ => {
                bail!("memory out of bounds: data segment does not fit")
            }
        }
    }

    Ok(())
}

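/// Copies the module's data segments into the instance's linear memories.
///
/// Iteration order and offset handling are delegated to
/// `MemoryInitialization::init_memory`, which is driven by callbacks that
/// read memory sizes, evaluate constant offset expressions, and perform the
/// raw copies.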
fn initialize_memories(
    store: &mut StoreOpaque,
    context: &mut ConstEvalContext,
    const_evaluator: &mut ConstExprEvaluator,
    module: &Module,
) -> Result<()> {
    struct InitMemoryAtInstantiation<'a> {
        module: &'a Module,
        store: &'a mut StoreOpaque,
        context: &'a mut ConstEvalContext,
        const_evaluator: &'a mut ConstExprEvaluator,
    }

    impl InitMemory for InitMemoryAtInstantiation<'_> {
        fn memory_size_in_bytes(
            &mut self,
            memory: wasmtime_environ::MemoryIndex,
        ) -> Result<u64, SizeOverflow> {
            let len = self
                .store
                .instance(self.context.instance)
                .get_memory(memory)
                .current_length();
            let len = u64::try_from(len).unwrap();
            Ok(len)
        }

        fn eval_offset(
            &mut self,
            memory: wasmtime_environ::MemoryIndex,
            expr: &wasmtime_environ::ConstExpr,
        ) -> Option<u64> {
            let mut store = OpaqueRootScope::new(&mut *self.store);
            let val = self
                .const_evaluator
                .eval_int(&mut store, self.context, expr)
                .expect("const expression should be valid");
            Some(get_index(
                val,
                store.instance(self.context.instance).env_module().memories[memory].idx_type,
            ))
        }

        fn write(
            &mut self,
            memory_index: wasmtime_environ::MemoryIndex,
            init: &wasmtime_environ::StaticMemoryInitializer,
        ) -> bool {
            let instance = self.store.instance_mut(self.context.instance);
            // Defined memories that are already initialized (for example via a
            // copy-on-write image) don't need their data copied again.
            if let Some(memory_index) = self.module.defined_memory_index(memory_index) {
                if !instance.memories[memory_index].1.needs_init() {
                    return true;
                }
            }
            let memory = instance.get_memory(memory_index);

            unsafe {
                let src = instance.wasm_data(init.data.clone());
                let offset = usize::try_from(init.offset).unwrap();
                let dst = memory.base.as_ptr().add(offset);

                assert!(offset + src.len() <= memory.current_length());

                ptr::copy_nonoverlapping(src.as_ptr(), dst, src.len())
            }
            true
        }
    }

    let ok = module
        .memory_initialization
        .init_memory(&mut InitMemoryAtInstantiation {
            module,
            store,
            context,
            const_evaluator,
        });
    if !ok {
        return Err(Trap::MemoryOutOfBounds.into());
    }

    Ok(())
}

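/// Performs the up-front bounds checks required when bulk-memory semantics
/// are disabled: every element and data segment must fit entirely within its
/// target table or memory before any initialization runs.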
fn check_init_bounds(store: &mut StoreOpaque, instance: InstanceId, module: &Module) -> Result<()> {
    check_table_init_bounds(store, instance, module)?;

    match &module.memory_initialization {
        MemoryInitialization::Segmented(initializers) => {
            check_memory_init_bounds(store, instance, initializers)?;
        }
        MemoryInitialization::Static { .. } => {}
    }

    Ok(())
}

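/// Evaluates each global initializer in `module` and writes the resulting
/// value into the corresponding global of the instance.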
async fn initialize_globals(
    store: &mut StoreOpaque,
    mut limiter: Option<&mut StoreResourceLimiter<'_>>,
    context: &mut ConstEvalContext,
    const_evaluator: &mut ConstExprEvaluator,
    module: &Module,
) -> Result<()> {
    assert!(core::ptr::eq(
        &**store.instance(context.instance).env_module(),
        module
    ));

    let mut store = OpaqueRootScope::new(store);

    for (index, init) in module.global_initializers.iter() {
        // Fast-path simple constant expressions; fall back to the full
        // (possibly async) evaluator otherwise.
        let val = if let Some(val) = const_evaluator.try_simple(init) {
            val
        } else {
            const_evaluator
                .eval(&mut store, limiter.as_deref_mut(), context, init)
                .await?
        };

        let id = store.id();
        let index = module.global_index(index);
        let mut instance = store.instance_mut(context.instance);

        #[cfg(feature = "wmemcheck")]
        if index.as_u32() == 0
            && module.globals[index].wasm_ty == wasmtime_environ::WasmValType::I32
        {
            if let Some(wmemcheck) = instance.as_mut().wmemcheck_state_mut() {
                let size = usize::try_from(val.unwrap_i32()).unwrap();
                wmemcheck.set_stack_size(size);
            }
        }

        let global = instance.as_mut().get_exported_global(id, index);

        unsafe {
            global.set_unchecked(&mut store, &val)?;
        }
    }
    Ok(())
}

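/// Runs all instantiation-time initialization for `instance`: globals first,
/// then tables, then memories.
///
/// When `is_bulk_memory` is false (the bulk-memory proposal is disabled),
/// spec semantics require all segment bounds to be validated before any
/// writes happen, so `check_init_bounds` runs first; with bulk memory
/// enabled, an out-of-bounds segment instead traps partway through.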
pub async fn initialize_instance(
    store: &mut StoreOpaque,
    mut limiter: Option<&mut StoreResourceLimiter<'_>>,
    instance: InstanceId,
    module: &Module,
    is_bulk_memory: bool,
) -> Result<()> {
    if !is_bulk_memory {
        check_init_bounds(store, instance, module)?;
    }

    let mut context = ConstEvalContext::new(instance);
    let mut const_evaluator = ConstExprEvaluator::default();

    initialize_globals(
        store,
        limiter.as_deref_mut(),
        &mut context,
        &mut const_evaluator,
        module,
    )
    .await?;
    initialize_tables(
        store,
        limiter.as_deref_mut(),
        &mut context,
        &mut const_evaluator,
        module,
    )
    .await?;
    initialize_memories(store, &mut context, &mut const_evaluator, module)?;

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn allocator_traits_are_object_safe() {
        fn _instance_allocator(_: &dyn InstanceAllocator) {}
    }
}