From 5e2214f7f03e90dcbea1d0d63d6ce350083cfbfb Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Wed, 21 Feb 2024 16:02:32 -0800 Subject: [PATCH 01/12] Wasmtime: Add a `gc` cargo feature This controls whether support for `ExternRef` and its associated deferred, reference-counting garbage collector is enabled at compile time or not. It will also be used for similarly for Wasmtime's full Wasm GC support as that gets added. --- crates/cranelift/Cargo.toml | 1 + crates/cranelift/src/func_environ.rs | 91 +++++++++++++- crates/environ/Cargo.toml | 1 + crates/environ/src/builtin.rs | 38 ++++-- crates/runtime/Cargo.toml | 1 + crates/runtime/src/instance.rs | 40 +++++- crates/runtime/src/lib.rs | 3 + crates/runtime/src/libcalls.rs | 40 ++++-- crates/runtime/src/table.rs | 39 +++++- crates/runtime/src/vmcontext.rs | 37 +++++- crates/wasmtime/Cargo.toml | 17 +++ crates/wasmtime/src/config.rs | 12 +- crates/wasmtime/src/engine/serialization.rs | 48 +++++-- crates/wasmtime/src/runtime.rs | 3 + .../wasmtime/src/runtime/externals/global.rs | 22 +++- .../wasmtime/src/runtime/externals/table.rs | 11 +- crates/wasmtime/src/runtime/func.rs | 55 ++++---- crates/wasmtime/src/runtime/func/typed.rs | 19 +-- crates/wasmtime/src/runtime/module.rs | 2 + .../wasmtime/src/runtime/module/registry.rs | 5 +- crates/wasmtime/src/runtime/ref.rs | 118 ++---------------- crates/wasmtime/src/runtime/ref/gc_ref.rs | 107 ++++++++++++++++ crates/wasmtime/src/runtime/ref/no_gc_ref.rs | 10 ++ crates/wasmtime/src/runtime/store.rs | 41 ++++-- .../wasmtime/src/runtime/trampoline/global.rs | 14 ++- crates/wasmtime/src/runtime/uninhabited.rs | 5 + crates/wasmtime/src/runtime/values.rs | 29 ++++- 27 files changed, 597 insertions(+), 212 deletions(-) create mode 100644 crates/wasmtime/src/runtime/ref/gc_ref.rs create mode 100644 crates/wasmtime/src/runtime/ref/no_gc_ref.rs create mode 100644 crates/wasmtime/src/runtime/uninhabited.rs diff --git a/crates/cranelift/Cargo.toml b/crates/cranelift/Cargo.toml index 
cde0d4e711f5..2c5ce4845c32 100644 --- a/crates/cranelift/Cargo.toml +++ b/crates/cranelift/Cargo.toml @@ -38,3 +38,4 @@ host-arch = ["cranelift-codegen/host-arch"] component-model = ["wasmtime-environ/component-model"] incremental-cache = ["cranelift-codegen/incremental-cache"] wmemcheck = [] +gc = ["wasmtime-environ/gc"] diff --git a/crates/cranelift/src/func_environ.rs b/crates/cranelift/src/func_environ.rs index 51bca04adb0e..57b965c3a15e 100644 --- a/crates/cranelift/src/func_environ.rs +++ b/crates/cranelift/src/func_environ.rs @@ -34,26 +34,41 @@ macro_rules! declare_function_signatures { ) => { /// A struct with an `Option` member for every builtin /// function, to de-duplicate constructing/getting its signature. + #[allow(unused_doc_comments)] struct BuiltinFunctionSignatures { pointer_type: ir::Type, + + #[cfg(feature = "gc")] reference_type: ir::Type, + call_conv: isa::CallConv, + $( + $( #[$attr] )* $name: Option, )* } + #[allow(unused_doc_comments)] impl BuiltinFunctionSignatures { fn new( pointer_type: ir::Type, reference_type: ir::Type, call_conv: isa::CallConv, ) -> Self { + #[cfg(not(feature = "gc"))] + let _ = reference_type; + Self { pointer_type, + + #[cfg(feature = "gc")] reference_type, + call_conv, + $( + $( #[$attr] )* $name: None, )* } @@ -63,6 +78,7 @@ macro_rules! declare_function_signatures { AbiParam::special(self.pointer_type, ArgumentPurpose::VMContext) } + #[cfg(feature = "gc")] fn reference(&self) -> AbiParam { AbiParam::new(self.reference_type) } @@ -89,6 +105,7 @@ macro_rules! declare_function_signatures { } $( + $( #[$attr] )* fn $name(&mut self, func: &mut Function) -> ir::SigRef { let sig = self.$name.unwrap_or_else(|| { func.import_signature(Signature { @@ -336,6 +353,7 @@ impl<'module_environment> FuncEnvironment<'module_environment> { /// reference count. /// /// The new reference count is returned. 
+ #[cfg(feature = "gc")] fn mutate_externref_ref_count( &mut self, builder: &mut FunctionBuilder, @@ -1324,11 +1342,19 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m self.builtin_function_signatures .table_grow_func_ref(&mut pos.func), ), + #[cfg(feature = "gc")] WasmHeapType::Extern => ( BuiltinFunctionIndex::table_grow_externref(), self.builtin_function_signatures .table_grow_externref(&mut pos.func), ), + #[cfg(not(feature = "gc"))] + WasmHeapType::Extern => { + return Err(cranelift_wasm::wasm_unsupported!( + "support for `externref` disabled at compile time because \ + the `gc` cargo feature was not enabled", + )) + } }; let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx); @@ -1350,8 +1376,6 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m table: ir::Table, index: ir::Value, ) -> WasmResult { - let pointer_type = self.pointer_type(); - let plan = &self.module.table_plans[table_index]; match plan.table.wasm_ty.heap_type { WasmHeapType::Func | WasmHeapType::Concrete(_) | WasmHeapType::NoFunc => match plan @@ -1361,6 +1385,7 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m Ok(self.get_or_init_func_ref_table_elem(builder, table_index, table, index)) } }, + #[cfg(feature = "gc")] WasmHeapType::Extern => { // Our read barrier for `externref` tables is roughly equivalent // to the following pseudocode: @@ -1382,6 +1407,7 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m // onto the stack are safely held alive by the // `VMExternRefActivationsTable`. 
+ let pointer_type = self.pointer_type(); let reference_type = self.reference_type(WasmHeapType::Extern); builder.ensure_inserted_block(); @@ -1477,6 +1503,13 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m Ok(elem) } + #[cfg(not(feature = "gc"))] + WasmHeapType::Extern => { + return Err(cranelift_wasm::wasm_unsupported!( + "support for `externref` disabled at compile time because the \ + `gc` cargo feature was not enabled", + )) + } } } @@ -1510,6 +1543,7 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m } }, + #[cfg(feature = "gc")] WasmHeapType::Extern => { // Our write barrier for `externref`s being copied out of the // stack and into a table is roughly equivalent to the following @@ -1640,6 +1674,14 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m Ok(()) } + + #[cfg(not(feature = "gc"))] + WasmHeapType::Extern => { + return Err(cranelift_wasm::wasm_unsupported!( + "support for `externref` disabled at compile time because the \ + `gc` cargo feature was not enabled", + )) + } } } @@ -1658,11 +1700,19 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m self.builtin_function_signatures .table_fill_func_ref(&mut pos.func), ), + #[cfg(feature = "gc")] WasmHeapType::Extern => ( BuiltinFunctionIndex::table_fill_externref(), self.builtin_function_signatures .table_fill_externref(&mut pos.func), ), + #[cfg(not(feature = "gc"))] + WasmHeapType::Extern => { + return Err(cranelift_wasm::wasm_unsupported!( + "support for `externref` disabled at compile time because the \ + `gc` cargo feature was not enabled", + )); + } }; let (vmctx, builtin_addr) = @@ -1727,6 +1777,7 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m Ok(pos.func.dfg.first_result(call_inst)) } + #[cfg(feature = "gc")] fn translate_custom_global_get( &mut self, mut pos: cranelift_codegen::cursor::FuncCursor<'_>, @@ -1754,6 +1805,24 @@ 
impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m Ok(pos.func.dfg.first_result(call_inst)) } + #[cfg(not(feature = "gc"))] + fn translate_custom_global_get( + &mut self, + _pos: FuncCursor, + index: GlobalIndex, + ) -> WasmResult { + debug_assert_eq!( + self.module.globals[index].wasm_ty, + WasmValType::Ref(WasmRefType::EXTERNREF), + "We only use GlobalVariable::Custom for externref" + ); + Err(cranelift_wasm::wasm_unsupported!( + "support for `externref` disabled at compile time because the \ + `gc` cargo feature was not enabled", + )) + } + + #[cfg(feature = "gc")] fn translate_custom_global_set( &mut self, mut pos: cranelift_codegen::cursor::FuncCursor<'_>, @@ -1781,6 +1850,24 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m Ok(()) } + #[cfg(not(feature = "gc"))] + fn translate_custom_global_set( + &mut self, + _pos: FuncCursor, + index: GlobalIndex, + _value: ir::Value, + ) -> WasmResult<()> { + debug_assert_eq!( + self.module.globals[index].wasm_ty, + WasmValType::Ref(WasmRefType::EXTERNREF), + "We only use GlobalVariable::Custom for externref" + ); + Err(cranelift_wasm::wasm_unsupported!( + "support for `externref` disabled at compile time because the \ + `gc` cargo feature was not enabled", + )) + } + fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult { let pointer_type = self.pointer_type(); let is_shared = self.module.memory_plans[index].memory.shared; diff --git a/crates/environ/Cargo.toml b/crates/environ/Cargo.toml index cb471956bb38..50093112220b 100644 --- a/crates/environ/Cargo.toml +++ b/crates/environ/Cargo.toml @@ -49,3 +49,4 @@ component-model = [ "dep:wasmtime-component-util", ] demangle = ['dep:rustc-demangle', 'dep:cpp_demangle'] +gc = [] diff --git a/crates/environ/src/builtin.rs b/crates/environ/src/builtin.rs index 07a5a03ab5dd..94abe93770db 100644 --- a/crates/environ/src/builtin.rs +++ b/crates/environ/src/builtin.rs @@ -26,21 +26,8 @@ 
macro_rules! foreach_builtin_function { table_get_lazy_init_func_ref(vmctx: vmctx, table: i32, index: i32) -> pointer; /// Returns an index for Wasm's `table.grow` instruction for `funcref`s. table_grow_func_ref(vmctx: vmctx, table: i32, delta: i32, init: pointer) -> i32; - /// Returns an index for Wasm's `table.grow` instruction for `externref`s. - table_grow_externref(vmctx: vmctx, table: i32, delta: i32, init: reference) -> i32; - /// Returns an index for Wasm's `table.fill` instruction for `externref`s. - table_fill_externref(vmctx: vmctx, table: i32, dst: i32, val: reference, len: i32); /// Returns an index for Wasm's `table.fill` instruction for `funcref`s. table_fill_func_ref(vmctx: vmctx, table: i32, dst: i32, val: pointer, len: i32); - /// Returns an index to drop a `VMExternRef`. - drop_externref(vmctx: vmctx, val: pointer); - /// Returns an index to do a GC and then insert a `VMExternRef` into the - /// `VMExternRefActivationsTable`. - activations_table_insert_with_gc(vmctx: vmctx, val: reference); - /// Returns an index for Wasm's `global.get` instruction for `externref`s. - externref_global_get(vmctx: vmctx, global: i32) -> reference; - /// Returns an index for Wasm's `global.get` instruction for `externref`s. - externref_global_set(vmctx: vmctx, global: i32, val: reference); /// Returns an index for wasm's `memory.atomic.notify` instruction. memory_atomic_notify(vmctx: vmctx, memory: i32, addr: i64, count: i32) -> i32; /// Returns an index for wasm's `memory.atomic.wait32` instruction. @@ -67,6 +54,31 @@ macro_rules! foreach_builtin_function { update_stack_pointer(vmctx: vmctx, value: i32); /// Invoked before memory.grow is called. update_mem_size(vmctx: vmctx, num_bytes: i32); + + /// Returns an index to drop a `VMExternRef`. + #[cfg(feature = "gc")] + drop_externref(vmctx: vmctx, val: pointer); + + /// Returns an index to do a GC and then insert a `VMExternRef` into the + /// `VMExternRefActivationsTable`. 
+ #[cfg(feature = "gc")] + activations_table_insert_with_gc(vmctx: vmctx, val: reference); + + /// Returns an index for Wasm's `global.get` instruction for `externref`s. + #[cfg(feature = "gc")] + externref_global_get(vmctx: vmctx, global: i32) -> reference; + + /// Returns an index for Wasm's `global.set` instruction for `externref`s. + #[cfg(feature = "gc")] + externref_global_set(vmctx: vmctx, global: i32, val: reference); + + /// Returns an index for Wasm's `table.grow` instruction for `externref`s. + #[cfg(feature = "gc")] + table_grow_externref(vmctx: vmctx, table: i32, delta: i32, init: reference) -> i32; + + /// Returns an index for Wasm's `table.fill` instruction for `externref`s. + #[cfg(feature = "gc")] + table_fill_externref(vmctx: vmctx, table: i32, dst: i32, val: reference, len: i32); } }; } diff --git a/crates/runtime/Cargo.toml b/crates/runtime/Cargo.toml index 3738335b4035..364668577115 100644 --- a/crates/runtime/Cargo.toml +++ b/crates/runtime/Cargo.toml @@ -70,3 +70,4 @@ pooling-allocator = [] component-model = ["wasmtime-environ/component-model", "dep:encoding_rs"] wmemcheck = [] debug-builtins = ['wasmtime-jit-debug'] +gc = ["wasmtime-environ/gc"] diff --git a/crates/runtime/src/instance.rs b/crates/runtime/src/instance.rs index 54986e0f3db2..1fe54bd717b5 100644 --- a/crates/runtime/src/instance.rs +++ b/crates/runtime/src/instance.rs @@ -3,6 +3,7 @@ //! `InstanceHandle` is a reference-counting handle for an `Instance`. use crate::export::Export; +#[cfg(feature = "gc")] use crate::externref::VMExternRefActivationsTable; use crate::memory::{Memory, RuntimeMemoryCreator}; use crate::table::{Table, TableElement, TableElementType}; @@ -438,6 +439,7 @@ impl Instance { } /// Return a pointer to the `VMExternRefActivationsTable`.
+ #[cfg(feature = "gc")] pub fn externref_activations_table(&mut self) -> *mut *mut VMExternRefActivationsTable { unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_externref_activations_table()) } } @@ -465,7 +467,11 @@ impl Instance { *self.vmctx_plus_offset_mut(self.offsets().vmctx_store()) = store; *self.runtime_limits() = (*store).vmruntime_limits(); *self.epoch_ptr() = (*store).epoch_ptr(); - *self.externref_activations_table() = (*store).externref_activations_table().0; + + #[cfg(feature = "gc")] + { + *self.externref_activations_table() = (*store).externref_activations_table().0; + } } else { assert_eq!( mem::size_of::<*mut dyn Store>(), @@ -476,7 +482,11 @@ impl Instance { *self.runtime_limits() = ptr::null_mut(); *self.epoch_ptr() = ptr::null_mut(); - *self.externref_activations_table() = ptr::null_mut(); + + #[cfg(feature = "gc")] + { + *self.externref_activations_table() = ptr::null_mut(); + } } } @@ -842,6 +852,7 @@ impl Instance { )?; } + #[cfg(feature = "gc")] TableElementType::Extern => { debug_assert!(elements.iter().all(|e| *e == FuncIndex::reserved_value())); table.fill(dst, TableElement::ExternRef(None), len)?; @@ -1235,10 +1246,18 @@ impl Instance { // count as values move between globals, everything else is just // copy-able bits. match wasm_ty { + #[cfg(feature = "gc")] WasmValType::Ref(WasmRefType { heap_type: WasmHeapType::Extern, .. }) => *(*to).as_externref_mut() = from.as_externref().clone(), + + #[cfg(not(feature = "gc"))] + WasmValType::Ref(WasmRefType { + heap_type: WasmHeapType::Extern, + .. + }) => unreachable!(), + _ => ptr::copy_nonoverlapping(from, to, 1), } } @@ -1283,15 +1302,24 @@ impl Drop for Instance { }; match global.wasm_ty { // For now only externref globals need to get destroyed + #[cfg(feature = "gc")] WasmValType::Ref(WasmRefType { heap_type: WasmHeapType::Extern, .. 
- }) => {} + }) => unsafe { + drop((*self.global_ptr(idx)).as_externref_mut().take()); + }, + + #[cfg(not(feature = "gc"))] + WasmValType::Ref(WasmRefType { + heap_type: WasmHeapType::Extern, + .. + }) => unreachable!( + "global {idx:?} is an externref but the `gc` cargo feature is disabled" + ), + _ => continue, } - unsafe { - drop((*self.global_ptr(idx)).as_externref_mut().take()); - } } } } diff --git a/crates/runtime/src/lib.rs b/crates/runtime/src/lib.rs index 46ec141ff7ed..e28dd2632a94 100644 --- a/crates/runtime/src/lib.rs +++ b/crates/runtime/src/lib.rs @@ -14,6 +14,7 @@ mod arch; #[cfg(feature = "component-model")] pub mod component; mod export; +#[cfg(feature = "gc")] mod externref; mod imports; mod instance; @@ -38,6 +39,7 @@ pub use wasmtime_jit_debug::gdb_jit_int::GdbJitImageRegistration; pub use crate::arch::{get_stack_pointer, V128Abi}; pub use crate::export::*; +#[cfg(feature = "gc")] pub use crate::externref::*; pub use crate::imports::Imports; pub use crate::instance::{ @@ -106,6 +108,7 @@ pub unsafe trait Store { /// The first element returned is the table in which externrefs are stored /// throughout wasm execution, and the second element is how to look up /// module information for gc requests. + #[cfg(feature = "gc")] fn externref_activations_table( &mut self, ) -> (&mut VMExternRefActivationsTable, &dyn ModuleInfoLookup); diff --git a/crates/runtime/src/libcalls.rs b/crates/runtime/src/libcalls.rs index 539a1b3f5f31..e397a1e3f43b 100644 --- a/crates/runtime/src/libcalls.rs +++ b/crates/runtime/src/libcalls.rs @@ -54,7 +54,9 @@ //! } //! 
``` +#[cfg(feature = "gc")] use crate::externref::VMExternRef; + use crate::table::{Table, TableElementType}; use crate::vmcontext::VMFuncRef; use crate::{Instance, TrapReason}; @@ -62,12 +64,8 @@ use crate::{Instance, TrapReason}; use anyhow::bail; use anyhow::Result; use cfg_if::cfg_if; -use std::mem; -use std::ptr::{self, NonNull}; use std::time::{Duration, Instant}; -use wasmtime_environ::{ - DataIndex, ElemIndex, FuncIndex, GlobalIndex, MemoryIndex, TableIndex, Trap, Unsigned, -}; +use wasmtime_environ::{DataIndex, ElemIndex, FuncIndex, MemoryIndex, TableIndex, Trap, Unsigned}; #[cfg(feature = "wmemcheck")] use wasmtime_wmemcheck::AccessError::{ DoubleMalloc, InvalidFree, InvalidRead, InvalidWrite, OutOfBounds, @@ -80,6 +78,10 @@ use wasmtime_wmemcheck::AccessError::{ /// now to ensure that the fp/sp on exit are recorded for backtraces to work /// properly. pub mod trampolines { + // Allow these things because of the macro and how we can't differentiate + // between doc comments and `cfg`s. + #![allow(unused_doc_comments, unused_attributes)] + use crate::arch::wasm_to_libcall_trampoline; use crate::{Instance, TrapReason, VMContext}; @@ -96,6 +98,7 @@ pub mod trampolines { // supported platforms or otherwise in inline assembly for // platforms like s390x which don't have stable `global_asm!` // yet. + $( #[$attr] )* extern "C" { #[allow(missing_docs)] #[allow(improper_ctypes)] @@ -106,6 +109,7 @@ pub mod trampolines { ) $(-> libcall!(@ty $result))?; } + $( #[ $attr ] )* wasm_to_libcall_trampoline!($name ; []); // This is the direct entrypoint from the inline assembly which @@ -119,6 +123,7 @@ pub mod trampolines { // like s390x need to use outlined assembly files which requires // `no_mangle`. #[cfg_attr(target_arch = "s390x", wasmtime_versioned_export_macros::versioned_export)] + $( #[ $attr ] )* unsafe extern "C" fn []( vmctx: *mut VMContext, $( $pname : libcall!(@ty $param), )* @@ -139,6 +144,7 @@ pub mod trampolines { // in a linking failure. 
#[allow(non_upper_case_globals)] #[used] + $( #[ $attr ] )* static []: unsafe extern "C" fn( *mut VMContext, $( $pname : libcall!(@ty $param), )* @@ -221,8 +227,11 @@ unsafe fn table_grow( init_value: *mut u8, ) -> Result { let table_index = TableIndex::from_u32(table_index); + let element = match instance.table_element_type(table_index) { TableElementType::Func => (init_value as *mut VMFuncRef).into(), + + #[cfg(feature = "gc")] TableElementType::Extern => { let init_value = if init_value.is_null() { None @@ -232,6 +241,7 @@ unsafe fn table_grow( init_value.into() } }; + Ok(match instance.table_grow(table_index, delta, element)? { Some(r) => r, None => (-1_i32).unsigned(), @@ -239,6 +249,8 @@ unsafe fn table_grow( } use table_grow as table_grow_func_ref; + +#[cfg(feature = "gc")] use table_grow as table_grow_externref; // Implementation of `table.fill`. @@ -258,6 +270,8 @@ unsafe fn table_fill( let val = val as *mut VMFuncRef; table.fill(dst, val.into(), len) } + + #[cfg(feature = "gc")] TableElementType::Extern => { let val = if val.is_null() { None @@ -270,6 +284,8 @@ unsafe fn table_fill( } use table_fill as table_fill_func_ref; + +#[cfg(feature = "gc")] use table_fill as table_fill_externref; // Implementation of `table.copy`. @@ -380,14 +396,16 @@ unsafe fn table_get_lazy_init_func_ref( } // Drop a `VMExternRef`. +#[cfg(feature = "gc")] unsafe fn drop_externref(_instance: &mut Instance, externref: *mut u8) { let externref = externref as *mut crate::externref::VMExternData; - let externref = NonNull::new(externref).unwrap().into(); + let externref = std::ptr::NonNull::new(externref).unwrap().into(); crate::externref::VMExternData::drop_and_dealloc(externref); } // Do a GC and insert the given `externref` into the // `VMExternRefActivationsTable`. 
+#[cfg(feature = "gc")] unsafe fn activations_table_insert_with_gc(instance: &mut Instance, externref: *mut u8) { let externref = VMExternRef::clone_from_raw(externref); let limits = *instance.runtime_limits(); @@ -406,12 +424,13 @@ unsafe fn activations_table_insert_with_gc(instance: &mut Instance, externref: * } // Perform a Wasm `global.get` for `externref` globals. +#[cfg(feature = "gc")] unsafe fn externref_global_get(instance: &mut Instance, index: u32) -> *mut u8 { - let index = GlobalIndex::from_u32(index); + let index = wasmtime_environ::GlobalIndex::from_u32(index); let limits = *instance.runtime_limits(); let global = instance.defined_or_imported_global_ptr(index); match (*global).as_externref().clone() { - None => ptr::null_mut(), + None => std::ptr::null_mut(), Some(externref) => { let raw = externref.as_raw(); let (activations_table, module_info_lookup) = @@ -423,6 +442,7 @@ unsafe fn externref_global_get(instance: &mut Instance, index: u32) -> *mut u8 { } // Perform a Wasm `global.set` for `externref` globals. +#[cfg(feature = "gc")] unsafe fn externref_global_set(instance: &mut Instance, index: u32, externref: *mut u8) { let externref = if externref.is_null() { None @@ -430,14 +450,14 @@ unsafe fn externref_global_set(instance: &mut Instance, index: u32, externref: * Some(VMExternRef::clone_from_raw(externref)) }; - let index = GlobalIndex::from_u32(index); + let index = wasmtime_environ::GlobalIndex::from_u32(index); let global = instance.defined_or_imported_global_ptr(index); // Swap the new `externref` value into the global before we drop the old // value. This protects against an `externref` with a `Drop` implementation // that calls back into Wasm and touches this global again (we want to avoid // it observing a halfway-deinitialized value). 
- let old = mem::replace((*global).as_externref_mut(), externref); + let old = std::mem::replace((*global).as_externref_mut(), externref); drop(old); } diff --git a/crates/runtime/src/table.rs b/crates/runtime/src/table.rs index 5740b94d22b5..94224b529ce1 100644 --- a/crates/runtime/src/table.rs +++ b/crates/runtime/src/table.rs @@ -2,8 +2,10 @@ //! //! `Table` is to WebAssembly tables what `LinearMemory` is to WebAssembly linear memories. +#[cfg(feature = "gc")] +use crate::externref::VMExternRef; use crate::vmcontext::{VMFuncRef, VMTableDefinition}; -use crate::{SendSyncPtr, Store, VMExternRef}; +use crate::{SendSyncPtr, Store}; use anyhow::{bail, format_err, Error, Result}; use sptr::Strict; use std::ops::Range; @@ -19,8 +21,11 @@ use wasmtime_environ::{ pub enum TableElement { /// A `funcref`. FuncRef(*mut VMFuncRef), + /// An `externref`. + #[cfg(feature = "gc")] ExternRef(Option), + /// An uninitialized funcref value. This should never be exposed /// beyond the `wasmtime` crate boundary; the upper-level code /// (which has access to the info needed for lazy initialization) @@ -31,12 +36,16 @@ #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum TableElementType { Func, + + #[cfg(feature = "gc")] Extern, } // The usage of `*mut VMFuncRef` is safe w.r.t. thread safety, this // just relies on thread-safety of `VMExternRef` itself.
+#[cfg(feature = "gc")] unsafe impl Send for TableElement where VMExternRef: Send {} +#[cfg(feature = "gc")] unsafe impl Sync for TableElement where VMExternRef: Sync {} impl TableElement { @@ -55,7 +64,9 @@ impl TableElement { let masked = Strict::map_addr(ptr, |a| a & FUNCREF_MASK); Self::FuncRef(masked.cast()) } + #[cfg(feature = "gc")] (TableElementType::Extern, None) => Self::ExternRef(None), + #[cfg(feature = "gc")] (TableElementType::Extern, Some(ptr)) => { Self::ExternRef(Some(VMExternRef::from_raw(ptr.as_ptr()))) } @@ -72,6 +83,7 @@ impl TableElement { // Functions have no ownership, so defer to the prior method. TableElementType::Func => TableElement::from_table_value(ty, ptr), + #[cfg(feature = "gc")] TableElementType::Extern => { Self::ExternRef(ptr.map(|p| VMExternRef::clone_from_raw(p.as_ptr()))) } @@ -94,6 +106,7 @@ impl TableElement { let tagged = Strict::map_addr(e, |e| e | FUNCREF_INIT_BIT); Some(NonNull::new(tagged.cast()).unwrap().into()) } + #[cfg(feature = "gc")] Self::ExternRef(e) => e.map(|e| NonNull::new(e.into_raw()).unwrap().into()), } } @@ -112,8 +125,10 @@ impl TableElement { pub(crate) unsafe fn into_ref_asserting_initialized(self) -> *mut u8 { match self { Self::FuncRef(e) => e.cast(), - Self::ExternRef(e) => e.map_or(ptr::null_mut(), |e| e.into_raw()), Self::UninitFunc => panic!("Uninitialized table element value outside of table slot"), + + #[cfg(feature = "gc")] + Self::ExternRef(e) => e.map_or(ptr::null_mut(), |e| e.into_raw()), } } @@ -133,12 +148,14 @@ impl From<*mut VMFuncRef> for TableElement { } } +#[cfg(feature = "gc")] impl From> for TableElement { fn from(x: Option) -> TableElement { TableElement::ExternRef(x) } } +#[cfg(feature = "gc")] impl From for TableElement { fn from(x: VMExternRef) -> TableElement { TableElement::ExternRef(Some(x)) @@ -178,7 +195,12 @@ fn wasm_to_table_type(ty: WasmRefType) -> TableElementType { WasmHeapType::Func | WasmHeapType::Concrete(_) | WasmHeapType::NoFunc => { TableElementType::Func } + + 
#[cfg(feature = "gc")] WasmHeapType::Extern => TableElementType::Extern, + + #[cfg(not(feature = "gc"))] + WasmHeapType::Extern => unreachable!(), } } @@ -490,7 +512,10 @@ impl Table { fn type_matches(&self, val: &TableElement) -> bool { match (&val, self.element_type()) { (TableElement::FuncRef(_), TableElementType::Func) => true, + + #[cfg(feature = "gc")] (TableElement::ExternRef(_), TableElementType::Extern) => true, + _ => false, } } @@ -536,6 +561,8 @@ impl Table { dst_table.elements_mut()[dst_range] .copy_from_slice(&src_table.elements()[src_range]); } + + #[cfg(feature = "gc")] TableElementType::Extern => { // We need to clone each `externref` let dst = dst_table.elements_mut(); @@ -556,6 +583,8 @@ impl Table { // `funcref` are `Copy`, so just do a memmove dst.copy_within(src_range, dst_range.start); } + + #[cfg(feature = "gc")] TableElementType::Extern => { // We need to clone each `externref` while handling overlapping // ranges @@ -579,7 +608,11 @@ impl Drop for Table { fn drop(&mut self) { let ty = self.element_type(); - // funcref tables can skip this + // `funcref` tables don't need drops. + // + // This is an irrefutable pattern when the `gc` cargo feature is not + // enabled. 
+ #[allow(irrefutable_let_patterns)] if let TableElementType::Func = ty { return; } diff --git a/crates/runtime/src/vmcontext.rs b/crates/runtime/src/vmcontext.rs index 05235418bee1..3eb1244b519d 100644 --- a/crates/runtime/src/vmcontext.rs +++ b/crates/runtime/src/vmcontext.rs @@ -3,7 +3,9 @@ mod vm_host_func_context; +#[cfg(feature = "gc")] use crate::externref::VMExternRef; + use sptr::Strict; use std::cell::UnsafeCell; use std::ffi::c_void; @@ -390,7 +392,6 @@ pub struct VMGlobalDefinition { #[cfg(test)] mod test_vmglobal_definition { use super::VMGlobalDefinition; - use crate::externref::VMExternRef; use std::mem::{align_of, size_of}; use wasmtime_environ::{Module, PtrSize, VMOffsets}; @@ -421,7 +422,9 @@ mod test_vmglobal_definition { } #[test] + #[cfg(feature = "gc")] fn check_vmglobal_can_contain_externref() { + use crate::externref::VMExternRef; assert!(size_of::() <= size_of::()); } } @@ -533,11 +536,13 @@ impl VMGlobalDefinition { } /// Return a reference to the value as an externref. + #[cfg(feature = "gc")] pub unsafe fn as_externref(&self) -> &Option { &*(self.storage.as_ref().as_ptr().cast::>()) } /// Return a mutable reference to the value as an externref. + #[cfg(feature = "gc")] pub unsafe fn as_externref_mut(&mut self) -> &mut Option { &mut *(self .storage @@ -707,6 +712,7 @@ macro_rules! define_builtin_array { #[repr(C)] pub struct VMBuiltinFunctionsArray { $( + $( #[ $attr ] )* $name: unsafe extern "C" fn( $(define_builtin_array!(@ty $param)),* ) $( -> define_builtin_array!(@ty $result))?, @@ -714,8 +720,12 @@ macro_rules! define_builtin_array { } impl VMBuiltinFunctionsArray { + #[allow(unused_doc_comments)] pub const INIT: VMBuiltinFunctionsArray = VMBuiltinFunctionsArray { - $($name: crate::libcalls::trampolines::$name,)* + $( + $( #[ $attr ] )* + $name: crate::libcalls::trampolines::$name, + )* }; } }; @@ -1025,6 +1035,7 @@ pub union ValRaw { /// carefully calling the correct functions throughout the runtime. 
/// /// This value is always stored in a little-endian format. + #[cfg(feature = "gc")] externref: *mut c_void, } @@ -1097,8 +1108,16 @@ impl ValRaw { /// Creates a WebAssembly `externref` value #[inline] pub fn externref(i: *mut c_void) -> ValRaw { - ValRaw { - externref: Strict::map_addr(i, |i| i.to_le()), + #[cfg(feature = "gc")] + { + return ValRaw { + externref: Strict::map_addr(i, |i| i.to_le()), + }; + } + #[cfg(not(feature = "gc"))] + { + assert!(i.is_null()); + return ValRaw::funcref(i); } } @@ -1153,7 +1172,15 @@ impl ValRaw { /// Gets the WebAssembly `externref` value #[inline] pub fn get_externref(&self) -> *mut c_void { - unsafe { Strict::map_addr(self.externref, |i| usize::from_le(i)) } + #[cfg(feature = "gc")] + unsafe { + return Strict::map_addr(self.externref, |i| usize::from_le(i)); + } + #[cfg(not(feature = "gc"))] + { + assert!(self.get_funcref().is_null()); + return std::ptr::null_mut(); + } } } diff --git a/crates/wasmtime/Cargo.toml b/crates/wasmtime/Cargo.toml index 2b8ef78d56c3..d923863faa6a 100644 --- a/crates/wasmtime/Cargo.toml +++ b/crates/wasmtime/Cargo.toml @@ -86,6 +86,7 @@ wasi-common = { path = "../wasi-common", default-features = true } default = [ 'async', 'cache', + 'gc', 'wat', 'profiling', 'parallel-compilation', @@ -172,3 +173,19 @@ debug-builtins = ["wasmtime-runtime?/debug-builtins"] # Enable support for executing compiled Wasm modules. runtime = ["dep:wasmtime-runtime", "dep:wasmtime-jit-icache-coherence", "dep:wasmtime-slab"] + +# Enable support for garbage collection-related things. 
+# +# This Cargo feature is required to compile or run Wasm that uses any of the +# following Wasm proposals: +# +# * Reference types: https://github.com/WebAssembly/reference-types/ +# * Typed function references: https://github.com/WebAssembly/function-references/ +# * Garbage collection: https://github.com/WebAssembly/gc +# +# When a compiler Cargo feature (`cranelift` or `winch`) is enabled, this +# feature gates the ability to compile Wasm that uses those proposals. +# +# When the `runtime` Cargo feature is enabled, this feature gates the ability to +# load and run Wasm that uses those proposals. +gc = ["wasmtime-environ/gc", "wasmtime-runtime?/gc", "wasmtime-cranelift?/gc"] diff --git a/crates/wasmtime/src/config.rs b/crates/wasmtime/src/config.rs index 38630040667f..629a1cf35355 100644 --- a/crates/wasmtime/src/config.rs +++ b/crates/wasmtime/src/config.rs @@ -252,6 +252,7 @@ impl Config { ret.cranelift_opt_level(OptLevel::Speed); } + #[cfg(feature = "gc")] ret.wasm_reference_types(true); ret.wasm_multi_value(true); ret.wasm_bulk_memory(true); @@ -721,7 +722,9 @@ impl Config { /// /// Note that the reference types proposal depends on the bulk memory proposal. /// - /// This feature is `true` by default. + /// This method requires the `gc` Cargo feature to be enabled. + /// + /// This feature is `true` by default, when the `gc` Cargo feature is enabled. /// /// # Errors /// @@ -729,6 +732,7 @@ impl Config { /// and thus may cause `Engine::new` fail if the `bulk_memory` feature is disabled. /// /// [proposal]: https://github.com/webassembly/reference-types + #[cfg(feature = "gc")] pub fn wasm_reference_types(&mut self, enable: bool) -> &mut Self { self.features.reference_types = enable; self @@ -744,9 +748,12 @@ impl Config { /// Note that the function references proposal depends on the reference /// types proposal. /// + /// This method requires the `gc` Cargo feature to be enabled. + /// /// This feature is `false` by default. 
/// /// [proposal]: https://github.com/WebAssembly/function-references + #[cfg(feature = "gc")] pub fn wasm_function_references(&mut self, enable: bool) -> &mut Self { self.features.function_references = enable; self @@ -761,12 +768,15 @@ impl Config { /// Note that the function references proposal depends on the typed function /// references proposal. /// + /// This method requires the `gc` Cargo feature to be enabled. + /// /// This feature is `false` by default. /// /// **Warning: Wasmtime's implementation of the GC proposal is still in /// progress and generally not ready for primetime.** /// /// [proposal]: https://github.com/WebAssembly/gc + #[cfg(feature = "gc")] pub fn wasm_gc(&mut self, enable: bool) -> &mut Self { self.features.gc = enable; self diff --git a/crates/wasmtime/src/engine/serialization.rs b/crates/wasmtime/src/engine/serialization.rs index a26cf28cb95a..8c854f2e74ad 100644 --- a/crates/wasmtime/src/engine/serialization.rs +++ b/crates/wasmtime/src/engine/serialization.rs @@ -22,7 +22,7 @@ //! using wasmtime artifacts across versions. 
use crate::{Engine, ModuleVersionStrategy, Precompiled}; -use anyhow::{anyhow, bail, Context, Result}; +use anyhow::{anyhow, bail, ensure, Context, Result}; use object::write::{Object, StandardSegment}; use object::{File, FileFlags, Object as _, ObjectSection, SectionKind}; use serde_derive::{Deserialize, Serialize}; @@ -406,6 +406,27 @@ impl Metadata<'_> { Ok(()) } + fn check_cfg_bool( + cfg: bool, + cfg_str: &str, + found: bool, + expected: bool, + feature: &str, + ) -> Result<()> { + if cfg { + Self::check_bool(found, expected, feature) + } else { + assert!(!expected); + ensure!( + !found, + "Module was compiled with {feature} but support in the host \ + was disabled at compile time because the `{cfg_str}` Cargo \ + feature was not enabled", + ); + Ok(()) + } + } + fn check_features(&mut self, other: &wasmparser::WasmFeatures) -> Result<()> { let WasmFeatures { reference_types, @@ -424,11 +445,28 @@ impl Metadata<'_> { gc, } = self.features; - Self::check_bool( + Self::check_cfg_bool( + cfg!(feature = "gc"), + "gc", reference_types, other.reference_types, "WebAssembly reference types support", )?; + Self::check_cfg_bool( + cfg!(feature = "gc"), + "gc", + function_references, + other.function_references, + "WebAssembly function-references support", + )?; + Self::check_cfg_bool( + cfg!(feature = "gc"), + "gc", + gc, + other.gc, + "WebAssembly garbage collection support", + )?; + Self::check_bool( multi_value, other.multi_value, @@ -472,12 +510,6 @@ impl Metadata<'_> { other.relaxed_simd, "WebAssembly relaxed-simd support", )?; - Self::check_bool( - function_references, - other.function_references, - "WebAssembly function-references support", - )?; - Self::check_bool(gc, other.gc, "WebAssembly garbage collection support")?; Ok(()) } diff --git a/crates/wasmtime/src/runtime.rs b/crates/wasmtime/src/runtime.rs index aeab6ca5146e..db012948fbbb 100644 --- a/crates/wasmtime/src/runtime.rs +++ b/crates/wasmtime/src/runtime.rs @@ -21,6 +21,7 @@ pub(crate) mod trampoline; 
pub(crate) mod trap; pub(crate) mod type_registry; pub(crate) mod types; +pub(crate) mod uninhabited; pub(crate) mod v128; pub(crate) mod values; @@ -66,6 +67,8 @@ pub use types::*; pub use v128::V128; pub use values::*; +pub(crate) use uninhabited::*; + #[cfg(feature = "profiling")] pub use profiling::GuestProfiler; diff --git a/crates/wasmtime/src/runtime/externals/global.rs b/crates/wasmtime/src/runtime/externals/global.rs index c09b2124aef6..7c754c70f309 100644 --- a/crates/wasmtime/src/runtime/externals/global.rs +++ b/crates/wasmtime/src/runtime/externals/global.rs @@ -110,16 +110,25 @@ impl Global { ValType::V128 => Val::V128((*definition.as_u128()).into()), ValType::Ref(ref_ty) => { let reference = match ref_ty.heap_type() { + HeapType::Func | HeapType::Concrete(_) => { + Ref::Func(Func::from_raw(store, definition.as_func_ref().cast())) + } + + HeapType::NoFunc => Ref::Func(None), + + #[cfg(feature = "gc")] HeapType::Extern => Ref::Extern( definition .as_externref() .clone() .map(|inner| ExternRef { inner }), ), - HeapType::Func | HeapType::Concrete(_) => { - Ref::Func(Func::from_raw(store, definition.as_func_ref().cast())) + + #[cfg(not(feature = "gc"))] + HeapType::Extern => { + assert!(definition.as_func_ref().is_null()); + Ref::Extern(None) } - HeapType::NoFunc => Ref::Func(None), }; debug_assert!( ref_ty.is_nullable() || !reference.is_null(), @@ -162,12 +171,19 @@ impl Global { *definition.as_func_ref_mut() = f.map_or(ptr::null_mut(), |f| f.vm_func_ref(store).as_ptr().cast()); } + #[cfg(feature = "gc")] Val::ExternRef(e) => { // Take care to invoke the `Drop` implementation of the // existing `ExternRef` so that it doesn't leak. 
let old = mem::replace(definition.as_externref_mut(), e.map(|e| e.inner)); drop(old); } + #[cfg(not(feature = "gc"))] + Val::ExternRef(None) => { + assert!(definition.as_func_ref().is_null()); + } + #[cfg(not(feature = "gc"))] + Val::ExternRef(Some(e)) => match e._inner {}, } } Ok(()) diff --git a/crates/wasmtime/src/runtime/externals/table.rs b/crates/wasmtime/src/runtime/externals/table.rs index d3537dce6f1a..1f8d8addc701 100644 --- a/crates/wasmtime/src/runtime/externals/table.rs +++ b/crates/wasmtime/src/runtime/externals/table.rs @@ -154,14 +154,19 @@ impl Table { let func = Func::from_vm_func_ref(store, f); Some(func.into()) } + + runtime::TableElement::UninitFunc => { + unreachable!("lazy init above should have converted UninitFunc") + } + + #[cfg(feature = "gc")] runtime::TableElement::ExternRef(None) => Some(Ref::Extern(None)), + + #[cfg(feature = "gc")] runtime::TableElement::ExternRef(Some(x)) => { let x = ExternRef { inner: x }; Some(x.into()) } - runtime::TableElement::UninitFunc => { - unreachable!("lazy init above should have converted UninitFunc") - } } } } diff --git a/crates/wasmtime/src/runtime/func.rs b/crates/wasmtime/src/runtime/func.rs index a5cf6df63676..d3d788a25294 100644 --- a/crates/wasmtime/src/runtime/func.rs +++ b/crates/wasmtime/src/runtime/func.rs @@ -1,3 +1,4 @@ +use crate::runtime::Uninhabited; use crate::store::{StoreData, StoreOpaque, Stored}; use crate::type_registry::RegisteredType; use crate::{ @@ -78,9 +79,6 @@ pub struct NoFunc { _inner: Uninhabited, } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -enum Uninhabited {} - impl NoFunc { /// Get the null `(ref null nofunc)` (aka `nullfuncref`) reference. #[inline] @@ -1158,20 +1156,23 @@ impl Func { let values_vec_size = params.len().max(ty.results().len()); - // Whenever we pass `externref`s from host code to Wasm code, they - // go into the `VMExternRefActivationsTable`. But the table might be - // at capacity already, so check for that. 
If it is at capacity - // (unlikely) then do a GC to free up space. This is necessary - // because otherwise we would either keep filling up the bump chunk - // and making it larger and larger or we would always take the slow - // path when inserting references into the table. - if ty.as_wasm_func_type().externref_params_count() - > store - .0 - .externref_activations_table() - .bump_capacity_remaining() + #[cfg(feature = "gc")] { - store.gc(); + // Whenever we pass `externref`s from host code to Wasm code, they + // go into the `VMExternRefActivationsTable`. But the table might be + // at capacity already, so check for that. If it is at capacity + // (unlikely) then do a GC to free up space. This is necessary + // because otherwise we would either keep filling up the bump chunk + // and making it larger and larger or we would always take the slow + // path when inserting references into the table. + if ty.as_wasm_func_type().externref_params_count() + > store + .0 + .externref_activations_table() + .bump_capacity_remaining() + { + store.gc(); + } } // Store the argument values into `values_vec`. @@ -1307,15 +1308,18 @@ impl Func { let (params, results) = val_vec.split_at_mut(nparams); func(caller.sub_caller(), params, results)?; - // See the comment in `Func::call_impl`'s `write_params` function. - if ty.as_wasm_func_type().externref_returns_count() - > caller - .store - .0 - .externref_activations_table() - .bump_capacity_remaining() + #[cfg(feature = "gc")] { - caller.store.gc(); + // See the comment in `Func::call_impl`'s `write_params` function. + if ty.as_wasm_func_type().externref_returns_count() + > caller + .store + .0 + .externref_activations_table() + .bump_capacity_remaining() + { + caller.store.gc(); + } } // Unlike our arguments we need to dynamically check that the return @@ -2023,6 +2027,9 @@ impl Caller<'_, T> { /// Perform garbage collection of `ExternRef`s. /// /// Same as [`Store::gc`](crate::Store::gc). 
+ /// + /// This method is only available when the `gc` cargo feature is enabled. + #[cfg(feature = "gc")] pub fn gc(&mut self) { self.store.gc() } diff --git a/crates/wasmtime/src/runtime/func/typed.rs b/crates/wasmtime/src/runtime/func/typed.rs index d2832bea1d6d..0da88efe79af 100644 --- a/crates/wasmtime/src/runtime/func/typed.rs +++ b/crates/wasmtime/src/runtime/func/typed.rs @@ -156,14 +156,17 @@ where Self::debug_typecheck(store.0, func.as_ref().type_index); } - // See the comment in `Func::call_impl`'s `write_params` function. - if params.externrefs_count() - > store - .0 - .externref_activations_table() - .bump_capacity_remaining() + #[cfg(feature = "gc")] { - store.gc(); + // See the comment in `Func::call_impl`'s `write_params` function. + if params.externrefs_count() + > store + .0 + .externref_activations_table() + .bump_capacity_remaining() + { + store.gc(); + } } // Validate that all runtime values flowing into this store indeed @@ -423,6 +426,7 @@ floats! { f64/u64/get_f64 => F64 } +#[cfg(feature = "gc")] unsafe impl WasmTy for ExternRef { type Abi = NonNull; @@ -506,6 +510,7 @@ unsafe impl WasmTy for ExternRef { } } +#[cfg(feature = "gc")] unsafe impl WasmTy for Option { type Abi = *mut u8; diff --git a/crates/wasmtime/src/runtime/module.rs b/crates/wasmtime/src/runtime/module.rs index ca59e7070096..3eb53f6bac22 100644 --- a/crates/wasmtime/src/runtime/module.rs +++ b/crates/wasmtime/src/runtime/module.rs @@ -963,6 +963,7 @@ impl Module { self.inner.clone() } + #[cfg(feature = "gc")] pub(crate) fn module_info(&self) -> &dyn wasmtime_runtime::ModuleInfo { &*self.inner } @@ -1215,6 +1216,7 @@ impl wasmtime_runtime::ModuleRuntimeInfo for ModuleInner { } } +#[cfg(feature = "gc")] impl wasmtime_runtime::ModuleInfo for ModuleInner { fn lookup_stack_map(&self, pc: usize) -> Option<&wasmtime_environ::StackMap> { let text_offset = pc - self.module.text().as_ptr() as usize; diff --git a/crates/wasmtime/src/runtime/module/registry.rs 
b/crates/wasmtime/src/runtime/module/registry.rs index 1b6e286e3b92..21049bdacf48 100644 --- a/crates/wasmtime/src/runtime/module/registry.rs +++ b/crates/wasmtime/src/runtime/module/registry.rs @@ -11,7 +11,7 @@ use std::{ ptr::NonNull, sync::{Arc, RwLock}, }; -use wasmtime_runtime::{ModuleInfo, VMSharedTypeIndex, VMWasmCallFunction}; +use wasmtime_runtime::{VMSharedTypeIndex, VMWasmCallFunction}; /// Used for registering modules with a store. /// @@ -67,7 +67,8 @@ impl ModuleRegistry { } /// Fetches information about a registered module given a program counter value. - pub fn lookup_module_info(&self, pc: usize) -> Option<&dyn ModuleInfo> { + #[cfg(feature = "gc")] + pub fn lookup_module_info(&self, pc: usize) -> Option<&dyn wasmtime_runtime::ModuleInfo> { let (module, _) = self.module_and_offset(pc)?; Some(module.module_info()) } diff --git a/crates/wasmtime/src/runtime/ref.rs b/crates/wasmtime/src/runtime/ref.rs index e04cbea2a2cf..1c4ca1110ce5 100644 --- a/crates/wasmtime/src/runtime/ref.rs +++ b/crates/wasmtime/src/runtime/ref.rs @@ -1,109 +1,9 @@ -#![allow(missing_docs)] - -use crate::AsContextMut; -use std::any::Any; -use std::ffi::c_void; -use wasmtime_runtime::VMExternRef; - -/// Represents an opaque reference to any data within WebAssembly. -#[derive(Clone, Debug)] -#[repr(transparent)] -pub struct ExternRef { - pub(crate) inner: VMExternRef, -} - -impl ExternRef { - /// Creates a new instance of `ExternRef` wrapping the given value. - pub fn new(value: T) -> ExternRef - where - T: 'static + Any + Send + Sync, - { - let inner = VMExternRef::new(value); - ExternRef { inner } - } - - /// Get the underlying data for this `ExternRef`. - pub fn data(&self) -> &dyn Any { - &*self.inner - } - - /// Get the strong reference count for this `ExternRef`. - /// - /// Note that this loads the reference count with a `SeqCst` ordering to - /// synchronize with other threads. 
- pub fn strong_count(&self) -> usize { - self.inner.strong_count() - } - - /// Does this `ExternRef` point to the same inner value as `other`? - /// - /// This is *only* pointer equality, and does *not* run any inner value's - /// `Eq` implementation. - pub fn ptr_eq(&self, other: &ExternRef) -> bool { - VMExternRef::eq(&self.inner, &other.inner) - } - - /// Creates a new strongly-owned [`ExternRef`] from the raw value provided. - /// - /// This is intended to be used in conjunction with [`Func::new_unchecked`], - /// [`Func::call_unchecked`], and [`ValRaw`] with its `externref` field. - /// - /// This function assumes that `raw` is an externref value which is - /// currently rooted within the [`Store`]. - /// - /// # Unsafety - /// - /// This function is particularly `unsafe` because `raw` not only must be a - /// valid externref value produced prior by `to_raw` but it must also be - /// correctly rooted within the store. When arguments are provided to a - /// callback with [`Func::new_unchecked`], for example, or returned via - /// [`Func::call_unchecked`], if a GC is performed within the store then - /// floating externref values are not rooted and will be GC'd, meaning that - /// this function will no longer be safe to call with the values cleaned up. - /// This function must be invoked *before* possible GC operations can happen - /// (such as calling wasm). - /// - /// When in doubt try to not use this. Instead use the safe Rust APIs of - /// [`TypedFunc`] and friends. 
- /// - /// [`Func::call_unchecked`]: crate::Func::call_unchecked - /// [`Func::new_unchecked`]: crate::Func::new_unchecked - /// [`Store`]: crate::Store - /// [`TypedFunc`]: crate::TypedFunc - /// [`ValRaw`]: crate::ValRaw - pub unsafe fn from_raw(raw: *mut c_void) -> Option { - let raw = raw.cast::(); - if raw.is_null() { - None - } else { - Some(ExternRef { - inner: VMExternRef::clone_from_raw(raw), - }) - } - } - - /// Converts this [`ExternRef`] to a raw value suitable to store within a - /// [`ValRaw`]. - /// - /// # Unsafety - /// - /// Produces a raw value which is only safe to pass into a store if a GC - /// doesn't happen between when the value is produce and when it's passed - /// into the store. - /// - /// [`ValRaw`]: crate::ValRaw - pub unsafe fn to_raw(&self, mut store: impl AsContextMut) -> *mut c_void { - let externref_ptr = self.inner.as_raw(); - store - .as_context_mut() - .0 - .insert_vmexternref_without_gc(self.inner.clone()); - externref_ptr.cast() - } -} - -impl std::fmt::Pointer for ExternRef { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Pointer::fmt(&self.inner, f) - } -} +#[cfg(feature = "gc")] +mod gc_ref; +#[cfg(feature = "gc")] +pub use gc_ref::*; + +#[cfg(not(feature = "gc"))] +mod no_gc_ref; +#[cfg(not(feature = "gc"))] +pub use no_gc_ref::*; diff --git a/crates/wasmtime/src/runtime/ref/gc_ref.rs b/crates/wasmtime/src/runtime/ref/gc_ref.rs new file mode 100644 index 000000000000..5e0736ef956f --- /dev/null +++ b/crates/wasmtime/src/runtime/ref/gc_ref.rs @@ -0,0 +1,107 @@ +use crate::AsContextMut; +use std::any::Any; +use std::ffi::c_void; +use wasmtime_runtime::VMExternRef; + +/// Represents an opaque reference to any data within WebAssembly. +#[derive(Clone, Debug)] +#[repr(transparent)] +pub struct ExternRef { + pub(crate) inner: VMExternRef, +} + +impl ExternRef { + /// Creates a new instance of `ExternRef` wrapping the given value. 
+ pub fn new(value: T) -> ExternRef + where + T: 'static + Any + Send + Sync, + { + let inner = VMExternRef::new(value); + ExternRef { inner } + } + + /// Get the underlying data for this `ExternRef`. + pub fn data(&self) -> &dyn Any { + &*self.inner + } + + /// Get the strong reference count for this `ExternRef`. + /// + /// Note that this loads the reference count with a `SeqCst` ordering to + /// synchronize with other threads. + pub fn strong_count(&self) -> usize { + self.inner.strong_count() + } + + /// Does this `ExternRef` point to the same inner value as `other`? + /// + /// This is *only* pointer equality, and does *not* run any inner value's + /// `Eq` implementation. + pub fn ptr_eq(&self, other: &ExternRef) -> bool { + VMExternRef::eq(&self.inner, &other.inner) + } + + /// Creates a new strongly-owned [`ExternRef`] from the raw value provided. + /// + /// This is intended to be used in conjunction with [`Func::new_unchecked`], + /// [`Func::call_unchecked`], and [`ValRaw`] with its `externref` field. + /// + /// This function assumes that `raw` is an externref value which is + /// currently rooted within the [`Store`]. + /// + /// # Unsafety + /// + /// This function is particularly `unsafe` because `raw` not only must be a + /// valid externref value produced prior by `to_raw` but it must also be + /// correctly rooted within the store. When arguments are provided to a + /// callback with [`Func::new_unchecked`], for example, or returned via + /// [`Func::call_unchecked`], if a GC is performed within the store then + /// floating externref values are not rooted and will be GC'd, meaning that + /// this function will no longer be safe to call with the values cleaned up. + /// This function must be invoked *before* possible GC operations can happen + /// (such as calling wasm). + /// + /// When in doubt try to not use this. Instead use the safe Rust APIs of + /// [`TypedFunc`] and friends. 
 + /// + /// [`Func::call_unchecked`]: crate::Func::call_unchecked + /// [`Func::new_unchecked`]: crate::Func::new_unchecked + /// [`Store`]: crate::Store + /// [`TypedFunc`]: crate::TypedFunc + /// [`ValRaw`]: crate::ValRaw + pub unsafe fn from_raw(raw: *mut c_void) -> Option { + let raw = raw.cast::(); + if raw.is_null() { + None + } else { + Some(ExternRef { + inner: VMExternRef::clone_from_raw(raw), + }) + } + } + + /// Converts this [`ExternRef`] to a raw value suitable to store within a + /// [`ValRaw`]. + /// + /// # Unsafety + /// + /// Produces a raw value which is only safe to pass into a store if a GC + /// doesn't happen between when the value is produced and when it's passed + /// into the store. + /// + /// [`ValRaw`]: crate::ValRaw + pub unsafe fn to_raw(&self, mut store: impl AsContextMut) -> *mut c_void { + let externref_ptr = self.inner.as_raw(); + store + .as_context_mut() + .0 + .insert_vmexternref_without_gc(self.inner.clone()); + externref_ptr.cast() + } +} + +impl std::fmt::Pointer for ExternRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Pointer::fmt(&self.inner, f) + } +} diff --git a/crates/wasmtime/src/runtime/ref/no_gc_ref.rs b/crates/wasmtime/src/runtime/ref/no_gc_ref.rs new file mode 100644 index 000000000000..dfcf997efd41 --- /dev/null +++ b/crates/wasmtime/src/runtime/ref/no_gc_ref.rs @@ -0,0 +1,10 @@ +use crate::runtime::Uninhabited; + +/// Represents an opaque reference to any data within WebAssembly. +/// +/// Due to compilation configuration, this is an uninhabited type: enable the +/// `gc` cargo feature to properly use this type.
+#[derive(Clone, Debug)] +pub struct ExternRef { + pub(crate) _inner: Uninhabited, +} diff --git a/crates/wasmtime/src/runtime/store.rs b/crates/wasmtime/src/runtime/store.rs index 40df116370c6..48c10535c23f 100644 --- a/crates/wasmtime/src/runtime/store.rs +++ b/crates/wasmtime/src/runtime/store.rs @@ -97,9 +97,9 @@ use std::sync::Arc; use std::task::{Context, Poll}; use wasmtime_runtime::mpk::{self, ProtectionKey, ProtectionMask}; use wasmtime_runtime::{ - ExportGlobal, InstanceAllocationRequest, InstanceAllocator, InstanceHandle, ModuleInfo, - OnDemandInstanceAllocator, SignalHandler, StoreBox, StorePtr, VMContext, VMExternRef, - VMExternRefActivationsTable, VMFuncRef, VMRuntimeLimits, WasmFault, + ExportGlobal, InstanceAllocationRequest, InstanceAllocator, InstanceHandle, + OnDemandInstanceAllocator, SignalHandler, StoreBox, StorePtr, VMContext, VMFuncRef, + VMRuntimeLimits, WasmFault, }; mod context; @@ -307,7 +307,8 @@ pub struct StoreOpaque { #[cfg(feature = "component-model")] num_component_instances: usize, signal_handler: Option>>, - externref_activations_table: VMExternRefActivationsTable, + #[cfg(feature = "gc")] + externref_activations_table: wasmtime_runtime::VMExternRefActivationsTable, modules: ModuleRegistry, func_refs: FuncRefs, host_globals: Vec>, @@ -392,7 +393,7 @@ pub(crate) struct AutoAssertNoGc where T: std::ops::DerefMut, { - #[cfg(debug_assertions)] + #[cfg(all(debug_assertions, feature = "gc"))] prev_okay: bool, store: T, } @@ -404,12 +405,12 @@ where #[inline] pub fn new(mut store: T) -> Self { let _ = &mut store; - #[cfg(debug_assertions)] + #[cfg(all(debug_assertions, feature = "gc"))] { let prev_okay = store.externref_activations_table.set_gc_okay(false); return AutoAssertNoGc { store, prev_okay }; } - #[cfg(not(debug_assertions))] + #[cfg(not(all(debug_assertions, feature = "gc")))] { return AutoAssertNoGc { store }; } @@ -441,7 +442,7 @@ where T: std::ops::DerefMut, { fn drop(&mut self) { - #[cfg(debug_assertions)] + 
#[cfg(all(debug_assertions, feature = "gc"))] { self.store .externref_activations_table @@ -497,7 +498,8 @@ impl Store { #[cfg(feature = "component-model")] num_component_instances: 0, signal_handler: None, - externref_activations_table: VMExternRefActivationsTable::new(), + #[cfg(feature = "gc")] + externref_activations_table: wasmtime_runtime::VMExternRefActivationsTable::new(), modules: ModuleRegistry::default(), func_refs: FuncRefs::default(), host_globals: Vec::new(), @@ -794,6 +796,9 @@ impl Store { /// Note that it is not required to actively call this function. GC will /// automatically happen when internal buffers fill up. This is provided if /// fine-grained control over the GC is desired. + /// + /// This method is only available when the `gc` Cargo feature is enabled. + #[cfg(feature = "gc")] pub fn gc(&mut self) { self.inner.gc() } @@ -1036,6 +1041,9 @@ impl<'a, T> StoreContextMut<'a, T> { /// Perform garbage collection of `ExternRef`s. /// /// Same as [`Store::gc`]. + /// + /// This method is only available when the `gc` Cargo feature is enabled. + #[cfg(feature = "gc")] pub fn gc(&mut self) { self.0.gc() } @@ -1374,10 +1382,14 @@ impl StoreOpaque { } #[inline] - pub fn externref_activations_table(&mut self) -> &mut VMExternRefActivationsTable { + #[cfg(feature = "gc")] + pub fn externref_activations_table( + &mut self, + ) -> &mut wasmtime_runtime::VMExternRefActivationsTable { &mut self.externref_activations_table } + #[cfg(feature = "gc")] pub fn gc(&mut self) { // For this crate's API, we ensure that `set_stack_canary` invariants // are upheld for all host-->Wasm calls. 
@@ -1523,7 +1535,8 @@ impl StoreOpaque { &self.runtime_limits as *const VMRuntimeLimits as *mut VMRuntimeLimits } - pub unsafe fn insert_vmexternref_without_gc(&mut self, r: VMExternRef) { + #[cfg(feature = "gc")] + pub unsafe fn insert_vmexternref_without_gc(&mut self, r: wasmtime_runtime::VMExternRef) { self.externref_activations_table.insert_without_gc(r); } @@ -2039,10 +2052,11 @@ unsafe impl wasmtime_runtime::Store for StoreInner { self.engine.epoch_counter() as *const _ } + #[cfg(feature = "gc")] fn externref_activations_table( &mut self, ) -> ( - &mut VMExternRefActivationsTable, + &mut wasmtime_runtime::VMExternRefActivationsTable, &dyn wasmtime_runtime::ModuleInfoLookup, ) { let inner = &mut self.inner; @@ -2298,8 +2312,9 @@ impl Drop for StoreOpaque { } } +#[cfg(feature = "gc")] impl wasmtime_runtime::ModuleInfoLookup for ModuleRegistry { - fn lookup(&self, pc: usize) -> Option<&dyn ModuleInfo> { + fn lookup(&self, pc: usize) -> Option<&dyn wasmtime_runtime::ModuleInfo> { self.lookup_module_info(pc) } } diff --git a/crates/wasmtime/src/runtime/trampoline/global.rs b/crates/wasmtime/src/runtime/trampoline/global.rs index b44c714ccdc9..b2f9e07bd37c 100644 --- a/crates/wasmtime/src/runtime/trampoline/global.rs +++ b/crates/wasmtime/src/runtime/trampoline/global.rs @@ -20,10 +20,15 @@ impl Drop for VMHostGlobalContext { // Nothing to drop. } crate::ValType::Ref(r) => match r.heap_type() { - HeapType::Extern => unsafe { ptr::drop_in_place(self.global.as_externref_mut()) }, HeapType::Func | HeapType::Concrete(_) | HeapType::NoFunc => { // Nothing to drop. 
} + + #[cfg(feature = "gc")] + HeapType::Extern => unsafe { ptr::drop_in_place(self.global.as_externref_mut()) }, + + #[cfg(not(feature = "gc"))] + HeapType::Extern => assert!(unsafe { self.global.as_func_ref().is_null() }), }, } } @@ -58,9 +63,16 @@ pub fn generate_global_export( *global.as_func_ref_mut() = f.map_or(ptr::null_mut(), |f| f.vm_func_ref(store).as_ptr()); } + #[cfg(feature = "gc")] Val::ExternRef(x) => { *global.as_externref_mut() = x.map(|x| x.inner); } + #[cfg(not(feature = "gc"))] + Val::ExternRef(None) => { + *global.as_func_ref_mut() = ptr::null_mut(); + } + #[cfg(not(feature = "gc"))] + Val::ExternRef(Some(x)) => match x._inner {}, } global }; diff --git a/crates/wasmtime/src/runtime/uninhabited.rs b/crates/wasmtime/src/runtime/uninhabited.rs new file mode 100644 index 000000000000..6b368c35ceba --- /dev/null +++ b/crates/wasmtime/src/runtime/uninhabited.rs @@ -0,0 +1,5 @@ +/// An uninhabited type. +/// +/// Useful for modeling Wasm's bottom types or `cfg`'d off features. 
+#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub(crate) enum Uninhabited {} diff --git a/crates/wasmtime/src/runtime/values.rs b/crates/wasmtime/src/runtime/values.rs index 9ae98876da61..af90c4fd0a3b 100644 --- a/crates/wasmtime/src/runtime/values.rs +++ b/crates/wasmtime/src/runtime/values.rs @@ -178,8 +178,13 @@ impl Val { Val::V128(b) => ValRaw::v128(b.as_u128()), Val::ExternRef(e) => { let externref = match e { - Some(e) => e.to_raw(store), None => ptr::null_mut(), + + #[cfg(feature = "gc")] + Some(e) => e.to_raw(store), + + #[cfg(not(feature = "gc"))] + Some(x) => match x._inner {}, }; ValRaw::externref(externref) } @@ -209,11 +214,19 @@ impl Val { ValType::V128 => Val::V128(raw.get_v128().into()), ValType::Ref(ref_ty) => { let ref_ = match ref_ty.heap_type() { - HeapType::Extern => ExternRef::from_raw(raw.get_externref()).into(), HeapType::Func | HeapType::Concrete(_) => { Func::from_raw(store, raw.get_funcref()).into() } HeapType::NoFunc => Ref::Func(None), + + #[cfg(feature = "gc")] + HeapType::Extern => ExternRef::from_raw(raw.get_externref()).into(), + + #[cfg(not(feature = "gc"))] + HeapType::Extern => { + assert!(raw.get_externref().is_null()); + Ref::Extern(None) + } }; assert!( ref_ty.is_nullable() || !ref_.is_null(), @@ -689,6 +702,8 @@ impl Ref { ); Ok(TableElement::FuncRef(f.vm_func_ref(store).as_ptr())) } + + #[cfg(feature = "gc")] (Ref::Extern(e), HeapType::Extern) => match e { None => { assert!(ty.is_nullable()); @@ -696,6 +711,16 @@ impl Ref { } Some(e) => Ok(TableElement::ExternRef(Some(e.inner))), }, + + #[cfg(not(feature = "gc"))] + (Ref::Extern(e), HeapType::Extern) => match e { + None => bail!( + "support for externref tables disabled at compile time \ + because the `gc` cargo feature was not enabled" + ), + Some(e) => match e._inner {}, + }, + _ => unreachable!("checked that the value matches the type above"), } } From 8e430216e5830200bf637ea9129e7f8c4811850e Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 07:51:33 
-0800 Subject: [PATCH 02/12] Add CI for `gc` Cargo feature --- .github/workflows/main.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9238ad276102..74ce0cb54187 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -288,6 +288,10 @@ jobs: - run: cargo check -p wasmtime --no-default-features --features wmemcheck - run: cargo check -p wasmtime --no-default-features --features demangle - run: cargo check -p wasmtime --no-default-features --features addr2line + - run: cargo check -p wasmtime --no-default-features --features gc + - run: cargo check -p wasmtime --no-default-features --features runtime,gc + - run: cargo check -p wasmtime --no-default-features --features cranelift,gc + - run: cargo check -p wasmtime --no-default-features --features runtime - run: cargo check --features component-model - run: cargo check -p wasmtime --features incremental-cache From 2c619901793367e7120003986580d4bb5e1e83f8 Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 09:51:43 -0800 Subject: [PATCH 03/12] Cut down on the number of `#[cfg(feature = "gc")]`s outside the implementation of `[VM]ExternRef` --- crates/runtime/src/externref.rs | 1066 +---------------- crates/runtime/src/externref/gc.rs | 1058 ++++++++++++++++ crates/runtime/src/externref/no_gc.rs | 114 ++ crates/runtime/src/instance.rs | 28 +- crates/runtime/src/lib.rs | 2 - crates/runtime/src/libcalls.rs | 28 +- crates/runtime/src/table.rs | 30 +- crates/runtime/src/vmcontext.rs | 40 +- .../wasmtime/src/runtime/externals/global.rs | 21 +- .../wasmtime/src/runtime/externals/table.rs | 5 +- crates/wasmtime/src/runtime/func/typed.rs | 19 +- crates/wasmtime/src/runtime/module.rs | 2 - .../wasmtime/src/runtime/module/registry.rs | 1 - crates/wasmtime/src/runtime/ref/gc_ref.rs | 19 +- crates/wasmtime/src/runtime/ref/no_gc_ref.rs | 50 + crates/wasmtime/src/runtime/store.rs | 1 - 
.../wasmtime/src/runtime/trampoline/global.rs | 14 +- crates/wasmtime/src/runtime/values.rs | 25 +- 18 files changed, 1282 insertions(+), 1241 deletions(-) create mode 100644 crates/runtime/src/externref/gc.rs create mode 100644 crates/runtime/src/externref/no_gc.rs diff --git a/crates/runtime/src/externref.rs b/crates/runtime/src/externref.rs index 2ba82944c46f..547a87410b7b 100644 --- a/crates/runtime/src/externref.rs +++ b/crates/runtime/src/externref.rs @@ -1,796 +1,14 @@ -//! # `VMExternRef` -//! -//! `VMExternRef` is a reference-counted box for any kind of data that is -//! external and opaque to running Wasm. Sometimes it might hold a Wasmtime -//! thing, other times it might hold something from a Wasmtime embedder and is -//! opaque even to us. It is morally equivalent to `Rc` in Rust, but -//! additionally always fits in a pointer-sized word. `VMExternRef` is -//! non-nullable, but `Option` is a null pointer. -//! -//! The one part of `VMExternRef` that can't ever be opaque to us is the -//! reference count. Even when we don't know what's inside an `VMExternRef`, we -//! need to be able to manipulate its reference count as we add and remove -//! references to it. And we need to do this from compiled Wasm code, so it must -//! be `repr(C)`! -//! -//! ## Memory Layout -//! -//! `VMExternRef` itself is just a pointer to an `VMExternData`, which holds the -//! opaque, boxed value, its reference count, and its vtable pointer. -//! -//! The `VMExternData` struct is *preceded* by the dynamically-sized value boxed -//! up and referenced by one or more `VMExternRef`s: -//! -//! ```text -//! ,-------------------------------------------------------. -//! | | -//! V | -//! +----------------------------+-----------+-----------+ | -//! | dynamically-sized value... | ref_count | value_ptr |---' -//! +----------------------------+-----------+-----------+ -//! | VMExternData | -//! +-----------------------+ -//! ^ -//! +-------------+ | -//! 
| VMExternRef |-------------------+ -//! +-------------+ | -//! | -//! +-------------+ | -//! | VMExternRef |-------------------+ -//! +-------------+ | -//! | -//! ... === -//! | -//! +-------------+ | -//! | VMExternRef |-------------------' -//! +-------------+ -//! ``` -//! -//! The `value_ptr` member always points backwards to the start of the -//! dynamically-sized value (which is also the start of the heap allocation for -//! this value-and-`VMExternData` pair). Because it is a `dyn` pointer, it is -//! fat, and also points to the value's `Any` vtable. -//! -//! The boxed value and the `VMExternRef` footer are held a single heap -//! allocation. The layout described above is used to make satisfying the -//! value's alignment easy: we just need to ensure that the heap allocation used -//! to hold everything satisfies its alignment. It also ensures that we don't -//! need a ton of excess padding between the `VMExternData` and the value for -//! values with large alignment. -//! -//! ## Reference Counting, Wasm Functions, and Garbage Collection -//! -//! For host VM code, we use plain reference counting, where cloning increments -//! the reference count, and dropping decrements it. We can avoid many of the -//! on-stack increment/decrement operations that typically plague the -//! performance of reference counting via Rust's ownership and borrowing system. -//! Moving a `VMExternRef` avoids mutating its reference count, and borrowing it -//! either avoids the reference count increment or delays it until if/when the -//! `VMExternRef` is cloned. -//! -//! When passing a `VMExternRef` into compiled Wasm code, we don't want to do -//! reference count mutations for every compiled `local.{get,set}`, nor for -//! every function call. Therefore, we use a variation of **deferred reference -//! counting**, where we only mutate reference counts when storing -//! `VMExternRef`s somewhere that outlives the activation: into a global or -//! table. 
Simultaneously, we over-approximate the set of `VMExternRef`s that -//! are inside Wasm function activations. Periodically, we walk the stack at GC -//! safe points, and use stack map information to precisely identify the set of -//! `VMExternRef`s inside Wasm activations. Then we take the difference between -//! this precise set and our over-approximation, and decrement the reference -//! count for each of the `VMExternRef`s that are in our over-approximation but -//! not in the precise set. Finally, the over-approximation is replaced with the -//! precise set. -//! -//! The `VMExternRefActivationsTable` implements the over-approximized set of -//! `VMExternRef`s referenced by Wasm activations. Calling a Wasm function and -//! passing it a `VMExternRef` moves the `VMExternRef` into the table, and the -//! compiled Wasm function logically "borrows" the `VMExternRef` from the -//! table. Similarly, `global.get` and `table.get` operations clone the gotten -//! `VMExternRef` into the `VMExternRefActivationsTable` and then "borrow" the -//! reference out of the table. -//! -//! When a `VMExternRef` is returned to host code from a Wasm function, the host -//! increments the reference count (because the reference is logically -//! "borrowed" from the `VMExternRefActivationsTable` and the reference count -//! from the table will be dropped at the next GC). -//! -//! For more general information on deferred reference counting, see *An -//! Examination of Deferred Reference Counting and Cycle Detection* by Quinane: -//! 
+#[cfg(feature = "gc")] +mod gc; +#[cfg(feature = "gc")] +pub use gc::*; -use crate::{Backtrace, SendSyncPtr, VMRuntimeLimits}; -use std::alloc::Layout; -use std::any::Any; -use std::cell::UnsafeCell; -use std::cmp; -use std::collections::HashSet; -use std::hash::{Hash, Hasher}; -use std::mem; -use std::ops::Deref; -use std::ptr::{self, NonNull}; -use std::sync::atomic::{self, AtomicUsize, Ordering}; -use wasmtime_environ::StackMap; - -/// An external reference to some opaque data. -/// -/// `VMExternRef`s dereference to their underlying opaque data as `dyn Any`. -/// -/// Unlike the `externref` in the Wasm spec, `VMExternRef`s are non-nullable, -/// and always point to a valid value. You may use `Option` to -/// represent nullable references, and `Option` is guaranteed to -/// have the same size and alignment as a raw pointer, with `None` represented -/// with the null pointer. -/// -/// `VMExternRef`s are reference counted, so cloning is a cheap, shallow -/// operation. It also means they are inherently shared, so you may not get a -/// mutable, exclusive reference to their inner contents, only a shared, -/// immutable reference. You may use interior mutability with `RefCell` or -/// `Mutex` to work around this restriction, if necessary. -/// -/// `VMExternRef`s have pointer-equality semantics, not structural-equality -/// semantics. Given two `VMExternRef`s `a` and `b`, `a == b` only if `a` and -/// `b` point to the same allocation. `a` and `b` are considered not equal, even -/// if `a` and `b` are two different identical copies of the same data, if they -/// are in two different allocations. The hashing and ordering implementations -/// also only operate on the pointer. -/// -/// # Example -/// -/// ``` -/// # fn foo() -> Result<(), Box> { -/// use std::cell::RefCell; -/// use wasmtime_runtime::VMExternRef; -/// -/// // Open a file. Wasm doesn't know about files, but we can let Wasm instances -/// // work with files via opaque `externref` handles. 
-/// let file = std::fs::File::create("some/file/path")?; -/// -/// // Wrap the file up as an `VMExternRef` that can be passed to Wasm. -/// let extern_ref_to_file = VMExternRef::new(file); -/// -/// // `VMExternRef`s dereference to `dyn Any`, so you can use `Any` methods to -/// // perform runtime type checks and downcasts. -/// -/// assert!(extern_ref_to_file.is::()); -/// assert!(!extern_ref_to_file.is::()); -/// -/// if let Some(mut file) = extern_ref_to_file.downcast_ref::() { -/// use std::io::Write; -/// writeln!(&mut file, "Hello, `VMExternRef`!")?; -/// } -/// # Ok(()) -/// # } -/// ``` -#[derive(Debug)] -#[repr(transparent)] -pub struct VMExternRef(SendSyncPtr); - -impl std::fmt::Pointer for VMExternRef { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Pointer::fmt(&self.0, f) - } -} - -#[repr(C)] -pub(crate) struct VMExternData { - // Implicit, dynamically-sized member that always preceded an - // `VMExternData`. - // - // value: [u8], - // - /// The reference count for this `VMExternData` and value. When it reaches - /// zero, we can safely destroy the value and free this heap - /// allocation. This is an `UnsafeCell`, rather than plain `Cell`, because - /// it can be modified by compiled Wasm code. - /// - /// Note: this field's offset must be kept in sync with - /// `wasmtime_environ::VMOffsets::vm_extern_data_ref_count()` which is - /// currently always zero. - ref_count: AtomicUsize, - - /// Always points to the implicit, dynamically-sized `value` member that - /// precedes this `VMExternData`. - value_ptr: SendSyncPtr, -} - -impl Clone for VMExternRef { - #[inline] - fn clone(&self) -> VMExternRef { - self.extern_data().increment_ref_count(); - VMExternRef(self.0) - } -} - -impl Drop for VMExternRef { - #[inline] - fn drop(&mut self) { - let data = self.extern_data(); - - // Note that the memory orderings here also match the standard library - // itself. 
Documentation is more available in the implementation of - // `Arc`, but the general idea is that this is a special pattern allowed - // by the C standard with atomic orderings where we "release" for all - // the decrements and only the final decrementer performs an acquire - // fence. This properly ensures that the final thread, which actually - // destroys the data, sees all the updates from all other threads. - if data.ref_count.fetch_sub(1, Ordering::Release) != 1 { - return; - } - atomic::fence(Ordering::Acquire); - - unsafe { - VMExternData::drop_and_dealloc(self.0); - } - } -} - -impl VMExternData { - /// Get the `Layout` for a value with the given size and alignment, and the - /// offset within that layout where the `VMExternData` footer resides. - /// - /// This doesn't take a `value: &T` because `VMExternRef::new_with` hasn't - /// constructed a `T` value yet, and it isn't generic over `T` because - /// `VMExternData::drop_and_dealloc` doesn't know what `T` to use, and has - /// to use `std::mem::{size,align}_of_val` instead. - unsafe fn layout_for(value_size: usize, value_align: usize) -> (Layout, usize) { - let extern_data_size = mem::size_of::(); - let extern_data_align = mem::align_of::(); - - let value_and_padding_size = round_up_to_align(value_size, extern_data_align).unwrap(); - - let alloc_align = std::cmp::max(value_align, extern_data_align); - let alloc_size = value_and_padding_size + extern_data_size; - - debug_assert!( - Layout::from_size_align(alloc_size, alloc_align).is_ok(), - "should create a `Layout` for size={} and align={} okay", - alloc_size, - alloc_align, - ); - ( - Layout::from_size_align_unchecked(alloc_size, alloc_align), - value_and_padding_size, - ) - } - - /// Drop the inner value and then free this `VMExternData` heap allocation. 
- pub(crate) unsafe fn drop_and_dealloc(mut data: SendSyncPtr) { - log::trace!("Dropping externref data @ {:p}", data); - - // Note: we introduce a block scope so that we drop the live - // reference to the data before we free the heap allocation it - // resides within after this block. - let (alloc_ptr, layout) = { - let data = data.as_mut(); - debug_assert_eq!(data.ref_count.load(Ordering::SeqCst), 0); - - // Same thing, but for the dropping the reference to `value` before - // we drop it itself. - let (layout, _) = { - let value = data.value_ptr.as_ref(); - Self::layout_for(mem::size_of_val(value), mem::align_of_val(value)) - }; - - ptr::drop_in_place(data.value_ptr.as_ptr()); - let alloc_ptr = data.value_ptr.as_ptr().cast::(); - - (alloc_ptr, layout) - }; - - ptr::drop_in_place(data.as_ptr()); - std::alloc::dealloc(alloc_ptr, layout); - } - - #[inline] - fn increment_ref_count(&self) { - // This is only using during cloning operations, and like the standard - // library we use `Relaxed` here. The rationale is better documented in - // libstd's implementation of `Arc`, but the general gist is that we're - // creating a new pointer for our own thread, so there's no need to have - // any synchronization with orderings. The synchronization with other - // threads with respect to orderings happens when the pointer is sent to - // another thread. - self.ref_count.fetch_add(1, Ordering::Relaxed); - } -} - -#[inline] -fn round_up_to_align(n: usize, align: usize) -> Option { - debug_assert!(align.is_power_of_two()); - let align_minus_one = align - 1; - Some(n.checked_add(align_minus_one)? & !align_minus_one) -} - -impl VMExternRef { - /// Wrap the given value inside an `VMExternRef`. - pub fn new(value: T) -> VMExternRef - where - T: 'static + Any + Send + Sync, - { - VMExternRef::new_with(|| value) - } - - /// Construct a new `VMExternRef` in place by invoking `make_value`. 
- pub fn new_with(make_value: impl FnOnce() -> T) -> VMExternRef - where - T: 'static + Any + Send + Sync, - { - unsafe { - let (layout, footer_offset) = - VMExternData::layout_for(mem::size_of::(), mem::align_of::()); - - let alloc_ptr = std::alloc::alloc(layout); - let alloc_ptr = NonNull::new(alloc_ptr).unwrap_or_else(|| { - std::alloc::handle_alloc_error(layout); - }); - - let value_ptr = alloc_ptr.cast::(); - ptr::write(value_ptr.as_ptr(), make_value()); - - let extern_data_ptr = - alloc_ptr.cast::().as_ptr().add(footer_offset) as *mut VMExternData; - - ptr::write( - extern_data_ptr, - VMExternData { - ref_count: AtomicUsize::new(1), - // Cast from `*mut T` to `*mut dyn Any` here. - value_ptr: SendSyncPtr::new(NonNull::new_unchecked(value_ptr.as_ptr())), - }, - ); - - log::trace!("New externref data @ {:p}", extern_data_ptr); - VMExternRef(NonNull::new_unchecked(extern_data_ptr).into()) - } - } - - /// Turn this `VMExternRef` into a raw, untyped pointer. - /// - /// Unlike `into_raw`, this does not consume and forget `self`. It is *not* - /// safe to use `from_raw` on pointers returned from this method; only use - /// `clone_from_raw`! - /// - /// Nor does this method increment the reference count. You must ensure - /// that `self` (or some other clone of `self`) stays alive until - /// `clone_from_raw` is called. - #[inline] - pub fn as_raw(&self) -> *mut u8 { - let ptr = self.0.as_ptr().cast::(); - ptr - } - - /// Consume this `VMExternRef` into a raw, untyped pointer. - /// - /// # Safety - /// - /// This method forgets self, so it is possible to create a leak of the - /// underlying reference counted data if not used carefully. - /// - /// Use `from_raw` to recreate the `VMExternRef`. - pub unsafe fn into_raw(self) -> *mut u8 { - let ptr = self.0.as_ptr().cast::(); - std::mem::forget(self); - ptr - } - - /// Recreate a `VMExternRef` from a pointer returned from a previous call to - /// `as_raw`. 
- /// - /// # Safety - /// - /// Unlike `clone_from_raw`, this does not increment the reference count of the - /// underlying data. It is not safe to continue to use the pointer passed to this - /// function. - pub unsafe fn from_raw(ptr: *mut u8) -> Self { - debug_assert!(!ptr.is_null()); - VMExternRef(NonNull::new_unchecked(ptr).cast().into()) - } - - /// Recreate a `VMExternRef` from a pointer returned from a previous call to - /// `as_raw`. - /// - /// # Safety - /// - /// Wildly unsafe to use with anything other than the result of a previous - /// `as_raw` call! - /// - /// Additionally, it is your responsibility to ensure that this raw - /// `VMExternRef`'s reference count has not dropped to zero. Failure to do - /// so will result in use after free! - pub unsafe fn clone_from_raw(ptr: *mut u8) -> Self { - debug_assert!(!ptr.is_null()); - let x = VMExternRef(NonNull::new_unchecked(ptr).cast().into()); - x.extern_data().increment_ref_count(); - x - } - - /// Get the strong reference count for this `VMExternRef`. - /// - /// Note that this loads with a `SeqCst` ordering to synchronize with other - /// threads. - pub fn strong_count(&self) -> usize { - self.extern_data().ref_count.load(Ordering::SeqCst) - } - - #[inline] - fn extern_data(&self) -> &VMExternData { - unsafe { self.0.as_ref() } - } -} - -/// Methods that would normally be trait implementations, but aren't to avoid -/// potential footguns around `VMExternRef`'s pointer-equality semantics. -/// -/// Note that none of these methods are on `&self`, they all require a -/// fully-qualified `VMExternRef::foo(my_ref)` invocation. -impl VMExternRef { - /// Check whether two `VMExternRef`s point to the same inner allocation. - /// - /// Note that this uses pointer-equality semantics, not structural-equality - /// semantics, and so only pointers are compared, and doesn't use any `Eq` - /// or `PartialEq` implementation of the pointed-to values. 
- #[inline] - pub fn eq(a: &Self, b: &Self) -> bool { - ptr::eq(a.0.as_ptr(), b.0.as_ptr()) - } - - /// Hash a given `VMExternRef`. - /// - /// Note that this just hashes the pointer to the inner value, it does *not* - /// use the inner value's `Hash` implementation (if any). - #[inline] - pub fn hash(externref: &Self, hasher: &mut H) - where - H: Hasher, - { - ptr::hash(externref.0.as_ptr(), hasher); - } - - /// Compare two `VMExternRef`s. - /// - /// Note that this uses pointer-equality semantics, not structural-equality - /// semantics, and so only pointers are compared, and doesn't use any `Cmp` - /// or `PartialCmp` implementation of the pointed-to values. - #[inline] - pub fn cmp(a: &Self, b: &Self) -> cmp::Ordering { - let a = a.0.as_ptr() as usize; - let b = b.0.as_ptr() as usize; - a.cmp(&b) - } -} - -impl Deref for VMExternRef { - type Target = dyn Any; - - fn deref(&self) -> &dyn Any { - unsafe { self.extern_data().value_ptr.as_ref() } - } -} - -/// A wrapper around a `VMExternRef` that implements `Eq` and `Hash` with -/// pointer semantics. -/// -/// We use this so that we can morally put `VMExternRef`s inside of `HashSet`s -/// even though they don't implement `Eq` and `Hash` to avoid foot guns. -#[derive(Clone, Debug)] -struct VMExternRefWithTraits(VMExternRef); - -impl Hash for VMExternRefWithTraits { - fn hash(&self, hasher: &mut H) - where - H: Hasher, - { - VMExternRef::hash(&self.0, hasher) - } -} - -impl PartialEq for VMExternRefWithTraits { - fn eq(&self, other: &Self) -> bool { - VMExternRef::eq(&self.0, &other.0) - } -} - -impl Eq for VMExternRefWithTraits {} - -type TableElem = UnsafeCell>; - -/// A table that over-approximizes the set of `VMExternRef`s that any Wasm -/// activation on this thread is currently using. -/// -/// Under the covers, this is a simple bump allocator that allows duplicate -/// entries. Deduplication happens at GC time. -#[repr(C)] // `alloc` must be the first member, it's accessed from JIT code. 
-pub struct VMExternRefActivationsTable { - /// Structures used to perform fast bump allocation of storage of externref - /// values. - /// - /// This is the only member of this structure accessed from JIT code. - alloc: VMExternRefTableAlloc, - - /// When unioned with `chunk`, this is an over-approximation of the GC roots - /// on the stack, inside Wasm frames. - /// - /// This is used by slow-path insertion, and when a GC cycle finishes, is - /// re-initialized to the just-discovered precise set of stack roots (which - /// immediately becomes an over-approximation again as soon as Wasm runs and - /// potentially drops references). - over_approximated_stack_roots: HashSet, - - /// The precise set of on-stack, inside-Wasm GC roots that we discover via - /// walking the stack and interpreting stack maps. - /// - /// This is *only* used inside the `gc` function, and is empty otherwise. It - /// is just part of this struct so that we can reuse the allocation, rather - /// than create a new hash set every GC. - precise_stack_roots: HashSet, - - /// A debug-only field for asserting that we are in a region of code where - /// GC is okay to preform. - #[cfg(debug_assertions)] - gc_okay: bool, -} - -#[repr(C)] // This is accessed from JIT code. -struct VMExternRefTableAlloc { - /// Bump-allocation finger within the `chunk`. - /// - /// NB: this is an `UnsafeCell` because it is written to by compiled Wasm - /// code. - next: UnsafeCell>, - - /// Pointer to just after the `chunk`. - /// - /// This is *not* within the current chunk and therefore is not a valid - /// place to insert a reference! - end: NonNull, - - /// Bump allocation chunk that stores fast-path insertions. - /// - /// This is not accessed from JIT code. - chunk: Box<[TableElem]>, -} - -// This gets around the usage of `UnsafeCell` throughout the internals of this -// allocator, but the storage should all be Send/Sync and synchronization isn't -// necessary since operations require `&mut self`. 
-unsafe impl Send for VMExternRefTableAlloc {} -unsafe impl Sync for VMExternRefTableAlloc {} - -fn _assert_send_sync() { - fn _assert() {} - _assert::(); - _assert::(); -} - -impl VMExternRefActivationsTable { - const CHUNK_SIZE: usize = 4096 / mem::size_of::(); - - /// Create a new `VMExternRefActivationsTable`. - pub fn new() -> Self { - // Start with an empty chunk in case this activations table isn't used. - // This means that there's no space in the bump-allocation area which - // will force any path trying to use this to the slow gc path. The first - // time this happens, though, the slow gc path will allocate a new chunk - // for actual fast-bumping. - let mut chunk: Box<[TableElem]> = Box::new([]); - let next = chunk.as_mut_ptr(); - let end = unsafe { next.add(chunk.len()) }; +#[cfg(not(feature = "gc"))] +mod no_gc; +#[cfg(not(feature = "gc"))] +pub use no_gc::*; - VMExternRefActivationsTable { - alloc: VMExternRefTableAlloc { - next: UnsafeCell::new(NonNull::new(next).unwrap()), - end: NonNull::new(end).unwrap(), - chunk, - }, - over_approximated_stack_roots: HashSet::new(), - precise_stack_roots: HashSet::new(), - #[cfg(debug_assertions)] - gc_okay: true, - } - } - - fn new_chunk(size: usize) -> Box<[UnsafeCell>]> { - assert!(size >= Self::CHUNK_SIZE); - (0..size).map(|_| UnsafeCell::new(None)).collect() - } - - /// Get the available capacity in the bump allocation chunk. - #[inline] - pub fn bump_capacity_remaining(&self) -> usize { - let end = self.alloc.end.as_ptr() as usize; - let next = unsafe { *self.alloc.next.get() }; - end - next.as_ptr() as usize - } - - /// Try and insert a `VMExternRef` into this table. - /// - /// This is a fast path that only succeeds when the bump chunk has the - /// capacity for the requested insertion. - /// - /// If the insertion fails, then the `VMExternRef` is given back. 
Callers - /// may attempt a GC to free up space and try again, or may call - /// `insert_slow_path` to infallibly insert the reference (potentially - /// allocating additional space in the table to hold it). - #[inline] - pub fn try_insert(&mut self, externref: VMExternRef) -> Result<(), VMExternRef> { - unsafe { - let next = *self.alloc.next.get(); - if next == self.alloc.end { - return Err(externref); - } - - debug_assert!( - (*next.as_ref().get()).is_none(), - "slots >= the `next` bump finger are always `None`" - ); - ptr::write(next.as_ptr(), UnsafeCell::new(Some(externref))); - - let next = NonNull::new_unchecked(next.as_ptr().add(1)); - debug_assert!(next <= self.alloc.end); - *self.alloc.next.get() = next; - - Ok(()) - } - } - - /// Insert a reference into the table, falling back on a GC to clear up - /// space if the table is already full. - /// - /// # Unsafety - /// - /// The same as `gc`. - #[inline] - pub unsafe fn insert_with_gc( - &mut self, - limits: *const VMRuntimeLimits, - externref: VMExternRef, - module_info_lookup: &dyn ModuleInfoLookup, - ) { - #[cfg(debug_assertions)] - assert!(self.gc_okay); - - if let Err(externref) = self.try_insert(externref) { - self.gc_and_insert_slow(limits, externref, module_info_lookup); - } - } - - #[inline(never)] - unsafe fn gc_and_insert_slow( - &mut self, - limits: *const VMRuntimeLimits, - externref: VMExternRef, - module_info_lookup: &dyn ModuleInfoLookup, - ) { - gc(limits, module_info_lookup, self); - - // Might as well insert right into the hash set, rather than the bump - // chunk, since we are already on a slow path and we get de-duplication - // this way. - self.over_approximated_stack_roots - .insert(VMExternRefWithTraits(externref)); - } - - /// Insert a reference into the table, without ever performing GC. 
- #[inline] - pub fn insert_without_gc(&mut self, externref: VMExternRef) { - if let Err(externref) = self.try_insert(externref) { - self.insert_slow_without_gc(externref); - } - } - - #[inline(never)] - fn insert_slow_without_gc(&mut self, externref: VMExternRef) { - self.over_approximated_stack_roots - .insert(VMExternRefWithTraits(externref)); - } - - fn num_filled_in_bump_chunk(&self) -> usize { - let next = unsafe { *self.alloc.next.get() }; - let bytes_unused = (self.alloc.end.as_ptr() as usize) - (next.as_ptr() as usize); - let slots_unused = bytes_unused / mem::size_of::(); - self.alloc.chunk.len().saturating_sub(slots_unused) - } - - fn elements(&self, mut f: impl FnMut(&VMExternRef)) { - for elem in self.over_approximated_stack_roots.iter() { - f(&elem.0); - } - - // The bump chunk is not all the way full, so we only iterate over its - // filled-in slots. - let num_filled = self.num_filled_in_bump_chunk(); - for slot in self.alloc.chunk.iter().take(num_filled) { - if let Some(elem) = unsafe { &*slot.get() } { - f(elem); - } - } - } - - fn insert_precise_stack_root( - precise_stack_roots: &mut HashSet, - root: NonNull, - ) { - let root = unsafe { VMExternRef::clone_from_raw(root.as_ptr().cast()) }; - log::trace!("Found externref on stack: {:p}", root); - precise_stack_roots.insert(VMExternRefWithTraits(root)); - } - - /// Sweep the bump allocation table after we've discovered our precise stack - /// roots. - fn sweep(&mut self) { - log::trace!("begin GC sweep"); - - // Sweep our bump chunk. 
- let num_filled = self.num_filled_in_bump_chunk(); - unsafe { - *self.alloc.next.get() = self.alloc.end; - } - for slot in self.alloc.chunk.iter().take(num_filled) { - unsafe { - *slot.get() = None; - } - } - debug_assert!( - self.alloc - .chunk - .iter() - .all(|slot| unsafe { (*slot.get()).as_ref().is_none() }), - "after sweeping the bump chunk, all slots should be `None`" - ); - - // If this is the first instance of gc then the initial chunk is empty, - // so we lazily allocate space for fast bump-allocation in the future. - if self.alloc.chunk.is_empty() { - self.alloc.chunk = Self::new_chunk(Self::CHUNK_SIZE); - self.alloc.end = - NonNull::new(unsafe { self.alloc.chunk.as_mut_ptr().add(self.alloc.chunk.len()) }) - .unwrap(); - } - - // Reset our `next` finger to the start of the bump allocation chunk. - unsafe { - let next = self.alloc.chunk.as_mut_ptr(); - debug_assert!(!next.is_null()); - *self.alloc.next.get() = NonNull::new_unchecked(next); - } - - // The current `precise_stack_roots` becomes our new over-appoximated - // set for the next GC cycle. - mem::swap( - &mut self.precise_stack_roots, - &mut self.over_approximated_stack_roots, - ); - - // And finally, the new `precise_stack_roots` should be cleared and - // remain empty until the next GC cycle. - // - // Note that this may run arbitrary code as we run externref - // destructors. Because of our `&mut` borrow above on this table, - // though, we're guaranteed that nothing will touch this table. - self.precise_stack_roots.clear(); - - log::trace!("end GC sweep"); - } - - /// Set whether it is okay to GC or not right now. - /// - /// This is provided as a helper for enabling various debug-only assertions - /// and checking places where the `wasmtime-runtime` user expects there not - /// to be any GCs. 
- #[inline] - pub fn set_gc_okay(&mut self, okay: bool) -> bool { - #[cfg(debug_assertions)] - { - return std::mem::replace(&mut self.gc_okay, okay); - } - #[cfg(not(debug_assertions))] - { - let _ = okay; - return true; - } - } -} +use wasmtime_environ::StackMap; /// Used by the runtime to lookup information about a module given a /// program counter value. @@ -804,269 +22,3 @@ pub trait ModuleInfo { /// Lookup the stack map at a program counter value. fn lookup_stack_map(&self, pc: usize) -> Option<&StackMap>; } - -#[derive(Debug, Default)] -struct DebugOnly { - inner: T, -} - -impl std::ops::Deref for DebugOnly { - type Target = T; - - fn deref(&self) -> &T { - if cfg!(debug_assertions) { - &self.inner - } else { - panic!( - "only deref `DebugOnly` when `cfg(debug_assertions)` or \ - inside a `debug_assert!(..)`" - ) - } - } -} - -impl std::ops::DerefMut for DebugOnly { - fn deref_mut(&mut self) -> &mut T { - if cfg!(debug_assertions) { - &mut self.inner - } else { - panic!( - "only deref `DebugOnly` when `cfg(debug_assertions)` or \ - inside a `debug_assert!(..)`" - ) - } - } -} - -/// Perform garbage collection of `VMExternRef`s. -/// -/// # Unsafety -/// -/// You must have called `VMExternRefActivationsTable::set_stack_canary` for at -/// least the oldest host-->Wasm stack frame transition on this thread's stack -/// (it is idempotent to call it more than once) and keep its return value alive -/// across the duration of that host-->Wasm call. -/// -/// Additionally, you must have registered the stack maps for every Wasm module -/// that has frames on the stack with the given `stack_maps_registry`. -pub unsafe fn gc( - limits: *const VMRuntimeLimits, - module_info_lookup: &dyn ModuleInfoLookup, - externref_activations_table: &mut VMExternRefActivationsTable, -) { - log::debug!("start GC"); - - #[cfg(debug_assertions)] - assert!(externref_activations_table.gc_okay); - - debug_assert!({ - // This set is only non-empty within this function. 
It is built up when - // walking the stack and interpreting stack maps, and then drained back - // into the activations table's bump-allocated space at the - // end. Therefore, it should always be empty upon entering this - // function. - externref_activations_table.precise_stack_roots.is_empty() - }); - - // This function proceeds by: - // - // * walking the stack, - // - // * finding the precise set of roots inside Wasm frames via our stack maps, - // and - // - // * resetting our bump-allocated table's over-approximation to the - // newly-discovered precise set. - - // The `activations_table_set` is used for `debug_assert!`s checking that - // every reference we read out from the stack via stack maps is actually in - // the table. If that weren't true, than either we forgot to insert a - // reference in the table when passing it into Wasm (a bug) or we are - // reading invalid references from the stack (another bug). - let mut activations_table_set: DebugOnly> = Default::default(); - if cfg!(debug_assertions) { - externref_activations_table.elements(|elem| { - activations_table_set.insert(elem.as_raw() as *mut VMExternData); - }); - } - - log::trace!("begin GC trace"); - Backtrace::trace(limits, |frame| { - let pc = frame.pc(); - debug_assert!(pc != 0, "we should always get a valid PC for Wasm frames"); - - let fp = frame.fp(); - debug_assert!( - fp != 0, - "we should always get a valid frame pointer for Wasm frames" - ); - - let module_info = module_info_lookup - .lookup(pc) - .expect("should have module info for Wasm frame"); - - let stack_map = match module_info.lookup_stack_map(pc) { - Some(sm) => sm, - None => { - log::trace!("No stack map for this Wasm frame"); - return std::ops::ControlFlow::Continue(()); - } - }; - log::trace!( - "We have a stack map that maps {} words in this Wasm frame", - stack_map.mapped_words() - ); - - let sp = fp - stack_map.mapped_words() as usize * mem::size_of::(); - - for i in 0..(stack_map.mapped_words() as usize) { - // 
Stack maps have one bit per word in the frame, and the - // zero^th bit is the *lowest* addressed word in the frame, - // i.e. the closest to the SP. So to get the `i`^th word in - // this frame, we add `i * sizeof(word)` to the SP. - let stack_slot = sp + i * mem::size_of::(); - - if !stack_map.get_bit(i) { - log::trace!( - "Stack slot @ {:p} does not contain externrefs", - stack_slot as *const (), - ); - continue; - } - - let stack_slot = stack_slot as *const *mut VMExternData; - let r = std::ptr::read(stack_slot); - log::trace!("Stack slot @ {:p} = {:p}", stack_slot, r); - - debug_assert!( - r.is_null() || activations_table_set.contains(&r), - "every on-stack externref inside a Wasm frame should \ - have an entry in the VMExternRefActivationsTable; \ - {:?} is not in the table", - r - ); - - if let Some(r) = NonNull::new(r) { - VMExternRefActivationsTable::insert_precise_stack_root( - &mut externref_activations_table.precise_stack_roots, - r, - ); - } - } - - std::ops::ControlFlow::Continue(()) - }); - log::trace!("end GC trace"); - - externref_activations_table.sweep(); - - log::debug!("end GC"); -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn extern_ref_is_pointer_sized_and_aligned() { - assert_eq!(mem::size_of::(), mem::size_of::<*mut ()>()); - assert_eq!(mem::align_of::(), mem::align_of::<*mut ()>()); - assert_eq!( - mem::size_of::>(), - mem::size_of::<*mut ()>() - ); - assert_eq!( - mem::align_of::>(), - mem::align_of::<*mut ()>() - ); - } - - #[test] - fn ref_count_is_at_correct_offset() { - let s = "hi"; - let s: &(dyn Any + Send + Sync) = &s as _; - let s: *const (dyn Any + Send + Sync) = s as _; - let s: *mut (dyn Any + Send + Sync) = s as _; - - let extern_data = VMExternData { - ref_count: AtomicUsize::new(0), - value_ptr: NonNull::new(s).unwrap().into(), - }; - - let extern_data_ptr = &extern_data as *const _; - let ref_count_ptr = &extern_data.ref_count as *const _; - - let actual_offset = (ref_count_ptr as usize) - (extern_data_ptr 
as usize); - - let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields { - ptr: 8, - num_imported_functions: 0, - num_imported_tables: 0, - num_imported_memories: 0, - num_imported_globals: 0, - num_defined_tables: 0, - num_defined_memories: 0, - num_owned_memories: 0, - num_defined_globals: 0, - num_escaped_funcs: 0, - }); - assert_eq!( - offsets.vm_extern_data_ref_count(), - actual_offset.try_into().unwrap(), - ); - } - - #[test] - fn table_next_is_at_correct_offset() { - let table = VMExternRefActivationsTable::new(); - - let table_ptr = &table as *const _; - let next_ptr = &table.alloc.next as *const _; - - let actual_offset = (next_ptr as usize) - (table_ptr as usize); - - let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields { - ptr: 8, - num_imported_functions: 0, - num_imported_tables: 0, - num_imported_memories: 0, - num_imported_globals: 0, - num_defined_tables: 0, - num_defined_memories: 0, - num_owned_memories: 0, - num_defined_globals: 0, - num_escaped_funcs: 0, - }); - assert_eq!( - offsets.vm_extern_ref_activation_table_next() as usize, - actual_offset - ); - } - - #[test] - fn table_end_is_at_correct_offset() { - let table = VMExternRefActivationsTable::new(); - - let table_ptr = &table as *const _; - let end_ptr = &table.alloc.end as *const _; - - let actual_offset = (end_ptr as usize) - (table_ptr as usize); - - let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields { - ptr: 8, - num_imported_functions: 0, - num_imported_tables: 0, - num_imported_memories: 0, - num_imported_globals: 0, - num_defined_tables: 0, - num_defined_memories: 0, - num_owned_memories: 0, - num_defined_globals: 0, - num_escaped_funcs: 0, - }); - assert_eq!( - offsets.vm_extern_ref_activation_table_end() as usize, - actual_offset - ); - } -} diff --git a/crates/runtime/src/externref/gc.rs b/crates/runtime/src/externref/gc.rs new file mode 100644 index 000000000000..6574799f386e --- /dev/null +++ 
b/crates/runtime/src/externref/gc.rs @@ -0,0 +1,1058 @@ +//! # `VMExternRef` +//! +//! `VMExternRef` is a reference-counted box for any kind of data that is +//! external and opaque to running Wasm. Sometimes it might hold a Wasmtime +//! thing, other times it might hold something from a Wasmtime embedder and is +//! opaque even to us. It is morally equivalent to `Rc` in Rust, but +//! additionally always fits in a pointer-sized word. `VMExternRef` is +//! non-nullable, but `Option` is a null pointer. +//! +//! The one part of `VMExternRef` that can't ever be opaque to us is the +//! reference count. Even when we don't know what's inside an `VMExternRef`, we +//! need to be able to manipulate its reference count as we add and remove +//! references to it. And we need to do this from compiled Wasm code, so it must +//! be `repr(C)`! +//! +//! ## Memory Layout +//! +//! `VMExternRef` itself is just a pointer to an `VMExternData`, which holds the +//! opaque, boxed value, its reference count, and its vtable pointer. +//! +//! The `VMExternData` struct is *preceded* by the dynamically-sized value boxed +//! up and referenced by one or more `VMExternRef`s: +//! +//! ```text +//! ,-------------------------------------------------------. +//! | | +//! V | +//! +----------------------------+-----------+-----------+ | +//! | dynamically-sized value... | ref_count | value_ptr |---' +//! +----------------------------+-----------+-----------+ +//! | VMExternData | +//! +-----------------------+ +//! ^ +//! +-------------+ | +//! | VMExternRef |-------------------+ +//! +-------------+ | +//! | +//! +-------------+ | +//! | VMExternRef |-------------------+ +//! +-------------+ | +//! | +//! ... === +//! | +//! +-------------+ | +//! | VMExternRef |-------------------' +//! +-------------+ +//! ``` +//! +//! The `value_ptr` member always points backwards to the start of the +//! dynamically-sized value (which is also the start of the heap allocation for +//! 
this value-and-`VMExternData` pair). Because it is a `dyn` pointer, it is +//! fat, and also points to the value's `Any` vtable. +//! +//! The boxed value and the `VMExternRef` footer are held a single heap +//! allocation. The layout described above is used to make satisfying the +//! value's alignment easy: we just need to ensure that the heap allocation used +//! to hold everything satisfies its alignment. It also ensures that we don't +//! need a ton of excess padding between the `VMExternData` and the value for +//! values with large alignment. +//! +//! ## Reference Counting, Wasm Functions, and Garbage Collection +//! +//! For host VM code, we use plain reference counting, where cloning increments +//! the reference count, and dropping decrements it. We can avoid many of the +//! on-stack increment/decrement operations that typically plague the +//! performance of reference counting via Rust's ownership and borrowing system. +//! Moving a `VMExternRef` avoids mutating its reference count, and borrowing it +//! either avoids the reference count increment or delays it until if/when the +//! `VMExternRef` is cloned. +//! +//! When passing a `VMExternRef` into compiled Wasm code, we don't want to do +//! reference count mutations for every compiled `local.{get,set}`, nor for +//! every function call. Therefore, we use a variation of **deferred reference +//! counting**, where we only mutate reference counts when storing +//! `VMExternRef`s somewhere that outlives the activation: into a global or +//! table. Simultaneously, we over-approximate the set of `VMExternRef`s that +//! are inside Wasm function activations. Periodically, we walk the stack at GC +//! safe points, and use stack map information to precisely identify the set of +//! `VMExternRef`s inside Wasm activations. Then we take the difference between +//! this precise set and our over-approximation, and decrement the reference +//! 
count for each of the `VMExternRef`s that are in our over-approximation but +//! not in the precise set. Finally, the over-approximation is replaced with the +//! precise set. +//! +//! The `VMExternRefActivationsTable` implements the over-approximized set of +//! `VMExternRef`s referenced by Wasm activations. Calling a Wasm function and +//! passing it a `VMExternRef` moves the `VMExternRef` into the table, and the +//! compiled Wasm function logically "borrows" the `VMExternRef` from the +//! table. Similarly, `global.get` and `table.get` operations clone the gotten +//! `VMExternRef` into the `VMExternRefActivationsTable` and then "borrow" the +//! reference out of the table. +//! +//! When a `VMExternRef` is returned to host code from a Wasm function, the host +//! increments the reference count (because the reference is logically +//! "borrowed" from the `VMExternRefActivationsTable` and the reference count +//! from the table will be dropped at the next GC). +//! +//! For more general information on deferred reference counting, see *An +//! Examination of Deferred Reference Counting and Cycle Detection* by Quinane: +//! + +use crate::{Backtrace, ModuleInfoLookup, SendSyncPtr, VMRuntimeLimits}; +use std::alloc::Layout; +use std::any::Any; +use std::cell::UnsafeCell; +use std::cmp; +use std::collections::HashSet; +use std::hash::{Hash, Hasher}; +use std::mem; +use std::ops::Deref; +use std::ptr::{self, NonNull}; +use std::sync::atomic::{self, AtomicUsize, Ordering}; + +/// An external reference to some opaque data. +/// +/// `VMExternRef`s dereference to their underlying opaque data as `dyn Any`. +/// +/// Unlike the `externref` in the Wasm spec, `VMExternRef`s are non-nullable, +/// and always point to a valid value. You may use `Option` to +/// represent nullable references, and `Option` is guaranteed to +/// have the same size and alignment as a raw pointer, with `None` represented +/// with the null pointer. 
+/// +/// `VMExternRef`s are reference counted, so cloning is a cheap, shallow +/// operation. It also means they are inherently shared, so you may not get a +/// mutable, exclusive reference to their inner contents, only a shared, +/// immutable reference. You may use interior mutability with `RefCell` or +/// `Mutex` to work around this restriction, if necessary. +/// +/// `VMExternRef`s have pointer-equality semantics, not structural-equality +/// semantics. Given two `VMExternRef`s `a` and `b`, `a == b` only if `a` and +/// `b` point to the same allocation. `a` and `b` are considered not equal, even +/// if `a` and `b` are two different identical copies of the same data, if they +/// are in two different allocations. The hashing and ordering implementations +/// also only operate on the pointer. +/// +/// # Example +/// +/// ``` +/// # fn foo() -> Result<(), Box> { +/// use std::cell::RefCell; +/// use wasmtime_runtime::VMExternRef; +/// +/// // Open a file. Wasm doesn't know about files, but we can let Wasm instances +/// // work with files via opaque `externref` handles. +/// let file = std::fs::File::create("some/file/path")?; +/// +/// // Wrap the file up as an `VMExternRef` that can be passed to Wasm. +/// let extern_ref_to_file = VMExternRef::new(file); +/// +/// // `VMExternRef`s dereference to `dyn Any`, so you can use `Any` methods to +/// // perform runtime type checks and downcasts. 
+/// +/// assert!(extern_ref_to_file.is::()); +/// assert!(!extern_ref_to_file.is::()); +/// +/// if let Some(mut file) = extern_ref_to_file.downcast_ref::() { +/// use std::io::Write; +/// writeln!(&mut file, "Hello, `VMExternRef`!")?; +/// } +/// # Ok(()) +/// # } +/// ``` +#[derive(Debug)] +#[repr(transparent)] +pub struct VMExternRef(SendSyncPtr); + +impl std::fmt::Pointer for VMExternRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Pointer::fmt(&self.0, f) + } +} + +#[repr(C)] +pub(crate) struct VMExternData { + // Implicit, dynamically-sized member that always preceded an + // `VMExternData`. + // + // value: [u8], + // + /// The reference count for this `VMExternData` and value. When it reaches + /// zero, we can safely destroy the value and free this heap + /// allocation. This is an `UnsafeCell`, rather than plain `Cell`, because + /// it can be modified by compiled Wasm code. + /// + /// Note: this field's offset must be kept in sync with + /// `wasmtime_environ::VMOffsets::vm_extern_data_ref_count()` which is + /// currently always zero. + ref_count: AtomicUsize, + + /// Always points to the implicit, dynamically-sized `value` member that + /// precedes this `VMExternData`. + value_ptr: SendSyncPtr, +} + +impl Clone for VMExternRef { + #[inline] + fn clone(&self) -> VMExternRef { + self.extern_data().increment_ref_count(); + VMExternRef(self.0) + } +} + +impl Drop for VMExternRef { + #[inline] + fn drop(&mut self) { + let data = self.extern_data(); + + // Note that the memory orderings here also match the standard library + // itself. Documentation is more available in the implementation of + // `Arc`, but the general idea is that this is a special pattern allowed + // by the C standard with atomic orderings where we "release" for all + // the decrements and only the final decrementer performs an acquire + // fence. 
This properly ensures that the final thread, which actually + // destroys the data, sees all the updates from all other threads. + if data.ref_count.fetch_sub(1, Ordering::Release) != 1 { + return; + } + atomic::fence(Ordering::Acquire); + + unsafe { + VMExternData::drop_and_dealloc(self.0); + } + } +} + +impl VMExternData { + /// Get the `Layout` for a value with the given size and alignment, and the + /// offset within that layout where the `VMExternData` footer resides. + /// + /// This doesn't take a `value: &T` because `VMExternRef::new_with` hasn't + /// constructed a `T` value yet, and it isn't generic over `T` because + /// `VMExternData::drop_and_dealloc` doesn't know what `T` to use, and has + /// to use `std::mem::{size,align}_of_val` instead. + unsafe fn layout_for(value_size: usize, value_align: usize) -> (Layout, usize) { + let extern_data_size = mem::size_of::(); + let extern_data_align = mem::align_of::(); + + let value_and_padding_size = round_up_to_align(value_size, extern_data_align).unwrap(); + + let alloc_align = std::cmp::max(value_align, extern_data_align); + let alloc_size = value_and_padding_size + extern_data_size; + + debug_assert!( + Layout::from_size_align(alloc_size, alloc_align).is_ok(), + "should create a `Layout` for size={} and align={} okay", + alloc_size, + alloc_align, + ); + ( + Layout::from_size_align_unchecked(alloc_size, alloc_align), + value_and_padding_size, + ) + } + + /// Drop the inner value and then free this `VMExternData` heap allocation. + pub(crate) unsafe fn drop_and_dealloc(mut data: SendSyncPtr) { + log::trace!("Dropping externref data @ {:p}", data); + + // Note: we introduce a block scope so that we drop the live + // reference to the data before we free the heap allocation it + // resides within after this block. 
+ let (alloc_ptr, layout) = { + let data = data.as_mut(); + debug_assert_eq!(data.ref_count.load(Ordering::SeqCst), 0); + + // Same thing, but for the dropping the reference to `value` before + // we drop it itself. + let (layout, _) = { + let value = data.value_ptr.as_ref(); + Self::layout_for(mem::size_of_val(value), mem::align_of_val(value)) + }; + + ptr::drop_in_place(data.value_ptr.as_ptr()); + let alloc_ptr = data.value_ptr.as_ptr().cast::(); + + (alloc_ptr, layout) + }; + + ptr::drop_in_place(data.as_ptr()); + std::alloc::dealloc(alloc_ptr, layout); + } + + #[inline] + fn increment_ref_count(&self) { + // This is only using during cloning operations, and like the standard + // library we use `Relaxed` here. The rationale is better documented in + // libstd's implementation of `Arc`, but the general gist is that we're + // creating a new pointer for our own thread, so there's no need to have + // any synchronization with orderings. The synchronization with other + // threads with respect to orderings happens when the pointer is sent to + // another thread. + self.ref_count.fetch_add(1, Ordering::Relaxed); + } +} + +#[inline] +fn round_up_to_align(n: usize, align: usize) -> Option { + debug_assert!(align.is_power_of_two()); + let align_minus_one = align - 1; + Some(n.checked_add(align_minus_one)? & !align_minus_one) +} + +impl VMExternRef { + /// Wrap the given value inside an `VMExternRef`. + pub fn new(value: T) -> VMExternRef + where + T: 'static + Any + Send + Sync, + { + VMExternRef::new_with(|| value) + } + + /// Construct a new `VMExternRef` in place by invoking `make_value`. 
+ pub fn new_with(make_value: impl FnOnce() -> T) -> VMExternRef + where + T: 'static + Any + Send + Sync, + { + unsafe { + let (layout, footer_offset) = + VMExternData::layout_for(mem::size_of::(), mem::align_of::()); + + let alloc_ptr = std::alloc::alloc(layout); + let alloc_ptr = NonNull::new(alloc_ptr).unwrap_or_else(|| { + std::alloc::handle_alloc_error(layout); + }); + + let value_ptr = alloc_ptr.cast::(); + ptr::write(value_ptr.as_ptr(), make_value()); + + let extern_data_ptr = + alloc_ptr.cast::().as_ptr().add(footer_offset) as *mut VMExternData; + + ptr::write( + extern_data_ptr, + VMExternData { + ref_count: AtomicUsize::new(1), + // Cast from `*mut T` to `*mut dyn Any` here. + value_ptr: SendSyncPtr::new(NonNull::new_unchecked(value_ptr.as_ptr())), + }, + ); + + log::trace!("New externref data @ {:p}", extern_data_ptr); + VMExternRef(NonNull::new_unchecked(extern_data_ptr).into()) + } + } + + /// Turn this `VMExternRef` into a raw, untyped pointer. + /// + /// Unlike `into_raw`, this does not consume and forget `self`. It is *not* + /// safe to use `from_raw` on pointers returned from this method; only use + /// `clone_from_raw`! + /// + /// Nor does this method increment the reference count. You must ensure + /// that `self` (or some other clone of `self`) stays alive until + /// `clone_from_raw` is called. + #[inline] + pub fn as_raw(&self) -> *mut u8 { + let ptr = self.0.as_ptr().cast::(); + ptr + } + + /// Consume this `VMExternRef` into a raw, untyped pointer. + /// + /// # Safety + /// + /// This method forgets self, so it is possible to create a leak of the + /// underlying reference counted data if not used carefully. + /// + /// Use `from_raw` to recreate the `VMExternRef`. + pub unsafe fn into_raw(self) -> *mut u8 { + let ptr = self.0.as_ptr().cast::(); + std::mem::forget(self); + ptr + } + + /// Recreate a `VMExternRef` from a pointer returned from a previous call to + /// `as_raw`. 
+ /// + /// # Safety + /// + /// Unlike `clone_from_raw`, this does not increment the reference count of the + /// underlying data. It is not safe to continue to use the pointer passed to this + /// function. + #[inline] + pub unsafe fn from_raw(ptr: *mut u8) -> Option { + Some(VMExternRef(NonNull::new(ptr)?.cast().into())) + } + + /// Recreate a `VMExternRef` from a pointer returned from a previous call to + /// `as_raw`. + /// + /// # Safety + /// + /// Wildly unsafe to use with anything other than the result of a previous + /// `as_raw` call! + /// + /// Additionally, it is your responsibility to ensure that this raw + /// `VMExternRef`'s reference count has not dropped to zero. Failure to do + /// so will result in use after free! + #[inline] + pub unsafe fn clone_from_raw(ptr: *mut u8) -> Option { + let x = VMExternRef(NonNull::new(ptr)?.cast::().into()); + x.extern_data().increment_ref_count(); + Some(x) + } + + /// Get the strong reference count for this `VMExternRef`. + /// + /// Note that this loads with a `SeqCst` ordering to synchronize with other + /// threads. + pub fn strong_count(&self) -> usize { + self.extern_data().ref_count.load(Ordering::SeqCst) + } + + #[inline] + fn extern_data(&self) -> &VMExternData { + unsafe { self.0.as_ref() } + } +} + +/// Methods that would normally be trait implementations, but aren't to avoid +/// potential footguns around `VMExternRef`'s pointer-equality semantics. +/// +/// Note that none of these methods are on `&self`, they all require a +/// fully-qualified `VMExternRef::foo(my_ref)` invocation. +impl VMExternRef { + /// Check whether two `VMExternRef`s point to the same inner allocation. + /// + /// Note that this uses pointer-equality semantics, not structural-equality + /// semantics, and so only pointers are compared, and doesn't use any `Eq` + /// or `PartialEq` implementation of the pointed-to values. 
+ #[inline] + pub fn eq(a: &Self, b: &Self) -> bool { + ptr::eq(a.0.as_ptr(), b.0.as_ptr()) + } + + /// Hash a given `VMExternRef`. + /// + /// Note that this just hashes the pointer to the inner value, it does *not* + /// use the inner value's `Hash` implementation (if any). + #[inline] + pub fn hash(externref: &Self, hasher: &mut H) + where + H: Hasher, + { + ptr::hash(externref.0.as_ptr(), hasher); + } + + /// Compare two `VMExternRef`s. + /// + /// Note that this uses pointer-equality semantics, not structural-equality + /// semantics, and so only pointers are compared, and doesn't use any `Cmp` + /// or `PartialCmp` implementation of the pointed-to values. + #[inline] + pub fn cmp(a: &Self, b: &Self) -> cmp::Ordering { + let a = a.0.as_ptr() as usize; + let b = b.0.as_ptr() as usize; + a.cmp(&b) + } +} + +impl Deref for VMExternRef { + type Target = dyn Any; + + fn deref(&self) -> &dyn Any { + unsafe { self.extern_data().value_ptr.as_ref() } + } +} + +/// A wrapper around a `VMExternRef` that implements `Eq` and `Hash` with +/// pointer semantics. +/// +/// We use this so that we can morally put `VMExternRef`s inside of `HashSet`s +/// even though they don't implement `Eq` and `Hash` to avoid foot guns. +#[derive(Clone, Debug)] +struct VMExternRefWithTraits(VMExternRef); + +impl Hash for VMExternRefWithTraits { + fn hash(&self, hasher: &mut H) + where + H: Hasher, + { + VMExternRef::hash(&self.0, hasher) + } +} + +impl PartialEq for VMExternRefWithTraits { + fn eq(&self, other: &Self) -> bool { + VMExternRef::eq(&self.0, &other.0) + } +} + +impl Eq for VMExternRefWithTraits {} + +type TableElem = UnsafeCell>; + +/// A table that over-approximizes the set of `VMExternRef`s that any Wasm +/// activation on this thread is currently using. +/// +/// Under the covers, this is a simple bump allocator that allows duplicate +/// entries. Deduplication happens at GC time. +#[repr(C)] // `alloc` must be the first member, it's accessed from JIT code. 
+pub struct VMExternRefActivationsTable { + /// Structures used to perform fast bump allocation of storage of externref + /// values. + /// + /// This is the only member of this structure accessed from JIT code. + alloc: VMExternRefTableAlloc, + + /// When unioned with `chunk`, this is an over-approximation of the GC roots + /// on the stack, inside Wasm frames. + /// + /// This is used by slow-path insertion, and when a GC cycle finishes, is + /// re-initialized to the just-discovered precise set of stack roots (which + /// immediately becomes an over-approximation again as soon as Wasm runs and + /// potentially drops references). + over_approximated_stack_roots: HashSet, + + /// The precise set of on-stack, inside-Wasm GC roots that we discover via + /// walking the stack and interpreting stack maps. + /// + /// This is *only* used inside the `gc` function, and is empty otherwise. It + /// is just part of this struct so that we can reuse the allocation, rather + /// than create a new hash set every GC. + precise_stack_roots: HashSet, + + /// A debug-only field for asserting that we are in a region of code where + /// GC is okay to preform. + #[cfg(debug_assertions)] + gc_okay: bool, +} + +#[repr(C)] // This is accessed from JIT code. +struct VMExternRefTableAlloc { + /// Bump-allocation finger within the `chunk`. + /// + /// NB: this is an `UnsafeCell` because it is written to by compiled Wasm + /// code. + next: UnsafeCell>, + + /// Pointer to just after the `chunk`. + /// + /// This is *not* within the current chunk and therefore is not a valid + /// place to insert a reference! + end: NonNull, + + /// Bump allocation chunk that stores fast-path insertions. + /// + /// This is not accessed from JIT code. + chunk: Box<[TableElem]>, +} + +// This gets around the usage of `UnsafeCell` throughout the internals of this +// allocator, but the storage should all be Send/Sync and synchronization isn't +// necessary since operations require `&mut self`. 
+unsafe impl Send for VMExternRefTableAlloc {} +unsafe impl Sync for VMExternRefTableAlloc {} + +fn _assert_send_sync() { + fn _assert() {} + _assert::(); + _assert::(); +} + +impl VMExternRefActivationsTable { + const CHUNK_SIZE: usize = 4096 / mem::size_of::(); + + /// Create a new `VMExternRefActivationsTable`. + pub fn new() -> Self { + // Start with an empty chunk in case this activations table isn't used. + // This means that there's no space in the bump-allocation area which + // will force any path trying to use this to the slow gc path. The first + // time this happens, though, the slow gc path will allocate a new chunk + // for actual fast-bumping. + let mut chunk: Box<[TableElem]> = Box::new([]); + let next = chunk.as_mut_ptr(); + let end = unsafe { next.add(chunk.len()) }; + + VMExternRefActivationsTable { + alloc: VMExternRefTableAlloc { + next: UnsafeCell::new(NonNull::new(next).unwrap()), + end: NonNull::new(end).unwrap(), + chunk, + }, + over_approximated_stack_roots: HashSet::new(), + precise_stack_roots: HashSet::new(), + #[cfg(debug_assertions)] + gc_okay: true, + } + } + + fn new_chunk(size: usize) -> Box<[UnsafeCell>]> { + assert!(size >= Self::CHUNK_SIZE); + (0..size).map(|_| UnsafeCell::new(None)).collect() + } + + /// Get the available capacity in the bump allocation chunk. + #[inline] + pub fn bump_capacity_remaining(&self) -> usize { + let end = self.alloc.end.as_ptr() as usize; + let next = unsafe { *self.alloc.next.get() }; + end - next.as_ptr() as usize + } + + /// Try and insert a `VMExternRef` into this table. + /// + /// This is a fast path that only succeeds when the bump chunk has the + /// capacity for the requested insertion. + /// + /// If the insertion fails, then the `VMExternRef` is given back. Callers + /// may attempt a GC to free up space and try again, or may call + /// `insert_slow_path` to infallibly insert the reference (potentially + /// allocating additional space in the table to hold it). 
+ #[inline] + pub fn try_insert(&mut self, externref: VMExternRef) -> Result<(), VMExternRef> { + unsafe { + let next = *self.alloc.next.get(); + if next == self.alloc.end { + return Err(externref); + } + + debug_assert!( + (*next.as_ref().get()).is_none(), + "slots >= the `next` bump finger are always `None`" + ); + ptr::write(next.as_ptr(), UnsafeCell::new(Some(externref))); + + let next = NonNull::new_unchecked(next.as_ptr().add(1)); + debug_assert!(next <= self.alloc.end); + *self.alloc.next.get() = next; + + Ok(()) + } + } + + /// Insert a reference into the table, falling back on a GC to clear up + /// space if the table is already full. + /// + /// # Unsafety + /// + /// The same as `gc`. + #[inline] + pub unsafe fn insert_with_gc( + &mut self, + limits: *const VMRuntimeLimits, + externref: VMExternRef, + module_info_lookup: &dyn ModuleInfoLookup, + ) { + #[cfg(debug_assertions)] + assert!(self.gc_okay); + + if let Err(externref) = self.try_insert(externref) { + self.gc_and_insert_slow(limits, externref, module_info_lookup); + } + } + + #[inline(never)] + unsafe fn gc_and_insert_slow( + &mut self, + limits: *const VMRuntimeLimits, + externref: VMExternRef, + module_info_lookup: &dyn ModuleInfoLookup, + ) { + gc(limits, module_info_lookup, self); + + // Might as well insert right into the hash set, rather than the bump + // chunk, since we are already on a slow path and we get de-duplication + // this way. + self.over_approximated_stack_roots + .insert(VMExternRefWithTraits(externref)); + } + + /// Insert a reference into the table, without ever performing GC. 
+ #[inline] + pub fn insert_without_gc(&mut self, externref: VMExternRef) { + if let Err(externref) = self.try_insert(externref) { + self.insert_slow_without_gc(externref); + } + } + + #[inline(never)] + fn insert_slow_without_gc(&mut self, externref: VMExternRef) { + self.over_approximated_stack_roots + .insert(VMExternRefWithTraits(externref)); + } + + fn num_filled_in_bump_chunk(&self) -> usize { + let next = unsafe { *self.alloc.next.get() }; + let bytes_unused = (self.alloc.end.as_ptr() as usize) - (next.as_ptr() as usize); + let slots_unused = bytes_unused / mem::size_of::(); + self.alloc.chunk.len().saturating_sub(slots_unused) + } + + fn elements(&self, mut f: impl FnMut(&VMExternRef)) { + for elem in self.over_approximated_stack_roots.iter() { + f(&elem.0); + } + + // The bump chunk is not all the way full, so we only iterate over its + // filled-in slots. + let num_filled = self.num_filled_in_bump_chunk(); + for slot in self.alloc.chunk.iter().take(num_filled) { + if let Some(elem) = unsafe { &*slot.get() } { + f(elem); + } + } + } + + fn insert_precise_stack_root( + precise_stack_roots: &mut HashSet, + root: NonNull, + ) { + let root = unsafe { VMExternRef::clone_from_raw(root.as_ptr().cast()).unwrap() }; + log::trace!("Found externref on stack: {:p}", root); + precise_stack_roots.insert(VMExternRefWithTraits(root)); + } + + /// Sweep the bump allocation table after we've discovered our precise stack + /// roots. + fn sweep(&mut self) { + log::trace!("begin GC sweep"); + + // Sweep our bump chunk. 
+ let num_filled = self.num_filled_in_bump_chunk(); + unsafe { + *self.alloc.next.get() = self.alloc.end; + } + for slot in self.alloc.chunk.iter().take(num_filled) { + unsafe { + *slot.get() = None; + } + } + debug_assert!( + self.alloc + .chunk + .iter() + .all(|slot| unsafe { (*slot.get()).as_ref().is_none() }), + "after sweeping the bump chunk, all slots should be `None`" + ); + + // If this is the first instance of gc then the initial chunk is empty, + // so we lazily allocate space for fast bump-allocation in the future. + if self.alloc.chunk.is_empty() { + self.alloc.chunk = Self::new_chunk(Self::CHUNK_SIZE); + self.alloc.end = + NonNull::new(unsafe { self.alloc.chunk.as_mut_ptr().add(self.alloc.chunk.len()) }) + .unwrap(); + } + + // Reset our `next` finger to the start of the bump allocation chunk. + unsafe { + let next = self.alloc.chunk.as_mut_ptr(); + debug_assert!(!next.is_null()); + *self.alloc.next.get() = NonNull::new_unchecked(next); + } + + // The current `precise_stack_roots` becomes our new over-appoximated + // set for the next GC cycle. + mem::swap( + &mut self.precise_stack_roots, + &mut self.over_approximated_stack_roots, + ); + + // And finally, the new `precise_stack_roots` should be cleared and + // remain empty until the next GC cycle. + // + // Note that this may run arbitrary code as we run externref + // destructors. Because of our `&mut` borrow above on this table, + // though, we're guaranteed that nothing will touch this table. + self.precise_stack_roots.clear(); + + log::trace!("end GC sweep"); + } + + /// Set whether it is okay to GC or not right now. + /// + /// This is provided as a helper for enabling various debug-only assertions + /// and checking places where the `wasmtime-runtime` user expects there not + /// to be any GCs. 
+ #[inline] + pub fn set_gc_okay(&mut self, okay: bool) -> bool { + #[cfg(debug_assertions)] + { + return std::mem::replace(&mut self.gc_okay, okay); + } + #[cfg(not(debug_assertions))] + { + let _ = okay; + return true; + } + } +} + +#[derive(Debug, Default)] +struct DebugOnly { + inner: T, +} + +impl std::ops::Deref for DebugOnly { + type Target = T; + + fn deref(&self) -> &T { + if cfg!(debug_assertions) { + &self.inner + } else { + panic!( + "only deref `DebugOnly` when `cfg(debug_assertions)` or \ + inside a `debug_assert!(..)`" + ) + } + } +} + +impl std::ops::DerefMut for DebugOnly { + fn deref_mut(&mut self) -> &mut T { + if cfg!(debug_assertions) { + &mut self.inner + } else { + panic!( + "only deref `DebugOnly` when `cfg(debug_assertions)` or \ + inside a `debug_assert!(..)`" + ) + } + } +} + +/// Perform garbage collection of `VMExternRef`s. +/// +/// # Unsafety +/// +/// You must have called `VMExternRefActivationsTable::set_stack_canary` for at +/// least the oldest host-->Wasm stack frame transition on this thread's stack +/// (it is idempotent to call it more than once) and keep its return value alive +/// across the duration of that host-->Wasm call. +/// +/// Additionally, you must have registered the stack maps for every Wasm module +/// that has frames on the stack with the given `stack_maps_registry`. +pub unsafe fn gc( + limits: *const VMRuntimeLimits, + module_info_lookup: &dyn ModuleInfoLookup, + externref_activations_table: &mut VMExternRefActivationsTable, +) { + log::debug!("start GC"); + + #[cfg(debug_assertions)] + assert!(externref_activations_table.gc_okay); + + debug_assert!({ + // This set is only non-empty within this function. It is built up when + // walking the stack and interpreting stack maps, and then drained back + // into the activations table's bump-allocated space at the + // end. Therefore, it should always be empty upon entering this + // function. 
+ externref_activations_table.precise_stack_roots.is_empty() + }); + + // This function proceeds by: + // + // * walking the stack, + // + // * finding the precise set of roots inside Wasm frames via our stack maps, + // and + // + // * resetting our bump-allocated table's over-approximation to the + // newly-discovered precise set. + + // The `activations_table_set` is used for `debug_assert!`s checking that + // every reference we read out from the stack via stack maps is actually in + // the table. If that weren't true, than either we forgot to insert a + // reference in the table when passing it into Wasm (a bug) or we are + // reading invalid references from the stack (another bug). + let mut activations_table_set: DebugOnly> = Default::default(); + if cfg!(debug_assertions) { + externref_activations_table.elements(|elem| { + activations_table_set.insert(elem.as_raw() as *mut VMExternData); + }); + } + + log::trace!("begin GC trace"); + Backtrace::trace(limits, |frame| { + let pc = frame.pc(); + debug_assert!(pc != 0, "we should always get a valid PC for Wasm frames"); + + let fp = frame.fp(); + debug_assert!( + fp != 0, + "we should always get a valid frame pointer for Wasm frames" + ); + + let module_info = module_info_lookup + .lookup(pc) + .expect("should have module info for Wasm frame"); + + let stack_map = match module_info.lookup_stack_map(pc) { + Some(sm) => sm, + None => { + log::trace!("No stack map for this Wasm frame"); + return std::ops::ControlFlow::Continue(()); + } + }; + log::trace!( + "We have a stack map that maps {} words in this Wasm frame", + stack_map.mapped_words() + ); + + let sp = fp - stack_map.mapped_words() as usize * mem::size_of::(); + + for i in 0..(stack_map.mapped_words() as usize) { + // Stack maps have one bit per word in the frame, and the + // zero^th bit is the *lowest* addressed word in the frame, + // i.e. the closest to the SP. So to get the `i`^th word in + // this frame, we add `i * sizeof(word)` to the SP. 
+ let stack_slot = sp + i * mem::size_of::(); + + if !stack_map.get_bit(i) { + log::trace!( + "Stack slot @ {:p} does not contain externrefs", + stack_slot as *const (), + ); + continue; + } + + let stack_slot = stack_slot as *const *mut VMExternData; + let r = std::ptr::read(stack_slot); + log::trace!("Stack slot @ {:p} = {:p}", stack_slot, r); + + debug_assert!( + r.is_null() || activations_table_set.contains(&r), + "every on-stack externref inside a Wasm frame should \ + have an entry in the VMExternRefActivationsTable; \ + {:?} is not in the table", + r + ); + + if let Some(r) = NonNull::new(r) { + VMExternRefActivationsTable::insert_precise_stack_root( + &mut externref_activations_table.precise_stack_roots, + r, + ); + } + } + + std::ops::ControlFlow::Continue(()) + }); + log::trace!("end GC trace"); + + externref_activations_table.sweep(); + + log::debug!("end GC"); +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn extern_ref_is_pointer_sized_and_aligned() { + assert_eq!(mem::size_of::(), mem::size_of::<*mut ()>()); + assert_eq!(mem::align_of::(), mem::align_of::<*mut ()>()); + assert_eq!( + mem::size_of::>(), + mem::size_of::<*mut ()>() + ); + assert_eq!( + mem::align_of::>(), + mem::align_of::<*mut ()>() + ); + } + + #[test] + fn ref_count_is_at_correct_offset() { + let s = "hi"; + let s: &(dyn Any + Send + Sync) = &s as _; + let s: *const (dyn Any + Send + Sync) = s as _; + let s: *mut (dyn Any + Send + Sync) = s as _; + + let extern_data = VMExternData { + ref_count: AtomicUsize::new(0), + value_ptr: NonNull::new(s).unwrap().into(), + }; + + let extern_data_ptr = &extern_data as *const _; + let ref_count_ptr = &extern_data.ref_count as *const _; + + let actual_offset = (ref_count_ptr as usize) - (extern_data_ptr as usize); + + let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields { + ptr: 8, + num_imported_functions: 0, + num_imported_tables: 0, + num_imported_memories: 0, + num_imported_globals: 0, + 
num_defined_tables: 0, + num_defined_memories: 0, + num_owned_memories: 0, + num_defined_globals: 0, + num_escaped_funcs: 0, + }); + assert_eq!( + offsets.vm_extern_data_ref_count(), + actual_offset.try_into().unwrap(), + ); + } + + #[test] + fn table_next_is_at_correct_offset() { + let table = VMExternRefActivationsTable::new(); + + let table_ptr = &table as *const _; + let next_ptr = &table.alloc.next as *const _; + + let actual_offset = (next_ptr as usize) - (table_ptr as usize); + + let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields { + ptr: 8, + num_imported_functions: 0, + num_imported_tables: 0, + num_imported_memories: 0, + num_imported_globals: 0, + num_defined_tables: 0, + num_defined_memories: 0, + num_owned_memories: 0, + num_defined_globals: 0, + num_escaped_funcs: 0, + }); + assert_eq!( + offsets.vm_extern_ref_activation_table_next() as usize, + actual_offset + ); + } + + #[test] + fn table_end_is_at_correct_offset() { + let table = VMExternRefActivationsTable::new(); + + let table_ptr = &table as *const _; + let end_ptr = &table.alloc.end as *const _; + + let actual_offset = (end_ptr as usize) - (table_ptr as usize); + + let offsets = wasmtime_environ::VMOffsets::from(wasmtime_environ::VMOffsetsFields { + ptr: 8, + num_imported_functions: 0, + num_imported_tables: 0, + num_imported_memories: 0, + num_imported_globals: 0, + num_defined_tables: 0, + num_defined_memories: 0, + num_owned_memories: 0, + num_defined_globals: 0, + num_escaped_funcs: 0, + }); + assert_eq!( + offsets.vm_extern_ref_activation_table_end() as usize, + actual_offset + ); + } +} diff --git a/crates/runtime/src/externref/no_gc.rs b/crates/runtime/src/externref/no_gc.rs new file mode 100644 index 000000000000..970b1cd551a7 --- /dev/null +++ b/crates/runtime/src/externref/no_gc.rs @@ -0,0 +1,114 @@ +//! The dummy `VMExternRef` for when the `gc` cargo feature is disabled. +//! +//! 
To reduce `#[cfg(...)]`s, this provides all the same methods as the real +//! `VMExternRef` except for constructors. + +#![allow(missing_docs)] + +use crate::{ModuleInfoLookup, VMRuntimeLimits}; +use std::any::Any; +use std::cmp; +use std::hash::Hasher; +use std::ops::Deref; + +#[derive(Clone)] +enum Uninhabited {} + +#[derive(Clone)] +pub struct VMExternRef(Uninhabited); + +impl std::fmt::Pointer for VMExternRef { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.0 {} + } +} + +impl Drop for VMExternRef { + fn drop(&mut self) { + match self.0 {} + } +} + +impl VMExternRef { + pub fn as_raw(&self) -> *mut u8 { + match self.0 {} + } + + pub unsafe fn into_raw(self) -> *mut u8 { + match self.0 {} + } + + pub unsafe fn from_raw(ptr: *mut u8) -> Option { + assert!(ptr.is_null()); + None + } + + pub unsafe fn clone_from_raw(ptr: *mut u8) -> Option { + assert!(ptr.is_null()); + None + } + + pub fn strong_count(&self) -> usize { + match self.0 {} + } + + pub fn eq(a: &Self, _b: &Self) -> bool { + match a.0 {} + } + + pub fn hash(externref: &Self, _hasher: &mut H) + where + H: Hasher, + { + match externref.0 {} + } + + pub fn cmp(a: &Self, _b: &Self) -> cmp::Ordering { + match a.0 {} + } +} + +impl Deref for VMExternRef { + type Target = dyn Any; + + fn deref(&self) -> &dyn Any { + match self.0 {} + } +} + +pub struct VMExternRefActivationsTable(Uninhabited); + +impl VMExternRefActivationsTable { + pub fn bump_capacity_remaining(&self) -> usize { + match self.0 {} + } + + pub fn try_insert(&mut self, _externref: VMExternRef) -> Result<(), VMExternRef> { + match self.0 {} + } + + pub unsafe fn insert_with_gc( + &mut self, + _limits: *const VMRuntimeLimits, + _externref: VMExternRef, + _module_info_lookup: &dyn ModuleInfoLookup, + ) { + match self.0 {} + } + + pub fn insert_without_gc(&mut self, _externref: VMExternRef) { + match self.0 {} + } + + pub fn set_gc_okay(&mut self, _okay: bool) -> bool { + match self.0 {} + } +} + +pub unsafe fn 
gc( + _limits: *const VMRuntimeLimits, + _module_info_lookup: &dyn ModuleInfoLookup, + externref_activations_table: &mut VMExternRefActivationsTable, +) { + match externref_activations_table.0 {} +} diff --git a/crates/runtime/src/instance.rs b/crates/runtime/src/instance.rs index 1fe54bd717b5..be10cf10ed04 100644 --- a/crates/runtime/src/instance.rs +++ b/crates/runtime/src/instance.rs @@ -3,7 +3,6 @@ //! `InstanceHandle` is a reference-counting handle for an `Instance`. use crate::export::Export; -#[cfg(feature = "gc")] use crate::externref::VMExternRefActivationsTable; use crate::memory::{Memory, RuntimeMemoryCreator}; use crate::table::{Table, TableElement, TableElementType}; @@ -439,7 +438,6 @@ impl Instance { } /// Return a pointer to the `VMExternRefActivationsTable`. - #[cfg(feature = "gc")] pub fn externref_activations_table(&mut self) -> *mut *mut VMExternRefActivationsTable { unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_externref_activations_table()) } } @@ -472,6 +470,10 @@ impl Instance { { *self.externref_activations_table() = (*store).externref_activations_table().0; } + #[cfg(not(feature = "gc"))] + { + *self.externref_activations_table() = ptr::null_mut(); + } } else { assert_eq!( mem::size_of::<*mut dyn Store>(), @@ -483,10 +485,7 @@ impl Instance { *self.runtime_limits() = ptr::null_mut(); *self.epoch_ptr() = ptr::null_mut(); - #[cfg(feature = "gc")] - { - *self.externref_activations_table() = ptr::null_mut(); - } + *self.externref_activations_table() = ptr::null_mut(); } } @@ -852,7 +851,6 @@ impl Instance { )?; } - #[cfg(feature = "gc")] TableElementType::Extern => { debug_assert!(elements.iter().all(|e| *e == FuncIndex::reserved_value())); table.fill(dst, TableElement::ExternRef(None), len)?; @@ -1246,18 +1244,11 @@ impl Instance { // count as values move between globals, everything else is just // copy-able bits. match wasm_ty { - #[cfg(feature = "gc")] WasmValType::Ref(WasmRefType { heap_type: WasmHeapType::Extern, .. 
}) => *(*to).as_externref_mut() = from.as_externref().clone(), - #[cfg(not(feature = "gc"))] - WasmValType::Ref(WasmRefType { - heap_type: WasmHeapType::Extern, - .. - }) => unreachable!(), - _ => ptr::copy_nonoverlapping(from, to, 1), } } @@ -1302,7 +1293,6 @@ impl Drop for Instance { }; match global.wasm_ty { // For now only externref globals need to get destroyed - #[cfg(feature = "gc")] WasmValType::Ref(WasmRefType { heap_type: WasmHeapType::Extern, .. @@ -1310,14 +1300,6 @@ impl Drop for Instance { drop((*self.global_ptr(idx)).as_externref_mut().take()); }, - #[cfg(not(feature = "gc"))] - WasmValType::Ref(WasmRefType { - heap_type: WasmHeapType::Extern, - .. - }) => unreachable!( - "global {idx:?} is an externref but the `gc` cargo feature is disabled" - ), - _ => continue, } } diff --git a/crates/runtime/src/lib.rs b/crates/runtime/src/lib.rs index e28dd2632a94..cae83b5a8afd 100644 --- a/crates/runtime/src/lib.rs +++ b/crates/runtime/src/lib.rs @@ -14,7 +14,6 @@ mod arch; #[cfg(feature = "component-model")] pub mod component; mod export; -#[cfg(feature = "gc")] mod externref; mod imports; mod instance; @@ -39,7 +38,6 @@ pub use wasmtime_jit_debug::gdb_jit_int::GdbJitImageRegistration; pub use crate::arch::{get_stack_pointer, V128Abi}; pub use crate::export::*; -#[cfg(feature = "gc")] pub use crate::externref::*; pub use crate::imports::Imports; pub use crate::instance::{ diff --git a/crates/runtime/src/libcalls.rs b/crates/runtime/src/libcalls.rs index e397a1e3f43b..1e2d203a6b85 100644 --- a/crates/runtime/src/libcalls.rs +++ b/crates/runtime/src/libcalls.rs @@ -54,9 +54,7 @@ //! } //! 
``` -#[cfg(feature = "gc")] use crate::externref::VMExternRef; - use crate::table::{Table, TableElementType}; use crate::vmcontext::VMFuncRef; use crate::{Instance, TrapReason}; @@ -230,16 +228,7 @@ unsafe fn table_grow( let element = match instance.table_element_type(table_index) { TableElementType::Func => (init_value as *mut VMFuncRef).into(), - - #[cfg(feature = "gc")] - TableElementType::Extern => { - let init_value = if init_value.is_null() { - None - } else { - Some(VMExternRef::clone_from_raw(init_value)) - }; - init_value.into() - } + TableElementType::Extern => VMExternRef::clone_from_raw(init_value).into(), }; Ok(match instance.table_grow(table_index, delta, element)? { @@ -271,13 +260,8 @@ unsafe fn table_fill( table.fill(dst, val.into(), len) } - #[cfg(feature = "gc")] TableElementType::Extern => { - let val = if val.is_null() { - None - } else { - Some(VMExternRef::clone_from_raw(val)) - }; + let val = VMExternRef::clone_from_raw(val); table.fill(dst, val.into(), len) } } @@ -407,7 +391,7 @@ unsafe fn drop_externref(_instance: &mut Instance, externref: *mut u8) { // `VMExternRefActivationsTable`. #[cfg(feature = "gc")] unsafe fn activations_table_insert_with_gc(instance: &mut Instance, externref: *mut u8) { - let externref = VMExternRef::clone_from_raw(externref); + let externref = VMExternRef::clone_from_raw(externref).unwrap(); let limits = *instance.runtime_limits(); let (activations_table, module_info_lookup) = (*instance.store()).externref_activations_table(); @@ -444,11 +428,7 @@ unsafe fn externref_global_get(instance: &mut Instance, index: u32) -> *mut u8 { // Perform a Wasm `global.set` for `externref` globals. 
#[cfg(feature = "gc")] unsafe fn externref_global_set(instance: &mut Instance, index: u32, externref: *mut u8) { - let externref = if externref.is_null() { - None - } else { - Some(VMExternRef::clone_from_raw(externref)) - }; + let externref = VMExternRef::clone_from_raw(externref); let index = wasmtime_environ::GlobalIndex::from_u32(index); let global = instance.defined_or_imported_global_ptr(index); diff --git a/crates/runtime/src/table.rs b/crates/runtime/src/table.rs index 94224b529ce1..cb08b61303b9 100644 --- a/crates/runtime/src/table.rs +++ b/crates/runtime/src/table.rs @@ -2,7 +2,6 @@ //! //! `Table` is to WebAssembly tables what `LinearMemory` is to WebAssembly linear memories. -#[cfg(feature = "gc")] use crate::externref::VMExternRef; use crate::vmcontext::{VMFuncRef, VMTableDefinition}; use crate::{SendSyncPtr, Store}; @@ -23,7 +22,6 @@ pub enum TableElement { FuncRef(*mut VMFuncRef), /// An `exrernref`. - #[cfg(feature = "gc")] ExternRef(Option), /// An uninitialized funcref value. This should never be exposed @@ -36,16 +34,12 @@ pub enum TableElement { #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum TableElementType { Func, - - #[cfg(feature = "gc")] Extern, } // The usage of `*mut VMFuncRef` is safe w.r.t. thread safety, this // just relies on thread-safety of `VMExternRef` itself. 
-#[cfg(feature = "gc")] unsafe impl Send for TableElement where VMExternRef: Send {} -#[cfg(feature = "gc")] unsafe impl Sync for TableElement where VMExternRef: Sync {} impl TableElement { @@ -64,11 +58,9 @@ impl TableElement { let masked = Strict::map_addr(ptr, |a| a & FUNCREF_MASK); Self::FuncRef(masked.cast()) } - #[cfg(feature = "gc")] (TableElementType::Extern, None) => Self::ExternRef(None), - #[cfg(feature = "gc")] (TableElementType::Extern, Some(ptr)) => { - Self::ExternRef(Some(VMExternRef::from_raw(ptr.as_ptr()))) + Self::ExternRef(VMExternRef::from_raw(ptr.as_ptr())) } } } @@ -83,9 +75,8 @@ impl TableElement { // Functions have no ownership, so defer to the prior method. TableElementType::Func => TableElement::from_table_value(ty, ptr), - #[cfg(feature = "gc")] TableElementType::Extern => { - Self::ExternRef(ptr.map(|p| VMExternRef::clone_from_raw(p.as_ptr()))) + Self::ExternRef(ptr.and_then(|p| VMExternRef::clone_from_raw(p.as_ptr()))) } } } @@ -106,7 +97,6 @@ impl TableElement { let tagged = Strict::map_addr(e, |e| e | FUNCREF_INIT_BIT); Some(NonNull::new(tagged.cast()).unwrap().into()) } - #[cfg(feature = "gc")] Self::ExternRef(e) => e.map(|e| NonNull::new(e.into_raw()).unwrap().into()), } } @@ -126,8 +116,6 @@ impl TableElement { match self { Self::FuncRef(e) => e.cast(), Self::UninitFunc => panic!("Uninitialized table element value outside of table slot"), - - #[cfg(feature = "gc")] Self::ExternRef(e) => e.map_or(ptr::null_mut(), |e| e.into_raw()), } } @@ -148,14 +136,12 @@ impl From<*mut VMFuncRef> for TableElement { } } -#[cfg(feature = "gc")] impl From> for TableElement { fn from(x: Option) -> TableElement { TableElement::ExternRef(x) } } -#[cfg(feature = "gc")] impl From for TableElement { fn from(x: VMExternRef) -> TableElement { TableElement::ExternRef(Some(x)) @@ -195,12 +181,7 @@ fn wasm_to_table_type(ty: WasmRefType) -> TableElementType { WasmHeapType::Func | WasmHeapType::Concrete(_) | WasmHeapType::NoFunc => { TableElementType::Func } - 
- #[cfg(feature = "gc")] WasmHeapType::Extern => TableElementType::Extern, - - #[cfg(not(feature = "gc"))] - WasmHeapType::Extern => unreachable!(), } } @@ -512,10 +493,7 @@ impl Table { fn type_matches(&self, val: &TableElement) -> bool { match (&val, self.element_type()) { (TableElement::FuncRef(_), TableElementType::Func) => true, - - #[cfg(feature = "gc")] (TableElement::ExternRef(_), TableElementType::Extern) => true, - _ => false, } } @@ -561,8 +539,6 @@ impl Table { dst_table.elements_mut()[dst_range] .copy_from_slice(&src_table.elements()[src_range]); } - - #[cfg(feature = "gc")] TableElementType::Extern => { // We need to clone each `externref` let dst = dst_table.elements_mut(); @@ -583,8 +559,6 @@ impl Table { // `funcref` are `Copy`, so just do a memmove dst.copy_within(src_range, dst_range.start); } - - #[cfg(feature = "gc")] TableElementType::Extern => { // We need to clone each `externref` while handling overlapping // ranges diff --git a/crates/runtime/src/vmcontext.rs b/crates/runtime/src/vmcontext.rs index 3eb1244b519d..5b03966c11f0 100644 --- a/crates/runtime/src/vmcontext.rs +++ b/crates/runtime/src/vmcontext.rs @@ -3,9 +3,7 @@ mod vm_host_func_context; -#[cfg(feature = "gc")] use crate::externref::VMExternRef; - use sptr::Strict; use std::cell::UnsafeCell; use std::ffi::c_void; @@ -536,19 +534,21 @@ impl VMGlobalDefinition { } /// Return a reference to the value as an externref. - #[cfg(feature = "gc")] pub unsafe fn as_externref(&self) -> &Option { - &*(self.storage.as_ref().as_ptr().cast::>()) + let ret = &*(self.storage.as_ref().as_ptr().cast::>()); + assert!(cfg!(feature = "gc") || ret.is_none()); + ret } /// Return a mutable reference to the value as an externref. 
- #[cfg(feature = "gc")] pub unsafe fn as_externref_mut(&mut self) -> &mut Option { - &mut *(self + let ret = &mut *(self .storage .as_mut() .as_mut_ptr() - .cast::>()) + .cast::>()); + assert!(cfg!(feature = "gc") || ret.is_none()); + ret } /// Return a reference to the value as a `VMFuncRef`. @@ -1035,7 +1035,6 @@ pub union ValRaw { /// carefully calling the correct functions throughout the runtime. /// /// This value is always stored in a little-endian format. - #[cfg(feature = "gc")] externref: *mut c_void, } @@ -1108,16 +1107,9 @@ impl ValRaw { /// Creates a WebAssembly `externref` value #[inline] pub fn externref(i: *mut c_void) -> ValRaw { - #[cfg(feature = "gc")] - { - return ValRaw { - externref: Strict::map_addr(i, |i| i.to_le()), - }; - } - #[cfg(not(feature = "gc"))] - { - assert!(i.is_null()); - return ValRaw::funcref(i); + assert!(cfg!(feature = "gc") || i.is_null()); + ValRaw { + externref: Strict::map_addr(i, |i| i.to_le()), } } @@ -1172,15 +1164,9 @@ impl ValRaw { /// Gets the WebAssembly `externref` value #[inline] pub fn get_externref(&self) -> *mut c_void { - #[cfg(feature = "gc")] - unsafe { - return Strict::map_addr(self.externref, |i| usize::from_le(i)); - } - #[cfg(not(feature = "gc"))] - { - assert!(self.get_funcref().is_null()); - return std::ptr::null_mut(); - } + let ptr = unsafe { Strict::map_addr(self.externref, |i| usize::from_le(i)) }; + assert!(cfg!(feature = "gc") || ptr.is_null()); + ptr } } diff --git a/crates/wasmtime/src/runtime/externals/global.rs b/crates/wasmtime/src/runtime/externals/global.rs index 7c754c70f309..d191e3f071b7 100644 --- a/crates/wasmtime/src/runtime/externals/global.rs +++ b/crates/wasmtime/src/runtime/externals/global.rs @@ -116,19 +116,12 @@ impl Global { HeapType::NoFunc => Ref::Func(None), - #[cfg(feature = "gc")] HeapType::Extern => Ref::Extern( definition .as_externref() .clone() - .map(|inner| ExternRef { inner }), + .map(|inner| ExternRef::from_vm_extern_ref(inner)), ), - - #[cfg(not(feature = 
"gc"))] - HeapType::Extern => { - assert!(definition.as_func_ref().is_null()); - Ref::Extern(None) - } }; debug_assert!( ref_ty.is_nullable() || !reference.is_null(), @@ -171,19 +164,15 @@ impl Global { *definition.as_func_ref_mut() = f.map_or(ptr::null_mut(), |f| f.vm_func_ref(store).as_ptr().cast()); } - #[cfg(feature = "gc")] Val::ExternRef(e) => { // Take care to invoke the `Drop` implementation of the // existing `ExternRef` so that it doesn't leak. - let old = mem::replace(definition.as_externref_mut(), e.map(|e| e.inner)); + let old = mem::replace( + definition.as_externref_mut(), + e.map(|e| e.into_vm_extern_ref()), + ); drop(old); } - #[cfg(not(feature = "gc"))] - Val::ExternRef(None) => { - assert!(definition.as_func_ref().is_null()); - } - #[cfg(not(feature = "gc"))] - Val::ExternRef(Some(e)) => match e._inner {}, } } Ok(()) diff --git a/crates/wasmtime/src/runtime/externals/table.rs b/crates/wasmtime/src/runtime/externals/table.rs index 1f8d8addc701..247e39e98413 100644 --- a/crates/wasmtime/src/runtime/externals/table.rs +++ b/crates/wasmtime/src/runtime/externals/table.rs @@ -159,12 +159,11 @@ impl Table { unreachable!("lazy init above should have converted UninitFunc") } - #[cfg(feature = "gc")] runtime::TableElement::ExternRef(None) => Some(Ref::Extern(None)), - #[cfg(feature = "gc")] + #[cfg_attr(not(feature = "gc"), allow(unreachable_code, unused_variables))] runtime::TableElement::ExternRef(Some(x)) => { - let x = ExternRef { inner: x }; + let x = ExternRef::from_vm_extern_ref(x); Some(x.into()) } } diff --git a/crates/wasmtime/src/runtime/func/typed.rs b/crates/wasmtime/src/runtime/func/typed.rs index 0da88efe79af..4cf926061292 100644 --- a/crates/wasmtime/src/runtime/func/typed.rs +++ b/crates/wasmtime/src/runtime/func/typed.rs @@ -464,7 +464,8 @@ unsafe impl WasmTy for ExternRef { #[inline] fn into_abi(self, store: &mut StoreOpaque) -> Self::Abi { - let abi = self.inner.as_raw(); + let inner = self.into_vm_extern_ref(); + let abi = 
inner.as_raw(); unsafe { // NB: We _must not_ trigger a GC when passing refs from host // code into Wasm (e.g. returned from a host function or passed @@ -495,7 +496,7 @@ unsafe impl WasmTy for ExternRef { // In conclusion, to prevent uses after free, we cannot GC // during this insertion. let mut store = AutoAssertNoGc::new(store); - store.insert_vmexternref_without_gc(self.inner); + store.insert_vmexternref_without_gc(inner); debug_assert!(!abi.is_null()); NonNull::new_unchecked(abi) @@ -504,9 +505,8 @@ unsafe impl WasmTy for ExternRef { #[inline] unsafe fn from_abi(abi: Self::Abi, _store: &mut StoreOpaque) -> Self { - ExternRef { - inner: wasmtime_runtime::VMExternRef::clone_from_raw(abi.as_ptr()), - } + let inner = wasmtime_runtime::VMExternRef::clone_from_raw(abi.as_ptr()).unwrap(); + ExternRef::from_vm_extern_ref(inner) } } @@ -555,13 +555,8 @@ unsafe impl WasmTy for Option { #[inline] unsafe fn from_abi(abi: Self::Abi, _store: &mut StoreOpaque) -> Self { - if abi.is_null() { - None - } else { - Some(ExternRef { - inner: wasmtime_runtime::VMExternRef::clone_from_raw(abi), - }) - } + let inner = wasmtime_runtime::VMExternRef::clone_from_raw(abi)?; + Some(ExternRef::from_vm_extern_ref(inner)) } } diff --git a/crates/wasmtime/src/runtime/module.rs b/crates/wasmtime/src/runtime/module.rs index 3eb53f6bac22..ca59e7070096 100644 --- a/crates/wasmtime/src/runtime/module.rs +++ b/crates/wasmtime/src/runtime/module.rs @@ -963,7 +963,6 @@ impl Module { self.inner.clone() } - #[cfg(feature = "gc")] pub(crate) fn module_info(&self) -> &dyn wasmtime_runtime::ModuleInfo { &*self.inner } @@ -1216,7 +1215,6 @@ impl wasmtime_runtime::ModuleRuntimeInfo for ModuleInner { } } -#[cfg(feature = "gc")] impl wasmtime_runtime::ModuleInfo for ModuleInner { fn lookup_stack_map(&self, pc: usize) -> Option<&wasmtime_environ::StackMap> { let text_offset = pc - self.module.text().as_ptr() as usize; diff --git a/crates/wasmtime/src/runtime/module/registry.rs 
b/crates/wasmtime/src/runtime/module/registry.rs index 21049bdacf48..9c8b50528b01 100644 --- a/crates/wasmtime/src/runtime/module/registry.rs +++ b/crates/wasmtime/src/runtime/module/registry.rs @@ -67,7 +67,6 @@ impl ModuleRegistry { } /// Fetches information about a registered module given a program counter value. - #[cfg(feature = "gc")] pub fn lookup_module_info(&self, pc: usize) -> Option<&dyn wasmtime_runtime::ModuleInfo> { let (module, _) = self.module_and_offset(pc)?; Some(module.module_info()) diff --git a/crates/wasmtime/src/runtime/ref/gc_ref.rs b/crates/wasmtime/src/runtime/ref/gc_ref.rs index 5e0736ef956f..50cad8b670dc 100644 --- a/crates/wasmtime/src/runtime/ref/gc_ref.rs +++ b/crates/wasmtime/src/runtime/ref/gc_ref.rs @@ -7,7 +7,7 @@ use wasmtime_runtime::VMExternRef; #[derive(Clone, Debug)] #[repr(transparent)] pub struct ExternRef { - pub(crate) inner: VMExternRef, + inner: VMExternRef, } impl ExternRef { @@ -20,6 +20,14 @@ impl ExternRef { ExternRef { inner } } + pub(crate) fn from_vm_extern_ref(inner: VMExternRef) -> Self { + ExternRef { inner } + } + + pub(crate) fn into_vm_extern_ref(self) -> VMExternRef { + self.inner + } + /// Get the underlying data for this `ExternRef`. pub fn data(&self) -> &dyn Any { &*self.inner @@ -71,13 +79,8 @@ impl ExternRef { /// [`ValRaw`]: crate::ValRaw pub unsafe fn from_raw(raw: *mut c_void) -> Option { let raw = raw.cast::(); - if raw.is_null() { - None - } else { - Some(ExternRef { - inner: VMExternRef::clone_from_raw(raw), - }) - } + let inner = VMExternRef::clone_from_raw(raw)?; + Some(ExternRef { inner }) } /// Converts this [`ExternRef`] to a raw value suitable to store within a diff --git a/crates/wasmtime/src/runtime/ref/no_gc_ref.rs b/crates/wasmtime/src/runtime/ref/no_gc_ref.rs index dfcf997efd41..30fd1ae3c1c7 100644 --- a/crates/wasmtime/src/runtime/ref/no_gc_ref.rs +++ b/crates/wasmtime/src/runtime/ref/no_gc_ref.rs @@ -1,4 +1,16 @@ +//! 
The dummy `ExternRef` type used when the `gc` cargo feature is disabled. +//! +//! Providing a dummy type means that downstream users need to do fewer +//! `#[cfg(...)]`s versus if this type or its methods simply didn't exist. The +//! only methods that are left missing are constructors. + +#![allow(missing_docs)] + use crate::runtime::Uninhabited; +use crate::AsContextMut; +use std::any::Any; +use std::ffi::c_void; +use wasmtime_runtime::VMExternRef; /// Represents an opaque reference to any data within WebAssembly. /// @@ -8,3 +20,41 @@ use crate::runtime::Uninhabited; pub struct ExternRef { pub(crate) _inner: Uninhabited, } + +impl ExternRef { + pub(crate) fn from_vm_extern_ref(_inner: VMExternRef) -> Self { + unreachable!() + } + + pub(crate) fn into_vm_extern_ref(self) -> VMExternRef { + match self._inner {} + } + + pub fn data(&self) -> &dyn Any { + match self._inner {} + } + + pub fn strong_count(&self) -> usize { + match self._inner {} + } + + pub fn ptr_eq(&self, _other: &ExternRef) -> bool { + match self._inner {} + } + + pub unsafe fn from_raw(raw: *mut c_void) -> Option { + assert!(raw.is_null()); + None + } + + pub unsafe fn to_raw(&self, mut store: impl AsContextMut) -> *mut c_void { + let _ = &mut store; + match self._inner {} + } +} + +impl std::fmt::Pointer for ExternRef { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self._inner {} + } +} diff --git a/crates/wasmtime/src/runtime/store.rs b/crates/wasmtime/src/runtime/store.rs index 48c10535c23f..894e6658718b 100644 --- a/crates/wasmtime/src/runtime/store.rs +++ b/crates/wasmtime/src/runtime/store.rs @@ -2312,7 +2312,6 @@ impl Drop for StoreOpaque { } } -#[cfg(feature = "gc")] impl wasmtime_runtime::ModuleInfoLookup for ModuleRegistry { fn lookup(&self, pc: usize) -> Option<&dyn wasmtime_runtime::ModuleInfo> { self.lookup_module_info(pc) diff --git a/crates/wasmtime/src/runtime/trampoline/global.rs b/crates/wasmtime/src/runtime/trampoline/global.rs index 
b2f9e07bd37c..89dc230a284b 100644 --- a/crates/wasmtime/src/runtime/trampoline/global.rs +++ b/crates/wasmtime/src/runtime/trampoline/global.rs @@ -23,12 +23,7 @@ impl Drop for VMHostGlobalContext { HeapType::Func | HeapType::Concrete(_) | HeapType::NoFunc => { // Nothing to drop. } - - #[cfg(feature = "gc")] HeapType::Extern => unsafe { ptr::drop_in_place(self.global.as_externref_mut()) }, - - #[cfg(not(feature = "gc"))] - HeapType::Extern => assert!(unsafe { self.global.as_func_ref().is_null() }), }, } } @@ -63,16 +58,9 @@ pub fn generate_global_export( *global.as_func_ref_mut() = f.map_or(ptr::null_mut(), |f| f.vm_func_ref(store).as_ptr()); } - #[cfg(feature = "gc")] Val::ExternRef(x) => { - *global.as_externref_mut() = x.map(|x| x.inner); - } - #[cfg(not(feature = "gc"))] - Val::ExternRef(None) => { - *global.as_func_ref_mut() = ptr::null_mut(); + *global.as_externref_mut() = x.map(|x| x.into_vm_extern_ref()); } - #[cfg(not(feature = "gc"))] - Val::ExternRef(Some(x)) => match x._inner {}, } global }; diff --git a/crates/wasmtime/src/runtime/values.rs b/crates/wasmtime/src/runtime/values.rs index af90c4fd0a3b..8bb41362bcb0 100644 --- a/crates/wasmtime/src/runtime/values.rs +++ b/crates/wasmtime/src/runtime/values.rs @@ -179,12 +179,7 @@ impl Val { Val::ExternRef(e) => { let externref = match e { None => ptr::null_mut(), - - #[cfg(feature = "gc")] Some(e) => e.to_raw(store), - - #[cfg(not(feature = "gc"))] - Some(x) => match x._inner {}, }; ValRaw::externref(externref) } @@ -218,15 +213,7 @@ impl Val { Func::from_raw(store, raw.get_funcref()).into() } HeapType::NoFunc => Ref::Func(None), - - #[cfg(feature = "gc")] HeapType::Extern => ExternRef::from_raw(raw.get_externref()).into(), - - #[cfg(not(feature = "gc"))] - HeapType::Extern => { - assert!(raw.get_externref().is_null()); - Ref::Extern(None) - } }; assert!( ref_ty.is_nullable() || !ref_.is_null(), @@ -703,22 +690,12 @@ impl Ref { Ok(TableElement::FuncRef(f.vm_func_ref(store).as_ptr())) } - #[cfg(feature = 
"gc")] (Ref::Extern(e), HeapType::Extern) => match e { None => { assert!(ty.is_nullable()); Ok(TableElement::ExternRef(None)) } - Some(e) => Ok(TableElement::ExternRef(Some(e.inner))), - }, - - #[cfg(not(feature = "gc"))] - (Ref::Extern(e), HeapType::Extern) => match e { - None => bail!( - "support for externref tables disabled at compile time \ - because the `gc` cargo feature was not enabled" - ), - Some(e) => match e._inner {}, + Some(e) => Ok(TableElement::ExternRef(Some(e.into_vm_extern_ref()))), }, _ => unreachable!("checked that the value matches the type above"), From d750adfcf873888dfc2a4b4b6f62a6b5d467374d Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 10:01:45 -0800 Subject: [PATCH 04/12] Fix wasmparser reference types configuration with GC disabled/enabled --- crates/wasmtime/src/config.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/wasmtime/src/config.rs b/crates/wasmtime/src/config.rs index 629a1cf35355..0da166f70406 100644 --- a/crates/wasmtime/src/config.rs +++ b/crates/wasmtime/src/config.rs @@ -252,8 +252,7 @@ impl Config { ret.cranelift_opt_level(OptLevel::Speed); } - #[cfg(feature = "gc")] - ret.wasm_reference_types(true); + ret.wasm_reference_types(cfg!(feature = "gc")); ret.wasm_multi_value(true); ret.wasm_bulk_memory(true); ret.wasm_simd(true); From ffe73e2023e4032af8a3ec48c31ab80f990bee62 Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 10:12:05 -0800 Subject: [PATCH 05/12] More config fix --- crates/wasmtime/src/config.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/wasmtime/src/config.rs b/crates/wasmtime/src/config.rs index 0da166f70406..5e230986695e 100644 --- a/crates/wasmtime/src/config.rs +++ b/crates/wasmtime/src/config.rs @@ -252,7 +252,15 @@ impl Config { ret.cranelift_opt_level(OptLevel::Speed); } - ret.wasm_reference_types(cfg!(feature = "gc")); + #[cfg(feature = "gc")] + ret.wasm_reference_types(true); + 
#[cfg(not(feature = "gc"))] + { + ret.features.reference_types = false; + ret.features.function_references = false; + ret.features.gc = false; + } + ret.wasm_multi_value(true); ret.wasm_bulk_memory(true); ret.wasm_simd(true); From 6f7f3e3fcfcb3a5b916a6b3a7d38958b4bfe1b19 Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 10:14:53 -0800 Subject: [PATCH 06/12] doc cfg --- crates/wasmtime/src/config.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/crates/wasmtime/src/config.rs b/crates/wasmtime/src/config.rs index 5e230986695e..51d8258c108f 100644 --- a/crates/wasmtime/src/config.rs +++ b/crates/wasmtime/src/config.rs @@ -729,9 +729,7 @@ impl Config { /// /// Note that the reference types proposal depends on the bulk memory proposal. /// - /// This method requires the `gc` Cargo feature to be enabled. - /// - /// This feature is `true` by default, when the `gc` Cargo feature is enabled. + /// This feature is `true` by default. /// /// # Errors /// @@ -740,6 +738,7 @@ impl Config { /// /// [proposal]: https://github.com/webassembly/reference-types #[cfg(feature = "gc")] + #[cfg_attr(docsrs, doc(cfg(feature = "gc")))] pub fn wasm_reference_types(&mut self, enable: bool) -> &mut Self { self.features.reference_types = enable; self @@ -755,12 +754,11 @@ impl Config { /// Note that the function references proposal depends on the reference /// types proposal. /// - /// This method requires the `gc` Cargo feature to be enabled. - /// /// This feature is `false` by default. /// /// [proposal]: https://github.com/WebAssembly/function-references #[cfg(feature = "gc")] + #[cfg_attr(docsrs, doc(cfg(feature = "gc")))] pub fn wasm_function_references(&mut self, enable: bool) -> &mut Self { self.features.function_references = enable; self @@ -775,8 +773,6 @@ impl Config { /// Note that the function references proposal depends on the typed function /// references proposal. 
/// - /// This method requires the `gc` Cargo feature to be enabled. - /// /// This feature is `false` by default. /// /// **Warning: Wasmtime's implementation of the GC proposal is still in @@ -784,6 +780,7 @@ impl Config { /// /// [proposal]: https://github.com/WebAssembly/gc #[cfg(feature = "gc")] + #[cfg_attr(docsrs, doc(cfg(feature = "gc")))] pub fn wasm_gc(&mut self, enable: bool) -> &mut Self { self.features.gc = enable; self From 520f5a46559bdfc49035f090e24d99df91a0d50a Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 10:53:39 -0800 Subject: [PATCH 07/12] Make the dummy `VMExternRefActivationsTable` inhabited --- crates/runtime/src/externref/no_gc.rs | 28 ++++++++++++++++----------- crates/runtime/src/instance.rs | 10 +--------- crates/runtime/src/lib.rs | 1 - crates/wasmtime/src/runtime/store.rs | 6 ------ 4 files changed, 18 insertions(+), 27 deletions(-) diff --git a/crates/runtime/src/externref/no_gc.rs b/crates/runtime/src/externref/no_gc.rs index 970b1cd551a7..75692f5d5de1 100644 --- a/crates/runtime/src/externref/no_gc.rs +++ b/crates/runtime/src/externref/no_gc.rs @@ -76,39 +76,45 @@ impl Deref for VMExternRef { } } -pub struct VMExternRefActivationsTable(Uninhabited); +pub struct VMExternRefActivationsTable { + _priv: (), +} impl VMExternRefActivationsTable { + pub fn new() -> Self { + Self { _priv: () } + } + pub fn bump_capacity_remaining(&self) -> usize { - match self.0 {} + usize::MAX } - pub fn try_insert(&mut self, _externref: VMExternRef) -> Result<(), VMExternRef> { - match self.0 {} + pub fn try_insert(&mut self, externref: VMExternRef) -> Result<(), VMExternRef> { + match externref.0 {} } pub unsafe fn insert_with_gc( &mut self, _limits: *const VMRuntimeLimits, - _externref: VMExternRef, + externref: VMExternRef, _module_info_lookup: &dyn ModuleInfoLookup, ) { - match self.0 {} + match externref.0 {} } - pub fn insert_without_gc(&mut self, _externref: VMExternRef) { - match self.0 {} + pub fn 
insert_without_gc(&mut self, externref: VMExternRef) { + match externref.0 {} } pub fn set_gc_okay(&mut self, _okay: bool) -> bool { - match self.0 {} + true } } pub unsafe fn gc( _limits: *const VMRuntimeLimits, _module_info_lookup: &dyn ModuleInfoLookup, - externref_activations_table: &mut VMExternRefActivationsTable, + _externref_activations_table: &mut VMExternRefActivationsTable, ) { - match externref_activations_table.0 {} + // Nothing to do. } diff --git a/crates/runtime/src/instance.rs b/crates/runtime/src/instance.rs index be10cf10ed04..e65558420654 100644 --- a/crates/runtime/src/instance.rs +++ b/crates/runtime/src/instance.rs @@ -465,15 +465,7 @@ impl Instance { *self.vmctx_plus_offset_mut(self.offsets().vmctx_store()) = store; *self.runtime_limits() = (*store).vmruntime_limits(); *self.epoch_ptr() = (*store).epoch_ptr(); - - #[cfg(feature = "gc")] - { - *self.externref_activations_table() = (*store).externref_activations_table().0; - } - #[cfg(not(feature = "gc"))] - { - *self.externref_activations_table() = ptr::null_mut(); - } + *self.externref_activations_table() = (*store).externref_activations_table().0; } else { assert_eq!( mem::size_of::<*mut dyn Store>(), diff --git a/crates/runtime/src/lib.rs b/crates/runtime/src/lib.rs index cae83b5a8afd..46ec141ff7ed 100644 --- a/crates/runtime/src/lib.rs +++ b/crates/runtime/src/lib.rs @@ -106,7 +106,6 @@ pub unsafe trait Store { /// The first element returned is the table in which externrefs are stored /// throughout wasm execution, and the second element is how to look up /// module information for gc requests. 
- #[cfg(feature = "gc")] fn externref_activations_table( &mut self, ) -> (&mut VMExternRefActivationsTable, &dyn ModuleInfoLookup); diff --git a/crates/wasmtime/src/runtime/store.rs b/crates/wasmtime/src/runtime/store.rs index 894e6658718b..ca0fc73c1259 100644 --- a/crates/wasmtime/src/runtime/store.rs +++ b/crates/wasmtime/src/runtime/store.rs @@ -307,7 +307,6 @@ pub struct StoreOpaque { #[cfg(feature = "component-model")] num_component_instances: usize, signal_handler: Option>>, - #[cfg(feature = "gc")] externref_activations_table: wasmtime_runtime::VMExternRefActivationsTable, modules: ModuleRegistry, func_refs: FuncRefs, @@ -498,7 +497,6 @@ impl Store { #[cfg(feature = "component-model")] num_component_instances: 0, signal_handler: None, - #[cfg(feature = "gc")] externref_activations_table: wasmtime_runtime::VMExternRefActivationsTable::new(), modules: ModuleRegistry::default(), func_refs: FuncRefs::default(), @@ -1382,14 +1380,12 @@ impl StoreOpaque { } #[inline] - #[cfg(feature = "gc")] pub fn externref_activations_table( &mut self, ) -> &mut wasmtime_runtime::VMExternRefActivationsTable { &mut self.externref_activations_table } - #[cfg(feature = "gc")] pub fn gc(&mut self) { // For this crate's API, we ensure that `set_stack_canary` invariants // are upheld for all host-->Wasm calls. 
@@ -1535,7 +1531,6 @@ impl StoreOpaque { &self.runtime_limits as *const VMRuntimeLimits as *mut VMRuntimeLimits } - #[cfg(feature = "gc")] pub unsafe fn insert_vmexternref_without_gc(&mut self, r: wasmtime_runtime::VMExternRef) { self.externref_activations_table.insert_without_gc(r); } @@ -2052,7 +2047,6 @@ unsafe impl wasmtime_runtime::Store for StoreInner { self.engine.epoch_counter() as *const _ } - #[cfg(feature = "gc")] fn externref_activations_table( &mut self, ) -> ( From e8509075560b7b6228cedd101894480b1865a0a4 Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 11:16:24 -0800 Subject: [PATCH 08/12] Fix winch tests --- winch/codegen/src/codegen/builtin.rs | 4 ++++ winch/filetests/filetests/x64/table/fill.wat | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/winch/codegen/src/codegen/builtin.rs b/winch/codegen/src/codegen/builtin.rs index 0294c2ffe265..5c508daa25c5 100644 --- a/winch/codegen/src/codegen/builtin.rs +++ b/winch/codegen/src/codegen/builtin.rs @@ -93,6 +93,7 @@ macro_rules! declare_function_sig { /// F64 Nearest. nearest_f64: Option, $( + $( #[ $attr ] )* $name: Option, )* } @@ -102,6 +103,7 @@ macro_rules! declare_function_sig { impl BuiltinFunctions { pub fn new(vmoffsets: &VMOffsets

, call_conv: CallingConvention) -> Self { let size = vmoffsets.ptr.size(); + #[allow(unused_doc_comments)] Self { ptr_size: size, call_conv, @@ -116,6 +118,7 @@ macro_rules! declare_function_sig { nearest_f32: None, nearest_f64: None, $( + $( #[ $attr ] )* $name: None, )* } @@ -246,6 +249,7 @@ macro_rules! declare_function_sig { } $( + $( #[ $attr ] )* pub(crate) fn $name(&mut self) -> BuiltinFunction { if self.$name.is_none() { let params = vec![ $(self.$param() ),* ]; diff --git a/winch/filetests/filetests/x64/table/fill.wat b/winch/filetests/filetests/x64/table/fill.wat index d7cd14519c05..0ed7d6a0cd1c 100644 --- a/winch/filetests/filetests/x64/table/fill.wat +++ b/winch/filetests/filetests/x64/table/fill.wat @@ -118,7 +118,7 @@ ;; c1: 4883e0fe and rax, 0xfffffffffffffffe ;; 4889442404 mov qword ptr [rsp + 4], rax ;; 4d8b5e38 mov r11, qword ptr [r14 + 0x38] -;; 498b4368 mov rax, qword ptr [r11 + 0x68] +;; 498b4358 mov rax, qword ptr [r11 + 0x58] ;; 448b5c2414 mov r11d, dword ptr [rsp + 0x14] ;; 4883ec04 sub rsp, 4 ;; 44891c24 mov dword ptr [rsp], r11d From a9cd3c5cef535760d9cd457ebd70bad42f8c38a2 Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 11:22:30 -0800 Subject: [PATCH 09/12] final review bits --- crates/runtime/src/externref/no_gc.rs | 5 +++++ crates/runtime/src/table.rs | 6 ++---- crates/wasmtime/src/runtime/func.rs | 3 +-- crates/wasmtime/src/runtime/ref/no_gc_ref.rs | 6 +++--- 4 files changed, 11 insertions(+), 9 deletions(-) diff --git a/crates/runtime/src/externref/no_gc.rs b/crates/runtime/src/externref/no_gc.rs index 75692f5d5de1..48c7fc885ead 100644 --- a/crates/runtime/src/externref/no_gc.rs +++ b/crates/runtime/src/externref/no_gc.rs @@ -30,6 +30,11 @@ impl Drop for VMExternRef { } impl VMExternRef { + /// This method is only available when the `gc` cargo feature is *disabled*. 
+ pub fn assert_unreachable(&self) -> T { + match self.0 {} + } + pub fn as_raw(&self) -> *mut u8 { match self.0 {} } diff --git a/crates/runtime/src/table.rs b/crates/runtime/src/table.rs index cb08b61303b9..81bd095f3064 100644 --- a/crates/runtime/src/table.rs +++ b/crates/runtime/src/table.rs @@ -2,6 +2,8 @@ //! //! `Table` is to WebAssembly tables what `LinearMemory` is to WebAssembly linear memories. +#![cfg_attr(feature = "gc", allow(irrefutable_let_patterns))] + use crate::externref::VMExternRef; use crate::vmcontext::{VMFuncRef, VMTableDefinition}; use crate::{SendSyncPtr, Store}; @@ -583,10 +585,6 @@ impl Drop for Table { let ty = self.element_type(); // `funcref` tables don't need drops. - // - // This is an irrefutable pattern when the `gc` cargo feature is not - // enabled. - #[allow(irrefutable_let_patterns)] if let TableElementType::Func = ty { return; } diff --git a/crates/wasmtime/src/runtime/func.rs b/crates/wasmtime/src/runtime/func.rs index d3d788a25294..dd539c819e7f 100644 --- a/crates/wasmtime/src/runtime/func.rs +++ b/crates/wasmtime/src/runtime/func.rs @@ -2027,9 +2027,8 @@ impl Caller<'_, T> { /// Perform garbage collection of `ExternRef`s. /// /// Same as [`Store::gc`](crate::Store::gc). - /// - /// This method is only available when the `gc` cargo feature is enabled. #[cfg(feature = "gc")] + #[cfg_attr(docsrs, doc(cfg(feature = "gc")))] pub fn gc(&mut self) { self.store.gc() } diff --git a/crates/wasmtime/src/runtime/ref/no_gc_ref.rs b/crates/wasmtime/src/runtime/ref/no_gc_ref.rs index 30fd1ae3c1c7..b83e08dd5dc5 100644 --- a/crates/wasmtime/src/runtime/ref/no_gc_ref.rs +++ b/crates/wasmtime/src/runtime/ref/no_gc_ref.rs @@ -18,12 +18,12 @@ use wasmtime_runtime::VMExternRef; /// `gc` cargo feature to properly use this type. 
#[derive(Clone, Debug)] pub struct ExternRef { - pub(crate) _inner: Uninhabited, + _inner: Uninhabited, } impl ExternRef { - pub(crate) fn from_vm_extern_ref(_inner: VMExternRef) -> Self { - unreachable!() + pub(crate) fn from_vm_extern_ref(inner: VMExternRef) -> Self { + inner.assert_unreachable() } pub(crate) fn into_vm_extern_ref(self) -> VMExternRef { From 7c3e3ced068b75cf298844ad697b6ced60ab8def Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 14:48:25 -0800 Subject: [PATCH 10/12] Enable wasmtime's gc cargo feature for the C API --- crates/c-api/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/c-api/Cargo.toml b/crates/c-api/Cargo.toml index 6b470b15c044..2e6942f45fbc 100644 --- a/crates/c-api/Cargo.toml +++ b/crates/c-api/Cargo.toml @@ -22,7 +22,7 @@ doctest = false env_logger = { workspace = true, optional = true } anyhow = { workspace = true } once_cell = { workspace = true } -wasmtime = { workspace = true, features = ['cranelift', 'runtime'] } +wasmtime = { workspace = true, features = ['cranelift', 'runtime', 'gc'] } wasmtime-c-api-macros = { workspace = true } log = { workspace = true } tracing = { workspace = true } From 87324976e404fdd00d3d9a2c23cc78670a676a69 Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Thu, 22 Feb 2024 17:55:35 -0800 Subject: [PATCH 11/12] Enable wasmtime's gc cargo feature from wasmtime-cli-flags --- crates/cli-flags/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/cli-flags/Cargo.toml b/crates/cli-flags/Cargo.toml index 67e02dbe9bf3..543a508762ba 100644 --- a/crates/cli-flags/Cargo.toml +++ b/crates/cli-flags/Cargo.toml @@ -17,7 +17,7 @@ clap = { workspace = true } file-per-thread-logger = { workspace = true, optional = true } tracing-subscriber = { workspace = true, optional = true } rayon = { version = "1.5.0", optional = true } -wasmtime = { workspace = true } +wasmtime = { workspace = true, features = ["gc"] } humantime = { 
workspace = true } [features] From 21061dfa126f45efbdb2a9f548f46d448f235d92 Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Fri, 23 Feb 2024 07:47:34 -0800 Subject: [PATCH 12/12] enable gc cargo feature in a couple other crates --- crates/fuzzing/Cargo.toml | 2 +- crates/wast/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/fuzzing/Cargo.toml b/crates/fuzzing/Cargo.toml index 6828481bb09b..7a7c18f8c42b 100644 --- a/crates/fuzzing/Cargo.toml +++ b/crates/fuzzing/Cargo.toml @@ -22,7 +22,7 @@ target-lexicon = { workspace = true } tempfile = "3.3.0" wasmparser = { workspace = true } wasmprinter = { workspace = true } -wasmtime = { workspace = true, features = ['default', 'winch'] } +wasmtime = { workspace = true, features = ['default', 'winch', 'gc'] } wasmtime-wast = { workspace = true } wasm-encoder = { workspace = true } wasm-smith = { workspace = true } diff --git a/crates/wast/Cargo.toml b/crates/wast/Cargo.toml index b326466d6f49..a752481c020a 100644 --- a/crates/wast/Cargo.toml +++ b/crates/wast/Cargo.toml @@ -14,7 +14,7 @@ workspace = true [dependencies] anyhow = { workspace = true } -wasmtime = { workspace = true, features = ['cranelift', 'wat', 'runtime'] } +wasmtime = { workspace = true, features = ['cranelift', 'wat', 'runtime', 'gc'] } wast = { workspace = true } log = { workspace = true }