diff --git a/crates/cranelift/src/compiler/component.rs b/crates/cranelift/src/compiler/component.rs index 6b9702d26547..52ef64bcc2b2 100644 --- a/crates/cranelift/src/compiler/component.rs +++ b/crates/cranelift/src/compiler/component.rs @@ -3,12 +3,24 @@ use crate::compiler::{Compiler, NativeRet}; use anyhow::Result; use cranelift_codegen::ir::{self, InstBuilder, MemFlags}; -use cranelift_codegen::isa::CallConv; +use cranelift_codegen::isa::{CallConv, TargetIsa}; use cranelift_frontend::FunctionBuilder; use std::any::Any; use wasmtime_cranelift_shared::{ALWAYS_TRAP_CODE, CANNOT_ENTER_CODE}; use wasmtime_environ::component::*; -use wasmtime_environ::{PtrSize, WasmFuncType, WasmType}; +use wasmtime_environ::{PtrSize, SignatureIndex, WasmType}; + +struct TrampolineCompiler<'a> { + compiler: &'a Compiler, + isa: &'a (dyn TargetIsa + 'static), + builder: FunctionBuilder<'a>, + component: &'a Component, + types: &'a ComponentTypes, + offsets: VMComponentOffsets, + abi: Abi, + block0: ir::Block, + signature: SignatureIndex, +} #[derive(Copy, Clone)] enum Abi { @@ -17,25 +29,90 @@ enum Abi { Array, } -impl Compiler { - fn compile_lowered_trampoline_for_abi( - &self, - component: &Component, - lowering: &LowerImport, - types: &ComponentTypes, +impl<'a> TrampolineCompiler<'a> { + fn new( + compiler: &'a Compiler, + func_compiler: &'a mut super::FunctionCompiler<'_>, + component: &'a Component, + types: &'a ComponentTypes, + index: TrampolineIndex, abi: Abi, - ) -> Result> { - let wasm_func_ty = &types[lowering.canonical_abi]; - let isa = &*self.isa; - let pointer_type = isa.pointer_type(); - let offsets = VMComponentOffsets::new(isa.pointer_bytes(), component); + ) -> TrampolineCompiler<'a> { + let isa = &*compiler.isa; + let signature = component.trampolines[index]; + let ty = &types[signature]; + let func = ir::Function::with_name_signature( + ir::UserFuncName::user(0, 0), + match abi { + Abi::Wasm => crate::wasm_call_signature(isa, ty), + Abi::Native => crate::native_call_signature(isa, ty), + Abi::Array => crate::array_call_signature(isa), + }, + ); + let (builder, block0) = func_compiler.builder(func); + TrampolineCompiler { + compiler, + isa, + builder, + component, + types, + offsets: VMComponentOffsets::new(isa.pointer_bytes(), component), + abi, + block0, + signature, + } + } - let mut compiler = self.function_compiler(); + fn translate(&mut self, trampoline: &Trampoline) { + match trampoline { + Trampoline::Transcoder { + op, + from, + from64, + to, + to64, + } => { + match self.abi { + Abi::Wasm => { + self.translate_transcode(*op, *from, *from64, *to, *to64); + } + // Transcoders can only actually be called by Wasm, so let's assert + // that here. 
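+                    // Every trampoline is compiled for all three ABIs via
+                    // `AllCallFunc`, so these entry points exist, but they
+                    // are never expected to actually be reached at runtime.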
+ Abi::Native | Abi::Array => { + self.builder + .ins() + .trap(ir::TrapCode::User(crate::DEBUG_ASSERT_TRAP_CODE)); + } + } + } + Trampoline::LowerImport { + index, + options, + lower_ty, + } => { + self.translate_lower_import(*index, options, *lower_ty); + } + Trampoline::AlwaysTrap => { + self.builder + .ins() + .trap(ir::TrapCode::User(ALWAYS_TRAP_CODE)); + } + Trampoline::ResourceNew(ty) => self.translate_resource_new(*ty), + Trampoline::ResourceRep(ty) => self.translate_resource_rep(*ty), + Trampoline::ResourceDrop(ty) => self.translate_resource_drop(*ty), + } + } - let func = self.func(wasm_func_ty, abi); - let (mut builder, block0) = compiler.builder(func); - let args = builder.func.dfg.block_params(block0).to_vec(); + fn translate_lower_import( + &mut self, + index: LoweredIndex, + options: &CanonicalOptions, + lower_ty: TypeFuncIndex, + ) { + let pointer_type = self.isa.pointer_type(); + let args = self.builder.func.dfg.block_params(self.block0).to_vec(); let vmctx = args[0]; + let wasm_func_ty = &self.types[self.signature]; // More handling is necessary here if this changes assert!(matches!( @@ -45,29 +122,27 @@ impl Compiler { // Start off by spilling all the wasm arguments into a stack slot to be // passed to the host function. - let (values_vec_ptr, values_vec_len) = match abi { + let (values_vec_ptr, values_vec_len) = match self.abi { Abi::Wasm | Abi::Native => { - let (ptr, len) = self.allocate_stack_array_and_spill_args( + let (ptr, len) = self.compiler.allocate_stack_array_and_spill_args( wasm_func_ty, - &mut builder, + &mut self.builder, &args[2..], ); - let len = builder.ins().iconst(pointer_type, i64::from(len)); + let len = self.builder.ins().iconst(pointer_type, i64::from(len)); (ptr, len) } Abi::Array => { - let params = builder.func.dfg.block_params(block0); + let params = self.builder.func.dfg.block_params(self.block0); (params[2], params[3]) } }; - self.abi_preamble(&mut builder, &offsets, vmctx, abi); - // Below this will incrementally build both the signature of the host // function we're calling as well as the list of arguments since the // list is somewhat long. 
let mut callee_args = Vec::new(); - let mut host_sig = ir::Signature::new(CallConv::triple_default(isa.triple())); + let mut host_sig = ir::Signature::new(CallConv::triple_default(self.isa.triple())); let CanonicalOptions { instance, @@ -75,7 +150,7 @@ impl Compiler { realloc, post_return, string_encoding, - } = lowering.options; + } = *options; // vmctx: *mut VMComponentContext host_sig.params.push(ir::AbiParam::new(pointer_type)); @@ -83,48 +158,51 @@ impl Compiler { // data: *mut u8, host_sig.params.push(ir::AbiParam::new(pointer_type)); - callee_args.push(builder.ins().load( + callee_args.push(self.builder.ins().load( pointer_type, MemFlags::trusted(), vmctx, - i32::try_from(offsets.lowering_data(lowering.index)).unwrap(), + i32::try_from(self.offsets.lowering_data(index)).unwrap(), )); // ty: TypeFuncIndex, - let ty = lowering.lower_ty; host_sig.params.push(ir::AbiParam::new(ir::types::I32)); - callee_args.push(builder.ins().iconst(ir::types::I32, i64::from(ty.as_u32()))); + callee_args.push( + self.builder + .ins() + .iconst(ir::types::I32, i64::from(lower_ty.as_u32())), + ); // flags: *mut VMGlobalDefinition host_sig.params.push(ir::AbiParam::new(pointer_type)); callee_args.push( - builder + self.builder .ins() - .iadd_imm(vmctx, i64::from(offsets.instance_flags(instance))), + .iadd_imm(vmctx, i64::from(self.offsets.instance_flags(instance))), ); // memory: *mut VMMemoryDefinition host_sig.params.push(ir::AbiParam::new(pointer_type)); callee_args.push(match memory { - Some(idx) => builder.ins().load( + Some(idx) => self.builder.ins().load( pointer_type, MemFlags::trusted(), vmctx, - i32::try_from(offsets.runtime_memory(idx)).unwrap(), + i32::try_from(self.offsets.runtime_memory(idx)).unwrap(), ), - None => builder.ins().iconst(pointer_type, 0), + None => self.builder.ins().iconst(pointer_type, 0), }); // realloc: *mut VMFuncRef host_sig.params.push(ir::AbiParam::new(pointer_type)); callee_args.push(match realloc { - Some(idx) => builder.ins().load( + Some(idx) => self.builder.ins().load( pointer_type, MemFlags::trusted(), vmctx, - i32::try_from(offsets.runtime_realloc(idx)).unwrap(), + i32::try_from(self.offsets.runtime_realloc(idx)).unwrap(), ), - None => builder.ins().iconst(pointer_type, 0), + None => self.builder.ins().iconst(pointer_type, 0), }); // A post-return option is only valid on `canon.lift`'d functions so no @@ -135,7 +213,7 @@ impl Compiler { // string_encoding: StringEncoding host_sig.params.push(ir::AbiParam::new(ir::types::I8)); callee_args.push( - builder + self.builder .ins() .iconst(ir::types::I8, i64::from(string_encoding as u8)), ); @@ -150,100 +228,39 @@ impl Compiler { // Load host function pointer from the vmcontext and then call that // indirect function pointer with the list of arguments. - let host_fn = builder.ins().load( + let host_fn = self.builder.ins().load( pointer_type, MemFlags::trusted(), vmctx, - i32::try_from(offsets.lowering_callee(lowering.index)).unwrap(), + i32::try_from(self.offsets.lowering_callee(index)).unwrap(), ); - let host_sig = builder.import_signature(host_sig); - builder.ins().call_indirect(host_sig, host_fn, &callee_args); + let host_sig = self.builder.import_signature(host_sig); + self.builder + .ins() + .call_indirect(host_sig, host_fn, &callee_args); - match abi { + match self.abi { Abi::Wasm | Abi::Native => { // After the host function has returned the results are loaded from // `values_vec_ptr` and then returned. 
- let results = self.load_values_from_array( + let results = self.compiler.load_values_from_array( wasm_func_ty.returns(), - &mut builder, + &mut self.builder, values_vec_ptr, values_vec_len, ); - builder.ins().return_(&results); + self.builder.ins().return_(&results); } Abi::Array => { - builder.ins().return_(&[]); - } - } - builder.finalize(); - - Ok(Box::new(compiler.finish()?)) - } - - fn compile_always_trap_for_abi( - &self, - ty: &WasmFuncType, - abi: Abi, - ) -> Result> { - let mut compiler = self.function_compiler(); - let func = self.func(ty, abi); - let (mut builder, _block0) = compiler.builder(func); - builder.ins().trap(ir::TrapCode::User(ALWAYS_TRAP_CODE)); - builder.finalize(); - - Ok(Box::new(compiler.finish()?)) - } - - fn compile_transcoder_for_abi( - &self, - component: &Component, - transcoder: &Transcoder, - types: &ComponentTypes, - abi: Abi, - ) -> Result> { - let ty = &types[transcoder.signature]; - let isa = &*self.isa; - let offsets = VMComponentOffsets::new(isa.pointer_bytes(), component); - let mut compiler = self.function_compiler(); - let func = self.func(ty, abi); - let (mut builder, block0) = compiler.builder(func); - - match abi { - Abi::Wasm => { - self.translate_transcode(&mut builder, &offsets, transcoder, block0); - } - // Transcoders can only actually be called by Wasm, so let's assert - // that here. - Abi::Native | Abi::Array => { - builder - .ins() - .trap(ir::TrapCode::User(crate::DEBUG_ASSERT_TRAP_CODE)); + self.builder.ins().return_(&[]); } } - - builder.finalize(); - Ok(Box::new(compiler.finish()?)) } - fn compile_resource_new_for_abi( - &self, - component: &Component, - resource: &ResourceNew, - types: &ComponentTypes, - abi: Abi, - ) -> Result> { - let ty = &types[resource.signature]; - let isa = &*self.isa; - let offsets = VMComponentOffsets::new(isa.pointer_bytes(), component); - let mut compiler = self.function_compiler(); - let func = self.func(ty, abi); - let (mut builder, block0) = compiler.builder(func); - - let args = self.abi_load_params(&mut builder, ty, block0, abi); + fn translate_resource_new(&mut self, resource: TypeResourceTableIndex) { + let args = self.abi_load_params(); let vmctx = args[0]; - self.abi_preamble(&mut builder, &offsets, vmctx, abi); - // The arguments this shim passes along to the libcall are: // // * the vmctx @@ -252,44 +269,29 @@ impl Compiler { let mut host_args = Vec::new(); host_args.push(vmctx); host_args.push( - builder + self.builder .ins() - .iconst(ir::types::I32, i64::from(resource.resource.as_u32())), + .iconst(ir::types::I32, i64::from(resource.as_u32())), ); host_args.push(args[2]); // Currently this only support resources represented by `i32` - assert_eq!(ty.params()[0], WasmType::I32); - let (host_sig, offset) = host::resource_new32(self, &mut builder.func); - - let host_fn = self.load_libcall(&mut builder, &offsets, vmctx, offset); - let call = builder.ins().call_indirect(host_sig, host_fn, &host_args); - let result = builder.func.dfg.inst_results(call)[0]; - self.abi_store_results(&mut builder, ty, block0, &[result], abi); - - builder.finalize(); - Ok(Box::new(compiler.finish()?)) + assert_eq!(self.types[self.signature].params()[0], WasmType::I32); + let (host_sig, offset) = host::resource_new32(self.isa, &mut self.builder.func); + + let host_fn = self.load_libcall(vmctx, offset); + let call = self + .builder + .ins() + .call_indirect(host_sig, host_fn, &host_args); + let result = self.builder.func.dfg.inst_results(call)[0]; + self.abi_store_results(&[result]); } - fn 
compile_resource_rep_for_abi( - &self, - component: &Component, - resource: &ResourceRep, - types: &ComponentTypes, - abi: Abi, - ) -> Result> { - let ty = &types[resource.signature]; - let isa = &*self.isa; - let offsets = VMComponentOffsets::new(isa.pointer_bytes(), component); - let mut compiler = self.function_compiler(); - let func = self.func(ty, abi); - let (mut builder, block0) = compiler.builder(func); - - let args = self.abi_load_params(&mut builder, ty, block0, abi); + fn translate_resource_rep(&mut self, resource: TypeResourceTableIndex) { + let args = self.abi_load_params(); let vmctx = args[0]; - self.abi_preamble(&mut builder, &offsets, vmctx, abi); - // The arguments this shim passes along to the libcall are: // // * the vmctx @@ -298,45 +300,30 @@ impl Compiler { let mut host_args = Vec::new(); host_args.push(vmctx); host_args.push( - builder + self.builder .ins() - .iconst(ir::types::I32, i64::from(resource.resource.as_u32())), + .iconst(ir::types::I32, i64::from(resource.as_u32())), ); host_args.push(args[2]); // Currently this only support resources represented by `i32` - assert_eq!(ty.returns()[0], WasmType::I32); - let (host_sig, offset) = host::resource_rep32(self, &mut builder.func); - - let host_fn = self.load_libcall(&mut builder, &offsets, vmctx, offset); - let call = builder.ins().call_indirect(host_sig, host_fn, &host_args); - let result = builder.func.dfg.inst_results(call)[0]; - self.abi_store_results(&mut builder, ty, block0, &[result], abi); - - builder.finalize(); - Ok(Box::new(compiler.finish()?)) + assert_eq!(self.types[self.signature].returns()[0], WasmType::I32); + let (host_sig, offset) = host::resource_rep32(self.isa, &mut self.builder.func); + + let host_fn = self.load_libcall(vmctx, offset); + let call = self + .builder + .ins() + .call_indirect(host_sig, host_fn, &host_args); + let result = self.builder.func.dfg.inst_results(call)[0]; + self.abi_store_results(&[result]); } - fn compile_resource_drop_for_abi( - &self, - component: &Component, - resource: &ResourceDrop, - types: &ComponentTypes, - abi: Abi, - ) -> Result> { - let pointer_type = self.isa.pointer_type(); - let ty = &types[resource.signature]; - let isa = &*self.isa; - let offsets = VMComponentOffsets::new(isa.pointer_bytes(), component); - let mut compiler = self.function_compiler(); - let func = self.func(ty, abi); - let (mut builder, block0) = compiler.builder(func); - - let args = self.abi_load_params(&mut builder, ty, block0, abi); + fn translate_resource_drop(&mut self, resource: TypeResourceTableIndex) { + let args = self.abi_load_params(); let vmctx = args[0]; let caller_vmctx = args[1]; - - self.abi_preamble(&mut builder, &offsets, vmctx, abi); + let pointer_type = self.isa.pointer_type(); // The arguments this shim passes along to the libcall are: // @@ -346,29 +333,35 @@ impl Compiler { let mut host_args = Vec::new(); host_args.push(vmctx); host_args.push( - builder + self.builder .ins() - .iconst(ir::types::I32, i64::from(resource.resource.as_u32())), + .iconst(ir::types::I32, i64::from(resource.as_u32())), ); host_args.push(args[2]); - let (host_sig, offset) = host::resource_drop(self, &mut builder.func); - let host_fn = self.load_libcall(&mut builder, &offsets, vmctx, offset); - let call = builder.ins().call_indirect(host_sig, host_fn, &host_args); - let should_run_destructor = builder.func.dfg.inst_results(call)[0]; - - let resource_ty = types[resource.resource].ty; - let resource_def = component.defined_resource_index(resource_ty).map(|idx| { - component - .initializers 
- .iter() - .filter_map(|i| match i { - GlobalInitializer::Resource(r) if r.index == idx => Some(r), - _ => None, - }) - .next() - .unwrap() - }); + let (host_sig, offset) = host::resource_drop(self.isa, &mut self.builder.func); + let host_fn = self.load_libcall(vmctx, offset); + let call = self + .builder + .ins() + .call_indirect(host_sig, host_fn, &host_args); + let should_run_destructor = self.builder.func.dfg.inst_results(call)[0]; + + let resource_ty = self.types[resource].ty; + let resource_def = self + .component + .defined_resource_index(resource_ty) + .map(|idx| { + self.component + .initializers + .iter() + .filter_map(|i| match i { + GlobalInitializer::Resource(r) if r.index == idx => Some(r), + _ => None, + }) + .next() + .unwrap() + }); let has_destructor = match resource_def { Some(def) => def.dtor.is_some(), None => true, @@ -405,14 +398,16 @@ impl Compiler { // This will decode `should_run_destructor` and run the destructor // funcref if one is specified for this resource. Note that not all // resources have destructors, hence the null check. - builder.ensure_inserted_block(); - let current_block = builder.current_block().unwrap(); - let run_destructor_block = builder.create_block(); - builder.insert_block_after(run_destructor_block, current_block); - let return_block = builder.create_block(); - builder.insert_block_after(return_block, run_destructor_block); - - builder.ins().brif( + self.builder.ensure_inserted_block(); + let current_block = self.builder.current_block().unwrap(); + let run_destructor_block = self.builder.create_block(); + self.builder + .insert_block_after(run_destructor_block, current_block); + let return_block = self.builder.create_block(); + self.builder + .insert_block_after(return_block, run_destructor_block); + + self.builder.ins().brif( should_run_destructor, run_destructor_block, &[], @@ -422,7 +417,7 @@ impl Compiler { let trusted = ir::MemFlags::trusted().with_readonly(); - builder.switch_to_block(run_destructor_block); + self.builder.switch_to_block(run_destructor_block); // If this is a defined resource within the component itself then a // check needs to be emitted for the `may_enter` flag. Note though @@ -430,15 +425,18 @@ impl Compiler { // the same component instance that defined the resource as the // component is calling itself. if let Some(def) = resource_def { - if types[resource.resource].instance != def.instance { - let flags = builder.ins().load( + if self.types[resource].instance != def.instance { + let flags = self.builder.ins().load( ir::types::I32, trusted, vmctx, - i32::try_from(offsets.instance_flags(def.instance)).unwrap(), + i32::try_from(self.offsets.instance_flags(def.instance)).unwrap(), ); - let masked = builder.ins().band_imm(flags, i64::from(FLAG_MAY_ENTER)); - builder + let masked = self + .builder + .ins() + .band_imm(flags, i64::from(FLAG_MAY_ENTER)); + self.builder .ins() .trapz(masked, ir::TrapCode::User(CANNOT_ENTER_CODE)); } @@ -447,119 +445,83 @@ impl Compiler { // Conditionally emit destructor-execution code based on whether we // statically know that a destructor exists or not. 
if has_destructor { - let rep = builder.ins().ushr_imm(should_run_destructor, 1); - let rep = builder.ins().ireduce(ir::types::I32, rep); - let index = types[resource.resource].ty; + let rep = self.builder.ins().ushr_imm(should_run_destructor, 1); + let rep = self.builder.ins().ireduce(ir::types::I32, rep); + let index = self.types[resource].ty; // NB: despite the vmcontext storing nullable funcrefs for function // pointers we know this is statically never null due to the // `has_destructor` check above. - let dtor_func_ref = builder.ins().load( + let dtor_func_ref = self.builder.ins().load( pointer_type, trusted, vmctx, - i32::try_from(offsets.resource_destructor(index)).unwrap(), + i32::try_from(self.offsets.resource_destructor(index)).unwrap(), ); if cfg!(debug_assertions) { - builder.ins().trapz( + self.builder.ins().trapz( dtor_func_ref, ir::TrapCode::User(crate::DEBUG_ASSERT_TRAP_CODE), ); } - let func_addr = builder.ins().load( + let func_addr = self.builder.ins().load( pointer_type, trusted, dtor_func_ref, - i32::from(offsets.ptr.vm_func_ref_wasm_call()), + i32::from(self.offsets.ptr.vm_func_ref_wasm_call()), ); - let callee_vmctx = builder.ins().load( + let callee_vmctx = self.builder.ins().load( pointer_type, trusted, dtor_func_ref, - i32::from(offsets.ptr.vm_func_ref_vmctx()), + i32::from(self.offsets.ptr.vm_func_ref_vmctx()), ); - let sig = crate::wasm_call_signature(isa, &types[resource.signature]); - let sig_ref = builder.import_signature(sig); + let sig = crate::wasm_call_signature(self.isa, &self.types[self.signature]); + let sig_ref = self.builder.import_signature(sig); // NB: note that the "caller" vmctx here is the caller of this // intrinsic itself, not the `VMComponentContext`. This effectively // takes ourselves out of the chain here but that's ok since the // caller is only used for store/limits and that same info is // stored, but elsewhere, in the component context. - builder - .ins() - .call_indirect(sig_ref, func_addr, &[callee_vmctx, caller_vmctx, rep]); + self.builder.ins().call_indirect( + sig_ref, + func_addr, + &[callee_vmctx, caller_vmctx, rep], + ); } - builder.ins().jump(return_block, &[]); - builder.seal_block(run_destructor_block); - - builder.switch_to_block(return_block); - builder.ins().return_(&[]); - builder.seal_block(return_block); - - builder.finalize(); - Ok(Box::new(compiler.finish()?)) - } - - fn func(&self, ty: &WasmFuncType, abi: Abi) -> ir::Function { - let isa = &*self.isa; - ir::Function::with_name_signature( - ir::UserFuncName::user(0, 0), - match abi { - Abi::Wasm => crate::wasm_call_signature(isa, ty), - Abi::Native => crate::native_call_signature(isa, ty), - Abi::Array => crate::array_call_signature(isa), - }, - ) - } + self.builder.ins().jump(return_block, &[]); + self.builder.seal_block(run_destructor_block); - fn compile_func_ref( - &self, - compile: impl Fn(Abi) -> Result>, - ) -> Result>> { - Ok(AllCallFunc { - wasm_call: compile(Abi::Wasm)?, - array_call: compile(Abi::Array)?, - native_call: compile(Abi::Native)?, - }) + self.builder.switch_to_block(return_block); + self.builder.ins().return_(&[]); + self.builder.seal_block(return_block); } /// Loads a host function pointer for a libcall stored at the `offset` /// provided in the libcalls array. /// /// The offset is calculated in the `host` module below. 
- fn load_libcall( - &self, - builder: &mut FunctionBuilder<'_>, - offsets: &VMComponentOffsets, - vmctx: ir::Value, - offset: u32, - ) -> ir::Value { + fn load_libcall(&mut self, vmctx: ir::Value, offset: u32) -> ir::Value { let pointer_type = self.isa.pointer_type(); // First load the pointer to the libcalls structure which is static // per-process. - let libcalls_array = builder.ins().load( + let libcalls_array = self.builder.ins().load( pointer_type, MemFlags::trusted().with_readonly(), vmctx, - i32::try_from(offsets.libcalls()).unwrap(), + i32::try_from(self.offsets.libcalls()).unwrap(), ); // Next load the function pointer at `offset` and return that. - builder.ins().load( + self.builder.ins().load( pointer_type, MemFlags::trusted().with_readonly(), libcalls_array, - i32::try_from(offset * u32::from(offsets.ptr.size())).unwrap(), + i32::try_from(offset * u32::from(self.offsets.ptr.size())).unwrap(), ) } - fn abi_load_params( - &self, - builder: &mut FunctionBuilder<'_>, - ty: &WasmFuncType, - block0: ir::Block, - abi: Abi, - ) -> Vec { - let mut block0_params = builder.func.dfg.block_params(block0).to_vec(); - match abi { + fn abi_load_params(&mut self) -> Vec { + let mut block0_params = self.builder.func.dfg.block_params(self.block0).to_vec(); + match self.abi { // Wasm and native ABIs pass parameters as normal function // parameters. Abi::Wasm | Abi::Native => block0_params, @@ -567,9 +529,9 @@ impl Compiler { // The array ABI passes a pointer/length as the 3rd/4th arguments // and those are used to load the actual wasm parameters. Abi::Array => { - let results = self.load_values_from_array( - ty.params(), - builder, + let results = self.compiler.load_values_from_array( + self.types[self.signature].params(), + &mut self.builder, block0_params[2], block0_params[3], ); @@ -580,206 +542,133 @@ impl Compiler { } } - fn abi_store_results( - &self, - builder: &mut FunctionBuilder<'_>, - ty: &WasmFuncType, - block0: ir::Block, - results: &[ir::Value], - abi: Abi, - ) { - match abi { + fn abi_store_results(&mut self, results: &[ir::Value]) { + match self.abi { // Wasm/native ABIs return values as usual. Abi::Wasm | Abi::Native => { - builder.ins().return_(results); + self.builder.ins().return_(results); } // The array ABI stores all results in the pointer/length passed // as arguments to this function, which contractually are required // to have enough space for the results. Abi::Array => { - let block0_params = builder.func.dfg.block_params(block0); - self.store_values_to_array( - builder, - ty.returns(), + let block0_params = self.builder.func.dfg.block_params(self.block0); + let (ptr, len) = (block0_params[2], block0_params[3]); + self.compiler.store_values_to_array( + &mut self.builder, + self.types[self.signature].returns(), results, - block0_params[2], - block0_params[3], + ptr, + len, ); - builder.ins().return_(&[]); + self.builder.ins().return_(&[]); } } } - - fn abi_preamble( - &self, - builder: &mut FunctionBuilder<'_>, - offsets: &VMComponentOffsets, - vmctx: ir::Value, - abi: Abi, - ) { - let pointer_type = self.isa.pointer_type(); - // If we are crossing the Wasm-to-native boundary, we need to save the - // exit FP and return address for stack walking purposes. However, we - // always debug assert that our vmctx is a component context, regardless - // whether we are actually crossing that boundary because it should - // always hold. 
- super::debug_assert_vmctx_kind( - &*self.isa, - builder, - vmctx, - wasmtime_environ::component::VMCOMPONENT_MAGIC, - ); - if let Abi::Wasm = abi { - let limits = builder.ins().load( - pointer_type, - MemFlags::trusted(), - vmctx, - i32::try_from(offsets.limits()).unwrap(), - ); - super::save_last_wasm_exit_fp_and_pc(builder, pointer_type, &offsets.ptr, limits); - } - } } impl ComponentCompiler for Compiler { - fn compile_lowered_trampoline( - &self, - component: &Component, - lowering: &LowerImport, - types: &ComponentTypes, - ) -> Result>> { - self.compile_func_ref(|abi| { - self.compile_lowered_trampoline_for_abi(component, lowering, types, abi) - }) - } - - fn compile_always_trap(&self, ty: &WasmFuncType) -> Result>> { - self.compile_func_ref(|abi| self.compile_always_trap_for_abi(ty, abi)) - } - - fn compile_transcoder( + fn compile_trampoline( &self, - component: &Component, - transcoder: &Transcoder, + component: &ComponentTranslation, types: &ComponentTypes, + index: TrampolineIndex, ) -> Result>> { - self.compile_func_ref(|abi| { - self.compile_transcoder_for_abi(component, transcoder, types, abi) - }) - } + let compile = |abi: Abi| -> Result<_> { + let mut compiler = self.function_compiler(); + let mut c = TrampolineCompiler::new( + self, + &mut compiler, + &component.component, + types, + index, + abi, + ); - fn compile_resource_new( - &self, - component: &Component, - resource: &ResourceNew, - types: &ComponentTypes, - ) -> Result>> { - self.compile_func_ref(|abi| { - self.compile_resource_new_for_abi(component, resource, types, abi) - }) - } + // If we are crossing the Wasm-to-native boundary, we need to save the + // exit FP and return address for stack walking purposes. However, we + // always debug assert that our vmctx is a component context, regardless + // whether we are actually crossing that boundary because it should + // always hold. 
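+            // This preamble is emitted once here, before the body of the
+            // trampoline is translated, and is shared by every `Trampoline`
+            // variant.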
+ let vmctx = c.builder.block_params(c.block0)[0]; + let pointer_type = self.isa.pointer_type(); + super::debug_assert_vmctx_kind( + &*self.isa, + &mut c.builder, + vmctx, + wasmtime_environ::component::VMCOMPONENT_MAGIC, + ); + if let Abi::Wasm = abi { + let limits = c.builder.ins().load( + pointer_type, + MemFlags::trusted(), + vmctx, + i32::try_from(c.offsets.limits()).unwrap(), + ); + super::save_last_wasm_exit_fp_and_pc( + &mut c.builder, + pointer_type, + &c.offsets.ptr, + limits, + ); + } - fn compile_resource_rep( - &self, - component: &Component, - resource: &ResourceRep, - types: &ComponentTypes, - ) -> Result>> { - self.compile_func_ref(|abi| { - self.compile_resource_rep_for_abi(component, resource, types, abi) - }) - } + c.translate(&component.trampolines[index]); + c.builder.finalize(); - fn compile_resource_drop( - &self, - component: &Component, - resource: &ResourceDrop, - types: &ComponentTypes, - ) -> Result>> { - self.compile_func_ref(|abi| { - self.compile_resource_drop_for_abi(component, resource, types, abi) + Ok(Box::new(compiler.finish()?)) + }; + Ok(AllCallFunc { + wasm_call: compile(Abi::Wasm)?, + array_call: compile(Abi::Array)?, + native_call: compile(Abi::Native)?, }) } } -impl Compiler { +impl TrampolineCompiler<'_> { fn translate_transcode( - &self, - builder: &mut FunctionBuilder<'_>, - offsets: &VMComponentOffsets, - transcoder: &Transcoder, - block: ir::Block, + &mut self, + op: Transcode, + from: RuntimeMemoryIndex, + from64: bool, + to: RuntimeMemoryIndex, + to64: bool, ) { let pointer_type = self.isa.pointer_type(); - let vmctx = builder.func.dfg.block_params(block)[0]; - - self.abi_preamble(builder, offsets, vmctx, Abi::Wasm); + let vmctx = self.builder.func.dfg.block_params(self.block0)[0]; // Determine the static signature of the host libcall for this transcode // operation and additionally calculate the static offset within the // transode libcalls array. 
- let func = &mut builder.func; - let (sig, offset) = match transcoder.op { - Transcode::Copy(FixedEncoding::Utf8) => host::utf8_to_utf8(self, func), - Transcode::Copy(FixedEncoding::Utf16) => host::utf16_to_utf16(self, func), - Transcode::Copy(FixedEncoding::Latin1) => host::latin1_to_latin1(self, func), - Transcode::Latin1ToUtf16 => host::latin1_to_utf16(self, func), - Transcode::Latin1ToUtf8 => host::latin1_to_utf8(self, func), + let func = &mut self.builder.func; + let (sig, offset) = match op { + Transcode::Copy(FixedEncoding::Utf8) => host::utf8_to_utf8(self.isa, func), + Transcode::Copy(FixedEncoding::Utf16) => host::utf16_to_utf16(self.isa, func), + Transcode::Copy(FixedEncoding::Latin1) => host::latin1_to_latin1(self.isa, func), + Transcode::Latin1ToUtf16 => host::latin1_to_utf16(self.isa, func), + Transcode::Latin1ToUtf8 => host::latin1_to_utf8(self.isa, func), Transcode::Utf16ToCompactProbablyUtf16 => { - host::utf16_to_compact_probably_utf16(self, func) + host::utf16_to_compact_probably_utf16(self.isa, func) } - Transcode::Utf16ToCompactUtf16 => host::utf16_to_compact_utf16(self, func), - Transcode::Utf16ToLatin1 => host::utf16_to_latin1(self, func), - Transcode::Utf16ToUtf8 => host::utf16_to_utf8(self, func), - Transcode::Utf8ToCompactUtf16 => host::utf8_to_compact_utf16(self, func), - Transcode::Utf8ToLatin1 => host::utf8_to_latin1(self, func), - Transcode::Utf8ToUtf16 => host::utf8_to_utf16(self, func), + Transcode::Utf16ToCompactUtf16 => host::utf16_to_compact_utf16(self.isa, func), + Transcode::Utf16ToLatin1 => host::utf16_to_latin1(self.isa, func), + Transcode::Utf16ToUtf8 => host::utf16_to_utf8(self.isa, func), + Transcode::Utf8ToCompactUtf16 => host::utf8_to_compact_utf16(self.isa, func), + Transcode::Utf8ToLatin1 => host::utf8_to_latin1(self.isa, func), + Transcode::Utf8ToUtf16 => host::utf8_to_utf16(self.isa, func), }; - let libcall = self.load_libcall(builder, offsets, vmctx, offset); + let libcall = self.load_libcall(vmctx, offset); // Load the base pointers for the from/to linear memories. - let from_base = self.load_runtime_memory_base(builder, vmctx, offsets, transcoder.from); - let to_base = self.load_runtime_memory_base(builder, vmctx, offsets, transcoder.to); - - // Helper function to cast a core wasm input to a host pointer type - // which will go into the host libcall. - let cast_to_pointer = |builder: &mut FunctionBuilder<'_>, val: ir::Value, is64: bool| { - let host64 = pointer_type == ir::types::I64; - if is64 == host64 { - val - } else if !is64 { - assert!(host64); - builder.ins().uextend(pointer_type, val) - } else { - assert!(!host64); - builder.ins().ireduce(pointer_type, val) - } - }; + let from_base = self.load_runtime_memory_base(vmctx, from); + let to_base = self.load_runtime_memory_base(vmctx, to); - // Helper function to cast an input parameter to the host pointer type. - let len_param = |builder: &mut FunctionBuilder<'_>, param: usize, is64: bool| { - let val = builder.func.dfg.block_params(block)[2 + param]; - cast_to_pointer(builder, val, is64) - }; - - // Helper function to interpret an input parameter as a pointer into - // linear memory. This will cast the input parameter to the host integer - // type and then add that value to the base. - // - // Note that bounds-checking happens in adapter modules, and this - // trampoline is simply calling the host libcall. 
- let ptr_param = - |builder: &mut FunctionBuilder<'_>, param: usize, is64: bool, base: ir::Value| { - let val = len_param(builder, param, is64); - builder.ins().iadd(base, val) - }; - - let Transcoder { to64, from64, .. } = *transcoder; let mut args = Vec::new(); - let uses_retptr = match transcoder.op { + let uses_retptr = match op { Transcode::Utf16ToUtf8 | Transcode::Latin1ToUtf8 | Transcode::Utf8ToLatin1 @@ -790,44 +679,47 @@ impl Compiler { // Most transcoders share roughly the same signature despite doing very // different things internally, so most libcalls are lumped together // here. - match transcoder.op { + match op { Transcode::Copy(_) | Transcode::Latin1ToUtf16 | Transcode::Utf16ToCompactProbablyUtf16 | Transcode::Utf8ToLatin1 | Transcode::Utf16ToLatin1 | Transcode::Utf8ToUtf16 => { - args.push(ptr_param(builder, 0, from64, from_base)); - args.push(len_param(builder, 1, from64)); - args.push(ptr_param(builder, 2, to64, to_base)); + args.push(self.ptr_param(0, from64, from_base)); + args.push(self.len_param(1, from64)); + args.push(self.ptr_param(2, to64, to_base)); } Transcode::Utf16ToUtf8 | Transcode::Latin1ToUtf8 => { - args.push(ptr_param(builder, 0, from64, from_base)); - args.push(len_param(builder, 1, from64)); - args.push(ptr_param(builder, 2, to64, to_base)); - args.push(len_param(builder, 3, to64)); + args.push(self.ptr_param(0, from64, from_base)); + args.push(self.len_param(1, from64)); + args.push(self.ptr_param(2, to64, to_base)); + args.push(self.len_param(3, to64)); } Transcode::Utf8ToCompactUtf16 | Transcode::Utf16ToCompactUtf16 => { - args.push(ptr_param(builder, 0, from64, from_base)); - args.push(len_param(builder, 1, from64)); - args.push(ptr_param(builder, 2, to64, to_base)); - args.push(len_param(builder, 3, to64)); - args.push(len_param(builder, 4, to64)); + args.push(self.ptr_param(0, from64, from_base)); + args.push(self.len_param(1, from64)); + args.push(self.ptr_param(2, to64, to_base)); + args.push(self.len_param(3, to64)); + args.push(self.len_param(4, to64)); } }; if uses_retptr { - let slot = builder.func.create_sized_stack_slot(ir::StackSlotData::new( - ir::StackSlotKind::ExplicitSlot, - pointer_type.bytes(), - )); - args.push(builder.ins().stack_addr(pointer_type, slot, 0)); + let slot = self + .builder + .func + .create_sized_stack_slot(ir::StackSlotData::new( + ir::StackSlotKind::ExplicitSlot, + pointer_type.bytes(), + )); + args.push(self.builder.ins().stack_addr(pointer_type, slot, 0)); } - let call = builder.ins().call_indirect(sig, libcall, &args); - let mut results = builder.func.dfg.inst_results(call).to_vec(); + let call = self.builder.ins().call_indirect(sig, libcall, &args); + let mut results = self.builder.func.dfg.inst_results(call).to_vec(); if uses_retptr { - results.push(builder.ins().load( + results.push(self.builder.ins().load( pointer_type, ir::MemFlags::trusted(), *args.last().unwrap(), @@ -836,63 +728,90 @@ impl Compiler { } let mut raw_results = Vec::new(); - // Helper to cast a host pointer integer type to the destination type. - let cast_from_pointer = |builder: &mut FunctionBuilder<'_>, val: ir::Value, is64: bool| { - let host64 = pointer_type == ir::types::I64; - if is64 == host64 { - val - } else if !is64 { - assert!(host64); - builder.ins().ireduce(ir::types::I32, val) - } else { - assert!(!host64); - builder.ins().uextend(ir::types::I64, val) - } - }; - // Like the arguments the results are fairly similar across libcalls, so // they're lumped into various buckets here. 
- match transcoder.op { + match op { Transcode::Copy(_) | Transcode::Latin1ToUtf16 => {} Transcode::Utf8ToUtf16 | Transcode::Utf16ToCompactProbablyUtf16 | Transcode::Utf8ToCompactUtf16 | Transcode::Utf16ToCompactUtf16 => { - raw_results.push(cast_from_pointer(builder, results[0], to64)); + raw_results.push(self.cast_from_pointer(results[0], to64)); } Transcode::Latin1ToUtf8 | Transcode::Utf16ToUtf8 | Transcode::Utf8ToLatin1 | Transcode::Utf16ToLatin1 => { - raw_results.push(cast_from_pointer(builder, results[0], from64)); - raw_results.push(cast_from_pointer(builder, results[1], to64)); + raw_results.push(self.cast_from_pointer(results[0], from64)); + raw_results.push(self.cast_from_pointer(results[1], to64)); } }; - builder.ins().return_(&raw_results); + self.builder.ins().return_(&raw_results); } - fn load_runtime_memory_base( - &self, - builder: &mut FunctionBuilder<'_>, - vmctx: ir::Value, - offsets: &VMComponentOffsets, - mem: RuntimeMemoryIndex, - ) -> ir::Value { + // Helper function to cast an input parameter to the host pointer type. + fn len_param(&mut self, param: usize, is64: bool) -> ir::Value { + let val = self.builder.func.dfg.block_params(self.block0)[2 + param]; + self.cast_to_pointer(val, is64) + } + + // Helper function to interpret an input parameter as a pointer into + // linear memory. This will cast the input parameter to the host integer + // type and then add that value to the base. + // + // Note that bounds-checking happens in adapter modules, and this + // trampoline is simply calling the host libcall. + fn ptr_param(&mut self, param: usize, is64: bool, base: ir::Value) -> ir::Value { + let val = self.len_param(param, is64); + self.builder.ins().iadd(base, val) + } + + // Helper function to cast a core wasm input to a host pointer type + // which will go into the host libcall. + fn cast_to_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value { + let pointer_type = self.isa.pointer_type(); + let host64 = pointer_type == ir::types::I64; + if is64 == host64 { + val + } else if !is64 { + assert!(host64); + self.builder.ins().uextend(pointer_type, val) + } else { + assert!(!host64); + self.builder.ins().ireduce(pointer_type, val) + } + } + + // Helper to cast a host pointer integer type to the destination type. + fn cast_from_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value { + let host64 = self.isa.pointer_type() == ir::types::I64; + if is64 == host64 { + val + } else if !is64 { + assert!(host64); + self.builder.ins().ireduce(ir::types::I32, val) + } else { + assert!(!host64); + self.builder.ins().uextend(ir::types::I64, val) + } + } + + fn load_runtime_memory_base(&mut self, vmctx: ir::Value, mem: RuntimeMemoryIndex) -> ir::Value { let pointer_type = self.isa.pointer_type(); - let from_vmmemory_definition = builder.ins().load( + let from_vmmemory_definition = self.builder.ins().load( pointer_type, MemFlags::trusted(), vmctx, - i32::try_from(offsets.runtime_memory(mem)).unwrap(), + i32::try_from(self.offsets.runtime_memory(mem)).unwrap(), ); - builder.ins().load( + self.builder.ins().load( pointer_type, MemFlags::trusted(), from_vmmemory_definition, - i32::from(offsets.ptr.vmmemory_definition_base()), + i32::from(self.offsets.ptr.vmmemory_definition_base()), ) } } @@ -903,9 +822,8 @@ impl Compiler { /// Note that a macro is used here to keep this in sync with the actual /// transcoder functions themselves which are also defined via a macro. 
mod host { - use crate::compiler::Compiler; use cranelift_codegen::ir::{self, AbiParam}; - use cranelift_codegen::isa::CallConv; + use cranelift_codegen::isa::{CallConv, TargetIsa}; macro_rules! define { ( @@ -915,8 +833,8 @@ mod host { )* ) => { $( - pub(super) fn $name(compiler: &Compiler, func: &mut ir::Function) -> (ir::SigRef, u32) { - let pointer_type = compiler.isa.pointer_type(); + pub(super) fn $name(isa: &dyn TargetIsa, func: &mut ir::Function) -> (ir::SigRef, u32) { + let pointer_type = isa.pointer_type(); let params = vec![ $( AbiParam::new(define!(@ty pointer_type $param)) ),* ]; @@ -926,7 +844,7 @@ mod host { let sig = func.import_signature(ir::Signature { params, returns, - call_conv: CallConv::triple_default(compiler.isa.triple()), + call_conv: CallConv::triple_default(isa.triple()), }); (sig, offsets::$name) diff --git a/crates/environ/src/component/compiler.rs b/crates/environ/src/component/compiler.rs index b8256845a621..ed192e1df6df 100644 --- a/crates/environ/src/component/compiler.rs +++ b/crates/environ/src/component/compiler.rs @@ -1,7 +1,4 @@ -use crate::component::{ - Component, ComponentTypes, LowerImport, ResourceDrop, ResourceNew, ResourceRep, Transcoder, -}; -use crate::WasmFuncType; +use crate::component::{ComponentTranslation, ComponentTypes, TrampolineIndex}; use anyhow::Result; use serde::{Deserialize, Serialize}; use std::any::Any; @@ -35,84 +32,16 @@ impl AllCallFunc { /// Compilation support necessary for components. pub trait ComponentCompiler: Send + Sync { - /// Creates a trampoline for a `canon.lower`'d host function. + /// Compiles the pieces necessary to create a `VMFuncRef` for the + /// `trampoline` specified. /// - /// This function will create a suitable trampoline which can be called from - /// WebAssembly code and which will then call into host code. The signature - /// of this generated trampoline should have the appropriate wasm ABI for - /// the `lowering.canonical_abi` type signature (e.g. System-V). - /// - /// The generated trampoline will interpret its first argument as a - /// `*mut VMComponentContext` and use the `VMComponentOffsets` for - /// `component` to read necessary data (as specified by `lowering.options`) - /// and call the host function pointer. Notably the host function pointer - /// has the signature `VMLoweringCallee` where many of the arguments are - /// loaded from known offsets (for this particular generated trampoline) - /// from the `VMComponentContext`. - /// - /// Returns a compiler-specific `Box` which can be passed later to - /// `emit_obj` to crate an elf object. - fn compile_lowered_trampoline( - &self, - component: &Component, - lowering: &LowerImport, - types: &ComponentTypes, - ) -> Result>>; - - /// Creates a function which will always trap that has the `ty` specified. - /// - /// This will create a small trampoline whose only purpose is to generate a - /// trap at runtime. This is used to implement the degenerate case of a - /// `canon lift`'d function immediately being `canon lower`'d. - fn compile_always_trap(&self, ty: &WasmFuncType) -> Result>>; - - /// Compiles a trampoline to implement string transcoding from adapter - /// modules. - /// - /// The generated trampoline will invoke the `transcoder.op` libcall with - /// the various memory configuration provided in `transcoder`. This is used - /// to pass raw pointers to host functions to avoid the host having to deal - /// with base pointers, offsets, memory32-vs-64, etc. 
- /// - /// Note that all bounds checks for memories are present in adapters - /// themselves, and the host libcalls simply assume that the pointers are - /// valid. - fn compile_transcoder( - &self, - component: &Component, - transcoder: &Transcoder, - types: &ComponentTypes, - ) -> Result>>; - - /// Compiles a trampoline to use as the `resource.new` intrinsic in the - /// component model. - /// - /// The generated trampoline will invoke a host-defined libcall that will do - /// all the heavy lifting for this intrinsic, so this is primarily bridging - /// ABIs and inserting a `TypeResourceTableIndex` argument so the host - /// has the context about where this is coming from. - fn compile_resource_new( - &self, - component: &Component, - resource: &ResourceNew, - types: &ComponentTypes, - ) -> Result>>; - - /// Same as `compile_resource_new` except for the `resource.rep` intrinsic. - fn compile_resource_rep( - &self, - component: &Component, - resource: &ResourceRep, - types: &ComponentTypes, - ) -> Result>>; - - /// Similar to `compile_resource_new` but this additionally handles the - /// return value which may involve executing destructors and checking for - /// reentrance traps. - fn compile_resource_drop( + /// Each trampoline is a member of the `Trampoline` enumeration and has a + /// unique purpose and is translated differently. See the implementation of + /// this trait for Cranelift for more information. + fn compile_trampoline( &self, - component: &Component, - resource: &ResourceDrop, + component: &ComponentTranslation, types: &ComponentTypes, + trampoline: TrampolineIndex, ) -> Result>>; } diff --git a/crates/environ/src/component/dfg.rs b/crates/environ/src/component/dfg.rs index 48a527992cca..f81135e0d982 100644 --- a/crates/environ/src/component/dfg.rs +++ b/crates/environ/src/component/dfg.rs @@ -46,13 +46,9 @@ pub struct ComponentDfg { /// Same as `Component::exports` pub exports: IndexMap, - /// All known lowered host functions along with the configuration for each - /// lowering. - pub lowerings: Intern, - - /// All known "always trapping" trampolines and the function signature they - /// have. - pub always_trap: Intern, + /// All trampolines and their type signature which will need to get + /// compiled by Cranelift. + pub trampolines: PrimaryMap, /// Know reallocation functions which are used by `lowerings` (e.g. will be /// used by the host) @@ -71,16 +67,13 @@ pub struct ComponentDfg { /// out of the inlining pass of translation. pub adapters: Intern, - /// Metadata about string transcoders needed by adapter modules. - pub transcoders: Intern, - /// Metadata about all known core wasm instances created. /// /// This is mostly an ordered list and is not deduplicated based on contents /// unlike the items above. Creation of an `Instance` is side-effectful and /// all instances here are always required to be created. These are /// considered "roots" in dataflow. - pub instances: Intern, + pub instances: PrimaryMap, /// Number of component instances that were created during the inlining /// phase (this is not edited after creation). @@ -165,14 +158,11 @@ macro_rules! id { id! 
{ pub struct InstanceId(u32); - pub struct LowerImportId(u32); pub struct MemoryId(u32); pub struct ReallocId(u32); pub struct AdapterId(u32); pub struct PostReturnId(u32); - pub struct AlwaysTrapId(u32); pub struct AdapterModuleId(u32); - pub struct TranscoderId(u32); } /// Same as `info::InstantiateModule` @@ -204,15 +194,8 @@ pub enum Export { #[allow(missing_docs)] pub enum CoreDef { Export(CoreExport), - Lowered(LowerImportId), - AlwaysTrap(AlwaysTrapId), InstanceFlags(RuntimeComponentInstanceIndex), - Transcoder(TranscoderId), - - ResourceNew(TypeResourceTableIndex, SignatureIndex), - ResourceRep(TypeResourceTableIndex, SignatureIndex), - ResourceDrop(TypeResourceTableIndex, SignatureIndex), - + Trampoline(TrampolineIndex), /// This is a special variant not present in `info::CoreDef` which /// represents that this definition refers to a fused adapter function. This /// adapter is fully processed after the initial translation and @@ -254,14 +237,25 @@ impl CoreExport { } } -/// Same as `info::LowerImport` -#[derive(Hash, Eq, PartialEq, Clone)] +/// Same as `info::Trampoline` #[allow(missing_docs)] -pub struct LowerImport { - pub import: RuntimeImportIndex, - pub canonical_abi: SignatureIndex, - pub options: CanonicalOptions, - pub lower_ty: TypeFuncIndex, +pub enum Trampoline { + LowerImport { + import: RuntimeImportIndex, + options: CanonicalOptions, + lower_ty: TypeFuncIndex, + }, + Transcoder { + op: Transcode, + from: MemoryId, + from64: bool, + to: MemoryId, + to64: bool, + }, + AlwaysTrap, + ResourceNew(TypeResourceTableIndex), + ResourceRep(TypeResourceTableIndex), + ResourceDrop(TypeResourceTableIndex), } /// Same as `info::CanonicalOptions` @@ -275,18 +269,6 @@ pub struct CanonicalOptions { pub post_return: Option, } -/// Same as `info::Transcoder` -#[derive(Clone, Hash, Eq, PartialEq)] -#[allow(missing_docs)] -pub struct Transcoder { - pub op: Transcode, - pub from: MemoryId, - pub from64: bool, - pub to: MemoryId, - pub to64: bool, - pub signature: SignatureIndex, -} - /// Same as `info::Resource` #[allow(missing_docs)] pub struct Resource { @@ -309,19 +291,13 @@ impl Intern where K: EntityRef, { - /// Pushes a new `value` into this list without interning, assigning a new - /// unique key `K` to the value. - pub fn push(&mut self, value: V) -> K { - self.key_map.push(value) - } - /// Inserts the `value` specified into this set, returning either a fresh /// key `K` if this value hasn't been seen before or otherwise returning the /// previous `K` used to represent value. /// /// Note that this should only be used for component model items where the /// creation of `value` is not side-effectful. - pub fn push_uniq(&mut self, value: V) -> K + pub fn push(&mut self, value: V) -> K where V: Hash + Eq + Clone, { @@ -356,7 +332,7 @@ impl Default for Intern { impl ComponentDfg { /// Consumes the intermediate `ComponentDfg` to produce a final `Component` /// with a linear innitializer list. 
- pub fn finish(self) -> Component { + pub fn finish(self) -> ComponentTranslation { let mut linearize = LinearizeDfg { dfg: &self, initializers: Vec::new(), @@ -364,12 +340,10 @@ impl ComponentDfg { runtime_post_return: Default::default(), runtime_reallocs: Default::default(), runtime_instances: Default::default(), - runtime_always_trap: Default::default(), - runtime_lowerings: Default::default(), - runtime_transcoders: Default::default(), - runtime_resource_new: Default::default(), - runtime_resource_rep: Default::default(), - runtime_resource_drop: Default::default(), + num_lowerings: 0, + trampolines: Default::default(), + trampoline_defs: Default::default(), + trampoline_map: Default::default(), }; // Handle all side effects of this component in the order that they're @@ -391,28 +365,30 @@ impl ComponentDfg { // linearization are recorded into the `Component`. The number of // runtime values used for each index space is used from the `linearize` // result. - Component { - exports, - initializers: linearize.initializers, - - num_runtime_memories: linearize.runtime_memories.len() as u32, - num_runtime_post_returns: linearize.runtime_post_return.len() as u32, - num_runtime_reallocs: linearize.runtime_reallocs.len() as u32, - num_runtime_instances: linearize.runtime_instances.len() as u32, - num_always_trap: linearize.runtime_always_trap.len() as u32, - num_lowerings: linearize.runtime_lowerings.len() as u32, - num_transcoders: linearize.runtime_transcoders.len() as u32, - num_resource_new: linearize.runtime_resource_new.len() as u32, - num_resource_rep: linearize.runtime_resource_rep.len() as u32, - num_resource_drop: linearize.runtime_resource_drop.len() as u32, - - imports: self.imports, - import_types: self.import_types, - num_runtime_component_instances: self.num_runtime_component_instances, - num_resource_tables: self.num_resource_tables, - num_resources: (self.resources.len() + self.imported_resources.len()) as u32, - imported_resources: self.imported_resources, - defined_resource_instances: self.resources.iter().map(|(_, r)| r.instance).collect(), + ComponentTranslation { + trampolines: linearize.trampoline_defs, + component: Component { + exports, + initializers: linearize.initializers, + trampolines: linearize.trampolines, + num_lowerings: linearize.num_lowerings, + + num_runtime_memories: linearize.runtime_memories.len() as u32, + num_runtime_post_returns: linearize.runtime_post_return.len() as u32, + num_runtime_reallocs: linearize.runtime_reallocs.len() as u32, + num_runtime_instances: linearize.runtime_instances.len() as u32, + imports: self.imports, + import_types: self.import_types, + num_runtime_component_instances: self.num_runtime_component_instances, + num_resource_tables: self.num_resource_tables, + num_resources: (self.resources.len() + self.imported_resources.len()) as u32, + imported_resources: self.imported_resources, + defined_resource_instances: self + .resources + .iter() + .map(|(_, r)| r.instance) + .collect(), + }, } } @@ -426,16 +402,14 @@ impl ComponentDfg { struct LinearizeDfg<'a> { dfg: &'a ComponentDfg, initializers: Vec, + trampolines: PrimaryMap, + trampoline_defs: PrimaryMap, + trampoline_map: HashMap, runtime_memories: HashMap, runtime_reallocs: HashMap, runtime_post_return: HashMap, runtime_instances: HashMap, - runtime_always_trap: HashMap, - runtime_lowerings: HashMap, - runtime_transcoders: HashMap, - runtime_resource_new: HashMap, - runtime_resource_rep: HashMap, - runtime_resource_drop: HashMap, + num_lowerings: u32, } #[derive(Copy, Clone, 
Hash, Eq, PartialEq)] @@ -561,138 +535,58 @@ impl LinearizeDfg<'_> { fn core_def(&mut self, def: &CoreDef) -> info::CoreDef { match def { CoreDef::Export(e) => info::CoreDef::Export(self.core_export(e)), - CoreDef::AlwaysTrap(id) => info::CoreDef::AlwaysTrap(self.runtime_always_trap(*id)), - CoreDef::Lowered(id) => info::CoreDef::Lowered(self.runtime_lowering(*id)), CoreDef::InstanceFlags(i) => info::CoreDef::InstanceFlags(*i), CoreDef::Adapter(id) => info::CoreDef::Export(self.adapter(*id)), - CoreDef::Transcoder(id) => info::CoreDef::Transcoder(self.runtime_transcoder(*id)), - CoreDef::ResourceNew(id, ty) => info::CoreDef::ResourceNew(self.resource_new(*id, *ty)), - CoreDef::ResourceRep(id, ty) => info::CoreDef::ResourceRep(self.resource_rep(*id, *ty)), - CoreDef::ResourceDrop(id, ty) => { - info::CoreDef::ResourceDrop(self.resource_drop(*id, *ty)) - } + CoreDef::Trampoline(index) => info::CoreDef::Trampoline(self.trampoline(*index)), } } - fn runtime_always_trap(&mut self, id: AlwaysTrapId) -> RuntimeAlwaysTrapIndex { - self.intern( - id, - |me| &mut me.runtime_always_trap, - |me, id| me.dfg.always_trap[id], - |index, canonical_abi| { - GlobalInitializer::AlwaysTrap(AlwaysTrap { - index, - canonical_abi, - }) - }, - ) - } - - fn runtime_lowering(&mut self, id: LowerImportId) -> LoweredIndex { - self.intern( - id, - |me| &mut me.runtime_lowerings, - |me, id| { - let info = &me.dfg.lowerings[id]; - let options = me.options(&info.options); - (info.import, info.canonical_abi, options, info.lower_ty) - }, - |index, (import, canonical_abi, options, lower_ty)| { - GlobalInitializer::LowerImport(info::LowerImport { - index, - import, - canonical_abi, - options, - lower_ty, - }) - }, - ) - } - - fn runtime_transcoder(&mut self, id: TranscoderId) -> RuntimeTranscoderIndex { - self.intern( - id, - |me| &mut me.runtime_transcoders, - |me, id| { - let info = &me.dfg.transcoders[id]; - ( - info.op, - me.runtime_memory(info.from), - info.from64, - me.runtime_memory(info.to), - info.to64, - info.signature, - ) - }, - |index, (op, from, from64, to, to64, signature)| { - GlobalInitializer::Transcoder(info::Transcoder { - index, - op, - from, - from64, - to, - to64, - signature, - }) - }, - ) - } - - fn resource_new( - &mut self, - id: TypeResourceTableIndex, - signature: SignatureIndex, - ) -> RuntimeResourceNewIndex { - self.intern( - id, - |me| &mut me.runtime_resource_new, - |_me, id| id, - |index, resource| { - GlobalInitializer::ResourceNew(info::ResourceNew { - index, - resource, - signature, - }) - }, - ) - } - - fn resource_rep( - &mut self, - id: TypeResourceTableIndex, - signature: SignatureIndex, - ) -> RuntimeResourceRepIndex { - self.intern( - id, - |me| &mut me.runtime_resource_rep, - |_me, id| id, - |index, resource| { - GlobalInitializer::ResourceRep(info::ResourceRep { + fn trampoline(&mut self, index: TrampolineIndex) -> TrampolineIndex { + if let Some(idx) = self.trampoline_map.get(&index) { + return *idx; + } + let (signature, trampoline) = &self.dfg.trampolines[index]; + let trampoline = match trampoline { + Trampoline::LowerImport { + import, + options, + lower_ty, + } => { + let index = LoweredIndex::from_u32(self.num_lowerings); + self.num_lowerings += 1; + self.initializers.push(GlobalInitializer::LowerImport { index, - resource, - signature, - }) - }, - ) - } - - fn resource_drop( - &mut self, - id: TypeResourceTableIndex, - signature: SignatureIndex, - ) -> RuntimeResourceDropIndex { - self.intern( - id, - |me| &mut me.runtime_resource_drop, - |_me, ty| ty, - |index, resource| 
{ - GlobalInitializer::ResourceDrop(info::ResourceDrop { + import: *import, + }); + info::Trampoline::LowerImport { index, - resource, - signature, - }) + options: self.options(options), + lower_ty: *lower_ty, + } + } + Trampoline::Transcoder { + op, + from, + from64, + to, + to64, + } => info::Trampoline::Transcoder { + op: *op, + from: self.runtime_memory(*from), + from64: *from64, + to: self.runtime_memory(*to), + to64: *to64, }, - ) + Trampoline::AlwaysTrap => info::Trampoline::AlwaysTrap, + Trampoline::ResourceNew(ty) => info::Trampoline::ResourceNew(*ty), + Trampoline::ResourceDrop(ty) => info::Trampoline::ResourceDrop(*ty), + Trampoline::ResourceRep(ty) => info::Trampoline::ResourceRep(*ty), + }; + let i1 = self.trampolines.push(*signature); + let i2 = self.trampoline_defs.push(trampoline); + assert_eq!(i1, i2); + self.trampoline_map.insert(index, i1); + i1 } fn core_export(&mut self, export: &CoreExport) -> info::CoreExport diff --git a/crates/environ/src/component/info.rs b/crates/environ/src/component/info.rs index 1179491a0a6f..583d81e33f98 100644 --- a/crates/environ/src/component/info.rs +++ b/crates/environ/src/component/info.rs @@ -51,6 +51,15 @@ use crate::{EntityIndex, PrimaryMap, SignatureIndex, WasmType}; use indexmap::IndexMap; use serde::{Deserialize, Serialize}; +/// Metadata as a result of compiling a component. +pub struct ComponentTranslation { + /// Serializable information that will be emitted into the final artifact. + pub component: Component, + + /// Metadata about required trampolines and what they're supposed to do. + pub trampolines: PrimaryMap, +} + /// Run-time-type-information about a `Component`, its structure, and how to /// instantiate it. /// @@ -136,27 +145,13 @@ pub struct Component { /// Same as `num_runtime_reallocs`, but for post-return functions. pub num_runtime_post_returns: u32, + /// WebAssembly type signature of all trampolines. + pub trampolines: PrimaryMap, + /// The number of lowered host functions (maximum `LoweredIndex`) needed to /// instantiate this component. pub num_lowerings: u32, - /// The number of functions which "always trap" used to implement - /// `canon.lower` of `canon.lift`'d functions within the same component. - pub num_always_trap: u32, - - /// The number of host transcoder functions needed for strings in adapter - /// modules. - pub num_transcoders: u32, - - /// Number of `ResourceNew` initializers in the global initializers list. - pub num_resource_new: u32, - - /// Number of `ResourceRep` initializers in the global initializers list. - pub num_resource_rep: u32, - - /// Number of `ResourceDrop` initializers in the global initializers list. - pub num_resource_drop: u32, - /// Maximal number of tables that required at runtime for resource-related /// information in this component. pub num_resource_tables: usize, @@ -224,13 +219,18 @@ pub enum GlobalInitializer { /// `VMComponentContext` and information about this lowering such as the /// cranelift-compiled trampoline function pointer, the host function /// pointer the trampoline calls, and the canonical ABI options. - LowerImport(LowerImport), - - /// A core wasm function was "generated" via `canon lower` of a function - /// that was `canon lift`'d in the same component, meaning that the function - /// always traps. This is recorded within the `VMComponentContext` as a new - /// `VMFuncRef` that's available for use. - AlwaysTrap(AlwaysTrap), + LowerImport { + /// The index of the lowered function that's being created. 
+ /// + /// This is guaranteed to be the `n`th `LowerImport` instruction + /// if the index is `n`. + index: LoweredIndex, + + /// The index of the imported host function that is being lowered. + /// + /// It's guaranteed that this `RuntimeImportIndex` points to a function. + import: RuntimeImportIndex, + }, /// A core wasm linear memory is going to be saved into the /// `VMComponentContext`. @@ -250,27 +250,10 @@ pub enum GlobalInitializer { /// used as a `post-return` function. ExtractPostReturn(ExtractPostReturn), - /// Similar to `ExtractMemory` and friends and indicates that a `VMFuncRef` - /// needs to be initialized for a transcoder function and this will later be - /// used to instantiate an adapter module. - Transcoder(Transcoder), - /// Declares a new defined resource within this component. /// /// Contains information about the destructor, for example. Resource(Resource), - - /// Declares a new `resource.new` intrinsic should be initialized. - /// - /// This will initialize a `VMFuncRef` within the `VMComponentContext` for - /// the described resource. - ResourceNew(ResourceNew), - - /// Same as `ResourceNew`, but for `resource.rep` intrinsics. - ResourceRep(ResourceRep), - - /// Same as `ResourceNew`, but for `resource.drop` intrinsics. - ResourceDrop(ResourceDrop), } /// Metadata for extraction of a memory of what's being extracted and where it's @@ -323,58 +306,6 @@ pub enum InstantiateModule { ), } -/// Description of a lowered import used in conjunction with -/// `GlobalInitializer::LowerImport`. -#[derive(Debug, Serialize, Deserialize)] -pub struct LowerImport { - /// The index of the lowered function that's being created. - /// - /// This is guaranteed to be the `n`th `LowerImport` instruction - /// if the index is `n`. - pub index: LoweredIndex, - - /// The index of the imported host function that is being lowered. - /// - /// It's guaranteed that this `RuntimeImportIndex` points to a function. - pub import: RuntimeImportIndex, - - /// The type of the function that is being lowered, as perceived by the - /// component doing the lowering. - pub lower_ty: TypeFuncIndex, - - /// The core wasm signature of the function that's being created. - pub canonical_abi: SignatureIndex, - - /// The canonical ABI options used when lowering this function specified in - /// the original component. - pub options: CanonicalOptions, -} - -impl LowerImport { - /// Get the symbol name for this lowered import. - pub fn symbol_name(&self) -> String { - format!("wasm_component_lowering_{}", self.index.as_u32()) - } -} - -/// Description of what to initialize when a `GlobalInitializer::AlwaysTrap` is -/// encountered. -#[derive(Debug, Serialize, Deserialize)] -pub struct AlwaysTrap { - /// The index of the function that is being initialized in the - /// `VMComponentContext`. - pub index: RuntimeAlwaysTrapIndex, - /// The core wasm signature of the function that's inserted. - pub canonical_abi: SignatureIndex, -} - -impl AlwaysTrap { - /// Get the symbol name for this always-trap function. - pub fn symbol_name(&self) -> String { - format!("wasm_component_always_trap_{}", self.index.as_u32()) - } -} - /// Definition of a core wasm item and where it can come from within a /// component. /// @@ -387,29 +318,12 @@ pub enum CoreDef { /// This item refers to an export of a previously instantiated core wasm /// instance. Export(CoreExport), - /// This item is a core wasm function with the index specified here. 
Note - /// that this `LoweredIndex` corresponds to the nth - /// `GlobalInitializer::LowerImport` instruction. - Lowered(LoweredIndex), - /// This is used to represent a degenerate case of where a `canon lift`'d - /// function is immediately `canon lower`'d in the same instance. Such a - /// function always traps at runtime. - AlwaysTrap(RuntimeAlwaysTrapIndex), /// This is a reference to a wasm global which represents the /// runtime-managed flags for a wasm instance. InstanceFlags(RuntimeComponentInstanceIndex), - /// This refers to a cranelift-generated trampoline which calls to a - /// host-defined transcoding function. - Transcoder(RuntimeTranscoderIndex), - - /// This refers to a `resource.new` intrinsic described by the index - /// provided. These indices are created through `GlobalInitializer` - /// entries. - ResourceNew(RuntimeResourceNewIndex), - /// Same as `ResourceNew`, but for the `resource.rep` intrinsic - ResourceRep(RuntimeResourceRepIndex), - /// Same as `ResourceNew`, but for the `resource.drop` intrinsic - ResourceDrop(RuntimeResourceDropIndex), + /// This is a reference to a Cranelift-generated trampoline which is + /// described in the `trampolines` array. + Trampoline(TrampolineIndex), } impl From> for CoreDef @@ -530,54 +444,6 @@ pub enum StringEncoding { CompactUtf16, } -/// Information about a string transcoding function required by an adapter -/// module. -/// -/// A transcoder is used when strings are passed between adapter modules, -/// optionally changing string encodings at the same time. The transcoder is -/// implemented in a few different layers: -/// -/// * Each generated adapter module has some glue around invoking the transcoder -/// represented by this item. This involves bounds-checks and handling -/// `realloc` for example. -/// * Each transcoder gets a cranelift-generated trampoline which has the -/// appropriate signature for the adapter module in question. Existence of -/// this initializer indicates that this should be compiled by Cranelift. -/// * The cranelift-generated trampoline will invoke a "transcoder libcall" -/// which is implemented natively in Rust that has a signature independent of -/// memory64 configuration options for example. -#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq)] -pub struct Transcoder { - /// The index of the transcoder being defined and initialized. - /// - /// This indicates which `VMFuncRef` slot is written to in a - /// `VMComponentContext`. - pub index: RuntimeTranscoderIndex, - /// The transcoding operation being performed. - pub op: Transcode, - /// The linear memory that the string is being read from. - pub from: RuntimeMemoryIndex, - /// Whether or not the source linear memory is 64-bit or not. - pub from64: bool, - /// The linear memory that the string is being written to. - pub to: RuntimeMemoryIndex, - /// Whether or not the destination linear memory is 64-bit or not. - pub to64: bool, - /// The wasm signature of the cranelift-generated trampoline. - pub signature: SignatureIndex, -} - -impl Transcoder { - /// Get the symbol name for this transcoder function. 
- pub fn symbol_name(&self) -> String { - let index = self.index.as_u32(); - let op = self.op.symbol_fragment(); - let from = if self.from64 { "64" } else { "32" }; - let to = if self.to64 { "64" } else { "32" }; - format!("wasm_component_transcoder_{index}_{op}_memory{from}_to_memory{to}") - } -} - pub use crate::fact::{FixedEncoding, Transcode}; /// Description of a new resource declared in a `GlobalInitializer::Resource` @@ -597,58 +463,99 @@ pub struct Resource { pub instance: RuntimeComponentInstanceIndex, } -/// Description of a `resource.new` intrinsic used to declare and initialize a -/// new `VMFuncRef` which generates the core wasm function corresponding to -/// `resource.new`. -#[derive(Debug, Serialize, Deserialize)] -pub struct ResourceNew { - /// The index of the intrinsic being created. - pub index: RuntimeResourceNewIndex, - /// The resource table that this intrinsic will be modifying. - pub resource: TypeResourceTableIndex, - /// The core wasm signature of the intrinsic, always `(func (param i32) - /// (result i32))`. - pub signature: SignatureIndex, -} +/// A list of all possible trampolines that may be required to compile a +/// component completely. +/// +/// These trampolines are used often as core wasm definitions and require +/// Cranelift support to generate these functions. Each trampoline serves a +/// different purpose for implementing bits and pieces of the component model. +/// +/// All trampolines have a core wasm function signature associated with them +/// which is stored in the `Component::trampolines` array. +/// +/// Note that this type does not implement `Serialize` or `Deserialize` and +/// that's intentional as this isn't stored in the final compilation artifact. +pub enum Trampoline { + /// Description of a lowered import used in conjunction with + /// `GlobalInitializer::LowerImport`. + LowerImport { + /// The runtime lowering state that this trampoline will access. + index: LoweredIndex, + + /// The type of the function that is being lowered, as perceived by the + /// component doing the lowering. + lower_ty: TypeFuncIndex, + + /// The canonical ABI options used when lowering this function specified + /// in the original component. + options: CanonicalOptions, + }, -impl ResourceNew { - /// Returns the compiled artifact symbol name for this intrinsic. - pub fn symbol_name(&self) -> String { - let resource = self.resource.as_u32(); - format!("wasm_component_resource_new{resource}") - } -} + /// Information about a string transcoding function required by an adapter + /// module. + /// + /// A transcoder is used when strings are passed between adapter modules, + /// optionally changing string encodings at the same time. The transcoder is + /// implemented in a few different layers: + /// + /// * Each generated adapter module has some glue around invoking the + /// transcoder represented by this item. This involves bounds-checks and + /// handling `realloc` for example. + /// * Each transcoder gets a cranelift-generated trampoline which has the + /// appropriate signature for the adapter module in question. Existence of + /// this initializer indicates that this should be compiled by Cranelift. + /// * The cranelift-generated trampoline will invoke a "transcoder libcall" + /// which is implemented natively in Rust that has a signature independent + /// of memory64 configuration options for example. + Transcoder { + /// The transcoding operation being performed. + op: Transcode, + /// The linear memory that the string is being read from. 
+ from: RuntimeMemoryIndex, + /// Whether or not the source linear memory is 64-bit or not. + from64: bool, + /// The linear memory that the string is being written to. + to: RuntimeMemoryIndex, + /// Whether or not the destination linear memory is 64-bit or not. + to64: bool, + }, -/// Same as `ResourceNew`, but for the `resource.rep` intrinsic. -#[derive(Debug, Serialize, Deserialize)] -#[allow(missing_docs)] -pub struct ResourceRep { - pub index: RuntimeResourceRepIndex, - pub resource: TypeResourceTableIndex, - pub signature: SignatureIndex, -} + /// A small adapter which simply traps, used for degenerate lift/lower + /// combinations. + AlwaysTrap, -impl ResourceRep { - /// Returns the compiled artifact symbol name for this intrinsic. - pub fn symbol_name(&self) -> String { - let resource = self.resource.as_u32(); - format!("wasm_component_resource_rep{resource}") - } -} + /// A `resource.new` intrinsic which will inject a new resource into the + /// table specified. + ResourceNew(TypeResourceTableIndex), -/// Same as `ResourceNew`, but for the `resource.drop` intrinsic. -#[derive(Debug, Serialize, Deserialize)] -#[allow(missing_docs)] -pub struct ResourceDrop { - pub index: RuntimeResourceDropIndex, - pub resource: TypeResourceTableIndex, - pub signature: SignatureIndex, + /// Same as `ResourceNew`, but for the `resource.rep` intrinsic. + ResourceRep(TypeResourceTableIndex), + + /// Same as `ResourceNew`, but for the `resource.drop` intrinsic. + ResourceDrop(TypeResourceTableIndex), } -impl ResourceDrop { - /// Returns the compiled artifact symbol name for this intrinsic. +impl Trampoline { + /// Returns the name to use for the symbol of this trampoline in the final + /// compiled artifact pub fn symbol_name(&self) -> String { - let resource = self.resource.as_u32(); - format!("wasm_component_resource_drop{resource}") + use Trampoline::*; + match self { + LowerImport { index, .. } => { + format!("component-lower-import[{}]", index.as_u32()) + } + Transcoder { + op, from64, to64, .. 
+ } => { + let op = op.symbol_fragment(); + let from = if *from64 { "64" } else { "32" }; + let to = if *to64 { "64" } else { "32" }; + format!("component-transcode-{op}-m{from}-m{to}") + } + AlwaysTrap => format!("component-always-trap"), + ResourceNew(i) => format!("component-resource-new[{}]", i.as_u32()), + ResourceRep(i) => format!("component-resource-rep[{}]", i.as_u32()), + ResourceDrop(i) => format!("component-resource-drop[{}]", i.as_u32()), + } } } diff --git a/crates/environ/src/component/translate.rs b/crates/environ/src/component/translate.rs index 09c8ea12ff59..b054214536a8 100644 --- a/crates/environ/src/component/translate.rs +++ b/crates/environ/src/component/translate.rs @@ -301,7 +301,7 @@ impl<'a, 'data> Translator<'a, 'data> { mut self, component: &'data [u8], ) -> Result<( - Component, + ComponentTranslation, PrimaryMap>, )> { // First up wasmparser is used to actually perform the translation and diff --git a/crates/environ/src/component/translate/adapt.rs b/crates/environ/src/component/translate/adapt.rs index b695e58c30dc..91d1d6ab3985 100644 --- a/crates/environ/src/component/translate/adapt.rs +++ b/crates/environ/src/component/translate/adapt.rs @@ -276,19 +276,23 @@ fn fact_import_to_core_def( } } - let from = dfg.memories.push_uniq(unwrap_memory(from)); - let to = dfg.memories.push_uniq(unwrap_memory(to)); - dfg::CoreDef::Transcoder(dfg.transcoders.push_uniq(dfg::Transcoder { - op: *op, - from, - from64: *from64, - to, - to64: *to64, - signature: match ty { - EntityType::Function(signature) => signature, - _ => unreachable!(), + let from = dfg.memories.push(unwrap_memory(from)); + let to = dfg.memories.push(unwrap_memory(to)); + let signature = match ty { + EntityType::Function(signature) => signature, + _ => unreachable!(), + }; + let index = dfg.trampolines.push(( + signature, + dfg::Trampoline::Transcoder { + op: *op, + from, + from64: *from64, + to, + to64: *to64, }, - })) + )); + dfg::CoreDef::Trampoline(index) } } } @@ -379,15 +383,7 @@ impl PartitionAdapterModules { } // These items can't transitively depend on an adapter - dfg::CoreDef::Lowered(_) - | dfg::CoreDef::AlwaysTrap(_) - | dfg::CoreDef::InstanceFlags(_) - | dfg::CoreDef::ResourceNew(..) - | dfg::CoreDef::ResourceDrop(..) - | dfg::CoreDef::ResourceRep(..) => {} - - // should not be in the dfg yet - dfg::CoreDef::Transcoder(_) => unreachable!(), + dfg::CoreDef::Trampoline(_) | dfg::CoreDef::InstanceFlags(_) => {} } } diff --git a/crates/environ/src/component/translate/inline.rs b/crates/environ/src/component/translate/inline.rs index c0b68b345537..dc75a404c7fc 100644 --- a/crates/environ/src/component/translate/inline.rs +++ b/crates/environ/src/component/translate/inline.rs @@ -508,13 +508,15 @@ impl<'a> Inliner<'a> { ComponentFuncDef::Import(path) => { let import = self.runtime_import(path); let options = self.canonical_options(options_lower); - let index = self.result.lowerings.push_uniq(dfg::LowerImport { - canonical_abi: *canonical_abi, - import, - options, - lower_ty, - }); - dfg::CoreDef::Lowered(index) + let index = self.result.trampolines.push(( + *canonical_abi, + dfg::Trampoline::LowerImport { + import, + options, + lower_ty, + }, + )); + dfg::CoreDef::Trampoline(index) } // This case handles when a lifted function is later @@ -546,8 +548,11 @@ impl<'a> Inliner<'a> { options: options_lift, .. 
} if options_lift.instance == options_lower.instance => { - let index = self.result.always_trap.push_uniq(*canonical_abi); - dfg::CoreDef::AlwaysTrap(index) + let index = self + .result + .trampolines + .push((*canonical_abi, dfg::Trampoline::AlwaysTrap)); + dfg::CoreDef::Trampoline(index) } // Lowering a lifted function where the destination @@ -583,7 +588,7 @@ impl<'a> Inliner<'a> { func, options: options_lift, } => { - let adapter_idx = self.result.adapters.push_uniq(Adapter { + let adapter_idx = self.result.adapters.push(Adapter { lift_ty: *lift_ty, lift_options: options_lift.clone(), lower_ty, @@ -648,15 +653,27 @@ impl<'a> Inliner<'a> { // information and then new entries for each intrinsic are recorded. ResourceNew(id, ty) => { let id = types.resource_id(frame.translation.types_ref(), *id); - frame.funcs.push(dfg::CoreDef::ResourceNew(id, *ty)); + let index = self + .result + .trampolines + .push((*ty, dfg::Trampoline::ResourceNew(id))); + frame.funcs.push(dfg::CoreDef::Trampoline(index)); } ResourceRep(id, ty) => { let id = types.resource_id(frame.translation.types_ref(), *id); - frame.funcs.push(dfg::CoreDef::ResourceRep(id, *ty)); + let index = self + .result + .trampolines + .push((*ty, dfg::Trampoline::ResourceRep(id))); + frame.funcs.push(dfg::CoreDef::Trampoline(index)); } ResourceDrop(id, ty) => { let id = types.resource_id(frame.translation.types_ref(), *id); - frame.funcs.push(dfg::CoreDef::ResourceDrop(id, *ty)); + let index = self + .result + .trampolines + .push((*ty, dfg::Trampoline::ResourceDrop(id))); + frame.funcs.push(dfg::CoreDef::Trampoline(index)); } ModuleStatic(idx) => { @@ -989,13 +1006,11 @@ impl<'a> Inliner<'a> { fn canonical_options(&mut self, options: AdapterOptions) -> dfg::CanonicalOptions { let memory = options .memory - .map(|export| self.result.memories.push_uniq(export)); - let realloc = options - .realloc - .map(|def| self.result.reallocs.push_uniq(def)); + .map(|export| self.result.memories.push(export)); + let realloc = options.realloc.map(|def| self.result.reallocs.push(def)); let post_return = options .post_return - .map(|def| self.result.post_returns.push_uniq(def)); + .map(|def| self.result.post_returns.push(def)); dfg::CanonicalOptions { instance: options.instance, string_encoding: options.string_encoding, diff --git a/crates/environ/src/component/types.rs b/crates/environ/src/component/types.rs index d42dbc2c304c..38681ea1e731 100644 --- a/crates/environ/src/component/types.rs +++ b/crates/environ/src/component/types.rs @@ -195,9 +195,6 @@ indices! { /// component model. pub struct LoweredIndex(u32); - /// Same as `LoweredIndex` but for the `CoreDef::AlwaysTrap` variant. - pub struct RuntimeAlwaysTrapIndex(u32); - /// Index representing a linear memory extracted from a wasm instance /// which is stored in a `VMComponentContext`. This is used to deduplicate /// references to the same linear memory where it's only stored once in a @@ -213,21 +210,13 @@ indices! { /// Same as `RuntimeMemoryIndex` except for the `post-return` function. pub struct RuntimePostReturnIndex(u32); - /// Index into the list of transcoders identified during compilation. - /// - /// This is used to index the `VMFuncRef` slots reserved for string encoders - /// which reference linear memories defined within a component. - pub struct RuntimeTranscoderIndex(u32); - - /// Index into the list of `resource.new` intrinsics used by a component. + /// Index for all trampolines that are compiled in Cranelift for a + /// component. 
/// - /// This is used to allocate space in `VMComponentContext` and record - /// `VMFuncRef`s corresponding to the definition of the intrinsic. - pub struct RuntimeResourceNewIndex(u32); - /// Same as `RuntimeResourceNewIndex`, but for `resource.rep` - pub struct RuntimeResourceDropIndex(u32); - /// Same as `RuntimeResourceNewIndex`, but for `resource.drop` - pub struct RuntimeResourceRepIndex(u32); + /// This is used to point to various bits of metadata within a compiled + /// component and is stored in the final compilation artifact. This does not + /// have a direct corresponance to any wasm definition. + pub struct TrampolineIndex(u32); } // Reexport for convenience some core-wasm indices which are also used in the diff --git a/crates/environ/src/component/vmcomponent_offsets.rs b/crates/environ/src/component/vmcomponent_offsets.rs index 11650c9c3b76..a770fb2eeecf 100644 --- a/crates/environ/src/component/vmcomponent_offsets.rs +++ b/crates/environ/src/component/vmcomponent_offsets.rs @@ -6,12 +6,7 @@ // store: *mut dyn Store, // limits: *const VMRuntimeLimits, // flags: [VMGlobalDefinition; component.num_runtime_component_instances], -// lowering_func_refs: [VMFuncRef; component.num_lowerings], -// always_trap_func_refs: [VMFuncRef; component.num_always_trap], -// transcoder_func_refs: [VMFuncRef; component.num_transcoders], -// resource_new_func_refs: [VMFuncRef; component.num_resource_new], -// resource_rep_func_refs: [VMFuncRef; component.num_resource_rep], -// resource_drop_func_refs: [VMFuncRef; component.num_resource_drop], +// trampoline_func_refs: [VMFuncRef; component.num_trampolines], // lowerings: [VMLowering; component.num_lowerings], // memories: [*mut VMMemoryDefinition; component.num_memories], // reallocs: [*mut VMFuncRef; component.num_reallocs], @@ -57,17 +52,8 @@ pub struct VMComponentOffsets
<P>
{ /// Number of component instances internally in the component (always at /// least 1). pub num_runtime_component_instances: u32, - /// Number of "always trap" functions which have their - /// `VMFuncRef` stored inline in the `VMComponentContext`. - pub num_always_trap: u32, - /// Number of transcoders needed for string conversion. - pub num_transcoders: u32, - /// Number of `resource.new` intrinsics within a component. - pub num_resource_new: u32, - /// Number of `resource.rep` intrinsics within a component. - pub num_resource_rep: u32, - /// Number of `resource.drop` intrinsics within a component. - pub num_resource_drop: u32, + /// Number of cranelift-compiled trampolines required for this component. + pub num_trampolines: u32, /// Number of resources within a component which need destructors stored. pub num_resources: u32, @@ -77,12 +63,7 @@ pub struct VMComponentOffsets
<P>
{ store: u32, limits: u32, flags: u32, - lowering_func_refs: u32, - always_trap_func_refs: u32, - transcoder_func_refs: u32, - resource_new_func_refs: u32, - resource_rep_func_refs: u32, - resource_drop_func_refs: u32, + trampoline_func_refs: u32, lowerings: u32, memories: u32, reallocs: u32, @@ -103,7 +84,7 @@ impl VMComponentOffsets
<P>
{ pub fn new(ptr: P, component: &Component) -> Self { let mut ret = Self { ptr, - num_lowerings: component.num_lowerings.try_into().unwrap(), + num_lowerings: component.num_lowerings, num_runtime_memories: component.num_runtime_memories.try_into().unwrap(), num_runtime_reallocs: component.num_runtime_reallocs.try_into().unwrap(), num_runtime_post_returns: component.num_runtime_post_returns.try_into().unwrap(), @@ -111,23 +92,14 @@ impl VMComponentOffsets
<P>
{ .num_runtime_component_instances .try_into() .unwrap(), - num_always_trap: component.num_always_trap, - num_transcoders: component.num_transcoders, - num_resource_new: component.num_resource_new, - num_resource_rep: component.num_resource_rep, - num_resource_drop: component.num_resource_drop, + num_trampolines: component.trampolines.len().try_into().unwrap(), num_resources: component.num_resources, magic: 0, libcalls: 0, store: 0, limits: 0, flags: 0, - lowering_func_refs: 0, - always_trap_func_refs: 0, - transcoder_func_refs: 0, - resource_new_func_refs: 0, - resource_rep_func_refs: 0, - resource_drop_func_refs: 0, + trampoline_func_refs: 0, lowerings: 0, memories: 0, reallocs: 0, @@ -169,12 +141,7 @@ impl VMComponentOffsets
<P>
{ align(16), size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()), align(u32::from(ret.ptr.size())), - size(lowering_func_refs) = cmul(ret.num_lowerings, ret.ptr.size_of_vm_func_ref()), - size(always_trap_func_refs) = cmul(ret.num_always_trap, ret.ptr.size_of_vm_func_ref()), - size(transcoder_func_refs) = cmul(ret.num_transcoders, ret.ptr.size_of_vm_func_ref()), - size(resource_new_func_refs) = cmul(ret.num_resource_new, ret.ptr.size_of_vm_func_ref()), - size(resource_rep_func_refs) = cmul(ret.num_resource_rep, ret.ptr.size_of_vm_func_ref()), - size(resource_drop_func_refs) = cmul(ret.num_resource_drop, ret.ptr.size_of_vm_func_ref()), + size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()), size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2), size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()), size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()), @@ -229,82 +196,17 @@ impl VMComponentOffsets
<P>
{ self.limits } - /// The offset of the `lowering_func_refs` field. + /// The offset of the `trampoline_func_refs` field. #[inline] - pub fn lowering_func_refs(&self) -> u32 { - self.lowering_func_refs + pub fn trampoline_func_refs(&self) -> u32 { + self.trampoline_func_refs } /// The offset of `VMFuncRef` for the `index` specified. #[inline] - pub fn lowering_func_ref(&self, index: LoweredIndex) -> u32 { - assert!(index.as_u32() < self.num_lowerings); - self.lowering_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref()) - } - - /// The offset of the `always_trap_func_refs` field. - #[inline] - pub fn always_trap_func_refs(&self) -> u32 { - self.always_trap_func_refs - } - - /// The offset of `VMFuncRef` for the `index` specified. - #[inline] - pub fn always_trap_func_ref(&self, index: RuntimeAlwaysTrapIndex) -> u32 { - assert!(index.as_u32() < self.num_always_trap); - self.always_trap_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref()) - } - - /// The offset of the `transcoder_func_refs` field. - #[inline] - pub fn transcoder_func_refs(&self) -> u32 { - self.transcoder_func_refs - } - - /// The offset of `VMFuncRef` for the `index` specified. - #[inline] - pub fn transcoder_func_ref(&self, index: RuntimeTranscoderIndex) -> u32 { - assert!(index.as_u32() < self.num_transcoders); - self.transcoder_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref()) - } - - /// The offset of the `resource_new_func_refs` field. - #[inline] - pub fn resource_new_func_refs(&self) -> u32 { - self.resource_new_func_refs - } - - /// The offset of `VMFuncRef` for the `index` specified. - #[inline] - pub fn resource_new_func_ref(&self, index: RuntimeResourceNewIndex) -> u32 { - assert!(index.as_u32() < self.num_resource_new); - self.resource_new_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref()) - } - - /// The offset of the `resource_rep_func_refs` field. - #[inline] - pub fn resource_rep_func_refs(&self) -> u32 { - self.resource_rep_func_refs - } - - /// The offset of `VMFuncRef` for the `index` specified. - #[inline] - pub fn resource_rep_func_ref(&self, index: RuntimeResourceRepIndex) -> u32 { - assert!(index.as_u32() < self.num_resource_rep); - self.resource_rep_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref()) - } - - /// The offset of the `resource_drop_func_refs` field. - #[inline] - pub fn resource_drop_func_refs(&self) -> u32 { - self.resource_drop_func_refs - } - - /// The offset of `VMFuncRef` for the `index` specified. - #[inline] - pub fn resource_drop_func_ref(&self, index: RuntimeResourceDropIndex) -> u32 { - assert!(index.as_u32() < self.num_resource_drop); - self.resource_drop_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref()) + pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 { + assert!(index.as_u32() < self.num_trampolines); + self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref()) } /// The offset of the `lowerings` field. diff --git a/crates/environ/src/fact/transcode.rs b/crates/environ/src/fact/transcode.rs index 62947cbfab54..06cfccdefeb2 100644 --- a/crates/environ/src/fact/transcode.rs +++ b/crates/environ/src/fact/transcode.rs @@ -17,7 +17,7 @@ pub struct Transcoder { /// Note that each transcoding operation may have a unique signature depending /// on the precise operation. 
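The `vmcomponent_offsets.rs` changes above collapse the six per-kind `VMFuncRef` arrays in the `VMComponentContext` layout into one `trampoline_func_refs` array sized by `component.trampolines.len()`, so any trampoline's slot is found with a single multiply-add from the array base. The sketch below illustrates that arithmetic only; the 40-byte `VMFuncRef` size and the base offset are assumed placeholder values, not numbers taken from the real `VMComponentOffsets`, which derives everything from its `PtrSize` parameter.

```rust
// Sketch only: an assumed `VMFuncRef` size for a 64-bit target, used purely
// to show the shape of the computation.
const SIZE_OF_VM_FUNC_REF: u32 = 40;

/// Byte offset of the `index`th `VMFuncRef` inside the single
/// `trampoline_func_refs` array, mirroring the unified accessor.
fn trampoline_func_ref_offset(trampoline_func_refs: u32, num_trampolines: u32, index: u32) -> u32 {
    assert!(index < num_trampolines, "trampoline index out of range");
    trampoline_func_refs + index * SIZE_OF_VM_FUNC_REF
}

fn main() {
    // With an assumed array base of 0x60, the third trampoline's funcref
    // lives 2 * 40 bytes past the base.
    assert_eq!(trampoline_func_ref_offset(0x60, 8, 2), 0x60 + 80);
}
```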
#[allow(missing_docs)] -#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)] pub enum Transcode { Copy(FixedEncoding), Latin1ToUtf16, diff --git a/crates/runtime/src/component.rs b/crates/runtime/src/component.rs index 50b836464965..2b4d41f21a94 100644 --- a/crates/runtime/src/component.rs +++ b/crates/runtime/src/component.rs @@ -313,51 +313,25 @@ impl ComponentInstance { } } - /// Returns the core wasm function pointer corresponding to the lowering - /// index specified. + /// Returns the core wasm `funcref` corresponding to the trampoline + /// specified. /// /// The returned function is suitable to pass directly to a wasm module - /// instantiation and the function is a cranelift-compiled trampoline. + /// instantiation and the function contains cranelift-compiled trampolines. /// /// This can only be called after `idx` has been initialized at runtime /// during the instantiation process of a component. - pub fn lowering_func_ref(&self, idx: LoweredIndex) -> NonNull { - unsafe { self.func_ref(self.offsets.lowering_func_ref(idx)) } - } - - /// Same as `lowering_func_ref` except for the functions that always trap. - pub fn always_trap_func_ref(&self, idx: RuntimeAlwaysTrapIndex) -> NonNull { - unsafe { self.func_ref(self.offsets.always_trap_func_ref(idx)) } - } - - /// Same as `lowering_func_ref` except for the transcoding functions. - pub fn transcoder_func_ref(&self, idx: RuntimeTranscoderIndex) -> NonNull { - unsafe { self.func_ref(self.offsets.transcoder_func_ref(idx)) } - } - - /// Same as `lowering_func_ref` except for the `resource.new` functions. - pub fn resource_new_func_ref(&self, idx: RuntimeResourceNewIndex) -> NonNull { - unsafe { self.func_ref(self.offsets.resource_new_func_ref(idx)) } - } - - /// Same as `lowering_func_ref` except for the `resource.rep` functions. - pub fn resource_rep_func_ref(&self, idx: RuntimeResourceRepIndex) -> NonNull { - unsafe { self.func_ref(self.offsets.resource_rep_func_ref(idx)) } - } - - /// Same as `lowering_func_ref` except for the `resource.drop` functions. - pub fn resource_drop_func_ref(&self, idx: RuntimeResourceDropIndex) -> NonNull { - unsafe { self.func_ref(self.offsets.resource_drop_func_ref(idx)) } - } - - unsafe fn func_ref(&self, offset: u32) -> NonNull { - let ret = self.vmctx_plus_offset::(offset); - debug_assert!( - mem::transmute::>, usize>((*ret).wasm_call) - != INVALID_PTR - ); - debug_assert!((*ret).vmctx as usize != INVALID_PTR); - NonNull::new(ret.cast_mut()).unwrap() + pub fn trampoline_func_ref(&self, idx: TrampolineIndex) -> NonNull { + unsafe { + let offset = self.offsets.trampoline_func_ref(idx); + let ret = self.vmctx_plus_offset::(offset); + debug_assert!( + mem::transmute::>, usize>((*ret).wasm_call) + != INVALID_PTR + ); + debug_assert!((*ret).vmctx as usize != INVALID_PTR); + NonNull::new(ret.cast_mut()).unwrap() + } } /// Stores the runtime memory pointer at the index specified. @@ -399,24 +373,9 @@ impl ComponentInstance { } } - /// Configures a lowered host function with all the pieces necessary. - /// - /// * `idx` - the index that's being configured - /// * `lowering` - the host-related closure information to get invoked when - /// the lowering is called. - /// * `{wasm,native,array}_call` - the cranelift-compiled trampolines which will - /// read the `VMComponentContext` and invoke `lowering` provided. - /// * `type_index` - the signature index for the core wasm type - /// registered within the engine already. 
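The unified `ComponentInstance::trampoline_func_ref` accessor above keeps the existing debug assertions against `INVALID_PTR`, and the initialization loop later in this patch poisons every `trampoline_func_refs` slot with that sentinel before instantiation runs. A minimal stand-alone sketch of that write-once/read-after-init discipline; the sentinel value and slot representation here are illustrative stand-ins, not the real `VMComponentContext` layout.

```rust
const INVALID_PTR: usize = usize::MAX; // stand-in sentinel value

struct FuncRefSlot(usize);

impl FuncRefSlot {
    fn poisoned() -> FuncRefSlot {
        FuncRefSlot(INVALID_PTR)
    }

    /// Each slot is written exactly once while initializers run.
    fn set(&mut self, ptr: usize) {
        debug_assert!(self.0 == INVALID_PTR, "funcref slot initialized twice");
        self.0 = ptr;
    }

    /// Reads are only legal after the slot has been initialized.
    fn get(&self) -> usize {
        debug_assert!(self.0 != INVALID_PTR, "funcref slot read before init");
        self.0
    }
}

fn main() {
    let mut slot = FuncRefSlot::poisoned();
    slot.set(0x1000);
    assert_eq!(slot.get(), 0x1000);
}
```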
- pub fn set_lowering( - &mut self, - idx: LoweredIndex, - lowering: VMLowering, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { + /// Configures host runtime lowering information associated with imported f + /// functions for the `idx` specified. + pub fn set_lowering(&mut self, idx: LoweredIndex, lowering: VMLowering) { unsafe { debug_assert!( *self.vmctx_plus_offset::(self.offsets.lowering_callee(idx)) == INVALID_PTR @@ -425,135 +384,32 @@ impl ComponentInstance { *self.vmctx_plus_offset::(self.offsets.lowering_data(idx)) == INVALID_PTR ); *self.vmctx_plus_offset_mut(self.offsets.lowering(idx)) = lowering; - self.set_func_ref( - self.offsets.lowering_func_ref(idx), - wasm_call, - native_call, - array_call, - type_index, - ); - } - } - - /// Same as `set_lowering` but for the "always trap" functions. - pub fn set_always_trap( - &mut self, - idx: RuntimeAlwaysTrapIndex, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - unsafe { - self.set_func_ref( - self.offsets.always_trap_func_ref(idx), - wasm_call, - native_call, - array_call, - type_index, - ); - } - } - - /// Same as `set_lowering` but for the transcoder functions. - pub fn set_transcoder( - &mut self, - idx: RuntimeTranscoderIndex, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - unsafe { - self.set_func_ref( - self.offsets.transcoder_func_ref(idx), - wasm_call, - native_call, - array_call, - type_index, - ); - } - } - - /// Same as `set_lowering` but for the resource.new functions. - pub fn set_resource_new( - &mut self, - idx: RuntimeResourceNewIndex, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - unsafe { - self.set_func_ref( - self.offsets.resource_new_func_ref(idx), - wasm_call, - native_call, - array_call, - type_index, - ); - } - } - - /// Same as `set_lowering` but for the resource.rep functions. - pub fn set_resource_rep( - &mut self, - idx: RuntimeResourceRepIndex, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - unsafe { - self.set_func_ref( - self.offsets.resource_rep_func_ref(idx), - wasm_call, - native_call, - array_call, - type_index, - ); } } /// Same as `set_lowering` but for the resource.drop functions. 
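With funcref installation moved out of `set_lowering` above, instantiation proceeds in two phases: every compiled trampoline's `VMFuncRef` is installed up front from the artifact via `set_trampoline`, and a `LowerImport` initializer then only records the host callee/data pair. The toy sketch below shows that wiring with stand-in types rather than the real `ComponentInstance`, `VMFuncRef`, or `VMLowering`.

```rust
use std::collections::HashMap;

struct TrampolineIndex(u32);
struct LoweredIndex(u32);

#[derive(Default)]
struct ToyInstance {
    func_refs: HashMap<u32, &'static str>, // stand-in for the VMFuncRef slots
    lowerings: HashMap<u32, &'static str>, // stand-in for the VMLowering slots
}

impl ToyInstance {
    fn set_trampoline(&mut self, idx: TrampolineIndex, compiled_symbol: &'static str) {
        self.func_refs.insert(idx.0, compiled_symbol);
    }

    fn set_lowering(&mut self, idx: LoweredIndex, host_data: &'static str) {
        self.lowerings.insert(idx.0, host_data);
    }
}

fn main() {
    let mut instance = ToyInstance::default();

    // Phase 1: install the funcref for every compiled trampoline up front,
    // before any global initializer runs.
    let symbols = ["component-lower-import[0]", "component-always-trap"];
    for (i, symbol) in symbols.into_iter().enumerate() {
        instance.set_trampoline(TrampolineIndex(i as u32), symbol);
    }

    // Phase 2: a `LowerImport { index, import }` initializer only records the
    // host closure for that lowering; its funcref is already in place.
    instance.set_lowering(LoweredIndex(0), "host import #0");

    assert_eq!(instance.func_refs.len(), 2);
    assert_eq!(instance.lowerings.len(), 1);
}
```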
- pub fn set_resource_drop( + pub fn set_trampoline( &mut self, - idx: RuntimeResourceDropIndex, + idx: TrampolineIndex, wasm_call: NonNull, native_call: NonNull, array_call: VMArrayCallFunction, type_index: VMSharedSignatureIndex, ) { unsafe { - self.set_func_ref( - self.offsets.resource_drop_func_ref(idx), - wasm_call, + let offset = self.offsets.trampoline_func_ref(idx); + debug_assert!(*self.vmctx_plus_offset::(offset) == INVALID_PTR); + let vmctx = VMOpaqueContext::from_vmcomponent(self.vmctx()); + *self.vmctx_plus_offset_mut(offset) = VMFuncRef { + wasm_call: Some(wasm_call), native_call, array_call, type_index, - ); + vmctx, + }; } } - unsafe fn set_func_ref( - &mut self, - offset: u32, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - debug_assert!(*self.vmctx_plus_offset::(offset) == INVALID_PTR); - let vmctx = VMOpaqueContext::from_vmcomponent(self.vmctx()); - *self.vmctx_plus_offset_mut(offset) = VMFuncRef { - wasm_call: Some(wasm_call), - native_call, - array_call, - type_index, - vmctx, - }; - } - /// Configures the destructor for a resource at the `idx` specified. /// /// This is required to be called for each resource as it's defined within a @@ -607,32 +463,10 @@ impl ComponentInstance { *self.vmctx_plus_offset_mut(offset) = INVALID_PTR; let offset = self.offsets.lowering_data(i); *self.vmctx_plus_offset_mut(offset) = INVALID_PTR; - let offset = self.offsets.lowering_func_ref(i); - *self.vmctx_plus_offset_mut(offset) = INVALID_PTR; - } - for i in 0..self.offsets.num_always_trap { - let i = RuntimeAlwaysTrapIndex::from_u32(i); - let offset = self.offsets.always_trap_func_ref(i); - *self.vmctx_plus_offset_mut(offset) = INVALID_PTR; - } - for i in 0..self.offsets.num_transcoders { - let i = RuntimeTranscoderIndex::from_u32(i); - let offset = self.offsets.transcoder_func_ref(i); - *self.vmctx_plus_offset_mut(offset) = INVALID_PTR; - } - for i in 0..self.offsets.num_resource_new { - let i = RuntimeResourceNewIndex::from_u32(i); - let offset = self.offsets.resource_new_func_ref(i); - *self.vmctx_plus_offset_mut(offset) = INVALID_PTR; - } - for i in 0..self.offsets.num_resource_rep { - let i = RuntimeResourceRepIndex::from_u32(i); - let offset = self.offsets.resource_rep_func_ref(i); - *self.vmctx_plus_offset_mut(offset) = INVALID_PTR; } - for i in 0..self.offsets.num_resource_drop { - let i = RuntimeResourceDropIndex::from_u32(i); - let offset = self.offsets.resource_drop_func_ref(i); + for i in 0..self.offsets.num_trampolines { + let i = TrampolineIndex::from_u32(i); + let offset = self.offsets.trampoline_func_ref(i); *self.vmctx_plus_offset_mut(offset) = INVALID_PTR; } for i in 0..self.offsets.num_runtime_memories { @@ -844,114 +678,22 @@ impl OwnedComponentInstance { } /// See `ComponentInstance::set_lowering` - pub fn set_lowering( - &mut self, - idx: LoweredIndex, - lowering: VMLowering, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - unsafe { - self.instance_mut().set_lowering( - idx, - lowering, - wasm_call, - native_call, - array_call, - type_index, - ) - } - } - - /// See `ComponentInstance::set_always_trap` - pub fn set_always_trap( - &mut self, - idx: RuntimeAlwaysTrapIndex, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - unsafe { - self.instance_mut() - .set_always_trap(idx, wasm_call, native_call, array_call, type_index) - } - } - - /// 
See `ComponentInstance::set_transcoder` - pub fn set_transcoder( - &mut self, - idx: RuntimeTranscoderIndex, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - unsafe { - self.instance_mut() - .set_transcoder(idx, wasm_call, native_call, array_call, type_index) - } - } - - /// See `ComponentInstance::set_resource_new` - pub fn set_resource_new( - &mut self, - idx: RuntimeResourceNewIndex, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - unsafe { - self.instance_mut().set_resource_new( - idx, - wasm_call, - native_call, - array_call, - type_index, - ) - } - } - - /// See `ComponentInstance::set_resource_rep` - pub fn set_resource_rep( - &mut self, - idx: RuntimeResourceRepIndex, - wasm_call: NonNull, - native_call: NonNull, - array_call: VMArrayCallFunction, - type_index: VMSharedSignatureIndex, - ) { - unsafe { - self.instance_mut().set_resource_rep( - idx, - wasm_call, - native_call, - array_call, - type_index, - ) - } + pub fn set_lowering(&mut self, idx: LoweredIndex, lowering: VMLowering) { + unsafe { self.instance_mut().set_lowering(idx, lowering) } } /// See `ComponentInstance::set_resource_drop` - pub fn set_resource_drop( + pub fn set_trampoline( &mut self, - idx: RuntimeResourceDropIndex, + idx: TrampolineIndex, wasm_call: NonNull, native_call: NonNull, array_call: VMArrayCallFunction, type_index: VMSharedSignatureIndex, ) { unsafe { - self.instance_mut().set_resource_drop( - idx, - wasm_call, - native_call, - array_call, - type_index, - ) + self.instance_mut() + .set_trampoline(idx, wasm_call, native_call, array_call, type_index) } } diff --git a/crates/wasmtime/src/compiler.rs b/crates/wasmtime/src/compiler.rs index 24fadd4cc996..de0f693847b0 100644 --- a/crates/wasmtime/src/compiler.rs +++ b/crates/wasmtime/src/compiler.rs @@ -46,7 +46,7 @@ struct CompileKey { } impl CompileKey { - const KIND_BITS: u32 = 4; + const KIND_BITS: u32 = 3; const KIND_OFFSET: u32 = 32 - Self::KIND_BITS; const KIND_MASK: u32 = ((1 << Self::KIND_BITS) - 1) << Self::KIND_OFFSET; @@ -104,52 +104,12 @@ impl CompileKey { #[cfg(feature = "component-model")] impl CompileKey { - const LOWERING_KIND: u32 = Self::new_kind(4); - const ALWAYS_TRAP_KIND: u32 = Self::new_kind(5); - const TRANSCODER_KIND: u32 = Self::new_kind(6); - const RESOURCE_NEW_KIND: u32 = Self::new_kind(7); - const RESOURCE_REP_KIND: u32 = Self::new_kind(8); - const RESOURCE_DROP_KIND: u32 = Self::new_kind(9); - const RESOURCE_DROP_WASM_TO_NATIVE_KIND: u32 = Self::new_kind(10); - - fn lowering(index: wasmtime_environ::component::LoweredIndex) -> Self { - Self { - namespace: Self::LOWERING_KIND, - index: index.as_u32(), - } - } - - fn always_trap(index: wasmtime_environ::component::RuntimeAlwaysTrapIndex) -> Self { - Self { - namespace: Self::ALWAYS_TRAP_KIND, - index: index.as_u32(), - } - } - - fn transcoder(index: wasmtime_environ::component::RuntimeTranscoderIndex) -> Self { - Self { - namespace: Self::TRANSCODER_KIND, - index: index.as_u32(), - } - } + const TRAMPOLINE_KIND: u32 = Self::new_kind(4); + const RESOURCE_DROP_WASM_TO_NATIVE_KIND: u32 = Self::new_kind(5); - fn resource_new(index: wasmtime_environ::component::RuntimeResourceNewIndex) -> Self { + fn trampoline(index: wasmtime_environ::component::TrampolineIndex) -> Self { Self { - namespace: Self::RESOURCE_NEW_KIND, - index: index.as_u32(), - } - } - - fn resource_rep(index: wasmtime_environ::component::RuntimeResourceRepIndex) 
-> Self { - Self { - namespace: Self::RESOURCE_REP_KIND, - index: index.as_u32(), - } - } - - fn resource_drop(index: wasmtime_environ::component::RuntimeResourceDropIndex) -> Self { - Self { - namespace: Self::RESOURCE_DROP_KIND, + namespace: Self::TRAMPOLINE_KIND, index: index.as_u32(), } } @@ -233,7 +193,7 @@ impl<'a> CompileInputs<'a> { #[cfg(feature = "component-model")] pub fn for_component( types: &'a wasmtime_environ::component::ComponentTypes, - component: &'a wasmtime_environ::component::Component, + component: &'a wasmtime_environ::component::ComponentTranslation, module_translations: impl IntoIterator< Item = ( StaticModuleIndex, @@ -246,111 +206,27 @@ impl<'a> CompileInputs<'a> { ret.collect_inputs_in_translations(types.module_types(), module_translations); - for init in &component.initializers { - match init { - wasmtime_environ::component::GlobalInitializer::AlwaysTrap(always_trap) => { - ret.push_input(move |_tunables, compiler| { - Ok(CompileOutput { - key: CompileKey::always_trap(always_trap.index), - symbol: always_trap.symbol_name(), - function: compiler - .component_compiler() - .compile_always_trap(&types[always_trap.canonical_abi])? - .into(), - info: None, - }) - }); - } - wasmtime_environ::component::GlobalInitializer::Transcoder(transcoder) => { - ret.push_input(move |_tunables, compiler| { - Ok(CompileOutput { - key: CompileKey::transcoder(transcoder.index), - symbol: transcoder.symbol_name(), - function: compiler - .component_compiler() - .compile_transcoder(component, transcoder, types)? - .into(), - info: None, - }) - }); - } - wasmtime_environ::component::GlobalInitializer::LowerImport(lower_import) => { - ret.push_input(move |_tunables, compiler| { - Ok(CompileOutput { - key: CompileKey::lowering(lower_import.index), - symbol: lower_import.symbol_name(), - function: compiler - .component_compiler() - .compile_lowered_trampoline(component, lower_import, types)? - .into(), - info: None, - }) - }); - } - - wasmtime_environ::component::GlobalInitializer::ResourceNew(r) => { - ret.push_input(move |_tunables, compiler| { - Ok(CompileOutput { - key: CompileKey::resource_new(r.index), - symbol: r.symbol_name(), - function: compiler - .component_compiler() - .compile_resource_new(component, r, types)? - .into(), - info: None, - }) - }); - } - wasmtime_environ::component::GlobalInitializer::ResourceRep(r) => { - ret.push_input(move |_tunables, compiler| { - Ok(CompileOutput { - key: CompileKey::resource_rep(r.index), - symbol: r.symbol_name(), - function: compiler - .component_compiler() - .compile_resource_rep(component, r, types)? - .into(), - info: None, - }) - }); - } - wasmtime_environ::component::GlobalInitializer::ResourceDrop(r) => { - ret.push_input(move |_tunables, compiler| { - Ok(CompileOutput { - key: CompileKey::resource_drop(r.index), - symbol: r.symbol_name(), - function: compiler - .component_compiler() - .compile_resource_drop(component, r, types)? - .into(), - info: None, - }) - }); - } - - wasmtime_environ::component::GlobalInitializer::Resource(_) - | wasmtime_environ::component::GlobalInitializer::InstantiateModule(_) - | wasmtime_environ::component::GlobalInitializer::ExtractMemory(_) - | wasmtime_environ::component::GlobalInitializer::ExtractRealloc(_) - | wasmtime_environ::component::GlobalInitializer::ExtractPostReturn(_) => { - // Nothing to compile for these. 
- } - } + for (idx, trampoline) in component.trampolines.iter() { + ret.push_input(move |_tunables, compiler| { + Ok(CompileOutput { + key: CompileKey::trampoline(idx), + symbol: trampoline.symbol_name(), + function: compiler + .component_compiler() + .compile_trampoline(component, types, idx)? + .into(), + info: None, + }) + }); } - // If a host-defined resource is destroyed from wasm then a - // wasm-to-native trampoline will be required when creating the - // `VMFuncRef` for the host resource's destructor. This snippet is an - // overeager approximation of this where if a component has any - // resources and the signature of `resource.drop` is mentioned anywhere - // in the component then assume this situation is going to happen. - // - // To handle this a wasm-to-native trampoline for the signature is - // generated here. Note that this may duplicate one wasm-to-native - // trampoline as it may already exist for the signature elsewhere in the - // file. Doing this here though helps simplify this compilation process - // so it's an accepted overhead for now. - if component.num_resources > 0 { + // If there are any resources defined within this component, the + // signature for `resource.drop` is mentioned somewhere, and the + // wasm-to-native trampoline for `resource.drop` hasn't been created yet + // then insert that here. This is possibly required by destruction of + // resources from the embedder and otherwise won't be explicitly + // requested through initializers above or such. + if component.component.num_resources > 0 { if let Some(sig) = types.find_resource_drop_signature() { ret.push_input(move |_tunables, compiler| { let trampoline = compiler.compile_wasm_to_native_trampoline(&types[sig])?; @@ -748,44 +624,9 @@ impl FunctionIndices { #[cfg(feature = "component-model")] { - artifacts.lowerings = self - .indices - .remove(&CompileKey::LOWERING_KIND) - .unwrap_or_default() - .into_iter() - .map(|(_id, x)| x.unwrap_all_call_func().map(|i| symbol_ids_and_locs[i].1)) - .collect(); - artifacts.transcoders = self - .indices - .remove(&CompileKey::TRANSCODER_KIND) - .unwrap_or_default() - .into_iter() - .map(|(_id, x)| x.unwrap_all_call_func().map(|i| symbol_ids_and_locs[i].1)) - .collect(); - artifacts.always_traps = self - .indices - .remove(&CompileKey::ALWAYS_TRAP_KIND) - .unwrap_or_default() - .into_iter() - .map(|(_id, x)| x.unwrap_all_call_func().map(|i| symbol_ids_and_locs[i].1)) - .collect(); - artifacts.resource_new = self - .indices - .remove(&CompileKey::RESOURCE_NEW_KIND) - .unwrap_or_default() - .into_iter() - .map(|(_id, x)| x.unwrap_all_call_func().map(|i| symbol_ids_and_locs[i].1)) - .collect(); - artifacts.resource_rep = self + artifacts.trampolines = self .indices - .remove(&CompileKey::RESOURCE_REP_KIND) - .unwrap_or_default() - .into_iter() - .map(|(_id, x)| x.unwrap_all_call_func().map(|i| symbol_ids_and_locs[i].1)) - .collect(); - artifacts.resource_drop = self - .indices - .remove(&CompileKey::RESOURCE_DROP_KIND) + .remove(&CompileKey::TRAMPOLINE_KIND) .unwrap_or_default() .into_iter() .map(|(_id, x)| x.unwrap_all_call_func().map(|i| symbol_ids_and_locs[i].1)) @@ -816,33 +657,8 @@ impl FunctionIndices { pub struct Artifacts { pub modules: PrimaryMap, #[cfg(feature = "component-model")] - pub lowerings: PrimaryMap< - wasmtime_environ::component::LoweredIndex, - wasmtime_environ::component::AllCallFunc, - >, - #[cfg(feature = "component-model")] - pub always_traps: PrimaryMap< - wasmtime_environ::component::RuntimeAlwaysTrapIndex, - 
wasmtime_environ::component::AllCallFunc, - >, - #[cfg(feature = "component-model")] - pub transcoders: PrimaryMap< - wasmtime_environ::component::RuntimeTranscoderIndex, - wasmtime_environ::component::AllCallFunc, - >, - #[cfg(feature = "component-model")] - pub resource_new: PrimaryMap< - wasmtime_environ::component::RuntimeResourceNewIndex, - wasmtime_environ::component::AllCallFunc, - >, - #[cfg(feature = "component-model")] - pub resource_rep: PrimaryMap< - wasmtime_environ::component::RuntimeResourceRepIndex, - wasmtime_environ::component::AllCallFunc, - >, - #[cfg(feature = "component-model")] - pub resource_drop: PrimaryMap< - wasmtime_environ::component::RuntimeResourceDropIndex, + pub trampolines: PrimaryMap< + wasmtime_environ::component::TrampolineIndex, wasmtime_environ::component::AllCallFunc, >, #[cfg(feature = "component-model")] @@ -855,14 +671,7 @@ impl Artifacts { pub fn unwrap_as_module_info(self) -> CompiledModuleInfo { assert_eq!(self.modules.len(), 1); #[cfg(feature = "component-model")] - { - assert!(self.lowerings.is_empty()); - assert!(self.always_traps.is_empty()); - assert!(self.transcoders.is_empty()); - assert!(self.resource_new.is_empty()); - assert!(self.resource_rep.is_empty()); - assert!(self.resource_drop.is_empty()); - } + assert!(self.trampolines.is_empty()); self.modules.into_iter().next().unwrap().1 } } diff --git a/crates/wasmtime/src/component/component.rs b/crates/wasmtime/src/component/component.rs index 7665ac8614a1..81548bcfcc53 100644 --- a/crates/wasmtime/src/component/component.rs +++ b/crates/wasmtime/src/component/component.rs @@ -9,9 +9,7 @@ use std::path::Path; use std::ptr::NonNull; use std::sync::Arc; use wasmtime_environ::component::{ - AllCallFunc, ComponentTypes, LoweredIndex, RuntimeAlwaysTrapIndex, RuntimeResourceDropIndex, - RuntimeResourceNewIndex, RuntimeResourceRepIndex, RuntimeTranscoderIndex, StaticModuleIndex, - Translator, + AllCallFunc, ComponentTypes, StaticModuleIndex, TrampolineIndex, Translator, }; use wasmtime_environ::{FunctionLoc, ObjectKind, PrimaryMap, ScopeVec}; use wasmtime_jit::{CodeMemory, CompiledModuleInfo}; @@ -61,30 +59,7 @@ struct CompiledComponentInfo { /// /// function pointers that end up in a `VMFuncRef` for each /// lowering. - lowerings: PrimaryMap>, - - /// Where the "always trap" functions are located within the `text` section - /// of `code_memory`. - /// - /// These functions are "degenerate functions" here solely to implement - /// functions that are `canon lift`'d then immediately `canon lower`'d. The - /// `u32` value here is the offset of the trap instruction from the start fo - /// the function. - always_trap: PrimaryMap>, - - /// Where all the cranelift-generated transcode functions are located in the - /// compiled image of this component. - transcoders: PrimaryMap>, - - /// Locations of cranelift-generated `resource.new` functions are located - /// within the component. - resource_new: PrimaryMap>, - - /// Same as `resource_new`, but for `resource.rep` intrinsics. - resource_rep: PrimaryMap>, - - /// Same as `resource_new`, but for `resource.drop` intrinsics. - resource_drop: PrimaryMap>, + trampolines: PrimaryMap>, /// The location of the wasm-to-native trampoline for the `resource.drop` /// intrinsic. 
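On the embedder side the same consolidation applies: `Artifacts` and `CompiledComponentInfo` above now carry one map keyed by `TrampolineIndex` whose values are `AllCallFunc` locations, replacing the six per-kind maps, and `trampoline_ptrs` resolves all three entry points from it. A small illustrative sketch of how a single table serves every trampoline kind, using toy types rather than the actual wasmtime structures.

```rust
struct FunctionLoc {
    start: u32,
    length: u32,
}

struct AllCallFunc<T> {
    wasm_call: T,
    array_call: T,
    native_call: T,
}

struct CompiledInfo {
    // Stand-in for `PrimaryMap<TrampolineIndex, AllCallFunc<FunctionLoc>>`,
    // indexed densely by trampoline index.
    trampolines: Vec<AllCallFunc<FunctionLoc>>,
}

impl CompiledInfo {
    fn trampoline_ptrs(&self, index: usize) -> &AllCallFunc<FunctionLoc> {
        &self.trampolines[index]
    }
}

fn main() {
    let loc = |start| FunctionLoc { start, length: 16 };
    let info = CompiledInfo {
        trampolines: vec![AllCallFunc {
            wasm_call: loc(0),
            array_call: loc(16),
            native_call: loc(32),
        }],
    };

    let ptrs = info.trampoline_ptrs(0);
    assert_eq!(ptrs.native_call.start, 32);
    println!(
        "wasm entry is {} bytes long, array entry starts at {}",
        ptrs.wasm_call.length, ptrs.array_call.start
    );
}
```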
@@ -237,13 +212,8 @@ impl Component { )?; let info = CompiledComponentInfo { - component, - always_trap: compilation_artifacts.always_traps, - lowerings: compilation_artifacts.lowerings, - transcoders: compilation_artifacts.transcoders, - resource_new: compilation_artifacts.resource_new, - resource_rep: compilation_artifacts.resource_rep, - resource_drop: compilation_artifacts.resource_drop, + component: component.component, + trampolines: compilation_artifacts.trampolines, resource_drop_wasm_to_native_trampoline: compilation_artifacts .resource_drop_wasm_to_native_trampoline, }; @@ -324,12 +294,12 @@ impl Component { self.inner.code.code_memory().text() } - fn all_call_func_ptrs(&self, func: &AllCallFunc) -> AllCallFuncPointers { + pub(crate) fn trampoline_ptrs(&self, index: TrampolineIndex) -> AllCallFuncPointers { let AllCallFunc { wasm_call, array_call, native_call, - } = func; + } = &self.inner.info.trampolines[index]; AllCallFuncPointers { wasm_call: self.func(wasm_call).cast(), array_call: unsafe { @@ -341,33 +311,6 @@ impl Component { } } - pub(crate) fn lowering_ptrs(&self, index: LoweredIndex) -> AllCallFuncPointers { - self.all_call_func_ptrs(&self.inner.info.lowerings[index]) - } - - pub(crate) fn always_trap_ptrs(&self, index: RuntimeAlwaysTrapIndex) -> AllCallFuncPointers { - self.all_call_func_ptrs(&self.inner.info.always_trap[index]) - } - - pub(crate) fn transcoder_ptrs(&self, index: RuntimeTranscoderIndex) -> AllCallFuncPointers { - self.all_call_func_ptrs(&self.inner.info.transcoders[index]) - } - - pub(crate) fn resource_new_ptrs(&self, index: RuntimeResourceNewIndex) -> AllCallFuncPointers { - self.all_call_func_ptrs(&self.inner.info.resource_new[index]) - } - - pub(crate) fn resource_rep_ptrs(&self, index: RuntimeResourceRepIndex) -> AllCallFuncPointers { - self.all_call_func_ptrs(&self.inner.info.resource_rep[index]) - } - - pub(crate) fn resource_drop_ptrs( - &self, - index: RuntimeResourceDropIndex, - ) -> AllCallFuncPointers { - self.all_call_func_ptrs(&self.inner.info.resource_drop[index]) - } - fn func(&self, loc: &FunctionLoc) -> NonNull { let text = self.text(); let trampoline = &text[loc.start as usize..][..loc.length as usize]; diff --git a/crates/wasmtime/src/component/instance.rs b/crates/wasmtime/src/component/instance.rs index eaf6065cc41d..e824539ff191 100644 --- a/crates/wasmtime/src/component/instance.rs +++ b/crates/wasmtime/src/component/instance.rs @@ -1,4 +1,3 @@ -use super::component::AllCallFuncPointers; use crate::component::func::HostFunc; use crate::component::matching::InstanceType; use crate::component::{Component, ComponentNamedList, Func, Lift, Lower, ResourceType, TypedFunc}; @@ -12,12 +11,9 @@ use std::marker; use std::ptr::NonNull; use std::sync::Arc; use wasmtime_environ::component::*; -use wasmtime_environ::{EntityIndex, EntityType, Global, PrimaryMap, SignatureIndex, WasmType}; +use wasmtime_environ::{EntityIndex, EntityType, Global, PrimaryMap, WasmType}; use wasmtime_runtime::component::{ComponentInstance, OwnedComponentInstance}; -use wasmtime_runtime::{ - VMArrayCallFunction, VMFuncRef, VMNativeCallFunction, VMSharedSignatureIndex, - VMWasmCallFunction, -}; +use wasmtime_runtime::VMFuncRef; /// An instantiated component. 
/// @@ -153,14 +149,9 @@ impl InstanceData { pub fn lookup_def(&self, store: &mut StoreOpaque, def: &CoreDef) -> wasmtime_runtime::Export { match def { CoreDef::Export(e) => self.lookup_export(store, e), - CoreDef::Lowered(idx) => { + CoreDef::Trampoline(idx) => { wasmtime_runtime::Export::Function(wasmtime_runtime::ExportFunction { - func_ref: self.state.lowering_func_ref(*idx), - }) - } - CoreDef::AlwaysTrap(idx) => { - wasmtime_runtime::Export::Function(wasmtime_runtime::ExportFunction { - func_ref: self.state.always_trap_func_ref(*idx), + func_ref: self.state.trampoline_func_ref(*idx), }) } CoreDef::InstanceFlags(idx) => { @@ -172,26 +163,6 @@ impl InstanceData { }, }) } - CoreDef::Transcoder(idx) => { - wasmtime_runtime::Export::Function(wasmtime_runtime::ExportFunction { - func_ref: self.state.transcoder_func_ref(*idx), - }) - } - CoreDef::ResourceNew(idx) => { - wasmtime_runtime::Export::Function(wasmtime_runtime::ExportFunction { - func_ref: self.state.resource_new_func_ref(*idx), - }) - } - CoreDef::ResourceRep(idx) => { - wasmtime_runtime::Export::Function(wasmtime_runtime::ExportFunction { - func_ref: self.state.resource_rep_func_ref(*idx), - }) - } - CoreDef::ResourceDrop(idx) => { - wasmtime_runtime::Export::Function(wasmtime_runtime::ExportFunction { - func_ref: self.state.resource_drop_func_ref(*idx), - }) - } } } @@ -330,6 +301,26 @@ impl<'a> Instantiator<'a> { self.data.state.set_resource_destructor(idx, Some(func_ref)); } + // Next configure all `VMFuncRef`s for trampolines that this component + // will require. These functions won't actually get used until their + // associated state has been initialized through the global initializers + // below, but the funcrefs can all be configured here. + for (idx, sig) in env_component.trampolines.iter() { + let ptrs = self.component.trampoline_ptrs(idx); + let signature = self + .component + .signatures() + .shared_signature(*sig) + .expect("found unregistered signature"); + self.data.state.set_trampoline( + idx, + ptrs.wasm_call, + ptrs.native_call, + ptrs.array_call, + signature, + ); + } + for initializer in env_component.initializers.iter() { match initializer { GlobalInitializer::InstantiateModule(m) => { @@ -377,9 +368,13 @@ impl<'a> Instantiator<'a> { self.data.instances.push(i); } - GlobalInitializer::LowerImport(import) => self.lower_import(import), - - GlobalInitializer::AlwaysTrap(trap) => self.always_trap(trap), + GlobalInitializer::LowerImport { import, index } => { + let func = match &self.imports[*import] { + RuntimeImport::Func(func) => func, + _ => unreachable!(), + }; + self.data.state.set_lowering(*index, func.lowering()); + } GlobalInitializer::ExtractMemory(mem) => self.extract_memory(store.0, mem), @@ -391,94 +386,12 @@ impl<'a> Instantiator<'a> { self.extract_post_return(store.0, post_return) } - GlobalInitializer::Transcoder(e) => self.transcoder(e), - GlobalInitializer::Resource(r) => self.resource(store.0, r), - GlobalInitializer::ResourceNew(r) => self.resource_new(r), - GlobalInitializer::ResourceRep(r) => self.resource_rep(r), - GlobalInitializer::ResourceDrop(r) => self.resource_drop(r), } } Ok(()) } - fn lower_import(&mut self, import: &LowerImport) { - let func = match &self.imports[import.import] { - RuntimeImport::Func(func) => func, - _ => unreachable!(), - }; - let AllCallFuncPointers { - wasm_call, - array_call, - native_call, - } = self.component.lowering_ptrs(import.index); - let type_index = self - .component - .signatures() - .shared_signature(import.canonical_abi) - .expect("found 
unregistered signature"); - self.data.state.set_lowering( - import.index, - func.lowering(), - wasm_call, - native_call, - array_call, - type_index, - ); - } - - fn set_funcref( - &mut self, - index: T, - signature: SignatureIndex, - func_ptrs: impl FnOnce(&Component, T) -> AllCallFuncPointers, - set_funcref: impl FnOnce( - &mut OwnedComponentInstance, - T, - NonNull, - NonNull, - VMArrayCallFunction, - VMSharedSignatureIndex, - ), - ) { - let AllCallFuncPointers { - wasm_call, - array_call, - native_call, - } = func_ptrs(&self.component, index); - let signature = self - .component - .signatures() - .shared_signature(signature) - .expect("found unregistered signature"); - set_funcref( - &mut self.data.state, - index, - wasm_call, - native_call, - array_call, - signature, - ) - } - - fn always_trap(&mut self, trap: &AlwaysTrap) { - self.set_funcref( - trap.index, - trap.canonical_abi, - Component::always_trap_ptrs, - OwnedComponentInstance::set_always_trap, - ) - } - - fn transcoder(&mut self, transcoder: &Transcoder) { - self.set_funcref( - transcoder.index, - transcoder.signature, - Component::transcoder_ptrs, - OwnedComponentInstance::set_transcoder, - ) - } - fn resource(&mut self, store: &mut StoreOpaque, resource: &Resource) { let dtor = resource .dtor @@ -498,33 +411,6 @@ impl<'a> Instantiator<'a> { debug_assert_eq!(i, index); } - fn resource_new(&mut self, resource: &ResourceNew) { - self.set_funcref( - resource.index, - resource.signature, - Component::resource_new_ptrs, - OwnedComponentInstance::set_resource_new, - ) - } - - fn resource_rep(&mut self, resource: &ResourceRep) { - self.set_funcref( - resource.index, - resource.signature, - Component::resource_rep_ptrs, - OwnedComponentInstance::set_resource_rep, - ) - } - - fn resource_drop(&mut self, resource: &ResourceDrop) { - self.set_funcref( - resource.index, - resource.signature, - Component::resource_drop_ptrs, - OwnedComponentInstance::set_resource_drop, - ) - } - fn extract_memory(&mut self, store: &mut StoreOpaque, memory: &ExtractMemory) { let mem = match self.data.lookup_export(store, &memory.export) { wasmtime_runtime::Export::Memory(m) => m,