use crate::{
abi::{array_sig, native_sig, wasm_sig, ABIOperand, ABIParams, ABISig, RetArea, ABI},
codegen::ptr_type_from_ptr_size,
isa::CallingConvention,
masm::{CalleeKind, MacroAssembler, OperandSize, RegImm, SPOffset, MAX_CONTEXT_ARGS},
reg::Reg,
};
use anyhow::{anyhow, Result};
use smallvec::SmallVec;
use std::mem;
use wasmtime_environ::{FuncIndex, PtrSize, WasmFuncType, WasmValType};
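/// The supported trampoline kinds, bridging between the host's calling
/// conventions and the Wasm calling convention.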
pub enum TrampolineKind {
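    /// Calling from the host to Wasm through the array calling
    /// convention; carries the index of the Wasm callee.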
ArrayToWasm(FuncIndex),
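    /// Calling from the host to Wasm through the native calling
    /// convention; carries the index of the Wasm callee.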
NativeToWasm(FuncIndex),
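    /// Calling from Wasm out to a native host function.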
WasmToNative,
}
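/// The size of a single slot in the array calling convention's value
/// array: every value occupies a 16-byte (`u128`) slot.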
const VALUE_SIZE: usize = mem::size_of::<u128>();
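/// Trampoline abstraction, responsible for emitting the code that adapts
/// between calling conventions when crossing the host/Wasm boundary.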
pub(crate) struct Trampoline<'a, M>
where
M: MacroAssembler,
{
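    /// The macro assembler used to emit the trampoline.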
masm: &'a mut M,
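    /// The main scratch register, used to hold temporary values and
    /// addresses.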
scratch_reg: Reg,
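    /// An auxiliary scratch register, for sequences that need a second
    /// temporary while the main scratch register is live.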
alloc_scratch_reg: Reg,
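    /// Registers that the trampoline must save and restore, as dictated
    /// by the calling convention.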
callee_saved_regs: SmallVec<[(Reg, OperandSize); 18]>,
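    /// The calling convention of the host-facing side of the trampoline.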
call_conv: &'a CallingConvention,
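    /// The pointer size of the target ISA.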
pointer_size: M::Ptr,
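    /// The Wasm value type matching the target's pointer width.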
pointer_type: WasmValType,
}
impl<'a, M> Trampoline<'a, M>
where
M: MacroAssembler,
{
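    /// Create a new trampoline emitter.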
pub fn new(
masm: &'a mut M,
scratch_reg: Reg,
alloc_scratch_reg: Reg,
call_conv: &'a CallingConvention,
pointer_size: M::Ptr,
) -> Self {
let size = pointer_size.size();
Self {
masm,
scratch_reg,
alloc_scratch_reg,
callee_saved_regs: <M::ABI as ABI>::callee_saved_regs(call_conv),
call_conv,
pointer_size,
pointer_type: ptr_type_from_ptr_size(size),
}
}
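    /// Emit an array-to-wasm trampoline: the host passes all arguments
    /// packed into a single array of values, and the trampoline unpacks
    /// them into the Wasm calling convention before calling the Wasm
    /// function directly. Results are written back into the same array.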
pub fn emit_array_to_wasm(mut self, ty: &WasmFuncType, callee_index: FuncIndex) -> Result<()> {
        let array_sig = array_sig::<M::ABI>(self.call_conv);
        let wasm_sig: ABISig = wasm_sig::<M::ABI>(ty);
let val_ptr = array_sig
.params
.get(2)
.map(|operand| RegImm::reg(operand.unwrap_reg()))
.ok_or_else(|| anyhow!("Expected value pointer to be in a register"))?;
let (vmctx, caller_vmctx) = Self::callee_and_caller_vmctx(&array_sig.params)?;
let (dst_callee_vmctx, dst_caller_vmctx) = Self::callee_and_caller_vmctx(&wasm_sig.params)?;
self.masm.prologue(caller_vmctx, &self.callee_saved_regs);
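        // Move the callee and caller VM context pointers into the
        // locations expected by the Wasm calling convention.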
self.masm
.mov(vmctx.into(), dst_callee_vmctx, self.pointer_type.into());
self.masm.mov(
caller_vmctx.into(),
dst_caller_vmctx,
self.pointer_type.into(),
);
let ret_area = self.make_ret_area(&wasm_sig);
let vmctx_runtime_limits_addr = self.vmctx_runtime_limits_addr(vmctx);
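        // Spill the register arguments so they survive the call; the
        // value array pointer ends up in the first spill slot.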
let (offsets, spill_size) = self.spill(&array_sig.params()[2..]);
        let reserved_stack = self.masm.call(wasm_sig.params_stack_size(), |masm| {
Self::save_last_wasm_entry_sp(
masm,
vmctx_runtime_limits_addr,
self.scratch_reg,
&self.pointer_size,
);
masm.mov(val_ptr, self.scratch_reg.into(), OperandSize::S64);
Self::load_values_from_array(
masm,
&wasm_sig,
ret_area.as_ref(),
self.scratch_reg,
self.alloc_scratch_reg,
);
CalleeKind::Direct(callee_index.as_u32())
});
        self.masm.free_stack(reserved_stack);
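        // Reload the value array pointer from its spill slot and copy the
        // results of the call back into the array.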
let val_ptr_offset = offsets[0];
self.masm
.load_ptr(self.masm.address_from_sp(val_ptr_offset), self.scratch_reg);
self.store_results_to_array(&wasm_sig, ret_area.as_ref());
if wasm_sig.has_stack_results() {
self.masm.free_stack(wasm_sig.results.size());
}
self.masm.free_stack(spill_size);
self.masm.epilogue(&self.callee_saved_regs);
Ok(())
}
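    /// Store the results of the Wasm call back into the caller's value
    /// array, whose base address is expected in the scratch register.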
fn store_results_to_array(&mut self, sig: &ABISig, ret_area: Option<&RetArea>) {
for (i, operand) in sig.results().iter().enumerate() {
let value_offset = (i * VALUE_SIZE) as u32;
match operand {
ABIOperand::Reg { ty, reg, .. } => self.masm.store(
(*reg).into(),
self.masm.address_at_reg(self.scratch_reg, value_offset),
(*ty).into(),
),
ABIOperand::Stack { ty, offset, .. } => {
let addr = match ret_area.unwrap() {
RetArea::SP(sp_offset) => {
let elem_offs = SPOffset::from_u32(sp_offset.as_u32() - offset);
self.masm.address_from_sp(elem_offs)
}
_ => unreachable!(),
};
let size: OperandSize = (*ty).into();
self.masm.load(addr, self.alloc_scratch_reg, size);
self.masm.store(
self.alloc_scratch_reg.into(),
self.masm.address_at_reg(self.scratch_reg, value_offset),
(*ty).into(),
);
}
}
}
}
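    /// Emit a native-to-wasm trampoline: arguments arrive in the host's
    /// native calling convention and are shuffled into the Wasm calling
    /// convention before a direct call to the Wasm function.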
pub fn emit_native_to_wasm(mut self, ty: &WasmFuncType, callee_index: FuncIndex) -> Result<()> {
        let native_sig = native_sig::<M::ABI>(ty, self.call_conv);
        let wasm_sig = wasm_sig::<M::ABI>(ty);
let (vmctx, caller_vmctx) = Self::callee_and_caller_vmctx(&native_sig.params)?;
self.masm.prologue(caller_vmctx, &self.callee_saved_regs);
let vmctx_runtime_limits_addr = self.vmctx_runtime_limits_addr(vmctx);
let ret_area = self.make_ret_area(&wasm_sig);
let (offsets, spill_size) = self.spill(native_sig.params());
let reserved_stack = self.masm.call(wasm_sig.params_stack_size(), |masm| {
Self::save_last_wasm_entry_sp(
masm,
vmctx_runtime_limits_addr,
self.scratch_reg,
&self.pointer_size,
);
Self::assign_args(
masm,
&wasm_sig.params_without_retptr(),
&native_sig.params_without_retptr(),
&offsets,
self.scratch_reg,
);
Self::load_retptr(masm, ret_area.as_ref(), &wasm_sig);
CalleeKind::Direct(callee_index.as_u32())
});
self.masm.free_stack(reserved_stack);
self.forward_results(&wasm_sig, &native_sig, ret_area.as_ref(), offsets.last());
if wasm_sig.has_stack_results() {
self.masm.free_stack(wasm_sig.results.size());
}
self.masm.free_stack(spill_size);
self.masm.epilogue(&self.callee_saved_regs);
Ok(())
}
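    /// Reserve stack space for a return area if the signature returns any
    /// results on the stack.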
fn make_ret_area(&mut self, sig: &ABISig) -> Option<RetArea> {
sig.has_stack_results().then(|| {
self.masm.reserve_stack(sig.results.size());
let offs = self.masm.sp_offset();
RetArea::sp(offs)
})
}
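    /// Load the address of the reserved return area into the callee's
    /// results-area operand: directly into its register, or spilled into
    /// its outgoing stack slot.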
fn load_retptr(masm: &mut M, ret_area: Option<&RetArea>, callee: &ABISig) {
if let Some(area) = ret_area {
match (area, callee.params.unwrap_results_area_operand()) {
(RetArea::SP(sp_offset), ABIOperand::Reg { ty, reg, .. }) => {
let addr = masm.address_from_sp(*sp_offset);
masm.load_addr(addr, *reg, (*ty).into());
}
(RetArea::SP(sp_offset), ABIOperand::Stack { ty, offset, .. }) => {
let retptr = masm.address_from_sp(*sp_offset);
let scratch = <M::ABI as ABI>::scratch_reg();
masm.load_addr(retptr, scratch, (*ty).into());
let retptr_slot = masm.address_from_sp(SPOffset::from_u32(*offset));
masm.store(scratch.into(), retptr_slot, (*ty).into());
}
_ => unreachable!(),
}
}
}
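    /// Forward the callee's results to the caller, covering every
    /// register/stack pairing between the two signatures. Register
    /// results are spilled first so that the scratch registers remain
    /// available while addresses are computed.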
fn forward_results(
&mut self,
callee_sig: &ABISig,
caller_sig: &ABISig,
callee_ret_area: Option<&RetArea>,
caller_retptr_offset: Option<&SPOffset>,
) {
let results_spill = self.spill(callee_sig.results());
let mut spill_offsets_iter = results_spill.0.iter();
let caller_retptr = caller_sig.has_stack_results().then(|| {
let fp = <M::ABI as ABI>::fp_reg();
let arg_base: u32 = <M::ABI as ABI>::arg_base_offset().into();
match caller_sig.params.unwrap_results_area_operand() {
ABIOperand::Reg { ty, .. } => {
let addr = self.masm.address_from_sp(*caller_retptr_offset.unwrap());
let size: OperandSize = (*ty).into();
self.masm.load(addr, self.scratch_reg, size);
self.scratch_reg
}
ABIOperand::Stack { ty, offset, .. } => {
let size: OperandSize = (*ty).into();
let addr = self.masm.address_at_reg(fp, arg_base + offset);
self.masm.load(addr, self.scratch_reg, size);
self.scratch_reg
}
}
});
for (callee_operand, caller_operand) in
callee_sig.results().iter().zip(caller_sig.results())
{
match (callee_operand, caller_operand) {
(ABIOperand::Reg { ty, .. }, ABIOperand::Stack { offset, .. }) => {
let reg_offset = spill_offsets_iter.next().unwrap();
let size: OperandSize = (*ty).into();
self.masm.load(
self.masm.address_from_sp(*reg_offset),
self.alloc_scratch_reg,
size,
);
self.masm.store(
self.alloc_scratch_reg.into(),
self.masm.address_at_reg(caller_retptr.unwrap(), *offset),
(*ty).into(),
);
}
(
ABIOperand::Stack { ty, offset, .. },
ABIOperand::Stack {
offset: caller_offset,
..
},
) => {
let addr = {
let base = callee_ret_area.unwrap().unwrap_sp();
let slot_offset = base.as_u32() - *offset;
self.masm.address_from_sp(SPOffset::from_u32(slot_offset))
};
let size: OperandSize = (*ty).into();
self.masm.load(addr, self.alloc_scratch_reg, size);
self.masm.store(
self.alloc_scratch_reg.into(),
self.masm
.address_at_reg(caller_retptr.unwrap(), *caller_offset),
(*ty).into(),
);
}
(ABIOperand::Stack { ty, offset, .. }, ABIOperand::Reg { reg, .. }) => {
let addr = {
let base = callee_ret_area.unwrap().unwrap_sp();
let slot_offset = base.as_u32() - *offset;
self.masm.address_from_sp(SPOffset::from_u32(slot_offset))
};
self.masm.load(addr, *reg, (*ty).into());
}
(ABIOperand::Reg { ty, .. }, ABIOperand::Reg { reg: dst, .. }) => {
let spill_offset = spill_offsets_iter.next().unwrap();
self.masm
.load(self.masm.address_from_sp(*spill_offset), *dst, (*ty).into());
}
}
}
self.masm.free_stack(results_spill.1);
}
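    /// Emit a wasm-to-native trampoline: before leaving Wasm, the exit
    /// frame pointer and program counter are recorded, and the arguments
    /// are shuffled from the Wasm calling convention into the native one.
    /// The host function is reached through an indirect call via its
    /// function reference.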
pub fn emit_wasm_to_native(mut self, ty: &WasmFuncType) -> Result<()> {
        let wasm_sig = wasm_sig::<M::ABI>(ty);
        let native_sig = native_sig::<M::ABI>(ty, self.call_conv);
        let (vmctx, caller_vmctx) = Self::callee_and_caller_vmctx(&wasm_sig.params)?;
let vmctx_runtime_limits_addr = self.vmctx_runtime_limits_addr(caller_vmctx);
self.masm.prologue(caller_vmctx, &[]);
Self::save_last_wasm_exit_fp_and_pc(
self.masm,
vmctx_runtime_limits_addr,
self.scratch_reg,
self.alloc_scratch_reg,
&self.pointer_size,
);
let ret_area = self.make_ret_area(&native_sig);
let (offsets, spill_size) = self.spill(wasm_sig.params());
let reserved_stack = self.masm.call(native_sig.params_stack_size(), |masm| {
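            // Preserve the callee VM context in a scratch register; the
            // argument assignment below may clobber its original register.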
            masm.mov(vmctx.into(), self.alloc_scratch_reg, OperandSize::S64);
Self::assign_args(
masm,
&native_sig.params_without_retptr(),
&wasm_sig.params_without_retptr(),
&offsets,
self.scratch_reg,
);
Self::load_retptr(masm, ret_area.as_ref(), &native_sig);
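            // Compute the offset of the host function's `native_call`
            // pointer within its function reference, then load the
            // pointer for the indirect call.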
let body_offset = self.pointer_size.vmnative_call_host_func_context_func_ref()
+ self.pointer_size.vm_func_ref_native_call();
let callee_addr = masm.address_at_reg(self.alloc_scratch_reg, body_offset.into());
masm.load_ptr(callee_addr, self.scratch_reg);
CalleeKind::Indirect(self.scratch_reg)
});
self.masm.free_stack(reserved_stack);
self.forward_results(&native_sig, &wasm_sig, ret_area.as_ref(), offsets.last());
if native_sig.has_stack_results() {
self.masm.free_stack(native_sig.results.size());
}
self.masm.free_stack(spill_size);
self.masm.epilogue(&[]);
Ok(())
}
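    /// Move the caller's arguments into the locations expected by the
    /// callee. Register arguments were previously spilled, so they are
    /// reloaded from their spill slots; stack arguments are copied from
    /// the caller's frame through the frame pointer.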
fn assign_args(
masm: &mut M,
callee_params: &[ABIOperand],
caller_params: &[ABIOperand],
caller_stack_offsets: &[SPOffset],
scratch: Reg,
) {
        assert_eq!(callee_params.len(), caller_params.len());
let arg_base_offset: u32 = <M::ABI as ABI>::arg_base_offset().into();
let fp = <M::ABI as ABI>::fp_reg();
let mut offset_index = 0;
callee_params
.iter()
.zip(caller_params)
.for_each(
|(callee_param, caller_param)| match (callee_param, caller_param) {
(ABIOperand::Reg { ty, reg: dst, .. }, ABIOperand::Reg { .. }) => {
let offset = caller_stack_offsets[offset_index];
let addr = masm.address_from_sp(offset);
masm.load(addr, *dst, (*ty).into());
offset_index += 1;
}
(ABIOperand::Stack { ty, offset, .. }, ABIOperand::Reg { .. }) => {
let spill_offset = caller_stack_offsets[offset_index];
let addr = masm.address_from_sp(spill_offset);
let size: OperandSize = (*ty).into();
masm.load(addr, scratch, size);
let arg_addr = masm.address_at_sp(SPOffset::from_u32(*offset));
masm.store(scratch.into(), arg_addr, (*ty).into());
offset_index += 1;
}
(ABIOperand::Reg { ty, reg: dst, .. }, ABIOperand::Stack { offset, .. }) => {
let addr = masm.address_at_reg(fp, arg_base_offset + offset);
masm.load(addr, *dst, (*ty).into());
}
(
ABIOperand::Stack {
ty,
offset: callee_offset,
..
},
ABIOperand::Stack {
offset: caller_offset,
..
},
) => {
let addr = masm.address_at_reg(fp, arg_base_offset + caller_offset);
masm.load(addr, scratch, (*ty).into());
let arg_addr = masm.address_at_sp(SPOffset::from_u32(*callee_offset));
masm.store(scratch.into(), arg_addr, (*ty).into());
}
},
);
}
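    /// Get the callee and caller `VMContext` registers, which all the
    /// supported calling conventions pass as the first two arguments.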
    fn callee_and_caller_vmctx(params: &ABIParams) -> Result<(Reg, Reg)> {
        let vmctx = params
            .get(0)
            .map(|operand| operand.unwrap_reg())
            .ok_or_else(|| anyhow!("Expected callee VMContext to be in a register"))?;
        let caller_vmctx = params
            .get(1)
            .map(|operand| operand.unwrap_reg())
            .ok_or_else(|| anyhow!("Expected caller VMContext to be in a register"))?;
        Ok((vmctx, caller_vmctx))
    }
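    /// Compute the address of the runtime limits field in the given
    /// `VMContext` register.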
fn vmctx_runtime_limits_addr(&mut self, vmctx: Reg) -> M::Address {
self.masm
.address_at_reg(vmctx, self.pointer_size.vmcontext_runtime_limits().into())
}
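    /// Push every register operand to the stack, returning the offset of
    /// each spill slot along with the total size spilled.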
fn spill(&mut self, operands: &[ABIOperand]) -> (SmallVec<[SPOffset; 6]>, u32) {
let mut offsets = SmallVec::new();
let mut spill_size = 0;
operands.iter().for_each(|param| {
if let Some(reg) = param.get_reg() {
let slot = self.masm.push(reg, param.ty().into());
offsets.push(slot.offset);
spill_size += slot.size;
}
});
(offsets, spill_size)
}
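    /// Load the Wasm arguments out of the caller's value array, whose
    /// base address is in `values_reg`, into the registers and stack
    /// slots of the Wasm calling convention, and wire up the results-area
    /// pointer if one was reserved.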
fn load_values_from_array(
masm: &mut M,
callee_sig: &ABISig,
ret_area: Option<&RetArea>,
values_reg: Reg,
scratch: Reg,
) {
callee_sig
.params_without_retptr()
.iter()
.skip(MAX_CONTEXT_ARGS)
.enumerate()
.for_each(|(i, param)| {
let value_offset = (i * VALUE_SIZE) as u32;
match param {
ABIOperand::Reg { reg, ty, .. } => masm.load(
masm.address_at_reg(values_reg, value_offset),
*reg,
(*ty).into(),
),
ABIOperand::Stack { offset, ty, .. } => {
masm.load(
masm.address_at_reg(values_reg, value_offset),
scratch,
(*ty).into(),
);
masm.store(
scratch.into(),
masm.address_at_sp(SPOffset::from_u32(*offset)),
(*ty).into(),
);
}
}
});
if let Some(offs) = ret_area {
let results_area_operand = callee_sig.params.unwrap_results_area_operand();
let addr = match offs {
RetArea::SP(sp_offset) => masm.address_from_sp(*sp_offset),
_ => unreachable!(),
};
match results_area_operand {
ABIOperand::Reg { ty, reg, .. } => {
                    masm.load_addr(addr, *reg, (*ty).into());
}
ABIOperand::Stack { ty, offset, .. } => {
masm.load_addr(addr, scratch, (*ty).into());
masm.store(
scratch.into(),
masm.address_at_sp(SPOffset::from_u32(*offset)),
(*ty).into(),
);
}
}
}
}
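    /// Record the current stack pointer in the runtime limits'
    /// `last_wasm_entry_sp` field, for use in stack walking.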
fn save_last_wasm_entry_sp(
masm: &mut M,
vm_runtime_limits_addr: M::Address,
scratch: Reg,
ptr: &impl PtrSize,
) {
let sp = <M::ABI as ABI>::sp_reg();
masm.load_ptr(vm_runtime_limits_addr, scratch);
let addr = masm.address_at_reg(scratch, ptr.vmruntime_limits_last_wasm_entry_sp().into());
masm.store(sp.into(), addr, OperandSize::S64);
}
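    /// Record the caller's frame pointer and return address in the
    /// runtime limits' `last_wasm_exit_fp` and `last_wasm_exit_pc`
    /// fields, reading both from the current frame before exiting Wasm.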
fn save_last_wasm_exit_fp_and_pc(
masm: &mut M,
vm_runtime_limits_addr: M::Address,
scratch: Reg,
alloc_scratch: Reg,
ptr: &impl PtrSize,
) {
masm.load_ptr(vm_runtime_limits_addr, alloc_scratch);
let last_wasm_exit_fp_addr = masm.address_at_reg(
alloc_scratch,
ptr.vmruntime_limits_last_wasm_exit_fp().into(),
);
let last_wasm_exit_pc_addr = masm.address_at_reg(
alloc_scratch,
ptr.vmruntime_limits_last_wasm_exit_pc().into(),
);
let fp = <M::ABI as ABI>::fp_reg();
let fp_addr = masm.address_at_reg(fp, 0);
masm.load_ptr(fp_addr, scratch);
masm.store(scratch.into(), last_wasm_exit_fp_addr, OperandSize::S64);
let ret_addr_offset = <M::ABI as ABI>::ret_addr_offset();
let ret_addr = masm.address_at_reg(fp, ret_addr_offset.into());
masm.load_ptr(ret_addr, scratch);
masm.store(scratch.into(), last_wasm_exit_pc_addr, OperandSize::S64);
}
}