use crate::decode::*;
use crate::imms::*;
use crate::regs::*;
use crate::ExtendedOpcode;
use crate::Opcode;
use alloc::string::ToString;
use alloc::{vec, vec::Vec};
use core::fmt;
use core::mem;
use core::ops::ControlFlow;
use core::ops::{Index, IndexMut};
use core::ptr::{self, NonNull};
use sptr::Strict;
mod interp_loop;
/// Default size, in bytes, of a [`Vm`]'s stack: 1 MiB.
const DEFAULT_STACK_SIZE: usize = 1 << 20;

/// A bytecode interpreter: owns the machine state (registers and stack) and
/// provides the host-side entry points for calling bytecode functions.
pub struct Vm {
state: MachineState,
}
impl Default for Vm {
fn default() -> Self {
Vm::new()
}
}
impl Vm {
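    /// Creates a new `Vm` with a default-size stack.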
pub fn new() -> Self {
Self::with_stack(vec![0; DEFAULT_STACK_SIZE])
}
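    /// Creates a new `Vm` whose stack is backed by the given allocation.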
pub fn with_stack(stack: Vec<u8>) -> Self {
Self {
state: MachineState::with_stack(stack),
}
}
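    /// Returns a shared borrow of this VM's machine state.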
pub fn state(&self) -> &MachineState {
&self.state
}
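    /// Returns an exclusive borrow of this VM's machine state.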
pub fn state_mut(&mut self) -> &mut MachineState {
&mut self.state
}
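    /// Consumes this VM and returns its stack allocation.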
pub fn into_stack(self) -> Vec<u8> {
self.state.stack
}
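    /// Calls the bytecode function at `func`, passing `args` in the argument
    /// registers and reading one value per entry in `rets` out of the return
    /// registers once execution finishes.
    ///
    /// On a trap, returns `Err` with the program counter of the trapping
    /// instruction.
    ///
    /// A minimal usage sketch (marked `ignore` because it assumes an
    /// embedder-provided `code` pointer to valid bytecode):
    ///
    /// ```ignore
    /// let mut vm = Vm::new();
    /// // `code: NonNull<u8>` is hypothetical and must point at valid bytecode.
    /// let rets = unsafe { vm.call(code, &[Val::from(42_u64)], [RegType::XReg]) }
    ///     .expect("the function trapped");
    /// for ret in rets {
    ///     println!("{ret:x}");
    /// }
    /// ```
    ///
    /// # Safety
    ///
    /// `func` must point at valid bytecode for this interpreter, and `args`
    /// and `rets` must match the signature of the function being called.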
pub unsafe fn call<'a>(
&'a mut self,
func: NonNull<u8>,
args: &[Val],
rets: impl IntoIterator<Item = RegType> + 'a,
) -> Result<impl Iterator<Item = Val> + 'a, NonNull<u8>> {
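        // The first 16 registers of each class are used, in order, for
        // arguments; overflow arguments belong in stack slots, which are not
        // implemented yet.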
let mut x_args = (0..16).map(|x| XReg::new_unchecked(x));
let mut f_args = (0..16).map(|f| FReg::new_unchecked(f));
let mut v_args = (0..16).map(|v| VReg::new_unchecked(v));
for arg in args {
match arg {
Val::XReg(val) => match x_args.next() {
Some(reg) => self.state[reg] = *val,
None => todo!("stack slots"),
},
Val::FReg(val) => match f_args.next() {
Some(reg) => self.state[reg] = *val,
None => todo!("stack slots"),
},
Val::VReg(val) => match v_args.next() {
Some(reg) => self.state[reg] = *val,
None => todo!("stack slots"),
},
}
}
self.run(func)?;
let mut x_rets = (0..16).map(|x| XReg::new_unchecked(x));
let mut f_rets = (0..16).map(|f| FReg::new_unchecked(f));
let mut v_rets = (0..16).map(|v| VReg::new_unchecked(v));
Ok(rets.into_iter().map(move |ty| match ty {
RegType::XReg => match x_rets.next() {
Some(reg) => Val::XReg(self.state[reg]),
None => todo!("stack slots"),
},
RegType::FReg => match f_rets.next() {
Some(reg) => Val::FReg(self.state[reg]),
None => todo!("stack slots"),
},
RegType::VReg => match v_rets.next() {
Some(reg) => Val::VReg(self.state[reg]),
None => todo!("stack slots"),
},
}))
}
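    /// Runs the interpreter loop starting at `pc` until the bytecode returns
    /// to the host, traps, or requests a host call.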
unsafe fn run(&mut self, pc: NonNull<u8>) -> Result<(), NonNull<u8>> {
let mut bytecode = UnsafeBytecodeStream::new(pc);
match interp_loop::interpreter_loop(self, &mut bytecode) {
Done::ReturnToHost => self.return_to_host(),
Done::Trap(pc) => self.trap(pc),
Done::HostCall => self.host_call(),
}
}
#[cold]
#[inline(never)]
fn return_to_host(&self) -> Result<(), NonNull<u8>> {
Ok(())
}
#[cold]
#[inline(never)]
fn trap(&self, pc: NonNull<u8>) -> Result<(), NonNull<u8>> {
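        // `pc` points just past the trap instruction that was decoded, so
        // rewind it to the address of the trap itself before reporting it.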
let trap_pc = unsafe {
NonNull::new_unchecked(pc.as_ptr().byte_sub(ExtendedOpcode::ENCODED_SIZE_OF_TRAP))
};
Err(trap_pc)
}
#[cold]
#[inline(never)]
fn host_call(&self) -> Result<(), NonNull<u8>> {
todo!()
}
}
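/// The class of a register, used by [`Vm::call`] to describe which register
/// file each return value should be read from.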
#[derive(Clone, Copy, Debug)]
pub enum RegType {
XReg,
FReg,
VReg,
}
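/// A dynamically typed value held in a register, used to pass arguments into
/// and read results out of [`Vm::call`].
///
/// The `From` impls below allow building argument lists from plain Rust
/// values; a sketch (`ignore` since it assumes these types are visible to
/// doc-tests):
///
/// ```ignore
/// let args = [Val::from(1_u32), Val::from(2.5_f64)];
/// ```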
#[derive(Clone, Copy, Debug)]
pub enum Val {
XReg(XRegVal),
FReg(FRegVal),
VReg(VRegVal),
}
impl fmt::LowerHex for Val {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Val::XReg(v) => fmt::LowerHex::fmt(v, f),
Val::FReg(v) => fmt::LowerHex::fmt(v, f),
Val::VReg(v) => fmt::LowerHex::fmt(v, f),
}
}
}
impl From<XRegVal> for Val {
fn from(value: XRegVal) -> Self {
Val::XReg(value)
}
}
impl From<u64> for Val {
fn from(value: u64) -> Self {
XRegVal::new_u64(value).into()
}
}
impl From<u32> for Val {
fn from(value: u32) -> Self {
XRegVal::new_u32(value).into()
}
}
impl From<i64> for Val {
fn from(value: i64) -> Self {
XRegVal::new_i64(value).into()
}
}
impl From<i32> for Val {
fn from(value: i32) -> Self {
XRegVal::new_i32(value).into()
}
}
impl<T> From<*mut T> for Val {
fn from(value: *mut T) -> Self {
XRegVal::new_ptr(value).into()
}
}
impl From<FRegVal> for Val {
fn from(value: FRegVal) -> Self {
Val::FReg(value)
}
}
impl From<f64> for Val {
fn from(value: f64) -> Self {
FRegVal::new_f64(value).into()
}
}
impl From<f32> for Val {
fn from(value: f32) -> Self {
FRegVal::new_f32(value).into()
}
}
impl From<VRegVal> for Val {
fn from(value: VRegVal) -> Self {
Val::VReg(value)
}
}
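/// The 64-bit value stored in an integer (`x`) register.
///
/// The bits are kept in little-endian order regardless of host endianness;
/// the typed accessors convert on the way in and out, so round trips are
/// lossless. A sketch (`ignore` since it assumes doc-test visibility):
///
/// ```ignore
/// let v = XRegVal::new_u32(0xdead_beef);
/// assert_eq!(v.get_u32(), 0xdead_beef);
/// ```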
#[derive(Copy, Clone)]
pub struct XRegVal(XRegUnion);
impl PartialEq for XRegVal {
fn eq(&self, other: &Self) -> bool {
self.get_u64() == other.get_u64()
}
}
impl Eq for XRegVal {}
impl fmt::Debug for XRegVal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("XRegVal")
.field("as_u64", &self.get_u64())
.finish()
}
}
impl fmt::LowerHex for XRegVal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::LowerHex::fmt(&self.get_u64(), f)
}
}
#[derive(Copy, Clone)]
union XRegUnion {
i32: i32,
u32: u32,
i64: i64,
u64: u64,
ptr: *mut u8,
}
impl Default for XRegVal {
fn default() -> Self {
Self(unsafe { mem::zeroed() })
}
}
#[allow(missing_docs)]
impl XRegVal {
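    /// Sentinel stored in `fp` and `lr` for the outermost frame: returning
    /// to this "address" tells the interpreter to hand control back to the
    /// host instead of continuing to execute bytecode.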
pub const HOST_RETURN_ADDR: Self = Self(XRegUnion { i64: -1 });
pub fn new_i32(x: i32) -> Self {
let mut val = XRegVal::default();
val.set_i32(x);
val
}
pub fn new_u32(x: u32) -> Self {
let mut val = XRegVal::default();
val.set_u32(x);
val
}
pub fn new_i64(x: i64) -> Self {
let mut val = XRegVal::default();
val.set_i64(x);
val
}
pub fn new_u64(x: u64) -> Self {
let mut val = XRegVal::default();
val.set_u64(x);
val
}
pub fn new_ptr<T>(ptr: *mut T) -> Self {
let mut val = XRegVal::default();
val.set_ptr(ptr);
val
}
pub fn get_i32(&self) -> i32 {
let x = unsafe { self.0.i32 };
i32::from_le(x)
}
pub fn get_u32(&self) -> u32 {
let x = unsafe { self.0.u32 };
u32::from_le(x)
}
pub fn get_i64(&self) -> i64 {
let x = unsafe { self.0.i64 };
i64::from_le(x)
}
pub fn get_u64(&self) -> u64 {
let x = unsafe { self.0.u64 };
u64::from_le(x)
}
pub fn get_ptr<T>(&self) -> *mut T {
let ptr = unsafe { self.0.ptr };
Strict::map_addr(ptr, |p| usize::from_le(p)).cast()
}
pub fn set_i32(&mut self, x: i32) {
self.0.i32 = x.to_le();
}
pub fn set_u32(&mut self, x: u32) {
self.0.u32 = x.to_le();
}
pub fn set_i64(&mut self, x: i64) {
self.0.i64 = x.to_le();
}
pub fn set_u64(&mut self, x: u64) {
self.0.u64 = x.to_le();
}
pub fn set_ptr<T>(&mut self, ptr: *mut T) {
self.0.ptr = Strict::map_addr(ptr, |p| p.to_le()).cast();
}
}
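/// The value stored in a float (`f`) register: the raw bits of an `f32` or
/// an `f64`, kept in little-endian order like [`XRegVal`].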
#[derive(Copy, Clone)]
pub struct FRegVal(FRegUnion);
impl fmt::Debug for FRegVal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("FRegVal")
.field("as_f32", &self.get_f32())
.field("as_f64", &self.get_f64())
.finish()
}
}
impl fmt::LowerHex for FRegVal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::LowerHex::fmt(&self.get_f64().to_bits(), f)
}
}
#[derive(Copy, Clone)]
union FRegUnion {
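    // Deliberately `u32`/`u64` rather than `f32`/`f64`: moving raw bits
    // through a float type could quietly rewrite NaN payloads on some
    // platforms, and register contents must round-trip bit-for-bit.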
f32: u32,
f64: u64,
}
impl Default for FRegVal {
fn default() -> Self {
Self(unsafe { mem::zeroed() })
}
}
#[allow(missing_docs)]
impl FRegVal {
pub fn new_f32(f: f32) -> Self {
let mut val = Self::default();
val.set_f32(f);
val
}
pub fn new_f64(f: f64) -> Self {
let mut val = Self::default();
val.set_f64(f);
val
}
pub fn get_f32(&self) -> f32 {
let val = unsafe { self.0.f32 };
f32::from_le_bytes(val.to_ne_bytes())
}
pub fn get_f64(&self) -> f64 {
let val = unsafe { self.0.f64 };
f64::from_le_bytes(val.to_ne_bytes())
}
pub fn set_f32(&mut self, val: f32) {
self.0.f32 = u32::from_ne_bytes(val.to_le_bytes());
}
pub fn set_f64(&mut self, val: f64) {
self.0.f64 = u64::from_ne_bytes(val.to_le_bytes());
}
}
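/// The value stored in a vector (`v`) register: an opaque 128-bit bit
/// pattern.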
#[derive(Copy, Clone)]
pub struct VRegVal(VRegUnion);
impl fmt::Debug for VRegVal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("VRegVal")
.field("as_u128", &unsafe { self.0.u128 })
.finish()
}
}
impl fmt::LowerHex for VRegVal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::LowerHex::fmt(unsafe { &self.0.u128 }, f)
}
}
#[derive(Copy, Clone)]
union VRegUnion {
u128: u128,
}
impl Default for VRegVal {
fn default() -> Self {
Self(unsafe { mem::zeroed() })
}
}
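/// The complete state of the interpreter: the integer, float, and vector
/// register files plus the stack memory that `sp` points into.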
pub struct MachineState {
x_regs: [XRegVal; XReg::RANGE.end as usize],
f_regs: [FRegVal; FReg::RANGE.end as usize],
v_regs: [VRegVal; VReg::RANGE.end as usize],
stack: Vec<u8>,
}
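// SAFETY: `MachineState` is not auto-`Send`/`Sync` only because `XRegUnion`
// contains a raw pointer; that pointer is just a stored value here, and the
// state owns its stack outright.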
unsafe impl Send for MachineState {}
unsafe impl Sync for MachineState {}
impl fmt::Debug for MachineState {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let MachineState {
x_regs,
f_regs,
v_regs,
stack: _,
} = self;
struct RegMap<'a, R>(&'a [R], fn(u8) -> alloc::string::String);
impl<R: fmt::Debug> fmt::Debug for RegMap<'_, R> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut f = f.debug_map();
for (i, r) in self.0.iter().enumerate() {
f.entry(&(self.1)(i as u8), r);
}
f.finish()
}
}
f.debug_struct("MachineState")
.field(
"x_regs",
&RegMap(x_regs, |i| XReg::new(i).unwrap().to_string()),
)
.field(
"f_regs",
&RegMap(f_regs, |i| FReg::new(i).unwrap().to_string()),
)
.field(
"v_regs",
&RegMap(v_regs, |i| VReg::new(i).unwrap().to_string()),
)
.finish_non_exhaustive()
}
}
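// Implements `Index`/`IndexMut` from a register type to its slot in the
// corresponding register file of `MachineState`.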
macro_rules! index_reg {
($reg_ty:ty,$value_ty:ty,$field:ident) => {
impl Index<$reg_ty> for MachineState {
type Output = $value_ty;
fn index(&self, reg: $reg_ty) -> &Self::Output {
&self.$field[reg.index()]
}
}
impl IndexMut<$reg_ty> for MachineState {
fn index_mut(&mut self, reg: $reg_ty) -> &mut Self::Output {
&mut self.$field[reg.index()]
}
}
};
}
index_reg!(XReg, XRegVal, x_regs);
index_reg!(FReg, FRegVal, f_regs);
index_reg!(VReg, VRegVal, v_regs);
impl MachineState {
fn with_stack(stack: Vec<u8>) -> Self {
        assert!(!stack.is_empty());
let mut state = Self {
x_regs: [Default::default(); XReg::RANGE.end as usize],
f_regs: Default::default(),
v_regs: Default::default(),
stack,
};
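        // Point `sp` one past the end of the stack allocation (the stack
        // grows downward) and seed `fp`/`lr` with the host-return sentinel
        // so that returning from the outermost frame exits to the host.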
let len = state.stack.len();
let sp = &mut state.stack[..];
let sp = sp.as_mut_ptr();
let sp = unsafe { sp.add(len) };
state[XReg::sp] = XRegVal::new_ptr(sp);
state[XReg::fp] = XRegVal::HOST_RETURN_ADDR;
state[XReg::lr] = XRegVal::HOST_RETURN_ADDR;
state
}
    /// Pushes `val` onto the stack. The stack grows downward, so `sp` is
    /// decremented before the store.
    fn push<T>(&mut self, val: T) {
        let sp = self[XReg::sp].get_ptr::<T>().wrapping_sub(1);
        unsafe { sp.write_unaligned(val) }
        self[XReg::sp].set_ptr(sp);
    }

    /// Pops a `T` off of the stack, moving `sp` back up past the value read.
    fn pop<T>(&mut self) -> T {
        let sp = self[XReg::sp].get_ptr::<T>();
        let val = unsafe { sp.read_unaligned() };
        self[XReg::sp].set_ptr(sp.wrapping_add(1));
        val
    }
}
/// Why the interpreter loop stopped executing bytecode.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Done {
    /// The outermost frame returned, so control goes back to the host.
    ReturnToHost,
    /// A trap instruction was executed; the program counter points just past
    /// the trap.
    Trap(NonNull<u8>),
    /// The bytecode requested a call back into the host.
    #[allow(dead_code)]
    HostCall,
}
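
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sketch of host-side inspection: a fresh `Vm` should have
    // `fp` and `lr` seeded with the host-return sentinel so the outermost
    // frame's return exits to the host.
    #[test]
    fn fresh_vm_register_state() {
        let vm = Vm::new();
        assert_eq!(vm.state()[XReg::fp], XRegVal::HOST_RETURN_ADDR);
        assert_eq!(vm.state()[XReg::lr], XRegVal::HOST_RETURN_ADDR);
    }
}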