pulley_interpreter::decode

Trait OpVisitor

Source
pub trait OpVisitor {
    type BytecodeStream: BytecodeStream;
    type Return;

Show 210 methods // Required methods fn bytecode(&mut self) -> &mut Self::BytecodeStream; fn ret(&mut self) -> Self::Return; fn call(&mut self, offset: PcRelOffset) -> Self::Return; fn call1(&mut self, arg1: XReg, offset: PcRelOffset) -> Self::Return; fn call2( &mut self, arg1: XReg, arg2: XReg, offset: PcRelOffset, ) -> Self::Return; fn call3( &mut self, arg1: XReg, arg2: XReg, arg3: XReg, offset: PcRelOffset, ) -> Self::Return; fn call4( &mut self, arg1: XReg, arg2: XReg, arg3: XReg, arg4: XReg, offset: PcRelOffset, ) -> Self::Return; fn call_indirect(&mut self, reg: XReg) -> Self::Return; fn jump(&mut self, offset: PcRelOffset) -> Self::Return; fn xjump(&mut self, reg: XReg) -> Self::Return; fn br_if32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return; fn br_if_not32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return; fn br_if_xeq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xneq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslt32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslteq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xult32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xulteq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xeq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xneq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslt64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslteq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xult64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xulteq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return; fn br_if_xeq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xeq32_i32( 
&mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xneq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xneq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslt32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslt32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xsgt32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xsgt32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslteq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslteq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xsgteq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xsgteq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xult32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xult32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xulteq32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xulteq32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xugt32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xugt32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xugteq32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xugteq32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xeq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xeq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xneq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xneq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, 
) -> Self::Return; fn br_if_xslt64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslt64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xsgt64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xsgt64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslteq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xslteq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xsgteq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xsgteq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xult64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xult64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xulteq64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xulteq64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xugt64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xugt64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return; fn br_if_xugteq64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return; fn br_if_xugteq64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return; fn br_table32(&mut self, idx: XReg, amt: u32) -> Self::Return; fn xmov(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xconst8(&mut self, dst: XReg, imm: i8) -> Self::Return; fn xconst16(&mut self, dst: XReg, imm: i16) -> Self::Return; fn xconst32(&mut self, dst: XReg, imm: i32) -> Self::Return; fn xconst64(&mut self, dst: XReg, imm: i64) -> Self::Return; fn xadd32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xadd32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return; fn xadd32_u32(&mut self, dst: XReg, src1: XReg, 
src2: u32) -> Self::Return; fn xadd64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xadd64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return; fn xadd64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return; fn xsub32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xsub32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return; fn xsub32_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return; fn xsub64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xsub64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return; fn xsub64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return; fn xmul32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xmul32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return; fn xmul32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return; fn xmul64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xmul64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return; fn xmul64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return; fn xctz32(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xctz64(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xclz32(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xclz64(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xpopcnt32(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xpopcnt64(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xrotl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xrotl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xrotr32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xrotr64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xshl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xshr32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xshr32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn 
xshl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xshr64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xshr64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xshl32_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return; fn xshr32_s_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return; fn xshr32_u_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return; fn xshl64_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return; fn xshr64_s_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return; fn xshr64_u_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return; fn xneg32(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xneg64(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xeq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xneq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xslt64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xslteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xult64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xulteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xeq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xneq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xslt32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xslteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xult32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xulteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xload8_u32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload8_s32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload16le_u32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload16le_s32_offset32( 
&mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload32le_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload8_u64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload8_s64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload16le_u64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload16le_s64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload32le_u64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload32le_s64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xload64le_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return; fn xstore8_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return; fn xstore16le_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return; fn xstore32le_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return; fn xstore64le_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return; fn xload8_u32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload8_s32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload16le_u32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload16le_s32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload32le_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload8_u64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload8_s64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload16le_u64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload16le_s64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload32le_u64_offset8( &mut self, dst: XReg, ptr: XReg, 
offset: u8, ) -> Self::Return; fn xload32le_s64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xload64le_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return; fn xstore8_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return; fn xstore16le_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return; fn xstore32le_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return; fn xstore64le_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return; fn push_frame(&mut self) -> Self::Return; fn pop_frame(&mut self) -> Self::Return; fn push_frame_save(&mut self, amt: u32, regs: RegSet<XReg>) -> Self::Return; fn pop_frame_restore( &mut self, amt: u32, regs: RegSet<XReg>, ) -> Self::Return; fn stack_alloc32(&mut self, amt: u32) -> Self::Return; fn stack_free32(&mut self, amt: u32) -> Self::Return; fn zext8(&mut self, dst: XReg, src: XReg) -> Self::Return; fn zext16(&mut self, dst: XReg, src: XReg) -> Self::Return; fn zext32(&mut self, dst: XReg, src: XReg) -> Self::Return; fn sext8(&mut self, dst: XReg, src: XReg) -> Self::Return; fn sext16(&mut self, dst: XReg, src: XReg) -> Self::Return; fn sext32(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xabs32(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xabs64(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xdiv32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xdiv64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xdiv32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xdiv64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xrem32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xrem64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xrem32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xrem64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xband32(&mut self, operands: 
BinaryOperands<XReg>) -> Self::Return; fn xband32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return; fn xband32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return; fn xband64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xband64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return; fn xband64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return; fn xbor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xbor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return; fn xbor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return; fn xbor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xbor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return; fn xbor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return; fn xbxor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xbxor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return; fn xbxor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return; fn xbxor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xbxor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return; fn xbxor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return; fn xbnot32(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xbnot64(&mut self, dst: XReg, src: XReg) -> Self::Return; fn xmin32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xmin32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xmax32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xmax32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xmin64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xmin64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xmax64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; fn xmax64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return; 
fn xselect32( &mut self, dst: XReg, cond: XReg, if_nonzero: XReg, if_zero: XReg, ) -> Self::Return; fn xselect64( &mut self, dst: XReg, cond: XReg, if_nonzero: XReg, if_zero: XReg, ) -> Self::Return; // Provided methods fn before_visit(&mut self) { ... } fn after_visit(&mut self) { ... }
}
Available on crate feature decode only.
Expand description

Callbacks upon decoding instructions from bytecode.

Implement this trait for your type, give an instance of your type to a Decoder method, and the Decoder will invoke the associated method for each instruction that it decodes. For example, if the Decoder decodes an xadd32 instruction, then it will invoke the xadd32 visitor method, passing along any decoded immediates, operands, etc… as arguments.

Required Associated Types§

Source

type BytecodeStream: BytecodeStream

The type of this visitor’s bytecode stream.

Source

type Return

The type of values returned by each visitor method.

Required Methods§

Source

fn bytecode(&mut self) -> &mut Self::BytecodeStream

Get this visitor’s underlying bytecode stream.

Source

fn ret(&mut self) -> Self::Return

Transfer control to the address in the lr register.

Source

fn call(&mut self, offset: PcRelOffset) -> Self::Return

Transfer control to the PC at the given offset and set the lr register to the PC just after this instruction.

This instruction generally assumes that the Pulley ABI is being respected where arguments are in argument registers (starting at x0 for integer arguments) and results are in result registers. This instruction itself assumes that all arguments are already in their registers. Subsequent instructions below enable moving arguments into the correct registers as part of the same call instruction.

Source

fn call1(&mut self, arg1: XReg, offset: PcRelOffset) -> Self::Return

Like call, but also x0 = arg1

Source

fn call2(&mut self, arg1: XReg, arg2: XReg, offset: PcRelOffset) -> Self::Return

Like call, but also x0, x1 = arg1, arg2

Source

fn call3( &mut self, arg1: XReg, arg2: XReg, arg3: XReg, offset: PcRelOffset, ) -> Self::Return

Like call, but also x0, x1, x2 = arg1, arg2, arg3

Source

fn call4( &mut self, arg1: XReg, arg2: XReg, arg3: XReg, arg4: XReg, offset: PcRelOffset, ) -> Self::Return

Like call, but also x0, x1, x2, x3 = arg1, arg2, arg3, arg4

Source

fn call_indirect(&mut self, reg: XReg) -> Self::Return

Transfer control to the PC in reg and set lr to the PC just after this instruction.

Source

fn jump(&mut self, offset: PcRelOffset) -> Self::Return

Unconditionally transfer control to the PC at the given offset.

Source

fn xjump(&mut self, reg: XReg) -> Self::Return

Unconditionally transfer control to the PC in the specified register.

Source

fn br_if32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return

Conditionally transfer control to the given PC offset if low32(cond) contains a non-zero value.

Source

fn br_if_not32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return

Conditionally transfer control to the given PC offset if low32(cond) contains a zero value.

Source

fn br_if_xeq32(&mut self, a: XReg, b: XReg, offset: PcRelOffset) -> Self::Return

Branch if a == b.

Source

fn br_if_xneq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if a != b.

Source

fn br_if_xslt32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if signed a < b.

Source

fn br_if_xslteq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if signed a <= b.

Source

fn br_if_xult32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a < b.

Source

fn br_if_xulteq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a <= b.

Source

fn br_if_xeq64(&mut self, a: XReg, b: XReg, offset: PcRelOffset) -> Self::Return

Branch if a == b.

Source

fn br_if_xneq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if a != b.

Source

fn br_if_xslt64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if signed a < b.

Source

fn br_if_xslteq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if signed a <= b.

Source

fn br_if_xult64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a < b.

Source

fn br_if_xulteq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a <= b.

Source

fn br_if_xeq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if a == b.

Source

fn br_if_xeq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if a == b.

Source

fn br_if_xneq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if a != b.

Source

fn br_if_xneq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if a != b.

Source

fn br_if_xslt32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if signed a < b.

Source

fn br_if_xslt32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if signed a < b.

Source

fn br_if_xsgt32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if signed a > b.

Source

fn br_if_xsgt32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if signed a > b.

Source

fn br_if_xslteq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if signed a <= b.

Source

fn br_if_xslteq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if signed a <= b.

Source

fn br_if_xsgteq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if signed a >= b.

Source

fn br_if_xsgteq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if signed a >= b.

Source

fn br_if_xult32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a < b.

Source

fn br_if_xult32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a < b.

Source

fn br_if_xulteq32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a <= b.

Source

fn br_if_xulteq32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a <= b.

Source

fn br_if_xugt32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a > b.

Source

fn br_if_xugt32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a > b.

Source

fn br_if_xugteq32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a >= b.

Source

fn br_if_xugteq32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a >= b.

Source

fn br_if_xeq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if a == b.

Source

fn br_if_xeq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if a == b.

Source

fn br_if_xneq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if a != b.

Source

fn br_if_xneq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if a != b.

Source

fn br_if_xslt64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if signed a < b.

Source

fn br_if_xslt64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if signed a < b.

Source

fn br_if_xsgt64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if signed a > b.

Source

fn br_if_xsgt64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if signed a > b.

Source

fn br_if_xslteq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if signed a <= b.

Source

fn br_if_xslteq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if signed a <= b.

Source

fn br_if_xsgteq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return

Branch if signed a >= b.

Source

fn br_if_xsgteq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return

Branch if signed a >= b.

Source

fn br_if_xult64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a < b.

Source

fn br_if_xult64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a < b.

Source

fn br_if_xulteq64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a <= b.

Source

fn br_if_xulteq64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a <= b.

Source

fn br_if_xugt64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a > b.

Source

fn br_if_xugt64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a > b.

Source

fn br_if_xugteq64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a >= b.

Source

fn br_if_xugteq64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return

Branch if unsigned a >= b.

Source

fn br_table32(&mut self, idx: XReg, amt: u32) -> Self::Return

Branch to the label indicated by low32(idx).

After this instruction are amt instances of PcRelOffset and the idx selects which one will be branched to. The value of idx is clamped to amt - 1 (i.e. the last offset is the “default” one).

Source

fn xmov(&mut self, dst: XReg, src: XReg) -> Self::Return

Move between x registers.

Source

fn xconst8(&mut self, dst: XReg, imm: i8) -> Self::Return

Set dst = sign_extend(imm8).

Source

fn xconst16(&mut self, dst: XReg, imm: i16) -> Self::Return

Set dst = sign_extend(imm16).

Source

fn xconst32(&mut self, dst: XReg, imm: i32) -> Self::Return

Set dst = sign_extend(imm32).

Source

fn xconst64(&mut self, dst: XReg, imm: i64) -> Self::Return

Set dst = imm64.

Source

fn xadd32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

32-bit wrapping addition: low32(dst) = low32(src1) + low32(src2).

The upper 32-bits of dst are unmodified.

Source

fn xadd32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return

Same as xadd32 but src2 is a zero-extended 8-bit immediate.

Source

fn xadd32_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return

Same as xadd32 but src2 is a zero-extended 32-bit immediate.

Source

fn xadd64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

64-bit wrapping addition: dst = src1 + src2.

Source

fn xadd64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return

Same as xadd64 but src2 is a zero-extended 8-bit immediate.

Source

fn xadd64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return

Same as xadd64 but src2 is a zero-extended 32-bit immediate.

Source

fn xsub32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

32-bit wrapping subtraction: low32(dst) = low32(src1) - low32(src2).

The upper 32-bits of dst are unmodified.

Source

fn xsub32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return

Same as xsub32 but src2 is a zero-extended 8-bit immediate.

Source

fn xsub32_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return

Same as xsub32 but src2 is a zero-extended 32-bit immediate.

Source

fn xsub64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

64-bit wrapping subtraction: dst = src1 - src2.

Source

fn xsub64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return

Same as xsub64 but src2 is a zero-extended 8-bit immediate.

Source

fn xsub64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return

Same as xsub64 but src2 is a zero-extended 32-bit immediate.

Source

fn xmul32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) * low32(src2)

Source

fn xmul32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return

Same as xmul32 but src2 is a sign-extended 8-bit immediate.

Source

fn xmul32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return

Same as xmul32 but src2 is a sign-extended 32-bit immediate.

Source

fn xmul64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 * src2

Source

fn xmul64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return

Same as xmul64 but src2 is a sign-extended 8-bit immediate.

Source

fn xmul64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return

Same as xmul64 but src2 is a sign-extended 32-bit immediate.

Source

fn xctz32(&mut self, dst: XReg, src: XReg) -> Self::Return

low32(dst) = trailing_zeros(low32(src))

Source

fn xctz64(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = trailing_zeros(src)

Source

fn xclz32(&mut self, dst: XReg, src: XReg) -> Self::Return

low32(dst) = leading_zeros(low32(src))

Source

fn xclz64(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = leading_zeros(src)

Source

fn xpopcnt32(&mut self, dst: XReg, src: XReg) -> Self::Return

low32(dst) = count_ones(low32(src))

Source

fn xpopcnt64(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = count_ones(src)

Source

fn xrotl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = rotate_left(low32(src1), low32(src2))

Source

fn xrotl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = rotate_left(src1, src2)

Source

fn xrotr32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = rotate_right(low32(src1), low32(src2))

Source

fn xrotr64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = rotate_right(src1, src2)

Source

fn xshl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) << low5(src2)

Source

fn xshr32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) >> low5(src2)

Source

fn xshr32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) >> low5(src2)

Source

fn xshl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 << low6(src2)

Source

fn xshr64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 >> low6(src2)

Source

fn xshr64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 >> low6(src2)

Source

fn xshl32_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return

low32(dst) = low32(src1) << low5(src2)

Source

fn xshr32_s_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return

low32(dst) = low32(src1) >> low5(src2)

Source

fn xshr32_u_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return

low32(dst) = low32(src1) >> low5(src2)

Source

fn xshl64_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return

dst = src1 << low6(src2)

Source

fn xshr64_s_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return

dst = src1 >> low6(src2)

Source

fn xshr64_u_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return

dst = src1 >> low6(src2)

Source

fn xneg32(&mut self, dst: XReg, src: XReg) -> Self::Return

low32(dst) = -low32(src)

Source

fn xneg64(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = -src

Source

fn xeq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = src1 == src2

Source

fn xneq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = src1 != src2

Source

fn xslt64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = src1 < src2 (signed)

Source

fn xslteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = src1 <= src2 (signed)

Source

fn xult64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = src1 < src2 (unsigned)

Source

fn xulteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = src1 <= src2 (unsigned)

Source

fn xeq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) == low32(src2)

Source

fn xneq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) != low32(src2)

Source

fn xslt32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) < low32(src2) (signed)

Source

fn xslteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) <= low32(src2) (signed)

Source

fn xult32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) < low32(src2) (unsigned)

Source

fn xulteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) <= low32(src2) (unsigned)

Source

fn xload8_u32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

low32(dst) = zext(*(ptr + offset))

Source

fn xload8_s32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

low32(dst) = sext(*(ptr + offset))

Source

fn xload16le_u32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

low32(dst) = zext(*(ptr + offset))

Source

fn xload16le_s32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

low32(dst) = sext(*(ptr + offset))

Source

fn xload32le_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

low32(dst) = *(ptr + offset)

Source

fn xload8_u64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

dst = zext(*(ptr + offset))

Source

fn xload8_s64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

dst = sext(*(ptr + offset))

Source

fn xload16le_u64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

dst = zext(*(ptr + offset))

Source

fn xload16le_s64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

dst = sext(*(ptr + offset))

Source

fn xload32le_u64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

dst = zext(*(ptr + offset))

Source

fn xload32le_s64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

dst = sext(*(ptr + offset))

Source

fn xload64le_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return

dst = *(ptr + offset)

Source

fn xstore8_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return

*(ptr + offset) = low8(src)

Source

fn xstore16le_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return

*(ptr + offset) = low16(src)

Source

fn xstore32le_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return

*(ptr + offset) = low32(src)

Source

fn xstore64le_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return

*(ptr + offset) = low64(src)

Source

fn xload8_u32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

low32(dst) = zext(*(ptr + offset))

Source

fn xload8_s32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

low32(dst) = sext(*(ptr + offset))

Source

fn xload16le_u32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

low32(dst) = zext(*(ptr + offset))

Source

fn xload16le_s32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

low32(dst) = sext(*(ptr + offset))

Source

fn xload32le_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

low32(dst) = *(ptr + offset)

Source

fn xload8_u64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

dst = zext(*(ptr + offset))

Source

fn xload8_s64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

dst = sext(*(ptr + offset))

Source

fn xload16le_u64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

dst = zext(*(ptr + offset))

Source

fn xload16le_s64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

dst = sext(*(ptr + offset))

Source

fn xload32le_u64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

dst = zext(*(ptr + offset))

Source

fn xload32le_s64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

dst = sext(*(ptr + offset))

Source

fn xload64le_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return

dst = *(ptr + offset)

Source

fn xstore8_offset8(&mut self, ptr: XReg, offset: u8, src: XReg) -> Self::Return

*(ptr + offset) = low8(src)

Source

fn xstore16le_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return

*(ptr + offset) = low16(src)

Source

fn xstore32le_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return

*(ptr + offset) = low32(src)

Source

fn xstore64le_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return

*(ptr + offset) = low64(src)

Source

fn push_frame(&mut self) -> Self::Return

push lr; push fp; fp = sp

Source

fn pop_frame(&mut self) -> Self::Return

sp = fp; pop fp; pop lr

Source

fn push_frame_save(&mut self, amt: u32, regs: RegSet<XReg>) -> Self::Return

Macro-instruction to enter a function, allocate some stack, and then save some registers.

This is equivalent to push_frame, stack_alloc32 amt, then saving all of regs to the top of the stack just allocated.

Source

fn pop_frame_restore(&mut self, amt: u32, regs: RegSet<XReg>) -> Self::Return

Inverse of push_frame_save. Restores regs from the top of the stack, then runs stack_free32 amt, then runs pop_frame.

Source

fn stack_alloc32(&mut self, amt: u32) -> Self::Return

sp = sp.checked_sub(amt)

Source

fn stack_free32(&mut self, amt: u32) -> Self::Return

sp = sp + amt

Source

fn zext8(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = zext(low8(src))

Source

fn zext16(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = zext(low16(src))

Source

fn zext32(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = zext(low32(src))

Source

fn sext8(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = sext(low8(src))

Source

fn sext16(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = sext(low16(src))

Source

fn sext32(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = sext(low32(src))

Source

fn xabs32(&mut self, dst: XReg, src: XReg) -> Self::Return

low32(dst) = |low32(src)|

Source

fn xabs64(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = |src|

Source

fn xdiv32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) / low32(src2) (signed)

Source

fn xdiv64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 / src2 (signed)

Source

fn xdiv32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) / low32(src2) (unsigned)

Source

fn xdiv64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 / src2 (unsigned)

Source

fn xrem32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) % low32(src2) (signed)

Source

fn xrem64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 % src2 (signed)

Source

fn xrem32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) % low32(src2) (unsigned)

Source

fn xrem64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 % src2 (unsigned)

Source

fn xband32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) & low32(src2)

Source

fn xband32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return

Same as xband32 but src2 is a sign-extended 8-bit immediate.

Source

fn xband32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return

Same as xband32 but src2 is a sign-extended 32-bit immediate.

Source

fn xband64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 & src2

Source

fn xband64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return

Same as xband64 but src2 is a sign-extended 8-bit immediate.

Source

fn xband64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return

Same as xband64 but src2 is a sign-extended 32-bit immediate.

Source

fn xbor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) | low32(src2)

Source

fn xbor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return

Same as xbor32 but src2 is a sign-extended 8-bit immediate.

Source

fn xbor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return

Same as xbor32 but src2 is a sign-extended 32-bit immediate.

Source

fn xbor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 | src2

Source

fn xbor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return

Same as xbor64 but src2 is a sign-extended 8-bit immediate.

Source

fn xbor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return

Same as xbor64 but src2 is a sign-extended 32-bit immediate.

Source

fn xbxor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = low32(src1) ^ low32(src2)

Source

fn xbxor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return

Same as xbxor32 but src2 is a sign-extended 8-bit immediate.

Source

fn xbxor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return

Same as xbxor32 but src2 is a sign-extended 32-bit immediate.

Source

fn xbxor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = src1 ^ src2

Source

fn xbxor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return

Same as xbxor64 but src2 is a sign-extended 8-bit immediate.

Source

fn xbxor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return

Same as xbxor64 but src2 is a sign-extended 32-bit immediate.

Source

fn xbnot32(&mut self, dst: XReg, src: XReg) -> Self::Return

low32(dst) = !low32(src)

Source

fn xbnot64(&mut self, dst: XReg, src: XReg) -> Self::Return

dst = !src

Source

fn xmin32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = min(low32(src1), low32(src2)) (unsigned)

Source

fn xmin32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = min(low32(src1), low32(src2)) (signed)

Source

fn xmax32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = max(low32(src1), low32(src2)) (unsigned)

Source

fn xmax32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

low32(dst) = max(low32(src1), low32(src2)) (signed)

Source

fn xmin64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = min(src1, src2) (unsigned)

Source

fn xmin64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = min(src1, src2) (signed)

Source

fn xmax64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = max(src1, src2) (unsigned)

Source

fn xmax64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return

dst = max(src1, src2) (signed)

Source

fn xselect32( &mut self, dst: XReg, cond: XReg, if_nonzero: XReg, if_zero: XReg, ) -> Self::Return

low32(dst) = low32(cond) ? low32(if_nonzero) : low32(if_zero)

Source

fn xselect64( &mut self, dst: XReg, cond: XReg, if_nonzero: XReg, if_zero: XReg, ) -> Self::Return

dst = low32(cond) ? if_nonzero : if_zero

Provided Methods§

Source

fn before_visit(&mut self)

A callback invoked before starting to decode an instruction.

Does nothing by default.

Source

fn after_visit(&mut self)

A callback invoked after an instruction has been completely decoded.

Does nothing by default.

Implementors§

Source§

impl<'a> OpVisitor for Disassembler<'a>

Available on crate feature disas only.
Source§

impl<B: BytecodeStream> OpVisitor for MaterializeOpsVisitor<B>

Source§

impl<F, T, V1, V2> OpVisitor for SequencedVisitor<'_, F, V1, V2>
where F: FnMut(V1::Return, V2::Return) -> T, V1: OpVisitor, V2: OpVisitor<BytecodeStream = V1::BytecodeStream>,