pub trait OpVisitor {
type BytecodeStream: BytecodeStream;
type Return;
// (210 methods total)
// Required methods
fn bytecode(&mut self) -> &mut Self::BytecodeStream;
fn ret(&mut self) -> Self::Return;
fn call(&mut self, offset: PcRelOffset) -> Self::Return;
fn call1(&mut self, arg1: XReg, offset: PcRelOffset) -> Self::Return;
fn call2(
&mut self,
arg1: XReg,
arg2: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn call3(
&mut self,
arg1: XReg,
arg2: XReg,
arg3: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn call4(
&mut self,
arg1: XReg,
arg2: XReg,
arg3: XReg,
arg4: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn call_indirect(&mut self, reg: XReg) -> Self::Return;
fn jump(&mut self, offset: PcRelOffset) -> Self::Return;
fn xjump(&mut self, reg: XReg) -> Self::Return;
fn br_if32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return;
fn br_if_not32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return;
fn br_if_xeq32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xneq32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslt32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslteq32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xult32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xulteq32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xeq64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xneq64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslt64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslteq64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xult64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xulteq64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xeq32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xeq32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xneq32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xneq32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslt32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslt32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xsgt32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xsgt32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslteq32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslteq32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xsgteq32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xsgteq32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xult32_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xult32_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xulteq32_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xulteq32_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xugt32_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xugt32_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xugteq32_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xugteq32_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xeq64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xeq64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xneq64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xneq64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslt64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslt64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xsgt64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xsgt64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslteq64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xslteq64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xsgteq64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xsgteq64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xult64_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xult64_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xulteq64_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xulteq64_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xugt64_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xugt64_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xugteq64_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return;
fn br_if_xugteq64_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return;
fn br_table32(&mut self, idx: XReg, amt: u32) -> Self::Return;
fn xmov(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xconst8(&mut self, dst: XReg, imm: i8) -> Self::Return;
fn xconst16(&mut self, dst: XReg, imm: i16) -> Self::Return;
fn xconst32(&mut self, dst: XReg, imm: i32) -> Self::Return;
fn xconst64(&mut self, dst: XReg, imm: i64) -> Self::Return;
fn xadd32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xadd32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return;
fn xadd32_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return;
fn xadd64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xadd64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return;
fn xadd64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return;
fn xsub32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xsub32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return;
fn xsub32_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return;
fn xsub64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xsub64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return;
fn xsub64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return;
fn xmul32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xmul32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return;
fn xmul32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return;
fn xmul64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xmul64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return;
fn xmul64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return;
fn xctz32(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xctz64(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xclz32(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xclz64(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xpopcnt32(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xpopcnt64(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xrotl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xrotl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xrotr32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xrotr64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xshl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xshr32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xshr32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xshl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xshr64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xshr64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xshl32_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return;
fn xshr32_s_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return;
fn xshr32_u_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return;
fn xshl64_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return;
fn xshr64_s_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return;
fn xshr64_u_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return;
fn xneg32(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xneg64(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xeq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xneq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xslt64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xslteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xult64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xulteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xeq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xneq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xslt32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xslteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xult32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xulteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xload8_u32_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload8_s32_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload16le_u32_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload16le_s32_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload32le_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload8_u64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload8_s64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload16le_u64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload16le_s64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload32le_u64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload32le_s64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xload64le_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return;
fn xstore8_offset32(
&mut self,
ptr: XReg,
offset: i32,
src: XReg,
) -> Self::Return;
fn xstore16le_offset32(
&mut self,
ptr: XReg,
offset: i32,
src: XReg,
) -> Self::Return;
fn xstore32le_offset32(
&mut self,
ptr: XReg,
offset: i32,
src: XReg,
) -> Self::Return;
fn xstore64le_offset32(
&mut self,
ptr: XReg,
offset: i32,
src: XReg,
) -> Self::Return;
fn xload8_u32_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload8_s32_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload16le_u32_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload16le_s32_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload32le_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload8_u64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload8_s64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload16le_u64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload16le_s64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload32le_u64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload32le_s64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xload64le_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return;
fn xstore8_offset8(
&mut self,
ptr: XReg,
offset: u8,
src: XReg,
) -> Self::Return;
fn xstore16le_offset8(
&mut self,
ptr: XReg,
offset: u8,
src: XReg,
) -> Self::Return;
fn xstore32le_offset8(
&mut self,
ptr: XReg,
offset: u8,
src: XReg,
) -> Self::Return;
fn xstore64le_offset8(
&mut self,
ptr: XReg,
offset: u8,
src: XReg,
) -> Self::Return;
fn push_frame(&mut self) -> Self::Return;
fn pop_frame(&mut self) -> Self::Return;
fn push_frame_save(&mut self, amt: u32, regs: RegSet<XReg>) -> Self::Return;
fn pop_frame_restore(
&mut self,
amt: u32,
regs: RegSet<XReg>,
) -> Self::Return;
fn stack_alloc32(&mut self, amt: u32) -> Self::Return;
fn stack_free32(&mut self, amt: u32) -> Self::Return;
fn zext8(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn zext16(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn zext32(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn sext8(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn sext16(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn sext32(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xabs32(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xabs64(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xdiv32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xdiv64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xdiv32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xdiv64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xrem32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xrem64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xrem32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xrem64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xband32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xband32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return;
fn xband32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return;
fn xband64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xband64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return;
fn xband64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return;
fn xbor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xbor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return;
fn xbor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return;
fn xbor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xbor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return;
fn xbor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return;
fn xbxor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xbxor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return;
fn xbxor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return;
fn xbxor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xbxor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return;
fn xbxor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return;
fn xbnot32(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xbnot64(&mut self, dst: XReg, src: XReg) -> Self::Return;
fn xmin32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xmin32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xmax32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xmax32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xmin64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xmin64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xmax64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xmax64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return;
fn xselect32(
&mut self,
dst: XReg,
cond: XReg,
if_nonzero: XReg,
if_zero: XReg,
) -> Self::Return;
fn xselect64(
&mut self,
dst: XReg,
cond: XReg,
if_nonzero: XReg,
if_zero: XReg,
) -> Self::Return;
// Provided methods
fn before_visit(&mut self) { ... }
fn after_visit(&mut self) { ... }
}
Available on crate feature `decode` only.
Description
Callbacks upon decoding instructions from bytecode.
Implement this trait for your type, give an instance of your type to
a Decoder
method, and the Decoder
will invoke the associated
method for each instruction that it decodes. For example, if the
Decoder
decodes an xadd32
instruction, then it will invoke the
xadd32
visitor method, passing along any decoded immediates,
operands, etc… as arguments.
Required Associated Types§
Sourcetype BytecodeStream: BytecodeStream
type BytecodeStream: BytecodeStream
The type of this visitor’s bytecode stream.
Required Methods§
Sourcefn bytecode(&mut self) -> &mut Self::BytecodeStream
fn bytecode(&mut self) -> &mut Self::BytecodeStream
Get this visitor’s underlying bytecode stream.
Sourcefn call(&mut self, offset: PcRelOffset) -> Self::Return
fn call(&mut self, offset: PcRelOffset) -> Self::Return
Transfer control to the PC at the given offset and set the lr
register to the PC just after this instruction.
This instruction generally assumes that the Pulley ABI is being respected, where arguments are in argument registers (starting at x0 for integer arguments) and results are in result registers. This instruction itself assumes that all arguments are already in their registers. Subsequent instructions below enable moving arguments into the correct registers as part of the same call instruction.
Sourcefn call1(&mut self, arg1: XReg, offset: PcRelOffset) -> Self::Return
fn call1(&mut self, arg1: XReg, offset: PcRelOffset) -> Self::Return
Like call
, but also x0 = arg1
Sourcefn call2(&mut self, arg1: XReg, arg2: XReg, offset: PcRelOffset) -> Self::Return
fn call2(&mut self, arg1: XReg, arg2: XReg, offset: PcRelOffset) -> Self::Return
Like call
, but also x0, x1 = arg1, arg2
Sourcefn call3(
&mut self,
arg1: XReg,
arg2: XReg,
arg3: XReg,
offset: PcRelOffset,
) -> Self::Return
fn call3( &mut self, arg1: XReg, arg2: XReg, arg3: XReg, offset: PcRelOffset, ) -> Self::Return
Like call
, but also x0, x1, x2 = arg1, arg2, arg3
Sourcefn call4(
&mut self,
arg1: XReg,
arg2: XReg,
arg3: XReg,
arg4: XReg,
offset: PcRelOffset,
) -> Self::Return
fn call4( &mut self, arg1: XReg, arg2: XReg, arg3: XReg, arg4: XReg, offset: PcRelOffset, ) -> Self::Return
Like call
, but also x0, x1, x2, x3 = arg1, arg2, arg3, arg4
Sourcefn call_indirect(&mut self, reg: XReg) -> Self::Return
fn call_indirect(&mut self, reg: XReg) -> Self::Return
Transfer control to the PC in reg
and set lr
to the PC just
after this instruction.
Sourcefn jump(&mut self, offset: PcRelOffset) -> Self::Return
fn jump(&mut self, offset: PcRelOffset) -> Self::Return
Unconditionally transfer control to the PC at the given offset.
Sourcefn xjump(&mut self, reg: XReg) -> Self::Return
fn xjump(&mut self, reg: XReg) -> Self::Return
Unconditionally transfer control to the PC at specified register.
Sourcefn br_if32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return
fn br_if32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return
Conditionally transfer control to the given PC offset if
low32(cond)
contains a non-zero value.
Sourcefn br_if_not32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return
fn br_if_not32(&mut self, cond: XReg, offset: PcRelOffset) -> Self::Return
Conditionally transfer control to the given PC offset if
low32(cond)
contains a zero value.
Sourcefn br_if_xeq32(&mut self, a: XReg, b: XReg, offset: PcRelOffset) -> Self::Return
fn br_if_xeq32(&mut self, a: XReg, b: XReg, offset: PcRelOffset) -> Self::Return
Branch if a == b
.
Sourcefn br_if_xneq32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xneq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if a !=
b.
Sourcefn br_if_xslt32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslt32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if signed a < b
.
Sourcefn br_if_xslteq32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslteq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if signed a <= b
.
Sourcefn br_if_xult32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xult32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a < b
.
Sourcefn br_if_xulteq32(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xulteq32( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a <= b
.
Sourcefn br_if_xeq64(&mut self, a: XReg, b: XReg, offset: PcRelOffset) -> Self::Return
fn br_if_xeq64(&mut self, a: XReg, b: XReg, offset: PcRelOffset) -> Self::Return
Branch if a == b
.
Sourcefn br_if_xneq64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xneq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if a !=
b.
Sourcefn br_if_xslt64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslt64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if signed a < b
.
Sourcefn br_if_xslteq64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslteq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if signed a <= b
.
Sourcefn br_if_xult64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xult64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a < b
.
Sourcefn br_if_xulteq64(
&mut self,
a: XReg,
b: XReg,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xulteq64( &mut self, a: XReg, b: XReg, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a <= b
.
Sourcefn br_if_xeq32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xeq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if a == b
.
Sourcefn br_if_xeq32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xeq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if a == b
.
Sourcefn br_if_xneq32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xneq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if a !=
b.
Sourcefn br_if_xneq32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xneq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if a !=
b.
Sourcefn br_if_xslt32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslt32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if signed a < b
.
Sourcefn br_if_xslt32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslt32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if signed a < b
.
Sourcefn br_if_xsgt32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xsgt32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if signed a > b
.
Sourcefn br_if_xsgt32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xsgt32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if signed a > b
.
Sourcefn br_if_xslteq32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslteq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if signed a <= b
.
Sourcefn br_if_xslteq32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslteq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if signed a <= b
.
Sourcefn br_if_xsgteq32_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xsgteq32_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if signed a >= b
.
Sourcefn br_if_xsgteq32_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xsgteq32_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if signed a >= b
.
Sourcefn br_if_xult32_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xult32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a < b
.
Sourcefn br_if_xult32_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xult32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a < b
.
Sourcefn br_if_xulteq32_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xulteq32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a <= b
.
Sourcefn br_if_xulteq32_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xulteq32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a <= b
.
Sourcefn br_if_xugt32_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xugt32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a > b
.
Sourcefn br_if_xugt32_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xugt32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a > b
.
Sourcefn br_if_xugteq32_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xugteq32_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a >= b
.
Sourcefn br_if_xugteq32_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xugteq32_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a >= b
.
Sourcefn br_if_xeq64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xeq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if a == b
.
Sourcefn br_if_xeq64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xeq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if a == b
.
Sourcefn br_if_xneq64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xneq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if a !=
b.
Sourcefn br_if_xneq64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xneq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if a !=
b.
Sourcefn br_if_xslt64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslt64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if signed a < b
.
Sourcefn br_if_xslt64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslt64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if signed a < b
.
Sourcefn br_if_xsgt64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xsgt64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if signed a > b
.
Sourcefn br_if_xsgt64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xsgt64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if signed a > b
.
Sourcefn br_if_xslteq64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslteq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if signed a <= b
.
Sourcefn br_if_xslteq64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xslteq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if signed a <= b
.
Sourcefn br_if_xsgteq64_i8(
&mut self,
a: XReg,
b: i8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xsgteq64_i8( &mut self, a: XReg, b: i8, offset: PcRelOffset, ) -> Self::Return
Branch if signed a >= b
.
Sourcefn br_if_xsgteq64_i32(
&mut self,
a: XReg,
b: i32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xsgteq64_i32( &mut self, a: XReg, b: i32, offset: PcRelOffset, ) -> Self::Return
Branch if signed a >= b
.
Sourcefn br_if_xult64_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xult64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a < b
.
Sourcefn br_if_xult64_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xult64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a < b
.
Sourcefn br_if_xulteq64_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xulteq64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a <= b
.
Sourcefn br_if_xulteq64_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xulteq64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a <= b
.
Sourcefn br_if_xugt64_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xugt64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a > b
.
Sourcefn br_if_xugt64_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xugt64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a > b
.
Sourcefn br_if_xugteq64_u8(
&mut self,
a: XReg,
b: u8,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xugteq64_u8( &mut self, a: XReg, b: u8, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a >= b
.
Sourcefn br_if_xugteq64_u32(
&mut self,
a: XReg,
b: u32,
offset: PcRelOffset,
) -> Self::Return
fn br_if_xugteq64_u32( &mut self, a: XReg, b: u32, offset: PcRelOffset, ) -> Self::Return
Branch if unsigned a >= b
.
Sourcefn br_table32(&mut self, idx: XReg, amt: u32) -> Self::Return
fn br_table32(&mut self, idx: XReg, amt: u32) -> Self::Return
Branch to the label indicated by low32(idx)
.
After this instruction are amt
instances of PcRelOffset
and the idx
selects which one will be branched to. The value
of idx
is clamped to amt - 1
(e.g. the last offset is the
“default” one).
Sourcefn xadd32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xadd32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
32-bit wrapping addition: low32(dst) = low32(src1) + low32(src2)
.
The upper 32-bits of dst
are unmodified.
Sourcefn xadd32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return
fn xadd32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return
Same as xadd32
but src2
is a zero-extended 8-bit immediate.
Sourcefn xadd32_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return
fn xadd32_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return
Same as xadd32
but src2
is a 32-bit immediate.
Sourcefn xadd64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xadd64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
64-bit wrapping addition: dst = src1 + src2
.
Sourcefn xadd64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return
fn xadd64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return
Same as xadd64
but src2
is a zero-extended 8-bit immediate.
Sourcefn xadd64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return
fn xadd64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return
Same as xadd64
but src2
is a zero-extended 32-bit immediate.
Sourcefn xsub32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xsub32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
32-bit wrapping subtraction: low32(dst) = low32(src1) - low32(src2)
.
The upper 32-bits of dst
are unmodified.
Sourcefn xsub32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return
fn xsub32_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return
Same as xsub32
but src2
is a zero-extended 8-bit immediate.
Sourcefn xsub32_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return
fn xsub32_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return
Same as xsub32
but src2
is a 32-bit immediate.
Sourcefn xsub64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xsub64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
64-bit wrapping subtraction: dst = src1 - src2
.
Sourcefn xsub64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return
fn xsub64_u8(&mut self, dst: XReg, src1: XReg, src2: u8) -> Self::Return
Same as xsub64
but src2
is a zero-extended 8-bit immediate.
Sourcefn xsub64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return
fn xsub64_u32(&mut self, dst: XReg, src1: XReg, src2: u32) -> Self::Return
Same as xsub64
but src2
is a zero-extended 32-bit immediate.
Sourcefn xmul32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmul32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) * low32(src2)
Sourcefn xmul32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
fn xmul32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
Same as xmul32
but src2
is a sign-extended 8-bit immediate.
Sourcefn xmul32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
fn xmul32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
Same as xmul32
but src2
is a sign-extended 32-bit immediate.
Sourcefn xmul64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmul64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 * src2
Sourcefn xmul64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
fn xmul64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
Same as xmul64
but src2
is a sign-extended 8-bit immediate.
Sourcefn xmul64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
fn xmul64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
Same as xmul64
but src2
is a sign-extended 32-bit immediate.
Sourcefn xctz32(&mut self, dst: XReg, src: XReg) -> Self::Return
fn xctz32(&mut self, dst: XReg, src: XReg) -> Self::Return
low32(dst) = trailing_zeros(low32(src))
Sourcefn xclz32(&mut self, dst: XReg, src: XReg) -> Self::Return
fn xclz32(&mut self, dst: XReg, src: XReg) -> Self::Return
low32(dst) = leading_zeros(low32(src))
Sourcefn xpopcnt32(&mut self, dst: XReg, src: XReg) -> Self::Return
fn xpopcnt32(&mut self, dst: XReg, src: XReg) -> Self::Return
low32(dst) = count_ones(low32(src))
Sourcefn xrotl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xrotl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = rotate_left(low32(src1), low32(src2))
Sourcefn xrotl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xrotl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = rotate_left(src1, src2)
Sourcefn xrotr32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xrotr32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = rotate_right(low32(src1), low32(src2))
Sourcefn xrotr64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xrotr64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = rotate_right(src1, src2)
Sourcefn xshl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xshl32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) << low5(src2)
Sourcefn xshr32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xshr32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) >> low5(src2)
Sourcefn xshr32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xshr32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) >> low5(src2)
Sourcefn xshl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xshl64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 << low6(src2)
Sourcefn xshr64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xshr64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 >> low6(src2)
Sourcefn xshr64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xshr64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 >> low6(src2)
Sourcefn xshl32_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return
fn xshl32_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return
low32(dst) = low32(src1) << low5(src2)
Sourcefn xshr32_s_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return
fn xshr32_s_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return
low32(dst) = low32(src1) >> low5(src2)
Sourcefn xshr32_u_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return
fn xshr32_u_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return
low32(dst) = low32(src1) >> low5(src2)
Sourcefn xshl64_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return
fn xshl64_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return
dst = src1 << low6(src2)
Sourcefn xshr64_s_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return
fn xshr64_s_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return
dst = src1 >> low6(src2)
Sourcefn xshr64_u_u6(
&mut self,
operands: BinaryOperands<XReg, XReg, U6>,
) -> Self::Return
fn xshr64_u_u6( &mut self, operands: BinaryOperands<XReg, XReg, U6>, ) -> Self::Return
dst = src1 >> low6(src2)
Sourcefn xeq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xeq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = src1 == src2
Sourcefn xneq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xneq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = src1 != src2
Sourcefn xslt64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xslt64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = src1 < src2
(signed)
Sourcefn xslteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xslteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = src1 <= src2
(signed)
Sourcefn xult64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xult64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = src1 < src2
(unsigned)
Sourcefn xulteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xulteq64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = src1 <= src2
(unsigned)
Sourcefn xeq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xeq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) == low32(src2)
Sourcefn xneq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xneq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) != low32(src2)
Sourcefn xslt32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xslt32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) < low32(src2)
(signed)
Sourcefn xslteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xslteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) <= low32(src2)
(signed)
Sourcefn xult32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xult32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) < low32(src2)
(unsigned)
Sourcefn xulteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xulteq32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) <= low32(src2)
(unsigned)
Sourcefn xload8_u32_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload8_u32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
low32(dst) = zext(*(ptr + offset))
Sourcefn xload8_s32_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload8_s32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
low32(dst) = sext(*(ptr + offset))
Sourcefn xload16le_u32_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload16le_u32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
low32(dst) = zext(*(ptr + offset))
Sourcefn xload16le_s32_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload16le_s32_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
low32(dst) = sext(*(ptr + offset))
Sourcefn xload32le_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload32le_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
low32(dst) = *(ptr + offset)
Sourcefn xload8_u64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload8_u64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
dst = zext(*(ptr + offset))
Sourcefn xload8_s64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload8_s64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
dst = sext(*(ptr + offset))
Sourcefn xload16le_u64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload16le_u64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
dst = zext(*(ptr + offset))
Sourcefn xload16le_s64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload16le_s64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
dst = sext(*(ptr + offset))
Sourcefn xload32le_u64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload32le_u64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
dst = zext(*(ptr + offset))
Sourcefn xload32le_s64_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload32le_s64_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
dst = sext(*(ptr + offset))
Sourcefn xload64le_offset32(
&mut self,
dst: XReg,
ptr: XReg,
offset: i32,
) -> Self::Return
fn xload64le_offset32( &mut self, dst: XReg, ptr: XReg, offset: i32, ) -> Self::Return
dst = *(ptr + offset)
Sourcefn xstore8_offset32(
&mut self,
ptr: XReg,
offset: i32,
src: XReg,
) -> Self::Return
fn xstore8_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return
*(ptr + offset) = low8(src)
Sourcefn xstore16le_offset32(
&mut self,
ptr: XReg,
offset: i32,
src: XReg,
) -> Self::Return
fn xstore16le_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return
*(ptr + offset) = low16(src)
Sourcefn xstore32le_offset32(
&mut self,
ptr: XReg,
offset: i32,
src: XReg,
) -> Self::Return
fn xstore32le_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return
*(ptr + offset) = low32(src)
Sourcefn xstore64le_offset32(
&mut self,
ptr: XReg,
offset: i32,
src: XReg,
) -> Self::Return
fn xstore64le_offset32( &mut self, ptr: XReg, offset: i32, src: XReg, ) -> Self::Return
*(ptr + offset) = low64(src)
Sourcefn xload8_u32_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload8_u32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
low32(dst) = zext(*(ptr + offset))
Sourcefn xload8_s32_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload8_s32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
low32(dst) = sext(*(ptr + offset))
Sourcefn xload16le_u32_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload16le_u32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
low32(dst) = zext(*(ptr + offset))
Sourcefn xload16le_s32_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload16le_s32_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
low32(dst) = sext(*(ptr + offset))
Sourcefn xload32le_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload32le_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
low32(dst) = *(ptr + offset)
Sourcefn xload8_u64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload8_u64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
dst = zext(*(ptr + offset))
Sourcefn xload8_s64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload8_s64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
dst = sext(*(ptr + offset))
Sourcefn xload16le_u64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload16le_u64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
dst = zext(*(ptr + offset))
Sourcefn xload16le_s64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload16le_s64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
dst = sext(*(ptr + offset))
Sourcefn xload32le_u64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload32le_u64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
dst = zext(*(ptr + offset))
Sourcefn xload32le_s64_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload32le_s64_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
dst = sext(*(ptr + offset))
Sourcefn xload64le_offset8(
&mut self,
dst: XReg,
ptr: XReg,
offset: u8,
) -> Self::Return
fn xload64le_offset8( &mut self, dst: XReg, ptr: XReg, offset: u8, ) -> Self::Return
dst = *(ptr + offset)
Sourcefn xstore8_offset8(&mut self, ptr: XReg, offset: u8, src: XReg) -> Self::Return
fn xstore8_offset8(&mut self, ptr: XReg, offset: u8, src: XReg) -> Self::Return
*(ptr + offset) = low8(src)
Sourcefn xstore16le_offset8(
&mut self,
ptr: XReg,
offset: u8,
src: XReg,
) -> Self::Return
fn xstore16le_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return
*(ptr + offset) = low16(src)
Sourcefn xstore32le_offset8(
&mut self,
ptr: XReg,
offset: u8,
src: XReg,
) -> Self::Return
fn xstore32le_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return
*(ptr + offset) = low32(src)
Sourcefn xstore64le_offset8(
&mut self,
ptr: XReg,
offset: u8,
src: XReg,
) -> Self::Return
fn xstore64le_offset8( &mut self, ptr: XReg, offset: u8, src: XReg, ) -> Self::Return
*(ptr + offset) = low64(src)
Sourcefn push_frame(&mut self) -> Self::Return
fn push_frame(&mut self) -> Self::Return
push lr; push fp; fp = sp
Sourcefn push_frame_save(&mut self, amt: u32, regs: RegSet<XReg>) -> Self::Return
fn push_frame_save(&mut self, amt: u32, regs: RegSet<XReg>) -> Self::Return
Macro-instruction to enter a function, allocate some stack, and then save some registers.
This is equivalent to push_frame
, stack_alloc32 amt
, then
saving all of regs
to the top of the stack just allocated.
Sourcefn pop_frame_restore(&mut self, amt: u32, regs: RegSet<XReg>) -> Self::Return
fn pop_frame_restore(&mut self, amt: u32, regs: RegSet<XReg>) -> Self::Return
Inverse of push_frame_save
. Restores regs
from the top of
the stack, then runs stack_free32 amt
, then runs pop_frame
.
Sourcefn stack_alloc32(&mut self, amt: u32) -> Self::Return
fn stack_alloc32(&mut self, amt: u32) -> Self::Return
sp = sp.checked_sub(amt)
Sourcefn stack_free32(&mut self, amt: u32) -> Self::Return
fn stack_free32(&mut self, amt: u32) -> Self::Return
sp = sp + amt
Sourcefn xdiv32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xdiv32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) / low32(src2)
(signed)
Sourcefn xdiv64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xdiv64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 / src2
(signed)
Sourcefn xdiv32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xdiv32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) / low32(src2)
(unsigned)
Sourcefn xdiv64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xdiv64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 / src2
(unsigned)
Sourcefn xrem32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xrem32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) % low32(src2)
(signed)
Sourcefn xrem64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xrem64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 % src2
(signed)
Sourcefn xrem32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xrem32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) % low32(src2)
(unsigned)
Sourcefn xrem64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xrem64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 % src2
(unsigned)
Sourcefn xband32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xband32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) & low32(src2)
Sourcefn xband32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
fn xband32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
Same as xband32
but src2
is a sign-extended 8-bit immediate.
Sourcefn xband32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
fn xband32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
Same as xband32
but src2
is a sign-extended 32-bit immediate.
Sourcefn xband64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xband64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 & src2
Sourcefn xband64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
fn xband64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
Same as xband64
but src2
is a sign-extended 8-bit immediate.
Sourcefn xband64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
fn xband64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
Same as xband64
but src2
is a sign-extended 32-bit immediate.
Sourcefn xbor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xbor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) | low32(src2)
Sourcefn xbor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
fn xbor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
Same as xbor32
but src2
is a sign-extended 8-bit immediate.
Sourcefn xbor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
fn xbor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
Same as xbor32
but src2
is a sign-extended 32-bit immediate.
Sourcefn xbor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xbor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 | src2
Sourcefn xbor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
fn xbor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
Same as xbor64
but src2
is a sign-extended 8-bit immediate.
Sourcefn xbor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
fn xbor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
Same as xbor64
but src2
is a sign-extended 32-bit immediate.
Sourcefn xbxor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xbxor32(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = low32(src1) ^ low32(src2)
Sourcefn xbxor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
fn xbxor32_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
Same as xbxor32
but src2
is a sign-extended 8-bit immediate.
Sourcefn xbxor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
fn xbxor32_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
Same as xbxor32
but src2
is a sign-extended 32-bit immediate.
Sourcefn xbxor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xbxor64(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = src1 ^ src2
Sourcefn xbxor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
fn xbxor64_s8(&mut self, dst: XReg, src1: XReg, src2: i8) -> Self::Return
Same as xbxor64
but src2
is a sign-extended 8-bit immediate.
Sourcefn xbxor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
fn xbxor64_s32(&mut self, dst: XReg, src1: XReg, src2: i32) -> Self::Return
Same as xbxor64
but src2
is a sign-extended 32-bit immediate.
Sourcefn xmin32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmin32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = min(low32(src1), low32(src2))
(unsigned)
Sourcefn xmin32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmin32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = min(low32(src1), low32(src2))
(signed)
Sourcefn xmax32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmax32_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = max(low32(src1), low32(src2))
(unsigned)
Sourcefn xmax32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmax32_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
low32(dst) = max(low32(src1), low32(src2))
(signed)
Sourcefn xmin64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmin64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = min(src1, src2)
(unsigned)
Sourcefn xmin64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmin64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = min(src1, src2)
(signed)
Sourcefn xmax64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmax64_u(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = max(src1, src2)
(unsigned)
Sourcefn xmax64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
fn xmax64_s(&mut self, operands: BinaryOperands<XReg>) -> Self::Return
dst = max(src1, src2)
(signed)
Provided Methods§
Sourcefn before_visit(&mut self)
fn before_visit(&mut self)
A callback invoked before starting to decode an instruction.
Does nothing by default.
Sourcefn after_visit(&mut self)
fn after_visit(&mut self)
A callback invoked after an instruction has been completely decoded.
Does nothing by default.
Implementors§
Source§impl<'a> OpVisitor for Disassembler<'a>
Available on crate feature disas
only.
impl<'a> OpVisitor for Disassembler<'a>
disas
only.