Available on crate feature `decode` only.
Functions for decoding the operands of an instruction, assuming the opcode has already been decoded.
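For orientation, the sketch below shows the split this module assumes: a dispatcher first reads and matches the opcode byte, and only then calls a per-opcode function to decode that instruction's operands. Every name in the sketch (`Opcode`, `Bytecode`, `xadd32_operands`) is an illustrative stand-in, not this crate's actual types or signatures; consult the individual function docs for the real API.

```rust
/// Hypothetical opcode enum; a real bytecode format defines many more.
#[derive(Debug)]
enum Opcode {
    Xadd32 = 0x10,
}

/// Hypothetical byte cursor over encoded instructions.
struct Bytecode<'a> {
    bytes: &'a [u8],
    pos: usize,
}

impl<'a> Bytecode<'a> {
    fn read_u8(&mut self) -> Option<u8> {
        let b = *self.bytes.get(self.pos)?;
        self.pos += 1;
        Some(b)
    }
}

/// Operand decoder for a three-register ALU op. It is called only after
/// the dispatcher has already consumed and matched the opcode byte, so it
/// reads nothing but the operands.
fn xadd32_operands(pc: &mut Bytecode<'_>) -> Option<(u8, u8, u8)> {
    let dst = pc.read_u8()?;
    let src1 = pc.read_u8()?;
    let src2 = pc.read_u8()?;
    Some((dst, src1, src2))
}

fn main() {
    // Encoded instruction: opcode byte, then three register indices.
    let mut pc = Bytecode { bytes: &[0x10, 0, 1, 2], pos: 0 };

    // Step 1: decode the opcode (not this module's job).
    let opcode = pc.read_u8().unwrap();
    assert_eq!(opcode, Opcode::Xadd32 as u8);

    // Step 2: decode the operands with the per-opcode function.
    let (dst, src1, src2) = xadd32_operands(&mut pc).unwrap();
    println!("xadd32 x{dst}, x{src1}, x{src2}");
}
```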
Functions
- bitcast_float_from_int_32
- bitcast_float_from_int_64
- bitcast_int_from_float_32
- bitcast_int_from_float_64
- br_if32
- br_if_not32
- br_if_xeq32
- br_if_xeq64
- br_if_xeq32_i8
- br_if_xeq32_i32
- br_if_xeq64_i8
- br_if_xeq64_i32
- br_if_xneq32
- br_if_xneq64
- br_if_xneq32_i8
- br_if_xneq32_i32
- br_if_xneq64_i8
- br_if_xneq64_i32
- br_if_xsgt32_i8
- br_if_xsgt32_i32
- br_if_xsgt64_i8
- br_if_xsgt64_i32
- br_if_xsgteq32_i8
- br_if_xsgteq32_i32
- br_if_xsgteq64_i8
- br_if_xsgteq64_i32
- br_if_xslt32
- br_if_xslt64
- br_if_xslt32_i8
- br_if_xslt32_i32
- br_if_xslt64_i8
- br_if_xslt64_i32
- br_if_xslteq32
- br_if_xslteq64
- br_if_xslteq32_i8
- br_if_xslteq32_i32
- br_if_xslteq64_i8
- br_if_xslteq64_i32
- br_if_xugt32_u8
- br_if_xugt32_u32
- br_if_xugt64_u8
- br_if_xugt64_u32
- br_if_xugteq32_u8
- br_if_xugteq32_u32
- br_if_xugteq64_u8
- br_if_xugteq64_u32
- br_if_xult32
- br_if_xult64
- br_if_xult32_u8
- br_if_xult32_u32
- br_if_xult64_u8
- br_if_xult64_u32
- br_if_xulteq32
- br_if_xulteq64
- br_if_xulteq32_u8
- br_if_xulteq32_u32
- br_if_xulteq64_u8
- br_if_xulteq64_u32
- br_table32
- bswap32
- bswap64
- call
- call1
- call2
- call3
- call4
- call_indirect
- call_indirect_host
- extended
- f32_from_f64
- f32_from_x32_s
- f32_from_x32_u
- f32_from_x64_s
- f32_from_x64_u
- f64_from_f32
- f64_from_x32_s
- f64_from_x32_u
- f64_from_x64_s
- f64_from_x64_u
- fabs32
- fabs64
- fadd32
- fadd64
- fceil32
- fceil64
- fconst32
- fconst64
- fcopysign32
- fcopysign64
- fdiv32
- fdiv64
- feq32
- feq64
- fextractv32x4
- fextractv64x2
- ffloor32
- ffloor64
- fload32be_offset32
- fload32le_offset32
- fload64be_offset32
- fload64le_offset32
- flt32
- flt64
- flteq32
- flteq64
- fmaximum32
- fmaximum64
- fminimum32
- fminimum64
- fmov
- fmul32
- fmul64
- fnearest32
- fnearest64
- fneg32
- fneg64
- fneq32
- fneq64
- fselect32
- fselect64
- fsqrt32
- fsqrt64
- fstore32be_offset32
- fstore32le_offset32
- fstore64be_offset32
- fstore64le_offset32
- fsub32
- fsub64
- ftrunc32
- ftrunc64
- jump
- nop
- pop_frame
- pop_frame_restore
- push_frame
- push_frame_save
- ret
- sext8
- sext16
- sext32
- stack_alloc32
- stack_free32
- trap
- vabs8x16
- vabs16x8
- vabs32x4
- vabs64x2
- vabsf32x4
- vabsf64x2
- vaddf32x4
- vaddf64x2
- vaddi8x16
- vaddi8x16_sat
- vaddi16x8
- vaddi16x8_sat
- vaddi32x4
- vaddi64x2
- vaddpairwisei16x8_s
- vaddpairwisei32x4_s
- vaddu8x16_sat
- vaddu16x8_sat
- valltrue8x16
- valltrue16x8
- valltrue32x4
- valltrue64x2
- vanytrue8x16
- vanytrue16x8
- vanytrue32x4
- vanytrue64x2
- vavground8x16
- vavground16x8
- vband128
- vbitmask8x16
- vbitmask16x8
- vbitmask32x4
- vbitmask64x2
- vbitselect128
- vbnot128
- vbor128
- vbxor128
- vceil32x4
- vceil64x2
- vconst128
- vdivf32x4
- vdivf64x2
- veq8x16
- veq16x8
- veq32x4
- veq64x2
- vf32x4_from_i32x4_s
- vf32x4_from_i32x4_u
- vf64x2_from_i64x2_s
- vf64x2_from_i64x2_u
- vfdemote
- vfloor32x4
- vfloor64x2
- vfpromotelow
- vinsertf32
- vinsertf64
- vinsertx8
- vinsertx16
- vinsertx32
- vinsertx64
- vload8x8_s_offset32
- vload8x8_u_offset32
- vload16x4le_s_offset32
- vload16x4le_u_offset32
- vload32x2le_s_offset32
- vload32x2le_u_offset32
- vload128le_offset32
- vmax8x16_s
- vmax8x16_u
- vmax16x8_s
- vmax16x8_u
- vmax32x4_s
- vmax32x4_u
- vmaximumf32x4
- vmaximumf64x2
- vmin8x16_s
- vmin8x16_u
- vmin16x8_s
- vmin16x8_u
- vmin32x4_s
- vmin32x4_u
- vminimumf32x4
- vminimumf64x2
- vmov
- vmulf64x2
- vmuli8x16
- vmuli16x8
- vmuli32x4
- vmuli64x2
- vnarrow16x8_s
- vnarrow16x8_u
- vnarrow32x4_s
- vnarrow32x4_u
- vnearest32x4
- vnearest64x2
- vneg8x16
- vneg16x8
- vneg32x4
- vneg64x2
- vnegf64x2
- vneq8x16
- vneq16x8
- vneq32x4
- vneq64x2
- vpopcnt8x16
- vqmulrsi16x8
- vshli8x16
- vshli16x8
- vshli32x4
- vshli64x2
- vshri8x16_s
- vshri8x16_u
- vshri16x8_s
- vshri16x8_u
- vshri32x4_s
- vshri32x4_u
- vshri64x2_s
- vshri64x2_u
- vshuffle
- vslt8x16
- vslt16x8
- vslt32x4
- vslt64x2
- vslteq8x16
- vslteq16x8
- vslteq32x4
- vslteq64x2
- vsplatf32
- vsplatf64
- vsplatx8
- vsplatx16
- vsplatx32
- vsplatx64
- vsqrt32x4
- vsqrt64x2
- vstore128le_offset32
- vsubf64x2
- vsubi8x16
- vsubi8x16_sat
- vsubi16x8
- vsubi16x8_sat
- vsubi32x4
- vsubi64x2
- vsubu8x16_sat
- vsubu16x8_sat
- vswizzlei8x16
- vtrunc32x4
- vtrunc64x2
- vult8x16
- vult16x8
- vult32x4
- vult64x2
- vulteq8x16
- vulteq16x8
- vulteq32x4
- vulteq64x2
- vwidenhigh8x16_s
- vwidenhigh8x16_u
- vwidenhigh16x8_s
- vwidenhigh16x8_u
- vwidenhigh32x4_s
- vwidenhigh32x4_u
- vwidenlow8x16_s
- vwidenlow8x16_u
- vwidenlow16x8_s
- vwidenlow16x8_u
- vwidenlow32x4_s
- vwidenlow32x4_u
- x32_from_f32_s
- x32_from_f32_s_sat
- x32_from_f32_u
- x32_from_f32_u_sat
- x32_from_f64_s
- x32_from_f64_s_sat
- x32_from_f64_u
- x32_from_f64_u_sat
- x64_from_f32_s
- x64_from_f32_s_sat
- x64_from_f32_u
- x64_from_f32_u_sat
- x64_from_f64_s
- x64_from_f64_s_sat
- x64_from_f64_u
- x64_from_f64_u_sat
- xabs32
- xabs64
- xadd32
- xadd64
- xadd32_u8
- xadd32_u32
- xadd32_uoverflow_trap
- xadd64_u8
- xadd64_u32
- xadd64_uoverflow_trap
- xband32
- xband64
- xband32_s8
- xband32_s32
- xband64_s8
- xband64_s32
- xbmask32
- xbmask64
- xbnot32
- xbnot64
- xbor32
- xbor64
- xbor32_s8
- xbor32_s32
- xbor64_s8
- xbor64_s32
- xbxor32
- xbxor64
- xbxor32_s8
- xbxor32_s32
- xbxor64_s8
- xbxor64_s32
- xclz32
- xclz64
- xconst8
- xconst16
- xconst32
- xconst64
- xctz32
- xctz64
- xdiv32_s
- xdiv32_u
- xdiv64_s
- xdiv64_u
- xeq32
- xeq64
- xextractv8x16
- xextractv16x8
- xextractv32x4
- xextractv64x2
- xjump
- xload8_s32_offset8
- xload8_s32_offset32
- xload8_s64_offset8
- xload8_s64_offset32
- xload8_u32_offset8
- xload8_u32_offset32
- xload8_u64_offset8
- xload8_u64_offset32
- xload16be_s64_offset32
- xload16be_u64_offset32
- xload16le_s32_offset8
- xload16le_s32_offset32
- xload16le_s64_offset8
- xload16le_s64_offset32
- xload16le_u32_offset8
- xload16le_u32_offset32
- xload16le_u64_offset8
- xload16le_u64_offset32
- xload32be_s64_offset32
- xload32be_u64_offset32
- xload32le_offset8
- xload32le_offset32
- xload32le_s64_offset8
- xload32le_s64_offset32
- xload32le_u64_offset8
- xload32le_u64_offset32
- xload64be_offset32
- xload64le_offset8
- xload64le_offset32
- xmax32_s
- xmax32_u
- xmax64_s
- xmax64_u
- xmin32_s
- xmin32_u
- xmin64_s
- xmin64_u
- xmov
- xmov_fp
- xmov_lr
- xmul32
- xmul64
- xmul32_s8
- xmul32_s32
- xmul64_s8
- xmul64_s32
- xmulhi64_s
- xmulhi64_u
- xneg32
- xneg64
- xneq32
- xneq64
- xpop32
- xpop64
- xpop32_many
- xpop64_many
- xpopcnt32
- xpopcnt64
- xpush32
- xpush64
- xpush32_many
- xpush64_many
- xrem32_s
- xrem32_u
- xrem64_s
- xrem64_u
- xrotl32
- xrotl64
- xrotr32
- xrotr64
- xselect32
- xselect64
- xshl32
- xshl64
- xshl32_u6
- xshl64_u6
- xshr32_s
- xshr32_s_u6
- xshr32_u
- xshr32_u_u6
- xshr64_s
- xshr64_s_u6
- xshr64_u
- xshr64_u_u6
- xslt32
- xslt64
- xslteq32
- xslteq64
- xstore8_offset8
- xstore8_offset32
- xstore16be_offset32
- xstore16le_offset8
- xstore16le_offset32
- xstore32be_offset32
- xstore32le_offset8
- xstore32le_offset32
- xstore64be_offset32
- xstore64le_offset8
- xstore64le_offset32
- xsub32
- xsub64
- xsub32_u8
- xsub32_u32
- xsub64_u8
- xsub64_u32
- xult32
- xult64
- xulteq32
- xulteq64
- zext8
- zext16
- zext32