// Generates a bounds-checked, unaligned little-endian read of `$mem_ty` from
// `$slf.data_ptr`. On success the raw value is converted with `$from_le`,
// cast to `$ret_ty`, and `data_ptr` advances past the bytes read. On
// underflow (fewer than SIZE bytes left before `max_data_ptr`) the whole
// expression evaluates `$err_expr` instead and `data_ptr` is left unchanged.
macro_rules! mk_read_xx {
	($slf:ident, $mem_ty:ty, $from_le:path, $ret_ty:ty, $err_expr:expr) => {
		const SIZE: usize = mem::size_of::<$mem_ty>();
		// Compile-time guard: read sizes are 1..=MAX_READ_SIZE bytes, which is
		// what the slice validation in `try_with_ip` assumes so that the
		// pointer arithmetic below cannot overflow.
		const _: () = assert!(SIZE >= 1);
		const _: () = assert!(SIZE <= Decoder::MAX_READ_SIZE);
		let data_ptr = $slf.data_ptr;
		#[allow(trivial_numeric_casts)]
		{
			// `data_ptr + SIZE - 1` is the address of the last byte needed.
			if data_ptr + SIZE - 1 < $slf.max_data_ptr {
				// SAFETY: the bounds check above guarantees SIZE readable bytes;
				// `read_unaligned` is used because `data_ptr` has no alignment guarantee.
				let result = $from_le(unsafe { ptr::read_unaligned(data_ptr as *const $mem_ty) }) as $ret_ty;
				$slf.data_ptr = data_ptr + SIZE;
				result
			} else {
				$err_expr
			}
		}
	};
}
// Body used by the `read_uXX()` methods: on buffer underflow the decoder
// state is marked invalid + out-of-bytes and the read yields 0.
macro_rules! mk_read_xx_fn_body {
	($slf:ident, $mem_ty:ty, $from_le:path, $ret_ty:ty) => {
		mk_read_xx!($slf, $mem_ty, $from_le, $ret_ty, {
			$slf.state.flags |= StateFlags::IS_INVALID | StateFlags::NO_MORE_BYTES;
			0
		})
	};
}
// `read_uXX` variants for use inside a `loop`: on buffer underflow they
// `break` out of the enclosing loop instead of recording an error.
macro_rules! read_u8_break {
	($slf:ident) => {{
		mk_read_xx! {$slf, u8, u8::from_le, usize, break}
	}};
}
#[cfg(not(feature = "__internal_flip"))]
macro_rules! read_u16_break {
	($slf:ident) => {{
		mk_read_xx! {$slf, u16, u16::from_le, usize, break}
	}};
}
macro_rules! read_u32_break {
	($slf:ident) => {{
		mk_read_xx! {$slf, u32, u32::from_le, usize, break}
	}};
}
// Reads a memory operand (legacy/VEX/XOP paths only, not EVEX/MVEX) and
// evaluates to the handler's `bool` result (`false` for the 16-bit
// address-size path). NOTE: the handler fn is looked up from
// `state.mem_index` BEFORE `$stmts` runs, so `$stmts` must not change it.
#[cfg(not(feature = "__internal_flip"))]
macro_rules! read_op_mem_stmt_ret {
	($decoder:ident, $instruction:ident, $stmts:block) => {{
		debug_assert!($decoder.state.encoding() != EncodingKind::EVEX as u32 && $decoder.state.encoding() != EncodingKind::MVEX as u32);
		// mem_index = (mod << 3) | rm; callers use this for memory operands
		// (mod != 3), keeping the index < 0x18 == table len (debug-checked below).
		let index = $decoder.state.mem_index as usize;
		debug_assert!(index < $decoder.read_op_mem_fns.len());
		// SAFETY: `index` is in bounds, see the debug_assert above
		let handler = unsafe { *$decoder.read_op_mem_fns.get_unchecked(index) };

		$stmts

		if $decoder.state.address_size != OpSize::Size16 {
			(handler)($instruction, $decoder)
		} else {
			$decoder.read_op_mem_16($instruction, TupleType::N1);
			false
		}
	}};
}
// Statement form of `read_op_mem_stmt_ret!`: same behavior, result discarded.
#[cfg(not(feature = "__internal_flip"))]
macro_rules! read_op_mem_stmt {
	($decoder:ident, $instruction:ident, $stmts:block) => {
		let _ = read_op_mem_stmt_ret!($decoder, $instruction, $stmts);
	};
}
// `__internal_flip` build: there's no fn-pointer table (`read_op_mem_fns` is
// `()`), so dispatch directly on the effective address size instead.
#[cfg(feature = "__internal_flip")]
macro_rules! read_op_mem_stmt {
	($decoder:ident, $instruction:ident, $stmts:block) => {
		debug_assert!($decoder.state.encoding() != EncodingKind::EVEX as u32 && $decoder.state.encoding() != EncodingKind::MVEX as u32);
		$stmts
		if $decoder.state.address_size != OpSize::Size16 {
			let _ = $decoder.read_op_mem_32_or_64($instruction);
		} else {
			$decoder.read_op_mem_16($instruction, TupleType::N1);
		}
	};
}
89
90mod enums;
91mod handlers;
92mod table_de;
93#[cfg(test)]
94pub(crate) mod tests;
95
96use crate::decoder::handlers::tables::TABLES;
97use crate::decoder::handlers::{OpCodeHandler, OpCodeHandlerDecodeFn};
98use crate::iced_constants::IcedConstants;
99use crate::iced_error::IcedError;
100use crate::instruction_internal;
101use crate::tuple_type_tbl::get_disp8n;
102use crate::*;
103use core::iter::FusedIterator;
104use core::{cmp, fmt, mem, ptr};
105
// VSIB memory-operand readers, indexed by `state.mem_index = (mod << 3) | rm`
// (memory forms only, so mod is 0..=2 → 0x18 entries). Each row of 8 covers
// rm 0..=7 for one mod value; the rm == 4 column uses the `_4` (SIB) variant
// and mod == 0 / rm == 5 uses the `_5` variant — presumably the disp32 /
// RIP-relative form; confirm in the handler implementations.
#[rustfmt::skip]
#[cfg(any(feature = "__internal_flip", not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
static READ_OP_MEM_VSIB_FNS: [fn(&mut Decoder<'_>, &mut Instruction, Register, TupleType, bool) -> bool; 0x18] = [
	// mod == 0
	decoder_read_op_mem_vsib_0,
	decoder_read_op_mem_vsib_0,
	decoder_read_op_mem_vsib_0,
	decoder_read_op_mem_vsib_0,
	decoder_read_op_mem_vsib_0_4,
	decoder_read_op_mem_vsib_0_5,
	decoder_read_op_mem_vsib_0,
	decoder_read_op_mem_vsib_0,

	// mod == 1
	decoder_read_op_mem_vsib_1,
	decoder_read_op_mem_vsib_1,
	decoder_read_op_mem_vsib_1,
	decoder_read_op_mem_vsib_1,
	decoder_read_op_mem_vsib_1_4,
	decoder_read_op_mem_vsib_1,
	decoder_read_op_mem_vsib_1,
	decoder_read_op_mem_vsib_1,

	// mod == 2
	decoder_read_op_mem_vsib_2,
	decoder_read_op_mem_vsib_2,
	decoder_read_op_mem_vsib_2,
	decoder_read_op_mem_vsib_2,
	decoder_read_op_mem_vsib_2_4,
	decoder_read_op_mem_vsib_2,
	decoder_read_op_mem_vsib_2,
	decoder_read_op_mem_vsib_2,
];
136
// (base, index) register pairs for 16-bit addressing, indexed by ModRM.rm
// (the classic BX+SI .. BX table from the x86 16-bit addressing modes).
static MEM_REGS_16: [(Register, Register); 8] = [
	(Register::BX, Register::SI),
	(Register::BX, Register::DI),
	(Register::BP, Register::SI),
	(Register::BP, Register::DI),
	(Register::SI, Register::None),
	(Register::DI, Register::None),
	(Register::BP, Register::None),
	(Register::BX, Register::None),
];
147
// Operand/address size. The discriminant order (16, 32, 64) is relied upon
// by `GEN_DEBUG_OP_SIZE` indexing below.
#[derive(Copy, Clone, Eq, PartialEq)]
#[allow(dead_code)]
pub(crate) enum OpSize {
	Size16,
	Size32,
	Size64,
}
// Debug names; must be kept in sync with the `OpSize` variant order.
#[rustfmt::skip]
static GEN_DEBUG_OP_SIZE: [&str; 3] = [
	"Size16",
	"Size32",
	"Size64",
];
impl fmt::Debug for OpSize {
	#[inline]
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		// `*self as usize` is always < 3 since the enum has 3 variants.
		write!(f, "{}", GEN_DEBUG_OP_SIZE[*self as usize])
	}
}
impl Default for OpSize {
	#[must_use]
	#[inline]
	fn default() -> Self {
		OpSize::Size16
	}
}
/// Decoder error kind, see [`Decoder::last_error()`]
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[cfg_attr(not(feature = "exhaustive_enums"), non_exhaustive)]
pub enum DecoderError {
	/// No error: the last decoded instruction was valid
	None = 0,
	/// The instruction was invalid
	InvalidInstruction = 1,
	/// There weren't enough bytes left to decode the instruction
	NoMoreBytes = 2,
}
// Debug names; must be kept in sync with the `DecoderError` variant order.
#[rustfmt::skip]
static GEN_DEBUG_DECODER_ERROR: [&str; 3] = [
	"None",
	"InvalidInstruction",
	"NoMoreBytes",
];
impl fmt::Debug for DecoderError {
	#[inline]
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		// Variant values are contiguous 0..3, so this index is in bounds.
		write!(f, "{}", GEN_DEBUG_DECODER_ERROR[*self as usize])
	}
}
impl Default for DecoderError {
	#[must_use]
	#[inline]
	fn default() -> Self {
		DecoderError::None
	}
}
// Underlying integer type of `DecoderError` (used by transmute-based conversions).
#[allow(non_camel_case_types)]
#[allow(dead_code)]
pub(crate) type DecoderErrorUnderlyingType = u8;
#[rustfmt::skip]
impl DecoderError {
	/// Iterates over all `DecoderError` enum values
	#[inline]
	pub fn values() -> impl Iterator<Item = DecoderError> + DoubleEndedIterator + ExactSizeIterator + FusedIterator {
		// SAFETY: every x in 0..ENUM_COUNT is a valid variant value; the
		// variants are the contiguous values 0, 1, 2.
		(0..IcedConstants::DECODER_ERROR_ENUM_COUNT).map(|x| unsafe { mem::transmute::<u8, DecoderError>(x as u8) })
	}
}
// Verifies `DecoderError::values()` is an exact-size, double-ended iterator
// that yields every variant in discriminant order.
#[test]
#[rustfmt::skip]
fn test_decodererror_values() {
	let mut iter = DecoderError::values();
	assert_eq!(iter.size_hint(), (IcedConstants::DECODER_ERROR_ENUM_COUNT, Some(IcedConstants::DECODER_ERROR_ENUM_COUNT)));
	assert_eq!(iter.len(), IcedConstants::DECODER_ERROR_ENUM_COUNT);
	assert!(iter.next().is_some());
	assert_eq!(iter.size_hint(), (IcedConstants::DECODER_ERROR_ENUM_COUNT - 1, Some(IcedConstants::DECODER_ERROR_ENUM_COUNT - 1)));
	assert_eq!(iter.len(), IcedConstants::DECODER_ERROR_ENUM_COUNT - 1);

	// Values come out in discriminant order 0..N.
	let values: Vec<DecoderError> = DecoderError::values().collect();
	assert_eq!(values.len(), IcedConstants::DECODER_ERROR_ENUM_COUNT);
	for (i, value) in values.into_iter().enumerate() {
		assert_eq!(i, value as usize);
	}

	// Reversing the reversed iterator must match forward iteration.
	let values1: Vec<DecoderError> = DecoderError::values().collect();
	let mut values2: Vec<DecoderError> = DecoderError::values().rev().collect();
	values2.reverse();
	assert_eq!(values1, values2);
}
243#[rustfmt::skip]
244impl TryFrom<usize> for DecoderError {
245 type Error = IcedError;
246 #[inline]
247 fn try_from(value: usize) -> Result<Self, Self::Error> {
248 if value < IcedConstants::DECODER_ERROR_ENUM_COUNT {
249 Ok(unsafe { mem::transmute(value as u8) })
251 } else {
252 Err(IcedError::new("Invalid DecoderError value"))
253 }
254 }
255}
// Round-trips every variant through `TryFrom<usize>` and checks that
// out-of-range values are rejected.
#[test]
#[rustfmt::skip]
fn test_decodererror_try_from_usize() {
	for value in DecoderError::values() {
		let converted = <DecoderError as TryFrom<usize>>::try_from(value as usize).unwrap();
		assert_eq!(converted, value);
	}
	assert!(<DecoderError as TryFrom<usize>>::try_from(IcedConstants::DECODER_ERROR_ENUM_COUNT).is_err());
	// `usize::MAX` (assoc const) instead of deprecated `core::usize::MAX`,
	// matching the `u32::MAX` style used elsewhere in this file.
	assert!(<DecoderError as TryFrom<usize>>::try_from(usize::MAX).is_err());
}
// serde support: `DecoderError` is (de)serialized as its `u8` discriminant.
#[cfg(feature = "serde")]
#[rustfmt::skip]
#[allow(clippy::zero_sized_map_values)]
const _: () = {
	use core::marker::PhantomData;
	use serde::de;
	use serde::{Deserialize, Deserializer, Serialize, Serializer};
	type EnumType = DecoderError;
	impl Serialize for EnumType {
		#[inline]
		fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
		where
			S: Serializer,
		{
			// Serialize as the raw discriminant value.
			serializer.serialize_u8(*self as u8)
		}
	}
	impl<'de> Deserialize<'de> for EnumType {
		#[inline]
		fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
		where
			D: Deserializer<'de>,
		{
			struct Visitor<'de> {
				marker: PhantomData<EnumType>,
				lifetime: PhantomData<&'de ()>,
			}
			impl<'de> de::Visitor<'de> for Visitor<'de> {
				type Value = EnumType;
				#[inline]
				fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
					formatter.write_str("enum DecoderError")
				}
				#[inline]
				fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
				where
					E: de::Error,
				{
					// Accept any integer that maps to a valid variant; reject
					// the rest with an invalid-value error.
					if let Ok(v) = <usize as TryFrom<_>>::try_from(v) {
						if let Ok(value) = <EnumType as TryFrom<_>>::try_from(v) {
							return Ok(value);
						}
					}
					Err(de::Error::invalid_value(de::Unexpected::Unsigned(v), &"a valid DecoderError variant value"))
				}
			}
			deserializer.deserialize_u8(Visitor { marker: PhantomData::<EnumType>, lifetime: PhantomData })
		}
	}
};
/// Decoder option flags (bit mask), passed to the `Decoder` constructors.
/// `NO_INVALID_CHECK` disables instruction-validity checks (it zeroes the
/// decoder's `invalid_check_mask`); the remaining flags presumably enable
/// decoding of vendor-specific / deprecated / undocumented instruction sets
/// named by each constant — confirm against the published iced docs.
#[allow(missing_copy_implementations)]
#[allow(missing_debug_implementations)]
pub struct DecoderOptions;
impl DecoderOptions {
	/// No option is enabled
	pub const NONE: u32 = 0x0000_0000;
	/// Disable some checks for invalid encodings
	pub const NO_INVALID_CHECK: u32 = 0x0000_0001;
	pub const AMD: u32 = 0x0000_0002;
	pub const FORCE_RESERVED_NOP: u32 = 0x0000_0004;
	pub const UMOV: u32 = 0x0000_0008;
	pub const XBTS: u32 = 0x0000_0010;
	pub const CMPXCHG486A: u32 = 0x0000_0020;
	pub const OLD_FPU: u32 = 0x0000_0040;
	pub const PCOMMIT: u32 = 0x0000_0080;
	pub const LOADALL286: u32 = 0x0000_0100;
	pub const LOADALL386: u32 = 0x0000_0200;
	pub const CL1INVMB: u32 = 0x0000_0400;
	pub const MOV_TR: u32 = 0x0000_0800;
	pub const JMPE: u32 = 0x0000_1000;
	pub const NO_PAUSE: u32 = 0x0000_2000;
	pub const NO_WBNOINVD: u32 = 0x0000_4000;
	pub const UDBG: u32 = 0x0000_8000;
	pub const NO_MPFX_0FBC: u32 = 0x0001_0000;
	pub const NO_MPFX_0FBD: u32 = 0x0002_0000;
	pub const NO_LAHF_SAHF_64: u32 = 0x0004_0000;
	pub const MPX: u32 = 0x0008_0000;
	pub const CYRIX: u32 = 0x0010_0000;
	pub const CYRIX_SMINT_0F7E: u32 = 0x0020_0000;
	pub const CYRIX_DMI: u32 = 0x0040_0000;
	pub const ALTINST: u32 = 0x0080_0000;
	pub const KNC: u32 = 0x0100_0000;
}
// Flags passed to opcode handlers (bit mask), e.g. the xacquire/xrelease
// handling in `set_xacquire_xrelease_core`.
pub(crate) struct HandlerFlags;
#[allow(dead_code)]
impl HandlerFlags {
	pub(crate) const NONE: u32 = 0x0000_0000;
	pub(crate) const XACQUIRE: u32 = 0x0000_0001;
	pub(crate) const XRELEASE: u32 = 0x0000_0002;
	// xacquire/xrelease is valid even without a LOCK prefix
	pub(crate) const XACQUIRE_XRELEASE_NO_LOCK: u32 = 0x0000_0004;
	pub(crate) const LOCK: u32 = 0x0000_0008;
}
// Bit flags stored in `State::flags` during decoding. Besides single-bit
// flags, two multi-bit fields are packed in: the MVEX.SSS bits (3 bits at
// shift 16 == MVEX_SSS_SHIFT, overlapping HAS66 territory is avoided since
// both are never used for the same encoding) and, in debug builds, the
// encoding kind (3 bits at shift 29 == ENCODING_SHIFT).
pub(crate) struct StateFlags;
#[allow(dead_code)]
impl StateFlags {
	pub(crate) const IP_REL64: u32 = 0x0000_0001;
	pub(crate) const IP_REL32: u32 = 0x0000_0002;
	pub(crate) const HAS_REX: u32 = 0x0000_0008;
	pub(crate) const B: u32 = 0x0000_0010;
	pub(crate) const Z: u32 = 0x0000_0020;
	pub(crate) const IS_INVALID: u32 = 0x0000_0040;
	pub(crate) const W: u32 = 0x0000_0080;
	pub(crate) const NO_IMM: u32 = 0x0000_0100;
	pub(crate) const ADDR64: u32 = 0x0000_0200;
	pub(crate) const BRANCH_IMM8: u32 = 0x0000_0400;
	pub(crate) const XBEGIN: u32 = 0x0000_0800;
	pub(crate) const LOCK: u32 = 0x0000_1000;
	pub(crate) const ALLOW_LOCK: u32 = 0x0000_2000;
	pub(crate) const NO_MORE_BYTES: u32 = 0x0000_4000;
	pub(crate) const HAS66: u32 = 0x0000_8000;
	// 3-bit MVEX.SSS field: value mask and bit shift (16)
	pub(crate) const MVEX_SSS_MASK: u32 = 0x0000_0007;
	pub(crate) const MVEX_SSS_SHIFT: u32 = 0x0000_0010;
	pub(crate) const MVEX_EH: u32 = 0x0008_0000;
	// 3-bit encoding kind field: value mask and bit shift (29, debug builds only)
	pub(crate) const ENCODING_MASK: u32 = 0x0000_0007;
	pub(crate) const ENCODING_SHIFT: u32 = 0x0000_001D;
}
// Mandatory prefix seen while decoding: none, 66h, F3h or F2h. The numeric
// values (0..=3) are relied upon, e.g. `vex2` checks `PNP as u32 == 0`.
#[repr(u32)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum DecoderMandatoryPrefix {
	PNP = 0,
	P66 = 1,
	PF3 = 2,
	PF2 = 3,
}
impl Default for DecoderMandatoryPrefix {
	fn default() -> Self {
		DecoderMandatoryPrefix::PNP
	}
}
441
// Per-instruction decoder state, reset at the start of `decode_out_ptr()`.
#[derive(Default)]
#[allow(dead_code)]
struct State {
	// ModRM byte and its mod/reg/rm fields; `mem_index` = (mod << 3) | rm.
	// The `extra_*` values are the register-base additions contributed by
	// REX/VEX/EVEX bits (e.g. REX.R/X/B, see `decode_out_ptr`).
	modrm: u32, mod_: u32, reg: u32, rm: u32, extra_register_base: u32, extra_index_register_base: u32, extra_base_register_base: u32, extra_index_register_base_vsib: u32,
	// `StateFlags` bits + packed fields (see `StateFlags`)
	flags: u32, mandatory_prefix: DecoderMandatoryPrefix,

	// vvvv: VEX/EVEX register specifier; `vvvv_invalid_check` is presumably
	// the unmasked value used for invalid-encoding checks — confirm in the
	// prefix-parsing code.
	vvvv: u32, vvvv_invalid_check: u32, mem_index: u32, vector_length: VectorLength,
	// EVEX/MVEX opmask register (aaa bits)
	aaa: u32,
	extra_register_base_evex: u32, extra_base_register_base_evex: u32, address_size: OpSize,
	operand_size: OpSize,
	segment_prio: u8, dummy: u8,
	}
474
impl State {
	#[must_use]
	#[inline(always)]
	#[cfg(debug_assertions)]
	const fn encoding(&self) -> u32 {
		// Debug builds store the real encoding kind in the flags bits.
		(self.flags >> StateFlags::ENCODING_SHIFT) & StateFlags::ENCODING_MASK
	}

	#[must_use]
	#[inline(always)]
	#[cfg(not(debug_assertions))]
	#[allow(clippy::unused_self)]
	fn encoding(&self) -> u32 {
		// Release builds don't track the encoding; this is only consumed by
		// debug_assert!()s, which are compiled out, so a constant suffices.
		EncodingKind::Legacy as u32
	}

	// MVEX.SSS bits, extracted from the packed field in `flags`.
	#[must_use]
	#[inline]
	#[cfg(feature = "mvex")]
	fn sss(&self) -> u32 {
		(self.flags >> StateFlags::MVEX_SSS_SHIFT) & StateFlags::MVEX_SSS_MASK
	}
}
498
/// Decodes 16/32/64-bit x86 instructions.
///
/// Positions in the input are tracked as raw addresses inside `data`
/// (`data_ptr` etc.) rather than indices; `try_with_ip` validates that this
/// address arithmetic cannot overflow.
#[allow(missing_debug_implementations)]
#[allow(dead_code)]
pub struct Decoder<'a>
where
	Self: Send + Sync,
{
	// IP of the next instruction to decode
	ip: u64,

	// Current read position (address within `data`)
	data_ptr: usize,
	// Address one past the last byte of `data`
	data_ptr_end: usize,
	// End of the current instruction's readable window:
	// min(data_ptr + MAX_INSTRUCTION_LENGTH, data_ptr_end)
	max_data_ptr: usize,
	// Address of the first byte of the instruction being decoded
	instr_start_data_ptr: usize,

	// Opcode handler tables; each is 0x100 entries indexed by an opcode byte.
	// The `cfg`-mirrored `()` fields keep the struct shape stable when a
	// feature disables a table.
	handlers_map0: &'static [(OpCodeHandlerDecodeFn, &'static OpCodeHandler); 0x100],
	#[cfg(all(not(feature = "no_vex"), feature = "mvex"))]
	handlers_vex_map0: &'static [(OpCodeHandlerDecodeFn, &'static OpCodeHandler); 0x100],
	#[cfg(not(feature = "no_vex"))]
	handlers_vex: [&'static [(OpCodeHandlerDecodeFn, &'static OpCodeHandler); 0x100]; 3],
	#[cfg(not(feature = "no_evex"))]
	handlers_evex: [&'static [(OpCodeHandlerDecodeFn, &'static OpCodeHandler); 0x100]; 6],
	#[cfg(not(feature = "no_xop"))]
	handlers_xop: [&'static [(OpCodeHandlerDecodeFn, &'static OpCodeHandler); 0x100]; 3],
	#[cfg(feature = "mvex")]
	handlers_mvex: [&'static [(OpCodeHandlerDecodeFn, &'static OpCodeHandler); 0x100]; 3],

	#[cfg(not(all(not(feature = "no_vex"), feature = "mvex")))]
	handlers_vex_map0: (),
	#[cfg(feature = "no_vex")]
	handlers_vex: [(); 3],
	#[cfg(feature = "no_evex")]
	handlers_evex: [(); 6],
	#[cfg(feature = "no_xop")]
	handlers_xop: [(); 3],
	#[cfg(not(feature = "mvex"))]
	handlers_mvex: [(); 3],

	// Memory-operand readers indexed by `state.mem_index` (see
	// `read_op_mem_stmt_ret!`); absent in `__internal_flip` builds.
	#[cfg(not(feature = "__internal_flip"))]
	read_op_mem_fns: [fn(&mut Instruction, &mut Decoder<'a>) -> bool; 0x18],
	#[cfg(feature = "__internal_flip")]
	read_op_mem_fns: (),

	state: State,
	// `DecoderOptions` flags passed to the constructor
	options: u32,
	// u32::MAX unless NO_INVALID_CHECK was set (then 0); ANDed into checks
	invalid_check_mask: u32,
	// StateFlags::W in 64-bit mode, else 0
	is64b_mode_and_w: u32,
	// 0xF in 64-bit mode (16 GPRs), else 0x7
	reg15_mask: u32,
	// 0xE0 in 64-bit mode, else 0
	mask_e0: u32,
	// 0xF0 in 64-bit mode so that 40-4F are REX prefixes, else 0
	rex_mask: u32,
	bitness: u32,
	default_address_size: OpSize,
	default_operand_size: OpSize,
	segment_prio: u8, dummy: u8, default_inverted_address_size: OpSize,
	default_inverted_operand_size: OpSize,
	is64b_mode: bool,
	default_code_size: CodeSize,
	// Offset of the displacement within the current instruction
	displ_index: u8,

	// The caller-supplied bytes being decoded
	data: &'a [u8],
}
588
// Sets the instruction's memory base register from a raw `u32` register value.
// SAFETY of the transmute: the debug_assert bounds-checks the value first.
macro_rules! write_base_reg {
	($instruction:ident, $expr:expr) => {
		debug_assert!($expr < IcedConstants::REGISTER_ENUM_COUNT as u32);
		$instruction.set_memory_base(unsafe { mem::transmute($expr as RegisterUnderlyingType) });
	};
}

// Sets the instruction's memory index register from a raw `u32` register value.
macro_rules! write_index_reg {
	($instruction:ident, $expr:expr) => {
		debug_assert!($expr < IcedConstants::REGISTER_ENUM_COUNT as u32);
		$instruction.set_memory_index(unsafe { mem::transmute($expr as RegisterUnderlyingType) });
	};
}
602
603impl<'a> Decoder<'a> {
	// Largest single read done by a `read_uXX()` method (size of `u64`);
	// `try_with_ip`'s slice validation depends on this bound.
	const MAX_READ_SIZE: usize = 8;
605
	/// Creates a decoder with `ip` == 0, see also [`Decoder::try_new()`].
	///
	/// # Panics
	/// Panics if `bitness` is not one of 16, 32, 64 or if the data slice
	/// can't be decoded (see `try_with_ip`'s address-overflow check).
	#[must_use]
	#[inline]
	#[allow(clippy::unwrap_used)]
	pub fn new(bitness: u32, data: &'a [u8], options: u32) -> Decoder<'a> {
		Decoder::try_new(bitness, data, options).unwrap()
	}
675
	/// Creates a decoder with a custom instruction pointer, see also
	/// [`Decoder::try_with_ip()`].
	///
	/// # Panics
	/// Panics if `bitness` is not one of 16, 32, 64 or if the data slice
	/// can't be decoded.
	#[must_use]
	#[inline]
	#[allow(clippy::unwrap_used)]
	pub fn with_ip(bitness: u32, data: &'a [u8], ip: u64, options: u32) -> Decoder<'a> {
		Decoder::try_with_ip(bitness, data, ip, options).unwrap()
	}
743
	/// Creates a decoder with `ip` == 0, see [`Decoder::try_with_ip()`] for errors.
	#[inline]
	pub fn try_new(bitness: u32, data: &'a [u8], options: u32) -> Result<Decoder<'a>, IcedError> {
		Decoder::try_with_ip(bitness, data, 0, options)
	}
811
	/// Creates a decoder.
	///
	/// `ip` is the IP of the first instruction, `options` is a combination of
	/// [`DecoderOptions`] flags.
	///
	/// # Errors
	/// Fails if `bitness` is not one of 16, 32, 64, or if the data slice sits
	/// so close to the top of the address space that the decoder's pointer
	/// arithmetic could overflow.
	#[allow(clippy::missing_inline_in_public_items)]
	#[allow(clippy::let_unit_value)]
	#[allow(trivial_casts)]
	pub fn try_with_ip(bitness: u32, data: &'a [u8], ip: u64, options: u32) -> Result<Decoder<'a>, IcedError> {
		// Per-bitness defaults: code size, operand/address size, and the
		// "inverted" sizes selected when a 66h/67h prefix is present.
		let is64b_mode;
		let default_code_size;
		let default_operand_size;
		let default_inverted_operand_size;
		let default_address_size;
		let default_inverted_address_size;
		match bitness {
			64 => {
				is64b_mode = true;
				default_code_size = CodeSize::Code64;
				default_operand_size = OpSize::Size32;
				default_inverted_operand_size = OpSize::Size16;
				default_address_size = OpSize::Size64;
				default_inverted_address_size = OpSize::Size32;
			}
			32 => {
				is64b_mode = false;
				default_code_size = CodeSize::Code32;
				default_operand_size = OpSize::Size32;
				default_inverted_operand_size = OpSize::Size16;
				default_address_size = OpSize::Size32;
				default_inverted_address_size = OpSize::Size16;
			}
			16 => {
				is64b_mode = false;
				default_code_size = CodeSize::Code16;
				default_operand_size = OpSize::Size16;
				default_inverted_operand_size = OpSize::Size32;
				default_address_size = OpSize::Size16;
				default_inverted_address_size = OpSize::Size32;
			}
			_ => return Err(IcedError::new("Invalid bitness")),
		}
		// Reject slices whose end address (plus the decoder's max look-ahead)
		// would wrap around the address space: all later bounds checks
		// (`mk_read_xx!`, `max_data_ptr`) rely on non-wrapping arithmetic.
		let data_ptr_end = data.as_ptr() as usize + data.len();
		if data_ptr_end < data.as_ptr() as usize || {
			data_ptr_end.wrapping_add(cmp::max(IcedConstants::MAX_INSTRUCTION_LENGTH, Decoder::MAX_READ_SIZE)) < data.as_ptr() as usize
		} {
			return Err(IcedError::new("Invalid slice"));
		}

		let tables = &*TABLES;

		// Converts a generated handler slice into a fixed-size 0x100 array ref.
		#[allow(clippy::unwrap_used)]
		fn get_handlers(
			handlers: &'static [(OpCodeHandlerDecodeFn, &'static OpCodeHandler)],
		) -> &'static [(OpCodeHandlerDecodeFn, &'static OpCodeHandler); 0x100] {
			debug_assert_eq!(handlers.len(), 0x100);
			TryFrom::try_from(handlers).unwrap()
		}
		// Binds a handler table local, or `()` when the table's feature is
		// disabled (the `;` arms invert the cfg polarity for `mvex`).
		macro_rules! mk_handlers_local {
			($name:ident, $feature:literal) => {
				mk_handlers_local!($name, $name, $feature);
			};
			($name:ident, $field_name:ident, $feature:literal) => {
				#[cfg(not(feature = $feature))]
				let $name = get_handlers(&tables.$field_name);
				#[cfg(feature = $feature)]
				let $name = ();
			};
			($name:ident ; $feature:literal) => {
				mk_handlers_local!($name, $name ; $feature);
			};
			($name:ident, $field_name:ident ; $feature:literal) => {
				#[cfg(feature = $feature)]
				let $name = get_handlers(&tables.$field_name);
				#[cfg(not(feature = $feature))]
				let $name = ();
			};
		}
		#[cfg(all(not(feature = "no_vex"), feature = "mvex"))]
		let handlers_vex_map0 = get_handlers(&tables.handlers_vex_map0);
		#[cfg(not(all(not(feature = "no_vex"), feature = "mvex")))]
		let handlers_vex_map0 = ();
		mk_handlers_local!(handlers_vex_0f, "no_vex");
		mk_handlers_local!(handlers_vex_0f38, "no_vex");
		mk_handlers_local!(handlers_vex_0f3a, "no_vex");
		mk_handlers_local!(handlers_evex_0f, "no_evex");
		mk_handlers_local!(handlers_evex_0f38, "no_evex");
		mk_handlers_local!(handlers_evex_0f3a, "no_evex");
		// EVEX map 4 isn't supported here: route it to the invalid-op table.
		mk_handlers_local!(handlers_evex_map4, invalid_map, "no_evex");
		mk_handlers_local!(handlers_evex_map5, "no_evex");
		mk_handlers_local!(handlers_evex_map6, "no_evex");
		mk_handlers_local!(handlers_xop_map8, "no_xop");
		mk_handlers_local!(handlers_xop_map9, "no_xop");
		mk_handlers_local!(handlers_xop_map10, "no_xop");
		mk_handlers_local!(handlers_mvex_0f ; "mvex");
		mk_handlers_local!(handlers_mvex_0f38 ; "mvex");
		mk_handlers_local!(handlers_mvex_0f3a ; "mvex");

		// Memory-operand readers indexed by mem_index = (mod << 3) | rm;
		// rows are mod 0..=2, columns rm 0..=7 (rm 4 = SIB, mod 0 rm 5 = disp32).
		#[rustfmt::skip]
		#[cfg(not(feature = "__internal_flip"))]
		let read_op_mem_fns = [
			Decoder::read_op_mem_0,
			Decoder::read_op_mem_0,
			Decoder::read_op_mem_0,
			Decoder::read_op_mem_0,
			Decoder::read_op_mem_0_4,
			Decoder::read_op_mem_0_5,
			Decoder::read_op_mem_0,
			Decoder::read_op_mem_0,

			Decoder::read_op_mem_1,
			Decoder::read_op_mem_1,
			Decoder::read_op_mem_1,
			Decoder::read_op_mem_1,
			Decoder::read_op_mem_1_4,
			Decoder::read_op_mem_1,
			Decoder::read_op_mem_1,
			Decoder::read_op_mem_1,

			Decoder::read_op_mem_2,
			Decoder::read_op_mem_2,
			Decoder::read_op_mem_2,
			Decoder::read_op_mem_2,
			Decoder::read_op_mem_2_4,
			Decoder::read_op_mem_2,
			Decoder::read_op_mem_2,
			Decoder::read_op_mem_2,
		];
		#[cfg(feature = "__internal_flip")]
		let read_op_mem_fns = ();

		Ok(Decoder {
			ip,
			data_ptr: data.as_ptr() as usize,
			data_ptr_end,
			max_data_ptr: data.as_ptr() as usize,
			instr_start_data_ptr: data.as_ptr() as usize,
			handlers_map0: get_handlers(&tables.handlers_map0),
			handlers_vex_map0,
			handlers_vex: [handlers_vex_0f, handlers_vex_0f38, handlers_vex_0f3a],
			handlers_evex: [handlers_evex_0f, handlers_evex_0f38, handlers_evex_0f3a, handlers_evex_map4, handlers_evex_map5, handlers_evex_map6],
			handlers_xop: [handlers_xop_map8, handlers_xop_map9, handlers_xop_map10],
			handlers_mvex: [handlers_mvex_0f, handlers_mvex_0f38, handlers_mvex_0f3a],
			read_op_mem_fns,
			state: State::default(),
			options,
			invalid_check_mask: if (options & DecoderOptions::NO_INVALID_CHECK) == 0 { u32::MAX } else { 0 },
			is64b_mode_and_w: if is64b_mode { StateFlags::W } else { 0 },
			reg15_mask: if is64b_mode { 0xF } else { 0x7 },
			mask_e0: if is64b_mode { 0xE0 } else { 0 },
			rex_mask: if is64b_mode { 0xF0 } else { 0 },
			bitness,
			default_address_size,
			default_operand_size,
			segment_prio: 0,
			dummy: 0,
			default_inverted_address_size,
			default_inverted_operand_size,
			is64b_mode,
			default_code_size,
			displ_index: 0,
			data,
		})
	}
1036
	/// Gets the current `IP`/`EIP`/`RIP` value (the address of the next
	/// instruction to be decoded).
	#[must_use]
	#[inline]
	pub const fn ip(&self) -> u64 {
		self.ip
	}
1045
	/// Sets the current `IP`/`EIP`/`RIP` value. This does NOT change the
	/// data position; use [`Decoder::set_position()`] for that.
	#[inline]
	pub fn set_ip(&mut self, new_value: u64) {
		self.ip = new_value;
	}
1059
	/// Gets the bitness (16, 32 or 64) passed to the constructor.
	#[must_use]
	#[inline]
	pub const fn bitness(&self) -> u32 {
		self.bitness
	}
1066
	/// Gets the max valid data position (the length of the input slice);
	/// this value can be passed to [`Decoder::set_position()`].
	#[must_use]
	#[inline]
	pub const fn max_position(&self) -> usize {
		self.data.len()
	}
1076
	/// Gets the current data position as an index into the input slice
	/// (0 ..= [`Decoder::max_position()`]).
	#[must_use]
	#[inline]
	pub fn position(&self) -> usize {
		// `data_ptr` is an address inside `data`; subtract the base address.
		self.data_ptr - self.data.as_ptr() as usize
	}
1089
1090 #[inline]
1131 #[allow(clippy::missing_inline_in_public_items)]
1132 pub fn set_position(&mut self, new_pos: usize) -> Result<(), IcedError> {
1133 if new_pos > self.data.len() {
1134 Err(IcedError::new("Invalid position"))
1135 } else {
1136 self.data_ptr = self.data.as_ptr() as usize + new_pos;
1139 Ok(())
1140 }
1141 }
1142
	// Deprecated alias kept for backwards compatibility; forwards to `set_position()`.
	#[doc(hidden)]
	#[inline]
	pub fn try_set_position(&mut self, new_pos: usize) -> Result<(), IcedError> {
		self.set_position(new_pos)
	}
1148
	/// Returns `true` if there's at least one more byte to decode. It doesn't
	/// verify that the next instruction is valid, only that there is data left.
	#[must_use]
	#[inline]
	#[allow(clippy::missing_const_for_fn)]
	pub fn can_decode(&self) -> bool {
		self.data_ptr != self.data_ptr_end
	}
1192
	/// Returns an iterator that borrows this decoder and yields instructions
	/// until there's no more data to decode.
	#[inline]
	pub fn iter<'b>(&'b mut self) -> DecoderIter<'a, 'b> {
		DecoderIter { decoder: self }
	}
1229
	// Bounds-checked little-endian reads (see `mk_read_xx_fn_body!`): on
	// underflow they set IS_INVALID | NO_MORE_BYTES and return 0.
	#[must_use]
	#[inline(always)]
	fn read_u8(&mut self) -> usize {
		mk_read_xx_fn_body! {self, u8, u8::from_le, usize}
	}

	#[must_use]
	#[inline(always)]
	fn read_u16(&mut self) -> usize {
		mk_read_xx_fn_body! {self, u16, u16::from_le, usize}
	}

	#[must_use]
	#[inline(always)]
	fn read_u32(&mut self) -> usize {
		mk_read_xx_fn_body! {self, u32, u32::from_le, usize}
	}

	#[must_use]
	#[inline(always)]
	fn read_u64(&mut self) -> u64 {
		mk_read_xx_fn_body! {self, u64, u64::from_le, u64}
	}
1253
	/// Gets the last decoder error ([`DecoderError::None`] if the last
	/// instruction decoded successfully). `NoMoreBytes` takes priority over
	/// `InvalidInstruction` when both flags are set.
	#[must_use]
	#[inline]
	pub const fn last_error(&self) -> DecoderError {
		// NO_MORE_BYTES is only ever set together with IS_INVALID, so check it first.
		if (self.state.flags & StateFlags::NO_MORE_BYTES) != 0 {
			DecoderError::NoMoreBytes
		} else if (self.state.flags & StateFlags::IS_INVALID) != 0 {
			DecoderError::InvalidInstruction
		} else {
			DecoderError::None
		}
	}
1270
	/// Decodes and returns the next instruction, see also
	/// [`Decoder::decode_out()`] which avoids copying the result.
	/// Check [`Decoder::last_error()`] to see if the instruction was valid.
	#[must_use]
	#[inline]
	pub fn decode(&mut self) -> Instruction {
		let mut instruction = mem::MaybeUninit::uninit();
		// SAFETY: `decode_out_ptr()` fully initializes the instruction (its
		// first action is `ptr::write()`ing a default instruction).
		unsafe {
			self.decode_out_ptr(instruction.as_mut_ptr());
			instruction.assume_init()
		}
	}
1318
	/// Decodes the next instruction into `instruction`, overwriting it,
	/// see also [`Decoder::decode()`].
	#[inline]
	pub fn decode_out(&mut self, instruction: &mut Instruction) {
		// SAFETY: a `&mut Instruction` is a valid, writable pointer.
		unsafe {
			self.decode_out_ptr(instruction);
		}
	}
1367
	// Core decode loop entry: resets per-instruction state, decodes one
	// instruction via the handler tables, then patches up lengths, next-IP,
	// RIP-relative displacements and invalid-instruction state.
	//
	// SAFETY (caller): `instruction` must be valid for writes; it's fully
	// initialized before being read.
	#[allow(clippy::useless_let_if_seq)]
	unsafe fn decode_out_ptr(&mut self, instruction: *mut Instruction) {
		// Initialize the output before anything else so `assume_init` in
		// `decode()` is always sound, even on error paths.
		unsafe { ptr::write(instruction, Instruction::default()) };
		let instruction = unsafe { &mut *instruction };

		// Reset per-instruction state (cheaper than `State::default()`:
		// fields not listed here are always written before being read).
		self.state.extra_register_base = 0;
		self.state.extra_index_register_base = 0;
		self.state.extra_base_register_base = 0;
		self.state.extra_index_register_base_vsib = 0;
		self.state.flags = 0;
		self.state.mandatory_prefix = DecoderMandatoryPrefix::default();
		self.state.vvvv = 0;
		self.state.vvvv_invalid_check = 0;

		self.state.address_size = self.default_address_size;
		self.state.operand_size = self.default_operand_size;
		self.state.segment_prio = self.segment_prio;
		self.state.dummy = self.dummy;

		// Limit reads for this instruction to MAX_INSTRUCTION_LENGTH bytes
		// (or the end of the data, whichever comes first).
		let data_ptr = self.data_ptr;
		self.instr_start_data_ptr = data_ptr;
		self.max_data_ptr = cmp::min(data_ptr + IcedConstants::MAX_INSTRUCTION_LENGTH, self.data_ptr_end);

		let b = self.read_u8();
		let mut handler = self.handlers_map0[b];
		// `rex_mask` is 0xF0 only in 64-bit mode, so 40-4F are REX prefixes
		// there and plain opcodes elsewhere.
		if ((b as u32) & self.rex_mask) == 0x40 {
			debug_assert!(self.is64b_mode);
			handler = self.handlers_map0[self.read_u8()];
			let mut flags = self.state.flags | StateFlags::HAS_REX;
			if (b & 8) != 0 {
				// REX.W: 64-bit operand size
				flags |= StateFlags::W;
				self.state.operand_size = OpSize::Size64;
			}
			self.state.flags = flags;
			// REX.R/X/B extend reg, index and base/rm by 8 each.
			self.state.extra_register_base = (b as u32 & 4) << 1;
			self.state.extra_index_register_base = (b as u32 & 2) << 2;
			self.state.extra_base_register_base = (b as u32 & 1) << 3;
		}
		self.decode_table2(handler, instruction);

		debug_assert_eq!(data_ptr, self.instr_start_data_ptr);
		let instr_len = self.data_ptr as u32 - data_ptr as u32;
		debug_assert!(instr_len <= IcedConstants::MAX_INSTRUCTION_LENGTH as u32); instruction_internal::internal_set_len(instruction, instr_len);
		let orig_ip = self.ip;
		let ip = orig_ip.wrapping_add(instr_len as u64);
		self.ip = ip;
		instruction.set_next_ip(ip);
		instruction_internal::internal_set_code_size(instruction, self.default_code_size);

		let mut flags = self.state.flags;
		// Slow path: RIP-relative displacement fix-up and/or invalid instruction.
		if (flags & (StateFlags::IS_INVALID | StateFlags::LOCK | StateFlags::IP_REL64 | StateFlags::IP_REL32)) != 0 {
			// Speculatively resolve a RIP-relative displacement to an absolute address.
			let addr = ip.wrapping_add(instruction.memory_displacement64());
			instruction.set_memory_displacement64(addr);
			// Common case: valid IP_REL64 instruction with no LOCK issue — done.
			if (flags & (StateFlags::IP_REL64 | StateFlags::IS_INVALID | StateFlags::LOCK)) == StateFlags::IP_REL64 {
				return;
			}
			if (flags & StateFlags::IP_REL64) == 0 {
				// Not RIP-relative (64): undo the speculative addition above.
				instruction.set_memory_displacement64(addr.wrapping_sub(ip));
			}
			if (flags & StateFlags::IP_REL32) != 0 {
				// EIP-relative: resolve and truncate to 32 bits.
				let addr = ip.wrapping_add(instruction.memory_displacement64());
				instruction.set_memory_displacement64(addr as u32 as u64);
			}

			// Invalid if flagged by a handler, or if LOCK was used where it's
			// not allowed (unless NO_INVALID_CHECK zeroed the mask).
			if (flags & StateFlags::IS_INVALID) != 0
				|| (((flags & (StateFlags::LOCK | StateFlags::ALLOW_LOCK)) & self.invalid_check_mask) == StateFlags::LOCK)
			{
				// Replace with an invalid (default) instruction.
				*instruction = Instruction::default();
				const _: () = assert!(Code::INVALID as u32 == 0);
				if (flags & StateFlags::NO_MORE_BYTES) != 0 {
					debug_assert_eq!(data_ptr, self.instr_start_data_ptr);
					// If a full max-length window was available, the failure
					// wasn't really "no more bytes" — clear that flag.
					let max_len = self.data_ptr_end - data_ptr;
					if max_len >= IcedConstants::MAX_INSTRUCTION_LENGTH {
						flags &= !StateFlags::NO_MORE_BYTES;
					}
					// Consume the rest of the window so decoding makes progress.
					self.data_ptr = self.max_data_ptr;
				}

				self.state.flags = flags | StateFlags::IS_INVALID;

				// Recompute length/IP since `data_ptr` may have moved above.
				let instr_len = self.data_ptr as u32 - data_ptr as u32;
				instruction_internal::internal_set_len(instruction, instr_len);
				let ip = orig_ip.wrapping_add(instr_len as u64);
				self.ip = ip;
				instruction.set_next_ip(ip);
				instruction_internal::internal_set_code_size(instruction, self.default_code_size);
			}
		}
	}
1477
1478 #[inline(always)]
1479 fn reset_rex_prefix_state(&mut self) {
1480 self.state.flags &= !(StateFlags::HAS_REX | StateFlags::W);
1481 if (self.state.flags & StateFlags::HAS66) == 0 {
1482 self.state.operand_size = self.default_operand_size;
1483 } else {
1484 self.state.operand_size = self.default_inverted_operand_size;
1485 }
1486 self.state.extra_register_base = 0;
1487 self.state.extra_index_register_base = 0;
1488 self.state.extra_base_register_base = 0;
1489 }
1490
	// Reads the next opcode byte and dispatches through the primary (map0)
	// handler table; used after prefix bytes have been consumed.
	#[inline(always)]
	fn call_opcode_handlers_map0_table(&mut self, instruction: &mut Instruction) {
		let b = self.read_u8();
		self.decode_table2(self.handlers_map0[b], instruction);
	}
1496
	// IP of the *current* read position (start IP + bytes consumed so far in
	// this instruction), truncated to 32 bits.
	#[must_use]
	#[inline]
	fn current_ip32(&self) -> u32 {
		debug_assert!(self.instr_start_data_ptr <= self.data_ptr);
		debug_assert!(self.data_ptr - self.instr_start_data_ptr <= IcedConstants::MAX_INSTRUCTION_LENGTH);
		((self.data_ptr - self.instr_start_data_ptr) as u32).wrapping_add(self.ip as u32)
	}

	// Same as `current_ip32()` but full 64-bit.
	#[must_use]
	#[inline]
	fn current_ip64(&self) -> u64 {
		debug_assert!(self.instr_start_data_ptr <= self.data_ptr);
		debug_assert!(self.data_ptr - self.instr_start_data_ptr <= IcedConstants::MAX_INSTRUCTION_LENGTH);
		((self.data_ptr - self.instr_start_data_ptr) as u64).wrapping_add(self.ip)
	}
1512
	// Clears the repe/repne prefix bits when a legacy mandatory prefix (F2/F3)
	// was consumed as part of the opcode rather than as a string-op prefix.
	#[inline]
	fn clear_mandatory_prefix(&mut self, instruction: &mut Instruction) {
		debug_assert_eq!(self.state.encoding(), EncodingKind::Legacy as u32);
		instruction_internal::internal_clear_has_repe_repne_prefix(instruction);
	}

	// Converts a LOCK-prefixed instruction's F2/F3 prefix into
	// xacquire/xrelease; no-op if there's no LOCK prefix.
	#[inline(always)]
	fn set_xacquire_xrelease(&mut self, instruction: &mut Instruction, flags: u32) {
		if instruction.has_lock_prefix() {
			self.set_xacquire_xrelease_core(instruction, flags);
		}
	}
1525
	// Reinterprets a pending F2/F3 mandatory prefix as xacquire/xrelease.
	// Precondition: either the handler allows xacquire/xrelease without LOCK
	// (XACQUIRE_XRELEASE_NO_LOCK) or the instruction has a LOCK prefix.
	#[allow(clippy::nonminimal_bool)]
	fn set_xacquire_xrelease_core(&mut self, instruction: &mut Instruction, flags: u32) {
		debug_assert!(!((flags & HandlerFlags::XACQUIRE_XRELEASE_NO_LOCK) == 0 && !instruction.has_lock_prefix()));
		match self.state.mandatory_prefix {
			DecoderMandatoryPrefix::PF2 => {
				// F2 → xacquire
				self.clear_mandatory_prefix_f2(instruction);
				instruction.set_has_xacquire_prefix(true);
			}
			DecoderMandatoryPrefix::PF3 => {
				// F3 → xrelease
				self.clear_mandatory_prefix_f3(instruction);
				instruction.set_has_xrelease_prefix(true);
			}
			_ => {}
		}
	}
1541
	// Clears the repe bit after an F3 prefix was consumed as a mandatory prefix.
	#[inline]
	fn clear_mandatory_prefix_f3(&self, instruction: &mut Instruction) {
		debug_assert_eq!(self.state.encoding(), EncodingKind::Legacy as u32);
		debug_assert_eq!(self.state.mandatory_prefix, DecoderMandatoryPrefix::PF3);
		instruction.set_has_repe_prefix(false);
	}

	// Clears the repne bit after an F2 prefix was consumed as a mandatory prefix.
	#[inline]
	fn clear_mandatory_prefix_f2(&self, instruction: &mut Instruction) {
		debug_assert_eq!(self.state.encoding(), EncodingKind::Legacy as u32);
		debug_assert_eq!(self.state.mandatory_prefix, DecoderMandatoryPrefix::PF2);
		instruction.set_has_repne_prefix(false);
	}
1555
	// Marks the current instruction invalid; `decode_out_ptr()` turns this
	// into a default (INVALID) instruction at the end of decoding.
	#[inline]
	fn set_invalid_instruction(&mut self) {
		self.state.flags |= StateFlags::IS_INVALID;
	}
1560
	// Dispatches to an opcode handler, first reading the ModRM byte if the
	// handler needs one (same field extraction as `read_modrm()`).
	#[inline(always)]
	fn decode_table2(&mut self, (decode, handler): (OpCodeHandlerDecodeFn, &OpCodeHandler), instruction: &mut Instruction) {
		if handler.has_modrm {
			let m = self.read_u8() as u32;
			self.state.modrm = m;
			self.state.reg = (m >> 3) & 7;
			self.state.mod_ = m >> 6;
			self.state.rm = m & 7;
			// Combined table index used by the read_op_mem fn tables.
			self.state.mem_index = (self.state.mod_ << 3) | self.state.rm;
		}
		(decode)(handler, self, instruction);
	}
1573
1574 #[inline(always)]
1575 fn read_modrm(&mut self) {
1576 let m = self.read_u8() as u32;
1577 self.state.modrm = m;
1578 self.state.reg = (m >> 3) & 7;
1579 self.state.mod_ = m >> 6;
1580 self.state.rm = m & 7;
1581 self.state.mem_index = (self.state.mod_ << 3) | self.state.rm;
1582 }
1583
#[cfg(feature = "no_vex")]
// VEX support compiled out: any C5-prefixed instruction is invalid.
fn vex2(&mut self, _instruction: &mut Instruction) {
    self.set_invalid_instruction();
}
1588
#[cfg(not(feature = "no_vex"))]
// Decodes a 2-byte VEX (C5) prefixed instruction. The VEX payload byte is taken from
// `self.state.modrm` — presumably stored there by the dispatch loop before calling
// this method (TODO confirm against caller).
fn vex2(&mut self, instruction: &mut Instruction) {
    const _: () = assert!(DecoderMandatoryPrefix::PNP as u32 == 0);
    // A REX prefix or an earlier mandatory prefix before VEX is invalid (only
    // enforced when invalid_check_mask is all-ones, i.e. no-invalid-check off).
    if (((self.state.flags & StateFlags::HAS_REX) | (self.state.mandatory_prefix as u32)) & self.invalid_check_mask) != 0 {
        self.set_invalid_instruction();
    }
    self.state.flags &= !StateFlags::W;
    self.state.extra_index_register_base = 0;
    self.state.extra_base_register_base = 0;

    if cfg!(debug_assertions) {
        self.state.flags |= (EncodingKind::VEX as u32) << StateFlags::ENCODING_SHIFT;
    }

    // Next byte is the opcode; VEX2 always uses table 0F (map index 0).
    let b = self.read_u8();
    let (decode, handler) = self.handlers_vex[0][b];

    // Re-read the VEX payload byte: R vvvv L pp (bits 7, 6:3, 2, 1:0).
    let mut b = self.state.modrm;

    const _: () = assert!(VectorLength::L128 as u32 == 0);
    const _: () = assert!(VectorLength::L256 as u32 == 1);
    // SAFETY-relevant: value is masked to 0..=1, matching the asserted discriminants.
    self.state.vector_length = unsafe { mem::transmute((b >> 2) & 1) };

    const _: () = assert!(DecoderMandatoryPrefix::PNP as u32 == 0);
    const _: () = assert!(DecoderMandatoryPrefix::P66 as u32 == 1);
    const _: () = assert!(DecoderMandatoryPrefix::PF3 as u32 == 2);
    const _: () = assert!(DecoderMandatoryPrefix::PF2 as u32 == 3);
    // Masked to 0..=3, matching the asserted discriminants.
    self.state.mandatory_prefix = unsafe { mem::transmute(b & 3) };

    // R and vvvv are stored inverted in the encoding; invert once, then extract.
    b = !b;
    self.state.extra_register_base = (b >> 4) & 8;

    b = (b >> 3) & 0x0F;
    self.state.vvvv = b;
    self.state.vvvv_invalid_check = b;

    self.decode_table2((decode, handler), instruction);
}
1631
#[cfg(feature = "no_vex")]
// VEX support compiled out: any C4-prefixed instruction is invalid.
fn vex3(&mut self, _instruction: &mut Instruction) {
    self.set_invalid_instruction();
}
1636
#[cfg(not(feature = "no_vex"))]
// Decodes a 3-byte VEX (C4) prefixed instruction. The first payload byte is taken from
// `self.state.modrm` (presumably stored by the dispatch loop — TODO confirm); the second
// payload byte and the opcode are read here as one u16.
fn vex3(&mut self, instruction: &mut Instruction) {
    const _: () = assert!(DecoderMandatoryPrefix::PNP as u32 == 0);
    // REX or an earlier mandatory prefix before VEX is invalid (when checks enabled).
    if (((self.state.flags & StateFlags::HAS_REX) | (self.state.mandatory_prefix as u32)) & self.invalid_check_mask) != 0 {
        self.set_invalid_instruction();
    }
    self.state.flags &= !StateFlags::W;

    if cfg!(debug_assertions) {
        self.state.flags |= (EncodingKind::VEX as u32) << StateFlags::ENCODING_SHIFT;
    }

    // Low byte = VEX payload byte 2 (W vvvv L pp), high byte = opcode.
    let b2 = self.read_u16() as u32;

    const _: () = assert!(StateFlags::W == 0x80);
    // W bit is stored directly since its flag position matches bit 7.
    self.state.flags |= b2 & 0x80;

    const _: () = assert!(VectorLength::L128 as u32 == 0);
    const _: () = assert!(VectorLength::L256 as u32 == 1);
    // Masked to 0..=1, matching the asserted discriminants.
    self.state.vector_length = unsafe { mem::transmute((b2 >> 2) & 1) };

    const _: () = assert!(DecoderMandatoryPrefix::PNP as u32 == 0);
    const _: () = assert!(DecoderMandatoryPrefix::P66 as u32 == 1);
    const _: () = assert!(DecoderMandatoryPrefix::PF3 as u32 == 2);
    const _: () = assert!(DecoderMandatoryPrefix::PF2 as u32 == 3);
    // Masked to 0..=3, matching the asserted discriminants.
    self.state.mandatory_prefix = unsafe { mem::transmute(b2 & 3) };

    // vvvv is inverted in the encoding; reg15_mask limits it to 7 in 16/32-bit mode.
    let b = (!b2 >> 3) & 0x0F;
    self.state.vvvv_invalid_check = b;
    self.state.vvvv = b & self.reg15_mask;
    // Payload byte 1: ~R ~X ~B and the 5-bit map select; mask_e0 zeroes R/X/B outside 64-bit mode.
    let b1 = self.state.modrm;
    let b1x = !b1 & self.mask_e0;
    self.state.extra_register_base = (b1x >> 4) & 8;
    self.state.extra_index_register_base = (b1x >> 3) & 8;
    self.state.extra_base_register_base = (b1x >> 2) & 8;

    // Map select 1..N indexes handlers_vex; map 0 is only valid as an MVEX table.
    if let Some(&table) = self.handlers_vex.get(((b1 & 0x1F) as usize).wrapping_sub(1)) {
        self.decode_table2(table[(b2 >> 8) as usize], instruction);
    } else {
        #[cfg(feature = "mvex")]
        if (b1 & 0x1F) == 0 {
            self.decode_table2(self.handlers_vex_map0[(b2 >> 8) as usize], instruction);
            return;
        }
        self.set_invalid_instruction();
    }
}
1687
#[cfg(feature = "no_xop")]
// XOP support compiled out: any 8F-prefixed XOP instruction is invalid.
fn xop(&mut self, _instruction: &mut Instruction) {
    self.set_invalid_instruction();
}
1692
#[cfg(not(feature = "no_xop"))]
// Decodes an AMD XOP (8F) prefixed instruction. Same 3-byte layout as VEX3: payload
// byte 1 is taken from `self.state.modrm`, payload byte 2 + opcode are read as a u16.
fn xop(&mut self, instruction: &mut Instruction) {
    const _: () = assert!(DecoderMandatoryPrefix::PNP as u32 == 0);
    // REX or an earlier mandatory prefix before XOP is invalid (when checks enabled).
    if (((self.state.flags & StateFlags::HAS_REX) | (self.state.mandatory_prefix as u32)) & self.invalid_check_mask) != 0 {
        self.set_invalid_instruction();
    }
    self.state.flags &= !StateFlags::W;

    if cfg!(debug_assertions) {
        self.state.flags |= (EncodingKind::XOP as u32) << StateFlags::ENCODING_SHIFT;
    }

    // Low byte = payload byte 2 (W vvvv L pp), high byte = opcode.
    let b2 = self.read_u16() as u32;

    const _: () = assert!(StateFlags::W == 0x80);
    self.state.flags |= b2 & 0x80;

    const _: () = assert!(VectorLength::L128 as u32 == 0);
    const _: () = assert!(VectorLength::L256 as u32 == 1);
    // Masked to 0..=1, matching the asserted discriminants.
    self.state.vector_length = unsafe { mem::transmute((b2 >> 2) & 1) };

    const _: () = assert!(DecoderMandatoryPrefix::PNP as u32 == 0);
    const _: () = assert!(DecoderMandatoryPrefix::P66 as u32 == 1);
    const _: () = assert!(DecoderMandatoryPrefix::PF3 as u32 == 2);
    const _: () = assert!(DecoderMandatoryPrefix::PF2 as u32 == 3);
    // Masked to 0..=3, matching the asserted discriminants.
    self.state.mandatory_prefix = unsafe { mem::transmute(b2 & 3) };

    // vvvv is inverted; reg15_mask limits it to 7 outside 64-bit mode.
    let b = (!b2 >> 3) & 0x0F;
    self.state.vvvv_invalid_check = b;
    self.state.vvvv = b & self.reg15_mask;
    let b1 = self.state.modrm;
    let b1x = !b1 & self.mask_e0;
    self.state.extra_register_base = (b1x >> 4) & 8;
    self.state.extra_index_register_base = (b1x >> 3) & 8;
    self.state.extra_base_register_base = (b1x >> 2) & 8;

    // XOP map select starts at 8; maps 8..8+N index handlers_xop.
    if let Some(&table) = self.handlers_xop.get(((b1 & 0x1F) as usize).wrapping_sub(8)) {
        self.decode_table2(table[(b2 >> 8) as usize], instruction);
    } else {
        self.set_invalid_instruction();
    }
}
1738
#[cfg(not(any(not(feature = "no_evex"), feature = "mvex")))]
// Both EVEX and MVEX support compiled out: any 62-prefixed instruction is invalid.
fn evex_mvex(&mut self, _instruction: &mut Instruction) {
    self.set_invalid_instruction();
}
1743
#[cfg(any(not(feature = "no_evex"), feature = "mvex"))]
// Decodes a 62-prefixed instruction (EVEX or MVEX). Payload byte 1 (P0) is taken from
// `self.state.modrm` (presumably stored by the dispatch loop — TODO confirm); bytes
// P1, P2, opcode and the real ModRM are read here as one u32 (little-endian).
fn evex_mvex(&mut self, instruction: &mut Instruction) {
    const _: () = assert!(DecoderMandatoryPrefix::PNP as u32 == 0);
    // REX or an earlier mandatory prefix before EVEX/MVEX is invalid (when checks enabled).
    if (((self.state.flags & StateFlags::HAS_REX) | (self.state.mandatory_prefix as u32)) & self.invalid_check_mask) != 0 {
        self.set_invalid_instruction();
    }
    self.state.flags &= !StateFlags::W;

    // d = P1 | P2<<8 | opcode<<16 | modrm<<24.
    let d = self.read_u32() as u32;
    // P1 bit 2 distinguishes EVEX (1) from MVEX/KNC (0).
    if (d & 4) != 0 {
        #[cfg(feature = "no_evex")]
        self.set_invalid_instruction();
        #[cfg(not(feature = "no_evex"))]
        {
            let p0 = self.state.modrm;
            // P0 bit 3 must be 0 in EVEX.
            if (p0 & 8) == 0 {
                if cfg!(debug_assertions) {
                    self.state.flags |= (EncodingKind::EVEX as u32) << StateFlags::ENCODING_SHIFT;
                }

                const _: () = assert!(DecoderMandatoryPrefix::PNP as u32 == 0);
                const _: () = assert!(DecoderMandatoryPrefix::P66 as u32 == 1);
                const _: () = assert!(DecoderMandatoryPrefix::PF3 as u32 == 2);
                const _: () = assert!(DecoderMandatoryPrefix::PF2 as u32 == 3);
                // Masked to 0..=3, matching the asserted discriminants.
                self.state.mandatory_prefix = unsafe { mem::transmute(d & 3) };

                const _: () = assert!(StateFlags::W == 0x80);
                self.state.flags |= d & 0x80;

                // P2: aaa (opmask), V', b (broadcast), L'L, z.
                let p2 = d >> 8;
                let aaa = p2 & 7;
                self.state.aaa = aaa;
                instruction_internal::internal_set_op_mask(instruction, aaa);
                if (p2 & 0x80) != 0 {
                    // z=1 with no opmask (aaa==0) is invalid; xor trick only fires
                    // when invalid_check_mask is all-ones.
                    if (aaa ^ self.invalid_check_mask) == u32::MAX {
                        self.set_invalid_instruction();
                    }
                    self.state.flags |= StateFlags::Z;
                    instruction.set_zeroing_masking(true);
                }

                const _: () = assert!(StateFlags::B == 0x10);
                self.state.flags |= p2 & 0x10;

                const _: () = assert!(VectorLength::L128 as u32 == 0);
                const _: () = assert!(VectorLength::L256 as u32 == 1);
                const _: () = assert!(VectorLength::L512 as u32 == 2);
                const _: () = assert!(VectorLength::Unknown as u32 == 3);
                // Masked to 0..=3, matching the asserted discriminants.
                self.state.vector_length = unsafe { mem::transmute((p2 >> 5) & 3) };

                // vvvv is inverted in the encoding.
                let p1 = (!d >> 3) & 0x0F;
                if self.is64b_mode {
                    // V' extends both vvvv and the VSIB index register.
                    let mut tmp = (!p2 & 8) << 1;
                    self.state.extra_index_register_base_vsib = tmp;
                    tmp += p1;
                    self.state.vvvv = tmp;
                    self.state.vvvv_invalid_check = tmp;
                    // P0: ~R ~X ~B R' (all inverted); extract each extension bit.
                    let mut p0x = !p0;
                    self.state.extra_register_base = (p0x >> 4) & 8;
                    self.state.extra_index_register_base = (p0x >> 3) & 8;
                    self.state.extra_register_base_evex = p0x & 0x10;
                    p0x >>= 2;
                    self.state.extra_base_register_base_evex = p0x & 0x18;
                    self.state.extra_base_register_base = p0x & 8;
                } else {
                    // 16/32-bit mode: only vvvv bits 0..2 are usable; V'==0 is invalid
                    // (folded straight into IS_INVALID via the shift, see assert).
                    self.state.vvvv_invalid_check = p1;
                    self.state.vvvv = p1 & 0x07;
                    const _: () = assert!(StateFlags::IS_INVALID == 0x40);
                    self.state.flags |= (!p2 & 8) << 3;
                }

                // EVEX map select (P0 bits 0..2), 1-based index into handlers_evex.
                if let Some(&table) = self.handlers_evex.get(((p0 & 7) as usize).wrapping_sub(1)) {
                    let (decode, handler) = table[(d >> 16) as u8 as usize];
                    debug_assert!(handler.has_modrm);
                    // The ModRM byte was already fetched as part of `d`.
                    let m = d >> 24;
                    self.state.modrm = m;
                    self.state.reg = (m >> 3) & 7;
                    self.state.mod_ = m >> 6;
                    self.state.rm = m & 7;
                    self.state.mem_index = (self.state.mod_ << 3) | self.state.rm;
                    const _: () = assert!(StateFlags::B > 3);
                    debug_assert!(self.state.vector_length as u32 <= 3);
                    // b=0 with L'L=3 (reserved vector length) is invalid.
                    if (((self.state.flags & StateFlags::B) | (self.state.vector_length as u32)) & self.invalid_check_mask) == 3 {
                        self.set_invalid_instruction();
                    }
                    (decode)(handler, self, instruction);
                } else {
                    self.set_invalid_instruction();
                }
            } else {
                self.set_invalid_instruction();
            }
        }
    } else {
        #[cfg(not(feature = "mvex"))]
        self.set_invalid_instruction();
        #[cfg(feature = "mvex")]
        {
            // MVEX (KNC) requires the KNC decoder option and 64-bit mode.
            if (self.options & DecoderOptions::KNC) == 0 || !self.is64b_mode {
                self.set_invalid_instruction();
            } else {
                let p0 = self.state.modrm;
                if cfg!(debug_assertions) {
                    self.state.flags |= (EncodingKind::MVEX as u32) << StateFlags::ENCODING_SHIFT;
                }

                const _: () = assert!(DecoderMandatoryPrefix::PNP as u32 == 0);
                const _: () = assert!(DecoderMandatoryPrefix::P66 as u32 == 1);
                const _: () = assert!(DecoderMandatoryPrefix::PF3 as u32 == 2);
                const _: () = assert!(DecoderMandatoryPrefix::PF2 as u32 == 3);
                // Masked to 0..=3, matching the asserted discriminants.
                self.state.mandatory_prefix = unsafe { mem::transmute(d & 3) };

                const _: () = assert!(StateFlags::W == 0x80);
                self.state.flags |= d & 0x80;

                let p2 = d >> 8;
                let aaa = p2 & 7;
                self.state.aaa = aaa;
                instruction_internal::internal_set_op_mask(instruction, aaa);

                // Pack MVEX SSS + EH (P2 bits 4..7) into the state flags at MVEX_SSS_SHIFT.
                const _: () = assert!(StateFlags::MVEX_SSS_SHIFT == 16);
                const _: () = assert!(StateFlags::MVEX_SSS_MASK == 7);
                const _: () = assert!(StateFlags::MVEX_EH == 1 << (StateFlags::MVEX_SSS_SHIFT + 3));
                self.state.flags |= (p2 & 0xF0) << (StateFlags::MVEX_SSS_SHIFT - 4);

                // Same inverted vvvv/V'/R/X/B extraction as the 64-bit EVEX path.
                let p1 = (!d >> 3) & 0x0F;
                let mut tmp = (!p2 & 8) << 1;
                self.state.extra_index_register_base_vsib = tmp;
                tmp += p1;
                self.state.vvvv = tmp;
                self.state.vvvv_invalid_check = tmp;
                let mut p0x = !p0;
                self.state.extra_register_base = (p0x >> 4) & 8;
                self.state.extra_index_register_base = (p0x >> 3) & 8;
                self.state.extra_register_base_evex = p0x & 0x10;
                p0x >>= 2;
                self.state.extra_base_register_base_evex = p0x & 0x18;
                self.state.extra_base_register_base = p0x & 8;

                // MVEX map select (P0 bits 0..3), 1-based index into handlers_mvex.
                if let Some(&table) = self.handlers_mvex.get(((p0 & 0xF) as usize).wrapping_sub(1)) {
                    let (decode, handler) = table[(d >> 16) as u8 as usize];
                    debug_assert!(handler.has_modrm);
                    // The ModRM byte was already fetched as part of `d`.
                    let m = d >> 24;
                    self.state.modrm = m;
                    self.state.reg = (m >> 3) & 7;
                    self.state.mod_ = m >> 6;
                    self.state.rm = m & 7;
                    self.state.mem_index = (self.state.mod_ << 3) | self.state.rm;
                    (decode)(handler, self, instruction);
                } else {
                    self.set_invalid_instruction();
                }
            }
        }
    }
}
1906
1907 #[must_use]
1908 #[inline(always)]
1909 fn read_op_seg_reg(&mut self) -> u32 {
1910 let reg = self.state.reg;
1911 const _: () = assert!(Register::ES as u32 + 1 == Register::CS as u32);
1912 const _: () = assert!(Register::ES as u32 + 2 == Register::SS as u32);
1913 const _: () = assert!(Register::ES as u32 + 3 == Register::DS as u32);
1914 const _: () = assert!(Register::ES as u32 + 4 == Register::FS as u32);
1915 const _: () = assert!(Register::ES as u32 + 5 == Register::GS as u32);
1916 if reg < 6 {
1917 Register::ES as u32 + reg
1918 } else {
1919 self.set_invalid_instruction();
1920 Register::None as u32
1921 }
1922 }
1923
1924 #[inline(always)]
1925 #[cfg(any(not(feature = "no_vex"), not(feature = "no_xop")))]
1926 fn read_op_mem_sib(&mut self, instruction: &mut Instruction) {
1927 debug_assert!(self.state.encoding() != EncodingKind::EVEX as u32 && self.state.encoding() != EncodingKind::MVEX as u32);
1928 let is_valid = if self.state.address_size != OpSize::Size16 {
1929 self.read_op_mem_32_or_64(instruction)
1930 } else {
1931 self.read_op_mem_16(instruction, TupleType::N1);
1932 false
1933 };
1934 if self.invalid_check_mask != 0 && !is_valid {
1935 self.set_invalid_instruction();
1936 }
1937 }
1938
1939 #[inline(always)]
1943 fn read_op_mem_mpx(&mut self, instruction: &mut Instruction) {
1944 debug_assert!(self.state.encoding() != EncodingKind::EVEX as u32 && self.state.encoding() != EncodingKind::MVEX as u32);
1945 if self.is64b_mode {
1946 self.state.address_size = OpSize::Size64;
1947 let _ = self.read_op_mem_32_or_64(instruction);
1948 } else if self.state.address_size != OpSize::Size16 {
1949 let _ = self.read_op_mem_32_or_64(instruction);
1950 } else {
1951 self.read_op_mem_16(instruction, TupleType::N1);
1952 if self.invalid_check_mask != 0 {
1953 self.set_invalid_instruction();
1954 }
1955 }
1956 }
1957
1958 #[inline(always)]
1959 #[cfg(any(not(feature = "no_evex"), feature = "mvex"))]
1960 fn read_op_mem_tuple_type(&mut self, instruction: &mut Instruction, tuple_type: TupleType) {
1961 debug_assert!(self.state.encoding() == EncodingKind::EVEX as u32 || self.state.encoding() == EncodingKind::MVEX as u32);
1962 if self.state.address_size != OpSize::Size16 {
1963 let index_reg = if self.state.address_size == OpSize::Size64 { Register::RAX } else { Register::EAX };
1964 let _ = decoder_read_op_mem_32_or_64_vsib(self, instruction, index_reg, tuple_type, false);
1965 } else {
1966 self.read_op_mem_16(instruction, tuple_type);
1967 }
1968 }
1969
1970 #[inline(always)]
1971 #[cfg(any(not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
1972 fn read_op_mem_vsib(&mut self, instruction: &mut Instruction, vsib_index: Register, tuple_type: TupleType) {
1973 let is_valid = if self.state.address_size != OpSize::Size16 {
1974 decoder_read_op_mem_32_or_64_vsib(self, instruction, vsib_index, tuple_type, true)
1975 } else {
1976 self.read_op_mem_16(instruction, tuple_type);
1977 false
1978 };
1979 if self.invalid_check_mask != 0 && !is_valid {
1980 self.set_invalid_instruction();
1981 }
1982 }
1983
#[inline(never)]
#[cold]
// Reads a 16-bit-addressing memory operand (the classic [BX+SI]-style forms).
// Cold path: 16-bit addressing is rare in practice.
fn read_op_mem_16(&mut self, instruction: &mut Instruction, tuple_type: TupleType) {
    debug_assert!(self.state.address_size == OpSize::Size16);
    debug_assert!(self.state.rm <= 7);
    // SAFETY: rm <= 7 (asserted) and MEM_REGS_16 has 8 entries — presumably; table
    // declared elsewhere, confirm its length.
    let (mut base_reg, index_reg) = unsafe { *MEM_REGS_16.get_unchecked(self.state.rm as usize) };
    match self.state.mod_ {
        0 => {
            // mod=00, rm=6 is the special disp16-only form (no base/index).
            if self.state.rm == 6 {
                instruction_internal::internal_set_memory_displ_size(instruction, 2);
                self.displ_index = self.data_ptr as u8;
                instruction.set_memory_displacement64(self.read_u16() as u64);
                base_reg = Register::None;
                debug_assert_eq!(index_reg, Register::None);
            }
        }
        1 => {
            // mod=01: disp8, sign-extended, scaled by the tuple type (EVEX disp8*N).
            instruction_internal::internal_set_memory_displ_size(instruction, 1);
            self.displ_index = self.data_ptr as u8;
            let b = self.read_u8();
            instruction.set_memory_displacement64(self.disp8n(tuple_type).wrapping_mul(b as i8 as u32) as u16 as u64);
        }
        _ => {
            // mod=10: disp16.
            debug_assert_eq!(self.state.mod_, 2);
            instruction_internal::internal_set_memory_displ_size(instruction, 2);
            self.displ_index = self.data_ptr as u8;
            instruction.set_memory_displacement64(self.read_u16() as u64);
        }
    }
    instruction.set_memory_base(base_reg);
    instruction.set_memory_index(index_reg);
}
2019
#[must_use]
#[cfg(feature = "__internal_flip")]
// Reads a 32/64-bit memory operand via the generic VSIB reader (no VSIB, N1 tuple).
fn read_op_mem_32_or_64(&mut self, instruction: &mut Instruction) -> bool {
    let base_reg = match self.state.address_size {
        OpSize::Size64 => Register::RAX,
        _ => Register::EAX,
    };
    decoder_read_op_mem_32_or_64_vsib(self, instruction, base_reg, TupleType::N1, false)
}
2026
#[must_use]
#[cfg(not(feature = "__internal_flip"))]
#[inline(always)]
// Reads a 32/64-bit memory operand by dispatching on (mod,rm) through the
// read_op_mem_fns table; returns the handler's validity result.
fn read_op_mem_32_or_64(&mut self, instruction: &mut Instruction) -> bool {
    read_op_mem_stmt_ret!(self, instruction, {})
}
2035
#[cfg(not(feature = "__internal_flip"))]
#[allow(clippy::never_loop)]
// mod=01, rm!=4: [reg]+disp8. The `loop` exists only so read_u8_break! can `break`
// out on missing bytes and fall through to the invalid/no-more-bytes path below.
fn read_op_mem_1(instruction: &mut Instruction, this: &mut Decoder<'_>) -> bool {
    loop {
        instruction_internal::internal_set_memory_displ_size(instruction, 1);
        this.displ_index = this.data_ptr as u8;
        let displ = read_u8_break!(this) as i8 as u64;
        if this.state.address_size == OpSize::Size64 {
            instruction.set_memory_displacement64(displ);
            write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + Register::RAX as u32);
        } else {
            // 32-bit addressing: truncate the sign-extended disp to 32 bits.
            instruction.set_memory_displacement64(displ as u32 as u64);
            write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + Register::EAX as u32);
        }

        return false;
    }
    // Only reached if read_u8_break! broke out: out of input bytes.
    this.state.flags |= StateFlags::IS_INVALID | StateFlags::NO_MORE_BYTES;
    false
}
2056
#[cfg(not(feature = "__internal_flip"))]
#[allow(clippy::never_loop)]
// mod=01, rm=4: SIB byte + disp8. SIB and disp8 are fetched together as one u16
// (low byte = SIB, high byte = disp8). The `loop` exists only so read_u16_break!
// can `break` to the no-more-bytes path below.
fn read_op_mem_1_4(instruction: &mut Instruction, this: &mut Decoder<'_>) -> bool {
    loop {
        instruction_internal::internal_set_memory_displ_size(instruction, 1);

        // disp8 is the second of the two fetched bytes.
        this.displ_index = this.data_ptr.wrapping_add(1) as u8;
        let w = read_u16_break!(this) as u32;

        const _: () = assert!(InstrScale::Scale1 as u32 == 0);
        const _: () = assert!(InstrScale::Scale2 as u32 == 1);
        const _: () = assert!(InstrScale::Scale4 as u32 == 2);
        const _: () = assert!(InstrScale::Scale8 as u32 == 3);
        // Masked to 0..=3, matching the asserted scale discriminants.
        instruction_internal::internal_set_memory_index_scale(instruction, unsafe { mem::transmute(((w >> 6) & 3) as InstrScaleUnderlyingType) });
        let index = ((w >> 3) & 7) + this.state.extra_index_register_base;
        if this.state.address_size == OpSize::Size64 {
            const BASE_REG: Register = Register::RAX;
            // index 100b (RSP) means "no index register".
            if index != 4 {
                write_index_reg!(instruction, index + BASE_REG as u32);
            }

            write_base_reg!(instruction, (w & 7) + this.state.extra_base_register_base + BASE_REG as u32);
            let displ = (w >> 8) as i8 as u64;
            instruction.set_memory_displacement64(displ);
        } else {
            const BASE_REG: Register = Register::EAX;
            if index != 4 {
                write_index_reg!(instruction, index + BASE_REG as u32);
            }

            write_base_reg!(instruction, (w & 7) + this.state.extra_base_register_base + BASE_REG as u32);
            // Sign-extend disp8 to 32 bits, then zero-extend to 64.
            let displ = (w >> 8) as i8 as u32 as u64;
            instruction.set_memory_displacement64(displ);
        }

        return true;
    }
    // Only reached if read_u16_break! broke out: out of input bytes.
    this.state.flags |= StateFlags::IS_INVALID | StateFlags::NO_MORE_BYTES;
    true
}
2098
2099 #[cfg(not(feature = "__internal_flip"))]
2100 fn read_op_mem_0(instruction: &mut Instruction, this: &mut Decoder<'_>) -> bool {
2101 if this.state.address_size == OpSize::Size64 {
2102 write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + Register::RAX as u32);
2103 } else {
2104 write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + Register::EAX as u32);
2105 };
2106
2107 false
2108 }
2109
#[cfg(not(feature = "__internal_flip"))]
#[allow(clippy::never_loop)]
// mod=00, rm=5: disp32 form — RIP/EIP-relative in 64-bit mode, absolute disp32
// otherwise. The `loop` exists only so read_u32_break! can `break` to the
// no-more-bytes path below.
fn read_op_mem_0_5(instruction: &mut Instruction, this: &mut Decoder<'_>) -> bool {
    loop {
        this.displ_index = this.data_ptr as u8;
        let displ = read_u32_break!(this) as i32 as u64;
        if this.state.address_size == OpSize::Size64 {
            debug_assert!(this.is64b_mode);
            // RIP-relative: displ is sign-extended to 64 bits.
            this.state.flags |= StateFlags::IP_REL64;
            instruction.set_memory_displacement64(displ);
            instruction_internal::internal_set_memory_displ_size(instruction, 4);
            instruction.set_memory_base(Register::RIP);
        } else if this.is64b_mode {
            // 32-bit addressing in 64-bit mode: EIP-relative.
            this.state.flags |= StateFlags::IP_REL32;
            instruction.set_memory_displacement64(displ as u32 as u64);
            instruction_internal::internal_set_memory_displ_size(instruction, 3);
            instruction.set_memory_base(Register::EIP);
        } else {
            // 16/32-bit mode: plain absolute disp32, no base register.
            instruction.set_memory_displacement64(displ as u32 as u64);
            instruction_internal::internal_set_memory_displ_size(instruction, 3);
        }

        return false;
    }
    // Only reached if read_u32_break! broke out: out of input bytes.
    this.state.flags |= StateFlags::IS_INVALID | StateFlags::NO_MORE_BYTES;
    false
}
2137
#[cfg(not(feature = "__internal_flip"))]
#[allow(clippy::never_loop)]
// mod=10, rm=4: SIB byte + disp32. The `loop` exists only so the read macros can
// `break` to the no-more-bytes path below.
fn read_op_mem_2_4(instruction: &mut Instruction, this: &mut Decoder<'_>) -> bool {
    loop {
        let sib = read_u8_break!(this) as u32;
        this.displ_index = this.data_ptr as u8;
        let displ = read_u32_break!(this) as i32 as u64;

        const _: () = assert!(InstrScale::Scale1 as u32 == 0);
        const _: () = assert!(InstrScale::Scale2 as u32 == 1);
        const _: () = assert!(InstrScale::Scale4 as u32 == 2);
        const _: () = assert!(InstrScale::Scale8 as u32 == 3);
        // sib >> 6 is already 0..=3, matching the asserted scale discriminants.
        instruction_internal::internal_set_memory_index_scale(instruction, unsafe { mem::transmute((sib >> 6) as InstrScaleUnderlyingType) });
        let index = ((sib >> 3) & 7) + this.state.extra_index_register_base;
        if this.state.address_size == OpSize::Size64 {
            const BASE_REG: Register = Register::RAX;
            // index 100b (RSP) means "no index register".
            if index != 4 {
                write_index_reg!(instruction, index + BASE_REG as u32);
            }

            write_base_reg!(instruction, (sib & 7) + this.state.extra_base_register_base + BASE_REG as u32);
            instruction_internal::internal_set_memory_displ_size(instruction, 4);
            instruction.set_memory_displacement64(displ);
        } else {
            const BASE_REG: Register = Register::EAX;
            if index != 4 {
                write_index_reg!(instruction, index + BASE_REG as u32);
            }

            write_base_reg!(instruction, (sib & 7) + this.state.extra_base_register_base + BASE_REG as u32);
            instruction_internal::internal_set_memory_displ_size(instruction, 3);
            instruction.set_memory_displacement64(displ as u32 as u64);
        }

        return true;
    }
    // Only reached if a read macro broke out: out of input bytes.
    this.state.flags |= StateFlags::IS_INVALID | StateFlags::NO_MORE_BYTES;
    true
}
2178
#[cfg(not(feature = "__internal_flip"))]
#[allow(clippy::never_loop)]
// mod=10, rm!=4: [reg]+disp32. The `loop` exists only so read_u32_break! can
// `break` to the no-more-bytes path below.
fn read_op_mem_2(instruction: &mut Instruction, this: &mut Decoder<'_>) -> bool {
    loop {
        this.displ_index = this.data_ptr as u8;
        let displ = read_u32_break!(this) as i32 as u64;
        if this.state.address_size == OpSize::Size64 {
            // 64-bit addressing: disp32 sign-extended to 64 bits.
            instruction.set_memory_displacement64(displ);
            instruction_internal::internal_set_memory_displ_size(instruction, 4);
            write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + Register::RAX as u32);
        } else {
            instruction.set_memory_displacement64(displ as u32 as u64);
            instruction_internal::internal_set_memory_displ_size(instruction, 3);
            write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + Register::EAX as u32);
        }

        return false;
    }
    // Only reached if read_u32_break! broke out: out of input bytes.
    this.state.flags |= StateFlags::IS_INVALID | StateFlags::NO_MORE_BYTES;
    false
}
2200
#[cfg(not(feature = "__internal_flip"))]
#[allow(clippy::never_loop)]
// mod=00, rm=4: SIB byte; base 101b means "no base + disp32" instead. The `loop`
// exists only so the read macros can `break` to the no-more-bytes path below.
fn read_op_mem_0_4(instruction: &mut Instruction, this: &mut Decoder<'_>) -> bool {
    loop {
        let sib = read_u8_break!(this) as u32;
        const _: () = assert!(InstrScale::Scale1 as u32 == 0);
        const _: () = assert!(InstrScale::Scale2 as u32 == 1);
        const _: () = assert!(InstrScale::Scale4 as u32 == 2);
        const _: () = assert!(InstrScale::Scale8 as u32 == 3);
        // sib >> 6 is already 0..=3, matching the asserted scale discriminants.
        instruction_internal::internal_set_memory_index_scale(instruction, unsafe { mem::transmute((sib >> 6) as InstrScaleUnderlyingType) });
        let index = ((sib >> 3) & 7) + this.state.extra_index_register_base;
        let base_reg = if this.state.address_size == OpSize::Size64 { Register::RAX } else { Register::EAX };
        // index 100b (RSP/ESP) means "no index register".
        if index != 4 {
            write_index_reg!(instruction, index + base_reg as u32);
        }

        let base = sib & 7;
        if base == 5 {
            // No base register: a disp32 follows the SIB byte.
            this.displ_index = this.data_ptr as u8;
            let displ = read_u32_break!(this) as i32 as u64;
            if this.state.address_size == OpSize::Size64 {
                instruction.set_memory_displacement64(displ);
                instruction_internal::internal_set_memory_displ_size(instruction, 4);
            } else {
                instruction.set_memory_displacement64(displ as u32 as u64);
                instruction_internal::internal_set_memory_displ_size(instruction, 3);
            }
        } else {
            write_base_reg!(instruction, base + this.state.extra_base_register_base + base_reg as u32);
            instruction_internal::internal_set_memory_displ_size(instruction, 0);
            instruction.set_memory_displacement64(0);
        }

        return true;
    }
    // Only reached if a read macro broke out: out of input bytes.
    this.state.flags |= StateFlags::IS_INVALID | StateFlags::NO_MORE_BYTES;
    true
}
2240
#[must_use]
#[inline(always)]
// Returns the EVEX disp8*N scale factor for `tuple_type`, taking the broadcast
// bit (StateFlags::B) into account.
fn disp8n(&self, tuple_type: TupleType) -> u32 {
    get_disp8n(tuple_type, (self.state.flags & StateFlags::B) != 0)
}
2246
#[must_use]
#[allow(clippy::missing_inline_in_public_items)]
/// Returns the offsets and sizes of the displacement and immediate(s) inside the
/// bytes of the most recently decoded `instruction`. Offsets are relative to the
/// start of the instruction; sizes are 0 when the field is absent.
pub fn get_constant_offsets(&self, instruction: &Instruction) -> ConstantOffsets {
    let mut constant_offsets = ConstantOffsets::default();

    let displ_size = instruction.memory_displ_size();
    if displ_size != 0 {
        // displ_index was recorded while decoding; both values are truncated to u8,
        // which is fine since an instruction is at most 15 bytes.
        constant_offsets.displacement_offset = self.displ_index.wrapping_sub(self.instr_start_data_ptr as u8);
        // A 64-bit displ that came from a sign-extended disp32 is reported as 4 bytes.
        if displ_size == 8 && (self.state.flags & StateFlags::ADDR64) == 0 {
            constant_offsets.displacement_size = 4;
        } else {
            constant_offsets.displacement_size = displ_size as u8;
        }
    }

    if (self.state.flags & StateFlags::NO_IMM) == 0 {
        // Immediates sit at the end of the instruction; walk operands last-to-first
        // so a trailing Immediate8_2nd is seen first and shifts the main immediate.
        let mut extra_imm_sub = 0;
        for i in (0..instruction.op_count()).rev() {
            match instruction.op_kind(i) {
                OpKind::Immediate8 | OpKind::Immediate8to16 | OpKind::Immediate8to32 | OpKind::Immediate8to64 => {
                    constant_offsets.immediate_offset = instruction.len().wrapping_sub(extra_imm_sub).wrapping_sub(1) as u8;
                    constant_offsets.immediate_size = 1;
                    break;
                }

                OpKind::Immediate16 => {
                    constant_offsets.immediate_offset = instruction.len().wrapping_sub(extra_imm_sub).wrapping_sub(2) as u8;
                    constant_offsets.immediate_size = 2;
                    break;
                }

                OpKind::Immediate32 | OpKind::Immediate32to64 => {
                    constant_offsets.immediate_offset = instruction.len().wrapping_sub(extra_imm_sub).wrapping_sub(4) as u8;
                    constant_offsets.immediate_size = 4;
                    break;
                }

                OpKind::Immediate64 => {
                    constant_offsets.immediate_offset = instruction.len().wrapping_sub(extra_imm_sub).wrapping_sub(8) as u8;
                    constant_offsets.immediate_size = 8;
                    break;
                }

                OpKind::Immediate8_2nd => {
                    // e.g. ENTER imm16, imm8: the imm8 is the last byte; keep scanning
                    // for the main immediate, offset by this byte.
                    constant_offsets.immediate_offset2 = instruction.len().wrapping_sub(1) as u8;
                    constant_offsets.immediate_size2 = 1;
                    extra_imm_sub = 1;
                }

                OpKind::NearBranch16 => {
                    if (self.state.flags & StateFlags::BRANCH_IMM8) != 0 {
                        constant_offsets.immediate_offset = instruction.len().wrapping_sub(1) as u8;
                        constant_offsets.immediate_size = 1;
                    } else if (self.state.flags & StateFlags::XBEGIN) == 0 {
                        constant_offsets.immediate_offset = instruction.len().wrapping_sub(2) as u8;
                        constant_offsets.immediate_size = 2;
                    } else {
                        // XBEGIN's displacement width follows the operand size.
                        debug_assert!((self.state.flags & StateFlags::XBEGIN) != 0);
                        if self.state.operand_size != OpSize::Size16 {
                            constant_offsets.immediate_offset = instruction.len().wrapping_sub(4) as u8;
                            constant_offsets.immediate_size = 4;
                        } else {
                            constant_offsets.immediate_offset = instruction.len().wrapping_sub(2) as u8;
                            constant_offsets.immediate_size = 2;
                        }
                    }
                }

                OpKind::NearBranch32 | OpKind::NearBranch64 => {
                    if (self.state.flags & StateFlags::BRANCH_IMM8) != 0 {
                        constant_offsets.immediate_offset = instruction.len().wrapping_sub(1) as u8;
                        constant_offsets.immediate_size = 1;
                    } else if (self.state.flags & StateFlags::XBEGIN) == 0 {
                        constant_offsets.immediate_offset = instruction.len().wrapping_sub(4) as u8;
                        constant_offsets.immediate_size = 4;
                    } else {
                        debug_assert!((self.state.flags & StateFlags::XBEGIN) != 0);
                        if self.state.operand_size != OpSize::Size16 {
                            constant_offsets.immediate_offset = instruction.len().wrapping_sub(4) as u8;
                            constant_offsets.immediate_size = 4;
                        } else {
                            constant_offsets.immediate_offset = instruction.len().wrapping_sub(2) as u8;
                            constant_offsets.immediate_size = 2;
                        }
                    }
                }

                OpKind::FarBranch16 => {
                    // Layout: offset16 then selector16.
                    constant_offsets.immediate_offset = instruction.len().wrapping_sub(2 + 2) as u8;
                    constant_offsets.immediate_size = 2;
                    constant_offsets.immediate_offset2 = instruction.len().wrapping_sub(2) as u8;
                    constant_offsets.immediate_size2 = 2;
                }

                OpKind::FarBranch32 => {
                    // Layout: offset32 then selector16.
                    constant_offsets.immediate_offset = instruction.len().wrapping_sub(4 + 2) as u8;
                    constant_offsets.immediate_size = 4;
                    constant_offsets.immediate_offset2 = instruction.len().wrapping_sub(2) as u8;
                    constant_offsets.immediate_size2 = 2;
                }

                _ => {}
            }
        }
    }

    constant_offsets
}
2388}
2389
#[must_use]
#[cfg(any(feature = "__internal_flip", not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
#[inline(always)]
// Dispatches a 32/64-bit memory-operand read through READ_OP_MEM_VSIB_FNS,
// indexed by (mod << 3) | rm. Returns the handler's validity result.
fn decoder_read_op_mem_32_or_64_vsib(
    this: &mut Decoder<'_>, instruction: &mut Instruction, index_reg: Register, tuple_type: TupleType, is_vsib: bool,
) -> bool {
    debug_assert!(this.state.address_size == OpSize::Size32 || this.state.address_size == OpSize::Size64);

    let index = this.state.mem_index as usize;
    debug_assert!(index < READ_OP_MEM_VSIB_FNS.len());
    // SAFETY: mem_index = (mod << 3) | rm <= 0x1F and the table covers all such
    // indexes (debug-asserted above).
    unsafe { (READ_OP_MEM_VSIB_FNS.get_unchecked(index))(this, instruction, index_reg, tuple_type, is_vsib) }
}
2408
#[cfg(any(feature = "__internal_flip", not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
// mod=01, rm!=4 (VSIB table variant): [reg]+disp8, with the disp8 scaled by the
// tuple type (EVEX disp8*N). No SIB byte, so index_reg/is_vsib are unused.
fn decoder_read_op_mem_vsib_1(
    this: &mut Decoder<'_>, instruction: &mut Instruction, _index_reg: Register, tuple_type: TupleType, _is_vsib: bool,
) -> bool {
    instruction_internal::internal_set_memory_displ_size(instruction, 1);
    this.displ_index = this.data_ptr as u8;
    let b = this.read_u8();
    if this.state.address_size == OpSize::Size64 {
        write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + Register::RAX as u32);
        // 64-bit: sign-extend disp8 to 64 bits before scaling.
        instruction.set_memory_displacement64((this.disp8n(tuple_type) as u64).wrapping_mul(b as i8 as u64));
    } else {
        write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + Register::EAX as u32);
        // 32-bit: scale in 32-bit arithmetic, then zero-extend.
        instruction.set_memory_displacement64(this.disp8n(tuple_type).wrapping_mul(b as i8 as u32) as u64);
    }

    false
}
2426
#[cfg(any(feature = "__internal_flip", not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
fn decoder_read_op_mem_vsib_1_4(
	this: &mut Decoder<'_>, instruction: &mut Instruction, index_reg: Register, tuple_type: TupleType, is_vsib: bool,
) -> bool {
	// SIB byte followed by an 8-bit displacement (scaled by the tuple type's disp8N factor).
	instruction_internal::internal_set_memory_displ_size(instruction, 1);

	// The disp8 byte is located right after the SIB byte; both are fetched
	// below with a single 16-bit read (disp8 ends up in the high byte of `sib`).
	this.displ_index = this.data_ptr.wrapping_add(1) as u8;
	let sib = this.read_u16() as u32;
	let index = ((sib >> 3) & 7) + this.state.extra_index_register_base;
	if !is_vsib {
		// A non-VSIB index of 4 means "no index register"
		if index != 4 {
			write_index_reg!(instruction, index + index_reg as u32);
		}
	} else {
		// VSIB: index 4 is a valid vector index register
		write_index_reg!(instruction, index + this.state.extra_index_register_base_vsib + index_reg as u32);
	}

	// Verify that a masked 2-bit scale field maps directly onto InstrScale
	const _: () = assert!(InstrScale::Scale1 as u32 == 0);
	const _: () = assert!(InstrScale::Scale2 as u32 == 1);
	const _: () = assert!(InstrScale::Scale4 as u32 == 2);
	const _: () = assert!(InstrScale::Scale8 as u32 == 3);
	// SAFETY: the value is masked to 0..=3, all of which are valid InstrScale
	// variants (see the const asserts above).
	instruction_internal::internal_set_memory_index_scale(instruction, unsafe { mem::transmute(((sib >> 6) & 3) as InstrScaleUnderlyingType) });
	let base_reg = if this.state.address_size == OpSize::Size64 { Register::RAX } else { Register::EAX };
	write_base_reg!(instruction, (sib & 7) + this.state.extra_base_register_base + base_reg as u32);

	// High byte of the 16-bit read is the disp8 value: sign-extend it, scale by disp8N
	let b = (sib >> 8) as i8 as u32;
	let displ = this.disp8n(tuple_type).wrapping_mul(b);
	if this.state.address_size == OpSize::Size64 {
		// 64-bit addressing: sign-extend the 32-bit displacement
		instruction.set_memory_displacement64(displ as i32 as u64);
	} else {
		instruction.set_memory_displacement64(displ as u64);
	}

	// `true` here matches the other handlers that consumed a SIB byte
	true
}
2463
2464#[cfg(any(feature = "__internal_flip", not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
2465fn decoder_read_op_mem_vsib_0(
2466 this: &mut Decoder<'_>, instruction: &mut Instruction, _index_reg: Register, _tuple_type: TupleType, _is_vsib: bool,
2467) -> bool {
2468 let base_reg = if this.state.address_size == OpSize::Size64 { Register::RAX } else { Register::EAX };
2469 write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + base_reg as u32);
2470
2471 false
2472}
2473
2474#[cfg(any(feature = "__internal_flip", not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
2475fn decoder_read_op_mem_vsib_0_5(
2476 this: &mut Decoder<'_>, instruction: &mut Instruction, _index_reg: Register, _tuple_type: TupleType, _is_vsib: bool,
2477) -> bool {
2478 this.displ_index = this.data_ptr as u8;
2479 let d = this.read_u32();
2480 if this.state.address_size == OpSize::Size64 {
2481 debug_assert!(this.is64b_mode);
2482 this.state.flags |= StateFlags::IP_REL64;
2483 instruction.set_memory_displacement64(d as i32 as u64);
2484 instruction_internal::internal_set_memory_displ_size(instruction, 4);
2485 instruction.set_memory_base(Register::RIP);
2486 } else if this.is64b_mode {
2487 this.state.flags |= StateFlags::IP_REL32;
2488 instruction.set_memory_displacement64(d as u64);
2489 instruction_internal::internal_set_memory_displ_size(instruction, 3);
2490 instruction.set_memory_base(Register::EIP);
2491 } else {
2492 instruction.set_memory_displacement64(d as u64);
2493 instruction_internal::internal_set_memory_displ_size(instruction, 3);
2494 }
2495
2496 false
2497}
2498
2499#[cfg(any(feature = "__internal_flip", not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
2500fn decoder_read_op_mem_vsib_2_4(
2501 this: &mut Decoder<'_>, instruction: &mut Instruction, index_reg: Register, _tuple_type: TupleType, is_vsib: bool,
2502) -> bool {
2503 let sib = this.read_u8() as u32;
2504 this.displ_index = this.data_ptr as u8;
2505
2506 let index = ((sib >> 3) & 7) + this.state.extra_index_register_base;
2507 if !is_vsib {
2508 if index != 4 {
2509 write_index_reg!(instruction, index + index_reg as u32);
2510 }
2511 } else {
2512 write_index_reg!(instruction, index + this.state.extra_index_register_base_vsib + index_reg as u32);
2513 }
2514
2515 const _: () = assert!(InstrScale::Scale1 as u32 == 0);
2516 const _: () = assert!(InstrScale::Scale2 as u32 == 1);
2517 const _: () = assert!(InstrScale::Scale4 as u32 == 2);
2518 const _: () = assert!(InstrScale::Scale8 as u32 == 3);
2519 instruction_internal::internal_set_memory_index_scale(instruction, unsafe { mem::transmute((sib >> 6) as InstrScaleUnderlyingType) });
2521
2522 let base_reg = if this.state.address_size == OpSize::Size64 { Register::RAX } else { Register::EAX };
2523 write_base_reg!(instruction, (sib & 7) + this.state.extra_base_register_base + base_reg as u32);
2524 let displ = this.read_u32() as u32;
2525 if this.state.address_size == OpSize::Size64 {
2526 instruction_internal::internal_set_memory_displ_size(instruction, 4);
2527 instruction.set_memory_displacement64(displ as i32 as u64);
2528 } else {
2529 instruction_internal::internal_set_memory_displ_size(instruction, 3);
2530 instruction.set_memory_displacement64(displ as u64);
2531 }
2532
2533 true
2534}
2535
2536#[cfg(any(feature = "__internal_flip", not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
2537fn decoder_read_op_mem_vsib_2(
2538 this: &mut Decoder<'_>, instruction: &mut Instruction, _index_reg: Register, _tuple_type: TupleType, _is_vsib: bool,
2539) -> bool {
2540 let base_reg = if this.state.address_size == OpSize::Size64 { Register::RAX } else { Register::EAX };
2541 write_base_reg!(instruction, this.state.extra_base_register_base + this.state.rm + base_reg as u32);
2542 this.displ_index = this.data_ptr as u8;
2543 let d = this.read_u32();
2544 if this.state.address_size == OpSize::Size64 {
2545 instruction.set_memory_displacement64(d as i32 as u64);
2546 instruction_internal::internal_set_memory_displ_size(instruction, 4);
2547 } else {
2548 instruction.set_memory_displacement64(d as u64);
2549 instruction_internal::internal_set_memory_displ_size(instruction, 3);
2550 }
2551
2552 false
2553}
2554
2555#[cfg(any(feature = "__internal_flip", not(feature = "no_evex"), not(feature = "no_vex"), not(feature = "no_xop"), feature = "mvex"))]
2556fn decoder_read_op_mem_vsib_0_4(
2557 this: &mut Decoder<'_>, instruction: &mut Instruction, index_reg: Register, _tuple_type: TupleType, is_vsib: bool,
2558) -> bool {
2559 let sib = this.read_u8() as u32;
2560 const _: () = assert!(InstrScale::Scale1 as u32 == 0);
2561 const _: () = assert!(InstrScale::Scale2 as u32 == 1);
2562 const _: () = assert!(InstrScale::Scale4 as u32 == 2);
2563 const _: () = assert!(InstrScale::Scale8 as u32 == 3);
2564 instruction_internal::internal_set_memory_index_scale(instruction, unsafe { mem::transmute((sib >> 6) as InstrScaleUnderlyingType) });
2566 let index = ((sib >> 3) & 7) + this.state.extra_index_register_base;
2567 if !is_vsib {
2568 if index != 4 {
2569 write_index_reg!(instruction, index + index_reg as u32);
2570 }
2571 } else {
2572 write_index_reg!(instruction, index + this.state.extra_index_register_base_vsib + index_reg as u32);
2573 }
2574
2575 let base = sib & 7;
2576 if base == 5 {
2577 this.displ_index = this.data_ptr as u8;
2578 let d = this.read_u32();
2579 if this.state.address_size == OpSize::Size64 {
2580 instruction.set_memory_displacement64(d as i32 as u64);
2581 instruction_internal::internal_set_memory_displ_size(instruction, 4);
2582 } else {
2583 instruction.set_memory_displacement64(d as u64);
2584 instruction_internal::internal_set_memory_displ_size(instruction, 3);
2585 }
2586 } else {
2587 let base_reg = if this.state.address_size == OpSize::Size64 { Register::RAX } else { Register::EAX };
2588 write_base_reg!(instruction, base + this.state.extra_base_register_base + base_reg as u32);
2589 instruction_internal::internal_set_memory_displ_size(instruction, 0);
2590 instruction.set_memory_displacement64(0);
2591 }
2592
2593 true
2594}
2595
// Iterator over the remaining instructions; borrows the decoder mutably so the
// decoder can be reused after iteration. Created by `IntoIterator for &mut Decoder`.
#[doc(hidden)]
#[allow(missing_debug_implementations)]
pub struct DecoderIter<'a, 'b> {
	decoder: &'b mut Decoder<'a>,
}
2601
2602impl Iterator for DecoderIter<'_, '_> {
2603 type Item = Instruction;
2604
2605 #[inline]
2606 fn next(&mut self) -> Option<Self::Item> {
2607 if self.decoder.can_decode() {
2608 Some(self.decoder.decode())
2609 } else {
2610 None
2611 }
2612 }
2613}
2614
// Once `can_decode()` is false, `next()` keeps returning `None`, so the iterator is fused.
impl FusedIterator for DecoderIter<'_, '_> {}
2616
// Owning iterator over the remaining instructions; consumes the decoder.
// Created by `IntoIterator for Decoder`.
#[doc(hidden)]
#[allow(missing_debug_implementations)]
pub struct DecoderIntoIter<'a> {
	decoder: Decoder<'a>,
}
2622
2623impl Iterator for DecoderIntoIter<'_> {
2624 type Item = Instruction;
2625
2626 #[inline]
2627 fn next(&mut self) -> Option<Self::Item> {
2628 if self.decoder.can_decode() {
2629 Some(self.decoder.decode())
2630 } else {
2631 None
2632 }
2633 }
2634}
2635
// Once `can_decode()` is false, `next()` keeps returning `None`, so the iterator is fused.
impl FusedIterator for DecoderIntoIter<'_> {}
2637
2638impl<'a> IntoIterator for Decoder<'a> {
2639 type Item = Instruction;
2640 type IntoIter = DecoderIntoIter<'a>;
2641
2642 #[must_use]
2643 #[inline]
2644 fn into_iter(self) -> Self::IntoIter {
2645 DecoderIntoIter { decoder: self }
2646 }
2647}
2648
2649impl<'a, 'b> IntoIterator for &'b mut Decoder<'a> {
2650 type Item = Instruction;
2651 type IntoIter = DecoderIter<'a, 'b>;
2652
2653 #[must_use]
2654 #[inline]
2655 fn into_iter(self) -> Self::IntoIter {
2656 DecoderIter { decoder: self }
2657 }
2658}