1use crate::ir::types::*;
4use crate::isa::aarch64::inst::*;
5
/// A shift operator for a register or immediate.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum ShiftOp {
    /// Logical shift left.
    LSL = 0b00,
    /// Logical shift right.
    LSR = 0b01,
    /// Arithmetic shift right.
    ASR = 0b10,
    /// Rotate right.
    ROR = 0b11,
}

impl ShiftOp {
    /// Get the two-bit machine encoding of this shift operator.
    pub fn bits(self) -> u8 {
        match self {
            ShiftOp::LSL => 0b00,
            ShiftOp::LSR => 0b01,
            ShiftOp::ASR => 0b10,
            ShiftOp::ROR => 0b11,
        }
    }
}
29
/// A shift operator amount.
#[derive(Clone, Copy, Debug)]
pub struct ShiftOpShiftImm(u8);

impl ShiftOpShiftImm {
    /// Maximum shift amount for a shifted-register operand.
    pub const MAX_SHIFT: u64 = 63;

    /// Create a shift-amount from the given count, if it is in range
    /// (`0..=MAX_SHIFT`).
    pub fn maybe_from_shift(shift: u64) -> Option<ShiftOpShiftImm> {
        (shift <= Self::MAX_SHIFT).then(|| ShiftOpShiftImm(shift as u8))
    }

    /// Return the raw shift amount.
    pub fn value(self) -> u8 {
        self.0
    }

    /// Mask the shift amount to the given register width in bits
    /// (`bits` is expected to be a power of two, e.g. 32 or 64).
    pub fn mask(self, bits: u8) -> ShiftOpShiftImm {
        let masked = self.0 & (bits - 1);
        ShiftOpShiftImm(masked)
    }
}
57
/// A shift operator paired with its shift amount.
#[derive(Copy, Clone, Debug)]
pub struct ShiftOpAndAmt {
    // Which shift operator (LSL/LSR/ASR/ROR).
    op: ShiftOp,
    // The shift amount (see `ShiftOpShiftImm` for range constraints).
    shift: ShiftOpShiftImm,
}

impl ShiftOpAndAmt {
    /// Create a new operator/amount pair.
    pub fn new(op: ShiftOp, shift: ShiftOpShiftImm) -> ShiftOpAndAmt {
        ShiftOpAndAmt { op, shift }
    }

    /// Get the shift operator.
    pub fn op(&self) -> ShiftOp {
        self.op
    }

    /// Get the shift amount.
    pub fn amt(&self) -> ShiftOpShiftImm {
        self.shift
    }
}
83
/// An extend operator for a register.
#[derive(Clone, Copy, Debug)]
#[repr(u8)]
pub enum ExtendOp {
    /// Unsigned extend from a byte.
    UXTB = 0b000,
    /// Unsigned extend from a halfword.
    UXTH = 0b001,
    /// Unsigned extend from a 32-bit word.
    UXTW = 0b010,
    /// Unsigned "extend" from a full 64-bit value (identity).
    UXTX = 0b011,
    /// Signed extend from a byte.
    SXTB = 0b100,
    /// Signed extend from a halfword.
    SXTH = 0b101,
    /// Signed extend from a 32-bit word.
    SXTW = 0b110,
    /// Signed "extend" from a full 64-bit value (identity).
    SXTX = 0b111,
}

impl ExtendOp {
    /// Get the three-bit machine encoding of this extend operator.
    pub fn bits(self) -> u8 {
        match self {
            ExtendOp::UXTB => 0b000,
            ExtendOp::UXTH => 0b001,
            ExtendOp::UXTW => 0b010,
            ExtendOp::UXTX => 0b011,
            ExtendOp::SXTB => 0b100,
            ExtendOp::SXTH => 0b101,
            ExtendOp::SXTW => 0b110,
            ExtendOp::SXTX => 0b111,
        }
    }
}
112
/// A reference to some memory address, named by a label/offset rather than
/// a register.
#[derive(Clone, Debug)]
pub enum MemLabel {
    /// An offset in bytes relative to the program counter.
    PCRel(i32),
    /// A reference to a machine-buffer label, resolved later.
    Mach(MachLabel),
}
127
impl AMode {
    /// Memory reference using `reg` as the base address with no offset
    /// (encoded as an unsigned scaled offset of zero).
    pub fn reg(reg: Reg) -> AMode {
        AMode::UnsignedOffset {
            rn: reg,
            uimm12: UImm12Scaled::zero(I64),
        }
    }

    /// Memory reference using `reg1` as the base with `reg2` extended per
    /// `op` as the index. The scale-by-access-size implied by the
    /// `RegScaledExtended` variant is applied at encoding time (see the
    /// pretty-print impl, which shows it explicitly).
    pub fn reg_plus_reg_scaled_extended(reg1: Reg, reg2: Reg, op: ExtendOp) -> AMode {
        AMode::RegScaledExtended {
            rn: reg1,
            rm: reg2,
            extendop: op,
        }
    }
}
149
150pub use crate::isa::aarch64::lower::isle::generated_code::PairAMode;
151
/// Condition for a conditional branch or conditional operation.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum Cond {
    /// Equal.
    Eq = 0,
    /// Not equal.
    Ne = 1,
    /// Unsigned greater than or equal ("higher or same").
    Hs = 2,
    /// Unsigned less than ("lower").
    Lo = 3,
    /// Negative ("minus").
    Mi = 4,
    /// Non-negative ("plus").
    Pl = 5,
    /// Overflow set.
    Vs = 6,
    /// Overflow clear.
    Vc = 7,
    /// Unsigned greater than ("higher").
    Hi = 8,
    /// Unsigned less than or equal ("lower or same").
    Ls = 9,
    /// Signed greater than or equal.
    Ge = 10,
    /// Signed less than.
    Lt = 11,
    /// Signed greater than.
    Gt = 12,
    /// Signed less than or equal.
    Le = 13,
    /// Always taken.
    Al = 14,
    /// The remaining encoding (paired with `Al`).
    Nv = 15,
}

impl Cond {
    /// Return the logical inverse of this condition. Each pair of arms below
    /// maps a condition to the one that holds exactly when it does not.
    pub fn invert(self) -> Cond {
        use Cond::*;
        match self {
            Eq => Ne,
            Ne => Eq,
            Hs => Lo,
            Lo => Hs,
            Mi => Pl,
            Pl => Mi,
            Vs => Vc,
            Vc => Vs,
            Hi => Ls,
            Ls => Hi,
            Ge => Lt,
            Lt => Ge,
            Gt => Le,
            Le => Gt,
            Al => Nv,
            Nv => Al,
        }
    }

    /// Return the four-bit machine encoding of this condition.
    pub fn bits(self) -> u32 {
        self as u32
    }
}
229
230#[derive(Clone, Copy, Debug)]
234pub enum CondBrKind {
235 Zero(Reg, OperandSize),
237 NotZero(Reg, OperandSize),
239 Cond(Cond),
241}
242
243impl CondBrKind {
244 pub fn invert(self) -> CondBrKind {
246 match self {
247 CondBrKind::Zero(reg, size) => CondBrKind::NotZero(reg, size),
248 CondBrKind::NotZero(reg, size) => CondBrKind::Zero(reg, size),
249 CondBrKind::Cond(c) => CondBrKind::Cond(c.invert()),
250 }
251 }
252}
253
254#[derive(Clone, Copy, Debug, PartialEq, Eq)]
257pub enum BranchTarget {
258 Label(MachLabel),
261 ResolvedOffset(i32),
263}
264
265impl BranchTarget {
266 pub fn as_label(self) -> Option<MachLabel> {
268 match self {
269 BranchTarget::Label(l) => Some(l),
270 _ => None,
271 }
272 }
273
274 pub fn as_offset14_or_zero(self) -> u32 {
276 self.as_offset_bounded(14)
277 }
278
279 pub fn as_offset19_or_zero(self) -> u32 {
281 self.as_offset_bounded(19)
282 }
283
284 pub fn as_offset26_or_zero(self) -> u32 {
286 self.as_offset_bounded(26)
287 }
288
289 fn as_offset_bounded(self, bits: u32) -> u32 {
290 let off = match self {
291 BranchTarget::ResolvedOffset(off) => off >> 2,
292 _ => 0,
293 };
294 let hi = (1 << (bits - 1)) - 1;
295 let lo = -(1 << bits - 1);
296 assert!(off <= hi);
297 assert!(off >= lo);
298 (off as u32) & ((1 << bits) - 1)
299 }
300}
301
impl PrettyPrint for ShiftOpAndAmt {
    fn pretty_print(&self, _: u8) -> String {
        // E.g. `LSL 3`: the Debug name of the operator followed by the
        // decimal shift amount.
        format!("{:?} {}", self.op(), self.amt().value())
    }
}
307
impl PrettyPrint for ExtendOp {
    fn pretty_print(&self, _: u8) -> String {
        // The Debug name (e.g. `UXTB`) doubles as the assembly mnemonic.
        format!("{self:?}")
    }
}
313
impl PrettyPrint for MemLabel {
    fn pretty_print(&self, _: u8) -> String {
        match self {
            // PC-relative byte offset; note a negative offset prints as
            // `pc+-N`.
            MemLabel::PCRel(off) => format!("pc+{off}"),
            // Unresolved machine label, shown by its index.
            MemLabel::Mach(off) => format!("label({})", off.as_u32()),
        }
    }
}
322
/// Return the left-shift (log2 of the access size) used to scale an index
/// for an access of `size_bytes` bytes. Panics on a non-power-of-two or
/// out-of-range size.
fn shift_for_type(size_bytes: u8) -> usize {
    if matches!(size_bytes, 1 | 2 | 4 | 8 | 16) {
        // For these powers of two, log2 == number of trailing zero bits.
        size_bytes.trailing_zeros() as usize
    } else {
        panic!("unknown type size: {size_bytes}")
    }
}
333
impl PrettyPrint for AMode {
    /// Render this addressing mode in assembly-like syntax. `size_bytes` is
    /// the size of the memory access, used only to compute the scale shown
    /// for the scaled modes.
    fn pretty_print(&self, size_bytes: u8) -> String {
        debug_assert!(size_bytes != 0);
        match self {
            // Base register plus unscaled immediate; a zero offset is elided.
            &AMode::Unscaled { rn, simm9 } => {
                let reg = pretty_print_reg(rn);
                if simm9.value != 0 {
                    let simm9 = simm9.pretty_print(8);
                    format!("[{reg}, {simm9}]")
                } else {
                    format!("[{reg}]")
                }
            }
            // Base register plus unsigned scaled immediate; a zero offset is
            // elided.
            &AMode::UnsignedOffset { rn, uimm12 } => {
                let reg = pretty_print_reg(rn);
                if uimm12.value() != 0 {
                    let uimm12 = uimm12.pretty_print(8);
                    format!("[{reg}, {uimm12}]")
                } else {
                    format!("[{reg}]")
                }
            }
            // Base register plus index register, unscaled.
            &AMode::RegReg { rn, rm } => {
                let r1 = pretty_print_reg(rn);
                let r2 = pretty_print_reg(rm);
                format!("[{r1}, {r2}]")
            }
            // Base plus index, with the index shifted left by log2 of the
            // access size.
            &AMode::RegScaled { rn, rm } => {
                let r1 = pretty_print_reg(rn);
                let r2 = pretty_print_reg(rm);
                let shift = shift_for_type(size_bytes);
                format!("[{r1}, {r2}, LSL #{shift}]")
            }
            // Base plus extended-and-scaled index. A 32-bit extend source
            // means the index register is shown at 32-bit width.
            &AMode::RegScaledExtended { rn, rm, extendop } => {
                let shift = shift_for_type(size_bytes);
                let size = match extendop {
                    ExtendOp::SXTW | ExtendOp::UXTW => OperandSize::Size32,
                    _ => OperandSize::Size64,
                };
                let r1 = pretty_print_reg(rn);
                let r2 = pretty_print_ireg(rm, size);
                let op = extendop.pretty_print(0);
                format!("[{r1}, {r2}, {op} #{shift}]")
            }
            // Base plus extended (but unscaled) index.
            &AMode::RegExtended { rn, rm, extendop } => {
                let size = match extendop {
                    ExtendOp::SXTW | ExtendOp::UXTW => OperandSize::Size32,
                    _ => OperandSize::Size64,
                };
                let r1 = pretty_print_reg(rn);
                let r2 = pretty_print_ireg(rm, size);
                let op = extendop.pretty_print(0);
                format!("[{r1}, {r2}, {op}]")
            }
            // Label-relative reference; delegate to the label's printer.
            &AMode::Label { ref label } => label.pretty_print(0),
            // SP with pre-/post-index writeback.
            &AMode::SPPreIndexed { simm9 } => {
                let simm9 = simm9.pretty_print(8);
                format!("[sp, {simm9}]!")
            }
            &AMode::SPPostIndexed { simm9 } => {
                let simm9 = simm9.pretty_print(8);
                format!("[sp], {simm9}")
            }
            // Reference to a constant-pool entry, shown by its address index.
            AMode::Const { addr } => format!("[const({})]", addr.as_u32()),

            // Pseudo modes must be lowered to a real addressing mode before
            // pretty-printing; reaching here is a compiler bug.
            &AMode::SPOffset { .. }
            | &AMode::FPOffset { .. }
            | &AMode::IncomingArg { .. }
            | &AMode::SlotOffset { .. }
            | &AMode::RegOffset { .. } => {
                panic!("Unexpected pseudo mem-arg mode: {self:?}")
            }
        }
    }
}
410
impl PrettyPrint for PairAMode {
    /// Render this load/store-pair addressing mode in assembly-like syntax.
    fn pretty_print(&self, _: u8) -> String {
        match self {
            // Base register plus signed offset; a zero offset is elided.
            &PairAMode::SignedOffset { reg, simm7 } => {
                let reg = pretty_print_reg(reg);
                if simm7.value != 0 {
                    let simm7 = simm7.pretty_print(8);
                    format!("[{reg}, {simm7}]")
                } else {
                    format!("[{reg}]")
                }
            }
            // SP with pre-index writeback (`[sp, #off]!`).
            &PairAMode::SPPreIndexed { simm7 } => {
                let simm7 = simm7.pretty_print(8);
                format!("[sp, {simm7}]!")
            }
            // SP with post-index writeback (`[sp], #off`).
            &PairAMode::SPPostIndexed { simm7 } => {
                let simm7 = simm7.pretty_print(8);
                format!("[sp], {simm7}")
            }
        }
    }
}
434
435impl PrettyPrint for Cond {
436 fn pretty_print(&self, _: u8) -> String {
437 let mut s = format!("{self:?}");
438 s.make_ascii_lowercase();
439 s
440 }
441}
442
impl PrettyPrint for BranchTarget {
    fn pretty_print(&self, _: u8) -> String {
        match self {
            // Unresolved target: `labelN`, where N is the label index.
            &BranchTarget::Label(label) => format!("label{:?}", label.as_u32()),
            // Resolved target: the decimal byte offset.
            &BranchTarget::ResolvedOffset(off) => format!("{off}"),
        }
    }
}
451
452#[derive(Clone, Copy, Debug, PartialEq, Eq)]
455pub enum OperandSize {
456 Size32,
458 Size64,
460}
461
462impl OperandSize {
463 pub fn is32(self) -> bool {
465 self == OperandSize::Size32
466 }
467
468 pub fn is64(self) -> bool {
470 self == OperandSize::Size64
471 }
472
473 pub fn from_bits<I: Into<usize>>(bits: I) -> OperandSize {
475 let bits: usize = bits.into();
476 assert!(bits <= 64);
477 if bits <= 32 {
478 OperandSize::Size32
479 } else {
480 OperandSize::Size64
481 }
482 }
483
484 pub fn bits(&self) -> u8 {
486 match self {
487 OperandSize::Size32 => 32,
488 OperandSize::Size64 => 64,
489 }
490 }
491
492 pub fn from_ty(ty: Type) -> OperandSize {
494 debug_assert!(!ty.is_vector());
495
496 Self::from_bits(ty_bits(ty))
497 }
498
499 pub fn to_ty(self) -> Type {
501 match self {
502 OperandSize::Size32 => I32,
503 OperandSize::Size64 => I64,
504 }
505 }
506
507 pub fn sf_bit(&self) -> u32 {
511 match self {
512 OperandSize::Size32 => 0,
513 OperandSize::Size64 => 1,
514 }
515 }
516
517 pub fn max_value(&self) -> u64 {
519 match self {
520 OperandSize::Size32 => u32::MAX as u64,
521 OperandSize::Size64 => u64::MAX,
522 }
523 }
524}
525
526#[derive(Clone, Copy, Debug, PartialEq, Eq)]
528pub enum ScalarSize {
529 Size8,
531 Size16,
533 Size32,
535 Size64,
537 Size128,
539}
540
541impl ScalarSize {
542 pub fn operand_size(&self) -> OperandSize {
544 match self {
545 ScalarSize::Size8 | ScalarSize::Size16 | ScalarSize::Size32 => OperandSize::Size32,
546 ScalarSize::Size64 => OperandSize::Size64,
547 _ => panic!("Unexpected operand_size request for: {self:?}"),
548 }
549 }
550
551 pub fn ftype(&self) -> u32 {
554 match self {
555 ScalarSize::Size16 => 0b11,
556 ScalarSize::Size32 => 0b00,
557 ScalarSize::Size64 => 0b01,
558 _ => panic!("Unexpected scalar FP operand size: {self:?}"),
559 }
560 }
561
562 pub fn widen(&self) -> ScalarSize {
564 match self {
565 ScalarSize::Size8 => ScalarSize::Size16,
566 ScalarSize::Size16 => ScalarSize::Size32,
567 ScalarSize::Size32 => ScalarSize::Size64,
568 ScalarSize::Size64 => ScalarSize::Size128,
569 ScalarSize::Size128 => panic!("can't widen 128-bits"),
570 }
571 }
572
573 pub fn narrow(&self) -> ScalarSize {
575 match self {
576 ScalarSize::Size8 => panic!("can't narrow 8-bits"),
577 ScalarSize::Size16 => ScalarSize::Size8,
578 ScalarSize::Size32 => ScalarSize::Size16,
579 ScalarSize::Size64 => ScalarSize::Size32,
580 ScalarSize::Size128 => ScalarSize::Size64,
581 }
582 }
583
584 pub fn ty(&self) -> Type {
586 match self {
587 ScalarSize::Size8 => I8,
588 ScalarSize::Size16 => I16,
589 ScalarSize::Size32 => I32,
590 ScalarSize::Size64 => I64,
591 ScalarSize::Size128 => I128,
592 }
593 }
594}
595
596#[derive(Clone, Copy, Debug, PartialEq, Eq)]
598pub enum VectorSize {
599 Size8x8,
601 Size8x16,
603 Size16x4,
605 Size16x8,
607 Size32x2,
609 Size32x4,
611 Size64x2,
613}
614
615impl VectorSize {
616 pub fn from_lane_size(size: ScalarSize, is_128bit: bool) -> VectorSize {
618 match (size, is_128bit) {
619 (ScalarSize::Size8, false) => VectorSize::Size8x8,
620 (ScalarSize::Size8, true) => VectorSize::Size8x16,
621 (ScalarSize::Size16, false) => VectorSize::Size16x4,
622 (ScalarSize::Size16, true) => VectorSize::Size16x8,
623 (ScalarSize::Size32, false) => VectorSize::Size32x2,
624 (ScalarSize::Size32, true) => VectorSize::Size32x4,
625 (ScalarSize::Size64, true) => VectorSize::Size64x2,
626 _ => panic!("Unexpected scalar FP operand size: {size:?}"),
627 }
628 }
629
630 pub fn operand_size(&self) -> OperandSize {
632 match self {
633 VectorSize::Size64x2 => OperandSize::Size64,
634 _ => OperandSize::Size32,
635 }
636 }
637
638 pub fn lane_size(&self) -> ScalarSize {
640 match self {
641 VectorSize::Size8x8 | VectorSize::Size8x16 => ScalarSize::Size8,
642 VectorSize::Size16x4 | VectorSize::Size16x8 => ScalarSize::Size16,
643 VectorSize::Size32x2 | VectorSize::Size32x4 => ScalarSize::Size32,
644 VectorSize::Size64x2 => ScalarSize::Size64,
645 }
646 }
647
648 pub fn is_128bits(&self) -> bool {
650 match self {
651 VectorSize::Size8x8 => false,
652 VectorSize::Size8x16 => true,
653 VectorSize::Size16x4 => false,
654 VectorSize::Size16x8 => true,
655 VectorSize::Size32x2 => false,
656 VectorSize::Size32x4 => true,
657 VectorSize::Size64x2 => true,
658 }
659 }
660
661 pub fn enc_size(&self) -> (u32, u32) {
664 let q = self.is_128bits() as u32;
665 let size = match self.lane_size() {
666 ScalarSize::Size8 => 0b00,
667 ScalarSize::Size16 => 0b01,
668 ScalarSize::Size32 => 0b10,
669 ScalarSize::Size64 => 0b11,
670 _ => unreachable!(),
671 };
672
673 (q, size)
674 }
675
676 pub fn enc_float_size(&self) -> u32 {
679 match self.lane_size() {
680 ScalarSize::Size32 => 0b0,
681 ScalarSize::Size64 => 0b1,
682 size => panic!("Unsupported floating-point size for vector op: {size:?}"),
683 }
684 }
685}
686
impl APIKey {
    /// Returns the encoding of the `auti*` hint-space instruction for this
    /// pointer-authentication key/modifier combination.
    pub fn enc_auti_hint(&self) -> u32 {
        // Each key selects a distinct (CRm, op2) pair; CRm is 0b0011 for
        // all of them and op2 distinguishes the variants.
        let (crm, op2) = match self {
            APIKey::AZ => (0b0011, 0b100),
            APIKey::ASP => (0b0011, 0b101),
            APIKey::BZ => (0b0011, 0b110),
            APIKey::BSP => (0b0011, 0b111),
        };
        // 0xd503201f is the base HINT encoding; CRm occupies bits 8..12 and
        // op2 bits 5..8 (per the AArch64 HINT instruction layout — see the
        // Arm ARM for the exact field definitions).
        0xd503201f | (crm << 8) | (op2 << 5)
    }
}
700
701pub use crate::isa::aarch64::lower::isle::generated_code::TestBitAndBranchKind;
702
703impl TestBitAndBranchKind {
704 pub fn complement(&self) -> TestBitAndBranchKind {
706 match self {
707 TestBitAndBranchKind::Z => TestBitAndBranchKind::NZ,
708 TestBitAndBranchKind::NZ => TestBitAndBranchKind::Z,
709 }
710 }
711}