cranelift_codegen/isa/pulley_shared/inst/args.rs

//! Pulley instruction arguments.

use super::*;
use crate::ir::ExternalName;
use crate::machinst::abi::StackAMode;
use pulley_interpreter::encode;
use pulley_interpreter::regs::Reg as _;
use std::fmt;

/// A macro for defining a newtype of `Reg` that enforces some invariant about
/// the wrapped `Reg` (such as that it is of a particular register class).
macro_rules! newtype_of_reg {
    (
        $newtype_reg:ident,
        $newtype_writable_reg:ident,
        $class:expr
    ) => {
        /// A newtype wrapper around `Reg`.
        #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
        pub struct $newtype_reg(Reg);

        impl PartialEq<Reg> for $newtype_reg {
            fn eq(&self, other: &Reg) -> bool {
                self.0 == *other
            }
        }

        impl From<$newtype_reg> for Reg {
            fn from(r: $newtype_reg) -> Self {
                r.0
            }
        }

        impl TryFrom<Reg> for $newtype_reg {
            type Error = ();
            fn try_from(r: Reg) -> Result<Self, Self::Error> {
                Self::new(r).ok_or(())
            }
        }

        impl $newtype_reg {
            /// Create this newtype from the given register, or return `None` if the register
            /// is not a valid instance of this newtype.
            pub fn new(reg: Reg) -> Option<Self> {
                if reg.class() == $class {
                    Some(Self(reg))
                } else {
                    None
                }
            }

            /// Get this newtype's underlying `Reg`.
            pub fn to_reg(self) -> Reg {
                self.0
            }
        }

        // Convenience impl so that people working with this newtype can use it
        // "just like" a plain `Reg`.
        //
        // NB: We cannot implement `DerefMut` because that would let people do
        // nasty stuff like `*my_xreg.deref_mut() = some_freg`, breaking the
        // invariants that `XReg` provides.
        impl std::ops::Deref for $newtype_reg {
            type Target = Reg;

            fn deref(&self) -> &Reg {
                &self.0
            }
        }

        /// If you know what you're doing, you can explicitly mutably borrow the
        /// underlying `Reg`. Don't make it point to the wrong type of register
        /// please.
        impl AsMut<Reg> for $newtype_reg {
            fn as_mut(&mut self) -> &mut Reg {
                &mut self.0
            }
        }

        /// Writable Reg.
        pub type $newtype_writable_reg = Writable<$newtype_reg>;

        impl From<pulley_interpreter::regs::$newtype_reg> for $newtype_reg {
            fn from(r: pulley_interpreter::regs::$newtype_reg) -> Self {
                Self::new(regalloc2::PReg::new(usize::from(r as u8), $class).into()).unwrap()
            }
        }
        impl From<$newtype_reg> for pulley_interpreter::regs::$newtype_reg {
            fn from(r: $newtype_reg) -> Self {
                Self::new(r.to_real_reg().unwrap().hw_enc()).unwrap()
            }
        }
        impl<'a> From<&'a $newtype_reg> for pulley_interpreter::regs::$newtype_reg {
            fn from(r: &'a $newtype_reg) -> Self {
                Self::new(r.to_real_reg().unwrap().hw_enc()).unwrap()
            }
        }
        impl From<$newtype_writable_reg> for pulley_interpreter::regs::$newtype_reg {
            fn from(r: $newtype_writable_reg) -> Self {
                Self::new(r.to_reg().to_real_reg().unwrap().hw_enc()).unwrap()
            }
        }
        impl<'a> From<&'a $newtype_writable_reg> for pulley_interpreter::regs::$newtype_reg {
            fn from(r: &'a $newtype_writable_reg) -> Self {
                Self::new(r.to_reg().to_real_reg().unwrap().hw_enc()).unwrap()
            }
        }

        impl TryFrom<Writable<Reg>> for $newtype_writable_reg {
            type Error = ();
            fn try_from(r: Writable<Reg>) -> Result<Self, Self::Error> {
                let r = r.to_reg();
                match $newtype_reg::new(r) {
                    Some(r) => Ok(Writable::from_reg(r)),
                    None => Err(()),
                }
            }
        }
    };
}

// Newtypes for register classes.
newtype_of_reg!(XReg, WritableXReg, RegClass::Int);
newtype_of_reg!(FReg, WritableFReg, RegClass::Float);
newtype_of_reg!(VReg, WritableVReg, RegClass::Vector);
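
// A minimal usage sketch of the generated newtypes (illustrative, assuming
// some integer-class `Reg` named `reg` is in scope): `XReg::new(reg)` returns
// `Some(XReg)` while `FReg::new(reg)` returns `None`, and the conversion back
// out is lossless via `xreg.to_reg()` or `Reg::from(xreg)`.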

impl XReg {
    /// Index of the first "special" register; this is also the end of the
    /// range of registers that regalloc is allowed to use.
    pub const SPECIAL_START: u8 = pulley_interpreter::regs::XReg::SPECIAL_START;

    /// Returns whether this is a "special" physical register for pulley.
    pub fn is_special(&self) -> bool {
        match self.as_pulley() {
            Some(reg) => reg.is_special(),
            None => false,
        }
    }

    /// Returns the pulley-typed register, if this is a physical register.
    pub fn as_pulley(&self) -> Option<pulley_interpreter::XReg> {
        let enc = self.to_real_reg()?.hw_enc();
        Some(pulley_interpreter::XReg::new(enc).unwrap())
    }
}
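
// Rough sketch of how these helpers relate (an assumption about intent, not a
// hard contract): for a physical register with hardware encoding `enc`,
// `as_pulley()` yields the `pulley_interpreter::XReg` with that encoding, and
// `is_special()` reports whether the interpreter considers that encoding
// "special" (at or beyond `SPECIAL_START`). Virtual, not-yet-allocated
// registers return `None`/`false`.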

pub use super::super::lower::isle::generated_code::ExtKind;

pub use super::super::lower::isle::generated_code::Amode;

impl Amode {
    /// Add the registers referenced by this Amode to `collector`.
    pub(crate) fn get_operands(&mut self, collector: &mut impl OperandVisitor) {
        match self {
            Amode::RegOffset { base, offset: _ } => collector.reg_use(base),
            // Registers used in these modes aren't allocatable.
            Amode::SpOffset { .. } | Amode::Stack { .. } => {}
        }
    }

    pub(crate) fn get_base_register(&self) -> Option<XReg> {
        match self {
            Amode::RegOffset { base, offset: _ } => Some((*base).into()),
            Amode::SpOffset { .. } | Amode::Stack { .. } => Some(XReg::new(stack_reg()).unwrap()),
        }
    }

    pub(crate) fn get_offset_with_state<P>(&self, state: &EmitState<P>) -> i32
    where
        P: PulleyTargetKind,
    {
        match self {
            Amode::RegOffset { base: _, offset } | Amode::SpOffset { offset } => *offset,
            Amode::Stack { amode } => {
                let offset64 = match amode {
                    StackAMode::IncomingArg(offset, stack_args_size) => {
                        let offset = i64::from(*stack_args_size) - *offset;
                        let frame_layout = state.frame_layout();
                        let sp_offset = frame_layout.tail_args_size
                            + frame_layout.setup_area_size
                            + frame_layout.clobber_size
                            + frame_layout.fixed_frame_storage_size
                            + frame_layout.outgoing_args_size;
                        i64::from(sp_offset) - offset
                    }
                    StackAMode::Slot(offset) => {
                        offset + i64::from(state.frame_layout().outgoing_args_size)
                    }
                    StackAMode::OutgoingArg(offset) => *offset,
                };
                i32::try_from(offset64).unwrap()
            }
        }
    }
}
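
// Worked example for the `IncomingArg` case above (illustrative numbers only):
// with a 16-byte incoming-argument area (`stack_args_size = 16`), an argument
// at `offset = 8` within it, and a frame whose tail-args, setup, clobber,
// fixed-storage, and outgoing-args areas sum to `sp_offset = 64`, the
// SP-relative offset is `64 - (16 - 8) = 56`.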

impl core::fmt::Display for Amode {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self {
            Amode::SpOffset { offset } => {
                if *offset >= 0 {
                    write!(f, "sp+{offset}")
                } else {
                    write!(f, "sp{offset}")
                }
            }
            Amode::RegOffset { base, offset } => {
                let name = reg_name(**base);
                if *offset >= 0 {
                    write!(f, "{name}+{offset}")
                } else {
                    write!(f, "{name}{offset}")
                }
            }
            Amode::Stack { amode } => core::fmt::Debug::fmt(amode, f),
        }
    }
}
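
// Example renderings (the register name depends on what `reg_name` prints):
// `SpOffset { offset: 16 }` displays as `sp+16`, and a `RegOffset` with
// `offset: -8` displays as something like `x3-8`; negative offsets rely on
// the sign carried by `{offset}` itself.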

impl From<StackAMode> for Amode {
    fn from(amode: StackAMode) -> Self {
        Amode::Stack { amode }
    }
}

/// The size of an operand or operation.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OperandSize {
    /// 32 bits.
    Size32,
    /// 64 bits.
    Size64,
}

pub use crate::isa::pulley_shared::lower::isle::generated_code::Cond;

impl Cond {
    /// Collect register operands within `collector` for register allocation.
    pub fn get_operands(&mut self, collector: &mut impl OperandVisitor) {
        match self {
            Cond::If32 { reg } | Cond::IfNot32 { reg } => collector.reg_use(reg),

            Cond::IfXeq32 { src1, src2 }
            | Cond::IfXneq32 { src1, src2 }
            | Cond::IfXslt32 { src1, src2 }
            | Cond::IfXslteq32 { src1, src2 }
            | Cond::IfXult32 { src1, src2 }
            | Cond::IfXulteq32 { src1, src2 }
            | Cond::IfXeq64 { src1, src2 }
            | Cond::IfXneq64 { src1, src2 }
            | Cond::IfXslt64 { src1, src2 }
            | Cond::IfXslteq64 { src1, src2 }
            | Cond::IfXult64 { src1, src2 }
            | Cond::IfXulteq64 { src1, src2 } => {
                collector.reg_use(src1);
                collector.reg_use(src2);
            }

            Cond::IfXeq32I32 { src1, src2 }
            | Cond::IfXneq32I32 { src1, src2 }
            | Cond::IfXslt32I32 { src1, src2 }
            | Cond::IfXslteq32I32 { src1, src2 }
            | Cond::IfXsgt32I32 { src1, src2 }
            | Cond::IfXsgteq32I32 { src1, src2 }
            | Cond::IfXeq64I32 { src1, src2 }
            | Cond::IfXneq64I32 { src1, src2 }
            | Cond::IfXslt64I32 { src1, src2 }
            | Cond::IfXslteq64I32 { src1, src2 }
            | Cond::IfXsgt64I32 { src1, src2 }
            | Cond::IfXsgteq64I32 { src1, src2 } => {
                collector.reg_use(src1);
                let _: &mut i32 = src2;
            }

            Cond::IfXult32I32 { src1, src2 }
            | Cond::IfXulteq32I32 { src1, src2 }
            | Cond::IfXugt32I32 { src1, src2 }
            | Cond::IfXugteq32I32 { src1, src2 }
            | Cond::IfXult64I32 { src1, src2 }
            | Cond::IfXulteq64I32 { src1, src2 }
            | Cond::IfXugt64I32 { src1, src2 }
            | Cond::IfXugteq64I32 { src1, src2 } => {
                collector.reg_use(src1);
                let _: &mut u32 = src2;
            }
        }
    }

    /// Encode this condition as a branch into `sink`.
    ///
    /// Note that the branch offset is encoded as 0; it's assumed that
    /// `MachBuffer` will come back later and patch in the real offset.
    pub fn encode(&self, sink: &mut impl Extend<u8>) {
        match *self {
            Cond::If32 { reg } => encode::br_if32(sink, reg, 0),
            Cond::IfNot32 { reg } => encode::br_if_not32(sink, reg, 0),
            Cond::IfXeq32 { src1, src2 } => encode::br_if_xeq32(sink, src1, src2, 0),
            Cond::IfXneq32 { src1, src2 } => encode::br_if_xneq32(sink, src1, src2, 0),
            Cond::IfXslt32 { src1, src2 } => encode::br_if_xslt32(sink, src1, src2, 0),
            Cond::IfXslteq32 { src1, src2 } => encode::br_if_xslteq32(sink, src1, src2, 0),
            Cond::IfXult32 { src1, src2 } => encode::br_if_xult32(sink, src1, src2, 0),
            Cond::IfXulteq32 { src1, src2 } => encode::br_if_xulteq32(sink, src1, src2, 0),
            Cond::IfXeq64 { src1, src2 } => encode::br_if_xeq64(sink, src1, src2, 0),
            Cond::IfXneq64 { src1, src2 } => encode::br_if_xneq64(sink, src1, src2, 0),
            Cond::IfXslt64 { src1, src2 } => encode::br_if_xslt64(sink, src1, src2, 0),
            Cond::IfXslteq64 { src1, src2 } => encode::br_if_xslteq64(sink, src1, src2, 0),
            Cond::IfXult64 { src1, src2 } => encode::br_if_xult64(sink, src1, src2, 0),
            Cond::IfXulteq64 { src1, src2 } => encode::br_if_xulteq64(sink, src1, src2, 0),

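            // For the immediate comparisons below, immediates that fit in
            // eight bits use the narrower `*_i8`/`*_u8` encodings to keep the
            // bytecode compact; otherwise the full 32-bit forms are emitted.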
            Cond::IfXeq32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xeq32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xeq32_i32(sink, src1, src2, 0),
            },
            Cond::IfXneq32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xneq32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xneq32_i32(sink, src1, src2, 0),
            },
            Cond::IfXslt32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xslt32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xslt32_i32(sink, src1, src2, 0),
            },
            Cond::IfXslteq32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xslteq32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xslteq32_i32(sink, src1, src2, 0),
            },
            Cond::IfXsgt32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xsgt32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xsgt32_i32(sink, src1, src2, 0),
            },
            Cond::IfXsgteq32I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xsgteq32_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xsgteq32_i32(sink, src1, src2, 0),
            },
            Cond::IfXult32I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xult32_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xult32_u32(sink, src1, src2, 0),
            },
            Cond::IfXulteq32I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xulteq32_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xulteq32_u32(sink, src1, src2, 0),
            },
            Cond::IfXugt32I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xugt32_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xugt32_u32(sink, src1, src2, 0),
            },
            Cond::IfXugteq32I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xugteq32_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xugteq32_u32(sink, src1, src2, 0),
            },

            Cond::IfXeq64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xeq64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xeq64_i32(sink, src1, src2, 0),
            },
            Cond::IfXneq64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xneq64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xneq64_i32(sink, src1, src2, 0),
            },
            Cond::IfXslt64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xslt64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xslt64_i32(sink, src1, src2, 0),
            },
            Cond::IfXslteq64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xslteq64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xslteq64_i32(sink, src1, src2, 0),
            },
            Cond::IfXsgt64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xsgt64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xsgt64_i32(sink, src1, src2, 0),
            },
            Cond::IfXsgteq64I32 { src1, src2 } => match i8::try_from(src2) {
                Ok(src2) => encode::br_if_xsgteq64_i8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xsgteq64_i32(sink, src1, src2, 0),
            },
            Cond::IfXult64I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xult64_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xult64_u32(sink, src1, src2, 0),
            },
            Cond::IfXulteq64I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xulteq64_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xulteq64_u32(sink, src1, src2, 0),
            },
            Cond::IfXugt64I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xugt64_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xugt64_u32(sink, src1, src2, 0),
            },
            Cond::IfXugteq64I32 { src1, src2 } => match u8::try_from(src2) {
                Ok(src2) => encode::br_if_xugteq64_u8(sink, src1, src2, 0),
                Err(_) => encode::br_if_xugteq64_u32(sink, src1, src2, 0),
            },
        }
    }

    /// Inverts this conditional.
    pub fn invert(&self) -> Cond {
        match *self {
            Cond::If32 { reg } => Cond::IfNot32 { reg },
            Cond::IfNot32 { reg } => Cond::If32 { reg },
            Cond::IfXeq32 { src1, src2 } => Cond::IfXneq32 { src1, src2 },
            Cond::IfXneq32 { src1, src2 } => Cond::IfXeq32 { src1, src2 },
            Cond::IfXeq64 { src1, src2 } => Cond::IfXneq64 { src1, src2 },
            Cond::IfXneq64 { src1, src2 } => Cond::IfXeq64 { src1, src2 },

            // Note that for the comparisons below the inversion both changes
            // the condition and swaps the two operands.
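            // For example (a sketch of the reasoning, not a behavior change):
            // `!(src1 < src2)` is `src1 >= src2`, and since there's no
            // greater-or-equal condition here it's written as the equivalent
            // `src2 <= src1`.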
            Cond::IfXslt32 { src1, src2 } => Cond::IfXslteq32 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXslteq32 { src1, src2 } => Cond::IfXslt32 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXult32 { src1, src2 } => Cond::IfXulteq32 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXulteq32 { src1, src2 } => Cond::IfXult32 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXslt64 { src1, src2 } => Cond::IfXslteq64 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXslteq64 { src1, src2 } => Cond::IfXslt64 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXult64 { src1, src2 } => Cond::IfXulteq64 {
                src1: src2,
                src2: src1,
            },
            Cond::IfXulteq64 { src1, src2 } => Cond::IfXult64 {
                src1: src2,
                src2: src1,
            },

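            // The immediate forms below can't swap their operands (the
            // right-hand side is a constant), so inversion flips to the
            // opposite condition instead, e.g. `slt` becomes `sgteq`.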
            Cond::IfXeq32I32 { src1, src2 } => Cond::IfXneq32I32 { src1, src2 },
            Cond::IfXneq32I32 { src1, src2 } => Cond::IfXeq32I32 { src1, src2 },
            Cond::IfXslt32I32 { src1, src2 } => Cond::IfXsgteq32I32 { src1, src2 },
            Cond::IfXslteq32I32 { src1, src2 } => Cond::IfXsgt32I32 { src1, src2 },
            Cond::IfXult32I32 { src1, src2 } => Cond::IfXugteq32I32 { src1, src2 },
            Cond::IfXulteq32I32 { src1, src2 } => Cond::IfXugt32I32 { src1, src2 },
            Cond::IfXsgt32I32 { src1, src2 } => Cond::IfXslteq32I32 { src1, src2 },
            Cond::IfXsgteq32I32 { src1, src2 } => Cond::IfXslt32I32 { src1, src2 },
            Cond::IfXugt32I32 { src1, src2 } => Cond::IfXulteq32I32 { src1, src2 },
            Cond::IfXugteq32I32 { src1, src2 } => Cond::IfXult32I32 { src1, src2 },

            Cond::IfXeq64I32 { src1, src2 } => Cond::IfXneq64I32 { src1, src2 },
            Cond::IfXneq64I32 { src1, src2 } => Cond::IfXeq64I32 { src1, src2 },
            Cond::IfXslt64I32 { src1, src2 } => Cond::IfXsgteq64I32 { src1, src2 },
            Cond::IfXslteq64I32 { src1, src2 } => Cond::IfXsgt64I32 { src1, src2 },
            Cond::IfXult64I32 { src1, src2 } => Cond::IfXugteq64I32 { src1, src2 },
            Cond::IfXulteq64I32 { src1, src2 } => Cond::IfXugt64I32 { src1, src2 },
            Cond::IfXsgt64I32 { src1, src2 } => Cond::IfXslteq64I32 { src1, src2 },
            Cond::IfXsgteq64I32 { src1, src2 } => Cond::IfXslt64I32 { src1, src2 },
            Cond::IfXugt64I32 { src1, src2 } => Cond::IfXulteq64I32 { src1, src2 },
            Cond::IfXugteq64I32 { src1, src2 } => Cond::IfXult64I32 { src1, src2 },
        }
    }
}

impl fmt::Display for Cond {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Cond::If32 { reg } => write!(f, "if32 {}", reg_name(**reg)),
            Cond::IfNot32 { reg } => write!(f, "if_not32 {}", reg_name(**reg)),
            Cond::IfXeq32 { src1, src2 } => {
                write!(f, "if_xeq32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXneq32 { src1, src2 } => {
                write!(f, "if_xneq32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXslt32 { src1, src2 } => {
                write!(f, "if_xslt32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXslteq32 { src1, src2 } => {
                write!(f, "if_xslteq32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXult32 { src1, src2 } => {
                write!(f, "if_xult32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXulteq32 { src1, src2 } => {
                write!(f, "if_xulteq32 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXeq64 { src1, src2 } => {
                write!(f, "if_xeq64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXneq64 { src1, src2 } => {
                write!(f, "if_xneq64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXslt64 { src1, src2 } => {
                write!(f, "if_xslt64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXslteq64 { src1, src2 } => {
                write!(f, "if_xslteq64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXult64 { src1, src2 } => {
                write!(f, "if_xult64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXulteq64 { src1, src2 } => {
                write!(f, "if_xulteq64 {}, {}", reg_name(**src1), reg_name(**src2))
            }
            Cond::IfXeq32I32 { src1, src2 } => {
                write!(f, "if_xeq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXneq32I32 { src1, src2 } => {
                write!(f, "if_xneq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXslt32I32 { src1, src2 } => {
                write!(f, "if_xslt32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXslteq32I32 { src1, src2 } => {
                write!(f, "if_xslteq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXsgt32I32 { src1, src2 } => {
                write!(f, "if_xsgt32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXsgteq32I32 { src1, src2 } => {
                write!(f, "if_xsgteq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXult32I32 { src1, src2 } => {
                write!(f, "if_xult32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXulteq32I32 { src1, src2 } => {
                write!(f, "if_xulteq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXugt32I32 { src1, src2 } => {
                write!(f, "if_xugt32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXugteq32I32 { src1, src2 } => {
                write!(f, "if_xugteq32_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXeq64I32 { src1, src2 } => {
                write!(f, "if_xeq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXneq64I32 { src1, src2 } => {
                write!(f, "if_xneq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXslt64I32 { src1, src2 } => {
                write!(f, "if_xslt64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXslteq64I32 { src1, src2 } => {
                write!(f, "if_xslteq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXsgt64I32 { src1, src2 } => {
                write!(f, "if_xsgt64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXsgteq64I32 { src1, src2 } => {
                write!(f, "if_xsgteq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXult64I32 { src1, src2 } => {
                write!(f, "if_xult64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXulteq64I32 { src1, src2 } => {
                write!(f, "if_xulteq64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXugt64I32 { src1, src2 } => {
                write!(f, "if_xugt64_i32 {}, {src2}", reg_name(**src1))
            }
            Cond::IfXugteq64I32 { src1, src2 } => {
                write!(f, "if_xugteq64_i32 {}, {src2}", reg_name(**src1))
            }
        }
    }
}

/// Payload of `CallInfo` for call instructions
#[derive(Clone, Debug)]
pub struct PulleyCall {
    /// The external name that's being called, or the Cranelift-generated
    /// function that's being invoked.
    pub name: ExternalName,
    /// Arguments tracked in this call invocation which aren't assigned fixed
    /// registers. This tracks up to 4 registers and all remaining registers
    /// will be present and tracked in `CallInfo<T>` fields.
    pub args: SmallVec<[XReg; 4]>,
}