//! This module exposes the machine-specific backend definition pieces.
//!
//! The MachInst infrastructure is the compiler backend, from CLIF
//! (ir::Function) to machine code. The purpose of this infrastructure is, at a
//! high level, to do instruction selection/lowering (to machine instructions),
//! register allocation, and then perform all the fixups to branches, constant
//! data references, etc., needed to actually generate machine code.
//!
//! The container for machine instructions, at various stages of construction,
//! is the `VCode` struct. We refer to a sequence of machine instructions organized
//! into basic blocks as "vcode". This is short for "virtual-register code".
//!
//! The compilation pipeline, from an `ir::Function` (already optimized as much as
//! you like by machine-independent optimization passes) onward, is as follows.
//!
//! ```plain
//!
//! ir::Function (SSA IR, machine-independent opcodes)
//! |
//! | [lower]
//! |
//! VCode<arch_backend::Inst> (machine instructions:
//! | - mostly virtual registers.
//! | - cond branches in two-target form.
//! | - branch targets are block indices.
//! | - in-memory constants held by insns,
//! | with unknown offsets.
//! | - critical edges (actually all edges)
//! | are split.)
//! |
//! | [regalloc --> `regalloc2::Output`; VCode is unchanged]
//! |
//! | [binary emission via MachBuffer]
//! |
//! Vec<u8> (machine code:
//! | - two-dest branches resolved via
//! | streaming branch resolution/simplification.
//! | - regalloc `Allocation` results used directly
//! | by instruction emission code.
//! | - prologue and epilogue(s) built and emitted
//! | directly during emission.
//! | - nominal-SP-relative offsets resolved
//! | by tracking EmitState.)
//!
//! ```
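//!
//! As a purely schematic sketch of the same stages (the function names
//! `lower`, `regalloc`, and `emit` are illustrative placeholders, not the
//! actual entry points of this crate):
//!
//! ```ignore
//! let vcode: VCode<Inst> = lower(clif_function);     // instruction selection
//! let allocs: regalloc2::Output = regalloc(&vcode);  // VCode itself is unchanged
//! let code: Vec<u8> = emit(&vcode, &allocs);         // MachBuffer-based emission
//! ```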
use crate::binemit::{Addend, CodeInfo, CodeOffset, Reloc, StackMap};
use crate::ir::function::FunctionParameters;
use crate::ir::{DynamicStackSlot, RelSourceLoc, StackSlot, Type};
use crate::isa::FunctionAlignment;
use crate::result::CodegenResult;
use crate::settings;
use crate::settings::Flags;
use crate::value_label::ValueLabelsRanges;
use alloc::vec::Vec;
use core::fmt::Debug;
use cranelift_control::ControlPlane;
use cranelift_entity::PrimaryMap;
use regalloc2::{Allocation, VReg};
use smallvec::{smallvec, SmallVec};
use std::string::String;
#[cfg(feature = "enable-serde")]
use serde::{Deserialize, Serialize};
#[macro_use]
pub mod isle;
pub mod lower;
pub use lower::*;
pub mod vcode;
pub use vcode::*;
pub mod compile;
pub use compile::*;
pub mod blockorder;
pub use blockorder::*;
pub mod abi;
pub use abi::*;
pub mod buffer;
pub use buffer::*;
pub mod helpers;
pub use helpers::*;
pub mod inst_common;
pub use inst_common::*;
pub mod valueregs;
pub use valueregs::*;
pub mod reg;
pub use reg::*;
/// A machine instruction.
pub trait MachInst: Clone + Debug {
/// The ABI machine spec for this `MachInst`.
type ABIMachineSpec: ABIMachineSpec<I = Self>;
/// Return the registers referenced by this machine instruction along with
/// the modes of reference (use, def, modify).
fn get_operands<F: Fn(VReg) -> VReg>(&self, collector: &mut OperandCollector<'_, F>);
/// If this is a simple move, return the (destination, source) pair of registers.
fn is_move(&self) -> Option<(Writable<Reg>, Reg)>;
/// Is this a terminator (branch or ret)? If so, return its type
/// (ret/uncond/cond) and target if applicable.
fn is_term(&self) -> MachTerminator;
/// Is this an unconditional trap?
fn is_trap(&self) -> bool;
/// Is this an "args" pseudoinst?
fn is_args(&self) -> bool;
/// Should this instruction be included in the clobber-set?
fn is_included_in_clobbers(&self) -> bool;
/// Generate a move.
fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Self;
/// Generate a dummy instruction that will keep a value alive but
/// has no other purpose.
fn gen_dummy_use(reg: Reg) -> Self;
/// Determine register class(es) to store the given Cranelift type, and the
/// Cranelift type actually stored in the underlying register(s). May return
/// an error if the type isn't supported by this backend.
///
/// If the type requires multiple registers, then the list of registers is
/// returned in little-endian order.
///
/// Note that the type actually stored in the register(s) may differ in the
/// case that a value is split across registers: for example, on a 32-bit
/// target, an I64 may be stored in two registers, each of which holds an
/// I32. The actually-stored types are used only to inform the backend when
/// generating spills and reloads for individual registers.
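///
/// As an illustrative sketch for a hypothetical 32-bit backend (the class and
/// type choices below are invented for the example and assume the usual
/// `crate::ir::types` and `regalloc2::RegClass` imports):
///
/// ```ignore
/// fn rc_for_type(ty: Type) -> CodegenResult<(&'static [RegClass], &'static [Type])> {
///     match ty {
///         types::I32 => Ok((&[RegClass::Int], &[types::I32])),
///         // A 64-bit integer is split across two 32-bit integer registers;
///         // the low half comes first (little-endian order).
///         types::I64 => Ok((&[RegClass::Int, RegClass::Int], &[types::I32, types::I32])),
///         types::F64 => Ok((&[RegClass::Float], &[types::F64])),
///         _ => Err(CodegenError::Unsupported(format!("unsupported type: {ty}"))),
///     }
/// }
/// ```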
fn rc_for_type(ty: Type) -> CodegenResult<(&'static [RegClass], &'static [Type])>;
/// Get an appropriate type that can fully hold a value in a given
/// register class. This may not be the only type that maps to
/// that class, but when used with `gen_move()` or the ABI trait's
/// load/spill constructors, it should produce instruction(s) that
/// move the entire register contents.
fn canonical_type_for_rc(rc: RegClass) -> Type;
/// Generate a jump to another target. Used during lowering of
/// control flow.
fn gen_jump(target: MachLabel) -> Self;
/// Generate an instruction that loads a 64-bit integer immediate into a
/// register. Used by the control plane to generate random instructions.
fn gen_imm_u64(_value: u64, _dst: Writable<Reg>) -> Option<Self> {
None
}
/// Generate an instruction that loads a 64-bit floating-point immediate into
/// a register. Used by the control plane to generate random instructions. The
/// `tmp` register may be used by architectures that cannot write immediate
/// values directly to floating-point registers.
fn gen_imm_f64(_value: f64, _tmp: Writable<Reg>, _dst: Writable<Reg>) -> SmallVec<[Self; 2]> {
SmallVec::new()
}
/// Generate a NOP. The `preferred_size` parameter allows the caller to
/// request a NOP of that size, or as close to it as possible. The machine
/// backend may return a NOP whose binary encoding is smaller than the
/// preferred size, but must not return a NOP that is larger. However,
/// the instruction must have a nonzero size if `preferred_size` is nonzero.
fn gen_nop(preferred_size: usize) -> Self;
/// Align a basic block offset (from start of function). By default, no
/// alignment occurs.
fn align_basic_block(offset: CodeOffset) -> CodeOffset {
offset
}
/// What is the worst-case instruction size emitted by this instruction type?
fn worst_case_size() -> CodeOffset;
/// What is the register class used for reference types (GC-observable pointers)? Can
/// be dependent on compilation flags.
fn ref_type_regclass(_flags: &Flags) -> RegClass;
/// Is this a safepoint?
fn is_safepoint(&self) -> bool;
/// Generate an instruction that must appear at the beginning of a basic
/// block, if any. Note that the return value must not be subject to
/// register allocation.
fn gen_block_start(
_is_indirect_branch_target: bool,
_is_forward_edge_cfi_enabled: bool,
) -> Option<Self> {
None
}
/// Returns a description of the alignment required for functions for this
/// architecture.
fn function_alignment() -> FunctionAlignment;
/// A label-use kind: a type that describes the kinds of label references that
/// can occur in an instruction.
type LabelUse: MachInstLabelUse;
/// Byte representation of a trap opcode which is inserted by `MachBuffer`
/// during its `defer_trap` method.
const TRAP_OPCODE: &'static [u8];
}
/// A descriptor of a label reference (use) in an instruction set.
pub trait MachInstLabelUse: Clone + Copy + Debug + Eq {
/// Required alignment for any veneer. Usually the required instruction
/// alignment (e.g., 4 for a RISC with 32-bit instructions, or 1 for x86).
const ALIGN: CodeOffset;
/// What is the maximum PC-relative range (positive)? E.g., if `1024`, a
/// label-reference fixup at offset `x` is valid if the label resolves to an
/// offset no greater than `x + 1024`.
fn max_pos_range(self) -> CodeOffset;
/// What is the maximum PC-relative range (negative)? This is the absolute
/// value; i.e., if `1024`, then a label-reference fixup at offset `x` is
/// valid if the label resolves to an offset no less than `x - 1024`.
fn max_neg_range(self) -> CodeOffset;
/// What is the size of the code-buffer slice that this label-use needs in
/// order to patch in the label's value?
fn patch_size(self) -> CodeOffset;
/// Perform a code-patch, given the offset into the buffer of this label use
/// and the offset into the buffer of the label's definition.
/// It is guaranteed that, with `delta = label_offset - use_offset`, we will
/// have `delta >= -self.max_neg_range()` and `delta <=
/// self.max_pos_range()`.
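///
/// As an illustrative sketch, a hypothetical label-use kind whose instruction
/// encodes a signed 19-bit word offset (the field layout here is invented for
/// the example):
///
/// ```ignore
/// fn patch(self, buffer: &mut [u8], use_offset: CodeOffset, label_offset: CodeOffset) {
///     // Signed byte distance from the use to the label.
///     let delta = label_offset as i64 - use_offset as i64;
///     // Convert to a word offset and mask to the 19-bit field.
///     let imm19 = ((delta >> 2) as u32) & 0x7ffff;
///     let insn = u32::from_le_bytes(buffer[0..4].try_into().unwrap());
///     buffer[0..4].copy_from_slice(&(insn | (imm19 << 5)).to_le_bytes());
/// }
/// ```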
fn patch(self, buffer: &mut [u8], use_offset: CodeOffset, label_offset: CodeOffset);
/// Can the label-use be patched to a veneer that supports a longer range?
/// Usually valid for jumps (a short-range jump can jump to a longer-range
/// jump), but not for e.g. constant pool references, because the constant
/// load would require different code (one more level of indirection).
fn supports_veneer(self) -> bool;
/// How many bytes are needed for a veneer?
fn veneer_size(self) -> CodeOffset;
/// Generate a veneer. The given code-buffer slice is `self.veneer_size()`
/// bytes long at offset `veneer_offset` in the buffer. The original
/// label-use will be patched to refer to this veneer's offset. A new
/// (offset, LabelUse) is returned that allows the veneer to use the actual
/// label. For veneers to work properly, it is expected that the new veneer
/// has a larger range; on most platforms this probably means either a
/// "long-range jump" (e.g., on ARM, the 26-bit form), or if already at that
/// stage, a jump that supports a full 32-bit range, for example.
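///
/// As an illustrative sketch, a veneer consisting of a single invented
/// "long jump" encoding whose target field is filled in later through the
/// returned label-use (`Self::LongJump26` and the opcode are hypothetical):
///
/// ```ignore
/// fn generate_veneer(self, buffer: &mut [u8], veneer_offset: CodeOffset) -> (CodeOffset, Self) {
///     // Emit the long-range jump with a zeroed target field; the returned
///     // label-use patches it once the label's offset is known.
///     buffer[0..4].copy_from_slice(&0x1400_0000u32.to_le_bytes());
///     (veneer_offset, Self::LongJump26)
/// }
/// ```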
fn generate_veneer(self, buffer: &mut [u8], veneer_offset: CodeOffset) -> (CodeOffset, Self);
/// Returns the corresponding label-use for the relocation specified.
///
/// This returns `None` if the relocation doesn't have a corresponding
/// representation for the target architecture.
fn from_reloc(reloc: Reloc, addend: Addend) -> Option<Self>;
}
/// Describes a block terminator (not including calls) in the vcode, when its branches
/// have not yet been finalized (so a branch may have two targets).
///
/// Actual targets are not included: the single-source-of-truth for
/// those is the VCode itself, which holds, for each block, successors
/// and outgoing branch args per successor.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum MachTerminator {
/// Not a terminator.
None,
/// A return instruction.
Ret,
/// A tail call.
RetCall,
/// An unconditional branch to another block.
Uncond,
/// A conditional branch to one of two other blocks.
Cond,
/// An indirect branch with known possible targets.
Indirect,
}
/// A trait describing the ability to encode a MachInst into binary machine code.
pub trait MachInstEmit: MachInst {
/// Persistent state carried across `emit` invocations.
type State: MachInstEmitState<Self>;
/// Constant information used in `emit` invocations.
type Info;
/// Emit the instruction.
fn emit(
&self,
allocs: &[Allocation],
code: &mut MachBuffer<Self>,
info: &Self::Info,
state: &mut Self::State,
);
/// Pretty-print the instruction.
fn pretty_print_inst(&self, allocs: &[Allocation], state: &mut Self::State) -> String;
}
/// A trait describing the emission state carried between MachInsts when
/// emitting a function body.
pub trait MachInstEmitState<I: VCodeInst>: Default + Clone + Debug {
/// Create a new emission state given the ABI object.
fn new(abi: &Callee<I::ABIMachineSpec>, ctrl_plane: ControlPlane) -> Self;
/// Update the emission state before emitting an instruction that is a
/// safepoint.
fn pre_safepoint(&mut self, _stack_map: StackMap) {}
/// Update the emission state to indicate instructions are associated with a
/// particular `RelSourceLoc`.
fn pre_sourceloc(&mut self, _srcloc: RelSourceLoc) {}
/// The emission state holds ownership of a control plane, so it doesn't
/// have to be passed around explicitly too much. `ctrl_plane_mut` may
/// be used if temporary access to the control plane is needed by some
/// other function that doesn't have access to the emission state.
fn ctrl_plane_mut(&mut self) -> &mut ControlPlane;
/// Used to recover the control plane once the emission state is no longer
/// needed.
fn take_ctrl_plane(self) -> ControlPlane;
/// A hook that triggers when first emitting a new block.
/// It is guaranteed to be called before any instructions are emitted.
fn on_new_block(&mut self) {}
}
/// The result of a `MachBackend::compile_function()` call. Contains machine
/// code (as bytes) and a disassembly, if requested.
#[derive(PartialEq, Debug, Clone)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct CompiledCodeBase<T: CompilePhase> {
/// Machine code.
pub buffer: MachBufferFinalized<T>,
/// Size of stack frame, in bytes.
pub frame_size: u32,
/// Disassembly, if requested.
pub vcode: Option<String>,
/// Debug info: value labels to registers/stackslots at code offsets.
pub value_labels_ranges: ValueLabelsRanges,
/// Debug info: sized stack slots to stack-pointer offsets.
pub sized_stackslot_offsets: PrimaryMap<StackSlot, u32>,
/// Debug info: dynamic stack slots to stack-pointer offsets.
pub dynamic_stackslot_offsets: PrimaryMap<DynamicStackSlot, u32>,
/// Basic-block layout info: block start offsets.
///
/// This info is generated only if the `machine_code_cfg_info`
/// flag is set.
pub bb_starts: Vec<CodeOffset>,
/// Basic-block layout info: block edges. Each edge is `(from,
/// to)`, where `from` and `to` are basic-block start offsets of
/// the respective blocks.
///
/// This info is generated only if the `machine_code_cfg_info`
/// flag is set.
pub bb_edges: Vec<(CodeOffset, CodeOffset)>,
}
impl CompiledCodeStencil {
/// Apply function parameters to finalize a stencil into its final form.
pub fn apply_params(self, params: &FunctionParameters) -> CompiledCode {
CompiledCode {
buffer: self.buffer.apply_base_srcloc(params.base_srcloc()),
frame_size: self.frame_size,
vcode: self.vcode,
value_labels_ranges: self.value_labels_ranges,
sized_stackslot_offsets: self.sized_stackslot_offsets,
dynamic_stackslot_offsets: self.dynamic_stackslot_offsets,
bb_starts: self.bb_starts,
bb_edges: self.bb_edges,
}
}
}
impl<T: CompilePhase> CompiledCodeBase<T> {
/// Get a `CodeInfo` describing section sizes from this compilation result.
pub fn code_info(&self) -> CodeInfo {
CodeInfo {
total_size: self.buffer.total_size(),
}
}
/// Returns a reference to the machine code generated for this function compilation.
pub fn code_buffer(&self) -> &[u8] {
self.buffer.data()
}
/// Get the disassembly of the buffer, using the given capstone context.
#[cfg(feature = "disas")]
pub fn disassemble(
&self,
params: Option<&crate::ir::function::FunctionParameters>,
cs: &capstone::Capstone,
) -> Result<String, anyhow::Error> {
use std::fmt::Write;
let mut buf = String::new();
let relocs = self.buffer.relocs();
let traps = self.buffer.traps();
// Normalize the block starts to include an initial block of offset 0.
let mut block_starts = Vec::new();
if self.bb_starts.first().copied() != Some(0) {
block_starts.push(0);
}
block_starts.extend_from_slice(&self.bb_starts);
block_starts.push(self.buffer.data().len() as u32);
// Iterate over block regions, to ensure that we always produce block labels
for (n, (&start, &end)) in block_starts
.iter()
.zip(block_starts.iter().skip(1))
.enumerate()
{
writeln!(buf, "block{}: ; offset 0x{:x}", n, start)?;
let buffer = &self.buffer.data()[start as usize..end as usize];
let insns = cs.disasm_all(buffer, start as u64).map_err(map_caperr)?;
for i in insns.iter() {
write!(buf, " ")?;
let op_str = i.op_str().unwrap_or("");
if let Some(s) = i.mnemonic() {
write!(buf, "{}", s)?;
if !op_str.is_empty() {
write!(buf, " ")?;
}
}
write!(buf, "{}", op_str)?;
let end = i.address() + i.bytes().len() as u64;
let contains = |off| i.address() <= off && off < end;
if let Some(reloc) = relocs.iter().find(|reloc| contains(reloc.offset as u64)) {
write!(
buf,
" ; reloc_external {} {} {}",
reloc.kind,
reloc.name.display(params),
reloc.addend,
)?;
}
if let Some(trap) = traps.iter().find(|trap| contains(trap.offset as u64)) {
write!(buf, " ; trap: {}", trap.code)?;
}
writeln!(buf)?;
}
}
return Ok(buf);
fn map_caperr(err: capstone::Error) -> anyhow::Error {
anyhow::format_err!("{}", err)
}
}
}
/// Result of compiling a `FunctionStencil`, before applying `FunctionParameters` onto it.
///
/// Only used internally, in a transient manner, for the incremental compilation cache.
pub type CompiledCodeStencil = CompiledCodeBase<Stencil>;
/// `CompiledCode` in its final form (i.e. after `FunctionParameters` have been applied), ready for
/// consumption.
pub type CompiledCode = CompiledCodeBase<Final>;
impl CompiledCode {
/// If available, return information about the code layout in the
/// final machine code: the offsets (in bytes) of each basic-block
/// start, and all basic-block edges.
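///
/// For example (a sketch; `compiled` is assumed to be a `CompiledCode`
/// produced with the `machine_code_cfg_info` flag enabled):
///
/// ```ignore
/// let (starts, edges) = compiled.get_code_bb_layout();
/// for (from, to) in edges {
///     println!("block at {from:#x} -> block at {to:#x}");
/// }
/// ```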
pub fn get_code_bb_layout(&self) -> (Vec<usize>, Vec<(usize, usize)>) {
(
self.bb_starts.iter().map(|&off| off as usize).collect(),
self.bb_edges
.iter()
.map(|&(from, to)| (from as usize, to as usize))
.collect(),
)
}
/// Creates unwind information for the function.
///
/// Returns `None` if the function has no unwind information.
#[cfg(feature = "unwind")]
pub fn create_unwind_info(
&self,
isa: &dyn crate::isa::TargetIsa,
) -> CodegenResult<Option<crate::isa::unwind::UnwindInfo>> {
let unwind_info_kind = match isa.triple().operating_system {
target_lexicon::OperatingSystem::Windows => UnwindInfoKind::Windows,
_ => UnwindInfoKind::SystemV,
};
isa.emit_unwind_info(self, unwind_info_kind)
}
}
/// An object that can be used to create the text section of an executable.
///
/// This primarily handles resolving relative relocations at
/// text-section-assembly time rather than at load/link time. This
/// architecture-specific logic is sort of like a linker, but only for one
/// object file at a time.
pub trait TextSectionBuilder {
/// Appends `data` to the text section with the `align` specified.
///
/// If `labeled` is `true`, then this also binds the appended data to the
/// `n`th label, where `n` counts how many times this method has previously
/// been called with `labeled: true`. The label can then be passed as the
/// `target` argument to `resolve_reloc`.
///
/// This function returns the offset at which the data was placed in the
/// text section.
fn append(
&mut self,
labeled: bool,
data: &[u8],
align: u32,
ctrl_plane: &mut ControlPlane,
) -> u64;
/// Attempts to resolve a relocation for this function.
///
/// The `offset` is the offset of the relocation, within the text section.
/// The `reloc` is the kind of relocation.
/// The `addend` is the value to add to the relocation.
/// The `target` is the labeled function that is the target of this
/// relocation.
///
/// Labeled functions are created with the `append` function above by
/// setting the `labeled` parameter to `true`.
///
/// If this builder does not know how to handle `reloc` then this function
/// will return `false`. Otherwise this function will return `true` and this
/// relocation will be resolved in the final bytes returned by `finish`.
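///
/// An illustrative sketch of how `append` and `resolve_reloc` combine (the
/// builder, byte buffers, offsets, and relocation value are placeholders):
///
/// ```ignore
/// // The first `labeled: true` append binds label 0, the second label 1, etc.
/// let f0 = builder.append(true, &f0_bytes, 16, ctrl_plane);
/// let f1 = builder.append(true, &f1_bytes, 16, ctrl_plane);
/// // Resolve a call site inside `f1` so that it targets label 0 (i.e. `f0`).
/// if !builder.resolve_reloc(f1 + call_site_offset, reloc, 0, 0) {
///     // This builder cannot handle `reloc`; record it for the linker instead.
/// }
/// ```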
fn resolve_reloc(&mut self, offset: u64, reloc: Reloc, addend: Addend, target: usize) -> bool;
/// A debug-only option which forces the use of veneers when resolving label
/// references, even when not otherwise required.
fn force_veneers(&mut self);
/// Completes this text section, filling out any final details, and returns
/// the bytes of the text section.
fn finish(&mut self, ctrl_plane: &mut ControlPlane) -> Vec<u8>;
}
/// Expected unwind info type.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub enum UnwindInfoKind {
/// No unwind info.
None,
/// SystemV CIE/FDE unwind info.
#[cfg(feature = "unwind")]
SystemV,
/// Windows x64 unwind info.
#[cfg(feature = "unwind")]
Windows,
}