pub use Integer::*;
pub use Primitive::*;
use crate::spec::Target;
use std::convert::{TryFrom, TryInto};
use std::num::NonZeroUsize;
use std::ops::{Add, AddAssign, Deref, Mul, Range, RangeInclusive, Sub};
use rustc_index::vec::{Idx, IndexVec};
use rustc_macros::HashStable_Generic;
use rustc_span::Span;
pub mod call;
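/// Parsed [data-layout](https://llvm.org/docs/LangRef.html#data-layout)
/// specification for a target, which contains everything needed to compute layouts.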
pub struct TargetDataLayout {
pub endian: Endian,
pub i1_align: AbiAndPrefAlign,
pub i8_align: AbiAndPrefAlign,
pub i16_align: AbiAndPrefAlign,
pub i32_align: AbiAndPrefAlign,
pub i64_align: AbiAndPrefAlign,
pub i128_align: AbiAndPrefAlign,
pub f32_align: AbiAndPrefAlign,
pub f64_align: AbiAndPrefAlign,
pub pointer_size: Size,
pub pointer_align: AbiAndPrefAlign,
pub aggregate_align: AbiAndPrefAlign,
pub vector_align: Vec<(Size, AbiAndPrefAlign)>,
pub instruction_address_space: AddressSpace,
}
impl Default for TargetDataLayout {
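/// Creates an instance of `TargetDataLayout` populated with defaults
/// matching LLVM's (note that LLVM's default endianness is big).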
fn default() -> TargetDataLayout {
let align = |bits| Align::from_bits(bits).unwrap();
TargetDataLayout {
endian: Endian::Big,
i1_align: AbiAndPrefAlign::new(align(8)),
i8_align: AbiAndPrefAlign::new(align(8)),
i16_align: AbiAndPrefAlign::new(align(16)),
i32_align: AbiAndPrefAlign::new(align(32)),
i64_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
i128_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
f32_align: AbiAndPrefAlign::new(align(32)),
f64_align: AbiAndPrefAlign::new(align(64)),
pointer_size: Size::from_bits(64),
pointer_align: AbiAndPrefAlign::new(align(64)),
aggregate_align: AbiAndPrefAlign { abi: align(0), pref: align(64) },
vector_align: vec![
(Size::from_bits(64), AbiAndPrefAlign::new(align(64))),
(Size::from_bits(128), AbiAndPrefAlign::new(align(128))),
],
instruction_address_space: AddressSpace::DATA,
}
}
}
impl TargetDataLayout {
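/// Parses the "data-layout" string of `target`, overriding the defaults
/// spec by spec, then validates the result against "target-endian" and
/// "target-pointer-width".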
pub fn parse(target: &Target) -> Result<TargetDataLayout, String> {
let parse_address_space = |s: &str, cause: &str| {
s.parse::<u32>().map(AddressSpace).map_err(|err| {
format!("invalid address space `{}` for `{}` in \"data-layout\": {}", s, cause, err)
})
};
let parse_bits = |s: &str, kind: &str, cause: &str| {
s.parse::<u64>().map_err(|err| {
format!("invalid {} `{}` for `{}` in \"data-layout\": {}", kind, s, cause, err)
})
};
let size = |s: &str, cause: &str| parse_bits(s, "size", cause).map(Size::from_bits);
let align = |s: &[&str], cause: &str| {
if s.is_empty() {
return Err(format!("missing alignment for `{}` in \"data-layout\"", cause));
}
let align_from_bits = |bits| {
Align::from_bits(bits).map_err(|err| {
format!("invalid alignment for `{}` in \"data-layout\": {}", cause, err)
})
};
let abi = parse_bits(s[0], "alignment", cause)?;
let pref = s.get(1).map_or(Ok(abi), |pref| parse_bits(pref, "alignment", cause))?;
Ok(AbiAndPrefAlign { abi: align_from_bits(abi)?, pref: align_from_bits(pref)? })
};
let mut dl = TargetDataLayout::default();
let mut i128_align_src = 64;
for spec in target.data_layout.split('-') {
let spec_parts = spec.split(':').collect::<Vec<_>>();
match &*spec_parts {
["e"] => dl.endian = Endian::Little,
["E"] => dl.endian = Endian::Big,
[p] if p.starts_with('P') => {
dl.instruction_address_space = parse_address_space(&p[1..], "P")?
}
["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?,
["f32", ref a @ ..] => dl.f32_align = align(a, "f32")?,
["f64", ref a @ ..] => dl.f64_align = align(a, "f64")?,
[p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => {
dl.pointer_size = size(s, p)?;
dl.pointer_align = align(a, p)?;
}
[s, ref a @ ..] if s.starts_with('i') => {
let bits = match s[1..].parse::<u64>() {
Ok(bits) => bits,
Err(_) => {
// Not an integer width; re-parse via `size` purely to
// surface its user-facing error message.
size(&s[1..], "i")?;
continue;
}
};
let a = align(a, s)?;
match bits {
1 => dl.i1_align = a,
8 => dl.i8_align = a,
16 => dl.i16_align = a,
32 => dl.i32_align = a,
64 => dl.i64_align = a,
_ => {}
}
// Propagate the alignment of the largest specified integer
// type <= 128 bits to `i128`, unless `i128` was specified directly.
if bits >= i128_align_src && bits <= 128 {
i128_align_src = bits;
dl.i128_align = a;
}
}
[s, ref a @ ..] if s.starts_with('v') => {
let v_size = size(&s[1..], "v")?;
let a = align(a, s)?;
if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
v.1 = a;
continue;
}
dl.vector_align.push((v_size, a));
}
_ => {} // Ignore everything else.
}
}
let endian_str = match dl.endian {
Endian::Little => "little",
Endian::Big => "big",
};
if endian_str != target.target_endian {
return Err(format!(
"inconsistent target specification: \"data-layout\" claims \
architecture is {}-endian, while \"target-endian\" is `{}`",
endian_str, target.target_endian
));
}
if dl.pointer_size.bits().to_string() != target.target_pointer_width {
return Err(format!(
"inconsistent target specification: \"data-layout\" claims \
pointers are {}-bit, while \"target-pointer-width\" is `{}`",
dl.pointer_size.bits(),
target.target_pointer_width
));
}
Ok(dl)
}
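/// Returns the exclusive upper bound on object size.
///
/// The theoretical maximum object size is defined as the maximum positive `isize` value.
/// This ensures that the `offset` semantics remain well-defined by allowing it to correctly
/// index every address within an object along with one byte past the end, along with allowing
/// `isize` to store the difference between any two pointers into an object.
///
/// The upper bound on 64-bit currently needs to be lower than the full 64-bit address space,
/// because LLVM uses a 64-bit integer to store object size in bits.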
pub fn obj_size_bound(&self) -> u64 {
match self.pointer_size.bits() {
16 => 1 << 15,
32 => 1 << 31,
64 => 1 << 47,
bits => panic!("obj_size_bound: unknown pointer bit size {}", bits),
}
}
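/// Returns the integer type of pointer width for this target,
/// e.g. `I64` for a 64-bit target.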
pub fn ptr_sized_integer(&self) -> Integer {
match self.pointer_size.bits() {
16 => I16,
32 => I32,
64 => I64,
bits => panic!("ptr_sized_integer: unknown pointer bit size {}", bits),
}
}
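/// Returns the alignment for vectors of size `vec_size`, defaulting to the
/// natural alignment (the size rounded up to a power of two), which is what
/// LLVM does when no vector spec matches.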
pub fn vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
for &(size, align) in &self.vector_align {
if size == vec_size {
return align;
}
}
AbiAndPrefAlign::new(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
}
}
pub trait HasDataLayout {
fn data_layout(&self) -> &TargetDataLayout;
}
impl HasDataLayout for TargetDataLayout {
fn data_layout(&self) -> &TargetDataLayout {
self
}
}
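/// Endianness of the target, which must match `cfg(target_endian)`.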
#[derive(Copy, Clone, PartialEq)]
pub enum Endian {
Little,
Big,
}
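/// Size of a type in bytes.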
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)]
#[derive(HashStable_Generic)]
pub struct Size {
raw: u64,
}
impl Size {
pub const ZERO: Size = Size { raw: 0 };
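/// Creates a `Size` from a bit count, rounding up to the next whole byte;
/// e.g. `Size::from_bits(12)` is 2 bytes.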
#[inline]
pub fn from_bits(bits: impl TryInto<u64>) -> Size {
let bits = bits.try_into().ok().unwrap();
// Avoid potential overflow from `bits + 7`.
Size::from_bytes(bits / 8 + ((bits % 8) + 7) / 8)
}
#[inline]
pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
Size { raw: bytes.try_into().ok().unwrap() }
}
#[inline]
pub fn bytes(self) -> u64 {
self.raw
}
#[inline]
pub fn bytes_usize(self) -> usize {
self.bytes().try_into().unwrap()
}
#[inline]
pub fn bits(self) -> u64 {
self.bytes().checked_mul(8).unwrap_or_else(|| {
panic!("Size::bits: {} bytes in bits doesn't fit in u64", self.bytes())
})
}
#[inline]
pub fn bits_usize(self) -> usize {
self.bits().try_into().unwrap()
}
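/// Rounds `self` up to the nearest multiple of `align.bytes()`;
/// e.g. 5 bytes aligned to a 4-byte alignment becomes 8 bytes.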
#[inline]
pub fn align_to(self, align: Align) -> Size {
let mask = align.bytes() - 1;
Size::from_bytes((self.bytes() + mask) & !mask)
}
#[inline]
pub fn is_aligned(self, align: Align) -> bool {
let mask = align.bytes() - 1;
self.bytes() & mask == 0
}
#[inline]
pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
let dl = cx.data_layout();
let bytes = self.bytes().checked_add(offset.bytes())?;
if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
}
#[inline]
pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
let dl = cx.data_layout();
let bytes = self.bytes().checked_mul(count)?;
if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
}
}
impl Add for Size {
type Output = Size;
#[inline]
fn add(self, other: Size) -> Size {
Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
}))
}
}
impl Sub for Size {
type Output = Size;
#[inline]
fn sub(self, other: Size) -> Size {
Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
}))
}
}
impl Mul<Size> for u64 {
type Output = Size;
#[inline]
fn mul(self, size: Size) -> Size {
size * self
}
}
impl Mul<u64> for Size {
type Output = Size;
#[inline]
fn mul(self, count: u64) -> Size {
match self.bytes().checked_mul(count) {
Some(bytes) => Size::from_bytes(bytes),
None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
}
}
}
impl AddAssign for Size {
#[inline]
fn add_assign(&mut self, other: Size) {
*self = *self + other;
}
}
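/// Alignment of a type in bytes (always a power of two),
/// stored as the base-2 logarithm.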
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)]
#[derive(HashStable_Generic)]
pub struct Align {
pow2: u8,
}
impl Align {
pub fn from_bits(bits: u64) -> Result<Align, String> {
Align::from_bytes(Size::from_bits(bits).bytes())
}
pub fn from_bytes(align: u64) -> Result<Align, String> {
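// Treat an alignment of 0 bytes like 1-byte alignment.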
if align == 0 {
return Ok(Align { pow2: 0 });
}
let mut bytes = align;
let mut pow2: u8 = 0;
while (bytes & 1) == 0 {
pow2 += 1;
bytes >>= 1;
}
if bytes != 1 {
return Err(format!("`{}` is not a power of 2", align));
}
if pow2 > 29 {
return Err(format!("`{}` is too large", align));
}
Ok(Align { pow2 })
}
pub fn bytes(self) -> u64 {
1 << self.pow2
}
pub fn bits(self) -> u64 {
self.bytes() * 8
}
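/// Computes the best alignment possible for the given offset
/// (the largest power of two that the offset is a multiple of).
///
/// N.B., for an offset of `0`, this happens to return `2^64`.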
pub fn max_for_offset(offset: Size) -> Align {
Align { pow2: offset.bytes().trailing_zeros() as u8 }
}
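/// Lowers the alignment, if necessary, such that the given offset
/// is aligned to it (the offset is a multiple of the alignment).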
pub fn restrict_for_offset(self, offset: Size) -> Align {
self.min(Align::max_for_offset(offset))
}
}
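/// A pair of alignments, ABI-mandated and preferred.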
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Encodable, Decodable)]
#[derive(HashStable_Generic)]
pub struct AbiAndPrefAlign {
pub abi: Align,
pub pref: Align,
}
impl AbiAndPrefAlign {
pub fn new(align: Align) -> AbiAndPrefAlign {
AbiAndPrefAlign { abi: align, pref: align }
}
pub fn min(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
AbiAndPrefAlign { abi: self.abi.min(other.abi), pref: self.pref.min(other.pref) }
}
pub fn max(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
AbiAndPrefAlign { abi: self.abi.max(other.abi), pref: self.pref.max(other.pref) }
}
}
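/// Integers, also used for enum discriminants.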
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, HashStable_Generic)]
pub enum Integer {
I8,
I16,
I32,
I64,
I128,
}
impl Integer {
pub fn size(self) -> Size {
match self {
I8 => Size::from_bytes(1),
I16 => Size::from_bytes(2),
I32 => Size::from_bytes(4),
I64 => Size::from_bytes(8),
I128 => Size::from_bytes(16),
}
}
pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
let dl = cx.data_layout();
match self {
I8 => dl.i8_align,
I16 => dl.i16_align,
I32 => dl.i32_align,
I64 => dl.i64_align,
I128 => dl.i128_align,
}
}
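/// Finds the smallest `Integer` type which can represent the signed value.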
pub fn fit_signed(x: i128) -> Integer {
match x {
-0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
-0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
-0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
-0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
_ => I128,
}
}
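/// Finds the smallest `Integer` type which can represent the unsigned value.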
pub fn fit_unsigned(x: u128) -> Integer {
match x {
0..=0x0000_0000_0000_00ff => I8,
0..=0x0000_0000_0000_ffff => I16,
0..=0x0000_0000_ffff_ffff => I32,
0..=0xffff_ffff_ffff_ffff => I64,
_ => I128,
}
}
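/// Finds the smallest integer with the given alignment.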
pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
let dl = cx.data_layout();
for &candidate in &[I8, I16, I32, I64, I128] {
if wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes() {
return Some(candidate);
}
}
None
}
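/// Finds the largest integer with the given alignment or less.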
pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
let dl = cx.data_layout();
for &candidate in &[I64, I32, I16] {
if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
return candidate;
}
}
I8
}
}
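/// Fundamental unit of memory access and layout.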
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum Primitive {
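/// The `bool` is the signedness of the `Int` type.
///
/// One would think we would not care about such details this low down,
/// but some ABIs are described in terms of specific signedness and
/// non-standard widths (e.g. `i24`).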
Int(Integer, bool),
F32,
F64,
Pointer,
}
impl Primitive {
pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
let dl = cx.data_layout();
match self {
Int(i, _) => i.size(),
F32 => Size::from_bits(32),
F64 => Size::from_bits(64),
Pointer => dl.pointer_size,
}
}
pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
let dl = cx.data_layout();
match self {
Int(i, _) => i.align(dl),
F32 => dl.f32_align,
F64 => dl.f64_align,
Pointer => dl.pointer_align,
}
}
pub fn is_float(self) -> bool {
match self {
F32 | F64 => true,
_ => false,
}
}
pub fn is_int(self) -> bool {
match self {
Int(..) => true,
_ => false,
}
}
}
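/// Information about one scalar component of a Rust type.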
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable_Generic)]
pub struct Scalar {
pub value: Primitive,
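/// Inclusive wrap-around range of valid values, that is, if
/// start > end, it represents `start..=MAX`, followed by `0..=end`.
///
/// For example, for an `i8` primitive, a range of `254..=2` means the
/// sequence `254 (-2), 255 (-1), 0, 1, 2`. This mirrors the semantics
/// of LLVM's `!range` metadata.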
pub valid_range: RangeInclusive<u128>,
}
impl Scalar {
pub fn is_bool(&self) -> bool {
if let Int(I8, _) = self.value { self.valid_range == (0..=1) } else { false }
}
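/// Returns the valid range as an exclusive `x..y` range.
///
/// If `x` and `y` are equal, the range is full, not empty.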
pub fn valid_range_exclusive<C: HasDataLayout>(&self, cx: &C) -> Range<u128> {
let bits = self.value.size(cx).bits();
assert!(bits <= 128);
let mask = !0u128 >> (128 - bits);
let start = *self.valid_range.start();
let end = *self.valid_range.end();
assert_eq!(start, start & mask);
assert_eq!(end, end & mask);
start..(end.wrapping_add(1) & mask)
}
}
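/// Describes how the fields of a type are located in memory.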
#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum FieldsShape {
/// Scalar primitives and `!`, which never have fields.
Primitive,
/// All fields start at no offset. The `usize` is the field count.
Union(NonZeroUsize),
/// Array/vector-like placement, with all fields of identical types.
Array { stride: Size, count: u64 },
/// Struct-like placement, with precomputed offsets. Fields are guaranteed
/// not to overlap, but gaps between them are not necessarily padding (e.g.
/// enum variants leave a gap where the discriminant goes).
Arbitrary {
/// Offsets for the first byte of each field, in source definition order.
/// This vector does not go in increasing order.
offsets: Vec<Size>,
/// Maps source order field indices to memory order indices.
memory_index: Vec<u32>,
},
}
impl FieldsShape {
pub fn count(&self) -> usize {
match *self {
FieldsShape::Primitive => 0,
FieldsShape::Union(count) => count.get(),
FieldsShape::Array { count, .. } => {
let usize_count = count as usize;
assert_eq!(usize_count as u64, count);
usize_count
}
FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
}
}
pub fn offset(&self, i: usize) -> Size {
match *self {
FieldsShape::Primitive => {
unreachable!("FieldsShape::offset: `Primitive`s have no fields")
}
FieldsShape::Union(count) => {
assert!(
i < count.get(),
"tried to access field {} of union with {} fields",
i,
count
);
Size::ZERO
}
FieldsShape::Array { stride, count } => {
let i = u64::try_from(i).unwrap();
assert!(i < count);
stride * i
}
FieldsShape::Arbitrary { ref offsets, .. } => offsets[i],
}
}
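/// Maps a source-order field index to its memory-order index.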
pub fn memory_index(&self, i: usize) -> usize {
match *self {
FieldsShape::Primitive => {
unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
}
FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
FieldsShape::Arbitrary { ref memory_index, .. } => {
let r = memory_index[i];
assert_eq!(r as usize as u32, r);
r as usize
}
}
}
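/// Gets source indices of the fields by increasing offsets.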
#[inline]
pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
// Use a small array for the common case, falling back to a Vec for
// layouts with many fields; both hold the inverse of `memory_index`.
let mut inverse_small = [0u8; 64];
let mut inverse_big = vec![];
let use_small = self.count() <= inverse_small.len();
if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
if use_small {
for i in 0..self.count() {
inverse_small[memory_index[i] as usize] = i as u8;
}
} else {
inverse_big = vec![0; self.count()];
for i in 0..self.count() {
inverse_big[memory_index[i] as usize] = i as u32;
}
}
}
(0..self.count()).map(move |i| match *self {
FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
FieldsShape::Arbitrary { .. } => {
if use_small {
inverse_small[i] as usize
} else {
inverse_big[i] as usize
}
}
})
}
}
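/// An identifier that specifies the address space that some operation
/// should operate on. Special address spaces have an effect on code generation,
/// depending on the target and the address spaces it implements.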
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct AddressSpace(pub u32);
impl AddressSpace {
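/// The default address space, corresponding to data space.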
pub const DATA: Self = AddressSpace(0);
}
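/// Describes how values of the type are passed by target ABIs,
/// in terms of categories of C types there are ABI rules for.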
#[derive(Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum Abi {
Uninhabited,
Scalar(Scalar),
ScalarPair(Scalar, Scalar),
Vector {
element: Scalar,
count: u64,
},
Aggregate {
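/// If `true`, the size is exact, otherwise it's only a lower bound.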
sized: bool,
},
}
impl Abi {
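/// Returns `true` if the layout corresponds to an unsized type.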
pub fn is_unsized(&self) -> bool {
match *self {
Abi::Uninhabited | Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
Abi::Aggregate { sized } => !sized,
}
}
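/// Returns `true` if this is a single signed integer scalar;
/// panics if the ABI is not scalar.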
pub fn is_signed(&self) -> bool {
match *self {
Abi::Scalar(ref scal) => match scal.value {
Primitive::Int(_, signed) => signed,
_ => false,
},
_ => panic!("`is_signed` on non-scalar ABI {:?}", self),
}
}
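/// Returns `true` if this is an uninhabited type.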
pub fn is_uninhabited(&self) -> bool {
match *self {
Abi::Uninhabited => true,
_ => false,
}
}
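/// Returns `true` if this is a scalar type.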
pub fn is_scalar(&self) -> bool {
match *self {
Abi::Scalar(_) => true,
_ => false,
}
}
}
rustc_index::newtype_index! {
pub struct VariantIdx {
derive [HashStable_Generic]
}
}
#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum Variants {
/// Single enum variants, structs/tuples, unions, and all non-ADTs.
Single { index: VariantIdx },
/// Enum-likes with more than one inhabited variant: each variant comes with
/// a *discriminant* (usually the same as the variant index but the user can
/// assign explicit discriminant values). That discriminant is encoded as a
/// *tag* on the machine. The layout of each variant is a struct, and they
/// all have space reserved for the tag.
Multiple {
tag: Scalar,
tag_encoding: TagEncoding,
tag_field: usize,
variants: IndexVec<VariantIdx, Layout>,
},
}
#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum TagEncoding {
/// The tag directly stores the discriminant, but possibly with a smaller
/// layout (so converting the tag to the discriminant can require sign
/// extension).
Direct,
/// Niche (values invalid for a type) encoding the discriminant:
/// the variant `dataful_variant` contains a niche at an arbitrary
/// offset (field `tag_field` of the enum); for a variant with
/// discriminant `d`, the tag is set to
/// `(d - niche_variants.start).wrapping_add(niche_start)`.
Niche {
dataful_variant: VariantIdx,
niche_variants: RangeInclusive<VariantIdx>,
niche_start: u128,
},
}
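/// A `Scalar` whose invalid values ("niche"), located at `offset` within a
/// layout, can be used to encode additional variants of an enum.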
#[derive(Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub struct Niche {
pub offset: Size,
pub scalar: Scalar,
}
impl Niche {
pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
let niche = Niche { offset, scalar };
if niche.available(cx) > 0 { Some(niche) } else { None }
}
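/// Returns the number of invalid values available in this niche.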
pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
let Scalar { value, valid_range: ref v } = self.scalar;
let bits = value.size(cx).bits();
assert!(bits <= 128);
let max_value = !0u128 >> (128 - bits);
let niche = v.end().wrapping_add(1)..*v.start();
niche.end.wrapping_sub(niche.start) & max_value
}
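/// Tries to reserve `count` values just past the end of the valid range,
/// returning the first reserved value and a `Scalar` whose valid range
/// has been extended to cover them.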
pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
assert!(count > 0);
let Scalar { value, valid_range: ref v } = self.scalar;
let bits = value.size(cx).bits();
assert!(bits <= 128);
let max_value = !0u128 >> (128 - bits);
if count > max_value {
return None;
}
let start = v.end().wrapping_add(1) & max_value;
let end = v.end().wrapping_add(count) & max_value;
let valid_range_contains = |x| {
if v.start() <= v.end() {
*v.start() <= x && x <= *v.end()
} else {
*v.start() <= x || x <= *v.end()
}
};
if valid_range_contains(end) {
return None;
}
Some((start, Scalar { value, valid_range: *v.start()..=end }))
}
}
#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub struct Layout {
/// Says where the fields are located within the layout.
pub fields: FieldsShape,
/// Encodes information about multi-variant layouts. Even with `Multiple`
/// variants, a layout still has its own fields, which are shared between
/// all variants (one of them being the tag).
pub variants: Variants,
/// Defines how this data is passed between functions.
pub abi: Abi,
/// The largest niche in this layout, if any, usable by niche-filling enums.
pub largest_niche: Option<Niche>,
pub align: AbiAndPrefAlign,
pub size: Size,
}
impl Layout {
pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar.clone());
let size = scalar.value.size(cx);
let align = scalar.value.align(cx);
Layout {
variants: Variants::Single { index: VariantIdx::new(0) },
fields: FieldsShape::Primitive,
abi: Abi::Scalar(scalar),
largest_niche,
size,
align,
}
}
}
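/// The layout of a type, alongside the type itself.
/// Provides various type traversal APIs (e.g., recursing into fields).
///
/// Note that the layout is NOT guaranteed to always be identical
/// to that obtained from `layout_of(ty)`, as we need to produce
/// layouts for which Rust types do not exist, such as enum variants
/// or synthetic fields of enums (i.e., discriminants) and fat pointers.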
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct TyAndLayout<'a, Ty> {
pub ty: Ty,
pub layout: &'a Layout,
}
impl<'a, Ty> Deref for TyAndLayout<'a, Ty> {
type Target = &'a Layout;
fn deref(&self) -> &&'a Layout {
&self.layout
}
}
pub trait LayoutOf {
type Ty;
type TyAndLayout;
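/// Computes the layout of a type. Note that this implicitly
/// executes in "reveal all" mode.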
fn layout_of(&self, ty: Self::Ty) -> Self::TyAndLayout;
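/// Computes the layout of a type, at `span`. Note that this implicitly
/// executes in "reveal all" mode.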
fn spanned_layout_of(&self, ty: Self::Ty, _span: Span) -> Self::TyAndLayout {
self.layout_of(ty)
}
}
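/// Either `T` directly, or `Result<T, E>`: lets layout code be written once
/// for both infallible and fallible contexts.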
pub trait MaybeResult<T> {
type Error;
fn from(x: Result<T, Self::Error>) -> Self;
fn to_result(self) -> Result<T, Self::Error>;
}
impl<T> MaybeResult<T> for T {
type Error = !;
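// Infallible: the `!` error type makes the `Ok` pattern irrefutable.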
fn from(Ok(x): Result<T, Self::Error>) -> Self {
x
}
fn to_result(self) -> Result<T, Self::Error> {
Ok(self)
}
}
impl<T, E> MaybeResult<T> for Result<T, E> {
type Error = E;
fn from(x: Result<T, Self::Error>) -> Self {
x
}
fn to_result(self) -> Result<T, Self::Error> {
self
}
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
/// Most general case: we know no restrictions to tell LLVM.
Shared,
/// `&T` where `T` contains no `UnsafeCell`: `noalias` and `readonly`.
Frozen,
/// `&mut T`: `noalias` but not `readonly`.
UniqueBorrowed,
/// `Box<T>`: unlike `UniqueBorrowed`, it also has `noalias` on returns.
UniqueOwned,
}
#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
pub size: Size,
pub align: Align,
pub safe: Option<PointerKind>,
pub address_space: AddressSpace,
}
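/// Trait implemented by the higher-level type representation (e.g. rustc's
/// `Ty`) to provide `TyAndLayout` with field, variant, and pointee traversal.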
pub trait TyAndLayoutMethods<'a, C: LayoutOf<Ty = Self>>: Sized {
fn for_variant(
this: TyAndLayout<'a, Self>,
cx: &C,
variant_index: VariantIdx,
) -> TyAndLayout<'a, Self>;
fn field(this: TyAndLayout<'a, Self>, cx: &C, i: usize) -> C::TyAndLayout;
fn pointee_info_at(this: TyAndLayout<'a, Self>, cx: &C, offset: Size) -> Option<PointeeInfo>;
}
impl<'a, Ty> TyAndLayout<'a, Ty> {
pub fn for_variant<C>(self, cx: &C, variant_index: VariantIdx) -> Self
where
Ty: TyAndLayoutMethods<'a, C>,
C: LayoutOf<Ty = Ty>,
{
Ty::for_variant(self, cx, variant_index)
}
pub fn field<C>(self, cx: &C, i: usize) -> C::TyAndLayout
where
Ty: TyAndLayoutMethods<'a, C>,
C: LayoutOf<Ty = Ty>,
{
Ty::field(self, cx, i)
}
pub fn pointee_info_at<C>(self, cx: &C, offset: Size) -> Option<PointeeInfo>
where
Ty: TyAndLayoutMethods<'a, C>,
C: LayoutOf<Ty = Ty>,
{
Ty::pointee_info_at(self, cx, offset)
}
}
impl<'a, Ty> TyAndLayout<'a, Ty> {
pub fn is_unsized(&self) -> bool {
self.abi.is_unsized()
}
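/// Returns `true` if the type is a ZST and not unsized.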
pub fn is_zst(&self) -> bool {
match self.abi {
Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
Abi::Uninhabited => self.size.bytes() == 0,
Abi::Aggregate { sized } => sized && self.size.bytes() == 0,
}
}
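/// Determines if this type permits "raw" initialization by just transmuting some
/// memory into an instance of `T`.
///
/// `zero` indicates if the memory is zero-initialized, or alternatively
/// left entirely uninitialized.
///
/// This is conservative: in doubt, it will answer `true`.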
pub fn might_permit_raw_init<C, E>(self, cx: &C, zero: bool) -> Result<bool, E>
where
Self: Copy,
Ty: TyAndLayoutMethods<'a, C>,
C: LayoutOf<Ty = Ty, TyAndLayout: MaybeResult<Self, Error = E>> + HasDataLayout,
{
let scalar_allows_raw_init = move |s: &Scalar| -> bool {
if zero {
// The range must contain 0 (a wrap-around range always does).
let range = &s.valid_range;
range.contains(&0) || (*range.start() > *range.end())
} else {
// The range must include all values; the exclusive form of a
// full range has `start == end`.
let range = s.valid_range_exclusive(cx);
range.start == range.end
}
};
let valid = match &self.abi {
Abi::Uninhabited => false, // definitely UB
Abi::Scalar(s) => scalar_allows_raw_init(s),
Abi::ScalarPair(s1, s2) => scalar_allows_raw_init(s1) && scalar_allows_raw_init(s2),
Abi::Vector { element: s, count } => *count == 0 || scalar_allows_raw_init(s),
// Conservatively allow aggregates; fields are not inspected here.
Abi::Aggregate { .. } => true,
};
if !valid {
trace!("might_permit_raw_init({:?}, zero={}): not valid", self.layout, zero);
return Ok(false);
}
Ok(true)
}
}