pub trait Shr<Rhs = Self> {
    type Output;
    fn shr(self, rhs: Rhs) -> Self::Output;
}
The right shift operator >>. Note that because this trait is implemented for all integer types with multiple right-hand-side types, Rust’s type checker has special handling for _ >> _, setting the result type for integer operations to the type of the left-hand-side operand. This means that though a >> b and a.shr(b) are one and the same from an evaluation standpoint, they are different when it comes to type inference.
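To illustrate, the result of a >> b takes the type of the left-hand operand even when the shift amount has a different integer type. A minimal sketch (variable names and values chosen arbitrarily):

fn main() {
    let a: u8 = 0b1000_0000; // 128
    let b: u32 = 3;          // the shift amount may have a different integer type
    // The result of `a >> b` has the type of the left-hand operand, u8.
    let shifted = a >> b;
    assert_eq!(shifted, 0b0001_0000u8); // 16
}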
An implementation of Shr that lifts the >> operation on integers to a wrapper around usize.
use std::ops::Shr;

#[derive(PartialEq, Debug)]
struct Scalar(usize);

impl Shr<Scalar> for Scalar {
    type Output = Self;

    fn shr(self, Self(rhs): Self) -> Self::Output {
        let Self(lhs) = self;
        Self(lhs >> rhs)
    }
}

assert_eq!(Scalar(16) >> Scalar(2), Scalar(4));
An implementation of Shr that spins a vector rightward by a given amount.
use std::ops::Shr;

#[derive(PartialEq, Debug)]
struct SpinVector<T: Clone> {
    vec: Vec<T>,
}

impl<T: Clone> Shr<usize> for SpinVector<T> {
    type Output = Self;

    fn shr(self, rhs: usize) -> Self::Output {
        // Split off the last `rhs` elements and move them to the front.
        // Note: this panics if `rhs` exceeds the vector's length.
        let (a, b) = self.vec.split_at(self.vec.len() - rhs);
        let mut spun_vector = vec![];
        spun_vector.extend_from_slice(b);
        spun_vector.extend_from_slice(a);
        Self { vec: spun_vector }
    }
}

assert_eq!(SpinVector { vec: vec![0, 1, 2, 3, 4] } >> 2,
           SpinVector { vec: vec![3, 4, 0, 1, 2] });
type Output: the resulting type after applying the >> operator.

fn shr: performs the >> operation. For example:

assert_eq!(5u8 >> 1, 2);
assert_eq!(2u8 >> 1, 1);
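Calling shr as an ordinary method produces the same value as the operator. A minimal sketch, with the shift amount's type (u32) written out explicitly so that a single Shr impl is selected:

use std::ops::Shr;

fn main() {
    // Operator form and method-call form evaluate to the same result.
    assert_eq!(5u8 >> 1u32, 2);
    assert_eq!(5u8.shr(1u32), 2);
}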
Implementations are also provided for the byte-order-aware integer wrappers, by value and by reference:

impl Shr<LittleEndian<i16>> for LittleEndian<i16>
impl Shr<LittleEndian<i32>> for LittleEndian<i32>
impl Shr<LittleEndian<i64>> for LittleEndian<i64>
impl Shr<LittleEndian<u16>> for LittleEndian<u16>
impl Shr<LittleEndian<u32>> for LittleEndian<u32>
impl Shr<LittleEndian<u64>> for LittleEndian<u64>
impl Shr<NativeEndian<i16>> for NativeEndian<i16>
impl Shr<NativeEndian<i32>> for NativeEndian<i32>
impl Shr<NativeEndian<i64>> for NativeEndian<i64>
impl Shr<NativeEndian<u16>> for NativeEndian<u16>
impl Shr<NativeEndian<u32>> for NativeEndian<u32>
impl Shr<NativeEndian<u64>> for NativeEndian<u64>
impl<'_> Shr<&'_ BigEndian<i16>> for BigEndian<i16>
impl<'_> Shr<&'_ BigEndian<i32>> for BigEndian<i32>
impl<'_> Shr<&'_ BigEndian<i64>> for BigEndian<i64>
impl<'_> Shr<&'_ BigEndian<u16>> for BigEndian<u16>
impl<'_> Shr<&'_ BigEndian<u32>> for BigEndian<u32>
impl<'_> Shr<&'_ BigEndian<u64>> for BigEndian<u64>
impl<'_> Shr<&'_ LittleEndian<i16>> for LittleEndian<i16>
impl<'_> Shr<&'_ LittleEndian<i32>> for LittleEndian<i32>
impl<'_> Shr<&'_ LittleEndian<i64>> for LittleEndian<i64>
impl<'_> Shr<&'_ LittleEndian<i128>> for LittleEndian<i128>
impl<'_> Shr<&'_ LittleEndian<u16>> for LittleEndian<u16>
impl<'_> Shr<&'_ LittleEndian<u32>> for LittleEndian<u32>
impl<'_> Shr<&'_ LittleEndian<u64>> for LittleEndian<u64>
impl<'_> Shr<&'_ LittleEndian<u128>> for LittleEndian<u128>
impl<'_> Shr<&'_ NativeEndian<i16>> for NativeEndian<i16>
impl<'_> Shr<&'_ NativeEndian<i32>> for NativeEndian<i32>
impl<'_> Shr<&'_ NativeEndian<i64>> for NativeEndian<i64>
impl<'_> Shr<&'_ NativeEndian<i128>> for NativeEndian<i128>
impl<'_> Shr<&'_ NativeEndian<u16>> for NativeEndian<u16>
impl<'_> Shr<&'_ NativeEndian<u32>> for NativeEndian<u32>
impl<'_> Shr<&'_ NativeEndian<u64>> for NativeEndian<u64>
impl<'_> Shr<&'_ NativeEndian<u128>> for NativeEndian<u128>
impl<'_> Shr<BigEndian<i16>> for &'_ BigEndian<i16>
impl<'_> Shr<BigEndian<i32>> for &'_ BigEndian<i32>
impl<'_> Shr<BigEndian<i64>> for &'_ BigEndian<i64>
impl<'_> Shr<BigEndian<u16>> for &'_ BigEndian<u16>
impl<'_> Shr<BigEndian<u32>> for &'_ BigEndian<u32>
impl<'_> Shr<BigEndian<u64>> for &'_ BigEndian<u64>
impl<'_> Shr<LittleEndian<i16>> for &'_ LittleEndian<i16>
impl<'_> Shr<LittleEndian<i32>> for &'_ LittleEndian<i32>
impl<'_> Shr<LittleEndian<i64>> for &'_ LittleEndian<i64>
impl<'_> Shr<LittleEndian<i128>> for &'_ LittleEndian<i128>
impl<'_> Shr<LittleEndian<u16>> for &'_ LittleEndian<u16>
impl<'_> Shr<LittleEndian<u32>> for &'_ LittleEndian<u32>
impl<'_> Shr<LittleEndian<u64>> for &'_ LittleEndian<u64>
impl<'_> Shr<LittleEndian<u128>> for &'_ LittleEndian<u128>
impl<'_> Shr<NativeEndian<i16>> for &'_ NativeEndian<i16>
impl<'_> Shr<NativeEndian<i32>> for &'_ NativeEndian<i32>
impl<'_> Shr<NativeEndian<i64>> for &'_ NativeEndian<i64>
impl<'_> Shr<NativeEndian<i128>> for &'_ NativeEndian<i128>
impl<'_> Shr<NativeEndian<u16>> for &'_ NativeEndian<u16>
impl<'_> Shr<NativeEndian<u32>> for &'_ NativeEndian<u32>
impl<'_> Shr<NativeEndian<u64>> for &'_ NativeEndian<u64>
impl<'_> Shr<NativeEndian<u128>> for &'_ NativeEndian<u128>
impl<'_, '_> Shr<&'_ i16> for &'_ BigEndian<i16>
impl<'_, '_> Shr<&'_ i16> for &'_ LittleEndian<i16>
impl<'_, '_> Shr<&'_ i16> for &'_ NativeEndian<i16>
impl<'_, '_> Shr<&'_ i32> for &'_ BigEndian<i32>
impl<'_, '_> Shr<&'_ i32> for &'_ LittleEndian<i32>
impl<'_, '_> Shr<&'_ i32> for &'_ NativeEndian<i32>
impl<'_, '_> Shr<&'_ i64> for &'_ BigEndian<i64>
impl<'_, '_> Shr<&'_ i64> for &'_ LittleEndian<i64>
impl<'_, '_> Shr<&'_ i64> for &'_ NativeEndian<i64>
impl<'_, '_> Shr<&'_ u16> for &'_ BigEndian<u16>
impl<'_, '_> Shr<&'_ u16> for &'_ LittleEndian<u16>
impl<'_, '_> Shr<&'_ u16> for &'_ NativeEndian<u16>
impl<'_, '_> Shr<&'_ u32> for &'_ BigEndian<u32>
impl<'_, '_> Shr<&'_ u32> for &'_ LittleEndian<u32>
impl<'_, '_> Shr<&'_ u32> for &'_ NativeEndian<u32>
impl<'_, '_> Shr<&'_ u64> for &'_ BigEndian<u64>
impl<'_, '_> Shr<&'_ u64> for &'_ LittleEndian<u64>
impl<'_, '_> Shr<&'_ u64> for &'_ NativeEndian<u64>
impl<'_, '_> Shr<&'_ BigEndian<i16>> for &'_ i16
impl<'_, '_> Shr<&'_ BigEndian<i16>> for &'_ BigEndian<i16>
impl<'_, '_> Shr<&'_ BigEndian<i32>> for &'_ i32
impl<'_, '_> Shr<&'_ BigEndian<i32>> for &'_ BigEndian<i32>
impl<'_, '_> Shr<&'_ BigEndian<i64>> for &'_ i64
impl<'_, '_> Shr<&'_ BigEndian<i64>> for &'_ BigEndian<i64>
impl<'_, '_> Shr<&'_ BigEndian<i128>> for &'_ BigEndian<i128>
impl<'_, '_> Shr<&'_ BigEndian<u16>> for &'_ u16
impl<'_, '_> Shr<&'_ BigEndian<u16>> for &'_ BigEndian<u16>
impl<'_, '_> Shr<&'_ BigEndian<u32>> for &'_ u32
impl<'_, '_> Shr<&'_ BigEndian<u32>> for &'_ BigEndian<u32>
impl<'_, '_> Shr<&'_ BigEndian<u64>> for &'_ u64
impl<'_, '_> Shr<&'_ BigEndian<u64>> for &'_ BigEndian<u64>
impl<'_, '_> Shr<&'_ BigEndian<u128>> for &'_ BigEndian<u128>
impl<'_, '_> Shr<&'_ LittleEndian<i16>> for &'_ i16
impl<'_, '_> Shr<&'_ LittleEndian<i16>> for &'_ LittleEndian<i16>
impl<'_, '_> Shr<&'_ LittleEndian<i32>> for &'_ i32
impl<'_, '_> Shr<&'_ LittleEndian<i32>> for &'_ LittleEndian<i32>
impl<'_, '_> Shr<&'_ LittleEndian<i64>> for &'_ i64
impl<'_, '_> Shr<&'_ LittleEndian<i64>> for &'_ LittleEndian<i64>
impl<'_, '_> Shr<&'_ LittleEndian<i128>> for &'_ LittleEndian<i128>
impl<'_, '_> Shr<&'_ LittleEndian<u16>> for &'_ u16
impl<'_, '_> Shr<&'_ LittleEndian<u16>> for &'_ LittleEndian<u16>
impl<'_, '_> Shr<&'_ LittleEndian<u32>> for &'_ u32
impl<'_, '_> Shr<&'_ LittleEndian<u32>> for &'_ LittleEndian<u32>
impl<'_, '_> Shr<&'_ LittleEndian<u64>> for &'_ u64
impl<'_, '_> Shr<&'_ LittleEndian<u64>> for &'_ LittleEndian<u64>
impl<'_, '_> Shr<&'_ LittleEndian<u128>> for &'_ LittleEndian<u128>
impl<'_, '_> Shr<&'_ NativeEndian<i16>> for &'_ i16
impl<'_, '_> Shr<&'_ NativeEndian<i16>> for &'_ NativeEndian<i16>
impl<'_, '_> Shr<&'_ NativeEndian<i32>> for &'_ i32
impl<'_, '_> Shr<&'_ NativeEndian<i32>> for &'_ NativeEndian<i32>
impl<'_, '_> Shr<&'_ NativeEndian<i64>> for &'_ i64
impl<'_, '_> Shr<&'_ NativeEndian<i64>> for &'_ NativeEndian<i64>
impl<'_, '_> Shr<&'_ NativeEndian<i128>> for &'_ NativeEndian<i128>
impl<'_, '_> Shr<&'_ NativeEndian<u16>> for &'_ u16
impl<'_, '_> Shr<&'_ NativeEndian<u16>> for &'_ NativeEndian<u16>
impl<'_, '_> Shr<&'_ NativeEndian<u32>> for &'_ u32
impl<'_, '_> Shr<&'_ NativeEndian<u32>> for &'_ NativeEndian<u32>
impl<'_, '_> Shr<&'_ NativeEndian<u64>> for &'_ u64
impl<'_, '_> Shr<&'_ NativeEndian<u64>> for &'_ NativeEndian<u64>
impl<'_, '_> Shr<&'_ NativeEndian<u128>> for &'_ NativeEndian<u128>
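The list above pairs each wrapper with both owned and borrowed operands (T >> T, T >> &T, &T >> T, &T >> &T). As a rough sketch of how such combinations are commonly written, using a hypothetical Wrapped(u32) stand-in rather than the crate's actual wrapper types:

use std::ops::Shr;

// Hypothetical stand-in for a byte-order-aware wrapper type.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Wrapped(u32);

impl Shr<Wrapped> for Wrapped {
    type Output = Wrapped;
    fn shr(self, rhs: Wrapped) -> Wrapped {
        Wrapped(self.0 >> rhs.0)
    }
}

// Borrowed right-hand side: `value >> &value`.
impl Shr<&Wrapped> for Wrapped {
    type Output = Wrapped;
    fn shr(self, rhs: &Wrapped) -> Wrapped {
        Wrapped(self.0 >> rhs.0)
    }
}

// Borrowed left-hand side: `&value >> value`.
impl Shr<Wrapped> for &Wrapped {
    type Output = Wrapped;
    fn shr(self, rhs: Wrapped) -> Wrapped {
        Wrapped(self.0 >> rhs.0)
    }
}

// Both sides borrowed: `&value >> &value`.
impl Shr<&Wrapped> for &Wrapped {
    type Output = Wrapped;
    fn shr(self, rhs: &Wrapped) -> Wrapped {
        Wrapped(self.0 >> rhs.0)
    }
}

fn main() {
    let a = Wrapped(16);
    let b = Wrapped(2);
    assert_eq!(a >> b, Wrapped(4));
    assert_eq!(a >> &b, Wrapped(4));
    assert_eq!(&a >> b, Wrapped(4));
    assert_eq!(&a >> &b, Wrapped(4));
}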