mod chunk_iterator;
mod chunks_exact_mut;
mod fmt;
mod iterator;
mod slice_iterator;
mod zip_validity;
pub(crate) use chunk_iterator::merge_reversed;
pub use chunk_iterator::{BitChunk, BitChunkIterExact, BitChunks, BitChunksExact};
pub use chunks_exact_mut::BitChunksExactMut;
pub use fmt::fmt;
pub use iterator::BitmapIter;
use polars_utils::slice::GetSaferUnchecked;
pub use slice_iterator::SlicesIterator;
pub use zip_validity::{ZipValidity, ZipValidityIter};
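
/// Returns whether bit `i` of `byte` is set.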
#[inline]
pub fn is_set(byte: u8, i: usize) -> bool {
    debug_assert!(i < 8);
    byte & (1 << i) != 0
}
#[inline]
pub fn set(byte: u8, i: usize, value: bool) -> u8 {
    debug_assert!(i < 8);
    let mask = !(1 << i);
    let insert = (value as u8) << i;
    (byte & mask) | insert
}
#[inline]
pub fn set_bit(bytes: &mut [u8], i: usize, value: bool) {
    bytes[i / 8] = set(bytes[i / 8], i % 8, value);
}
#[inline]
pub unsafe fn set_bit_unchecked(bytes: &mut [u8], i: usize, value: bool) {
    let byte = bytes.get_unchecked_mut(i / 8);
    *byte = set(*byte, i % 8, value);
}
#[inline]
pub fn get_bit(bytes: &[u8], i: usize) -> bool {
    let byte = bytes[i / 8];
    let bit = (byte >> (i % 8)) & 1;
    bit != 0
}
#[inline]
pub unsafe fn get_bit_unchecked(bytes: &[u8], i: usize) -> bool {
    let byte = *bytes.get_unchecked_release(i / 8);
    let bit = (byte >> (i % 8)) & 1;
    bit != 0
}
#[inline]
pub fn bytes_for(bits: usize) -> usize {
    bits.saturating_add(7) / 8
}
pub fn count_zeros(mut slice: &[u8], mut offset: usize, len: usize) -> usize {
    if len == 0 {
        return 0;
    }

    // Restrict the slice to the bytes that contain the requested bit range.
    let first_byte_idx = offset / 8;
    let last_byte_idx = (offset + len - 1) / 8;
    slice = &slice[first_byte_idx..=last_byte_idx];
    offset %= 8;

    // Fast path: the entire range fits in a single u64 word.
    if slice.len() <= 8 {
        let mut tmp = [0u8; 8];
        tmp[..slice.len()].copy_from_slice(slice);
        let word = u64::from_ne_bytes(tmp) >> offset;
        // Shift out the bits above the range so only the `len` requested bits remain.
        let masked = word << (64 - len);
        return len - masked.count_ones() as usize;
    }

    let mut len_uncounted = len;
    let mut num_ones = 0;

    // Count the partial byte at the start of the range, if any.
    if offset != 0 {
        let partial_byte;
        (partial_byte, slice) = slice.split_first().unwrap();
        num_ones += (partial_byte >> offset).count_ones() as usize;
        len_uncounted -= 8 - offset;
    }

    // Count the partial byte at the end of the range, if any.
    let final_partial_len = len_uncounted % 8;
    if final_partial_len != 0 {
        let partial_byte;
        (partial_byte, slice) = slice.split_last().unwrap();
        let masked = partial_byte << (8 - final_partial_len);
        num_ones += masked.count_ones() as usize;
    }

    // Count the remaining whole bytes word-by-word.
    // SAFETY: transmuting aligned u8s to u64s is valid.
    let (start, mid, end) = unsafe { slice.align_to::<u64>() };
    let mut tmp = [0u8; 8];
    tmp[..start.len()].copy_from_slice(start);
    num_ones += u64::from_ne_bytes(tmp).count_ones() as usize;
    tmp = [0u8; 8];
    tmp[..end.len()].copy_from_slice(end);
    num_ones += u64::from_ne_bytes(tmp).count_ones() as usize;
    num_ones += mid
        .iter()
        .copied()
        .map(|w| w.count_ones() as usize)
        .sum::<usize>();

    len - num_ones
}
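
/// Splits the bit range `[offset, offset + len)` of `slice` into a leading
/// partial byte and a byte-aligned remainder.
///
/// Returns `(partial_byte, bits_in_partial_byte, rest_slice, rest_len)`: the
/// first `bits_in_partial_byte` bits of the range are stored in the low bits
/// of `partial_byte`, and the remaining `rest_len` bits start at bit 0 of
/// `rest_slice`.
///
/// # Panics
/// Panics if the bit range does not fit in `slice`.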
#[inline]
pub fn align_bitslice_start_u8(
    slice: &[u8],
    offset: usize,
    len: usize,
) -> (u8, usize, &[u8], usize) {
    if len == 0 {
        return (0, 0, &[], 0);
    }
    assert!(slice.len() * 8 >= offset + len);

    let mut first_byte_idx = offset / 8;
    let partial_offset = offset % 8;
    // Bits served from the leading partial byte; zero when the range starts
    // on a byte boundary and covers at least one full byte.
    let bits_in_partial_byte = (8 - partial_offset).min(len) % 8;

    // SAFETY: the assert above guarantees the index is in bounds.
    let mut partial_byte = unsafe { *slice.get_unchecked(first_byte_idx) };
    partial_byte >>= partial_offset;
    partial_byte &= (1 << bits_in_partial_byte) - 1;

    // Skip past the partial byte so the rest of the range starts at bit 0.
    first_byte_idx += (partial_offset > 0) as usize;
    // SAFETY: first_byte_idx <= slice.len() because the range is in bounds.
    let rest_slice = unsafe { slice.get_unchecked(first_byte_idx..) };

    (
        partial_byte,
        bits_in_partial_byte,
        rest_slice,
        len - bits_in_partial_byte,
    )
}