use ahash::RandomState;
use arrow::array::*;
use arrow::datatypes::*;
use arrow::row::Rows;
use arrow::{downcast_dictionary_array, downcast_primitive_array};
use arrow_buffer::i256;
use datafusion_common::{
cast::{
as_boolean_array, as_generic_binary_array, as_primitive_array, as_string_array,
},
internal_err, DataFusionError, Result,
};
use std::sync::Arc;
/// Combines two hashes into one hash
#[inline]
fn combine_hashes(l: u64, r: u64) -> u64 {
let hash = (17 * 37u64).wrapping_add(l);
hash.wrapping_mul(37).wrapping_add(r)
}
/// Hashes a column of nulls: every row hashes the same constant value,
/// combined with the existing hash when `mul_col` (multi-column) is set.
fn hash_null(random_state: &RandomState, hashes_buffer: &'_ mut [u64], mul_col: bool) {
if mul_col {
hashes_buffer.iter_mut().for_each(|hash| {
*hash = combine_hashes(random_state.hash_one(1), *hash);
})
} else {
hashes_buffer.iter_mut().for_each(|hash| {
*hash = random_state.hash_one(1);
})
}
}
/// A value that can be hashed with a [`RandomState`]
pub(crate) trait HashValue {
fn hash_one(&self, state: &RandomState) -> u64;
}
impl<'a, T: HashValue + ?Sized> HashValue for &'a T {
fn hash_one(&self, state: &RandomState) -> u64 {
T::hash_one(self, state)
}
}
macro_rules! hash_value {
($($t:ty),+) => {
$(impl HashValue for $t {
fn hash_one(&self, state: &RandomState) -> u64 {
state.hash_one(self)
}
})+
};
}
hash_value!(i8, i16, i32, i64, i128, i256, u8, u16, u32, u64);
hash_value!(bool, str, [u8]);
/// Floats are hashed by their native-endian bit pattern so that NaN and
/// signed zeros hash deterministically.
macro_rules! hash_float_value {
($(($t:ty, $i:ty)),+) => {
$(impl HashValue for $t {
fn hash_one(&self, state: &RandomState) -> u64 {
state.hash_one(<$i>::from_ne_bytes(self.to_ne_bytes()))
}
})+
};
}
hash_float_value!((half::f16, u16), (f32, u32), (f64, u64));
/// Hashes the values of a `PrimitiveArray` into `hashes_buffer`. Null entries
/// leave the existing hash untouched; when `rehash` is true each value's hash
/// is combined with the hash already in the buffer.
fn hash_array_primitive<T>(
array: &PrimitiveArray<T>,
random_state: &RandomState,
hashes_buffer: &mut [u64],
rehash: bool,
) where
T: ArrowPrimitiveType,
<T as arrow_array::ArrowPrimitiveType>::Native: HashValue,
{
assert_eq!(
hashes_buffer.len(),
array.len(),
"hashes_buffer and array should be of equal length"
);
if array.null_count() == 0 {
if rehash {
for (hash, &value) in hashes_buffer.iter_mut().zip(array.values().iter()) {
*hash = combine_hashes(value.hash_one(random_state), *hash);
}
} else {
for (hash, &value) in hashes_buffer.iter_mut().zip(array.values().iter()) {
*hash = value.hash_one(random_state);
}
}
} else if rehash {
for (i, hash) in hashes_buffer.iter_mut().enumerate() {
if !array.is_null(i) {
let value = unsafe { array.value_unchecked(i) };
*hash = combine_hashes(value.hash_one(random_state), *hash);
}
}
} else {
for (i, hash) in hashes_buffer.iter_mut().enumerate() {
if !array.is_null(i) {
let value = unsafe { array.value_unchecked(i) };
*hash = value.hash_one(random_state);
}
}
}
}
/// Hashes the values of any array implementing `ArrayAccessor` (e.g. boolean,
/// string, and binary arrays), with the same null and `rehash` semantics as
/// `hash_array_primitive`.
fn hash_array<T>(
array: T,
random_state: &RandomState,
hashes_buffer: &mut [u64],
rehash: bool,
) where
T: ArrayAccessor,
T::Item: HashValue,
{
assert_eq!(
hashes_buffer.len(),
array.len(),
"hashes_buffer and array should be of equal length"
);
if array.null_count() == 0 {
if rehash {
for (i, hash) in hashes_buffer.iter_mut().enumerate() {
let value = unsafe { array.value_unchecked(i) };
*hash = combine_hashes(value.hash_one(random_state), *hash);
}
} else {
for (i, hash) in hashes_buffer.iter_mut().enumerate() {
let value = unsafe { array.value_unchecked(i) };
*hash = value.hash_one(random_state);
}
}
} else if rehash {
for (i, hash) in hashes_buffer.iter_mut().enumerate() {
if !array.is_null(i) {
let value = unsafe { array.value_unchecked(i) };
*hash = combine_hashes(value.hash_one(random_state), *hash);
}
}
} else {
for (i, hash) in hashes_buffer.iter_mut().enumerate() {
if !array.is_null(i) {
let value = unsafe { array.value_unchecked(i) };
*hash = value.hash_one(random_state);
}
}
}
}
/// Hash the values in a dictionary array
fn hash_dictionary<K: ArrowDictionaryKeyType>(
array: &DictionaryArray<K>,
random_state: &RandomState,
hashes_buffer: &mut [u64],
multi_col: bool,
) -> Result<()> {
    // Hash each dictionary value once, then reuse that hash for every key that
    // points at it, avoiding redundant hashing of large dictionary values.
    let values = Arc::clone(array.values());
    let mut dict_hashes = vec![0; values.len()];
    create_hashes(&[values], random_state, &mut dict_hashes)?;
    if multi_col {
        for (hash, key) in hashes_buffer.iter_mut().zip(array.keys().iter()) {
            if let Some(key) = key {
                *hash = combine_hashes(dict_hashes[key.as_usize()], *hash)
            }
        }
    } else {
        for (hash, key) in hashes_buffer.iter_mut().zip(array.keys().iter()) {
            if let Some(key) = key {
                *hash = dict_hashes[key.as_usize()]
            }
        }
    }
Ok(())
}
/// Test version of `create_hashes` that produces the same value for
/// all hashes (to test collisions)
#[cfg(feature = "force_hash_collisions")]
pub fn create_hashes<'a>(
_arrays: &[ArrayRef],
_random_state: &RandomState,
hashes_buffer: &'a mut Vec<u64>,
) -> Result<&'a mut Vec<u64>> {
for hash in hashes_buffer.iter_mut() {
*hash = 0
}
Ok(hashes_buffer)
}
/// Test version of `create_row_hashes` that produces the same value for
/// all hashes (to test collisions)
#[cfg(feature = "force_hash_collisions")]
pub fn create_row_hashes<'a>(
_rows: &[Vec<u8>],
_random_state: &RandomState,
hashes_buffer: &'a mut Vec<u64>,
) -> Result<&'a mut Vec<u64>> {
for hash in hashes_buffer.iter_mut() {
*hash = 0
}
Ok(hashes_buffer)
}
/// Creates hash values for every row, based on their raw bytes.
#[cfg(not(feature = "force_hash_collisions"))]
pub fn create_row_hashes<'a>(
rows: &[Vec<u8>],
random_state: &RandomState,
hashes_buffer: &'a mut Vec<u64>,
) -> Result<&'a mut Vec<u64>> {
for hash in hashes_buffer.iter_mut() {
*hash = 0
}
for (i, hash) in hashes_buffer.iter_mut().enumerate() {
*hash = random_state.hash_one(&rows[i]);
}
Ok(hashes_buffer)
}
/// Creates hash values for every row, based on the values in the columns.
///
/// The number of rows to hash is determined by `hashes_buffer.len()`;
/// `hashes_buffer` should be pre-sized appropriately.
#[cfg(not(feature = "force_hash_collisions"))]
pub fn create_hashes<'a>(
arrays: &[ArrayRef],
random_state: &RandomState,
hashes_buffer: &'a mut Vec<u64>,
) -> Result<&'a mut Vec<u64>> {
for (i, col) in arrays.iter().enumerate() {
let array = col.as_ref();
        // combine hashes with the current hash value for every column after the first
        let rehash = i >= 1;
downcast_primitive_array! {
array => hash_array_primitive(array, random_state, hashes_buffer, rehash),
DataType::Null => hash_null(random_state, hashes_buffer, rehash),
DataType::Boolean => hash_array(as_boolean_array(array)?, random_state, hashes_buffer, rehash),
DataType::Utf8 => hash_array(as_string_array(array)?, random_state, hashes_buffer, rehash),
DataType::LargeUtf8 => hash_array(as_largestring_array(array), random_state, hashes_buffer, rehash),
DataType::Binary => hash_array(as_generic_binary_array::<i32>(array)?, random_state, hashes_buffer, rehash),
DataType::LargeBinary => hash_array(as_generic_binary_array::<i64>(array)?, random_state, hashes_buffer, rehash),
DataType::FixedSizeBinary(_) => {
let array: &FixedSizeBinaryArray = array.as_any().downcast_ref().unwrap();
hash_array(array, random_state, hashes_buffer, rehash)
}
DataType::Decimal128(_, _) => {
let array = as_primitive_array::<Decimal128Type>(array)?;
hash_array_primitive(array, random_state, hashes_buffer, rehash)
}
DataType::Decimal256(_, _) => {
let array = as_primitive_array::<Decimal256Type>(array)?;
hash_array_primitive(array, random_state, hashes_buffer, rehash)
}
DataType::Dictionary(_, _) => downcast_dictionary_array! {
array => hash_dictionary(array, random_state, hashes_buffer, rehash)?,
_ => unreachable!()
}
_ => {
return internal_err!(
"Unsupported data type in hasher: {}",
col.data_type()
);
}
}
}
Ok(hashes_buffer)
}
/// Test version of `create_row_hashes_v2` that produces the same value for
/// all hashes (to test collisions)
#[cfg(feature = "force_hash_collisions")]
pub fn create_row_hashes_v2<'a>(
_rows: &Rows,
_random_state: &RandomState,
hashes_buffer: &'a mut Vec<u64>,
) -> Result<&'a mut Vec<u64>> {
for hash in hashes_buffer.iter_mut() {
*hash = 0
}
Ok(hashes_buffer)
}
/// Creates hash values for every row, based on a `Rows`.
#[cfg(not(feature = "force_hash_collisions"))]
pub fn create_row_hashes_v2<'a>(
rows: &Rows,
random_state: &RandomState,
hashes_buffer: &'a mut Vec<u64>,
) -> Result<&'a mut Vec<u64>> {
for hash in hashes_buffer.iter_mut() {
*hash = 0
}
for (i, hash) in hashes_buffer.iter_mut().enumerate() {
*hash = random_state.hash_one(rows.row(i));
}
Ok(hashes_buffer)
}
#[cfg(test)]
mod tests {
use arrow::{array::*, datatypes::*};
use std::sync::Arc;
use super::*;
#[test]
fn create_hashes_for_decimal_array() -> Result<()> {
let array = vec![1, 2, 3, 4]
.into_iter()
.map(Some)
.collect::<Decimal128Array>()
.with_precision_and_scale(20, 3)
.unwrap();
let array_ref = Arc::new(array);
let random_state = RandomState::with_seeds(0, 0, 0, 0);
let hashes_buff = &mut vec![0; array_ref.len()];
let hashes = create_hashes(&[array_ref], &random_state, hashes_buff)?;
assert_eq!(hashes.len(), 4);
Ok(())
}
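    // Illustrative example, not part of the original suite: a `NullArray`
    // column routes through `hash_null`, so every row hashes to the same
    // constant value.
    #[test]
    #[cfg(not(feature = "force_hash_collisions"))]
    fn create_hashes_for_null_array() -> Result<()> {
        let null_array = Arc::new(NullArray::new(3));
        let random_state = RandomState::with_seeds(0, 0, 0, 0);
        let hashes_buff = &mut vec![0; null_array.len()];
        let hashes = create_hashes(&[null_array], &random_state, hashes_buff)?;
        assert_eq!(hashes.len(), 3);
        assert_eq!(hashes[0], hashes[1]);
        assert_eq!(hashes[1], hashes[2]);
        Ok(())
    }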
#[test]
fn create_hashes_for_float_arrays() -> Result<()> {
let f32_arr = Arc::new(Float32Array::from(vec![0.12, 0.5, 1f32, 444.7]));
let f64_arr = Arc::new(Float64Array::from(vec![0.12, 0.5, 1f64, 444.7]));
let random_state = RandomState::with_seeds(0, 0, 0, 0);
let hashes_buff = &mut vec![0; f32_arr.len()];
let hashes = create_hashes(&[f32_arr], &random_state, hashes_buff)?;
        assert_eq!(hashes.len(), 4);
let hashes = create_hashes(&[f64_arr], &random_state, hashes_buff)?;
        assert_eq!(hashes.len(), 4);
Ok(())
}
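    // Illustrative example, not part of the original suite: floats are hashed
    // via their native-endian bit pattern, so 0.0 and -0.0 (equal under `==`
    // but with different bit patterns) produce different hashes.
    #[test]
    #[cfg(not(feature = "force_hash_collisions"))]
    fn create_hashes_for_signed_zero_floats() -> Result<()> {
        let f64_arr = Arc::new(Float64Array::from(vec![0.0f64, -0.0f64]));
        let random_state = RandomState::with_seeds(0, 0, 0, 0);
        let hashes_buff = &mut vec![0; f64_arr.len()];
        let hashes = create_hashes(&[f64_arr], &random_state, hashes_buff)?;
        assert_ne!(hashes[0], hashes[1]);
        Ok(())
    }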
#[test]
fn create_hashes_binary() -> Result<()> {
let byte_array = Arc::new(BinaryArray::from_vec(vec![
&[4, 3, 2],
&[4, 3, 2],
&[1, 2, 3],
]));
let random_state = RandomState::with_seeds(0, 0, 0, 0);
let hashes_buff = &mut vec![0; byte_array.len()];
let hashes = create_hashes(&[byte_array], &random_state, hashes_buff)?;
        assert_eq!(hashes.len(), 3);
Ok(())
}
#[test]
fn create_hashes_fixed_size_binary() -> Result<()> {
let input_arg = vec![vec![1, 2], vec![5, 6], vec![5, 6]];
let fixed_size_binary_array =
Arc::new(FixedSizeBinaryArray::try_from_iter(input_arg.into_iter()).unwrap());
let random_state = RandomState::with_seeds(0, 0, 0, 0);
let hashes_buff = &mut vec![0; fixed_size_binary_array.len()];
let hashes =
create_hashes(&[fixed_size_binary_array], &random_state, hashes_buff)?;
        assert_eq!(hashes.len(), 3);
Ok(())
}
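    // Illustrative example, not part of the original suite: `create_row_hashes`
    // hashes each serialized row independently, so identical byte rows hash to
    // the same value.
    #[test]
    #[cfg(not(feature = "force_hash_collisions"))]
    fn create_row_hashes_for_equal_rows() -> Result<()> {
        let rows = vec![vec![1u8, 2, 3], vec![1, 2, 3], vec![4, 5, 6]];
        let random_state = RandomState::with_seeds(0, 0, 0, 0);
        let mut hashes = vec![0; rows.len()];
        create_row_hashes(&rows, &random_state, &mut hashes)?;
        assert_eq!(hashes[0], hashes[1]);
        assert_ne!(hashes[0], hashes[2]);
        Ok(())
    }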
#[test]
#[cfg(not(feature = "force_hash_collisions"))]
fn create_hashes_for_dict_arrays() {
let strings = [Some("foo"), None, Some("bar"), Some("foo"), None];
let string_array = Arc::new(strings.iter().cloned().collect::<StringArray>());
let dict_array = Arc::new(
strings
.iter()
.cloned()
.collect::<DictionaryArray<Int8Type>>(),
);
let random_state = RandomState::with_seeds(0, 0, 0, 0);
let mut string_hashes = vec![0; strings.len()];
create_hashes(&[string_array], &random_state, &mut string_hashes).unwrap();
let mut dict_hashes = vec![0; strings.len()];
create_hashes(&[dict_array], &random_state, &mut dict_hashes).unwrap();
for (val, hash) in strings.iter().zip(string_hashes.iter()) {
match val {
Some(_) => assert_ne!(*hash, 0),
None => assert_eq!(*hash, 0),
}
}
assert_eq!(string_hashes, dict_hashes);
assert_eq!(strings[1], strings[4]);
assert_eq!(dict_hashes[1], dict_hashes[4]);
assert_eq!(strings[0], strings[3]);
assert_eq!(dict_hashes[0], dict_hashes[3]);
assert_ne!(strings[0], strings[2]);
assert_ne!(dict_hashes[0], dict_hashes[2]);
}
#[test]
#[cfg(not(feature = "force_hash_collisions"))]
fn create_multi_column_hash_for_dict_arrays() {
let strings1 = [Some("foo"), None, Some("bar")];
let strings2 = [Some("blarg"), Some("blah"), None];
let string_array = Arc::new(strings1.iter().cloned().collect::<StringArray>());
let dict_array = Arc::new(
strings2
.iter()
.cloned()
.collect::<DictionaryArray<Int32Type>>(),
);
let random_state = RandomState::with_seeds(0, 0, 0, 0);
let mut one_col_hashes = vec![0; strings1.len()];
create_hashes(&[dict_array.clone()], &random_state, &mut one_col_hashes).unwrap();
let mut two_col_hashes = vec![0; strings1.len()];
create_hashes(
&[dict_array, string_array],
&random_state,
&mut two_col_hashes,
)
.unwrap();
assert_eq!(one_col_hashes.len(), 3);
assert_eq!(two_col_hashes.len(), 3);
assert_ne!(one_col_hashes, two_col_hashes);
}
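    // Illustrative example, not part of the original suite: for multiple
    // columns, each column after the first is folded into the running hash
    // with `combine_hashes`, so the result can be reproduced from the
    // per-value hashes.
    #[test]
    #[cfg(not(feature = "force_hash_collisions"))]
    fn multi_column_hashes_combine_per_column_hashes() -> Result<()> {
        let a = Arc::new(Int32Array::from(vec![1, 2, 3]));
        let b = Arc::new(Int32Array::from(vec![10, 20, 30]));
        let random_state = RandomState::with_seeds(0, 0, 0, 0);
        let mut hashes = vec![0; a.len()];
        create_hashes(&[a.clone(), b.clone()], &random_state, &mut hashes)?;
        for i in 0..a.len() {
            let expected = combine_hashes(
                random_state.hash_one(b.value(i)),
                random_state.hash_one(a.value(i)),
            );
            assert_eq!(hashes[i], expected);
        }
        Ok(())
    }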
}