use crate::dictionary::{merge_dictionary_values, should_merge_dictionary_values};
use arrow_array::cast::AsArray;
use arrow_array::types::*;
use arrow_array::*;
use arrow_buffer::{ArrowNativeType, BooleanBufferBuilder, NullBuffer};
use arrow_data::transform::{Capacities, MutableArrayData};
use arrow_schema::{ArrowError, DataType, SchemaRef};
use std::sync::Arc;
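/// Computes the item and byte capacities needed to concatenate the given
/// byte arrays (e.g. `Utf8` / `Binary`), using the value offsets so that
/// sliced inputs are accounted for correctly.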
fn binary_capacity<T: ByteArrayType>(arrays: &[&dyn Array]) -> Capacities {
let mut item_capacity = 0;
let mut bytes_capacity = 0;
for array in arrays {
let a = array.as_bytes::<T>();
let offsets = a.value_offsets();
bytes_capacity += offsets[offsets.len() - 1].as_usize() - offsets[0].as_usize();
item_capacity += a.len()
}
Capacities::Binary(item_capacity, Some(bytes_capacity))
}
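/// Computes the [`Capacities`] for concatenating `FixedSizeList` arrays,
/// recursing into the child values for variable-length child types.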
fn fixed_size_list_capacity(arrays: &[&dyn Array], data_type: &DataType) -> Capacities {
if let DataType::FixedSizeList(f, _) = data_type {
let item_capacity = arrays.iter().map(|a| a.len()).sum();
let child_data_type = f.data_type();
match child_data_type {
DataType::Utf8
| DataType::LargeUtf8
| DataType::Binary
| DataType::LargeBinary
| DataType::FixedSizeList(_, _) => {
let values: Vec<&dyn arrow_array::Array> = arrays
.iter()
.map(|a| a.as_fixed_size_list().values().as_ref())
.collect();
Capacities::List(
item_capacity,
Some(Box::new(get_capacity(&values, child_data_type))),
)
}
_ => Capacities::Array(item_capacity),
}
} else {
unreachable!("illegal data type for fixed size list")
}
}
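/// Concatenates dictionary arrays with key type `K`, merging their dictionary
/// values when `should_merge_dictionary_values` estimates it to be worthwhile,
/// and otherwise falling back to `concat_fallback`.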
fn concat_dictionaries<K: ArrowDictionaryKeyType>(
arrays: &[&dyn Array],
) -> Result<ArrayRef, ArrowError> {
let mut output_len = 0;
let dictionaries: Vec<_> = arrays
.iter()
.map(|x| x.as_dictionary::<K>())
.inspect(|d| output_len += d.len())
.collect();
if !should_merge_dictionary_values::<K>(&dictionaries, output_len) {
return concat_fallback(arrays, Capacities::Array(output_len));
}
let merged = merge_dictionary_values(&dictionaries, None)?;
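// Build the new keys by mapping each input array's keys through its
// per-array mapping into the merged dictionary values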
let mut key_values = Vec::with_capacity(output_len);
let mut has_nulls = false;
for (d, mapping) in dictionaries.iter().zip(merged.key_mappings) {
has_nulls |= d.null_count() != 0;
for key in d.keys().values() {
key_values.push(mapping.get(key.as_usize()).copied().unwrap_or_default())
}
}
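// Combine the validity buffers, but only if any input actually contains nulls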
let nulls = has_nulls.then(|| {
let mut nulls = BooleanBufferBuilder::new(output_len);
for d in &dictionaries {
match d.nulls() {
Some(n) => nulls.append_buffer(n.inner()),
None => nulls.append_n(d.len(), true),
}
}
NullBuffer::new(nulls.finish())
});
let keys = PrimitiveArray::<K>::new(key_values.into(), nulls);
assert_eq!(keys.len(), output_len);
let array = unsafe { DictionaryArray::new_unchecked(keys, merged.values) };
Ok(Arc::new(array))
}
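// Helper macro for `downcast_integer!`: dispatches `concat_dictionaries`
// for the concrete dictionary key type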
macro_rules! dict_helper {
($t:ty, $arrays:expr) => {
return Ok(Arc::new(concat_dictionaries::<$t>($arrays)?) as _)
};
}
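/// Estimates the [`Capacities`] required to concatenate `arrays` of `data_type`.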
fn get_capacity(arrays: &[&dyn Array], data_type: &DataType) -> Capacities {
match data_type {
DataType::Utf8 => binary_capacity::<Utf8Type>(arrays),
DataType::LargeUtf8 => binary_capacity::<LargeUtf8Type>(arrays),
DataType::Binary => binary_capacity::<BinaryType>(arrays),
DataType::LargeBinary => binary_capacity::<LargeBinaryType>(arrays),
DataType::FixedSizeList(_, _) => fixed_size_list_capacity(arrays, data_type),
_ => Capacities::Array(arrays.iter().map(|a| a.len()).sum()),
}
}
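/// Concatenate multiple [`Array`] of the same type into a single [`ArrayRef`].
///
/// Returns an error if the input is empty or the arrays have differing data
/// types. A single input array is returned as a zero-copy slice.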
pub fn concat(arrays: &[&dyn Array]) -> Result<ArrayRef, ArrowError> {
if arrays.is_empty() {
return Err(ArrowError::ComputeError(
"concat requires input of at least one array".to_string(),
));
} else if arrays.len() == 1 {
let array = arrays[0];
return Ok(array.slice(0, array.len()));
}
let d = arrays[0].data_type();
if arrays.iter().skip(1).any(|array| array.data_type() != d) {
return Err(ArrowError::InvalidArgumentError(
"It is not possible to concatenate arrays of different data types.".to_string(),
));
}
if let DataType::Dictionary(k, _) = d {
downcast_integer! {
k.as_ref() => (dict_helper, arrays),
_ => unreachable!("illegal dictionary key type {k}")
};
} else {
let capacity = get_capacity(arrays, d);
concat_fallback(arrays, capacity)
}
}
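/// Concatenates arrays using [`MutableArrayData`], pre-allocating according to
/// the supplied `capacity`.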
fn concat_fallback(arrays: &[&dyn Array], capacity: Capacities) -> Result<ArrayRef, ArrowError> {
let array_data: Vec<_> = arrays.iter().map(|a| a.to_data()).collect();
let array_data = array_data.iter().collect();
let mut mutable = MutableArrayData::with_capacities(array_data, false, capacity);
for (i, a) in arrays.iter().enumerate() {
mutable.extend(i, 0, a.len())
}
Ok(make_array(mutable.freeze()))
}
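/// Concatenates `input_batches` into a single [`RecordBatch`] with the given
/// `schema`; columns are matched by position and the schemas of the input
/// batches are otherwise ignored.
///
/// If `schema` has no fields, the output batch only carries the total row
/// count. Returns an error if the underlying column types cannot be
/// concatenated.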
pub fn concat_batches<'a>(
schema: &SchemaRef,
input_batches: impl IntoIterator<Item = &'a RecordBatch>,
) -> Result<RecordBatch, ArrowError> {
if schema.fields().is_empty() {
let num_rows: usize = input_batches.into_iter().map(RecordBatch::num_rows).sum();
let mut options = RecordBatchOptions::default();
options.row_count = Some(num_rows);
return RecordBatch::try_new_with_options(schema.clone(), vec![], &options);
}
let batches: Vec<&RecordBatch> = input_batches.into_iter().collect();
if batches.is_empty() {
return Ok(RecordBatch::new_empty(schema.clone()));
}
let field_num = schema.fields().len();
let mut arrays = Vec::with_capacity(field_num);
for i in 0..field_num {
let array = concat(
&batches
.iter()
.map(|batch| batch.column(i).as_ref())
.collect::<Vec<_>>(),
)?;
arrays.push(array);
}
RecordBatch::try_new(schema.clone(), arrays)
}
#[cfg(test)]
mod tests {
use super::*;
use arrow_array::builder::StringDictionaryBuilder;
use arrow_schema::{Field, Schema};
#[test]
fn test_concat_empty_vec() {
let re = concat(&[]);
assert!(re.is_err());
}
#[test]
fn test_concat_batches_no_columns() {
let schema = Arc::new(Schema::empty());
let mut options = RecordBatchOptions::default();
options.row_count = Some(100);
let batch = RecordBatch::try_new_with_options(schema.clone(), vec![], &options).unwrap();
let re = concat_batches(&schema, &[batch.clone(), batch]).unwrap();
assert_eq!(re.num_rows(), 200);
}
#[test]
fn test_concat_one_element_vec() {
let arr = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(2),
None,
])) as ArrayRef;
let result = concat(&[arr.as_ref()]).unwrap();
assert_eq!(
&arr, &result,
"concatenating single element array gives back the same result"
);
}
#[test]
fn test_concat_incompatible_datatypes() {
let re = concat(&[
&PrimitiveArray::<Int64Type>::from(vec![Some(-1), Some(2), None]),
&StringArray::from(vec![Some("hello"), Some("bar"), Some("world")]),
]);
assert!(re.is_err());
}
#[test]
fn test_concat_string_arrays() {
let arr = concat(&[
&StringArray::from(vec!["hello", "world"]),
&StringArray::from(vec!["2", "3", "4"]),
&StringArray::from(vec![Some("foo"), Some("bar"), None, Some("baz")]),
])
.unwrap();
let expected_output = Arc::new(StringArray::from(vec![
Some("hello"),
Some("world"),
Some("2"),
Some("3"),
Some("4"),
Some("foo"),
Some("bar"),
None,
Some("baz"),
])) as ArrayRef;
assert_eq!(&arr, &expected_output);
}
#[test]
fn test_concat_primitive_arrays() {
let arr = concat(&[
&PrimitiveArray::<Int64Type>::from(vec![Some(-1), Some(-1), Some(2), None, None]),
&PrimitiveArray::<Int64Type>::from(vec![Some(101), Some(102), Some(103), None]),
&PrimitiveArray::<Int64Type>::from(vec![Some(256), Some(512), Some(1024)]),
])
.unwrap();
let expected_output = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(-1),
Some(2),
None,
None,
Some(101),
Some(102),
Some(103),
None,
Some(256),
Some(512),
Some(1024),
])) as ArrayRef;
assert_eq!(&arr, &expected_output);
}
#[test]
fn test_concat_primitive_array_slices() {
let input_1 =
PrimitiveArray::<Int64Type>::from(vec![Some(-1), Some(-1), Some(2), None, None])
.slice(1, 3);
let input_2 =
PrimitiveArray::<Int64Type>::from(vec![Some(101), Some(102), Some(103), None])
.slice(1, 3);
let arr = concat(&[&input_1, &input_2]).unwrap();
let expected_output = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(2),
None,
Some(102),
Some(103),
None,
])) as ArrayRef;
assert_eq!(&arr, &expected_output);
}
#[test]
fn test_concat_boolean_primitive_arrays() {
let arr = concat(&[
&BooleanArray::from(vec![
Some(true),
Some(true),
Some(false),
None,
None,
Some(false),
]),
&BooleanArray::from(vec![None, Some(false), Some(true), Some(false)]),
])
.unwrap();
let expected_output = Arc::new(BooleanArray::from(vec![
Some(true),
Some(true),
Some(false),
None,
None,
Some(false),
None,
Some(false),
Some(true),
Some(false),
])) as ArrayRef;
assert_eq!(&arr, &expected_output);
}
#[test]
fn test_concat_primitive_list_arrays() {
let list1 = vec![
Some(vec![Some(-1), Some(-1), Some(2), None, None]),
Some(vec![]),
None,
Some(vec![Some(10)]),
];
let list1_array = ListArray::from_iter_primitive::<Int64Type, _, _>(list1.clone());
let list2 = vec![
None,
Some(vec![Some(100), None, Some(101)]),
Some(vec![Some(102)]),
];
let list2_array = ListArray::from_iter_primitive::<Int64Type, _, _>(list2.clone());
let list3 = vec![Some(vec![Some(1000), Some(1001)])];
let list3_array = ListArray::from_iter_primitive::<Int64Type, _, _>(list3.clone());
let array_result = concat(&[&list1_array, &list2_array, &list3_array]).unwrap();
let expected = list1.into_iter().chain(list2).chain(list3);
let array_expected = ListArray::from_iter_primitive::<Int64Type, _, _>(expected);
assert_eq!(array_result.as_ref(), &array_expected as &dyn Array);
}
#[test]
fn test_concat_primitive_fixed_size_list_arrays() {
let list1 = vec![
Some(vec![Some(-1), None]),
None,
Some(vec![Some(10), Some(20)]),
];
let list1_array =
FixedSizeListArray::from_iter_primitive::<Int64Type, _, _>(list1.clone(), 2);
let list2 = vec![
None,
Some(vec![Some(100), None]),
Some(vec![Some(102), Some(103)]),
];
let list2_array =
FixedSizeListArray::from_iter_primitive::<Int64Type, _, _>(list2.clone(), 2);
let list3 = vec![Some(vec![Some(1000), Some(1001)])];
let list3_array =
FixedSizeListArray::from_iter_primitive::<Int64Type, _, _>(list3.clone(), 2);
let array_result = concat(&[&list1_array, &list2_array, &list3_array]).unwrap();
let expected = list1.into_iter().chain(list2).chain(list3);
let array_expected =
FixedSizeListArray::from_iter_primitive::<Int64Type, _, _>(expected, 2);
assert_eq!(array_result.as_ref(), &array_expected as &dyn Array);
}
#[test]
fn test_concat_struct_arrays() {
let field = Arc::new(Field::new("field", DataType::Int64, true));
let input_primitive_1: ArrayRef = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(-1),
Some(2),
None,
None,
]));
let input_struct_1 = StructArray::from(vec![(field.clone(), input_primitive_1)]);
let input_primitive_2: ArrayRef = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(101),
Some(102),
Some(103),
None,
]));
let input_struct_2 = StructArray::from(vec![(field.clone(), input_primitive_2)]);
let input_primitive_3: ArrayRef = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(256),
Some(512),
Some(1024),
]));
let input_struct_3 = StructArray::from(vec![(field, input_primitive_3)]);
let arr = concat(&[&input_struct_1, &input_struct_2, &input_struct_3]).unwrap();
let expected_primitive_output = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(-1),
Some(2),
None,
None,
Some(101),
Some(102),
Some(103),
None,
Some(256),
Some(512),
Some(1024),
])) as ArrayRef;
let actual_primitive = arr
.as_any()
.downcast_ref::<StructArray>()
.unwrap()
.column(0);
assert_eq!(actual_primitive, &expected_primitive_output);
}
#[test]
fn test_concat_struct_array_slices() {
let field = Arc::new(Field::new("field", DataType::Int64, true));
let input_primitive_1: ArrayRef = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(-1),
Some(2),
None,
None,
]));
let input_struct_1 = StructArray::from(vec![(field.clone(), input_primitive_1)]);
let input_primitive_2: ArrayRef = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(101),
Some(102),
Some(103),
None,
]));
let input_struct_2 = StructArray::from(vec![(field, input_primitive_2)]);
let arr = concat(&[&input_struct_1.slice(1, 3), &input_struct_2.slice(1, 2)]).unwrap();
let expected_primitive_output = Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(2),
None,
Some(102),
Some(103),
])) as ArrayRef;
let actual_primitive = arr
.as_any()
.downcast_ref::<StructArray>()
.unwrap()
.column(0);
assert_eq!(actual_primitive, &expected_primitive_output);
}
#[test]
fn test_string_array_slices() {
let input_1 = StringArray::from(vec!["hello", "A", "B", "C"]);
let input_2 = StringArray::from(vec!["world", "D", "E", "Z"]);
let arr = concat(&[&input_1.slice(1, 3), &input_2.slice(1, 2)]).unwrap();
let expected_output = StringArray::from(vec!["A", "B", "C", "D", "E"]);
let actual_output = arr.as_any().downcast_ref::<StringArray>().unwrap();
assert_eq!(actual_output, &expected_output);
}
#[test]
fn test_string_array_with_null_slices() {
let input_1 = StringArray::from(vec![Some("hello"), None, Some("A"), Some("C")]);
let input_2 = StringArray::from(vec![None, Some("world"), Some("D"), None]);
let arr = concat(&[&input_1.slice(1, 3), &input_2.slice(1, 2)]).unwrap();
let expected_output =
StringArray::from(vec![None, Some("A"), Some("C"), Some("world"), Some("D")]);
let actual_output = arr.as_any().downcast_ref::<StringArray>().unwrap();
assert_eq!(actual_output, &expected_output);
}
fn collect_string_dictionary(array: &DictionaryArray<Int32Type>) -> Vec<Option<&str>> {
let concrete = array.downcast_dict::<StringArray>().unwrap();
concrete.into_iter().collect()
}
#[test]
fn test_string_dictionary_array() {
let input_1: DictionaryArray<Int32Type> = vec!["hello", "A", "B", "hello", "hello", "C"]
.into_iter()
.collect();
let input_2: DictionaryArray<Int32Type> = vec!["hello", "E", "E", "hello", "F", "E"]
.into_iter()
.collect();
let expected: Vec<_> = vec![
"hello", "A", "B", "hello", "hello", "C", "hello", "E", "E", "hello", "F", "E",
]
.into_iter()
.map(Some)
.collect();
let concat = concat(&[&input_1 as _, &input_2 as _]).unwrap();
let dictionary = concat.as_dictionary::<Int32Type>();
let actual = collect_string_dictionary(dictionary);
assert_eq!(actual, expected);
assert_eq!(
dictionary.values().len(),
input_1.values().len() + input_2.values().len(),
)
}
#[test]
fn test_string_dictionary_array_nulls() {
let input_1: DictionaryArray<Int32Type> = vec![Some("foo"), Some("bar"), None, Some("fiz")]
.into_iter()
.collect();
let input_2: DictionaryArray<Int32Type> = vec![None].into_iter().collect();
let expected = vec![Some("foo"), Some("bar"), None, Some("fiz"), None];
let concat = concat(&[&input_1 as _, &input_2 as _]).unwrap();
let dictionary = concat.as_dictionary::<Int32Type>();
let actual = collect_string_dictionary(dictionary);
assert_eq!(actual, expected);
assert_eq!(
dictionary.values().len(),
input_1.values().len() + input_2.values().len(),
)
}
#[test]
fn test_string_dictionary_merge() {
let mut builder = StringDictionaryBuilder::<Int32Type>::new();
for i in 0..20 {
builder.append(&i.to_string()).unwrap();
}
let input_1 = builder.finish();
let mut builder = StringDictionaryBuilder::<Int32Type>::new();
for i in 0..30 {
builder.append(&i.to_string()).unwrap();
}
let input_2 = builder.finish();
let expected: Vec<_> = (0..20).chain(0..30).map(|x| x.to_string()).collect();
let expected: Vec<_> = expected.iter().map(|x| Some(x.as_str())).collect();
let concat = concat(&[&input_1 as _, &input_2 as _]).unwrap();
let dictionary = concat.as_dictionary::<Int32Type>();
let actual = collect_string_dictionary(dictionary);
assert_eq!(actual, expected);
let values_len = dictionary.values().len();
assert!((30..40).contains(&values_len), "{values_len}")
}
#[test]
fn test_concat_string_sizes() {
let a: LargeStringArray = ((0..150).map(|_| Some("foo"))).collect();
let b: LargeStringArray = ((0..150).map(|_| Some("foo"))).collect();
let c = LargeStringArray::from(vec![Some("foo"), Some("bar"), None, Some("baz")]);
let arr = concat(&[&a, &b, &c]).unwrap();
assert_eq!(arr.to_data().buffers()[1].capacity(), 960);
}
#[test]
fn test_dictionary_concat_reuse() {
let array: DictionaryArray<Int8Type> = vec!["a", "a", "b", "c"].into_iter().collect();
let copy: DictionaryArray<Int8Type> = array.clone();
assert_eq!(
array.values(),
&(Arc::new(StringArray::from(vec!["a", "b", "c"])) as ArrayRef)
);
assert_eq!(array.keys(), &Int8Array::from(vec![0, 0, 1, 2]));
let combined = concat(&[&copy as _, &array as _]).unwrap();
let combined = combined.as_dictionary::<Int8Type>();
assert_eq!(
combined.values(),
&(Arc::new(StringArray::from(vec!["a", "b", "c"])) as ArrayRef),
"Actual: {combined:#?}"
);
assert_eq!(
combined.keys(),
&Int8Array::from(vec![0, 0, 1, 2, 0, 0, 1, 2])
);
assert!(array
.values()
.to_data()
.ptr_eq(&combined.values().to_data()));
assert!(copy.values().to_data().ptr_eq(&combined.values().to_data()));
let new: DictionaryArray<Int8Type> = vec!["d"].into_iter().collect();
let combined = concat(&[&copy as _, &array as _, &new as _]).unwrap();
let com = combined.as_dictionary::<Int8Type>();
assert!(!array.values().to_data().ptr_eq(&com.values().to_data()));
assert!(!copy.values().to_data().ptr_eq(&com.values().to_data()));
assert!(!new.values().to_data().ptr_eq(&com.values().to_data()));
}
#[test]
fn concat_record_batches() {
let schema = Arc::new(Schema::new(vec![
Field::new("a", DataType::Int32, false),
Field::new("b", DataType::Utf8, false),
]));
let batch1 = RecordBatch::try_new(
schema.clone(),
vec![
Arc::new(Int32Array::from(vec![1, 2])),
Arc::new(StringArray::from(vec!["a", "b"])),
],
)
.unwrap();
let batch2 = RecordBatch::try_new(
schema.clone(),
vec![
Arc::new(Int32Array::from(vec![3, 4])),
Arc::new(StringArray::from(vec!["c", "d"])),
],
)
.unwrap();
let new_batch = concat_batches(&schema, [&batch1, &batch2]).unwrap();
assert_eq!(new_batch.schema().as_ref(), schema.as_ref());
assert_eq!(2, new_batch.num_columns());
assert_eq!(4, new_batch.num_rows());
let new_batch_owned = concat_batches(&schema, &[batch1, batch2]).unwrap();
assert_eq!(new_batch_owned.schema().as_ref(), schema.as_ref());
assert_eq!(2, new_batch_owned.num_columns());
assert_eq!(4, new_batch_owned.num_rows());
}
#[test]
fn concat_empty_record_batch() {
let schema = Arc::new(Schema::new(vec![
Field::new("a", DataType::Int32, false),
Field::new("b", DataType::Utf8, false),
]));
let batch = concat_batches(&schema, []).unwrap();
assert_eq!(batch.schema().as_ref(), schema.as_ref());
assert_eq!(0, batch.num_rows());
}
#[test]
fn concat_record_batches_of_different_schemas_but_compatible_data() {
let schema1 = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, false)]));
let schema2 = Arc::new(Schema::new(vec![Field::new("c", DataType::Int32, false)]));
let batch1 = RecordBatch::try_new(
schema1.clone(),
vec![Arc::new(Int32Array::from(vec![1, 2]))],
)
.unwrap();
let batch2 =
RecordBatch::try_new(schema2, vec![Arc::new(Int32Array::from(vec![3, 4]))]).unwrap();
let batch = concat_batches(&schema1, [&batch1, &batch2]).unwrap();
assert_eq!(batch.schema().as_ref(), schema1.as_ref());
assert_eq!(4, batch.num_rows());
}
#[test]
fn concat_record_batches_of_different_schemas_incompatible_data() {
let schema1 = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, false)]));
let schema2 = Arc::new(Schema::new(vec![Field::new("a", DataType::Utf8, false)]));
let batch1 = RecordBatch::try_new(
schema1.clone(),
vec![Arc::new(Int32Array::from(vec![1, 2]))],
)
.unwrap();
let batch2 = RecordBatch::try_new(
schema2,
vec![Arc::new(StringArray::from(vec!["foo", "bar"]))],
)
.unwrap();
let error = concat_batches(&schema1, [&batch1, &batch2]).unwrap_err();
assert_eq!(error.to_string(), "Invalid argument error: It is not possible to concatenate arrays of different data types.");
}
#[test]
fn concat_capacity() {
let a = Int32Array::from_iter_values(0..100);
let b = Int32Array::from_iter_values(10..20);
let a = concat(&[&a, &b]).unwrap();
let data = a.to_data();
assert_eq!(data.buffers()[0].len(), 440);
assert_eq!(data.buffers()[0].capacity(), 448);
let a = concat(&[&a.slice(10, 20), &b]).unwrap();
let data = a.to_data();
assert_eq!(data.buffers()[0].len(), 120);
assert_eq!(data.buffers()[0].capacity(), 128);
let a = StringArray::from_iter_values(std::iter::repeat("foo").take(100));
let b = StringArray::from(vec!["bingo", "bongo", "lorem", ""]);
let a = concat(&[&a, &b]).unwrap();
let data = a.to_data();
assert_eq!(data.buffers()[0].len(), 420);
assert_eq!(data.buffers()[0].capacity(), 448);
assert_eq!(data.buffers()[1].len(), 315);
assert_eq!(data.buffers()[1].capacity(), 320);
let a = concat(&[&a.slice(10, 40), &b]).unwrap();
let data = a.to_data();
assert_eq!(data.buffers()[0].len(), 180);
assert_eq!(data.buffers()[0].capacity(), 192);
assert_eq!(data.buffers()[1].len(), 135);
assert_eq!(data.buffers()[1].capacity(), 192);
let a = LargeBinaryArray::from_iter_values(std::iter::repeat(b"foo").take(100));
let b = LargeBinaryArray::from_iter_values(std::iter::repeat(b"cupcakes").take(10));
let a = concat(&[&a, &b]).unwrap();
let data = a.to_data();
assert_eq!(data.buffers()[0].len(), 888);
assert_eq!(data.buffers()[0].capacity(), 896);
assert_eq!(data.buffers()[1].len(), 380);
assert_eq!(data.buffers()[1].capacity(), 384);
let a = concat(&[&a.slice(10, 40), &b]).unwrap();
let data = a.to_data();
assert_eq!(data.buffers()[0].len(), 408);
assert_eq!(data.buffers()[0].capacity(), 448);
assert_eq!(data.buffers()[1].len(), 200);
assert_eq!(data.buffers()[1].capacity(), 256);
}
#[test]
fn concat_sparse_nulls() {
let values = StringArray::from_iter_values((0..100).map(|x| x.to_string()));
let keys = Int32Array::from(vec![1; 10]);
let dict_a = DictionaryArray::new(keys, Arc::new(values));
let values = StringArray::new_null(0);
let keys = Int32Array::new_null(10);
let dict_b = DictionaryArray::new(keys, Arc::new(values));
let array = concat(&[&dict_a, &dict_b]).unwrap();
assert_eq!(array.null_count(), 10);
}
}