use std::any::Any;
use std::collections::HashMap;
use std::pin::Pin;
use std::sync::Arc;
use std::task::{Context, Poll};
use super::expressions::{CastExpr, Column, Literal};
use super::metrics::{BaselineMetrics, ExecutionPlanMetricsSet, MetricsSet};
use super::{
DisplayAs, ExecutionPlanProperties, PlanProperties, RecordBatchStream,
SendableRecordBatchStream, Statistics,
};
use crate::execution_plan::CardinalityEffect;
use crate::joins::utils::{ColumnIndex, JoinFilter};
use crate::{ColumnStatistics, DisplayFormatType, ExecutionPlan, PhysicalExpr};
use arrow::datatypes::{Field, Schema, SchemaRef};
use arrow::record_batch::{RecordBatch, RecordBatchOptions};
use datafusion_common::stats::Precision;
use datafusion_common::tree_node::{
Transformed, TransformedResult, TreeNode, TreeNodeRecursion,
};
use datafusion_common::{internal_err, JoinSide, Result};
use datafusion_execution::TaskContext;
use datafusion_physical_expr::equivalence::ProjectionMapping;
use datafusion_physical_expr::utils::collect_columns;
use datafusion_physical_expr::PhysicalExprRef;
use futures::stream::{Stream, StreamExt};
use itertools::Itertools;
use log::trace;
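
/// Execution plan for a projection: evaluates an arbitrary list of
/// expressions against its input and emits one output column per
/// `(expression, output name)` pair.
///
/// A minimal usage sketch (not compiled here; it assumes `input` is an
/// `Arc<dyn ExecutionPlan>` whose schema has a column named `a` at index 0):
///
/// ```ignore
/// use datafusion_physical_expr::expressions::Column;
///
/// // Keep only column `a`, under its original name.
/// let exprs: Vec<(Arc<dyn PhysicalExpr>, String)> =
///     vec![(Arc::new(Column::new("a", 0)) as _, "a".to_string())];
/// let projection = ProjectionExec::try_new(exprs, input)?;
/// ```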
#[derive(Debug, Clone)]
pub struct ProjectionExec {
    /// The projection expressions stored as tuples of (expression, output column name)
    pub(crate) expr: Vec<(Arc<dyn PhysicalExpr>, String)>,
    /// The schema once the projection has been applied to the input
    schema: SchemaRef,
    /// The input plan
    input: Arc<dyn ExecutionPlan>,
    /// Execution metrics
    metrics: ExecutionPlanMetricsSet,
    /// Cached plan properties (equivalences, output partitioning, etc.)
    cache: PlanProperties,
}
impl ProjectionExec {
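    /// Create a projection on an input plan. Each entry of `expr` pairs a
    /// physical expression with the name of the output column it produces.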
pub fn try_new(
expr: Vec<(Arc<dyn PhysicalExpr>, String)>,
input: Arc<dyn ExecutionPlan>,
) -> Result<Self> {
let input_schema = input.schema();
let fields: Result<Vec<Field>> = expr
.iter()
.map(|(e, name)| {
let mut field = Field::new(
name,
e.data_type(&input_schema)?,
e.nullable(&input_schema)?,
);
field.set_metadata(
get_field_metadata(e, &input_schema).unwrap_or_default(),
);
Ok(field)
})
.collect();
let schema = Arc::new(Schema::new_with_metadata(
fields?,
input_schema.metadata().clone(),
));
let projection_mapping = ProjectionMapping::try_new(&expr, &input_schema)?;
let cache =
Self::compute_properties(&input, &projection_mapping, Arc::clone(&schema))?;
Ok(Self {
expr,
schema,
input,
metrics: ExecutionPlanMetricsSet::new(),
cache,
})
}
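    /// The projection expressions stored as tuples of (expression, output column name)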
pub fn expr(&self) -> &[(Arc<dyn PhysicalExpr>, String)] {
&self.expr
}
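    /// The input plan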
pub fn input(&self) -> &Arc<dyn ExecutionPlan> {
&self.input
}
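    /// Computes the cached plan properties: the projected equivalence
    /// properties, the projected output partitioning, and the input's
    /// pipeline behavior and boundedness.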
fn compute_properties(
input: &Arc<dyn ExecutionPlan>,
projection_mapping: &ProjectionMapping,
schema: SchemaRef,
) -> Result<PlanProperties> {
let mut input_eq_properties = input.equivalence_properties().clone();
input_eq_properties.substitute_oeq_class(projection_mapping)?;
let eq_properties = input_eq_properties.project(projection_mapping, schema);
let input_partition = input.output_partitioning();
let output_partitioning =
input_partition.project(projection_mapping, &input_eq_properties);
Ok(PlanProperties::new(
eq_properties,
output_partitioning,
input.pipeline_behavior(),
input.boundedness(),
))
}
}
impl DisplayAs for ProjectionExec {
fn fmt_as(
&self,
t: DisplayFormatType,
f: &mut std::fmt::Formatter,
) -> std::fmt::Result {
match t {
DisplayFormatType::Default | DisplayFormatType::Verbose => {
let expr: Vec<String> = self
.expr
.iter()
.map(|(e, alias)| {
let e = e.to_string();
if &e != alias {
format!("{e} as {alias}")
} else {
e
}
})
.collect();
write!(f, "ProjectionExec: expr=[{}]", expr.join(", "))
}
}
}
}
impl ExecutionPlan for ProjectionExec {
fn name(&self) -> &'static str {
"ProjectionExec"
}
fn as_any(&self) -> &dyn Any {
self
}
fn properties(&self) -> &PlanProperties {
&self.cache
}
fn children(&self) -> Vec<&Arc<dyn ExecutionPlan>> {
vec![&self.input]
}
fn maintains_input_order(&self) -> Vec<bool> {
        // Projection transforms columns only, so row order is preserved.
        vec![true]
}
fn with_new_children(
self: Arc<Self>,
mut children: Vec<Arc<dyn ExecutionPlan>>,
) -> Result<Arc<dyn ExecutionPlan>> {
ProjectionExec::try_new(self.expr.clone(), children.swap_remove(0))
.map(|p| Arc::new(p) as _)
}
fn benefits_from_input_partitioning(&self) -> Vec<bool> {
let all_simple_exprs = self
.expr
.iter()
.all(|(e, _)| e.as_any().is::<Column>() || e.as_any().is::<Literal>());
        // Pure column/literal projections are cheap; extra input
        // partitioning would not speed them up.
        vec![!all_simple_exprs]
}
fn execute(
&self,
partition: usize,
context: Arc<TaskContext>,
) -> Result<SendableRecordBatchStream> {
trace!("Start ProjectionExec::execute for partition {} of context session_id {} and task_id {:?}", partition, context.session_id(), context.task_id());
Ok(Box::pin(ProjectionStream {
schema: Arc::clone(&self.schema),
expr: self.expr.iter().map(|x| Arc::clone(&x.0)).collect(),
input: self.input.execute(partition, context)?,
baseline_metrics: BaselineMetrics::new(&self.metrics, partition),
}))
}
fn metrics(&self) -> Option<MetricsSet> {
Some(self.metrics.clone_inner())
}
fn statistics(&self) -> Result<Statistics> {
Ok(stats_projection(
self.input.statistics()?,
self.expr.iter().map(|(e, _)| Arc::clone(e)),
Arc::clone(&self.schema),
))
}
fn supports_limit_pushdown(&self) -> bool {
true
}
fn cardinality_effect(&self) -> CardinalityEffect {
CardinalityEffect::Equal
}
fn try_swapping_with_projection(
&self,
projection: &ProjectionExec,
) -> Result<Option<Arc<dyn ExecutionPlan>>> {
let maybe_unified = try_unifying_projections(projection, self)?;
if let Some(new_plan) = maybe_unified {
remove_unnecessary_projections(new_plan).data().map(Some)
} else {
Ok(Some(Arc::new(projection.clone())))
}
}
}
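/// If `e` is a direct column reference (possibly wrapped in casts), returns
/// the field-level metadata of the referenced input field; otherwise returns
/// `None`.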
pub(crate) fn get_field_metadata(
e: &Arc<dyn PhysicalExpr>,
input_schema: &Schema,
) -> Option<HashMap<String, String>> {
if let Some(cast) = e.as_any().downcast_ref::<CastExpr>() {
return get_field_metadata(cast.expr(), input_schema);
}
e.as_any()
.downcast_ref::<Column>()
.map(|column| input_schema.field(column.index()).metadata())
.cloned()
}
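/// Projects input statistics onto the given expressions: column statistics
/// carry over for plain column references and become unknown otherwise, and
/// the total byte size is recomputed exactly when every output type has a
/// fixed primitive width.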
fn stats_projection(
mut stats: Statistics,
exprs: impl Iterator<Item = Arc<dyn PhysicalExpr>>,
schema: SchemaRef,
) -> Statistics {
let mut primitive_row_size = 0;
let mut primitive_row_size_possible = true;
let mut column_statistics = vec![];
for expr in exprs {
let col_stats = if let Some(col) = expr.as_any().downcast_ref::<Column>() {
stats.column_statistics[col.index()].clone()
} else {
ColumnStatistics::new_unknown()
};
column_statistics.push(col_stats);
if let Ok(data_type) = expr.data_type(&schema) {
if let Some(value) = data_type.primitive_width() {
primitive_row_size += value;
continue;
}
}
primitive_row_size_possible = false;
}
if primitive_row_size_possible {
stats.total_byte_size =
Precision::Exact(primitive_row_size).multiply(&stats.num_rows);
}
stats.column_statistics = column_statistics;
stats
}
/// The stream produced by [`ProjectionExec::execute`]; applies the projection
/// expressions to each input batch.
struct ProjectionStream {
    schema: SchemaRef,
    expr: Vec<Arc<dyn PhysicalExpr>>,
    input: SendableRecordBatchStream,
    baseline_metrics: BaselineMetrics,
}
impl ProjectionStream {
    /// Applies the projection expressions to `batch`, producing an output
    /// batch with this stream's schema.
fn batch_project(&self, batch: &RecordBatch) -> Result<RecordBatch> {
let _timer = self.baseline_metrics.elapsed_compute().timer();
let arrays = self
.expr
.iter()
.map(|expr| {
expr.evaluate(batch)
.and_then(|v| v.into_array(batch.num_rows()))
})
.collect::<Result<Vec<_>>>()?;
        if arrays.is_empty() {
            // A projection without expressions drops all columns, so the row
            // count must be carried explicitly via `RecordBatchOptions`.
            let options =
                RecordBatchOptions::new().with_row_count(Some(batch.num_rows()));
RecordBatch::try_new_with_options(Arc::clone(&self.schema), arrays, &options)
.map_err(Into::into)
} else {
RecordBatch::try_new(Arc::clone(&self.schema), arrays).map_err(Into::into)
}
}
}
impl Stream for ProjectionStream {
type Item = Result<RecordBatch>;
fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Self::Item>> {
let poll = self.input.poll_next_unpin(cx).map(|x| match x {
Some(Ok(batch)) => Some(self.batch_project(&batch)),
other => other,
});
self.baseline_metrics.record_poll(poll)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.input.size_hint()
}
}
impl RecordBatchStream for ProjectionStream {
fn schema(&self) -> SchemaRef {
Arc::clone(&self.schema)
}
}
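/// An operator that can absorb a projection of column indices into its own
/// output, e.g. a join that prunes its output columns internally.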
pub trait EmbeddedProjection: ExecutionPlan + Sized {
fn with_projection(&self, projection: Option<Vec<usize>>) -> Result<Self>;
}
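/// Tries to embed the columns referenced by `projection` into
/// `execution_plan` via [`EmbeddedProjection::with_projection`]. Returns the
/// new plan, topped by a rewritten projection when the original projection
/// did more than select columns, or `None` when embedding is impossible or
/// would not prune anything.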
pub fn try_embed_projection<Exec: EmbeddedProjection + 'static>(
projection: &ProjectionExec,
execution_plan: &Exec,
) -> Result<Option<Arc<dyn ExecutionPlan>>> {
let projection_index = collect_column_indices(projection.expr());
if projection_index.is_empty() {
return Ok(None);
};
    // If the projection indices cover every input column (they are sorted
    // and deduplicated, so a contiguous range 0..n of length n means full
    // coverage), embedding would not prune anything.
    if projection_index.len() == projection_index.last().unwrap() + 1
        && projection_index.len() == execution_plan.schema().fields().len()
    {
        return Ok(None);
    }
let new_execution_plan =
Arc::new(execution_plan.with_projection(Some(projection_index.to_vec()))?);
let embed_project_exprs = projection_index
.iter()
.zip(new_execution_plan.schema().fields())
.map(|(index, field)| {
(
Arc::new(Column::new(field.name(), *index)) as Arc<dyn PhysicalExpr>,
field.name().to_owned(),
)
})
.collect::<Vec<_>>();
let mut new_projection_exprs = Vec::with_capacity(projection.expr().len());
for (expr, alias) in projection.expr() {
let Some(expr) = update_expr(expr, embed_project_exprs.as_slice(), false)? else {
return Ok(None);
};
new_projection_exprs.push((expr, alias.clone()));
}
let new_projection = Arc::new(ProjectionExec::try_new(
new_projection_exprs,
Arc::clone(&new_execution_plan) as _,
)?);
if is_projection_removable(&new_projection) {
Ok(Some(new_execution_plan))
} else {
Ok(Some(new_projection))
}
}
/// The on-clause of a hash join: pairs of equated (left, right) expressions.
pub type JoinOn = Vec<(PhysicalExprRef, PhysicalExprRef)>;
/// Borrowed form of [`JoinOn`].
pub type JoinOnRef<'a> = &'a [(PhysicalExprRef, PhysicalExprRef)];
/// The result of pushing a projection through a join: new projected children
/// plus the rewritten filter and on-clause.
pub struct JoinData {
    pub projected_left_child: ProjectionExec,
    pub projected_right_child: ProjectionExec,
    pub join_filter: Option<JoinFilter>,
    pub join_on: JoinOn,
}
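/// Tries to push `projection` down through a join with the given children.
/// On success, returns the projected children together with the join's `on`
/// pairs and filter rewritten against the new child schemas; returns `None`
/// when the projection cannot be pushed down.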
pub fn try_pushdown_through_join(
projection: &ProjectionExec,
join_left: &Arc<dyn ExecutionPlan>,
join_right: &Arc<dyn ExecutionPlan>,
join_on: JoinOnRef,
schema: SchemaRef,
filter: Option<&JoinFilter>,
) -> Result<Option<JoinData>> {
let Some(projection_as_columns) = physical_to_column_exprs(projection.expr()) else {
return Ok(None);
};
let (far_right_left_col_ind, far_left_right_col_ind) =
join_table_borders(join_left.schema().fields().len(), &projection_as_columns);
if !join_allows_pushdown(
&projection_as_columns,
&schema,
far_right_left_col_ind,
far_left_right_col_ind,
) {
return Ok(None);
}
let new_filter = if let Some(filter) = filter {
match update_join_filter(
&projection_as_columns[0..=far_right_left_col_ind as _],
&projection_as_columns[far_left_right_col_ind as _..],
filter,
join_left.schema().fields().len(),
) {
Some(updated_filter) => Some(updated_filter),
None => return Ok(None),
}
} else {
None
};
let Some(new_on) = update_join_on(
&projection_as_columns[0..=far_right_left_col_ind as _],
&projection_as_columns[far_left_right_col_ind as _..],
join_on,
join_left.schema().fields().len(),
) else {
return Ok(None);
};
let (new_left, new_right) = new_join_children(
&projection_as_columns,
far_right_left_col_ind,
far_left_right_col_ind,
join_left,
join_right,
)?;
Ok(Some(JoinData {
projected_left_child: new_left,
projected_right_child: new_right,
join_filter: new_filter,
join_on: new_on,
}))
}
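/// Drops `plan` if it is an identity [`ProjectionExec`], or asks its input to
/// swap places with it otherwise; non-projection plans pass through
/// unchanged.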
pub fn remove_unnecessary_projections(
plan: Arc<dyn ExecutionPlan>,
) -> Result<Transformed<Arc<dyn ExecutionPlan>>> {
let maybe_modified =
if let Some(projection) = plan.as_any().downcast_ref::<ProjectionExec>() {
if is_projection_removable(projection) {
return Ok(Transformed::yes(Arc::clone(projection.input())));
}
projection
.input()
.try_swapping_with_projection(projection)?
} else {
return Ok(Transformed::no(plan));
};
Ok(maybe_modified.map_or(Transformed::no(plan), Transformed::yes))
}
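/// Returns `true` if the projection is an identity mapping: every expression
/// is a bare column carrying its original name and index, and no input field
/// is dropped.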
fn is_projection_removable(projection: &ProjectionExec) -> bool {
let exprs = projection.expr();
exprs.iter().enumerate().all(|(idx, (expr, alias))| {
let Some(col) = expr.as_any().downcast_ref::<Column>() else {
return false;
};
col.name() == alias && col.index() == idx
}) && exprs.len() == projection.input().schema().fields().len()
}
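/// Returns `true` if every projection expression is a plain column whose
/// output name equals the column name (i.e. no aliasing).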
pub fn all_alias_free_columns(exprs: &[(Arc<dyn PhysicalExpr>, String)]) -> bool {
exprs.iter().all(|(expr, alias)| {
expr.as_any()
.downcast_ref::<Column>()
.map(|column| column.name() == alias)
.unwrap_or(false)
})
}
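/// Maps a source's projected column indices through this projection's
/// expressions. Callers must ensure all expressions are plain [`Column`]s;
/// any other expression is silently skipped.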
pub fn new_projections_for_columns(
projection: &ProjectionExec,
source: &[usize],
) -> Vec<usize> {
projection
.expr()
.iter()
.filter_map(|(expr, _)| {
expr.as_any()
.downcast_ref::<Column>()
.map(|expr| source[expr.index()])
})
.collect()
}
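/// Creates a new [`ProjectionExec`] with this projection's expressions on top
/// of the given `child` plan.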
pub fn make_with_child(
projection: &ProjectionExec,
child: &Arc<dyn ExecutionPlan>,
) -> Result<Arc<dyn ExecutionPlan>> {
ProjectionExec::try_new(projection.expr().to_vec(), Arc::clone(child))
.map(|e| Arc::new(e) as _)
}
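/// Returns `true` if every projection expression is a plain [`Column`].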
pub fn all_columns(exprs: &[(Arc<dyn PhysicalExpr>, String)]) -> bool {
exprs.iter().all(|(expr, _)| expr.as_any().is::<Column>())
}
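/// Rewrites `expr` with respect to `projected_exprs`. With
/// `sync_with_child = true`, `expr` is assumed to sit on top of the
/// projection's output, and each column is replaced by the projected
/// expression it refers to (pushing `expr` below the projection). With
/// `sync_with_child = false`, `expr` is assumed to refer to the projection's
/// input, and each column is redirected to the matching output position
/// (lifting `expr` above the projection); for example, against a projection
/// `[a@0 AS x]`, the predicate `a@0 > 5` becomes `x@0 > 5`. Returns
/// `Ok(None)` when some column cannot be rewritten.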
pub fn update_expr(
expr: &Arc<dyn PhysicalExpr>,
projected_exprs: &[(Arc<dyn PhysicalExpr>, String)],
sync_with_child: bool,
) -> Result<Option<Arc<dyn PhysicalExpr>>> {
    /// Tracks the state of the rewrite while traversing the expression tree.
    #[derive(Debug, PartialEq)]
    enum RewriteState {
        /// The expression is unchanged so far.
        Unchanged,
        /// Some part of the expression has been successfully rewritten.
        RewrittenValid,
        /// Part of the expression references a column that `projected_exprs`
        /// does not preserve, so the rewrite cannot succeed.
        RewrittenInvalid,
    }
let mut state = RewriteState::Unchanged;
let new_expr = Arc::clone(expr)
.transform_up(|expr: Arc<dyn PhysicalExpr>| {
if state == RewriteState::RewrittenInvalid {
return Ok(Transformed::no(expr));
}
let Some(column) = expr.as_any().downcast_ref::<Column>() else {
return Ok(Transformed::no(expr));
};
if sync_with_child {
state = RewriteState::RewrittenValid;
Ok(Transformed::yes(Arc::clone(
&projected_exprs[column.index()].0,
)))
} else {
state = RewriteState::RewrittenInvalid;
projected_exprs
.iter()
.enumerate()
.find_map(|(index, (projected_expr, alias))| {
projected_expr.as_any().downcast_ref::<Column>().and_then(
|projected_column| {
(column.name().eq(projected_column.name())
&& column.index() == projected_column.index())
.then(|| {
state = RewriteState::RewrittenValid;
Arc::new(Column::new(alias, index)) as _
})
},
)
})
.map_or_else(
|| Ok(Transformed::no(expr)),
|c| Ok(Transformed::yes(c)),
)
}
})
.data();
new_expr.map(|e| (state == RewriteState::RewrittenValid).then_some(e))
}
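/// Downcasts every projection expression to a [`Column`]; returns `None` if
/// any expression is not a plain column reference.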
pub fn physical_to_column_exprs(
exprs: &[(Arc<dyn PhysicalExpr>, String)],
) -> Option<Vec<(Column, String)>> {
exprs
.iter()
.map(|(expr, alias)| {
expr.as_any()
.downcast_ref::<Column>()
.map(|col| (col.clone(), alias.clone()))
})
.collect()
}
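/// Splits the projection columns at the join boundary and builds one
/// [`ProjectionExec`] per join child, rebasing the right-side column indices
/// by the left child's field count.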
pub fn new_join_children(
projection_as_columns: &[(Column, String)],
far_right_left_col_ind: i32,
far_left_right_col_ind: i32,
left_child: &Arc<dyn ExecutionPlan>,
right_child: &Arc<dyn ExecutionPlan>,
) -> Result<(ProjectionExec, ProjectionExec)> {
let new_left = ProjectionExec::try_new(
projection_as_columns[0..=far_right_left_col_ind as _]
.iter()
.map(|(col, alias)| {
(
Arc::new(Column::new(col.name(), col.index())) as _,
alias.clone(),
)
})
.collect_vec(),
Arc::clone(left_child),
)?;
let left_size = left_child.schema().fields().len() as i32;
let new_right = ProjectionExec::try_new(
projection_as_columns[far_left_right_col_ind as _..]
.iter()
.map(|(col, alias)| {
(
Arc::new(Column::new(
col.name(),
(col.index() as i32 - left_size) as _,
)) as _,
alias.clone(),
)
})
.collect_vec(),
Arc::clone(right_child),
)?;
Ok((new_left, new_right))
}
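/// Checks whether the projection can be pushed through a join: it must
/// narrow the join schema, reference at least one column from each side, and
/// keep all left-table columns strictly before all right-table columns.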
pub fn join_allows_pushdown(
projection_as_columns: &[(Column, String)],
join_schema: &SchemaRef,
far_right_left_col_ind: i32,
far_left_right_col_ind: i32,
) -> bool {
projection_as_columns.len() < join_schema.fields().len()
&& (far_right_left_col_ind + 1 == far_left_right_col_ind)
&& far_right_left_col_ind >= 0
&& far_left_right_col_ind < projection_as_columns.len() as i32
}
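/// Returns the last index of the initial run of left-table columns and the
/// first index of the trailing run of right-table columns in the projection,
/// using `-1` and `projection_as_columns.len()` respectively when a run is
/// empty.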
pub fn join_table_borders(
left_table_column_count: usize,
projection_as_columns: &[(Column, String)],
) -> (i32, i32) {
let far_right_left_col_ind = projection_as_columns
.iter()
.enumerate()
.take_while(|(_, (projection_column, _))| {
projection_column.index() < left_table_column_count
})
.last()
.map(|(index, _)| index as i32)
.unwrap_or(-1);
let far_left_right_col_ind = projection_as_columns
.iter()
.enumerate()
.rev()
.take_while(|(_, (projection_column, _))| {
projection_column.index() >= left_table_column_count
})
.last()
.map(|(index, _)| index as i32)
.unwrap_or(projection_as_columns.len() as i32);
(far_right_left_col_ind, far_left_right_col_ind)
}
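/// Rewrites the join's `on` expressions so that their columns point into the
/// new child projections; returns `None` if any `on` column is not preserved
/// by the projections.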
pub fn update_join_on(
proj_left_exprs: &[(Column, String)],
proj_right_exprs: &[(Column, String)],
hash_join_on: &[(PhysicalExprRef, PhysicalExprRef)],
left_field_size: usize,
) -> Option<Vec<(PhysicalExprRef, PhysicalExprRef)>> {
#[allow(clippy::map_identity)]
let (left_idx, right_idx): (Vec<_>, Vec<_>) = hash_join_on
.iter()
.map(|(left, right)| (left, right))
.unzip();
let new_left_columns = new_columns_for_join_on(&left_idx, proj_left_exprs, 0);
let new_right_columns =
new_columns_for_join_on(&right_idx, proj_right_exprs, left_field_size);
match (new_left_columns, new_right_columns) {
(Some(left), Some(right)) => Some(left.into_iter().zip(right).collect()),
_ => None,
}
}
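/// Rewrites the join filter's column indices against the new child
/// projections; returns `None` if any column the filter needs is not
/// preserved.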
pub fn update_join_filter(
projection_left_exprs: &[(Column, String)],
projection_right_exprs: &[(Column, String)],
join_filter: &JoinFilter,
left_field_size: usize,
) -> Option<JoinFilter> {
let mut new_left_indices = new_indices_for_join_filter(
join_filter,
JoinSide::Left,
projection_left_exprs,
0,
)
.into_iter();
let mut new_right_indices = new_indices_for_join_filter(
join_filter,
JoinSide::Right,
projection_right_exprs,
left_field_size,
)
.into_iter();
(new_right_indices.len() + new_left_indices.len()
== join_filter.column_indices().len())
.then(|| {
JoinFilter::new(
Arc::clone(join_filter.expression()),
join_filter
.column_indices()
.iter()
.map(|col_idx| ColumnIndex {
index: if col_idx.side == JoinSide::Left {
new_left_indices.next().unwrap()
} else {
new_right_indices.next().unwrap()
},
side: col_idx.side,
})
.collect(),
Arc::clone(join_filter.schema()),
)
})
}
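/// Fuses `projection` with its child `ProjectionExec` into a single
/// projection, unless fusing would duplicate the evaluation of a non-trivial
/// child expression that the parent references more than once.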
fn try_unifying_projections(
projection: &ProjectionExec,
child: &ProjectionExec,
) -> Result<Option<Arc<dyn ExecutionPlan>>> {
let mut projected_exprs = vec![];
let mut column_ref_map: HashMap<Column, usize> = HashMap::new();
projection.expr().iter().for_each(|(expr, _)| {
expr.apply(|expr| {
Ok({
if let Some(column) = expr.as_any().downcast_ref::<Column>() {
*column_ref_map.entry(column.clone()).or_default() += 1;
}
TreeNodeRecursion::Continue
})
})
.unwrap();
});
    if column_ref_map.iter().any(|(column, count)| {
        *count > 1 && !is_expr_trivial(&child.expr()[column.index()].0)
    }) {
return Ok(None);
}
for (expr, alias) in projection.expr() {
let Some(expr) = update_expr(expr, child.expr(), true)? else {
return Ok(None);
};
projected_exprs.push((expr, alias.clone()));
}
ProjectionExec::try_new(projected_exprs, Arc::clone(child.input()))
.map(|e| Some(Arc::new(e) as _))
}
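/// Collects the sorted, deduplicated column indices referenced by `exprs`.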
fn collect_column_indices(exprs: &[(Arc<dyn PhysicalExpr>, String)]) -> Vec<usize> {
let mut indices = exprs
.iter()
.flat_map(|(expr, _)| collect_columns(expr))
.map(|x| x.index())
.collect::<std::collections::HashSet<_>>()
.into_iter()
.collect::<Vec<_>>();
indices.sort();
indices
}
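/// Maps the join filter's column indices for one join side onto positions in
/// that side's new projection, dropping indices of columns the projection
/// does not keep.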
fn new_indices_for_join_filter(
join_filter: &JoinFilter,
join_side: JoinSide,
projection_exprs: &[(Column, String)],
column_index_offset: usize,
) -> Vec<usize> {
join_filter
.column_indices()
.iter()
.filter(|col_idx| col_idx.side == join_side)
.filter_map(|col_idx| {
projection_exprs
.iter()
.position(|(col, _)| col_idx.index + column_index_offset == col.index())
})
.collect()
}
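/// Rewrites one side's `on` columns to point into the new projection, using
/// `column_index_offset` to rebase right-side indices into join-schema
/// coordinates; returns `None` unless every column is found.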
fn new_columns_for_join_on(
hash_join_on: &[&PhysicalExprRef],
projection_exprs: &[(Column, String)],
column_index_offset: usize,
) -> Option<Vec<PhysicalExprRef>> {
let new_columns = hash_join_on
.iter()
.filter_map(|on| {
Arc::clone(*on)
.transform(|expr| {
if let Some(column) = expr.as_any().downcast_ref::<Column>() {
let new_column = projection_exprs
.iter()
.enumerate()
.find(|(_, (proj_column, _))| {
column.name() == proj_column.name()
&& column.index() + column_index_offset
== proj_column.index()
})
.map(|(index, (_, alias))| Column::new(alias, index));
if let Some(new_column) = new_column {
Ok(Transformed::yes(Arc::new(new_column)))
} else {
internal_err!(
"Column {:?} not found in projection expressions",
column
)
}
} else {
Ok(Transformed::no(expr))
}
})
.data()
.ok()
})
.collect::<Vec<_>>();
(new_columns.len() == hash_join_on.len()).then_some(new_columns)
}
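/// Returns `true` if the expression is trivial to re-evaluate, i.e. a plain
/// [`Column`] or a [`Literal`].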
fn is_expr_trivial(expr: &Arc<dyn PhysicalExpr>) -> bool {
    expr.as_any().is::<Column>() || expr.as_any().is::<Literal>()
}
#[cfg(test)]
mod tests {
use super::*;
use std::sync::Arc;
use crate::common::collect;
use crate::test;
use arrow_schema::DataType;
use datafusion_common::ScalarValue;
use datafusion_expr::Operator;
use datafusion_physical_expr::expressions::{BinaryExpr, Column, Literal};
#[test]
fn test_collect_column_indices() -> Result<()> {
let expr = Arc::new(BinaryExpr::new(
Arc::new(Column::new("b", 7)),
Operator::Minus,
Arc::new(BinaryExpr::new(
Arc::new(Literal::new(ScalarValue::Int32(Some(1)))),
Operator::Plus,
Arc::new(Column::new("a", 1)),
)),
));
let column_indices = collect_column_indices(&[(expr, "b-(1+a)".to_string())]);
assert_eq!(column_indices, vec![1, 7]);
Ok(())
}
#[test]
fn test_join_table_borders() -> Result<()> {
let projections = vec![
(Column::new("b", 1), "b".to_owned()),
(Column::new("c", 2), "c".to_owned()),
(Column::new("e", 4), "e".to_owned()),
(Column::new("d", 3), "d".to_owned()),
(Column::new("c", 2), "c".to_owned()),
(Column::new("f", 5), "f".to_owned()),
(Column::new("h", 7), "h".to_owned()),
(Column::new("g", 6), "g".to_owned()),
];
let left_table_column_count = 5;
assert_eq!(
join_table_borders(left_table_column_count, &projections),
(4, 5)
);
let left_table_column_count = 8;
assert_eq!(
join_table_borders(left_table_column_count, &projections),
(7, 8)
);
let left_table_column_count = 1;
assert_eq!(
join_table_borders(left_table_column_count, &projections),
(-1, 0)
);
let projections = vec![
(Column::new("a", 0), "a".to_owned()),
(Column::new("b", 1), "b".to_owned()),
(Column::new("d", 3), "d".to_owned()),
(Column::new("g", 6), "g".to_owned()),
(Column::new("e", 4), "e".to_owned()),
(Column::new("f", 5), "f".to_owned()),
(Column::new("e", 4), "e".to_owned()),
(Column::new("h", 7), "h".to_owned()),
];
let left_table_column_count = 5;
assert_eq!(
join_table_borders(left_table_column_count, &projections),
(2, 7)
);
let left_table_column_count = 7;
assert_eq!(
join_table_borders(left_table_column_count, &projections),
(6, 7)
);
Ok(())
}
#[tokio::test]
async fn project_no_column() -> Result<()> {
let task_ctx = Arc::new(TaskContext::default());
let exec = test::scan_partitioned(1);
let expected = collect(exec.execute(0, Arc::clone(&task_ctx))?)
.await
.unwrap();
let projection = ProjectionExec::try_new(vec![], exec)?;
let stream = projection.execute(0, Arc::clone(&task_ctx))?;
let output = collect(stream).await.unwrap();
assert_eq!(output.len(), expected.len());
Ok(())
}
fn get_stats() -> Statistics {
Statistics {
num_rows: Precision::Exact(5),
total_byte_size: Precision::Exact(23),
column_statistics: vec![
ColumnStatistics {
distinct_count: Precision::Exact(5),
max_value: Precision::Exact(ScalarValue::Int64(Some(21))),
min_value: Precision::Exact(ScalarValue::Int64(Some(-4))),
sum_value: Precision::Exact(ScalarValue::Int64(Some(42))),
null_count: Precision::Exact(0),
},
ColumnStatistics {
distinct_count: Precision::Exact(1),
max_value: Precision::Exact(ScalarValue::from("x")),
min_value: Precision::Exact(ScalarValue::from("a")),
sum_value: Precision::Absent,
null_count: Precision::Exact(3),
},
ColumnStatistics {
distinct_count: Precision::Absent,
max_value: Precision::Exact(ScalarValue::Float32(Some(1.1))),
min_value: Precision::Exact(ScalarValue::Float32(Some(0.1))),
sum_value: Precision::Exact(ScalarValue::Float32(Some(5.5))),
null_count: Precision::Absent,
},
],
}
}
fn get_schema() -> Schema {
let field_0 = Field::new("col0", DataType::Int64, false);
let field_1 = Field::new("col1", DataType::Utf8, false);
let field_2 = Field::new("col2", DataType::Float32, false);
Schema::new(vec![field_0, field_1, field_2])
}
#[tokio::test]
async fn test_stats_projection_columns_only() {
let source = get_stats();
let schema = get_schema();
let exprs: Vec<Arc<dyn PhysicalExpr>> = vec![
Arc::new(Column::new("col1", 1)),
Arc::new(Column::new("col0", 0)),
];
let result = stats_projection(source, exprs.into_iter(), Arc::new(schema));
let expected = Statistics {
num_rows: Precision::Exact(5),
total_byte_size: Precision::Exact(23),
column_statistics: vec![
ColumnStatistics {
distinct_count: Precision::Exact(1),
max_value: Precision::Exact(ScalarValue::from("x")),
min_value: Precision::Exact(ScalarValue::from("a")),
sum_value: Precision::Absent,
null_count: Precision::Exact(3),
},
ColumnStatistics {
distinct_count: Precision::Exact(5),
max_value: Precision::Exact(ScalarValue::Int64(Some(21))),
min_value: Precision::Exact(ScalarValue::Int64(Some(-4))),
sum_value: Precision::Exact(ScalarValue::Int64(Some(42))),
null_count: Precision::Exact(0),
},
],
};
assert_eq!(result, expected);
}
#[tokio::test]
async fn test_stats_projection_column_with_primitive_width_only() {
let source = get_stats();
let schema = get_schema();
let exprs: Vec<Arc<dyn PhysicalExpr>> = vec![
Arc::new(Column::new("col2", 2)),
Arc::new(Column::new("col0", 0)),
];
let result = stats_projection(source, exprs.into_iter(), Arc::new(schema));
let expected = Statistics {
num_rows: Precision::Exact(5),
total_byte_size: Precision::Exact(60),
column_statistics: vec![
ColumnStatistics {
distinct_count: Precision::Absent,
max_value: Precision::Exact(ScalarValue::Float32(Some(1.1))),
min_value: Precision::Exact(ScalarValue::Float32(Some(0.1))),
sum_value: Precision::Exact(ScalarValue::Float32(Some(5.5))),
null_count: Precision::Absent,
},
ColumnStatistics {
distinct_count: Precision::Exact(5),
max_value: Precision::Exact(ScalarValue::Int64(Some(21))),
min_value: Precision::Exact(ScalarValue::Int64(Some(-4))),
sum_value: Precision::Exact(ScalarValue::Int64(Some(42))),
null_count: Precision::Exact(0),
},
],
};
assert_eq!(result, expected);
}
}