// datafusion_physical_expr/scalar_function.rs

use std::any::Any;
use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::sync::Arc;

use crate::physical_expr::{down_cast_any_ref, physical_exprs_equal};
use crate::PhysicalExpr;
use arrow::datatypes::{DataType, Schema};
use arrow::record_batch::RecordBatch;
use arrow_array::Array;
use datafusion_common::{internal_err, DFSchema, Result, ScalarValue};
use datafusion_expr::interval_arithmetic::Interval;
use datafusion_expr::sort_properties::ExprProperties;
use datafusion_expr::type_coercion::functions::data_types_with_scalar_udf;
use datafusion_expr::{expr_vec_fmt, ColumnarValue, Expr, ScalarUDF};

/// Physical expression of a scalar function
pub struct ScalarFunctionExpr {
    /// The scalar function to invoke
    fun: Arc<ScalarUDF>,
    /// The name of the function
    name: String,
    /// Argument expressions evaluated against the input batch
    args: Vec<Arc<dyn PhysicalExpr>>,
    /// The data type produced by the function
    return_type: DataType,
    /// Whether the result may be null
    nullable: bool,
}

impl Debug for ScalarFunctionExpr {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("ScalarFunctionExpr")
            .field("fun", &"<FUNC>")
            .field("name", &self.name)
            .field("args", &self.args)
            .field("return_type", &self.return_type)
            .finish()
    }
}

impl ScalarFunctionExpr {
    /// Create a new scalar function expression. `nullable` defaults to `true`;
    /// use [`Self::with_nullable`] to override it.
    pub fn new(
        name: &str,
        fun: Arc<ScalarUDF>,
        args: Vec<Arc<dyn PhysicalExpr>>,
        return_type: DataType,
    ) -> Self {
        Self {
            fun,
            name: name.to_owned(),
            args,
            return_type,
            nullable: true,
        }
    }

    /// Get the scalar function implementation
    pub fn fun(&self) -> &ScalarUDF {
        &self.fun
    }

    /// The name of the function
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Input argument expressions
    pub fn args(&self) -> &[Arc<dyn PhysicalExpr>] {
        &self.args
    }

    /// Data type produced by this expression
    pub fn return_type(&self) -> &DataType {
        &self.return_type
    }

    /// Set whether the result of this expression may be null
    pub fn with_nullable(mut self, nullable: bool) -> Self {
        self.nullable = nullable;
        self
    }

    /// Whether the result of this expression may be null
    pub fn nullable(&self) -> bool {
        self.nullable
    }
}
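
// Illustrative sketch (not part of the original file): constructing and evaluating a
// `ScalarFunctionExpr` by hand. `my_udf`, `schema`, and `batch` are placeholders
// assumed to be built elsewhere (any `Arc<ScalarUDF>` taking a single Int32 argument,
// plus a matching `Schema` and `RecordBatch`); in most code paths the
// `create_physical_expr` function at the bottom of this file does this wiring.
//
// use crate::expressions::col;
//
// let arg: Arc<dyn PhysicalExpr> = col("a", &schema)?;
// let expr = ScalarFunctionExpr::new("my_udf", Arc::clone(&my_udf), vec![arg], DataType::Int32)
//     .with_nullable(true);
// let value = expr.evaluate(&batch)?; // ColumnarValue::Array or ColumnarValue::Scalar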

impl fmt::Display for ScalarFunctionExpr {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}({})", self.name, expr_vec_fmt!(self.args))
    }
}

impl PhysicalExpr for ScalarFunctionExpr {
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn data_type(&self, _input_schema: &Schema) -> Result<DataType> {
        Ok(self.return_type.clone())
    }

    fn nullable(&self, _input_schema: &Schema) -> Result<bool> {
        Ok(self.nullable)
    }

    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
        // Evaluate each argument against the input batch
        let inputs = self
            .args
            .iter()
            .map(|e| e.evaluate(batch))
            .collect::<Result<Vec<_>>>()?;

        // Invoke the UDF on the evaluated arguments
        let output = self.fun.invoke_batch(&inputs, batch.num_rows())?;

        if let ColumnarValue::Array(array) = &output {
            if array.len() != batch.num_rows() {
                // If all inputs were scalars and the UDF returned a single-row array,
                // fold the result back into a scalar; otherwise the UDF produced the
                // wrong number of rows and we report an internal error.
                let preserve_scalar = array.len() == 1
                    && !inputs.is_empty()
                    && inputs
                        .iter()
                        .all(|arg| matches!(arg, ColumnarValue::Scalar(_)));
                return if preserve_scalar {
                    ScalarValue::try_from_array(array, 0).map(ColumnarValue::Scalar)
                } else {
                    internal_err!("UDF returned a different number of rows than expected. Expected: {}, Got: {}",
                        batch.num_rows(), array.len())
                };
            }
        }
        Ok(output)
    }

    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
        self.args.iter().collect()
    }

    fn with_new_children(
        self: Arc<Self>,
        children: Vec<Arc<dyn PhysicalExpr>>,
    ) -> Result<Arc<dyn PhysicalExpr>> {
        Ok(Arc::new(
            ScalarFunctionExpr::new(
                &self.name,
                Arc::clone(&self.fun),
                children,
                self.return_type().clone(),
            )
            .with_nullable(self.nullable),
        ))
    }

    fn evaluate_bounds(&self, children: &[&Interval]) -> Result<Interval> {
        self.fun.evaluate_bounds(children)
    }

    fn propagate_constraints(
        &self,
        interval: &Interval,
        children: &[&Interval],
    ) -> Result<Option<Vec<Interval>>> {
        self.fun.propagate_constraints(interval, children)
    }

    fn dyn_hash(&self, state: &mut dyn Hasher) {
        // Hash the name, arguments, and return type (the same fields compared in
        // `PartialEq` below); the function itself is not hashed.
        let mut s = state;
        self.name.hash(&mut s);
        self.args.hash(&mut s);
        self.return_type.hash(&mut s);
    }

    fn get_properties(&self, children: &[ExprProperties]) -> Result<ExprProperties> {
        let sort_properties = self.fun.output_ordering(children)?;
        let children_range = children
            .iter()
            .map(|props| &props.range)
            .collect::<Vec<_>>();
        let range = self.fun().evaluate_bounds(&children_range)?;

        Ok(ExprProperties {
            sort_properties,
            range,
        })
    }
}

impl PartialEq<dyn Any> for ScalarFunctionExpr {
    fn eq(&self, other: &dyn Any) -> bool {
        down_cast_any_ref(other)
            .downcast_ref::<Self>()
            .map(|x| {
                self.name == x.name
                    && physical_exprs_equal(&self.args, &x.args)
                    && self.return_type == x.return_type
            })
            .unwrap_or(false)
    }
}

/// Create a physical expression for the scalar UDF `fun`, checking that the argument
/// types satisfy the function's signature and resolving the return type and
/// nullability from the logical arguments.
pub fn create_physical_expr(
    fun: &ScalarUDF,
    input_phy_exprs: &[Arc<dyn PhysicalExpr>],
    input_schema: &Schema,
    args: &[Expr],
    input_dfschema: &DFSchema,
) -> Result<Arc<dyn PhysicalExpr>> {
    let input_expr_types = input_phy_exprs
        .iter()
        .map(|e| e.data_type(input_schema))
        .collect::<Result<Vec<_>>>()?;

    // Verify that the input types are compatible with the UDF's signature
    data_types_with_scalar_udf(&input_expr_types, fun)?;

    let return_type =
        fun.return_type_from_exprs(args, input_dfschema, &input_expr_types)?;

    Ok(Arc::new(
        ScalarFunctionExpr::new(
            fun.name(),
            Arc::new(fun.clone()),
            input_phy_exprs.to_vec(),
            return_type,
        )
        .with_nullable(fun.is_nullable(args, input_dfschema)),
    ))
}