use std::any::Any;
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;
use std::sync::Arc;
use crate::expressions::Literal;
use crate::PhysicalExpr;
use arrow::datatypes::{DataType, Schema};
use arrow::record_batch::RecordBatch;
use arrow_array::Array;
use datafusion_common::{internal_err, DFSchema, Result, ScalarValue};
use datafusion_expr::interval_arithmetic::Interval;
use datafusion_expr::sort_properties::ExprProperties;
use datafusion_expr::type_coercion::functions::data_types_with_scalar_udf;
use datafusion_expr::{
expr_vec_fmt, ColumnarValue, Expr, ReturnTypeArgs, ScalarFunctionArgs, ScalarUDF,
};
/// Physical expression that evaluates a scalar user-defined function
/// ([`ScalarUDF`]) over its evaluated argument expressions.
#[derive(Eq, PartialEq, Hash)]
pub struct ScalarFunctionExpr {
    /// The scalar UDF that is invoked at evaluation time
    fun: Arc<ScalarUDF>,
    /// Display name of the function (set from `fun.name()` by `try_new`)
    name: String,
    /// Argument expressions, evaluated against each batch before invocation
    args: Vec<Arc<dyn PhysicalExpr>>,
    /// Data type of the value this expression produces
    return_type: DataType,
    /// Whether the output may contain nulls (`new` defaults this to `true`;
    /// `try_new` derives it from the UDF's `return_type_from_args`)
    nullable: bool,
}
impl Debug for ScalarFunctionExpr {
    /// Formats the expression for debugging.
    ///
    /// The `fun` field is rendered as the placeholder `"<FUNC>"` since
    /// `ScalarUDF` has no `Debug` output useful here.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("ScalarFunctionExpr")
            .field("fun", &"<FUNC>")
            .field("name", &self.name)
            .field("args", &self.args)
            .field("return_type", &self.return_type)
            // Previously omitted: include `nullable` so debug output
            // reflects the complete struct state.
            .field("nullable", &self.nullable)
            .finish()
    }
}
impl ScalarFunctionExpr {
pub fn new(
name: &str,
fun: Arc<ScalarUDF>,
args: Vec<Arc<dyn PhysicalExpr>>,
return_type: DataType,
) -> Self {
Self {
fun,
name: name.to_owned(),
args,
return_type,
nullable: true,
}
}
pub fn try_new(
fun: Arc<ScalarUDF>,
args: Vec<Arc<dyn PhysicalExpr>>,
schema: &Schema,
) -> Result<Self> {
let name = fun.name().to_string();
let arg_types = args
.iter()
.map(|e| e.data_type(schema))
.collect::<Result<Vec<_>>>()?;
data_types_with_scalar_udf(&arg_types, &fun)?;
let nullables = args
.iter()
.map(|e| e.nullable(schema))
.collect::<Result<Vec<_>>>()?;
let arguments = args
.iter()
.map(|e| {
e.as_any()
.downcast_ref::<Literal>()
.map(|literal| literal.value())
})
.collect::<Vec<_>>();
let ret_args = ReturnTypeArgs {
arg_types: &arg_types,
scalar_arguments: &arguments,
nullables: &nullables,
};
let (return_type, nullable) = fun.return_type_from_args(ret_args)?.into_parts();
Ok(Self {
fun,
name,
args,
return_type,
nullable,
})
}
pub fn fun(&self) -> &ScalarUDF {
&self.fun
}
pub fn name(&self) -> &str {
&self.name
}
pub fn args(&self) -> &[Arc<dyn PhysicalExpr>] {
&self.args
}
pub fn return_type(&self) -> &DataType {
&self.return_type
}
pub fn with_nullable(mut self, nullable: bool) -> Self {
self.nullable = nullable;
self
}
pub fn nullable(&self) -> bool {
self.nullable
}
}
impl fmt::Display for ScalarFunctionExpr {
    /// Renders the expression as `name(arg1, arg2, ...)`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        let formatted_args = expr_vec_fmt!(self.args);
        write!(f, "{}({})", self.name, formatted_args)
    }
}
impl PhysicalExpr for ScalarFunctionExpr {
    /// Return `self` as [`Any`] for downcasting.
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn data_type(&self, _input_schema: &Schema) -> Result<DataType> {
        Ok(self.return_type.clone())
    }

    fn nullable(&self, _input_schema: &Schema) -> Result<bool> {
        Ok(self.nullable)
    }

    /// Evaluate the UDF against `batch`.
    ///
    /// If all inputs were scalars and the UDF produced a single-row array,
    /// that array is collapsed back into a scalar; otherwise an output array
    /// whose length differs from the batch's row count is an internal error.
    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
        // Evaluate every argument expression against the batch.
        let mut evaluated_args = Vec::with_capacity(self.args.len());
        for arg in &self.args {
            evaluated_args.push(arg.evaluate(batch)?);
        }

        // Remember the shape of the inputs before handing them to the UDF,
        // so we can decide whether a 1-row array result should be folded
        // back into a scalar.
        let has_inputs = !evaluated_args.is_empty();
        let inputs_all_scalar = evaluated_args
            .iter()
            .all(|value| matches!(value, ColumnarValue::Scalar(_)));

        let output = self.fun.invoke_with_args(ScalarFunctionArgs {
            args: evaluated_args,
            number_rows: batch.num_rows(),
            return_type: &self.return_type,
        })?;

        if let ColumnarValue::Array(array) = &output {
            if array.len() != batch.num_rows() {
                // A single-row result from all-scalar, non-empty inputs is
                // legitimately scalar; anything else is a bug in the UDF.
                if array.len() == 1 && has_inputs && inputs_all_scalar {
                    return ScalarValue::try_from_array(array, 0)
                        .map(ColumnarValue::Scalar);
                }
                return internal_err!("UDF {} returned a different number of rows than expected. Expected: {}, Got: {}",
                        self.name, batch.num_rows(), array.len());
            }
        }

        Ok(output)
    }

    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
        self.args.iter().collect()
    }

    /// Rebuild this expression with new argument expressions, keeping the
    /// function, name, return type, and nullability.
    fn with_new_children(
        self: Arc<Self>,
        children: Vec<Arc<dyn PhysicalExpr>>,
    ) -> Result<Arc<dyn PhysicalExpr>> {
        let rebuilt = ScalarFunctionExpr::new(
            &self.name,
            Arc::clone(&self.fun),
            children,
            self.return_type.clone(),
        )
        .with_nullable(self.nullable);
        Ok(Arc::new(rebuilt))
    }

    /// Delegate interval bound computation to the UDF.
    fn evaluate_bounds(&self, children: &[&Interval]) -> Result<Interval> {
        self.fun.evaluate_bounds(children)
    }

    /// Delegate constraint propagation to the UDF.
    fn propagate_constraints(
        &self,
        interval: &Interval,
        children: &[&Interval],
    ) -> Result<Option<Vec<Interval>>> {
        self.fun.propagate_constraints(interval, children)
    }

    /// Combine the UDF's ordering, lexicographic-ordering preservation, and
    /// bounds information into the expression's properties.
    fn get_properties(&self, children: &[ExprProperties]) -> Result<ExprProperties> {
        let sort_properties = self.fun.output_ordering(children)?;
        let preserves_lex_ordering = self.fun.preserves_lex_ordering(children)?;

        let child_ranges: Vec<&Interval> =
            children.iter().map(|child| &child.range).collect();
        let range = self.fun.evaluate_bounds(&child_ranges)?;

        Ok(ExprProperties {
            sort_properties,
            range,
            preserves_lex_ordering,
        })
    }
}
/// Build a physical scalar-function expression from logical and physical
/// inputs, validating the argument types against the UDF's signature.
///
/// # Errors
///
/// Returns an error if an argument's type cannot be resolved against
/// `input_schema`, if the types do not match the UDF's signature, or if the
/// UDF fails to compute a return type.
#[deprecated(since = "45.0.0", note = "use ScalarFunctionExpr::new() instead")]
pub fn create_physical_expr(
    fun: &ScalarUDF,
    input_phy_exprs: &[Arc<dyn PhysicalExpr>],
    input_schema: &Schema,
    args: &[Expr],
    input_dfschema: &DFSchema,
) -> Result<Arc<dyn PhysicalExpr>> {
    // Resolve the physical argument types against the physical schema.
    let mut arg_types = Vec::with_capacity(input_phy_exprs.len());
    for expr in input_phy_exprs {
        arg_types.push(expr.data_type(input_schema)?);
    }

    // Verify the resolved types satisfy the UDF's signature.
    data_types_with_scalar_udf(&arg_types, fun)?;

    let return_type = fun.return_type_from_exprs(args, input_dfschema, &arg_types)?;
    let nullable = fun.is_nullable(args, input_dfschema);

    let expr = ScalarFunctionExpr::new(
        fun.name(),
        Arc::new(fun.clone()),
        input_phy_exprs.to_vec(),
        return_type,
    )
    .with_nullable(nullable);
    Ok(Arc::new(expr))
}