Merged
150 changes: 0 additions & 150 deletions datafusion-examples/examples/return_types_udf.rs

This file was deleted.

142 changes: 139 additions & 3 deletions datafusion/core/tests/user_defined/user_defined_scalar_functions.rs
@@ -22,12 +22,16 @@ use arrow_schema::{DataType, Field, Schema};
use datafusion::prelude::*;
use datafusion::{execution::registry::FunctionRegistry, test_util};
use datafusion_common::cast::as_float64_array;
use datafusion_common::{assert_batches_eq, cast::as_int32_array, Result, ScalarValue};
use datafusion_common::{
assert_batches_eq, assert_batches_sorted_eq, cast::as_int32_array, not_impl_err,
plan_err, DFSchema, DataFusionError, Result, ScalarValue,
};
use datafusion_expr::{
create_udaf, create_udf, Accumulator, ColumnarValue, LogicalPlanBuilder, ScalarUDF,
ScalarUDFImpl, Signature, Volatility,
create_udaf, create_udf, Accumulator, ColumnarValue, ExprSchemable,
LogicalPlanBuilder, ScalarUDF, ScalarUDFImpl, Signature, Volatility,
};
use rand::{thread_rng, Rng};
use std::any::Any;
use std::iter;
use std::sync::Arc;

@@ -494,6 +498,127 @@ async fn test_user_defined_functions_zero_argument() -> Result<()> {
Ok(())
}

#[derive(Debug)]
struct TakeUDF {
signature: Signature,
}

impl TakeUDF {
fn new() -> Self {
Self {
signature: Signature::any(3, Volatility::Immutable),
}
}
}

/// Implement a ScalarUDFImpl whose return type is a function of the input values
impl ScalarUDFImpl for TakeUDF {
fn as_any(&self) -> &dyn Any {
self
}
fn name(&self) -> &str {
"tale"
}
fn signature(&self) -> &Signature {
&self.signature
}
fn return_type(&self, _args: &[DataType]) -> Result<DataType> {
not_impl_err!("Not called because return_type_from_exprs is implemented")
}

/// This function returns the type of the first or second argument based on
/// the third argument:
///
/// 1. If the third argument is '0', return the type of the first argument
/// 2. If the third argument is '1', return the type of the second argument
fn return_type_from_exprs(
&self,
arg_exprs: &[Expr],
schema: &DFSchema,
) -> Result<DataType> {
if arg_exprs.len() != 3 {
return plan_err!("Expected 3 arguments, got {}.", arg_exprs.len());
}

let take_idx = if let Some(Expr::Literal(ScalarValue::Int64(Some(idx)))) =
arg_exprs.get(2)
{
if *idx == 0 || *idx == 1 {
*idx as usize
} else {
return plan_err!("The third argument must be 0 or 1, got: {idx}");
}
} else {
return plan_err!(
"The third argument must be a literal of type int64, but got {:?}",
arg_exprs.get(2)
);
};

arg_exprs.get(take_idx).unwrap().get_type(schema)
}

// The actual implementation rethr

Owner commented: What does 'rethr' mean here, is it a small typo?

fn invoke(&self, args: &[ColumnarValue]) -> Result<ColumnarValue> {
let take_idx = match &args[2] {
ColumnarValue::Scalar(ScalarValue::Int64(Some(v))) if *v == 0 || *v == 1 => *v as usize,
_ => unreachable!(),
};
match &args[take_idx] {
ColumnarValue::Array(array) => Ok(ColumnarValue::Array(array.clone())),
ColumnarValue::Scalar(_) => unimplemented!(),
}
}
}
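
For context, a minimal usage sketch of how a UDF like this can be registered and invoked through SQL; this is not part of the diff, and the table `t` with columns `a` (Int32) and `b` (Float64) is hypothetical:

// Hypothetical sketch (not in this PR): register the UDF and call it via SQL,
// assuming a registered table `t` with columns a: Int32 and b: Float64.
let ctx = SessionContext::new();
ctx.register_udf(ScalarUDF::from(TakeUDF::new()));
// The planner resolves the output type through return_type_from_exprs:
// take(a, b, 0) is planned as Int32, take(a, b, 1) as Float64.
let batches = ctx.sql("SELECT take(a, b, 1) FROM t").await?.collect().await?;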

#[tokio::test]
async fn verify_udf_return_type() -> Result<()> {
// Create a new ScalarUDF from the implementation
let take = ScalarUDF::from(TakeUDF::new());

// SELECT
// take(smallint_col, double_col, 0) as take0,
// take(smallint_col, double_col, 1) as take1
// FROM alltypes_plain;
let exprs = vec![
take.call(vec![col("smallint_col"), col("double_col"), lit(0_i64)])
.alias("take0"),
take.call(vec![col("smallint_col"), col("double_col"), lit(1_i64)])
.alias("take1"),
];

let ctx = SessionContext::new();
register_alltypes_parquet(&ctx).await?;

let df = ctx.table("alltypes_plain").await?.select(exprs)?;

let schema = df.schema();

// The output schema should be
// * take0: the type of column smallint_col (int32)
// * take1: the type of column double_col (float64)
assert_eq!(schema.field(0).data_type(), &DataType::Int32);
assert_eq!(schema.field(1).data_type(), &DataType::Float64);

let expected = [
"+-------+-------+",
"| take0 | take1 |",
"+-------+-------+",
"| 0 | 0.0 |",
"| 0 | 0.0 |",
"| 0 | 0.0 |",
"| 0 | 0.0 |",
"| 1 | 10.1 |",
"| 1 | 10.1 |",
"| 1 | 10.1 |",
"| 1 | 10.1 |",
"+-------+-------+",
];
assert_batches_sorted_eq!(&expected, &df.collect().await?);

Ok(())
}

fn create_udf_context() -> SessionContext {
let ctx = SessionContext::new();
// register a custom UDF
Expand Down Expand Up @@ -531,6 +656,17 @@ async fn register_aggregate_csv(ctx: &SessionContext) -> Result<()> {
Ok(())
}

async fn register_alltypes_parquet(ctx: &SessionContext) -> Result<()> {
let testdata = datafusion::test_util::parquet_test_data();
ctx.register_parquet(
"alltypes_plain",
&format!("{testdata}/alltypes_plain.parquet"),
ParquetReadOptions::default(),
)
.await?;
Ok(())
}

/// Execute SQL and return results as a RecordBatch
async fn plan_and_collect(ctx: &SessionContext, sql: &str) -> Result<Vec<RecordBatch>> {
ctx.sql(sql).await?.collect().await
2 changes: 1 addition & 1 deletion datafusion/expr/src/expr_schema.rs
@@ -330,7 +330,7 @@ impl ExprSchemable<DFSchema> for Expr {

fn metadata(&self, schema: &DFSchema) -> Result<HashMap<String, String>> {
match self {
Expr::Column(c) => Ok(schema.metadata().clone()),
Expr::Column(_) => Ok(schema.metadata().clone()),
Expr::Alias(Alias { expr, .. }) => expr.metadata(schema),
_ => Ok(HashMap::new()),
}
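
For illustration, a minimal sketch of the dispatch this match implements; this is not part of the diff, and `df_schema` and column `c1` are hypothetical (assumes ExprSchemable is in scope):

// Hypothetical sketch (not in this PR) of Expr::metadata resolution:
let md_col = col("c1").metadata(&df_schema)?; // schema.metadata().clone()
let md_alias = col("c1").alias("x").metadata(&df_schema)?; // delegates to inner expr, same as md_col
let md_other = lit(1).metadata(&df_schema)?; // any other expr: empty HashMap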