diff --git a/datafusion/core/tests/optimizer/mod.rs b/datafusion/core/tests/optimizer/mod.rs index 6466e9ad96d17..25369bd2b742e 100644 --- a/datafusion/core/tests/optimizer/mod.rs +++ b/datafusion/core/tests/optimizer/mod.rs @@ -18,6 +18,9 @@ //! Tests for the DataFusion SQL query planner that require functions from the //! datafusion-functions crate. +use datafusion_expr::execution_props::ExecutionProps; +use datafusion_expr::simplify::SimplifyContext; +use datafusion_optimizer::simplify_expressions::ExprSimplifier; use insta::assert_snapshot; use std::any::Any; use std::collections::HashMap; @@ -26,13 +29,16 @@ use std::sync::Arc; use arrow::datatypes::{ DataType, Field, Fields, Schema, SchemaBuilder, SchemaRef, TimeUnit, }; +use datafusion::functions::datetime::expr_fn; use datafusion_common::config::ConfigOptions; use datafusion_common::tree_node::TransformedResult; -use datafusion_common::{DFSchema, Result, ScalarValue, TableReference, plan_err}; +use datafusion_common::{ + DFSchema, DFSchemaRef, Result, ScalarValue, TableReference, plan_err, +}; use datafusion_expr::interval_arithmetic::{Interval, NullableInterval}; use datafusion_expr::{ AggregateUDF, BinaryExpr, Expr, ExprSchemable, LogicalPlan, Operator, ScalarUDF, - TableSource, WindowUDF, col, lit, + TableSource, WindowUDF, and, col, lit, or, }; use datafusion_functions::core::expr_ext::FieldAccessor; use datafusion_optimizer::analyzer::Analyzer; @@ -378,3 +384,196 @@ fn validate_unchanged_cases(guarantees: &[(Expr, NullableInterval)], cases: &[Ex ); } } + +// DatePart preimage tests +#[test] +fn test_preimage_date_part_date32_eq() { + let schema = expr_test_schema(); + // date_part(c1, DatePart::Year) = 2024 -> c1 >= 2024-01-01 AND c1 < 2025-01-01 + let expr_lt = expr_fn::date_part(lit("year"), col("date32")).eq(lit(2024i32)); + let expected = and( + col("date32").gt_eq(lit(ScalarValue::Date32(Some(19723)))), + col("date32").lt(lit(ScalarValue::Date32(Some(20089)))), + ); + assert_eq!(optimize_test(expr_lt, &schema), expected) +} + +#[test] +fn test_preimage_date_part_date64_not_eq() { + let schema = expr_test_schema(); + // date_part(c1, DatePart::Year) <> 2024 -> c1 < 2024-01-01 AND c1 >= 2025-01-01 + let expr_lt = expr_fn::date_part(lit("year"), col("date64")).not_eq(lit(2024i32)); + let expected = or( + col("date64").lt(lit(ScalarValue::Date64(Some(19723 * 86_400_000)))), + col("date64").gt_eq(lit(ScalarValue::Date64(Some(20089 * 86_400_000)))), + ); + assert_eq!(optimize_test(expr_lt, &schema), expected) +} + +#[test] +fn test_preimage_date_part_timestamp_nano_lt() { + let schema = expr_test_schema(); + let expr_lt = expr_fn::date_part(lit("year"), col("ts_nano_none")).lt(lit(2024i32)); + let expected = col("ts_nano_none").lt(lit(ScalarValue::TimestampNanosecond( + Some(19723 * 86_400_000_000_000), + None, + ))); + assert_eq!(optimize_test(expr_lt, &schema), expected) +} + +#[test] +fn test_preimage_date_part_timestamp_nano_utc_gt() { + let schema = expr_test_schema(); + let expr_lt = expr_fn::date_part(lit("year"), col("ts_nano_utc")).gt(lit(2024i32)); + let expected = col("ts_nano_utc").gt_eq(lit(ScalarValue::TimestampNanosecond( + Some(20089 * 86_400_000_000_000), + None, + ))); + assert_eq!(optimize_test(expr_lt, &schema), expected) +} + +#[test] +fn test_preimage_date_part_timestamp_sec_est_gt_eq() { + let schema = expr_test_schema(); + let expr_lt = expr_fn::date_part(lit("year"), col("ts_sec_est")).gt_eq(lit(2024i32)); + let expected = col("ts_sec_est").gt_eq(lit(ScalarValue::TimestampSecond( + Some(19723 * 86_400), 
+ None, + ))); + assert_eq!(optimize_test(expr_lt, &schema), expected) +} + +#[test] +fn test_preimage_date_part_timestamp_mic_pt_lt_eq() { + let schema = expr_test_schema(); + let expr_lt = expr_fn::date_part(lit("year"), col("ts_mic_pt")).lt_eq(lit(2024i32)); + let expected = col("ts_mic_pt").lt(lit(ScalarValue::TimestampMicrosecond( + Some(20089 * 86_400_000_000), + None, + ))); + assert_eq!(optimize_test(expr_lt, &schema), expected) +} + +#[test] +fn test_preimage_date_part_timestamp_nano_lt_swap() { + let schema = expr_test_schema(); + let expr_lt = lit(2024i32).gt(expr_fn::date_part(lit("year"), col("ts_nano_none"))); + let expected = col("ts_nano_none").lt(lit(ScalarValue::TimestampNanosecond( + Some(19723 * 86_400_000_000_000), + None, + ))); + assert_eq!(optimize_test(expr_lt, &schema), expected) +} + +#[test] +fn test_preimage_date_part_date32_is_not_distinct_from() { + let schema = expr_test_schema(); + // date_part(c1, DatePart::Year) is not distinct from 2024 -> c1 >= 2024-01-01 AND c1 < 2025-01-01 (the null handling part is dropped since rhs is not null) + let expr_lt = Expr::BinaryExpr(BinaryExpr { + left: Box::new(expr_fn::date_part(lit("year"), col("date32"))), + op: Operator::IsNotDistinctFrom, + right: Box::new(lit(2024i32)), + }); + let expected = and( + col("date32").gt_eq(lit(ScalarValue::Date32(Some(19723)))), + col("date32").lt(lit(ScalarValue::Date32(Some(20089)))), + ); + assert_eq!(optimize_test(expr_lt, &schema), expected) +} + +#[test] +// Should not simplify - interval can't be calculated +fn test_preimage_date_part_date32_is_not_distinct_from_null() { + let schema = expr_test_schema(); + // date_part(c1, DatePart::Year) is not distinct from Null -> unchanged + let expr_lt = Expr::BinaryExpr(BinaryExpr { + left: Box::new(expr_fn::date_part(lit("year"), col("date32"))), + op: Operator::IsNotDistinctFrom, + right: Box::new(lit(ScalarValue::Null)), + }); + assert_eq!(optimize_test(expr_lt.clone(), &schema), expr_lt) +} + +#[test] +fn test_preimage_date_part_date64_is_distinct_from() { + let schema = expr_test_schema(); + // date_part(c1, DatePart::Year) is distinct from 2024 -> c1 < 2024-01-01 OR c1 >= 2025-01-01 OR c1 is NULL + let expr_lt = Expr::BinaryExpr(BinaryExpr { + left: Box::new(expr_fn::date_part(lit("year"), col("date64"))), + op: Operator::IsDistinctFrom, + right: Box::new(lit(2024i32)), + }); + let expected = col("date64") + .lt(lit(ScalarValue::Date64(Some(19723 * 86_400_000)))) + .or(col("date64").gt_eq(lit(ScalarValue::Date64(Some(20089 * 86_400_000))))) + .or(col("date64").is_null()); + assert_eq!(optimize_test(expr_lt, &schema), expected) +} + +#[test] +// Should not simplify - interval can't be calculated +fn test_preimage_date_part_date64_is_distinct_from_null() { + let schema = expr_test_schema(); + // date_part(c1, DatePart::Year) is distinct from Null -> unchanged + let expr_lt = Expr::BinaryExpr(BinaryExpr { + left: Box::new(expr_fn::date_part(lit("year"), col("date64"))), + op: Operator::IsDistinctFrom, + right: Box::new(lit(ScalarValue::Null)), + }); + assert_eq!(optimize_test(expr_lt.clone(), &schema), expr_lt) +} + +#[test] +// Should not simplify - only the YEAR part has a preimage +fn test_preimage_date_part_not_year_date32_eq() { + let schema = expr_test_schema(); + // date_part(c1, DatePart::Month) = 1 -> unchanged + let expr_lt = expr_fn::date_part(lit("month"), col("date32")).eq(lit(1i32)); + assert_eq!(optimize_test(expr_lt.clone(), &schema), expr_lt) +} + +fn optimize_test(expr: Expr, schema: &DFSchemaRef)
-> Expr { + let props = ExecutionProps::new(); + let simplifier = + ExprSimplifier::new(SimplifyContext::new(&props).with_schema(Arc::clone(schema))); + + simplifier.simplify(expr).unwrap() +} + +fn expr_test_schema() -> DFSchemaRef { + Arc::new( + DFSchema::from_unqualified_fields( + vec![ + Field::new("date32", DataType::Date32, true), + Field::new("date64", DataType::Date64, true), + Field::new("ts_nano_none", timestamp_nano_none_type(), true), + Field::new("ts_nano_utc", timestamp_nano_utc_type(), true), + Field::new("ts_sec_est", timestamp_sec_est_type(), true), + Field::new("ts_mic_pt", timestamp_mic_pt_type(), true), + ] + .into(), + HashMap::new(), + ) + .unwrap(), + ) +} + +fn timestamp_nano_none_type() -> DataType { + DataType::Timestamp(TimeUnit::Nanosecond, None) +} + +// this is the type that now() returns +fn timestamp_nano_utc_type() -> DataType { + let utc = Some("+0:00".into()); + DataType::Timestamp(TimeUnit::Nanosecond, utc) +} + +fn timestamp_sec_est_type() -> DataType { + let est = Some("-5:00".into()); + DataType::Timestamp(TimeUnit::Second, est) +} + +fn timestamp_mic_pt_type() -> DataType { + let pt = Some("-8:00".into()); + DataType::Timestamp(TimeUnit::Microsecond, pt) +} diff --git a/datafusion/expr/src/udf.rs b/datafusion/expr/src/udf.rs index 0654370ac7ebf..34332fbcdbd79 100644 --- a/datafusion/expr/src/udf.rs +++ b/datafusion/expr/src/udf.rs @@ -226,6 +226,25 @@ impl ScalarUDF { self.inner.simplify(args, info) } + /// Return a preimage + /// + /// See [`ScalarUDFImpl::preimage`] for more details. + pub fn preimage( + &self, + args: &[Expr], + lit_expr: &Expr, + info: &dyn SimplifyInfo, + ) -> Result<Option<Interval>> { + self.inner.preimage(args, lit_expr, info) + } + + /// Return the inner column from the function args + /// + /// See [`ScalarUDFImpl::column_expr`] + pub fn column_expr(&self, args: &[Expr]) -> Option<Expr> { + self.inner.column_expr(args) + } + #[deprecated(since = "50.0.0", note = "Use `return_field_from_args` instead.")] pub fn is_nullable(&self, args: &[Expr], schema: &dyn ExprSchema) -> bool { #[expect(deprecated)] @@ -696,6 +715,36 @@ pub trait ScalarUDFImpl: Debug + DynEq + DynHash + Send + Sync { Ok(ExprSimplifyResult::Original(args)) } + /// Returns the [preimage] for this function and the specified scalar value, if any. + /// + /// A preimage is a single contiguous [`Interval`] of values for which the function + /// always returns the value of `lit_expr`. + /// + /// This rewrite is described in the [ClickHouse Paper] and is particularly + /// useful for simplifying expressions involving `date_part` or equivalent functions. + /// The idea is that if you have an expression like `date_part(YEAR, k) = 2024`, you + /// can find a [preimage] for `date_part(YEAR, k)`: the range of dates covering the + /// entire year 2024. The expression can then be rewritten to + /// `k >= '2024-01-01' AND k < '2025-01-01'`, which is often more optimizable. + /// + /// This should only return a preimage if the function takes a single argument. + /// + /// [ClickHouse Paper]: https://www.vldb.org/pvldb/vol17/p3731-schulze.pdf + /// [preimage]: https://en.wikipedia.org/wiki/Image_(mathematics)#Inverse_image + fn preimage( + &self, + _args: &[Expr], + _lit_expr: &Expr, + _info: &dyn SimplifyInfo, + ) -> Result<Option<Interval>> { + Ok(None) + } + + /// Returns the inner column expression from this function's arguments, if any + fn column_expr(&self, _args: &[Expr]) -> Option<Expr> { + None + } + /// Returns true if some of this `exprs` subexpressions may not be evaluated /// and thus any side effects (like divide by zero) may not be encountered. /// @@ -926,6 +975,19 @@ impl ScalarUDFImpl for AliasedScalarUDFImpl { self.inner.simplify(args, info) } + fn preimage( + &self, + args: &[Expr], + lit_expr: &Expr, + info: &dyn SimplifyInfo, + ) -> Result<Option<Interval>> { + self.inner.preimage(args, lit_expr, info) + } + + fn column_expr(&self, args: &[Expr]) -> Option<Expr> { + self.inner.column_expr(args) + } + fn conditional_arguments<'a>( &self, args: &'a [Expr], diff --git a/datafusion/functions/src/datetime/date_part.rs b/datafusion/functions/src/datetime/date_part.rs index 375200d07280b..a8af0fb61c2af 100644 --- a/datafusion/functions/src/datetime/date_part.rs +++ b/datafusion/functions/src/datetime/date_part.rs @@ -27,6 +27,10 @@ use arrow::datatypes::DataType::{ }; use arrow::datatypes::TimeUnit::{Microsecond, Millisecond, Nanosecond, Second}; use arrow::datatypes::{DataType, Field, FieldRef, TimeUnit}; +use arrow::temporal_conversions::{ + MICROSECONDS_IN_DAY, MILLISECONDS_IN_DAY, NANOSECONDS_IN_DAY, SECONDS_IN_DAY, +}; +use chrono::{Datelike, NaiveDate}; use datafusion_common::types::{NativeType, logical_date}; use datafusion_common::{ @@ -41,10 +45,12 @@ use datafusion_common::{ types::logical_string, utils::take_function_args, }; +use datafusion_expr::simplify::SimplifyInfo; use datafusion_expr::{ ColumnarValue, Documentation, ReturnFieldArgs, ScalarUDFImpl, Signature, TypeSignature, Volatility, }; +use datafusion_expr::{Expr, interval_arithmetic}; use datafusion_expr_common::signature::{Coercion, TypeSignatureClass}; use datafusion_macros::user_doc; @@ -231,6 +237,67 @@ impl ScalarUDFImpl for DatePartFunc { }) } + // Only extracting the year is supported, since pruning on other IntervalUnit values is not possible: + // date_part(col, YEAR) = 2024 => col >= '2024-01-01' and col < '2025-01-01' + // For anything finer than YEAR, simplifying is not possible without enumerating every containing year: + // date_part(col, MONTH) = 1 => col = '2023-01-01' or col = '2024-01-01' or ... or col = '3000-01-01' + fn preimage( + &self, + args: &[Expr], + lit_expr: &Expr, + info: &dyn SimplifyInfo, + ) -> Result<Option<interval_arithmetic::Interval>> { + let [part, col_expr] = take_function_args(self.name(), args)?; + + // Get the interval unit from the part argument + let interval_unit = part + .as_literal() + .and_then(|sv| sv.try_as_str().flatten()) + .map(part_normalization) + .and_then(|s| IntervalUnit::from_str(s).ok()); + + // only support extracting year + match interval_unit { + Some(IntervalUnit::Year) => (), + _ => return Ok(None), + } + + // Check if the argument is a literal (e.g. date_part(YEAR, col) = 2024) + let Some(argument_literal) = lit_expr.as_literal() else { + return Ok(None); + }; + + // Extract i32 year from Scalar value + let year = match argument_literal { + ScalarValue::Int32(Some(y)) => *y, + _ => return Ok(None), + }; + + // Can only extract year from Date32/64 and Timestamp column + let target_type = match info.get_data_type(col_expr)?
{ + Date32 | Date64 | Timestamp(_, _) => &info.get_data_type(col_expr)?, + _ => return Ok(None), + }; + + // Compute the Interval bounds + let start_time = + NaiveDate::from_ymd_opt(year, 1, 1).expect("Expect computed start time"); + let end_time = start_time + .with_year(year + 1) + .expect("Expect computed end time"); + + // Convert to ScalarValues + let lower = date_to_scalar(start_time, target_type) + .expect("Expect preimage interval lower bound"); + let upper = date_to_scalar(end_time, target_type) + .expect("Expect preimage interval upper bound"); + Ok(Some(interval_arithmetic::Interval::try_new(lower, upper)?)) + } + + fn column_expr(&self, args: &[Expr]) -> Option<Expr> { + Some(args[1].clone()) + } + fn aliases(&self) -> &[String] { &self.aliases } @@ -240,6 +307,35 @@ impl ScalarUDFImpl for DatePartFunc { } } +fn date_to_scalar(date: NaiveDate, target_type: &DataType) -> Option<ScalarValue> { + let days = date + .signed_duration_since(NaiveDate::from_epoch_days(0)?) + .num_days(); + + Some(match target_type { + Date32 => ScalarValue::Date32(Some(days as i32)), + Date64 => ScalarValue::Date64(Some(days * MILLISECONDS_IN_DAY)), + Timestamp(unit, tz) => match unit { + Second => { + ScalarValue::TimestampSecond(Some(days * SECONDS_IN_DAY), tz.clone()) + } + Millisecond => ScalarValue::TimestampMillisecond( + Some(days * MILLISECONDS_IN_DAY), + tz.clone(), + ), + Microsecond => ScalarValue::TimestampMicrosecond( + Some(days * MICROSECONDS_IN_DAY), + tz.clone(), + ), + Nanosecond => ScalarValue::TimestampNanosecond( + Some(days * NANOSECONDS_IN_DAY), + tz.clone(), + ), + }, + _ => return None, + }) +} + fn is_epoch(part: &str) -> bool { let part = part_normalization(part); matches!(part.to_lowercase().as_str(), "epoch") diff --git a/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs b/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs index 55bff5849c5cb..7b5762f2b76b1 100644 --- a/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs +++ b/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs @@ -54,6 +54,7 @@ use super::utils::*; use crate::analyzer::type_coercion::TypeCoercionRewriter; use crate::simplify_expressions::SimplifyContext; use crate::simplify_expressions::regex::simplify_regex_expr; +use crate::simplify_expressions::udf_preimage::rewrite_with_preimage; use crate::simplify_expressions::unwrap_cast::{ is_cast_expr_and_support_unwrap_cast_in_comparison_for_binary, is_cast_expr_and_support_unwrap_cast_in_comparison_for_inlist, @@ -61,6 +62,7 @@ use crate::simplify_expressions::unwrap_cast::{ }; use datafusion_expr::expr_rewriter::rewrite_with_guarantees_map; use datafusion_expr_common::casts::try_cast_literal_to_type; +use datafusion_expr_common::interval_arithmetic::Interval; use indexmap::IndexSet; use regex::Regex; @@ -1952,12 +1954,70 @@ impl TreeNodeRewriter for Simplifier<'_> { })) } + // ======================================= + // preimage_in_comparison + // ======================================= + // + // For case: + // date_part(expr as 'YEAR') op literal + // + // Background: + // Datasources such as Parquet can prune partitions using simple predicates, + // but they cannot do so for complex expressions. + // For a complex predicate like `date_part('YEAR', c1) < 2000`, pruning is not possible. + // After rewriting it to `c1 < 2000-01-01`, pruning becomes feasible.
+ Expr::BinaryExpr(BinaryExpr { left, op, right }) + if get_preimage(&left, &right, info)?.0.is_some() + && get_preimage(&left, &right, info)?.1.is_some() => + { + // todo use let binding (if let Some(interval) = ...) once stabilized to avoid computing this thrice😢 + let (Some(interval), Some(col_expr)) = + get_preimage(left.as_ref(), &right, info)? + else { + unreachable!( + "The above if statement ensures interval and col_expr are Some" + ) + }; + rewrite_with_preimage(info, interval, op, Box::new(col_expr))? + } + // literal op date_part(literal, expression) + // --> + // date_part(literal, expression) op_swap literal + Expr::BinaryExpr(BinaryExpr { left, op, right }) + if get_preimage(&right, &left, info)?.0.is_some() + && get_preimage(&right, &left, info)?.1.is_some() + && op.swap().is_some() => + { + let swapped = op.swap().unwrap(); + let (Some(interval), Some(col_expr)) = get_preimage(&right, &left, info)? + else { + unreachable!( + "The above if statement ensures interval and col_expr are Some" + ) + }; + rewrite_with_preimage(info, interval, swapped, Box::new(col_expr))? + } + // no additional rewrites possible expr => Transformed::no(expr), }) } } +fn get_preimage( + left_expr: &Expr, + right_expr: &Expr, + info: &dyn SimplifyInfo, +) -> Result<(Option<Interval>, Option<Expr>)> { + let Expr::ScalarFunction(ScalarFunction { func, args }) = left_expr else { + return Ok((None, None)); + }; + Ok(( + func.preimage(args, right_expr, info)?, + func.column_expr(args), + )) +} + fn as_string_scalar(expr: &Expr) -> Option<(DataType, &Option<String>)> { match expr { Expr::Literal(ScalarValue::Utf8(s), _) => Some((DataType::Utf8, s)), diff --git a/datafusion/optimizer/src/simplify_expressions/mod.rs b/datafusion/optimizer/src/simplify_expressions/mod.rs index 3ab76119cca84..b85b000821ad8 100644 --- a/datafusion/optimizer/src/simplify_expressions/mod.rs +++ b/datafusion/optimizer/src/simplify_expressions/mod.rs @@ -24,6 +24,7 @@ mod regex; pub mod simplify_exprs; pub mod simplify_literal; mod simplify_predicates; +mod udf_preimage; mod unwrap_cast; mod utils; diff --git a/datafusion/optimizer/src/simplify_expressions/udf_preimage.rs b/datafusion/optimizer/src/simplify_expressions/udf_preimage.rs new file mode 100644 index 0000000000000..980a1ea42e0e3 --- /dev/null +++ b/datafusion/optimizer/src/simplify_expressions/udf_preimage.rs @@ -0,0 +1,112 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +use datafusion_common::{Result, internal_err, tree_node::Transformed}; +use datafusion_expr::{BinaryExpr, Expr, Operator, and, lit, or, simplify::SimplifyInfo}; +use datafusion_expr_common::interval_arithmetic::Interval; + +/// Rewrites a binary expression using its "preimage" +/// +/// Specifically it rewrites expressions of the form `<expr> OP x` (e.g. `<expr> = +/// x`) where `<expr>` is known to have a pre-image (the entire single +/// range of inputs for which it evaluates to `x`) +/// +/// This rewrite is described in the [ClickHouse Paper] and is particularly +/// useful for simplifying expressions involving `date_part` or equivalent functions. The +/// idea is that if you have an expression like `date_part(YEAR, k) = 2024`, you +/// can find a [preimage] for `date_part(YEAR, k)`: the range of dates +/// covering the entire year 2024. The expression can then be rewritten to +/// `k >= '2024-01-01' AND k < '2025-01-01'`, which is often more optimizable. +/// +/// [ClickHouse Paper]: https://www.vldb.org/pvldb/vol17/p3731-schulze.pdf +/// [preimage]: https://en.wikipedia.org/wiki/Image_(mathematics)#Inverse_image +/// +pub(super) fn rewrite_with_preimage( + _info: &dyn SimplifyInfo, + preimage_interval: Interval, + op: Operator, + expr: Box<Expr>, +) -> Result<Transformed<Expr>> { + let (lower, upper) = preimage_interval.into_bounds(); + let (lower, upper) = (lit(lower), lit(upper)); + + let rewritten_expr = match op { + // <expr> < x ==> <expr> < lower + // <expr> >= x ==> <expr> >= lower + Operator::Lt | Operator::GtEq => Expr::BinaryExpr(BinaryExpr { + left: expr, + op, + right: Box::new(lower), + }), + // <expr> > x ==> <expr> >= upper + Operator::Gt => Expr::BinaryExpr(BinaryExpr { + left: expr, + op: Operator::GtEq, + right: Box::new(upper), + }), + // <expr> <= x ==> <expr> < upper + Operator::LtEq => Expr::BinaryExpr(BinaryExpr { + left: expr, + op: Operator::Lt, + right: Box::new(upper), + }), + // <expr> = x ==> (<expr> >= lower) and (<expr> < upper) + // + // <expr> is not distinct from x ==> (<expr> is NULL and x is NULL) or ((<expr> >= lower) and (<expr> < upper)) + // but since x is always not NULL => (<expr> >= lower) and (<expr> < upper) + Operator::Eq | Operator::IsNotDistinctFrom => and( + Expr::BinaryExpr(BinaryExpr { + left: expr.clone(), + op: Operator::GtEq, + right: Box::new(lower), + }), + Expr::BinaryExpr(BinaryExpr { + left: expr, + op: Operator::Lt, + right: Box::new(upper), + }), + ), + // <expr> != x ==> (<expr> < lower) or (<expr> >= upper) + Operator::NotEq => or( + Expr::BinaryExpr(BinaryExpr { + left: expr.clone(), + op: Operator::Lt, + right: Box::new(lower), + }), + Expr::BinaryExpr(BinaryExpr { + left: expr, + op: Operator::GtEq, + right: Box::new(upper), + }), + ), + // <expr> is distinct from x ==> (<expr> < lower) or (<expr> >= upper) or (<expr> is NULL and x is not NULL) or (<expr> is not NULL and x is NULL) + // but given that x is always not NULL => (<expr> < lower) or (<expr> >= upper) or (<expr> is NULL) + Operator::IsDistinctFrom => Expr::BinaryExpr(BinaryExpr { + left: expr.clone(), + op: Operator::Lt, + right: Box::new(lower.clone()), + }) + .or(Expr::BinaryExpr(BinaryExpr { + left: expr.clone(), + op: Operator::GtEq, + right: Box::new(upper), + })) + .or(expr.is_null()), + _ => return internal_err!("Expected a comparison operator"), + }; + Ok(Transformed::yes(rewritten_expr)) +} diff --git a/datafusion/sqllogictest/test_files/udf_preimage.slt b/datafusion/sqllogictest/test_files/udf_preimage.slt new file mode 100644 index 0000000000000..544f082cc1d83 --- /dev/null +++ b/datafusion/sqllogictest/test_files/udf_preimage.slt @@ -0,0 +1,506 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "as IS" BasIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +############################ +# date_part(year, col) tests +############################ + +statement ok +create table t1(c DATE) as VALUES (NULL), ('1990-01-01'), ('2024-01-01'), ('2030-01-01'); + +# +# Simple optimizations, col on LHS +# +query D +select c from t1 where extract(year from c) = 2024; +---- +2024-01-01 + +query D +select c from t1 where extract(year from c) <> 2024; +---- +1990-01-01 +2030-01-01 + +query D +select c from t1 where extract(year from c) > 2024; +---- +2030-01-01 + +query D +select c from t1 where extract(year from c) < 2024; +---- +1990-01-01 + +query D +select c from t1 where extract(year from c) >= 2024; +---- +2024-01-01 +2030-01-01 + +query D +select c from t1 where extract(year from c) <= 2024; +---- +1990-01-01 +2024-01-01 + +query D +select c from t1 where extract(year from c) is not distinct from 2024 +---- +2024-01-01 + +query D +select c from t1 where extract(year from c) is distinct from 2024 +---- +NULL +1990-01-01 +2030-01-01 + +# +# Check that date_part is not in the explain statements +# +query TT +explain select c from t1 where extract (year from c) = 2024 +---- +logical_plan +01)Filter: t1.c >= Date32("2024-01-01") AND t1.c < Date32("2025-01-01") +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: c@0 >= 2024-01-01 AND c@0 < 2025-01-01 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (year from c) <> 2024 +---- +logical_plan +01)Filter: t1.c < Date32("2024-01-01") OR t1.c >= Date32("2025-01-01") +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: c@0 < 2024-01-01 OR c@0 >= 2025-01-01 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (year from c) > 2024 +---- +logical_plan +01)Filter: t1.c >= Date32("2025-01-01") +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: c@0 >= 2025-01-01 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (year from c) < 2024 +---- +logical_plan +01)Filter: t1.c < Date32("2024-01-01") +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: c@0 < 2024-01-01 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (year from c) >= 2024 +---- +logical_plan +01)Filter: t1.c >= Date32("2024-01-01") +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: c@0 >= 2024-01-01 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (year from c) <= 2024 +---- +logical_plan +01)Filter: t1.c < Date32("2025-01-01") +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: c@0 < 2025-01-01 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (year from c) is not distinct from 2024 +---- +logical_plan +01)Filter: t1.c >= Date32("2024-01-01") AND t1.c < Date32("2025-01-01") +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: c@0 >= 2024-01-01 AND c@0 < 2025-01-01 +02)--DataSourceExec: partitions=1, 
partition_sizes=[1] + +query TT +explain select c from t1 where extract (year from c) is distinct from 2024 +---- +logical_plan +01)Filter: t1.c < Date32("2024-01-01") OR t1.c >= Date32("2025-01-01") OR t1.c IS NULL +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: c@0 < 2024-01-01 OR c@0 >= 2025-01-01 OR c@0 IS NULL +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +# +# Simple optimizations, column on RHS +# +query D +select c from t1 where 2024 = extract(year from c); +---- +2024-01-01 + +query D +select c from t1 where 2024 <> extract(year from c); +---- +1990-01-01 +2030-01-01 + +query D +select c from t1 where 2024 < extract(year from c); +---- +2030-01-01 + +query D +select c from t1 where 2024 > extract(year from c); +---- +1990-01-01 + +query D +select c from t1 where 2024 <= extract(year from c); +---- +2024-01-01 +2030-01-01 + +query D +select c from t1 where 2024 >= extract(year from c); +---- +1990-01-01 +2024-01-01 + +query D +select c from t1 where 2024 is not distinct from extract(year from c); +---- +2024-01-01 + +query D +select c from t1 where 2024 is distinct from extract(year from c); +---- +NULL +1990-01-01 +2030-01-01 + +# +# Check explain statements for optimizations for other interval types +# +query TT +explain select c from t1 where extract (quarter from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("QUARTER"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(QUARTER, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (month from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("MONTH"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(MONTH, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (week from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("WEEK"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(WEEK, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (day from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("DAY"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(DAY, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (hour from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("HOUR"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(HOUR, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (minute from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("MINUTE"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(MINUTE, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (second from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("SECOND"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(SECOND, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (millisecond from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("MILLISECOND"), t1.c) = Int32(2024) +02)--TableScan: 
t1 projection=[c] +physical_plan +01)FilterExec: date_part(MILLISECOND, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (microsecond from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("MICROSECOND"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(MICROSECOND, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (nanosecond from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("NANOSECOND"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(NANOSECOND, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (dow from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("DOW"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(DOW, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (doy from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("DOY"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(DOY, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (epoch from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("EPOCH"), t1.c) = Float64(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(EPOCH, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c from t1 where extract (isodow from c) = 2024 +---- +logical_plan +01)Filter: date_part(Utf8("ISODOW"), t1.c) = Int32(2024) +02)--TableScan: t1 projection=[c] +physical_plan +01)FilterExec: date_part(ISODOW, c@0) = 2024 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +# +# Simple optimize different datatypes +# +statement ok +create table t2( + c1_date32 DATE, + c2_ts_sec timestamp, + c3_ts_mili timestamp, + c4_ts_micro timestamp, + c5_ts_nano timestamp +) as VALUES + (NULL, + NULL, + NULL, + NULL, + NULL), + ('1990-05-20', + '1990-05-20T00:00:10'::timestamp, + '1990-05-20T00:00:10.987'::timestamp, + '1990-05-20T00:00:10.987654'::timestamp, + '1990-05-20T00:00:10.987654321'::timestamp), + ('2024-01-01', + '2024-01-01T00:00:00'::timestamp, + '2024-01-01T00:00:00.123'::timestamp, + '2024-01-01T00:00:00.123456'::timestamp, + '2024-01-01T00:00:00.123456789'::timestamp), + ('2030-12-31', + '2030-12-31T23:59:59'::timestamp, + '2030-12-31T23:59:59.001'::timestamp, + '2030-12-31T23:59:59.001234'::timestamp, + '2030-12-31T23:59:59.001234567'::timestamp) +; + +query D +select c1_date32 from t2 where extract(year from c1_date32) = 2024; +---- +2024-01-01 + +query D +select c1_date32 from t2 where extract(year from c1_date32) <> 2024; +---- +1990-05-20 +2030-12-31 + +query P +select c2_ts_sec from t2 where extract(year from c2_ts_sec) > 2024; +---- +2030-12-31T23:59:59 + +query P +select c3_ts_mili from t2 where extract(year from c3_ts_mili) < 2024; +---- +1990-05-20T00:00:10.987 + +query P +select c4_ts_micro from t2 where extract(year from c4_ts_micro) >= 2024; +---- +2024-01-01T00:00:00.123456 +2030-12-31T23:59:59.001234 + +query P +select c5_ts_nano from t2 where extract(year from c5_ts_nano) <= 2024; +---- +1990-05-20T00:00:10.987654321 +2024-01-01T00:00:00.123456789 + +query D +select c1_date32 from t2 where 
extract(year from c1_date32) is not distinct from 2024 +---- +2024-01-01 + +query D +select c1_date32 from t2 where extract(year from c1_date32) is distinct from 2024 +---- +NULL +1990-05-20 +2030-12-31 + +# +# Check that date_part is not in the explain statements for other datatypes +# +query TT +explain select c1_date32 from t2 where extract (year from c1_date32) = 2024 +---- +logical_plan +01)Filter: t2.c1_date32 >= Date32("2024-01-01") AND t2.c1_date32 < Date32("2025-01-01") +02)--TableScan: t2 projection=[c1_date32] +physical_plan +01)FilterExec: c1_date32@0 >= 2024-01-01 AND c1_date32@0 < 2025-01-01 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c1_date32 from t2 where extract (year from c1_date32) <> 2024 +---- +logical_plan +01)Filter: t2.c1_date32 < Date32("2024-01-01") OR t2.c1_date32 >= Date32("2025-01-01") +02)--TableScan: t2 projection=[c1_date32] +physical_plan +01)FilterExec: c1_date32@0 < 2024-01-01 OR c1_date32@0 >= 2025-01-01 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c2_ts_sec from t2 where extract (year from c2_ts_sec) > 2024 +---- +logical_plan +01)Filter: t2.c2_ts_sec >= TimestampNanosecond(1735689600000000000, None) +02)--TableScan: t2 projection=[c2_ts_sec] +physical_plan +01)FilterExec: c2_ts_sec@0 >= 1735689600000000000 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c3_ts_mili from t2 where extract (year from c3_ts_mili) < 2024 +---- +logical_plan +01)Filter: t2.c3_ts_mili < TimestampNanosecond(1704067200000000000, None) +02)--TableScan: t2 projection=[c3_ts_mili] +physical_plan +01)FilterExec: c3_ts_mili@0 < 1704067200000000000 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c4_ts_micro from t2 where extract (year from c4_ts_micro) >= 2024 +---- +logical_plan +01)Filter: t2.c4_ts_micro >= TimestampNanosecond(1704067200000000000, None) +02)--TableScan: t2 projection=[c4_ts_micro] +physical_plan +01)FilterExec: c4_ts_micro@0 >= 1704067200000000000 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c5_ts_nano from t2 where extract (year from c5_ts_nano) <= 2024 +---- +logical_plan +01)Filter: t2.c5_ts_nano < TimestampNanosecond(1735689600000000000, None) +02)--TableScan: t2 projection=[c5_ts_nano] +physical_plan +01)FilterExec: c5_ts_nano@0 < 1735689600000000000 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c1_date32 from t2 where extract (year from c1_date32) is not distinct from 2024 +---- +logical_plan +01)Filter: t2.c1_date32 >= Date32("2024-01-01") AND t2.c1_date32 < Date32("2025-01-01") +02)--TableScan: t2 projection=[c1_date32] +physical_plan +01)FilterExec: c1_date32@0 >= 2024-01-01 AND c1_date32@0 < 2025-01-01 +02)--DataSourceExec: partitions=1, partition_sizes=[1] + +query TT +explain select c1_date32 from t2 where extract (year from c1_date32) is distinct from 2024 +---- +logical_plan +01)Filter: t2.c1_date32 < Date32("2024-01-01") OR t2.c1_date32 >= Date32("2025-01-01") OR t2.c1_date32 IS NULL +02)--TableScan: t2 projection=[c1_date32] +physical_plan +01)FilterExec: c1_date32@0 < 2024-01-01 OR c1_date32@0 >= 2025-01-01 OR c1_date32@0 IS NULL +02)--DataSourceExec: partitions=1, partition_sizes=[1] \ No newline at end of file
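The following sketch is not part of the patch; it illustrates how a third-party UDF might opt into the rewrite by overriding the new `ScalarUDFImpl::preimage` and `ScalarUDFImpl::column_expr` hooks. `TruncToThousand` is a hypothetical single-argument function (its kernel is elided), and the bound arithmetic assumes the literal is a non-negative multiple of 1000; only the two preimage hooks are the point here.

// Hypothetical example (not part of this diff): trunc_to_thousand(c) = 2000
// would be rewritten by the simplifier to c >= 2000 AND c < 3000.
use std::any::Any;

use arrow::datatypes::DataType;
use datafusion_common::{Result, ScalarValue};
use datafusion_expr::interval_arithmetic::Interval;
use datafusion_expr::simplify::SimplifyInfo;
use datafusion_expr::{
    ColumnarValue, Expr, ScalarFunctionArgs, ScalarUDFImpl, Signature, Volatility,
};

#[derive(Debug, PartialEq, Eq, Hash)]
struct TruncToThousand {
    signature: Signature,
}

impl TruncToThousand {
    fn new() -> Self {
        Self {
            signature: Signature::exact(vec![DataType::Int64], Volatility::Immutable),
        }
    }
}

impl ScalarUDFImpl for TruncToThousand {
    fn as_any(&self) -> &dyn Any {
        self
    }
    fn name(&self) -> &str {
        "trunc_to_thousand"
    }
    fn signature(&self) -> &Signature {
        &self.signature
    }
    fn return_type(&self, _arg_types: &[DataType]) -> Result<DataType> {
        Ok(DataType::Int64)
    }
    fn invoke_with_args(&self, _args: ScalarFunctionArgs) -> Result<ColumnarValue> {
        // Kernel elided; this sketch only demonstrates the preimage hooks.
        unimplemented!()
    }

    // The wrapped column is the single argument of the function.
    fn column_expr(&self, args: &[Expr]) -> Option<Expr> {
        args.first().cloned()
    }

    // The preimage of a literal `v` (assumed here to be a non-negative
    // multiple of 1000) is the half-open range [v, v + 1000).
    fn preimage(
        &self,
        _args: &[Expr],
        lit_expr: &Expr,
        _info: &dyn SimplifyInfo,
    ) -> Result<Option<Interval>> {
        let Some(ScalarValue::Int64(Some(v))) = lit_expr.as_literal() else {
            return Ok(None);
        };
        let lower = ScalarValue::Int64(Some(*v));
        let upper = ScalarValue::Int64(Some(*v + 1000));
        Ok(Some(Interval::try_new(lower, upper)?))
    }
}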