Commit

Clean up API, make examples easier
alamb committed May 1, 2024
1 parent 0948517 commit 86548e3
Showing 6 changed files with 110 additions and 62 deletions.
20 changes: 11 additions & 9 deletions datafusion-examples/examples/expr_api.rs
@@ -25,9 +25,7 @@ use datafusion::arrow::datatypes::{DataType, Field, Schema, TimeUnit};
 use datafusion::common::DFSchema;
 use datafusion::error::Result;
 use datafusion::optimizer::simplify_expressions::ExprSimplifier;
-use datafusion::physical_expr::{
-    analyze, create_physical_expr, AnalysisContext, ExprBoundaries, PhysicalExpr,
-};
+use datafusion::physical_expr::{analyze, AnalysisContext, ExprBoundaries};
 use datafusion::prelude::*;
 use datafusion_common::{ScalarValue, ToDFSchema};
 use datafusion_expr::execution_props::ExecutionProps;
@@ -92,8 +90,8 @@ fn evaluate_demo() -> Result<()> {
     let expr = col("a").lt(lit(5)).or(col("a").eq(lit(8)));
 
     // First, you make a "physical expression" from the logical `Expr`
-    let ctx = SessionContext::new();
-    let physical_expr = physical_expr(&batch.schema(), expr)?;
+    let df_schema = DFSchema::try_from(batch.schema())?;
+    let physical_expr = SessionContext::new().create_physical_expr(&df_schema, expr)?;
 
     // Now, you can evaluate the expression against the RecordBatch
     let result = physical_expr.evaluate(&batch)?;
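For reference, a self-contained sketch of the new workflow shown in this hunk. The column name, batch contents, and the `evaluate_sketch` wrapper are illustrative, and it assumes the DataFusion APIs as of this commit:

```rust
use std::sync::Arc;

use datafusion::arrow::array::Int32Array;
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::arrow::record_batch::RecordBatch;
use datafusion::common::DFSchema;
use datafusion::error::Result;
use datafusion::physical_expr::PhysicalExpr; // brings `evaluate` into scope
use datafusion::prelude::*;

fn evaluate_sketch() -> Result<()> {
    // A one-column batch: a = [1, 5, 8]
    let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, false)]));
    let batch = RecordBatch::try_new(
        schema,
        vec![Arc::new(Int32Array::from(vec![1, 5, 8]))],
    )?;

    // `a < 5 OR a = 8`, as in the example above
    let expr = col("a").lt(lit(5)).or(col("a").eq(lit(8)));

    // New API: wrap the Arrow schema in a `DFSchema`, then let the
    // `SessionContext` plan the expression (coercion and rewrites included)
    let df_schema = DFSchema::try_from(batch.schema())?;
    let physical_expr = SessionContext::new().create_physical_expr(&df_schema, expr)?;

    // Evaluate against the batch: yields a `ColumnarValue` wrapping
    // a BooleanArray of [true, false, true]
    let result = physical_expr.evaluate(&batch)?;
    println!("{result:?}");
    Ok(())
}
```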
@@ -214,7 +212,7 @@ fn range_analysis_demo() -> Result<()> {
     // `date < '2020-10-01' AND date > '2020-09-01'`
 
     // As always, we need to tell DataFusion the type of column "date"
-    let schema = Schema::new(vec![make_field("date", DataType::Date32)]);
+    let schema = Arc::new(Schema::new(vec![make_field("date", DataType::Date32)]));
 
     // You can provide DataFusion any known boundaries on the values of `date`
     // (for example, maybe you know you only have data up to `2020-09-15`), but
@@ -223,9 +221,13 @@
     let boundaries = ExprBoundaries::try_new_unbounded(&schema)?;
 
     // Now, we invoke the analysis code to perform the range analysis
-    let physical_expr = physical_expr(&schema, expr)?;
-    let analysis_result =
-        analyze(&physical_expr, AnalysisContext::new(boundaries), &schema)?;
+    let df_schema = DFSchema::try_from(schema)?;
+    let physical_expr = SessionContext::new().create_physical_expr(&df_schema, expr)?;
+    let analysis_result = analyze(
+        &physical_expr,
+        AnalysisContext::new(boundaries),
+        df_schema.as_ref(),
+    )?;
 
     // The result of the analysis is a range, encoded as an `Interval`, for
     // each column in the schema, that must be true in order for the predicate
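A companion sketch of the range-analysis path using the same new API. It uses Int32 bounds instead of the example's dates to stay short; `range_analysis_sketch` and the column name are illustrative, and the exact `Interval` contents are indicative only:

```rust
use std::sync::Arc;

use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::common::DFSchema;
use datafusion::error::Result;
use datafusion::physical_expr::{analyze, AnalysisContext, ExprBoundaries};
use datafusion::prelude::*;

fn range_analysis_sketch() -> Result<()> {
    let schema = Arc::new(Schema::new(vec![Field::new("x", DataType::Int32, false)]));

    // No prior knowledge about the values of `x`
    let boundaries = ExprBoundaries::try_new_unbounded(&schema)?;

    // `x > 1 AND x < 10`
    let expr = col("x").gt(lit(1)).and(col("x").lt(lit(10)));

    // Same pattern as above: DFSchema + SessionContext::create_physical_expr
    let df_schema = DFSchema::try_from(schema)?;
    let physical_expr = SessionContext::new().create_physical_expr(&df_schema, expr)?;

    let analysis = analyze(
        &physical_expr,
        AnalysisContext::new(boundaries),
        df_schema.as_ref(),
    )?;

    // One entry per column; the interval for `x` is narrowed to the
    // values that can satisfy `x > 1 AND x < 10`
    for b in analysis.boundaries {
        println!("{:?}", b.interval);
    }
    Ok(())
}
```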
20 changes: 20 additions & 0 deletions datafusion/common/src/dfschema.rs
@@ -125,6 +125,20 @@ impl DFSchema {
         }
     }
 
+    /// Return a reference to the inner Arrow [`Schema`]
+    ///
+    /// Note this does not have the qualifier information
+    pub fn as_arrow(&self) -> &Schema {
+        self.inner.as_ref()
+    }
+
+    /// Return a reference to the inner Arrow [`SchemaRef`]
+    ///
+    /// Note this does not have the qualifier information
+    pub fn inner(&self) -> &SchemaRef {
+        &self.inner
+    }
+
     /// Create a `DFSchema` from an Arrow schema where all the fields have a given qualifier
     pub fn new_with_metadata(
         qualified_fields: Vec<(Option<TableReference>, Arc<Field>)>,
@@ -806,6 +820,12 @@ impl From<&DFSchema> for Schema {
     }
 }
 
+impl AsRef<Schema> for DFSchema {
+    fn as_ref(&self) -> &Schema {
+        self.as_arrow()
+    }
+}
+
 /// Create a `DFSchema` from an Arrow schema
 impl TryFrom<Schema> for DFSchema {
     type Error = DataFusionError;
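In practice, the new accessors let code that only understands Arrow schemas borrow from a `DFSchema` instead of cloning it. A minimal sketch; the `describe` helper is invented for illustration:

```rust
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::common::DFSchema;
use datafusion::error::Result;

// Stand-in for any API that only understands Arrow schemas
fn describe(schema: &Schema) {
    for field in schema.fields() {
        println!("{}: {}", field.name(), field.data_type());
    }
}

fn accessor_sketch() -> Result<()> {
    let df_schema =
        DFSchema::try_from(Schema::new(vec![Field::new("a", DataType::Int32, false)]))?;

    describe(df_schema.as_arrow()); // explicit accessor
    describe(df_schema.as_ref());   // via the new `AsRef<Schema>` impl

    // `inner` exposes the shared `SchemaRef`; cloning it is just an Arc bump
    let shared = df_schema.inner().clone();
    describe(&shared);
    Ok(())
}
```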
60 changes: 45 additions & 15 deletions datafusion/core/src/execution/context/mod.rs
@@ -70,13 +70,13 @@ use datafusion_common::{
     config::{ConfigExtension, TableOptions},
     exec_err, not_impl_err, plan_datafusion_err, plan_err,
     tree_node::{TreeNodeRecursion, TreeNodeVisitor},
-    DFSchemaRef, SchemaReference, TableReference,
+    DFSchema, SchemaReference, TableReference,
 };
 use datafusion_execution::registry::SerializerRegistry;
 use datafusion_expr::{
     logical_plan::{DdlStatement, Statement},
     var_provider::is_system_variables,
-    Expr, StringifiedPlan, UserDefinedLogicalNode, WindowUDF,
+    Expr, ExprSchemable, StringifiedPlan, UserDefinedLogicalNode, WindowUDF,
 };
 use datafusion_sql::{
     parser::{CopyToSource, CopyToStatement, DFParser},
@@ -96,7 +96,7 @@
 pub use datafusion_execution::config::SessionConfig;
 pub use datafusion_execution::TaskContext;
 pub use datafusion_expr::execution_props::ExecutionProps;
 use datafusion_expr::expr_rewriter::FunctionRewrite;
-use datafusion_expr::simplify::SimplifyContext;
+use datafusion_expr::simplify::SimplifyInfo;
 use datafusion_optimizer::simplify_expressions::ExprSimplifier;
 use datafusion_physical_expr::create_physical_expr;

@@ -520,10 +520,10 @@ impl SessionContext {
     /// examples.
     pub fn create_physical_expr(
         &self,
-        schema: impl Into<DFSchemaRef>,
+        df_schema: &DFSchema,
         expr: Expr,
     ) -> Result<Arc<dyn PhysicalExpr>> {
-        self.state.read().create_physical_expr(schema, expr)
+        self.state.read().create_physical_expr(df_schema, expr)
     }
 
     // return an empty dataframe
@@ -1966,7 +1966,9 @@ impl SessionState {
     }
 
     /// Creates a [`PhysicalExpr`] from an [`Expr`] after applying type
-    /// coercion, simplifications, and function rewrites.
+    /// coercion, and function rewrites.
+    ///
+    /// Note that no simplification (TODO link) is applied.
     ///
     /// TODO links to coercion, simplification, and rewrites
     ///
@@ -1976,21 +1978,20 @@
     /// ```
     pub fn create_physical_expr(
         &self,
-        schema: impl Into<DFSchemaRef>,
-        // todo make this schema
+        df_schema: &DFSchema,
         expr: Expr,
     ) -> Result<Arc<dyn PhysicalExpr>> {
-        let df_schema = schema.into();
-
-        // Simplify
-        let props = ExecutionProps::new();
-        let simplifier = ExprSimplifier::new(
-            SimplifyContext::new(&props).with_schema(df_schema.clone()),
-        );
+        let simplifier =
+            ExprSimplifier::new(SessionSimpifyProvider::new(self, df_schema));
 
         // apply type coercion here to ensure types match
-        let expr = simplifier.coerce(expr, df_schema.clone())?;
+        let expr = simplifier.coerce(expr, df_schema)?;
+        // TODO should we also simplify the expression?
+        // simplifier.simplify()
 
-        create_physical_expr(&expr, df_schema.as_ref(), &props)
+        create_physical_expr(&expr, df_schema, self.execution_props())
     }
 
     /// Return the session ID
@@ -2070,6 +2071,35 @@
     }
 }
 
+struct SessionSimpifyProvider<'a> {
+    state: &'a SessionState,
+    df_schema: &'a DFSchema,
+}
+
+impl<'a> SessionSimpifyProvider<'a> {
+    fn new(state: &'a SessionState, df_schema: &'a DFSchema) -> Self {
+        Self { state, df_schema }
+    }
+}
+
+impl<'a> SimplifyInfo for SessionSimpifyProvider<'a> {
+    fn is_boolean_type(&self, expr: &Expr) -> Result<bool> {
+        Ok(expr.get_type(self.df_schema)? == DataType::Boolean)
+    }
+
+    fn nullable(&self, expr: &Expr) -> Result<bool> {
+        expr.nullable(self.df_schema)
+    }
+
+    fn execution_props(&self) -> &ExecutionProps {
+        self.state.execution_props()
+    }
+
+    fn get_data_type(&self, expr: &Expr) -> Result<DataType> {
+        expr.get_type(self.df_schema)
+    }
+}
+
 struct SessionContextProvider<'a> {
     state: &'a SessionState,
     tables: HashMap<String, Arc<dyn TableSource>>,
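Because `create_physical_expr` no longer simplifies, a caller that wants the old behavior can run `ExprSimplifier` explicitly before lowering. A sketch under that assumption; `simplified_physical_expr` is an invented helper that mirrors the removed code path using the public `SimplifyContext`:

```rust
use std::sync::Arc;

use datafusion::common::DFSchema;
use datafusion::error::Result;
use datafusion::optimizer::simplify_expressions::ExprSimplifier;
use datafusion::physical_expr::PhysicalExpr;
use datafusion::prelude::*;
use datafusion_expr::execution_props::ExecutionProps;
use datafusion_expr::simplify::SimplifyContext;

// Invented helper: constant-fold/simplify first, then lower
fn simplified_physical_expr(
    ctx: &SessionContext,
    df_schema: &DFSchema,
    expr: Expr,
) -> Result<Arc<dyn PhysicalExpr>> {
    let props = ExecutionProps::new();
    let simplify_ctx =
        SimplifyContext::new(&props).with_schema(Arc::new(df_schema.clone()));
    let expr = ExprSimplifier::new(simplify_ctx).simplify(expr)?;
    ctx.create_physical_expr(df_schema, expr)
}
```

Keeping simplification out of `create_physical_expr` makes the lowering step predictable and leaves constant folding as an explicit, opt-in pass.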
2 changes: 1 addition & 1 deletion datafusion/core/src/test_util/parquet.rs
@@ -168,7 +168,7 @@ impl TestParquetFile {
         let parquet_options = ctx.copied_table_options().parquet;
         if let Some(filter) = maybe_filter {
             let simplifier = ExprSimplifier::new(context);
-            let filter = simplifier.coerce(filter, df_schema.clone()).unwrap();
+            let filter = simplifier.coerce(filter, &df_schema).unwrap();
             let physical_filter_expr =
                 create_physical_expr(&filter, &df_schema, &ExecutionProps::default())?;
             let parquet_exec = Arc::new(ParquetExec::new(
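After this change `coerce` borrows the schema (`&DFSchema`) rather than taking an owned `DFSchemaRef`. A small sketch of what coercion does; `coerce_sketch` is invented and the printed form is indicative, not exact:

```rust
use std::sync::Arc;

use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::common::DFSchema;
use datafusion::error::Result;
use datafusion::optimizer::simplify_expressions::ExprSimplifier;
use datafusion::prelude::*;
use datafusion_expr::execution_props::ExecutionProps;
use datafusion_expr::simplify::SimplifyContext;

fn coerce_sketch() -> Result<()> {
    let df_schema =
        DFSchema::try_from(Schema::new(vec![Field::new("a", DataType::Int32, false)]))?;

    let props = ExecutionProps::new();
    let context =
        SimplifyContext::new(&props).with_schema(Arc::new(df_schema.clone()));
    let simplifier = ExprSimplifier::new(context);

    // Int32 column compared to an Int64 literal: the types differ
    let expr = col("a").eq(lit(5i64));

    // `coerce` rewrites the tree so both sides share a type,
    // e.g. by casting the column side up to Int64
    let coerced = simplifier.coerce(expr, &df_schema)?;
    println!("{coerced}");
    Ok(())
}
```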