mz_adapter/optimize/view.rs

// Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.

//! An Optimizer that
//! 1. Optimistically calls `optimize_mir_constant`.
//! 2. Then, if we haven't arrived at a constant, it does real optimization:
//!    - calls `prep_relation_expr` if an `ExprPrepStyle` was given.
//!    - calls `optimize_mir_local`, i.e., the logical optimizer.
//!
//! This is used for `CREATE VIEW` statements and in various other situations where no physical
//! optimization is needed, such as for `INSERT` statements.
//!
//! TODO: We should split this into an optimizer that is just for views, and another optimizer
//! for various other ad hoc things, such as `INSERT`, `COPY FROM`, etc.
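//!
//! A minimal usage sketch (hypothetical call site; assumes an `OptimizerConfig` and an
//! `HirRelationExpr` are already in hand, and passes `None` for metrics):
//!
//! ```ignore
//! let mut optimizer = Optimizer::new(config, None);
//! let optimized: OptimizedMirRelationExpr = optimizer.optimize(hir_expr)?;
//! ```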

use std::time::Instant;

use mz_expr::OptimizedMirRelationExpr;
use mz_sql::optimizer_metrics::OptimizerMetrics;
use mz_sql::plan::HirRelationExpr;
use mz_transform::TransformCtx;
use mz_transform::dataflow::DataflowMetainfo;
use mz_transform::typecheck::{SharedContext as TypecheckContext, empty_context};

use crate::optimize::dataflows::{ExprPrepStyle, prep_relation_expr};
use crate::optimize::{
    Optimize, OptimizerConfig, OptimizerError, optimize_mir_constant, optimize_mir_local,
    trace_plan,
};

pub struct Optimizer<'a> {
    /// A typechecking context to use throughout the optimizer pipeline.
    typecheck_ctx: TypecheckContext,
    /// Optimizer config.
    config: OptimizerConfig,
    /// Optimizer metrics.
    ///
    /// Allowed to be `None` for cases where view optimization is invoked outside of the
    /// coordinator context and the metrics are not available.
    metrics: Option<OptimizerMetrics>,
    /// If present, the optimizer will call `prep_relation_expr` using the given `ExprPrepStyle`.
    expr_prep_style: Option<ExprPrepStyle<'a>>,
    /// Whether to call `FoldConstants` with a size limit, or try to fold constants of any size.
    fold_constants_limit: bool,
}

impl<'a> Optimizer<'a> {
    pub fn new(config: OptimizerConfig, metrics: Option<OptimizerMetrics>) -> Self {
        Self {
            typecheck_ctx: empty_context(),
            config,
            metrics,
            expr_prep_style: None,
            fold_constants_limit: true,
        }
    }

    /// Creates an optimizer instance that also calls `prep_relation_expr` with the given
    /// `ExprPrepStyle`, so that unmaterializable functions are resolved.
    /// Additionally, this instance calls constant folding without a size limit.
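    ///
    /// A hedged construction sketch (hypothetical call site; `expr_prep_style` is assumed to be
    /// whatever `ExprPrepStyle` the enclosing statement requires):
    ///
    /// ```ignore
    /// let mut optimizer = Optimizer::new_with_prep_no_limit(config, None, expr_prep_style);
    /// let optimized = optimizer.optimize(hir_expr)?;
    /// ```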
    pub fn new_with_prep_no_limit(
        config: OptimizerConfig,
        metrics: Option<OptimizerMetrics>,
        expr_prep_style: ExprPrepStyle<'a>,
    ) -> Optimizer<'a> {
        Self {
            typecheck_ctx: empty_context(),
            config,
            metrics,
            expr_prep_style: Some(expr_prep_style),
            fold_constants_limit: false,
        }
    }
}

impl Optimize<HirRelationExpr> for Optimizer<'_> {
    type To = OptimizedMirRelationExpr;

    fn optimize(&mut self, expr: HirRelationExpr) -> Result<Self::To, OptimizerError> {
        let time = Instant::now();

        // Trace the pipeline input under `optimize/raw`.
        trace_plan!(at: "raw", &expr);

        // HIR ⇒ MIR lowering and decorrelation
        let mut expr = expr.lower(&self.config, self.metrics.as_ref())?;

        let mut df_meta = DataflowMetainfo::default();
        let mut transform_ctx = TransformCtx::local(
            &self.config.features,
            &self.typecheck_ctx,
            &mut df_meta,
            self.metrics.as_ref(),
            None,
        );

        // First, we run a very simple optimizer pipeline, which only folds constants. This takes
        // care of constant INSERTs. (This optimizer is also used for INSERTs, not just VIEWs.)
        expr = optimize_mir_constant(expr, &mut transform_ctx, self.fold_constants_limit)?;

        // MIR ⇒ MIR optimization (local)
        let expr = if expr.as_const().is_some() {
            // No need to optimize further, because we already have a constant.
            // But trace this at "local", so that `EXPLAIN LOCALLY OPTIMIZED PLAN` can pick it up.
            trace_plan!(at: "local", &expr);
            OptimizedMirRelationExpr(expr)
        } else {
            // Do the real optimization (starting with `prep_relation_expr` if needed).
            if let Some(expr_prep_style) = &self.expr_prep_style {
                let mut opt_expr = OptimizedMirRelationExpr(expr);
                prep_relation_expr(&mut opt_expr, expr_prep_style.clone())?;
                expr = opt_expr.into_inner();
            }
            optimize_mir_local(expr, &mut transform_ctx)?
        };

        if let Some(metrics) = &self.metrics {
            metrics.observe_e2e_optimization_time("view", time.elapsed());
        }

        // Return the resulting OptimizedMirRelationExpr.
        Ok(expr)
    }
}