mz_adapter/optimize/view.rs

// Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.

//! An Optimizer that
//! 1. Optimistically calls `optimize_mir_constant`.
//! 2. Then, if we haven't arrived at a constant, it does real optimization:
//!    - calls `prep_relation_expr` if an `ExprPrepStyle` was given.
//!    - calls `optimize_mir_local`, i.e., the logical optimizer.
//!
//! This is used for `CREATE VIEW` statements and in various other situations where no physical
//! optimization is needed, such as for `INSERT` statements.
//!
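//! A minimal usage sketch (marked `ignore` because it is not a self-contained doctest;
//! `config` is assumed to be an already constructed `OptimizerConfig` and `hir` an
//! already planned `HirRelationExpr`):
//!
//! ```ignore
//! // The `Optimize` trait must be in scope for the `optimize` call. Metrics are
//! // optional; pass `None` when running outside of the coordinator context.
//! let mut optimizer = Optimizer::new(config, None);
//! // Lowers HIR to MIR, folds constants, and, unless the result is already a
//! // constant, runs the logical (MIR ⇒ MIR) optimizer.
//! let optimized: OptimizedMirRelationExpr = optimizer.optimize(hir)?;
//! ```
//!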
//! TODO: We should split this into an optimizer that is just for views, and another optimizer
//! for various other ad hoc things, such as `INSERT`, `COPY FROM`, etc.

use std::time::Instant;

use mz_expr::OptimizedMirRelationExpr;
use mz_sql::optimizer_metrics::OptimizerMetrics;
use mz_sql::plan::HirRelationExpr;
use mz_transform::TransformCtx;
use mz_transform::dataflow::DataflowMetainfo;
use mz_transform::reprtypecheck::{
    SharedContext as ReprTypecheckContext, empty_context as empty_repr_context,
};

use crate::optimize::dataflows::{ExprPrepStyle, prep_relation_expr};
use crate::optimize::{
    Optimize, OptimizerConfig, OptimizerError, optimize_mir_constant, optimize_mir_local,
    trace_plan,
};

pub struct Optimizer<'a> {
    /// A representation typechecking context to use throughout the optimizer pipeline.
    repr_typecheck_ctx: ReprTypecheckContext,
    /// Optimizer config.
    config: OptimizerConfig,
    /// Optimizer metrics.
    ///
    /// Allowed to be `None` for cases where view optimization is invoked outside of the
    /// coordinator context and the metrics are not available.
    metrics: Option<OptimizerMetrics>,
    /// If present, the optimizer will call `prep_relation_expr` using the given `ExprPrepStyle`.
    expr_prep_style: Option<ExprPrepStyle<'a>>,
    /// Whether to call `FoldConstants` with a size limit, or try to fold constants of any size.
    fold_constants_limit: bool,
}

impl<'a> Optimizer<'a> {
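    /// Creates a basic optimizer instance: constant folding runs with a size limit and
    /// `prep_relation_expr` is not called, because no `ExprPrepStyle` is set.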
    pub fn new(config: OptimizerConfig, metrics: Option<OptimizerMetrics>) -> Self {
        Self {
            repr_typecheck_ctx: empty_repr_context(),
            config,
            metrics,
            expr_prep_style: None,
            fold_constants_limit: true,
        }
    }

    /// Creates an optimizer instance that also calls `prep_relation_expr` with the given
    /// `ExprPrepStyle`, so that unmaterializable functions are resolved.
    /// Additionally, this instance calls constant folding without a size limit.
    pub fn new_with_prep_no_limit(
        config: OptimizerConfig,
        metrics: Option<OptimizerMetrics>,
        expr_prep_style: ExprPrepStyle<'a>,
    ) -> Optimizer<'a> {
        Self {
            repr_typecheck_ctx: empty_repr_context(),
            config,
            metrics,
            expr_prep_style: Some(expr_prep_style),
            fold_constants_limit: false,
        }
    }
}

impl Optimize<HirRelationExpr> for Optimizer<'_> {
    type To = OptimizedMirRelationExpr;

    fn optimize(&mut self, expr: HirRelationExpr) -> Result<Self::To, OptimizerError> {
        let time = Instant::now();

        // Trace the pipeline input under `optimize/raw`.
        trace_plan!(at: "raw", &expr);

        // HIR ⇒ MIR lowering and decorrelation
        let mut expr = expr.lower(&self.config, self.metrics.as_ref())?;

        let mut df_meta = DataflowMetainfo::default();
        let mut transform_ctx = TransformCtx::local(
            &self.config.features,
            &self.repr_typecheck_ctx,
            &mut df_meta,
            self.metrics.as_mut(),
            None,
        );

        // First, we run a very simple optimizer pipeline, which only folds constants. This takes
        // care of constant INSERTs. (This optimizer is also used for INSERTs, not just VIEWs.)
        expr = optimize_mir_constant(expr, &mut transform_ctx, self.fold_constants_limit)?;

        // MIR ⇒ MIR optimization (local)
        let expr = if expr.as_const().is_some() {
            // No need to optimize further, because we already have a constant.
            // But trace this at "local", so that `EXPLAIN LOCALLY OPTIMIZED PLAN` can pick it up.
            trace_plan!(at: "local", &expr);
            OptimizedMirRelationExpr(expr)
        } else {
            // Do the real optimization (starting with `prep_relation_expr` if needed).
            if let Some(expr_prep_style) = &self.expr_prep_style {
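                // `prep_relation_expr` expects an `OptimizedMirRelationExpr`, so temporarily
                // wrap the expression and unwrap it again after the preparation.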
                let mut opt_expr = OptimizedMirRelationExpr(expr);
                prep_relation_expr(&mut opt_expr, expr_prep_style.clone())?;
                expr = opt_expr.into_inner();
            }
            optimize_mir_local(expr, &mut transform_ctx)?
        };

        if let Some(metrics) = &self.metrics {
            metrics.observe_e2e_optimization_time("view", time.elapsed());
        }

        // Return the resulting OptimizedMirRelationExpr.
        Ok(expr)
    }
}