mz_expr/linear.rs
1// Copyright Materialize, Inc. and contributors. All rights reserved.
2//
3// Use of this software is governed by the Business Source License
4// included in the LICENSE file.
5//
6// As of the Change Date specified in that file, in accordance with
7// the Business Source License, use of this software will be governed
8// by the Apache License, Version 2.0.
9use std::collections::{BTreeMap, BTreeSet};
10use std::fmt::Display;
11
12use mz_repr::{Datum, Row};
13use serde::{Deserialize, Serialize};
14
15use crate::visit::Visit;
16use crate::{MirRelationExpr, MirScalarExpr};
17
/// A compound operator that can be applied row-by-row.
///
/// This operator integrates the map, filter, and project operators.
/// It applies a sequence of map expressions, which are allowed to
/// refer to previous expressions, interleaved with predicates which
/// must be satisfied for an output to be produced. If all predicates
/// evaluate to `Datum::True`, the data at the identified columns are
/// collected and produced as output in a packed `Row`.
///
/// This operator is a "builder" and its contents may contain expressions
/// that are not yet executable. For example, it may contain temporal
/// expressions in `self.expressions`, even though this is not something
/// we can directly evaluate. The plan creation methods will defensively
/// ensure that the right thing happens.
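///
/// # Example
///
/// A minimal sketch of how the builder methods assemble an instance; the
/// column types and functions here are chosen purely for illustration.
///
/// ```rust
/// use mz_expr::{MapFilterProject, MirScalarExpr, func};
///
/// // From three input columns, append the sum of the first two, keep rows
/// // where that sum is less than the third column, and output only the sum.
/// let mfp = MapFilterProject::new(3)
///     .map(vec![
///         MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::AddInt64),
///     ])
///     .filter(vec![
///         MirScalarExpr::column(3).call_binary(MirScalarExpr::column(2), func::Lt),
///     ])
///     .project(vec![3]);
///
/// assert_eq!(mfp.input_arity, 3);
/// assert_eq!(mfp.expressions.len(), 1);
/// assert_eq!(mfp.predicates.len(), 1);
/// assert_eq!(mfp.projection, vec![3]);
/// ```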
32#[derive(
33 Clone,
34 Debug,
35 Eq,
36 PartialEq,
37 Serialize,
38 Deserialize,
39 Hash,
40 Ord,
41 PartialOrd
42)]
43pub struct MapFilterProject {
44 /// A sequence of expressions that should be appended to the row.
45 ///
46 /// Many of these expressions may not be produced in the output,
47 /// and may only be present as common subexpressions.
48 pub expressions: Vec<MirScalarExpr>,
49 /// Expressions that must evaluate to `Datum::True` for the output
50 /// row to be produced.
51 ///
52 /// Each entry is prepended with a column identifier indicating
53 /// the column *before* which the predicate should first be applied.
54 /// Most commonly this would be one plus the largest column identifier
55 /// in the predicate's support, but it could be larger to implement
56 /// guarded evaluation of predicates.
57 ///
58 /// This list should be sorted by the first field.
59 pub predicates: Vec<(usize, MirScalarExpr)>,
60 /// A sequence of column identifiers whose data form the output row.
61 pub projection: Vec<usize>,
62 /// The expected number of input columns.
63 ///
64 /// This is needed to ensure correct identification of newly formed
65 /// columns in the output.
66 pub input_arity: usize,
67}
68
69impl Display for MapFilterProject {
70 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
71 writeln!(f, "MapFilterProject(")?;
72 writeln!(f, " expressions:")?;
73 self.expressions
74 .iter()
75 .enumerate()
76 .try_for_each(|(i, e)| writeln!(f, " #{} <- {},", i + self.input_arity, e))?;
77 writeln!(f, " predicates:")?;
78 self.predicates
79 .iter()
80 .try_for_each(|(before, p)| writeln!(f, " <before: {}> {},", before, p))?;
81 writeln!(f, " projection: {:?}", self.projection)?;
82 writeln!(f, " input_arity: {}", self.input_arity)?;
83 writeln!(f, ")")
84 }
85}
86
87impl MapFilterProject {
88 /// Create a no-op operator for an input of a supplied arity.
89 pub fn new(input_arity: usize) -> Self {
90 Self {
91 expressions: Vec::new(),
92 predicates: Vec::new(),
93 projection: (0..input_arity).collect(),
94 input_arity,
95 }
96 }
97
    /// Given two mfps, return an mfp that applies one
    /// followed by the other.
    /// Note that the arguments are in the opposite order
    /// from how function composition is usually written in mathematics.
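    ///
    /// # Example
    ///
    /// A small sketch of composition using only projections; the shapes are
    /// chosen for illustration.
    ///
    /// ```rust
    /// use mz_expr::MapFilterProject;
    ///
    /// // First keep only the second of three columns, then duplicate the
    /// // single remaining column.
    /// let before = MapFilterProject::new(3).project(vec![1]);
    /// let after = MapFilterProject::new(1).project(vec![0, 0]);
    /// let composed = MapFilterProject::compose(before, after);
    /// assert_eq!(composed, MapFilterProject::new(3).project(vec![1, 1]));
    /// ```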
102 pub fn compose(before: Self, after: Self) -> Self {
103 let (m, f, p) = after.into_map_filter_project();
104 before.map(m).filter(f).project(p)
105 }
106
    /// True if the operator describes the identity transformation.
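    ///
    /// # Example
    ///
    /// A brief sketch of the expected behavior.
    ///
    /// ```rust
    /// use mz_expr::MapFilterProject;
    ///
    /// assert!(MapFilterProject::new(4).is_identity());
    /// assert!(!MapFilterProject::new(4).project(vec![0, 1]).is_identity());
    /// ```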
108 pub fn is_identity(&self) -> bool {
109 self.expressions.is_empty()
110 && self.predicates.is_empty()
111 && self.projection.len() == self.input_arity
112 && self.projection.iter().enumerate().all(|(i, p)| i == *p)
113 }
114
115 /// Retain only the indicated columns in the presented order.
116 pub fn project<I>(mut self, columns: I) -> Self
117 where
118 I: IntoIterator<Item = usize> + std::fmt::Debug,
119 {
120 self.projection = columns.into_iter().map(|c| self.projection[c]).collect();
121 self
122 }
123
    /// Retain only rows satisfying these predicates.
    ///
    /// This method introduces predicates as eagerly as they can be evaluated,
    /// which may not be desired for predicates that may cause exceptions.
    /// If finer control is required, the predicates can be added manually.
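    ///
    /// # Example
    ///
    /// A sketch of how eagerly a predicate is positioned; the functions here
    /// are illustrative.
    ///
    /// ```rust
    /// use mz_expr::{MapFilterProject, MirScalarExpr, func};
    ///
    /// let mfp = MapFilterProject::new(2)
    ///     .filter(vec![
    ///         MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::Lt),
    ///     ]);
    /// // The predicate can be applied once columns 0 and 1 exist, so it is
    /// // recorded to take effect "before" column 2 is formed.
    /// assert_eq!(mfp.predicates[0].0, 2);
    /// ```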
129 pub fn filter<I>(mut self, predicates: I) -> Self
130 where
131 I: IntoIterator<Item = MirScalarExpr>,
132 {
133 for mut predicate in predicates {
134 // Correct column references.
135 predicate.permute(&self.projection[..]);
136
137 // Validate column references.
138 assert!(
139 predicate
140 .support()
141 .into_iter()
142 .all(|c| c < self.input_arity + self.expressions.len())
143 );
144
145 // Insert predicate as eagerly as it can be evaluated:
146 // just after the largest column in its support is formed.
147 let max_support = predicate
148 .support()
149 .into_iter()
150 .max()
151 .map(|c| c + 1)
152 .unwrap_or(0);
153 self.predicates.push((max_support, predicate))
154 }
155 // Stable sort predicates by position at which they take effect.
156 // We put literal errors at the end as a stop-gap to avoid erroring
157 // before we are able to evaluate any predicates that might prevent it.
158 self.predicates
159 .sort_by_key(|(position, predicate)| (predicate.is_literal_err(), *position));
160 self
161 }
162
    /// Append the result of evaluating expressions to each row.
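    ///
    /// # Example
    ///
    /// A sketch showing how appended expressions are numbered.
    ///
    /// ```rust
    /// use mz_expr::{MapFilterProject, MirScalarExpr, func};
    ///
    /// let mfp = MapFilterProject::new(2)
    ///     .map(vec![
    ///         MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::AddInt64),
    ///     ]);
    /// // The new expression becomes column 2 (`input_arity + 0`) and is
    /// // appended to the projection.
    /// assert_eq!(mfp.projection, vec![0, 1, 2]);
    /// ```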
164 pub fn map<I>(mut self, expressions: I) -> Self
165 where
166 I: IntoIterator<Item = MirScalarExpr>,
167 {
168 for mut expression in expressions {
169 // Correct column references.
170 expression.permute(&self.projection[..]);
171
172 // Validate column references.
173 assert!(
174 expression
175 .support()
176 .into_iter()
177 .all(|c| c < self.input_arity + self.expressions.len())
178 );
179
180 // Introduce expression and produce as output.
181 self.expressions.push(expression);
182 self.projection
183 .push(self.input_arity + self.expressions.len() - 1);
184 }
185
186 self
187 }
188
189 /// Like [`MapFilterProject::as_map_filter_project`], but consumes `self` rather than cloning.
190 pub fn into_map_filter_project(self) -> (Vec<MirScalarExpr>, Vec<MirScalarExpr>, Vec<usize>) {
191 let predicates = self
192 .predicates
193 .into_iter()
194 .map(|(_pos, predicate)| predicate)
195 .collect();
196 (self.expressions, predicates, self.projection)
197 }
198
    /// As the arguments to `Map`, `Filter`, and `Project` operators.
    ///
    /// In principle, this operator can be implemented as a sequence of
    /// more elemental operators, likely less efficiently.
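    ///
    /// # Example
    ///
    /// A sketch of unpacking an instance into its three constituent parts.
    ///
    /// ```rust
    /// use mz_expr::{MapFilterProject, MirScalarExpr, func};
    ///
    /// let mfp = MapFilterProject::new(2)
    ///     .map(vec![
    ///         MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::AddInt64),
    ///     ])
    ///     .project(vec![2]);
    /// let (map, filter, project) = mfp.as_map_filter_project();
    /// assert_eq!(map.len(), 1);
    /// assert!(filter.is_empty());
    /// assert_eq!(project, vec![2]);
    /// ```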
203 pub fn as_map_filter_project(&self) -> (Vec<MirScalarExpr>, Vec<MirScalarExpr>, Vec<usize>) {
204 self.clone().into_map_filter_project()
205 }
206
207 /// Determines if a scalar expression must be equal to a literal datum.
208 pub fn literal_constraint(&self, expr: &MirScalarExpr) -> Option<Datum<'_>> {
209 for (_pos, predicate) in self.predicates.iter() {
210 if let MirScalarExpr::CallBinary {
211 func: crate::BinaryFunc::Eq(_),
212 expr1,
213 expr2,
214 } = predicate
215 {
216 if let Some(Ok(datum1)) = expr1.as_literal() {
217 if &**expr2 == expr {
218 return Some(datum1);
219 }
220 }
221 if let Some(Ok(datum2)) = expr2.as_literal() {
222 if &**expr1 == expr {
223 return Some(datum2);
224 }
225 }
226 }
227 }
228 None
229 }
230
    /// Determines if a sequence of scalar expressions must be equal to a literal row.
    ///
    /// This method returns `None` on an empty `exprs`, which might be surprising, but
    /// seems to line up with its callers' expectations of that being a non-constraint.
    /// The caller knows if `exprs` is empty, and can modify their behavior appropriately
    /// if they would rather have a literal empty row.
237 pub fn literal_constraints(&self, exprs: &[MirScalarExpr]) -> Option<Row> {
238 if exprs.is_empty() {
239 return None;
240 }
241 let mut row = Row::default();
242 let mut packer = row.packer();
243 for expr in exprs {
244 if let Some(literal) = self.literal_constraint(expr) {
245 packer.push(literal);
246 } else {
247 return None;
248 }
249 }
250 Some(row)
251 }
252
253 /// Extracts any MapFilterProject at the root of the expression.
254 ///
255 /// The expression will be modified to extract any maps, filters, and
256 /// projections, which will be returned as `Self`. If there are no maps,
257 /// filters, or projections the method will return an identity operator.
258 ///
    /// The extracted expressions may contain temporal predicates, and one
    /// should be careful not to apply them blindly.
261 pub fn extract_from_expression(expr: &MirRelationExpr) -> (Self, &MirRelationExpr) {
262 // TODO: This could become iterative rather than recursive if
263 // we were able to fuse MFP operators from below, rather than
264 // from above.
265 match expr {
266 MirRelationExpr::Map { input, scalars } => {
267 let (mfp, expr) = Self::extract_from_expression(input);
268 (mfp.map(scalars.iter().cloned()), expr)
269 }
270 MirRelationExpr::Filter { input, predicates } => {
271 let (mfp, expr) = Self::extract_from_expression(input);
272 (mfp.filter(predicates.iter().cloned()), expr)
273 }
274 MirRelationExpr::Project { input, outputs } => {
275 let (mfp, expr) = Self::extract_from_expression(input);
276 (mfp.project(outputs.iter().cloned()), expr)
277 }
278 // TODO: The recursion is quadratic in the number of Map/Filter/Project operators due to
279 // this call to `arity()`.
280 x => (Self::new(x.arity()), x),
281 }
282 }
283
284 /// Extracts an error-free MapFilterProject at the root of the expression.
285 ///
286 /// The expression will be modified to extract maps, filters, and projects
287 /// from the root of the expression, which will be returned as `Self`. The
288 /// extraction will halt if a Map or Filter containing a literal error is
289 /// reached. Otherwise, the method will return an identity operator.
290 ///
291 /// This method is meant to be used during optimization, where it is
292 /// necessary to avoid moving around maps and filters with errors.
293 pub fn extract_non_errors_from_expr(expr: &MirRelationExpr) -> (Self, &MirRelationExpr) {
294 match expr {
295 MirRelationExpr::Map { input, scalars }
296 if scalars.iter().all(|s| !s.is_literal_err()) =>
297 {
298 let (mfp, expr) = Self::extract_non_errors_from_expr(input);
299 (mfp.map(scalars.iter().cloned()), expr)
300 }
301 MirRelationExpr::Filter { input, predicates }
302 if predicates.iter().all(|p| !p.is_literal_err()) =>
303 {
304 let (mfp, expr) = Self::extract_non_errors_from_expr(input);
305 (mfp.filter(predicates.iter().cloned()), expr)
306 }
307 MirRelationExpr::Project { input, outputs } => {
308 let (mfp, expr) = Self::extract_non_errors_from_expr(input);
309 (mfp.project(outputs.iter().cloned()), expr)
310 }
311 x => (Self::new(x.arity()), x),
312 }
313 }
314
315 /// Extracts an error-free MapFilterProject at the root of the expression.
316 ///
317 /// Differs from [MapFilterProject::extract_non_errors_from_expr] by taking and returning a
318 /// mutable reference.
319 pub fn extract_non_errors_from_expr_ref_mut(
320 expr: &mut MirRelationExpr,
321 ) -> (Self, &mut MirRelationExpr) {
322 // This is essentially the same code as `extract_non_errors_from_expr`, except the seemingly
323 // superfluous outer if, which works around a borrow-checker issue:
324 // https://github.com/rust-lang/rust/issues/54663
325 if matches!(
326 expr,
327 MirRelationExpr::Map { input: _, scalars }
328 if scalars.iter().all(|s| !s.is_literal_err())
329 ) || matches!(
330 expr,
331 MirRelationExpr::Filter { input: _, predicates }
332 if predicates.iter().all(|p| !p.is_literal_err())
333 ) || matches!(expr, MirRelationExpr::Project { .. })
334 {
335 match expr {
336 MirRelationExpr::Map { input, scalars }
337 if scalars.iter().all(|s| !s.is_literal_err()) =>
338 {
339 let (mfp, expr) = Self::extract_non_errors_from_expr_ref_mut(input);
340 (mfp.map(scalars.iter().cloned()), expr)
341 }
342 MirRelationExpr::Filter { input, predicates }
343 if predicates.iter().all(|p| !p.is_literal_err()) =>
344 {
345 let (mfp, expr) = Self::extract_non_errors_from_expr_ref_mut(input);
346 (mfp.filter(predicates.iter().cloned()), expr)
347 }
348 MirRelationExpr::Project { input, outputs } => {
349 let (mfp, expr) = Self::extract_non_errors_from_expr_ref_mut(input);
350 (mfp.project(outputs.iter().cloned()), expr)
351 }
352 _ => unreachable!(),
353 }
354 } else {
355 (Self::new(expr.arity()), expr)
356 }
357 }
358
359 /// Removes an error-free MapFilterProject from the root of the expression.
360 ///
361 /// The expression will be modified to extract maps, filters, and projects
362 /// from the root of the expression, which will be returned as `Self`. The
363 /// extraction will halt if a Map or Filter containing a literal error is
364 /// reached. Otherwise, the method will return an
365 /// identity operator, and the expression will remain unchanged.
366 ///
367 /// This method is meant to be used during optimization, where it is
368 /// necessary to avoid moving around maps and filters with errors.
369 pub fn extract_non_errors_from_expr_mut(expr: &mut MirRelationExpr) -> Self {
370 match expr {
371 MirRelationExpr::Map { input, scalars }
372 if scalars.iter().all(|s| !s.is_literal_err()) =>
373 {
374 let mfp =
375 Self::extract_non_errors_from_expr_mut(input).map(scalars.iter().cloned());
376 *expr = input.take_dangerous();
377 mfp
378 }
379 MirRelationExpr::Filter { input, predicates }
380 if predicates.iter().all(|p| !p.is_literal_err()) =>
381 {
382 let mfp = Self::extract_non_errors_from_expr_mut(input)
383 .filter(predicates.iter().cloned());
384 *expr = input.take_dangerous();
385 mfp
386 }
387 MirRelationExpr::Project { input, outputs } => {
388 let mfp =
389 Self::extract_non_errors_from_expr_mut(input).project(outputs.iter().cloned());
390 *expr = input.take_dangerous();
391 mfp
392 }
393 x => Self::new(x.arity()),
394 }
395 }
396
397 /// Extracts temporal predicates into their own `Self`.
398 ///
399 /// Expressions that are used by the temporal predicates are exposed by `self.projection`,
400 /// though there could be justification for extracting them as well if they are otherwise
401 /// unused.
402 ///
403 /// This separation is valuable when the execution cannot be fused into one operator.
404 pub fn extract_temporal(&mut self) -> Self {
405 // Optimize the expression, as it is only post-optimization that we can be certain
406 // that temporal expressions are restricted to filters. We could relax this in the
407 // future to be only `inline_expressions` and `remove_undemanded`, but optimization
408 // seems to be the best fit at the moment.
409 self.optimize();
410
        // Assert that we no longer have temporal expressions to evaluate. This should only
        // fail if the optimization above results in temporal expressions being yielded in
        // the output, which is out of spec for how the type is meant to be used.
414 assert!(!self.expressions.iter().any(|e| e.contains_temporal()));
415
416 // Extract temporal predicates from `self.predicates`.
417 let mut temporal_predicates = Vec::new();
418 self.predicates.retain(|(_position, predicate)| {
419 if predicate.contains_temporal() {
420 temporal_predicates.push(predicate.clone());
421 false
422 } else {
423 true
424 }
425 });
426
427 // Determine extended input columns used by temporal filters.
428 let mut support = BTreeSet::new();
429 for predicate in temporal_predicates.iter() {
430 support.extend(predicate.support());
431 }
432
433 // Discover the locations of these columns after `self.projection`.
434 let old_projection_len = self.projection.len();
435 let mut new_location = BTreeMap::new();
436 for original in support.iter() {
437 if let Some(position) = self.projection.iter().position(|x| x == original) {
438 new_location.insert(*original, position);
439 } else {
440 new_location.insert(*original, self.projection.len());
441 self.projection.push(*original);
442 }
443 }
444 // Permute references in extracted predicates to their new locations.
445 for predicate in temporal_predicates.iter_mut() {
446 predicate.permute_map(&new_location);
447 }
448
449 // Form a new `Self` containing the temporal predicates to return.
450 Self::new(self.projection.len())
451 .filter(temporal_predicates)
452 .project(0..old_projection_len)
453 }
454
    /// Extracts common expressions from multiple `Self` into a result `Self`.
    ///
    /// Each element of `mfps` is mutated so that it is functionally equivalent to its
    /// corresponding input when composed atop the resulting `Self`.
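    ///
    /// # Example
    ///
    /// A sketch of extracting one shared expression from two operators; the
    /// expressions are chosen for illustration, and the exact shapes asserted
    /// below reflect the current implementation.
    ///
    /// ```rust
    /// use mz_expr::{MapFilterProject, MirScalarExpr, func};
    ///
    /// let sum = MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::AddInt64);
    /// let mut a = MapFilterProject::new(2).map(vec![sum.clone()]).project(vec![2]);
    /// let mut b = MapFilterProject::new(2).map(vec![sum.clone()]).project(vec![0, 2]);
    ///
    /// let common = MapFilterProject::extract_common(&mut [&mut a, &mut b]);
    ///
    /// // The shared sum is computed once by `common`, whose output is
    /// // `(column 0, the sum)`; `a` and `b` now merely select from it.
    /// assert_eq!(common, MapFilterProject::new(2).map(vec![sum]).project(vec![0, 2]));
    /// assert_eq!(a, MapFilterProject::new(2).project(vec![1]));
    /// assert_eq!(b, MapFilterProject::new(2).project(vec![0, 1]));
    /// ```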
461 pub fn extract_common(mfps: &mut [&mut Self]) -> Self {
462 match mfps.len() {
463 0 => {
464 panic!("Cannot call method on empty arguments");
465 }
466 1 => {
467 let output_arity = mfps[0].projection.len();
468 std::mem::replace(mfps[0], MapFilterProject::new(output_arity))
469 }
470 _ => {
471 // More generally, we convert each mfp to ANF, at which point we can
472 // repeatedly extract atomic expressions that depend only on input
473 // columns, migrate them to an input mfp, and repeat until no such
474 // expressions exist. At this point, we can also migrate predicates
475 // and then determine and push down projections.
476
477 // Prepare a return `Self`.
478 let mut result_mfp = MapFilterProject::new(mfps[0].input_arity);
479
480 // We convert each mfp to ANF, using `memoize_expressions`.
481 for mfp in mfps.iter_mut() {
482 mfp.memoize_expressions();
483 }
484
485 // We repeatedly extract common expressions, until none remain.
486 let mut done = false;
487 while !done {
488 // We use references to determine common expressions, and must
489 // introduce a scope here to drop the borrows before mutation.
490 let common = {
491 // The input arity may increase as we iterate, so recapture.
492 let input_arity = result_mfp.projection.len();
493 let mut prev: BTreeSet<_> = mfps[0]
494 .expressions
495 .iter()
496 .filter(|e| e.support().iter().max() < Some(&input_arity))
497 .collect();
498 let mut next = BTreeSet::default();
499 for mfp in mfps[1..].iter() {
500 for expr in mfp.expressions.iter() {
501 if prev.contains(expr) {
502 next.insert(expr);
503 }
504 }
505 std::mem::swap(&mut prev, &mut next);
506 next.clear();
507 }
508 prev.into_iter().cloned().collect::<Vec<_>>()
509 };
510 // Without new common expressions, we should terminate the loop.
511 done = common.is_empty();
512
513 // Migrate each expression in `common` to `result_mfp`.
514 for expr in common.into_iter() {
515 // Update each mfp by removing expr and updating column references.
516 for mfp in mfps.iter_mut() {
517 // With `expr` next in `result_mfp`, it is as if we are rotating it to
518 // be the first expression in `mfp`, and then removing it from `mfp` and
519 // increasing the input arity of `mfp`.
520 let arity = result_mfp.projection.len();
521 let found = mfp.expressions.iter().position(|e| e == &expr).unwrap();
522 let index = arity + found;
523 // Column references change due to the rotation from `index` to `arity`.
524 let action = |c: &mut usize| {
525 if arity <= *c && *c < index {
526 *c += 1;
527 } else if *c == index {
528 *c = arity;
529 }
530 };
531 // Rotate `expr` from `found` to first, and then snip.
532 // Short circuit by simply removing and incrementing the input arity.
533 mfp.input_arity += 1;
534 mfp.expressions.remove(found);
535 // Update column references in expressions, predicates, and projections.
536 for e in mfp.expressions.iter_mut() {
537 e.visit_columns(action);
538 }
539 for (o, e) in mfp.predicates.iter_mut() {
540 e.visit_columns(action);
541 // Max out the offset for the predicate; optimization will correct.
542 *o = mfp.input_arity + mfp.expressions.len();
543 }
544 for c in mfp.projection.iter_mut() {
545 action(c);
546 }
547 }
                        // Install the expression in `result_mfp` and extend its projection.
549 result_mfp.expressions.push(expr);
550 result_mfp.projection.push(result_mfp.projection.len());
551 }
552 }
553 // As before, but easier: predicates in common to all mfps.
554 let common_preds: Vec<MirScalarExpr> = {
555 let input_arity = result_mfp.projection.len();
556 let mut prev: BTreeSet<_> = mfps[0]
557 .predicates
558 .iter()
559 .map(|(_, e)| e)
560 .filter(|e| e.support().iter().max() < Some(&input_arity))
561 .collect();
562 let mut next = BTreeSet::default();
563 for mfp in mfps[1..].iter() {
564 for (_, expr) in mfp.predicates.iter() {
565 if prev.contains(expr) {
566 next.insert(expr);
567 }
568 }
569 std::mem::swap(&mut prev, &mut next);
570 next.clear();
571 }
                    // Predicates in common, which we will append to `result_mfp.predicates`.
573 prev.into_iter().cloned().collect::<Vec<_>>()
574 };
575 for mfp in mfps.iter_mut() {
576 mfp.predicates.retain(|(_, p)| !common_preds.contains(p));
577 mfp.optimize();
578 }
579 result_mfp.predicates.extend(
580 common_preds
581 .into_iter()
582 .map(|e| (result_mfp.projection.len(), e)),
583 );
584
585 // Then, look for unused columns and project them away.
586 let mut common_demand = BTreeSet::new();
587 for mfp in mfps.iter() {
588 common_demand.extend(mfp.demand());
589 }
590 // columns in `common_demand` must be retained, but others
591 // may be discarded.
592 let common_demand = (0..result_mfp.projection.len())
593 .filter(|x| common_demand.contains(x))
594 .collect::<Vec<_>>();
595 let remap = common_demand
596 .iter()
597 .cloned()
598 .enumerate()
599 .map(|(new, old)| (old, new))
600 .collect::<BTreeMap<_, _>>();
601 for mfp in mfps.iter_mut() {
602 mfp.permute_fn(|c| remap[&c], common_demand.len());
603 }
604 result_mfp = result_mfp.project(common_demand);
605
606 // Return the resulting MFP.
607 result_mfp.optimize();
608 result_mfp
609 }
610 }
611 }
612
    /// Returns `self`, and leaves behind an identity operator that acts on its output.
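    ///
    /// # Example
    ///
    /// A sketch of the take-and-replace behavior.
    ///
    /// ```rust
    /// use mz_expr::MapFilterProject;
    ///
    /// let mut mfp = MapFilterProject::new(2).project(vec![1]);
    /// let taken = mfp.take();
    /// // `taken` is the original operator; `mfp` is now a no-op on the
    /// // single column that `taken` produces.
    /// assert_eq!(taken, MapFilterProject::new(2).project(vec![1]));
    /// assert_eq!(mfp, MapFilterProject::new(1));
    /// ```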
614 pub fn take(&mut self) -> Self {
615 let mut identity = Self::new(self.projection.len());
616 std::mem::swap(self, &mut identity);
617 identity
618 }
619
    /// Convert the `MapFilterProject` into a staged evaluation plan.
    ///
    /// The main behavior is to extract temporal predicates, which cannot be evaluated
    /// using the standard machinery.
624 pub fn into_plan(self) -> Result<plan::MfpPlan, String> {
625 plan::MfpPlan::create_from(self)
626 }
627}
628
629impl MapFilterProject {
630 /// Partitions `self` into two instances, one of which can be eagerly applied.
631 ///
    /// The `available` argument indicates which input columns are available (keys)
    /// and in which positions (values). This information may allow some maps and
    /// filters to execute. The `input_arity` argument reports the total number of
    /// input columns (which may include some not present in `available`).
636 ///
637 /// This method partitions `self` in two parts, `(before, after)`, where `before`
638 /// can be applied on columns present as keys in `available`, and `after` must
639 /// await the introduction of the other input columns.
640 ///
641 /// The `before` instance will *append* any columns that can be determined from
642 /// `available` but will project away any of these columns that are not needed by
643 /// `after`. Importantly, this means that `before` will leave intact *all* input
644 /// columns including those not referenced in `available`.
645 ///
646 /// The `after` instance will presume all input columns are available, followed
647 /// by the appended columns of the `before` instance. It may be that some input
648 /// columns can be projected away in `before` if `after` does not need them, but
649 /// we leave that as something the caller can apply if needed (it is otherwise
650 /// complicated to negotiate which input columns `before` should retain).
651 ///
652 /// To correctly reconstruct `self` from `before` and `after`, one must introduce
653 /// additional input columns, permute all input columns to their locations as
654 /// expected by `self`, follow this by new columns appended by `before`, and
655 /// remove all other columns that may be present.
656 ///
657 /// # Example
658 ///
659 /// ```rust
660 /// use mz_expr::{BinaryFunc, MapFilterProject, MirScalarExpr, func};
661 ///
662 /// // imagine an action on columns (a, b, c, d).
663 /// let original = MapFilterProject::new(4).map(vec![
664 /// MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::AddInt64),
665 /// MirScalarExpr::column(2).call_binary(MirScalarExpr::column(4), func::AddInt64),
666 /// MirScalarExpr::column(3).call_binary(MirScalarExpr::column(5), func::AddInt64),
667 /// ]).project(vec![6]);
668 ///
669 /// // Imagine we start with columns (b, x, a, y, c).
670 /// //
671 /// // The `partition` method requires a map from *expected* input columns to *actual*
672 /// // input columns. In the example above, the columns a, b, and c exist, and are at
673 /// // locations 2, 0, and 4 respectively. We must construct a map to this effect.
674 /// let mut available_columns = std::collections::BTreeMap::new();
675 /// available_columns.insert(0, 2);
676 /// available_columns.insert(1, 0);
677 /// available_columns.insert(2, 4);
678 /// // Partition `original` using the available columns and current input arity.
679 /// // This informs `partition` which columns are available, where they can be found,
680 /// // and how many columns are not relevant but should be preserved.
681 /// let (before, after) = original.partition(available_columns, 5);
682 ///
683 /// // `before` sees all five input columns, and should append `a + b + c`.
684 /// assert_eq!(before, MapFilterProject::new(5).map(vec![
685 /// MirScalarExpr::column(2).call_binary(MirScalarExpr::column(0), func::AddInt64),
686 /// MirScalarExpr::column(4).call_binary(MirScalarExpr::column(5), func::AddInt64),
687 /// ]).project(vec![0, 1, 2, 3, 4, 6]));
688 ///
689 /// // `after` expects to see `(a, b, c, d, a + b + c)`.
690 /// assert_eq!(after, MapFilterProject::new(5).map(vec![
691 /// MirScalarExpr::column(3).call_binary(MirScalarExpr::column(4), func::AddInt64)
692 /// ]).project(vec![5]));
693 ///
694 /// // To reconstruct `self`, we must introduce the columns that are not present,
695 /// // and present them in the order intended by `self`. In this example, we must
696 /// // introduce column d and permute the columns so that they begin (a, b, c, d).
    /// // The columns x and y must be projected away, and any columns introduced by
    /// // `before` must be retained in their current order.
699 ///
700 /// // The `after` instance expects to be provided with all inputs, but it
701 /// // may not need all inputs. The `demand()` and `permute()` methods can
702 /// // optimize the representation.
703 /// ```
704 pub fn partition(self, available: BTreeMap<usize, usize>, input_arity: usize) -> (Self, Self) {
705 // Map expressions, filter predicates, and projections for `before` and `after`.
706 let mut before_expr = Vec::new();
707 let mut before_pred = Vec::new();
708 let mut before_proj = Vec::new();
709 let mut after_expr = Vec::new();
710 let mut after_pred = Vec::new();
711 let mut after_proj = Vec::new();
712
713 // Track which output columns must be preserved in the output of `before`.
714 let mut demanded = BTreeSet::new();
715 demanded.extend(0..self.input_arity);
716 demanded.extend(self.projection.iter());
717
718 // Determine which map expressions can be computed from the available subset.
719 // Some expressions may depend on other expressions, but by evaluating them
720 // in forward order we should accurately determine the available expressions.
721 let mut available_expr = vec![false; self.input_arity];
722 // Initialize available columns from `available`, which is then not used again.
723 for index in available.keys() {
724 available_expr[*index] = true;
725 }
726 for expr in self.expressions.into_iter() {
727 // We treat an expression as available if its supporting columns are available,
728 // and if it is not a literal (we want to avoid pushing down literals). This
729 // choice is ad-hoc, but the intent is that we partition the operators so
730 // that we can reduce the row representation size and total computation.
731 // Pushing down literals harms the former and does nothing for the latter.
732 // In the future, we'll want to have a harder think about this trade-off, as
733 // we are certainly making sub-optimal decisions by pushing down all available
734 // work.
735 // TODO(mcsherry): establish better principles about what work to push down.
736 let is_available =
737 expr.support().into_iter().all(|i| available_expr[i]) && !expr.is_literal();
738 if is_available {
739 before_expr.push(expr);
740 } else {
741 demanded.extend(expr.support());
742 after_expr.push(expr);
743 }
744 available_expr.push(is_available);
745 }
746
747 // Determine which predicates can be computed from the available subset.
748 for (_when, pred) in self.predicates.into_iter() {
749 let is_available = pred.support().into_iter().all(|i| available_expr[i]);
750 if is_available {
751 before_pred.push(pred);
752 } else {
753 demanded.extend(pred.support());
754 after_pred.push(pred);
755 }
756 }
757
        // Map from prior output location to location in un-projected `before`.
        // This map is used to correct references in `before`, but it should be
        // adjusted to reflect `before`'s projection prior to use in `after`.
761 let mut before_map = available;
762 // Input columns include any additional undescribed columns that may
763 // not be captured by the `available` argument, so we must independently
764 // track the current number of columns (vs relying on `before_map.len()`).
765 let mut input_columns = input_arity;
766 for index in self.input_arity..available_expr.len() {
767 if available_expr[index] {
768 before_map.insert(index, input_columns);
769 input_columns += 1;
770 }
771 }
772
773 // Permute the column references in `before` expressions and predicates.
774 for expr in before_expr.iter_mut() {
775 expr.permute_map(&before_map);
776 }
777 for pred in before_pred.iter_mut() {
778 pred.permute_map(&before_map);
779 }
780
        // Demand information determines `before`'s output projection.
782 // Specifically, we produce all input columns in the output, as well as
783 // any columns that are available and demanded.
784 before_proj.extend(0..input_arity);
785 for index in self.input_arity..available_expr.len() {
786 // If an intermediate result is both available and demanded,
787 // we should produce it as output.
788 if available_expr[index] && demanded.contains(&index) {
789 // Use the new location of `index`.
790 before_proj.push(before_map[&index]);
791 }
792 }
793
794 // Map from prior output locations to location in post-`before` columns.
795 // This map is used to correct references in `after`.
796 // The presumption is that `after` will be presented with all input columns,
797 // followed by the output columns introduced by `before` in order.
798 let mut after_map = BTreeMap::new();
799 for index in 0..self.input_arity {
800 after_map.insert(index, index);
801 }
802 for index in self.input_arity..available_expr.len() {
803 // If an intermediate result is both available and demanded,
804 // it was produced as output.
805 if available_expr[index] && demanded.contains(&index) {
806 // We expect to find the output as far after `self.input_arity` as
807 // it was produced after `input_arity` in the output of `before`.
808 let location = self.input_arity
809 + (before_proj
810 .iter()
811 .position(|x| x == &before_map[&index])
812 .unwrap()
813 - input_arity);
814 after_map.insert(index, location);
815 }
816 }
        // We must now re-map the remaining expressions that were not available, which
        // `after` will append contiguously rather than potentially interspersed.
819 for index in self.input_arity..available_expr.len() {
820 if !available_expr[index] {
821 after_map.insert(index, after_map.len());
822 }
823 }
824
825 // Permute the column references in `after` expressions and predicates.
826 for expr in after_expr.iter_mut() {
827 expr.permute_map(&after_map);
828 }
829 for pred in after_pred.iter_mut() {
830 pred.permute_map(&after_map);
831 }
832 // Populate `after` projection with the new locations of `self.projection`.
833 for index in self.projection {
834 after_proj.push(after_map[&index]);
835 }
836
837 // Form and return the before and after MapFilterProject instances.
838 let before = Self::new(input_arity)
839 .map(before_expr)
840 .filter(before_pred)
841 .project(before_proj.clone());
842 let after = Self::new(self.input_arity + (before_proj.len() - input_arity))
843 .map(after_expr)
844 .filter(after_pred)
845 .project(after_proj);
846 (before, after)
847 }
848
    /// Lists input columns whose values are used in outputs.
    ///
    /// It is entirely appropriate to determine the demand of an instance
    /// and then both apply a projection to the subject of the instance and
    /// `self.permute_fn` this instance.
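    ///
    /// # Example
    ///
    /// A sketch of demand analysis; the expression is illustrative.
    ///
    /// ```rust
    /// use mz_expr::{MapFilterProject, MirScalarExpr, func};
    ///
    /// // Only columns 0 and 2 of the three inputs feed the output.
    /// let mfp = MapFilterProject::new(3)
    ///     .map(vec![
    ///         MirScalarExpr::column(0).call_binary(MirScalarExpr::column(2), func::AddInt64),
    ///     ])
    ///     .project(vec![3]);
    /// let demand = mfp.demand();
    /// assert!(demand.contains(&0) && demand.contains(&2));
    /// assert!(!demand.contains(&1));
    /// ```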
854 pub fn demand(&self) -> BTreeSet<usize> {
855 let mut demanded = BTreeSet::new();
856 for (_index, pred) in self.predicates.iter() {
857 demanded.extend(pred.support());
858 }
859 demanded.extend(self.projection.iter().cloned());
860 for index in (0..self.expressions.len()).rev() {
861 if demanded.contains(&(self.input_arity + index)) {
862 demanded.extend(self.expressions[index].support());
863 }
864 }
865 demanded.retain(|col| col < &self.input_arity);
866 demanded
867 }
868
    /// Update input column references, due to an input projection or permutation.
    ///
    /// The `remap` function maps expected column identifiers to new locations,
    /// with the expectation that it covers all demanded input columns, and that
    /// intermediate results will be able to start at position `new_input_arity`.
    ///
    /// The supplied `remap` might not cover columns that are not "demanded" by the
    /// instance, and so we should ensure that `self` is optimized to not reference
    /// columns that are not demanded.
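    ///
    /// # Example
    ///
    /// A sketch of updating an operator whose two inputs arrive in swapped
    /// positions; the expression is illustrative.
    ///
    /// ```rust
    /// use mz_expr::{MapFilterProject, MirScalarExpr, func};
    ///
    /// // An operator over inputs (a, b) that outputs their sum.
    /// let mut mfp = MapFilterProject::new(2)
    ///     .map(vec![
    ///         MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::AddInt64),
    ///     ])
    ///     .project(vec![2]);
    /// // The input is now presented as (b, a): column 0 is found at 1 and vice versa.
    /// mfp.permute_fn(|c| 1 - c, 2);
    /// assert_eq!(
    ///     mfp,
    ///     MapFilterProject::new(2)
    ///         .map(vec![
    ///             MirScalarExpr::column(1).call_binary(MirScalarExpr::column(0), func::AddInt64),
    ///         ])
    ///         .project(vec![2]),
    /// );
    /// ```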
878 pub fn permute_fn<F>(&mut self, remap: F, new_input_arity: usize)
879 where
880 F: Fn(usize) -> usize,
881 {
882 let (mut map, mut filter, mut project) = self.as_map_filter_project();
883 let map_len = map.len();
884 let action = |col: &mut usize| {
885 if self.input_arity <= *col && *col < self.input_arity + map_len {
886 *col = new_input_arity + (*col - self.input_arity);
887 } else {
888 *col = remap(*col);
889 }
890 };
891 for expr in map.iter_mut() {
892 expr.visit_columns(action);
893 }
894 for pred in filter.iter_mut() {
895 pred.visit_columns(action);
896 }
897 for proj in project.iter_mut() {
898 action(proj);
899 assert!(*proj < new_input_arity + map.len());
900 }
901 *self = Self::new(new_input_arity)
902 .map(map)
903 .filter(filter)
904 .project(project)
905 }
906}
907
908// Optimization routines.
909impl MapFilterProject {
910 /// Optimize the internal expression evaluation order.
911 ///
912 /// This method performs several optimizations that are meant to streamline
913 /// the execution of the `MapFilterProject` instance, but not to alter its
914 /// semantics. This includes extracting expressions that are used multiple
915 /// times, inlining those that are not, and removing expressions that are
916 /// unreferenced.
917 ///
918 /// This method will inline all temporal expressions, and remove any columns
919 /// that are not demanded by the output, which should transform any temporal
920 /// filters to a state where the temporal expressions exist only in the list
921 /// of predicates.
922 ///
923 /// # Example
924 ///
925 /// This example demonstrates how the re-use of one expression, converting
926 /// column 1 from a string to an integer, can be extracted and the results
927 /// shared among the two uses. This example is used for each of the steps
928 /// along the optimization path.
929 ///
930 /// ```rust
931 /// use mz_expr::{func, MapFilterProject, MirScalarExpr, UnaryFunc, BinaryFunc};
932 /// // Demonstrate extraction of common expressions (here: parsing strings).
933 /// let mut map_filter_project = MapFilterProject::new(5)
934 /// .map(vec![
935 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(1).call_unary(func::CastStringToInt64), func::AddInt64),
936 /// MirScalarExpr::column(1).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(2).call_unary(func::CastStringToInt64), func::AddInt64),
937 /// ])
938 /// .project(vec![3,4,5,6]);
939 ///
940 /// let mut expected_optimized = MapFilterProject::new(5)
941 /// .map(vec![
942 /// MirScalarExpr::column(1).call_unary(func::CastStringToInt64),
943 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(5), func::AddInt64),
944 /// MirScalarExpr::column(5).call_binary(MirScalarExpr::column(2).call_unary(func::CastStringToInt64), func::AddInt64),
945 /// ])
946 /// .project(vec![3,4,6,7]);
947 ///
948 /// // Optimize the expression.
949 /// map_filter_project.optimize();
950 ///
951 /// assert_eq!(
952 /// map_filter_project,
953 /// expected_optimized,
954 /// );
955 /// ```
956 pub fn optimize(&mut self) {
957 // Track sizes and iterate as long as they decrease.
958 let mut prev_size = None;
959 let mut self_size = usize::max_value();
960 // Continue as long as strict improvements occur.
961 while prev_size.map(|p| self_size < p).unwrap_or(true) {
962 // Lock in current size.
963 prev_size = Some(self_size);
964
            // We have an annoying pattern of mapping literals whose values already exist in prior
            // columns (as ensured by filters).
            // Try to identify this pattern, of a map that introduces an expression equated to a prior column,
            // and then replace the mapped expression by a column reference.
            //
            // We think this is due to `LiteralLifting`, and we might investigate removing the introduction in
            // the first place. The tell-tale sign that we see when we fix this is a diff that looks like
971 //
972 // - Project (#0, #2)
973 // - Filter (#1 = 1)
974 // - Map (1)
975 // - Get l0
976 // + Filter (#1 = 1)
977 // + Get l0
978 //
979 for (index, expr) in self.expressions.iter_mut().enumerate() {
980 // If `expr` matches a filter equating it to a column < index + input_arity, rewrite it
981 for (_, predicate) in self.predicates.iter() {
982 if let MirScalarExpr::CallBinary {
983 func: crate::BinaryFunc::Eq(_),
984 expr1,
985 expr2,
986 } = predicate
987 {
988 if let MirScalarExpr::Column(c, name) = &**expr1 {
989 if *c < index + self.input_arity && &**expr2 == expr {
990 *expr = MirScalarExpr::Column(*c, name.clone());
991 }
992 }
993 if let MirScalarExpr::Column(c, name) = &**expr2 {
994 if *c < index + self.input_arity && &**expr1 == expr {
995 *expr = MirScalarExpr::Column(*c, name.clone());
996 }
997 }
998 }
999 }
1000 }
1001
1002 // Optimization memoizes individual `ScalarExpr` expressions that
1003 // are sure to be evaluated, canonicalizes references to the first
1004 // occurrence of each, inlines expressions that have a reference
1005 // count of one, and then removes any expressions that are not
1006 // referenced.
1007 self.memoize_expressions();
1008 self.predicates.sort();
1009 self.predicates.dedup();
1010 self.inline_expressions();
1011 self.remove_undemanded();
1012
1013 // Re-build `self` from parts to restore evaluation order invariants.
1014 let (map, filter, project) = self.as_map_filter_project();
1015 *self = Self::new(self.input_arity)
1016 .map(map)
1017 .filter(filter)
1018 .project(project);
1019
1020 self_size = self.size();
1021 }
1022 }
1023
    /// Total expression size, summed across all expressions and predicates.
1025 pub fn size(&self) -> usize {
1026 self.expressions.iter().map(|e| e.size()).sum::<usize>()
1027 + self.predicates.iter().map(|(_, e)| e.size()).sum::<usize>()
1028 }
1029
1030 /// Place each certainly evaluated expression in its own column.
1031 ///
1032 /// This method places each non-trivial, certainly evaluated expression
1033 /// in its own column, and deduplicates them so that all references to
1034 /// the same expression reference the same column.
1035 ///
1036 /// This transformation is restricted to expressions we are certain will
1037 /// be evaluated, which does not include expressions in `if` statements.
1038 ///
1039 /// # Example
1040 ///
1041 /// This example demonstrates how memoization notices `MirScalarExpr`s
1042 /// that are used multiple times, and ensures that each are extracted
1043 /// into columns and then referenced by column. This pass does not try
1044 /// to minimize the occurrences of column references, which will happen
1045 /// in inlining.
1046 ///
1047 /// ```rust
1048 /// use mz_expr::{func, MapFilterProject, MirScalarExpr, UnaryFunc, BinaryFunc};
1049 /// // Demonstrate extraction of common expressions (here: parsing strings).
1050 /// let mut map_filter_project = MapFilterProject::new(5)
1051 /// .map(vec![
1052 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(1).call_unary(func::CastStringToInt64), func::AddInt64),
1053 /// MirScalarExpr::column(1).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(2).call_unary(func::CastStringToInt64), func::AddInt64),
1054 /// ])
1055 /// .project(vec![3,4,5,6]);
1056 ///
1057 /// let mut expected_optimized = MapFilterProject::new(5)
1058 /// .map(vec![
1059 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64),
1060 /// MirScalarExpr::column(1).call_unary(func::CastStringToInt64),
1061 /// MirScalarExpr::column(5).call_binary(MirScalarExpr::column(6), func::AddInt64),
1062 /// MirScalarExpr::column(7),
1063 /// MirScalarExpr::column(2).call_unary(func::CastStringToInt64),
1064 /// MirScalarExpr::column(6).call_binary(MirScalarExpr::column(9), func::AddInt64),
1065 /// MirScalarExpr::column(10),
1066 /// ])
1067 /// .project(vec![3,4,8,11]);
1068 ///
1069 /// // Memoize expressions, ensuring uniqueness of each `MirScalarExpr`.
1070 /// map_filter_project.memoize_expressions();
1071 ///
1072 /// assert_eq!(
1073 /// map_filter_project,
1074 /// expected_optimized,
1075 /// );
1076 /// ```
1077 ///
1078 /// Expressions may not be memoized if they are not certain to be evaluated,
1079 /// for example if they occur in conditional branches of a `MirScalarExpr::If`.
1080 ///
1081 /// ```rust
1082 /// use mz_expr::{func, MapFilterProject, MirScalarExpr, UnaryFunc, BinaryFunc};
1083 /// // Demonstrate extraction of unconditionally evaluated expressions, as well as
1084 /// // the non-extraction of common expressions guarded by conditions.
1085 /// let mut map_filter_project = MapFilterProject::new(2)
1086 /// .map(vec![
1087 /// MirScalarExpr::If {
1088 /// cond: Box::new(MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::Lt)),
1089 /// then: Box::new(MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::DivInt64)),
1090 /// els: Box::new(MirScalarExpr::column(1).call_binary(MirScalarExpr::column(0), func::DivInt64)),
1091 /// },
1092 /// MirScalarExpr::If {
1093 /// cond: Box::new(MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::Lt)),
1094 /// then: Box::new(MirScalarExpr::column(1).call_binary(MirScalarExpr::column(0), func::DivInt64)),
1095 /// els: Box::new(MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::DivInt64)),
1096 /// },
1097 /// ]);
1098 ///
1099 /// let mut expected_optimized = MapFilterProject::new(2)
1100 /// .map(vec![
1101 /// MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::Lt),
1102 /// MirScalarExpr::If {
1103 /// cond: Box::new(MirScalarExpr::column(2)),
1104 /// then: Box::new(MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::DivInt64)),
1105 /// els: Box::new(MirScalarExpr::column(1).call_binary(MirScalarExpr::column(0), func::DivInt64)),
1106 /// },
1107 /// MirScalarExpr::column(3),
1108 /// MirScalarExpr::If {
1109 /// cond: Box::new(MirScalarExpr::column(2)),
1110 /// then: Box::new(MirScalarExpr::column(1).call_binary(MirScalarExpr::column(0), func::DivInt64)),
1111 /// els: Box::new(MirScalarExpr::column(0).call_binary(MirScalarExpr::column(1), func::DivInt64)),
1112 /// },
1113 /// MirScalarExpr::column(5),
1114 /// ])
1115 /// .project(vec![0,1,4,6]);
1116 ///
1117 /// // Memoize expressions, ensuring uniqueness of each `MirScalarExpr`.
1118 /// map_filter_project.memoize_expressions();
1119 ///
1120 /// assert_eq!(
1121 /// map_filter_project,
1122 /// expected_optimized,
1123 /// );
1124 /// ```
1125 pub fn memoize_expressions(&mut self) {
1126 // Record the mapping from starting column references to new column
1127 // references.
1128 let mut remaps = BTreeMap::new();
1129 for index in 0..self.input_arity {
1130 remaps.insert(index, index);
1131 }
1132 let mut new_expressions = Vec::new();
1133
1134 // We follow the same order as for evaluation, to ensure that all
1135 // column references exist in time for their evaluation. We could
1136 // prioritize predicates, but we would need to be careful to chase
1137 // down column references to expressions and memoize those as well.
1138 let mut expression = 0;
1139 for (support, predicate) in self.predicates.iter_mut() {
1140 while self.input_arity + expression < *support {
1141 self.expressions[expression].permute_map(&remaps);
1142 memoize_expr(
1143 &mut self.expressions[expression],
1144 &mut new_expressions,
1145 self.input_arity,
1146 );
1147 remaps.insert(
1148 self.input_arity + expression,
1149 self.input_arity + new_expressions.len(),
1150 );
1151 new_expressions.push(self.expressions[expression].clone());
1152 expression += 1;
1153 }
1154 predicate.permute_map(&remaps);
1155 memoize_expr(predicate, &mut new_expressions, self.input_arity);
1156 }
1157 while expression < self.expressions.len() {
1158 self.expressions[expression].permute_map(&remaps);
1159 memoize_expr(
1160 &mut self.expressions[expression],
1161 &mut new_expressions,
1162 self.input_arity,
1163 );
1164 remaps.insert(
1165 self.input_arity + expression,
1166 self.input_arity + new_expressions.len(),
1167 );
1168 new_expressions.push(self.expressions[expression].clone());
1169 expression += 1;
1170 }
1171
1172 self.expressions = new_expressions;
1173 for proj in self.projection.iter_mut() {
1174 *proj = remaps[proj];
1175 }
1176
1177 // Restore predicate order invariants.
1178 for (pos, pred) in self.predicates.iter_mut() {
1179 *pos = pred.support().into_iter().max().map(|x| x + 1).unwrap_or(0);
1180 }
1181 }
1182
1183 /// This method inlines expressions with a single use.
1184 ///
1185 /// This method only inlines expressions; it does not delete expressions
1186 /// that are no longer referenced. The `remove_undemanded()` method does
1187 /// that, and should likely be used after this method.
1188 ///
1189 /// Inlining replaces column references when the referred-to item is either
1190 /// another column reference, or the only referrer of its referent. This
1191 /// is most common after memoization has atomized all expressions to seek
1192 /// out re-use: inlining re-assembles expressions that were not helpfully
1193 /// shared with other expressions.
1194 ///
1195 /// # Example
1196 ///
1197 /// In this example, we see that with only a single reference to columns
1198 /// 0 and 2, their parsing can each be inlined. Similarly, column references
1199 /// can be cleaned up among expressions, and in the final projection.
1200 ///
1201 /// Also notice the remaining expressions, which can be cleaned up in a later
1202 /// pass (the `remove_undemanded` method).
1203 ///
1204 /// ```rust
1205 /// use mz_expr::{func, MapFilterProject, MirScalarExpr, UnaryFunc, BinaryFunc};
1206 /// // Use the output from first `memoize_expression` example.
1207 /// let mut map_filter_project = MapFilterProject::new(5)
1208 /// .map(vec![
1209 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64),
1210 /// MirScalarExpr::column(1).call_unary(func::CastStringToInt64),
1211 /// MirScalarExpr::column(5).call_binary(MirScalarExpr::column(6), func::AddInt64),
1212 /// MirScalarExpr::column(7),
1213 /// MirScalarExpr::column(2).call_unary(func::CastStringToInt64),
1214 /// MirScalarExpr::column(6).call_binary(MirScalarExpr::column(9), func::AddInt64),
1215 /// MirScalarExpr::column(10),
1216 /// ])
1217 /// .project(vec![3,4,8,11]);
1218 ///
1219 /// let mut expected_optimized = MapFilterProject::new(5)
1220 /// .map(vec![
1221 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64),
1222 /// MirScalarExpr::column(1).call_unary(func::CastStringToInt64),
1223 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(6), func::AddInt64),
1224 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(6), func::AddInt64),
1225 /// MirScalarExpr::column(2).call_unary(func::CastStringToInt64),
1226 /// MirScalarExpr::column(6).call_binary(MirScalarExpr::column(2).call_unary(func::CastStringToInt64), func::AddInt64),
1227 /// MirScalarExpr::column(6).call_binary(MirScalarExpr::column(2).call_unary(func::CastStringToInt64), func::AddInt64),
1228 /// ])
1229 /// .project(vec![3,4,8,11]);
1230 ///
1231 /// // Inline expressions that are referenced only once.
1232 /// map_filter_project.inline_expressions();
1233 ///
1234 /// assert_eq!(
1235 /// map_filter_project,
1236 /// expected_optimized,
1237 /// );
1238 /// ```
1239 pub fn inline_expressions(&mut self) {
1240 // Local copy of input_arity to avoid borrowing `self` in closures.
1241 let input_arity = self.input_arity;
1242 // Reference counts track the number of places that a reference occurs.
1243 let mut reference_count = vec![0; input_arity + self.expressions.len()];
1244 // Increment reference counts for each use
1245 for expr in self.expressions.iter() {
1246 expr.visit_pre(|e| {
1247 if let MirScalarExpr::Column(i, _name) = e {
1248 reference_count[*i] += 1;
1249 }
1250 });
1251 }
1252 for (_, pred) in self.predicates.iter() {
1253 pred.visit_pre(|e| {
1254 if let MirScalarExpr::Column(i, _name) = e {
1255 reference_count[*i] += 1;
1256 }
1257 });
1258 }
1259 for proj in self.projection.iter() {
1260 reference_count[*proj] += 1;
1261 }
1262
1263 // Determine which expressions should be inlined because they reference temporal expressions.
1264 let mut is_temporal = vec![false; input_arity];
1265 for expr in self.expressions.iter() {
            // An expression may contain a temporal expression, or reference a column containing such.
1267 is_temporal.push(
1268 expr.contains_temporal() || expr.support().into_iter().any(|col| is_temporal[col]),
1269 );
1270 }
1271
1272 // Inline only those columns that 1. are expressions not inputs, and
1273 // 2a. are column references or literals or 2b. have a refcount of 1,
1274 // or 2c. reference temporal expressions (which cannot be evaluated).
1275 let mut should_inline = vec![false; reference_count.len()];
1276 for i in (input_arity..reference_count.len()).rev() {
1277 if let MirScalarExpr::Column(c, _) = self.expressions[i - input_arity] {
1278 should_inline[i] = true;
1279 // The reference count of the referenced column should be
1280 // incremented with the number of references
1281 // `self.expressions[i - input_arity]` has.
1282 // Subtract 1 because `self.expressions[i - input_arity]` is
1283 // itself a reference.
1284 reference_count[c] += reference_count[i] - 1;
1285 } else {
1286 should_inline[i] = reference_count[i] == 1 || is_temporal[i];
1287 }
1288 }
1289 // Inline expressions per `should_inline`.
1290 self.perform_inlining(should_inline);
1291 // We can only inline column references in `self.projection`, but we should.
1292 for proj in self.projection.iter_mut() {
1293 if *proj >= self.input_arity {
1294 if let MirScalarExpr::Column(i, _) = self.expressions[*proj - self.input_arity] {
1295 // TODO(mgree) !!! propagate name information to projection
1296 *proj = i;
1297 }
1298 }
1299 }
1300 }
1301
1302 /// Inlines those expressions that are indicated by should_inline.
1303 /// See `inline_expressions` for usage.
1304 pub fn perform_inlining(&mut self, should_inline: Vec<bool>) {
1305 for index in 0..self.expressions.len() {
1306 let (prior, expr) = self.expressions.split_at_mut(index);
1307 #[allow(deprecated)]
1308 expr[0].visit_mut_post_nolimit(&mut |e| {
1309 if let MirScalarExpr::Column(i, _name) = e {
1310 if should_inline[*i] {
1311 *e = prior[*i - self.input_arity].clone();
1312 }
1313 }
1314 });
1315 }
1316 for (_index, pred) in self.predicates.iter_mut() {
1317 let expressions = &self.expressions;
1318 #[allow(deprecated)]
1319 pred.visit_mut_post_nolimit(&mut |e| {
1320 if let MirScalarExpr::Column(i, _name) = e {
1321 if should_inline[*i] {
1322 *e = expressions[*i - self.input_arity].clone();
1323 }
1324 }
1325 });
1326 }
1327 }
1328
1329 /// Removes unused expressions from `self.expressions`.
1330 ///
1331 /// Expressions are "used" if they are relied upon by any output columns
1332 /// or any predicates, even transitively. Any expressions that are not
1333 /// relied upon in this way can be discarded.
1334 ///
1335 /// # Example
1336 ///
1337 /// ```rust
1338 /// use mz_expr::{func, MapFilterProject, MirScalarExpr, UnaryFunc, BinaryFunc};
1339 /// // Use the output from `inline_expression` example.
1340 /// let mut map_filter_project = MapFilterProject::new(5)
1341 /// .map(vec![
1342 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64),
1343 /// MirScalarExpr::column(1).call_unary(func::CastStringToInt64),
1344 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(6), func::AddInt64),
1345 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(6), func::AddInt64),
1346 /// MirScalarExpr::column(2).call_unary(func::CastStringToInt64),
1347 /// MirScalarExpr::column(6).call_binary(MirScalarExpr::column(2).call_unary(func::CastStringToInt64), func::AddInt64),
1348 /// MirScalarExpr::column(6).call_binary(MirScalarExpr::column(2).call_unary(func::CastStringToInt64), func::AddInt64),
1349 /// ])
1350 /// .project(vec![3,4,8,11]);
1351 ///
1352 /// let mut expected_optimized = MapFilterProject::new(5)
1353 /// .map(vec![
1354 /// MirScalarExpr::column(1).call_unary(func::CastStringToInt64),
1355 /// MirScalarExpr::column(0).call_unary(func::CastStringToInt64).call_binary(MirScalarExpr::column(5), func::AddInt64),
1356 /// MirScalarExpr::column(5).call_binary(MirScalarExpr::column(2).call_unary(func::CastStringToInt64), func::AddInt64),
1357 /// ])
1358 /// .project(vec![3,4,6,7]);
1359 ///
    /// // Remove undemanded expressions, streamlining the work done.
1361 /// map_filter_project.remove_undemanded();
1362 ///
1363 /// assert_eq!(
1364 /// map_filter_project,
1365 /// expected_optimized,
1366 /// );
1367 /// ```
1368 pub fn remove_undemanded(&mut self) {
1369 // Determine the demanded expressions to remove irrelevant ones.
1370 let mut demand = BTreeSet::new();
1371 for (_index, pred) in self.predicates.iter() {
1372 demand.extend(pred.support());
1373 }
1374 // Start from the output columns as presumed demanded.
1375 // If this is not the case, the caller should project some away.
1376 demand.extend(self.projection.iter().cloned());
1377 // Proceed in *reverse* order, as expressions may depend on other
1378 // expressions that precede them.
1379 for index in (0..self.expressions.len()).rev() {
1380 if demand.contains(&(self.input_arity + index)) {
1381 demand.extend(self.expressions[index].support());
1382 }
1383 }
1384
1385 // Maintain a map from initial column identifiers to locations
1386 // once we have removed undemanded expressions.
1387 let mut remap = BTreeMap::new();
1388 // This map only needs to map elements of `demand` to a new location,
1389 // but the logic is easier if we include all input columns (as the
1390 // new position is then determined by the size of the map).
1391 for index in 0..self.input_arity {
1392 remap.insert(index, index);
1393 }
1394 // Retain demanded expressions, and record their new locations.
1395 let mut new_expressions = Vec::new();
1396 for (index, expr) in self.expressions.drain(..).enumerate() {
1397 if demand.contains(&(index + self.input_arity)) {
1398 remap.insert(index + self.input_arity, remap.len());
1399 new_expressions.push(expr);
1400 }
1401 }
1402 self.expressions = new_expressions;
1403
1404 // Update column identifiers; rebuild `Self` to re-establish any invariants.
1405 // We mirror `self.permute(&remap)` but we specifically want to remap columns
1406 // that are produced by `self.expressions` after the input columns.
1407 let (expressions, predicates, projection) = self.as_map_filter_project();
1408 *self = Self::new(self.input_arity)
1409 .map(expressions.into_iter().map(|mut e| {
1410 e.permute_map(&remap);
1411 e
1412 }))
1413 .filter(predicates.into_iter().map(|mut p| {
1414 p.permute_map(&remap);
1415 p
1416 }))
1417 .project(projection.into_iter().map(|c| remap[&c]));
1418 }
1419}
1420
1421// TODO: move this elsewhere?
1422/// Recursively memoize parts of `expr`, storing those parts in `memoized_parts`.
1423///
1424/// A part of `expr` that is memoized is replaced by a reference to column
1425/// `(input_arity + pos)`, where `pos` is the position of the memoized part in
1426/// `memoized_parts`, and `input_arity` is the arity of the input that `expr`
1427/// refers to.
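///
/// As an illustrative sketch (not a compiled example), write `#c` for column `c`
/// and let `+` and `*` stand in for arbitrary binary functions. With
/// `input_arity = 2` and an initially empty `memoized_parts`, memoizing
/// `(#0 + #1) * (#0 + #1)` proceeds bottom-up: both occurrences of `#0 + #1`
/// collapse to a reference to the first memoized column, and the outer
/// multiplication is then memoized as well.
///
/// ```ignore
/// expr before:     (#0 + #1) * (#0 + #1)
/// expr after:      #3
/// memoized_parts:  [#0 + #1, #2 * #2]    // these become columns #2 and #3
/// ```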
1428pub fn memoize_expr(
1429 expr: &mut MirScalarExpr,
1430 memoized_parts: &mut Vec<MirScalarExpr>,
1431 input_arity: usize,
1432) {
1433 #[allow(deprecated)]
1434 expr.visit_mut_pre_post_nolimit(
1435 &mut |e| {
1436 // We should not eagerly memoize `if` branches that might not be taken.
1437 // TODO: Memoize expressions in the intersection of `then` and `els`.
1438 if let MirScalarExpr::If { cond, .. } = e {
1439 return Some(vec![cond]);
1440 }
1441
1442 // We should not eagerly memoize `COALESCE` expressions after the first,
1443 // as they are only meant to be evaluated if the preceding expressions
1444            // evaluate to NULL. We could memoize any preceding expressions that
1445 // are certain not to error.
1446 if let MirScalarExpr::CallVariadic {
1447 func: crate::VariadicFunc::Coalesce,
1448 exprs,
1449 } = e
1450 {
1451 return Some(exprs.iter_mut().take(1).collect());
1452 }
1453
1454 // We should not deconstruct temporal filters, because `MfpPlan::create_from` expects
1455 // those to be in a specific form. However, we _should_ attend to the expression that is
1456 // on the opposite side of mz_now(), because it might be a complex expression in itself,
1457 // and is ok to deconstruct.
1458            if let Ok((_func, other_side)) = e.as_mut_temporal_filter() {
1459 return Some(vec![other_side]);
1460 }
1461
1462 None
1463 },
1464 &mut |e| {
1465 match e {
1466 MirScalarExpr::Literal(_, _) => {
1467 // Literals do not need to be memoized.
1468 }
1469 MirScalarExpr::Column(col, _) => {
1470 // Column references do not need to be memoized, but may need to be
1471 // updated if they reference a column reference themselves.
1472                    if *col >= input_arity {
1473 if let MirScalarExpr::Column(col2, _) = memoized_parts[*col - input_arity] {
1474 // We do _not_ propagate column names, since mis-associating names and column
1475 // references will be very confusing (and possibly bug-inducing).
1476 *col = col2;
1477 }
1478 }
1479 }
1480 _ => {
1481 // TODO: OOO (Optimizer Optimization Opportunity):
1482 // we are quadratic in expression size because of this .iter().position
1483 if let Some(position) = memoized_parts.iter().position(|e2| e2 == e) {
1484 // Any complex expression that already exists as a prior column can
1485 // be replaced by a reference to that column.
1486 *e = MirScalarExpr::column(input_arity + position);
1487 } else {
1488 // A complex expression that does not exist should be memoized, and
1489 // replaced by a reference to the column.
1490 memoized_parts.push(std::mem::replace(
1491 e,
1492 MirScalarExpr::column(input_arity + memoized_parts.len()),
1493 ));
1494 }
1495 }
1496 }
1497 },
1498 )
1499}
1500
1501pub mod util {
1502 use std::collections::BTreeMap;
1503
1504 use crate::MirScalarExpr;
1505
1506 #[allow(dead_code)]
1507 /// A triple of actions that map from rows to (key, val) pairs and back again.
1508 struct KeyValRowMapping {
1509 /// Expressions to apply to a row to produce key datums.
1510 to_key: Vec<MirScalarExpr>,
1511 /// Columns to project from a row to produce residual value datums.
1512 to_val: Vec<usize>,
1513 /// Columns to project from the concatenation of key and value to reconstruct the row.
1514 to_row: Vec<usize>,
1515 }
1516
1517    /// Derive the logic needed to transform rows into (key, val) pairs,
1518    /// and back again.
1519    ///
1520    /// We are given a list of key expressions and an input arity, with the
1521    /// requirement that the produced key be the application of the key expressions.
1522    /// To produce the `val` output, we identify those input columns that do not appear
1523    /// as bare column references among the key expressions; all other columns can be recovered from the key.
1524 /// To reconstitute the original row, we identify the sequence of columns from the
1525 /// concatenation of key and val which would reconstruct the original row.
1526 ///
1527 /// The output is a pair of column sequences, the first used to reconstruct a row
1528 /// from the concatenation of key and value, and the second to identify the columns
1529 /// of a row that should become the value associated with its key.
1530 ///
1531 /// The permutations and thinning expressions generated here will be tracked in
1532 /// `dataflow::plan::AvailableCollections`; see the
1533 /// documentation there for more details.
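    ///
    /// As a small worked illustration (writing `#c` for column `c`): with
    /// `key = [#1]` and `unthinned_arity = 3`, column `#1` is recoverable from the
    /// key, while `#0` and `#2` form the thinned value.
    ///
    /// ```ignore
    /// thinning:    [0, 2]     // row columns that become the value
    /// permutation: [1, 0, 2]  // original column c sits at permutation[c] in key ++ value
    /// ```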
1534 pub fn permutation_for_arrangement(
1535 key: &[MirScalarExpr],
1536 unthinned_arity: usize,
1537 ) -> (Vec<usize>, Vec<usize>) {
1538 let columns_in_key: BTreeMap<_, _> = key
1539 .iter()
1540 .enumerate()
1541 .filter_map(|(i, key_col)| key_col.as_column().map(|c| (c, i)))
1542 .collect();
1543 let mut input_cursor = key.len();
1544 let permutation = (0..unthinned_arity)
1545 .map(|c| {
1546 if let Some(c) = columns_in_key.get(&c) {
1547 // Column is in key (and thus gone from the value
1548 // of the thinned representation)
1549 *c
1550 } else {
1551 // Column remains in value of the thinned representation
1552 input_cursor += 1;
1553 input_cursor - 1
1554 }
1555 })
1556 .collect();
1557 let thinning = (0..unthinned_arity)
1558 .filter(|c| !columns_in_key.contains_key(c))
1559 .collect();
1560 (permutation, thinning)
1561 }
1562
1563    /// Given the permutations (see [`permutation_for_arrangement`] and
1564    /// `dataflow::plan::AvailableCollections`) corresponding to two
1565 /// collections with the same key arity,
1566 /// computes the permutation for the result of joining them.
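    ///
    /// As a worked illustration, continuing the example above: a stream arranged by
    /// `key = [#1]` has `stream_permutation = [1, 0, 2]` and `thinned_stream_arity = 2`;
    /// a two-column lookup collection arranged by `key = [#0]` has
    /// `lookup_permutation = [0, 1]`. With `key_arity = 1`, the physical joined row is
    /// `key ++ thinned stream ++ thinned lookup`, and the returned map locates each
    /// logical output column within it:
    ///
    /// ```ignore
    /// {0: 1, 1: 0, 2: 2, 3: 0, 4: 3}
    /// ```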
1567 pub fn join_permutations(
1568 key_arity: usize,
1569 stream_permutation: Vec<usize>,
1570 thinned_stream_arity: usize,
1571 lookup_permutation: Vec<usize>,
1572 ) -> BTreeMap<usize, usize> {
1573 let stream_arity = stream_permutation.len();
1574 let lookup_arity = lookup_permutation.len();
1575
1576 (0..stream_arity + lookup_arity)
1577 .map(|i| {
1578 let location = if i < stream_arity {
1579 stream_permutation[i]
1580 } else {
1581 let location_in_lookup = lookup_permutation[i - stream_arity];
1582 if location_in_lookup < key_arity {
1583 location_in_lookup
1584 } else {
1585 location_in_lookup + thinned_stream_arity
1586 }
1587 };
1588 (i, location)
1589 })
1590 .collect()
1591 }
1592}
1593
1594pub mod plan {
1595 use std::iter;
1596
1597 use mz_repr::{Datum, Diff, Row, RowArena};
1598 use serde::{Deserialize, Serialize};
1599
1600 use crate::{BinaryFunc, EvalError, MapFilterProject, MirScalarExpr, UnaryFunc, func};
1601
1602 /// A wrapper type which indicates it is safe to simply evaluate all expressions.
1603 #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Ord, PartialOrd)]
1604 pub struct SafeMfpPlan {
1605 pub(crate) mfp: MapFilterProject,
1606 }
1607
1608 impl SafeMfpPlan {
1609 /// Remaps references to input columns according to `remap`.
1610 ///
1611 /// Leaves other column references, e.g. to newly mapped columns, unchanged.
1612 pub fn permute_fn<F>(&mut self, remap: F, new_arity: usize)
1613 where
1614 F: Fn(usize) -> usize,
1615 {
1616 self.mfp.permute_fn(remap, new_arity);
1617 }
1618 /// Evaluates the linear operator on a supplied list of datums.
1619 ///
1620 /// The arguments are the initial datums associated with the row,
1621 /// and an appropriately lifetimed arena for temporary allocations
1622 /// needed by scalar evaluation.
1623 ///
1624 /// An `Ok` result will either be `None` if any predicate did not
1625 /// evaluate to `Datum::True`, or the values of the columns listed
1626 /// by `self.projection` if all predicates passed. If an error
1627 /// occurs in the evaluation it is returned as an `Err` variant.
1628 /// As the evaluation exits early with failed predicates, it may
1629 /// miss some errors that would occur later in evaluation.
1630 ///
1631        /// The `row_buf` is not cleared first, but emptied if the function
1632        /// returns `Ok(Some(row))`.
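        ///
        /// A minimal usage sketch, marked `ignore` because it assumes a `plan`
        /// wrapping `MapFilterProject::new(2).project(vec![1])` is already in scope
        /// (`Datum`, `Row`, and `RowArena` come from `mz_repr`):
        ///
        /// ```ignore
        /// let arena = RowArena::new();
        /// let mut row_buf = Row::default();
        /// let mut datums = vec![Datum::Int64(7), Datum::Int64(8)];
        /// // There are no predicates to fail, so we get back `row_buf` packed with column 1.
        /// let result = plan.evaluate_into(&mut datums, &arena, &mut row_buf);
        /// assert!(matches!(result, Ok(Some(_))));
        /// ```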
1633 #[inline(always)]
1634 pub fn evaluate_into<'a, 'row>(
1635 &'a self,
1636 datums: &mut Vec<Datum<'a>>,
1637 arena: &'a RowArena,
1638 row_buf: &'row mut Row,
1639 ) -> Result<Option<&'row Row>, EvalError> {
1640 let passed_predicates = self.evaluate_inner(datums, arena)?;
1641 if !passed_predicates {
1642 Ok(None)
1643 } else {
1644 row_buf
1645 .packer()
1646 .extend(self.mfp.projection.iter().map(|c| datums[*c]));
1647 Ok(Some(row_buf))
1648 }
1649 }
1650
1651        /// A version of [`Self::evaluate_into`] which produces an iterator over `Datum`
1652        /// as output.
1653 ///
1654 /// This version can be useful when one wants to capture the resulting
1655 /// datums without packing and then unpacking a row.
1656 #[inline(always)]
1657 pub fn evaluate_iter<'b, 'a: 'b>(
1658 &'a self,
1659 datums: &'b mut Vec<Datum<'a>>,
1660 arena: &'a RowArena,
1661 ) -> Result<Option<impl Iterator<Item = Datum<'a>> + 'b>, EvalError> {
1662 let passed_predicates = self.evaluate_inner(datums, arena)?;
1663 if !passed_predicates {
1664 Ok(None)
1665 } else {
1666 Ok(Some(self.mfp.projection.iter().map(move |i| datums[*i])))
1667 }
1668 }
1669
1670 /// Populates `datums` with `self.expressions` and tests `self.predicates`.
1671 ///
1672 /// This does not apply `self.projection`, which is up to the calling method.
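        ///
        /// For example, with `input_arity = 2`, `expressions = [e0, e1]`, and
        /// `predicates = [(3, p)]`, evaluation interleaves as:
        ///
        /// ```ignore
        /// e0   // appended as column #2
        /// p    // all columns below 3 are now available
        /// e1   // appended as column #3
        /// ```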
1673 pub fn evaluate_inner<'b, 'a: 'b>(
1674 &'a self,
1675 datums: &'b mut Vec<Datum<'a>>,
1676 arena: &'a RowArena,
1677 ) -> Result<bool, EvalError> {
1678 let mut expression = 0;
1679 for (support, predicate) in self.mfp.predicates.iter() {
1680 while self.mfp.input_arity + expression < *support {
1681 datums.push(self.mfp.expressions[expression].eval(&datums[..], arena)?);
1682 expression += 1;
1683 }
1684 if predicate.eval(&datums[..], arena)? != Datum::True {
1685 return Ok(false);
1686 }
1687 }
1688 while expression < self.mfp.expressions.len() {
1689 datums.push(self.mfp.expressions[expression].eval(&datums[..], arena)?);
1690 expression += 1;
1691 }
1692 Ok(true)
1693 }
1694
1695 /// Returns true if evaluation could introduce an error on non-error inputs.
1696 pub fn could_error(&self) -> bool {
1697 self.mfp.predicates.iter().any(|(_pos, e)| e.could_error())
1698 || self.mfp.expressions.iter().any(|e| e.could_error())
1699 }
1700
1701 /// Returns true when `Self` is the identity.
1702 pub fn is_identity(&self) -> bool {
1703 self.mfp.is_identity()
1704 }
1705 }
1706
1707 impl std::ops::Deref for SafeMfpPlan {
1708 type Target = MapFilterProject;
1709 fn deref(&self) -> &Self::Target {
1710 &self.mfp
1711 }
1712 }
1713
1714 /// Predicates partitioned into temporal and non-temporal.
1715 ///
1716 /// Temporal predicates require some recognition to determine their
1717 /// structure, and it is best to do that once and re-use the results.
1718 ///
1719 /// There are restrictions on the temporal predicates we currently support.
1720 /// They must directly constrain `MzNow` from below or above,
1721 /// by expressions that do not themselves contain `MzNow`.
1722 /// Conjunctions of such constraints are also ok.
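    ///
    /// For example (written SQL-style, purely as an illustration), predicates of
    /// these shapes are supported:
    ///
    /// ```ignore
    /// mz_now() >= col_a                          -- lower bound
    /// mz_now() <  col_b + 5                      -- upper bound
    /// mz_now() >= col_a AND mz_now() < col_b     -- conjunction of bounds
    /// ```
    ///
    /// whereas a predicate such as `mz_now() + 1 < col_b`, which nests `MzNow`
    /// inside a larger expression, is not.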
1723 #[derive(Clone, Debug, PartialEq)]
1724 pub struct MfpPlan {
1725 /// Normal predicates to evaluate on `&[Datum]` and expect `Ok(Datum::True)`.
1726 pub(crate) mfp: SafeMfpPlan,
1727 /// Expressions that when evaluated lower-bound `MzNow`.
1728 pub(crate) lower_bounds: Vec<MirScalarExpr>,
1729 /// Expressions that when evaluated upper-bound `MzNow`.
1730 pub(crate) upper_bounds: Vec<MirScalarExpr>,
1731 }
1732
1733 impl MfpPlan {
1734 /// Partitions `predicates` into non-temporal, and lower and upper temporal bounds.
1735 ///
1736 /// The first returned list is of predicates that do not contain `mz_now`.
1737 /// The second and third returned lists contain expressions that, once evaluated, lower
1738 /// and upper bound the validity interval of a record, respectively. These second two
1739 /// lists are populated only by binary expressions of the form
1740 /// ```ignore
1741 /// mz_now cmp_op expr
1742 /// ```
1743 /// where `cmp_op` is a comparison operator and `expr` does not contain `mz_now`.
1744 ///
1745 /// If any unsupported expression is found, for example one that uses `mz_now`
1746 /// in an unsupported position, an error is returned.
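        ///
        /// Writing a temporal predicate in the normalized form `mz_now() <op> expr`
        /// (with `expr` free of `mz_now`), and writing `step(expr)` for the
        /// `StepMzTimestamp` adjustment applied below, the recorded bounds are:
        ///
        /// ```ignore
        /// mz_now() =  expr    lower: expr          upper: step(expr)
        /// mz_now() <  expr                         upper: expr
        /// mz_now() <= expr                         upper: step(expr)
        /// mz_now() >  expr    lower: step(expr)
        /// mz_now() >= expr    lower: expr
        /// ```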
1747 pub fn create_from(mut mfp: MapFilterProject) -> Result<Self, String> {
1748 let mut lower_bounds = Vec::new();
1749 let mut upper_bounds = Vec::new();
1750
1751 let mut temporal = Vec::new();
1752
1753            // Optimize, to ensure that temporal predicates are moved into `mfp.predicates`.
1754 mfp.optimize();
1755
1756 mfp.predicates.retain(|(_position, predicate)| {
1757 if predicate.contains_temporal() {
1758 temporal.push(predicate.clone());
1759 false
1760 } else {
1761 true
1762 }
1763 });
1764
1765 for mut predicate in temporal.into_iter() {
1766 let (func, expr2) = predicate.as_mut_temporal_filter()?;
1767 let expr2 = expr2.clone();
1768
1769 // LogicalTimestamp <OP> <EXPR2> for several supported operators.
1770 match func {
1771 BinaryFunc::Eq(_) => {
1772 lower_bounds.push(expr2.clone());
1773 upper_bounds.push(
1774 expr2.call_unary(UnaryFunc::StepMzTimestamp(func::StepMzTimestamp)),
1775 );
1776 }
1777 BinaryFunc::Lt(_) => {
1778 upper_bounds.push(expr2.clone());
1779 }
1780 BinaryFunc::Lte(_) => {
1781 upper_bounds.push(
1782 expr2.call_unary(UnaryFunc::StepMzTimestamp(func::StepMzTimestamp)),
1783 );
1784 }
1785 BinaryFunc::Gt(_) => {
1786 lower_bounds.push(
1787 expr2.call_unary(UnaryFunc::StepMzTimestamp(func::StepMzTimestamp)),
1788 );
1789 }
1790 BinaryFunc::Gte(_) => {
1791 lower_bounds.push(expr2.clone());
1792 }
1793 _ => {
1794 return Err(format!("Unsupported binary temporal operation: {:?}", func));
1795 }
1796 }
1797 }
1798
1799 Ok(Self {
1800 mfp: SafeMfpPlan { mfp },
1801 lower_bounds,
1802 upper_bounds,
1803 })
1804 }
1805
1806 /// Indicates if the planned `MapFilterProject` emits exactly its inputs as outputs.
1807 pub fn is_identity(&self) -> bool {
1808 self.mfp.mfp.is_identity()
1809 && self.lower_bounds.is_empty()
1810 && self.upper_bounds.is_empty()
1811 }
1812
1813 /// Returns `self`, and leaves behind an identity operator that acts on its output.
1814 pub fn take(&mut self) -> Self {
1815 let mut identity = Self {
1816 mfp: SafeMfpPlan {
1817 mfp: MapFilterProject::new(self.mfp.projection.len()),
1818 },
1819 lower_bounds: Default::default(),
1820 upper_bounds: Default::default(),
1821 };
1822 std::mem::swap(self, &mut identity);
1823 identity
1824 }
1825
1826 /// Attempt to convert self into a non-temporal MapFilterProject plan.
1827 ///
1828 /// If that is not possible, the original instance is returned as an error.
1829 #[allow(clippy::result_large_err)]
1830 pub fn into_nontemporal(self) -> Result<SafeMfpPlan, Self> {
1831 if self.lower_bounds.is_empty() && self.upper_bounds.is_empty() {
1832 Ok(self.mfp)
1833 } else {
1834 Err(self)
1835 }
1836 }
1837
1838 /// Returns an iterator over mutable references to all non-temporal
1839 /// scalar expressions in the plan.
1840 ///
1841 /// The order of iteration is unspecified.
1842 pub fn iter_nontemporal_exprs(&mut self) -> impl Iterator<Item = &mut MirScalarExpr> {
1843 iter::empty()
1844 .chain(self.mfp.mfp.predicates.iter_mut().map(|(_, expr)| expr))
1845 .chain(&mut self.mfp.mfp.expressions)
1846 .chain(&mut self.lower_bounds)
1847 .chain(&mut self.upper_bounds)
1848 }
1849
1850 /// Evaluate the predicates, temporal and non-, and return times and differences for `data`.
1851 ///
1852 /// If `self` contains only non-temporal predicates, the result will either be `(time, diff)`,
1853        /// or an evaluation error. If `self` contains temporal predicates, the results can be times
1854 /// that are greater than the input `time`, and may contain negated `diff` values.
1855 ///
1856 /// The `row_builder` is not cleared first, but emptied if the function
1857 /// returns an iterator with any `Ok(_)` element.
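        ///
        /// As a sketch of the output shape: if the temporal bounds of a record
        /// evaluate to a lower bound of `10` and an upper bound of `20`, and the
        /// input update is `(row, time, diff)` with `time <= 10`, the returned
        /// iterator yields the insertion and its eventual retraction:
        ///
        /// ```ignore
        /// (row, 10, diff)     // becomes visible at the lower bound
        /// (row, 20, -diff)    // retracted at the upper bound
        /// ```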
1858 pub fn evaluate<'b, 'a: 'b, E: From<EvalError>, V: Fn(&mz_repr::Timestamp) -> bool>(
1859 &'a self,
1860 datums: &'b mut Vec<Datum<'a>>,
1861 arena: &'a RowArena,
1862 time: mz_repr::Timestamp,
1863 diff: Diff,
1864 valid_time: V,
1865 row_builder: &mut Row,
1866 ) -> impl Iterator<
1867 Item = Result<(Row, mz_repr::Timestamp, Diff), (E, mz_repr::Timestamp, Diff)>,
1868 > + use<E, V> {
1869 match self.mfp.evaluate_inner(datums, arena) {
1870 Err(e) => {
1871 return Some(Err((e.into(), time, diff))).into_iter().chain(None);
1872 }
1873 Ok(true) => {}
1874 Ok(false) => {
1875 return None.into_iter().chain(None);
1876 }
1877 }
1878
1879 // Lower and upper bounds.
1880 let mut lower_bound = time;
1881 let mut upper_bound = None;
1882
1883 // Track whether we have seen a null in either bound, as this should
1884 // prevent the record from being produced at any time.
1885 let mut null_eval = false;
1886
1887 // Advance our lower bound to be at least the result of any lower bound
1888 // expressions.
1889 for l in self.lower_bounds.iter() {
1890 match l.eval(datums, arena) {
1891 Err(e) => {
1892 return Some(Err((e.into(), time, diff)))
1893 .into_iter()
1894 .chain(None.into_iter());
1895 }
1896 Ok(Datum::MzTimestamp(d)) => {
1897 lower_bound = lower_bound.max(d);
1898 }
1899 Ok(Datum::Null) => {
1900 null_eval = true;
1901 }
1902 x => {
1903 panic!("Non-mz_timestamp value in temporal predicate: {:?}", x);
1904 }
1905 }
1906 }
1907
1908 // If the lower bound exceeds our `until` frontier, it should not appear in the output.
1909 if !valid_time(&lower_bound) {
1910 return None.into_iter().chain(None);
1911 }
1912
1913 // If there are any upper bounds, determine the minimum upper bound.
1914 for u in self.upper_bounds.iter() {
1915 // We can cease as soon as the lower and upper bounds match,
1916 // as the update will certainly not be produced in that case.
1917 if upper_bound != Some(lower_bound) {
1918 match u.eval(datums, arena) {
1919 Err(e) => {
1920 return Some(Err((e.into(), time, diff)))
1921 .into_iter()
1922 .chain(None.into_iter());
1923 }
1924 Ok(Datum::MzTimestamp(d)) => {
1925 if let Some(upper) = upper_bound {
1926 upper_bound = Some(upper.min(d));
1927 } else {
1928 upper_bound = Some(d);
1929 };
1930 // Force the upper bound to be at least the lower
1931 // bound. The `is_some()` test should always be true
1932 // due to the above block, but maintain it here in
1933 // case that changes. It's hopefully optimized away.
1934 if upper_bound.is_some() && upper_bound < Some(lower_bound) {
1935 upper_bound = Some(lower_bound);
1936 }
1937 }
1938 Ok(Datum::Null) => {
1939 null_eval = true;
1940 }
1941 x => {
1942 panic!("Non-mz_timestamp value in temporal predicate: {:?}", x);
1943 }
1944 }
1945 }
1946 }
1947
1948 // If the upper bound exceeds our `until` frontier, it should not appear in the output.
1949 if let Some(upper) = &mut upper_bound {
1950 if !valid_time(upper) {
1951 upper_bound = None;
1952 }
1953 }
1954
1955 // Produce an output only if the upper bound exceeds the lower bound,
1956 // and if we did not encounter a `null` in our evaluation.
1957 if Some(lower_bound) != upper_bound && !null_eval {
1958 row_builder
1959 .packer()
1960 .extend(self.mfp.mfp.projection.iter().map(|c| datums[*c]));
1961 let upper_opt =
1962 upper_bound.map(|upper_bound| Ok((row_builder.clone(), upper_bound, -diff)));
1963 let lower = Some(Ok((row_builder.clone(), lower_bound, diff)));
1964 lower.into_iter().chain(upper_opt)
1965 } else {
1966 None.into_iter().chain(None)
1967 }
1968 }
1969
1970 /// Returns true if evaluation could introduce an error on non-error inputs.
1971 pub fn could_error(&self) -> bool {
1972 self.mfp.could_error()
1973 || self.lower_bounds.iter().any(|e| e.could_error())
1974 || self.upper_bounds.iter().any(|e| e.could_error())
1975 }
1976
1977 /// Indicates that `Self` ignores its input to the extent that it can be evaluated on `&[]`.
1978 ///
1979 /// At the moment, this is only true if it projects away all columns and applies no filters,
1980 /// but it could be extended to plans that produce literals independent of the input.
1981 pub fn ignores_input(&self) -> bool {
1982 self.lower_bounds.is_empty()
1983 && self.upper_bounds.is_empty()
1984 && self.mfp.mfp.projection.is_empty()
1985 && self.mfp.mfp.predicates.is_empty()
1986 }
1987 }
1988}