// mz_transform/literal_constraints.rs
1// Copyright Materialize, Inc. and contributors. All rights reserved.
2//
3// Use of this software is governed by the Business Source License
4// included in the LICENSE file.
5//
6// As of the Change Date specified in that file, in accordance with
7// the Business Source License, use of this software will be governed
8// by the Apache License, Version 2.0.
9
10//! See if there are predicates of the form `<expr> = literal` that can be sped up using an index.
11//! More specifically, look for an MFP on top of a Get, where the MFP has an appropriate filter, and
12//! the Get has a matching index. Convert these to `IndexedFilter` joins, which is a semi-join with
13//! a constant collection.
14//!
15//! E.g.: Logically, we go from something like
16//! `SELECT f1, f2, f3 FROM t WHERE t.f1 = lit1 AND t.f2 = lit2`
17//! to
18//! `SELECT f1, f2, f3 FROM t, (SELECT * FROM (VALUES (lit1, lit2))) as filter_list
19//! WHERE t.f1 = filter_list.column1 AND t.f2 = filter_list.column2`
20
21use std::collections::{BTreeMap, BTreeSet};
22
23use itertools::Itertools;
24use mz_expr::JoinImplementation::IndexedFilter;
25use mz_expr::canonicalize::canonicalize_predicates;
26use mz_expr::func::variadic::{And, Or};
27use mz_expr::visit::{Visit, VisitChildren};
28use mz_expr::{BinaryFunc, Id, MapFilterProject, MirRelationExpr, MirScalarExpr, VariadicFunc};
29use mz_ore::collections::CollectionExt;
30use mz_ore::iter::IteratorExt;
31use mz_ore::stack::RecursionLimitError;
32use mz_ore::vec::swap_remove_multiple;
33use mz_repr::{Diff, GlobalId, ReprRelationType, Row};
34
35use crate::TransformCtx;
36use crate::canonicalize_mfp::CanonicalizeMfp;
37use crate::notice::IndexTooWideForLiteralConstraints;
38
/// Convert literal constraints into `IndexedFilter` joins.
///
/// Unit struct: the transform itself is stateless; everything it needs (index
/// information, dataflow metadata for notices) comes from the `TransformCtx`
/// passed to each invocation.
#[derive(Debug)]
pub struct LiteralConstraints;
42
43impl crate::Transform for LiteralConstraints {
44 fn name(&self) -> &'static str {
45 "LiteralConstraints"
46 }
47
48 #[mz_ore::instrument(
49 target = "optimizer",
50 level = "debug",
51 fields(path.segment = "literal_constraints")
52 )]
53 fn actually_perform_transform(
54 &self,
55 relation: &mut MirRelationExpr,
56 ctx: &mut TransformCtx,
57 ) -> Result<(), crate::TransformError> {
58 let result = self.action(relation, ctx);
59 mz_repr::explain::trace_plan(&*relation);
60 result
61 }
62}
63
64impl LiteralConstraints {
    /// Recursive worker: peels an MFP off the top of `relation`, recurses into the
    /// children, and then — if `relation` is a `Get` of a global id — tries to turn
    /// literal constraints in the MFP into an `IndexedFilter` join (see module docs).
    ///
    /// In all cases the (possibly rewritten) MFP is pushed back on top of `relation`
    /// at the end via `CanonicalizeMfp::rebuild_mfp`.
    fn action(
        &self,
        relation: &mut MirRelationExpr,
        transform_ctx: &mut TransformCtx,
    ) -> Result<(), crate::TransformError> {
        // Extract a non-error MFP from the top of `relation`; `relation` now points at
        // the MFP's input.
        let mut mfp = MapFilterProject::extract_non_errors_from_expr_mut(relation);
        relation.try_visit_mut_children(|e| self.action(e, transform_ctx))?;

        if let MirRelationExpr::Get {
            id: Id::Global(id),
            ref typ,
            ..
        } = *relation
        {
            // Keep a pristine copy so we can fall back to it if the rewrite doesn't pay off.
            let orig_mfp = mfp.clone();

            // Preparation for the literal constraints detection.
            Self::inline_literal_constraints(&mut mfp);
            Self::list_of_predicates_to_and_of_predicates(&mut mfp);
            Self::distribute_and_over_or(&mut mfp)?;
            Self::unary_and(&mut mfp);

            /// The above preparation might make the MFP more complicated, so we'll later want to
            /// either undo the preparation transformations or get back to `orig_mfp`.
            fn undo_preparation(
                mfp: &mut MapFilterProject,
                orig_mfp: &MapFilterProject,
                relation: &MirRelationExpr,
                relation_type: ReprRelationType,
            ) {
                // undo list_of_predicates_to_and_of_predicates, distribute_and_over_or, unary_and
                // (It undoes the latter 2 through `MirScalarExp::reduce`.)
                LiteralConstraints::canonicalize_predicates(mfp, relation, relation_type);
                // undo inline_literal_constraints
                mfp.optimize();
                // We can usually undo, but sometimes not (see comment on `distribute_and_over_or`),
                // so in those cases we might have a more complicated MFP than the original MFP
                // (despite the removal of the literal constraints and/or contradicting OR args).
                // So let's use the simpler one.
                if LiteralConstraints::predicates_size(orig_mfp)
                    < LiteralConstraints::predicates_size(mfp)
                {
                    *mfp = orig_mfp.clone();
                }
            }

            let removed_contradicting_or_args = Self::remove_impossible_or_args(&mut mfp)?;

            // todo: We might want to also call `canonicalize_equivalences`,
            // see near the end of literal_constraints.slt.

            let inp_typ = typ.clone();

            let key_val = Self::detect_literal_constraints(&mfp, id, transform_ctx);

            match key_val {
                None => {
                    // We didn't find a usable index, so no chance to remove literal constraints.
                    // But, we might have removed contradicting OR args.
                    if removed_contradicting_or_args {
                        undo_preparation(&mut mfp, &orig_mfp, relation, inp_typ);
                    } else {
                        // We didn't remove anything, so let's go with the original MFP.
                        mfp = orig_mfp;
                    }
                }
                Some((idx_id, key, possible_vals)) => {
                    // We found a usable index. We'll try to remove the corresponding literal
                    // constraints.
                    if Self::remove_literal_constraints(&mut mfp, &key)
                        || removed_contradicting_or_args
                    {
                        // We were able to remove the literal constraints or contradicting OR args,
                        // so we would like to use this new MFP, so we try undoing the preparation.
                        undo_preparation(&mut mfp, &orig_mfp, relation, inp_typ.clone());
                    } else {
                        // We were not able to remove the literal constraint, so `mfp` is
                        // equivalent to `orig_mfp`, but `orig_mfp` is often simpler (or the same).
                        mfp = orig_mfp;
                    }

                    // We transform the Get into a semi-join with a constant collection.

                    let inp_id = id.clone();
                    // The constant side of the join: one row per OR argument, holding that
                    // argument's literal values for the index key fields, keyed on all columns.
                    let filter_list = MirRelationExpr::Constant {
                        rows: Ok(possible_vals
                            .iter()
                            .map(|val| (val.clone(), Diff::ONE))
                            .collect()),
                        typ: ReprRelationType {
                            column_types: key
                                .iter()
                                .map(|e| e.typ(&inp_typ.column_types).scalar_type.nullable(false))
                                .collect(),
                            // (Note that the key inference for `MirRelationExpr::Constant` inspects
                            // the constant values to detect keys not listed within the node, but it
                            // can only detect a single-column key this way. A multi-column key is
                            // common here, so we explicitly add it.)
                            keys: vec![(0..key.len()).collect()],
                        },
                    }
                    .arrange_by(&[(0..key.len()).map(MirScalarExpr::column).collect_vec()]);

                    if possible_vals.is_empty() {
                        // Even better than what we were hoping for: Found contradicting
                        // literal constraints, so the whole relation is empty.
                        relation.take_safely(Some(inp_typ));
                    } else {
                        // The common case: We need to build the join which is the main point of
                        // this transform.
                        *relation = MirRelationExpr::Join {
                            // It's important to keep the `filter_list` in the second position.
                            // Both the lowering and EXPLAIN depend on this.
                            inputs: vec![
                                relation.clone().arrange_by(std::slice::from_ref(&key)),
                                filter_list,
                            ],
                            equivalences: key
                                .iter()
                                .enumerate()
                                .map(|(i, e)| {
                                    vec![(*e).clone(), MirScalarExpr::column(i + inp_typ.arity())]
                                })
                                .collect(),
                            implementation: IndexedFilter(
                                inp_id,
                                idx_id,
                                key.clone(),
                                possible_vals,
                            ),
                        };

                        // Rebuild the MFP to add the projection that removes the columns coming from
                        // the filter_list side of the join.
                        let (map, filter, project) = mfp.as_map_filter_project();
                        mfp = MapFilterProject::new(inp_typ.arity() + key.len())
                            .project(0..inp_typ.arity()) // make the join semi
                            .map(map)
                            .filter(filter)
                            .project(project);
                        mfp.optimize()
                    }
                }
            }
        }

        // Put the (possibly rewritten) MFP back on top of `relation`.
        CanonicalizeMfp::rebuild_mfp(mfp, relation);

        Ok(())
    }
215
    /// Detects literal constraints in an MFP on top of a Get of `id`, and a matching index that can
    /// be used to speed up the Filter of the MFP.
    ///
    /// For example, if there is an index on `(f1, f2)`, and the Filter is
    /// `(f1 = 3 AND f2 = 5) OR (f1 = 7 AND f2 = 9)`, it returns `Some([f1, f2], [[3,5], [7,9]])`.
    ///
    /// We can use an index if each argument of the OR includes a literal constraint on each of the
    /// key fields of the index. Extra predicates inside the OR arguments are ok.
    ///
    /// Returns (idx_id, idx_key, values to lookup in the index).
    fn detect_literal_constraints(
        mfp: &MapFilterProject,
        get_id: GlobalId,
        transform_ctx: &mut TransformCtx,
    ) -> Option<(GlobalId, Vec<MirScalarExpr>, Vec<Row>)> {
        // Checks whether an index with the specified key can be used to speed up the given filter.
        // See comment of `IndexMatch`.
        fn match_index(key: &[MirScalarExpr], or_args: &Vec<MirScalarExpr>) -> IndexMatch {
            if key.is_empty() {
                // Nothing to do with an index that has an empty key.
                return IndexMatch::UnusableNoSubset;
            }
            if !key.iter().all_unique() {
                // This is a weird index. Why does it have duplicate key expressions?
                return IndexMatch::UnusableNoSubset;
            }
            // One `Row` of looked-up literal values per OR argument.
            let mut literal_values = Vec::new();
            let mut inv_cast_any = false;
            // This starts with all key fields of the index.
            // At the end, it will contain a subset S of index key fields such that if the index had
            // only S as its key, then the index would be usable.
            let mut usable_key_fields = key.iter().collect::<BTreeSet<_>>();
            let mut usable = true;
            for or_arg in or_args {
                let mut row = Row::default();
                let mut packer = row.packer();
                for key_field in key {
                    let and_args = or_arg.and_or_args(And.into());
                    // Let's find a constraint for this key field
                    if let Some((literal, inv_cast)) = and_args
                        .iter()
                        .find_map(|and_arg| and_arg.expr_eq_literal(key_field))
                    {
                        // (Note that the above find_map can find only 0 or 1 result, because
                        // of `remove_impossible_or_args`.)
                        packer.push(literal.unpack_first());
                        inv_cast_any |= inv_cast;
                    } else {
                        // There is an `or_arg` where we didn't find a constraint for a key field,
                        // so the index is unusable. Throw out the field from the usable fields.
                        usable = false;
                        usable_key_fields.remove(key_field);
                        if usable_key_fields.is_empty() {
                            return IndexMatch::UnusableNoSubset;
                        }
                    }
                }
                literal_values.push(row);
            }
            if usable {
                // We should deduplicate, because a constraint can be duplicated by
                // `distribute_and_over_or`. For example: `IN ('l1', 'l2') AND (a > 0 OR a < 5)`:
                // the 2 args of the OR will cause the IN constraints to be duplicated. This doesn't
                // alter the meaning of the expression when evaluated as a filter, but if we extract
                // those literals 2 times into `literal_values` then the Peek code will look up
                // those keys from the index 2 times, leading to duplicate results.
                literal_values.sort();
                literal_values.dedup();
                IndexMatch::Usable(literal_values, inv_cast_any)
            } else {
                if usable_key_fields.is_empty() {
                    IndexMatch::UnusableNoSubset
                } else {
                    IndexMatch::UnusableTooWide(
                        usable_key_fields.into_iter().cloned().collect_vec(),
                    )
                }
            }
        }

        let or_args = Self::get_or_args(mfp);

        // Evaluate every index on the Get's id against the filter.
        let index_matches = transform_ctx
            .indexes
            .indexes_on(get_id)
            .map(|(index_id, key)| (index_id, key.to_owned(), match_index(key, &or_args)))
            .collect_vec();

        let result = index_matches
            .iter()
            .cloned()
            .filter_map(|(idx_id, key, index_match)| match index_match {
                IndexMatch::Usable(vals, inv_cast) => Some((idx_id, key, vals, inv_cast)),
                _ => None,
            })
            // Maximize:
            // 1. number of predicates that are sped using a single index.
            // 2. whether we are using a simpler index by having removed a cast from the key expr.
            .max_by_key(|(_idx_id, key, _vals, inv_cast)| (key.len(), *inv_cast))
            .map(|(idx_id, key, vals, _inv_cast)| (idx_id, key, vals));

        if result.is_none() && !or_args.is_empty() {
            // Let's see if we can give a hint to the user.
            index_matches
                .into_iter()
                .for_each(|(index_id, index_key, index_match)| {
                    match index_match {
                        IndexMatch::UnusableTooWide(usable_subset) => {
                            // see comment of `UnusableTooWide`
                            assert!(!usable_subset.is_empty());
                            // Determine literal values that we would get if the index was on
                            // `usable_subset`.
                            let literal_values = match match_index(&usable_subset, &or_args) {
                                IndexMatch::Usable(literal_vals, _) => literal_vals,
                                _ => unreachable!(), // `usable_subset` would make the index usable.
                            };

                            // Let's come up with a recommendation for what columns to index:
                            // Intersect literal constraints across all OR args. (Which might
                            // include columns that are NOT in this index, and therefore not in
                            // `usable_subset`.)
                            let recommended_key = or_args
                                .iter()
                                .map(|or_arg| {
                                    let and_args = or_arg.and_or_args(And.into());
                                    and_args
                                        .iter()
                                        .filter_map(|and_arg| and_arg.any_expr_eq_literal())
                                        .collect::<BTreeSet<_>>()
                                })
                                .reduce(|fields1, fields2| {
                                    fields1.intersection(&fields2).cloned().collect()
                                })
                                // The unwrap is safe because above we checked `!or_args.is_empty()`
                                .unwrap()
                                .into_iter()
                                .collect_vec();

                            transform_ctx.df_meta.push_optimizer_notice_dedup(
                                IndexTooWideForLiteralConstraints {
                                    index_id,
                                    index_key,
                                    usable_subset,
                                    literal_values,
                                    index_on_id: get_id,
                                    recommended_key,
                                },
                            )
                        }
                        _ => (),
                    }
                });
        }

        result
    }
372
373 /// Removes the expressions that [LiteralConstraints::detect_literal_constraints] found, if
374 /// possible. Returns whether it removed anything.
375 /// For example, if the key of the detected literal constraint is just `f1`, and we have the
376 /// expression
377 /// `(f1 = 3 AND f2 = 5) OR (f1 = 7 AND f2 = 5)`, then this modifies it to `f2 = 5`.
378 /// However, if OR branches differ in their non-key parts, then we cannot remove the literal
379 /// constraint. For example,
380 /// `(f1 = 3 AND f2 = 5) OR (f1 = 7 AND f2 = 555)`, then we cannot remove the `f1` parts,
381 /// because then the filter wouldn't know whether to check `f2 = 5` or `f2 = 555`.
382 fn remove_literal_constraints(mfp: &mut MapFilterProject, key: &Vec<MirScalarExpr>) -> bool {
383 let or_args = Self::get_or_args(mfp);
384 if or_args.len() == 0 {
385 return false;
386 }
387
388 // In simple situations it would be enough to check here that if we remove the detected
389 // literal constraints from each OR arg, then the residual OR args are all equal.
390 // However, this wouldn't be able to perform the removal when the expression that should
391 // remain in the end has an OR. This is because conversion to DNF makes duplicates of
392 // every literal constraint, with different residuals. To also handle this case, we collect
393 // the possible residuals for every literal constraint row, and check that all sets are
394 // equal. Example: The user wrote
395 // `WHERE ((a=1 AND b=1) OR (a=2 AND b=2)) AND (c OR (d AND e))`.
396 // The DNF of this is
397 // `(a=1 AND b=1 AND c) OR (a=1 AND b=1 AND d AND e) OR (a=2 AND b=2 AND c) OR (a=2 AND b=2 AND d AND e)`.
398 // Then `constraints_to_residual_sets` will be:
399 // [
400 // [`a=1`, `b=1`] -> {[`c`], [`d`, `e`]},
401 // [`a=2`, `b=2`] -> {[`c`], [`d`, `e`]}
402 // ]
403 // After removing the literal constraints we have
404 // `c OR (d AND e)`
405 let mut constraints_to_residual_sets = BTreeMap::new();
406 or_args.iter().for_each(|or_arg| {
407 let and_args = or_arg.and_or_args(And.into());
408 let (mut constraints, mut residual): (Vec<_>, Vec<_>) =
409 and_args.iter().cloned().partition(|and_arg| {
410 key.iter()
411 .any(|key_field| matches!(and_arg.expr_eq_literal(key_field), Some(..)))
412 });
413 // In every or_arg there has to be some literal constraints, otherwise
414 // `detect_literal_constraints` would have returned None.
415 assert!(constraints.len() >= 1);
416 // `remove_impossible_or_args` made sure that inside each or_arg, each
417 // expression can be literal constrained only once. So if we find one of the
418 // key fields being literal constrained, then it's definitely that literal
419 // constraint that detect_literal_constraints based one of its return values on.
420 //
421 // This is important, because without `remove_impossible_or_args`, we might
422 // have the situation here that or_arg would be something like
423 // `a = 5 AND a = 8`, of which `detect_literal_constraints` found only the `a = 5`,
424 // but here we would remove both the `a = 5` and the `a = 8`.
425 constraints.sort();
426 residual.sort();
427 let entry = constraints_to_residual_sets
428 .entry(constraints)
429 .or_insert_with(BTreeSet::new);
430 entry.insert(residual);
431 });
432 let residual_sets = constraints_to_residual_sets
433 .into_iter()
434 .map(|(_constraints, residual_set)| residual_set)
435 .collect::<Vec<_>>();
436 if residual_sets.iter().all_equal() {
437 // We can remove the literal constraint
438 assert!(residual_sets.len() >= 1); // We already checked `or_args.len() == 0` above
439 let residual_set = residual_sets.into_iter().into_first();
440 let new_pred = MirScalarExpr::call_variadic(
441 Or,
442 residual_set
443 .into_iter()
444 .map(|residual| MirScalarExpr::call_variadic(And, residual))
445 .collect::<Vec<_>>(),
446 );
447 let (map, _predicates, project) = mfp.as_map_filter_project();
448 *mfp = MapFilterProject::new(mfp.input_arity)
449 .map(map)
450 .filter(std::iter::once(new_pred))
451 .project(project);
452
453 true
454 } else {
455 false
456 }
457 }
458
459 /// 1. Removes such OR args in which there are contradicting literal constraints.
460 /// 2. Also, if an OR arg doesn't have any contradiction, this fn just deduplicates
461 /// the AND arg list of that OR arg. (Might additionally sort all AND arg lists.)
462 ///
463 /// Returns whether it performed any removal or deduplication.
464 ///
465 /// Example for 1:
466 /// `<arg1> OR (a = 5 AND a = 5 AND a = 8) OR <arg3>`
467 /// -->
468 /// `<arg1> OR <arg3> `
469 ///
470 /// Example for 2:
471 /// `<arg1> OR (a = 5 AND a = 5 AND b = 8) OR <arg3>`
472 /// -->
473 /// `<arg1> OR (a = 5 AND b = 8) OR <arg3>`
474 fn remove_impossible_or_args(mfp: &mut MapFilterProject) -> Result<bool, RecursionLimitError> {
475 let mut or_args = Self::get_or_args(mfp);
476 if or_args.len() == 0 {
477 return Ok(false);
478 }
479 let mut to_remove = Vec::new();
480 let mut changed = false;
481 or_args.iter_mut().enumerate().for_each(|(i, or_arg)| {
482 if let MirScalarExpr::CallVariadic {
483 func: VariadicFunc::And(And),
484 exprs: and_args,
485 } = or_arg
486 {
487 if and_args
488 .iter()
489 .any(|e| e.impossible_literal_equality_because_types())
490 {
491 changed = true;
492 to_remove.push(i);
493 } else {
494 and_args.sort_by_key(|e: &MirScalarExpr| e.invert_casts_on_expr_eq_literal());
495 let and_args_before_dedup = and_args.clone();
496 and_args
497 .dedup_by_key(|e: &mut MirScalarExpr| e.invert_casts_on_expr_eq_literal());
498 if *and_args != and_args_before_dedup {
499 changed = true;
500 }
501 // Deduplicated, so we cannot have something like `a = 5 AND a = 5`.
502 // This means that if we now have `<expr1> = <literal1> AND <expr1> = <literal2>`,
503 // then `literal1` is definitely not the same as `literal2`. This means that this
504 // whole or_arg is a contradiction, because it's something like `a = 5 AND a = 8`.
505 let mut literal_constrained_exprs = and_args
506 .iter()
507 .filter_map(|and_arg| and_arg.any_expr_eq_literal());
508 if !literal_constrained_exprs.all_unique() {
509 changed = true;
510 to_remove.push(i);
511 }
512 }
513 } else {
514 // `unary_and` made sure that each OR arg is an AND
515 unreachable!("OR arg was not an AND in remove_impossible_or_args");
516 }
517 });
518 // We remove the marked OR args.
519 // (If the OR has 0 or 1 args remaining, then `reduce_and_canonicalize_and_or` will later
520 // further simplify.)
521 swap_remove_multiple(&mut or_args, to_remove);
522 // Rebuild the MFP if needed
523 if changed {
524 let new_predicates = vec![MirScalarExpr::call_variadic(Or, or_args)];
525 let (map, _predicates, project) = mfp.as_map_filter_project();
526 *mfp = MapFilterProject::new(mfp.input_arity)
527 .map(map)
528 .filter(new_predicates)
529 .project(project);
530 Ok(true)
531 } else {
532 Ok(false)
533 }
534 }
535
536 /// Returns the arguments of the predicate's top-level OR as a Vec.
537 /// If there is no top-level OR, then interpret the predicate as a 1-arg OR, i.e., return a
538 /// 1-element Vec.
539 ///
540 /// Assumes that [LiteralConstraints::list_of_predicates_to_and_of_predicates] has already run.
541 fn get_or_args(mfp: &MapFilterProject) -> Vec<MirScalarExpr> {
542 assert_eq!(mfp.predicates.len(), 1); // list_of_predicates_to_and_of_predicates ensured this
543 let (_, pred) = mfp.predicates.get(0).unwrap();
544 pred.and_or_args(Or.into())
545 }
546
    /// Makes the job of [LiteralConstraints::detect_literal_constraints] easier by undoing some CSE to
    /// reconstruct literal constraints.
    ///
    /// Marks map expressions for inlining when they either contain an `<expr> = <literal>`
    /// subexpression themselves, or are equated to a literal inside a predicate, then calls
    /// `perform_inlining` once with all the marks.
    fn inline_literal_constraints(mfp: &mut MapFilterProject) {
        // One flag per column: the first `input_arity` slots are input columns (never
        // inlined), the rest correspond to `mfp.expressions`.
        let mut should_inline = vec![false; mfp.input_arity + mfp.expressions.len()];
        // Mark those expressions for inlining that contain a subexpression of the form
        // `<xxx> = <lit>` or `<lit> = <xxx>`.
        for (i, e) in mfp.expressions.iter().enumerate() {
            e.visit_pre(|s| {
                if s.any_expr_eq_literal().is_some() {
                    should_inline[i + mfp.input_arity] = true;
                }
            });
        }
        // Whenever
        // `<Column(i)> = <lit>` or `<lit> = <Column(i)>`
        // appears in a predicate, mark the ith expression to be inlined.
        for (_before, p) in mfp.predicates.iter() {
            p.visit_pre(|e| {
                if let MirScalarExpr::CallBinary {
                    func: BinaryFunc::Eq(_),
                    expr1,
                    expr2,
                } = e
                {
                    // Check both orientations of the equality.
                    if matches!(**expr1, MirScalarExpr::Literal(..)) {
                        if let MirScalarExpr::Column(col, _) = **expr2 {
                            // Columns below `input_arity` are inputs, not map expressions.
                            if col >= mfp.input_arity {
                                should_inline[col] = true;
                            }
                        }
                    }
                    if matches!(**expr2, MirScalarExpr::Literal(..)) {
                        if let MirScalarExpr::Column(col, _) = **expr1 {
                            if col >= mfp.input_arity {
                                should_inline[col] = true;
                            }
                        }
                    }
                }
            });
        }
        // Perform the marked inlinings.
        mfp.perform_inlining(should_inline);
    }
591
592 /// MFPs have a Vec of predicates `[p1, p2, ...]`, which logically represents `p1 AND p2 AND ...`.
593 /// This function performs this conversion. Note that it might create a variadic AND with
594 /// 0 or 1 args, so the resulting predicate Vec always has exactly 1 element.
595 fn list_of_predicates_to_and_of_predicates(mfp: &mut MapFilterProject) {
596 // Rebuild the MFP. (Unfortunately, we cannot modify the predicates in place, because MFP
597 // predicates also have a "before" field, which we need to update. (`filter` will recompute
598 // these.)
599 let (map, _predicates, project) = mfp.as_map_filter_project();
600 let new_predicates = vec![MirScalarExpr::call_variadic(
601 And,
602 mfp.predicates.iter().map(|(_, p)| p.clone()).collect(),
603 )];
604 *mfp = MapFilterProject::new(mfp.input_arity)
605 .map(map)
606 .filter(new_predicates)
607 .project(project);
608 }
609
610 /// Call [mz_expr::canonicalize::canonicalize_predicates] on each of the predicates in the MFP.
611 fn canonicalize_predicates(
612 mfp: &mut MapFilterProject,
613 relation: &MirRelationExpr,
614 relation_type: ReprRelationType,
615 ) {
616 let (map, mut predicates, project) = mfp.as_map_filter_project();
617 let typ_after_map = relation
618 .clone()
619 .map(map.clone())
620 .typ_with_input_types(&[relation_type]);
621 canonicalize_predicates(&mut predicates, &typ_after_map.column_types);
622 // Rebuild the MFP with the new predicates.
623 *mfp = MapFilterProject::new(mfp.input_arity)
624 .map(map)
625 .filter(predicates)
626 .project(project);
627 }
628
    /// Distribute AND over OR + do flatten_and_or until fixed point.
    /// This effectively converts to disjunctive normal form (DNF) (i.e., an OR of ANDs), because
    /// [MirScalarExpr::reduce] did Demorgans and double-negation-elimination. So after
    /// [MirScalarExpr::reduce], we get here a tree of AND/OR nodes. A distribution step lifts an OR
    /// up the tree by 1 level, and a [MirScalarExpr::flatten_associative] merges two ORs that are at
    /// adjacent levels, so eventually we'll end up with just one OR that is at the top of the tree,
    /// with ANDs below it.
    /// For example:
    /// (a || b) && (c || d)
    /// ->
    /// ((a || b) && c) || ((a || b) && d)
    /// ->
    /// (a && c) || (b && c) || (a && d) || (b && d)
    /// (This is a variadic OR with 4 arguments.)
    ///
    /// Example:
    /// User wrote `WHERE (a,b) IN ((1,2), (1,4), (8,5))`,
    /// from which [MirScalarExpr::undistribute_and_or] made this before us:
    /// (#0 = 1 AND (#1 = 2 OR #1 = 4)) OR (#0 = 8 AND #1 = 5)
    /// And now we distribute the first AND over the first OR in 2 steps: First to
    /// ((#0 = 1 AND #1 = 2) OR (#0 = 1 AND #1 = 4)) OR (#0 = 8 AND #1 = 5)
    /// then [MirScalarExpr::flatten_associative]:
    /// (#0 = 1 AND #1 = 2) OR (#0 = 1 AND #1 = 4) OR (#0 = 8 AND #1 = 5)
    ///
    /// Note that [MirScalarExpr::undistribute_and_or] is not exactly an inverse to this because
    /// 1) it can undistribute both AND over OR and OR over AND.
    /// 2) it cannot always undo the distribution, because an expression might have multiple
    ///    overlapping undistribution opportunities, see comment there.
    fn distribute_and_over_or(mfp: &mut MapFilterProject) -> Result<(), RecursionLimitError> {
        mfp.predicates.iter_mut().try_for_each(|(_, p)| {
            // `column(0)` is just an arbitrary seed that is unlikely to equal `p`; if the
            // predicate really is a bare `#0` the loop is skipped, which is fine because a
            // bare column reference has no AND/OR to distribute anyway.
            let mut old_p = MirScalarExpr::column(0);
            // Iterate to a fixed point: each pass does at most one distribution per AND node.
            while old_p != *p {
                let size = p.size();
                // We might make the expression exponentially larger, so we should have some limit.
                // Below 1000 (e.g., a single IN list of ~300 elements, or 3 IN lists of 4-5
                // elements each), we are <10 ms for a single IN list, and even less for multiple IN
                // lists.
                if size > 1000 {
                    break;
                }
                old_p = p.clone();
                p.visit_mut_post(&mut |e: &mut MirScalarExpr| {
                    if let MirScalarExpr::CallVariadic {
                        func: VariadicFunc::And(And),
                        exprs: and_args,
                    } = e
                    {
                        if let Some((i, _)) = and_args.iter().enumerate().find(|(_i, a)| {
                            matches!(
                                a,
                                MirScalarExpr::CallVariadic {
                                    func: VariadicFunc::Or(Or),
                                    ..
                                }
                            )
                        }) {
                            // We found an AND whose ith argument is an OR. We'll distribute the other
                            // args of the AND over this OR.
                            let mut or = and_args.swap_remove(i);
                            let to_distribute =
                                MirScalarExpr::call_variadic(And, (*and_args).clone());
                            if let MirScalarExpr::CallVariadic {
                                func: VariadicFunc::Or(Or),
                                exprs: ref mut or_args,
                            } = or
                            {
                                // AND each OR argument with the rest of the original AND.
                                or_args.iter_mut().for_each(|a| {
                                    *a = a.clone().and(to_distribute.clone());
                                });
                            } else {
                                unreachable!(); // because the `find` found a match already
                            }
                            *e = or; // The modified OR will be the new top-level expr.
                        }
                    }
                })?;
                // Merge adjacent ORs (and ANDs) produced by the distribution step.
                p.visit_mut_post(&mut |e: &mut MirScalarExpr| {
                    e.flatten_associative();
                })?;
            }
            Ok(())
        })
    }
712
713 /// For each of the arguments of the top-level OR (if no top-level OR, then interpret the whole
714 /// expression as a 1-arg OR, see [LiteralConstraints::get_or_args]), check if it's an AND, and
715 /// if not, then wrap it in a 1-arg AND.
716 fn unary_and(mfp: &mut MapFilterProject) {
717 let mut or_args = Self::get_or_args(mfp);
718 let mut changed = false;
719 or_args.iter_mut().for_each(|or_arg| {
720 if !matches!(
721 or_arg,
722 MirScalarExpr::CallVariadic {
723 func: VariadicFunc::And(And),
724 ..
725 }
726 ) {
727 *or_arg = MirScalarExpr::call_variadic(And, vec![or_arg.clone()]);
728 changed = true;
729 }
730 });
731 if changed {
732 let new_predicates = vec![MirScalarExpr::call_variadic(Or, or_args)];
733 let (map, _predicates, project) = mfp.as_map_filter_project();
734 *mfp = MapFilterProject::new(mfp.input_arity)
735 .map(map)
736 .filter(new_predicates)
737 .project(project);
738 }
739 }
740
741 fn predicates_size(mfp: &MapFilterProject) -> usize {
742 let mut sum = 0;
743 for (_, p) in mfp.predicates.iter() {
744 sum = sum + p.size();
745 }
746 sum
747 }
748}
749
/// Whether an index is usable to speed up a Filter with literal constraints.
// `Clone` is needed because `detect_literal_constraints` clones matches out of the
// collected `index_matches` Vec.
#[derive(Clone)]
enum IndexMatch {
    /// The index is usable, that is, each OR argument constrains each key field.
    ///
    /// The `Vec<Row>` has the constraining literal values, where each Row corresponds to one OR
    /// argument, and each value in the Row corresponds to one key field.
    ///
    /// The `bool` indicates whether we needed to inverse cast equalities to match them up with key
    /// fields. The inverse cast enables index usage when an implicit cast is wrapping a key field.
    /// E.g., if `a` is smallint, and the user writes `a = 5`, then HIR inserts an implicit cast:
    /// `smallint_to_integer(a) = 5`, which we invert to `a = 5`, where the `5` is a smallint
    /// literal. For more details on the inversion, see `invert_casts_on_expr_eq_literal_inner`.
    Usable(Vec<Row>, bool),
    /// The index is unusable. However, there is a subset of key fields such that if the index would
    /// be only on this subset, then it would be usable.
    /// Note: this Vec is never empty. (If it were empty, then we'd get `UnusableNoSubset` instead.)
    UnusableTooWide(Vec<MirScalarExpr>),
    /// The index is unusable. Moreover, none of its key fields could be used as an alternate index
    /// to speed up this filter.
    UnusableNoSubset,
}