use std::collections::{BTreeMap, BTreeSet};

use chrono::{DateTime, Utc};
use maplit::{btreemap, btreeset};
use tracing::warn;

use mz_catalog::memory::objects::{CatalogItem, DataSourceDesc, Index, TableDataSource, View};
use mz_compute_client::controller::error::InstanceMissing;
use mz_compute_types::ComputeInstanceId;
use mz_compute_types::dataflows::{DataflowDesc, DataflowDescription, IndexDesc};
use mz_controller::Controller;
use mz_expr::visit::Visit;
use mz_expr::{
    CollectionPlan, Id, MapFilterProject, MirRelationExpr, MirScalarExpr, OptimizedMirRelationExpr,
    RECURSION_LIMIT, UnmaterializableFunc,
};
use mz_ore::cast::ReinterpretCast;
use mz_ore::stack::{CheckedRecursion, RecursionGuard, RecursionLimitError, maybe_grow};
use mz_repr::adt::array::ArrayDimension;
use mz_repr::explain::trace_plan;
use mz_repr::optimize::OptimizerFeatures;
use mz_repr::role_id::RoleId;
use mz_repr::{Datum, GlobalId, Row};
use mz_sql::catalog::CatalogRole;
use mz_sql::rbac;
use mz_sql::session::metadata::SessionMetadata;
use mz_transform::analysis::DerivedBuilder;
use mz_transform::analysis::monotonic::Monotonic;

use crate::catalog::CatalogState;
use crate::coord::id_bundle::CollectionIdBundle;
use crate::optimize::{Optimize, OptimizerCatalog, OptimizerConfig, OptimizerError, view};
use crate::session::{SERVER_MAJOR_VERSION, SERVER_MINOR_VERSION};
use crate::util::viewable_variables;

/// A self-contained snapshot of the collections installed on a compute
/// instance.
#[derive(Debug, Clone)]
pub struct ComputeInstanceSnapshot {
    instance_id: ComputeInstanceId,
    /// The collections installed on the instance, or `None` if the snapshot
    /// was created without collection information (in which case every
    /// collection is assumed to be present).
    collections: Option<BTreeSet<GlobalId>>,
}

impl ComputeInstanceSnapshot {
    pub fn new(controller: &Controller, id: ComputeInstanceId) -> Result<Self, InstanceMissing> {
        controller
            .compute
            .collection_ids(id)
            .map(|collection_ids| Self {
                instance_id: id,
                collections: Some(collection_ids.collect()),
            })
    }

    pub fn new_from_parts(instance_id: ComputeInstanceId, collections: BTreeSet<GlobalId>) -> Self {
        Self {
            instance_id,
            collections: Some(collections),
        }
    }

    pub fn new_without_collections(instance_id: ComputeInstanceId) -> Self {
        Self {
            instance_id,
            collections: None,
        }
    }

    /// Returns the ID of the compute instance this snapshot was taken from.
    pub fn instance_id(&self) -> ComputeInstanceId {
        self.instance_id
    }

    /// Reports whether the instance contains the indicated collection.
    /// Snapshots taken without collection information report `true` for
    /// every ID.
    pub fn contains_collection(&self, id: &GlobalId) -> bool {
        self.collections
            .as_ref()
            .map_or(true, |collections| collections.contains(id))
    }

    /// Adds a collection to the snapshot.
    ///
    /// # Panics
    ///
    /// Panics if the snapshot was created without collection information.
    pub fn insert_collection(&mut self, id: GlobalId) {
        self.collections
            .as_mut()
            .expect("insert_collection called on snapshot with None collections")
            .insert(id);
    }
}
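
// A minimal usage sketch for `ComputeInstanceSnapshot` (the variables are
// hypothetical): a snapshot built from parts answers membership queries
// directly, while one built without collection information reports every
// collection as present.
//
//     let snap = ComputeInstanceSnapshot::new_from_parts(instance_id, btreeset! {id});
//     assert!(snap.contains_collection(&id));
//
//     let snap = ComputeInstanceSnapshot::new_without_collections(instance_id);
//     assert!(snap.contains_collection(&other_id));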

/// Borrows the resources required to build dataflow descriptions.
#[derive(Debug)]
pub struct DataflowBuilder<'a> {
    pub catalog: &'a dyn OptimizerCatalog,
    pub compute: ComputeInstanceSnapshot,
    /// If set, the ID of an object whose dataflow is being replanned
    /// (populated from [`OptimizerConfig::replan`]).
    pub replan: Option<GlobalId>,
    /// A guard against stack overflow when recursing through the
    /// dependencies of an object.
    recursion_guard: RecursionGuard,
}

/// Prepares expressions for inclusion in a dataflow, e.g. by evaluating or
/// rejecting calls to unmaterializable functions.
pub trait ExprPrep {
    /// Prepares a relation expression for use in a dataflow.
    fn prep_relation_expr(&self, expr: &mut OptimizedMirRelationExpr)
        -> Result<(), OptimizerError>;

    /// Prepares a scalar expression for use in a dataflow.
    fn prep_scalar_expr(&self, expr: &mut MirScalarExpr) -> Result<(), OptimizerError>;
}

/// An [`ExprPrep`] that leaves expressions unchanged.
pub struct ExprPrepNoop;

impl ExprPrep for ExprPrepNoop {
    fn prep_relation_expr(&self, _: &mut OptimizedMirRelationExpr) -> Result<(), OptimizerError> {
        Ok(())
    }
    fn prep_scalar_expr(&self, _expr: &mut MirScalarExpr) -> Result<(), OptimizerError> {
        Ok(())
    }
}

/// An [`ExprPrep`] for expressions in maintained (installed) dataflows:
/// temporal predicates in filters are permitted, while any remaining
/// unmaterializable function calls are rejected.
pub struct ExprPrepMaintained;

impl ExprPrep for ExprPrepMaintained {
    fn prep_relation_expr(
        &self,
        expr: &mut OptimizedMirRelationExpr,
    ) -> Result<(), OptimizerError> {
        expr.0.try_visit_mut_post(&mut |e| {
            if let MirRelationExpr::Filter { input, predicates } = &*e {
                // Temporal predicates are allowed here: plan the filter as an
                // MFP and check only its non-temporal expressions.
                let mfp = MapFilterProject::new(input.arity()).filter(predicates.iter().cloned());
                match mfp.into_plan() {
                    Err(e) => Err(OptimizerError::UnsupportedTemporalExpression(e)),
                    Ok(mut mfp) => {
                        for s in mfp.iter_nontemporal_exprs() {
                            self.prep_scalar_expr(s)?;
                        }
                        Ok(())
                    }
                }
            } else {
                e.try_visit_scalars_mut1(&mut |s| self.prep_scalar_expr(s))
            }
        })
    }

    fn prep_scalar_expr(&self, expr: &mut MirScalarExpr) -> Result<(), OptimizerError> {
        let mut last_observed_unmaterializable_func = None;
        // Record the last unmaterializable function call seen, if any.
        expr.visit_mut_post(&mut |e| {
            if let MirScalarExpr::CallUnmaterializable(f) = e {
                last_observed_unmaterializable_func = Some(f.clone());
            }
        })?;

        if let Some(f) = last_observed_unmaterializable_func {
            Err(OptimizerError::UnmaterializableFunction(f))
        } else {
            Ok(())
        }
    }
}
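
// Behavior sketch for `ExprPrepMaintained` (hedged; `e` is hypothetical): a
// bare call to an unmaterializable function outside a temporal filter is
// rejected, while expressions without such calls pass through unchanged.
//
//     let mut e = MirScalarExpr::CallUnmaterializable(UnmaterializableFunc::CurrentUser);
//     assert!(ExprPrepMaintained.prep_scalar_expr(&mut e).is_err());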

/// An [`ExprPrep`] for one-shot (peek) dataflows: unmaterializable function
/// calls are evaluated in the context of the current session and logical
/// time.
pub struct ExprPrepOneShot<'a> {
    pub logical_time: EvalTime,
    pub session: &'a dyn SessionMetadata,
    pub catalog_state: &'a CatalogState,
}

impl ExprPrep for ExprPrepOneShot<'_> {
    fn prep_relation_expr(
        &self,
        expr: &mut OptimizedMirRelationExpr,
    ) -> Result<(), OptimizerError> {
        expr.0
            .try_visit_scalars_mut(&mut |s| self.prep_scalar_expr(s))
    }

    fn prep_scalar_expr(&self, expr: &mut MirScalarExpr) -> Result<(), OptimizerError> {
        // Replace calls to unmaterializable functions with their current
        // values; `mz_now()` is handled according to `self.logical_time`.
        expr.try_visit_mut_post(&mut |e| {
            if let MirScalarExpr::CallUnmaterializable(f) = e {
                *e = eval_unmaterializable_func(
                    self.catalog_state,
                    f,
                    self.logical_time,
                    self.session,
                )?;
            }
            Ok(())
        })
    }
}
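
// One-shot preparation sketch (hedged; `ts`, `session`, `catalog_state`, and
// `expr` are hypothetical): at a fixed logical time, `mz_now()` folds to a
// timestamp literal, so a predicate like `mz_now() <= col` becomes a plain
// comparison that a peek can evaluate directly.
//
//     let prep = ExprPrepOneShot {
//         logical_time: EvalTime::Time(ts),
//         session: &session,
//         catalog_state: &catalog_state,
//     };
//     prep.prep_scalar_expr(&mut expr)?;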

/// An [`ExprPrep`] for webhook validation expressions: only
/// `current_timestamp()` is evaluated, using the provided wall-clock time.
pub struct ExprPrepWebhookValidation {
    pub now: DateTime<Utc>,
}

impl ExprPrep for ExprPrepWebhookValidation {
    fn prep_relation_expr(
        &self,
        expr: &mut OptimizedMirRelationExpr,
    ) -> Result<(), OptimizerError> {
        expr.0
            .try_visit_scalars_mut(&mut |s| self.prep_scalar_expr(s))
    }

    fn prep_scalar_expr(&self, expr: &mut MirScalarExpr) -> Result<(), OptimizerError> {
        let now = self.now;
        expr.try_visit_mut_post(&mut |e| {
            if let MirScalarExpr::CallUnmaterializable(f @ UnmaterializableFunc::CurrentTimestamp) =
                e
            {
                let now: Datum = now.try_into()?;
                let const_expr = MirScalarExpr::literal_ok(now, f.output_type().scalar_type);
                *e = const_expr;
            }
            Ok(())
        })
    }
}

/// The logical time at which `mz_now()` should be evaluated, if any.
#[derive(Clone, Copy, Debug)]
pub enum EvalTime {
    /// Evaluate `mz_now()` to the given timestamp.
    Time(mz_repr::Timestamp),
    /// Leave `mz_now()` calls in place, to be evaluated later.
    Deferred,
    /// `mz_now()` must not occur; calling it is an error.
    NotAvailable,
}
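
// How `eval_unmaterializable_func` (below) treats `mz_now()` under each
// variant:
//
//     EvalTime::Time(t)      => folded to the literal `Datum::MzTimestamp(t)`
//     EvalTime::Deferred     => left as `CallUnmaterializable(MzNow)`
//     EvalTime::NotAvailable => `OptimizerError::UncallableFunction`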

/// Returns the [`CollectionIdBundle`] of a dataflow description's imports:
/// its storage (source) imports and, for the given compute instance, its
/// index imports.
pub fn dataflow_import_id_bundle<P>(
    dataflow: &DataflowDescription<P>,
    compute_instance: ComputeInstanceId,
) -> CollectionIdBundle {
    let storage_ids = dataflow.source_imports.keys().copied().collect();
    let compute_ids = dataflow.index_imports.keys().copied().collect();
    CollectionIdBundle {
        storage_ids,
        compute_ids: btreemap! {compute_instance => compute_ids},
    }
}
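
// Shape of the resulting bundle (the IDs are hypothetical): storage imports
// land in `storage_ids`, and index imports are keyed under the single given
// compute instance.
//
//     CollectionIdBundle {
//         storage_ids: btreeset! {source_id},
//         compute_ids: btreemap! {compute_instance => btreeset! {index_id}},
//     }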

impl<'a> DataflowBuilder<'a> {
    pub fn new(catalog: &'a dyn OptimizerCatalog, compute: ComputeInstanceSnapshot) -> Self {
        Self {
            catalog,
            compute,
            replan: None,
            recursion_guard: RecursionGuard::with_limit(RECURSION_LIMIT),
        }
    }

    pub(super) fn with_config(mut self, config: &OptimizerConfig) -> Self {
        // Adopt the `replan` target (if any) from the optimizer config.
        self.replan = config.replan;
        self
    }

    /// Imports the object with the given ID into the provided dataflow
    /// description: via an existing index if one is available, otherwise as
    /// a source, or (for views) by recursively importing its dependencies
    /// and inlining its plan.
    pub fn import_into_dataflow(
        &mut self,
        id: &GlobalId,
        dataflow: &mut DataflowDesc,
        features: &OptimizerFeatures,
    ) -> Result<(), OptimizerError> {
        maybe_grow(|| {
            if dataflow.is_imported(id) {
                // Already imported; nothing to do.
                return Ok(());
            }

            let monotonic = self.monotonic_object(*id, features);

            let mut valid_indexes = self.indexes_on(*id).peekable();
            // If any indexes exist on the object, import those rather than
            // the underlying collection.
            if valid_indexes.peek().is_some() {
                for (index_id, idx) in valid_indexes {
                    let index_desc = IndexDesc {
                        on_id: *id,
                        key: idx.keys.to_vec(),
                    };
                    let entry = self.catalog.get_entry(id);
                    let desc = entry
                        .relation_desc()
                        .expect("indexes can only be built on items with descs");
                    dataflow.import_index(index_id, index_desc, desc.typ().clone(), monotonic);
                }
            } else {
                drop(valid_indexes);
                let entry = self.catalog.get_entry(id);
                match entry.item() {
                    CatalogItem::Table(table) => {
                        dataflow.import_source(*id, table.desc_for(id).into_typ(), monotonic);
                    }
                    CatalogItem::Source(source) => {
                        dataflow.import_source(*id, source.desc.typ().clone(), monotonic);
                    }
                    CatalogItem::View(view) => {
                        let expr = view.optimized_expr.as_ref();
                        self.import_view_into_dataflow(id, expr, dataflow, features)?;
                    }
                    CatalogItem::MaterializedView(mview) if mview.replacement_target.is_some() => {
                        // Materialized views that replace another object are
                        // inlined like views rather than imported as sources.
                        let expr = mview.optimized_expr.as_ref();
                        self.import_view_into_dataflow(id, expr, dataflow, features)?;
                    }
                    CatalogItem::MaterializedView(mview) => {
                        dataflow.import_source(*id, mview.desc_for(id).into_typ(), monotonic);
                    }
                    CatalogItem::Log(log) => {
                        dataflow.import_source(*id, log.variant.desc().typ().clone(), monotonic);
                    }
                    CatalogItem::ContinualTask(ct) => {
                        dataflow.import_source(*id, ct.desc.typ().clone(), monotonic);
                    }
                    CatalogItem::Sink(_)
                    | CatalogItem::Index(_)
                    | CatalogItem::Type(_)
                    | CatalogItem::Func(_)
                    | CatalogItem::Secret(_)
                    | CatalogItem::Connection(_) => {
                        // These item types cannot appear as dataflow inputs.
                        unreachable!()
                    }
                }
            }
            Ok(())
        })
    }
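
    // Hedged summary of the import logic above: given an object `id`,
    //
    //     indexed?                         => import each index on `id`
    //     table / source / log / CT / MV   => import as a source
    //     view (or replacement MV)         => recurse, then inline its plan
    //
    // and the `is_imported` check ensures each object is imported at most
    // once per dataflow.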

    /// Imports the view with the given ID and optimized expression into the
    /// provided dataflow description.
    ///
    /// The view's dependencies are imported first, so the dataflow's
    /// `objects_to_build` ends up in dependency order.
    pub fn import_view_into_dataflow(
        &mut self,
        view_id: &GlobalId,
        view: &OptimizedMirRelationExpr,
        dataflow: &mut DataflowDesc,
        features: &OptimizerFeatures,
    ) -> Result<(), OptimizerError> {
        for get_id in view.depends_on() {
            self.import_into_dataflow(&get_id, dataflow, features)?;
        }
        dataflow.insert_plan(*view_id, view.clone());
        Ok(())
    }

    /// Re-optimizes the imported view plans in `df_desc` with the current
    /// optimizer configuration, if the `reoptimize_imported_views` feature
    /// is enabled.
    pub fn maybe_reoptimize_imported_views(
        &self,
        df_desc: &mut DataflowDesc,
        config: &OptimizerConfig,
    ) -> Result<(), OptimizerError> {
        if !config.features.reoptimize_imported_views {
            return Ok(());
        }

        let mut view_optimizer = view::Optimizer::new(config.clone(), None);
        for desc in df_desc.objects_to_build.iter_mut().rev() {
            if matches!(desc.id, GlobalId::Explain | GlobalId::Transient(_)) {
                // Skip transient objects; only catalog views are re-optimized.
                continue;
            }
            if let CatalogItem::View(view) = &self.catalog.get_entry(&desc.id).item {
                let _span = tracing::span!(
                    target: "optimizer",
                    tracing::Level::DEBUG,
                    "view",
                    path.segment = desc.id.to_string()
                )
                .entered();

                // Re-optimize the view from its raw expression.
                desc.plan = view_optimizer.optimize(view.raw_expr.as_ref().clone())?;

                // Trace the re-optimized plan.
                trace_plan(desc.plan.as_inner());
            }
        }

        Ok(())
    }

    /// Reports whether the given data source is monotonic, i.e., whether it
    /// never retracts previously emitted data.
    fn monotonic_source(&self, data_source: &DataSourceDesc) -> bool {
        match data_source {
            DataSourceDesc::Ingestion { .. } => false,
            DataSourceDesc::OldSyntaxIngestion {
                desc, data_config, ..
            } => data_config.monotonic(&desc.connection),
            DataSourceDesc::Webhook { .. } => true,
            DataSourceDesc::IngestionExport {
                ingestion_id,
                data_config,
                ..
            } => {
                let source_desc = self
                    .catalog
                    .get_entry_by_item_id(ingestion_id)
                    .source_desc()
                    .expect("ingestion export must reference a source")
                    .expect("ingestion export must reference a source");
                data_config.monotonic(&source_desc.connection)
            }
            DataSourceDesc::Introspection(_) | DataSourceDesc::Progress => false,
        }
    }

    /// Reports whether the object with the given ID is monotonic.
    ///
    /// Errors during the check (e.g., hitting the recursion limit) are
    /// logged, and the object is conservatively reported as non-monotonic.
    fn monotonic_object(&self, id: GlobalId, features: &OptimizerFeatures) -> bool {
        self.monotonic_object_inner(id, &mut BTreeMap::new(), features)
            .unwrap_or_else(|e| {
                warn!(%id, "error inspecting object for monotonicity: {e}");
                false
            })
    }

    fn monotonic_object_inner(
        &self,
        id: GlobalId,
        memo: &mut BTreeMap<GlobalId, bool>,
        features: &OptimizerFeatures,
    ) -> Result<bool, RecursionLimitError> {
        // Return the memoized result, if we have one.
        if let Some(monotonic) = memo.get(&id) {
            return Ok(*monotonic);
        }

        let monotonic = self.checked_recur(|_| {
            match self.catalog.get_entry(&id).item() {
                CatalogItem::Source(source) => Ok(self.monotonic_source(&source.data_source)),
                CatalogItem::Table(table) => match &table.data_source {
                    TableDataSource::TableWrites { .. } => Ok(false),
                    TableDataSource::DataSource { desc, timeline: _ } => {
                        Ok(self.monotonic_source(desc))
                    }
                },
                CatalogItem::View(View { optimized_expr, .. }) => {
                    let view_expr = optimized_expr.as_ref().clone().into_inner();

                    // Collect the IDs of all monotonic inputs of the view.
                    let mut monotonic_ids = BTreeSet::new();
                    let recursion_result: Result<(), RecursionLimitError> = view_expr
                        .try_visit_post(&mut |e| {
                            if let MirRelationExpr::Get {
                                id: Id::Global(got_id),
                                ..
                            } = e
                            {
                                if self.monotonic_object_inner(*got_id, memo, features)? {
                                    monotonic_ids.insert(*got_id);
                                }
                            }
                            Ok(())
                        });
                    if let Err(error) = recursion_result {
                        // Log the error and proceed with the IDs collected so
                        // far, which yields a conservative result.
                        warn!(%id, "error inspecting view for monotonicity: {error}");
                    }

                    // Derive the view's monotonicity from that of its inputs.
                    let mut builder = DerivedBuilder::new(features);
                    builder.require(Monotonic::new(monotonic_ids.clone()));
                    let derived = builder.visit(&view_expr);

                    Ok(*derived
                        .as_view()
                        .value::<Monotonic>()
                        .expect("Expected monotonic result from non-empty tree"))
                }
                CatalogItem::Index(Index { on, .. }) => {
                    self.monotonic_object_inner(*on, memo, features)
                }
                CatalogItem::Secret(_)
                | CatalogItem::Type(_)
                | CatalogItem::Connection(_)
                | CatalogItem::Log(_)
                | CatalogItem::MaterializedView(_)
                | CatalogItem::Sink(_)
                | CatalogItem::Func(_)
                | CatalogItem::ContinualTask(_) => Ok(false),
            }
        })?;

        memo.insert(id, monotonic);

        Ok(monotonic)
    }
}
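
// Worked example (hedged): for a view whose plan is
// `Filter(Get(webhook_source), predicate)`, `monotonic_object_inner` first
// resolves `webhook_source` as monotonic (per `monotonic_source`, webhook
// sources never retract data), seeds the `Monotonic` analysis with its ID,
// and the analysis then derives monotonicity for the filtered view as a
// whole. Results are memoized per `GlobalId`, so shared inputs are inspected
// only once.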

impl<'a> CheckedRecursion for DataflowBuilder<'a> {
    fn recursion_guard(&self) -> &RecursionGuard {
        &self.recursion_guard
    }
}

/// Evaluates the given unmaterializable function in the context of the given
/// catalog state, logical time, and session, returning a literal expression
/// (or, for `mz_now()` at [`EvalTime::Deferred`], the call itself).
fn eval_unmaterializable_func(
    state: &CatalogState,
    f: &UnmaterializableFunc,
    logical_time: EvalTime,
    session: &dyn SessionMetadata,
) -> Result<MirScalarExpr, OptimizerError> {
    let pack_1d_array = |datums: Vec<Datum>| {
        let mut row = Row::default();
        row.packer()
            .try_push_array(
                &[ArrayDimension {
                    lower_bound: 1,
                    length: datums.len(),
                }],
                datums,
            )
            .expect("known to be a valid array");
        Ok(MirScalarExpr::Literal(Ok(row), f.output_type()))
    };
    let pack_dict = |mut datums: Vec<(String, String)>| {
        datums.sort();
        let mut row = Row::default();
        row.packer().push_dict(
            datums
                .iter()
                .map(|(key, value)| (key.as_str(), Datum::from(value.as_str()))),
        );
        Ok(MirScalarExpr::Literal(Ok(row), f.output_type()))
    };
    let pack = |datum| {
        Ok(MirScalarExpr::literal_ok(
            datum,
            f.output_type().scalar_type,
        ))
    };

    match f {
        UnmaterializableFunc::CurrentDatabase => pack(Datum::from(session.database())),
        UnmaterializableFunc::CurrentSchema => {
            let search_path = state.resolve_search_path(session);
            let schema = search_path
                .first()
                .map(|(db, schema)| &*state.get_schema(db, schema, session.conn_id()).name.schema);
            pack(Datum::from(schema))
        }
        UnmaterializableFunc::CurrentSchemasWithSystem => {
            let search_path = state.resolve_search_path(session);
            let search_path = state.effective_search_path(&search_path, false);
            pack_1d_array(
                search_path
                    .into_iter()
                    .map(|(db, schema)| {
                        let schema = state.get_schema(&db, &schema, session.conn_id());
                        Datum::String(&schema.name.schema)
                    })
                    .collect(),
            )
        }
        UnmaterializableFunc::CurrentSchemasWithoutSystem => {
            let search_path = state.resolve_search_path(session);
            pack_1d_array(
                search_path
                    .into_iter()
                    .map(|(db, schema)| {
                        let schema = state.get_schema(&db, &schema, session.conn_id());
                        Datum::String(&schema.name.schema)
                    })
                    .collect(),
            )
        }
        UnmaterializableFunc::ViewableVariables => pack_dict(
            viewable_variables(state, session)
                .map(|var| (var.name().to_lowercase(), var.value()))
                .collect(),
        ),
        UnmaterializableFunc::CurrentTimestamp => {
            let t: Datum = session.pcx().wall_time.try_into()?;
            pack(t)
        }
        UnmaterializableFunc::CurrentUser => pack(Datum::from(
            state.get_role(session.current_role_id()).name(),
        )),
        UnmaterializableFunc::SessionUser => pack(Datum::from(
            state.get_role(session.session_role_id()).name(),
        )),
        UnmaterializableFunc::IsRbacEnabled => pack(Datum::from(
            rbac::is_rbac_enabled_for_session(state.system_config(), session),
        )),
        UnmaterializableFunc::MzEnvironmentId => {
            pack(Datum::from(&*state.config().environment_id.to_string()))
        }
        UnmaterializableFunc::MzIsSuperuser => pack(Datum::from(session.is_superuser())),
        UnmaterializableFunc::MzNow => match logical_time {
            EvalTime::Time(logical_time) => pack(Datum::MzTimestamp(logical_time)),
            EvalTime::Deferred => Ok(MirScalarExpr::CallUnmaterializable(f.clone())),
            EvalTime::NotAvailable => Err(OptimizerError::UncallableFunction {
                func: UnmaterializableFunc::MzNow,
                context: "this context",
            }),
        },
        UnmaterializableFunc::MzRoleOidMemberships => {
            let role_memberships = role_oid_memberships(state);
            let mut role_memberships: Vec<(_, Vec<_>)> = role_memberships
                .into_iter()
                .map(|(role_id, role_membership)| {
                    (
                        role_id.to_string(),
                        role_membership
                            .into_iter()
                            .map(|role_id| role_id.to_string())
                            .collect(),
                    )
                })
                .collect();
            role_memberships.sort();
            let mut row = Row::default();
            row.packer().push_dict_with(|row| {
                for (role_id, role_membership) in &role_memberships {
                    row.push(Datum::from(role_id.as_str()));
                    row.try_push_array(
                        &[ArrayDimension {
                            lower_bound: 1,
                            length: role_membership.len(),
                        }],
                        role_membership.iter().map(|role_id| Datum::from(role_id.as_str())),
                    ).expect("role_membership is 1 dimensional, and its length is used for the array length");
                }
            });
            Ok(MirScalarExpr::Literal(Ok(row), f.output_type()))
        }
        UnmaterializableFunc::MzSessionId => pack(Datum::from(state.config().session_id)),
        UnmaterializableFunc::MzUptime => {
            let uptime = state.config().start_instant.elapsed();
            let uptime = chrono::Duration::from_std(uptime).map_or(Datum::Null, Datum::from);
            pack(uptime)
        }
        UnmaterializableFunc::MzVersion => pack(Datum::from(
            &*state
                .config()
                .build_info
                .human_version(state.config().helm_chart_version.clone()),
        )),
        UnmaterializableFunc::MzVersionNum => {
            pack(Datum::Int32(state.config().build_info.version_num()))
        }
        UnmaterializableFunc::PgBackendPid => pack(Datum::Int32(i32::reinterpret_cast(
            session.conn_id().unhandled(),
        ))),
        UnmaterializableFunc::PgPostmasterStartTime => {
            let t: Datum = state.config().start_time.try_into()?;
            pack(t)
        }
        UnmaterializableFunc::Version => {
            let build_info = state.config().build_info;
            let version = format!(
                "PostgreSQL {}.{} on {} (Materialize {})",
                SERVER_MAJOR_VERSION,
                SERVER_MINOR_VERSION,
                mz_build_info::TARGET_TRIPLE,
                build_info.version,
            );
            pack(Datum::from(&*version))
        }
    }
}

/// Returns a map from each role's OID to the set of OIDs of all roles it
/// transitively belongs to (including itself).
fn role_oid_memberships<'a>(catalog: &'a CatalogState) -> BTreeMap<u32, BTreeSet<u32>> {
    let mut role_memberships = BTreeMap::new();
    for role_id in catalog.get_roles() {
        let role = catalog.get_role(role_id);
        if !role_memberships.contains_key(&role.oid) {
            role_oid_memberships_inner(catalog, role_id, &mut role_memberships);
        }
    }
    role_memberships
}

fn role_oid_memberships_inner<'a>(
    catalog: &'a CatalogState,
    role_id: &RoleId,
    role_memberships: &mut BTreeMap<u32, BTreeSet<u32>>,
) {
    let role = catalog.get_role(role_id);
    role_memberships.insert(role.oid, btreeset! {role.oid});
    for parent_role_id in role.membership.map.keys() {
        let parent_role = catalog.get_role(parent_role_id);
        if !role_memberships.contains_key(&parent_role.oid) {
            role_oid_memberships_inner(catalog, parent_role_id, role_memberships);
        }
        let parent_membership: BTreeSet<_> = role_memberships
            .get(&parent_role.oid)
            .expect("inserted in recursive call above")
            .into_iter()
            .cloned()
            .collect();
        role_memberships
            .get_mut(&role.oid)
            .expect("inserted above")
            .extend(parent_membership);
    }
}
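
// Worked example: if role `r1` is a member of `r2`, and `r2` is a member of
// `r3`, then the map built above is
//
//     oid(r3) => {oid(r3)}
//     oid(r2) => {oid(r2), oid(r3)}
//     oid(r1) => {oid(r1), oid(r2), oid(r3)}
//
// i.e., each role maps to its own OID plus the OIDs of all roles it
// transitively belongs to.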