mz_adapter/optimize/dataflows.rs

// Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.

//! Types and methods for building and shipping dataflow descriptions.
//!
//! Dataflows are buildable from the coordinator's `catalog` and `indexes`
//! members, which respectively describe the collection backing identifiers
//! and indicate which identifiers have arrangements available. This module
//! isolates that logic from the rest of the somewhat complicated coordinator.

use std::collections::{BTreeMap, BTreeSet};

use chrono::{DateTime, Utc};
use maplit::{btreemap, btreeset};
use tracing::warn;

use mz_catalog::memory::objects::{CatalogItem, DataSourceDesc, Index, TableDataSource, View};
use mz_compute_client::controller::error::InstanceMissing;
use mz_compute_types::ComputeInstanceId;
use mz_compute_types::dataflows::{DataflowDesc, DataflowDescription, IndexDesc};
use mz_controller::Controller;
use mz_expr::visit::Visit;
use mz_expr::{
    CollectionPlan, Id, MapFilterProject, MirRelationExpr, MirScalarExpr, OptimizedMirRelationExpr,
    RECURSION_LIMIT, UnmaterializableFunc,
};
use mz_ore::cast::ReinterpretCast;
use mz_ore::stack::{CheckedRecursion, RecursionGuard, RecursionLimitError, maybe_grow};
use mz_repr::adt::array::ArrayDimension;
use mz_repr::explain::trace_plan;
use mz_repr::optimize::OptimizerFeatures;
use mz_repr::role_id::RoleId;
use mz_repr::{Datum, GlobalId, Row};
use mz_sql::catalog::CatalogRole;
use mz_sql::rbac;
use mz_sql::session::metadata::SessionMetadata;
use mz_transform::analysis::DerivedBuilder;
use mz_transform::analysis::monotonic::Monotonic;

use crate::catalog::CatalogState;
use crate::coord::id_bundle::CollectionIdBundle;
use crate::optimize::{Optimize, OptimizerCatalog, OptimizerConfig, OptimizerError, view};
use crate::session::{SERVER_MAJOR_VERSION, SERVER_MINOR_VERSION};
use crate::util::viewable_variables;

/// A reference-less snapshot of a compute instance. There is no guarantee
/// that the instance identified by `instance_id` continues to exist after
/// the snapshot has been made.
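///
/// # Example
///
/// An illustrative sketch (not compiled as a doctest); `controller`,
/// `instance_id`, and `new_id` are assumed to be in scope:
///
/// ```ignore
/// // Snapshot the set of collections on the instance at this moment.
/// let mut snapshot = ComputeInstanceSnapshot::new(&controller, instance_id)?;
/// assert_eq!(snapshot.instance_id(), instance_id);
///
/// // Record a collection that is about to be created on the instance.
/// if !snapshot.contains_collection(&new_id) {
///     snapshot.insert_collection(new_id);
/// }
/// ```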
#[derive(Debug, Clone)]
pub struct ComputeInstanceSnapshot {
    instance_id: ComputeInstanceId,
    collections: BTreeSet<GlobalId>,
}

impl ComputeInstanceSnapshot {
    /// Creates a snapshot of the compute instance identified by `id`,
    /// returning an error if the instance does not exist.
    pub fn new(controller: &Controller, id: ComputeInstanceId) -> Result<Self, InstanceMissing> {
        controller
            .compute
            .collection_ids(id)
            .map(|collection_ids| Self {
                instance_id: id,
                collections: collection_ids.collect(),
            })
    }

    /// Return the ID of this compute instance.
    pub fn instance_id(&self) -> ComputeInstanceId {
        self.instance_id
    }

    /// Reports whether the instance contains the indicated collection.
    pub fn contains_collection(&self, id: &GlobalId) -> bool {
        self.collections.contains(id)
    }

    /// Inserts the given collection into the snapshot.
    pub fn insert_collection(&mut self, id: GlobalId) {
        self.collections.insert(id);
    }
}

/// Borrows of catalog and indexes sufficient to build dataflow descriptions.
#[derive(Debug)]
pub struct DataflowBuilder<'a> {
    pub catalog: &'a dyn OptimizerCatalog,
    /// A snapshot of the targeted compute instance, which records the
    /// collections (and therefore the indexes) available on it.
    pub compute: ComputeInstanceSnapshot,
    /// If set, indicates that the `DataflowBuilder` operates in "replan" mode
    /// and should consider only catalog items that are strictly less than the
    /// given [`GlobalId`].
    ///
    /// In particular, indexes with higher [`GlobalId`] that are present in the
    /// catalog will be ignored.
    ///
    /// Bound from [`OptimizerConfig::replan`].
    pub replan: Option<GlobalId>,
    /// A guard for recursive operations in this [`DataflowBuilder`] instance.
    recursion_guard: RecursionGuard,
}

/// The styles in which an expression can be prepared for use in a dataflow.
#[derive(Clone, Copy, Debug)]
pub enum ExprPrepStyle<'a> {
    /// The expression is being prepared for installation as a maintained dataflow, e.g.,
    /// index, materialized view, or subscribe.
    Maintained,
    /// The expression is being prepared to run once at the specified logical
    /// time in the specified session.
    OneShot {
        logical_time: EvalTime,
        session: &'a dyn SessionMetadata,
        catalog_state: &'a CatalogState,
    },
    /// The expression is being prepared for evaluation in a CHECK expression of a webhook source.
    WebhookValidation {
        /// Time at which this expression is being evaluated.
        now: DateTime<Utc>,
    },
}

/// The logical time at which `mz_now()` calls in a one-shot expression are evaluated.
#[derive(Clone, Copy, Debug)]
pub enum EvalTime {
    /// Evaluates `mz_now()` calls at the given timestamp.
    Time(mz_repr::Timestamp),
    /// Skips `mz_now()` calls.
    Deferred,
    /// Errors on `mz_now()` calls.
    NotAvailable,
}

/// Returns an ID bundle with the given dataflow's imports.
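///
/// # Example
///
/// An illustrative sketch (not compiled as a doctest); assumes a finished
/// `dataflow: DataflowDescription<P>` and a target `instance_id`:
///
/// ```ignore
/// let id_bundle = dataflow_import_id_bundle(&dataflow, instance_id);
/// // Storage and compute imports are partitioned into the two halves of
/// // the bundle; all compute IDs live under `instance_id`.
/// assert!(id_bundle.compute_ids.keys().all(|id| *id == instance_id));
/// ```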
pub fn dataflow_import_id_bundle<P>(
    dataflow: &DataflowDescription<P>,
    compute_instance: ComputeInstanceId,
) -> CollectionIdBundle {
    let storage_ids = dataflow.source_imports.keys().copied().collect();
    let compute_ids = dataflow.index_imports.keys().copied().collect();
    CollectionIdBundle {
        storage_ids,
        compute_ids: btreemap! {compute_instance => compute_ids},
    }
}

impl<'a> DataflowBuilder<'a> {
    pub fn new(catalog: &'a dyn OptimizerCatalog, compute: ComputeInstanceSnapshot) -> Self {
        Self {
            catalog,
            compute,
            replan: None,
            recursion_guard: RecursionGuard::with_limit(RECURSION_LIMIT),
        }
    }

    // TODO(aalexandrov): strictly speaking, it would be better to make
    // `config: &OptimizerConfig` a field in the enclosing builder. However,
    // before we can do that we should make sure that nobody outside of the
    // optimizer is using a `DataflowBuilder` instance.
    pub(super) fn with_config(mut self, config: &OptimizerConfig) -> Self {
        self.replan = config.replan;
        self
    }

    /// Imports the view, source, or table with `id` into the provided
    /// dataflow description. [`OptimizerFeatures`] is used while running
    /// the [`Monotonic`] analysis.
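    ///
    /// # Example
    ///
    /// An illustrative sketch (not compiled as a doctest); assumes a
    /// `builder: DataflowBuilder`, a catalog item `id`, an in-progress
    /// `dataflow` description, and `features`:
    ///
    /// ```ignore
    /// // Imports `id` plus, transitively, everything it depends on.
    /// builder.import_into_dataflow(&id, &mut dataflow, &features)?;
    /// ```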
    pub fn import_into_dataflow(
        &mut self,
        id: &GlobalId,
        dataflow: &mut DataflowDesc,
        features: &OptimizerFeatures,
    ) -> Result<(), OptimizerError> {
        maybe_grow(|| {
            // Avoid importing the item redundantly.
            if dataflow.is_imported(id) {
                return Ok(());
            }

            let monotonic = self.monotonic_object(*id, features);

            // A valid index is any index on `id` that is known to the index oracle.
            // Here, we import all indexes that belong to all imported collections. Later,
            // `prune_and_annotate_dataflow_index_imports` runs at the end of the MIR
            // pipeline, and removes unneeded index imports based on the optimized plan.
            let mut valid_indexes = self.indexes_on(*id).peekable();
            if valid_indexes.peek().is_some() {
                for (index_id, idx) in valid_indexes {
                    let index_desc = IndexDesc {
                        on_id: *id,
                        key: idx.keys.to_vec(),
                    };
                    let entry = self.catalog.get_entry(id);
                    let desc = entry
                        .desc(
                            &self
                                .catalog
                                .resolve_full_name(entry.name(), entry.conn_id()),
                        )
                        .expect("indexes can only be built on items with descs");
                    dataflow.import_index(index_id, index_desc, desc.typ().clone(), monotonic);
                }
            } else {
                drop(valid_indexes);
                let entry = self.catalog.get_entry(id);
                match entry.item() {
                    CatalogItem::Table(table) => {
                        dataflow.import_source(*id, table.desc_for(id).typ().clone(), monotonic);
                    }
                    CatalogItem::Source(source) => {
                        dataflow.import_source(*id, source.desc.typ().clone(), monotonic);
                    }
                    CatalogItem::View(view) => {
                        let expr = view.optimized_expr.as_ref();
                        self.import_view_into_dataflow(id, expr, dataflow, features)?;
                    }
                    CatalogItem::MaterializedView(mview) => {
                        dataflow.import_source(*id, mview.desc.typ().clone(), monotonic);
                    }
                    CatalogItem::Log(log) => {
                        dataflow.import_source(*id, log.variant.desc().typ().clone(), monotonic);
                    }
                    CatalogItem::ContinualTask(ct) => {
                        dataflow.import_source(*id, ct.desc.typ().clone(), monotonic);
                    }
                    _ => unreachable!(),
                }
            }
            Ok(())
        })
    }

    /// Imports the view with the specified ID and expression into the provided
    /// dataflow description. [`OptimizerFeatures`] is used while running
    /// expression [`mz_transform::analysis::Analysis`].
    ///
    /// You should generally prefer calling
    /// [`DataflowBuilder::import_into_dataflow`], which can handle objects of
    /// any type as long as they exist in the catalog. This method exists for
    /// when the view does not exist in the catalog, e.g., because it is
    /// identified by a [`GlobalId::Transient`].
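    ///
    /// # Example
    ///
    /// An illustrative sketch (not compiled as a doctest); `view_id` is a
    /// transient ID and `optimized_expr` is its optimized plan:
    ///
    /// ```ignore
    /// // Imports the view's dependencies, then records its plan.
    /// builder.import_view_into_dataflow(&view_id, &optimized_expr, &mut dataflow, &features)?;
    /// ```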
    pub fn import_view_into_dataflow(
        &mut self,
        view_id: &GlobalId,
        view: &OptimizedMirRelationExpr,
        dataflow: &mut DataflowDesc,
        features: &OptimizerFeatures,
    ) -> Result<(), OptimizerError> {
        for get_id in view.depends_on() {
            self.import_into_dataflow(&get_id, dataflow, features)?;
        }
        dataflow.insert_plan(*view_id, view.clone());
        Ok(())
    }

    /// Re-optimizes the imported view plans using the current optimizer
    /// configuration, if reoptimization is requested.
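    ///
    /// # Example
    ///
    /// An illustrative sketch (not compiled as a doctest); reoptimization
    /// only happens when the corresponding feature is set in `config`
    /// (`base_config` is a hypothetical starting configuration):
    ///
    /// ```ignore
    /// let mut config = base_config.clone();
    /// config.features.reoptimize_imported_views = true;
    /// builder.maybe_reoptimize_imported_views(&mut df_desc, &config)?;
    /// ```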
    pub fn maybe_reoptimize_imported_views(
        &self,
        df_desc: &mut DataflowDesc,
        config: &OptimizerConfig,
    ) -> Result<(), OptimizerError> {
        if !config.features.reoptimize_imported_views {
            return Ok(()); // Do nothing if not explicitly requested.
        }

        let mut view_optimizer = view::Optimizer::new(config.clone(), None);
        for desc in df_desc.objects_to_build.iter_mut().rev() {
            if matches!(desc.id, GlobalId::Explain | GlobalId::Transient(_)) {
                continue; // Skip descriptions that do not reference proper views.
            }
            if let CatalogItem::View(view) = &self.catalog.get_entry(&desc.id).item {
                let _span = tracing::span!(
                    target: "optimizer",
                    tracing::Level::DEBUG,
                    "view",
                    path.segment = desc.id.to_string()
                )
                .entered();

                // Reoptimize the view and update the resulting `desc.plan`.
                desc.plan = view_optimizer.optimize(view.raw_expr.as_ref().clone())?;

                // Report the optimized plan under this span.
                trace_plan(desc.plan.as_inner());
            }
        }

        Ok(())
    }

    /// Determines the given source's monotonicity.
    fn monotonic_source(&self, data_source: &DataSourceDesc) -> bool {
        match data_source {
            DataSourceDesc::Ingestion { .. } => false,
            DataSourceDesc::OldSyntaxIngestion {
                desc, data_config, ..
            } => data_config.monotonic(&desc.connection),
            DataSourceDesc::Webhook { .. } => true,
            DataSourceDesc::IngestionExport {
                ingestion_id,
                data_config,
                ..
            } => {
                let source_desc = self
                    .catalog
                    .get_entry_by_item_id(ingestion_id)
                    .source_desc()
                    .expect("ingestion export must reference a source")
                    .expect("ingestion export must reference a source");
                data_config.monotonic(&source_desc.connection)
            }
            DataSourceDesc::Introspection(_) | DataSourceDesc::Progress => false,
        }
    }

    /// Determines the given object's monotonicity.
    ///
    /// This recursively traverses the expressions of all views depended on by the given object.
    /// If this becomes a performance problem, we could add the monotonicity information of views
    /// into the catalog instead.
    ///
    /// Note that materialized views are never monotonic, no matter their definition, because the
    /// self-correcting persist_sink may insert retractions to correct the contents of its output
    /// collection.
    fn monotonic_object(&self, id: GlobalId, features: &OptimizerFeatures) -> bool {
        self.monotonic_object_inner(id, &mut BTreeMap::new(), features)
            .unwrap_or_else(|e| {
                warn!(%id, "error inspecting object for monotonicity: {e}");
                false
            })
    }

    fn monotonic_object_inner(
        &self,
        id: GlobalId,
        memo: &mut BTreeMap<GlobalId, bool>,
        features: &OptimizerFeatures,
    ) -> Result<bool, RecursionLimitError> {
        // An object might be reached multiple times. If we already computed the monotonicity of
        // the given ID, use that. If not, then compute it and remember the result.
        if let Some(monotonic) = memo.get(&id) {
            return Ok(*monotonic);
        }

        let monotonic = self.checked_recur(|_| {
            match self.catalog.get_entry(&id).item() {
                CatalogItem::Source(source) => Ok(self.monotonic_source(&source.data_source)),
                CatalogItem::Table(table) => match &table.data_source {
                    TableDataSource::TableWrites { .. } => Ok(false),
                    TableDataSource::DataSource { desc, timeline: _ } => {
                        Ok(self.monotonic_source(desc))
                    }
                },
                CatalogItem::View(View { optimized_expr, .. }) => {
                    let view_expr = optimized_expr.as_ref().clone().into_inner();

                    // Inspect global IDs that occur in the Gets in `view_expr`, and collect the
                    // IDs of monotonic dependees.
                    let mut monotonic_ids = BTreeSet::new();
                    let recursion_result: Result<(), RecursionLimitError> = view_expr
                        .try_visit_post(&mut |e| {
                            if let MirRelationExpr::Get {
                                id: Id::Global(got_id),
                                ..
                            } = e
                            {
                                if self.monotonic_object_inner(*got_id, memo, features)? {
                                    monotonic_ids.insert(*got_id);
                                }
                            }
                            Ok(())
                        });
                    if let Err(error) = recursion_result {
                        // We might still have collected some of the IDs, so just log and
                        // continue. The subsequent monotonicity analysis can then have false
                        // negatives.
                        warn!(%id, "error inspecting view for monotonicity: {error}");
                    }

                    let mut builder = DerivedBuilder::new(features);
                    builder.require(Monotonic::new(monotonic_ids.clone()));
                    let derived = builder.visit(&view_expr);

                    Ok(*derived
                        .as_view()
                        .value::<Monotonic>()
                        .expect("expected monotonic result from non-empty tree"))
                }
                CatalogItem::Index(Index { on, .. }) => {
                    self.monotonic_object_inner(*on, memo, features)
                }
                CatalogItem::Secret(_)
                | CatalogItem::Type(_)
                | CatalogItem::Connection(_)
                | CatalogItem::Log(_)
                | CatalogItem::MaterializedView(_)
                | CatalogItem::Sink(_)
                | CatalogItem::Func(_)
                | CatalogItem::ContinualTask(_) => Ok(false),
            }
        })?;

        memo.insert(id, monotonic);

        Ok(monotonic)
    }
}

impl<'a> CheckedRecursion for DataflowBuilder<'a> {
    fn recursion_guard(&self) -> &RecursionGuard {
        &self.recursion_guard
    }
}

/// Prepares a relation expression for dataflow execution by preparing all
/// contained scalar expressions (see `prep_scalar_expr`) in the specified
/// style.
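///
/// # Example
///
/// An illustrative sketch (not compiled as a doctest); assumes an optimized
/// expression `expr` destined for a maintained dataflow:
///
/// ```ignore
/// // Fails with `OptimizerError::UnmaterializableFunction` if `expr` calls
/// // an unmaterializable function such as `current_timestamp()`; temporal
/// // filter predicates using `mz_now()` are handled specially and allowed.
/// prep_relation_expr(&mut expr, ExprPrepStyle::Maintained)?;
/// ```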
pub fn prep_relation_expr(
    expr: &mut OptimizedMirRelationExpr,
    style: ExprPrepStyle,
) -> Result<(), OptimizerError> {
    match style {
        ExprPrepStyle::Maintained => {
            expr.0.try_visit_mut_post(&mut |e| {
                // Carefully test filter expressions, which may represent temporal filters.
                if let MirRelationExpr::Filter { input, predicates } = &*e {
                    let mfp =
                        MapFilterProject::new(input.arity()).filter(predicates.iter().cloned());
                    match mfp.into_plan() {
                        Err(e) => Err(OptimizerError::UnsupportedTemporalExpression(e)),
                        Ok(mut mfp) => {
                            for s in mfp.iter_nontemporal_exprs() {
                                prep_scalar_expr(s, style)?;
                            }
                            Ok(())
                        }
                    }
                } else {
                    e.try_visit_scalars_mut1(&mut |s| prep_scalar_expr(s, style))
                }
            })
        }
        ExprPrepStyle::OneShot { .. } | ExprPrepStyle::WebhookValidation { .. } => expr
            .0
            .try_visit_scalars_mut(&mut |s| prep_scalar_expr(s, style)),
    }
}

/// Prepares a scalar expression for execution by handling unmaterializable
/// functions.
///
/// How we prepare the scalar expression depends on which `style` is specified.
///
/// * `OneShot`: Calls to all unmaterializable functions are replaced.
/// * `Maintained`: An error is produced if a call to an unmaterializable
///   function is encountered.
/// * `WebhookValidation`: Only calls to `UnmaterializableFunc::CurrentTimestamp` are replaced,
///   others are left untouched.
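///
/// # Example
///
/// An illustrative sketch (not compiled as a doctest); in `OneShot` style,
/// `mz_now()` is replaced with the provided logical time (`timestamp`,
/// `session`, and `catalog_state` are assumed to be in scope):
///
/// ```ignore
/// let style = ExprPrepStyle::OneShot {
///     logical_time: EvalTime::Time(timestamp), // the chosen query timestamp
///     session: &session,
///     catalog_state: &catalog_state,
/// };
/// prep_scalar_expr(&mut scalar_expr, style)?;
/// // Any `mz_now()` call in `scalar_expr` is now a literal timestamp.
/// ```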
pub fn prep_scalar_expr(
    expr: &mut MirScalarExpr,
    style: ExprPrepStyle,
) -> Result<(), OptimizerError> {
    match style {
        // Evaluate each unmaterializable function and replace the
        // invocation with the result.
        ExprPrepStyle::OneShot {
            logical_time,
            session,
            catalog_state,
        } => expr.try_visit_mut_post(&mut |e| {
            if let MirScalarExpr::CallUnmaterializable(f) = e {
                *e = eval_unmaterializable_func(catalog_state, f, logical_time, session)?;
            }
            Ok(())
        }),

        // Reject the query if it contains any unmaterializable function calls.
        ExprPrepStyle::Maintained => {
            let mut last_observed_unmaterializable_func = None;
            expr.visit_mut_post(&mut |e| {
                if let MirScalarExpr::CallUnmaterializable(f) = e {
                    last_observed_unmaterializable_func = Some(f.clone());
                }
            })?;

            if let Some(f) = last_observed_unmaterializable_func {
                let err = match style {
                    ExprPrepStyle::Maintained => OptimizerError::UnmaterializableFunction(f),
                    _ => unreachable!(),
                };
                return Err(err);
            }
            Ok(())
        }

        ExprPrepStyle::WebhookValidation { now } => {
            expr.try_visit_mut_post(&mut |e| {
                if let MirScalarExpr::CallUnmaterializable(
                    f @ UnmaterializableFunc::CurrentTimestamp,
                ) = e
                {
                    let now: Datum = now.try_into()?;
                    let const_expr = MirScalarExpr::literal_ok(now, f.output_type().scalar_type);
                    *e = const_expr;
                }
                Ok::<_, anyhow::Error>(())
            })?;
            Ok(())
        }
    }
}

/// Evaluates the given unmaterializable function in the context of the given
/// catalog state, logical time, and session, replacing the call with a
/// literal expression where possible.
fn eval_unmaterializable_func(
    state: &CatalogState,
    f: &UnmaterializableFunc,
    logical_time: EvalTime,
    session: &dyn SessionMetadata,
) -> Result<MirScalarExpr, OptimizerError> {
    let pack_1d_array = |datums: Vec<Datum>| {
        let mut row = Row::default();
        row.packer()
            .try_push_array(
                &[ArrayDimension {
                    lower_bound: 1,
                    length: datums.len(),
                }],
                datums,
            )
            .expect("known to be a valid array");
        Ok(MirScalarExpr::Literal(Ok(row), f.output_type()))
    };
    let pack_dict = |mut datums: Vec<(String, String)>| {
        datums.sort();
        let mut row = Row::default();
        row.packer().push_dict(
            datums
                .iter()
                .map(|(key, value)| (key.as_str(), Datum::from(value.as_str()))),
        );
        Ok(MirScalarExpr::Literal(Ok(row), f.output_type()))
    };
    let pack = |datum| {
        Ok(MirScalarExpr::literal_ok(
            datum,
            f.output_type().scalar_type,
        ))
    };

    match f {
        UnmaterializableFunc::CurrentDatabase => pack(Datum::from(session.database())),
        UnmaterializableFunc::CurrentSchema => {
            let search_path = state.resolve_search_path(session);
            let schema = search_path
                .first()
                .map(|(db, schema)| &*state.get_schema(db, schema, session.conn_id()).name.schema);
            pack(Datum::from(schema))
        }
        UnmaterializableFunc::CurrentSchemasWithSystem => {
            let search_path = state.resolve_search_path(session);
            let search_path = state.effective_search_path(&search_path, false);
            pack_1d_array(
                search_path
                    .into_iter()
                    .map(|(db, schema)| {
                        let schema = state.get_schema(&db, &schema, session.conn_id());
                        Datum::String(&schema.name.schema)
                    })
                    .collect(),
            )
        }
        UnmaterializableFunc::CurrentSchemasWithoutSystem => {
            let search_path = state.resolve_search_path(session);
            pack_1d_array(
                search_path
                    .into_iter()
                    .map(|(db, schema)| {
                        let schema = state.get_schema(&db, &schema, session.conn_id());
                        Datum::String(&schema.name.schema)
                    })
                    .collect(),
            )
        }
        UnmaterializableFunc::ViewableVariables => pack_dict(
            viewable_variables(state, session)
                .map(|var| (var.name().to_lowercase(), var.value()))
                .collect(),
        ),
        UnmaterializableFunc::CurrentTimestamp => {
            let t: Datum = session.pcx().wall_time.try_into()?;
            pack(t)
        }
        UnmaterializableFunc::CurrentUser => pack(Datum::from(
            state.get_role(session.current_role_id()).name(),
        )),
        UnmaterializableFunc::SessionUser => pack(Datum::from(
            state.get_role(session.session_role_id()).name(),
        )),
        UnmaterializableFunc::IsRbacEnabled => pack(Datum::from(
            rbac::is_rbac_enabled_for_session(state.system_config(), session),
        )),
        UnmaterializableFunc::MzEnvironmentId => {
            pack(Datum::from(&*state.config().environment_id.to_string()))
        }
        UnmaterializableFunc::MzIsSuperuser => pack(Datum::from(session.is_superuser())),
        UnmaterializableFunc::MzNow => match logical_time {
            EvalTime::Time(logical_time) => pack(Datum::MzTimestamp(logical_time)),
            EvalTime::Deferred => Ok(MirScalarExpr::CallUnmaterializable(f.clone())),
            EvalTime::NotAvailable => Err(OptimizerError::UncallableFunction {
                func: UnmaterializableFunc::MzNow,
                context: "this",
            }),
        },
        UnmaterializableFunc::MzRoleOidMemberships => {
            let role_memberships = role_oid_memberships(state);
            let mut role_memberships: Vec<(_, Vec<_>)> = role_memberships
                .into_iter()
                .map(|(role_id, role_membership)| {
                    (
                        role_id.to_string(),
                        role_membership
                            .into_iter()
                            .map(|role_id| role_id.to_string())
                            .collect(),
                    )
                })
                .collect();
            role_memberships.sort();
            let mut row = Row::default();
            row.packer().push_dict_with(|row| {
                for (role_id, role_membership) in &role_memberships {
                    row.push(Datum::from(role_id.as_str()));
                    row.try_push_array(
                        &[ArrayDimension {
                            lower_bound: 1,
                            length: role_membership.len(),
                        }],
                        role_membership.iter().map(|role_id| Datum::from(role_id.as_str())),
                    ).expect("role_membership is 1 dimensional, and its length is used for the array length");
                }
            });
            Ok(MirScalarExpr::Literal(Ok(row), f.output_type()))
        }
        UnmaterializableFunc::MzSessionId => pack(Datum::from(state.config().session_id)),
        UnmaterializableFunc::MzUptime => {
            let uptime = state.config().start_instant.elapsed();
            let uptime = chrono::Duration::from_std(uptime).map_or(Datum::Null, Datum::from);
            pack(uptime)
        }
        UnmaterializableFunc::MzVersion => pack(Datum::from(
            &*state
                .config()
                .build_info
                .human_version(state.config().helm_chart_version.clone()),
        )),
        UnmaterializableFunc::MzVersionNum => {
            pack(Datum::Int32(state.config().build_info.version_num()))
        }
        UnmaterializableFunc::PgBackendPid => pack(Datum::Int32(i32::reinterpret_cast(
            session.conn_id().unhandled(),
        ))),
        UnmaterializableFunc::PgPostmasterStartTime => {
            let t: Datum = state.config().start_time.try_into()?;
            pack(t)
        }
        UnmaterializableFunc::Version => {
            let build_info = state.config().build_info;
            let version = format!(
                "PostgreSQL {}.{} on {} (Materialize {})",
                SERVER_MAJOR_VERSION,
                SERVER_MINOR_VERSION,
                mz_build_info::TARGET_TRIPLE,
                build_info.version,
            );
            pack(Datum::from(&*version))
        }
    }
}

/// Computes, for every role in the catalog, the set of OIDs of the roles it
/// transitively belongs to (including itself).
fn role_oid_memberships<'a>(catalog: &'a CatalogState) -> BTreeMap<u32, BTreeSet<u32>> {
    let mut role_memberships = BTreeMap::new();
    for role_id in catalog.get_roles() {
        let role = catalog.get_role(role_id);
        if !role_memberships.contains_key(&role.oid) {
            role_oid_memberships_inner(catalog, role_id, &mut role_memberships);
        }
    }
    role_memberships
}

/// Recursively populates `role_memberships` with the transitive membership
/// OIDs of `role_id` and all of its parent roles.
fn role_oid_memberships_inner<'a>(
    catalog: &'a CatalogState,
    role_id: &RoleId,
    role_memberships: &mut BTreeMap<u32, BTreeSet<u32>>,
) {
    let role = catalog.get_role(role_id);
    role_memberships.insert(role.oid, btreeset! {role.oid});
    for parent_role_id in role.membership.map.keys() {
        let parent_role = catalog.get_role(parent_role_id);
        if !role_memberships.contains_key(&parent_role.oid) {
            role_oid_memberships_inner(catalog, parent_role_id, role_memberships);
        }
        let parent_membership: BTreeSet<_> = role_memberships
            .get(&parent_role.oid)
            .expect("inserted in recursive call above")
            .into_iter()
            .cloned()
            .collect();
        role_memberships
            .get_mut(&role.oid)
            .expect("inserted above")
            .extend(parent_membership);
    }
}