use std::collections::BTreeSet;
use std::error::Error;
use std::num::{ParseIntError, TryFromIntError};
use std::sync::Arc;
use std::time::Duration;
use std::{fmt, io};

use itertools::Itertools;
use mz_expr::EvalError;
use mz_mysql_util::MySqlError;
use mz_ore::error::ErrorExt;
use mz_ore::stack::RecursionLimitError;
use mz_ore::str::{StrExt, separated};
use mz_postgres_util::PostgresError;
use mz_repr::adt::char::InvalidCharLengthError;
use mz_repr::adt::mz_acl_item::AclMode;
use mz_repr::adt::numeric::InvalidNumericMaxScaleError;
use mz_repr::adt::timestamp::InvalidTimestampPrecisionError;
use mz_repr::adt::varchar::InvalidVarCharMaxLengthError;
use mz_repr::{CatalogItemId, ColumnName, strconv};
use mz_sql_parser::ast::display::AstDisplay;
use mz_sql_parser::ast::{IdentError, UnresolvedItemName};
use mz_sql_parser::parser::{ParserError, ParserStatementError};
use mz_sql_server_util::SqlServerError;
use mz_storage_types::sources::ExternalReferenceResolutionError;

use crate::catalog::{
    CatalogError, CatalogItemType, ErrorMessageObjectDescription, SystemObjectType,
};
use crate::names::{PartialItemName, ResolvedItemName};
use crate::plan::ObjectType;
use crate::plan::plan_utils::JoinSide;
use crate::plan::scope::ScopeItem;
use crate::plan::typeconv::CastContext;
use crate::pure::error::{
    CsrPurificationError, KafkaSinkPurificationError, KafkaSourcePurificationError,
    LoadGeneratorSourcePurificationError, MySqlSourcePurificationError, PgSourcePurificationError,
    SqlServerSourcePurificationError,
};
use crate::session::vars::VarError;
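
/// An error that can occur while planning a SQL statement.
///
/// Beyond the primary message produced by its `Display` implementation, a
/// `PlanError` may carry an optional detail and hint (see the `detail` and
/// `hint` methods below) that clients can render separately. For example
/// (illustrative only, using variants and methods defined in this module):
///
/// ```ignore
/// let err = PlanError::UpsertSinkWithoutKey;
/// println!("ERROR: {err}");
/// if let Some(hint) = err.hint() {
///     println!("HINT: {hint}");
/// }
/// ```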
#[derive(Clone, Debug)]
pub enum PlanError {
    Unsupported {
        feature: String,
        discussion_no: Option<usize>,
    },
    NeverSupported {
        feature: String,
        documentation_link: Option<String>,
        details: Option<String>,
    },
    UnknownColumn {
        table: Option<PartialItemName>,
        column: ColumnName,
        similar: Box<[ColumnName]>,
    },
    UngroupedColumn {
        table: Option<PartialItemName>,
        column: ColumnName,
    },
    WrongJoinTypeForLateralColumn {
        table: Option<PartialItemName>,
        column: ColumnName,
    },
    AmbiguousColumn(ColumnName),
    TooManyColumns {
        max_num_columns: usize,
        req_num_columns: usize,
    },
    ColumnAlreadyExists {
        column_name: ColumnName,
        object_name: String,
    },
    AmbiguousTable(PartialItemName),
    UnknownColumnInUsingClause {
        column: ColumnName,
        join_side: JoinSide,
    },
    AmbiguousColumnInUsingClause {
        column: ColumnName,
        join_side: JoinSide,
    },
    MisqualifiedName(String),
    OverqualifiedDatabaseName(String),
    OverqualifiedSchemaName(String),
    UnderqualifiedColumnName(String),
    SubqueriesDisallowed {
        context: String,
    },
    UnknownParameter(usize),
    ParameterNotAllowed(String),
    WrongParameterType(usize, String, String),
    RecursionLimit(RecursionLimitError),
    StrconvParse(strconv::ParseError),
    Catalog(CatalogError),
    UpsertSinkWithoutKey,
    UpsertSinkWithInvalidKey {
        name: String,
        desired_key: Vec<String>,
        valid_keys: Vec<Vec<String>>,
    },
    InvalidWmrRecursionLimit(String),
    InvalidNumericMaxScale(InvalidNumericMaxScaleError),
    InvalidCharLength(InvalidCharLengthError),
    InvalidId(CatalogItemId),
    InvalidIdent(IdentError),
    InvalidObject(Box<ResolvedItemName>),
    InvalidObjectType {
        expected_type: SystemObjectType,
        actual_type: SystemObjectType,
        object_name: String,
    },
    InvalidPrivilegeTypes {
        invalid_privileges: AclMode,
        object_description: ErrorMessageObjectDescription,
    },
    InvalidVarCharMaxLength(InvalidVarCharMaxLengthError),
    InvalidTimestampPrecision(InvalidTimestampPrecisionError),
    InvalidSecret(Box<ResolvedItemName>),
    InvalidTemporarySchema,
    InvalidCast {
        name: String,
        ccx: CastContext,
        from: String,
        to: String,
    },
    InvalidTable {
        name: String,
    },
    InvalidVersion {
        name: String,
        version: String,
    },
    MangedReplicaName(String),
    ParserStatement(ParserStatementError),
    Parser(ParserError),
    DropViewOnMaterializedView(String),
    DependentObjectsStillExist {
        object_type: String,
        object_name: String,
        dependents: Vec<(String, String)>,
    },
    AlterViewOnMaterializedView(String),
    ShowCreateViewOnMaterializedView(String),
    ExplainViewOnMaterializedView(String),
    UnacceptableTimelineName(String),
    FetchingCsrSchemaFailed {
        schema_lookup: String,
        cause: Arc<dyn Error + Send + Sync>,
    },
    PostgresConnectionErr {
        cause: Arc<mz_postgres_util::PostgresError>,
    },
    MySqlConnectionErr {
        cause: Arc<MySqlError>,
    },
    SqlServerConnectionErr {
        cause: Arc<SqlServerError>,
    },
    SubsourceNameConflict {
        name: UnresolvedItemName,
        upstream_references: Vec<UnresolvedItemName>,
    },
    SubsourceDuplicateReference {
        name: UnresolvedItemName,
        target_names: Vec<UnresolvedItemName>,
    },
    NoTablesFoundForSchemas(Vec<String>),
    InvalidProtobufSchema {
        cause: protobuf_native::OperationFailedError,
    },
    InvalidOptionValue {
        option_name: String,
        err: Box<PlanError>,
    },
    UnexpectedDuplicateReference {
        name: UnresolvedItemName,
    },
    RecursiveTypeMismatch(String, Vec<String>, Vec<String>),
    UnknownFunction {
        name: String,
        arg_types: Vec<String>,
    },
    IndistinctFunction {
        name: String,
        arg_types: Vec<String>,
    },
    UnknownOperator {
        name: String,
        arg_types: Vec<String>,
    },
    IndistinctOperator {
        name: String,
        arg_types: Vec<String>,
    },
    InvalidPrivatelinkAvailabilityZone {
        name: String,
        supported_azs: BTreeSet<String>,
    },
    DuplicatePrivatelinkAvailabilityZone {
        duplicate_azs: BTreeSet<String>,
    },
    InvalidSchemaName,
    ItemAlreadyExists {
        name: String,
        item_type: CatalogItemType,
    },
    ManagedCluster {
        cluster_name: String,
    },
    InvalidKeysInSubscribeEnvelopeUpsert,
    InvalidKeysInSubscribeEnvelopeDebezium,
    InvalidPartitionByEnvelopeDebezium {
        column_name: String,
    },
    InvalidOrderByInSubscribeWithinTimestampOrderBy,
    FromValueRequiresParen,
    VarError(VarError),
    UnsolvablePolymorphicFunctionInput,
    ShowCommandInView,
    WebhookValidationDoesNotUseColumns,
    WebhookValidationNonDeterministic,
    InternalFunctionCall,
    CommentTooLong {
        length: usize,
        max_size: usize,
    },
    InvalidTimestampInterval {
        min: Duration,
        max: Duration,
        requested: Duration,
    },
    InvalidGroupSizeHints,
    PgSourcePurification(PgSourcePurificationError),
    KafkaSourcePurification(KafkaSourcePurificationError),
    KafkaSinkPurification(KafkaSinkPurificationError),
    LoadGeneratorSourcePurification(LoadGeneratorSourcePurificationError),
    CsrPurification(CsrPurificationError),
    MySqlSourcePurification(MySqlSourcePurificationError),
    SqlServerSourcePurificationError(SqlServerSourcePurificationError),
    UseTablesForSources(String),
    MissingName(CatalogItemType),
    InvalidRefreshAt,
    InvalidRefreshEveryAlignedTo,
    CreateReplicaFailStorageObjects {
        current_replica_count: usize,
        internal_replica_count: usize,
        hypothetical_replica_count: usize,
    },
    MismatchedObjectType {
        name: PartialItemName,
        is_type: ObjectType,
        expected_type: ObjectType,
    },
    TableContainsUningestableTypes {
        name: String,
        type_: String,
        column: String,
    },
    RetainHistoryLow {
        limit: Duration,
    },
    RetainHistoryRequired,
    UntilReadyTimeoutRequired,
    SubsourceResolutionError(ExternalReferenceResolutionError),
    Replan(String),
    NetworkPolicyLockoutError,
    NetworkPolicyInUse,
    ConstantExpressionSimplificationFailed(String),
    InvalidOffset(String),
    Unstructured(String),
}

impl PlanError {
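    /// Constructs an `UngroupedColumn` error for the given scope item.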
    pub(crate) fn ungrouped_column(item: &ScopeItem) -> PlanError {
        PlanError::UngroupedColumn {
            table: item.table_name.clone(),
            column: item.column_name.clone(),
        }
    }
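
    /// Reports additional detail about the error, if any is available.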
    pub fn detail(&self) -> Option<String> {
        match self {
            Self::NeverSupported { details, .. } => details.clone(),
            Self::FetchingCsrSchemaFailed { cause, .. } => Some(cause.to_string_with_causes()),
            Self::PostgresConnectionErr { cause } => Some(cause.to_string_with_causes()),
            Self::InvalidProtobufSchema { cause } => Some(cause.to_string_with_causes()),
            Self::InvalidOptionValue { err, .. } => err.detail(),
            Self::UpsertSinkWithInvalidKey {
                name,
                desired_key,
                valid_keys,
            } => {
                let valid_keys = if valid_keys.is_empty() {
                    "There are no known valid unique keys for the underlying relation.".into()
                } else {
                    format!(
                        "The following keys are known to be unique for the underlying relation:\n{}",
                        valid_keys
                            .iter()
                            .map(|k| {
                                format!(" ({})", k.iter().map(|c| c.as_str().quoted()).join(", "))
                            })
                            .join("\n"),
                    )
                };
                Some(format!(
                    "Materialize could not prove that the specified upsert envelope key ({}) \
                    was a unique key of the underlying relation {}. {valid_keys}",
                    separated(", ", desired_key.iter().map(|c| c.as_str().quoted())),
                    name.quoted()
                ))
            }
            Self::VarError(e) => e.detail(),
            Self::InternalFunctionCall => Some("This function is for the internal use of the database system and cannot be called directly.".into()),
            Self::PgSourcePurification(e) => e.detail(),
            Self::MySqlSourcePurification(e) => e.detail(),
            Self::SqlServerSourcePurificationError(e) => e.detail(),
            Self::KafkaSourcePurification(e) => e.detail(),
            Self::LoadGeneratorSourcePurification(e) => e.detail(),
            Self::CsrPurification(e) => e.detail(),
            Self::KafkaSinkPurification(e) => e.detail(),
            Self::CreateReplicaFailStorageObjects {
                current_replica_count: current,
                internal_replica_count: internal,
                hypothetical_replica_count: target,
            } => {
                Some(format!(
                    "Currently have {} replica{}{}; command would result in {}",
                    current,
                    if *current != 1 { "s" } else { "" },
                    if *internal > 0 {
                        format!(" ({} internal)", internal)
                    } else {
                        "".to_string()
                    },
                    target
                ))
            }
            Self::SubsourceNameConflict {
                name: _,
                upstream_references,
            } => Some(format!(
                "referenced tables with duplicate name: {}",
                itertools::join(upstream_references, ", ")
            )),
            Self::SubsourceDuplicateReference {
                name: _,
                target_names,
            } => Some(format!(
                "subsources referencing table: {}",
                itertools::join(target_names, ", ")
            )),
            Self::InvalidPartitionByEnvelopeDebezium { .. } => Some(
                "When using ENVELOPE DEBEZIUM, only columns in the key can be referenced in the PARTITION BY expression.".to_string()
            ),
            Self::NoTablesFoundForSchemas(schemas) => Some(format!(
                "missing schemas: {}",
                separated(", ", schemas.iter().map(|c| c.quoted()))
            )),
            _ => None,
        }
    }
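
    /// Reports a hint about how the error might be resolved, if one is available.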
    pub fn hint(&self) -> Option<String> {
        match self {
            Self::DropViewOnMaterializedView(_) => {
                Some("Use DROP MATERIALIZED VIEW to remove a materialized view.".into())
            }
            Self::DependentObjectsStillExist { .. } => {
                Some("Use DROP ... CASCADE to drop the dependent objects too.".into())
            }
            Self::AlterViewOnMaterializedView(_) => {
                Some("Use ALTER MATERIALIZED VIEW to rename a materialized view.".into())
            }
            Self::ShowCreateViewOnMaterializedView(_) => {
                Some("Use SHOW CREATE MATERIALIZED VIEW to show a materialized view.".into())
            }
            Self::ExplainViewOnMaterializedView(_) => {
                Some("Use EXPLAIN [...] MATERIALIZED VIEW to explain a materialized view.".into())
            }
            Self::UnacceptableTimelineName(_) => {
                Some("The prefix \"mz_\" is reserved for system timelines.".into())
            }
            Self::PostgresConnectionErr { cause } => {
                if let Some(cause) = cause.source() {
                    if let Some(cause) = cause.downcast_ref::<io::Error>() {
                        if cause.kind() == io::ErrorKind::TimedOut {
                            return Some(
                                "Do you have a firewall or security group that is \
                                preventing Materialize from connecting to your PostgreSQL server?"
                                    .into(),
                            );
                        }
                    }
                }
                None
            }
            Self::InvalidOptionValue { err, .. } => err.hint(),
            Self::UnknownFunction { .. } => {
                Some("No function matches the given name and argument types. You might need to add explicit type casts.".into())
            }
            Self::IndistinctFunction { .. } => {
                Some("Could not choose a best candidate function. You might need to add explicit type casts.".into())
            }
            Self::UnknownOperator { .. } => {
                Some("No operator matches the given name and argument types. You might need to add explicit type casts.".into())
            }
            Self::IndistinctOperator { .. } => {
                Some("Could not choose a best candidate operator. You might need to add explicit type casts.".into())
            }
            Self::InvalidPrivatelinkAvailabilityZone { supported_azs, .. } => {
                let supported_azs_str = supported_azs.iter().join("\n ");
                Some(format!("Did you supply an availability zone name instead of an ID? Known availability zone IDs:\n {}", supported_azs_str))
            }
            Self::DuplicatePrivatelinkAvailabilityZone { duplicate_azs, .. } => {
                let duplicate_azs = duplicate_azs.iter().join("\n ");
                Some(format!("Duplicated availability zones:\n {}", duplicate_azs))
            }
            Self::InvalidKeysInSubscribeEnvelopeUpsert => {
                Some("All keys must be columns on the underlying relation.".into())
            }
            Self::InvalidKeysInSubscribeEnvelopeDebezium => {
                Some("All keys must be columns on the underlying relation.".into())
            }
            Self::InvalidOrderByInSubscribeWithinTimestampOrderBy => {
                Some("All order bys must be output columns.".into())
            }
            Self::UpsertSinkWithInvalidKey { .. } | Self::UpsertSinkWithoutKey => {
                Some("See: https://materialize.com/s/sink-key-selection".into())
            }
            Self::Catalog(e) => e.hint(),
            Self::VarError(e) => e.hint(),
            Self::PgSourcePurification(e) => e.hint(),
            Self::MySqlSourcePurification(e) => e.hint(),
            Self::SqlServerSourcePurificationError(e) => e.hint(),
            Self::KafkaSourcePurification(e) => e.hint(),
            Self::LoadGeneratorSourcePurification(e) => e.hint(),
            Self::CsrPurification(e) => e.hint(),
            Self::KafkaSinkPurification(e) => e.hint(),
            Self::UnknownColumn { table, similar, .. } => {
                let suffix = "Make sure to surround case sensitive names in double quotes.";
                match &similar[..] {
                    [] => None,
                    [column] => Some(format!("The similarly named column {} does exist. {suffix}", ColumnDisplay { table, column })),
                    names => {
                        let similar = names.into_iter().map(|column| ColumnDisplay { table, column }).join(", ");
                        Some(format!("There are similarly named columns that do exist: {similar}. {suffix}"))
                    }
                }
            }
            Self::RecursiveTypeMismatch(..) => {
                Some("You will need to rewrite or cast the query's expressions.".into())
            }
            Self::InvalidRefreshAt | Self::InvalidRefreshEveryAlignedTo => {
                Some("Calling `mz_now()` is allowed.".into())
            }
            Self::TableContainsUningestableTypes { column, .. } => {
                Some(format!("Remove the table or use TEXT COLUMNS ({column}, ..) to ingest this column as text"))
            }
            Self::RetainHistoryLow { .. } | Self::RetainHistoryRequired => {
                Some("Use ALTER ... RESET (RETAIN HISTORY) to set the retain history to its default and lowest value.".into())
            }
            Self::NetworkPolicyInUse => {
                Some("Use ALTER SYSTEM SET 'network_policy' to change the default network policy.".into())
            }
            Self::WrongParameterType(_, _, _) => {
                Some("EXECUTE automatically inserts only such casts that are allowed in an assignment cast context. Try adding an explicit cast.".into())
            }
            Self::InvalidSchemaName => {
                Some("Use SET schema = name to select a schema. Use SHOW SCHEMAS to list available schemas. Use SHOW search_path to show the schema names that we looked for, but none of them existed.".into())
            }
            _ => None,
        }
    }
}
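
// The `Display` implementation produces the primary, single-line error
// message; additional context is reported separately via `detail` and `hint`.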
impl fmt::Display for PlanError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Self::Unsupported { feature, discussion_no } => {
                write!(f, "{} not yet supported", feature)?;
                if let Some(discussion_no) = discussion_no {
                    write!(f, ", see https://github.com/MaterializeInc/materialize/discussions/{} for more details", discussion_no)?;
                }
                Ok(())
            }
            Self::NeverSupported { feature, documentation_link: documentation_path, .. } => {
                write!(f, "{feature} is not supported")?;
                if let Some(documentation_path) = documentation_path {
                    write!(f, ", for more information consult the documentation at https://materialize.com/docs/{documentation_path}")?;
                }
                Ok(())
            }
            Self::UnknownColumn { table, column, similar: _ } => write!(
                f,
                "column {} does not exist",
                ColumnDisplay { table, column }
            ),
            Self::UngroupedColumn { table, column } => write!(
                f,
                "column {} must appear in the GROUP BY clause or be used in an aggregate function",
                ColumnDisplay { table, column },
            ),
            Self::WrongJoinTypeForLateralColumn { table, column } => write!(
                f,
                "column {} cannot be referenced from this part of the query: \
                the combining JOIN type must be INNER or LEFT for a LATERAL reference",
                ColumnDisplay { table, column },
            ),
            Self::AmbiguousColumn(column) => write!(
                f,
                "column reference {} is ambiguous",
                column.quoted()
            ),
            Self::TooManyColumns { max_num_columns, req_num_columns } => write!(
                f,
                "attempt to create relation with too many columns, {} max: {}",
                req_num_columns, max_num_columns
            ),
            Self::ColumnAlreadyExists { column_name, object_name } => write!(
                f,
                "column {} of relation {} already exists",
                column_name.quoted(),
                object_name.quoted(),
            ),
            Self::AmbiguousTable(table) => write!(
                f,
                "table reference {} is ambiguous",
                table.item.as_str().quoted()
            ),
            Self::UnknownColumnInUsingClause { column, join_side } => write!(
                f,
                "column {} specified in USING clause does not exist in {} table",
                column.quoted(),
                join_side,
            ),
            Self::AmbiguousColumnInUsingClause { column, join_side } => write!(
                f,
                "common column name {} appears more than once in {} table",
                column.quoted(),
                join_side,
            ),
            Self::MisqualifiedName(name) => write!(
                f,
                "qualified name did not have between 1 and 3 components: {}",
                name
            ),
            Self::OverqualifiedDatabaseName(name) => write!(
                f,
                "database name '{}' does not have exactly one component",
                name
            ),
            Self::OverqualifiedSchemaName(name) => write!(
                f,
                "schema name '{}' cannot have more than two components",
                name
            ),
            Self::UnderqualifiedColumnName(name) => write!(
                f,
                "column name '{}' must have at least a table qualification",
                name
            ),
            Self::UnacceptableTimelineName(name) => {
                write!(f, "unacceptable timeline name {}", name.quoted())
            }
            Self::SubqueriesDisallowed { context } => {
                write!(f, "{} does not allow subqueries", context)
            }
            Self::UnknownParameter(n) => write!(f, "there is no parameter ${}", n),
            Self::ParameterNotAllowed(object_type) => {
                write!(f, "{} cannot have parameters", object_type)
            }
            Self::WrongParameterType(i, expected_ty, actual_ty) => write!(
                f,
                "unable to cast given parameter ${}: expected {}, got {}",
                i, expected_ty, actual_ty
            ),
            Self::RecursionLimit(e) => write!(f, "{}", e),
            Self::StrconvParse(e) => write!(f, "{}", e),
            Self::Catalog(e) => write!(f, "{}", e),
            Self::UpsertSinkWithoutKey => write!(f, "upsert sinks must specify a key"),
            Self::UpsertSinkWithInvalidKey { .. } => {
                write!(f, "upsert key could not be validated as unique")
            }
            Self::InvalidWmrRecursionLimit(msg) => {
                write!(f, "Invalid WITH MUTUALLY RECURSIVE recursion limit. {}", msg)
            }
            Self::InvalidNumericMaxScale(e) => e.fmt(f),
            Self::InvalidCharLength(e) => e.fmt(f),
            Self::InvalidVarCharMaxLength(e) => e.fmt(f),
            Self::InvalidTimestampPrecision(e) => e.fmt(f),
            Self::Parser(e) => e.fmt(f),
            Self::ParserStatement(e) => e.fmt(f),
            Self::Unstructured(e) => write!(f, "{}", e),
            Self::InvalidId(id) => write!(f, "invalid id {}", id),
            Self::InvalidIdent(err) => write!(f, "invalid identifier, {err}"),
            Self::InvalidObject(i) => write!(f, "{} is not a database object", i.full_name_str()),
            Self::InvalidObjectType { expected_type, actual_type, object_name } => {
                write!(f, "{actual_type} {object_name} is not a {expected_type}")
            }
            Self::InvalidPrivilegeTypes { invalid_privileges, object_description } => {
                write!(
                    f,
                    "invalid privilege types {} for {}",
                    invalid_privileges.to_error_string(),
                    object_description
                )
            }
            Self::InvalidSecret(i) => write!(f, "{} is not a secret", i.full_name_str()),
            Self::InvalidTemporarySchema => {
                write!(f, "cannot create temporary item in non-temporary schema")
            }
            Self::InvalidCast { name, ccx, from, to } => {
                write!(
                    f,
                    "{name} does not support {ccx}casting from {from} to {to}",
                    ccx = if matches!(ccx, CastContext::Implicit) {
                        "implicitly "
                    } else {
                        ""
                    },
                )
            }
            Self::InvalidTable { name } => {
                write!(f, "invalid table definition for {}", name.quoted())
            }
            Self::InvalidVersion { name, version } => {
                write!(f, "invalid version {} for {}", version.quoted(), name.quoted())
            }
            Self::DropViewOnMaterializedView(name)
            | Self::AlterViewOnMaterializedView(name)
            | Self::ShowCreateViewOnMaterializedView(name)
            | Self::ExplainViewOnMaterializedView(name) => write!(f, "{name} is not a view"),
            Self::FetchingCsrSchemaFailed { schema_lookup, .. } => {
                write!(f, "failed to fetch schema {schema_lookup} from schema registry")
            }
            Self::PostgresConnectionErr { .. } => {
                write!(f, "failed to connect to PostgreSQL database")
            }
            Self::MySqlConnectionErr { cause } => {
                write!(f, "failed to connect to MySQL database: {}", cause)
            }
            Self::SqlServerConnectionErr { cause } => {
                write!(f, "failed to connect to SQL Server database: {}", cause)
            }
            Self::SubsourceNameConflict { name, upstream_references: _ } => {
                write!(f, "multiple subsources would be named {}", name)
            }
            Self::SubsourceDuplicateReference { name, target_names: _ } => {
                write!(f, "multiple subsources refer to table {}", name)
            }
            Self::NoTablesFoundForSchemas(schemas) => {
                write!(
                    f,
                    "no tables found in referenced schemas: {}",
                    separated(", ", schemas.iter().map(|c| c.quoted()))
                )
            }
            Self::InvalidProtobufSchema { .. } => {
                write!(f, "invalid protobuf schema")
            }
            Self::DependentObjectsStillExist { object_type, object_name, dependents } => {
                let reason = match &dependents[..] {
                    [] => " because other objects depend on it".to_string(),
                    dependents => {
                        let dependents = dependents
                            .iter()
                            .map(|(dependent_type, dependent_name)| {
                                format!("{} {}", dependent_type, dependent_name.quoted())
                            })
                            .join(", ");
                        format!(": still depended upon by {dependents}")
                    }
                };
                let object_name = object_name.quoted();
                write!(f, "cannot drop {object_type} {object_name}{reason}")
            }
            Self::InvalidOptionValue { option_name, err } => {
                write!(f, "invalid {} option value: {}", option_name, err)
            }
            Self::UnexpectedDuplicateReference { name } => {
                write!(f, "unexpected multiple references to {}", name.to_ast_string_simple())
            }
            Self::RecursiveTypeMismatch(name, declared, inferred) => {
                let declared = separated(", ", declared);
                let inferred = separated(", ", inferred);
                let name = name.quoted();
                write!(f, "WITH MUTUALLY RECURSIVE query {name} declared types ({declared}), but query returns types ({inferred})")
            }
            Self::UnknownFunction { name, arg_types, .. } => {
                write!(f, "function {}({}) does not exist", name, arg_types.join(", "))
            }
            Self::IndistinctFunction { name, arg_types, .. } => {
                write!(f, "function {}({}) is not unique", name, arg_types.join(", "))
            }
            Self::UnknownOperator { name, arg_types, .. } => {
                write!(f, "operator does not exist: {}", match arg_types.as_slice() {
                    [typ] => format!("{} {}", name, typ),
                    [ltyp, rtyp] => format!("{} {} {}", ltyp, name, rtyp),
                    _ => unreachable!("non-unary non-binary operator"),
                })
            }
            Self::IndistinctOperator { name, arg_types, .. } => {
                write!(f, "operator is not unique: {}", match arg_types.as_slice() {
                    [typ] => format!("{} {}", name, typ),
                    [ltyp, rtyp] => format!("{} {} {}", ltyp, name, rtyp),
                    _ => unreachable!("non-unary non-binary operator"),
                })
            }
            Self::InvalidPrivatelinkAvailabilityZone { name, .. } => {
                write!(f, "invalid AWS PrivateLink availability zone {}", name.quoted())
            }
            Self::DuplicatePrivatelinkAvailabilityZone { .. } => {
                write!(f, "connection cannot contain duplicate availability zones")
            }
            Self::InvalidSchemaName => write!(f, "no valid schema selected"),
            Self::ItemAlreadyExists { name, item_type } => {
                write!(f, "{item_type} {} already exists", name.quoted())
            }
            Self::ManagedCluster { cluster_name } => {
                write!(f, "cannot modify managed cluster {cluster_name}")
            }
            Self::InvalidKeysInSubscribeEnvelopeUpsert => {
                write!(f, "invalid keys in SUBSCRIBE ENVELOPE UPSERT (KEY (..))")
            }
            Self::InvalidKeysInSubscribeEnvelopeDebezium => {
                write!(f, "invalid keys in SUBSCRIBE ENVELOPE DEBEZIUM (KEY (..))")
            }
            Self::InvalidPartitionByEnvelopeDebezium { column_name } => {
                write!(
                    f,
                    "PARTITION BY expression cannot refer to non-key column {}",
                    column_name.quoted(),
                )
            }
            Self::InvalidOrderByInSubscribeWithinTimestampOrderBy => {
                write!(f, "invalid ORDER BY in SUBSCRIBE WITHIN TIMESTAMP ORDER BY")
            }
            Self::FromValueRequiresParen => f.write_str(
                "VALUES expression in FROM clause must be surrounded by parentheses",
            ),
            Self::VarError(e) => e.fmt(f),
            Self::UnsolvablePolymorphicFunctionInput => f.write_str(
                "could not determine polymorphic type because input has type unknown",
            ),
            Self::ShowCommandInView => f.write_str("SHOW commands are not allowed in views"),
            Self::WebhookValidationDoesNotUseColumns => f.write_str(
                "expression provided in CHECK does not reference any columns",
            ),
            Self::WebhookValidationNonDeterministic => f.write_str(
                "expression provided in CHECK is not deterministic",
            ),
            Self::InternalFunctionCall => {
                f.write_str("cannot call function with arguments of type internal")
            }
            Self::CommentTooLong { length, max_size } => {
                write!(f, "provided comment was {length} bytes long, max size is {max_size} bytes")
            }
            Self::InvalidTimestampInterval { min, max, requested } => {
                write!(
                    f,
                    "invalid timestamp interval of {}ms, must be in the range [{}ms, {}ms]",
                    requested.as_millis(),
                    min.as_millis(),
                    max.as_millis()
                )
            }
            Self::InvalidGroupSizeHints => f.write_str(
                "EXPECTED GROUP SIZE cannot be provided \
                simultaneously with any of AGGREGATE INPUT GROUP SIZE, DISTINCT ON INPUT GROUP SIZE, \
                or LIMIT INPUT GROUP SIZE",
            ),
            Self::PgSourcePurification(e) => write!(f, "POSTGRES source validation: {}", e),
            Self::KafkaSourcePurification(e) => write!(f, "KAFKA source validation: {}", e),
            Self::LoadGeneratorSourcePurification(e) => {
                write!(f, "LOAD GENERATOR source validation: {}", e)
            }
            Self::KafkaSinkPurification(e) => write!(f, "KAFKA sink validation: {}", e),
            Self::CsrPurification(e) => write!(f, "CONFLUENT SCHEMA REGISTRY validation: {}", e),
            Self::MySqlSourcePurification(e) => write!(f, "MYSQL source validation: {}", e),
            Self::SqlServerSourcePurificationError(e) => {
                write!(f, "SQL SERVER source validation: {}", e)
            }
            Self::UseTablesForSources(command) => {
                write!(f, "{command} not supported; use CREATE TABLE .. FROM SOURCE instead")
            }
            Self::MangedReplicaName(name) => {
                write!(f, "{name} is reserved for replicas of managed clusters")
            }
            Self::MissingName(item_type) => {
                write!(f, "unspecified name for {item_type}")
            }
            Self::InvalidRefreshAt => {
                write!(f, "REFRESH AT argument must be an expression that can be simplified \
                    and/or cast to a constant whose type is mz_timestamp")
            }
            Self::InvalidRefreshEveryAlignedTo => {
                write!(f, "REFRESH EVERY ... ALIGNED TO argument must be an expression that can be simplified \
                    and/or cast to a constant whose type is mz_timestamp")
            }
            Self::CreateReplicaFailStorageObjects { .. } => {
                write!(f, "cannot create more than one replica of a cluster containing sources or sinks")
            }
            Self::MismatchedObjectType { name, is_type, expected_type } => {
                write!(
                    f,
                    "{name} is {} {} not {} {}",
                    if *is_type == ObjectType::Index { "an" } else { "a" },
                    is_type.to_string().to_lowercase(),
                    if *expected_type == ObjectType::Index { "an" } else { "a" },
                    expected_type.to_string().to_lowercase()
                )
            }
            Self::TableContainsUningestableTypes { name, type_, column } => {
                write!(f, "table {name} contains column {column} of type {type_} which Materialize cannot currently ingest")
            }
            Self::RetainHistoryLow { limit } => {
                write!(f, "RETAIN HISTORY cannot be set lower than {}ms", limit.as_millis())
            }
            Self::RetainHistoryRequired => {
                write!(f, "RETAIN HISTORY cannot be disabled or set to 0")
            }
            Self::SubsourceResolutionError(e) => write!(f, "{}", e),
            Self::Replan(msg) => {
                write!(f, "internal error while replanning, please contact support: {msg}")
            }
            Self::NetworkPolicyLockoutError => write!(f, "policy would block current session IP"),
            Self::NetworkPolicyInUse => write!(f, "network policy is currently in use"),
            Self::UntilReadyTimeoutRequired => {
                write!(f, "TIMEOUT=<duration> option is required for ALTER CLUSTER ... WITH (WAIT UNTIL READY ( ... ))")
            }
            Self::ConstantExpressionSimplificationFailed(e) => write!(f, "{}", e),
            Self::InvalidOffset(e) => write!(f, "Invalid OFFSET clause: {}", e),
        }
    }
}

impl Error for PlanError {}

impl From<CatalogError> for PlanError {
    fn from(e: CatalogError) -> PlanError {
        PlanError::Catalog(e)
    }
}

impl From<strconv::ParseError> for PlanError {
    fn from(e: strconv::ParseError) -> PlanError {
        PlanError::StrconvParse(e)
    }
}

impl From<RecursionLimitError> for PlanError {
    fn from(e: RecursionLimitError) -> PlanError {
        PlanError::RecursionLimit(e)
    }
}

impl From<InvalidNumericMaxScaleError> for PlanError {
    fn from(e: InvalidNumericMaxScaleError) -> PlanError {
        PlanError::InvalidNumericMaxScale(e)
    }
}

impl From<InvalidCharLengthError> for PlanError {
    fn from(e: InvalidCharLengthError) -> PlanError {
        PlanError::InvalidCharLength(e)
    }
}

impl From<InvalidVarCharMaxLengthError> for PlanError {
    fn from(e: InvalidVarCharMaxLengthError) -> PlanError {
        PlanError::InvalidVarCharMaxLength(e)
    }
}

impl From<InvalidTimestampPrecisionError> for PlanError {
    fn from(e: InvalidTimestampPrecisionError) -> PlanError {
        PlanError::InvalidTimestampPrecision(e)
    }
}
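
// Errors without a dedicated `PlanError` variant are formatted, together with
// their causes, into a plan error via the `sql_err!` macro.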
impl From<anyhow::Error> for PlanError {
    fn from(e: anyhow::Error) -> PlanError {
        sql_err!("{}", e.display_with_causes())
    }
}

impl From<TryFromIntError> for PlanError {
    fn from(e: TryFromIntError) -> PlanError {
        sql_err!("{}", e.display_with_causes())
    }
}

impl From<ParseIntError> for PlanError {
    fn from(e: ParseIntError) -> PlanError {
        sql_err!("{}", e.display_with_causes())
    }
}

impl From<EvalError> for PlanError {
    fn from(e: EvalError) -> PlanError {
        sql_err!("{}", e.display_with_causes())
    }
}

impl From<ParserError> for PlanError {
    fn from(e: ParserError) -> PlanError {
        PlanError::Parser(e)
    }
}

impl From<ParserStatementError> for PlanError {
    fn from(e: ParserStatementError) -> PlanError {
        PlanError::ParserStatement(e)
    }
}

impl From<PostgresError> for PlanError {
    fn from(e: PostgresError) -> PlanError {
        PlanError::PostgresConnectionErr { cause: Arc::new(e) }
    }
}

impl From<MySqlError> for PlanError {
    fn from(e: MySqlError) -> PlanError {
        PlanError::MySqlConnectionErr { cause: Arc::new(e) }
    }
}

impl From<SqlServerError> for PlanError {
    fn from(e: SqlServerError) -> PlanError {
        PlanError::SqlServerConnectionErr { cause: Arc::new(e) }
    }
}

impl From<VarError> for PlanError {
    fn from(e: VarError) -> Self {
        PlanError::VarError(e)
    }
}

impl From<PgSourcePurificationError> for PlanError {
    fn from(e: PgSourcePurificationError) -> Self {
        PlanError::PgSourcePurification(e)
    }
}

impl From<KafkaSourcePurificationError> for PlanError {
    fn from(e: KafkaSourcePurificationError) -> Self {
        PlanError::KafkaSourcePurification(e)
    }
}

impl From<KafkaSinkPurificationError> for PlanError {
    fn from(e: KafkaSinkPurificationError) -> Self {
        PlanError::KafkaSinkPurification(e)
    }
}

impl From<CsrPurificationError> for PlanError {
    fn from(e: CsrPurificationError) -> Self {
        PlanError::CsrPurification(e)
    }
}

impl From<LoadGeneratorSourcePurificationError> for PlanError {
    fn from(e: LoadGeneratorSourcePurificationError) -> Self {
        PlanError::LoadGeneratorSourcePurification(e)
    }
}

impl From<MySqlSourcePurificationError> for PlanError {
    fn from(e: MySqlSourcePurificationError) -> Self {
        PlanError::MySqlSourcePurification(e)
    }
}

impl From<SqlServerSourcePurificationError> for PlanError {
    fn from(e: SqlServerSourcePurificationError) -> Self {
        PlanError::SqlServerSourcePurificationError(e)
    }
}

impl From<IdentError> for PlanError {
    fn from(e: IdentError) -> Self {
        PlanError::InvalidIdent(e)
    }
}

impl From<ExternalReferenceResolutionError> for PlanError {
    fn from(e: ExternalReferenceResolutionError) -> Self {
        PlanError::SubsourceResolutionError(e)
    }
}
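
/// Renders a column reference for error messages, qualified by its table name
/// when one is known, with the whole reference quoted.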
struct ColumnDisplay<'a> {
    table: &'a Option<PartialItemName>,
    column: &'a ColumnName,
}

impl<'a> fmt::Display for ColumnDisplay<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Some(table) = &self.table {
            format!("{}.{}", table.item, self.column).quoted().fmt(f)
        } else {
            self.column.quoted().fmt(f)
        }
    }
}