From 7db33518a4264e422294a1e20fbd1c1505d9a62d Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Tue, 12 Sep 2023 10:28:54 +0200 Subject: Merging upstream version 18.3.0. Signed-off-by: Daniel Baumann --- docs/sqlglot/dialects/bigquery.html | 1939 ++++++++++++++++++--------------- docs/sqlglot/dialects/clickhouse.html | 9 +- docs/sqlglot/dialects/databricks.html | 1 + docs/sqlglot/dialects/doris.html | 1 + docs/sqlglot/dialects/drill.html | 7 +- docs/sqlglot/dialects/duckdb.html | 17 +- docs/sqlglot/dialects/hive.html | 21 +- docs/sqlglot/dialects/mysql.html | 25 +- docs/sqlglot/dialects/oracle.html | 320 +++--- docs/sqlglot/dialects/postgres.html | 995 +++++++++-------- docs/sqlglot/dialects/presto.html | 21 +- docs/sqlglot/dialects/redshift.html | 5 +- docs/sqlglot/dialects/snowflake.html | 1796 ++++++++++++++++-------------- docs/sqlglot/dialects/spark.html | 1 + docs/sqlglot/dialects/spark2.html | 11 +- docs/sqlglot/dialects/sqlite.html | 3 +- docs/sqlglot/dialects/starrocks.html | 1 + docs/sqlglot/dialects/tableau.html | 1 + docs/sqlglot/dialects/teradata.html | 755 ++++++------- docs/sqlglot/dialects/trino.html | 1 + docs/sqlglot/dialects/tsql.html | 1110 +++++++++---------- 21 files changed, 3731 insertions(+), 3309 deletions(-) (limited to 'docs/sqlglot/dialects') diff --git a/docs/sqlglot/dialects/bigquery.html b/docs/sqlglot/dialects/bigquery.html index 2385883..b37cc7e 100644 --- a/docs/sqlglot/dialects/bigquery.html +++ b/docs/sqlglot/dialects/bigquery.html @@ -198,6 +198,9 @@
  • attimezone_sql
  • + cast_sql
  • trycast_sql
  • @@ -391,8 +394,8 @@ 30 31def _date_add_sql( 32 data_type: str, kind: str - 33) -> t.Callable[[generator.Generator, exp.Expression], str]: - 34 def func(self, expression): + 33) -> t.Callable[[BigQuery.Generator, exp.Expression], str]: + 34 def func(self: BigQuery.Generator, expression: exp.Expression) -> str: 35 this = self.sql(expression, "this") 36 unit = expression.args.get("unit") 37 unit = exp.var(unit.name.upper() if unit else "DAY") @@ -402,7 +405,7 @@ 41 return func 42 43 - 44def _derived_table_values_to_unnest(self: generator.Generator, expression: exp.Values) -> str: + 44def _derived_table_values_to_unnest(self: BigQuery.Generator, expression: exp.Values) -> str: 45 if not expression.find_ancestor(exp.From, exp.Join): 46 return self.values_sql(expression) 47 @@ -426,7 +429,7 @@ 65 return self.unnest_sql(exp.Unnest(expressions=[exp.Array(expressions=structs)])) 66 67 - 68def _returnsproperty_sql(self: generator.Generator, expression: exp.ReturnsProperty) -> str: + 68def _returnsproperty_sql(self: BigQuery.Generator, expression: exp.ReturnsProperty) -> str: 69 this = expression.this 70 if isinstance(this, exp.Schema): 71 this = f"{this.this} <{self.expressions(this)}>" @@ -435,7 +438,7 @@ 74 return f"RETURNS {this}" 75 76 - 77def _create_sql(self: generator.Generator, expression: exp.Create) -> str: + 77def _create_sql(self: BigQuery.Generator, expression: exp.Create) -> str: 78 kind = expression.args["kind"] 79 returns = expression.find(exp.ReturnsProperty) 80 @@ -719,7 +722,7 @@ 358 } 359 360 def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]: -361 this = super()._parse_table_part(schema=schema) +361 this = super()._parse_table_part(schema=schema) or self._parse_number() 362 363 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names 364 if isinstance(this, exp.Identifier): @@ -729,291 +732,330 @@ 368 table_name += f"-{self._prev.text}" 369 370 this = exp.Identifier(this=table_name, quoted=this.args.get("quoted")) -371 -372 return this +371 elif isinstance(this, exp.Literal): +372 table_name = this.name 373 -374 def _parse_table_parts(self, schema: bool = False) -> exp.Table: -375 table = super()._parse_table_parts(schema=schema) -376 if isinstance(table.this, exp.Identifier) and "." in table.name: -377 catalog, db, this, *rest = ( -378 t.cast(t.Optional[exp.Expression], exp.to_identifier(x)) -379 for x in split_num_words(table.name, ".", 3) -380 ) -381 -382 if rest and this: -383 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) +374 if ( +375 self._curr +376 and self._prev.end == self._curr.start - 1 +377 and self._parse_var(any_token=True) +378 ): +379 table_name += self._prev.text +380 +381 this = exp.Identifier(this=table_name, quoted=True) +382 +383 return this 384 -385 table = exp.Table(this=this, db=db, catalog=catalog) -386 -387 return table -388 -389 class Generator(generator.Generator): -390 EXPLICIT_UNION = True -391 INTERVAL_ALLOWS_PLURAL_FORM = False -392 JOIN_HINTS = False -393 QUERY_HINTS = False -394 TABLE_HINTS = False -395 LIMIT_FETCH = "LIMIT" -396 RENAME_TABLE_WITH_DB = False -397 ESCAPE_LINE_BREAK = True -398 NVL2_SUPPORTED = False +385 def _parse_table_parts(self, schema: bool = False) -> exp.Table: +386 table = super()._parse_table_parts(schema=schema) +387 if isinstance(table.this, exp.Identifier) and "." 
in table.name: +388 catalog, db, this, *rest = ( +389 t.cast(t.Optional[exp.Expression], exp.to_identifier(x)) +390 for x in split_num_words(table.name, ".", 3) +391 ) +392 +393 if rest and this: +394 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) +395 +396 table = exp.Table(this=this, db=db, catalog=catalog) +397 +398 return table 399 -400 TRANSFORMS = { -401 **generator.Generator.TRANSFORMS, -402 exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"), -403 exp.ArraySize: rename_func("ARRAY_LENGTH"), -404 exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), -405 exp.Create: _create_sql, -406 exp.CTE: transforms.preprocess([_pushdown_cte_column_names]), -407 exp.DateAdd: _date_add_sql("DATE", "ADD"), -408 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", -409 exp.DateFromParts: rename_func("DATE"), -410 exp.DateStrToDate: datestrtodate_sql, -411 exp.DateSub: _date_add_sql("DATE", "SUB"), -412 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), -413 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), -414 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), -415 exp.GenerateSeries: rename_func("GENERATE_ARRAY"), -416 exp.GroupConcat: rename_func("STRING_AGG"), -417 exp.Hex: rename_func("TO_HEX"), -418 exp.ILike: no_ilike_sql, -419 exp.IntDiv: rename_func("DIV"), -420 exp.JSONFormat: rename_func("TO_JSON_STRING"), -421 exp.JSONKeyValue: json_keyvalue_comma_sql, -422 exp.Max: max_or_greatest, -423 exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)), -424 exp.MD5Digest: rename_func("MD5"), -425 exp.Min: min_or_least, -426 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", -427 exp.RegexpExtract: lambda self, e: self.func( -428 "REGEXP_EXTRACT", -429 e.this, -430 e.expression, -431 e.args.get("position"), -432 e.args.get("occurrence"), -433 ), -434 exp.RegexpReplace: regexp_replace_sql, -435 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), -436 exp.ReturnsProperty: _returnsproperty_sql, -437 exp.Select: transforms.preprocess( -438 [ -439 transforms.explode_to_unnest, -440 _unqualify_unnest, -441 transforms.eliminate_distinct_on, -442 _alias_ordered_group, -443 ] -444 ), -445 exp.SHA2: lambda self, e: self.func( -446 f"SHA256" if e.text("length") == "256" else "SHA512", e.this -447 ), -448 exp.StabilityProperty: lambda self, e: f"DETERMINISTIC" -449 if e.name == "IMMUTABLE" -450 else "NOT DETERMINISTIC", -451 exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})", -452 exp.StrToTime: lambda self, e: self.func( -453 "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone") -454 ), -455 exp.TimeAdd: _date_add_sql("TIME", "ADD"), -456 exp.TimeSub: _date_add_sql("TIME", "SUB"), -457 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), -458 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), -459 exp.TimeStrToTime: timestrtotime_sql, -460 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), -461 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), -462 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), -463 exp.Unhex: rename_func("FROM_HEX"), -464 exp.Values: _derived_table_values_to_unnest, -465 exp.VariancePop: rename_func("VAR_POP"), -466 } -467 -468 TYPE_MAPPING = { -469 **generator.Generator.TYPE_MAPPING, -470 exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC", -471 exp.DataType.Type.BIGINT: "INT64", -472 exp.DataType.Type.BINARY: "BYTES", 
-473 exp.DataType.Type.BOOLEAN: "BOOL", -474 exp.DataType.Type.CHAR: "STRING", -475 exp.DataType.Type.DECIMAL: "NUMERIC", -476 exp.DataType.Type.DOUBLE: "FLOAT64", -477 exp.DataType.Type.FLOAT: "FLOAT64", -478 exp.DataType.Type.INT: "INT64", -479 exp.DataType.Type.NCHAR: "STRING", -480 exp.DataType.Type.NVARCHAR: "STRING", -481 exp.DataType.Type.SMALLINT: "INT64", -482 exp.DataType.Type.TEXT: "STRING", -483 exp.DataType.Type.TIMESTAMP: "DATETIME", -484 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", -485 exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP", -486 exp.DataType.Type.TINYINT: "INT64", -487 exp.DataType.Type.VARBINARY: "BYTES", -488 exp.DataType.Type.VARCHAR: "STRING", -489 exp.DataType.Type.VARIANT: "ANY TYPE", -490 } -491 -492 PROPERTIES_LOCATION = { -493 **generator.Generator.PROPERTIES_LOCATION, -494 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, -495 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -496 } -497 -498 # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords -499 RESERVED_KEYWORDS = { -500 *generator.Generator.RESERVED_KEYWORDS, -501 "all", -502 "and", -503 "any", -504 "array", -505 "as", -506 "asc", -507 "assert_rows_modified", -508 "at", -509 "between", -510 "by", -511 "case", -512 "cast", -513 "collate", -514 "contains", -515 "create", -516 "cross", -517 "cube", -518 "current", -519 "default", -520 "define", -521 "desc", -522 "distinct", -523 "else", -524 "end", -525 "enum", -526 "escape", -527 "except", -528 "exclude", -529 "exists", -530 "extract", -531 "false", -532 "fetch", -533 "following", -534 "for", -535 "from", -536 "full", -537 "group", -538 "grouping", -539 "groups", -540 "hash", -541 "having", -542 "if", -543 "ignore", -544 "in", -545 "inner", -546 "intersect", -547 "interval", -548 "into", -549 "is", -550 "join", -551 "lateral", -552 "left", -553 "like", -554 "limit", -555 "lookup", -556 "merge", -557 "natural", -558 "new", -559 "no", -560 "not", -561 "null", -562 "nulls", -563 "of", -564 "on", -565 "or", -566 "order", -567 "outer", -568 "over", -569 "partition", -570 "preceding", -571 "proto", -572 "qualify", -573 "range", -574 "recursive", -575 "respect", -576 "right", -577 "rollup", -578 "rows", -579 "select", -580 "set", -581 "some", -582 "struct", -583 "tablesample", -584 "then", -585 "to", -586 "treat", -587 "true", -588 "unbounded", -589 "union", -590 "unnest", -591 "using", -592 "when", -593 "where", -594 "window", -595 "with", -596 "within", -597 } -598 -599 def attimezone_sql(self, expression: exp.AtTimeZone) -> str: -600 parent = expression.parent -601 -602 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). -603 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. 
-604 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): -605 return self.func( -606 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) -607 ) -608 -609 return super().attimezone_sql(expression) -610 -611 def trycast_sql(self, expression: exp.TryCast) -> str: -612 return self.cast_sql(expression, safe_prefix="SAFE_") -613 -614 def cte_sql(self, expression: exp.CTE) -> str: -615 if expression.alias_column_names: -616 self.unsupported("Column names in CTE definition are not supported.") -617 return super().cte_sql(expression) -618 -619 def array_sql(self, expression: exp.Array) -> str: -620 first_arg = seq_get(expression.expressions, 0) -621 if isinstance(first_arg, exp.Subqueryable): -622 return f"ARRAY{self.wrap(self.sql(first_arg))}" -623 -624 return inline_array_sql(self, expression) -625 -626 def transaction_sql(self, *_) -> str: -627 return "BEGIN TRANSACTION" -628 -629 def commit_sql(self, *_) -> str: -630 return "COMMIT TRANSACTION" -631 -632 def rollback_sql(self, *_) -> str: -633 return "ROLLBACK TRANSACTION" -634 -635 def in_unnest_op(self, expression: exp.Unnest) -> str: -636 return self.sql(expression) -637 -638 def except_op(self, expression: exp.Except) -> str: -639 if not expression.args.get("distinct", False): -640 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") -641 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +400 def _parse_json_object(self) -> exp.JSONObject: +401 json_object = super()._parse_json_object() +402 array_kv_pair = seq_get(json_object.expressions, 0) +403 +404 # Converts BQ's "signature 2" of JSON_OBJECT into SQLGlot's canonical representation +405 # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2 +406 if ( +407 array_kv_pair +408 and isinstance(array_kv_pair.this, exp.Array) +409 and isinstance(array_kv_pair.expression, exp.Array) +410 ): +411 keys = array_kv_pair.this.expressions +412 values = array_kv_pair.expression.expressions +413 +414 json_object.set( +415 "expressions", +416 [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)], +417 ) +418 +419 return json_object +420 +421 class Generator(generator.Generator): +422 EXPLICIT_UNION = True +423 INTERVAL_ALLOWS_PLURAL_FORM = False +424 JOIN_HINTS = False +425 QUERY_HINTS = False +426 TABLE_HINTS = False +427 LIMIT_FETCH = "LIMIT" +428 RENAME_TABLE_WITH_DB = False +429 ESCAPE_LINE_BREAK = True +430 NVL2_SUPPORTED = False +431 +432 TRANSFORMS = { +433 **generator.Generator.TRANSFORMS, +434 exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"), +435 exp.ArraySize: rename_func("ARRAY_LENGTH"), +436 exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), +437 exp.Create: _create_sql, +438 exp.CTE: transforms.preprocess([_pushdown_cte_column_names]), +439 exp.DateAdd: _date_add_sql("DATE", "ADD"), +440 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", +441 exp.DateFromParts: rename_func("DATE"), +442 exp.DateStrToDate: datestrtodate_sql, +443 exp.DateSub: _date_add_sql("DATE", "SUB"), +444 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), +445 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), +446 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), +447 exp.GenerateSeries: rename_func("GENERATE_ARRAY"), +448 exp.GroupConcat: rename_func("STRING_AGG"), +449 exp.Hex: rename_func("TO_HEX"), +450 
exp.ILike: no_ilike_sql, +451 exp.IntDiv: rename_func("DIV"), +452 exp.JSONFormat: rename_func("TO_JSON_STRING"), +453 exp.JSONKeyValue: json_keyvalue_comma_sql, +454 exp.Max: max_or_greatest, +455 exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)), +456 exp.MD5Digest: rename_func("MD5"), +457 exp.Min: min_or_least, +458 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", +459 exp.RegexpExtract: lambda self, e: self.func( +460 "REGEXP_EXTRACT", +461 e.this, +462 e.expression, +463 e.args.get("position"), +464 e.args.get("occurrence"), +465 ), +466 exp.RegexpReplace: regexp_replace_sql, +467 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), +468 exp.ReturnsProperty: _returnsproperty_sql, +469 exp.Select: transforms.preprocess( +470 [ +471 transforms.explode_to_unnest, +472 _unqualify_unnest, +473 transforms.eliminate_distinct_on, +474 _alias_ordered_group, +475 ] +476 ), +477 exp.SHA2: lambda self, e: self.func( +478 f"SHA256" if e.text("length") == "256" else "SHA512", e.this +479 ), +480 exp.StabilityProperty: lambda self, e: f"DETERMINISTIC" +481 if e.name == "IMMUTABLE" +482 else "NOT DETERMINISTIC", +483 exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})", +484 exp.StrToTime: lambda self, e: self.func( +485 "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone") +486 ), +487 exp.TimeAdd: _date_add_sql("TIME", "ADD"), +488 exp.TimeSub: _date_add_sql("TIME", "SUB"), +489 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), +490 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), +491 exp.TimeStrToTime: timestrtotime_sql, +492 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), +493 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), +494 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), +495 exp.Unhex: rename_func("FROM_HEX"), +496 exp.Values: _derived_table_values_to_unnest, +497 exp.VariancePop: rename_func("VAR_POP"), +498 } +499 +500 TYPE_MAPPING = { +501 **generator.Generator.TYPE_MAPPING, +502 exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC", +503 exp.DataType.Type.BIGINT: "INT64", +504 exp.DataType.Type.BINARY: "BYTES", +505 exp.DataType.Type.BOOLEAN: "BOOL", +506 exp.DataType.Type.CHAR: "STRING", +507 exp.DataType.Type.DECIMAL: "NUMERIC", +508 exp.DataType.Type.DOUBLE: "FLOAT64", +509 exp.DataType.Type.FLOAT: "FLOAT64", +510 exp.DataType.Type.INT: "INT64", +511 exp.DataType.Type.NCHAR: "STRING", +512 exp.DataType.Type.NVARCHAR: "STRING", +513 exp.DataType.Type.SMALLINT: "INT64", +514 exp.DataType.Type.TEXT: "STRING", +515 exp.DataType.Type.TIMESTAMP: "DATETIME", +516 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", +517 exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP", +518 exp.DataType.Type.TINYINT: "INT64", +519 exp.DataType.Type.VARBINARY: "BYTES", +520 exp.DataType.Type.VARCHAR: "STRING", +521 exp.DataType.Type.VARIANT: "ANY TYPE", +522 } +523 +524 PROPERTIES_LOCATION = { +525 **generator.Generator.PROPERTIES_LOCATION, +526 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, +527 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, +528 } +529 +530 # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords +531 RESERVED_KEYWORDS = { +532 *generator.Generator.RESERVED_KEYWORDS, +533 "all", +534 "and", +535 "any", +536 "array", +537 "as", +538 "asc", +539 "assert_rows_modified", +540 "at", +541 "between", +542 "by", +543 "case", +544 "cast", +545 "collate", +546 "contains", +547 "create", +548 "cross", +549 "cube", +550 "current", 
+551 "default", +552 "define", +553 "desc", +554 "distinct", +555 "else", +556 "end", +557 "enum", +558 "escape", +559 "except", +560 "exclude", +561 "exists", +562 "extract", +563 "false", +564 "fetch", +565 "following", +566 "for", +567 "from", +568 "full", +569 "group", +570 "grouping", +571 "groups", +572 "hash", +573 "having", +574 "if", +575 "ignore", +576 "in", +577 "inner", +578 "intersect", +579 "interval", +580 "into", +581 "is", +582 "join", +583 "lateral", +584 "left", +585 "like", +586 "limit", +587 "lookup", +588 "merge", +589 "natural", +590 "new", +591 "no", +592 "not", +593 "null", +594 "nulls", +595 "of", +596 "on", +597 "or", +598 "order", +599 "outer", +600 "over", +601 "partition", +602 "preceding", +603 "proto", +604 "qualify", +605 "range", +606 "recursive", +607 "respect", +608 "right", +609 "rollup", +610 "rows", +611 "select", +612 "set", +613 "some", +614 "struct", +615 "tablesample", +616 "then", +617 "to", +618 "treat", +619 "true", +620 "unbounded", +621 "union", +622 "unnest", +623 "using", +624 "when", +625 "where", +626 "window", +627 "with", +628 "within", +629 } +630 +631 def attimezone_sql(self, expression: exp.AtTimeZone) -> str: +632 parent = expression.parent +633 +634 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). +635 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. +636 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): +637 return self.func( +638 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) +639 ) +640 +641 return super().attimezone_sql(expression) 642 -643 def intersect_op(self, expression: exp.Intersect) -> str: -644 if not expression.args.get("distinct", False): -645 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") -646 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +643 def cast_sql(self, expression: exp.Cast, safe_prefix: t.Optional[str] = None) -> str: +644 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#json_literals +645 if expression.is_type("json"): +646 return f"JSON {self.sql(expression, 'this')}" 647 -648 def with_properties(self, properties: exp.Properties) -> str: -649 return self.properties(properties, prefix=self.seg("OPTIONS")) -650 -651 def version_sql(self, expression: exp.Version) -> str: -652 if expression.name == "TIMESTAMP": -653 expression = expression.copy() -654 expression.set("this", "SYSTEM_TIME") -655 return super().version_sql(expression) +648 return super().cast_sql(expression, safe_prefix=safe_prefix) +649 +650 def trycast_sql(self, expression: exp.TryCast) -> str: +651 return self.cast_sql(expression, safe_prefix="SAFE_") +652 +653 def cte_sql(self, expression: exp.CTE) -> str: +654 if expression.alias_column_names: +655 self.unsupported("Column names in CTE definition are not supported.") +656 return super().cte_sql(expression) +657 +658 def array_sql(self, expression: exp.Array) -> str: +659 first_arg = seq_get(expression.expressions, 0) +660 if isinstance(first_arg, exp.Subqueryable): +661 return f"ARRAY{self.wrap(self.sql(first_arg))}" +662 +663 return inline_array_sql(self, expression) +664 +665 def transaction_sql(self, *_) -> str: +666 return "BEGIN TRANSACTION" +667 +668 def commit_sql(self, *_) -> str: +669 return "COMMIT TRANSACTION" +670 +671 def rollback_sql(self, *_) -> str: +672 return "ROLLBACK TRANSACTION" +673 +674 def in_unnest_op(self, expression: exp.Unnest) -> str: +675 return 
self.sql(expression) +676 +677 def except_op(self, expression: exp.Except) -> str: +678 if not expression.args.get("distinct", False): +679 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") +680 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +681 +682 def intersect_op(self, expression: exp.Intersect) -> str: +683 if not expression.args.get("distinct", False): +684 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") +685 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +686 +687 def with_properties(self, properties: exp.Properties) -> str: +688 return self.properties(properties, prefix=self.seg("OPTIONS")) +689 +690 def version_sql(self, expression: exp.Version) -> str: +691 if expression.name == "TIMESTAMP": +692 expression = expression.copy() +693 expression.set("this", "SYSTEM_TIME") +694 return super().version_sql(expression) @@ -1214,7 +1256,7 @@ 359 } 360 361 def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]: -362 this = super()._parse_table_part(schema=schema) +362 this = super()._parse_table_part(schema=schema) or self._parse_number() 363 364 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names 365 if isinstance(this, exp.Identifier): @@ -1224,291 +1266,330 @@ 369 table_name += f"-{self._prev.text}" 370 371 this = exp.Identifier(this=table_name, quoted=this.args.get("quoted")) -372 -373 return this +372 elif isinstance(this, exp.Literal): +373 table_name = this.name 374 -375 def _parse_table_parts(self, schema: bool = False) -> exp.Table: -376 table = super()._parse_table_parts(schema=schema) -377 if isinstance(table.this, exp.Identifier) and "." in table.name: -378 catalog, db, this, *rest = ( -379 t.cast(t.Optional[exp.Expression], exp.to_identifier(x)) -380 for x in split_num_words(table.name, ".", 3) -381 ) -382 -383 if rest and this: -384 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) +375 if ( +376 self._curr +377 and self._prev.end == self._curr.start - 1 +378 and self._parse_var(any_token=True) +379 ): +380 table_name += self._prev.text +381 +382 this = exp.Identifier(this=table_name, quoted=True) +383 +384 return this 385 -386 table = exp.Table(this=this, db=db, catalog=catalog) -387 -388 return table -389 -390 class Generator(generator.Generator): -391 EXPLICIT_UNION = True -392 INTERVAL_ALLOWS_PLURAL_FORM = False -393 JOIN_HINTS = False -394 QUERY_HINTS = False -395 TABLE_HINTS = False -396 LIMIT_FETCH = "LIMIT" -397 RENAME_TABLE_WITH_DB = False -398 ESCAPE_LINE_BREAK = True -399 NVL2_SUPPORTED = False +386 def _parse_table_parts(self, schema: bool = False) -> exp.Table: +387 table = super()._parse_table_parts(schema=schema) +388 if isinstance(table.this, exp.Identifier) and "." 
in table.name: +389 catalog, db, this, *rest = ( +390 t.cast(t.Optional[exp.Expression], exp.to_identifier(x)) +391 for x in split_num_words(table.name, ".", 3) +392 ) +393 +394 if rest and this: +395 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) +396 +397 table = exp.Table(this=this, db=db, catalog=catalog) +398 +399 return table 400 -401 TRANSFORMS = { -402 **generator.Generator.TRANSFORMS, -403 exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"), -404 exp.ArraySize: rename_func("ARRAY_LENGTH"), -405 exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), -406 exp.Create: _create_sql, -407 exp.CTE: transforms.preprocess([_pushdown_cte_column_names]), -408 exp.DateAdd: _date_add_sql("DATE", "ADD"), -409 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", -410 exp.DateFromParts: rename_func("DATE"), -411 exp.DateStrToDate: datestrtodate_sql, -412 exp.DateSub: _date_add_sql("DATE", "SUB"), -413 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), -414 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), -415 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), -416 exp.GenerateSeries: rename_func("GENERATE_ARRAY"), -417 exp.GroupConcat: rename_func("STRING_AGG"), -418 exp.Hex: rename_func("TO_HEX"), -419 exp.ILike: no_ilike_sql, -420 exp.IntDiv: rename_func("DIV"), -421 exp.JSONFormat: rename_func("TO_JSON_STRING"), -422 exp.JSONKeyValue: json_keyvalue_comma_sql, -423 exp.Max: max_or_greatest, -424 exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)), -425 exp.MD5Digest: rename_func("MD5"), -426 exp.Min: min_or_least, -427 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", -428 exp.RegexpExtract: lambda self, e: self.func( -429 "REGEXP_EXTRACT", -430 e.this, -431 e.expression, -432 e.args.get("position"), -433 e.args.get("occurrence"), -434 ), -435 exp.RegexpReplace: regexp_replace_sql, -436 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), -437 exp.ReturnsProperty: _returnsproperty_sql, -438 exp.Select: transforms.preprocess( -439 [ -440 transforms.explode_to_unnest, -441 _unqualify_unnest, -442 transforms.eliminate_distinct_on, -443 _alias_ordered_group, -444 ] -445 ), -446 exp.SHA2: lambda self, e: self.func( -447 f"SHA256" if e.text("length") == "256" else "SHA512", e.this -448 ), -449 exp.StabilityProperty: lambda self, e: f"DETERMINISTIC" -450 if e.name == "IMMUTABLE" -451 else "NOT DETERMINISTIC", -452 exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})", -453 exp.StrToTime: lambda self, e: self.func( -454 "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone") -455 ), -456 exp.TimeAdd: _date_add_sql("TIME", "ADD"), -457 exp.TimeSub: _date_add_sql("TIME", "SUB"), -458 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), -459 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), -460 exp.TimeStrToTime: timestrtotime_sql, -461 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), -462 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), -463 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), -464 exp.Unhex: rename_func("FROM_HEX"), -465 exp.Values: _derived_table_values_to_unnest, -466 exp.VariancePop: rename_func("VAR_POP"), -467 } -468 -469 TYPE_MAPPING = { -470 **generator.Generator.TYPE_MAPPING, -471 exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC", -472 exp.DataType.Type.BIGINT: "INT64", -473 exp.DataType.Type.BINARY: "BYTES", 
-474 exp.DataType.Type.BOOLEAN: "BOOL", -475 exp.DataType.Type.CHAR: "STRING", -476 exp.DataType.Type.DECIMAL: "NUMERIC", -477 exp.DataType.Type.DOUBLE: "FLOAT64", -478 exp.DataType.Type.FLOAT: "FLOAT64", -479 exp.DataType.Type.INT: "INT64", -480 exp.DataType.Type.NCHAR: "STRING", -481 exp.DataType.Type.NVARCHAR: "STRING", -482 exp.DataType.Type.SMALLINT: "INT64", -483 exp.DataType.Type.TEXT: "STRING", -484 exp.DataType.Type.TIMESTAMP: "DATETIME", -485 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", -486 exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP", -487 exp.DataType.Type.TINYINT: "INT64", -488 exp.DataType.Type.VARBINARY: "BYTES", -489 exp.DataType.Type.VARCHAR: "STRING", -490 exp.DataType.Type.VARIANT: "ANY TYPE", -491 } -492 -493 PROPERTIES_LOCATION = { -494 **generator.Generator.PROPERTIES_LOCATION, -495 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, -496 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -497 } -498 -499 # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords -500 RESERVED_KEYWORDS = { -501 *generator.Generator.RESERVED_KEYWORDS, -502 "all", -503 "and", -504 "any", -505 "array", -506 "as", -507 "asc", -508 "assert_rows_modified", -509 "at", -510 "between", -511 "by", -512 "case", -513 "cast", -514 "collate", -515 "contains", -516 "create", -517 "cross", -518 "cube", -519 "current", -520 "default", -521 "define", -522 "desc", -523 "distinct", -524 "else", -525 "end", -526 "enum", -527 "escape", -528 "except", -529 "exclude", -530 "exists", -531 "extract", -532 "false", -533 "fetch", -534 "following", -535 "for", -536 "from", -537 "full", -538 "group", -539 "grouping", -540 "groups", -541 "hash", -542 "having", -543 "if", -544 "ignore", -545 "in", -546 "inner", -547 "intersect", -548 "interval", -549 "into", -550 "is", -551 "join", -552 "lateral", -553 "left", -554 "like", -555 "limit", -556 "lookup", -557 "merge", -558 "natural", -559 "new", -560 "no", -561 "not", -562 "null", -563 "nulls", -564 "of", -565 "on", -566 "or", -567 "order", -568 "outer", -569 "over", -570 "partition", -571 "preceding", -572 "proto", -573 "qualify", -574 "range", -575 "recursive", -576 "respect", -577 "right", -578 "rollup", -579 "rows", -580 "select", -581 "set", -582 "some", -583 "struct", -584 "tablesample", -585 "then", -586 "to", -587 "treat", -588 "true", -589 "unbounded", -590 "union", -591 "unnest", -592 "using", -593 "when", -594 "where", -595 "window", -596 "with", -597 "within", -598 } -599 -600 def attimezone_sql(self, expression: exp.AtTimeZone) -> str: -601 parent = expression.parent -602 -603 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). -604 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. 
-605 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): -606 return self.func( -607 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) -608 ) -609 -610 return super().attimezone_sql(expression) -611 -612 def trycast_sql(self, expression: exp.TryCast) -> str: -613 return self.cast_sql(expression, safe_prefix="SAFE_") -614 -615 def cte_sql(self, expression: exp.CTE) -> str: -616 if expression.alias_column_names: -617 self.unsupported("Column names in CTE definition are not supported.") -618 return super().cte_sql(expression) -619 -620 def array_sql(self, expression: exp.Array) -> str: -621 first_arg = seq_get(expression.expressions, 0) -622 if isinstance(first_arg, exp.Subqueryable): -623 return f"ARRAY{self.wrap(self.sql(first_arg))}" -624 -625 return inline_array_sql(self, expression) -626 -627 def transaction_sql(self, *_) -> str: -628 return "BEGIN TRANSACTION" -629 -630 def commit_sql(self, *_) -> str: -631 return "COMMIT TRANSACTION" -632 -633 def rollback_sql(self, *_) -> str: -634 return "ROLLBACK TRANSACTION" -635 -636 def in_unnest_op(self, expression: exp.Unnest) -> str: -637 return self.sql(expression) -638 -639 def except_op(self, expression: exp.Except) -> str: -640 if not expression.args.get("distinct", False): -641 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") -642 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +401 def _parse_json_object(self) -> exp.JSONObject: +402 json_object = super()._parse_json_object() +403 array_kv_pair = seq_get(json_object.expressions, 0) +404 +405 # Converts BQ's "signature 2" of JSON_OBJECT into SQLGlot's canonical representation +406 # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2 +407 if ( +408 array_kv_pair +409 and isinstance(array_kv_pair.this, exp.Array) +410 and isinstance(array_kv_pair.expression, exp.Array) +411 ): +412 keys = array_kv_pair.this.expressions +413 values = array_kv_pair.expression.expressions +414 +415 json_object.set( +416 "expressions", +417 [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)], +418 ) +419 +420 return json_object +421 +422 class Generator(generator.Generator): +423 EXPLICIT_UNION = True +424 INTERVAL_ALLOWS_PLURAL_FORM = False +425 JOIN_HINTS = False +426 QUERY_HINTS = False +427 TABLE_HINTS = False +428 LIMIT_FETCH = "LIMIT" +429 RENAME_TABLE_WITH_DB = False +430 ESCAPE_LINE_BREAK = True +431 NVL2_SUPPORTED = False +432 +433 TRANSFORMS = { +434 **generator.Generator.TRANSFORMS, +435 exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"), +436 exp.ArraySize: rename_func("ARRAY_LENGTH"), +437 exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), +438 exp.Create: _create_sql, +439 exp.CTE: transforms.preprocess([_pushdown_cte_column_names]), +440 exp.DateAdd: _date_add_sql("DATE", "ADD"), +441 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", +442 exp.DateFromParts: rename_func("DATE"), +443 exp.DateStrToDate: datestrtodate_sql, +444 exp.DateSub: _date_add_sql("DATE", "SUB"), +445 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), +446 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), +447 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), +448 exp.GenerateSeries: rename_func("GENERATE_ARRAY"), +449 exp.GroupConcat: rename_func("STRING_AGG"), +450 exp.Hex: rename_func("TO_HEX"), +451 
exp.ILike: no_ilike_sql, +452 exp.IntDiv: rename_func("DIV"), +453 exp.JSONFormat: rename_func("TO_JSON_STRING"), +454 exp.JSONKeyValue: json_keyvalue_comma_sql, +455 exp.Max: max_or_greatest, +456 exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)), +457 exp.MD5Digest: rename_func("MD5"), +458 exp.Min: min_or_least, +459 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", +460 exp.RegexpExtract: lambda self, e: self.func( +461 "REGEXP_EXTRACT", +462 e.this, +463 e.expression, +464 e.args.get("position"), +465 e.args.get("occurrence"), +466 ), +467 exp.RegexpReplace: regexp_replace_sql, +468 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), +469 exp.ReturnsProperty: _returnsproperty_sql, +470 exp.Select: transforms.preprocess( +471 [ +472 transforms.explode_to_unnest, +473 _unqualify_unnest, +474 transforms.eliminate_distinct_on, +475 _alias_ordered_group, +476 ] +477 ), +478 exp.SHA2: lambda self, e: self.func( +479 f"SHA256" if e.text("length") == "256" else "SHA512", e.this +480 ), +481 exp.StabilityProperty: lambda self, e: f"DETERMINISTIC" +482 if e.name == "IMMUTABLE" +483 else "NOT DETERMINISTIC", +484 exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})", +485 exp.StrToTime: lambda self, e: self.func( +486 "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone") +487 ), +488 exp.TimeAdd: _date_add_sql("TIME", "ADD"), +489 exp.TimeSub: _date_add_sql("TIME", "SUB"), +490 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), +491 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), +492 exp.TimeStrToTime: timestrtotime_sql, +493 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), +494 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), +495 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), +496 exp.Unhex: rename_func("FROM_HEX"), +497 exp.Values: _derived_table_values_to_unnest, +498 exp.VariancePop: rename_func("VAR_POP"), +499 } +500 +501 TYPE_MAPPING = { +502 **generator.Generator.TYPE_MAPPING, +503 exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC", +504 exp.DataType.Type.BIGINT: "INT64", +505 exp.DataType.Type.BINARY: "BYTES", +506 exp.DataType.Type.BOOLEAN: "BOOL", +507 exp.DataType.Type.CHAR: "STRING", +508 exp.DataType.Type.DECIMAL: "NUMERIC", +509 exp.DataType.Type.DOUBLE: "FLOAT64", +510 exp.DataType.Type.FLOAT: "FLOAT64", +511 exp.DataType.Type.INT: "INT64", +512 exp.DataType.Type.NCHAR: "STRING", +513 exp.DataType.Type.NVARCHAR: "STRING", +514 exp.DataType.Type.SMALLINT: "INT64", +515 exp.DataType.Type.TEXT: "STRING", +516 exp.DataType.Type.TIMESTAMP: "DATETIME", +517 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", +518 exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP", +519 exp.DataType.Type.TINYINT: "INT64", +520 exp.DataType.Type.VARBINARY: "BYTES", +521 exp.DataType.Type.VARCHAR: "STRING", +522 exp.DataType.Type.VARIANT: "ANY TYPE", +523 } +524 +525 PROPERTIES_LOCATION = { +526 **generator.Generator.PROPERTIES_LOCATION, +527 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, +528 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, +529 } +530 +531 # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords +532 RESERVED_KEYWORDS = { +533 *generator.Generator.RESERVED_KEYWORDS, +534 "all", +535 "and", +536 "any", +537 "array", +538 "as", +539 "asc", +540 "assert_rows_modified", +541 "at", +542 "between", +543 "by", +544 "case", +545 "cast", +546 "collate", +547 "contains", +548 "create", +549 "cross", +550 "cube", +551 "current", 
+552 "default", +553 "define", +554 "desc", +555 "distinct", +556 "else", +557 "end", +558 "enum", +559 "escape", +560 "except", +561 "exclude", +562 "exists", +563 "extract", +564 "false", +565 "fetch", +566 "following", +567 "for", +568 "from", +569 "full", +570 "group", +571 "grouping", +572 "groups", +573 "hash", +574 "having", +575 "if", +576 "ignore", +577 "in", +578 "inner", +579 "intersect", +580 "interval", +581 "into", +582 "is", +583 "join", +584 "lateral", +585 "left", +586 "like", +587 "limit", +588 "lookup", +589 "merge", +590 "natural", +591 "new", +592 "no", +593 "not", +594 "null", +595 "nulls", +596 "of", +597 "on", +598 "or", +599 "order", +600 "outer", +601 "over", +602 "partition", +603 "preceding", +604 "proto", +605 "qualify", +606 "range", +607 "recursive", +608 "respect", +609 "right", +610 "rollup", +611 "rows", +612 "select", +613 "set", +614 "some", +615 "struct", +616 "tablesample", +617 "then", +618 "to", +619 "treat", +620 "true", +621 "unbounded", +622 "union", +623 "unnest", +624 "using", +625 "when", +626 "where", +627 "window", +628 "with", +629 "within", +630 } +631 +632 def attimezone_sql(self, expression: exp.AtTimeZone) -> str: +633 parent = expression.parent +634 +635 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). +636 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. +637 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): +638 return self.func( +639 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) +640 ) +641 +642 return super().attimezone_sql(expression) 643 -644 def intersect_op(self, expression: exp.Intersect) -> str: -645 if not expression.args.get("distinct", False): -646 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") -647 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +644 def cast_sql(self, expression: exp.Cast, safe_prefix: t.Optional[str] = None) -> str: +645 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#json_literals +646 if expression.is_type("json"): +647 return f"JSON {self.sql(expression, 'this')}" 648 -649 def with_properties(self, properties: exp.Properties) -> str: -650 return self.properties(properties, prefix=self.seg("OPTIONS")) -651 -652 def version_sql(self, expression: exp.Version) -> str: -653 if expression.name == "TIMESTAMP": -654 expression = expression.copy() -655 expression.set("this", "SYSTEM_TIME") -656 return super().version_sql(expression) +649 return super().cast_sql(expression, safe_prefix=safe_prefix) +650 +651 def trycast_sql(self, expression: exp.TryCast) -> str: +652 return self.cast_sql(expression, safe_prefix="SAFE_") +653 +654 def cte_sql(self, expression: exp.CTE) -> str: +655 if expression.alias_column_names: +656 self.unsupported("Column names in CTE definition are not supported.") +657 return super().cte_sql(expression) +658 +659 def array_sql(self, expression: exp.Array) -> str: +660 first_arg = seq_get(expression.expressions, 0) +661 if isinstance(first_arg, exp.Subqueryable): +662 return f"ARRAY{self.wrap(self.sql(first_arg))}" +663 +664 return inline_array_sql(self, expression) +665 +666 def transaction_sql(self, *_) -> str: +667 return "BEGIN TRANSACTION" +668 +669 def commit_sql(self, *_) -> str: +670 return "COMMIT TRANSACTION" +671 +672 def rollback_sql(self, *_) -> str: +673 return "ROLLBACK TRANSACTION" +674 +675 def in_unnest_op(self, expression: exp.Unnest) -> str: +676 return 
self.sql(expression) +677 +678 def except_op(self, expression: exp.Except) -> str: +679 if not expression.args.get("distinct", False): +680 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") +681 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +682 +683 def intersect_op(self, expression: exp.Intersect) -> str: +684 if not expression.args.get("distinct", False): +685 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") +686 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +687 +688 def with_properties(self, properties: exp.Properties) -> str: +689 return self.properties(properties, prefix=self.seg("OPTIONS")) +690 +691 def version_sql(self, expression: exp.Version) -> str: +692 if expression.name == "TIMESTAMP": +693 expression = expression.copy() +694 expression.set("this", "SYSTEM_TIME") +695 return super().version_sql(expression) @@ -2134,7 +2215,7 @@ they will be normalized regardless of being quoted or not.

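For orientation, a minimal sketch (not part of this patch) of how the Generator changes above surface through sqlglot's public transpile API; the output strings in the comments are illustrative and may vary between sqlglot versions:

    import sqlglot

    # trycast_sql adds the SAFE_ prefix, and the TYPE_MAPPING turns INT into INT64.
    print(sqlglot.transpile("SELECT TRY_CAST(x AS INT)", write="bigquery")[0])
    # expected, approximately: SELECT SAFE_CAST(x AS INT64)

    # The newly added cast_sql renders casts to JSON as BigQuery JSON literals.
    print(sqlglot.transpile("SELECT CAST('{\"a\": 1}' AS JSON)", write="bigquery")[0])
    # expected, approximately: SELECT JSON '{"a": 1}'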
    359 } 360 361 def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]: -362 this = super()._parse_table_part(schema=schema) +362 this = super()._parse_table_part(schema=schema) or self._parse_number() 363 364 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names 365 if isinstance(this, exp.Identifier): @@ -2144,23 +2225,55 @@ they will be normalized regardless of being quoted or not.

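As a quick illustration of the _parse_table_part changes above (a sketch, not part of the patch; the project, dataset, and table names are made up), dash-separated BigQuery project ids are glued back into a single identifier, and table-name segments may now also begin with digits:

    import sqlglot
    from sqlglot import exp

    table = sqlglot.parse_one(
        "SELECT * FROM my-project.my_dataset.my_table", read="bigquery"
    ).find(exp.Table)
    # roughly: catalog='my-project', db='my_dataset', name='my_table'
    print(table.catalog, table.db, table.name)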
    369 table_name += f"-{self._prev.text}" 370 371 this = exp.Identifier(this=table_name, quoted=this.args.get("quoted")) -372 -373 return this +372 elif isinstance(this, exp.Literal): +373 table_name = this.name 374 -375 def _parse_table_parts(self, schema: bool = False) -> exp.Table: -376 table = super()._parse_table_parts(schema=schema) -377 if isinstance(table.this, exp.Identifier) and "." in table.name: -378 catalog, db, this, *rest = ( -379 t.cast(t.Optional[exp.Expression], exp.to_identifier(x)) -380 for x in split_num_words(table.name, ".", 3) -381 ) -382 -383 if rest and this: -384 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) +375 if ( +376 self._curr +377 and self._prev.end == self._curr.start - 1 +378 and self._parse_var(any_token=True) +379 ): +380 table_name += self._prev.text +381 +382 this = exp.Identifier(this=table_name, quoted=True) +383 +384 return this 385 -386 table = exp.Table(this=this, db=db, catalog=catalog) -387 -388 return table +386 def _parse_table_parts(self, schema: bool = False) -> exp.Table: +387 table = super()._parse_table_parts(schema=schema) +388 if isinstance(table.this, exp.Identifier) and "." in table.name: +389 catalog, db, this, *rest = ( +390 t.cast(t.Optional[exp.Expression], exp.to_identifier(x)) +391 for x in split_num_words(table.name, ".", 3) +392 ) +393 +394 if rest and this: +395 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) +396 +397 table = exp.Table(this=this, db=db, catalog=catalog) +398 +399 return table +400 +401 def _parse_json_object(self) -> exp.JSONObject: +402 json_object = super()._parse_json_object() +403 array_kv_pair = seq_get(json_object.expressions, 0) +404 +405 # Converts BQ's "signature 2" of JSON_OBJECT into SQLGlot's canonical representation +406 # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2 +407 if ( +408 array_kv_pair +409 and isinstance(array_kv_pair.this, exp.Array) +410 and isinstance(array_kv_pair.expression, exp.Array) +411 ): +412 keys = array_kv_pair.this.expressions +413 values = array_kv_pair.expression.expressions +414 +415 json_object.set( +416 "expressions", +417 [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)], +418 ) +419 +420 return json_object @@ -2272,7 +2385,7 @@ Default: 3
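The _parse_json_object hook above normalizes BigQuery's array-based JSON_OBJECT "signature 2" into the canonical key/value pairs; a hedged round-trip sketch (not part of the patch, output approximate):

    import sqlglot

    sql = "SELECT JSON_OBJECT(['a', 'b'], [1, 2])"
    print(sqlglot.parse_one(sql, read="bigquery").sql("bigquery"))
    # expected, approximately: SELECT JSON_OBJECT('a', 1, 'b', 2)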
    NESTED_TYPE_TOKENS = - {<TokenType.STRUCT: 'STRUCT'>, <TokenType.TABLE: 'TABLE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.MAP: 'MAP'>, <TokenType.NESTED: 'NESTED'>, <TokenType.NULLABLE: 'NULLABLE'>} + {<TokenType.TABLE: 'TABLE'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.NESTED: 'NESTED'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.MAP: 'MAP'>}
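The nested type tokens interact with the TYPE_MAPPING shown earlier; a small sketch (not part of the patch, output approximate) of how element types inside ARRAY/STRUCT are rewritten for BigQuery:

    import sqlglot

    print(sqlglot.transpile("SELECT CAST(x AS ARRAY<INT>)", write="bigquery")[0])
    # expected, approximately: SELECT CAST(x AS ARRAY<INT64>)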
    @@ -2285,7 +2398,7 @@ Default: 3
    ID_VAR_TOKENS = - {<TokenType.END: 'END'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SUPER: 'SUPER'>, <TokenType.UINT: 'UINT'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.SET: 'SET'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.JSONB: 'JSONB'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.FILTER: 'FILTER'>, <TokenType.UUID: 'UUID'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.JSON: 'JSON'>, <TokenType.UINT128: 'UINT128'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.MAP: 'MAP'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.FIRST: 'FIRST'>, <TokenType.ANY: 'ANY'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.IS: 'IS'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.DIV: 'DIV'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.KEEP: 'KEEP'>, <TokenType.VALUES: 'VALUES'>, <TokenType.LOAD: 'LOAD'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.MERGE: 'MERGE'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.INT: 'INT'>, <TokenType.INT256: 'INT256'>, <TokenType.ALL: 'ALL'>, <TokenType.NULL: 'NULL'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.YEAR: 'YEAR'>, <TokenType.TOP: 'TOP'>, <TokenType.ENUM: 'ENUM'>, <TokenType.BIT: 'BIT'>, <TokenType.SEMI: 'SEMI'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.DESC: 'DESC'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.TIME: 'TIME'>, <TokenType.VIEW: 'VIEW'>, <TokenType.XML: 'XML'>, <TokenType.INT128: 'INT128'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.ASC: 'ASC'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.SHOW: 'SHOW'>, <TokenType.INDEX: 'INDEX'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.FALSE: 'FALSE'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.BINARY: 'BINARY'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.DATE: 'DATE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.CASE: 'CASE'>, 
<TokenType.VAR: 'VAR'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.APPLY: 'APPLY'>, <TokenType.INET: 'INET'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.TEXT: 'TEXT'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ROW: 'ROW'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.CACHE: 'CACHE'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.SOME: 'SOME'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.ANTI: 'ANTI'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.NEXT: 'NEXT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.FULL: 'FULL'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.ROWS: 'ROWS'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.LEFT: 'LEFT'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.TRUE: 'TRUE'>} + {<TokenType.UBIGINT: 'UBIGINT'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.ROW: 'ROW'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATE: 'DATE'>, <TokenType.ENUM: 'ENUM'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.MERGE: 'MERGE'>, <TokenType.XML: 'XML'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.TEXT: 'TEXT'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.APPLY: 'APPLY'>, <TokenType.VIEW: 'VIEW'>, <TokenType.ANY: 'ANY'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.TRUE: 'TRUE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.BINARY: 'BINARY'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.BIT: 'BIT'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.INET: 'INET'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.KEEP: 'KEEP'>, <TokenType.LOAD: 'LOAD'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.UINT: 'UINT'>, <TokenType.FIRST: 'FIRST'>, 
<TokenType.SOME: 'SOME'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.SEMI: 'SEMI'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.DIV: 'DIV'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.YEAR: 'YEAR'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.ROWS: 'ROWS'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.JSONB: 'JSONB'>, <TokenType.FULL: 'FULL'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.FALSE: 'FALSE'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.IS: 'IS'>, <TokenType.SHOW: 'SHOW'>, <TokenType.INDEX: 'INDEX'>, <TokenType.UINT256: 'UINT256'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.VAR: 'VAR'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.TIME: 'TIME'>, <TokenType.ANTI: 'ANTI'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.INT: 'INT'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.SUPER: 'SUPER'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.CASE: 'CASE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.INT256: 'INT256'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.VALUES: 'VALUES'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.END: 'END'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.INT128: 'INT128'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.NEXT: 'NEXT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.UUID: 'UUID'>, <TokenType.LEFT: 'LEFT'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.TOP: 'TOP'>, <TokenType.DESC: 'DESC'>, <TokenType.ASC: 'ASC'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.NULL: 'NULL'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.FILTER: 'FILTER'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.ALL: 'ALL'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.MONEY: 'MONEY'>, <TokenType.SET: 'SET'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.MAP: 'MAP'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.CACHE: 'CACHE'>, <TokenType.JSON: 'JSON'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.ENUM8: 'ENUM8'>}
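ID_VAR_TOKENS lists the keywords the parser will still accept as plain identifiers; a tentative sketch (not part of the patch, behavior may vary by version):

    import sqlglot

    # FORMAT is a soft keyword here, so it can be used as a column name without quoting.
    print(sqlglot.parse_one("SELECT format FROM t", read="bigquery").sql("bigquery"))
    # expected, approximately: SELECT format FROM t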
    @@ -2527,273 +2640,280 @@ Default: 3 -
    390    class Generator(generator.Generator):
    -391        EXPLICIT_UNION = True
    -392        INTERVAL_ALLOWS_PLURAL_FORM = False
    -393        JOIN_HINTS = False
    -394        QUERY_HINTS = False
    -395        TABLE_HINTS = False
    -396        LIMIT_FETCH = "LIMIT"
    -397        RENAME_TABLE_WITH_DB = False
    -398        ESCAPE_LINE_BREAK = True
    -399        NVL2_SUPPORTED = False
    -400
    -401        TRANSFORMS = {
    -402            **generator.Generator.TRANSFORMS,
    -403            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
    -404            exp.ArraySize: rename_func("ARRAY_LENGTH"),
    -405            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
    -406            exp.Create: _create_sql,
    -407            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
    -408            exp.DateAdd: _date_add_sql("DATE", "ADD"),
    -409            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
    -410            exp.DateFromParts: rename_func("DATE"),
    -411            exp.DateStrToDate: datestrtodate_sql,
    -412            exp.DateSub: _date_add_sql("DATE", "SUB"),
    -413            exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
    -414            exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
    -415            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
    -416            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
    -417            exp.GroupConcat: rename_func("STRING_AGG"),
    -418            exp.Hex: rename_func("TO_HEX"),
    -419            exp.ILike: no_ilike_sql,
    -420            exp.IntDiv: rename_func("DIV"),
    -421            exp.JSONFormat: rename_func("TO_JSON_STRING"),
    -422            exp.JSONKeyValue: json_keyvalue_comma_sql,
    -423            exp.Max: max_or_greatest,
    -424            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
    -425            exp.MD5Digest: rename_func("MD5"),
    -426            exp.Min: min_or_least,
    -427            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    -428            exp.RegexpExtract: lambda self, e: self.func(
    -429                "REGEXP_EXTRACT",
    -430                e.this,
    -431                e.expression,
    -432                e.args.get("position"),
    -433                e.args.get("occurrence"),
    -434            ),
    -435            exp.RegexpReplace: regexp_replace_sql,
    -436            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
    -437            exp.ReturnsProperty: _returnsproperty_sql,
    -438            exp.Select: transforms.preprocess(
    -439                [
    -440                    transforms.explode_to_unnest,
    -441                    _unqualify_unnest,
    -442                    transforms.eliminate_distinct_on,
    -443                    _alias_ordered_group,
    -444                ]
    -445            ),
    -446            exp.SHA2: lambda self, e: self.func(
    -447                f"SHA256" if e.text("length") == "256" else "SHA512", e.this
    -448            ),
    -449            exp.StabilityProperty: lambda self, e: f"DETERMINISTIC"
    -450            if e.name == "IMMUTABLE"
    -451            else "NOT DETERMINISTIC",
    -452            exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
    -453            exp.StrToTime: lambda self, e: self.func(
    -454                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
    -455            ),
    -456            exp.TimeAdd: _date_add_sql("TIME", "ADD"),
    -457            exp.TimeSub: _date_add_sql("TIME", "SUB"),
    -458            exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
    -459            exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
    -460            exp.TimeStrToTime: timestrtotime_sql,
    -461            exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
    -462            exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"),
    -463            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
    -464            exp.Unhex: rename_func("FROM_HEX"),
    -465            exp.Values: _derived_table_values_to_unnest,
    -466            exp.VariancePop: rename_func("VAR_POP"),
    -467        }
    -468
    -469        TYPE_MAPPING = {
    -470            **generator.Generator.TYPE_MAPPING,
    -471            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
    -472            exp.DataType.Type.BIGINT: "INT64",
    -473            exp.DataType.Type.BINARY: "BYTES",
    -474            exp.DataType.Type.BOOLEAN: "BOOL",
    -475            exp.DataType.Type.CHAR: "STRING",
    -476            exp.DataType.Type.DECIMAL: "NUMERIC",
    -477            exp.DataType.Type.DOUBLE: "FLOAT64",
    -478            exp.DataType.Type.FLOAT: "FLOAT64",
    -479            exp.DataType.Type.INT: "INT64",
    -480            exp.DataType.Type.NCHAR: "STRING",
    -481            exp.DataType.Type.NVARCHAR: "STRING",
    -482            exp.DataType.Type.SMALLINT: "INT64",
    -483            exp.DataType.Type.TEXT: "STRING",
    -484            exp.DataType.Type.TIMESTAMP: "DATETIME",
    -485            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
    -486            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
    -487            exp.DataType.Type.TINYINT: "INT64",
    -488            exp.DataType.Type.VARBINARY: "BYTES",
    -489            exp.DataType.Type.VARCHAR: "STRING",
    -490            exp.DataType.Type.VARIANT: "ANY TYPE",
    -491        }
    -492
    -493        PROPERTIES_LOCATION = {
    -494            **generator.Generator.PROPERTIES_LOCATION,
    -495            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
    -496            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    -497        }
    -498
    -499        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
    -500        RESERVED_KEYWORDS = {
    -501            *generator.Generator.RESERVED_KEYWORDS,
    -502            "all",
    -503            "and",
    -504            "any",
    -505            "array",
    -506            "as",
    -507            "asc",
    -508            "assert_rows_modified",
    -509            "at",
    -510            "between",
    -511            "by",
    -512            "case",
    -513            "cast",
    -514            "collate",
    -515            "contains",
    -516            "create",
    -517            "cross",
    -518            "cube",
    -519            "current",
    -520            "default",
    -521            "define",
    -522            "desc",
    -523            "distinct",
    -524            "else",
    -525            "end",
    -526            "enum",
    -527            "escape",
    -528            "except",
    -529            "exclude",
    -530            "exists",
    -531            "extract",
    -532            "false",
    -533            "fetch",
    -534            "following",
    -535            "for",
    -536            "from",
    -537            "full",
    -538            "group",
    -539            "grouping",
    -540            "groups",
    -541            "hash",
    -542            "having",
    -543            "if",
    -544            "ignore",
    -545            "in",
    -546            "inner",
    -547            "intersect",
    -548            "interval",
    -549            "into",
    -550            "is",
    -551            "join",
    -552            "lateral",
    -553            "left",
    -554            "like",
    -555            "limit",
    -556            "lookup",
    -557            "merge",
    -558            "natural",
    -559            "new",
    -560            "no",
    -561            "not",
    -562            "null",
    -563            "nulls",
    -564            "of",
    -565            "on",
    -566            "or",
    -567            "order",
    -568            "outer",
    -569            "over",
    -570            "partition",
    -571            "preceding",
    -572            "proto",
    -573            "qualify",
    -574            "range",
    -575            "recursive",
    -576            "respect",
    -577            "right",
    -578            "rollup",
    -579            "rows",
    -580            "select",
    -581            "set",
    -582            "some",
    -583            "struct",
    -584            "tablesample",
    -585            "then",
    -586            "to",
    -587            "treat",
    -588            "true",
    -589            "unbounded",
    -590            "union",
    -591            "unnest",
    -592            "using",
    -593            "when",
    -594            "where",
    -595            "window",
    -596            "with",
    -597            "within",
    -598        }
    -599
    -600        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    -601            parent = expression.parent
    -602
    -603            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    -604            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    -605            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    -606                return self.func(
    -607                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    -608                )
    -609
    -610            return super().attimezone_sql(expression)
    -611
    -612        def trycast_sql(self, expression: exp.TryCast) -> str:
    -613            return self.cast_sql(expression, safe_prefix="SAFE_")
    -614
    -615        def cte_sql(self, expression: exp.CTE) -> str:
    -616            if expression.alias_column_names:
    -617                self.unsupported("Column names in CTE definition are not supported.")
    -618            return super().cte_sql(expression)
    -619
    -620        def array_sql(self, expression: exp.Array) -> str:
    -621            first_arg = seq_get(expression.expressions, 0)
    -622            if isinstance(first_arg, exp.Subqueryable):
    -623                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    -624
    -625            return inline_array_sql(self, expression)
    -626
    -627        def transaction_sql(self, *_) -> str:
    -628            return "BEGIN TRANSACTION"
    -629
    -630        def commit_sql(self, *_) -> str:
    -631            return "COMMIT TRANSACTION"
    -632
    -633        def rollback_sql(self, *_) -> str:
    -634            return "ROLLBACK TRANSACTION"
    -635
    -636        def in_unnest_op(self, expression: exp.Unnest) -> str:
    -637            return self.sql(expression)
    -638
    -639        def except_op(self, expression: exp.Except) -> str:
    -640            if not expression.args.get("distinct", False):
    -641                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    -642            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
+422    class Generator(generator.Generator):
    +423        EXPLICIT_UNION = True
    +424        INTERVAL_ALLOWS_PLURAL_FORM = False
    +425        JOIN_HINTS = False
    +426        QUERY_HINTS = False
    +427        TABLE_HINTS = False
    +428        LIMIT_FETCH = "LIMIT"
    +429        RENAME_TABLE_WITH_DB = False
    +430        ESCAPE_LINE_BREAK = True
    +431        NVL2_SUPPORTED = False
    +432
    +433        TRANSFORMS = {
    +434            **generator.Generator.TRANSFORMS,
    +435            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
    +436            exp.ArraySize: rename_func("ARRAY_LENGTH"),
    +437            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
    +438            exp.Create: _create_sql,
    +439            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
    +440            exp.DateAdd: _date_add_sql("DATE", "ADD"),
    +441            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
    +442            exp.DateFromParts: rename_func("DATE"),
    +443            exp.DateStrToDate: datestrtodate_sql,
    +444            exp.DateSub: _date_add_sql("DATE", "SUB"),
    +445            exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
    +446            exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
    +447            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
    +448            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
    +449            exp.GroupConcat: rename_func("STRING_AGG"),
    +450            exp.Hex: rename_func("TO_HEX"),
    +451            exp.ILike: no_ilike_sql,
    +452            exp.IntDiv: rename_func("DIV"),
    +453            exp.JSONFormat: rename_func("TO_JSON_STRING"),
    +454            exp.JSONKeyValue: json_keyvalue_comma_sql,
    +455            exp.Max: max_or_greatest,
    +456            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
    +457            exp.MD5Digest: rename_func("MD5"),
    +458            exp.Min: min_or_least,
    +459            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    +460            exp.RegexpExtract: lambda self, e: self.func(
    +461                "REGEXP_EXTRACT",
    +462                e.this,
    +463                e.expression,
    +464                e.args.get("position"),
    +465                e.args.get("occurrence"),
    +466            ),
    +467            exp.RegexpReplace: regexp_replace_sql,
    +468            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
    +469            exp.ReturnsProperty: _returnsproperty_sql,
    +470            exp.Select: transforms.preprocess(
    +471                [
    +472                    transforms.explode_to_unnest,
    +473                    _unqualify_unnest,
    +474                    transforms.eliminate_distinct_on,
    +475                    _alias_ordered_group,
    +476                ]
    +477            ),
    +478            exp.SHA2: lambda self, e: self.func(
    +479                f"SHA256" if e.text("length") == "256" else "SHA512", e.this
    +480            ),
    +481            exp.StabilityProperty: lambda self, e: f"DETERMINISTIC"
    +482            if e.name == "IMMUTABLE"
    +483            else "NOT DETERMINISTIC",
    +484            exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
    +485            exp.StrToTime: lambda self, e: self.func(
    +486                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
    +487            ),
    +488            exp.TimeAdd: _date_add_sql("TIME", "ADD"),
    +489            exp.TimeSub: _date_add_sql("TIME", "SUB"),
    +490            exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
    +491            exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
    +492            exp.TimeStrToTime: timestrtotime_sql,
    +493            exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
    +494            exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"),
    +495            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
    +496            exp.Unhex: rename_func("FROM_HEX"),
    +497            exp.Values: _derived_table_values_to_unnest,
    +498            exp.VariancePop: rename_func("VAR_POP"),
    +499        }
    +500
    +501        TYPE_MAPPING = {
    +502            **generator.Generator.TYPE_MAPPING,
    +503            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
    +504            exp.DataType.Type.BIGINT: "INT64",
    +505            exp.DataType.Type.BINARY: "BYTES",
    +506            exp.DataType.Type.BOOLEAN: "BOOL",
    +507            exp.DataType.Type.CHAR: "STRING",
    +508            exp.DataType.Type.DECIMAL: "NUMERIC",
    +509            exp.DataType.Type.DOUBLE: "FLOAT64",
    +510            exp.DataType.Type.FLOAT: "FLOAT64",
    +511            exp.DataType.Type.INT: "INT64",
    +512            exp.DataType.Type.NCHAR: "STRING",
    +513            exp.DataType.Type.NVARCHAR: "STRING",
    +514            exp.DataType.Type.SMALLINT: "INT64",
    +515            exp.DataType.Type.TEXT: "STRING",
    +516            exp.DataType.Type.TIMESTAMP: "DATETIME",
    +517            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
    +518            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
    +519            exp.DataType.Type.TINYINT: "INT64",
    +520            exp.DataType.Type.VARBINARY: "BYTES",
    +521            exp.DataType.Type.VARCHAR: "STRING",
    +522            exp.DataType.Type.VARIANT: "ANY TYPE",
    +523        }
    +524
    +525        PROPERTIES_LOCATION = {
    +526            **generator.Generator.PROPERTIES_LOCATION,
    +527            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
    +528            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    +529        }
    +530
    +531        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
    +532        RESERVED_KEYWORDS = {
    +533            *generator.Generator.RESERVED_KEYWORDS,
    +534            "all",
    +535            "and",
    +536            "any",
    +537            "array",
    +538            "as",
    +539            "asc",
    +540            "assert_rows_modified",
    +541            "at",
    +542            "between",
    +543            "by",
    +544            "case",
    +545            "cast",
    +546            "collate",
    +547            "contains",
    +548            "create",
    +549            "cross",
    +550            "cube",
    +551            "current",
    +552            "default",
    +553            "define",
    +554            "desc",
    +555            "distinct",
    +556            "else",
    +557            "end",
    +558            "enum",
    +559            "escape",
    +560            "except",
    +561            "exclude",
    +562            "exists",
    +563            "extract",
    +564            "false",
    +565            "fetch",
    +566            "following",
    +567            "for",
    +568            "from",
    +569            "full",
    +570            "group",
    +571            "grouping",
    +572            "groups",
    +573            "hash",
    +574            "having",
    +575            "if",
    +576            "ignore",
    +577            "in",
    +578            "inner",
    +579            "intersect",
    +580            "interval",
    +581            "into",
    +582            "is",
    +583            "join",
    +584            "lateral",
    +585            "left",
    +586            "like",
    +587            "limit",
    +588            "lookup",
    +589            "merge",
    +590            "natural",
    +591            "new",
    +592            "no",
    +593            "not",
    +594            "null",
    +595            "nulls",
    +596            "of",
    +597            "on",
    +598            "or",
    +599            "order",
    +600            "outer",
    +601            "over",
    +602            "partition",
    +603            "preceding",
    +604            "proto",
    +605            "qualify",
    +606            "range",
    +607            "recursive",
    +608            "respect",
    +609            "right",
    +610            "rollup",
    +611            "rows",
    +612            "select",
    +613            "set",
    +614            "some",
    +615            "struct",
    +616            "tablesample",
    +617            "then",
    +618            "to",
    +619            "treat",
    +620            "true",
    +621            "unbounded",
    +622            "union",
    +623            "unnest",
    +624            "using",
    +625            "when",
    +626            "where",
    +627            "window",
    +628            "with",
    +629            "within",
    +630        }
    +631
    +632        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    +633            parent = expression.parent
    +634
    +635            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    +636            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    +637            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    +638                return self.func(
    +639                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    +640                )
    +641
    +642            return super().attimezone_sql(expression)
     643
    -644        def intersect_op(self, expression: exp.Intersect) -> str:
    -645            if not expression.args.get("distinct", False):
    -646                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    -647            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    +644        def cast_sql(self, expression: exp.Cast, safe_prefix: t.Optional[str] = None) -> str:
    +645            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#json_literals
    +646            if expression.is_type("json"):
    +647                return f"JSON {self.sql(expression, 'this')}"
     648
    -649        def with_properties(self, properties: exp.Properties) -> str:
    -650            return self.properties(properties, prefix=self.seg("OPTIONS"))
    -651
    -652        def version_sql(self, expression: exp.Version) -> str:
    -653            if expression.name == "TIMESTAMP":
    -654                expression = expression.copy()
    -655                expression.set("this", "SYSTEM_TIME")
    -656            return super().version_sql(expression)
    +649            return super().cast_sql(expression, safe_prefix=safe_prefix)
    +650
    +651        def trycast_sql(self, expression: exp.TryCast) -> str:
    +652            return self.cast_sql(expression, safe_prefix="SAFE_")
    +653
    +654        def cte_sql(self, expression: exp.CTE) -> str:
    +655            if expression.alias_column_names:
    +656                self.unsupported("Column names in CTE definition are not supported.")
    +657            return super().cte_sql(expression)
    +658
    +659        def array_sql(self, expression: exp.Array) -> str:
    +660            first_arg = seq_get(expression.expressions, 0)
    +661            if isinstance(first_arg, exp.Subqueryable):
    +662                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    +663
    +664            return inline_array_sql(self, expression)
    +665
    +666        def transaction_sql(self, *_) -> str:
    +667            return "BEGIN TRANSACTION"
    +668
    +669        def commit_sql(self, *_) -> str:
    +670            return "COMMIT TRANSACTION"
    +671
    +672        def rollback_sql(self, *_) -> str:
    +673            return "ROLLBACK TRANSACTION"
    +674
    +675        def in_unnest_op(self, expression: exp.Unnest) -> str:
    +676            return self.sql(expression)
    +677
    +678        def except_op(self, expression: exp.Except) -> str:
    +679            if not expression.args.get("distinct", False):
    +680                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    +681            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    +682
    +683        def intersect_op(self, expression: exp.Intersect) -> str:
    +684            if not expression.args.get("distinct", False):
    +685                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    +686            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    +687
    +688        def with_properties(self, properties: exp.Properties) -> str:
    +689            return self.properties(properties, prefix=self.seg("OPTIONS"))
    +690
    +691        def version_sql(self, expression: exp.Version) -> str:
    +692            if expression.name == "TIMESTAMP":
    +693                expression = expression.copy()
    +694                expression.set("this", "SYSTEM_TIME")
    +695            return super().version_sql(expression)
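A minimal sketch of exercising the rewritten BigQuery.Generator above through sqlglot's public transpile API; the exact strings emitted can differ between sqlglot versions, so the expected outputs in the comments are approximate:

    import sqlglot

    # TRANSFORMS renames GENERATE_SERIES to GENERATE_ARRAY and VARIANCE_POP to
    # VAR_POP, so the expected outputs are roughly
    # "SELECT GENERATE_ARRAY(1, 10)" and "SELECT VAR_POP(x) FROM t".
    print(sqlglot.transpile("SELECT GENERATE_SERIES(1, 10)", write="bigquery")[0])
    print(sqlglot.transpile("SELECT VARIANCE_POP(x) FROM t", write="bigquery")[0])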
     
    @@ -2987,7 +3107,7 @@ Default: True
    RESERVED_KEYWORDS = - {'lateral', 'all', 'having', 'by', 'from', 'tablesample', 'true', 'within', 'collate', 'window', 'rows', 'unnest', 'preceding', 'between', 'then', 'at', 'lookup', 'left', 'cross', 'join', 'any', 'inner', 'when', 'array', 'natural', 'merge', 'treat', 'default', 'in', 'on', 'respect', 'where', 'range', 'fetch', 'enum', 'rollup', 'exists', 'limit', 'struct', 'over', 'nulls', 'select', 'for', 'qualify', 'groups', 'contains', 'intersect', 'right', 'using', 'cast', 'recursive', 'set', 'distinct', 'is', 'end', 'new', 'define', 'with', 'as', 'exclude', 'full', 'to', 'assert_rows_modified', 'order', 'no', 'false', 'group', 'into', 'null', 'escape', 'proto', 'create', 'outer', 'except', 'if', 'interval', 'grouping', 'or', 'following', 'like', 'current', 'desc', 'cube', 'partition', 'of', 'ignore', 'not', 'some', 'case', 'and', 'asc', 'hash', 'unbounded', 'union', 'extract', 'else'} + {'new', 'or', 'else', 'outer', 'right', 'qualify', 'rollup', 'assert_rows_modified', 'order', 'at', 'for', 'default', 'current', 'treat', 'contains', 'rows', 'where', 'array', 'not', 'within', 'nulls', 'recursive', 'exclude', 'as', 'is', 'extract', 'cross', 'unbounded', 'to', 'some', 'case', 'preceding', 'no', 'intersect', 'having', 'distinct', 'limit', 'null', 'full', 'hash', 'using', 'on', 'merge', 'create', 'define', 'partition', 'left', 'collate', 'lookup', 'into', 'struct', 'window', 'between', 'false', 'cube', 'inner', 'ignore', 'if', 'like', 'group', 'in', 'proto', 'all', 'from', 'over', 'unnest', 'grouping', 'range', 'union', 'except', 'desc', 'and', 'select', 'of', 'enum', 'tablesample', 'set', 'fetch', 'cast', 'then', 'following', 'by', 'lateral', 'respect', 'when', 'groups', 'end', 'escape', 'join', 'any', 'interval', 'true', 'with', 'asc', 'natural', 'exists'}
    @@ -3007,17 +3127,40 @@ Default: True
-600        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    -601            parent = expression.parent
    -602
    -603            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    -604            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    -605            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    -606                return self.func(
    -607                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    -608                )
    -609
    -610            return super().attimezone_sql(expression)
+632        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    +633            parent = expression.parent
    +634
    +635            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    +636            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    +637            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    +638                return self.func(
    +639                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    +640                )
    +641
    +642            return super().attimezone_sql(expression)
    +
+            def cast_sql(self, expression: sqlglot.expressions.Cast, safe_prefix: Optional[str] = None) -> str:
+644        def cast_sql(self, expression: exp.Cast, safe_prefix: t.Optional[str] = None) -> str:
    +645            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#json_literals
    +646            if expression.is_type("json"):
    +647                return f"JSON {self.sql(expression, 'this')}"
    +648
    +649            return super().cast_sql(expression, safe_prefix=safe_prefix)
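The cast_sql override above special-cases the JSON type; a short sketch, assuming the default dialect parses CAST(... AS JSON) (expected output is approximate):

    import sqlglot

    # Expected to render roughly as: SELECT JSON '{"a": 1}'
    print(sqlglot.transpile('''SELECT CAST('{"a": 1}' AS JSON)''', write="bigquery")[0])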
     
    @@ -3035,8 +3178,8 @@ Default: True
-612        def trycast_sql(self, expression: exp.TryCast) -> str:
    -613            return self.cast_sql(expression, safe_prefix="SAFE_")
+651        def trycast_sql(self, expression: exp.TryCast) -> str:
    +652            return self.cast_sql(expression, safe_prefix="SAFE_")
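trycast_sql above reuses cast_sql with the SAFE_ prefix, and the target type still goes through TYPE_MAPPING; a hedged one-liner (expected output is approximate):

    import sqlglot

    # Expected to render roughly as: SELECT SAFE_CAST(x AS INT64)
    print(sqlglot.transpile("SELECT TRY_CAST(x AS INT)", write="bigquery")[0])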
     
    @@ -3054,10 +3197,10 @@ Default: True
-615        def cte_sql(self, expression: exp.CTE) -> str:
    -616            if expression.alias_column_names:
    -617                self.unsupported("Column names in CTE definition are not supported.")
    -618            return super().cte_sql(expression)
+654        def cte_sql(self, expression: exp.CTE) -> str:
    +655            if expression.alias_column_names:
    +656                self.unsupported("Column names in CTE definition are not supported.")
    +657            return super().cte_sql(expression)
     
    @@ -3075,12 +3218,12 @@ Default: True
-620        def array_sql(self, expression: exp.Array) -> str:
    -621            first_arg = seq_get(expression.expressions, 0)
    -622            if isinstance(first_arg, exp.Subqueryable):
    -623                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    -624
    -625            return inline_array_sql(self, expression)
+659        def array_sql(self, expression: exp.Array) -> str:
    +660            first_arg = seq_get(expression.expressions, 0)
    +661            if isinstance(first_arg, exp.Subqueryable):
    +662                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    +663
    +664            return inline_array_sql(self, expression)
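array_sql above keeps ARRAY(<subquery>) as a function call and otherwise falls back to inline_array_sql; a sketch, assuming the BigQuery and DuckDB readers parse these forms as exp.Array (expected outputs are approximate):

    import sqlglot

    # A subquery argument is expected to stay as ARRAY(SELECT x FROM t).
    print(sqlglot.transpile("SELECT ARRAY(SELECT x FROM t)", read="bigquery", write="bigquery")[0])

    # A plain array literal is expected to stay inline, e.g. [1, 2, 3].
    print(sqlglot.transpile("SELECT [1, 2, 3]", read="duckdb", write="bigquery")[0])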
     
    @@ -3098,8 +3241,8 @@ Default: True
-627        def transaction_sql(self, *_) -> str:
    -628            return "BEGIN TRANSACTION"
+666        def transaction_sql(self, *_) -> str:
    +667            return "BEGIN TRANSACTION"
     
    @@ -3117,8 +3260,8 @@ Default: True
-630        def commit_sql(self, *_) -> str:
    -631            return "COMMIT TRANSACTION"
+669        def commit_sql(self, *_) -> str:
    +670            return "COMMIT TRANSACTION"
     
    @@ -3136,8 +3279,8 @@ Default: True
-633        def rollback_sql(self, *_) -> str:
    -634            return "ROLLBACK TRANSACTION"
+672        def rollback_sql(self, *_) -> str:
    +673            return "ROLLBACK TRANSACTION"
     
    @@ -3155,8 +3298,8 @@ Default: True
-636        def in_unnest_op(self, expression: exp.Unnest) -> str:
    -637            return self.sql(expression)
+675        def in_unnest_op(self, expression: exp.Unnest) -> str:
    +676            return self.sql(expression)
     
    @@ -3174,10 +3317,10 @@ Default: True
-639        def except_op(self, expression: exp.Except) -> str:
    -640            if not expression.args.get("distinct", False):
    -641                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    -642            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
+678        def except_op(self, expression: exp.Except) -> str:
    +679            if not expression.args.get("distinct", False):
    +680                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    +681            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
     
    @@ -3195,10 +3338,10 @@ Default: True
-644        def intersect_op(self, expression: exp.Intersect) -> str:
    -645            if not expression.args.get("distinct", False):
    -646                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    -647            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
+683        def intersect_op(self, expression: exp.Intersect) -> str:
    +684            if not expression.args.get("distinct", False):
    +685                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    +686            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
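except_op and intersect_op above only decide between the explicit DISTINCT and ALL qualifiers; a sketch of both paths (with the default unsupported_level, the ALL case is expected to log a warning rather than raise):

    import sqlglot

    # Plain EXCEPT parses as distinct, so this is expected to emit
    # "SELECT a FROM x EXCEPT DISTINCT SELECT a FROM y".
    print(sqlglot.transpile("SELECT a FROM x EXCEPT SELECT a FROM y", write="bigquery")[0])

    # EXCEPT ALL is expected to trigger the "EXCEPT without DISTINCT is not
    # supported in BigQuery" warning while still emitting EXCEPT ALL.
    print(sqlglot.transpile("SELECT a FROM x EXCEPT ALL SELECT a FROM y", write="bigquery")[0])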
     
    @@ -3216,8 +3359,8 @@ Default: True
-649        def with_properties(self, properties: exp.Properties) -> str:
    -650            return self.properties(properties, prefix=self.seg("OPTIONS"))
+688        def with_properties(self, properties: exp.Properties) -> str:
    +689            return self.properties(properties, prefix=self.seg("OPTIONS"))
     
    @@ -3235,11 +3378,11 @@ Default: True
-652        def version_sql(self, expression: exp.Version) -> str:
    -653            if expression.name == "TIMESTAMP":
    -654                expression = expression.copy()
    -655                expression.set("this", "SYSTEM_TIME")
    -656            return super().version_sql(expression)
+691        def version_sql(self, expression: exp.Version) -> str:
    +692            if expression.name == "TIMESTAMP":
    +693                expression = expression.copy()
    +694                expression.set("this", "SYSTEM_TIME")
    +695            return super().version_sql(expression)
     
    @@ -3600,6 +3743,7 @@ Default: True
    intersect_sql
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    @@ -3700,7 +3844,6 @@ Default: True
    bitwiseor_sql
    bitwiserightshift_sql
    bitwisexor_sql
-cast_sql
    currentdate_sql
    collate_sql
    command_sql
    diff --git a/docs/sqlglot/dialects/clickhouse.html b/docs/sqlglot/dialects/clickhouse.html index 4da4873..6c62e68 100644 --- a/docs/sqlglot/dialects/clickhouse.html +++ b/docs/sqlglot/dialects/clickhouse.html @@ -1959,7 +1959,7 @@ Default: 3
    FUNCTIONS_WITH_ALIASED_ARGS = -{'TUPLE', 'STRUCT'} +{'STRUCT', 'TUPLE'}
    @@ -2024,7 +2024,7 @@ Default: 3
    JOIN_KINDS = - {<TokenType.CROSS: 'CROSS'>, <TokenType.ANY: 'ANY'>, <TokenType.ASOF: 'ASOF'>, <TokenType.SEMI: 'SEMI'>, <TokenType.OUTER: 'OUTER'>, <TokenType.INNER: 'INNER'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.ANTI: 'ANTI'>} + {<TokenType.ASOF: 'ASOF'>, <TokenType.CROSS: 'CROSS'>, <TokenType.ANY: 'ANY'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.INNER: 'INNER'>, <TokenType.SEMI: 'SEMI'>, <TokenType.ANTI: 'ANTI'>, <TokenType.OUTER: 'OUTER'>}
    @@ -2037,7 +2037,7 @@ Default: 3
    TABLE_ALIAS_TOKENS = - {<TokenType.END: 'END'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SUPER: 'SUPER'>, <TokenType.UINT: 'UINT'>, <TokenType.SET: 'SET'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.JSONB: 'JSONB'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.FILTER: 'FILTER'>, <TokenType.UUID: 'UUID'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.JSON: 'JSON'>, <TokenType.UINT128: 'UINT128'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.MAP: 'MAP'>, <TokenType.FIRST: 'FIRST'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.IS: 'IS'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.DIV: 'DIV'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.KEEP: 'KEEP'>, <TokenType.LOAD: 'LOAD'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.MERGE: 'MERGE'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.INT: 'INT'>, <TokenType.INT256: 'INT256'>, <TokenType.ALL: 'ALL'>, <TokenType.NULL: 'NULL'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.YEAR: 'YEAR'>, <TokenType.TOP: 'TOP'>, <TokenType.ENUM: 'ENUM'>, <TokenType.BIT: 'BIT'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.DESC: 'DESC'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.TIME: 'TIME'>, <TokenType.VIEW: 'VIEW'>, <TokenType.XML: 'XML'>, <TokenType.INT128: 'INT128'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.ASC: 'ASC'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.SHOW: 'SHOW'>, <TokenType.INDEX: 'INDEX'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.FALSE: 'FALSE'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.BINARY: 'BINARY'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.DATE: 'DATE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.CASE: 'CASE'>, <TokenType.VAR: 'VAR'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.INET: 'INET'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.OVERWRITE: 
'OVERWRITE'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.TEXT: 'TEXT'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ROW: 'ROW'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.CACHE: 'CACHE'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.SOME: 'SOME'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.NEXT: 'NEXT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.ROWS: 'ROWS'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.TRUE: 'TRUE'>} + {<TokenType.UBIGINT: 'UBIGINT'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.ROW: 'ROW'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATE: 'DATE'>, <TokenType.ENUM: 'ENUM'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.MERGE: 'MERGE'>, <TokenType.XML: 'XML'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.TEXT: 'TEXT'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.VIEW: 'VIEW'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.TRUE: 'TRUE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.BINARY: 'BINARY'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.BIT: 'BIT'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.INET: 'INET'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.KEEP: 'KEEP'>, <TokenType.LOAD: 'LOAD'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.UINT: 'UINT'>, <TokenType.FIRST: 'FIRST'>, <TokenType.SOME: 'SOME'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.DIV: 'DIV'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.YEAR: 'YEAR'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.ROWS: 'ROWS'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.JSONB: 
'JSONB'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.FALSE: 'FALSE'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.IS: 'IS'>, <TokenType.SHOW: 'SHOW'>, <TokenType.INDEX: 'INDEX'>, <TokenType.UINT256: 'UINT256'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.VAR: 'VAR'>, <TokenType.TIME: 'TIME'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.INT: 'INT'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.SUPER: 'SUPER'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.CASE: 'CASE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.INT256: 'INT256'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.END: 'END'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.INT128: 'INT128'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.NEXT: 'NEXT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.UUID: 'UUID'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.TOP: 'TOP'>, <TokenType.DESC: 'DESC'>, <TokenType.ASC: 'ASC'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.NULL: 'NULL'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.FILTER: 'FILTER'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.ALL: 'ALL'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.MONEY: 'MONEY'>, <TokenType.SET: 'SET'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.MAP: 'MAP'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.CACHE: 'CACHE'>, <TokenType.JSON: 'JSON'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>}
    @@ -2598,7 +2598,7 @@ Default: True
    ON_CLUSTER_TARGETS = -{'DATABASE', 'INDEX', 'DICTIONARY', 'TABLE', 'FUNCTION', 'NAMED COLLECTION', 'VIEW'} +{'FUNCTION', 'TABLE', 'DATABASE', 'NAMED COLLECTION', 'INDEX', 'VIEW', 'DICTIONARY'}
    @@ -3174,6 +3174,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/databricks.html b/docs/sqlglot/dialects/databricks.html index 81d3bfe..34f9971 100644 --- a/docs/sqlglot/dialects/databricks.html +++ b/docs/sqlglot/dialects/databricks.html @@ -1291,6 +1291,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/doris.html b/docs/sqlglot/dialects/doris.html index 476804e..58c122d 100644 --- a/docs/sqlglot/dialects/doris.html +++ b/docs/sqlglot/dialects/doris.html @@ -1318,6 +1318,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/drill.html b/docs/sqlglot/dialects/drill.html index f7fd311..0c1407c 100644 --- a/docs/sqlglot/dialects/drill.html +++ b/docs/sqlglot/dialects/drill.html @@ -287,8 +287,8 @@ 16) 17 18 - 19def _date_add_sql(kind: str) -> t.Callable[[generator.Generator, exp.DateAdd | exp.DateSub], str]: - 20 def func(self: generator.Generator, expression: exp.DateAdd | exp.DateSub) -> str: + 19def _date_add_sql(kind: str) -> t.Callable[[Drill.Generator, exp.DateAdd | exp.DateSub], str]: + 20 def func(self: Drill.Generator, expression: exp.DateAdd | exp.DateSub) -> str: 21 this = self.sql(expression, "this") 22 unit = exp.var(expression.text("unit").upper() or "DAY") 23 return f"DATE_{kind}({this}, {self.sql(exp.Interval(this=expression.expression.copy(), unit=unit))})" @@ -296,7 +296,7 @@ 25 return func 26 27 - 28def _str_to_date(self: generator.Generator, expression: exp.StrToDate) -> str: + 28def _str_to_date(self: Drill.Generator, expression: exp.StrToDate) -> str: 29 this = self.sql(expression, "this") 30 time_format = self.format_time(expression) 31 if time_format == Drill.DATE_FORMAT: @@ -1851,6 +1851,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/duckdb.html b/docs/sqlglot/dialects/duckdb.html index 834f10b..87ff0c0 100644 --- a/docs/sqlglot/dialects/duckdb.html +++ b/docs/sqlglot/dialects/duckdb.html @@ -293,13 +293,13 @@
    31from sqlglot.tokens import TokenType 32 33 - 34def _ts_or_ds_add_sql(self: generator.Generator, expression: exp.TsOrDsAdd) -> str: + 34def _ts_or_ds_add_sql(self: DuckDB.Generator, expression: exp.TsOrDsAdd) -> str: 35 this = self.sql(expression, "this") 36 unit = self.sql(expression, "unit").strip("'") or "DAY" 37 return f"CAST({this} AS DATE) + {self.sql(exp.Interval(this=expression.expression.copy(), unit=unit))}" 38 39 - 40def _date_delta_sql(self: generator.Generator, expression: exp.DateAdd | exp.DateSub) -> str: + 40def _date_delta_sql(self: DuckDB.Generator, expression: exp.DateAdd | exp.DateSub) -> str: 41 this = self.sql(expression, "this") 42 unit = self.sql(expression, "unit").strip("'") or "DAY" 43 op = "+" if isinstance(expression, exp.DateAdd) else "-" @@ -307,7 +307,7 @@ 45 46 47# BigQuery -> DuckDB conversion for the DATE function - 48def _date_sql(self: generator.Generator, expression: exp.Date) -> str: + 48def _date_sql(self: DuckDB.Generator, expression: exp.Date) -> str: 49 result = f"CAST({self.sql(expression, 'this')} AS DATE)" 50 zone = self.sql(expression, "zone") 51 @@ -321,13 +321,13 @@ 59 return result 60 61 - 62def _array_sort_sql(self: generator.Generator, expression: exp.ArraySort) -> str: + 62def _array_sort_sql(self: DuckDB.Generator, expression: exp.ArraySort) -> str: 63 if expression.expression: 64 self.unsupported("DUCKDB ARRAY_SORT does not support a comparator") 65 return f"ARRAY_SORT({self.sql(expression, 'this')})" 66 67 - 68def _sort_array_sql(self: generator.Generator, expression: exp.SortArray) -> str: + 68def _sort_array_sql(self: DuckDB.Generator, expression: exp.SortArray) -> str: 69 this = self.sql(expression, "this") 70 if expression.args.get("asc") == exp.false(): 71 return f"ARRAY_REVERSE_SORT({this})" @@ -342,14 +342,14 @@ 80 return exp.DateDiff(this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0)) 81 82 - 83def _struct_sql(self: generator.Generator, expression: exp.Struct) -> str: + 83def _struct_sql(self: DuckDB.Generator, expression: exp.Struct) -> str: 84 args = [ 85 f"'{e.name or e.this.name}': {self.sql(e, 'expression')}" for e in expression.expressions 86 ] 87 return f"{{{', '.join(args)}}}" 88 89 - 90def _datatype_sql(self: generator.Generator, expression: exp.DataType) -> str: + 90def _datatype_sql(self: DuckDB.Generator, expression: exp.DataType) -> str: 91 if expression.is_type("array"): 92 return f"{self.expressions(expression, flat=True)}[]" 93 @@ -360,7 +360,7 @@ 98 return self.datatype_sql(expression) 99 100 -101def _json_format_sql(self: generator.Generator, expression: exp.JSONFormat) -> str: +101def _json_format_sql(self: DuckDB.Generator, expression: exp.JSONFormat) -> str: 102 sql = self.func("TO_JSON", expression.this, expression.args.get("options")) 103 return f"CAST({sql} AS TEXT)" 104 @@ -2264,6 +2264,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/hive.html b/docs/sqlglot/dialects/hive.html index cf1ef03..1d6c5b1 100644 --- a/docs/sqlglot/dialects/hive.html +++ b/docs/sqlglot/dialects/hive.html @@ -372,7 +372,7 @@
    50DIFF_MONTH_SWITCH = ("YEAR", "QUARTER", "MONTH") 51 52 - 53def _add_date_sql(self: generator.Generator, expression: exp.DateAdd | exp.DateSub) -> str: + 53def _add_date_sql(self: Hive.Generator, expression: exp.DateAdd | exp.DateSub) -> str: 54 unit = expression.text("unit").upper() 55 func, multiplier = DATE_DELTA_INTERVAL.get(unit, ("DATE_ADD", 1)) 56 @@ -391,7 +391,7 @@ 69 return self.func(func, expression.this, modified_increment) 70 71 - 72def _date_diff_sql(self: generator.Generator, expression: exp.DateDiff) -> str: + 72def _date_diff_sql(self: Hive.Generator, expression: exp.DateDiff) -> str: 73 unit = expression.text("unit").upper() 74 75 factor = TIME_DIFF_FACTOR.get(unit) @@ -409,7 +409,7 @@ 87 return f"{diff_sql}{multiplier_sql}" 88 89 - 90def _json_format_sql(self: generator.Generator, expression: exp.JSONFormat) -> str: + 90def _json_format_sql(self: Hive.Generator, expression: exp.JSONFormat) -> str: 91 this = expression.this 92 if isinstance(this, exp.Cast) and this.is_type("json") and this.this.is_string: 93 # Since FROM_JSON requires a nested type, we always wrap the json string with @@ -425,21 +425,21 @@ 103 return self.func("TO_JSON", this, expression.args.get("options")) 104 105 -106def _array_sort_sql(self: generator.Generator, expression: exp.ArraySort) -> str: +106def _array_sort_sql(self: Hive.Generator, expression: exp.ArraySort) -> str: 107 if expression.expression: 108 self.unsupported("Hive SORT_ARRAY does not support a comparator") 109 return f"SORT_ARRAY({self.sql(expression, 'this')})" 110 111 -112def _property_sql(self: generator.Generator, expression: exp.Property) -> str: +112def _property_sql(self: Hive.Generator, expression: exp.Property) -> str: 113 return f"'{expression.name}'={self.sql(expression, 'value')}" 114 115 -116def _str_to_unix_sql(self: generator.Generator, expression: exp.StrToUnix) -> str: +116def _str_to_unix_sql(self: Hive.Generator, expression: exp.StrToUnix) -> str: 117 return self.func("UNIX_TIMESTAMP", expression.this, time_format("hive")(self, expression)) 118 119 -120def _str_to_date_sql(self: generator.Generator, expression: exp.StrToDate) -> str: +120def _str_to_date_sql(self: Hive.Generator, expression: exp.StrToDate) -> str: 121 this = self.sql(expression, "this") 122 time_format = self.format_time(expression) 123 if time_format not in (Hive.TIME_FORMAT, Hive.DATE_FORMAT): @@ -447,7 +447,7 @@ 125 return f"CAST({this} AS DATE)" 126 127 -128def _str_to_time_sql(self: generator.Generator, expression: exp.StrToTime) -> str: +128def _str_to_time_sql(self: Hive.Generator, expression: exp.StrToTime) -> str: 129 this = self.sql(expression, "this") 130 time_format = self.format_time(expression) 131 if time_format not in (Hive.TIME_FORMAT, Hive.DATE_FORMAT): @@ -455,13 +455,13 @@ 133 return f"CAST({this} AS TIMESTAMP)" 134 135 -136def _time_to_str(self: generator.Generator, expression: exp.TimeToStr) -> str: +136def _time_to_str(self: Hive.Generator, expression: exp.TimeToStr) -> str: 137 this = self.sql(expression, "this") 138 time_format = self.format_time(expression) 139 return f"DATE_FORMAT({this}, {time_format})" 140 141 -142def _to_date_sql(self: generator.Generator, expression: exp.TsOrDsToDate) -> str: +142def _to_date_sql(self: Hive.Generator, expression: exp.TsOrDsToDate) -> str: 143 this = self.sql(expression, "this") 144 time_format = self.format_time(expression) 145 if time_format and time_format not in (Hive.TIME_FORMAT, Hive.DATE_FORMAT): @@ -2991,6 +2991,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/mysql.html b/docs/sqlglot/dialects/mysql.html index 8f762da..d165e53 100644 --- a/docs/sqlglot/dialects/mysql.html +++ b/docs/sqlglot/dialects/mysql.html @@ -376,7 +376,7 @@
    33 return _parse 34 35 - 36def _date_trunc_sql(self: generator.Generator, expression: exp.DateTrunc) -> str: + 36def _date_trunc_sql(self: MySQL.Generator, expression: exp.DateTrunc) -> str: 37 expr = self.sql(expression, "this") 38 unit = expression.text("unit") 39 @@ -407,12 +407,12 @@ 64 return exp.StrToDate(this=seq_get(args, 0), format=date_format) 65 66 - 67def _str_to_date_sql(self: generator.Generator, expression: exp.StrToDate | exp.StrToTime) -> str: + 67def _str_to_date_sql(self: MySQL.Generator, expression: exp.StrToDate | exp.StrToTime) -> str: 68 date_format = self.format_time(expression) 69 return f"STR_TO_DATE({self.sql(expression.this)}, {date_format})" 70 71 - 72def _trim_sql(self: generator.Generator, expression: exp.Trim) -> str: + 72def _trim_sql(self: MySQL.Generator, expression: exp.Trim) -> str: 73 target = self.sql(expression, "this") 74 trim_type = self.sql(expression, "position") 75 remove_chars = self.sql(expression, "expression") @@ -427,8 +427,8 @@ 84 return f"TRIM({trim_type}{remove_chars}{from_part}{target})" 85 86 - 87def _date_add_sql(kind: str) -> t.Callable[[generator.Generator, exp.DateAdd | exp.DateSub], str]: - 88 def func(self: generator.Generator, expression: exp.DateAdd | exp.DateSub) -> str: + 87def _date_add_sql(kind: str) -> t.Callable[[MySQL.Generator, exp.DateAdd | exp.DateSub], str]: + 88 def func(self: MySQL.Generator, expression: exp.DateAdd | exp.DateSub) -> str: 89 this = self.sql(expression, "this") 90 unit = expression.text("unit").upper() or "DAY" 91 return f"DATE_{kind}({this}, {self.sql(exp.Interval(this=expression.expression.copy(), unit=unit))})" @@ -2053,7 +2053,7 @@
    COMMANDS = -{<TokenType.COMMAND: 'COMMAND'>, <TokenType.FETCH: 'FETCH'>, <TokenType.EXECUTE: 'EXECUTE'>} +{<TokenType.FETCH: 'FETCH'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.EXECUTE: 'EXECUTE'>}
    @@ -2465,7 +2465,7 @@ Default: 3
    FUNC_TOKENS = - {<TokenType.IMAGE: 'IMAGE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SUPER: 'SUPER'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.UINT: 'UINT'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.RANGE: 'RANGE'>, <TokenType.LIKE: 'LIKE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.JSONB: 'JSONB'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.INDEX: 'INDEX'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.REPLACE: 'REPLACE'>, <TokenType.BINARY: 'BINARY'>, <TokenType.DATE: 'DATE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.GLOB: 'GLOB'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.FILTER: 'FILTER'>, <TokenType.UUID: 'UUID'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.JSON: 'JSON'>, <TokenType.VAR: 'VAR'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.UINT128: 'UINT128'>, <TokenType.INET: 'INET'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.RLIKE: 'RLIKE'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.MAP: 'MAP'>, <TokenType.INT128: 'INT128'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.ANY: 'ANY'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.IDENTIFIER: 'IDENTIFIER'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.TEXT: 'TEXT'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ROW: 'ROW'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.SOME: 'SOME'>, <TokenType.VALUES: 'VALUES'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.MERGE: 'MERGE'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.ILIKE: 'ILIKE'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.XOR: 'XOR'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.INSERT: 'INSERT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.INT: 'INT'>, <TokenType.INT256: 'INT256'>, <TokenType.NULL: 'NULL'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.ALL: 'ALL'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.YEAR: 'YEAR'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.ENUM: 'ENUM'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.BIT: 'BIT'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.MEDIUMINT: 
'MEDIUMINT'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.UNNEST: 'UNNEST'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.LEFT: 'LEFT'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.TIME: 'TIME'>, <TokenType.XML: 'XML'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.TSRANGE: 'TSRANGE'>} + {<TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.IDENTIFIER: 'IDENTIFIER'>, <TokenType.ROW: 'ROW'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.DATE: 'DATE'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.ENUM: 'ENUM'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.MERGE: 'MERGE'>, <TokenType.INDEX: 'INDEX'>, <TokenType.UINT256: 'UINT256'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.RLIKE: 'RLIKE'>, <TokenType.XML: 'XML'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.TEXT: 'TEXT'>, <TokenType.NESTED: 'NESTED'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.ANY: 'ANY'>, <TokenType.VAR: 'VAR'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.TIME: 'TIME'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.INSERT: 'INSERT'>, <TokenType.INT: 'INT'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.SUPER: 'SUPER'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.GLOB: 'GLOB'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, <TokenType.BINARY: 'BINARY'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.BIT: 'BIT'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.INT256: 'INT256'>, <TokenType.UNNEST: 'UNNEST'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.INET: 'INET'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.VALUES: 'VALUES'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.LIKE: 'LIKE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.INT128: 'INT128'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.UUID: 'UUID'>, <TokenType.LEFT: 'LEFT'>, <TokenType.UINT: 'UINT'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.REPLACE: 'REPLACE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.SOME: 'SOME'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.NULL: 'NULL'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.FILTER: 'FILTER'>, 
<TokenType.COMMAND: 'COMMAND'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.ALL: 'ALL'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.YEAR: 'YEAR'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.MONEY: 'MONEY'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.JSONB: 'JSONB'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.MAP: 'MAP'>, <TokenType.ILIKE: 'ILIKE'>, <TokenType.JSON: 'JSON'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.XOR: 'XOR'>, <TokenType.RANGE: 'RANGE'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.ENUM8: 'ENUM8'>}
    @@ -2504,7 +2504,7 @@ Default: 3
    TABLE_ALIAS_TOKENS = - {<TokenType.END: 'END'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SUPER: 'SUPER'>, <TokenType.UINT: 'UINT'>, <TokenType.SET: 'SET'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.JSONB: 'JSONB'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.FILTER: 'FILTER'>, <TokenType.UUID: 'UUID'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.JSON: 'JSON'>, <TokenType.UINT128: 'UINT128'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.MAP: 'MAP'>, <TokenType.FIRST: 'FIRST'>, <TokenType.ANY: 'ANY'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.IS: 'IS'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.DIV: 'DIV'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.KEEP: 'KEEP'>, <TokenType.LOAD: 'LOAD'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.MERGE: 'MERGE'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.INT: 'INT'>, <TokenType.INT256: 'INT256'>, <TokenType.ALL: 'ALL'>, <TokenType.NULL: 'NULL'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.YEAR: 'YEAR'>, <TokenType.TOP: 'TOP'>, <TokenType.ENUM: 'ENUM'>, <TokenType.BIT: 'BIT'>, <TokenType.SEMI: 'SEMI'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.DESC: 'DESC'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.TIME: 'TIME'>, <TokenType.VIEW: 'VIEW'>, <TokenType.XML: 'XML'>, <TokenType.INT128: 'INT128'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.ASC: 'ASC'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.SHOW: 'SHOW'>, <TokenType.INDEX: 'INDEX'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.FALSE: 'FALSE'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.BINARY: 'BINARY'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.DATE: 'DATE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.CASE: 'CASE'>, <TokenType.VAR: 'VAR'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, 
<TokenType.INET: 'INET'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.TEXT: 'TEXT'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ROW: 'ROW'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.CACHE: 'CACHE'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.SOME: 'SOME'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.ANTI: 'ANTI'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.NEXT: 'NEXT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.ROWS: 'ROWS'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.TRUE: 'TRUE'>} + {<TokenType.UBIGINT: 'UBIGINT'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.ROW: 'ROW'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATE: 'DATE'>, <TokenType.ENUM: 'ENUM'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.MERGE: 'MERGE'>, <TokenType.XML: 'XML'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.TEXT: 'TEXT'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.VIEW: 'VIEW'>, <TokenType.ANY: 'ANY'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.TRUE: 'TRUE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.BINARY: 'BINARY'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.BIT: 'BIT'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.INET: 'INET'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.KEEP: 'KEEP'>, <TokenType.LOAD: 'LOAD'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.UINT: 'UINT'>, <TokenType.FIRST: 'FIRST'>, <TokenType.SOME: 'SOME'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.SEMI: 'SEMI'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.DIV: 'DIV'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.YEAR: 
'YEAR'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.ROWS: 'ROWS'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.JSONB: 'JSONB'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.FALSE: 'FALSE'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.IS: 'IS'>, <TokenType.SHOW: 'SHOW'>, <TokenType.INDEX: 'INDEX'>, <TokenType.UINT256: 'UINT256'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.VAR: 'VAR'>, <TokenType.TIME: 'TIME'>, <TokenType.ANTI: 'ANTI'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.INT: 'INT'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.SUPER: 'SUPER'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.CASE: 'CASE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.INT256: 'INT256'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.END: 'END'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.INT128: 'INT128'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.NEXT: 'NEXT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.UUID: 'UUID'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.TOP: 'TOP'>, <TokenType.DESC: 'DESC'>, <TokenType.ASC: 'ASC'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.NULL: 'NULL'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.FILTER: 'FILTER'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.ALL: 'ALL'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.MONEY: 'MONEY'>, <TokenType.SET: 'SET'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.MAP: 'MAP'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.CACHE: 'CACHE'>, <TokenType.JSON: 'JSON'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>}
    @@ -2607,7 +2607,7 @@ Default: 3
SCHEMA_UNNAMED_CONSTRAINTS =
-{'PRIMARY KEY', 'FULLTEXT', 'UNIQUE', 'SPATIAL', 'LIKE', 'KEY', 'CHECK', 'INDEX', 'FOREIGN KEY'}
+{'SPATIAL', 'UNIQUE', 'KEY', 'INDEX', 'CHECK', 'FULLTEXT', 'FOREIGN KEY', 'LIKE', 'PRIMARY KEY'}
    @@ -2619,7 +2619,7 @@ Default: 3
PROFILE_TYPES =
-{'BLOCK IO', 'ALL', 'CONTEXT SWITCHES', 'PAGE FAULTS', 'SOURCE', 'CPU', 'SWAPS', 'IPC', 'MEMORY'}
+{'SWAPS', 'BLOCK IO', 'PAGE FAULTS', 'ALL', 'CONTEXT SWITCHES', 'SOURCE', 'IPC', 'MEMORY', 'CPU'}
    @@ -2632,7 +2632,7 @@ Default: 3
    TYPE_TOKENS = - {<TokenType.IMAGE: 'IMAGE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SUPER: 'SUPER'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.UINT: 'UINT'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.SET: 'SET'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.JSONB: 'JSONB'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.BINARY: 'BINARY'>, <TokenType.DATE: 'DATE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.UUID: 'UUID'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.JSON: 'JSON'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.UINT128: 'UINT128'>, <TokenType.INET: 'INET'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.MAP: 'MAP'>, <TokenType.INT128: 'INT128'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.TEXT: 'TEXT'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.CHAR: 'CHAR'>, <TokenType.INT: 'INT'>, <TokenType.INT256: 'INT256'>, <TokenType.NULL: 'NULL'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.YEAR: 'YEAR'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.ENUM: 'ENUM'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.BIT: 'BIT'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.TIME: 'TIME'>, <TokenType.XML: 'XML'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.TSRANGE: 'TSRANGE'>} + {<TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATE: 'DATE'>, <TokenType.ENUM: 'ENUM'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.UINT256: 'UINT256'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.XML: 'XML'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.IPPREFIX: 'IPPREFIX'>, 
<TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.TEXT: 'TEXT'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.TIME: 'TIME'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.INT: 'INT'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.SUPER: 'SUPER'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.BINARY: 'BINARY'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.BIT: 'BIT'>, <TokenType.INT256: 'INT256'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.INET: 'INET'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.INT128: 'INT128'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.UUID: 'UUID'>, <TokenType.UINT: 'UINT'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.NULL: 'NULL'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.YEAR: 'YEAR'>, <TokenType.MONEY: 'MONEY'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.SET: 'SET'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.JSONB: 'JSONB'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.MAP: 'MAP'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.JSON: 'JSON'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.ENUM8: 'ENUM8'>}
    @@ -2645,7 +2645,7 @@ Default: 3
ENUM_TYPE_TOKENS =
- {<TokenType.ENUM8: 'ENUM8'>, <TokenType.ENUM: 'ENUM'>, <TokenType.SET: 'SET'>, <TokenType.ENUM16: 'ENUM16'>}
+ {<TokenType.SET: 'SET'>, <TokenType.ENUM: 'ENUM'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.ENUM8: 'ENUM8'>}
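Most of the set-valued attributes diffed above (FUNC_TOKENS, TABLE_ALIAS_TOKENS, TYPE_TOKENS, SCHEMA_UNNAMED_CONSTRAINTS, PROFILE_TYPES, ENUM_TYPE_TOKENS) change only in repr ordering, since Python sets render in an arbitrary order on each docs build; the one substantive addition is the new TokenType.OBJECT_IDENTIFIER member. A minimal membership check, assuming sqlglot >= 18.3 is importable (illustrative, not part of the patch):

from sqlglot.dialects.mysql import MySQL
from sqlglot.tokens import TokenType

# Compare membership instead of diffing the unordered set reprs.
assert TokenType.OBJECT_IDENTIFIER in MySQL.Parser.TYPE_TOKENS
assert TokenType.OBJECT_IDENTIFIER in MySQL.Parser.FUNC_TOKENS
assert TokenType.OBJECT_IDENTIFIER in MySQL.Parser.TABLE_ALIAS_TOKENS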
    @@ -3708,6 +3708,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
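The single line added to this inherited-members list, objectidentifier_sql, is the generator hook behind the new OBJECT_IDENTIFIER token; the postgres.html hunk later in this patch maps OID, REGCLASS and the other object-identifier type names onto it. A quick way to exercise the new token, assuming sqlglot >= 18.3 is installed (illustrative sketch, not part of the patch):

from sqlglot.dialects.postgres import Postgres
from sqlglot.tokens import TokenType

# Per the KEYWORDS additions in the postgres.html hunk below, REGCLASS should
# now come back as an OBJECT_IDENTIFIER token rather than a plain identifier.
tokens = Postgres.Tokenizer().tokenize("SELECT 'pg_class'::REGCLASS")
assert any(token.token_type == TokenType.OBJECT_IDENTIFIER for token in tokens)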
    diff --git a/docs/sqlglot/dialects/oracle.html b/docs/sqlglot/dialects/oracle.html index 2d56b8c..40aa6c0 100644 --- a/docs/sqlglot/dialects/oracle.html +++ b/docs/sqlglot/dialects/oracle.html @@ -267,7 +267,7 @@ 8from sqlglot.tokens import TokenType 9 10 - 11def _parse_xml_table(self: parser.Parser) -> exp.XMLTable: + 11def _parse_xml_table(self: Oracle.Parser) -> exp.XMLTable: 12 this = self._parse_string() 13 14 passing = None @@ -392,55 +392,54 @@ 133 ), 134 exp.Group: transforms.preprocess([transforms.unalias_group]), 135 exp.ILike: no_ilike_sql, -136 exp.Coalesce: rename_func("NVL"), -137 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), -138 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", -139 exp.Subquery: lambda self, e: self.subquery_sql(e, sep=" "), -140 exp.Substring: rename_func("SUBSTR"), -141 exp.Table: lambda self, e: self.table_sql(e, sep=" "), -142 exp.TableSample: lambda self, e: self.tablesample_sql(e, sep=" "), -143 exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", -144 exp.ToChar: lambda self, e: self.function_fallback_sql(e), -145 exp.Trim: trim_sql, -146 exp.UnixToTime: lambda self, e: f"TO_DATE('1970-01-01','YYYY-MM-DD') + ({self.sql(e, 'this')} / 86400)", -147 } -148 -149 PROPERTIES_LOCATION = { -150 **generator.Generator.PROPERTIES_LOCATION, -151 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -152 } -153 -154 def offset_sql(self, expression: exp.Offset) -> str: -155 return f"{super().offset_sql(expression)} ROWS" -156 -157 def xmltable_sql(self, expression: exp.XMLTable) -> str: -158 this = self.sql(expression, "this") -159 passing = self.expressions(expression, key="passing") -160 passing = f"{self.sep()}PASSING{self.seg(passing)}" if passing else "" -161 columns = self.expressions(expression, key="columns") -162 columns = f"{self.sep()}COLUMNS{self.seg(columns)}" if columns else "" -163 by_ref = ( -164 f"{self.sep()}RETURNING SEQUENCE BY REF" if expression.args.get("by_ref") else "" -165 ) -166 return f"XMLTABLE({self.sep('')}{self.indent(this + passing + by_ref + columns)}{self.seg(')', sep='')}" -167 -168 class Tokenizer(tokens.Tokenizer): -169 VAR_SINGLE_TOKENS = {"@", "$", "#"} -170 -171 KEYWORDS = { -172 **tokens.Tokenizer.KEYWORDS, -173 "(+)": TokenType.JOIN_MARKER, -174 "BINARY_DOUBLE": TokenType.DOUBLE, -175 "BINARY_FLOAT": TokenType.FLOAT, -176 "COLUMNS": TokenType.COLUMN, -177 "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE, -178 "MINUS": TokenType.EXCEPT, -179 "NVARCHAR2": TokenType.NVARCHAR, -180 "SAMPLE": TokenType.TABLE_SAMPLE, -181 "START": TokenType.BEGIN, -182 "TOP": TokenType.TOP, -183 "VARCHAR2": TokenType.VARCHAR, -184 } +136 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), +137 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", +138 exp.Subquery: lambda self, e: self.subquery_sql(e, sep=" "), +139 exp.Substring: rename_func("SUBSTR"), +140 exp.Table: lambda self, e: self.table_sql(e, sep=" "), +141 exp.TableSample: lambda self, e: self.tablesample_sql(e, sep=" "), +142 exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", +143 exp.ToChar: lambda self, e: self.function_fallback_sql(e), +144 exp.Trim: trim_sql, +145 exp.UnixToTime: lambda self, e: f"TO_DATE('1970-01-01','YYYY-MM-DD') + ({self.sql(e, 'this')} / 86400)", +146 } +147 +148 PROPERTIES_LOCATION = { +149 **generator.Generator.PROPERTIES_LOCATION, +150 
exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, +151 } +152 +153 def offset_sql(self, expression: exp.Offset) -> str: +154 return f"{super().offset_sql(expression)} ROWS" +155 +156 def xmltable_sql(self, expression: exp.XMLTable) -> str: +157 this = self.sql(expression, "this") +158 passing = self.expressions(expression, key="passing") +159 passing = f"{self.sep()}PASSING{self.seg(passing)}" if passing else "" +160 columns = self.expressions(expression, key="columns") +161 columns = f"{self.sep()}COLUMNS{self.seg(columns)}" if columns else "" +162 by_ref = ( +163 f"{self.sep()}RETURNING SEQUENCE BY REF" if expression.args.get("by_ref") else "" +164 ) +165 return f"XMLTABLE({self.sep('')}{self.indent(this + passing + by_ref + columns)}{self.seg(')', sep='')}" +166 +167 class Tokenizer(tokens.Tokenizer): +168 VAR_SINGLE_TOKENS = {"@", "$", "#"} +169 +170 KEYWORDS = { +171 **tokens.Tokenizer.KEYWORDS, +172 "(+)": TokenType.JOIN_MARKER, +173 "BINARY_DOUBLE": TokenType.DOUBLE, +174 "BINARY_FLOAT": TokenType.FLOAT, +175 "COLUMNS": TokenType.COLUMN, +176 "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE, +177 "MINUS": TokenType.EXCEPT, +178 "NVARCHAR2": TokenType.NVARCHAR, +179 "SAMPLE": TokenType.TABLE_SAMPLE, +180 "START": TokenType.BEGIN, +181 "TOP": TokenType.TOP, +182 "VARCHAR2": TokenType.VARCHAR, +183 }
    @@ -562,55 +561,54 @@ 134 ), 135 exp.Group: transforms.preprocess([transforms.unalias_group]), 136 exp.ILike: no_ilike_sql, -137 exp.Coalesce: rename_func("NVL"), -138 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), -139 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", -140 exp.Subquery: lambda self, e: self.subquery_sql(e, sep=" "), -141 exp.Substring: rename_func("SUBSTR"), -142 exp.Table: lambda self, e: self.table_sql(e, sep=" "), -143 exp.TableSample: lambda self, e: self.tablesample_sql(e, sep=" "), -144 exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", -145 exp.ToChar: lambda self, e: self.function_fallback_sql(e), -146 exp.Trim: trim_sql, -147 exp.UnixToTime: lambda self, e: f"TO_DATE('1970-01-01','YYYY-MM-DD') + ({self.sql(e, 'this')} / 86400)", -148 } -149 -150 PROPERTIES_LOCATION = { -151 **generator.Generator.PROPERTIES_LOCATION, -152 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -153 } -154 -155 def offset_sql(self, expression: exp.Offset) -> str: -156 return f"{super().offset_sql(expression)} ROWS" -157 -158 def xmltable_sql(self, expression: exp.XMLTable) -> str: -159 this = self.sql(expression, "this") -160 passing = self.expressions(expression, key="passing") -161 passing = f"{self.sep()}PASSING{self.seg(passing)}" if passing else "" -162 columns = self.expressions(expression, key="columns") -163 columns = f"{self.sep()}COLUMNS{self.seg(columns)}" if columns else "" -164 by_ref = ( -165 f"{self.sep()}RETURNING SEQUENCE BY REF" if expression.args.get("by_ref") else "" -166 ) -167 return f"XMLTABLE({self.sep('')}{self.indent(this + passing + by_ref + columns)}{self.seg(')', sep='')}" -168 -169 class Tokenizer(tokens.Tokenizer): -170 VAR_SINGLE_TOKENS = {"@", "$", "#"} -171 -172 KEYWORDS = { -173 **tokens.Tokenizer.KEYWORDS, -174 "(+)": TokenType.JOIN_MARKER, -175 "BINARY_DOUBLE": TokenType.DOUBLE, -176 "BINARY_FLOAT": TokenType.FLOAT, -177 "COLUMNS": TokenType.COLUMN, -178 "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE, -179 "MINUS": TokenType.EXCEPT, -180 "NVARCHAR2": TokenType.NVARCHAR, -181 "SAMPLE": TokenType.TABLE_SAMPLE, -182 "START": TokenType.BEGIN, -183 "TOP": TokenType.TOP, -184 "VARCHAR2": TokenType.VARCHAR, -185 } +137 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), +138 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", +139 exp.Subquery: lambda self, e: self.subquery_sql(e, sep=" "), +140 exp.Substring: rename_func("SUBSTR"), +141 exp.Table: lambda self, e: self.table_sql(e, sep=" "), +142 exp.TableSample: lambda self, e: self.tablesample_sql(e, sep=" "), +143 exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", +144 exp.ToChar: lambda self, e: self.function_fallback_sql(e), +145 exp.Trim: trim_sql, +146 exp.UnixToTime: lambda self, e: f"TO_DATE('1970-01-01','YYYY-MM-DD') + ({self.sql(e, 'this')} / 86400)", +147 } +148 +149 PROPERTIES_LOCATION = { +150 **generator.Generator.PROPERTIES_LOCATION, +151 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, +152 } +153 +154 def offset_sql(self, expression: exp.Offset) -> str: +155 return f"{super().offset_sql(expression)} ROWS" +156 +157 def xmltable_sql(self, expression: exp.XMLTable) -> str: +158 this = self.sql(expression, "this") +159 passing = self.expressions(expression, key="passing") +160 passing = f"{self.sep()}PASSING{self.seg(passing)}" if passing else "" +161 columns = 
self.expressions(expression, key="columns") +162 columns = f"{self.sep()}COLUMNS{self.seg(columns)}" if columns else "" +163 by_ref = ( +164 f"{self.sep()}RETURNING SEQUENCE BY REF" if expression.args.get("by_ref") else "" +165 ) +166 return f"XMLTABLE({self.sep('')}{self.indent(this + passing + by_ref + columns)}{self.seg(')', sep='')}" +167 +168 class Tokenizer(tokens.Tokenizer): +169 VAR_SINGLE_TOKENS = {"@", "$", "#"} +170 +171 KEYWORDS = { +172 **tokens.Tokenizer.KEYWORDS, +173 "(+)": TokenType.JOIN_MARKER, +174 "BINARY_DOUBLE": TokenType.DOUBLE, +175 "BINARY_FLOAT": TokenType.FLOAT, +176 "COLUMNS": TokenType.COLUMN, +177 "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE, +178 "MINUS": TokenType.EXCEPT, +179 "NVARCHAR2": TokenType.NVARCHAR, +180 "SAMPLE": TokenType.TABLE_SAMPLE, +181 "START": TokenType.BEGIN, +182 "TOP": TokenType.TOP, +183 "VARCHAR2": TokenType.VARCHAR, +184 } @@ -972,7 +970,7 @@ Default: 3
WINDOW_BEFORE_PAREN_TOKENS =
-{<TokenType.KEEP: 'KEEP'>, <TokenType.OVER: 'OVER'>}
+{<TokenType.OVER: 'OVER'>, <TokenType.KEEP: 'KEEP'>}
    @@ -1254,37 +1252,36 @@ Default: 3 134 ), 135 exp.Group: transforms.preprocess([transforms.unalias_group]), 136 exp.ILike: no_ilike_sql, -137 exp.Coalesce: rename_func("NVL"), -138 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), -139 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", -140 exp.Subquery: lambda self, e: self.subquery_sql(e, sep=" "), -141 exp.Substring: rename_func("SUBSTR"), -142 exp.Table: lambda self, e: self.table_sql(e, sep=" "), -143 exp.TableSample: lambda self, e: self.tablesample_sql(e, sep=" "), -144 exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", -145 exp.ToChar: lambda self, e: self.function_fallback_sql(e), -146 exp.Trim: trim_sql, -147 exp.UnixToTime: lambda self, e: f"TO_DATE('1970-01-01','YYYY-MM-DD') + ({self.sql(e, 'this')} / 86400)", -148 } -149 -150 PROPERTIES_LOCATION = { -151 **generator.Generator.PROPERTIES_LOCATION, -152 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -153 } -154 -155 def offset_sql(self, expression: exp.Offset) -> str: -156 return f"{super().offset_sql(expression)} ROWS" -157 -158 def xmltable_sql(self, expression: exp.XMLTable) -> str: -159 this = self.sql(expression, "this") -160 passing = self.expressions(expression, key="passing") -161 passing = f"{self.sep()}PASSING{self.seg(passing)}" if passing else "" -162 columns = self.expressions(expression, key="columns") -163 columns = f"{self.sep()}COLUMNS{self.seg(columns)}" if columns else "" -164 by_ref = ( -165 f"{self.sep()}RETURNING SEQUENCE BY REF" if expression.args.get("by_ref") else "" -166 ) -167 return f"XMLTABLE({self.sep('')}{self.indent(this + passing + by_ref + columns)}{self.seg(')', sep='')}" +137 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), +138 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", +139 exp.Subquery: lambda self, e: self.subquery_sql(e, sep=" "), +140 exp.Substring: rename_func("SUBSTR"), +141 exp.Table: lambda self, e: self.table_sql(e, sep=" "), +142 exp.TableSample: lambda self, e: self.tablesample_sql(e, sep=" "), +143 exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", +144 exp.ToChar: lambda self, e: self.function_fallback_sql(e), +145 exp.Trim: trim_sql, +146 exp.UnixToTime: lambda self, e: f"TO_DATE('1970-01-01','YYYY-MM-DD') + ({self.sql(e, 'this')} / 86400)", +147 } +148 +149 PROPERTIES_LOCATION = { +150 **generator.Generator.PROPERTIES_LOCATION, +151 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, +152 } +153 +154 def offset_sql(self, expression: exp.Offset) -> str: +155 return f"{super().offset_sql(expression)} ROWS" +156 +157 def xmltable_sql(self, expression: exp.XMLTable) -> str: +158 this = self.sql(expression, "this") +159 passing = self.expressions(expression, key="passing") +160 passing = f"{self.sep()}PASSING{self.seg(passing)}" if passing else "" +161 columns = self.expressions(expression, key="columns") +162 columns = f"{self.sep()}COLUMNS{self.seg(columns)}" if columns else "" +163 by_ref = ( +164 f"{self.sep()}RETURNING SEQUENCE BY REF" if expression.args.get("by_ref") else "" +165 ) +166 return f"XMLTABLE({self.sep('')}{self.indent(this + passing + by_ref + columns)}{self.seg(')', sep='')}"
    @@ -1404,7 +1401,7 @@ Default: True
    TRANSFORMS = - {<class 'sqlglot.expressions.DateAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalDayToSecondSpan'>: 'DAY TO SECOND', <class 'sqlglot.expressions.IntervalYearToMonthSpan'>: 'YEAR TO MONTH', <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.DateStrToDate'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.Group'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.ILike'>: <function no_ilike_sql>, <class 'sqlglot.expressions.Coalesce'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.StrToTime'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.Subquery'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.Substring'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Table'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.TableSample'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.TimeToStr'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.ToChar'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.Trim'>: <function trim_sql>, <class 'sqlglot.expressions.UnixToTime'>: <function Oracle.Generator.<lambda>>} + {<class 'sqlglot.expressions.DateAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalDayToSecondSpan'>: 'DAY TO SECOND', <class 'sqlglot.expressions.IntervalYearToMonthSpan'>: 'YEAR TO MONTH', <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.ReturnsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.Group'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.ILike'>: <function no_ilike_sql>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.StrToTime'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.Subquery'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.Substring'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Table'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.TableSample'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.TimeToStr'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.ToChar'>: <function Oracle.Generator.<lambda>>, <class 'sqlglot.expressions.Trim'>: <function trim_sql>, <class 'sqlglot.expressions.UnixToTime'>: <function Oracle.Generator.<lambda>>}
@@ -1437,8 +1434,8 @@ Default: True
-
-155        def offset_sql(self, expression: exp.Offset) -> str:
    -156            return f"{super().offset_sql(expression)} ROWS"
    +            
+154        def offset_sql(self, expression: exp.Offset) -> str:
    +155            return f"{super().offset_sql(expression)} ROWS"
     
    @@ -1456,16 +1453,16 @@ Default: True
    -
    158        def xmltable_sql(self, expression: exp.XMLTable) -> str:
    -159            this = self.sql(expression, "this")
    -160            passing = self.expressions(expression, key="passing")
    -161            passing = f"{self.sep()}PASSING{self.seg(passing)}" if passing else ""
    -162            columns = self.expressions(expression, key="columns")
    -163            columns = f"{self.sep()}COLUMNS{self.seg(columns)}" if columns else ""
    -164            by_ref = (
    -165                f"{self.sep()}RETURNING SEQUENCE BY REF" if expression.args.get("by_ref") else ""
    -166            )
    -167            return f"XMLTABLE({self.sep('')}{self.indent(this + passing + by_ref + columns)}{self.seg(')', sep='')}"
    +            
+157        def xmltable_sql(self, expression: exp.XMLTable) -> str:
    +158            this = self.sql(expression, "this")
    +159            passing = self.expressions(expression, key="passing")
    +160            passing = f"{self.sep()}PASSING{self.seg(passing)}" if passing else ""
    +161            columns = self.expressions(expression, key="columns")
    +162            columns = f"{self.sep()}COLUMNS{self.seg(columns)}" if columns else ""
    +163            by_ref = (
    +164                f"{self.sep()}RETURNING SEQUENCE BY REF" if expression.args.get("by_ref") else ""
    +165            )
    +166            return f"XMLTABLE({self.sep('')}{self.indent(this + passing + by_ref + columns)}{self.seg(')', sep='')}"
     
    @@ -1837,6 +1834,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    @@ -2032,23 +2030,23 @@ Default: True
    -
-169    class Tokenizer(tokens.Tokenizer):
    -170        VAR_SINGLE_TOKENS = {"@", "$", "#"}
    -171
    -172        KEYWORDS = {
    -173            **tokens.Tokenizer.KEYWORDS,
    -174            "(+)": TokenType.JOIN_MARKER,
    -175            "BINARY_DOUBLE": TokenType.DOUBLE,
    -176            "BINARY_FLOAT": TokenType.FLOAT,
    -177            "COLUMNS": TokenType.COLUMN,
    -178            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
    -179            "MINUS": TokenType.EXCEPT,
    -180            "NVARCHAR2": TokenType.NVARCHAR,
    -181            "SAMPLE": TokenType.TABLE_SAMPLE,
    -182            "START": TokenType.BEGIN,
    -183            "TOP": TokenType.TOP,
    -184            "VARCHAR2": TokenType.VARCHAR,
    -185        }
    +            
+168    class Tokenizer(tokens.Tokenizer):
    +169        VAR_SINGLE_TOKENS = {"@", "$", "#"}
    +170
    +171        KEYWORDS = {
    +172            **tokens.Tokenizer.KEYWORDS,
    +173            "(+)": TokenType.JOIN_MARKER,
    +174            "BINARY_DOUBLE": TokenType.DOUBLE,
    +175            "BINARY_FLOAT": TokenType.FLOAT,
    +176            "COLUMNS": TokenType.COLUMN,
    +177            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
    +178            "MINUS": TokenType.EXCEPT,
    +179            "NVARCHAR2": TokenType.NVARCHAR,
    +180            "SAMPLE": TokenType.TABLE_SAMPLE,
    +181            "START": TokenType.BEGIN,
    +182            "TOP": TokenType.TOP,
    +183            "VARCHAR2": TokenType.VARCHAR,
    +184        }
     
    diff --git a/docs/sqlglot/dialects/postgres.html b/docs/sqlglot/dialects/postgres.html index 48bde5b..bc81615 100644 --- a/docs/sqlglot/dialects/postgres.html +++ b/docs/sqlglot/dialects/postgres.html @@ -338,8 +338,8 @@
    40} 41 42 - 43def _date_add_sql(kind: str) -> t.Callable[[generator.Generator, exp.DateAdd | exp.DateSub], str]: - 44 def func(self: generator.Generator, expression: exp.DateAdd | exp.DateSub) -> str: + 43def _date_add_sql(kind: str) -> t.Callable[[Postgres.Generator, exp.DateAdd | exp.DateSub], str]: + 44 def func(self: Postgres.Generator, expression: exp.DateAdd | exp.DateSub) -> str: 45 expression = expression.copy() 46 47 this = self.sql(expression, "this") @@ -355,7 +355,7 @@ 57 return func 58 59 - 60def _date_diff_sql(self: generator.Generator, expression: exp.DateDiff) -> str: + 60def _date_diff_sql(self: Postgres.Generator, expression: exp.DateDiff) -> str: 61 unit = expression.text("unit").upper() 62 factor = DATE_DIFF_FACTOR.get(unit) 63 @@ -381,7 +381,7 @@ 83 return f"CAST({unit} AS BIGINT)" 84 85 - 86def _substring_sql(self: generator.Generator, expression: exp.Substring) -> str: + 86def _substring_sql(self: Postgres.Generator, expression: exp.Substring) -> str: 87 this = self.sql(expression, "this") 88 start = self.sql(expression, "start") 89 length = self.sql(expression, "length") @@ -392,7 +392,7 @@ 94 return f"SUBSTRING({this}{from_part}{for_part})" 95 96 - 97def _string_agg_sql(self: generator.Generator, expression: exp.GroupConcat) -> str: + 97def _string_agg_sql(self: Postgres.Generator, expression: exp.GroupConcat) -> str: 98 expression = expression.copy() 99 separator = expression.args.get("separator") or exp.Literal.string(",") 100 @@ -406,7 +406,7 @@ 108 return f"STRING_AGG({self.format_args(this, separator)}{order})" 109 110 -111def _datatype_sql(self: generator.Generator, expression: exp.DataType) -> str: +111def _datatype_sql(self: Postgres.Generator, expression: exp.DataType) -> str: 112 if expression.is_type("array"): 113 return f"{self.expressions(expression, flat=True)}[]" 114 return self.datatype_sql(expression) @@ -573,161 +573,173 @@ 275 "SMALLSERIAL": TokenType.SMALLSERIAL, 276 "TEMP": TokenType.TEMPORARY, 277 "CSTRING": TokenType.PSEUDO_TYPE, -278 } -279 -280 SINGLE_TOKENS = { -281 **tokens.Tokenizer.SINGLE_TOKENS, -282 "$": TokenType.PARAMETER, -283 } -284 -285 VAR_SINGLE_TOKENS = {"$"} -286 -287 class Parser(parser.Parser): -288 CONCAT_NULL_OUTPUTS_STRING = True -289 -290 FUNCTIONS = { -291 **parser.Parser.FUNCTIONS, -292 "DATE_TRUNC": parse_timestamp_trunc, -293 "GENERATE_SERIES": _generate_series, -294 "NOW": exp.CurrentTimestamp.from_arg_list, -295 "TO_CHAR": format_time_lambda(exp.TimeToStr, "postgres"), -296 "TO_TIMESTAMP": _to_timestamp, -297 "UNNEST": exp.Explode.from_arg_list, -298 } -299 -300 FUNCTION_PARSERS = { -301 **parser.Parser.FUNCTION_PARSERS, -302 "DATE_PART": lambda self: self._parse_date_part(), -303 } -304 -305 BITWISE = { -306 **parser.Parser.BITWISE, -307 TokenType.HASH: exp.BitwiseXor, -308 } -309 -310 EXPONENT = { -311 TokenType.CARET: exp.Pow, -312 } -313 -314 RANGE_PARSERS = { -315 **parser.Parser.RANGE_PARSERS, -316 TokenType.DAMP: binary_range_parser(exp.ArrayOverlaps), -317 TokenType.DAT: lambda self, this: self.expression( -318 exp.MatchAgainst, this=self._parse_bitwise(), expressions=[this] -319 ), -320 TokenType.AT_GT: binary_range_parser(exp.ArrayContains), -321 TokenType.LT_AT: binary_range_parser(exp.ArrayContained), -322 } -323 -324 STATEMENT_PARSERS = { -325 **parser.Parser.STATEMENT_PARSERS, -326 TokenType.END: lambda self: self._parse_commit_or_rollback(), -327 } -328 -329 def _parse_factor(self) -> t.Optional[exp.Expression]: -330 return self._parse_tokens(self._parse_exponent, self.FACTOR) -331 -332 def 
_parse_exponent(self) -> t.Optional[exp.Expression]: -333 return self._parse_tokens(self._parse_unary, self.EXPONENT) -334 -335 def _parse_date_part(self) -> exp.Expression: -336 part = self._parse_type() -337 self._match(TokenType.COMMA) -338 value = self._parse_bitwise() -339 -340 if part and part.is_string: -341 part = exp.var(part.name) -342 -343 return self.expression(exp.Extract, this=part, expression=value) -344 -345 class Generator(generator.Generator): -346 SINGLE_STRING_INTERVAL = True -347 LOCKING_READS_SUPPORTED = True -348 JOIN_HINTS = False -349 TABLE_HINTS = False -350 QUERY_HINTS = False -351 NVL2_SUPPORTED = False -352 PARAMETER_TOKEN = "$" -353 -354 TYPE_MAPPING = { -355 **generator.Generator.TYPE_MAPPING, -356 exp.DataType.Type.TINYINT: "SMALLINT", -357 exp.DataType.Type.FLOAT: "REAL", -358 exp.DataType.Type.DOUBLE: "DOUBLE PRECISION", -359 exp.DataType.Type.BINARY: "BYTEA", -360 exp.DataType.Type.VARBINARY: "BYTEA", -361 exp.DataType.Type.DATETIME: "TIMESTAMP", -362 } -363 -364 TRANSFORMS = { -365 **generator.Generator.TRANSFORMS, -366 exp.AnyValue: any_value_to_max_sql, -367 exp.ArrayConcat: rename_func("ARRAY_CAT"), -368 exp.BitwiseXor: lambda self, e: self.binary(e, "#"), -369 exp.ColumnDef: transforms.preprocess([_auto_increment_to_serial, _serial_to_generated]), -370 exp.Explode: rename_func("UNNEST"), -371 exp.JSONExtract: arrow_json_extract_sql, -372 exp.JSONExtractScalar: arrow_json_extract_scalar_sql, -373 exp.JSONBExtract: lambda self, e: self.binary(e, "#>"), -374 exp.JSONBExtractScalar: lambda self, e: self.binary(e, "#>>"), -375 exp.JSONBContains: lambda self, e: self.binary(e, "?"), -376 exp.Pow: lambda self, e: self.binary(e, "^"), -377 exp.CurrentDate: no_paren_current_date_sql, -378 exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP", -379 exp.DateAdd: _date_add_sql("+"), -380 exp.DateStrToDate: datestrtodate_sql, -381 exp.DateSub: _date_add_sql("-"), -382 exp.DateDiff: _date_diff_sql, -383 exp.LogicalOr: rename_func("BOOL_OR"), -384 exp.LogicalAnd: rename_func("BOOL_AND"), -385 exp.Max: max_or_greatest, -386 exp.MapFromEntries: no_map_from_entries_sql, -387 exp.Min: min_or_least, -388 exp.ArrayOverlaps: lambda self, e: self.binary(e, "&&"), -389 exp.ArrayContains: lambda self, e: self.binary(e, "@>"), -390 exp.ArrayContained: lambda self, e: self.binary(e, "<@"), -391 exp.Merge: transforms.preprocess([_remove_target_from_merge]), -392 exp.Pivot: no_pivot_sql, -393 exp.RegexpLike: lambda self, e: self.binary(e, "~"), -394 exp.RegexpILike: lambda self, e: self.binary(e, "~*"), -395 exp.StrPosition: str_position_sql, -396 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", -397 exp.Substring: _substring_sql, -398 exp.TimestampTrunc: timestamptrunc_sql, -399 exp.TimeStrToTime: timestrtotime_sql, -400 exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", -401 exp.TableSample: no_tablesample_sql, -402 exp.ToChar: lambda self, e: self.function_fallback_sql(e), -403 exp.Trim: trim_sql, -404 exp.TryCast: no_trycast_sql, -405 exp.TsOrDsToDate: ts_or_ds_to_date_sql("postgres"), -406 exp.UnixToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')})", -407 exp.DataType: _datatype_sql, -408 exp.GroupConcat: _string_agg_sql, -409 exp.Array: lambda self, e: f"{self.normalize_func('ARRAY')}({self.sql(e.expressions[0])})" -410 if isinstance(seq_get(e.expressions, 0), exp.Select) -411 else f"{self.normalize_func('ARRAY')}[{self.expressions(e, flat=True)}]", -412 } -413 -414 
PROPERTIES_LOCATION = { -415 **generator.Generator.PROPERTIES_LOCATION, -416 exp.TransientProperty: exp.Properties.Location.UNSUPPORTED, -417 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -418 } -419 -420 def bracket_sql(self, expression: exp.Bracket) -> str: -421 """Forms like ARRAY[1, 2, 3][3] aren't allowed; we need to wrap the ARRAY.""" -422 if isinstance(expression.this, exp.Array): -423 expression = expression.copy() -424 expression.set("this", exp.paren(expression.this, copy=False)) +278 "OID": TokenType.OBJECT_IDENTIFIER, +279 "REGCLASS": TokenType.OBJECT_IDENTIFIER, +280 "REGCOLLATION": TokenType.OBJECT_IDENTIFIER, +281 "REGCONFIG": TokenType.OBJECT_IDENTIFIER, +282 "REGDICTIONARY": TokenType.OBJECT_IDENTIFIER, +283 "REGNAMESPACE": TokenType.OBJECT_IDENTIFIER, +284 "REGOPER": TokenType.OBJECT_IDENTIFIER, +285 "REGOPERATOR": TokenType.OBJECT_IDENTIFIER, +286 "REGPROC": TokenType.OBJECT_IDENTIFIER, +287 "REGPROCEDURE": TokenType.OBJECT_IDENTIFIER, +288 "REGROLE": TokenType.OBJECT_IDENTIFIER, +289 "REGTYPE": TokenType.OBJECT_IDENTIFIER, +290 } +291 +292 SINGLE_TOKENS = { +293 **tokens.Tokenizer.SINGLE_TOKENS, +294 "$": TokenType.PARAMETER, +295 } +296 +297 VAR_SINGLE_TOKENS = {"$"} +298 +299 class Parser(parser.Parser): +300 CONCAT_NULL_OUTPUTS_STRING = True +301 +302 FUNCTIONS = { +303 **parser.Parser.FUNCTIONS, +304 "DATE_TRUNC": parse_timestamp_trunc, +305 "GENERATE_SERIES": _generate_series, +306 "NOW": exp.CurrentTimestamp.from_arg_list, +307 "TO_CHAR": format_time_lambda(exp.TimeToStr, "postgres"), +308 "TO_TIMESTAMP": _to_timestamp, +309 "UNNEST": exp.Explode.from_arg_list, +310 } +311 +312 FUNCTION_PARSERS = { +313 **parser.Parser.FUNCTION_PARSERS, +314 "DATE_PART": lambda self: self._parse_date_part(), +315 } +316 +317 BITWISE = { +318 **parser.Parser.BITWISE, +319 TokenType.HASH: exp.BitwiseXor, +320 } +321 +322 EXPONENT = { +323 TokenType.CARET: exp.Pow, +324 } +325 +326 RANGE_PARSERS = { +327 **parser.Parser.RANGE_PARSERS, +328 TokenType.DAMP: binary_range_parser(exp.ArrayOverlaps), +329 TokenType.DAT: lambda self, this: self.expression( +330 exp.MatchAgainst, this=self._parse_bitwise(), expressions=[this] +331 ), +332 TokenType.AT_GT: binary_range_parser(exp.ArrayContains), +333 TokenType.LT_AT: binary_range_parser(exp.ArrayContained), +334 } +335 +336 STATEMENT_PARSERS = { +337 **parser.Parser.STATEMENT_PARSERS, +338 TokenType.END: lambda self: self._parse_commit_or_rollback(), +339 } +340 +341 def _parse_factor(self) -> t.Optional[exp.Expression]: +342 return self._parse_tokens(self._parse_exponent, self.FACTOR) +343 +344 def _parse_exponent(self) -> t.Optional[exp.Expression]: +345 return self._parse_tokens(self._parse_unary, self.EXPONENT) +346 +347 def _parse_date_part(self) -> exp.Expression: +348 part = self._parse_type() +349 self._match(TokenType.COMMA) +350 value = self._parse_bitwise() +351 +352 if part and part.is_string: +353 part = exp.var(part.name) +354 +355 return self.expression(exp.Extract, this=part, expression=value) +356 +357 class Generator(generator.Generator): +358 SINGLE_STRING_INTERVAL = True +359 LOCKING_READS_SUPPORTED = True +360 JOIN_HINTS = False +361 TABLE_HINTS = False +362 QUERY_HINTS = False +363 NVL2_SUPPORTED = False +364 PARAMETER_TOKEN = "$" +365 +366 TYPE_MAPPING = { +367 **generator.Generator.TYPE_MAPPING, +368 exp.DataType.Type.TINYINT: "SMALLINT", +369 exp.DataType.Type.FLOAT: "REAL", +370 exp.DataType.Type.DOUBLE: "DOUBLE PRECISION", +371 exp.DataType.Type.BINARY: "BYTEA", +372 exp.DataType.Type.VARBINARY: 
"BYTEA", +373 exp.DataType.Type.DATETIME: "TIMESTAMP", +374 } +375 +376 TRANSFORMS = { +377 **generator.Generator.TRANSFORMS, +378 exp.AnyValue: any_value_to_max_sql, +379 exp.ArrayConcat: rename_func("ARRAY_CAT"), +380 exp.BitwiseXor: lambda self, e: self.binary(e, "#"), +381 exp.ColumnDef: transforms.preprocess([_auto_increment_to_serial, _serial_to_generated]), +382 exp.Explode: rename_func("UNNEST"), +383 exp.JSONExtract: arrow_json_extract_sql, +384 exp.JSONExtractScalar: arrow_json_extract_scalar_sql, +385 exp.JSONBExtract: lambda self, e: self.binary(e, "#>"), +386 exp.JSONBExtractScalar: lambda self, e: self.binary(e, "#>>"), +387 exp.JSONBContains: lambda self, e: self.binary(e, "?"), +388 exp.Pow: lambda self, e: self.binary(e, "^"), +389 exp.CurrentDate: no_paren_current_date_sql, +390 exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP", +391 exp.DateAdd: _date_add_sql("+"), +392 exp.DateStrToDate: datestrtodate_sql, +393 exp.DateSub: _date_add_sql("-"), +394 exp.DateDiff: _date_diff_sql, +395 exp.LogicalOr: rename_func("BOOL_OR"), +396 exp.LogicalAnd: rename_func("BOOL_AND"), +397 exp.Max: max_or_greatest, +398 exp.MapFromEntries: no_map_from_entries_sql, +399 exp.Min: min_or_least, +400 exp.ArrayOverlaps: lambda self, e: self.binary(e, "&&"), +401 exp.ArrayContains: lambda self, e: self.binary(e, "@>"), +402 exp.ArrayContained: lambda self, e: self.binary(e, "<@"), +403 exp.Merge: transforms.preprocess([_remove_target_from_merge]), +404 exp.Pivot: no_pivot_sql, +405 exp.RegexpLike: lambda self, e: self.binary(e, "~"), +406 exp.RegexpILike: lambda self, e: self.binary(e, "~*"), +407 exp.StrPosition: str_position_sql, +408 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", +409 exp.Substring: _substring_sql, +410 exp.TimestampTrunc: timestamptrunc_sql, +411 exp.TimeStrToTime: timestrtotime_sql, +412 exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", +413 exp.TableSample: no_tablesample_sql, +414 exp.ToChar: lambda self, e: self.function_fallback_sql(e), +415 exp.Trim: trim_sql, +416 exp.TryCast: no_trycast_sql, +417 exp.TsOrDsToDate: ts_or_ds_to_date_sql("postgres"), +418 exp.UnixToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')})", +419 exp.DataType: _datatype_sql, +420 exp.GroupConcat: _string_agg_sql, +421 exp.Array: lambda self, e: f"{self.normalize_func('ARRAY')}({self.sql(e.expressions[0])})" +422 if isinstance(seq_get(e.expressions, 0), exp.Select) +423 else f"{self.normalize_func('ARRAY')}[{self.expressions(e, flat=True)}]", +424 } 425 -426 return super().bracket_sql(expression) -427 -428 def matchagainst_sql(self, expression: exp.MatchAgainst) -> str: -429 this = self.sql(expression, "this") -430 expressions = [f"{self.sql(e)} @@ {this}" for e in expression.expressions] -431 sql = " OR ".join(expressions) -432 return f"({sql})" if len(expressions) > 1 else sql +426 PROPERTIES_LOCATION = { +427 **generator.Generator.PROPERTIES_LOCATION, +428 exp.TransientProperty: exp.Properties.Location.UNSUPPORTED, +429 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, +430 } +431 +432 def bracket_sql(self, expression: exp.Bracket) -> str: +433 """Forms like ARRAY[1, 2, 3][3] aren't allowed; we need to wrap the ARRAY.""" +434 if isinstance(expression.this, exp.Array): +435 expression = expression.copy() +436 expression.set("this", exp.paren(expression.this, copy=False)) +437 +438 return super().bracket_sql(expression) +439 +440 def matchagainst_sql(self, expression: exp.MatchAgainst) -> 
str: +441 this = self.sql(expression, "this") +442 expressions = [f"{self.sql(e)} @@ {this}" for e in expression.expressions] +443 sql = " OR ".join(expressions) +444 return f"({sql})" if len(expressions) > 1 else sql
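For context on the Postgres hunk above: the Parser's EXPONENT table and the Generator's exp.Pow transform together keep the caret operator intact across a parse/generate cycle. A minimal sketch, assuming sqlglot is importable; the identifiers are made up and the expected output reflects the transforms listed above rather than a verified run:

import sqlglot
from sqlglot import exp

# The '^' token is parsed through Parser.EXPONENT into an exp.Pow node ...
tree = sqlglot.parse_one("SELECT price ^ 2 FROM items", read="postgres")
print(tree.find(exp.Pow))

# ... and Generator.TRANSFORMS renders exp.Pow back as the '^' binary operator,
# so the expression should round-trip unchanged.
print(sqlglot.transpile("SELECT price ^ 2 FROM items", read="postgres", write="postgres")[0])
# expected: SELECT price ^ 2 FROM items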
    @@ -823,161 +835,173 @@ 276 "SMALLSERIAL": TokenType.SMALLSERIAL, 277 "TEMP": TokenType.TEMPORARY, 278 "CSTRING": TokenType.PSEUDO_TYPE, -279 } -280 -281 SINGLE_TOKENS = { -282 **tokens.Tokenizer.SINGLE_TOKENS, -283 "$": TokenType.PARAMETER, -284 } -285 -286 VAR_SINGLE_TOKENS = {"$"} -287 -288 class Parser(parser.Parser): -289 CONCAT_NULL_OUTPUTS_STRING = True -290 -291 FUNCTIONS = { -292 **parser.Parser.FUNCTIONS, -293 "DATE_TRUNC": parse_timestamp_trunc, -294 "GENERATE_SERIES": _generate_series, -295 "NOW": exp.CurrentTimestamp.from_arg_list, -296 "TO_CHAR": format_time_lambda(exp.TimeToStr, "postgres"), -297 "TO_TIMESTAMP": _to_timestamp, -298 "UNNEST": exp.Explode.from_arg_list, -299 } -300 -301 FUNCTION_PARSERS = { -302 **parser.Parser.FUNCTION_PARSERS, -303 "DATE_PART": lambda self: self._parse_date_part(), -304 } -305 -306 BITWISE = { -307 **parser.Parser.BITWISE, -308 TokenType.HASH: exp.BitwiseXor, -309 } -310 -311 EXPONENT = { -312 TokenType.CARET: exp.Pow, -313 } -314 -315 RANGE_PARSERS = { -316 **parser.Parser.RANGE_PARSERS, -317 TokenType.DAMP: binary_range_parser(exp.ArrayOverlaps), -318 TokenType.DAT: lambda self, this: self.expression( -319 exp.MatchAgainst, this=self._parse_bitwise(), expressions=[this] -320 ), -321 TokenType.AT_GT: binary_range_parser(exp.ArrayContains), -322 TokenType.LT_AT: binary_range_parser(exp.ArrayContained), -323 } -324 -325 STATEMENT_PARSERS = { -326 **parser.Parser.STATEMENT_PARSERS, -327 TokenType.END: lambda self: self._parse_commit_or_rollback(), -328 } -329 -330 def _parse_factor(self) -> t.Optional[exp.Expression]: -331 return self._parse_tokens(self._parse_exponent, self.FACTOR) -332 -333 def _parse_exponent(self) -> t.Optional[exp.Expression]: -334 return self._parse_tokens(self._parse_unary, self.EXPONENT) -335 -336 def _parse_date_part(self) -> exp.Expression: -337 part = self._parse_type() -338 self._match(TokenType.COMMA) -339 value = self._parse_bitwise() -340 -341 if part and part.is_string: -342 part = exp.var(part.name) -343 -344 return self.expression(exp.Extract, this=part, expression=value) -345 -346 class Generator(generator.Generator): -347 SINGLE_STRING_INTERVAL = True -348 LOCKING_READS_SUPPORTED = True -349 JOIN_HINTS = False -350 TABLE_HINTS = False -351 QUERY_HINTS = False -352 NVL2_SUPPORTED = False -353 PARAMETER_TOKEN = "$" -354 -355 TYPE_MAPPING = { -356 **generator.Generator.TYPE_MAPPING, -357 exp.DataType.Type.TINYINT: "SMALLINT", -358 exp.DataType.Type.FLOAT: "REAL", -359 exp.DataType.Type.DOUBLE: "DOUBLE PRECISION", -360 exp.DataType.Type.BINARY: "BYTEA", -361 exp.DataType.Type.VARBINARY: "BYTEA", -362 exp.DataType.Type.DATETIME: "TIMESTAMP", -363 } -364 -365 TRANSFORMS = { -366 **generator.Generator.TRANSFORMS, -367 exp.AnyValue: any_value_to_max_sql, -368 exp.ArrayConcat: rename_func("ARRAY_CAT"), -369 exp.BitwiseXor: lambda self, e: self.binary(e, "#"), -370 exp.ColumnDef: transforms.preprocess([_auto_increment_to_serial, _serial_to_generated]), -371 exp.Explode: rename_func("UNNEST"), -372 exp.JSONExtract: arrow_json_extract_sql, -373 exp.JSONExtractScalar: arrow_json_extract_scalar_sql, -374 exp.JSONBExtract: lambda self, e: self.binary(e, "#>"), -375 exp.JSONBExtractScalar: lambda self, e: self.binary(e, "#>>"), -376 exp.JSONBContains: lambda self, e: self.binary(e, "?"), -377 exp.Pow: lambda self, e: self.binary(e, "^"), -378 exp.CurrentDate: no_paren_current_date_sql, -379 exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP", -380 exp.DateAdd: _date_add_sql("+"), -381 exp.DateStrToDate: 
datestrtodate_sql, -382 exp.DateSub: _date_add_sql("-"), -383 exp.DateDiff: _date_diff_sql, -384 exp.LogicalOr: rename_func("BOOL_OR"), -385 exp.LogicalAnd: rename_func("BOOL_AND"), -386 exp.Max: max_or_greatest, -387 exp.MapFromEntries: no_map_from_entries_sql, -388 exp.Min: min_or_least, -389 exp.ArrayOverlaps: lambda self, e: self.binary(e, "&&"), -390 exp.ArrayContains: lambda self, e: self.binary(e, "@>"), -391 exp.ArrayContained: lambda self, e: self.binary(e, "<@"), -392 exp.Merge: transforms.preprocess([_remove_target_from_merge]), -393 exp.Pivot: no_pivot_sql, -394 exp.RegexpLike: lambda self, e: self.binary(e, "~"), -395 exp.RegexpILike: lambda self, e: self.binary(e, "~*"), -396 exp.StrPosition: str_position_sql, -397 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", -398 exp.Substring: _substring_sql, -399 exp.TimestampTrunc: timestamptrunc_sql, -400 exp.TimeStrToTime: timestrtotime_sql, -401 exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", -402 exp.TableSample: no_tablesample_sql, -403 exp.ToChar: lambda self, e: self.function_fallback_sql(e), -404 exp.Trim: trim_sql, -405 exp.TryCast: no_trycast_sql, -406 exp.TsOrDsToDate: ts_or_ds_to_date_sql("postgres"), -407 exp.UnixToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')})", -408 exp.DataType: _datatype_sql, -409 exp.GroupConcat: _string_agg_sql, -410 exp.Array: lambda self, e: f"{self.normalize_func('ARRAY')}({self.sql(e.expressions[0])})" -411 if isinstance(seq_get(e.expressions, 0), exp.Select) -412 else f"{self.normalize_func('ARRAY')}[{self.expressions(e, flat=True)}]", -413 } -414 -415 PROPERTIES_LOCATION = { -416 **generator.Generator.PROPERTIES_LOCATION, -417 exp.TransientProperty: exp.Properties.Location.UNSUPPORTED, -418 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -419 } -420 -421 def bracket_sql(self, expression: exp.Bracket) -> str: -422 """Forms like ARRAY[1, 2, 3][3] aren't allowed; we need to wrap the ARRAY.""" -423 if isinstance(expression.this, exp.Array): -424 expression = expression.copy() -425 expression.set("this", exp.paren(expression.this, copy=False)) +279 "OID": TokenType.OBJECT_IDENTIFIER, +280 "REGCLASS": TokenType.OBJECT_IDENTIFIER, +281 "REGCOLLATION": TokenType.OBJECT_IDENTIFIER, +282 "REGCONFIG": TokenType.OBJECT_IDENTIFIER, +283 "REGDICTIONARY": TokenType.OBJECT_IDENTIFIER, +284 "REGNAMESPACE": TokenType.OBJECT_IDENTIFIER, +285 "REGOPER": TokenType.OBJECT_IDENTIFIER, +286 "REGOPERATOR": TokenType.OBJECT_IDENTIFIER, +287 "REGPROC": TokenType.OBJECT_IDENTIFIER, +288 "REGPROCEDURE": TokenType.OBJECT_IDENTIFIER, +289 "REGROLE": TokenType.OBJECT_IDENTIFIER, +290 "REGTYPE": TokenType.OBJECT_IDENTIFIER, +291 } +292 +293 SINGLE_TOKENS = { +294 **tokens.Tokenizer.SINGLE_TOKENS, +295 "$": TokenType.PARAMETER, +296 } +297 +298 VAR_SINGLE_TOKENS = {"$"} +299 +300 class Parser(parser.Parser): +301 CONCAT_NULL_OUTPUTS_STRING = True +302 +303 FUNCTIONS = { +304 **parser.Parser.FUNCTIONS, +305 "DATE_TRUNC": parse_timestamp_trunc, +306 "GENERATE_SERIES": _generate_series, +307 "NOW": exp.CurrentTimestamp.from_arg_list, +308 "TO_CHAR": format_time_lambda(exp.TimeToStr, "postgres"), +309 "TO_TIMESTAMP": _to_timestamp, +310 "UNNEST": exp.Explode.from_arg_list, +311 } +312 +313 FUNCTION_PARSERS = { +314 **parser.Parser.FUNCTION_PARSERS, +315 "DATE_PART": lambda self: self._parse_date_part(), +316 } +317 +318 BITWISE = { +319 **parser.Parser.BITWISE, +320 TokenType.HASH: exp.BitwiseXor, +321 } +322 +323 EXPONENT 
= { +324 TokenType.CARET: exp.Pow, +325 } +326 +327 RANGE_PARSERS = { +328 **parser.Parser.RANGE_PARSERS, +329 TokenType.DAMP: binary_range_parser(exp.ArrayOverlaps), +330 TokenType.DAT: lambda self, this: self.expression( +331 exp.MatchAgainst, this=self._parse_bitwise(), expressions=[this] +332 ), +333 TokenType.AT_GT: binary_range_parser(exp.ArrayContains), +334 TokenType.LT_AT: binary_range_parser(exp.ArrayContained), +335 } +336 +337 STATEMENT_PARSERS = { +338 **parser.Parser.STATEMENT_PARSERS, +339 TokenType.END: lambda self: self._parse_commit_or_rollback(), +340 } +341 +342 def _parse_factor(self) -> t.Optional[exp.Expression]: +343 return self._parse_tokens(self._parse_exponent, self.FACTOR) +344 +345 def _parse_exponent(self) -> t.Optional[exp.Expression]: +346 return self._parse_tokens(self._parse_unary, self.EXPONENT) +347 +348 def _parse_date_part(self) -> exp.Expression: +349 part = self._parse_type() +350 self._match(TokenType.COMMA) +351 value = self._parse_bitwise() +352 +353 if part and part.is_string: +354 part = exp.var(part.name) +355 +356 return self.expression(exp.Extract, this=part, expression=value) +357 +358 class Generator(generator.Generator): +359 SINGLE_STRING_INTERVAL = True +360 LOCKING_READS_SUPPORTED = True +361 JOIN_HINTS = False +362 TABLE_HINTS = False +363 QUERY_HINTS = False +364 NVL2_SUPPORTED = False +365 PARAMETER_TOKEN = "$" +366 +367 TYPE_MAPPING = { +368 **generator.Generator.TYPE_MAPPING, +369 exp.DataType.Type.TINYINT: "SMALLINT", +370 exp.DataType.Type.FLOAT: "REAL", +371 exp.DataType.Type.DOUBLE: "DOUBLE PRECISION", +372 exp.DataType.Type.BINARY: "BYTEA", +373 exp.DataType.Type.VARBINARY: "BYTEA", +374 exp.DataType.Type.DATETIME: "TIMESTAMP", +375 } +376 +377 TRANSFORMS = { +378 **generator.Generator.TRANSFORMS, +379 exp.AnyValue: any_value_to_max_sql, +380 exp.ArrayConcat: rename_func("ARRAY_CAT"), +381 exp.BitwiseXor: lambda self, e: self.binary(e, "#"), +382 exp.ColumnDef: transforms.preprocess([_auto_increment_to_serial, _serial_to_generated]), +383 exp.Explode: rename_func("UNNEST"), +384 exp.JSONExtract: arrow_json_extract_sql, +385 exp.JSONExtractScalar: arrow_json_extract_scalar_sql, +386 exp.JSONBExtract: lambda self, e: self.binary(e, "#>"), +387 exp.JSONBExtractScalar: lambda self, e: self.binary(e, "#>>"), +388 exp.JSONBContains: lambda self, e: self.binary(e, "?"), +389 exp.Pow: lambda self, e: self.binary(e, "^"), +390 exp.CurrentDate: no_paren_current_date_sql, +391 exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP", +392 exp.DateAdd: _date_add_sql("+"), +393 exp.DateStrToDate: datestrtodate_sql, +394 exp.DateSub: _date_add_sql("-"), +395 exp.DateDiff: _date_diff_sql, +396 exp.LogicalOr: rename_func("BOOL_OR"), +397 exp.LogicalAnd: rename_func("BOOL_AND"), +398 exp.Max: max_or_greatest, +399 exp.MapFromEntries: no_map_from_entries_sql, +400 exp.Min: min_or_least, +401 exp.ArrayOverlaps: lambda self, e: self.binary(e, "&&"), +402 exp.ArrayContains: lambda self, e: self.binary(e, "@>"), +403 exp.ArrayContained: lambda self, e: self.binary(e, "<@"), +404 exp.Merge: transforms.preprocess([_remove_target_from_merge]), +405 exp.Pivot: no_pivot_sql, +406 exp.RegexpLike: lambda self, e: self.binary(e, "~"), +407 exp.RegexpILike: lambda self, e: self.binary(e, "~*"), +408 exp.StrPosition: str_position_sql, +409 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", +410 exp.Substring: _substring_sql, +411 exp.TimestampTrunc: timestamptrunc_sql, +412 exp.TimeStrToTime: timestrtotime_sql, +413 
exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})", +414 exp.TableSample: no_tablesample_sql, +415 exp.ToChar: lambda self, e: self.function_fallback_sql(e), +416 exp.Trim: trim_sql, +417 exp.TryCast: no_trycast_sql, +418 exp.TsOrDsToDate: ts_or_ds_to_date_sql("postgres"), +419 exp.UnixToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')})", +420 exp.DataType: _datatype_sql, +421 exp.GroupConcat: _string_agg_sql, +422 exp.Array: lambda self, e: f"{self.normalize_func('ARRAY')}({self.sql(e.expressions[0])})" +423 if isinstance(seq_get(e.expressions, 0), exp.Select) +424 else f"{self.normalize_func('ARRAY')}[{self.expressions(e, flat=True)}]", +425 } 426 -427 return super().bracket_sql(expression) -428 -429 def matchagainst_sql(self, expression: exp.MatchAgainst) -> str: -430 this = self.sql(expression, "this") -431 expressions = [f"{self.sql(e)} @@ {this}" for e in expression.expressions] -432 sql = " OR ".join(expressions) -433 return f"({sql})" if len(expressions) > 1 else sql +427 PROPERTIES_LOCATION = { +428 **generator.Generator.PROPERTIES_LOCATION, +429 exp.TransientProperty: exp.Properties.Location.UNSUPPORTED, +430 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, +431 } +432 +433 def bracket_sql(self, expression: exp.Bracket) -> str: +434 """Forms like ARRAY[1, 2, 3][3] aren't allowed; we need to wrap the ARRAY.""" +435 if isinstance(expression.this, exp.Array): +436 expression = expression.copy() +437 expression.set("this", exp.paren(expression.this, copy=False)) +438 +439 return super().bracket_sql(expression) +440 +441 def matchagainst_sql(self, expression: exp.MatchAgainst) -> str: +442 this = self.sql(expression, "this") +443 expressions = [f"{self.sql(e)} @@ {this}" for e in expression.expressions] +444 sql = " OR ".join(expressions) +445 return f"({sql})" if len(expressions) > 1 else sql @@ -1318,14 +1342,26 @@ 276 "SMALLSERIAL": TokenType.SMALLSERIAL, 277 "TEMP": TokenType.TEMPORARY, 278 "CSTRING": TokenType.PSEUDO_TYPE, -279 } -280 -281 SINGLE_TOKENS = { -282 **tokens.Tokenizer.SINGLE_TOKENS, -283 "$": TokenType.PARAMETER, -284 } -285 -286 VAR_SINGLE_TOKENS = {"$"} +279 "OID": TokenType.OBJECT_IDENTIFIER, +280 "REGCLASS": TokenType.OBJECT_IDENTIFIER, +281 "REGCOLLATION": TokenType.OBJECT_IDENTIFIER, +282 "REGCONFIG": TokenType.OBJECT_IDENTIFIER, +283 "REGDICTIONARY": TokenType.OBJECT_IDENTIFIER, +284 "REGNAMESPACE": TokenType.OBJECT_IDENTIFIER, +285 "REGOPER": TokenType.OBJECT_IDENTIFIER, +286 "REGOPERATOR": TokenType.OBJECT_IDENTIFIER, +287 "REGPROC": TokenType.OBJECT_IDENTIFIER, +288 "REGPROCEDURE": TokenType.OBJECT_IDENTIFIER, +289 "REGROLE": TokenType.OBJECT_IDENTIFIER, +290 "REGTYPE": TokenType.OBJECT_IDENTIFIER, +291 } +292 +293 SINGLE_TOKENS = { +294 **tokens.Tokenizer.SINGLE_TOKENS, +295 "$": TokenType.PARAMETER, +296 } +297 +298 VAR_SINGLE_TOKENS = {"$"} @@ -1383,7 +1419,7 @@
    KEYWORDS = - {'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, '??': <TokenType.DQMARK: 'DQMARK'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONNECT BY': <TokenType.CONNECT_BY: 'CONNECT_BY'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DIV': <TokenType.DIV: 'DIV'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': 
<TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 'KEEP'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'START WITH': <TokenType.START_WITH: 'START_WITH'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNKNOWN': <TokenType.UNKNOWN: 'UNKNOWN'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VALUES': <TokenType.VALUES: 'VALUES'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': 
<TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT128': <TokenType.INT128: 'INT128'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.BIGINT: 'BIGINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': <TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMETZ': <TokenType.TIMETZ: 'TIMETZ'>, 'TIMESTAMP': <TokenType.TIMESTAMP: 'TIMESTAMP'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': <TokenType.STRUCT: 'STRUCT'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'TRUNCATE': 
<TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'FOR VERSION': <TokenType.VERSION_SNAPSHOT: 'VERSION_SNAPSHOT'>, 'FOR TIMESTAMP': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, '~~': <TokenType.LIKE: 'LIKE'>, '~~*': <TokenType.ILIKE: 'ILIKE'>, '~*': <TokenType.IRLIKE: 'IRLIKE'>, '~': <TokenType.RLIKE: 'RLIKE'>, '@@': <TokenType.DAT: 'DAT'>, '@>': <TokenType.AT_GT: 'AT_GT'>, '<@': <TokenType.LT_AT: 'LT_AT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'BIGSERIAL': <TokenType.BIGSERIAL: 'BIGSERIAL'>, 'CHARACTER VARYING': <TokenType.VARCHAR: 'VARCHAR'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'DO': <TokenType.COMMAND: 'COMMAND'>, 'HSTORE': <TokenType.HSTORE: 'HSTORE'>, 'JSONB': <TokenType.JSONB: 'JSONB'>, 'MONEY': <TokenType.MONEY: 'MONEY'>, 'REFRESH': <TokenType.COMMAND: 'COMMAND'>, 'REINDEX': <TokenType.COMMAND: 'COMMAND'>, 'RESET': <TokenType.COMMAND: 'COMMAND'>, 'REVOKE': <TokenType.COMMAND: 'COMMAND'>, 'SERIAL': <TokenType.SERIAL: 'SERIAL'>, 'SMALLSERIAL': <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, 'CSTRING': <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>} + {'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, '??': <TokenType.DQMARK: 'DQMARK'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONNECT BY': <TokenType.CONNECT_BY: 'CONNECT_BY'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 
'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DIV': <TokenType.DIV: 'DIV'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': <TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 'KEEP'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 
'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'START WITH': <TokenType.START_WITH: 'START_WITH'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNKNOWN': <TokenType.UNKNOWN: 'UNKNOWN'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VALUES': <TokenType.VALUES: 'VALUES'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': <TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT128': <TokenType.INT128: 'INT128'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.BIGINT: 'BIGINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': <TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMETZ': <TokenType.TIMETZ: 'TIMETZ'>, 'TIMESTAMP': <TokenType.TIMESTAMP: 'TIMESTAMP'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': 
<TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': <TokenType.STRUCT: 'STRUCT'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'TRUNCATE': <TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'FOR VERSION': <TokenType.VERSION_SNAPSHOT: 'VERSION_SNAPSHOT'>, 'FOR TIMESTAMP': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, '~~': <TokenType.LIKE: 'LIKE'>, '~~*': <TokenType.ILIKE: 'ILIKE'>, '~*': <TokenType.IRLIKE: 'IRLIKE'>, '~': <TokenType.RLIKE: 'RLIKE'>, '@@': <TokenType.DAT: 'DAT'>, '@>': <TokenType.AT_GT: 'AT_GT'>, '<@': <TokenType.LT_AT: 'LT_AT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'BIGSERIAL': <TokenType.BIGSERIAL: 'BIGSERIAL'>, 'CHARACTER VARYING': <TokenType.VARCHAR: 'VARCHAR'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'DO': <TokenType.COMMAND: 'COMMAND'>, 'HSTORE': <TokenType.HSTORE: 'HSTORE'>, 'JSONB': <TokenType.JSONB: 'JSONB'>, 'MONEY': <TokenType.MONEY: 'MONEY'>, 'REFRESH': <TokenType.COMMAND: 'COMMAND'>, 'REINDEX': <TokenType.COMMAND: 'COMMAND'>, 'RESET': <TokenType.COMMAND: 'COMMAND'>, 'REVOKE': <TokenType.COMMAND: 'COMMAND'>, 'SERIAL': <TokenType.SERIAL: 'SERIAL'>, 'SMALLSERIAL': <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, 'CSTRING': <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, 'OID': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGCLASS': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGCOLLATION': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGCONFIG': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGDICTIONARY': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGNAMESPACE': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGOPER': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGOPERATOR': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGPROC': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGPROCEDURE': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGROLE': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGTYPE': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>}
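The keyword additions above map Postgres' OID/REG* pseudo-types to TokenType.OBJECT_IDENTIFIER. A small sketch of how that surfaces at the tokenizer level, assuming sqlglot is importable; the query is illustrative only and the expected output is indicative, not a verified run:

from sqlglot.dialects.postgres import Postgres
from sqlglot.tokens import TokenType

# REGCLASS (and the other REG* names) now tokenize as OBJECT_IDENTIFIER instead of
# plain identifiers, which the generator can render via objectidentifier_sql
# further down in this patch.
tokens = Postgres().tokenize("SELECT 'pg_class'::REGCLASS")
print([t.text for t in tokens if t.token_type == TokenType.OBJECT_IDENTIFIER])
# expected: ['REGCLASS']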
    @@ -1454,63 +1490,63 @@ -
    288    class Parser(parser.Parser):
    -289        CONCAT_NULL_OUTPUTS_STRING = True
    -290
    -291        FUNCTIONS = {
    -292            **parser.Parser.FUNCTIONS,
    -293            "DATE_TRUNC": parse_timestamp_trunc,
    -294            "GENERATE_SERIES": _generate_series,
    -295            "NOW": exp.CurrentTimestamp.from_arg_list,
    -296            "TO_CHAR": format_time_lambda(exp.TimeToStr, "postgres"),
    -297            "TO_TIMESTAMP": _to_timestamp,
    -298            "UNNEST": exp.Explode.from_arg_list,
    -299        }
    -300
    -301        FUNCTION_PARSERS = {
    -302            **parser.Parser.FUNCTION_PARSERS,
    -303            "DATE_PART": lambda self: self._parse_date_part(),
    -304        }
    -305
    -306        BITWISE = {
    -307            **parser.Parser.BITWISE,
    -308            TokenType.HASH: exp.BitwiseXor,
    -309        }
    -310
    -311        EXPONENT = {
    -312            TokenType.CARET: exp.Pow,
    -313        }
    -314
    -315        RANGE_PARSERS = {
    -316            **parser.Parser.RANGE_PARSERS,
    -317            TokenType.DAMP: binary_range_parser(exp.ArrayOverlaps),
    -318            TokenType.DAT: lambda self, this: self.expression(
    -319                exp.MatchAgainst, this=self._parse_bitwise(), expressions=[this]
    -320            ),
    -321            TokenType.AT_GT: binary_range_parser(exp.ArrayContains),
    -322            TokenType.LT_AT: binary_range_parser(exp.ArrayContained),
    -323        }
    -324
    -325        STATEMENT_PARSERS = {
    -326            **parser.Parser.STATEMENT_PARSERS,
    -327            TokenType.END: lambda self: self._parse_commit_or_rollback(),
    -328        }
    -329
    -330        def _parse_factor(self) -> t.Optional[exp.Expression]:
    -331            return self._parse_tokens(self._parse_exponent, self.FACTOR)
    -332
    -333        def _parse_exponent(self) -> t.Optional[exp.Expression]:
    -334            return self._parse_tokens(self._parse_unary, self.EXPONENT)
    -335
    -336        def _parse_date_part(self) -> exp.Expression:
    -337            part = self._parse_type()
    -338            self._match(TokenType.COMMA)
    -339            value = self._parse_bitwise()
    -340
    -341            if part and part.is_string:
    -342                part = exp.var(part.name)
    -343
    -344            return self.expression(exp.Extract, this=part, expression=value)
    +            
    300    class Parser(parser.Parser):
    +301        CONCAT_NULL_OUTPUTS_STRING = True
    +302
    +303        FUNCTIONS = {
    +304            **parser.Parser.FUNCTIONS,
    +305            "DATE_TRUNC": parse_timestamp_trunc,
    +306            "GENERATE_SERIES": _generate_series,
    +307            "NOW": exp.CurrentTimestamp.from_arg_list,
    +308            "TO_CHAR": format_time_lambda(exp.TimeToStr, "postgres"),
    +309            "TO_TIMESTAMP": _to_timestamp,
    +310            "UNNEST": exp.Explode.from_arg_list,
    +311        }
    +312
    +313        FUNCTION_PARSERS = {
    +314            **parser.Parser.FUNCTION_PARSERS,
    +315            "DATE_PART": lambda self: self._parse_date_part(),
    +316        }
    +317
    +318        BITWISE = {
    +319            **parser.Parser.BITWISE,
    +320            TokenType.HASH: exp.BitwiseXor,
    +321        }
    +322
    +323        EXPONENT = {
    +324            TokenType.CARET: exp.Pow,
    +325        }
    +326
    +327        RANGE_PARSERS = {
    +328            **parser.Parser.RANGE_PARSERS,
    +329            TokenType.DAMP: binary_range_parser(exp.ArrayOverlaps),
    +330            TokenType.DAT: lambda self, this: self.expression(
    +331                exp.MatchAgainst, this=self._parse_bitwise(), expressions=[this]
    +332            ),
    +333            TokenType.AT_GT: binary_range_parser(exp.ArrayContains),
    +334            TokenType.LT_AT: binary_range_parser(exp.ArrayContained),
    +335        }
    +336
    +337        STATEMENT_PARSERS = {
    +338            **parser.Parser.STATEMENT_PARSERS,
    +339            TokenType.END: lambda self: self._parse_commit_or_rollback(),
    +340        }
    +341
    +342        def _parse_factor(self) -> t.Optional[exp.Expression]:
    +343            return self._parse_tokens(self._parse_exponent, self.FACTOR)
    +344
    +345        def _parse_exponent(self) -> t.Optional[exp.Expression]:
    +346            return self._parse_tokens(self._parse_unary, self.EXPONENT)
    +347
    +348        def _parse_date_part(self) -> exp.Expression:
    +349            part = self._parse_type()
    +350            self._match(TokenType.COMMA)
    +351            value = self._parse_bitwise()
    +352
    +353            if part and part.is_string:
    +354                part = exp.var(part.name)
    +355
    +356            return self.expression(exp.Extract, this=part, expression=value)
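To make the DATE_PART hook above concrete: _parse_date_part() turns the two-argument function into an exp.Extract node, with string parts normalized to bare variables. A brief sketch, assuming sqlglot is importable; the names are invented and the output comment is indicative rather than verified:

import sqlglot
from sqlglot import exp

# DATE_PART('year', created_at) is routed through FUNCTION_PARSERS into
# _parse_date_part(), which builds exp.Extract(this=year, expression=created_at).
tree = sqlglot.parse_one("SELECT DATE_PART('year', created_at) FROM orders", read="postgres")
print(type(tree.find(exp.Extract)).__name__)  # Extract

# The generator writes the Extract node back in ANSI form.
print(tree.sql(dialect="postgres"))
# expected (roughly): SELECT EXTRACT(year FROM created_at) FROM orders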
     
    @@ -1822,94 +1858,94 @@ Default: 3
    -
    346    class Generator(generator.Generator):
    -347        SINGLE_STRING_INTERVAL = True
    -348        LOCKING_READS_SUPPORTED = True
    -349        JOIN_HINTS = False
    -350        TABLE_HINTS = False
    -351        QUERY_HINTS = False
    -352        NVL2_SUPPORTED = False
    -353        PARAMETER_TOKEN = "$"
    -354
    -355        TYPE_MAPPING = {
    -356            **generator.Generator.TYPE_MAPPING,
    -357            exp.DataType.Type.TINYINT: "SMALLINT",
    -358            exp.DataType.Type.FLOAT: "REAL",
    -359            exp.DataType.Type.DOUBLE: "DOUBLE PRECISION",
    -360            exp.DataType.Type.BINARY: "BYTEA",
    -361            exp.DataType.Type.VARBINARY: "BYTEA",
    -362            exp.DataType.Type.DATETIME: "TIMESTAMP",
    -363        }
    -364
    -365        TRANSFORMS = {
    -366            **generator.Generator.TRANSFORMS,
    -367            exp.AnyValue: any_value_to_max_sql,
    -368            exp.ArrayConcat: rename_func("ARRAY_CAT"),
    -369            exp.BitwiseXor: lambda self, e: self.binary(e, "#"),
    -370            exp.ColumnDef: transforms.preprocess([_auto_increment_to_serial, _serial_to_generated]),
    -371            exp.Explode: rename_func("UNNEST"),
    -372            exp.JSONExtract: arrow_json_extract_sql,
    -373            exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
    -374            exp.JSONBExtract: lambda self, e: self.binary(e, "#>"),
    -375            exp.JSONBExtractScalar: lambda self, e: self.binary(e, "#>>"),
    -376            exp.JSONBContains: lambda self, e: self.binary(e, "?"),
    -377            exp.Pow: lambda self, e: self.binary(e, "^"),
    -378            exp.CurrentDate: no_paren_current_date_sql,
    -379            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
    -380            exp.DateAdd: _date_add_sql("+"),
    -381            exp.DateStrToDate: datestrtodate_sql,
    -382            exp.DateSub: _date_add_sql("-"),
    -383            exp.DateDiff: _date_diff_sql,
    -384            exp.LogicalOr: rename_func("BOOL_OR"),
    -385            exp.LogicalAnd: rename_func("BOOL_AND"),
    -386            exp.Max: max_or_greatest,
    -387            exp.MapFromEntries: no_map_from_entries_sql,
    -388            exp.Min: min_or_least,
    -389            exp.ArrayOverlaps: lambda self, e: self.binary(e, "&&"),
    -390            exp.ArrayContains: lambda self, e: self.binary(e, "@>"),
    -391            exp.ArrayContained: lambda self, e: self.binary(e, "<@"),
    -392            exp.Merge: transforms.preprocess([_remove_target_from_merge]),
    -393            exp.Pivot: no_pivot_sql,
    -394            exp.RegexpLike: lambda self, e: self.binary(e, "~"),
    -395            exp.RegexpILike: lambda self, e: self.binary(e, "~*"),
    -396            exp.StrPosition: str_position_sql,
    -397            exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})",
    -398            exp.Substring: _substring_sql,
    -399            exp.TimestampTrunc: timestamptrunc_sql,
    -400            exp.TimeStrToTime: timestrtotime_sql,
    -401            exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})",
    -402            exp.TableSample: no_tablesample_sql,
    -403            exp.ToChar: lambda self, e: self.function_fallback_sql(e),
    -404            exp.Trim: trim_sql,
    -405            exp.TryCast: no_trycast_sql,
    -406            exp.TsOrDsToDate: ts_or_ds_to_date_sql("postgres"),
    -407            exp.UnixToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')})",
    -408            exp.DataType: _datatype_sql,
    -409            exp.GroupConcat: _string_agg_sql,
    -410            exp.Array: lambda self, e: f"{self.normalize_func('ARRAY')}({self.sql(e.expressions[0])})"
    -411            if isinstance(seq_get(e.expressions, 0), exp.Select)
    -412            else f"{self.normalize_func('ARRAY')}[{self.expressions(e, flat=True)}]",
    -413        }
    -414
    -415        PROPERTIES_LOCATION = {
    -416            **generator.Generator.PROPERTIES_LOCATION,
    -417            exp.TransientProperty: exp.Properties.Location.UNSUPPORTED,
    -418            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    -419        }
    -420
    -421        def bracket_sql(self, expression: exp.Bracket) -> str:
    -422            """Forms like ARRAY[1, 2, 3][3] aren't allowed; we need to wrap the ARRAY."""
    -423            if isinstance(expression.this, exp.Array):
    -424                expression = expression.copy()
    -425                expression.set("this", exp.paren(expression.this, copy=False))
    +            
    358    class Generator(generator.Generator):
    +359        SINGLE_STRING_INTERVAL = True
    +360        LOCKING_READS_SUPPORTED = True
    +361        JOIN_HINTS = False
    +362        TABLE_HINTS = False
    +363        QUERY_HINTS = False
    +364        NVL2_SUPPORTED = False
    +365        PARAMETER_TOKEN = "$"
    +366
    +367        TYPE_MAPPING = {
    +368            **generator.Generator.TYPE_MAPPING,
    +369            exp.DataType.Type.TINYINT: "SMALLINT",
    +370            exp.DataType.Type.FLOAT: "REAL",
    +371            exp.DataType.Type.DOUBLE: "DOUBLE PRECISION",
    +372            exp.DataType.Type.BINARY: "BYTEA",
    +373            exp.DataType.Type.VARBINARY: "BYTEA",
    +374            exp.DataType.Type.DATETIME: "TIMESTAMP",
    +375        }
    +376
    +377        TRANSFORMS = {
    +378            **generator.Generator.TRANSFORMS,
    +379            exp.AnyValue: any_value_to_max_sql,
    +380            exp.ArrayConcat: rename_func("ARRAY_CAT"),
    +381            exp.BitwiseXor: lambda self, e: self.binary(e, "#"),
    +382            exp.ColumnDef: transforms.preprocess([_auto_increment_to_serial, _serial_to_generated]),
    +383            exp.Explode: rename_func("UNNEST"),
    +384            exp.JSONExtract: arrow_json_extract_sql,
    +385            exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
    +386            exp.JSONBExtract: lambda self, e: self.binary(e, "#>"),
    +387            exp.JSONBExtractScalar: lambda self, e: self.binary(e, "#>>"),
    +388            exp.JSONBContains: lambda self, e: self.binary(e, "?"),
    +389            exp.Pow: lambda self, e: self.binary(e, "^"),
    +390            exp.CurrentDate: no_paren_current_date_sql,
    +391            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
    +392            exp.DateAdd: _date_add_sql("+"),
    +393            exp.DateStrToDate: datestrtodate_sql,
    +394            exp.DateSub: _date_add_sql("-"),
    +395            exp.DateDiff: _date_diff_sql,
    +396            exp.LogicalOr: rename_func("BOOL_OR"),
    +397            exp.LogicalAnd: rename_func("BOOL_AND"),
    +398            exp.Max: max_or_greatest,
    +399            exp.MapFromEntries: no_map_from_entries_sql,
    +400            exp.Min: min_or_least,
    +401            exp.ArrayOverlaps: lambda self, e: self.binary(e, "&&"),
    +402            exp.ArrayContains: lambda self, e: self.binary(e, "@>"),
    +403            exp.ArrayContained: lambda self, e: self.binary(e, "<@"),
    +404            exp.Merge: transforms.preprocess([_remove_target_from_merge]),
    +405            exp.Pivot: no_pivot_sql,
    +406            exp.RegexpLike: lambda self, e: self.binary(e, "~"),
    +407            exp.RegexpILike: lambda self, e: self.binary(e, "~*"),
    +408            exp.StrPosition: str_position_sql,
    +409            exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})",
    +410            exp.Substring: _substring_sql,
    +411            exp.TimestampTrunc: timestamptrunc_sql,
    +412            exp.TimeStrToTime: timestrtotime_sql,
    +413            exp.TimeToStr: lambda self, e: f"TO_CHAR({self.sql(e, 'this')}, {self.format_time(e)})",
    +414            exp.TableSample: no_tablesample_sql,
    +415            exp.ToChar: lambda self, e: self.function_fallback_sql(e),
    +416            exp.Trim: trim_sql,
    +417            exp.TryCast: no_trycast_sql,
    +418            exp.TsOrDsToDate: ts_or_ds_to_date_sql("postgres"),
    +419            exp.UnixToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')})",
    +420            exp.DataType: _datatype_sql,
    +421            exp.GroupConcat: _string_agg_sql,
    +422            exp.Array: lambda self, e: f"{self.normalize_func('ARRAY')}({self.sql(e.expressions[0])})"
    +423            if isinstance(seq_get(e.expressions, 0), exp.Select)
    +424            else f"{self.normalize_func('ARRAY')}[{self.expressions(e, flat=True)}]",
    +425        }
     426
    -427            return super().bracket_sql(expression)
    -428
    -429        def matchagainst_sql(self, expression: exp.MatchAgainst) -> str:
    -430            this = self.sql(expression, "this")
    -431            expressions = [f"{self.sql(e)} @@ {this}" for e in expression.expressions]
    -432            sql = " OR ".join(expressions)
    -433            return f"({sql})" if len(expressions) > 1 else sql
    +427        PROPERTIES_LOCATION = {
    +428            **generator.Generator.PROPERTIES_LOCATION,
    +429            exp.TransientProperty: exp.Properties.Location.UNSUPPORTED,
    +430            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    +431        }
    +432
    +433        def bracket_sql(self, expression: exp.Bracket) -> str:
    +434            """Forms like ARRAY[1, 2, 3][3] aren't allowed; we need to wrap the ARRAY."""
    +435            if isinstance(expression.this, exp.Array):
    +436                expression = expression.copy()
    +437                expression.set("this", exp.paren(expression.this, copy=False))
    +438
    +439            return super().bracket_sql(expression)
    +440
    +441        def matchagainst_sql(self, expression: exp.MatchAgainst) -> str:
    +442            this = self.sql(expression, "this")
    +443            expressions = [f"{self.sql(e)} @@ {this}" for e in expression.expressions]
    +444            sql = " OR ".join(expressions)
    +445            return f"({sql})" if len(expressions) > 1 else sql
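A minimal sketch of how two of the Generator hooks above behave, assuming the default parser maps REGEXP_LIKE to exp.RegexpLike and accepts the unparenthesized ARRAY subscript; the SQL snippets are illustrative only:

    import sqlglot

    # exp.RegexpLike is rendered with the Postgres `~` operator (see TRANSFORMS above).
    print(sqlglot.transpile("SELECT col FROM t WHERE REGEXP_LIKE(col, 'a.b')", write="postgres")[0])
    # expected: SELECT col FROM t WHERE col ~ 'a.b'

    # bracket_sql parenthesizes an ARRAY constructor before subscripting it,
    # since Postgres rejects the bare form ARRAY[1, 2, 3][3].
    print(sqlglot.transpile("SELECT ARRAY[1, 2, 3][3]", write="postgres")[0])
    # expected: SELECT (ARRAY[1, 2, 3])[3]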
     
    @@ -2086,13 +2122,13 @@ Default: True
    -
    421        def bracket_sql(self, expression: exp.Bracket) -> str:
    -422            """Forms like ARRAY[1, 2, 3][3] aren't allowed; we need to wrap the ARRAY."""
    -423            if isinstance(expression.this, exp.Array):
    -424                expression = expression.copy()
    -425                expression.set("this", exp.paren(expression.this, copy=False))
    -426
    -427            return super().bracket_sql(expression)
    +            
    433        def bracket_sql(self, expression: exp.Bracket) -> str:
    +434            """Forms like ARRAY[1, 2, 3][3] aren't allowed; we need to wrap the ARRAY."""
    +435            if isinstance(expression.this, exp.Array):
    +436                expression = expression.copy()
    +437                expression.set("this", exp.paren(expression.this, copy=False))
    +438
    +439            return super().bracket_sql(expression)
     
    @@ -2112,11 +2148,11 @@ Default: True
    -
    429        def matchagainst_sql(self, expression: exp.MatchAgainst) -> str:
    -430            this = self.sql(expression, "this")
    -431            expressions = [f"{self.sql(e)} @@ {this}" for e in expression.expressions]
    -432            sql = " OR ".join(expressions)
    -433            return f"({sql})" if len(expressions) > 1 else sql
    +            
    441        def matchagainst_sql(self, expression: exp.MatchAgainst) -> str:
    +442            this = self.sql(expression, "this")
    +443            expressions = [f"{self.sql(e)} @@ {this}" for e in expression.expressions]
    +444            sql = " OR ".join(expressions)
    +445            return f"({sql})" if len(expressions) > 1 else sql
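A minimal sketch of the matchagainst_sql translation above, assuming MySQL's MATCH ... AGAINST parses to exp.MatchAgainst with the searched columns in `expressions`; table and column names are illustrative only:

    import sqlglot

    # Each matched column becomes `col @@ <query>`; multiple columns are OR-ed
    # together and the result is wrapped in parentheses.
    sql = sqlglot.transpile(
        "SELECT * FROM docs WHERE MATCH(title, body) AGAINST('sqlglot')",
        read="mysql",
        write="postgres",
    )[0]
    print(sql)  # expected: SELECT * FROM docs WHERE (title @@ 'sqlglot' OR body @@ 'sqlglot')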
     
    @@ -2497,6 +2533,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/presto.html b/docs/sqlglot/dialects/presto.html index 9e1c1be..64d723a 100644 --- a/docs/sqlglot/dialects/presto.html +++ b/docs/sqlglot/dialects/presto.html @@ -309,13 +309,13 @@
    26from sqlglot.tokens import TokenType 27 28 - 29def _approx_distinct_sql(self: generator.Generator, expression: exp.ApproxDistinct) -> str: + 29def _approx_distinct_sql(self: Presto.Generator, expression: exp.ApproxDistinct) -> str: 30 accuracy = expression.args.get("accuracy") 31 accuracy = ", " + self.sql(accuracy) if accuracy else "" 32 return f"APPROX_DISTINCT({self.sql(expression, 'this')}{accuracy})" 33 34 - 35def _explode_to_unnest_sql(self: generator.Generator, expression: exp.Lateral) -> str: + 35def _explode_to_unnest_sql(self: Presto.Generator, expression: exp.Lateral) -> str: 36 if isinstance(expression.this, (exp.Explode, exp.Posexplode)): 37 expression = expression.copy() 38 return self.sql( @@ -331,12 +331,12 @@ 48 return self.lateral_sql(expression) 49 50 - 51def _initcap_sql(self: generator.Generator, expression: exp.Initcap) -> str: + 51def _initcap_sql(self: Presto.Generator, expression: exp.Initcap) -> str: 52 regex = r"(\w)(\w*)" 53 return f"REGEXP_REPLACE({self.sql(expression, 'this')}, '{regex}', x -> UPPER(x[1]) || LOWER(x[2]))" 54 55 - 56def _no_sort_array(self: generator.Generator, expression: exp.SortArray) -> str: + 56def _no_sort_array(self: Presto.Generator, expression: exp.SortArray) -> str: 57 if expression.args.get("asc") == exp.false(): 58 comparator = "(a, b) -> CASE WHEN a < b THEN 1 WHEN a > b THEN -1 ELSE 0 END" 59 else: @@ -344,7 +344,7 @@ 61 return self.func("ARRAY_SORT", expression.this, comparator) 62 63 - 64def _schema_sql(self: generator.Generator, expression: exp.Schema) -> str: + 64def _schema_sql(self: Presto.Generator, expression: exp.Schema) -> str: 65 if isinstance(expression.parent, exp.Property): 66 columns = ", ".join(f"'{c.name}'" for c in expression.expressions) 67 return f"ARRAY[{columns}]" @@ -358,25 +358,25 @@ 75 return self.schema_sql(expression) 76 77 - 78def _quantile_sql(self: generator.Generator, expression: exp.Quantile) -> str: + 78def _quantile_sql(self: Presto.Generator, expression: exp.Quantile) -> str: 79 self.unsupported("Presto does not support exact quantiles") 80 return f"APPROX_PERCENTILE({self.sql(expression, 'this')}, {self.sql(expression, 'quantile')})" 81 82 83def _str_to_time_sql( - 84 self: generator.Generator, expression: exp.StrToDate | exp.StrToTime | exp.TsOrDsToDate + 84 self: Presto.Generator, expression: exp.StrToDate | exp.StrToTime | exp.TsOrDsToDate 85) -> str: 86 return f"DATE_PARSE({self.sql(expression, 'this')}, {self.format_time(expression)})" 87 88 - 89def _ts_or_ds_to_date_sql(self: generator.Generator, expression: exp.TsOrDsToDate) -> str: + 89def _ts_or_ds_to_date_sql(self: Presto.Generator, expression: exp.TsOrDsToDate) -> str: 90 time_format = self.format_time(expression) 91 if time_format and time_format not in (Presto.TIME_FORMAT, Presto.DATE_FORMAT): 92 return exp.cast(_str_to_time_sql(self, expression), "DATE").sql(dialect="presto") 93 return exp.cast(exp.cast(expression.this, "TIMESTAMP", copy=True), "DATE").sql(dialect="presto") 94 95 - 96def _ts_or_ds_add_sql(self: generator.Generator, expression: exp.TsOrDsAdd) -> str: + 96def _ts_or_ds_add_sql(self: Presto.Generator, expression: exp.TsOrDsAdd) -> str: 97 this = expression.this 98 99 if not isinstance(this, exp.CurrentDate): @@ -436,7 +436,7 @@ 153 return expression 154 155 -156def _first_last_sql(self: generator.Generator, expression: exp.First | exp.Last) -> str: +156def _first_last_sql(self: Presto.Generator, expression: exp.First | exp.Last) -> str: 157 """ 158 Trino doesn't support FIRST / LAST as functions, but they're 
valid in the context 159 of MATCH_RECOGNIZE, so we need to preserve them in that case. In all other cases @@ -2475,6 +2475,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
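A minimal sketch of the _approx_distinct_sql helper above, assuming APPROX_COUNT_DISTINCT parses to exp.ApproxDistinct; the table and column names are illustrative only:

    import sqlglot

    # exp.ApproxDistinct is rendered as APPROX_DISTINCT; the optional accuracy
    # argument would be appended after the column when present.
    sql = sqlglot.transpile(
        "SELECT APPROX_COUNT_DISTINCT(user_id) FROM events",
        read="spark",
        write="presto",
    )[0]
    print(sql)  # expected: SELECT APPROX_DISTINCT(user_id) FROM events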
    diff --git a/docs/sqlglot/dialects/redshift.html b/docs/sqlglot/dialects/redshift.html index a3a2fda..0f04b11 100644 --- a/docs/sqlglot/dialects/redshift.html +++ b/docs/sqlglot/dialects/redshift.html @@ -272,7 +272,7 @@
    13from sqlglot.tokens import TokenType 14 15 - 16def _json_sql(self: Postgres.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar) -> str: + 16def _json_sql(self: Redshift.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar) -> str: 17 return f'{self.sql(expression, "this")}."{expression.expression.name}"' 18 19 @@ -1230,7 +1230,7 @@ Default: 3
    KEYWORDS = - {'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, '??': <TokenType.DQMARK: 'DQMARK'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONNECT BY': <TokenType.CONNECT_BY: 'CONNECT_BY'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DIV': <TokenType.DIV: 'DIV'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': 
<TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 'KEEP'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'START WITH': <TokenType.START_WITH: 'START_WITH'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNKNOWN': <TokenType.UNKNOWN: 'UNKNOWN'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VALUES': <TokenType.VALUES: 'VALUES'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': 
<TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT128': <TokenType.INT128: 'INT128'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.BIGINT: 'BIGINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': <TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMETZ': <TokenType.TIMETZ: 'TIMETZ'>, 'TIMESTAMP': <TokenType.TIMESTAMP: 'TIMESTAMP'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': <TokenType.STRUCT: 'STRUCT'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'TRUNCATE': 
<TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'FOR VERSION': <TokenType.VERSION_SNAPSHOT: 'VERSION_SNAPSHOT'>, 'FOR TIMESTAMP': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, '~~': <TokenType.LIKE: 'LIKE'>, '~~*': <TokenType.ILIKE: 'ILIKE'>, '~*': <TokenType.IRLIKE: 'IRLIKE'>, '~': <TokenType.RLIKE: 'RLIKE'>, '@@': <TokenType.DAT: 'DAT'>, '@>': <TokenType.AT_GT: 'AT_GT'>, '<@': <TokenType.LT_AT: 'LT_AT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'BIGSERIAL': <TokenType.BIGSERIAL: 'BIGSERIAL'>, 'CHARACTER VARYING': <TokenType.VARCHAR: 'VARCHAR'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'DO': <TokenType.COMMAND: 'COMMAND'>, 'HSTORE': <TokenType.HSTORE: 'HSTORE'>, 'JSONB': <TokenType.JSONB: 'JSONB'>, 'MONEY': <TokenType.MONEY: 'MONEY'>, 'REFRESH': <TokenType.COMMAND: 'COMMAND'>, 'REINDEX': <TokenType.COMMAND: 'COMMAND'>, 'RESET': <TokenType.COMMAND: 'COMMAND'>, 'REVOKE': <TokenType.COMMAND: 'COMMAND'>, 'SERIAL': <TokenType.SERIAL: 'SERIAL'>, 'SMALLSERIAL': <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, 'CSTRING': <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, 'HLLSKETCH': <TokenType.HLLSKETCH: 'HLLSKETCH'>, 'SUPER': <TokenType.SUPER: 'SUPER'>, 'SYSDATE': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'TOP': <TokenType.TOP: 'TOP'>, 'UNLOAD': <TokenType.COMMAND: 'COMMAND'>, 'VARBYTE': <TokenType.VARBINARY: 'VARBINARY'>} + {'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, '??': <TokenType.DQMARK: 'DQMARK'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONNECT BY': <TokenType.CONNECT_BY: 'CONNECT_BY'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 'CUBE'>, 'CURRENT_DATE': 
<TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DIV': <TokenType.DIV: 'DIV'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': <TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 'KEEP'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 
'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'START WITH': <TokenType.START_WITH: 'START_WITH'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNKNOWN': <TokenType.UNKNOWN: 'UNKNOWN'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VALUES': <TokenType.VALUES: 'VALUES'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': <TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT128': <TokenType.INT128: 'INT128'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.BIGINT: 'BIGINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': <TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMETZ': <TokenType.TIMETZ: 
'TIMETZ'>, 'TIMESTAMP': <TokenType.TIMESTAMP: 'TIMESTAMP'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': <TokenType.STRUCT: 'STRUCT'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'TRUNCATE': <TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'FOR VERSION': <TokenType.VERSION_SNAPSHOT: 'VERSION_SNAPSHOT'>, 'FOR TIMESTAMP': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, '~~': <TokenType.LIKE: 'LIKE'>, '~~*': <TokenType.ILIKE: 'ILIKE'>, '~*': <TokenType.IRLIKE: 'IRLIKE'>, '~': <TokenType.RLIKE: 'RLIKE'>, '@@': <TokenType.DAT: 'DAT'>, '@>': <TokenType.AT_GT: 'AT_GT'>, '<@': <TokenType.LT_AT: 'LT_AT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'BIGSERIAL': <TokenType.BIGSERIAL: 'BIGSERIAL'>, 'CHARACTER VARYING': <TokenType.VARCHAR: 'VARCHAR'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'DO': <TokenType.COMMAND: 'COMMAND'>, 'HSTORE': <TokenType.HSTORE: 'HSTORE'>, 'JSONB': <TokenType.JSONB: 'JSONB'>, 'MONEY': <TokenType.MONEY: 'MONEY'>, 'REFRESH': <TokenType.COMMAND: 'COMMAND'>, 'REINDEX': <TokenType.COMMAND: 'COMMAND'>, 'RESET': <TokenType.COMMAND: 'COMMAND'>, 'REVOKE': <TokenType.COMMAND: 'COMMAND'>, 'SERIAL': <TokenType.SERIAL: 'SERIAL'>, 'SMALLSERIAL': <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, 'CSTRING': <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, 'OID': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGCLASS': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGCOLLATION': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGCONFIG': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGDICTIONARY': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGNAMESPACE': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGOPER': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGOPERATOR': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGPROC': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGPROCEDURE': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGROLE': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'REGTYPE': <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, 'HLLSKETCH': <TokenType.HLLSKETCH: 'HLLSKETCH'>, 'SUPER': <TokenType.SUPER: 'SUPER'>, 'SYSDATE': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'TOP': <TokenType.TOP: 'TOP'>, 'UNLOAD': 
<TokenType.COMMAND: 'COMMAND'>, 'VARBYTE': <TokenType.VARBINARY: 'VARBINARY'>}
    @@ -1937,6 +1937,7 @@ without precision we convert it to VARCHAR(max) and if it does have
    intersect_op
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
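A minimal sketch of the _json_sql helper above, assuming the Postgres `->` operator parses to exp.JSONExtract; column and key names are illustrative only:

    import sqlglot

    # exp.JSONExtract / exp.JSONExtractScalar are rendered as dotted access on
    # the column, quoting the extracted key.
    sql = sqlglot.transpile(
        "SELECT payload -> 'id' FROM events",
        read="postgres",
        write="redshift",
    )[0]
    print(sql)  # expected: SELECT payload."id" FROM events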
    diff --git a/docs/sqlglot/dialects/snowflake.html b/docs/sqlglot/dialects/snowflake.html index 3faba8b..c9df0d4 100644 --- a/docs/sqlglot/dialects/snowflake.html +++ b/docs/sqlglot/dialects/snowflake.html @@ -72,6 +72,12 @@
  • ALTER_PARSERS
  • + STATEMENT_PARSERS
  • + SHOW_PARSERS
  • TOKENIZER_CLASS
  • @@ -120,6 +126,9 @@
  • VAR_SINGLE_TOKENS
  • + COMMANDS
  • @@ -156,6 +165,9 @@
  • PROPERTIES_LOCATION
  • + show_sql
  • regexpextract_sql
  • @@ -391,7 +403,7 @@
    90 return exp.DateDiff(this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0)) 91 92 - 93def _unix_to_time_sql(self: generator.Generator, expression: exp.UnixToTime) -> str: + 93def _unix_to_time_sql(self: Snowflake.Generator, expression: exp.UnixToTime) -> str: 94 scale = expression.args.get("scale") 95 timestamp = self.sql(expression, "this") 96 if scale in [None, exp.UnixToTime.SECONDS]: @@ -406,7 +418,7 @@ 105 106# https://docs.snowflake.com/en/sql-reference/functions/date_part.html 107# https://docs.snowflake.com/en/sql-reference/functions-date-time.html#label-supported-date-time-parts -108def _parse_date_part(self: parser.Parser) -> t.Optional[exp.Expression]: +108def _parse_date_part(self: Snowflake.Parser) -> t.Optional[exp.Expression]: 109 this = self._parse_var() or self._parse_type() 110 111 if not this: @@ -457,7 +469,7 @@ 156 return exp.If(this=cond, true=exp.Null(), false=seq_get(args, 0)) 157 158 -159def _datatype_sql(self: generator.Generator, expression: exp.DataType) -> str: +159def _datatype_sql(self: Snowflake.Generator, expression: exp.DataType) -> str: 160 if expression.is_type("array"): 161 return "ARRAY" 162 elif expression.is_type("map"): @@ -465,7 +477,7 @@ 164 return self.datatype_sql(expression) 165 166 -167def _regexpilike_sql(self: generator.Generator, expression: exp.RegexpILike) -> str: +167def _regexpilike_sql(self: Snowflake.Generator, expression: exp.RegexpILike) -> str: 168 flag = expression.text("flag") 169 170 if "i" not in flag: @@ -491,270 +503,320 @@ 190 return regexp_replace 191 192 -193class Snowflake(Dialect): -194 # https://docs.snowflake.com/en/sql-reference/identifiers-syntax -195 RESOLVES_IDENTIFIERS_AS_UPPERCASE = True -196 NULL_ORDERING = "nulls_are_large" -197 TIME_FORMAT = "'YYYY-MM-DD HH24:MI:SS'" +193def _show_parser(*args: t.Any, **kwargs: t.Any) -> t.Callable[[Snowflake.Parser], exp.Show]: +194 def _parse(self: Snowflake.Parser) -> exp.Show: +195 return self._parse_show_snowflake(*args, **kwargs) +196 +197 return _parse 198 -199 TIME_MAPPING = { -200 "YYYY": "%Y", -201 "yyyy": "%Y", -202 "YY": "%y", -203 "yy": "%y", -204 "MMMM": "%B", -205 "mmmm": "%B", -206 "MON": "%b", -207 "mon": "%b", -208 "MM": "%m", -209 "mm": "%m", -210 "DD": "%d", -211 "dd": "%-d", -212 "DY": "%a", -213 "dy": "%w", -214 "HH24": "%H", -215 "hh24": "%H", -216 "HH12": "%I", -217 "hh12": "%I", -218 "MI": "%M", -219 "mi": "%M", -220 "SS": "%S", -221 "ss": "%S", -222 "FF": "%f", -223 "ff": "%f", -224 "FF6": "%f", -225 "ff6": "%f", -226 } -227 -228 class Parser(parser.Parser): -229 IDENTIFY_PIVOT_STRINGS = True -230 SUPPORTS_USER_DEFINED_TYPES = False -231 -232 FUNCTIONS = { -233 **parser.Parser.FUNCTIONS, -234 "ARRAYAGG": exp.ArrayAgg.from_arg_list, -235 "ARRAY_CONSTRUCT": exp.Array.from_arg_list, -236 "ARRAY_TO_STRING": exp.ArrayJoin.from_arg_list, -237 "CONVERT_TIMEZONE": _parse_convert_timezone, -238 "DATE_TRUNC": date_trunc_to_time, -239 "DATEADD": lambda args: exp.DateAdd( -240 this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0) -241 ), -242 "DATEDIFF": _parse_datediff, -243 "DIV0": _div0_to_if, -244 "IFF": exp.If.from_arg_list, -245 "NULLIFZERO": _nullifzero_to_if, -246 "OBJECT_CONSTRUCT": _parse_object_construct, -247 "REGEXP_REPLACE": _parse_regexp_replace, -248 "REGEXP_SUBSTR": exp.RegexpExtract.from_arg_list, -249 "RLIKE": exp.RegexpLike.from_arg_list, -250 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), -251 "TIMEDIFF": _parse_datediff, -252 "TIMESTAMPDIFF": 
_parse_datediff, -253 "TO_ARRAY": exp.Array.from_arg_list, -254 "TO_TIMESTAMP": _parse_to_timestamp, -255 "TO_VARCHAR": exp.ToChar.from_arg_list, -256 "ZEROIFNULL": _zeroifnull_to_if, -257 } -258 -259 FUNCTION_PARSERS = { -260 **parser.Parser.FUNCTION_PARSERS, -261 "DATE_PART": _parse_date_part, -262 } -263 FUNCTION_PARSERS.pop("TRIM") -264 -265 COLUMN_OPERATORS = { -266 **parser.Parser.COLUMN_OPERATORS, -267 TokenType.COLON: lambda self, this, path: self.expression( -268 exp.Bracket, this=this, expressions=[path] -269 ), +199 +200class Snowflake(Dialect): +201 # https://docs.snowflake.com/en/sql-reference/identifiers-syntax +202 RESOLVES_IDENTIFIERS_AS_UPPERCASE = True +203 NULL_ORDERING = "nulls_are_large" +204 TIME_FORMAT = "'YYYY-MM-DD HH24:MI:SS'" +205 +206 TIME_MAPPING = { +207 "YYYY": "%Y", +208 "yyyy": "%Y", +209 "YY": "%y", +210 "yy": "%y", +211 "MMMM": "%B", +212 "mmmm": "%B", +213 "MON": "%b", +214 "mon": "%b", +215 "MM": "%m", +216 "mm": "%m", +217 "DD": "%d", +218 "dd": "%-d", +219 "DY": "%a", +220 "dy": "%w", +221 "HH24": "%H", +222 "hh24": "%H", +223 "HH12": "%I", +224 "hh12": "%I", +225 "MI": "%M", +226 "mi": "%M", +227 "SS": "%S", +228 "ss": "%S", +229 "FF": "%f", +230 "ff": "%f", +231 "FF6": "%f", +232 "ff6": "%f", +233 } +234 +235 class Parser(parser.Parser): +236 IDENTIFY_PIVOT_STRINGS = True +237 SUPPORTS_USER_DEFINED_TYPES = False +238 +239 FUNCTIONS = { +240 **parser.Parser.FUNCTIONS, +241 "ARRAYAGG": exp.ArrayAgg.from_arg_list, +242 "ARRAY_CONSTRUCT": exp.Array.from_arg_list, +243 "ARRAY_TO_STRING": exp.ArrayJoin.from_arg_list, +244 "CONVERT_TIMEZONE": _parse_convert_timezone, +245 "DATE_TRUNC": date_trunc_to_time, +246 "DATEADD": lambda args: exp.DateAdd( +247 this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0) +248 ), +249 "DATEDIFF": _parse_datediff, +250 "DIV0": _div0_to_if, +251 "IFF": exp.If.from_arg_list, +252 "LISTAGG": exp.GroupConcat.from_arg_list, +253 "NULLIFZERO": _nullifzero_to_if, +254 "OBJECT_CONSTRUCT": _parse_object_construct, +255 "REGEXP_REPLACE": _parse_regexp_replace, +256 "REGEXP_SUBSTR": exp.RegexpExtract.from_arg_list, +257 "RLIKE": exp.RegexpLike.from_arg_list, +258 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), +259 "TIMEDIFF": _parse_datediff, +260 "TIMESTAMPDIFF": _parse_datediff, +261 "TO_ARRAY": exp.Array.from_arg_list, +262 "TO_TIMESTAMP": _parse_to_timestamp, +263 "TO_VARCHAR": exp.ToChar.from_arg_list, +264 "ZEROIFNULL": _zeroifnull_to_if, +265 } +266 +267 FUNCTION_PARSERS = { +268 **parser.Parser.FUNCTION_PARSERS, +269 "DATE_PART": _parse_date_part, 270 } -271 -272 TIMESTAMPS = parser.Parser.TIMESTAMPS.copy() - {TokenType.TIME} -273 -274 RANGE_PARSERS = { -275 **parser.Parser.RANGE_PARSERS, -276 TokenType.LIKE_ANY: binary_range_parser(exp.LikeAny), -277 TokenType.ILIKE_ANY: binary_range_parser(exp.ILikeAny), +271 FUNCTION_PARSERS.pop("TRIM") +272 +273 COLUMN_OPERATORS = { +274 **parser.Parser.COLUMN_OPERATORS, +275 TokenType.COLON: lambda self, this, path: self.expression( +276 exp.Bracket, this=this, expressions=[path] +277 ), 278 } 279 -280 ALTER_PARSERS = { -281 **parser.Parser.ALTER_PARSERS, -282 "SET": lambda self: self._parse_set(tag=self._match_text_seq("TAG")), -283 "UNSET": lambda self: self.expression( -284 exp.Set, -285 tag=self._match_text_seq("TAG"), -286 expressions=self._parse_csv(self._parse_id_var), -287 unset=True, -288 ), -289 } -290 -291 def _parse_id_var( -292 self, -293 any_token: bool = True, -294 tokens: t.Optional[t.Collection[TokenType]] = 
None, -295 ) -> t.Optional[exp.Expression]: -296 if self._match_text_seq("IDENTIFIER", "("): -297 identifier = ( -298 super()._parse_id_var(any_token=any_token, tokens=tokens) -299 or self._parse_string() -300 ) -301 self._match_r_paren() -302 return self.expression(exp.Anonymous, this="IDENTIFIER", expressions=[identifier]) +280 TIMESTAMPS = parser.Parser.TIMESTAMPS.copy() - {TokenType.TIME} +281 +282 RANGE_PARSERS = { +283 **parser.Parser.RANGE_PARSERS, +284 TokenType.LIKE_ANY: binary_range_parser(exp.LikeAny), +285 TokenType.ILIKE_ANY: binary_range_parser(exp.ILikeAny), +286 } +287 +288 ALTER_PARSERS = { +289 **parser.Parser.ALTER_PARSERS, +290 "SET": lambda self: self._parse_set(tag=self._match_text_seq("TAG")), +291 "UNSET": lambda self: self.expression( +292 exp.Set, +293 tag=self._match_text_seq("TAG"), +294 expressions=self._parse_csv(self._parse_id_var), +295 unset=True, +296 ), +297 } +298 +299 STATEMENT_PARSERS = { +300 **parser.Parser.STATEMENT_PARSERS, +301 TokenType.SHOW: lambda self: self._parse_show(), +302 } 303 -304 return super()._parse_id_var(any_token=any_token, tokens=tokens) -305 -306 class Tokenizer(tokens.Tokenizer): -307 STRING_ESCAPES = ["\\", "'"] -308 HEX_STRINGS = [("x'", "'"), ("X'", "'")] -309 RAW_STRINGS = ["$$"] -310 COMMENTS = ["--", "//", ("/*", "*/")] -311 -312 KEYWORDS = { -313 **tokens.Tokenizer.KEYWORDS, -314 "BYTEINT": TokenType.INT, -315 "CHAR VARYING": TokenType.VARCHAR, -316 "CHARACTER VARYING": TokenType.VARCHAR, -317 "EXCLUDE": TokenType.EXCEPT, -318 "ILIKE ANY": TokenType.ILIKE_ANY, -319 "LIKE ANY": TokenType.LIKE_ANY, -320 "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE, -321 "MINUS": TokenType.EXCEPT, -322 "NCHAR VARYING": TokenType.VARCHAR, -323 "PUT": TokenType.COMMAND, -324 "RENAME": TokenType.REPLACE, -325 "SAMPLE": TokenType.TABLE_SAMPLE, -326 "TIMESTAMP_LTZ": TokenType.TIMESTAMPLTZ, -327 "TIMESTAMP_NTZ": TokenType.TIMESTAMP, -328 "TIMESTAMP_TZ": TokenType.TIMESTAMPTZ, -329 "TIMESTAMPNTZ": TokenType.TIMESTAMP, -330 "TOP": TokenType.TOP, -331 } -332 -333 SINGLE_TOKENS = { -334 **tokens.Tokenizer.SINGLE_TOKENS, -335 "$": TokenType.PARAMETER, -336 } -337 -338 VAR_SINGLE_TOKENS = {"$"} -339 -340 class Generator(generator.Generator): -341 PARAMETER_TOKEN = "$" -342 MATCHED_BY_SOURCE = False -343 SINGLE_STRING_INTERVAL = True -344 JOIN_HINTS = False -345 TABLE_HINTS = False -346 QUERY_HINTS = False -347 -348 TRANSFORMS = { -349 **generator.Generator.TRANSFORMS, -350 exp.Array: inline_array_sql, -351 exp.ArrayConcat: rename_func("ARRAY_CAT"), -352 exp.ArrayJoin: rename_func("ARRAY_TO_STRING"), -353 exp.AtTimeZone: lambda self, e: self.func( -354 "CONVERT_TIMEZONE", e.args.get("zone"), e.this -355 ), -356 exp.DateAdd: lambda self, e: self.func("DATEADD", e.text("unit"), e.expression, e.this), -357 exp.DateDiff: lambda self, e: self.func( -358 "DATEDIFF", e.text("unit"), e.expression, e.this -359 ), -360 exp.DateStrToDate: datestrtodate_sql, -361 exp.DataType: _datatype_sql, -362 exp.DayOfWeek: rename_func("DAYOFWEEK"), -363 exp.Extract: rename_func("DATE_PART"), -364 exp.If: rename_func("IFF"), -365 exp.LogicalAnd: rename_func("BOOLAND_AGG"), -366 exp.LogicalOr: rename_func("BOOLOR_AGG"), -367 exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"), -368 exp.Max: max_or_greatest, -369 exp.Min: min_or_least, -370 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", -371 exp.RegexpILike: _regexpilike_sql, -372 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), -373 exp.StarMap: 
rename_func("OBJECT_CONSTRUCT"), -374 exp.StartsWith: rename_func("STARTSWITH"), -375 exp.StrPosition: lambda self, e: self.func( -376 "POSITION", e.args.get("substr"), e.this, e.args.get("position") -377 ), -378 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", -379 exp.Struct: lambda self, e: self.func( -380 "OBJECT_CONSTRUCT", -381 *(arg for expression in e.expressions for arg in expression.flatten()), -382 ), -383 exp.Stuff: rename_func("INSERT"), -384 exp.TimestampTrunc: timestamptrunc_sql, -385 exp.TimeStrToTime: timestrtotime_sql, -386 exp.TimeToStr: lambda self, e: self.func( -387 "TO_CHAR", exp.cast(e.this, "timestamp"), self.format_time(e) -388 ), -389 exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})", -390 exp.ToChar: lambda self, e: self.function_fallback_sql(e), -391 exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression), -392 exp.TsOrDsToDate: ts_or_ds_to_date_sql("snowflake"), -393 exp.UnixToTime: _unix_to_time_sql, -394 exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"), -395 exp.WeekOfYear: rename_func("WEEKOFYEAR"), -396 } -397 -398 TYPE_MAPPING = { -399 **generator.Generator.TYPE_MAPPING, -400 exp.DataType.Type.TIMESTAMP: "TIMESTAMPNTZ", -401 } -402 -403 STAR_MAPPING = { -404 "except": "EXCLUDE", -405 "replace": "RENAME", -406 } -407 -408 PROPERTIES_LOCATION = { -409 **generator.Generator.PROPERTIES_LOCATION, -410 exp.SetProperty: exp.Properties.Location.UNSUPPORTED, -411 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -412 } -413 -414 def regexpextract_sql(self, expression: exp.RegexpExtract) -> str: -415 # Other dialects don't support all of the following parameters, so we need to -416 # generate default values as necessary to ensure the transpilation is correct -417 group = expression.args.get("group") -418 parameters = expression.args.get("parameters") or (group and exp.Literal.string("c")) -419 occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1)) -420 position = expression.args.get("position") or (occurrence and exp.Literal.number(1)) -421 -422 return self.func( -423 "REGEXP_SUBSTR", -424 expression.this, -425 expression.expression, -426 position, -427 occurrence, -428 parameters, -429 group, -430 ) -431 -432 def except_op(self, expression: exp.Except) -> str: -433 if not expression.args.get("distinct", False): -434 self.unsupported("EXCEPT with All is not supported in Snowflake") -435 return super().except_op(expression) -436 -437 def intersect_op(self, expression: exp.Intersect) -> str: -438 if not expression.args.get("distinct", False): -439 self.unsupported("INTERSECT with All is not supported in Snowflake") -440 return super().intersect_op(expression) -441 -442 def describe_sql(self, expression: exp.Describe) -> str: -443 # Default to table if kind is unknown -444 kind_value = expression.args.get("kind") or "TABLE" -445 kind = f" {kind_value}" if kind_value else "" -446 this = f" {self.sql(expression, 'this')}" -447 return f"DESCRIBE{kind}{this}" -448 -449 def generatedasidentitycolumnconstraint_sql( -450 self, expression: exp.GeneratedAsIdentityColumnConstraint -451 ) -> str: -452 start = expression.args.get("start") -453 start = f" START {start}" if start else "" -454 increment = expression.args.get("increment") -455 increment = f" INCREMENT {increment}" if increment else "" -456 return f"AUTOINCREMENT{start}{increment}" +304 SHOW_PARSERS = { +305 "PRIMARY KEYS": _show_parser("PRIMARY KEYS"), +306 "TERSE 
PRIMARY KEYS": _show_parser("PRIMARY KEYS"), +307 } +308 +309 def _parse_id_var( +310 self, +311 any_token: bool = True, +312 tokens: t.Optional[t.Collection[TokenType]] = None, +313 ) -> t.Optional[exp.Expression]: +314 if self._match_text_seq("IDENTIFIER", "("): +315 identifier = ( +316 super()._parse_id_var(any_token=any_token, tokens=tokens) +317 or self._parse_string() +318 ) +319 self._match_r_paren() +320 return self.expression(exp.Anonymous, this="IDENTIFIER", expressions=[identifier]) +321 +322 return super()._parse_id_var(any_token=any_token, tokens=tokens) +323 +324 def _parse_show_snowflake(self, this: str) -> exp.Show: +325 scope = None +326 scope_kind = None +327 +328 if self._match(TokenType.IN): +329 if self._match_text_seq("ACCOUNT"): +330 scope_kind = "ACCOUNT" +331 elif self._match_set(self.DB_CREATABLES): +332 scope_kind = self._prev.text +333 if self._curr: +334 scope = self._parse_table() +335 elif self._curr: +336 scope_kind = "TABLE" +337 scope = self._parse_table() +338 +339 return self.expression(exp.Show, this=this, scope=scope, scope_kind=scope_kind) +340 +341 class Tokenizer(tokens.Tokenizer): +342 STRING_ESCAPES = ["\\", "'"] +343 HEX_STRINGS = [("x'", "'"), ("X'", "'")] +344 RAW_STRINGS = ["$$"] +345 COMMENTS = ["--", "//", ("/*", "*/")] +346 +347 KEYWORDS = { +348 **tokens.Tokenizer.KEYWORDS, +349 "BYTEINT": TokenType.INT, +350 "CHAR VARYING": TokenType.VARCHAR, +351 "CHARACTER VARYING": TokenType.VARCHAR, +352 "EXCLUDE": TokenType.EXCEPT, +353 "ILIKE ANY": TokenType.ILIKE_ANY, +354 "LIKE ANY": TokenType.LIKE_ANY, +355 "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE, +356 "MINUS": TokenType.EXCEPT, +357 "NCHAR VARYING": TokenType.VARCHAR, +358 "PUT": TokenType.COMMAND, +359 "RENAME": TokenType.REPLACE, +360 "SAMPLE": TokenType.TABLE_SAMPLE, +361 "TIMESTAMP_LTZ": TokenType.TIMESTAMPLTZ, +362 "TIMESTAMP_NTZ": TokenType.TIMESTAMP, +363 "TIMESTAMP_TZ": TokenType.TIMESTAMPTZ, +364 "TIMESTAMPNTZ": TokenType.TIMESTAMP, +365 "TOP": TokenType.TOP, +366 } +367 +368 SINGLE_TOKENS = { +369 **tokens.Tokenizer.SINGLE_TOKENS, +370 "$": TokenType.PARAMETER, +371 } +372 +373 VAR_SINGLE_TOKENS = {"$"} +374 +375 COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SHOW} +376 +377 class Generator(generator.Generator): +378 PARAMETER_TOKEN = "$" +379 MATCHED_BY_SOURCE = False +380 SINGLE_STRING_INTERVAL = True +381 JOIN_HINTS = False +382 TABLE_HINTS = False +383 QUERY_HINTS = False +384 +385 TRANSFORMS = { +386 **generator.Generator.TRANSFORMS, +387 exp.Array: inline_array_sql, +388 exp.ArrayConcat: rename_func("ARRAY_CAT"), +389 exp.ArrayJoin: rename_func("ARRAY_TO_STRING"), +390 exp.AtTimeZone: lambda self, e: self.func( +391 "CONVERT_TIMEZONE", e.args.get("zone"), e.this +392 ), +393 exp.DateAdd: lambda self, e: self.func("DATEADD", e.text("unit"), e.expression, e.this), +394 exp.DateDiff: lambda self, e: self.func( +395 "DATEDIFF", e.text("unit"), e.expression, e.this +396 ), +397 exp.DateStrToDate: datestrtodate_sql, +398 exp.DataType: _datatype_sql, +399 exp.DayOfWeek: rename_func("DAYOFWEEK"), +400 exp.Extract: rename_func("DATE_PART"), +401 exp.GroupConcat: rename_func("LISTAGG"), +402 exp.If: rename_func("IFF"), +403 exp.LogicalAnd: rename_func("BOOLAND_AGG"), +404 exp.LogicalOr: rename_func("BOOLOR_AGG"), +405 exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"), +406 exp.Max: max_or_greatest, +407 exp.Min: min_or_least, +408 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", +409 exp.RegexpILike: _regexpilike_sql, +410 
exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), +411 exp.StarMap: rename_func("OBJECT_CONSTRUCT"), +412 exp.StartsWith: rename_func("STARTSWITH"), +413 exp.StrPosition: lambda self, e: self.func( +414 "POSITION", e.args.get("substr"), e.this, e.args.get("position") +415 ), +416 exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})", +417 exp.Struct: lambda self, e: self.func( +418 "OBJECT_CONSTRUCT", +419 *(arg for expression in e.expressions for arg in expression.flatten()), +420 ), +421 exp.Stuff: rename_func("INSERT"), +422 exp.TimestampTrunc: timestamptrunc_sql, +423 exp.TimeStrToTime: timestrtotime_sql, +424 exp.TimeToStr: lambda self, e: self.func( +425 "TO_CHAR", exp.cast(e.this, "timestamp"), self.format_time(e) +426 ), +427 exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})", +428 exp.ToChar: lambda self, e: self.function_fallback_sql(e), +429 exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression), +430 exp.TsOrDsToDate: ts_or_ds_to_date_sql("snowflake"), +431 exp.UnixToTime: _unix_to_time_sql, +432 exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"), +433 exp.WeekOfYear: rename_func("WEEKOFYEAR"), +434 } +435 +436 TYPE_MAPPING = { +437 **generator.Generator.TYPE_MAPPING, +438 exp.DataType.Type.TIMESTAMP: "TIMESTAMPNTZ", +439 } +440 +441 STAR_MAPPING = { +442 "except": "EXCLUDE", +443 "replace": "RENAME", +444 } +445 +446 PROPERTIES_LOCATION = { +447 **generator.Generator.PROPERTIES_LOCATION, +448 exp.SetProperty: exp.Properties.Location.UNSUPPORTED, +449 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, +450 } +451 +452 def show_sql(self, expression: exp.Show) -> str: +453 scope = self.sql(expression, "scope") +454 scope = f" {scope}" if scope else "" +455 +456 scope_kind = self.sql(expression, "scope_kind") +457 if scope_kind: +458 scope_kind = f" IN {scope_kind}" +459 +460 return f"SHOW {expression.name}{scope_kind}{scope}" +461 +462 def regexpextract_sql(self, expression: exp.RegexpExtract) -> str: +463 # Other dialects don't support all of the following parameters, so we need to +464 # generate default values as necessary to ensure the transpilation is correct +465 group = expression.args.get("group") +466 parameters = expression.args.get("parameters") or (group and exp.Literal.string("c")) +467 occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1)) +468 position = expression.args.get("position") or (occurrence and exp.Literal.number(1)) +469 +470 return self.func( +471 "REGEXP_SUBSTR", +472 expression.this, +473 expression.expression, +474 position, +475 occurrence, +476 parameters, +477 group, +478 ) +479 +480 def except_op(self, expression: exp.Except) -> str: +481 if not expression.args.get("distinct", False): +482 self.unsupported("EXCEPT with All is not supported in Snowflake") +483 return super().except_op(expression) +484 +485 def intersect_op(self, expression: exp.Intersect) -> str: +486 if not expression.args.get("distinct", False): +487 self.unsupported("INTERSECT with All is not supported in Snowflake") +488 return super().intersect_op(expression) +489 +490 def describe_sql(self, expression: exp.Describe) -> str: +491 # Default to table if kind is unknown +492 kind_value = expression.args.get("kind") or "TABLE" +493 kind = f" {kind_value}" if kind_value else "" +494 this = f" {self.sql(expression, 'this')}" +495 expressions = self.expressions(expression, flat=True) +496 expressions = f" {expressions}" 
if expressions else "" +497 return f"DESCRIBE{kind}{this}{expressions}" +498 +499 def generatedasidentitycolumnconstraint_sql( +500 self, expression: exp.GeneratedAsIdentityColumnConstraint +501 ) -> str: +502 start = expression.args.get("start") +503 start = f" START {start}" if start else "" +504 increment = expression.args.get("increment") +505 increment = f" INCREMENT {increment}" if increment else "" +506 return f"AUTOINCREMENT{start}{increment}"
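A minimal sketch of the new SHOW support above (STATEMENT_PARSERS, SHOW_PARSERS, _parse_show_snowflake and show_sql); the statement round-trips through an exp.Show node instead of an opaque Command:

    import sqlglot

    # SHOW is no longer tokenized as a bare command, so the parser can build a
    # structured exp.Show with scope / scope_kind arguments.
    ast = sqlglot.parse_one("SHOW PRIMARY KEYS IN ACCOUNT", read="snowflake")
    print(type(ast).__name__)             # expected: Show
    print(ast.sql(dialect="snowflake"))   # expected: SHOW PRIMARY KEYS IN ACCOUNT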
    @@ -770,270 +832,313 @@ -
    194class Snowflake(Dialect):
    -195    # https://docs.snowflake.com/en/sql-reference/identifiers-syntax
    -196    RESOLVES_IDENTIFIERS_AS_UPPERCASE = True
    -197    NULL_ORDERING = "nulls_are_large"
    -198    TIME_FORMAT = "'YYYY-MM-DD HH24:MI:SS'"
    -199
    -200    TIME_MAPPING = {
    -201        "YYYY": "%Y",
    -202        "yyyy": "%Y",
    -203        "YY": "%y",
    -204        "yy": "%y",
    -205        "MMMM": "%B",
    -206        "mmmm": "%B",
    -207        "MON": "%b",
    -208        "mon": "%b",
    -209        "MM": "%m",
    -210        "mm": "%m",
    -211        "DD": "%d",
    -212        "dd": "%-d",
    -213        "DY": "%a",
    -214        "dy": "%w",
    -215        "HH24": "%H",
    -216        "hh24": "%H",
    -217        "HH12": "%I",
    -218        "hh12": "%I",
    -219        "MI": "%M",
    -220        "mi": "%M",
    -221        "SS": "%S",
    -222        "ss": "%S",
    -223        "FF": "%f",
    -224        "ff": "%f",
    -225        "FF6": "%f",
    -226        "ff6": "%f",
    -227    }
    -228
    -229    class Parser(parser.Parser):
    -230        IDENTIFY_PIVOT_STRINGS = True
    -231        SUPPORTS_USER_DEFINED_TYPES = False
    -232
    -233        FUNCTIONS = {
    -234            **parser.Parser.FUNCTIONS,
    -235            "ARRAYAGG": exp.ArrayAgg.from_arg_list,
    -236            "ARRAY_CONSTRUCT": exp.Array.from_arg_list,
    -237            "ARRAY_TO_STRING": exp.ArrayJoin.from_arg_list,
    -238            "CONVERT_TIMEZONE": _parse_convert_timezone,
    -239            "DATE_TRUNC": date_trunc_to_time,
    -240            "DATEADD": lambda args: exp.DateAdd(
    -241                this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0)
    -242            ),
    -243            "DATEDIFF": _parse_datediff,
    -244            "DIV0": _div0_to_if,
    -245            "IFF": exp.If.from_arg_list,
    -246            "NULLIFZERO": _nullifzero_to_if,
    -247            "OBJECT_CONSTRUCT": _parse_object_construct,
    -248            "REGEXP_REPLACE": _parse_regexp_replace,
    -249            "REGEXP_SUBSTR": exp.RegexpExtract.from_arg_list,
    -250            "RLIKE": exp.RegexpLike.from_arg_list,
    -251            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
    -252            "TIMEDIFF": _parse_datediff,
    -253            "TIMESTAMPDIFF": _parse_datediff,
    -254            "TO_ARRAY": exp.Array.from_arg_list,
    -255            "TO_TIMESTAMP": _parse_to_timestamp,
    -256            "TO_VARCHAR": exp.ToChar.from_arg_list,
    -257            "ZEROIFNULL": _zeroifnull_to_if,
    -258        }
    -259
    -260        FUNCTION_PARSERS = {
    -261            **parser.Parser.FUNCTION_PARSERS,
    -262            "DATE_PART": _parse_date_part,
    -263        }
    -264        FUNCTION_PARSERS.pop("TRIM")
    -265
    -266        COLUMN_OPERATORS = {
    -267            **parser.Parser.COLUMN_OPERATORS,
    -268            TokenType.COLON: lambda self, this, path: self.expression(
    -269                exp.Bracket, this=this, expressions=[path]
    -270            ),
    +            
    201class Snowflake(Dialect):
    +202    # https://docs.snowflake.com/en/sql-reference/identifiers-syntax
    +203    RESOLVES_IDENTIFIERS_AS_UPPERCASE = True
    +204    NULL_ORDERING = "nulls_are_large"
    +205    TIME_FORMAT = "'YYYY-MM-DD HH24:MI:SS'"
    +206
    +207    TIME_MAPPING = {
    +208        "YYYY": "%Y",
    +209        "yyyy": "%Y",
    +210        "YY": "%y",
    +211        "yy": "%y",
    +212        "MMMM": "%B",
    +213        "mmmm": "%B",
    +214        "MON": "%b",
    +215        "mon": "%b",
    +216        "MM": "%m",
    +217        "mm": "%m",
    +218        "DD": "%d",
    +219        "dd": "%-d",
    +220        "DY": "%a",
    +221        "dy": "%w",
    +222        "HH24": "%H",
    +223        "hh24": "%H",
    +224        "HH12": "%I",
    +225        "hh12": "%I",
    +226        "MI": "%M",
    +227        "mi": "%M",
    +228        "SS": "%S",
    +229        "ss": "%S",
    +230        "FF": "%f",
    +231        "ff": "%f",
    +232        "FF6": "%f",
    +233        "ff6": "%f",
    +234    }
    +235
    +236    class Parser(parser.Parser):
    +237        IDENTIFY_PIVOT_STRINGS = True
    +238        SUPPORTS_USER_DEFINED_TYPES = False
    +239
    +240        FUNCTIONS = {
    +241            **parser.Parser.FUNCTIONS,
    +242            "ARRAYAGG": exp.ArrayAgg.from_arg_list,
    +243            "ARRAY_CONSTRUCT": exp.Array.from_arg_list,
    +244            "ARRAY_TO_STRING": exp.ArrayJoin.from_arg_list,
    +245            "CONVERT_TIMEZONE": _parse_convert_timezone,
    +246            "DATE_TRUNC": date_trunc_to_time,
    +247            "DATEADD": lambda args: exp.DateAdd(
    +248                this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0)
    +249            ),
    +250            "DATEDIFF": _parse_datediff,
    +251            "DIV0": _div0_to_if,
    +252            "IFF": exp.If.from_arg_list,
    +253            "LISTAGG": exp.GroupConcat.from_arg_list,
    +254            "NULLIFZERO": _nullifzero_to_if,
    +255            "OBJECT_CONSTRUCT": _parse_object_construct,
    +256            "REGEXP_REPLACE": _parse_regexp_replace,
    +257            "REGEXP_SUBSTR": exp.RegexpExtract.from_arg_list,
    +258            "RLIKE": exp.RegexpLike.from_arg_list,
    +259            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
    +260            "TIMEDIFF": _parse_datediff,
    +261            "TIMESTAMPDIFF": _parse_datediff,
    +262            "TO_ARRAY": exp.Array.from_arg_list,
    +263            "TO_TIMESTAMP": _parse_to_timestamp,
    +264            "TO_VARCHAR": exp.ToChar.from_arg_list,
    +265            "ZEROIFNULL": _zeroifnull_to_if,
    +266        }
    +267
    +268        FUNCTION_PARSERS = {
    +269            **parser.Parser.FUNCTION_PARSERS,
    +270            "DATE_PART": _parse_date_part,
     271        }
    -272
    -273        TIMESTAMPS = parser.Parser.TIMESTAMPS.copy() - {TokenType.TIME}
    -274
    -275        RANGE_PARSERS = {
    -276            **parser.Parser.RANGE_PARSERS,
    -277            TokenType.LIKE_ANY: binary_range_parser(exp.LikeAny),
    -278            TokenType.ILIKE_ANY: binary_range_parser(exp.ILikeAny),
    +272        FUNCTION_PARSERS.pop("TRIM")
    +273
    +274        COLUMN_OPERATORS = {
    +275            **parser.Parser.COLUMN_OPERATORS,
    +276            TokenType.COLON: lambda self, this, path: self.expression(
    +277                exp.Bracket, this=this, expressions=[path]
    +278            ),
     279        }
     280
    -281        ALTER_PARSERS = {
    -282            **parser.Parser.ALTER_PARSERS,
    -283            "SET": lambda self: self._parse_set(tag=self._match_text_seq("TAG")),
    -284            "UNSET": lambda self: self.expression(
    -285                exp.Set,
    -286                tag=self._match_text_seq("TAG"),
    -287                expressions=self._parse_csv(self._parse_id_var),
    -288                unset=True,
    -289            ),
    -290        }
    -291
    -292        def _parse_id_var(
    -293            self,
    -294            any_token: bool = True,
    -295            tokens: t.Optional[t.Collection[TokenType]] = None,
    -296        ) -> t.Optional[exp.Expression]:
    -297            if self._match_text_seq("IDENTIFIER", "("):
    -298                identifier = (
    -299                    super()._parse_id_var(any_token=any_token, tokens=tokens)
    -300                    or self._parse_string()
    -301                )
    -302                self._match_r_paren()
    -303                return self.expression(exp.Anonymous, this="IDENTIFIER", expressions=[identifier])
    +281        TIMESTAMPS = parser.Parser.TIMESTAMPS.copy() - {TokenType.TIME}
    +282
    +283        RANGE_PARSERS = {
    +284            **parser.Parser.RANGE_PARSERS,
    +285            TokenType.LIKE_ANY: binary_range_parser(exp.LikeAny),
    +286            TokenType.ILIKE_ANY: binary_range_parser(exp.ILikeAny),
    +287        }
    +288
    +289        ALTER_PARSERS = {
    +290            **parser.Parser.ALTER_PARSERS,
    +291            "SET": lambda self: self._parse_set(tag=self._match_text_seq("TAG")),
    +292            "UNSET": lambda self: self.expression(
    +293                exp.Set,
    +294                tag=self._match_text_seq("TAG"),
    +295                expressions=self._parse_csv(self._parse_id_var),
    +296                unset=True,
    +297            ),
    +298        }
    +299
    +300        STATEMENT_PARSERS = {
    +301            **parser.Parser.STATEMENT_PARSERS,
    +302            TokenType.SHOW: lambda self: self._parse_show(),
    +303        }
     304
    -305            return super()._parse_id_var(any_token=any_token, tokens=tokens)
    -306
    -307    class Tokenizer(tokens.Tokenizer):
    -308        STRING_ESCAPES = ["\\", "'"]
    -309        HEX_STRINGS = [("x'", "'"), ("X'", "'")]
    -310        RAW_STRINGS = ["$$"]
    -311        COMMENTS = ["--", "//", ("/*", "*/")]
    -312
    -313        KEYWORDS = {
    -314            **tokens.Tokenizer.KEYWORDS,
    -315            "BYTEINT": TokenType.INT,
    -316            "CHAR VARYING": TokenType.VARCHAR,
    -317            "CHARACTER VARYING": TokenType.VARCHAR,
    -318            "EXCLUDE": TokenType.EXCEPT,
    -319            "ILIKE ANY": TokenType.ILIKE_ANY,
    -320            "LIKE ANY": TokenType.LIKE_ANY,
    -321            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
    -322            "MINUS": TokenType.EXCEPT,
    -323            "NCHAR VARYING": TokenType.VARCHAR,
    -324            "PUT": TokenType.COMMAND,
    -325            "RENAME": TokenType.REPLACE,
    -326            "SAMPLE": TokenType.TABLE_SAMPLE,
    -327            "TIMESTAMP_LTZ": TokenType.TIMESTAMPLTZ,
    -328            "TIMESTAMP_NTZ": TokenType.TIMESTAMP,
    -329            "TIMESTAMP_TZ": TokenType.TIMESTAMPTZ,
    -330            "TIMESTAMPNTZ": TokenType.TIMESTAMP,
    -331            "TOP": TokenType.TOP,
    -332        }
    -333
    -334        SINGLE_TOKENS = {
    -335            **tokens.Tokenizer.SINGLE_TOKENS,
    -336            "$": TokenType.PARAMETER,
    -337        }
    -338
    -339        VAR_SINGLE_TOKENS = {"$"}
    -340
    -341    class Generator(generator.Generator):
    -342        PARAMETER_TOKEN = "$"
    -343        MATCHED_BY_SOURCE = False
    -344        SINGLE_STRING_INTERVAL = True
    -345        JOIN_HINTS = False
    -346        TABLE_HINTS = False
    -347        QUERY_HINTS = False
    -348
    -349        TRANSFORMS = {
    -350            **generator.Generator.TRANSFORMS,
    -351            exp.Array: inline_array_sql,
    -352            exp.ArrayConcat: rename_func("ARRAY_CAT"),
    -353            exp.ArrayJoin: rename_func("ARRAY_TO_STRING"),
    -354            exp.AtTimeZone: lambda self, e: self.func(
    -355                "CONVERT_TIMEZONE", e.args.get("zone"), e.this
    -356            ),
    -357            exp.DateAdd: lambda self, e: self.func("DATEADD", e.text("unit"), e.expression, e.this),
    -358            exp.DateDiff: lambda self, e: self.func(
    -359                "DATEDIFF", e.text("unit"), e.expression, e.this
    -360            ),
    -361            exp.DateStrToDate: datestrtodate_sql,
    -362            exp.DataType: _datatype_sql,
    -363            exp.DayOfWeek: rename_func("DAYOFWEEK"),
    -364            exp.Extract: rename_func("DATE_PART"),
    -365            exp.If: rename_func("IFF"),
    -366            exp.LogicalAnd: rename_func("BOOLAND_AGG"),
    -367            exp.LogicalOr: rename_func("BOOLOR_AGG"),
    -368            exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
    -369            exp.Max: max_or_greatest,
    -370            exp.Min: min_or_least,
    -371            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    -372            exp.RegexpILike: _regexpilike_sql,
    -373            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
    -374            exp.StarMap: rename_func("OBJECT_CONSTRUCT"),
    -375            exp.StartsWith: rename_func("STARTSWITH"),
    -376            exp.StrPosition: lambda self, e: self.func(
    -377                "POSITION", e.args.get("substr"), e.this, e.args.get("position")
    -378            ),
    -379            exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})",
    -380            exp.Struct: lambda self, e: self.func(
    -381                "OBJECT_CONSTRUCT",
    -382                *(arg for expression in e.expressions for arg in expression.flatten()),
    -383            ),
    -384            exp.Stuff: rename_func("INSERT"),
    -385            exp.TimestampTrunc: timestamptrunc_sql,
    -386            exp.TimeStrToTime: timestrtotime_sql,
    -387            exp.TimeToStr: lambda self, e: self.func(
    -388                "TO_CHAR", exp.cast(e.this, "timestamp"), self.format_time(e)
    -389            ),
    -390            exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})",
    -391            exp.ToChar: lambda self, e: self.function_fallback_sql(e),
    -392            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
    -393            exp.TsOrDsToDate: ts_or_ds_to_date_sql("snowflake"),
    -394            exp.UnixToTime: _unix_to_time_sql,
    -395            exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
    -396            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
    -397        }
    -398
    -399        TYPE_MAPPING = {
    -400            **generator.Generator.TYPE_MAPPING,
    -401            exp.DataType.Type.TIMESTAMP: "TIMESTAMPNTZ",
    -402        }
    -403
    -404        STAR_MAPPING = {
    -405            "except": "EXCLUDE",
    -406            "replace": "RENAME",
    -407        }
    -408
    -409        PROPERTIES_LOCATION = {
    -410            **generator.Generator.PROPERTIES_LOCATION,
    -411            exp.SetProperty: exp.Properties.Location.UNSUPPORTED,
    -412            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    -413        }
    -414
    -415        def regexpextract_sql(self, expression: exp.RegexpExtract) -> str:
    -416            # Other dialects don't support all of the following parameters, so we need to
    -417            # generate default values as necessary to ensure the transpilation is correct
    -418            group = expression.args.get("group")
    -419            parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
    -420            occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
    -421            position = expression.args.get("position") or (occurrence and exp.Literal.number(1))
    -422
    -423            return self.func(
    -424                "REGEXP_SUBSTR",
    -425                expression.this,
    -426                expression.expression,
    -427                position,
    -428                occurrence,
    -429                parameters,
    -430                group,
    -431            )
    -432
    -433        def except_op(self, expression: exp.Except) -> str:
    -434            if not expression.args.get("distinct", False):
    -435                self.unsupported("EXCEPT with All is not supported in Snowflake")
    -436            return super().except_op(expression)
    -437
    -438        def intersect_op(self, expression: exp.Intersect) -> str:
    -439            if not expression.args.get("distinct", False):
    -440                self.unsupported("INTERSECT with All is not supported in Snowflake")
    -441            return super().intersect_op(expression)
    -442
    -443        def describe_sql(self, expression: exp.Describe) -> str:
    -444            # Default to table if kind is unknown
    -445            kind_value = expression.args.get("kind") or "TABLE"
    -446            kind = f" {kind_value}" if kind_value else ""
    -447            this = f" {self.sql(expression, 'this')}"
    -448            return f"DESCRIBE{kind}{this}"
    -449
    -450        def generatedasidentitycolumnconstraint_sql(
    -451            self, expression: exp.GeneratedAsIdentityColumnConstraint
    -452        ) -> str:
    -453            start = expression.args.get("start")
    -454            start = f" START {start}" if start else ""
    -455            increment = expression.args.get("increment")
    -456            increment = f" INCREMENT {increment}" if increment else ""
    -457            return f"AUTOINCREMENT{start}{increment}"
    +305        SHOW_PARSERS = {
    +306            "PRIMARY KEYS": _show_parser("PRIMARY KEYS"),
    +307            "TERSE PRIMARY KEYS": _show_parser("PRIMARY KEYS"),
    +308        }
    +309
    +310        def _parse_id_var(
    +311            self,
    +312            any_token: bool = True,
    +313            tokens: t.Optional[t.Collection[TokenType]] = None,
    +314        ) -> t.Optional[exp.Expression]:
    +315            if self._match_text_seq("IDENTIFIER", "("):
    +316                identifier = (
    +317                    super()._parse_id_var(any_token=any_token, tokens=tokens)
    +318                    or self._parse_string()
    +319                )
    +320                self._match_r_paren()
    +321                return self.expression(exp.Anonymous, this="IDENTIFIER", expressions=[identifier])
    +322
    +323            return super()._parse_id_var(any_token=any_token, tokens=tokens)
    +324
    +325        def _parse_show_snowflake(self, this: str) -> exp.Show:
    +326            scope = None
    +327            scope_kind = None
    +328
    +329            if self._match(TokenType.IN):
    +330                if self._match_text_seq("ACCOUNT"):
    +331                    scope_kind = "ACCOUNT"
    +332                elif self._match_set(self.DB_CREATABLES):
    +333                    scope_kind = self._prev.text
    +334                    if self._curr:
    +335                        scope = self._parse_table()
    +336                elif self._curr:
    +337                    scope_kind = "TABLE"
    +338                    scope = self._parse_table()
    +339
    +340            return self.expression(exp.Show, this=this, scope=scope, scope_kind=scope_kind)
    +341
    +342    class Tokenizer(tokens.Tokenizer):
    +343        STRING_ESCAPES = ["\\", "'"]
    +344        HEX_STRINGS = [("x'", "'"), ("X'", "'")]
    +345        RAW_STRINGS = ["$$"]
    +346        COMMENTS = ["--", "//", ("/*", "*/")]
    +347
    +348        KEYWORDS = {
    +349            **tokens.Tokenizer.KEYWORDS,
    +350            "BYTEINT": TokenType.INT,
    +351            "CHAR VARYING": TokenType.VARCHAR,
    +352            "CHARACTER VARYING": TokenType.VARCHAR,
    +353            "EXCLUDE": TokenType.EXCEPT,
    +354            "ILIKE ANY": TokenType.ILIKE_ANY,
    +355            "LIKE ANY": TokenType.LIKE_ANY,
    +356            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
    +357            "MINUS": TokenType.EXCEPT,
    +358            "NCHAR VARYING": TokenType.VARCHAR,
    +359            "PUT": TokenType.COMMAND,
    +360            "RENAME": TokenType.REPLACE,
    +361            "SAMPLE": TokenType.TABLE_SAMPLE,
    +362            "TIMESTAMP_LTZ": TokenType.TIMESTAMPLTZ,
    +363            "TIMESTAMP_NTZ": TokenType.TIMESTAMP,
    +364            "TIMESTAMP_TZ": TokenType.TIMESTAMPTZ,
    +365            "TIMESTAMPNTZ": TokenType.TIMESTAMP,
    +366            "TOP": TokenType.TOP,
    +367        }
    +368
    +369        SINGLE_TOKENS = {
    +370            **tokens.Tokenizer.SINGLE_TOKENS,
    +371            "$": TokenType.PARAMETER,
    +372        }
    +373
    +374        VAR_SINGLE_TOKENS = {"$"}
    +375
    +376        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SHOW}
    +377
    +378    class Generator(generator.Generator):
    +379        PARAMETER_TOKEN = "$"
    +380        MATCHED_BY_SOURCE = False
    +381        SINGLE_STRING_INTERVAL = True
    +382        JOIN_HINTS = False
    +383        TABLE_HINTS = False
    +384        QUERY_HINTS = False
    +385
    +386        TRANSFORMS = {
    +387            **generator.Generator.TRANSFORMS,
    +388            exp.Array: inline_array_sql,
    +389            exp.ArrayConcat: rename_func("ARRAY_CAT"),
    +390            exp.ArrayJoin: rename_func("ARRAY_TO_STRING"),
    +391            exp.AtTimeZone: lambda self, e: self.func(
    +392                "CONVERT_TIMEZONE", e.args.get("zone"), e.this
    +393            ),
    +394            exp.DateAdd: lambda self, e: self.func("DATEADD", e.text("unit"), e.expression, e.this),
    +395            exp.DateDiff: lambda self, e: self.func(
    +396                "DATEDIFF", e.text("unit"), e.expression, e.this
    +397            ),
    +398            exp.DateStrToDate: datestrtodate_sql,
    +399            exp.DataType: _datatype_sql,
    +400            exp.DayOfWeek: rename_func("DAYOFWEEK"),
    +401            exp.Extract: rename_func("DATE_PART"),
    +402            exp.GroupConcat: rename_func("LISTAGG"),
    +403            exp.If: rename_func("IFF"),
    +404            exp.LogicalAnd: rename_func("BOOLAND_AGG"),
    +405            exp.LogicalOr: rename_func("BOOLOR_AGG"),
    +406            exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
    +407            exp.Max: max_or_greatest,
    +408            exp.Min: min_or_least,
    +409            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    +410            exp.RegexpILike: _regexpilike_sql,
    +411            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
    +412            exp.StarMap: rename_func("OBJECT_CONSTRUCT"),
    +413            exp.StartsWith: rename_func("STARTSWITH"),
    +414            exp.StrPosition: lambda self, e: self.func(
    +415                "POSITION", e.args.get("substr"), e.this, e.args.get("position")
    +416            ),
    +417            exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})",
    +418            exp.Struct: lambda self, e: self.func(
    +419                "OBJECT_CONSTRUCT",
    +420                *(arg for expression in e.expressions for arg in expression.flatten()),
    +421            ),
    +422            exp.Stuff: rename_func("INSERT"),
    +423            exp.TimestampTrunc: timestamptrunc_sql,
    +424            exp.TimeStrToTime: timestrtotime_sql,
    +425            exp.TimeToStr: lambda self, e: self.func(
    +426                "TO_CHAR", exp.cast(e.this, "timestamp"), self.format_time(e)
    +427            ),
    +428            exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})",
    +429            exp.ToChar: lambda self, e: self.function_fallback_sql(e),
    +430            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
    +431            exp.TsOrDsToDate: ts_or_ds_to_date_sql("snowflake"),
    +432            exp.UnixToTime: _unix_to_time_sql,
    +433            exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
    +434            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
    +435        }
    +436
    +437        TYPE_MAPPING = {
    +438            **generator.Generator.TYPE_MAPPING,
    +439            exp.DataType.Type.TIMESTAMP: "TIMESTAMPNTZ",
    +440        }
    +441
    +442        STAR_MAPPING = {
    +443            "except": "EXCLUDE",
    +444            "replace": "RENAME",
    +445        }
    +446
    +447        PROPERTIES_LOCATION = {
    +448            **generator.Generator.PROPERTIES_LOCATION,
    +449            exp.SetProperty: exp.Properties.Location.UNSUPPORTED,
    +450            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    +451        }
    +452
    +453        def show_sql(self, expression: exp.Show) -> str:
    +454            scope = self.sql(expression, "scope")
    +455            scope = f" {scope}" if scope else ""
    +456
    +457            scope_kind = self.sql(expression, "scope_kind")
    +458            if scope_kind:
    +459                scope_kind = f" IN {scope_kind}"
    +460
    +461            return f"SHOW {expression.name}{scope_kind}{scope}"
    +462
    +463        def regexpextract_sql(self, expression: exp.RegexpExtract) -> str:
    +464            # Other dialects don't support all of the following parameters, so we need to
    +465            # generate default values as necessary to ensure the transpilation is correct
    +466            group = expression.args.get("group")
    +467            parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
    +468            occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
    +469            position = expression.args.get("position") or (occurrence and exp.Literal.number(1))
    +470
    +471            return self.func(
    +472                "REGEXP_SUBSTR",
    +473                expression.this,
    +474                expression.expression,
    +475                position,
    +476                occurrence,
    +477                parameters,
    +478                group,
    +479            )
    +480
    +481        def except_op(self, expression: exp.Except) -> str:
    +482            if not expression.args.get("distinct", False):
    +483                self.unsupported("EXCEPT with All is not supported in Snowflake")
    +484            return super().except_op(expression)
    +485
    +486        def intersect_op(self, expression: exp.Intersect) -> str:
    +487            if not expression.args.get("distinct", False):
    +488                self.unsupported("INTERSECT with All is not supported in Snowflake")
    +489            return super().intersect_op(expression)
    +490
    +491        def describe_sql(self, expression: exp.Describe) -> str:
    +492            # Default to table if kind is unknown
    +493            kind_value = expression.args.get("kind") or "TABLE"
    +494            kind = f" {kind_value}" if kind_value else ""
    +495            this = f" {self.sql(expression, 'this')}"
    +496            expressions = self.expressions(expression, flat=True)
    +497            expressions = f" {expressions}" if expressions else ""
    +498            return f"DESCRIBE{kind}{this}{expressions}"
    +499
    +500        def generatedasidentitycolumnconstraint_sql(
    +501            self, expression: exp.GeneratedAsIdentityColumnConstraint
    +502        ) -> str:
    +503            start = expression.args.get("start")
    +504            start = f" START {start}" if start else ""
    +505            increment = expression.args.get("increment")
    +506            increment = f" INCREMENT {increment}" if increment else ""
    +507            return f"AUTOINCREMENT{start}{increment}"
     
    @@ -1341,83 +1446,111 @@
    -
    229    class Parser(parser.Parser):
    -230        IDENTIFY_PIVOT_STRINGS = True
    -231        SUPPORTS_USER_DEFINED_TYPES = False
    -232
    -233        FUNCTIONS = {
    -234            **parser.Parser.FUNCTIONS,
    -235            "ARRAYAGG": exp.ArrayAgg.from_arg_list,
    -236            "ARRAY_CONSTRUCT": exp.Array.from_arg_list,
    -237            "ARRAY_TO_STRING": exp.ArrayJoin.from_arg_list,
    -238            "CONVERT_TIMEZONE": _parse_convert_timezone,
    -239            "DATE_TRUNC": date_trunc_to_time,
    -240            "DATEADD": lambda args: exp.DateAdd(
    -241                this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0)
    -242            ),
    -243            "DATEDIFF": _parse_datediff,
    -244            "DIV0": _div0_to_if,
    -245            "IFF": exp.If.from_arg_list,
    -246            "NULLIFZERO": _nullifzero_to_if,
    -247            "OBJECT_CONSTRUCT": _parse_object_construct,
    -248            "REGEXP_REPLACE": _parse_regexp_replace,
    -249            "REGEXP_SUBSTR": exp.RegexpExtract.from_arg_list,
    -250            "RLIKE": exp.RegexpLike.from_arg_list,
    -251            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
    -252            "TIMEDIFF": _parse_datediff,
    -253            "TIMESTAMPDIFF": _parse_datediff,
    -254            "TO_ARRAY": exp.Array.from_arg_list,
    -255            "TO_TIMESTAMP": _parse_to_timestamp,
    -256            "TO_VARCHAR": exp.ToChar.from_arg_list,
    -257            "ZEROIFNULL": _zeroifnull_to_if,
    -258        }
    -259
    -260        FUNCTION_PARSERS = {
    -261            **parser.Parser.FUNCTION_PARSERS,
    -262            "DATE_PART": _parse_date_part,
    -263        }
    -264        FUNCTION_PARSERS.pop("TRIM")
    -265
    -266        COLUMN_OPERATORS = {
    -267            **parser.Parser.COLUMN_OPERATORS,
    -268            TokenType.COLON: lambda self, this, path: self.expression(
    -269                exp.Bracket, this=this, expressions=[path]
    -270            ),
    +            
    236    class Parser(parser.Parser):
    +237        IDENTIFY_PIVOT_STRINGS = True
    +238        SUPPORTS_USER_DEFINED_TYPES = False
    +239
    +240        FUNCTIONS = {
    +241            **parser.Parser.FUNCTIONS,
    +242            "ARRAYAGG": exp.ArrayAgg.from_arg_list,
    +243            "ARRAY_CONSTRUCT": exp.Array.from_arg_list,
    +244            "ARRAY_TO_STRING": exp.ArrayJoin.from_arg_list,
    +245            "CONVERT_TIMEZONE": _parse_convert_timezone,
    +246            "DATE_TRUNC": date_trunc_to_time,
    +247            "DATEADD": lambda args: exp.DateAdd(
    +248                this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0)
    +249            ),
    +250            "DATEDIFF": _parse_datediff,
    +251            "DIV0": _div0_to_if,
    +252            "IFF": exp.If.from_arg_list,
    +253            "LISTAGG": exp.GroupConcat.from_arg_list,
    +254            "NULLIFZERO": _nullifzero_to_if,
    +255            "OBJECT_CONSTRUCT": _parse_object_construct,
    +256            "REGEXP_REPLACE": _parse_regexp_replace,
    +257            "REGEXP_SUBSTR": exp.RegexpExtract.from_arg_list,
    +258            "RLIKE": exp.RegexpLike.from_arg_list,
    +259            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
    +260            "TIMEDIFF": _parse_datediff,
    +261            "TIMESTAMPDIFF": _parse_datediff,
    +262            "TO_ARRAY": exp.Array.from_arg_list,
    +263            "TO_TIMESTAMP": _parse_to_timestamp,
    +264            "TO_VARCHAR": exp.ToChar.from_arg_list,
    +265            "ZEROIFNULL": _zeroifnull_to_if,
    +266        }
    +267
    +268        FUNCTION_PARSERS = {
    +269            **parser.Parser.FUNCTION_PARSERS,
    +270            "DATE_PART": _parse_date_part,
     271        }
    -272
    -273        TIMESTAMPS = parser.Parser.TIMESTAMPS.copy() - {TokenType.TIME}
    -274
    -275        RANGE_PARSERS = {
    -276            **parser.Parser.RANGE_PARSERS,
    -277            TokenType.LIKE_ANY: binary_range_parser(exp.LikeAny),
    -278            TokenType.ILIKE_ANY: binary_range_parser(exp.ILikeAny),
    +272        FUNCTION_PARSERS.pop("TRIM")
    +273
    +274        COLUMN_OPERATORS = {
    +275            **parser.Parser.COLUMN_OPERATORS,
    +276            TokenType.COLON: lambda self, this, path: self.expression(
    +277                exp.Bracket, this=this, expressions=[path]
    +278            ),
     279        }
     280
    -281        ALTER_PARSERS = {
    -282            **parser.Parser.ALTER_PARSERS,
    -283            "SET": lambda self: self._parse_set(tag=self._match_text_seq("TAG")),
    -284            "UNSET": lambda self: self.expression(
    -285                exp.Set,
    -286                tag=self._match_text_seq("TAG"),
    -287                expressions=self._parse_csv(self._parse_id_var),
    -288                unset=True,
    -289            ),
    -290        }
    -291
    -292        def _parse_id_var(
    -293            self,
    -294            any_token: bool = True,
    -295            tokens: t.Optional[t.Collection[TokenType]] = None,
    -296        ) -> t.Optional[exp.Expression]:
    -297            if self._match_text_seq("IDENTIFIER", "("):
    -298                identifier = (
    -299                    super()._parse_id_var(any_token=any_token, tokens=tokens)
    -300                    or self._parse_string()
    -301                )
    -302                self._match_r_paren()
    -303                return self.expression(exp.Anonymous, this="IDENTIFIER", expressions=[identifier])
    +281        TIMESTAMPS = parser.Parser.TIMESTAMPS.copy() - {TokenType.TIME}
    +282
    +283        RANGE_PARSERS = {
    +284            **parser.Parser.RANGE_PARSERS,
    +285            TokenType.LIKE_ANY: binary_range_parser(exp.LikeAny),
    +286            TokenType.ILIKE_ANY: binary_range_parser(exp.ILikeAny),
    +287        }
    +288
    +289        ALTER_PARSERS = {
    +290            **parser.Parser.ALTER_PARSERS,
    +291            "SET": lambda self: self._parse_set(tag=self._match_text_seq("TAG")),
    +292            "UNSET": lambda self: self.expression(
    +293                exp.Set,
    +294                tag=self._match_text_seq("TAG"),
    +295                expressions=self._parse_csv(self._parse_id_var),
    +296                unset=True,
    +297            ),
    +298        }
    +299
    +300        STATEMENT_PARSERS = {
    +301            **parser.Parser.STATEMENT_PARSERS,
    +302            TokenType.SHOW: lambda self: self._parse_show(),
    +303        }
     304
    -305            return super()._parse_id_var(any_token=any_token, tokens=tokens)
    +305        SHOW_PARSERS = {
    +306            "PRIMARY KEYS": _show_parser("PRIMARY KEYS"),
    +307            "TERSE PRIMARY KEYS": _show_parser("PRIMARY KEYS"),
    +308        }
    +309
    +310        def _parse_id_var(
    +311            self,
    +312            any_token: bool = True,
    +313            tokens: t.Optional[t.Collection[TokenType]] = None,
    +314        ) -> t.Optional[exp.Expression]:
    +315            if self._match_text_seq("IDENTIFIER", "("):
    +316                identifier = (
    +317                    super()._parse_id_var(any_token=any_token, tokens=tokens)
    +318                    or self._parse_string()
    +319                )
    +320                self._match_r_paren()
    +321                return self.expression(exp.Anonymous, this="IDENTIFIER", expressions=[identifier])
    +322
    +323            return super()._parse_id_var(any_token=any_token, tokens=tokens)
    +324
    +325        def _parse_show_snowflake(self, this: str) -> exp.Show:
    +326            scope = None
    +327            scope_kind = None
    +328
    +329            if self._match(TokenType.IN):
    +330                if self._match_text_seq("ACCOUNT"):
    +331                    scope_kind = "ACCOUNT"
    +332                elif self._match_set(self.DB_CREATABLES):
    +333                    scope_kind = self._prev.text
    +334                    if self._curr:
    +335                        scope = self._parse_table()
    +336                elif self._curr:
    +337                    scope_kind = "TABLE"
    +338                    scope = self._parse_table()
    +339
    +340            return self.expression(exp.Show, this=this, scope=scope, scope_kind=scope_kind)
     
    @@ -1466,7 +1599,7 @@ Default: 3
    FUNCTIONS = - {'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayJoin'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CastToStrType'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'COALESCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'IFNULL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'NVL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Concat'>>, 'CONCAT_WS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ConcatWs'>>, 'COUNT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Count'>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Date'>>, 'DATE_ADD': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.DateAdd'>>, 'DATEDIFF': <function _parse_datediff>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateSub'>>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 'DATE_TRUNC': <function date_trunc_to_time>, 'DATETIME_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeAdd'>>, 'DATETIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeSub'>>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Extract'>>, 'FIRST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.First'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'GREATEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hex'>>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Initcap'>>, 'IS_NAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'ISNAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'JSON_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtract'>>, 'JSON_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtractScalar'>>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'LAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Last'>>, 'LAST_DATE_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDateOfMonth'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Left'>>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log'>>, 'LOG10': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log10'>>, 'LOG2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log2'>>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'MONTHS_BETWEEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MonthsBetween'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.OpenJSON'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParameterizedAgg'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Posexplode'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpExtract'>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_REPLACE': <function _parse_regexp_replace>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Right'>>, 'ROUND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeConcat'>>, 'SAFE_DIVIDE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeDivide'>>, 'SET_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SetAgg'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SortArray'>>, 'SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Split'>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STARTS_WITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STARTSWITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToMap'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'STUFF': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Stuff'>>, 'INSERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'SUBSTRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeAdd'>>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeDiff'>>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeSub'>>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': <function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampAdd'>>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampSub'>>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TRANSFORM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Transform'>>, 'TRIM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTimeStr'>>, 'UPPER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'VAR_MAP': <function parse_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'XOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Xor'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'GLOB': <function Parser.<lambda>>, 'LIKE': <function parse_like>, 'ARRAYAGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_CONSTRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_TO_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayJoin'>>, 'CONVERT_TIMEZONE': <function _parse_convert_timezone>, 'DATEADD': <function Snowflake.Parser.<lambda>>, 'DIV0': <function _div0_to_if>, 'IFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'NULLIFZERO': <function _nullifzero_to_if>, 'OBJECT_CONSTRUCT': <function _parse_object_construct>, 'REGEXP_SUBSTR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpExtract'>>, 'RLIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'SQUARE': <function Snowflake.Parser.<lambda>>, 'TIMEDIFF': <function _parse_datediff>, 'TIMESTAMPDIFF': <function _parse_datediff>, 'TO_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'TO_TIMESTAMP': <function _parse_to_timestamp>, 'TO_VARCHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'ZEROIFNULL': <function _zeroifnull_to_if>} + {'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayJoin'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CastToStrType'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'COALESCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'IFNULL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'NVL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Concat'>>, 'CONCAT_WS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ConcatWs'>>, 'COUNT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Count'>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Date'>>, 'DATE_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateAdd'>>, 'DATEDIFF': <function _parse_datediff>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateSub'>>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 'DATE_TRUNC': <function date_trunc_to_time>, 'DATETIME_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeAdd'>>, 'DATETIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeSub'>>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound 
method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Extract'>>, 'FIRST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.First'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'GREATEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hex'>>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Initcap'>>, 'IS_NAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'ISNAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'JSON_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtract'>>, 'JSON_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtractScalar'>>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'LAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Last'>>, 'LAST_DATE_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDateOfMonth'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Left'>>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log'>>, 'LOG10': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log10'>>, 'LOG2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log2'>>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 
'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'MONTHS_BETWEEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MonthsBetween'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.OpenJSON'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParameterizedAgg'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Posexplode'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpExtract'>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_REPLACE': <function _parse_regexp_replace>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Right'>>, 'ROUND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeConcat'>>, 'SAFE_DIVIDE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeDivide'>>, 'SET_AGG': 
<bound method Func.from_arg_list of <class 'sqlglot.expressions.SetAgg'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SortArray'>>, 'SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Split'>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STARTS_WITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STARTSWITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToMap'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'STUFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'INSERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'SUBSTRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeAdd'>>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeDiff'>>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeSub'>>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': <function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampAdd'>>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampSub'>>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TRANSFORM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Transform'>>, 'TRIM': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTimeStr'>>, 'UPPER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'VAR_MAP': <function parse_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'XOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Xor'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'GLOB': <function Parser.<lambda>>, 'LIKE': <function parse_like>, 'ARRAYAGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_CONSTRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_TO_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayJoin'>>, 'CONVERT_TIMEZONE': <function _parse_convert_timezone>, 'DATEADD': <function Snowflake.Parser.<lambda>>, 'DIV0': <function _div0_to_if>, 'IFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'LISTAGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'NULLIFZERO': <function _nullifzero_to_if>, 'OBJECT_CONSTRUCT': <function _parse_object_construct>, 'REGEXP_SUBSTR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpExtract'>>, 'RLIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'SQUARE': <function Snowflake.Parser.<lambda>>, 'TIMEDIFF': <function _parse_datediff>, 'TIMESTAMPDIFF': <function _parse_datediff>, 'TO_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'TO_TIMESTAMP': <function _parse_to_timestamp>, 'TO_VARCHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'ZEROIFNULL': <function _zeroifnull_to_if>}
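The `+` side of the FUNCTIONS mapping above adds a LISTAGG entry that parses into sqlglot's dialect-agnostic GroupConcat expression, mirroring the exp.GroupConcat -> LISTAGG entry added to the generator TRANSFORMS later in this patch. A minimal round-trip sketch, assuming the sqlglot 18.3.0 behavior documented here; the table t and column c are placeholders:

    import sqlglot
    from sqlglot import exp

    # LISTAGG now parses into the generic GroupConcat node ...
    ast = sqlglot.parse_one("SELECT LISTAGG(c, ',') FROM t", read="snowflake")
    assert ast.find(exp.GroupConcat) is not None

    # ... and the generator should write it back out as LISTAGG.
    print(ast.sql(dialect="snowflake"))  # expected: SELECT LISTAGG(c, ',') FROM t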
    @@ -1505,7 +1638,7 @@ Default: 3
    TIMESTAMPS = - {<TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>} + {<TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>}
    @@ -1539,6 +1672,32 @@ Default: 3 +
    +
    +
    + STATEMENT_PARSERS = + + {<TokenType.ALTER: 'ALTER'>: <function Parser.<lambda>>, <TokenType.BEGIN: 'BEGIN'>: <function Parser.<lambda>>, <TokenType.CACHE: 'CACHE'>: <function Parser.<lambda>>, <TokenType.COMMIT: 'COMMIT'>: <function Parser.<lambda>>, <TokenType.COMMENT: 'COMMENT'>: <function Parser.<lambda>>, <TokenType.CREATE: 'CREATE'>: <function Parser.<lambda>>, <TokenType.DELETE: 'DELETE'>: <function Parser.<lambda>>, <TokenType.DESC: 'DESC'>: <function Parser.<lambda>>, <TokenType.DESCRIBE: 'DESCRIBE'>: <function Parser.<lambda>>, <TokenType.DROP: 'DROP'>: <function Parser.<lambda>>, <TokenType.INSERT: 'INSERT'>: <function Parser.<lambda>>, <TokenType.LOAD: 'LOAD'>: <function Parser.<lambda>>, <TokenType.MERGE: 'MERGE'>: <function Parser.<lambda>>, <TokenType.PIVOT: 'PIVOT'>: <function Parser.<lambda>>, <TokenType.PRAGMA: 'PRAGMA'>: <function Parser.<lambda>>, <TokenType.ROLLBACK: 'ROLLBACK'>: <function Parser.<lambda>>, <TokenType.SET: 'SET'>: <function Parser.<lambda>>, <TokenType.UNCACHE: 'UNCACHE'>: <function Parser.<lambda>>, <TokenType.UPDATE: 'UPDATE'>: <function Parser.<lambda>>, <TokenType.USE: 'USE'>: <function Parser.<lambda>>, <TokenType.SHOW: 'SHOW'>: <function Snowflake.Parser.<lambda>>} + + +
    + + + + +
    +
    +
    + SHOW_PARSERS = + + {'PRIMARY KEYS': <function _show_parser.<locals>._parse>, 'TERSE PRIMARY KEYS': <function _show_parser.<locals>._parse>} + + +
    + + + +
    @@ -1567,7 +1726,7 @@ Default: 3
    SHOW_TRIE: Dict = -{} +{'PRIMARY': {'KEYS': {0: True}}, 'TERSE': {'PRIMARY': {'KEYS': {0: True}}}}
    @@ -1662,7 +1821,6 @@ Default: 3
    JOIN_HINTS
    LAMBDAS
    EXPRESSION_PARSERS
    -
    STATEMENT_PARSERS
    UNARY_PARSERS
    PRIMARY_PARSERS
    PLACEHOLDER_PARSERS
    @@ -1674,7 +1832,6 @@ Default: 3
    FUNCTIONS_WITH_ALIASED_ARGS
    QUERY_MODIFIER_PARSERS
    SET_PARSERS
    -
    SHOW_PARSERS
    TYPE_LITERAL_PARSERS
    MODIFIABLES
    DDL_SELECT_TOKENS
    @@ -1729,39 +1886,41 @@ Default: 3
    -
    307    class Tokenizer(tokens.Tokenizer):
    -308        STRING_ESCAPES = ["\\", "'"]
    -309        HEX_STRINGS = [("x'", "'"), ("X'", "'")]
    -310        RAW_STRINGS = ["$$"]
    -311        COMMENTS = ["--", "//", ("/*", "*/")]
    -312
    -313        KEYWORDS = {
    -314            **tokens.Tokenizer.KEYWORDS,
    -315            "BYTEINT": TokenType.INT,
    -316            "CHAR VARYING": TokenType.VARCHAR,
    -317            "CHARACTER VARYING": TokenType.VARCHAR,
    -318            "EXCLUDE": TokenType.EXCEPT,
    -319            "ILIKE ANY": TokenType.ILIKE_ANY,
    -320            "LIKE ANY": TokenType.LIKE_ANY,
    -321            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
    -322            "MINUS": TokenType.EXCEPT,
    -323            "NCHAR VARYING": TokenType.VARCHAR,
    -324            "PUT": TokenType.COMMAND,
    -325            "RENAME": TokenType.REPLACE,
    -326            "SAMPLE": TokenType.TABLE_SAMPLE,
    -327            "TIMESTAMP_LTZ": TokenType.TIMESTAMPLTZ,
    -328            "TIMESTAMP_NTZ": TokenType.TIMESTAMP,
    -329            "TIMESTAMP_TZ": TokenType.TIMESTAMPTZ,
    -330            "TIMESTAMPNTZ": TokenType.TIMESTAMP,
    -331            "TOP": TokenType.TOP,
    -332        }
    -333
    -334        SINGLE_TOKENS = {
    -335            **tokens.Tokenizer.SINGLE_TOKENS,
    -336            "$": TokenType.PARAMETER,
    -337        }
    -338
    -339        VAR_SINGLE_TOKENS = {"$"}
    +            
    342    class Tokenizer(tokens.Tokenizer):
    +343        STRING_ESCAPES = ["\\", "'"]
    +344        HEX_STRINGS = [("x'", "'"), ("X'", "'")]
    +345        RAW_STRINGS = ["$$"]
    +346        COMMENTS = ["--", "//", ("/*", "*/")]
    +347
    +348        KEYWORDS = {
    +349            **tokens.Tokenizer.KEYWORDS,
    +350            "BYTEINT": TokenType.INT,
    +351            "CHAR VARYING": TokenType.VARCHAR,
    +352            "CHARACTER VARYING": TokenType.VARCHAR,
    +353            "EXCLUDE": TokenType.EXCEPT,
    +354            "ILIKE ANY": TokenType.ILIKE_ANY,
    +355            "LIKE ANY": TokenType.LIKE_ANY,
    +356            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
    +357            "MINUS": TokenType.EXCEPT,
    +358            "NCHAR VARYING": TokenType.VARCHAR,
    +359            "PUT": TokenType.COMMAND,
    +360            "RENAME": TokenType.REPLACE,
    +361            "SAMPLE": TokenType.TABLE_SAMPLE,
    +362            "TIMESTAMP_LTZ": TokenType.TIMESTAMPLTZ,
    +363            "TIMESTAMP_NTZ": TokenType.TIMESTAMP,
    +364            "TIMESTAMP_TZ": TokenType.TIMESTAMPTZ,
    +365            "TIMESTAMPNTZ": TokenType.TIMESTAMP,
    +366            "TOP": TokenType.TOP,
    +367        }
    +368
    +369        SINGLE_TOKENS = {
    +370            **tokens.Tokenizer.SINGLE_TOKENS,
    +371            "$": TokenType.PARAMETER,
    +372        }
    +373
    +374        VAR_SINGLE_TOKENS = {"$"}
    +375
    +376        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SHOW}
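Together with the SHOW_PARSERS, SHOW_TRIE and STATEMENT_PARSERS entries added earlier in this hunk, dropping TokenType.SHOW from COMMANDS means SHOW statements are tokenized and parsed structurally rather than captured as opaque commands. A small sketch of the parse side, assuming sqlglot 18.3.0:

    import sqlglot
    from sqlglot import exp

    # SHOW [TERSE] PRIMARY KEYS is matched via SHOW_PARSERS and should come back
    # as an exp.Show node instead of a generic exp.Command.
    ast = sqlglot.parse_one("SHOW TERSE PRIMARY KEYS", read="snowflake")
    assert isinstance(ast, exp.Show)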
     
    @@ -1852,6 +2011,18 @@ Default: 3 +
    +
    +
    + COMMANDS = +{<TokenType.FETCH: 'FETCH'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.EXECUTE: 'EXECUTE'>} + + +
    + + + +
    Inherited Members
    @@ -1864,7 +2035,6 @@ Default: 3
    QUOTES
    IDENTIFIERS_CAN_START_WITH_DIGIT
    WHITE_SPACE
    -
    COMMANDS
    COMMAND_PREFIX_TOKENS
    NUMERIC_LITERALS
    ENCODE
    @@ -1890,123 +2060,136 @@ Default: 3
    -
    341    class Generator(generator.Generator):
    -342        PARAMETER_TOKEN = "$"
    -343        MATCHED_BY_SOURCE = False
    -344        SINGLE_STRING_INTERVAL = True
    -345        JOIN_HINTS = False
    -346        TABLE_HINTS = False
    -347        QUERY_HINTS = False
    -348
    -349        TRANSFORMS = {
    -350            **generator.Generator.TRANSFORMS,
    -351            exp.Array: inline_array_sql,
    -352            exp.ArrayConcat: rename_func("ARRAY_CAT"),
    -353            exp.ArrayJoin: rename_func("ARRAY_TO_STRING"),
    -354            exp.AtTimeZone: lambda self, e: self.func(
    -355                "CONVERT_TIMEZONE", e.args.get("zone"), e.this
    -356            ),
    -357            exp.DateAdd: lambda self, e: self.func("DATEADD", e.text("unit"), e.expression, e.this),
    -358            exp.DateDiff: lambda self, e: self.func(
    -359                "DATEDIFF", e.text("unit"), e.expression, e.this
    -360            ),
    -361            exp.DateStrToDate: datestrtodate_sql,
    -362            exp.DataType: _datatype_sql,
    -363            exp.DayOfWeek: rename_func("DAYOFWEEK"),
    -364            exp.Extract: rename_func("DATE_PART"),
    -365            exp.If: rename_func("IFF"),
    -366            exp.LogicalAnd: rename_func("BOOLAND_AGG"),
    -367            exp.LogicalOr: rename_func("BOOLOR_AGG"),
    -368            exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
    -369            exp.Max: max_or_greatest,
    -370            exp.Min: min_or_least,
    -371            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    -372            exp.RegexpILike: _regexpilike_sql,
    -373            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
    -374            exp.StarMap: rename_func("OBJECT_CONSTRUCT"),
    -375            exp.StartsWith: rename_func("STARTSWITH"),
    -376            exp.StrPosition: lambda self, e: self.func(
    -377                "POSITION", e.args.get("substr"), e.this, e.args.get("position")
    -378            ),
    -379            exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})",
    -380            exp.Struct: lambda self, e: self.func(
    -381                "OBJECT_CONSTRUCT",
    -382                *(arg for expression in e.expressions for arg in expression.flatten()),
    -383            ),
    -384            exp.Stuff: rename_func("INSERT"),
    -385            exp.TimestampTrunc: timestamptrunc_sql,
    -386            exp.TimeStrToTime: timestrtotime_sql,
    -387            exp.TimeToStr: lambda self, e: self.func(
    -388                "TO_CHAR", exp.cast(e.this, "timestamp"), self.format_time(e)
    -389            ),
    -390            exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})",
    -391            exp.ToChar: lambda self, e: self.function_fallback_sql(e),
    -392            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
    -393            exp.TsOrDsToDate: ts_or_ds_to_date_sql("snowflake"),
    -394            exp.UnixToTime: _unix_to_time_sql,
    -395            exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
    -396            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
    -397        }
    -398
    -399        TYPE_MAPPING = {
    -400            **generator.Generator.TYPE_MAPPING,
    -401            exp.DataType.Type.TIMESTAMP: "TIMESTAMPNTZ",
    -402        }
    -403
    -404        STAR_MAPPING = {
    -405            "except": "EXCLUDE",
    -406            "replace": "RENAME",
    -407        }
    -408
    -409        PROPERTIES_LOCATION = {
    -410            **generator.Generator.PROPERTIES_LOCATION,
    -411            exp.SetProperty: exp.Properties.Location.UNSUPPORTED,
    -412            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    -413        }
    -414
    -415        def regexpextract_sql(self, expression: exp.RegexpExtract) -> str:
    -416            # Other dialects don't support all of the following parameters, so we need to
    -417            # generate default values as necessary to ensure the transpilation is correct
    -418            group = expression.args.get("group")
    -419            parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
    -420            occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
    -421            position = expression.args.get("position") or (occurrence and exp.Literal.number(1))
    -422
    -423            return self.func(
    -424                "REGEXP_SUBSTR",
    -425                expression.this,
    -426                expression.expression,
    -427                position,
    -428                occurrence,
    -429                parameters,
    -430                group,
    -431            )
    -432
    -433        def except_op(self, expression: exp.Except) -> str:
    -434            if not expression.args.get("distinct", False):
    -435                self.unsupported("EXCEPT with All is not supported in Snowflake")
    -436            return super().except_op(expression)
    -437
    -438        def intersect_op(self, expression: exp.Intersect) -> str:
    -439            if not expression.args.get("distinct", False):
    -440                self.unsupported("INTERSECT with All is not supported in Snowflake")
    -441            return super().intersect_op(expression)
    -442
    -443        def describe_sql(self, expression: exp.Describe) -> str:
    -444            # Default to table if kind is unknown
    -445            kind_value = expression.args.get("kind") or "TABLE"
    -446            kind = f" {kind_value}" if kind_value else ""
    -447            this = f" {self.sql(expression, 'this')}"
    -448            return f"DESCRIBE{kind}{this}"
    -449
    -450        def generatedasidentitycolumnconstraint_sql(
    -451            self, expression: exp.GeneratedAsIdentityColumnConstraint
    -452        ) -> str:
    -453            start = expression.args.get("start")
    -454            start = f" START {start}" if start else ""
    -455            increment = expression.args.get("increment")
    -456            increment = f" INCREMENT {increment}" if increment else ""
    -457            return f"AUTOINCREMENT{start}{increment}"
    +            
    378    class Generator(generator.Generator):
    +379        PARAMETER_TOKEN = "$"
    +380        MATCHED_BY_SOURCE = False
    +381        SINGLE_STRING_INTERVAL = True
    +382        JOIN_HINTS = False
    +383        TABLE_HINTS = False
    +384        QUERY_HINTS = False
    +385
    +386        TRANSFORMS = {
    +387            **generator.Generator.TRANSFORMS,
    +388            exp.Array: inline_array_sql,
    +389            exp.ArrayConcat: rename_func("ARRAY_CAT"),
    +390            exp.ArrayJoin: rename_func("ARRAY_TO_STRING"),
    +391            exp.AtTimeZone: lambda self, e: self.func(
    +392                "CONVERT_TIMEZONE", e.args.get("zone"), e.this
    +393            ),
    +394            exp.DateAdd: lambda self, e: self.func("DATEADD", e.text("unit"), e.expression, e.this),
    +395            exp.DateDiff: lambda self, e: self.func(
    +396                "DATEDIFF", e.text("unit"), e.expression, e.this
    +397            ),
    +398            exp.DateStrToDate: datestrtodate_sql,
    +399            exp.DataType: _datatype_sql,
    +400            exp.DayOfWeek: rename_func("DAYOFWEEK"),
    +401            exp.Extract: rename_func("DATE_PART"),
    +402            exp.GroupConcat: rename_func("LISTAGG"),
    +403            exp.If: rename_func("IFF"),
    +404            exp.LogicalAnd: rename_func("BOOLAND_AGG"),
    +405            exp.LogicalOr: rename_func("BOOLOR_AGG"),
    +406            exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
    +407            exp.Max: max_or_greatest,
    +408            exp.Min: min_or_least,
    +409            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    +410            exp.RegexpILike: _regexpilike_sql,
    +411            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
    +412            exp.StarMap: rename_func("OBJECT_CONSTRUCT"),
    +413            exp.StartsWith: rename_func("STARTSWITH"),
    +414            exp.StrPosition: lambda self, e: self.func(
    +415                "POSITION", e.args.get("substr"), e.this, e.args.get("position")
    +416            ),
    +417            exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})",
    +418            exp.Struct: lambda self, e: self.func(
    +419                "OBJECT_CONSTRUCT",
    +420                *(arg for expression in e.expressions for arg in expression.flatten()),
    +421            ),
    +422            exp.Stuff: rename_func("INSERT"),
    +423            exp.TimestampTrunc: timestamptrunc_sql,
    +424            exp.TimeStrToTime: timestrtotime_sql,
    +425            exp.TimeToStr: lambda self, e: self.func(
    +426                "TO_CHAR", exp.cast(e.this, "timestamp"), self.format_time(e)
    +427            ),
    +428            exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})",
    +429            exp.ToChar: lambda self, e: self.function_fallback_sql(e),
    +430            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
    +431            exp.TsOrDsToDate: ts_or_ds_to_date_sql("snowflake"),
    +432            exp.UnixToTime: _unix_to_time_sql,
    +433            exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
    +434            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
    +435        }
    +436
    +437        TYPE_MAPPING = {
    +438            **generator.Generator.TYPE_MAPPING,
    +439            exp.DataType.Type.TIMESTAMP: "TIMESTAMPNTZ",
    +440        }
    +441
    +442        STAR_MAPPING = {
    +443            "except": "EXCLUDE",
    +444            "replace": "RENAME",
    +445        }
    +446
    +447        PROPERTIES_LOCATION = {
    +448            **generator.Generator.PROPERTIES_LOCATION,
    +449            exp.SetProperty: exp.Properties.Location.UNSUPPORTED,
    +450            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    +451        }
    +452
    +453        def show_sql(self, expression: exp.Show) -> str:
    +454            scope = self.sql(expression, "scope")
    +455            scope = f" {scope}" if scope else ""
    +456
    +457            scope_kind = self.sql(expression, "scope_kind")
    +458            if scope_kind:
    +459                scope_kind = f" IN {scope_kind}"
    +460
    +461            return f"SHOW {expression.name}{scope_kind}{scope}"
    +462
    +463        def regexpextract_sql(self, expression: exp.RegexpExtract) -> str:
    +464            # Other dialects don't support all of the following parameters, so we need to
    +465            # generate default values as necessary to ensure the transpilation is correct
    +466            group = expression.args.get("group")
    +467            parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
    +468            occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
    +469            position = expression.args.get("position") or (occurrence and exp.Literal.number(1))
    +470
    +471            return self.func(
    +472                "REGEXP_SUBSTR",
    +473                expression.this,
    +474                expression.expression,
    +475                position,
    +476                occurrence,
    +477                parameters,
    +478                group,
    +479            )
    +480
    +481        def except_op(self, expression: exp.Except) -> str:
    +482            if not expression.args.get("distinct", False):
    +483                self.unsupported("EXCEPT with All is not supported in Snowflake")
    +484            return super().except_op(expression)
    +485
    +486        def intersect_op(self, expression: exp.Intersect) -> str:
    +487            if not expression.args.get("distinct", False):
    +488                self.unsupported("INTERSECT with All is not supported in Snowflake")
    +489            return super().intersect_op(expression)
    +490
    +491        def describe_sql(self, expression: exp.Describe) -> str:
    +492            # Default to table if kind is unknown
    +493            kind_value = expression.args.get("kind") or "TABLE"
    +494            kind = f" {kind_value}" if kind_value else ""
    +495            this = f" {self.sql(expression, 'this')}"
    +496            expressions = self.expressions(expression, flat=True)
    +497            expressions = f" {expressions}" if expressions else ""
    +498            return f"DESCRIBE{kind}{this}{expressions}"
    +499
    +500        def generatedasidentitycolumnconstraint_sql(
    +501            self, expression: exp.GeneratedAsIdentityColumnConstraint
    +502        ) -> str:
    +503            start = expression.args.get("start")
    +504            start = f" START {start}" if start else ""
    +505            increment = expression.args.get("increment")
    +506            increment = f" INCREMENT {increment}" if increment else ""
    +507            return f"AUTOINCREMENT{start}{increment}"
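As a quick illustration of the generator settings listed above (table and column names are placeholders, outputs are indicative): TYPE_MAPPING writes the generic TIMESTAMP type as TIMESTAMPNTZ, while the tokenizer keywords shown earlier map TIMESTAMPNTZ back to the generic type when reading Snowflake.

    import sqlglot

    print(sqlglot.transpile("CREATE TABLE t (x TIMESTAMP)", write="snowflake")[0])
    # expected: CREATE TABLE t (x TIMESTAMPNTZ)

    print(sqlglot.transpile("CREATE TABLE t (x TIMESTAMPNTZ)", read="snowflake", write="postgres")[0])
    # expected: CREATE TABLE t (x TIMESTAMP)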
     
    @@ -2125,7 +2308,7 @@ Default: True
    TRANSFORMS = - {<class 'sqlglot.expressions.DateAdd'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalDayToSecondSpan'>: 'DAY TO SECOND', <class 'sqlglot.expressions.IntervalYearToMonthSpan'>: 'YEAR TO MONTH', <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.Array'>: <function inline_array_sql>, <class 'sqlglot.expressions.ArrayConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ArrayJoin'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.AtTimeZone'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.DateDiff'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.DataType'>: <function _datatype_sql>, <class 'sqlglot.expressions.DayOfWeek'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Extract'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.If'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.LogicalAnd'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.LogicalOr'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Map'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.PartitionedByProperty'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpILike'>: <function _regexpilike_sql>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.StarMap'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.StartsWith'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.StrPosition'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.StrToTime'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.Struct'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.Stuff'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.TimestampTrunc'>: <function timestamptrunc_sql>, <class 'sqlglot.expressions.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.TimeToStr'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.TimeToUnix'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.ToChar'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.Trim'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsToDate'>: <function ts_or_ds_to_date_sql.<locals>._ts_or_ds_to_date_sql>, <class 'sqlglot.expressions.UnixToTime'>: <function _unix_to_time_sql>, <class 'sqlglot.expressions.WeekOfYear'>: <function rename_func.<locals>.<lambda>>} + {<class 'sqlglot.expressions.DateAdd'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalDayToSecondSpan'>: 'DAY TO SECOND', <class 'sqlglot.expressions.IntervalYearToMonthSpan'>: 'YEAR TO MONTH', <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.Array'>: <function inline_array_sql>, <class 'sqlglot.expressions.ArrayConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ArrayJoin'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.AtTimeZone'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.DateDiff'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.DataType'>: <function _datatype_sql>, <class 'sqlglot.expressions.DayOfWeek'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Extract'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.GroupConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.If'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.LogicalAnd'>: <function rename_func.<locals>.<lambda>>, <class 
'sqlglot.expressions.LogicalOr'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Map'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.PartitionedByProperty'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpILike'>: <function _regexpilike_sql>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.StarMap'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.StartsWith'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.StrPosition'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.StrToTime'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.Struct'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.Stuff'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.TimestampTrunc'>: <function timestamptrunc_sql>, <class 'sqlglot.expressions.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.TimeToStr'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.TimeToUnix'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.ToChar'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.Trim'>: <function Snowflake.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsToDate'>: <function ts_or_ds_to_date_sql.<locals>._ts_or_ds_to_date_sql>, <class 'sqlglot.expressions.UnixToTime'>: <function _unix_to_time_sql>, <class 'sqlglot.expressions.WeekOfYear'>: <function rename_func.<locals>.<lambda>>}
    @@ -2171,6 +2354,32 @@ Default: True +
    +
    + +
    + + def + show_sql(self, expression: sqlglot.expressions.Show) -> str: + + + +
    + +
    453        def show_sql(self, expression: exp.Show) -> str:
    +454            scope = self.sql(expression, "scope")
    +455            scope = f" {scope}" if scope else ""
    +456
    +457            scope_kind = self.sql(expression, "scope_kind")
    +458            if scope_kind:
    +459                scope_kind = f" IN {scope_kind}"
    +460
    +461            return f"SHOW {expression.name}{scope_kind}{scope}"
    +
    + + + +
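A sketch of the generation side of show_sql, assuming sqlglot 18.3.0; the scope object name is a placeholder. The scope_kind argument is rendered with an IN prefix and the scope object follows it:

    from sqlglot import exp

    show = exp.Show(this="PRIMARY KEYS", scope_kind="TABLE", scope=exp.to_table("db.schema.tbl"))
    print(show.sql(dialect="snowflake"))
    # expected: SHOW PRIMARY KEYS IN TABLE db.schema.tbl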
    @@ -2183,23 +2392,23 @@ Default: True
    -
    415        def regexpextract_sql(self, expression: exp.RegexpExtract) -> str:
    -416            # Other dialects don't support all of the following parameters, so we need to
    -417            # generate default values as necessary to ensure the transpilation is correct
    -418            group = expression.args.get("group")
    -419            parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
    -420            occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
    -421            position = expression.args.get("position") or (occurrence and exp.Literal.number(1))
    -422
    -423            return self.func(
    -424                "REGEXP_SUBSTR",
    -425                expression.this,
    -426                expression.expression,
    -427                position,
    -428                occurrence,
    -429                parameters,
    -430                group,
    -431            )
    +            
    463        def regexpextract_sql(self, expression: exp.RegexpExtract) -> str:
    +464            # Other dialects don't support all of the following parameters, so we need to
    +465            # generate default values as necessary to ensure the transpilation is correct
    +466            group = expression.args.get("group")
    +467            parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
    +468            occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
    +469            position = expression.args.get("position") or (occurrence and exp.Literal.number(1))
    +470
    +471            return self.func(
    +472                "REGEXP_SUBSTR",
    +473                expression.this,
    +474                expression.expression,
    +475                position,
    +476                occurrence,
    +477                parameters,
    +478                group,
    +479            )
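Since REGEXP_SUBSTR takes its arguments positionally, regexpextract_sql back-fills position, occurrence and parameters whenever a later argument such as the group index is present. A hedged sketch, reading Spark's three-argument REGEXP_EXTRACT (column and table names are placeholders, output is indicative):

    import sqlglot

    print(
        sqlglot.transpile(
            "SELECT REGEXP_EXTRACT(col, 'a(b)c', 1) FROM t",
            read="spark",
            write="snowflake",
        )[0]
    )
    # expected roughly: SELECT REGEXP_SUBSTR(col, 'a(b)c', 1, 1, 'c', 1) FROM t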
     
    @@ -2217,10 +2426,10 @@ Default: True
    -
    433        def except_op(self, expression: exp.Except) -> str:
    -434            if not expression.args.get("distinct", False):
    -435                self.unsupported("EXCEPT with All is not supported in Snowflake")
    -436            return super().except_op(expression)
    +            
    481        def except_op(self, expression: exp.Except) -> str:
    +482            if not expression.args.get("distinct", False):
    +483                self.unsupported("EXCEPT with All is not supported in Snowflake")
    +484            return super().except_op(expression)
     
    @@ -2238,10 +2447,10 @@ Default: True
    -
    438        def intersect_op(self, expression: exp.Intersect) -> str:
    -439            if not expression.args.get("distinct", False):
    -440                self.unsupported("INTERSECT with All is not supported in Snowflake")
    -441            return super().intersect_op(expression)
    +            
    486        def intersect_op(self, expression: exp.Intersect) -> str:
    +487            if not expression.args.get("distinct", False):
    +488                self.unsupported("INTERSECT with All is not supported in Snowflake")
    +489            return super().intersect_op(expression)
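Both except_op and intersect_op only flag the missing ALL variant through Generator.unsupported, which by default logs a warning. A sketch that surfaces the limitation as an exception instead, assuming sqlglot 18.3.0's error levels:

    import sqlglot
    from sqlglot.errors import ErrorLevel, UnsupportedError

    try:
        sqlglot.transpile(
            "SELECT a FROM x EXCEPT ALL SELECT a FROM y",
            write="snowflake",
            unsupported_level=ErrorLevel.RAISE,
        )
    except UnsupportedError as err:
        print(err)  # should mention that EXCEPT with ALL is not supported in Snowflake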
     
    @@ -2259,12 +2468,14 @@ Default: True
    -
    443        def describe_sql(self, expression: exp.Describe) -> str:
    -444            # Default to table if kind is unknown
    -445            kind_value = expression.args.get("kind") or "TABLE"
    -446            kind = f" {kind_value}" if kind_value else ""
    -447            this = f" {self.sql(expression, 'this')}"
    -448            return f"DESCRIBE{kind}{this}"
    +            
    491        def describe_sql(self, expression: exp.Describe) -> str:
    +492            # Default to table if kind is unknown
    +493            kind_value = expression.args.get("kind") or "TABLE"
    +494            kind = f" {kind_value}" if kind_value else ""
    +495            this = f" {self.sql(expression, 'this')}"
    +496            expressions = self.expressions(expression, flat=True)
    +497            expressions = f" {expressions}" if expressions else ""
    +498            return f"DESCRIBE{kind}{this}{expressions}"
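The new expressions handling appends any extra expressions attached to the Describe node, and the kind still defaults to TABLE. A minimal sketch building the node directly; the object name is a placeholder:

    from sqlglot import exp

    stmt = exp.Describe(this=exp.to_table("my_table"))
    print(stmt.sql(dialect="snowflake"))  # expected: DESCRIBE TABLE my_table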
     
    @@ -2282,14 +2493,14 @@ Default: True
    -
    450        def generatedasidentitycolumnconstraint_sql(
    -451            self, expression: exp.GeneratedAsIdentityColumnConstraint
    -452        ) -> str:
    -453            start = expression.args.get("start")
    -454            start = f" START {start}" if start else ""
    -455            increment = expression.args.get("increment")
    -456            increment = f" INCREMENT {increment}" if increment else ""
    -457            return f"AUTOINCREMENT{start}{increment}"
    +            
    500        def generatedasidentitycolumnconstraint_sql(
    +501            self, expression: exp.GeneratedAsIdentityColumnConstraint
    +502        ) -> str:
    +503            start = expression.args.get("start")
    +504            start = f" START {start}" if start else ""
    +505            increment = expression.args.get("increment")
    +506            increment = f" INCREMENT {increment}" if increment else ""
    +507            return f"AUTOINCREMENT{start}{increment}"
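A hedged end-to-end sketch of the AUTOINCREMENT output, assuming T-SQL's IDENTITY(seed, increment) parses into exp.GeneratedAsIdentityColumnConstraint with start and increment set (table and column names are placeholders, output is indicative):

    import sqlglot

    print(
        sqlglot.transpile(
            "CREATE TABLE t (id INT IDENTITY(1, 1))",
            read="tsql",
            write="snowflake",
        )[0]
    )
    # expected roughly: CREATE TABLE t (id INT AUTOINCREMENT START 1 INCREMENT 1)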
     
    @@ -2655,6 +2866,7 @@ Default: True
    intersect_sql
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/spark.html b/docs/sqlglot/dialects/spark.html index 759b08a..2b7a73e 100644 --- a/docs/sqlglot/dialects/spark.html +++ b/docs/sqlglot/dialects/spark.html @@ -1327,6 +1327,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/spark2.html b/docs/sqlglot/dialects/spark2.html index 521f4ba..0c8265f 100644 --- a/docs/sqlglot/dialects/spark2.html +++ b/docs/sqlglot/dialects/spark2.html @@ -238,7 +238,7 @@
    15from sqlglot.helper import seq_get 16 17 - 18def _create_sql(self: Hive.Generator, e: exp.Create) -> str: + 18def _create_sql(self: Spark2.Generator, e: exp.Create) -> str: 19 kind = e.args["kind"] 20 properties = e.args.get("properties") 21 @@ -254,7 +254,7 @@ 31 return create_with_partitions_sql(self, e) 32 33 - 34def _map_sql(self: Hive.Generator, expression: exp.Map) -> str: + 34def _map_sql(self: Spark2.Generator, expression: exp.Map) -> str: 35 keys = expression.args.get("keys") 36 values = expression.args.get("values") 37 @@ -268,7 +268,7 @@ 45 return lambda args: exp.Cast(this=seq_get(args, 0), to=exp.DataType.build(to_type)) 46 47 - 48def _str_to_date(self: Hive.Generator, expression: exp.StrToDate) -> str: + 48def _str_to_date(self: Spark2.Generator, expression: exp.StrToDate) -> str: 49 this = self.sql(expression, "this") 50 time_format = self.format_time(expression) 51 if time_format == Hive.DATE_FORMAT: @@ -276,7 +276,7 @@ 53 return f"TO_DATE({this}, {time_format})" 54 55 - 56def _unix_to_time_sql(self: Hive.Generator, expression: exp.UnixToTime) -> str: + 56def _unix_to_time_sql(self: Spark2.Generator, expression: exp.UnixToTime) -> str: 57 scale = expression.args.get("scale") 58 timestamp = self.sql(expression, "this") 59 if scale is None: @@ -337,7 +337,7 @@ 114 return expression 115 116 -117def _insert_sql(self: Hive.Generator, expression: exp.Insert) -> str: +117def _insert_sql(self: Spark2.Generator, expression: exp.Insert) -> str: 118 if expression.expression.args.get("with"): 119 expression = expression.copy() 120 expression.set("with", expression.expression.args.pop("with")) @@ -1769,6 +1769,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/sqlite.html b/docs/sqlglot/dialects/sqlite.html index 1378abb..f586e85 100644 --- a/docs/sqlglot/dialects/sqlite.html +++ b/docs/sqlglot/dialects/sqlite.html @@ -263,7 +263,7 @@
    19from sqlglot.tokens import TokenType 20 21 - 22def _date_add_sql(self: generator.Generator, expression: exp.DateAdd) -> str: + 22def _date_add_sql(self: SQLite.Generator, expression: exp.DateAdd) -> str: 23 modifier = expression.expression 24 modifier = modifier.name if modifier.is_string else self.sql(modifier) 25 unit = expression.args.get("unit") @@ -1924,6 +1924,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/starrocks.html b/docs/sqlglot/dialects/starrocks.html index 6c0700d..88d9d8d 100644 --- a/docs/sqlglot/dialects/starrocks.html +++ b/docs/sqlglot/dialects/starrocks.html @@ -1256,6 +1256,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/tableau.html b/docs/sqlglot/dialects/tableau.html index c9b8db6..df29988 100644 --- a/docs/sqlglot/dialects/tableau.html +++ b/docs/sqlglot/dialects/tableau.html @@ -1083,6 +1083,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/teradata.html b/docs/sqlglot/dialects/teradata.html index 91928b6..fd9bc5a 100644 --- a/docs/sqlglot/dialects/teradata.html +++ b/docs/sqlglot/dialects/teradata.html @@ -345,120 +345,124 @@
    95 96 STATEMENT_PARSERS = { 97 **parser.Parser.STATEMENT_PARSERS, - 98 TokenType.REPLACE: lambda self: self._parse_create(), - 99 } -100 -101 FUNCTION_PARSERS = { -102 **parser.Parser.FUNCTION_PARSERS, -103 "RANGE_N": lambda self: self._parse_rangen(), -104 "TRANSLATE": lambda self: self._parse_translate(self.STRICT_CAST), -105 } -106 -107 def _parse_translate(self, strict: bool) -> exp.Expression: -108 this = self._parse_conjunction() + 98 TokenType.DATABASE: lambda self: self.expression( + 99 exp.Use, this=self._parse_table(schema=False) +100 ), +101 TokenType.REPLACE: lambda self: self._parse_create(), +102 } +103 +104 FUNCTION_PARSERS = { +105 **parser.Parser.FUNCTION_PARSERS, +106 "RANGE_N": lambda self: self._parse_rangen(), +107 "TRANSLATE": lambda self: self._parse_translate(self.STRICT_CAST), +108 } 109 -110 if not self._match(TokenType.USING): -111 self.raise_error("Expected USING in TRANSLATE") +110 def _parse_translate(self, strict: bool) -> exp.Expression: +111 this = self._parse_conjunction() 112 -113 if self._match_texts(self.CHARSET_TRANSLATORS): -114 charset_split = self._prev.text.split("_TO_") -115 to = self.expression(exp.CharacterSet, this=charset_split[1]) -116 else: -117 self.raise_error("Expected a character set translator after USING in TRANSLATE") -118 -119 return self.expression(exp.Cast if strict else exp.TryCast, this=this, to=to) -120 -121 # FROM before SET in Teradata UPDATE syntax -122 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause -123 def _parse_update(self) -> exp.Update: -124 return self.expression( -125 exp.Update, -126 **{ # type: ignore -127 "this": self._parse_table(alias_tokens=self.UPDATE_ALIAS_TOKENS), -128 "from": self._parse_from(joins=True), -129 "expressions": self._match(TokenType.SET) -130 and self._parse_csv(self._parse_equality), -131 "where": self._parse_where(), -132 }, -133 ) -134 -135 def _parse_rangen(self): -136 this = self._parse_id_var() -137 self._match(TokenType.BETWEEN) -138 -139 expressions = self._parse_csv(self._parse_conjunction) -140 each = self._match_text_seq("EACH") and self._parse_conjunction() +113 if not self._match(TokenType.USING): +114 self.raise_error("Expected USING in TRANSLATE") +115 +116 if self._match_texts(self.CHARSET_TRANSLATORS): +117 charset_split = self._prev.text.split("_TO_") +118 to = self.expression(exp.CharacterSet, this=charset_split[1]) +119 else: +120 self.raise_error("Expected a character set translator after USING in TRANSLATE") +121 +122 return self.expression(exp.Cast if strict else exp.TryCast, this=this, to=to) +123 +124 # FROM before SET in Teradata UPDATE syntax +125 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause +126 def _parse_update(self) -> exp.Update: +127 return self.expression( +128 exp.Update, +129 **{ # type: ignore +130 "this": self._parse_table(alias_tokens=self.UPDATE_ALIAS_TOKENS), +131 "from": self._parse_from(joins=True), +132 "expressions": self._match(TokenType.SET) +133 and self._parse_csv(self._parse_equality), +134 "where": self._parse_where(), +135 }, +136 ) +137 +138 def _parse_rangen(self): +139 this = self._parse_id_var() +140 self._match(TokenType.BETWEEN) 141 -142 return self.expression(exp.RangeN, this=this, expressions=expressions, each=each) -143 -144 class Generator(generator.Generator): -145 
JOIN_HINTS = False -146 TABLE_HINTS = False -147 QUERY_HINTS = False -148 -149 TYPE_MAPPING = { -150 **generator.Generator.TYPE_MAPPING, -151 exp.DataType.Type.GEOMETRY: "ST_GEOMETRY", -152 } -153 -154 PROPERTIES_LOCATION = { -155 **generator.Generator.PROPERTIES_LOCATION, -156 exp.OnCommitProperty: exp.Properties.Location.POST_INDEX, -157 exp.PartitionedByProperty: exp.Properties.Location.POST_EXPRESSION, -158 exp.StabilityProperty: exp.Properties.Location.POST_CREATE, -159 } -160 -161 TRANSFORMS = { -162 **generator.Generator.TRANSFORMS, -163 exp.Max: max_or_greatest, -164 exp.Min: min_or_least, -165 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), -166 exp.StrToDate: lambda self, e: f"CAST({self.sql(e, 'this')} AS DATE FORMAT {self.format_time(e)})", -167 exp.ToChar: lambda self, e: self.function_fallback_sql(e), -168 } -169 -170 def partitionedbyproperty_sql(self, expression: exp.PartitionedByProperty) -> str: -171 return f"PARTITION BY {self.sql(expression, 'this')}" -172 -173 # FROM before SET in Teradata UPDATE syntax -174 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause -175 def update_sql(self, expression: exp.Update) -> str: -176 this = self.sql(expression, "this") -177 from_sql = self.sql(expression, "from") -178 set_sql = self.expressions(expression, flat=True) -179 where_sql = self.sql(expression, "where") -180 sql = f"UPDATE {this}{from_sql} SET {set_sql}{where_sql}" -181 return self.prepend_ctes(expression, sql) -182 -183 def mod_sql(self, expression: exp.Mod) -> str: -184 return self.binary(expression, "MOD") -185 -186 def datatype_sql(self, expression: exp.DataType) -> str: -187 type_sql = super().datatype_sql(expression) -188 prefix_sql = expression.args.get("prefix") -189 return f"SYSUDTLIB.{type_sql}" if prefix_sql else type_sql -190 -191 def rangen_sql(self, expression: exp.RangeN) -> str: -192 this = self.sql(expression, "this") -193 expressions_sql = self.expressions(expression) -194 each_sql = self.sql(expression, "each") -195 each_sql = f" EACH {each_sql}" if each_sql else "" -196 -197 return f"RANGE_N({this} BETWEEN {expressions_sql}{each_sql})" -198 -199 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: -200 kind = self.sql(expression, "kind").upper() -201 if kind == "TABLE" and locations.get(exp.Properties.Location.POST_NAME): -202 this_name = self.sql(expression.this, "this") -203 this_properties = self.properties( -204 exp.Properties(expressions=locations[exp.Properties.Location.POST_NAME]), -205 wrapped=False, -206 prefix=",", -207 ) -208 this_schema = self.schema_columns_sql(expression.this) -209 return f"{this_name}{this_properties}{self.sep()}{this_schema}" -210 -211 return super().createable_sql(expression, locations) +142 expressions = self._parse_csv(self._parse_conjunction) +143 each = self._match_text_seq("EACH") and self._parse_conjunction() +144 +145 return self.expression(exp.RangeN, this=this, expressions=expressions, each=each) +146 +147 class Generator(generator.Generator): +148 JOIN_HINTS = False +149 TABLE_HINTS = False +150 QUERY_HINTS = False +151 +152 TYPE_MAPPING = { +153 **generator.Generator.TYPE_MAPPING, +154 exp.DataType.Type.GEOMETRY: "ST_GEOMETRY", +155 } +156 +157 PROPERTIES_LOCATION = { +158 **generator.Generator.PROPERTIES_LOCATION, +159 exp.OnCommitProperty: exp.Properties.Location.POST_INDEX, +160 exp.PartitionedByProperty: 
exp.Properties.Location.POST_EXPRESSION, +161 exp.StabilityProperty: exp.Properties.Location.POST_CREATE, +162 } +163 +164 TRANSFORMS = { +165 **generator.Generator.TRANSFORMS, +166 exp.Max: max_or_greatest, +167 exp.Min: min_or_least, +168 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), +169 exp.StrToDate: lambda self, e: f"CAST({self.sql(e, 'this')} AS DATE FORMAT {self.format_time(e)})", +170 exp.ToChar: lambda self, e: self.function_fallback_sql(e), +171 exp.Use: lambda self, e: f"DATABASE {self.sql(e, 'this')}", +172 } +173 +174 def partitionedbyproperty_sql(self, expression: exp.PartitionedByProperty) -> str: +175 return f"PARTITION BY {self.sql(expression, 'this')}" +176 +177 # FROM before SET in Teradata UPDATE syntax +178 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause +179 def update_sql(self, expression: exp.Update) -> str: +180 this = self.sql(expression, "this") +181 from_sql = self.sql(expression, "from") +182 set_sql = self.expressions(expression, flat=True) +183 where_sql = self.sql(expression, "where") +184 sql = f"UPDATE {this}{from_sql} SET {set_sql}{where_sql}" +185 return self.prepend_ctes(expression, sql) +186 +187 def mod_sql(self, expression: exp.Mod) -> str: +188 return self.binary(expression, "MOD") +189 +190 def datatype_sql(self, expression: exp.DataType) -> str: +191 type_sql = super().datatype_sql(expression) +192 prefix_sql = expression.args.get("prefix") +193 return f"SYSUDTLIB.{type_sql}" if prefix_sql else type_sql +194 +195 def rangen_sql(self, expression: exp.RangeN) -> str: +196 this = self.sql(expression, "this") +197 expressions_sql = self.expressions(expression) +198 each_sql = self.sql(expression, "each") +199 each_sql = f" EACH {each_sql}" if each_sql else "" +200 +201 return f"RANGE_N({this} BETWEEN {expressions_sql}{each_sql})" +202 +203 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: +204 kind = self.sql(expression, "kind").upper() +205 if kind == "TABLE" and locations.get(exp.Properties.Location.POST_NAME): +206 this_name = self.sql(expression.this, "this") +207 this_properties = self.properties( +208 exp.Properties(expressions=locations[exp.Properties.Location.POST_NAME]), +209 wrapped=False, +210 prefix=",", +211 ) +212 this_schema = self.schema_columns_sql(expression.this) +213 return f"{this_name}{this_properties}{self.sep()}{this_schema}" +214 +215 return super().createable_sql(expression, locations)
    @@ -562,120 +566,124 @@ 96 97 STATEMENT_PARSERS = { 98 **parser.Parser.STATEMENT_PARSERS, - 99 TokenType.REPLACE: lambda self: self._parse_create(), -100 } -101 -102 FUNCTION_PARSERS = { -103 **parser.Parser.FUNCTION_PARSERS, -104 "RANGE_N": lambda self: self._parse_rangen(), -105 "TRANSLATE": lambda self: self._parse_translate(self.STRICT_CAST), -106 } -107 -108 def _parse_translate(self, strict: bool) -> exp.Expression: -109 this = self._parse_conjunction() + 99 TokenType.DATABASE: lambda self: self.expression( +100 exp.Use, this=self._parse_table(schema=False) +101 ), +102 TokenType.REPLACE: lambda self: self._parse_create(), +103 } +104 +105 FUNCTION_PARSERS = { +106 **parser.Parser.FUNCTION_PARSERS, +107 "RANGE_N": lambda self: self._parse_rangen(), +108 "TRANSLATE": lambda self: self._parse_translate(self.STRICT_CAST), +109 } 110 -111 if not self._match(TokenType.USING): -112 self.raise_error("Expected USING in TRANSLATE") +111 def _parse_translate(self, strict: bool) -> exp.Expression: +112 this = self._parse_conjunction() 113 -114 if self._match_texts(self.CHARSET_TRANSLATORS): -115 charset_split = self._prev.text.split("_TO_") -116 to = self.expression(exp.CharacterSet, this=charset_split[1]) -117 else: -118 self.raise_error("Expected a character set translator after USING in TRANSLATE") -119 -120 return self.expression(exp.Cast if strict else exp.TryCast, this=this, to=to) -121 -122 # FROM before SET in Teradata UPDATE syntax -123 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause -124 def _parse_update(self) -> exp.Update: -125 return self.expression( -126 exp.Update, -127 **{ # type: ignore -128 "this": self._parse_table(alias_tokens=self.UPDATE_ALIAS_TOKENS), -129 "from": self._parse_from(joins=True), -130 "expressions": self._match(TokenType.SET) -131 and self._parse_csv(self._parse_equality), -132 "where": self._parse_where(), -133 }, -134 ) -135 -136 def _parse_rangen(self): -137 this = self._parse_id_var() -138 self._match(TokenType.BETWEEN) -139 -140 expressions = self._parse_csv(self._parse_conjunction) -141 each = self._match_text_seq("EACH") and self._parse_conjunction() +114 if not self._match(TokenType.USING): +115 self.raise_error("Expected USING in TRANSLATE") +116 +117 if self._match_texts(self.CHARSET_TRANSLATORS): +118 charset_split = self._prev.text.split("_TO_") +119 to = self.expression(exp.CharacterSet, this=charset_split[1]) +120 else: +121 self.raise_error("Expected a character set translator after USING in TRANSLATE") +122 +123 return self.expression(exp.Cast if strict else exp.TryCast, this=this, to=to) +124 +125 # FROM before SET in Teradata UPDATE syntax +126 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause +127 def _parse_update(self) -> exp.Update: +128 return self.expression( +129 exp.Update, +130 **{ # type: ignore +131 "this": self._parse_table(alias_tokens=self.UPDATE_ALIAS_TOKENS), +132 "from": self._parse_from(joins=True), +133 "expressions": self._match(TokenType.SET) +134 and self._parse_csv(self._parse_equality), +135 "where": self._parse_where(), +136 }, +137 ) +138 +139 def _parse_rangen(self): +140 this = self._parse_id_var() +141 self._match(TokenType.BETWEEN) 142 -143 return self.expression(exp.RangeN, this=this, expressions=expressions, each=each) -144 -145 class 
Generator(generator.Generator): -146 JOIN_HINTS = False -147 TABLE_HINTS = False -148 QUERY_HINTS = False -149 -150 TYPE_MAPPING = { -151 **generator.Generator.TYPE_MAPPING, -152 exp.DataType.Type.GEOMETRY: "ST_GEOMETRY", -153 } -154 -155 PROPERTIES_LOCATION = { -156 **generator.Generator.PROPERTIES_LOCATION, -157 exp.OnCommitProperty: exp.Properties.Location.POST_INDEX, -158 exp.PartitionedByProperty: exp.Properties.Location.POST_EXPRESSION, -159 exp.StabilityProperty: exp.Properties.Location.POST_CREATE, -160 } -161 -162 TRANSFORMS = { -163 **generator.Generator.TRANSFORMS, -164 exp.Max: max_or_greatest, -165 exp.Min: min_or_least, -166 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), -167 exp.StrToDate: lambda self, e: f"CAST({self.sql(e, 'this')} AS DATE FORMAT {self.format_time(e)})", -168 exp.ToChar: lambda self, e: self.function_fallback_sql(e), -169 } -170 -171 def partitionedbyproperty_sql(self, expression: exp.PartitionedByProperty) -> str: -172 return f"PARTITION BY {self.sql(expression, 'this')}" -173 -174 # FROM before SET in Teradata UPDATE syntax -175 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause -176 def update_sql(self, expression: exp.Update) -> str: -177 this = self.sql(expression, "this") -178 from_sql = self.sql(expression, "from") -179 set_sql = self.expressions(expression, flat=True) -180 where_sql = self.sql(expression, "where") -181 sql = f"UPDATE {this}{from_sql} SET {set_sql}{where_sql}" -182 return self.prepend_ctes(expression, sql) -183 -184 def mod_sql(self, expression: exp.Mod) -> str: -185 return self.binary(expression, "MOD") -186 -187 def datatype_sql(self, expression: exp.DataType) -> str: -188 type_sql = super().datatype_sql(expression) -189 prefix_sql = expression.args.get("prefix") -190 return f"SYSUDTLIB.{type_sql}" if prefix_sql else type_sql -191 -192 def rangen_sql(self, expression: exp.RangeN) -> str: -193 this = self.sql(expression, "this") -194 expressions_sql = self.expressions(expression) -195 each_sql = self.sql(expression, "each") -196 each_sql = f" EACH {each_sql}" if each_sql else "" -197 -198 return f"RANGE_N({this} BETWEEN {expressions_sql}{each_sql})" -199 -200 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: -201 kind = self.sql(expression, "kind").upper() -202 if kind == "TABLE" and locations.get(exp.Properties.Location.POST_NAME): -203 this_name = self.sql(expression.this, "this") -204 this_properties = self.properties( -205 exp.Properties(expressions=locations[exp.Properties.Location.POST_NAME]), -206 wrapped=False, -207 prefix=",", -208 ) -209 this_schema = self.schema_columns_sql(expression.this) -210 return f"{this_name}{this_properties}{self.sep()}{this_schema}" -211 -212 return super().createable_sql(expression, locations) +143 expressions = self._parse_csv(self._parse_conjunction) +144 each = self._match_text_seq("EACH") and self._parse_conjunction() +145 +146 return self.expression(exp.RangeN, this=this, expressions=expressions, each=each) +147 +148 class Generator(generator.Generator): +149 JOIN_HINTS = False +150 TABLE_HINTS = False +151 QUERY_HINTS = False +152 +153 TYPE_MAPPING = { +154 **generator.Generator.TYPE_MAPPING, +155 exp.DataType.Type.GEOMETRY: "ST_GEOMETRY", +156 } +157 +158 PROPERTIES_LOCATION = { +159 **generator.Generator.PROPERTIES_LOCATION, +160 exp.OnCommitProperty: exp.Properties.Location.POST_INDEX, +161 
exp.PartitionedByProperty: exp.Properties.Location.POST_EXPRESSION, +162 exp.StabilityProperty: exp.Properties.Location.POST_CREATE, +163 } +164 +165 TRANSFORMS = { +166 **generator.Generator.TRANSFORMS, +167 exp.Max: max_or_greatest, +168 exp.Min: min_or_least, +169 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), +170 exp.StrToDate: lambda self, e: f"CAST({self.sql(e, 'this')} AS DATE FORMAT {self.format_time(e)})", +171 exp.ToChar: lambda self, e: self.function_fallback_sql(e), +172 exp.Use: lambda self, e: f"DATABASE {self.sql(e, 'this')}", +173 } +174 +175 def partitionedbyproperty_sql(self, expression: exp.PartitionedByProperty) -> str: +176 return f"PARTITION BY {self.sql(expression, 'this')}" +177 +178 # FROM before SET in Teradata UPDATE syntax +179 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause +180 def update_sql(self, expression: exp.Update) -> str: +181 this = self.sql(expression, "this") +182 from_sql = self.sql(expression, "from") +183 set_sql = self.expressions(expression, flat=True) +184 where_sql = self.sql(expression, "where") +185 sql = f"UPDATE {this}{from_sql} SET {set_sql}{where_sql}" +186 return self.prepend_ctes(expression, sql) +187 +188 def mod_sql(self, expression: exp.Mod) -> str: +189 return self.binary(expression, "MOD") +190 +191 def datatype_sql(self, expression: exp.DataType) -> str: +192 type_sql = super().datatype_sql(expression) +193 prefix_sql = expression.args.get("prefix") +194 return f"SYSUDTLIB.{type_sql}" if prefix_sql else type_sql +195 +196 def rangen_sql(self, expression: exp.RangeN) -> str: +197 this = self.sql(expression, "this") +198 expressions_sql = self.expressions(expression) +199 each_sql = self.sql(expression, "each") +200 each_sql = f" EACH {each_sql}" if each_sql else "" +201 +202 return f"RANGE_N({this} BETWEEN {expressions_sql}{each_sql})" +203 +204 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: +205 kind = self.sql(expression, "kind").upper() +206 if kind == "TABLE" and locations.get(exp.Properties.Location.POST_NAME): +207 this_name = self.sql(expression.this, "this") +208 this_properties = self.properties( +209 exp.Properties(expressions=locations[exp.Properties.Location.POST_NAME]), +210 wrapped=False, +211 prefix=",", +212 ) +213 this_schema = self.schema_columns_sql(expression.this) +214 return f"{this_name}{this_properties}{self.sep()}{this_schema}" +215 +216 return super().createable_sql(expression, locations)
    @@ -1088,51 +1096,54 @@ 96 97 STATEMENT_PARSERS = { 98 **parser.Parser.STATEMENT_PARSERS, - 99 TokenType.REPLACE: lambda self: self._parse_create(), -100 } -101 -102 FUNCTION_PARSERS = { -103 **parser.Parser.FUNCTION_PARSERS, -104 "RANGE_N": lambda self: self._parse_rangen(), -105 "TRANSLATE": lambda self: self._parse_translate(self.STRICT_CAST), -106 } -107 -108 def _parse_translate(self, strict: bool) -> exp.Expression: -109 this = self._parse_conjunction() + 99 TokenType.DATABASE: lambda self: self.expression( +100 exp.Use, this=self._parse_table(schema=False) +101 ), +102 TokenType.REPLACE: lambda self: self._parse_create(), +103 } +104 +105 FUNCTION_PARSERS = { +106 **parser.Parser.FUNCTION_PARSERS, +107 "RANGE_N": lambda self: self._parse_rangen(), +108 "TRANSLATE": lambda self: self._parse_translate(self.STRICT_CAST), +109 } 110 -111 if not self._match(TokenType.USING): -112 self.raise_error("Expected USING in TRANSLATE") +111 def _parse_translate(self, strict: bool) -> exp.Expression: +112 this = self._parse_conjunction() 113 -114 if self._match_texts(self.CHARSET_TRANSLATORS): -115 charset_split = self._prev.text.split("_TO_") -116 to = self.expression(exp.CharacterSet, this=charset_split[1]) -117 else: -118 self.raise_error("Expected a character set translator after USING in TRANSLATE") -119 -120 return self.expression(exp.Cast if strict else exp.TryCast, this=this, to=to) -121 -122 # FROM before SET in Teradata UPDATE syntax -123 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause -124 def _parse_update(self) -> exp.Update: -125 return self.expression( -126 exp.Update, -127 **{ # type: ignore -128 "this": self._parse_table(alias_tokens=self.UPDATE_ALIAS_TOKENS), -129 "from": self._parse_from(joins=True), -130 "expressions": self._match(TokenType.SET) -131 and self._parse_csv(self._parse_equality), -132 "where": self._parse_where(), -133 }, -134 ) -135 -136 def _parse_rangen(self): -137 this = self._parse_id_var() -138 self._match(TokenType.BETWEEN) -139 -140 expressions = self._parse_csv(self._parse_conjunction) -141 each = self._match_text_seq("EACH") and self._parse_conjunction() +114 if not self._match(TokenType.USING): +115 self.raise_error("Expected USING in TRANSLATE") +116 +117 if self._match_texts(self.CHARSET_TRANSLATORS): +118 charset_split = self._prev.text.split("_TO_") +119 to = self.expression(exp.CharacterSet, this=charset_split[1]) +120 else: +121 self.raise_error("Expected a character set translator after USING in TRANSLATE") +122 +123 return self.expression(exp.Cast if strict else exp.TryCast, this=this, to=to) +124 +125 # FROM before SET in Teradata UPDATE syntax +126 # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause +127 def _parse_update(self) -> exp.Update: +128 return self.expression( +129 exp.Update, +130 **{ # type: ignore +131 "this": self._parse_table(alias_tokens=self.UPDATE_ALIAS_TOKENS), +132 "from": self._parse_from(joins=True), +133 "expressions": self._match(TokenType.SET) +134 and self._parse_csv(self._parse_equality), +135 "where": self._parse_where(), +136 }, +137 ) +138 +139 def _parse_rangen(self): +140 this = self._parse_id_var() +141 self._match(TokenType.BETWEEN) 142 -143 return self.expression(exp.RangeN, this=this, expressions=expressions, each=each) +143 expressions = 
self._parse_csv(self._parse_conjunction) +144 each = self._match_text_seq("EACH") and self._parse_conjunction() +145 +146 return self.expression(exp.RangeN, this=this, expressions=expressions, each=each)
    @@ -1157,7 +1168,7 @@ Default: 3
    CHARSET_TRANSLATORS = - {'KANJISJIS_TO_UNICODE', 'LATIN_TO_GRAPHIC', 'GRAPHIC_TO_KANJISJIS', 'UNICODE_TO_LATIN', 'LOCALE_TO_UNICODE', 'UNICODE_TO_UNICODE_Fullwidth', 'LATIN_TO_KANJISJIS', 'UNICODE_TO_KANJI1_SBC', 'UNICODE_TO_UNICODE_FoldSpace', 'UNICODE_TO_UNICODE_NFKD', 'UNICODE_TO_UNICODE_Halfwidth', 'UNICODE_TO_KANJI1_KanjiEBCDIC', 'KANJISJIS_TO_LATIN', 'KANJI1_SBC_TO_UNICODE', 'UNICODE_TO_GRAPHIC_PadGraphic', 'UNICODE_TO_GRAPHIC', 'UNICODE_TO_UNICODE_NFC', 'UNICODE_TO_UNICODE_NFKC', 'UNICODE_TO_LOCALE', 'GRAPHIC_TO_UNICODE_PadSpace', 'KANJI1_KanjiEBCDIC_TO_UNICODE', 'KANJI1_KanjiEUC_TO_UNICODE', 'UNICODE_TO_GRAPHIC_VarGraphic', 'GRAPHIC_TO_UNICODE', 'LATIN_TO_UNICODE', 'GRAPHIC_TO_LATIN', 'KANJI1_KANJISJIS_TO_UNICODE', 'KANJISJIS_TO_GRAPHIC', 'UNICODE_TO_KANJI1_KANJISJIS', 'UNICODE_TO_KANJISJIS', 'UNICODE_TO_KANJI1_KanjiEUC', 'UNICODE_TO_UNICODE_NFD'} + {'UNICODE_TO_GRAPHIC', 'UNICODE_TO_KANJI1_KANJISJIS', 'UNICODE_TO_UNICODE_NFC', 'KANJI1_KANJISJIS_TO_UNICODE', 'UNICODE_TO_GRAPHIC_VarGraphic', 'GRAPHIC_TO_KANJISJIS', 'LATIN_TO_UNICODE', 'UNICODE_TO_KANJI1_KanjiEBCDIC', 'UNICODE_TO_KANJISJIS', 'UNICODE_TO_LOCALE', 'KANJISJIS_TO_UNICODE', 'UNICODE_TO_UNICODE_NFD', 'UNICODE_TO_UNICODE_NFKD', 'KANJISJIS_TO_LATIN', 'KANJI1_KanjiEBCDIC_TO_UNICODE', 'UNICODE_TO_GRAPHIC_PadGraphic', 'UNICODE_TO_UNICODE_Fullwidth', 'LOCALE_TO_UNICODE', 'LATIN_TO_KANJISJIS', 'KANJI1_SBC_TO_UNICODE', 'UNICODE_TO_LATIN', 'UNICODE_TO_UNICODE_FoldSpace', 'GRAPHIC_TO_UNICODE_PadSpace', 'LATIN_TO_GRAPHIC', 'KANJISJIS_TO_GRAPHIC', 'UNICODE_TO_KANJI1_SBC', 'GRAPHIC_TO_UNICODE', 'GRAPHIC_TO_LATIN', 'UNICODE_TO_KANJI1_KanjiEUC', 'UNICODE_TO_UNICODE_Halfwidth', 'UNICODE_TO_UNICODE_NFKC', 'KANJI1_KanjiEUC_TO_UNICODE'}
    @@ -1170,7 +1181,7 @@ Default: 3
    FUNC_TOKENS = - {<TokenType.IMAGE: 'IMAGE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SUPER: 'SUPER'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.UINT: 'UINT'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.RANGE: 'RANGE'>, <TokenType.LIKE: 'LIKE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.JSONB: 'JSONB'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.INDEX: 'INDEX'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.BINARY: 'BINARY'>, <TokenType.DATE: 'DATE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.GLOB: 'GLOB'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.FILTER: 'FILTER'>, <TokenType.UUID: 'UUID'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.JSON: 'JSON'>, <TokenType.VAR: 'VAR'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.UINT128: 'UINT128'>, <TokenType.INET: 'INET'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.RLIKE: 'RLIKE'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.MAP: 'MAP'>, <TokenType.INT128: 'INT128'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.ANY: 'ANY'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.IDENTIFIER: 'IDENTIFIER'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.TEXT: 'TEXT'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ROW: 'ROW'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.SOME: 'SOME'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.MERGE: 'MERGE'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.ILIKE: 'ILIKE'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.XOR: 'XOR'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.INSERT: 'INSERT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.INT: 'INT'>, <TokenType.INT256: 'INT256'>, <TokenType.NULL: 'NULL'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.ALL: 'ALL'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.YEAR: 'YEAR'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.ENUM: 'ENUM'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.BIT: 'BIT'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.UNNEST: 'UNNEST'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.ROWVERSION: 
'ROWVERSION'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.LEFT: 'LEFT'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.TIME: 'TIME'>, <TokenType.XML: 'XML'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.TSRANGE: 'TSRANGE'>} + {<TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.IDENTIFIER: 'IDENTIFIER'>, <TokenType.ROW: 'ROW'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.DATE: 'DATE'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.ENUM: 'ENUM'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.MERGE: 'MERGE'>, <TokenType.INDEX: 'INDEX'>, <TokenType.UINT256: 'UINT256'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.RLIKE: 'RLIKE'>, <TokenType.XML: 'XML'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.TEXT: 'TEXT'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.ANY: 'ANY'>, <TokenType.VAR: 'VAR'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.TIME: 'TIME'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.INSERT: 'INSERT'>, <TokenType.INT: 'INT'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.SUPER: 'SUPER'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.GLOB: 'GLOB'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, <TokenType.BINARY: 'BINARY'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.BIT: 'BIT'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.INT256: 'INT256'>, <TokenType.UNNEST: 'UNNEST'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.INET: 'INET'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.LIKE: 'LIKE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.INT128: 'INT128'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.UUID: 'UUID'>, <TokenType.LEFT: 'LEFT'>, <TokenType.UINT: 'UINT'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.FIRST: 'FIRST'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.SOME: 'SOME'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.NULL: 'NULL'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.FILTER: 'FILTER'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.ALL: 'ALL'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.YEAR: 'YEAR'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, 
<TokenType.MONEY: 'MONEY'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.JSONB: 'JSONB'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.MAP: 'MAP'>, <TokenType.ILIKE: 'ILIKE'>, <TokenType.JSON: 'JSON'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.XOR: 'XOR'>, <TokenType.RANGE: 'RANGE'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.ENUM8: 'ENUM8'>}
    @@ -1183,7 +1194,7 @@ Default: 3
    STATEMENT_PARSERS = - {<TokenType.ALTER: 'ALTER'>: <function Parser.<lambda>>, <TokenType.BEGIN: 'BEGIN'>: <function Parser.<lambda>>, <TokenType.CACHE: 'CACHE'>: <function Parser.<lambda>>, <TokenType.COMMIT: 'COMMIT'>: <function Parser.<lambda>>, <TokenType.COMMENT: 'COMMENT'>: <function Parser.<lambda>>, <TokenType.CREATE: 'CREATE'>: <function Parser.<lambda>>, <TokenType.DELETE: 'DELETE'>: <function Parser.<lambda>>, <TokenType.DESC: 'DESC'>: <function Parser.<lambda>>, <TokenType.DESCRIBE: 'DESCRIBE'>: <function Parser.<lambda>>, <TokenType.DROP: 'DROP'>: <function Parser.<lambda>>, <TokenType.INSERT: 'INSERT'>: <function Parser.<lambda>>, <TokenType.LOAD: 'LOAD'>: <function Parser.<lambda>>, <TokenType.MERGE: 'MERGE'>: <function Parser.<lambda>>, <TokenType.PIVOT: 'PIVOT'>: <function Parser.<lambda>>, <TokenType.PRAGMA: 'PRAGMA'>: <function Parser.<lambda>>, <TokenType.ROLLBACK: 'ROLLBACK'>: <function Parser.<lambda>>, <TokenType.SET: 'SET'>: <function Parser.<lambda>>, <TokenType.UNCACHE: 'UNCACHE'>: <function Parser.<lambda>>, <TokenType.UPDATE: 'UPDATE'>: <function Parser.<lambda>>, <TokenType.USE: 'USE'>: <function Parser.<lambda>>, <TokenType.REPLACE: 'REPLACE'>: <function Teradata.Parser.<lambda>>} + {<TokenType.ALTER: 'ALTER'>: <function Parser.<lambda>>, <TokenType.BEGIN: 'BEGIN'>: <function Parser.<lambda>>, <TokenType.CACHE: 'CACHE'>: <function Parser.<lambda>>, <TokenType.COMMIT: 'COMMIT'>: <function Parser.<lambda>>, <TokenType.COMMENT: 'COMMENT'>: <function Parser.<lambda>>, <TokenType.CREATE: 'CREATE'>: <function Parser.<lambda>>, <TokenType.DELETE: 'DELETE'>: <function Parser.<lambda>>, <TokenType.DESC: 'DESC'>: <function Parser.<lambda>>, <TokenType.DESCRIBE: 'DESCRIBE'>: <function Parser.<lambda>>, <TokenType.DROP: 'DROP'>: <function Parser.<lambda>>, <TokenType.INSERT: 'INSERT'>: <function Parser.<lambda>>, <TokenType.LOAD: 'LOAD'>: <function Parser.<lambda>>, <TokenType.MERGE: 'MERGE'>: <function Parser.<lambda>>, <TokenType.PIVOT: 'PIVOT'>: <function Parser.<lambda>>, <TokenType.PRAGMA: 'PRAGMA'>: <function Parser.<lambda>>, <TokenType.ROLLBACK: 'ROLLBACK'>: <function Parser.<lambda>>, <TokenType.SET: 'SET'>: <function Parser.<lambda>>, <TokenType.UNCACHE: 'UNCACHE'>: <function Parser.<lambda>>, <TokenType.UPDATE: 'UPDATE'>: <function Parser.<lambda>>, <TokenType.USE: 'USE'>: <function Parser.<lambda>>, <TokenType.DATABASE: 'DATABASE'>: <function Teradata.Parser.<lambda>>, <TokenType.REPLACE: 'REPLACE'>: <function Teradata.Parser.<lambda>>}
    @@ -1388,74 +1399,75 @@ Default: 3 -
    145    class Generator(generator.Generator):
    -146        JOIN_HINTS = False
    -147        TABLE_HINTS = False
    -148        QUERY_HINTS = False
    -149
    -150        TYPE_MAPPING = {
    -151            **generator.Generator.TYPE_MAPPING,
    -152            exp.DataType.Type.GEOMETRY: "ST_GEOMETRY",
    -153        }
    -154
    -155        PROPERTIES_LOCATION = {
    -156            **generator.Generator.PROPERTIES_LOCATION,
    -157            exp.OnCommitProperty: exp.Properties.Location.POST_INDEX,
    -158            exp.PartitionedByProperty: exp.Properties.Location.POST_EXPRESSION,
    -159            exp.StabilityProperty: exp.Properties.Location.POST_CREATE,
    -160        }
    -161
    -162        TRANSFORMS = {
    -163            **generator.Generator.TRANSFORMS,
    -164            exp.Max: max_or_greatest,
    -165            exp.Min: min_or_least,
    -166            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
    -167            exp.StrToDate: lambda self, e: f"CAST({self.sql(e, 'this')} AS DATE FORMAT {self.format_time(e)})",
    -168            exp.ToChar: lambda self, e: self.function_fallback_sql(e),
    -169        }
    -170
    -171        def partitionedbyproperty_sql(self, expression: exp.PartitionedByProperty) -> str:
    -172            return f"PARTITION BY {self.sql(expression, 'this')}"
    -173
    -174        # FROM before SET in Teradata UPDATE syntax
    -175        # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause
    -176        def update_sql(self, expression: exp.Update) -> str:
    -177            this = self.sql(expression, "this")
    -178            from_sql = self.sql(expression, "from")
    -179            set_sql = self.expressions(expression, flat=True)
    -180            where_sql = self.sql(expression, "where")
    -181            sql = f"UPDATE {this}{from_sql} SET {set_sql}{where_sql}"
    -182            return self.prepend_ctes(expression, sql)
    -183
    -184        def mod_sql(self, expression: exp.Mod) -> str:
    -185            return self.binary(expression, "MOD")
    -186
    -187        def datatype_sql(self, expression: exp.DataType) -> str:
    -188            type_sql = super().datatype_sql(expression)
    -189            prefix_sql = expression.args.get("prefix")
    -190            return f"SYSUDTLIB.{type_sql}" if prefix_sql else type_sql
    -191
    -192        def rangen_sql(self, expression: exp.RangeN) -> str:
    -193            this = self.sql(expression, "this")
    -194            expressions_sql = self.expressions(expression)
    -195            each_sql = self.sql(expression, "each")
    -196            each_sql = f" EACH {each_sql}" if each_sql else ""
    -197
    -198            return f"RANGE_N({this} BETWEEN {expressions_sql}{each_sql})"
    -199
    -200        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    -201            kind = self.sql(expression, "kind").upper()
    -202            if kind == "TABLE" and locations.get(exp.Properties.Location.POST_NAME):
    -203                this_name = self.sql(expression.this, "this")
    -204                this_properties = self.properties(
    -205                    exp.Properties(expressions=locations[exp.Properties.Location.POST_NAME]),
    -206                    wrapped=False,
    -207                    prefix=",",
    -208                )
    -209                this_schema = self.schema_columns_sql(expression.this)
    -210                return f"{this_name}{this_properties}{self.sep()}{this_schema}"
    -211
    -212            return super().createable_sql(expression, locations)
    +            
    148    class Generator(generator.Generator):
    +149        JOIN_HINTS = False
    +150        TABLE_HINTS = False
    +151        QUERY_HINTS = False
    +152
    +153        TYPE_MAPPING = {
    +154            **generator.Generator.TYPE_MAPPING,
    +155            exp.DataType.Type.GEOMETRY: "ST_GEOMETRY",
    +156        }
    +157
    +158        PROPERTIES_LOCATION = {
    +159            **generator.Generator.PROPERTIES_LOCATION,
    +160            exp.OnCommitProperty: exp.Properties.Location.POST_INDEX,
    +161            exp.PartitionedByProperty: exp.Properties.Location.POST_EXPRESSION,
    +162            exp.StabilityProperty: exp.Properties.Location.POST_CREATE,
    +163        }
    +164
    +165        TRANSFORMS = {
    +166            **generator.Generator.TRANSFORMS,
    +167            exp.Max: max_or_greatest,
    +168            exp.Min: min_or_least,
    +169            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
    +170            exp.StrToDate: lambda self, e: f"CAST({self.sql(e, 'this')} AS DATE FORMAT {self.format_time(e)})",
    +171            exp.ToChar: lambda self, e: self.function_fallback_sql(e),
    +172            exp.Use: lambda self, e: f"DATABASE {self.sql(e, 'this')}",
    +173        }
    +174
    +175        def partitionedbyproperty_sql(self, expression: exp.PartitionedByProperty) -> str:
    +176            return f"PARTITION BY {self.sql(expression, 'this')}"
    +177
    +178        # FROM before SET in Teradata UPDATE syntax
    +179        # https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-SQL-Data-Manipulation-Language-17.20/Statement-Syntax/UPDATE/UPDATE-Syntax-Basic-Form-FROM-Clause
    +180        def update_sql(self, expression: exp.Update) -> str:
    +181            this = self.sql(expression, "this")
    +182            from_sql = self.sql(expression, "from")
    +183            set_sql = self.expressions(expression, flat=True)
    +184            where_sql = self.sql(expression, "where")
    +185            sql = f"UPDATE {this}{from_sql} SET {set_sql}{where_sql}"
    +186            return self.prepend_ctes(expression, sql)
    +187
    +188        def mod_sql(self, expression: exp.Mod) -> str:
    +189            return self.binary(expression, "MOD")
    +190
    +191        def datatype_sql(self, expression: exp.DataType) -> str:
    +192            type_sql = super().datatype_sql(expression)
    +193            prefix_sql = expression.args.get("prefix")
    +194            return f"SYSUDTLIB.{type_sql}" if prefix_sql else type_sql
    +195
    +196        def rangen_sql(self, expression: exp.RangeN) -> str:
    +197            this = self.sql(expression, "this")
    +198            expressions_sql = self.expressions(expression)
    +199            each_sql = self.sql(expression, "each")
    +200            each_sql = f" EACH {each_sql}" if each_sql else ""
    +201
    +202            return f"RANGE_N({this} BETWEEN {expressions_sql}{each_sql})"
    +203
    +204        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    +205            kind = self.sql(expression, "kind").upper()
    +206            if kind == "TABLE" and locations.get(exp.Properties.Location.POST_NAME):
    +207                this_name = self.sql(expression.this, "this")
    +208                this_properties = self.properties(
    +209                    exp.Properties(expressions=locations[exp.Properties.Location.POST_NAME]),
    +210                    wrapped=False,
    +211                    prefix=",",
    +212                )
    +213                this_schema = self.schema_columns_sql(expression.this)
    +214                return f"{this_name}{this_properties}{self.sep()}{this_schema}"
    +215
    +216            return super().createable_sql(expression, locations)
     
    @@ -1564,7 +1576,7 @@ Default: True
    TRANSFORMS = - {<class 'sqlglot.expressions.DateAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalDayToSecondSpan'>: 'DAY TO SECOND', <class 'sqlglot.expressions.IntervalYearToMonthSpan'>: 'YEAR TO MONTH', <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.StrToDate'>: <function Teradata.Generator.<lambda>>, <class 'sqlglot.expressions.ToChar'>: <function Teradata.Generator.<lambda>>} + {<class 'sqlglot.expressions.DateAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalDayToSecondSpan'>: 'DAY TO SECOND', <class 'sqlglot.expressions.IntervalYearToMonthSpan'>: 'YEAR TO MONTH', <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.StrToDate'>: <function Teradata.Generator.<lambda>>, <class 'sqlglot.expressions.ToChar'>: <function Teradata.Generator.<lambda>>, <class 'sqlglot.expressions.Use'>: <function Teradata.Generator.<lambda>>}
    @@ -1584,8 +1596,8 @@ Default: True
    -
    171        def partitionedbyproperty_sql(self, expression: exp.PartitionedByProperty) -> str:
    -172            return f"PARTITION BY {self.sql(expression, 'this')}"
    +            
    175        def partitionedbyproperty_sql(self, expression: exp.PartitionedByProperty) -> str:
    +176            return f"PARTITION BY {self.sql(expression, 'this')}"
     
    @@ -1603,13 +1615,13 @@ Default: True
    -
    176        def update_sql(self, expression: exp.Update) -> str:
    -177            this = self.sql(expression, "this")
    -178            from_sql = self.sql(expression, "from")
    -179            set_sql = self.expressions(expression, flat=True)
    -180            where_sql = self.sql(expression, "where")
    -181            sql = f"UPDATE {this}{from_sql} SET {set_sql}{where_sql}"
    -182            return self.prepend_ctes(expression, sql)
    +            
    180        def update_sql(self, expression: exp.Update) -> str:
    +181            this = self.sql(expression, "this")
    +182            from_sql = self.sql(expression, "from")
    +183            set_sql = self.expressions(expression, flat=True)
    +184            where_sql = self.sql(expression, "where")
    +185            sql = f"UPDATE {this}{from_sql} SET {set_sql}{where_sql}"
    +186            return self.prepend_ctes(expression, sql)
     
    @@ -1627,8 +1639,8 @@ Default: True
    -
    184        def mod_sql(self, expression: exp.Mod) -> str:
    -185            return self.binary(expression, "MOD")
    +            
    188        def mod_sql(self, expression: exp.Mod) -> str:
    +189            return self.binary(expression, "MOD")
     
    @@ -1646,10 +1658,10 @@ Default: True
    -
    187        def datatype_sql(self, expression: exp.DataType) -> str:
    -188            type_sql = super().datatype_sql(expression)
    -189            prefix_sql = expression.args.get("prefix")
    -190            return f"SYSUDTLIB.{type_sql}" if prefix_sql else type_sql
    +            
    191        def datatype_sql(self, expression: exp.DataType) -> str:
    +192            type_sql = super().datatype_sql(expression)
    +193            prefix_sql = expression.args.get("prefix")
    +194            return f"SYSUDTLIB.{type_sql}" if prefix_sql else type_sql
     
    @@ -1667,13 +1679,13 @@ Default: True
    -
    192        def rangen_sql(self, expression: exp.RangeN) -> str:
    -193            this = self.sql(expression, "this")
    -194            expressions_sql = self.expressions(expression)
    -195            each_sql = self.sql(expression, "each")
    -196            each_sql = f" EACH {each_sql}" if each_sql else ""
    -197
    -198            return f"RANGE_N({this} BETWEEN {expressions_sql}{each_sql})"
    +            
    196        def rangen_sql(self, expression: exp.RangeN) -> str:
    +197            this = self.sql(expression, "this")
    +198            expressions_sql = self.expressions(expression)
    +199            each_sql = self.sql(expression, "each")
    +200            each_sql = f" EACH {each_sql}" if each_sql else ""
    +201
    +202            return f"RANGE_N({this} BETWEEN {expressions_sql}{each_sql})"
     
    @@ -1691,19 +1703,19 @@ Default: True
    -
    200        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    -201            kind = self.sql(expression, "kind").upper()
    -202            if kind == "TABLE" and locations.get(exp.Properties.Location.POST_NAME):
    -203                this_name = self.sql(expression.this, "this")
    -204                this_properties = self.properties(
    -205                    exp.Properties(expressions=locations[exp.Properties.Location.POST_NAME]),
    -206                    wrapped=False,
    -207                    prefix=",",
    -208                )
    -209                this_schema = self.schema_columns_sql(expression.this)
    -210                return f"{this_name}{this_properties}{self.sep()}{this_schema}"
    -211
    -212            return super().createable_sql(expression, locations)
    +            
    204        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    +205            kind = self.sql(expression, "kind").upper()
    +206            if kind == "TABLE" and locations.get(exp.Properties.Location.POST_NAME):
    +207                this_name = self.sql(expression.this, "this")
    +208                this_properties = self.properties(
    +209                    exp.Properties(expressions=locations[exp.Properties.Location.POST_NAME]),
    +210                    wrapped=False,
    +211                    prefix=",",
    +212                )
    +213                this_schema = self.schema_columns_sql(expression.this)
    +214                return f"{this_name}{this_properties}{self.sep()}{this_schema}"
    +215
    +216            return super().createable_sql(expression, locations)
     
    @@ -2064,6 +2076,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
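The teradata.html hunks above document two related additions: the parser now maps TokenType.DATABASE onto an exp.Use node, and the generator renders exp.Use back with the DATABASE keyword. A minimal sketch of the behaviour those entries imply, assuming the public sqlglot.transpile API; the printed results are the outputs the parser/generator entries suggest, not captured from a run:

import sqlglot

# Teradata's DATABASE statement is parsed into an exp.Use node,
# so it can be re-emitted as USE in dialects that use that keyword.
print(sqlglot.transpile("DATABASE sales_db", read="teradata", write="snowflake"))
# expected: ['USE sales_db']

# In the other direction, the new exp.Use transform renders the
# statement with Teradata's DATABASE keyword.
print(sqlglot.transpile("USE sales_db", write="teradata"))
# expected: ['DATABASE sales_db']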
    diff --git a/docs/sqlglot/dialects/trino.html b/docs/sqlglot/dialects/trino.html index fdf11c4..c81ae0d 100644 --- a/docs/sqlglot/dialects/trino.html +++ b/docs/sqlglot/dialects/trino.html @@ -915,6 +915,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
+objectidentifier_sql
    onconflict_sql
    returning_sql
    rowformatdelimitedproperty_sql
    diff --git a/docs/sqlglot/dialects/tsql.html b/docs/sqlglot/dialects/tsql.html index ca659c7..18ef35e 100644 --- a/docs/sqlglot/dialects/tsql.html +++ b/docs/sqlglot/dialects/tsql.html @@ -472,13 +472,13 @@
    132 133 134def generate_date_delta_with_unit_sql( -135 self: generator.Generator, expression: exp.DateAdd | exp.DateDiff +135 self: TSQL.Generator, expression: exp.DateAdd | exp.DateDiff 136) -> str: 137 func = "DATEADD" if isinstance(expression, exp.DateAdd) else "DATEDIFF" 138 return self.func(func, expression.text("unit"), expression.expression, expression.this) 139 140 -141def _format_sql(self: generator.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: +141def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: 142 fmt = ( 143 expression.args["format"] 144 if isinstance(expression, exp.NumberToStr) @@ -492,7 +492,7 @@ 152 return self.func("FORMAT", expression.this, fmt, expression.args.get("culture")) 153 154 -155def _string_agg_sql(self: generator.Generator, expression: exp.GroupConcat) -> str: +155def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str: 156 expression = expression.copy() 157 158 this = expression.this @@ -927,156 +927,157 @@ 587 588 TYPE_MAPPING = { 589 **generator.Generator.TYPE_MAPPING, -590 exp.DataType.Type.DECIMAL: "NUMERIC", -591 exp.DataType.Type.DATETIME: "DATETIME2", -592 exp.DataType.Type.INT: "INTEGER", -593 exp.DataType.Type.TIMESTAMP: "DATETIME2", -594 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", -595 exp.DataType.Type.VARIANT: "SQL_VARIANT", -596 } -597 -598 TRANSFORMS = { -599 **generator.Generator.TRANSFORMS, -600 exp.AnyValue: any_value_to_max_sql, -601 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", -602 exp.DateAdd: generate_date_delta_with_unit_sql, -603 exp.DateDiff: generate_date_delta_with_unit_sql, -604 exp.CurrentDate: rename_func("GETDATE"), -605 exp.CurrentTimestamp: rename_func("GETDATE"), -606 exp.Extract: rename_func("DATEPART"), -607 exp.GroupConcat: _string_agg_sql, -608 exp.If: rename_func("IIF"), -609 exp.Max: max_or_greatest, -610 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), -611 exp.Min: min_or_least, -612 exp.NumberToStr: _format_sql, -613 exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]), -614 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), -615 exp.SHA2: lambda self, e: self.func( -616 "HASHBYTES", -617 exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), -618 e.this, -619 ), -620 exp.TemporaryProperty: lambda self, e: "", -621 exp.TimeStrToTime: timestrtotime_sql, -622 exp.TimeToStr: _format_sql, -623 } -624 -625 TRANSFORMS.pop(exp.ReturnsProperty) -626 -627 PROPERTIES_LOCATION = { -628 **generator.Generator.PROPERTIES_LOCATION, -629 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -630 } -631 -632 LIMIT_FETCH = "FETCH" -633 -634 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: -635 sql = self.sql(expression, "this") -636 properties = expression.args.get("properties") -637 -638 if sql[:1] != "#" and any( -639 isinstance(prop, exp.TemporaryProperty) -640 for prop in (properties.expressions if properties else []) -641 ): -642 sql = f"#{sql}" -643 -644 return sql -645 -646 def create_sql(self, expression: exp.Create) -> str: -647 expression = expression.copy() -648 kind = self.sql(expression, "kind").upper() -649 exists = expression.args.pop("exists", None) -650 sql = super().create_sql(expression) -651 -652 if exists: -653 table = expression.find(exp.Table) -654 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) -655 if kind == "SCHEMA": -656 sql = f"""IF NOT EXISTS 
(SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC('{sql}')"""
-657                elif kind == "TABLE":
-658                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = {identifier}) EXEC('{sql}')"""
-659                elif kind == "INDEX":
-660                    index = self.sql(exp.Literal.string(expression.this.text("this")))
-661                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC('{sql}')"""
-662            elif expression.args.get("replace"):
-663                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)
-664
-665            return sql
-666
-667        def offset_sql(self, expression: exp.Offset) -> str:
-668            return f"{super().offset_sql(expression)} ROWS"
-669
-670        def version_sql(self, expression: exp.Version) -> str:
-671            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
-672            this = f"FOR {name}"
-673            expr = expression.expression
-674            kind = expression.text("kind")
-675            if kind in ("FROM", "BETWEEN"):
-676                args = expr.expressions
-677                sep = "TO" if kind == "FROM" else "AND"
-678                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
-679            else:
-680                expr_sql = self.sql(expr)
-681
-682            expr_sql = f" {expr_sql}" if expr_sql else ""
-683            return f"{this} {kind}{expr_sql}"
-684
-685        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
-686            table = expression.args.get("table")
-687            table = f"{table} " if table else ""
-688            return f"RETURNS {table}{self.sql(expression, 'this')}"
-689
-690        def returning_sql(self, expression: exp.Returning) -> str:
-691            into = self.sql(expression, "into")
-692            into = self.seg(f"INTO {into}") if into else ""
-693            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"
-694
-695        def transaction_sql(self, expression: exp.Transaction) -> str:
-696            this = self.sql(expression, "this")
-697            this = f" {this}" if this else ""
-698            mark = self.sql(expression, "mark")
-699            mark = f" WITH MARK {mark}" if mark else ""
-700            return f"BEGIN TRANSACTION{this}{mark}"
-701
-702        def commit_sql(self, expression: exp.Commit) -> str:
-703            this = self.sql(expression, "this")
-704            this = f" {this}" if this else ""
-705            durability = expression.args.get("durability")
-706            durability = (
-707                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
-708                if durability is not None
-709                else ""
-710            )
-711            return f"COMMIT TRANSACTION{this}{durability}"
-712
-713        def rollback_sql(self, expression: exp.Rollback) -> str:
-714            this = self.sql(expression, "this")
-715            this = f" {this}" if this else ""
-716            return f"ROLLBACK TRANSACTION{this}"
-717
-718        def identifier_sql(self, expression: exp.Identifier) -> str:
-719            identifier = super().identifier_sql(expression)
-720
-721            if expression.args.get("global"):
-722                identifier = f"##{identifier}"
-723            elif expression.args.get("temporary"):
-724                identifier = f"#{identifier}"
-725
-726            return identifier
-727
-728        def constraint_sql(self, expression: exp.Constraint) -> str:
-729            this = self.sql(expression, "this")
-730            expressions = self.expressions(expression, flat=True, sep=" ")
-731            return f"CONSTRAINT {this} {expressions}"
-732
-733        # https://learn.microsoft.com/en-us/answers/questions/448821/create-table-in-sql-server
-734        def generatedasidentitycolumnconstraint_sql(
-735            self, expression: exp.GeneratedAsIdentityColumnConstraint
-736        ) -> str:
-737            start = self.sql(expression, "start") or "1"
-738            increment = self.sql(expression, "increment") or "1"
-739            return f"IDENTITY({start}, {increment})"
+590            exp.DataType.Type.BOOLEAN: "BIT",
+591            exp.DataType.Type.DECIMAL: "NUMERIC",
+592            exp.DataType.Type.DATETIME: "DATETIME2",
+593            exp.DataType.Type.INT: "INTEGER",
+594            exp.DataType.Type.TIMESTAMP: "DATETIME2",
+595            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
+596            exp.DataType.Type.VARIANT: "SQL_VARIANT",
+597        }
+598
+599        TRANSFORMS = {
+600            **generator.Generator.TRANSFORMS,
+601            exp.AnyValue: any_value_to_max_sql,
+602            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
+603            exp.DateAdd: generate_date_delta_with_unit_sql,
+604            exp.DateDiff: generate_date_delta_with_unit_sql,
+605            exp.CurrentDate: rename_func("GETDATE"),
+606            exp.CurrentTimestamp: rename_func("GETDATE"),
+607            exp.Extract: rename_func("DATEPART"),
+608            exp.GroupConcat: _string_agg_sql,
+609            exp.If: rename_func("IIF"),
+610            exp.Max: max_or_greatest,
+611            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
+612            exp.Min: min_or_least,
+613            exp.NumberToStr: _format_sql,
+614            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
+615            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
+616            exp.SHA2: lambda self, e: self.func(
+617                "HASHBYTES",
+618                exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"),
+619                e.this,
+620            ),
+621            exp.TemporaryProperty: lambda self, e: "",
+622            exp.TimeStrToTime: timestrtotime_sql,
+623            exp.TimeToStr: _format_sql,
+624        }
+625
+626        TRANSFORMS.pop(exp.ReturnsProperty)
+627
+628        PROPERTIES_LOCATION = {
+629            **generator.Generator.PROPERTIES_LOCATION,
+630            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
+631        }
+632
+633        LIMIT_FETCH = "FETCH"
+634
+635        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
+636            sql = self.sql(expression, "this")
+637            properties = expression.args.get("properties")
+638
+639            if sql[:1] != "#" and any(
+640                isinstance(prop, exp.TemporaryProperty)
+641                for prop in (properties.expressions if properties else [])
+642            ):
+643                sql = f"#{sql}"
+644
+645            return sql
+646
+647        def create_sql(self, expression: exp.Create) -> str:
+648            expression = expression.copy()
+649            kind = self.sql(expression, "kind").upper()
+650            exists = expression.args.pop("exists", None)
+651            sql = super().create_sql(expression)
+652
+653            if exists:
+654                table = expression.find(exp.Table)
+655                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
+656                if kind == "SCHEMA":
+657                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC('{sql}')"""
+658                elif kind == "TABLE":
+659                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = {identifier}) EXEC('{sql}')"""
+660                elif kind == "INDEX":
+661                    index = self.sql(exp.Literal.string(expression.this.text("this")))
+662                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC('{sql}')"""
+663            elif expression.args.get("replace"):
+664                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)
+665
+666            return sql
+667
+668        def offset_sql(self, expression: exp.Offset) -> str:
+669            return f"{super().offset_sql(expression)} ROWS"
+670
+671        def version_sql(self, expression: exp.Version) -> str:
+672            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
+673            this = f"FOR {name}"
+674            expr = expression.expression
+675            kind = expression.text("kind")
+676            if kind in ("FROM", "BETWEEN"):
+677                args = expr.expressions
+678                sep = "TO" if kind == "FROM" else "AND"
+679                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
+680            else:
+681                expr_sql = self.sql(expr)
+682
+683            expr_sql = f" {expr_sql}" if expr_sql else ""
+684            return f"{this} {kind}{expr_sql}"
+685
+686        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
+687            table = expression.args.get("table")
+688            table = f"{table} " if table else ""
+689            return f"RETURNS {table}{self.sql(expression, 'this')}"
+690
+691        def returning_sql(self, expression: exp.Returning) -> str:
+692            into = self.sql(expression, "into")
+693            into = self.seg(f"INTO {into}") if into else ""
+694            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"
+695
+696        def transaction_sql(self, expression: exp.Transaction) -> str:
+697            this = self.sql(expression, "this")
+698            this = f" {this}" if this else ""
+699            mark = self.sql(expression, "mark")
+700            mark = f" WITH MARK {mark}" if mark else ""
+701            return f"BEGIN TRANSACTION{this}{mark}"
+702
+703        def commit_sql(self, expression: exp.Commit) -> str:
+704            this = self.sql(expression, "this")
+705            this = f" {this}" if this else ""
+706            durability = expression.args.get("durability")
+707            durability = (
+708                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
+709                if durability is not None
+710                else ""
+711            )
+712            return f"COMMIT TRANSACTION{this}{durability}"
+713
+714        def rollback_sql(self, expression: exp.Rollback) -> str:
+715            this = self.sql(expression, "this")
+716            this = f" {this}" if this else ""
+717            return f"ROLLBACK TRANSACTION{this}"
+718
+719        def identifier_sql(self, expression: exp.Identifier) -> str:
+720            identifier = super().identifier_sql(expression)
+721
+722            if expression.args.get("global"):
+723                identifier = f"##{identifier}"
+724            elif expression.args.get("temporary"):
+725                identifier = f"#{identifier}"
+726
+727            return identifier
+728
+729        def constraint_sql(self, expression: exp.Constraint) -> str:
+730            this = self.sql(expression, "this")
+731            expressions = self.expressions(expression, flat=True, sep=" ")
+732            return f"CONSTRAINT {this} {expressions}"
+733
+734        # https://learn.microsoft.com/en-us/answers/questions/448821/create-table-in-sql-server
+735        def generatedasidentitycolumnconstraint_sql(
+736            self, expression: exp.GeneratedAsIdentityColumnConstraint
+737        ) -> str:
+738            start = self.sql(expression, "start") or "1"
+739            increment = self.sql(expression, "increment") or "1"
+740            return f"IDENTITY({start}, {increment})"
    @@ -1147,14 +1148,14 @@
    def - generate_date_delta_with_unit_sql( self: sqlglot.generator.Generator, expression: sqlglot.expressions.DateAdd | sqlglot.expressions.DateDiff) -> str: + generate_date_delta_with_unit_sql( self: sqlglot.dialects.tsql.TSQL.Generator, expression: sqlglot.expressions.DateAdd | sqlglot.expressions.DateDiff) -> str:
    135def generate_date_delta_with_unit_sql(
    -136    self: generator.Generator, expression: exp.DateAdd | exp.DateDiff
    +136    self: TSQL.Generator, expression: exp.DateAdd | exp.DateDiff
     137) -> str:
     138    func = "DATEADD" if isinstance(expression, exp.DateAdd) else "DATEDIFF"
     139    return self.func(func, expression.text("unit"), expression.expression, expression.this)
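The hunk above only narrows the type annotation; the DATEADD/DATEDIFF generation itself is unchanged. As a quick, hedged illustration of what generate_date_delta_with_unit_sql produces (the sample query and dialect names are illustrative, and exact output may vary between sqlglot versions):

    import sqlglot

    # Interval arithmetic parsed from MySQL should come out as T-SQL DATEADD,
    # with the unit name rendered as the first argument.
    print(sqlglot.transpile("SELECT DATE_ADD(d, INTERVAL 1 DAY) FROM t", read="mysql", write="tsql")[0])
    # Expected, roughly: SELECT DATEADD(DAY, 1, d) FROM t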
    @@ -1562,156 +1563,157 @@
     588
     589        TYPE_MAPPING = {
     590            **generator.Generator.TYPE_MAPPING,
    -591            exp.DataType.Type.DECIMAL: "NUMERIC",
    -592            exp.DataType.Type.DATETIME: "DATETIME2",
    -593            exp.DataType.Type.INT: "INTEGER",
    -594            exp.DataType.Type.TIMESTAMP: "DATETIME2",
    -595            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
    -596            exp.DataType.Type.VARIANT: "SQL_VARIANT",
    -597        }
    -598
    -599        TRANSFORMS = {
    -600            **generator.Generator.TRANSFORMS,
    -601            exp.AnyValue: any_value_to_max_sql,
    -602            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
    -603            exp.DateAdd: generate_date_delta_with_unit_sql,
    -604            exp.DateDiff: generate_date_delta_with_unit_sql,
    -605            exp.CurrentDate: rename_func("GETDATE"),
    -606            exp.CurrentTimestamp: rename_func("GETDATE"),
    -607            exp.Extract: rename_func("DATEPART"),
    -608            exp.GroupConcat: _string_agg_sql,
    -609            exp.If: rename_func("IIF"),
    -610            exp.Max: max_or_greatest,
    -611            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
    -612            exp.Min: min_or_least,
    -613            exp.NumberToStr: _format_sql,
    -614            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
    -615            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
    -616            exp.SHA2: lambda self, e: self.func(
    -617                "HASHBYTES",
    -618                exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"),
    -619                e.this,
    -620            ),
    -621            exp.TemporaryProperty: lambda self, e: "",
    -622            exp.TimeStrToTime: timestrtotime_sql,
    -623            exp.TimeToStr: _format_sql,
    -624        }
    -625
    -626        TRANSFORMS.pop(exp.ReturnsProperty)
    -627
    -628        PROPERTIES_LOCATION = {
    -629            **generator.Generator.PROPERTIES_LOCATION,
    -630            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    -631        }
    -632
    -633        LIMIT_FETCH = "FETCH"
    -634
    -635        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    -636            sql = self.sql(expression, "this")
    -637            properties = expression.args.get("properties")
    -638
    -639            if sql[:1] != "#" and any(
    -640                isinstance(prop, exp.TemporaryProperty)
    -641                for prop in (properties.expressions if properties else [])
    -642            ):
    -643                sql = f"#{sql}"
    -644
    -645            return sql
    -646
    -647        def create_sql(self, expression: exp.Create) -> str:
    -648            expression = expression.copy()
    -649            kind = self.sql(expression, "kind").upper()
    -650            exists = expression.args.pop("exists", None)
    -651            sql = super().create_sql(expression)
    -652
    -653            if exists:
    -654                table = expression.find(exp.Table)
    -655                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
    -656                if kind == "SCHEMA":
    -657                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC('{sql}')"""
    -658                elif kind == "TABLE":
    -659                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = {identifier}) EXEC('{sql}')"""
    -660                elif kind == "INDEX":
    -661                    index = self.sql(exp.Literal.string(expression.this.text("this")))
    -662                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC('{sql}')"""
    -663            elif expression.args.get("replace"):
    -664                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)
    -665
    -666            return sql
    -667
    -668        def offset_sql(self, expression: exp.Offset) -> str:
    -669            return f"{super().offset_sql(expression)} ROWS"
    -670
    -671        def version_sql(self, expression: exp.Version) -> str:
    -672            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    -673            this = f"FOR {name}"
    -674            expr = expression.expression
    -675            kind = expression.text("kind")
    -676            if kind in ("FROM", "BETWEEN"):
    -677                args = expr.expressions
    -678                sep = "TO" if kind == "FROM" else "AND"
    -679                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
    -680            else:
    -681                expr_sql = self.sql(expr)
    -682
    -683            expr_sql = f" {expr_sql}" if expr_sql else ""
    -684            return f"{this} {kind}{expr_sql}"
    -685
    -686        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
    -687            table = expression.args.get("table")
    -688            table = f"{table} " if table else ""
    -689            return f"RETURNS {table}{self.sql(expression, 'this')}"
    -690
    -691        def returning_sql(self, expression: exp.Returning) -> str:
    -692            into = self.sql(expression, "into")
    -693            into = self.seg(f"INTO {into}") if into else ""
    -694            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"
    -695
    -696        def transaction_sql(self, expression: exp.Transaction) -> str:
    -697            this = self.sql(expression, "this")
    -698            this = f" {this}" if this else ""
    -699            mark = self.sql(expression, "mark")
    -700            mark = f" WITH MARK {mark}" if mark else ""
    -701            return f"BEGIN TRANSACTION{this}{mark}"
    -702
    -703        def commit_sql(self, expression: exp.Commit) -> str:
    -704            this = self.sql(expression, "this")
    -705            this = f" {this}" if this else ""
    -706            durability = expression.args.get("durability")
    -707            durability = (
    -708                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
    -709                if durability is not None
    -710                else ""
    -711            )
    -712            return f"COMMIT TRANSACTION{this}{durability}"
    -713
    -714        def rollback_sql(self, expression: exp.Rollback) -> str:
    -715            this = self.sql(expression, "this")
    -716            this = f" {this}" if this else ""
    -717            return f"ROLLBACK TRANSACTION{this}"
    -718
    -719        def identifier_sql(self, expression: exp.Identifier) -> str:
    -720            identifier = super().identifier_sql(expression)
    -721
    -722            if expression.args.get("global"):
    -723                identifier = f"##{identifier}"
    -724            elif expression.args.get("temporary"):
    -725                identifier = f"#{identifier}"
    -726
    -727            return identifier
    -728
    -729        def constraint_sql(self, expression: exp.Constraint) -> str:
    -730            this = self.sql(expression, "this")
    -731            expressions = self.expressions(expression, flat=True, sep=" ")
    -732            return f"CONSTRAINT {this} {expressions}"
    -733
    -734        # https://learn.microsoft.com/en-us/answers/questions/448821/create-table-in-sql-server
    -735        def generatedasidentitycolumnconstraint_sql(
    -736            self, expression: exp.GeneratedAsIdentityColumnConstraint
    -737        ) -> str:
    -738            start = self.sql(expression, "start") or "1"
    -739            increment = self.sql(expression, "increment") or "1"
    -740            return f"IDENTITY({start}, {increment})"
    +591            exp.DataType.Type.BOOLEAN: "BIT",
    +592            exp.DataType.Type.DECIMAL: "NUMERIC",
    +593            exp.DataType.Type.DATETIME: "DATETIME2",
    +594            exp.DataType.Type.INT: "INTEGER",
    +595            exp.DataType.Type.TIMESTAMP: "DATETIME2",
    +596            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
    +597            exp.DataType.Type.VARIANT: "SQL_VARIANT",
    +598        }
    +599
    +600        TRANSFORMS = {
    +601            **generator.Generator.TRANSFORMS,
    +602            exp.AnyValue: any_value_to_max_sql,
    +603            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
    +604            exp.DateAdd: generate_date_delta_with_unit_sql,
    +605            exp.DateDiff: generate_date_delta_with_unit_sql,
    +606            exp.CurrentDate: rename_func("GETDATE"),
    +607            exp.CurrentTimestamp: rename_func("GETDATE"),
    +608            exp.Extract: rename_func("DATEPART"),
    +609            exp.GroupConcat: _string_agg_sql,
    +610            exp.If: rename_func("IIF"),
    +611            exp.Max: max_or_greatest,
    +612            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
    +613            exp.Min: min_or_least,
    +614            exp.NumberToStr: _format_sql,
    +615            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
    +616            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
    +617            exp.SHA2: lambda self, e: self.func(
    +618                "HASHBYTES",
    +619                exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"),
    +620                e.this,
    +621            ),
    +622            exp.TemporaryProperty: lambda self, e: "",
    +623            exp.TimeStrToTime: timestrtotime_sql,
    +624            exp.TimeToStr: _format_sql,
    +625        }
    +626
    +627        TRANSFORMS.pop(exp.ReturnsProperty)
    +628
    +629        PROPERTIES_LOCATION = {
    +630            **generator.Generator.PROPERTIES_LOCATION,
    +631            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    +632        }
    +633
    +634        LIMIT_FETCH = "FETCH"
    +635
    +636        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    +637            sql = self.sql(expression, "this")
    +638            properties = expression.args.get("properties")
    +639
    +640            if sql[:1] != "#" and any(
    +641                isinstance(prop, exp.TemporaryProperty)
    +642                for prop in (properties.expressions if properties else [])
    +643            ):
    +644                sql = f"#{sql}"
    +645
    +646            return sql
    +647
    +648        def create_sql(self, expression: exp.Create) -> str:
    +649            expression = expression.copy()
    +650            kind = self.sql(expression, "kind").upper()
    +651            exists = expression.args.pop("exists", None)
    +652            sql = super().create_sql(expression)
    +653
    +654            if exists:
    +655                table = expression.find(exp.Table)
    +656                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
    +657                if kind == "SCHEMA":
    +658                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC('{sql}')"""
    +659                elif kind == "TABLE":
    +660                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = {identifier}) EXEC('{sql}')"""
    +661                elif kind == "INDEX":
    +662                    index = self.sql(exp.Literal.string(expression.this.text("this")))
    +663                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC('{sql}')"""
    +664            elif expression.args.get("replace"):
    +665                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)
    +666
    +667            return sql
    +668
    +669        def offset_sql(self, expression: exp.Offset) -> str:
    +670            return f"{super().offset_sql(expression)} ROWS"
    +671
    +672        def version_sql(self, expression: exp.Version) -> str:
    +673            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    +674            this = f"FOR {name}"
    +675            expr = expression.expression
    +676            kind = expression.text("kind")
    +677            if kind in ("FROM", "BETWEEN"):
    +678                args = expr.expressions
    +679                sep = "TO" if kind == "FROM" else "AND"
    +680                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
    +681            else:
    +682                expr_sql = self.sql(expr)
    +683
    +684            expr_sql = f" {expr_sql}" if expr_sql else ""
    +685            return f"{this} {kind}{expr_sql}"
    +686
    +687        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
    +688            table = expression.args.get("table")
    +689            table = f"{table} " if table else ""
    +690            return f"RETURNS {table}{self.sql(expression, 'this')}"
    +691
    +692        def returning_sql(self, expression: exp.Returning) -> str:
    +693            into = self.sql(expression, "into")
    +694            into = self.seg(f"INTO {into}") if into else ""
    +695            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"
    +696
    +697        def transaction_sql(self, expression: exp.Transaction) -> str:
    +698            this = self.sql(expression, "this")
    +699            this = f" {this}" if this else ""
    +700            mark = self.sql(expression, "mark")
    +701            mark = f" WITH MARK {mark}" if mark else ""
    +702            return f"BEGIN TRANSACTION{this}{mark}"
    +703
    +704        def commit_sql(self, expression: exp.Commit) -> str:
    +705            this = self.sql(expression, "this")
    +706            this = f" {this}" if this else ""
    +707            durability = expression.args.get("durability")
    +708            durability = (
    +709                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
    +710                if durability is not None
    +711                else ""
    +712            )
    +713            return f"COMMIT TRANSACTION{this}{durability}"
    +714
    +715        def rollback_sql(self, expression: exp.Rollback) -> str:
    +716            this = self.sql(expression, "this")
    +717            this = f" {this}" if this else ""
    +718            return f"ROLLBACK TRANSACTION{this}"
    +719
    +720        def identifier_sql(self, expression: exp.Identifier) -> str:
    +721            identifier = super().identifier_sql(expression)
    +722
    +723            if expression.args.get("global"):
    +724                identifier = f"##{identifier}"
    +725            elif expression.args.get("temporary"):
    +726                identifier = f"#{identifier}"
    +727
    +728            return identifier
    +729
    +730        def constraint_sql(self, expression: exp.Constraint) -> str:
    +731            this = self.sql(expression, "this")
    +732            expressions = self.expressions(expression, flat=True, sep=" ")
    +733            return f"CONSTRAINT {this} {expressions}"
    +734
    +735        # https://learn.microsoft.com/en-us/answers/questions/448821/create-table-in-sql-server
    +736        def generatedasidentitycolumnconstraint_sql(
    +737            self, expression: exp.GeneratedAsIdentityColumnConstraint
    +738        ) -> str:
    +739            start = self.sql(expression, "start") or "1"
    +740            increment = self.sql(expression, "increment") or "1"
    +741            return f"IDENTITY({start}, {increment})"
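The only functional change in this hunk is the new exp.DataType.Type.BOOLEAN: "BIT" entry in TYPE_MAPPING; the rest of the class is re-rendered unchanged. A minimal sketch of the effect, assuming an illustrative input query (exact output may vary by sqlglot version):

    import sqlglot

    # BOOLEAN columns should now be rendered as BIT when generating T-SQL.
    print(sqlglot.transpile("CREATE TABLE t (b BOOLEAN)", read="postgres", write="tsql")[0])
    # Expected, roughly: CREATE TABLE t (b BIT)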
     
    @@ -2439,7 +2441,7 @@ Default: 3
    JOIN_HINTS = -{'HASH', 'MERGE', 'LOOP', 'REMOTE'} +{'HASH', 'LOOP', 'REMOTE', 'MERGE'}
    @@ -2451,7 +2453,7 @@ Default: 3
    VAR_LENGTH_DATATYPES = -{<Type.NVARCHAR: 'NVARCHAR'>, <Type.NCHAR: 'NCHAR'>, <Type.CHAR: 'CHAR'>, <Type.VARCHAR: 'VARCHAR'>} +{<Type.VARCHAR: 'VARCHAR'>, <Type.NVARCHAR: 'NVARCHAR'>, <Type.CHAR: 'CHAR'>, <Type.NCHAR: 'NCHAR'>}
    @@ -2464,7 +2466,7 @@ Default: 3
    RETURNS_TABLE_TOKENS = - {<TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.END: 'END'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.DIV: 'DIV'>, <TokenType.ROW: 'ROW'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.CACHE: 'CACHE'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.SET: 'SET'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.ASC: 'ASC'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.SOME: 'SOME'>, <TokenType.KEEP: 'KEEP'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.LOAD: 'LOAD'>, <TokenType.SHOW: 'SHOW'>, <TokenType.ANTI: 'ANTI'>, <TokenType.INDEX: 'INDEX'>, <TokenType.MERGE: 'MERGE'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.FALSE: 'FALSE'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.NEXT: 'NEXT'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.ALL: 'ALL'>, <TokenType.DELETE: 'DELETE'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.FULL: 'FULL'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.FILTER: 'FILTER'>, <TokenType.ROWS: 'ROWS'>, <TokenType.TOP: 'TOP'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.CASE: 'CASE'>, <TokenType.VAR: 'VAR'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.APPLY: 'APPLY'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.SEMI: 'SEMI'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.DESC: 'DESC'>, <TokenType.FIRST: 'FIRST'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.ANY: 'ANY'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.LEFT: 'LEFT'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.VIEW: 'VIEW'>, <TokenType.IS: 'IS'>, <TokenType.TRUE: 'TRUE'>} + {<TokenType.LOAD: 'LOAD'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.DELETE: 'DELETE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.LEFT: 'LEFT'>, <TokenType.ROW: 'ROW'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.SOME: 'SOME'>, <TokenType.IS: 'IS'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.MERGE: 'MERGE'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.SEMI: 'SEMI'>, <TokenType.SHOW: 'SHOW'>, <TokenType.INDEX: 'INDEX'>, <TokenType.TOP: 'TOP'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.DIV: 'DIV'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.DESC: 'DESC'>, <TokenType.ASC: 'ASC'>, <TokenType.APPLY: 'APPLY'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.FILTER: 'FILTER'>, <TokenType.VIEW: 'VIEW'>, <TokenType.ANY: 'ANY'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.ALL: 'ALL'>, <TokenType.VAR: 'VAR'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.NEXT: 'NEXT'>, <TokenType.SET: 'SET'>, <TokenType.ROWS: 'ROWS'>, <TokenType.END: 'END'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.EXISTS: 'EXISTS'>, 
<TokenType.ANTI: 'ANTI'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.TRUE: 'TRUE'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.FULL: 'FULL'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.CASE: 'CASE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.CACHE: 'CACHE'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.FALSE: 'FALSE'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.KEEP: 'KEEP'>}
@@ -2734,156 +2736,157 @@ Default: 3
 588
 589        TYPE_MAPPING = {
 590            **generator.Generator.TYPE_MAPPING,
-591            exp.DataType.Type.DECIMAL: "NUMERIC",
-592            exp.DataType.Type.DATETIME: "DATETIME2",
-593            exp.DataType.Type.INT: "INTEGER",
-594            exp.DataType.Type.TIMESTAMP: "DATETIME2",
-595            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
-596            exp.DataType.Type.VARIANT: "SQL_VARIANT",
-597        }
-598
-599        TRANSFORMS = {
-600            **generator.Generator.TRANSFORMS,
-601            exp.AnyValue: any_value_to_max_sql,
-602            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
-603            exp.DateAdd: generate_date_delta_with_unit_sql,
-604            exp.DateDiff: generate_date_delta_with_unit_sql,
-605            exp.CurrentDate: rename_func("GETDATE"),
-606            exp.CurrentTimestamp: rename_func("GETDATE"),
-607            exp.Extract: rename_func("DATEPART"),
-608            exp.GroupConcat: _string_agg_sql,
-609            exp.If: rename_func("IIF"),
-610            exp.Max: max_or_greatest,
-611            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
-612            exp.Min: min_or_least,
-613            exp.NumberToStr: _format_sql,
-614            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
-615            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
-616            exp.SHA2: lambda self, e: self.func(
-617                "HASHBYTES",
-618                exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"),
-619                e.this,
-620            ),
-621            exp.TemporaryProperty: lambda self, e: "",
-622            exp.TimeStrToTime: timestrtotime_sql,
-623            exp.TimeToStr: _format_sql,
-624        }
-625
-626        TRANSFORMS.pop(exp.ReturnsProperty)
-627
-628        PROPERTIES_LOCATION = {
-629            **generator.Generator.PROPERTIES_LOCATION,
-630            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
-631        }
-632
-633        LIMIT_FETCH = "FETCH"
-634
-635        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
-636            sql = self.sql(expression, "this")
-637            properties = expression.args.get("properties")
-638
-639            if sql[:1] != "#" and any(
-640                isinstance(prop, exp.TemporaryProperty)
-641                for prop in (properties.expressions if properties else [])
-642            ):
-643                sql = f"#{sql}"
-644
-645            return sql
-646
-647        def create_sql(self, expression: exp.Create) -> str:
-648            expression = expression.copy()
-649            kind = self.sql(expression, "kind").upper()
-650            exists = expression.args.pop("exists", None)
-651            sql = super().create_sql(expression)
-652
-653            if exists:
-654                table = expression.find(exp.Table)
-655                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
-656                if kind == "SCHEMA":
-657                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC('{sql}')"""
-658                elif kind == "TABLE":
-659                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = {identifier}) EXEC('{sql}')"""
-660                elif kind == "INDEX":
-661                    index = self.sql(exp.Literal.string(expression.this.text("this")))
-662                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC('{sql}')"""
-663            elif expression.args.get("replace"):
-664                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)
-665
-666            return sql
-667
-668        def offset_sql(self, expression: exp.Offset) -> str:
-669            return f"{super().offset_sql(expression)} ROWS"
-670
-671        def version_sql(self, expression: exp.Version) -> str:
-672            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
-673            this = f"FOR {name}"
-674            expr = expression.expression
-675            kind = expression.text("kind")
-676            if kind in ("FROM", "BETWEEN"):
-677                args = expr.expressions
-678                sep = "TO" if kind == "FROM" else "AND"
-679                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
-680            else:
-681                expr_sql = self.sql(expr)
-682
-683            expr_sql = f" {expr_sql}" if expr_sql else ""
-684            return f"{this} {kind}{expr_sql}"
-685
-686        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
-687            table = expression.args.get("table")
-688            table = f"{table} " if table else ""
-689            return f"RETURNS {table}{self.sql(expression, 'this')}"
-690
-691        def returning_sql(self, expression: exp.Returning) -> str:
-692            into = self.sql(expression, "into")
-693            into = self.seg(f"INTO {into}") if into else ""
-694            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"
-695
-696        def transaction_sql(self, expression: exp.Transaction) -> str:
-697            this = self.sql(expression, "this")
-698            this = f" {this}" if this else ""
-699            mark = self.sql(expression, "mark")
-700            mark = f" WITH MARK {mark}" if mark else ""
-701            return f"BEGIN TRANSACTION{this}{mark}"
-702
-703        def commit_sql(self, expression: exp.Commit) -> str:
-704            this = self.sql(expression, "this")
-705            this = f" {this}" if this else ""
-706            durability = expression.args.get("durability")
-707            durability = (
-708                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
-709                if durability is not None
-710                else ""
-711            )
-712            return f"COMMIT TRANSACTION{this}{durability}"
-713
-714        def rollback_sql(self, expression: exp.Rollback) -> str:
-715            this = self.sql(expression, "this")
-716            this = f" {this}" if this else ""
-717            return f"ROLLBACK TRANSACTION{this}"
-718
-719        def identifier_sql(self, expression: exp.Identifier) -> str:
-720            identifier = super().identifier_sql(expression)
-721
-722            if expression.args.get("global"):
-723                identifier = f"##{identifier}"
-724            elif expression.args.get("temporary"):
-725                identifier = f"#{identifier}"
-726
-727            return identifier
-728
-729        def constraint_sql(self, expression: exp.Constraint) -> str:
-730            this = self.sql(expression, "this")
-731            expressions = self.expressions(expression, flat=True, sep=" ")
-732            return f"CONSTRAINT {this} {expressions}"
-733
-734        # https://learn.microsoft.com/en-us/answers/questions/448821/create-table-in-sql-server
-735        def generatedasidentitycolumnconstraint_sql(
-736            self, expression: exp.GeneratedAsIdentityColumnConstraint
-737        ) -> str:
-738            start = self.sql(expression, "start") or "1"
-739            increment = self.sql(expression, "increment") or "1"
-740            return f"IDENTITY({start}, {increment})"
+591            exp.DataType.Type.BOOLEAN: "BIT",
+592            exp.DataType.Type.DECIMAL: "NUMERIC",
+593            exp.DataType.Type.DATETIME: "DATETIME2",
+594            exp.DataType.Type.INT: "INTEGER",
+595            exp.DataType.Type.TIMESTAMP: "DATETIME2",
+596            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
+597            exp.DataType.Type.VARIANT: "SQL_VARIANT",
+598        }
+599
+600        TRANSFORMS = {
+601            **generator.Generator.TRANSFORMS,
+602            exp.AnyValue: any_value_to_max_sql,
+603            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
+604            exp.DateAdd: generate_date_delta_with_unit_sql,
+605            exp.DateDiff: generate_date_delta_with_unit_sql,
+606            exp.CurrentDate: rename_func("GETDATE"),
+607            exp.CurrentTimestamp: rename_func("GETDATE"),
+608            exp.Extract: rename_func("DATEPART"),
+609            exp.GroupConcat: _string_agg_sql,
+610            exp.If: rename_func("IIF"),
+611            exp.Max: max_or_greatest,
+612            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
+613            exp.Min: min_or_least,
+614            exp.NumberToStr: _format_sql,
+615            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
+616            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
+617            exp.SHA2: lambda self, e: self.func(
+618                "HASHBYTES",
+619                exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"),
+620                e.this,
+621            ),
+622            exp.TemporaryProperty: lambda self, e: "",
+623            exp.TimeStrToTime: timestrtotime_sql,
+624            exp.TimeToStr: _format_sql,
+625        }
+626
+627        TRANSFORMS.pop(exp.ReturnsProperty)
+628
+629        PROPERTIES_LOCATION = {
+630            **generator.Generator.PROPERTIES_LOCATION,
+631            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
+632        }
+633
+634        LIMIT_FETCH = "FETCH"
+635
+636        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
+637            sql = self.sql(expression, "this")
+638            properties = expression.args.get("properties")
+639
+640            if sql[:1] != "#" and any(
+641                isinstance(prop, exp.TemporaryProperty)
+642                for prop in (properties.expressions if properties else [])
+643            ):
+644                sql = f"#{sql}"
+645
+646            return sql
+647
+648        def create_sql(self, expression: exp.Create) -> str:
+649            expression = expression.copy()
+650            kind = self.sql(expression, "kind").upper()
+651            exists = expression.args.pop("exists", None)
+652            sql = super().create_sql(expression)
+653
+654            if exists:
+655                table = expression.find(exp.Table)
+656                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
+657                if kind == "SCHEMA":
+658                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC('{sql}')"""
+659                elif kind == "TABLE":
+660                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = {identifier}) EXEC('{sql}')"""
+661                elif kind == "INDEX":
+662                    index = self.sql(exp.Literal.string(expression.this.text("this")))
+663                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC('{sql}')"""
+664            elif expression.args.get("replace"):
+665                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)
+666
+667            return sql
+668
+669        def offset_sql(self, expression: exp.Offset) -> str:
+670            return f"{super().offset_sql(expression)} ROWS"
+671
+672        def version_sql(self, expression: exp.Version) -> str:
+673            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
+674            this = f"FOR {name}"
+675            expr = expression.expression
+676            kind = expression.text("kind")
+677            if kind in ("FROM", "BETWEEN"):
+678                args = expr.expressions
+679                sep = "TO" if kind == "FROM" else "AND"
+680                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
+681            else:
+682                expr_sql = self.sql(expr)
+683
+684            expr_sql = f" {expr_sql}" if expr_sql else ""
+685            return f"{this} {kind}{expr_sql}"
+686
+687        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
+688            table = expression.args.get("table")
+689            table = f"{table} " if table else ""
+690            return f"RETURNS {table}{self.sql(expression, 'this')}"
+691
+692        def returning_sql(self, expression: exp.Returning) -> str:
+693            into = self.sql(expression, "into")
+694            into = self.seg(f"INTO {into}") if into else ""
+695            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"
+696
+697        def transaction_sql(self, expression: exp.Transaction) -> str:
+698            this = self.sql(expression, "this")
+699            this = f" {this}" if this else ""
+700            mark = self.sql(expression, "mark")
+701            mark = f" WITH MARK {mark}" if mark else ""
+702            return f"BEGIN TRANSACTION{this}{mark}"
+703
+704        def commit_sql(self, expression: exp.Commit) -> str:
+705            this = self.sql(expression, "this")
+706            this = f" {this}" if this else ""
+707            durability = expression.args.get("durability")
+708            durability = (
+709                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
+710                if durability is not None
+711                else ""
+712            )
+713            return f"COMMIT TRANSACTION{this}{durability}"
+714
+715        def rollback_sql(self, expression: exp.Rollback) -> str:
+716            this = self.sql(expression, "this")
+717            this = f" {this}" if this else ""
+718            return f"ROLLBACK TRANSACTION{this}"
+719
+720        def identifier_sql(self, expression: exp.Identifier) -> str:
+721            identifier = super().identifier_sql(expression)
+722
+723            if expression.args.get("global"):
+724                identifier = f"##{identifier}"
+725            elif expression.args.get("temporary"):
+726                identifier = f"#{identifier}"
+727
+728            return identifier
+729
+730        def constraint_sql(self, expression: exp.Constraint) -> str:
+731            this = self.sql(expression, "this")
+732            expressions = self.expressions(expression, flat=True, sep=" ")
+733            return f"CONSTRAINT {this} {expressions}"
+734
+735        # https://learn.microsoft.com/en-us/answers/questions/448821/create-table-in-sql-server
+736        def generatedasidentitycolumnconstraint_sql(
+737            self, expression: exp.GeneratedAsIdentityColumnConstraint
+738        ) -> str:
+739            start = self.sql(expression, "start") or "1"
+740            increment = self.sql(expression, "increment") or "1"
+741            return f"IDENTITY({start}, {increment})"
    @@ -3002,7 +3005,7 @@ Default: True
    TYPE_MAPPING = - {<Type.NCHAR: 'NCHAR'>: 'CHAR', <Type.NVARCHAR: 'NVARCHAR'>: 'VARCHAR', <Type.MEDIUMTEXT: 'MEDIUMTEXT'>: 'TEXT', <Type.LONGTEXT: 'LONGTEXT'>: 'TEXT', <Type.MEDIUMBLOB: 'MEDIUMBLOB'>: 'BLOB', <Type.LONGBLOB: 'LONGBLOB'>: 'BLOB', <Type.INET: 'INET'>: 'INET', <Type.DECIMAL: 'DECIMAL'>: 'NUMERIC', <Type.DATETIME: 'DATETIME'>: 'DATETIME2', <Type.INT: 'INT'>: 'INTEGER', <Type.TIMESTAMP: 'TIMESTAMP'>: 'DATETIME2', <Type.TIMESTAMPTZ: 'TIMESTAMPTZ'>: 'DATETIMEOFFSET', <Type.VARIANT: 'VARIANT'>: 'SQL_VARIANT'} + {<Type.NCHAR: 'NCHAR'>: 'CHAR', <Type.NVARCHAR: 'NVARCHAR'>: 'VARCHAR', <Type.MEDIUMTEXT: 'MEDIUMTEXT'>: 'TEXT', <Type.LONGTEXT: 'LONGTEXT'>: 'TEXT', <Type.MEDIUMBLOB: 'MEDIUMBLOB'>: 'BLOB', <Type.LONGBLOB: 'LONGBLOB'>: 'BLOB', <Type.INET: 'INET'>: 'INET', <Type.BOOLEAN: 'BOOLEAN'>: 'BIT', <Type.DECIMAL: 'DECIMAL'>: 'NUMERIC', <Type.DATETIME: 'DATETIME'>: 'DATETIME2', <Type.INT: 'INT'>: 'INTEGER', <Type.TIMESTAMP: 'TIMESTAMP'>: 'DATETIME2', <Type.TIMESTAMPTZ: 'TIMESTAMPTZ'>: 'DATETIMEOFFSET', <Type.VARIANT: 'VARIANT'>: 'SQL_VARIANT'}
    @@ -3060,17 +3063,17 @@ Default: True
    -
    635        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    -636            sql = self.sql(expression, "this")
    -637            properties = expression.args.get("properties")
    -638
    -639            if sql[:1] != "#" and any(
    -640                isinstance(prop, exp.TemporaryProperty)
    -641                for prop in (properties.expressions if properties else [])
    -642            ):
    -643                sql = f"#{sql}"
    -644
    -645            return sql
    +            
    636        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    +637            sql = self.sql(expression, "this")
    +638            properties = expression.args.get("properties")
    +639
    +640            if sql[:1] != "#" and any(
    +641                isinstance(prop, exp.TemporaryProperty)
    +642                for prop in (properties.expressions if properties else [])
    +643            ):
    +644                sql = f"#{sql}"
    +645
    +646            return sql
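createable_sql prefixes the table name with '#' whenever a TemporaryProperty is attached, since T-SQL marks temporary tables by name rather than by keyword. A hedged sketch of the behaviour (illustrative input; exact output may vary by sqlglot version):

    import sqlglot

    # A temporary table parsed from another dialect should pick up the '#' prefix.
    print(sqlglot.transpile("CREATE TEMPORARY TABLE t1 (x INT)", read="postgres", write="tsql")[0])
    # Expected, roughly: CREATE TABLE #t1 (x INTEGER)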
     
    @@ -3088,26 +3091,26 @@ Default: True
    -
    647        def create_sql(self, expression: exp.Create) -> str:
    -648            expression = expression.copy()
    -649            kind = self.sql(expression, "kind").upper()
    -650            exists = expression.args.pop("exists", None)
    -651            sql = super().create_sql(expression)
    -652
    -653            if exists:
    -654                table = expression.find(exp.Table)
    -655                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
    -656                if kind == "SCHEMA":
    -657                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC('{sql}')"""
    -658                elif kind == "TABLE":
    -659                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = {identifier}) EXEC('{sql}')"""
    -660                elif kind == "INDEX":
    -661                    index = self.sql(exp.Literal.string(expression.this.text("this")))
    -662                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC('{sql}')"""
    -663            elif expression.args.get("replace"):
    -664                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)
    -665
    -666            return sql
    +            
    648        def create_sql(self, expression: exp.Create) -> str:
    +649            expression = expression.copy()
    +650            kind = self.sql(expression, "kind").upper()
    +651            exists = expression.args.pop("exists", None)
    +652            sql = super().create_sql(expression)
    +653
    +654            if exists:
    +655                table = expression.find(exp.Table)
    +656                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
    +657                if kind == "SCHEMA":
    +658                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC('{sql}')"""
    +659                elif kind == "TABLE":
    +660                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = {identifier}) EXEC('{sql}')"""
    +661                elif kind == "INDEX":
    +662                    index = self.sql(exp.Literal.string(expression.this.text("this")))
    +663                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC('{sql}')"""
    +664            elif expression.args.get("replace"):
    +665                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)
    +666
    +667            return sql
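create_sql emulates IF NOT EXISTS by wrapping the generated statement in an EXEC guarded by a metadata lookup (information_schema for schemas and tables, sys.indexes for indexes). A hedged sketch (illustrative input; exact quoting may differ by sqlglot version):

    import sqlglot

    print(sqlglot.transpile("CREATE TABLE IF NOT EXISTS t (x INT)", read="duckdb", write="tsql")[0])
    # Expected, roughly:
    # IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = 't') EXEC('CREATE TABLE t (x INTEGER)')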
     
    @@ -3125,8 +3128,8 @@ Default: True
    -
    668        def offset_sql(self, expression: exp.Offset) -> str:
    -669            return f"{super().offset_sql(expression)} ROWS"
    +            
    669        def offset_sql(self, expression: exp.Offset) -> str:
    +670            return f"{super().offset_sql(expression)} ROWS"
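offset_sql appends ROWS to the base OFFSET clause, and together with LIMIT_FETCH = "FETCH" a LIMIT is emitted as a FETCH clause. A hedged sketch (the exact FETCH wording may differ between sqlglot versions):

    import sqlglot

    print(sqlglot.transpile("SELECT x FROM t ORDER BY x LIMIT 5 OFFSET 10", read="postgres", write="tsql")[0])
    # Expected, roughly: SELECT x FROM t ORDER BY x OFFSET 10 ROWS FETCH FIRST 5 ROWS ONLY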
     
    @@ -3144,20 +3147,20 @@ Default: True
    -
    671        def version_sql(self, expression: exp.Version) -> str:
    -672            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    -673            this = f"FOR {name}"
    -674            expr = expression.expression
    -675            kind = expression.text("kind")
    -676            if kind in ("FROM", "BETWEEN"):
    -677                args = expr.expressions
    -678                sep = "TO" if kind == "FROM" else "AND"
    -679                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
    -680            else:
    -681                expr_sql = self.sql(expr)
    -682
    -683            expr_sql = f" {expr_sql}" if expr_sql else ""
    -684            return f"{this} {kind}{expr_sql}"
    +            
    672        def version_sql(self, expression: exp.Version) -> str:
    +673            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    +674            this = f"FOR {name}"
    +675            expr = expression.expression
    +676            kind = expression.text("kind")
    +677            if kind in ("FROM", "BETWEEN"):
    +678                args = expr.expressions
    +679                sep = "TO" if kind == "FROM" else "AND"
    +680                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
    +681            else:
    +682                expr_sql = self.sql(expr)
    +683
    +684            expr_sql = f" {expr_sql}" if expr_sql else ""
    +685            return f"{this} {kind}{expr_sql}"
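version_sql renders temporal-table clauses, mapping a TIMESTAMP version onto FOR SYSTEM_TIME and joining FROM/BETWEEN bounds with TO/AND. A hedged round-trip sketch (illustrative query):

    import sqlglot

    sql = "SELECT * FROM t FOR SYSTEM_TIME AS OF '2023-01-01'"
    print(sqlglot.transpile(sql, read="tsql", write="tsql")[0])
    # Expected to round-trip essentially unchanged.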
     
    @@ -3175,10 +3178,10 @@ Default: True
    -
    686        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
    -687            table = expression.args.get("table")
    -688            table = f"{table} " if table else ""
    -689            return f"RETURNS {table}{self.sql(expression, 'this')}"
    +            
    687        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
    +688            table = expression.args.get("table")
    +689            table = f"{table} " if table else ""
    +690            return f"RETURNS {table}{self.sql(expression, 'this')}"
     
    @@ -3196,10 +3199,10 @@ Default: True
    -
    691        def returning_sql(self, expression: exp.Returning) -> str:
    -692            into = self.sql(expression, "into")
    -693            into = self.seg(f"INTO {into}") if into else ""
    -694            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"
    +            
    692        def returning_sql(self, expression: exp.Returning) -> str:
    +693            into = self.sql(expression, "into")
    +694            into = self.seg(f"INTO {into}") if into else ""
    +695            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"
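returning_sql maps a RETURNING-style clause onto T-SQL's OUTPUT, optionally followed by INTO. A hedged sketch (clause placement can differ between sqlglot versions):

    import sqlglot

    print(sqlglot.transpile("DELETE FROM t WHERE x > 1 RETURNING id", read="postgres", write="tsql")[0])
    # Expected, roughly: DELETE FROM t OUTPUT id WHERE x > 1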
     
    @@ -3217,12 +3220,12 @@ Default: True
    -
    696        def transaction_sql(self, expression: exp.Transaction) -> str:
    -697            this = self.sql(expression, "this")
    -698            this = f" {this}" if this else ""
    -699            mark = self.sql(expression, "mark")
    -700            mark = f" WITH MARK {mark}" if mark else ""
    -701            return f"BEGIN TRANSACTION{this}{mark}"
    +            
    697        def transaction_sql(self, expression: exp.Transaction) -> str:
    +698            this = self.sql(expression, "this")
    +699            this = f" {this}" if this else ""
    +700            mark = self.sql(expression, "mark")
    +701            mark = f" WITH MARK {mark}" if mark else ""
    +702            return f"BEGIN TRANSACTION{this}{mark}"
     
    @@ -3240,16 +3243,16 @@ Default: True
    -
    703        def commit_sql(self, expression: exp.Commit) -> str:
    -704            this = self.sql(expression, "this")
    -705            this = f" {this}" if this else ""
    -706            durability = expression.args.get("durability")
    -707            durability = (
    -708                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
    -709                if durability is not None
    -710                else ""
    -711            )
    -712            return f"COMMIT TRANSACTION{this}{durability}"
    +            
    704        def commit_sql(self, expression: exp.Commit) -> str:
    +705            this = self.sql(expression, "this")
    +706            this = f" {this}" if this else ""
    +707            durability = expression.args.get("durability")
    +708            durability = (
    +709                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
    +710                if durability is not None
    +711                else ""
    +712            )
    +713            return f"COMMIT TRANSACTION{this}{durability}"
     
    @@ -3267,10 +3270,10 @@ Default: True
    -
    714        def rollback_sql(self, expression: exp.Rollback) -> str:
    -715            this = self.sql(expression, "this")
    -716            this = f" {this}" if this else ""
    -717            return f"ROLLBACK TRANSACTION{this}"
    +            
    715        def rollback_sql(self, expression: exp.Rollback) -> str:
    +716            this = self.sql(expression, "this")
    +717            this = f" {this}" if this else ""
    +718            return f"ROLLBACK TRANSACTION{this}"
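transaction_sql, commit_sql and rollback_sql spell out the TRANSACTION keyword, and commit_sql can append WITH (DELAYED_DURABILITY = ON/OFF) when a durability flag is present on the expression. A hedged round-trip sketch:

    import sqlglot

    for stmt in sqlglot.transpile("BEGIN TRANSACTION; COMMIT; ROLLBACK", read="tsql", write="tsql"):
        print(stmt)
    # Expected, roughly:
    # BEGIN TRANSACTION
    # COMMIT TRANSACTION
    # ROLLBACK TRANSACTION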
     
    @@ -3288,15 +3291,15 @@ Default: True
    -
    719        def identifier_sql(self, expression: exp.Identifier) -> str:
    -720            identifier = super().identifier_sql(expression)
    -721
    -722            if expression.args.get("global"):
    -723                identifier = f"##{identifier}"
    -724            elif expression.args.get("temporary"):
    -725                identifier = f"#{identifier}"
    -726
    -727            return identifier
    +            
    720        def identifier_sql(self, expression: exp.Identifier) -> str:
    +721            identifier = super().identifier_sql(expression)
    +722
    +723            if expression.args.get("global"):
    +724                identifier = f"##{identifier}"
    +725            elif expression.args.get("temporary"):
    +726                identifier = f"#{identifier}"
    +727
    +728            return identifier
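identifier_sql restores the '#' and '##' prefixes for local and global temporary objects when the parsed identifier carries the temporary or global flag. A hedged round-trip sketch:

    import sqlglot

    print(sqlglot.transpile("SELECT * FROM #tmp", read="tsql", write="tsql")[0])
    # Expected, roughly: SELECT * FROM #tmp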
     
    @@ -3314,10 +3317,10 @@ Default: True
    -
    729        def constraint_sql(self, expression: exp.Constraint) -> str:
    -730            this = self.sql(expression, "this")
    -731            expressions = self.expressions(expression, flat=True, sep=" ")
    -732            return f"CONSTRAINT {this} {expressions}"
    +            
    730        def constraint_sql(self, expression: exp.Constraint) -> str:
    +731            this = self.sql(expression, "this")
    +732            expressions = self.expressions(expression, flat=True, sep=" ")
    +733            return f"CONSTRAINT {this} {expressions}"
     
    @@ -3335,12 +3338,12 @@ Default: True
    -
    735        def generatedasidentitycolumnconstraint_sql(
    -736            self, expression: exp.GeneratedAsIdentityColumnConstraint
    -737        ) -> str:
    -738            start = self.sql(expression, "start") or "1"
    -739            increment = self.sql(expression, "increment") or "1"
    -740            return f"IDENTITY({start}, {increment})"
    +            
    736        def generatedasidentitycolumnconstraint_sql(
    +737            self, expression: exp.GeneratedAsIdentityColumnConstraint
    +738        ) -> str:
    +739            start = self.sql(expression, "start") or "1"
    +740            increment = self.sql(expression, "increment") or "1"
    +741            return f"IDENTITY({start}, {increment})"
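generatedasidentitycolumnconstraint_sql renders identity columns as IDENTITY(start, increment), defaulting both values to 1 when they are absent. A hedged round-trip sketch:

    import sqlglot

    print(sqlglot.transpile("CREATE TABLE t (id INTEGER IDENTITY(2, 5))", read="tsql", write="tsql")[0])
    # Expected, roughly: CREATE TABLE t (id INTEGER IDENTITY(2, 5))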
     
    @@ -3706,6 +3709,7 @@ Default: True
    intersect_op
    introducer_sql
    pseudotype_sql
    +
    objectidentifier_sql
    onconflict_sql
    rowformatdelimitedproperty_sql
    withtablehint_sql
    -- cgit v1.2.3