From 8d36f5966675e23bee7026ba37ae0647fbf47300 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Mon, 8 Apr 2024 10:11:53 +0200
Subject: Merging upstream version 23.7.0.

Signed-off-by: Daniel Baumann
---
 docs/sqlglot/dialects/bigquery.html | 3356 ++++++++++++++++++-----------------
 1 file changed, 1705 insertions(+), 1651 deletions(-)

(limited to 'docs/sqlglot/dialects/bigquery.html')

diff --git a/docs/sqlglot/dialects/bigquery.html b/docs/sqlglot/dialects/bigquery.html
index b6ecac1..ee82da2 100644
--- a/docs/sqlglot/dialects/bigquery.html
+++ b/docs/sqlglot/dialects/bigquery.html
@@ -60,9 +60,6 @@
  • TIME_MAPPING
  • - ESCAPE_SEQUENCES
  • FORMAT_MAPPING
  • @@ -213,6 +210,9 @@
  • CAN_IMPLEMENT_ARRAY_ANY
  • + SUPPORTS_TO_NUMBER
  • NAMED_PLACEHOLDER_TOKEN
  • @@ -267,9 +267,15 @@
  • version_sql
  • + AFTER_HAVING_MODIFIER_TRANSFORMS
  • + UNESCAPED_SEQUENCES
  • tokenizer_class
  • @@ -292,7 +298,7 @@ INVERSE_TIME_TRIE
  • - INVERSE_ESCAPE_SEQUENCES
  • + ESCAPED_SEQUENCES
  • QUOTE_START @@ -382,297 +388,297 @@ 24 rename_func, 25 timestrtotime_sql, 26 ts_or_ds_add_cast, - 27) - 28from sqlglot.helper import seq_get, split_num_words - 29from sqlglot.tokens import TokenType - 30 - 31if t.TYPE_CHECKING: - 32 from sqlglot._typing import E, Lit - 33 - 34logger = logging.getLogger("sqlglot") - 35 + 27 unit_to_var, + 28) + 29from sqlglot.helper import seq_get, split_num_words + 30from sqlglot.tokens import TokenType + 31 + 32if t.TYPE_CHECKING: + 33 from sqlglot._typing import E, Lit + 34 + 35logger = logging.getLogger("sqlglot") 36 - 37def _derived_table_values_to_unnest(self: BigQuery.Generator, expression: exp.Values) -> str: - 38 if not expression.find_ancestor(exp.From, exp.Join): - 39 return self.values_sql(expression) - 40 - 41 structs = [] - 42 alias = expression.args.get("alias") - 43 for tup in expression.find_all(exp.Tuple): - 44 field_aliases = alias.columns if alias else (f"_c{i}" for i in range(len(tup.expressions))) - 45 expressions = [ - 46 exp.PropertyEQ(this=exp.to_identifier(name), expression=fld) - 47 for name, fld in zip(field_aliases, tup.expressions) - 48 ] - 49 structs.append(exp.Struct(expressions=expressions)) - 50 - 51 return self.unnest_sql(exp.Unnest(expressions=[exp.array(*structs, copy=False)])) - 52 - 53 - 54def _returnsproperty_sql(self: BigQuery.Generator, expression: exp.ReturnsProperty) -> str: - 55 this = expression.this - 56 if isinstance(this, exp.Schema): - 57 this = f"{self.sql(this, 'this')} <{self.expressions(this)}>" - 58 else: - 59 this = self.sql(this) - 60 return f"RETURNS {this}" + 37 + 38def _derived_table_values_to_unnest(self: BigQuery.Generator, expression: exp.Values) -> str: + 39 if not expression.find_ancestor(exp.From, exp.Join): + 40 return self.values_sql(expression) + 41 + 42 structs = [] + 43 alias = expression.args.get("alias") + 44 for tup in expression.find_all(exp.Tuple): + 45 field_aliases = ( + 46 alias.columns + 47 if alias and alias.columns + 48 else (f"_c{i}" for i in range(len(tup.expressions))) + 49 ) + 50 expressions = [ + 51 exp.PropertyEQ(this=exp.to_identifier(name), expression=fld) + 52 for name, fld in zip(field_aliases, tup.expressions) + 53 ] + 54 structs.append(exp.Struct(expressions=expressions)) + 55 + 56 # Due to `UNNEST_COLUMN_ONLY`, it is expected that the table alias be contained in the columns expression + 57 alias_name_only = exp.TableAlias(columns=[alias.this]) if alias else None + 58 return self.unnest_sql( + 59 exp.Unnest(expressions=[exp.array(*structs, copy=False)], alias=alias_name_only) + 60 ) 61 62 - 63def _create_sql(self: BigQuery.Generator, expression: exp.Create) -> str: - 64 returns = expression.find(exp.ReturnsProperty) - 65 if expression.kind == "FUNCTION" and returns and returns.args.get("is_table"): - 66 expression.set("kind", "TABLE FUNCTION") - 67 - 68 if isinstance(expression.expression, (exp.Subquery, exp.Literal)): - 69 expression.set("expression", expression.expression.this) + 63def _returnsproperty_sql(self: BigQuery.Generator, expression: exp.ReturnsProperty) -> str: + 64 this = expression.this + 65 if isinstance(this, exp.Schema): + 66 this = f"{self.sql(this, 'this')} <{self.expressions(this)}>" + 67 else: + 68 this = self.sql(this) + 69 return f"RETURNS {this}" 70 - 71 return self.create_sql(expression) - 72 - 73 - 74def _unqualify_unnest(expression: exp.Expression) -> exp.Expression: - 75 """Remove references to unnest table aliases since bigquery doesn't allow them. 
+ 71 + 72def _create_sql(self: BigQuery.Generator, expression: exp.Create) -> str: + 73 returns = expression.find(exp.ReturnsProperty) + 74 if expression.kind == "FUNCTION" and returns and returns.args.get("is_table"): + 75 expression.set("kind", "TABLE FUNCTION") 76 - 77 These are added by the optimizer's qualify_column step. - 78 """ - 79 from sqlglot.optimizer.scope import find_all_in_scope - 80 - 81 if isinstance(expression, exp.Select): - 82 unnest_aliases = { - 83 unnest.alias - 84 for unnest in find_all_in_scope(expression, exp.Unnest) - 85 if isinstance(unnest.parent, (exp.From, exp.Join)) - 86 } - 87 if unnest_aliases: - 88 for column in expression.find_all(exp.Column): - 89 if column.table in unnest_aliases: - 90 column.set("table", None) - 91 elif column.db in unnest_aliases: - 92 column.set("db", None) - 93 - 94 return expression - 95 - 96 - 97# https://issuetracker.google.com/issues/162294746 - 98# workaround for bigquery bug when grouping by an expression and then ordering - 99# WITH x AS (SELECT 1 y) -100# SELECT y + 1 z -101# FROM x -102# GROUP BY x + 1 -103# ORDER by z -104def _alias_ordered_group(expression: exp.Expression) -> exp.Expression: -105 if isinstance(expression, exp.Select): -106 group = expression.args.get("group") -107 order = expression.args.get("order") -108 -109 if group and order: -110 aliases = { -111 select.this: select.args["alias"] -112 for select in expression.selects -113 if isinstance(select, exp.Alias) -114 } -115 -116 for grouped in group.expressions: -117 if grouped.is_int: -118 continue -119 alias = aliases.get(grouped) -120 if alias: -121 grouped.replace(exp.column(alias)) -122 -123 return expression + 77 if isinstance(expression.expression, (exp.Subquery, exp.Literal)): + 78 expression.set("expression", expression.expression.this) + 79 + 80 return self.create_sql(expression) + 81 + 82 + 83def _unqualify_unnest(expression: exp.Expression) -> exp.Expression: + 84 """Remove references to unnest table aliases since bigquery doesn't allow them. + 85 + 86 These are added by the optimizer's qualify_column step. 
+ 87 """ + 88 from sqlglot.optimizer.scope import find_all_in_scope + 89 + 90 if isinstance(expression, exp.Select): + 91 unnest_aliases = { + 92 unnest.alias + 93 for unnest in find_all_in_scope(expression, exp.Unnest) + 94 if isinstance(unnest.parent, (exp.From, exp.Join)) + 95 } + 96 if unnest_aliases: + 97 for column in expression.find_all(exp.Column): + 98 if column.table in unnest_aliases: + 99 column.set("table", None) +100 elif column.db in unnest_aliases: +101 column.set("db", None) +102 +103 return expression +104 +105 +106# https://issuetracker.google.com/issues/162294746 +107# workaround for bigquery bug when grouping by an expression and then ordering +108# WITH x AS (SELECT 1 y) +109# SELECT y + 1 z +110# FROM x +111# GROUP BY x + 1 +112# ORDER by z +113def _alias_ordered_group(expression: exp.Expression) -> exp.Expression: +114 if isinstance(expression, exp.Select): +115 group = expression.args.get("group") +116 order = expression.args.get("order") +117 +118 if group and order: +119 aliases = { +120 select.this: select.args["alias"] +121 for select in expression.selects +122 if isinstance(select, exp.Alias) +123 } 124 -125 -126def _pushdown_cte_column_names(expression: exp.Expression) -> exp.Expression: -127 """BigQuery doesn't allow column names when defining a CTE, so we try to push them down.""" -128 if isinstance(expression, exp.CTE) and expression.alias_column_names: -129 cte_query = expression.this -130 -131 if cte_query.is_star: -132 logger.warning( -133 "Can't push down CTE column names for star queries. Run the query through" -134 " the optimizer or use 'qualify' to expand the star projections first." -135 ) -136 return expression -137 -138 column_names = expression.alias_column_names -139 expression.args["alias"].set("columns", None) -140 -141 for name, select in zip(column_names, cte_query.selects): -142 to_replace = select -143 -144 if isinstance(select, exp.Alias): -145 select = select.this +125 for grouped in group.expressions: +126 if grouped.is_int: +127 continue +128 alias = aliases.get(grouped) +129 if alias: +130 grouped.replace(exp.column(alias)) +131 +132 return expression +133 +134 +135def _pushdown_cte_column_names(expression: exp.Expression) -> exp.Expression: +136 """BigQuery doesn't allow column names when defining a CTE, so we try to push them down.""" +137 if isinstance(expression, exp.CTE) and expression.alias_column_names: +138 cte_query = expression.this +139 +140 if cte_query.is_star: +141 logger.warning( +142 "Can't push down CTE column names for star queries. Run the query through" +143 " the optimizer or use 'qualify' to expand the star projections first." 
+144 ) +145 return expression 146 -147 # Inner aliases are shadowed by the CTE column names -148 to_replace.replace(exp.alias_(select, name)) +147 column_names = expression.alias_column_names +148 expression.args["alias"].set("columns", None) 149 -150 return expression -151 +150 for name, select in zip(column_names, cte_query.selects): +151 to_replace = select 152 -153def _build_parse_timestamp(args: t.List) -> exp.StrToTime: -154 this = build_formatted_time(exp.StrToTime, "bigquery")([seq_get(args, 1), seq_get(args, 0)]) -155 this.set("zone", seq_get(args, 2)) -156 return this -157 +153 if isinstance(select, exp.Alias): +154 select = select.this +155 +156 # Inner aliases are shadowed by the CTE column names +157 to_replace.replace(exp.alias_(select, name)) 158 -159def _build_timestamp(args: t.List) -> exp.Timestamp: -160 timestamp = exp.Timestamp.from_arg_list(args) -161 timestamp.set("with_tz", True) -162 return timestamp -163 -164 -165def _build_date(args: t.List) -> exp.Date | exp.DateFromParts: -166 expr_type = exp.DateFromParts if len(args) == 3 else exp.Date -167 return expr_type.from_arg_list(args) -168 -169 -170def _build_to_hex(args: t.List) -> exp.Hex | exp.MD5: -171 # TO_HEX(MD5(..)) is common in BigQuery, so it's parsed into MD5 to simplify its transpilation -172 arg = seq_get(args, 0) -173 return exp.MD5(this=arg.this) if isinstance(arg, exp.MD5Digest) else exp.Hex(this=arg) -174 -175 -176def _array_contains_sql(self: BigQuery.Generator, expression: exp.ArrayContains) -> str: -177 return self.sql( -178 exp.Exists( -179 this=exp.select("1") -180 .from_(exp.Unnest(expressions=[expression.left]).as_("_unnest", table=["_col"])) -181 .where(exp.column("_col").eq(expression.right)) -182 ) -183 ) +159 return expression +160 +161 +162def _build_parse_timestamp(args: t.List) -> exp.StrToTime: +163 this = build_formatted_time(exp.StrToTime, "bigquery")([seq_get(args, 1), seq_get(args, 0)]) +164 this.set("zone", seq_get(args, 2)) +165 return this +166 +167 +168def _build_timestamp(args: t.List) -> exp.Timestamp: +169 timestamp = exp.Timestamp.from_arg_list(args) +170 timestamp.set("with_tz", True) +171 return timestamp +172 +173 +174def _build_date(args: t.List) -> exp.Date | exp.DateFromParts: +175 expr_type = exp.DateFromParts if len(args) == 3 else exp.Date +176 return expr_type.from_arg_list(args) +177 +178 +179def _build_to_hex(args: t.List) -> exp.Hex | exp.MD5: +180 # TO_HEX(MD5(..)) is common in BigQuery, so it's parsed into MD5 to simplify its transpilation +181 arg = seq_get(args, 0) +182 return exp.MD5(this=arg.this) if isinstance(arg, exp.MD5Digest) else exp.Hex(this=arg) +183 184 -185 -186def _ts_or_ds_add_sql(self: BigQuery.Generator, expression: exp.TsOrDsAdd) -> str: -187 return date_add_interval_sql("DATE", "ADD")(self, ts_or_ds_add_cast(expression)) -188 -189 -190def _ts_or_ds_diff_sql(self: BigQuery.Generator, expression: exp.TsOrDsDiff) -> str: -191 expression.this.replace(exp.cast(expression.this, "TIMESTAMP", copy=True)) -192 expression.expression.replace(exp.cast(expression.expression, "TIMESTAMP", copy=True)) -193 unit = expression.args.get("unit") or "DAY" -194 return self.func("DATE_DIFF", expression.this, expression.expression, unit) -195 -196 -197def _unix_to_time_sql(self: BigQuery.Generator, expression: exp.UnixToTime) -> str: -198 scale = expression.args.get("scale") -199 timestamp = expression.this -200 -201 if scale in (None, exp.UnixToTime.SECONDS): -202 return self.func("TIMESTAMP_SECONDS", timestamp) -203 if scale == exp.UnixToTime.MILLIS: -204 return 
self.func("TIMESTAMP_MILLIS", timestamp) -205 if scale == exp.UnixToTime.MICROS: -206 return self.func("TIMESTAMP_MICROS", timestamp) -207 -208 unix_seconds = exp.cast(exp.Div(this=timestamp, expression=exp.func("POW", 10, scale)), "int64") -209 return self.func("TIMESTAMP_SECONDS", unix_seconds) -210 -211 -212def _build_time(args: t.List) -> exp.Func: -213 if len(args) == 1: -214 return exp.TsOrDsToTime(this=args[0]) -215 if len(args) == 3: -216 return exp.TimeFromParts.from_arg_list(args) -217 -218 return exp.Anonymous(this="TIME", expressions=args) +185def _array_contains_sql(self: BigQuery.Generator, expression: exp.ArrayContains) -> str: +186 return self.sql( +187 exp.Exists( +188 this=exp.select("1") +189 .from_(exp.Unnest(expressions=[expression.left]).as_("_unnest", table=["_col"])) +190 .where(exp.column("_col").eq(expression.right)) +191 ) +192 ) +193 +194 +195def _ts_or_ds_add_sql(self: BigQuery.Generator, expression: exp.TsOrDsAdd) -> str: +196 return date_add_interval_sql("DATE", "ADD")(self, ts_or_ds_add_cast(expression)) +197 +198 +199def _ts_or_ds_diff_sql(self: BigQuery.Generator, expression: exp.TsOrDsDiff) -> str: +200 expression.this.replace(exp.cast(expression.this, "TIMESTAMP", copy=True)) +201 expression.expression.replace(exp.cast(expression.expression, "TIMESTAMP", copy=True)) +202 unit = unit_to_var(expression) +203 return self.func("DATE_DIFF", expression.this, expression.expression, unit) +204 +205 +206def _unix_to_time_sql(self: BigQuery.Generator, expression: exp.UnixToTime) -> str: +207 scale = expression.args.get("scale") +208 timestamp = expression.this +209 +210 if scale in (None, exp.UnixToTime.SECONDS): +211 return self.func("TIMESTAMP_SECONDS", timestamp) +212 if scale == exp.UnixToTime.MILLIS: +213 return self.func("TIMESTAMP_MILLIS", timestamp) +214 if scale == exp.UnixToTime.MICROS: +215 return self.func("TIMESTAMP_MICROS", timestamp) +216 +217 unix_seconds = exp.cast(exp.Div(this=timestamp, expression=exp.func("POW", 10, scale)), "int64") +218 return self.func("TIMESTAMP_SECONDS", unix_seconds) 219 220 -221class BigQuery(Dialect): -222 WEEK_OFFSET = -1 -223 UNNEST_COLUMN_ONLY = True -224 SUPPORTS_USER_DEFINED_TYPES = False -225 SUPPORTS_SEMI_ANTI_JOIN = False -226 LOG_BASE_FIRST = False -227 -228 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity -229 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE -230 -231 # bigquery udfs are case sensitive -232 NORMALIZE_FUNCTIONS = False -233 -234 # https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements#format_elements_date_time -235 TIME_MAPPING = { -236 "%D": "%m/%d/%y", -237 "%E*S": "%S.%f", -238 "%E6S": "%S.%f", -239 } -240 -241 ESCAPE_SEQUENCES = { -242 "\\a": "\a", -243 "\\b": "\b", -244 "\\f": "\f", -245 "\\n": "\n", -246 "\\r": "\r", -247 "\\t": "\t", -248 "\\v": "\v", -249 } -250 -251 FORMAT_MAPPING = { -252 "DD": "%d", -253 "MM": "%m", -254 "MON": "%b", -255 "MONTH": "%B", -256 "YYYY": "%Y", -257 "YY": "%y", -258 "HH": "%I", -259 "HH12": "%I", -260 "HH24": "%H", -261 "MI": "%M", -262 "SS": "%S", -263 "SSSSS": "%f", -264 "TZH": "%z", -265 } -266 -267 # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement -268 # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table -269 PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE"} -270 -271 def normalize_identifier(self, expression: E) -> E: -272 if isinstance(expression, exp.Identifier): -273 
parent = expression.parent -274 while isinstance(parent, exp.Dot): -275 parent = parent.parent -276 -277 # In BigQuery, CTEs are case-insensitive, but UDF and table names are case-sensitive -278 # by default. The following check uses a heuristic to detect tables based on whether -279 # they are qualified. This should generally be correct, because tables in BigQuery -280 # must be qualified with at least a dataset, unless @@dataset_id is set. -281 case_sensitive = ( -282 isinstance(parent, exp.UserDefinedFunction) -283 or ( -284 isinstance(parent, exp.Table) -285 and parent.db -286 and (parent.meta.get("quoted_table") or not parent.meta.get("maybe_column")) -287 ) -288 or expression.meta.get("is_table") -289 ) -290 if not case_sensitive: -291 expression.set("this", expression.this.lower()) -292 -293 return expression -294 -295 class Tokenizer(tokens.Tokenizer): -296 QUOTES = ["'", '"', '"""', "'''"] -297 COMMENTS = ["--", "#", ("/*", "*/")] -298 IDENTIFIERS = ["`"] -299 STRING_ESCAPES = ["\\"] -300 -301 HEX_STRINGS = [("0x", ""), ("0X", "")] -302 -303 BYTE_STRINGS = [ -304 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B") -305 ] -306 -307 RAW_STRINGS = [ -308 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R") -309 ] -310 -311 KEYWORDS = { -312 **tokens.Tokenizer.KEYWORDS, -313 "ANY TYPE": TokenType.VARIANT, -314 "BEGIN": TokenType.COMMAND, -315 "BEGIN TRANSACTION": TokenType.BEGIN, -316 "BYTES": TokenType.BINARY, -317 "CURRENT_DATETIME": TokenType.CURRENT_DATETIME, +221def _build_time(args: t.List) -> exp.Func: +222 if len(args) == 1: +223 return exp.TsOrDsToTime(this=args[0]) +224 if len(args) == 3: +225 return exp.TimeFromParts.from_arg_list(args) +226 +227 return exp.Anonymous(this="TIME", expressions=args) +228 +229 +230class BigQuery(Dialect): +231 WEEK_OFFSET = -1 +232 UNNEST_COLUMN_ONLY = True +233 SUPPORTS_USER_DEFINED_TYPES = False +234 SUPPORTS_SEMI_ANTI_JOIN = False +235 LOG_BASE_FIRST = False +236 +237 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity +238 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE +239 +240 # bigquery udfs are case sensitive +241 NORMALIZE_FUNCTIONS = False +242 +243 # https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements#format_elements_date_time +244 TIME_MAPPING = { +245 "%D": "%m/%d/%y", +246 "%E*S": "%S.%f", +247 "%E6S": "%S.%f", +248 } +249 +250 FORMAT_MAPPING = { +251 "DD": "%d", +252 "MM": "%m", +253 "MON": "%b", +254 "MONTH": "%B", +255 "YYYY": "%Y", +256 "YY": "%y", +257 "HH": "%I", +258 "HH12": "%I", +259 "HH24": "%H", +260 "MI": "%M", +261 "SS": "%S", +262 "SSSSS": "%f", +263 "TZH": "%z", +264 } +265 +266 # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement +267 # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table +268 PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE"} +269 +270 def normalize_identifier(self, expression: E) -> E: +271 if isinstance(expression, exp.Identifier): +272 parent = expression.parent +273 while isinstance(parent, exp.Dot): +274 parent = parent.parent +275 +276 # In BigQuery, CTEs are case-insensitive, but UDF and table names are case-sensitive +277 # by default. The following check uses a heuristic to detect tables based on whether +278 # they are qualified. 
This should generally be correct, because tables in BigQuery +279 # must be qualified with at least a dataset, unless @@dataset_id is set. +280 case_sensitive = ( +281 isinstance(parent, exp.UserDefinedFunction) +282 or ( +283 isinstance(parent, exp.Table) +284 and parent.db +285 and (parent.meta.get("quoted_table") or not parent.meta.get("maybe_column")) +286 ) +287 or expression.meta.get("is_table") +288 ) +289 if not case_sensitive: +290 expression.set("this", expression.this.lower()) +291 +292 return expression +293 +294 class Tokenizer(tokens.Tokenizer): +295 QUOTES = ["'", '"', '"""', "'''"] +296 COMMENTS = ["--", "#", ("/*", "*/")] +297 IDENTIFIERS = ["`"] +298 STRING_ESCAPES = ["\\"] +299 +300 HEX_STRINGS = [("0x", ""), ("0X", "")] +301 +302 BYTE_STRINGS = [ +303 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B") +304 ] +305 +306 RAW_STRINGS = [ +307 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R") +308 ] +309 +310 KEYWORDS = { +311 **tokens.Tokenizer.KEYWORDS, +312 "ANY TYPE": TokenType.VARIANT, +313 "BEGIN": TokenType.COMMAND, +314 "BEGIN TRANSACTION": TokenType.BEGIN, +315 "BYTES": TokenType.BINARY, +316 "CURRENT_DATETIME": TokenType.CURRENT_DATETIME, +317 "DATETIME": TokenType.TIMESTAMP, 318 "DECLARE": TokenType.COMMAND, 319 "ELSEIF": TokenType.COMMAND, 320 "EXCEPTION": TokenType.COMMAND, @@ -844,14 +850,14 @@ 486 table.set("db", exp.Identifier(this=parts[0])) 487 table.set("this", exp.Identifier(this=parts[1])) 488 -489 if isinstance(table.this, exp.Identifier) and "." in table.name: +489 if any("." in p.name for p in table.parts): 490 catalog, db, this, *rest = ( -491 t.cast(t.Optional[exp.Expression], exp.to_identifier(x, quoted=True)) -492 for x in split_num_words(table.name, ".", 3) +491 exp.to_identifier(p, quoted=True) +492 for p in split_num_words(".".join(p.name for p in table.parts), ".", 3) 493 ) 494 495 if rest and this: -496 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) +496 this = exp.Dot.build([this, *rest]) # type: ignore 497 498 table = exp.Table(this=this, db=db, catalog=catalog) 499 table.meta["quoted_table"] = True @@ -885,375 +891,379 @@ 527 528 return json_object 529 -530 def _parse_bracket(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]: -531 bracket = super()._parse_bracket(this) -532 -533 if this is bracket: -534 return bracket -535 -536 if isinstance(bracket, exp.Bracket): -537 for expression in bracket.expressions: -538 name = expression.name.upper() -539 -540 if name not in self.BRACKET_OFFSETS: -541 break -542 -543 offset, safe = self.BRACKET_OFFSETS[name] -544 bracket.set("offset", offset) -545 bracket.set("safe", safe) -546 expression.replace(expression.expressions[0]) -547 -548 return bracket +530 def _parse_bracket( +531 self, this: t.Optional[exp.Expression] = None +532 ) -> t.Optional[exp.Expression]: +533 bracket = super()._parse_bracket(this) +534 +535 if this is bracket: +536 return bracket +537 +538 if isinstance(bracket, exp.Bracket): +539 for expression in bracket.expressions: +540 name = expression.name.upper() +541 +542 if name not in self.BRACKET_OFFSETS: +543 break +544 +545 offset, safe = self.BRACKET_OFFSETS[name] +546 bracket.set("offset", offset) +547 bracket.set("safe", safe) +548 expression.replace(expression.expressions[0]) 549 -550 class Generator(generator.Generator): -551 EXPLICIT_UNION = True -552 INTERVAL_ALLOWS_PLURAL_FORM = False -553 JOIN_HINTS = False -554 QUERY_HINTS = False -555 TABLE_HINTS = False -556 LIMIT_FETCH = 
"LIMIT" -557 RENAME_TABLE_WITH_DB = False -558 NVL2_SUPPORTED = False -559 UNNEST_WITH_ORDINALITY = False -560 COLLATE_IS_FUNC = True -561 LIMIT_ONLY_LITERALS = True -562 SUPPORTS_TABLE_ALIAS_COLUMNS = False -563 UNPIVOT_ALIASES_ARE_IDENTIFIERS = False -564 JSON_KEY_VALUE_PAIR_SEP = "," -565 NULL_ORDERING_SUPPORTED = False -566 IGNORE_NULLS_IN_FUNC = True -567 JSON_PATH_SINGLE_QUOTE_ESCAPE = True -568 CAN_IMPLEMENT_ARRAY_ANY = True -569 NAMED_PLACEHOLDER_TOKEN = "@" -570 -571 TRANSFORMS = { -572 **generator.Generator.TRANSFORMS, -573 exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"), -574 exp.ArgMax: arg_max_or_min_no_count("MAX_BY"), -575 exp.ArgMin: arg_max_or_min_no_count("MIN_BY"), -576 exp.ArrayContains: _array_contains_sql, -577 exp.ArrayFilter: filter_array_using_unnest, -578 exp.ArraySize: rename_func("ARRAY_LENGTH"), -579 exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), -580 exp.CollateProperty: lambda self, e: ( -581 f"DEFAULT COLLATE {self.sql(e, 'this')}" -582 if e.args.get("default") -583 else f"COLLATE {self.sql(e, 'this')}" -584 ), -585 exp.Commit: lambda *_: "COMMIT TRANSACTION", -586 exp.CountIf: rename_func("COUNTIF"), -587 exp.Create: _create_sql, -588 exp.CTE: transforms.preprocess([_pushdown_cte_column_names]), -589 exp.DateAdd: date_add_interval_sql("DATE", "ADD"), -590 exp.DateDiff: lambda self, e: self.func( -591 "DATE_DIFF", e.this, e.expression, e.unit or "DAY" -592 ), -593 exp.DateFromParts: rename_func("DATE"), -594 exp.DateStrToDate: datestrtodate_sql, -595 exp.DateSub: date_add_interval_sql("DATE", "SUB"), -596 exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"), -597 exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"), -598 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), -599 exp.FromTimeZone: lambda self, e: self.func( -600 "DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'" -601 ), -602 exp.GenerateSeries: rename_func("GENERATE_ARRAY"), -603 exp.GroupConcat: rename_func("STRING_AGG"), -604 exp.Hex: rename_func("TO_HEX"), -605 exp.If: if_sql(false_value="NULL"), -606 exp.ILike: no_ilike_sql, -607 exp.IntDiv: rename_func("DIV"), -608 exp.JSONFormat: rename_func("TO_JSON_STRING"), -609 exp.Max: max_or_greatest, -610 exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)), -611 exp.MD5Digest: rename_func("MD5"), -612 exp.Min: min_or_least, -613 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", -614 exp.RegexpExtract: lambda self, e: self.func( -615 "REGEXP_EXTRACT", -616 e.this, -617 e.expression, -618 e.args.get("position"), -619 e.args.get("occurrence"), -620 ), -621 exp.RegexpReplace: regexp_replace_sql, -622 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), -623 exp.ReturnsProperty: _returnsproperty_sql, -624 exp.Rollback: lambda *_: "ROLLBACK TRANSACTION", -625 exp.Select: transforms.preprocess( -626 [ -627 transforms.explode_to_unnest(), -628 _unqualify_unnest, -629 transforms.eliminate_distinct_on, -630 _alias_ordered_group, -631 transforms.eliminate_semi_and_anti_joins, -632 ] -633 ), -634 exp.SHA2: lambda self, e: self.func( -635 "SHA256" if e.text("length") == "256" else "SHA512", e.this -636 ), -637 exp.StabilityProperty: lambda self, e: ( -638 "DETERMINISTIC" if e.name == "IMMUTABLE" else "NOT DETERMINISTIC" -639 ), -640 exp.StrToDate: lambda self, e: self.func("PARSE_DATE", self.format_time(e), e.this), -641 exp.StrToTime: lambda self, e: self.func( -642 "PARSE_TIMESTAMP", self.format_time(e), 
e.this, e.args.get("zone") +550 return bracket +551 +552 class Generator(generator.Generator): +553 EXPLICIT_UNION = True +554 INTERVAL_ALLOWS_PLURAL_FORM = False +555 JOIN_HINTS = False +556 QUERY_HINTS = False +557 TABLE_HINTS = False +558 LIMIT_FETCH = "LIMIT" +559 RENAME_TABLE_WITH_DB = False +560 NVL2_SUPPORTED = False +561 UNNEST_WITH_ORDINALITY = False +562 COLLATE_IS_FUNC = True +563 LIMIT_ONLY_LITERALS = True +564 SUPPORTS_TABLE_ALIAS_COLUMNS = False +565 UNPIVOT_ALIASES_ARE_IDENTIFIERS = False +566 JSON_KEY_VALUE_PAIR_SEP = "," +567 NULL_ORDERING_SUPPORTED = False +568 IGNORE_NULLS_IN_FUNC = True +569 JSON_PATH_SINGLE_QUOTE_ESCAPE = True +570 CAN_IMPLEMENT_ARRAY_ANY = True +571 SUPPORTS_TO_NUMBER = False +572 NAMED_PLACEHOLDER_TOKEN = "@" +573 +574 TRANSFORMS = { +575 **generator.Generator.TRANSFORMS, +576 exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"), +577 exp.ArgMax: arg_max_or_min_no_count("MAX_BY"), +578 exp.ArgMin: arg_max_or_min_no_count("MIN_BY"), +579 exp.ArrayContains: _array_contains_sql, +580 exp.ArrayFilter: filter_array_using_unnest, +581 exp.ArraySize: rename_func("ARRAY_LENGTH"), +582 exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), +583 exp.CollateProperty: lambda self, e: ( +584 f"DEFAULT COLLATE {self.sql(e, 'this')}" +585 if e.args.get("default") +586 else f"COLLATE {self.sql(e, 'this')}" +587 ), +588 exp.Commit: lambda *_: "COMMIT TRANSACTION", +589 exp.CountIf: rename_func("COUNTIF"), +590 exp.Create: _create_sql, +591 exp.CTE: transforms.preprocess([_pushdown_cte_column_names]), +592 exp.DateAdd: date_add_interval_sql("DATE", "ADD"), +593 exp.DateDiff: lambda self, e: self.func( +594 "DATE_DIFF", e.this, e.expression, unit_to_var(e) +595 ), +596 exp.DateFromParts: rename_func("DATE"), +597 exp.DateStrToDate: datestrtodate_sql, +598 exp.DateSub: date_add_interval_sql("DATE", "SUB"), +599 exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"), +600 exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"), +601 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), +602 exp.FromTimeZone: lambda self, e: self.func( +603 "DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'" +604 ), +605 exp.GenerateSeries: rename_func("GENERATE_ARRAY"), +606 exp.GroupConcat: rename_func("STRING_AGG"), +607 exp.Hex: rename_func("TO_HEX"), +608 exp.If: if_sql(false_value="NULL"), +609 exp.ILike: no_ilike_sql, +610 exp.IntDiv: rename_func("DIV"), +611 exp.JSONFormat: rename_func("TO_JSON_STRING"), +612 exp.Max: max_or_greatest, +613 exp.Mod: rename_func("MOD"), +614 exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)), +615 exp.MD5Digest: rename_func("MD5"), +616 exp.Min: min_or_least, +617 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", +618 exp.RegexpExtract: lambda self, e: self.func( +619 "REGEXP_EXTRACT", +620 e.this, +621 e.expression, +622 e.args.get("position"), +623 e.args.get("occurrence"), +624 ), +625 exp.RegexpReplace: regexp_replace_sql, +626 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), +627 exp.ReturnsProperty: _returnsproperty_sql, +628 exp.Rollback: lambda *_: "ROLLBACK TRANSACTION", +629 exp.Select: transforms.preprocess( +630 [ +631 transforms.explode_to_unnest(), +632 _unqualify_unnest, +633 transforms.eliminate_distinct_on, +634 _alias_ordered_group, +635 transforms.eliminate_semi_and_anti_joins, +636 ] +637 ), +638 exp.SHA2: lambda self, e: self.func( +639 "SHA256" if e.text("length") == "256" else "SHA512", e.this 
+640 ), +641 exp.StabilityProperty: lambda self, e: ( +642 "DETERMINISTIC" if e.name == "IMMUTABLE" else "NOT DETERMINISTIC" 643 ), -644 exp.TimeAdd: date_add_interval_sql("TIME", "ADD"), -645 exp.TimeFromParts: rename_func("TIME"), -646 exp.TimeSub: date_add_interval_sql("TIME", "SUB"), -647 exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"), -648 exp.TimestampDiff: rename_func("TIMESTAMP_DIFF"), -649 exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"), -650 exp.TimeStrToTime: timestrtotime_sql, -651 exp.Transaction: lambda *_: "BEGIN TRANSACTION", -652 exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression), -653 exp.TsOrDsAdd: _ts_or_ds_add_sql, -654 exp.TsOrDsDiff: _ts_or_ds_diff_sql, -655 exp.TsOrDsToTime: rename_func("TIME"), -656 exp.Unhex: rename_func("FROM_HEX"), -657 exp.UnixDate: rename_func("UNIX_DATE"), -658 exp.UnixToTime: _unix_to_time_sql, -659 exp.Values: _derived_table_values_to_unnest, -660 exp.VariancePop: rename_func("VAR_POP"), -661 } -662 -663 SUPPORTED_JSON_PATH_PARTS = { -664 exp.JSONPathKey, -665 exp.JSONPathRoot, -666 exp.JSONPathSubscript, -667 } -668 -669 TYPE_MAPPING = { -670 **generator.Generator.TYPE_MAPPING, -671 exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC", -672 exp.DataType.Type.BIGINT: "INT64", -673 exp.DataType.Type.BINARY: "BYTES", -674 exp.DataType.Type.BOOLEAN: "BOOL", -675 exp.DataType.Type.CHAR: "STRING", -676 exp.DataType.Type.DECIMAL: "NUMERIC", -677 exp.DataType.Type.DOUBLE: "FLOAT64", -678 exp.DataType.Type.FLOAT: "FLOAT64", -679 exp.DataType.Type.INT: "INT64", -680 exp.DataType.Type.NCHAR: "STRING", -681 exp.DataType.Type.NVARCHAR: "STRING", -682 exp.DataType.Type.SMALLINT: "INT64", -683 exp.DataType.Type.TEXT: "STRING", -684 exp.DataType.Type.TIMESTAMP: "DATETIME", -685 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", -686 exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP", -687 exp.DataType.Type.TINYINT: "INT64", -688 exp.DataType.Type.VARBINARY: "BYTES", -689 exp.DataType.Type.VARCHAR: "STRING", -690 exp.DataType.Type.VARIANT: "ANY TYPE", -691 } -692 -693 PROPERTIES_LOCATION = { -694 **generator.Generator.PROPERTIES_LOCATION, -695 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, -696 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, -697 } -698 -699 # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords -700 RESERVED_KEYWORDS = { -701 *generator.Generator.RESERVED_KEYWORDS, -702 "all", -703 "and", -704 "any", -705 "array", -706 "as", -707 "asc", -708 "assert_rows_modified", -709 "at", -710 "between", -711 "by", -712 "case", -713 "cast", -714 "collate", -715 "contains", -716 "create", -717 "cross", -718 "cube", -719 "current", -720 "default", -721 "define", -722 "desc", -723 "distinct", -724 "else", -725 "end", -726 "enum", -727 "escape", -728 "except", -729 "exclude", -730 "exists", -731 "extract", -732 "false", -733 "fetch", -734 "following", -735 "for", -736 "from", -737 "full", -738 "group", -739 "grouping", -740 "groups", -741 "hash", -742 "having", -743 "if", -744 "ignore", -745 "in", -746 "inner", -747 "intersect", -748 "interval", -749 "into", -750 "is", -751 "join", -752 "lateral", -753 "left", -754 "like", -755 "limit", -756 "lookup", -757 "merge", -758 "natural", -759 "new", -760 "no", -761 "not", -762 "null", -763 "nulls", -764 "of", -765 "on", -766 "or", -767 "order", -768 "outer", -769 "over", -770 "partition", -771 "preceding", -772 "proto", -773 "qualify", -774 "range", -775 "recursive", -776 "respect", -777 "right", -778 "rollup", -779 "rows", -780 
"select", -781 "set", -782 "some", -783 "struct", -784 "tablesample", -785 "then", -786 "to", -787 "treat", -788 "true", -789 "unbounded", -790 "union", -791 "unnest", -792 "using", -793 "when", -794 "where", -795 "window", -796 "with", -797 "within", -798 } -799 -800 def table_parts(self, expression: exp.Table) -> str: -801 # Depending on the context, `x.y` may not resolve to the same data source as `x`.`y`, so -802 # we need to make sure the correct quoting is used in each case. -803 # -804 # For example, if there is a CTE x that clashes with a schema name, then the former will -805 # return the table y in that schema, whereas the latter will return the CTE's y column: -806 # -807 # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x.y` -> cross join -808 # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x`.`y` -> implicit unnest -809 if expression.meta.get("quoted_table"): -810 table_parts = ".".join(p.name for p in expression.parts) -811 return self.sql(exp.Identifier(this=table_parts, quoted=True)) -812 -813 return super().table_parts(expression) -814 -815 def timetostr_sql(self, expression: exp.TimeToStr) -> str: -816 this = expression.this if isinstance(expression.this, exp.TsOrDsToDate) else expression -817 return self.func("FORMAT_DATE", self.format_time(expression), this.this) +644 exp.StrToDate: lambda self, e: self.func("PARSE_DATE", self.format_time(e), e.this), +645 exp.StrToTime: lambda self, e: self.func( +646 "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone") +647 ), +648 exp.TimeAdd: date_add_interval_sql("TIME", "ADD"), +649 exp.TimeFromParts: rename_func("TIME"), +650 exp.TimeSub: date_add_interval_sql("TIME", "SUB"), +651 exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"), +652 exp.TimestampDiff: rename_func("TIMESTAMP_DIFF"), +653 exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"), +654 exp.TimeStrToTime: timestrtotime_sql, +655 exp.Transaction: lambda *_: "BEGIN TRANSACTION", +656 exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression), +657 exp.TsOrDsAdd: _ts_or_ds_add_sql, +658 exp.TsOrDsDiff: _ts_or_ds_diff_sql, +659 exp.TsOrDsToTime: rename_func("TIME"), +660 exp.Unhex: rename_func("FROM_HEX"), +661 exp.UnixDate: rename_func("UNIX_DATE"), +662 exp.UnixToTime: _unix_to_time_sql, +663 exp.Values: _derived_table_values_to_unnest, +664 exp.VariancePop: rename_func("VAR_POP"), +665 } +666 +667 SUPPORTED_JSON_PATH_PARTS = { +668 exp.JSONPathKey, +669 exp.JSONPathRoot, +670 exp.JSONPathSubscript, +671 } +672 +673 TYPE_MAPPING = { +674 **generator.Generator.TYPE_MAPPING, +675 exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC", +676 exp.DataType.Type.BIGINT: "INT64", +677 exp.DataType.Type.BINARY: "BYTES", +678 exp.DataType.Type.BOOLEAN: "BOOL", +679 exp.DataType.Type.CHAR: "STRING", +680 exp.DataType.Type.DECIMAL: "NUMERIC", +681 exp.DataType.Type.DOUBLE: "FLOAT64", +682 exp.DataType.Type.FLOAT: "FLOAT64", +683 exp.DataType.Type.INT: "INT64", +684 exp.DataType.Type.NCHAR: "STRING", +685 exp.DataType.Type.NVARCHAR: "STRING", +686 exp.DataType.Type.SMALLINT: "INT64", +687 exp.DataType.Type.TEXT: "STRING", +688 exp.DataType.Type.TIMESTAMP: "DATETIME", +689 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", +690 exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP", +691 exp.DataType.Type.TINYINT: "INT64", +692 exp.DataType.Type.VARBINARY: "BYTES", +693 exp.DataType.Type.VARCHAR: "STRING", +694 exp.DataType.Type.VARIANT: "ANY TYPE", +695 } +696 +697 PROPERTIES_LOCATION = { +698 **generator.Generator.PROPERTIES_LOCATION, +699 exp.PartitionedByProperty: 
exp.Properties.Location.POST_SCHEMA, +700 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, +701 } +702 +703 # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords +704 RESERVED_KEYWORDS = { +705 *generator.Generator.RESERVED_KEYWORDS, +706 "all", +707 "and", +708 "any", +709 "array", +710 "as", +711 "asc", +712 "assert_rows_modified", +713 "at", +714 "between", +715 "by", +716 "case", +717 "cast", +718 "collate", +719 "contains", +720 "create", +721 "cross", +722 "cube", +723 "current", +724 "default", +725 "define", +726 "desc", +727 "distinct", +728 "else", +729 "end", +730 "enum", +731 "escape", +732 "except", +733 "exclude", +734 "exists", +735 "extract", +736 "false", +737 "fetch", +738 "following", +739 "for", +740 "from", +741 "full", +742 "group", +743 "grouping", +744 "groups", +745 "hash", +746 "having", +747 "if", +748 "ignore", +749 "in", +750 "inner", +751 "intersect", +752 "interval", +753 "into", +754 "is", +755 "join", +756 "lateral", +757 "left", +758 "like", +759 "limit", +760 "lookup", +761 "merge", +762 "natural", +763 "new", +764 "no", +765 "not", +766 "null", +767 "nulls", +768 "of", +769 "on", +770 "or", +771 "order", +772 "outer", +773 "over", +774 "partition", +775 "preceding", +776 "proto", +777 "qualify", +778 "range", +779 "recursive", +780 "respect", +781 "right", +782 "rollup", +783 "rows", +784 "select", +785 "set", +786 "some", +787 "struct", +788 "tablesample", +789 "then", +790 "to", +791 "treat", +792 "true", +793 "unbounded", +794 "union", +795 "unnest", +796 "using", +797 "when", +798 "where", +799 "window", +800 "with", +801 "within", +802 } +803 +804 def table_parts(self, expression: exp.Table) -> str: +805 # Depending on the context, `x.y` may not resolve to the same data source as `x`.`y`, so +806 # we need to make sure the correct quoting is used in each case. +807 # +808 # For example, if there is a CTE x that clashes with a schema name, then the former will +809 # return the table y in that schema, whereas the latter will return the CTE's y column: +810 # +811 # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x.y` -> cross join +812 # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x`.`y` -> implicit unnest +813 if expression.meta.get("quoted_table"): +814 table_parts = ".".join(p.name for p in expression.parts) +815 return self.sql(exp.Identifier(this=table_parts, quoted=True)) +816 +817 return super().table_parts(expression) 818 -819 def eq_sql(self, expression: exp.EQ) -> str: -820 # Operands of = cannot be NULL in BigQuery -821 if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null): -822 if not isinstance(expression.parent, exp.Update): -823 return "NULL" -824 -825 return self.binary(expression, "=") -826 -827 def attimezone_sql(self, expression: exp.AtTimeZone) -> str: -828 parent = expression.parent -829 -830 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). -831 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. 
-832 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): -833 return self.func( -834 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) -835 ) -836 -837 return super().attimezone_sql(expression) -838 -839 def trycast_sql(self, expression: exp.TryCast) -> str: -840 return self.cast_sql(expression, safe_prefix="SAFE_") -841 -842 def array_sql(self, expression: exp.Array) -> str: -843 first_arg = seq_get(expression.expressions, 0) -844 if isinstance(first_arg, exp.Query): -845 return f"ARRAY{self.wrap(self.sql(first_arg))}" -846 -847 return inline_array_sql(self, expression) -848 -849 def bracket_sql(self, expression: exp.Bracket) -> str: -850 this = self.sql(expression, "this") -851 expressions = expression.expressions +819 def timetostr_sql(self, expression: exp.TimeToStr) -> str: +820 this = expression.this if isinstance(expression.this, exp.TsOrDsToDate) else expression +821 return self.func("FORMAT_DATE", self.format_time(expression), this.this) +822 +823 def eq_sql(self, expression: exp.EQ) -> str: +824 # Operands of = cannot be NULL in BigQuery +825 if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null): +826 if not isinstance(expression.parent, exp.Update): +827 return "NULL" +828 +829 return self.binary(expression, "=") +830 +831 def attimezone_sql(self, expression: exp.AtTimeZone) -> str: +832 parent = expression.parent +833 +834 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). +835 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. +836 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): +837 return self.func( +838 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) +839 ) +840 +841 return super().attimezone_sql(expression) +842 +843 def trycast_sql(self, expression: exp.TryCast) -> str: +844 return self.cast_sql(expression, safe_prefix="SAFE_") +845 +846 def array_sql(self, expression: exp.Array) -> str: +847 first_arg = seq_get(expression.expressions, 0) +848 if isinstance(first_arg, exp.Query): +849 return f"ARRAY{self.wrap(self.sql(first_arg))}" +850 +851 return inline_array_sql(self, expression) 852 -853 if len(expressions) == 1: -854 arg = expressions[0] -855 if arg.type is None: -856 from sqlglot.optimizer.annotate_types import annotate_types -857 -858 arg = annotate_types(arg) -859 -860 if arg.type and arg.type.this in exp.DataType.TEXT_TYPES: -861 # BQ doesn't support bracket syntax with string values -862 return f"{this}.{arg.name}" +853 def bracket_sql(self, expression: exp.Bracket) -> str: +854 this = expression.this +855 expressions = expression.expressions +856 +857 if len(expressions) == 1 and this and this.is_type(exp.DataType.Type.STRUCT): +858 arg = expressions[0] +859 if arg.type is None: +860 from sqlglot.optimizer.annotate_types import annotate_types +861 +862 arg = annotate_types(arg) 863 -864 expressions_sql = ", ".join(self.sql(e) for e in expressions) -865 offset = expression.args.get("offset") -866 -867 if offset == 0: -868 expressions_sql = f"OFFSET({expressions_sql})" -869 elif offset == 1: -870 expressions_sql = f"ORDINAL({expressions_sql})" -871 elif offset is not None: -872 self.unsupported(f"Unsupported array offset: {offset}") -873 -874 if expression.args.get("safe"): -875 expressions_sql = f"SAFE_{expressions_sql}" -876 -877 return f"{this}[{expressions_sql}]" -878 -879 def in_unnest_op(self, expression: exp.Unnest) -> str: -880 return 
self.sql(expression) -881 -882 def except_op(self, expression: exp.Except) -> str: -883 if not expression.args.get("distinct"): -884 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") -885 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" -886 -887 def intersect_op(self, expression: exp.Intersect) -> str: -888 if not expression.args.get("distinct"): -889 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") -890 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" -891 -892 def with_properties(self, properties: exp.Properties) -> str: -893 return self.properties(properties, prefix=self.seg("OPTIONS")) -894 -895 def version_sql(self, expression: exp.Version) -> str: -896 if expression.name == "TIMESTAMP": -897 expression.set("this", "SYSTEM_TIME") -898 return super().version_sql(expression) +864 if arg.type and arg.type.this in exp.DataType.TEXT_TYPES: +865 # BQ doesn't support bracket syntax with string values for structs +866 return f"{self.sql(this)}.{arg.name}" +867 +868 expressions_sql = self.expressions(expression, flat=True) +869 offset = expression.args.get("offset") +870 +871 if offset == 0: +872 expressions_sql = f"OFFSET({expressions_sql})" +873 elif offset == 1: +874 expressions_sql = f"ORDINAL({expressions_sql})" +875 elif offset is not None: +876 self.unsupported(f"Unsupported array offset: {offset}") +877 +878 if expression.args.get("safe"): +879 expressions_sql = f"SAFE_{expressions_sql}" +880 +881 return f"{self.sql(this)}[{expressions_sql}]" +882 +883 def in_unnest_op(self, expression: exp.Unnest) -> str: +884 return self.sql(expression) +885 +886 def except_op(self, expression: exp.Except) -> str: +887 if not expression.args.get("distinct"): +888 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") +889 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +890 +891 def intersect_op(self, expression: exp.Intersect) -> str: +892 if not expression.args.get("distinct"): +893 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") +894 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +895 +896 def with_properties(self, properties: exp.Properties) -> str: +897 return self.properties(properties, prefix=self.seg("OPTIONS")) +898 +899 def version_sql(self, expression: exp.Version) -> str: +900 if expression.name == "TIMESTAMP": +901 expression.set("this", "SYSTEM_TIME") +902 return super().version_sql(expression) @@ -1281,103 +1291,94 @@ -
    222class BigQuery(Dialect):
    -223    WEEK_OFFSET = -1
    -224    UNNEST_COLUMN_ONLY = True
    -225    SUPPORTS_USER_DEFINED_TYPES = False
    -226    SUPPORTS_SEMI_ANTI_JOIN = False
    -227    LOG_BASE_FIRST = False
    -228
    -229    # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity
    -230    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    -231
    -232    # bigquery udfs are case sensitive
    -233    NORMALIZE_FUNCTIONS = False
    -234
    -235    # https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements#format_elements_date_time
    -236    TIME_MAPPING = {
    -237        "%D": "%m/%d/%y",
    -238        "%E*S": "%S.%f",
    -239        "%E6S": "%S.%f",
    -240    }
    -241
    -242    ESCAPE_SEQUENCES = {
    -243        "\\a": "\a",
    -244        "\\b": "\b",
    -245        "\\f": "\f",
    -246        "\\n": "\n",
    -247        "\\r": "\r",
    -248        "\\t": "\t",
    -249        "\\v": "\v",
    -250    }
    -251
    -252    FORMAT_MAPPING = {
    -253        "DD": "%d",
    -254        "MM": "%m",
    -255        "MON": "%b",
    -256        "MONTH": "%B",
    -257        "YYYY": "%Y",
    -258        "YY": "%y",
    -259        "HH": "%I",
    -260        "HH12": "%I",
    -261        "HH24": "%H",
    -262        "MI": "%M",
    -263        "SS": "%S",
    -264        "SSSSS": "%f",
    -265        "TZH": "%z",
    -266    }
    -267
    -268    # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement
    -269    # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table
    -270    PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE"}
    -271
    -272    def normalize_identifier(self, expression: E) -> E:
    -273        if isinstance(expression, exp.Identifier):
    -274            parent = expression.parent
    -275            while isinstance(parent, exp.Dot):
    -276                parent = parent.parent
    -277
    -278            # In BigQuery, CTEs are case-insensitive, but UDF and table names are case-sensitive
    -279            # by default. The following check uses a heuristic to detect tables based on whether
    -280            # they are qualified. This should generally be correct, because tables in BigQuery
    -281            # must be qualified with at least a dataset, unless @@dataset_id is set.
    -282            case_sensitive = (
    -283                isinstance(parent, exp.UserDefinedFunction)
    -284                or (
    -285                    isinstance(parent, exp.Table)
    -286                    and parent.db
    -287                    and (parent.meta.get("quoted_table") or not parent.meta.get("maybe_column"))
    -288                )
    -289                or expression.meta.get("is_table")
    -290            )
    -291            if not case_sensitive:
    -292                expression.set("this", expression.this.lower())
    -293
    -294        return expression
    -295
    -296    class Tokenizer(tokens.Tokenizer):
    -297        QUOTES = ["'", '"', '"""', "'''"]
    -298        COMMENTS = ["--", "#", ("/*", "*/")]
    -299        IDENTIFIERS = ["`"]
    -300        STRING_ESCAPES = ["\\"]
    -301
    -302        HEX_STRINGS = [("0x", ""), ("0X", "")]
    -303
    -304        BYTE_STRINGS = [
    -305            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
    -306        ]
    -307
    -308        RAW_STRINGS = [
    -309            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
    -310        ]
    -311
    -312        KEYWORDS = {
    -313            **tokens.Tokenizer.KEYWORDS,
    -314            "ANY TYPE": TokenType.VARIANT,
    -315            "BEGIN": TokenType.COMMAND,
    -316            "BEGIN TRANSACTION": TokenType.BEGIN,
    -317            "BYTES": TokenType.BINARY,
    -318            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
    +            
    231class BigQuery(Dialect):
    +232    WEEK_OFFSET = -1
    +233    UNNEST_COLUMN_ONLY = True
    +234    SUPPORTS_USER_DEFINED_TYPES = False
    +235    SUPPORTS_SEMI_ANTI_JOIN = False
    +236    LOG_BASE_FIRST = False
    +237
    +238    # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity
    +239    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    +240
    +241    # bigquery udfs are case sensitive
    +242    NORMALIZE_FUNCTIONS = False
    +243
    +244    # https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements#format_elements_date_time
    +245    TIME_MAPPING = {
    +246        "%D": "%m/%d/%y",
    +247        "%E*S": "%S.%f",
    +248        "%E6S": "%S.%f",
    +249    }
    +250
    +251    FORMAT_MAPPING = {
    +252        "DD": "%d",
    +253        "MM": "%m",
    +254        "MON": "%b",
    +255        "MONTH": "%B",
    +256        "YYYY": "%Y",
    +257        "YY": "%y",
    +258        "HH": "%I",
    +259        "HH12": "%I",
    +260        "HH24": "%H",
    +261        "MI": "%M",
    +262        "SS": "%S",
    +263        "SSSSS": "%f",
    +264        "TZH": "%z",
    +265    }
    +266
    +267    # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement
    +268    # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table
    +269    PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE"}
    +270
    +271    def normalize_identifier(self, expression: E) -> E:
    +272        if isinstance(expression, exp.Identifier):
    +273            parent = expression.parent
    +274            while isinstance(parent, exp.Dot):
    +275                parent = parent.parent
    +276
    +277            # In BigQuery, CTEs are case-insensitive, but UDF and table names are case-sensitive
    +278            # by default. The following check uses a heuristic to detect tables based on whether
    +279            # they are qualified. This should generally be correct, because tables in BigQuery
    +280            # must be qualified with at least a dataset, unless @@dataset_id is set.
    +281            case_sensitive = (
    +282                isinstance(parent, exp.UserDefinedFunction)
    +283                or (
    +284                    isinstance(parent, exp.Table)
    +285                    and parent.db
    +286                    and (parent.meta.get("quoted_table") or not parent.meta.get("maybe_column"))
    +287                )
    +288                or expression.meta.get("is_table")
    +289            )
    +290            if not case_sensitive:
    +291                expression.set("this", expression.this.lower())
    +292
    +293        return expression
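(Aside, not part of the patch: a minimal sketch of how the case-sensitivity heuristic above surfaces through sqlglot's identifier normalization. It assumes a recent sqlglot release; the CTE, column, and alias names are made up.)

    import sqlglot
    from sqlglot.optimizer.normalize_identifiers import normalize_identifiers

    # CTE, column and alias names are case-insensitive in BigQuery, so normalization
    # lowercases them when they are unquoted.
    expr = sqlglot.parse_one(
        "WITH Some_CTE AS (SELECT 1 AS Col) SELECT Col FROM Some_CTE",
        read="bigquery",
    )
    print(normalize_identifiers(expr, dialect="bigquery").sql(dialect="bigquery"))
    # roughly: WITH some_cte AS (SELECT 1 AS col) SELECT col FROM some_cte

A dataset-qualified or back-quoted table name, by contrast, would generally keep its casing, since the heuristic above treats qualified tables (and UDFs) as case-sensitive.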
    +294
    +295    class Tokenizer(tokens.Tokenizer):
    +296        QUOTES = ["'", '"', '"""', "'''"]
    +297        COMMENTS = ["--", "#", ("/*", "*/")]
    +298        IDENTIFIERS = ["`"]
    +299        STRING_ESCAPES = ["\\"]
    +300
    +301        HEX_STRINGS = [("0x", ""), ("0X", "")]
    +302
    +303        BYTE_STRINGS = [
    +304            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
    +305        ]
    +306
    +307        RAW_STRINGS = [
    +308            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
    +309        ]
    +310
    +311        KEYWORDS = {
    +312            **tokens.Tokenizer.KEYWORDS,
    +313            "ANY TYPE": TokenType.VARIANT,
    +314            "BEGIN": TokenType.COMMAND,
    +315            "BEGIN TRANSACTION": TokenType.BEGIN,
    +316            "BYTES": TokenType.BINARY,
    +317            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
    +318            "DATETIME": TokenType.TIMESTAMP,
     319            "DECLARE": TokenType.COMMAND,
     320            "ELSEIF": TokenType.COMMAND,
     321            "EXCEPTION": TokenType.COMMAND,
    @@ -1549,14 +1550,14 @@
     487                        table.set("db", exp.Identifier(this=parts[0]))
     488                        table.set("this", exp.Identifier(this=parts[1]))
     489
    -490            if isinstance(table.this, exp.Identifier) and "." in table.name:
    +490            if any("." in p.name for p in table.parts):
     491                catalog, db, this, *rest = (
    -492                    t.cast(t.Optional[exp.Expression], exp.to_identifier(x, quoted=True))
    -493                    for x in split_num_words(table.name, ".", 3)
    +492                    exp.to_identifier(p, quoted=True)
    +493                    for p in split_num_words(".".join(p.name for p in table.parts), ".", 3)
     494                )
     495
     496                if rest and this:
    -497                    this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))
    +497                    this = exp.Dot.build([this, *rest])  # type: ignore
     498
     499                table = exp.Table(this=this, db=db, catalog=catalog)
     500                table.meta["quoted_table"] = True
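(Aside, not part of the patch: a hedged sketch of what the table-parts handling in the hunk above enables. It assumes a recent sqlglot release; the project/dataset/table names are hypothetical.)

    import sqlglot
    from sqlglot import exp

    sql = "SELECT * FROM `my-project.my_dataset.my_table`"
    table = sqlglot.parse_one(sql, read="bigquery").find(exp.Table)
    # The single back-quoted, dotted path is split into catalog/db/table parts ...
    print(table.catalog, table.db, table.name)  # roughly: my-project my_dataset my_table
    # ... and the quoted_table flag makes the generator re-emit it as one quoted identifier.
    print(sqlglot.transpile(sql, read="bigquery", write="bigquery")[0])
    # roughly: SELECT * FROM `my-project.my_dataset.my_table`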
    @@ -1590,375 +1591,379 @@
     528
     529            return json_object
     530
    -531        def _parse_bracket(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]:
    -532            bracket = super()._parse_bracket(this)
    -533
    -534            if this is bracket:
    -535                return bracket
    -536
    -537            if isinstance(bracket, exp.Bracket):
    -538                for expression in bracket.expressions:
    -539                    name = expression.name.upper()
    -540
    -541                    if name not in self.BRACKET_OFFSETS:
    -542                        break
    -543
    -544                    offset, safe = self.BRACKET_OFFSETS[name]
    -545                    bracket.set("offset", offset)
    -546                    bracket.set("safe", safe)
    -547                    expression.replace(expression.expressions[0])
    -548
    -549            return bracket
    +531        def _parse_bracket(
    +532            self, this: t.Optional[exp.Expression] = None
    +533        ) -> t.Optional[exp.Expression]:
    +534            bracket = super()._parse_bracket(this)
    +535
    +536            if this is bracket:
    +537                return bracket
    +538
    +539            if isinstance(bracket, exp.Bracket):
    +540                for expression in bracket.expressions:
    +541                    name = expression.name.upper()
    +542
    +543                    if name not in self.BRACKET_OFFSETS:
    +544                        break
    +545
    +546                    offset, safe = self.BRACKET_OFFSETS[name]
    +547                    bracket.set("offset", offset)
    +548                    bracket.set("safe", safe)
    +549                    expression.replace(expression.expressions[0])
     550
    -551    class Generator(generator.Generator):
    -552        EXPLICIT_UNION = True
    -553        INTERVAL_ALLOWS_PLURAL_FORM = False
    -554        JOIN_HINTS = False
    -555        QUERY_HINTS = False
    -556        TABLE_HINTS = False
    -557        LIMIT_FETCH = "LIMIT"
    -558        RENAME_TABLE_WITH_DB = False
    -559        NVL2_SUPPORTED = False
    -560        UNNEST_WITH_ORDINALITY = False
    -561        COLLATE_IS_FUNC = True
    -562        LIMIT_ONLY_LITERALS = True
    -563        SUPPORTS_TABLE_ALIAS_COLUMNS = False
    -564        UNPIVOT_ALIASES_ARE_IDENTIFIERS = False
    -565        JSON_KEY_VALUE_PAIR_SEP = ","
    -566        NULL_ORDERING_SUPPORTED = False
    -567        IGNORE_NULLS_IN_FUNC = True
    -568        JSON_PATH_SINGLE_QUOTE_ESCAPE = True
    -569        CAN_IMPLEMENT_ARRAY_ANY = True
    -570        NAMED_PLACEHOLDER_TOKEN = "@"
    -571
    -572        TRANSFORMS = {
    -573            **generator.Generator.TRANSFORMS,
    -574            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
    -575            exp.ArgMax: arg_max_or_min_no_count("MAX_BY"),
    -576            exp.ArgMin: arg_max_or_min_no_count("MIN_BY"),
    -577            exp.ArrayContains: _array_contains_sql,
    -578            exp.ArrayFilter: filter_array_using_unnest,
    -579            exp.ArraySize: rename_func("ARRAY_LENGTH"),
    -580            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
    -581            exp.CollateProperty: lambda self, e: (
    -582                f"DEFAULT COLLATE {self.sql(e, 'this')}"
    -583                if e.args.get("default")
    -584                else f"COLLATE {self.sql(e, 'this')}"
    -585            ),
    -586            exp.Commit: lambda *_: "COMMIT TRANSACTION",
    -587            exp.CountIf: rename_func("COUNTIF"),
    -588            exp.Create: _create_sql,
    -589            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
    -590            exp.DateAdd: date_add_interval_sql("DATE", "ADD"),
    -591            exp.DateDiff: lambda self, e: self.func(
    -592                "DATE_DIFF", e.this, e.expression, e.unit or "DAY"
    -593            ),
    -594            exp.DateFromParts: rename_func("DATE"),
    -595            exp.DateStrToDate: datestrtodate_sql,
    -596            exp.DateSub: date_add_interval_sql("DATE", "SUB"),
    -597            exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
    -598            exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
    -599            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
    -600            exp.FromTimeZone: lambda self, e: self.func(
    -601                "DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'"
    -602            ),
    -603            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
    -604            exp.GroupConcat: rename_func("STRING_AGG"),
    -605            exp.Hex: rename_func("TO_HEX"),
    -606            exp.If: if_sql(false_value="NULL"),
    -607            exp.ILike: no_ilike_sql,
    -608            exp.IntDiv: rename_func("DIV"),
    -609            exp.JSONFormat: rename_func("TO_JSON_STRING"),
    -610            exp.Max: max_or_greatest,
    -611            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
    -612            exp.MD5Digest: rename_func("MD5"),
    -613            exp.Min: min_or_least,
    -614            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    -615            exp.RegexpExtract: lambda self, e: self.func(
    -616                "REGEXP_EXTRACT",
    -617                e.this,
    -618                e.expression,
    -619                e.args.get("position"),
    -620                e.args.get("occurrence"),
    -621            ),
    -622            exp.RegexpReplace: regexp_replace_sql,
    -623            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
    -624            exp.ReturnsProperty: _returnsproperty_sql,
    -625            exp.Rollback: lambda *_: "ROLLBACK TRANSACTION",
    -626            exp.Select: transforms.preprocess(
    -627                [
    -628                    transforms.explode_to_unnest(),
    -629                    _unqualify_unnest,
    -630                    transforms.eliminate_distinct_on,
    -631                    _alias_ordered_group,
    -632                    transforms.eliminate_semi_and_anti_joins,
    -633                ]
    -634            ),
    -635            exp.SHA2: lambda self, e: self.func(
    -636                "SHA256" if e.text("length") == "256" else "SHA512", e.this
    -637            ),
    -638            exp.StabilityProperty: lambda self, e: (
    -639                "DETERMINISTIC" if e.name == "IMMUTABLE" else "NOT DETERMINISTIC"
    -640            ),
    -641            exp.StrToDate: lambda self, e: self.func("PARSE_DATE", self.format_time(e), e.this),
    -642            exp.StrToTime: lambda self, e: self.func(
    -643                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
    +551            return bracket
    +552
    +553    class Generator(generator.Generator):
    +554        EXPLICIT_UNION = True
    +555        INTERVAL_ALLOWS_PLURAL_FORM = False
    +556        JOIN_HINTS = False
    +557        QUERY_HINTS = False
    +558        TABLE_HINTS = False
    +559        LIMIT_FETCH = "LIMIT"
    +560        RENAME_TABLE_WITH_DB = False
    +561        NVL2_SUPPORTED = False
    +562        UNNEST_WITH_ORDINALITY = False
    +563        COLLATE_IS_FUNC = True
    +564        LIMIT_ONLY_LITERALS = True
    +565        SUPPORTS_TABLE_ALIAS_COLUMNS = False
    +566        UNPIVOT_ALIASES_ARE_IDENTIFIERS = False
    +567        JSON_KEY_VALUE_PAIR_SEP = ","
    +568        NULL_ORDERING_SUPPORTED = False
    +569        IGNORE_NULLS_IN_FUNC = True
    +570        JSON_PATH_SINGLE_QUOTE_ESCAPE = True
    +571        CAN_IMPLEMENT_ARRAY_ANY = True
    +572        SUPPORTS_TO_NUMBER = False
    +573        NAMED_PLACEHOLDER_TOKEN = "@"
    +574
    +575        TRANSFORMS = {
    +576            **generator.Generator.TRANSFORMS,
    +577            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
    +578            exp.ArgMax: arg_max_or_min_no_count("MAX_BY"),
    +579            exp.ArgMin: arg_max_or_min_no_count("MIN_BY"),
    +580            exp.ArrayContains: _array_contains_sql,
    +581            exp.ArrayFilter: filter_array_using_unnest,
    +582            exp.ArraySize: rename_func("ARRAY_LENGTH"),
    +583            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
    +584            exp.CollateProperty: lambda self, e: (
    +585                f"DEFAULT COLLATE {self.sql(e, 'this')}"
    +586                if e.args.get("default")
    +587                else f"COLLATE {self.sql(e, 'this')}"
    +588            ),
    +589            exp.Commit: lambda *_: "COMMIT TRANSACTION",
    +590            exp.CountIf: rename_func("COUNTIF"),
    +591            exp.Create: _create_sql,
    +592            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
    +593            exp.DateAdd: date_add_interval_sql("DATE", "ADD"),
    +594            exp.DateDiff: lambda self, e: self.func(
    +595                "DATE_DIFF", e.this, e.expression, unit_to_var(e)
    +596            ),
    +597            exp.DateFromParts: rename_func("DATE"),
    +598            exp.DateStrToDate: datestrtodate_sql,
    +599            exp.DateSub: date_add_interval_sql("DATE", "SUB"),
    +600            exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
    +601            exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
    +602            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
    +603            exp.FromTimeZone: lambda self, e: self.func(
    +604                "DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'"
    +605            ),
    +606            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
    +607            exp.GroupConcat: rename_func("STRING_AGG"),
    +608            exp.Hex: rename_func("TO_HEX"),
    +609            exp.If: if_sql(false_value="NULL"),
    +610            exp.ILike: no_ilike_sql,
    +611            exp.IntDiv: rename_func("DIV"),
    +612            exp.JSONFormat: rename_func("TO_JSON_STRING"),
    +613            exp.Max: max_or_greatest,
    +614            exp.Mod: rename_func("MOD"),
    +615            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
    +616            exp.MD5Digest: rename_func("MD5"),
    +617            exp.Min: min_or_least,
    +618            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    +619            exp.RegexpExtract: lambda self, e: self.func(
    +620                "REGEXP_EXTRACT",
    +621                e.this,
    +622                e.expression,
    +623                e.args.get("position"),
    +624                e.args.get("occurrence"),
    +625            ),
    +626            exp.RegexpReplace: regexp_replace_sql,
    +627            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
    +628            exp.ReturnsProperty: _returnsproperty_sql,
    +629            exp.Rollback: lambda *_: "ROLLBACK TRANSACTION",
    +630            exp.Select: transforms.preprocess(
    +631                [
    +632                    transforms.explode_to_unnest(),
    +633                    _unqualify_unnest,
    +634                    transforms.eliminate_distinct_on,
    +635                    _alias_ordered_group,
    +636                    transforms.eliminate_semi_and_anti_joins,
    +637                ]
    +638            ),
    +639            exp.SHA2: lambda self, e: self.func(
    +640                "SHA256" if e.text("length") == "256" else "SHA512", e.this
    +641            ),
    +642            exp.StabilityProperty: lambda self, e: (
    +643                "DETERMINISTIC" if e.name == "IMMUTABLE" else "NOT DETERMINISTIC"
     644            ),
    -645            exp.TimeAdd: date_add_interval_sql("TIME", "ADD"),
    -646            exp.TimeFromParts: rename_func("TIME"),
    -647            exp.TimeSub: date_add_interval_sql("TIME", "SUB"),
    -648            exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"),
    -649            exp.TimestampDiff: rename_func("TIMESTAMP_DIFF"),
    -650            exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"),
    -651            exp.TimeStrToTime: timestrtotime_sql,
    -652            exp.Transaction: lambda *_: "BEGIN TRANSACTION",
    -653            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
    -654            exp.TsOrDsAdd: _ts_or_ds_add_sql,
    -655            exp.TsOrDsDiff: _ts_or_ds_diff_sql,
    -656            exp.TsOrDsToTime: rename_func("TIME"),
    -657            exp.Unhex: rename_func("FROM_HEX"),
    -658            exp.UnixDate: rename_func("UNIX_DATE"),
    -659            exp.UnixToTime: _unix_to_time_sql,
    -660            exp.Values: _derived_table_values_to_unnest,
    -661            exp.VariancePop: rename_func("VAR_POP"),
    -662        }
    -663
    -664        SUPPORTED_JSON_PATH_PARTS = {
    -665            exp.JSONPathKey,
    -666            exp.JSONPathRoot,
    -667            exp.JSONPathSubscript,
    -668        }
    -669
    -670        TYPE_MAPPING = {
    -671            **generator.Generator.TYPE_MAPPING,
    -672            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
    -673            exp.DataType.Type.BIGINT: "INT64",
    -674            exp.DataType.Type.BINARY: "BYTES",
    -675            exp.DataType.Type.BOOLEAN: "BOOL",
    -676            exp.DataType.Type.CHAR: "STRING",
    -677            exp.DataType.Type.DECIMAL: "NUMERIC",
    -678            exp.DataType.Type.DOUBLE: "FLOAT64",
    -679            exp.DataType.Type.FLOAT: "FLOAT64",
    -680            exp.DataType.Type.INT: "INT64",
    -681            exp.DataType.Type.NCHAR: "STRING",
    -682            exp.DataType.Type.NVARCHAR: "STRING",
    -683            exp.DataType.Type.SMALLINT: "INT64",
    -684            exp.DataType.Type.TEXT: "STRING",
    -685            exp.DataType.Type.TIMESTAMP: "DATETIME",
    -686            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
    -687            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
    -688            exp.DataType.Type.TINYINT: "INT64",
    -689            exp.DataType.Type.VARBINARY: "BYTES",
    -690            exp.DataType.Type.VARCHAR: "STRING",
    -691            exp.DataType.Type.VARIANT: "ANY TYPE",
    -692        }
    -693
    -694        PROPERTIES_LOCATION = {
    -695            **generator.Generator.PROPERTIES_LOCATION,
    -696            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
    -697            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    -698        }
    -699
    -700        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
    -701        RESERVED_KEYWORDS = {
    -702            *generator.Generator.RESERVED_KEYWORDS,
    -703            "all",
    -704            "and",
    -705            "any",
    -706            "array",
    -707            "as",
    -708            "asc",
    -709            "assert_rows_modified",
    -710            "at",
    -711            "between",
    -712            "by",
    -713            "case",
    -714            "cast",
    -715            "collate",
    -716            "contains",
    -717            "create",
    -718            "cross",
    -719            "cube",
    -720            "current",
    -721            "default",
    -722            "define",
    -723            "desc",
    -724            "distinct",
    -725            "else",
    -726            "end",
    -727            "enum",
    -728            "escape",
    -729            "except",
    -730            "exclude",
    -731            "exists",
    -732            "extract",
    -733            "false",
    -734            "fetch",
    -735            "following",
    -736            "for",
    -737            "from",
    -738            "full",
    -739            "group",
    -740            "grouping",
    -741            "groups",
    -742            "hash",
    -743            "having",
    -744            "if",
    -745            "ignore",
    -746            "in",
    -747            "inner",
    -748            "intersect",
    -749            "interval",
    -750            "into",
    -751            "is",
    -752            "join",
    -753            "lateral",
    -754            "left",
    -755            "like",
    -756            "limit",
    -757            "lookup",
    -758            "merge",
    -759            "natural",
    -760            "new",
    -761            "no",
    -762            "not",
    -763            "null",
    -764            "nulls",
    -765            "of",
    -766            "on",
    -767            "or",
    -768            "order",
    -769            "outer",
    -770            "over",
    -771            "partition",
    -772            "preceding",
    -773            "proto",
    -774            "qualify",
    -775            "range",
    -776            "recursive",
    -777            "respect",
    -778            "right",
    -779            "rollup",
    -780            "rows",
    -781            "select",
    -782            "set",
    -783            "some",
    -784            "struct",
    -785            "tablesample",
    -786            "then",
    -787            "to",
    -788            "treat",
    -789            "true",
    -790            "unbounded",
    -791            "union",
    -792            "unnest",
    -793            "using",
    -794            "when",
    -795            "where",
    -796            "window",
    -797            "with",
    -798            "within",
    -799        }
    -800
    -801        def table_parts(self, expression: exp.Table) -> str:
    -802            # Depending on the context, `x.y` may not resolve to the same data source as `x`.`y`, so
    -803            # we need to make sure the correct quoting is used in each case.
    -804            #
    -805            # For example, if there is a CTE x that clashes with a schema name, then the former will
    -806            # return the table y in that schema, whereas the latter will return the CTE's y column:
    -807            #
    -808            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x.y`   -> cross join
    -809            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x`.`y` -> implicit unnest
    -810            if expression.meta.get("quoted_table"):
    -811                table_parts = ".".join(p.name for p in expression.parts)
    -812                return self.sql(exp.Identifier(this=table_parts, quoted=True))
    -813
    -814            return super().table_parts(expression)
    -815
    -816        def timetostr_sql(self, expression: exp.TimeToStr) -> str:
    -817            this = expression.this if isinstance(expression.this, exp.TsOrDsToDate) else expression
    -818            return self.func("FORMAT_DATE", self.format_time(expression), this.this)
    +645            exp.StrToDate: lambda self, e: self.func("PARSE_DATE", self.format_time(e), e.this),
    +646            exp.StrToTime: lambda self, e: self.func(
    +647                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
    +648            ),
    +649            exp.TimeAdd: date_add_interval_sql("TIME", "ADD"),
    +650            exp.TimeFromParts: rename_func("TIME"),
    +651            exp.TimeSub: date_add_interval_sql("TIME", "SUB"),
    +652            exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"),
    +653            exp.TimestampDiff: rename_func("TIMESTAMP_DIFF"),
    +654            exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"),
    +655            exp.TimeStrToTime: timestrtotime_sql,
    +656            exp.Transaction: lambda *_: "BEGIN TRANSACTION",
    +657            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
    +658            exp.TsOrDsAdd: _ts_or_ds_add_sql,
    +659            exp.TsOrDsDiff: _ts_or_ds_diff_sql,
    +660            exp.TsOrDsToTime: rename_func("TIME"),
    +661            exp.Unhex: rename_func("FROM_HEX"),
    +662            exp.UnixDate: rename_func("UNIX_DATE"),
    +663            exp.UnixToTime: _unix_to_time_sql,
    +664            exp.Values: _derived_table_values_to_unnest,
    +665            exp.VariancePop: rename_func("VAR_POP"),
    +666        }
    +667
    +668        SUPPORTED_JSON_PATH_PARTS = {
    +669            exp.JSONPathKey,
    +670            exp.JSONPathRoot,
    +671            exp.JSONPathSubscript,
    +672        }
    +673
    +674        TYPE_MAPPING = {
    +675            **generator.Generator.TYPE_MAPPING,
    +676            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
    +677            exp.DataType.Type.BIGINT: "INT64",
    +678            exp.DataType.Type.BINARY: "BYTES",
    +679            exp.DataType.Type.BOOLEAN: "BOOL",
    +680            exp.DataType.Type.CHAR: "STRING",
    +681            exp.DataType.Type.DECIMAL: "NUMERIC",
    +682            exp.DataType.Type.DOUBLE: "FLOAT64",
    +683            exp.DataType.Type.FLOAT: "FLOAT64",
    +684            exp.DataType.Type.INT: "INT64",
    +685            exp.DataType.Type.NCHAR: "STRING",
    +686            exp.DataType.Type.NVARCHAR: "STRING",
    +687            exp.DataType.Type.SMALLINT: "INT64",
    +688            exp.DataType.Type.TEXT: "STRING",
    +689            exp.DataType.Type.TIMESTAMP: "DATETIME",
    +690            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
    +691            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
    +692            exp.DataType.Type.TINYINT: "INT64",
    +693            exp.DataType.Type.VARBINARY: "BYTES",
    +694            exp.DataType.Type.VARCHAR: "STRING",
    +695            exp.DataType.Type.VARIANT: "ANY TYPE",
    +696        }
    +697
    +698        PROPERTIES_LOCATION = {
    +699            **generator.Generator.PROPERTIES_LOCATION,
    +700            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
    +701            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    +702        }
    +703
    +704        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
    +705        RESERVED_KEYWORDS = {
    +706            *generator.Generator.RESERVED_KEYWORDS,
    +707            "all",
    +708            "and",
    +709            "any",
    +710            "array",
    +711            "as",
    +712            "asc",
    +713            "assert_rows_modified",
    +714            "at",
    +715            "between",
    +716            "by",
    +717            "case",
    +718            "cast",
    +719            "collate",
    +720            "contains",
    +721            "create",
    +722            "cross",
    +723            "cube",
    +724            "current",
    +725            "default",
    +726            "define",
    +727            "desc",
    +728            "distinct",
    +729            "else",
    +730            "end",
    +731            "enum",
    +732            "escape",
    +733            "except",
    +734            "exclude",
    +735            "exists",
    +736            "extract",
    +737            "false",
    +738            "fetch",
    +739            "following",
    +740            "for",
    +741            "from",
    +742            "full",
    +743            "group",
    +744            "grouping",
    +745            "groups",
    +746            "hash",
    +747            "having",
    +748            "if",
    +749            "ignore",
    +750            "in",
    +751            "inner",
    +752            "intersect",
    +753            "interval",
    +754            "into",
    +755            "is",
    +756            "join",
    +757            "lateral",
    +758            "left",
    +759            "like",
    +760            "limit",
    +761            "lookup",
    +762            "merge",
    +763            "natural",
    +764            "new",
    +765            "no",
    +766            "not",
    +767            "null",
    +768            "nulls",
    +769            "of",
    +770            "on",
    +771            "or",
    +772            "order",
    +773            "outer",
    +774            "over",
    +775            "partition",
    +776            "preceding",
    +777            "proto",
    +778            "qualify",
    +779            "range",
    +780            "recursive",
    +781            "respect",
    +782            "right",
    +783            "rollup",
    +784            "rows",
    +785            "select",
    +786            "set",
    +787            "some",
    +788            "struct",
    +789            "tablesample",
    +790            "then",
    +791            "to",
    +792            "treat",
    +793            "true",
    +794            "unbounded",
    +795            "union",
    +796            "unnest",
    +797            "using",
    +798            "when",
    +799            "where",
    +800            "window",
    +801            "with",
    +802            "within",
    +803        }
    +804
    +805        def table_parts(self, expression: exp.Table) -> str:
    +806            # Depending on the context, `x.y` may not resolve to the same data source as `x`.`y`, so
    +807            # we need to make sure the correct quoting is used in each case.
    +808            #
    +809            # For example, if there is a CTE x that clashes with a schema name, then the former will
    +810            # return the table y in that schema, whereas the latter will return the CTE's y column:
    +811            #
    +812            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x.y`   -> cross join
    +813            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x`.`y` -> implicit unnest
    +814            if expression.meta.get("quoted_table"):
    +815                table_parts = ".".join(p.name for p in expression.parts)
    +816                return self.sql(exp.Identifier(this=table_parts, quoted=True))
    +817
    +818            return super().table_parts(expression)
     819
    -820        def eq_sql(self, expression: exp.EQ) -> str:
    -821            # Operands of = cannot be NULL in BigQuery
    -822            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
    -823                if not isinstance(expression.parent, exp.Update):
    -824                    return "NULL"
    -825
    -826            return self.binary(expression, "=")
    -827
    -828        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    -829            parent = expression.parent
    -830
    -831            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    -832            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    -833            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    -834                return self.func(
    -835                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    -836                )
    -837
    -838            return super().attimezone_sql(expression)
    -839
    -840        def trycast_sql(self, expression: exp.TryCast) -> str:
    -841            return self.cast_sql(expression, safe_prefix="SAFE_")
    -842
    -843        def array_sql(self, expression: exp.Array) -> str:
    -844            first_arg = seq_get(expression.expressions, 0)
    -845            if isinstance(first_arg, exp.Query):
    -846                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    -847
    -848            return inline_array_sql(self, expression)
    -849
    -850        def bracket_sql(self, expression: exp.Bracket) -> str:
    -851            this = self.sql(expression, "this")
    -852            expressions = expression.expressions
    +820        def timetostr_sql(self, expression: exp.TimeToStr) -> str:
    +821            this = expression.this if isinstance(expression.this, exp.TsOrDsToDate) else expression
    +822            return self.func("FORMAT_DATE", self.format_time(expression), this.this)
    +823
    +824        def eq_sql(self, expression: exp.EQ) -> str:
    +825            # Operands of = cannot be NULL in BigQuery
    +826            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
    +827                if not isinstance(expression.parent, exp.Update):
    +828                    return "NULL"
    +829
    +830            return self.binary(expression, "=")
    +831
    +832        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    +833            parent = expression.parent
    +834
    +835            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    +836            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    +837            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    +838                return self.func(
    +839                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    +840                )
    +841
    +842            return super().attimezone_sql(expression)
    +843
    +844        def trycast_sql(self, expression: exp.TryCast) -> str:
    +845            return self.cast_sql(expression, safe_prefix="SAFE_")
    +846
    +847        def array_sql(self, expression: exp.Array) -> str:
    +848            first_arg = seq_get(expression.expressions, 0)
    +849            if isinstance(first_arg, exp.Query):
    +850                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    +851
    +852            return inline_array_sql(self, expression)
     853
    -854            if len(expressions) == 1:
    -855                arg = expressions[0]
    -856                if arg.type is None:
    -857                    from sqlglot.optimizer.annotate_types import annotate_types
    -858
    -859                    arg = annotate_types(arg)
    -860
    -861                if arg.type and arg.type.this in exp.DataType.TEXT_TYPES:
    -862                    # BQ doesn't support bracket syntax with string values
    -863                    return f"{this}.{arg.name}"
    +854        def bracket_sql(self, expression: exp.Bracket) -> str:
    +855            this = expression.this
    +856            expressions = expression.expressions
    +857
    +858            if len(expressions) == 1 and this and this.is_type(exp.DataType.Type.STRUCT):
    +859                arg = expressions[0]
    +860                if arg.type is None:
    +861                    from sqlglot.optimizer.annotate_types import annotate_types
    +862
    +863                    arg = annotate_types(arg)
     864
    -865            expressions_sql = ", ".join(self.sql(e) for e in expressions)
    -866            offset = expression.args.get("offset")
    -867
    -868            if offset == 0:
    -869                expressions_sql = f"OFFSET({expressions_sql})"
    -870            elif offset == 1:
    -871                expressions_sql = f"ORDINAL({expressions_sql})"
    -872            elif offset is not None:
    -873                self.unsupported(f"Unsupported array offset: {offset}")
    -874
    -875            if expression.args.get("safe"):
    -876                expressions_sql = f"SAFE_{expressions_sql}"
    -877
    -878            return f"{this}[{expressions_sql}]"
    -879
    -880        def in_unnest_op(self, expression: exp.Unnest) -> str:
    -881            return self.sql(expression)
    -882
    -883        def except_op(self, expression: exp.Except) -> str:
    -884            if not expression.args.get("distinct"):
    -885                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    -886            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    -887
    -888        def intersect_op(self, expression: exp.Intersect) -> str:
    -889            if not expression.args.get("distinct"):
    -890                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    -891            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    -892
    -893        def with_properties(self, properties: exp.Properties) -> str:
    -894            return self.properties(properties, prefix=self.seg("OPTIONS"))
    -895
    -896        def version_sql(self, expression: exp.Version) -> str:
    -897            if expression.name == "TIMESTAMP":
    -898                expression.set("this", "SYSTEM_TIME")
    -899            return super().version_sql(expression)
    +865                if arg.type and arg.type.this in exp.DataType.TEXT_TYPES:
    +866                    # BQ doesn't support bracket syntax with string values for structs
    +867                    return f"{self.sql(this)}.{arg.name}"
    +868
    +869            expressions_sql = self.expressions(expression, flat=True)
    +870            offset = expression.args.get("offset")
    +871
    +872            if offset == 0:
    +873                expressions_sql = f"OFFSET({expressions_sql})"
    +874            elif offset == 1:
    +875                expressions_sql = f"ORDINAL({expressions_sql})"
    +876            elif offset is not None:
    +877                self.unsupported(f"Unsupported array offset: {offset}")
    +878
    +879            if expression.args.get("safe"):
    +880                expressions_sql = f"SAFE_{expressions_sql}"
    +881
    +882            return f"{self.sql(this)}[{expressions_sql}]"
    +883
    +884        def in_unnest_op(self, expression: exp.Unnest) -> str:
    +885            return self.sql(expression)
    +886
    +887        def except_op(self, expression: exp.Except) -> str:
    +888            if not expression.args.get("distinct"):
    +889                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    +890            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    +891
    +892        def intersect_op(self, expression: exp.Intersect) -> str:
    +893            if not expression.args.get("distinct"):
    +894                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    +895            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    +896
    +897        def with_properties(self, properties: exp.Properties) -> str:
    +898            return self.properties(properties, prefix=self.seg("OPTIONS"))
    +899
    +900        def version_sql(self, expression: exp.Version) -> str:
    +901            if expression.name == "TIMESTAMP":
    +902                expression.set("this", "SYSTEM_TIME")
    +903            return super().version_sql(expression)
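The Generator flags and TRANSFORMS above control how expressions are rendered as BigQuery SQL. A minimal sketch exercising two of the behaviours defined in this hunk (SAFE_CAST via trycast_sql and NULL-propagating equality via eq_sql); the outputs in the comments are approximate and may vary by sqlglot version:

    import sqlglot

    # TryCast is rendered with the SAFE_ prefix and INT maps to INT64.
    print(sqlglot.transpile("SELECT TRY_CAST(x AS INT)", read="duckdb", write="bigquery")[0])
    # expected (approximately): SELECT SAFE_CAST(x AS INT64)

    # Comparing against NULL always yields NULL in BigQuery, so eq_sql folds it.
    print(sqlglot.transpile("SELECT x = NULL FROM t", write="bigquery")[0])
    # expected (approximately): SELECT NULL FROM t
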
     
    @@ -2022,14 +2027,15 @@
-    LOG_BASE_FIRST =
+    LOG_BASE_FIRST: Optional[bool] = False

-Whether the base comes first in the LOG function.

+Whether the base comes first in the LOG function.
+Possible values: True, False, None (two arguments are not supported by LOG)
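LOG_BASE_FIRST = False records that BigQuery's two-argument LOG(X, Y) takes the value first and the base second, which lets sqlglot reorder arguments when converting from base-first dialects. A hedged sketch, assuming Spark as the base-first source dialect; the exact output may vary by version:

    import sqlglot

    # Spark's LOG(base, expr) puts the base first; BigQuery's LOG(expr, base)
    # does not, which is what LOG_BASE_FIRST = False encodes for this dialect.
    print(sqlglot.transpile("SELECT LOG(2, 64)", read="spark", write="bigquery")[0])
    # expected (approximately): SELECT LOG(64, 2)
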

    @@ -2083,20 +2089,6 @@
-    ESCAPE_SEQUENCES: Dict[str, str] =
-    {'\\a': '\x07', '\\b': '\x08', '\\f': '\x0c', '\\n': '\n', '\\r': '\r', '\\t': '\t', '\\v': '\x0b'}

-Mapping of an unescaped escape sequence to the corresponding character.
    @@ -2140,29 +2132,29 @@ For example, such columns may be excluded from SELECT * queries.

    -
    272    def normalize_identifier(self, expression: E) -> E:
    -273        if isinstance(expression, exp.Identifier):
    -274            parent = expression.parent
    -275            while isinstance(parent, exp.Dot):
    -276                parent = parent.parent
    -277
    -278            # In BigQuery, CTEs are case-insensitive, but UDF and table names are case-sensitive
    -279            # by default. The following check uses a heuristic to detect tables based on whether
    -280            # they are qualified. This should generally be correct, because tables in BigQuery
    -281            # must be qualified with at least a dataset, unless @@dataset_id is set.
    -282            case_sensitive = (
    -283                isinstance(parent, exp.UserDefinedFunction)
    -284                or (
    -285                    isinstance(parent, exp.Table)
    -286                    and parent.db
    -287                    and (parent.meta.get("quoted_table") or not parent.meta.get("maybe_column"))
    -288                )
    -289                or expression.meta.get("is_table")
    -290            )
    -291            if not case_sensitive:
    -292                expression.set("this", expression.this.lower())
    -293
    -294        return expression
    +            
    271    def normalize_identifier(self, expression: E) -> E:
    +272        if isinstance(expression, exp.Identifier):
    +273            parent = expression.parent
    +274            while isinstance(parent, exp.Dot):
    +275                parent = parent.parent
    +276
    +277            # In BigQuery, CTEs are case-insensitive, but UDF and table names are case-sensitive
    +278            # by default. The following check uses a heuristic to detect tables based on whether
    +279            # they are qualified. This should generally be correct, because tables in BigQuery
    +280            # must be qualified with at least a dataset, unless @@dataset_id is set.
    +281            case_sensitive = (
    +282                isinstance(parent, exp.UserDefinedFunction)
    +283                or (
    +284                    isinstance(parent, exp.Table)
    +285                    and parent.db
    +286                    and (parent.meta.get("quoted_table") or not parent.meta.get("maybe_column"))
    +287                )
    +288                or expression.meta.get("is_table")
    +289            )
    +290            if not case_sensitive:
    +291                expression.set("this", expression.this.lower())
    +292
    +293        return expression
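normalize_identifier implements the case-sensitivity heuristic described in the comments above: unqualified names are lowercased, while UDF names and dataset-qualified table names keep their casing. A small sketch using sqlglot's normalize_identifiers optimizer rule; the identifier names are hypothetical and output may vary by version:

    import sqlglot
    from sqlglot.optimizer.normalize_identifiers import normalize_identifiers

    expr = sqlglot.parse_one("SELECT Col FROM My_Dataset.My_Table", read="bigquery")
    # The unquoted column is case-insensitive and gets lowercased; the
    # dataset-qualified table name is treated as case-sensitive and kept as-is.
    print(normalize_identifiers(expr, dialect="bigquery").sql(dialect="bigquery"))
    # expected (approximately): SELECT col FROM My_Dataset.My_Table
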
     
    @@ -2185,6 +2177,22 @@ that it can analyze queries in the optimizer and successfully capture their sema
+    UNESCAPED_SEQUENCES: Dict[str, str] =
+    {'\\a': '\x07', '\\b': '\x08', '\\f': '\x0c', '\\n': '\n', '\\r': '\r', '\\t': '\t', '\\v': '\x0b', '\\\\': '\\'}

+Mapping of an escaped sequence (\n) to its unescaped version (a literal newline character).
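UNESCAPED_SEQUENCES (tokenizer side) and its inverse ESCAPED_SEQUENCES (generator side, shown in the next hunk) let a literal such as '\n' be decoded into a real newline on parse and re-encoded as an escape on output. A brief sketch of that round trip; exact formatting may differ between sqlglot versions:

    import sqlglot

    # The SQL literal contains backslash+n; the tokenizer unescapes it to a real
    # newline, and the generator re-escapes it when writing BigQuery SQL again.
    expr = sqlglot.parse_one(r"SELECT 'a\nb'", read="bigquery")
    literal = expr.find(sqlglot.exp.Literal)
    print(repr(literal.this))            # the parsed value holds a real newline
    print(expr.sql(dialect="bigquery"))  # rendered back with the \n escape
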
    @@ -2272,14 +2280,15 @@ that it can analyze queries in the optimizer and successfully capture their sema
-    INVERSE_ESCAPE_SEQUENCES: Dict[str, str] =
-    {'\x07': '\\a', '\x08': '\\b', '\x0c': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', '\x0b': '\\v'}
+    ESCAPED_SEQUENCES: Dict[str, str] =
+    {'\x07': '\\a', '\x08': '\\b', '\x0c': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', '\x0b': '\\v', '\\': '\\\\'}

@@ -2477,29 +2486,30 @@ that it can analyze queries in the optimizer and successfully capture their sema
    -
    296    class Tokenizer(tokens.Tokenizer):
    -297        QUOTES = ["'", '"', '"""', "'''"]
    -298        COMMENTS = ["--", "#", ("/*", "*/")]
    -299        IDENTIFIERS = ["`"]
    -300        STRING_ESCAPES = ["\\"]
    -301
    -302        HEX_STRINGS = [("0x", ""), ("0X", "")]
    -303
    -304        BYTE_STRINGS = [
    -305            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
    -306        ]
    -307
    -308        RAW_STRINGS = [
    -309            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
    -310        ]
    -311
    -312        KEYWORDS = {
    -313            **tokens.Tokenizer.KEYWORDS,
    -314            "ANY TYPE": TokenType.VARIANT,
    -315            "BEGIN": TokenType.COMMAND,
    -316            "BEGIN TRANSACTION": TokenType.BEGIN,
    -317            "BYTES": TokenType.BINARY,
    -318            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
    +            
    295    class Tokenizer(tokens.Tokenizer):
    +296        QUOTES = ["'", '"', '"""', "'''"]
    +297        COMMENTS = ["--", "#", ("/*", "*/")]
    +298        IDENTIFIERS = ["`"]
    +299        STRING_ESCAPES = ["\\"]
    +300
    +301        HEX_STRINGS = [("0x", ""), ("0X", "")]
    +302
    +303        BYTE_STRINGS = [
    +304            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
    +305        ]
    +306
    +307        RAW_STRINGS = [
    +308            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
    +309        ]
    +310
    +311        KEYWORDS = {
    +312            **tokens.Tokenizer.KEYWORDS,
    +313            "ANY TYPE": TokenType.VARIANT,
    +314            "BEGIN": TokenType.COMMAND,
    +315            "BEGIN TRANSACTION": TokenType.BEGIN,
    +316            "BYTES": TokenType.BINARY,
    +317            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
    +318            "DATETIME": TokenType.TIMESTAMP,
     319            "DECLARE": TokenType.COMMAND,
     320            "ELSEIF": TokenType.COMMAND,
     321            "EXCEPTION": TokenType.COMMAND,
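The visible change in this tokenizer hunk is the added "DATETIME": TokenType.TIMESTAMP keyword, which pairs with the TYPE_MAPPING entries above so that BigQuery's timezone-less DATETIME round-trips while its TIMESTAMP maps to a timezone-aware type. A hedged sketch; the DuckDB target is only an example and outputs may vary by version:

    import sqlglot

    # DATETIME is tokenized as a timezone-less TIMESTAMP internally and rendered
    # as DATETIME again when writing BigQuery SQL.
    print(sqlglot.transpile("SELECT CAST(x AS DATETIME)", read="bigquery", write="bigquery")[0])
    # Other dialects render the same internal type with their own type names.
    print(sqlglot.transpile("SELECT CAST(x AS DATETIME)", read="bigquery", write="duckdb")[0])
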
    @@ -2607,7 +2617,7 @@ that it can analyze queries in the optimizer and successfully capture their sema
                                     
    KEYWORDS = - {'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, ':=': <TokenType.COLON_EQ: 'COLON_EQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, '??': <TokenType.DQMARK: 'DQMARK'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONNECT BY': <TokenType.CONNECT_BY: 'CONNECT_BY'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ENUM': <TokenType.ENUM: 'ENUM'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': 
<TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': <TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 'KEEP'>, 'KILL': <TokenType.KILL: 'KILL'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'START WITH': <TokenType.START_WITH: 'START_WITH'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'TRUNCATE': <TokenType.TRUNCATE: 'TRUNCATE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNKNOWN': <TokenType.UNKNOWN: 'UNKNOWN'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 
'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': <TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'MEDIUMINT': <TokenType.MEDIUMINT: 'MEDIUMINT'>, 'INT1': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'INT16': <TokenType.SMALLINT: 'SMALLINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT128': <TokenType.INT128: 'INT128'>, 'HUGEINT': <TokenType.INT128: 'INT128'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'INT32': <TokenType.INT: 'INT'>, 'INT64': <TokenType.BIGINT: 'BIGINT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.TINYINT: 'TINYINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': <TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'BPCHAR': <TokenType.BPCHAR: 'BPCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'LONGTEXT': <TokenType.LONGTEXT: 'LONGTEXT'>, 'MEDIUMTEXT': <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, 'TINYTEXT': <TokenType.TINYTEXT: 'TINYTEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'LONGBLOB': <TokenType.LONGBLOB: 'LONGBLOB'>, 'MEDIUMBLOB': <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, 'TINYBLOB': <TokenType.TINYBLOB: 'TINYBLOB'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMETZ': <TokenType.TIMETZ: 'TIMETZ'>, 'TIMESTAMP': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': 
<TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': <TokenType.STRUCT: 'STRUCT'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'FOR VERSION': <TokenType.VERSION_SNAPSHOT: 'VERSION_SNAPSHOT'>, 'FOR TIMESTAMP': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'ANY TYPE': <TokenType.VARIANT: 'VARIANT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'BYTES': <TokenType.BINARY: 'BINARY'>, 'CURRENT_DATETIME': <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'ELSEIF': <TokenType.COMMAND: 'COMMAND'>, 'EXCEPTION': <TokenType.COMMAND: 'COMMAND'>, 'FLOAT64': <TokenType.DOUBLE: 'DOUBLE'>, 'FOR SYSTEM_TIME': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'MODEL': <TokenType.MODEL: 'MODEL'>, 'NOT DETERMINISTIC': <TokenType.VOLATILE: 'VOLATILE'>, 'RECORD': <TokenType.STRUCT: 'STRUCT'>} + {'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, ':=': <TokenType.COLON_EQ: 'COLON_EQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, '??': <TokenType.DQMARK: 'DQMARK'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONNECT BY': <TokenType.CONNECT_BY: 'CONNECT_BY'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': 
<TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ENUM': <TokenType.ENUM: 'ENUM'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': <TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 'KEEP'>, 'KILL': <TokenType.KILL: 'KILL'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 
'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'START WITH': <TokenType.START_WITH: 'START_WITH'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'TRUNCATE': <TokenType.TRUNCATE: 'TRUNCATE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNKNOWN': <TokenType.UNKNOWN: 'UNKNOWN'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': <TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'MEDIUMINT': <TokenType.MEDIUMINT: 'MEDIUMINT'>, 'INT1': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'INT16': <TokenType.SMALLINT: 'SMALLINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT128': <TokenType.INT128: 'INT128'>, 'HUGEINT': <TokenType.INT128: 'INT128'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'INT32': <TokenType.INT: 'INT'>, 'INT64': <TokenType.BIGINT: 'BIGINT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.TINYINT: 'TINYINT'>, 'UINT': <TokenType.UINT: 'UINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': <TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'BPCHAR': 
<TokenType.BPCHAR: 'BPCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'LONGTEXT': <TokenType.LONGTEXT: 'LONGTEXT'>, 'MEDIUMTEXT': <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, 'TINYTEXT': <TokenType.TINYTEXT: 'TINYTEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'LONGBLOB': <TokenType.LONGBLOB: 'LONGBLOB'>, 'MEDIUMBLOB': <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, 'TINYBLOB': <TokenType.TINYBLOB: 'TINYBLOB'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMETZ': <TokenType.TIMETZ: 'TIMETZ'>, 'TIMESTAMP': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.TIMESTAMP: 'TIMESTAMP'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': <TokenType.STRUCT: 'STRUCT'>, 'SEQUENCE': <TokenType.SEQUENCE: 'SEQUENCE'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'FOR VERSION': <TokenType.VERSION_SNAPSHOT: 'VERSION_SNAPSHOT'>, 'FOR TIMESTAMP': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'ANY TYPE': <TokenType.VARIANT: 'VARIANT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'BYTES': <TokenType.BINARY: 'BINARY'>, 'CURRENT_DATETIME': <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'ELSEIF': <TokenType.COMMAND: 'COMMAND'>, 'EXCEPTION': <TokenType.COMMAND: 'COMMAND'>, 'FLOAT64': <TokenType.DOUBLE: 'DOUBLE'>, 'FOR SYSTEM_TIME': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'MODEL': <TokenType.MODEL: 'MODEL'>, 'NOT DETERMINISTIC': <TokenType.VOLATILE: 'VOLATILE'>, 'RECORD': <TokenType.STRUCT: 'STRUCT'>}
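The KEYWORDS mapping above is where BigQuery-only spellings are folded into sqlglot's generic token types (FLOAT64 -> DOUBLE, BYTES -> BINARY, RECORD -> STRUCT, ANY TYPE -> VARIANT, plus the SEQUENCE token added in this version). A minimal sketch of how that surfaces through the public API, using sqlglot's parse_one/transpile helpers and an illustrative query; the exact rendered strings may differ between sqlglot versions:

    import sqlglot
    from sqlglot import exp

    # FLOAT64 is tokenized as a DOUBLE type token, so a generic rendering of the
    # cast target no longer shows the BigQuery-specific spelling.
    ast = sqlglot.parse_one("SELECT CAST(x AS FLOAT64) FROM t", read="bigquery")
    print(ast.find(exp.DataType).sql())  # expected: DOUBLE

    # Transpiling to another dialect rewrites the type name the same way.
    print(sqlglot.transpile("SELECT CAST(x AS FLOAT64) FROM t",
                            read="bigquery", write="duckdb")[0])
    # expected along the lines of: SELECT CAST(x AS DOUBLE) FROM t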
    @@ -2814,14 +2824,14 @@ that it can analyze queries in the optimizer and successfully capture their sema
    487 table.set("db", exp.Identifier(this=parts[0])) 488 table.set("this", exp.Identifier(this=parts[1])) 489 -490 if isinstance(table.this, exp.Identifier) and "." in table.name: +490 if any("." in p.name for p in table.parts): 491 catalog, db, this, *rest = ( -492 t.cast(t.Optional[exp.Expression], exp.to_identifier(x, quoted=True)) -493 for x in split_num_words(table.name, ".", 3) +492 exp.to_identifier(p, quoted=True) +493 for p in split_num_words(".".join(p.name for p in table.parts), ".", 3) 494 ) 495 496 if rest and this: -497 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) +497 this = exp.Dot.build([this, *rest]) # type: ignore 498 499 table = exp.Table(this=this, db=db, catalog=catalog) 500 table.meta["quoted_table"] = True @@ -2855,25 +2865,27 @@ that it can analyze queries in the optimizer and successfully capture their sema 528 529 return json_object 530 -531 def _parse_bracket(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]: -532 bracket = super()._parse_bracket(this) -533 -534 if this is bracket: -535 return bracket -536 -537 if isinstance(bracket, exp.Bracket): -538 for expression in bracket.expressions: -539 name = expression.name.upper() -540 -541 if name not in self.BRACKET_OFFSETS: -542 break -543 -544 offset, safe = self.BRACKET_OFFSETS[name] -545 bracket.set("offset", offset) -546 bracket.set("safe", safe) -547 expression.replace(expression.expressions[0]) -548 -549 return bracket +531 def _parse_bracket( +532 self, this: t.Optional[exp.Expression] = None +533 ) -> t.Optional[exp.Expression]: +534 bracket = super()._parse_bracket(this) +535 +536 if this is bracket: +537 return bracket +538 +539 if isinstance(bracket, exp.Bracket): +540 for expression in bracket.expressions: +541 name = expression.name.upper() +542 +543 if name not in self.BRACKET_OFFSETS: +544 break +545 +546 offset, safe = self.BRACKET_OFFSETS[name] +547 bracket.set("offset", offset) +548 bracket.set("safe", safe) +549 expression.replace(expression.expressions[0]) +550 +551 return bracket
    @@ -2934,7 +2946,7 @@ Default: 3
  • FUNCTIONS = - {'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ADD_MONTHS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AddMonths'>>, 'ANONYMOUS_AGG_FUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnonymousAggFunc'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'APPROX_TOP_K': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxTopK'>>, 'ARG_MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARGMAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'MAX_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARG_MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARGMIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'MIN_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayJoin'>>, 'ARRAY_OVERLAPS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayOverlaps'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'ARRAY_UNIQUE_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUniqueAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CastToStrType'>>, 'CBRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cbrt'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CHR': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Chr'>>, 'CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Chr'>>, 'COALESCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'IFNULL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'NVL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'COLLATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Collate'>>, 'COMBINED_AGG_FUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CombinedAggFunc'>>, 'COMBINED_PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CombinedParameterizedAgg'>>, 'CONCAT': <function Parser.<lambda>>, 'CONCAT_WS': <function Parser.<lambda>>, 'CONNECT_BY_ROOT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ConnectByRoot'>>, 'CONVERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Convert'>>, 'COUNT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Count'>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'COUNTIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <function _build_date>, 'DATE_ADD': <function build_date_delta_with_interval.<locals>._builder>, 'DATEDIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATE_FROM_PARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <function build_date_delta_with_interval.<locals>._builder>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 'DATE_TRUNC': <function BigQuery.Parser.<lambda>>, 'DATETIME_ADD': <function build_date_delta_with_interval.<locals>._builder>, 'DATETIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <function build_date_delta_with_interval.<locals>._builder>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': 
<bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXPLODE_OUTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ExplodeOuter'>>, 'EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Extract'>>, 'FIRST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.First'>>, 'FIRST_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FirstValue'>>, 'FLATTEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Flatten'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'GREATEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hex'>>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'IIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Initcap'>>, 'IS_INF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsInf'>>, 'ISINF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsInf'>>, 'IS_NAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'ISNAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'J_S_O_N_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArray'>>, 'J_S_O_N_ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayAgg'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'JSON_EXTRACT': <function build_extract_json_with_path.<locals>._builder>, 'JSON_EXTRACT_SCALAR': <function BigQuery.Parser.<lambda>>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'J_S_O_N_OBJECT_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObjectAgg'>>, 'J_S_O_N_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONTable'>>, 'LAG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lag'>>, 'LAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Last'>>, 'LAST_DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDay'>>, 'LAST_DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDay'>>, 'LAST_VALUE': 
<bound method Func.from_arg_list of <class 'sqlglot.expressions.LastValue'>>, 'LEAD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lead'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Left'>>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <function build_logarithm>, 'LOG10': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log10'>>, 'LOG2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log2'>>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'MONTHS_BETWEEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MonthsBetween'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NTH_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NthValue'>>, 'NULLIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nullif'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.OpenJSON'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParameterizedAgg'>>, 'PARSE_JSON': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'JSON_PARSE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Posexplode'>>, 'POSEXPLODE_OUTER': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.PosexplodeOuter'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'PREDICT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Predict'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'RAND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Rand'>>, 'RANDOM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Rand'>>, 'RANDN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Randn'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <function BigQuery.Parser.<lambda>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_REPLACE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpReplace'>>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Right'>>, 'ROUND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_DIVIDE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeDivide'>>, 'SIGN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sign'>>, 'SIGNUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sign'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SortArray'>>, 'SPLIT': <function BigQuery.Parser.<lambda>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STARTS_WITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STARTSWITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToMap'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRUCT': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'STUFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'INSERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'SUBSTRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME_ADD': <function build_date_delta_with_interval.<locals>._builder>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeDiff'>>, 'TIME_FROM_PARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeFromParts'>>, 'TIMEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeFromParts'>>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <function build_date_delta_with_interval.<locals>._builder>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': <function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP': <function _build_timestamp>, 'TIMESTAMP_ADD': <function build_date_delta_with_interval.<locals>._builder>, 'TIMESTAMPDIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_FROM_PARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampFromParts'>>, 'TIMESTAMPFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampFromParts'>>, 'TIMESTAMP_SUB': <function build_date_delta_with_interval.<locals>._builder>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToArray'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TO_DAYS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToDays'>>, 'TRANSFORM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Transform'>>, 'TRIM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsDiff'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'TS_OR_DS_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToTime'>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_DATE': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.UnixDate'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTimeStr'>>, 'UPPER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'VAR_MAP': <function build_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'XOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Xor'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'GLOB': <function Parser.<lambda>>, 'JSON_EXTRACT_PATH_TEXT': <function build_extract_json_with_path.<locals>._builder>, 'LIKE': <function build_like>, 'DIV': <function binary_from_function.<locals>.<lambda>>, 'FORMAT_DATE': <function BigQuery.Parser.<lambda>>, 'GENERATE_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'TO_HEX': <function _build_to_hex>, 'PARSE_DATE': <function BigQuery.Parser.<lambda>>, 'PARSE_TIMESTAMP': <function _build_parse_timestamp>, 'REGEXP_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'SHA256': <function BigQuery.Parser.<lambda>>, 'SHA512': <function BigQuery.Parser.<lambda>>, 'TIME': <function _build_time>, 'TIMESTAMP_MICROS': <function BigQuery.Parser.<lambda>>, 'TIMESTAMP_MILLIS': <function BigQuery.Parser.<lambda>>, 'TIMESTAMP_SECONDS': <function BigQuery.Parser.<lambda>>, 'TO_JSON_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>} + {'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ADD_MONTHS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AddMonths'>>, 'ANONYMOUS_AGG_FUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnonymousAggFunc'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'APPROX_TOP_K': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxTopK'>>, 'ARG_MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARGMAX': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.ArgMax'>>, 'MAX_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARG_MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARGMIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'MIN_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_OVERLAPS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayOverlaps'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_TO_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayToString'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayToString'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'ARRAY_UNIQUE_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUniqueAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CastToStrType'>>, 'CBRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cbrt'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CHR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Chr'>>, 'CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Chr'>>, 'COALESCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'IFNULL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'NVL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'COLLATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Collate'>>, 'COMBINED_AGG_FUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CombinedAggFunc'>>, 'COMBINED_PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CombinedParameterizedAgg'>>, 'CONCAT': <function Parser.<lambda>>, 'CONCAT_WS': <function Parser.<lambda>>, 'CONNECT_BY_ROOT': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.ConnectByRoot'>>, 'CONVERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Convert'>>, 'CORR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Corr'>>, 'COUNT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Count'>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'COUNTIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'COVAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CovarPop'>>, 'COVAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CovarSamp'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <function _build_date>, 'DATE_ADD': <function build_date_delta_with_interval.<locals>._builder>, 'DATEDIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATE_FROM_PARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <function build_date_delta_with_interval.<locals>._builder>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 'DATE_TRUNC': <function BigQuery.Parser.<lambda>>, 'DATETIME_ADD': <function build_date_delta_with_interval.<locals>._builder>, 'DATETIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <function build_date_delta_with_interval.<locals>._builder>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXPLODE_OUTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ExplodeOuter'>>, 'EXTRACT': <bound method Func.from_arg_list of 
<class 'sqlglot.expressions.Extract'>>, 'FIRST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.First'>>, 'FIRST_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FirstValue'>>, 'FLATTEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Flatten'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'GENERATE_DATE_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateDateArray'>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'GREATEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hex'>>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'IIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Initcap'>>, 'IS_INF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsInf'>>, 'ISINF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsInf'>>, 'IS_NAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'ISNAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'J_S_O_N_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArray'>>, 'J_S_O_N_ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayAgg'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'JSON_EXTRACT': <function build_extract_json_with_path.<locals>._builder>, 'JSON_EXTRACT_SCALAR': <function BigQuery.Parser.<lambda>>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'J_S_O_N_OBJECT_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObjectAgg'>>, 'J_S_O_N_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONTable'>>, 'LAG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lag'>>, 'LAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Last'>>, 'LAST_DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDay'>>, 'LAST_DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDay'>>, 'LAST_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastValue'>>, 'LEAD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lead'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Left'>>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <bound method Func.from_arg_list of 
<class 'sqlglot.expressions.Length'>>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <function build_logarithm>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'MONTHS_BETWEEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MonthsBetween'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NTH_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NthValue'>>, 'NULLIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nullif'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.OpenJSON'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParameterizedAgg'>>, 'PARSE_JSON': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'JSON_PARSE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Posexplode'>>, 'POSEXPLODE_OUTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PosexplodeOuter'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'PREDICT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Predict'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'RAND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Rand'>>, 'RANDOM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Rand'>>, 'RANDN': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Randn'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <function BigQuery.Parser.<lambda>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_REPLACE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpReplace'>>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Right'>>, 'ROUND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_DIVIDE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeDivide'>>, 'SIGN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sign'>>, 'SIGNUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sign'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SortArray'>>, 'SPLIT': <function BigQuery.Parser.<lambda>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STARTS_WITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STARTSWITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToMap'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'STUFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'INSERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'SUBSTRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME_ADD': <function build_date_delta_with_interval.<locals>._builder>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.TimeDiff'>>, 'TIME_FROM_PARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeFromParts'>>, 'TIMEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeFromParts'>>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <function build_date_delta_with_interval.<locals>._builder>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': <function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP': <function _build_timestamp>, 'TIMESTAMP_ADD': <function build_date_delta_with_interval.<locals>._builder>, 'TIMESTAMPDIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_FROM_PARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampFromParts'>>, 'TIMESTAMPFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampFromParts'>>, 'TIMESTAMP_SUB': <function build_date_delta_with_interval.<locals>._builder>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToArray'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TO_DAYS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToDays'>>, 'TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToMap'>>, 'TO_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToNumber'>>, 'TRANSFORM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Transform'>>, 'TRIM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsDiff'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'TS_OR_DS_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToTime'>>, 'TS_OR_DS_TO_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToTimestamp'>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixDate'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.UnixToTimeStr'>>, 'UPPER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'VAR_MAP': <function build_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'XOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Xor'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'GLOB': <function Parser.<lambda>>, 'JSON_EXTRACT_PATH_TEXT': <function build_extract_json_with_path.<locals>._builder>, 'LIKE': <function build_like>, 'LOG2': <function Parser.<lambda>>, 'LOG10': <function Parser.<lambda>>, 'MOD': <function Parser.<lambda>>, 'DIV': <function binary_from_function.<locals>.<lambda>>, 'FORMAT_DATE': <function BigQuery.Parser.<lambda>>, 'GENERATE_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'TO_HEX': <function _build_to_hex>, 'PARSE_DATE': <function BigQuery.Parser.<lambda>>, 'PARSE_TIMESTAMP': <function _build_parse_timestamp>, 'REGEXP_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'SHA256': <function BigQuery.Parser.<lambda>>, 'SHA512': <function BigQuery.Parser.<lambda>>, 'TIME': <function _build_time>, 'TIMESTAMP_MICROS': <function BigQuery.Parser.<lambda>>, 'TIMESTAMP_MILLIS': <function BigQuery.Parser.<lambda>>, 'TIMESTAMP_SECONDS': <function BigQuery.Parser.<lambda>>, 'TO_JSON_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>}
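The FUNCTIONS table above is what routes BigQuery function names to sqlglot expression builders; REGEXP_CONTAINS, for example, is listed against exp.RegexpLike and GENERATE_ARRAY against exp.GenerateSeries. A small sketch of that routing through the public parse_one API (again assuming a matching sqlglot version; the queries are illustrative):

    import sqlglot
    from sqlglot import exp

    # REGEXP_CONTAINS is parsed into the generic RegexpLike expression.
    ast = sqlglot.parse_one("SELECT REGEXP_CONTAINS(col, '^a') FROM t", read="bigquery")
    print(ast.find(exp.RegexpLike) is not None)  # True

    # GENERATE_ARRAY is parsed as GenerateSeries, per the mapping above.
    gen = sqlglot.parse_one("SELECT GENERATE_ARRAY(1, 5)", read="bigquery")
    print(gen.find(exp.GenerateSeries) is not None)  # True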
    @@ -2973,7 +2985,7 @@ Default: 3
    NESTED_TYPE_TOKENS = - {<TokenType.MAP: 'MAP'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.TABLE: 'TABLE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.NESTED: 'NESTED'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.ARRAY: 'ARRAY'>} + {<TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TABLE: 'TABLE'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.MAP: 'MAP'>, <TokenType.NULLABLE: 'NULLABLE'>}
    @@ -2986,7 +2998,7 @@ Default: 3
    PROPERTY_PARSERS = - {'ALGORITHM': <function Parser.<lambda>>, 'AUTO': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'BLOCKCOMPRESSION': <function Parser.<lambda>>, 'CHARSET': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECKSUM': <function Parser.<lambda>>, 'CLUSTER BY': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'CONTAINS': <function Parser.<lambda>>, 'COPY': <function Parser.<lambda>>, 'DATABLOCKSIZE': <function Parser.<lambda>>, 'DEFINER': <function Parser.<lambda>>, 'DETERMINISTIC': <function Parser.<lambda>>, 'DISTKEY': <function Parser.<lambda>>, 'DISTSTYLE': <function Parser.<lambda>>, 'ENGINE': <function Parser.<lambda>>, 'EXECUTE': <function Parser.<lambda>>, 'EXTERNAL': <function Parser.<lambda>>, 'FALLBACK': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'FREESPACE': <function Parser.<lambda>>, 'HEAP': <function Parser.<lambda>>, 'IMMUTABLE': <function Parser.<lambda>>, 'INHERITS': <function Parser.<lambda>>, 'INPUT': <function Parser.<lambda>>, 'JOURNAL': <function Parser.<lambda>>, 'LANGUAGE': <function Parser.<lambda>>, 'LAYOUT': <function Parser.<lambda>>, 'LIFETIME': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'LOCATION': <function Parser.<lambda>>, 'LOCK': <function Parser.<lambda>>, 'LOCKING': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATERIALIZED': <function Parser.<lambda>>, 'MERGEBLOCKRATIO': <function Parser.<lambda>>, 'MODIFIES': <function Parser.<lambda>>, 'MULTISET': <function Parser.<lambda>>, 'NO': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'ORDER BY': <function Parser.<lambda>>, 'OUTPUT': <function Parser.<lambda>>, 'PARTITION': <function Parser.<lambda>>, 'PARTITION BY': <function Parser.<lambda>>, 'PARTITIONED BY': <function Parser.<lambda>>, 'PARTITIONED_BY': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'RANGE': <function Parser.<lambda>>, 'READS': <function Parser.<lambda>>, 'REMOTE': <function Parser.<lambda>>, 'RETURNS': <function Parser.<lambda>>, 'ROW': <function Parser.<lambda>>, 'ROW_FORMAT': <function Parser.<lambda>>, 'SAMPLE': <function Parser.<lambda>>, 'SET': <function Parser.<lambda>>, 'SETTINGS': <function Parser.<lambda>>, 'SORTKEY': <function Parser.<lambda>>, 'SOURCE': <function Parser.<lambda>>, 'STABLE': <function Parser.<lambda>>, 'STORED': <function Parser.<lambda>>, 'SYSTEM_VERSIONING': <function Parser.<lambda>>, 'TBLPROPERTIES': <function Parser.<lambda>>, 'TEMP': <function Parser.<lambda>>, 'TEMPORARY': <function Parser.<lambda>>, 'TO': <function Parser.<lambda>>, 'TRANSIENT': <function Parser.<lambda>>, 'TRANSFORM': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'USING': <function Parser.<lambda>>, 'VOLATILE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'NOT DETERMINISTIC': <function BigQuery.Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>} + {'ALGORITHM': <function Parser.<lambda>>, 'AUTO': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'BACKUP': <function Parser.<lambda>>, 'BLOCKCOMPRESSION': <function Parser.<lambda>>, 'CHARSET': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECKSUM': <function Parser.<lambda>>, 'CLUSTER BY': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function 
Parser.<lambda>>, 'CONTAINS': <function Parser.<lambda>>, 'COPY': <function Parser.<lambda>>, 'DATABLOCKSIZE': <function Parser.<lambda>>, 'DEFINER': <function Parser.<lambda>>, 'DETERMINISTIC': <function Parser.<lambda>>, 'DISTKEY': <function Parser.<lambda>>, 'DISTSTYLE': <function Parser.<lambda>>, 'ENGINE': <function Parser.<lambda>>, 'EXECUTE': <function Parser.<lambda>>, 'EXTERNAL': <function Parser.<lambda>>, 'FALLBACK': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'FREESPACE': <function Parser.<lambda>>, 'GLOBAL': <function Parser.<lambda>>, 'HEAP': <function Parser.<lambda>>, 'ICEBERG': <function Parser.<lambda>>, 'IMMUTABLE': <function Parser.<lambda>>, 'INHERITS': <function Parser.<lambda>>, 'INPUT': <function Parser.<lambda>>, 'JOURNAL': <function Parser.<lambda>>, 'LANGUAGE': <function Parser.<lambda>>, 'LAYOUT': <function Parser.<lambda>>, 'LIFETIME': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'LOCATION': <function Parser.<lambda>>, 'LOCK': <function Parser.<lambda>>, 'LOCKING': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATERIALIZED': <function Parser.<lambda>>, 'MERGEBLOCKRATIO': <function Parser.<lambda>>, 'MODIFIES': <function Parser.<lambda>>, 'MULTISET': <function Parser.<lambda>>, 'NO': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'ORDER BY': <function Parser.<lambda>>, 'OUTPUT': <function Parser.<lambda>>, 'PARTITION': <function Parser.<lambda>>, 'PARTITION BY': <function Parser.<lambda>>, 'PARTITIONED BY': <function Parser.<lambda>>, 'PARTITIONED_BY': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'RANGE': <function Parser.<lambda>>, 'READS': <function Parser.<lambda>>, 'REMOTE': <function Parser.<lambda>>, 'RETURNS': <function Parser.<lambda>>, 'ROW': <function Parser.<lambda>>, 'ROW_FORMAT': <function Parser.<lambda>>, 'SAMPLE': <function Parser.<lambda>>, 'SET': <function Parser.<lambda>>, 'SETTINGS': <function Parser.<lambda>>, 'SHARING': <function Parser.<lambda>>, 'SORTKEY': <function Parser.<lambda>>, 'SOURCE': <function Parser.<lambda>>, 'STABLE': <function Parser.<lambda>>, 'STORED': <function Parser.<lambda>>, 'SYSTEM_VERSIONING': <function Parser.<lambda>>, 'TBLPROPERTIES': <function Parser.<lambda>>, 'TEMP': <function Parser.<lambda>>, 'TEMPORARY': <function Parser.<lambda>>, 'TO': <function Parser.<lambda>>, 'TRANSIENT': <function Parser.<lambda>>, 'TRANSFORM': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'USING': <function Parser.<lambda>>, 'UNLOGGED': <function Parser.<lambda>>, 'VOLATILE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'NOT DETERMINISTIC': <function BigQuery.Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
    @@ -2999,7 +3011,7 @@ Default: 3
    CONSTRAINT_PARSERS = - {'AUTOINCREMENT': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'CASESPECIFIC': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECK': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COMPRESS': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'NONCLUSTERED': <function Parser.<lambda>>, 'DEFAULT': <function Parser.<lambda>>, 'ENCODE': <function Parser.<lambda>>, 'FOREIGN KEY': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'GENERATED': <function Parser.<lambda>>, 'IDENTITY': <function Parser.<lambda>>, 'INLINE': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'NOT': <function Parser.<lambda>>, 'NULL': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'PATH': <function Parser.<lambda>>, 'PERIOD': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'REFERENCES': <function Parser.<lambda>>, 'TITLE': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'UNIQUE': <function Parser.<lambda>>, 'UPPERCASE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>} + {'AUTOINCREMENT': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'CASESPECIFIC': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECK': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COMPRESS': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'NONCLUSTERED': <function Parser.<lambda>>, 'DEFAULT': <function Parser.<lambda>>, 'ENCODE': <function Parser.<lambda>>, 'EXCLUDE': <function Parser.<lambda>>, 'FOREIGN KEY': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'GENERATED': <function Parser.<lambda>>, 'IDENTITY': <function Parser.<lambda>>, 'INLINE': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'NOT': <function Parser.<lambda>>, 'NULL': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'PATH': <function Parser.<lambda>>, 'PERIOD': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'REFERENCES': <function Parser.<lambda>>, 'TITLE': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'UNIQUE': <function Parser.<lambda>>, 'UPPERCASE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
    @@ -3037,7 +3049,7 @@ Default: 3
    STATEMENT_PARSERS = - {<TokenType.ALTER: 'ALTER'>: <function Parser.<lambda>>, <TokenType.BEGIN: 'BEGIN'>: <function Parser.<lambda>>, <TokenType.CACHE: 'CACHE'>: <function Parser.<lambda>>, <TokenType.COMMIT: 'COMMIT'>: <function Parser.<lambda>>, <TokenType.COMMENT: 'COMMENT'>: <function Parser.<lambda>>, <TokenType.CREATE: 'CREATE'>: <function Parser.<lambda>>, <TokenType.DELETE: 'DELETE'>: <function Parser.<lambda>>, <TokenType.DESC: 'DESC'>: <function Parser.<lambda>>, <TokenType.DESCRIBE: 'DESCRIBE'>: <function Parser.<lambda>>, <TokenType.DROP: 'DROP'>: <function Parser.<lambda>>, <TokenType.INSERT: 'INSERT'>: <function Parser.<lambda>>, <TokenType.KILL: 'KILL'>: <function Parser.<lambda>>, <TokenType.LOAD: 'LOAD'>: <function Parser.<lambda>>, <TokenType.MERGE: 'MERGE'>: <function Parser.<lambda>>, <TokenType.PIVOT: 'PIVOT'>: <function Parser.<lambda>>, <TokenType.PRAGMA: 'PRAGMA'>: <function Parser.<lambda>>, <TokenType.REFRESH: 'REFRESH'>: <function Parser.<lambda>>, <TokenType.ROLLBACK: 'ROLLBACK'>: <function Parser.<lambda>>, <TokenType.SET: 'SET'>: <function Parser.<lambda>>, <TokenType.UNCACHE: 'UNCACHE'>: <function Parser.<lambda>>, <TokenType.UPDATE: 'UPDATE'>: <function Parser.<lambda>>, <TokenType.TRUNCATE: 'TRUNCATE'>: <function Parser.<lambda>>, <TokenType.USE: 'USE'>: <function Parser.<lambda>>, <TokenType.ELSE: 'ELSE'>: <function BigQuery.Parser.<lambda>>, <TokenType.END: 'END'>: <function BigQuery.Parser.<lambda>>, <TokenType.FOR: 'FOR'>: <function BigQuery.Parser.<lambda>>} + {<TokenType.ALTER: 'ALTER'>: <function Parser.<lambda>>, <TokenType.BEGIN: 'BEGIN'>: <function Parser.<lambda>>, <TokenType.CACHE: 'CACHE'>: <function Parser.<lambda>>, <TokenType.COMMENT: 'COMMENT'>: <function Parser.<lambda>>, <TokenType.COMMIT: 'COMMIT'>: <function Parser.<lambda>>, <TokenType.CREATE: 'CREATE'>: <function Parser.<lambda>>, <TokenType.DELETE: 'DELETE'>: <function Parser.<lambda>>, <TokenType.DESC: 'DESC'>: <function Parser.<lambda>>, <TokenType.DESCRIBE: 'DESCRIBE'>: <function Parser.<lambda>>, <TokenType.DROP: 'DROP'>: <function Parser.<lambda>>, <TokenType.INSERT: 'INSERT'>: <function Parser.<lambda>>, <TokenType.KILL: 'KILL'>: <function Parser.<lambda>>, <TokenType.LOAD: 'LOAD'>: <function Parser.<lambda>>, <TokenType.MERGE: 'MERGE'>: <function Parser.<lambda>>, <TokenType.PIVOT: 'PIVOT'>: <function Parser.<lambda>>, <TokenType.PRAGMA: 'PRAGMA'>: <function Parser.<lambda>>, <TokenType.REFRESH: 'REFRESH'>: <function Parser.<lambda>>, <TokenType.ROLLBACK: 'ROLLBACK'>: <function Parser.<lambda>>, <TokenType.SET: 'SET'>: <function Parser.<lambda>>, <TokenType.TRUNCATE: 'TRUNCATE'>: <function Parser.<lambda>>, <TokenType.UNCACHE: 'UNCACHE'>: <function Parser.<lambda>>, <TokenType.UPDATE: 'UPDATE'>: <function Parser.<lambda>>, <TokenType.USE: 'USE'>: <function Parser.<lambda>>, <TokenType.ELSE: 'ELSE'>: <function BigQuery.Parser.<lambda>>, <TokenType.END: 'END'>: <function BigQuery.Parser.<lambda>>, <TokenType.FOR: 'FOR'>: <function BigQuery.Parser.<lambda>>}
    @@ -3062,7 +3074,7 @@ Default: 3
    TABLE_ALIAS_TOKENS = - {<TokenType.VAR: 'VAR'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.FALSE: 'FALSE'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.UINT: 'UINT'>, <TokenType.SEMI: 'SEMI'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.INT256: 'INT256'>, <TokenType.ROWS: 'ROWS'>, <TokenType.ANTI: 'ANTI'>, <TokenType.IPV6: 'IPV6'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.ROW: 'ROW'>, <TokenType.IS: 'IS'>, <TokenType.NEXT: 'NEXT'>, <TokenType.UMEDIUMINT: 'UMEDIUMINT'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.ASC: 'ASC'>, <TokenType.TIMESTAMP_S: 'TIMESTAMP_S'>, <TokenType.AGGREGATEFUNCTION: 'AGGREGATEFUNCTION'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.REPLACE: 'REPLACE'>, <TokenType.TRUE: 'TRUE'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.TEXT: 'TEXT'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.NESTED: 'NESTED'>, <TokenType.KILL: 'KILL'>, <TokenType.BINARY: 'BINARY'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.SET: 'SET'>, <TokenType.TIMESTAMP_MS: 'TIMESTAMP_MS'>, <TokenType.CASE: 'CASE'>, <TokenType.RANGE: 'RANGE'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.NULL: 'NULL'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.KEEP: 'KEEP'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.FINAL: 'FINAL'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.ANY: 'ANY'>, <TokenType.ENUM: 'ENUM'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.ALL: 'ALL'>, <TokenType.VIEW: 'VIEW'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.MERGE: 'MERGE'>, <TokenType.TINYBLOB: 'TINYBLOB'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.DATE32: 'DATE32'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.BIT: 'BIT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.MODEL: 'MODEL'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.SHOW: 'SHOW'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.UDECIMAL: 'UDECIMAL'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.TIME: 'TIME'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.RECURSIVE: 'RECURSIVE'>, <TokenType.TOP: 'TOP'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.STORAGE_INTEGRATION: 'STORAGE_INTEGRATION'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.IPV4: 'IPV4'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.DELETE: 'DELETE'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.SIMPLEAGGREGATEFUNCTION: 'SIMPLEAGGREGATEFUNCTION'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.OPERATOR: 'OPERATOR'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.INT128: 'INT128'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.UINT256: 'UINT256'>, <TokenType.SOME: 
'SOME'>, <TokenType.BPCHAR: 'BPCHAR'>, <TokenType.TINYTEXT: 'TINYTEXT'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.CACHE: 'CACHE'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.JSON: 'JSON'>, <TokenType.YEAR: 'YEAR'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, <TokenType.REFRESH: 'REFRESH'>, <TokenType.USE: 'USE'>, <TokenType.DESC: 'DESC'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.JSONB: 'JSONB'>, <TokenType.INT: 'INT'>, <TokenType.INDEX: 'INDEX'>, <TokenType.DIV: 'DIV'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.DATE: 'DATE'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.INET: 'INET'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.TABLE: 'TABLE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.XML: 'XML'>, <TokenType.UUID: 'UUID'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.TIMESTAMP_NS: 'TIMESTAMP_NS'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.LOAD: 'LOAD'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.FIRST: 'FIRST'>, <TokenType.MAP: 'MAP'>, <TokenType.TRUNCATE: 'TRUNCATE'>, <TokenType.OVERLAPS: 'OVERLAPS'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.END: 'END'>, <TokenType.FILTER: 'FILTER'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.SUPER: 'SUPER'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.DEFAULT: 'DEFAULT'>} + {<TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.JSON: 'JSON'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.TRUNCATE: 'TRUNCATE'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.SEQUENCE: 'SEQUENCE'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.MERGE: 'MERGE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.UINT128: 'UINT128'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.CASE: 'CASE'>, <TokenType.DESC: 'DESC'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.INT256: 'INT256'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.CACHE: 'CACHE'>, <TokenType.TIMESTAMP_NS: 'TIMESTAMP_NS'>, <TokenType.XML: 'XML'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.FINAL: 'FINAL'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 
<TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.INDEX: 'INDEX'>, <TokenType.VAR: 'VAR'>, <TokenType.END: 'END'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.SEMI: 'SEMI'>, <TokenType.REFRESH: 'REFRESH'>, <TokenType.IPV4: 'IPV4'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.TIME: 'TIME'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.NEXT: 'NEXT'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.UDECIMAL: 'UDECIMAL'>, <TokenType.DELETE: 'DELETE'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.FALSE: 'FALSE'>, <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.ROWS: 'ROWS'>, <TokenType.MONEY: 'MONEY'>, <TokenType.TIMESTAMP_MS: 'TIMESTAMP_MS'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.FILTER: 'FILTER'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.USE: 'USE'>, <TokenType.UUID: 'UUID'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.INET: 'INET'>, <TokenType.REPLACE: 'REPLACE'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.DATE32: 'DATE32'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.DATE: 'DATE'>, <TokenType.TEXT: 'TEXT'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.SUPER: 'SUPER'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.VIEW: 'VIEW'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.MAP: 'MAP'>, <TokenType.TIMESTAMP_S: 'TIMESTAMP_S'>, <TokenType.OVERLAPS: 'OVERLAPS'>, <TokenType.IPV6: 'IPV6'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.NAME: 'NAME'>, <TokenType.ENUM: 'ENUM'>, <TokenType.SHOW: 'SHOW'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.FIRST: 'FIRST'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.NULL: 'NULL'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.SIMPLEAGGREGATEFUNCTION: 'SIMPLEAGGREGATEFUNCTION'>, <TokenType.BPCHAR: 'BPCHAR'>, <TokenType.BIT: 'BIT'>, <TokenType.KILL: 'KILL'>, <TokenType.MODEL: 'MODEL'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.ANTI: 'ANTI'>, <TokenType.SET: 'SET'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.ASC: 'ASC'>, <TokenType.JSONB: 'JSONB'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.AGGREGATEFUNCTION: 'AGGREGATEFUNCTION'>, <TokenType.ROW: 'ROW'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.SOME: 'SOME'>, <TokenType.CHAR: 'CHAR'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.TRUE: 'TRUE'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.TINYTEXT: 'TINYTEXT'>, <TokenType.TOP: 'TOP'>, <TokenType.LOAD: 'LOAD'>, <TokenType.BIGSERIAL: 
'BIGSERIAL'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.TINYBLOB: 'TINYBLOB'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.STORAGE_INTEGRATION: 'STORAGE_INTEGRATION'>, <TokenType.UMEDIUMINT: 'UMEDIUMINT'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.BINARY: 'BINARY'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.OPERATOR: 'OPERATOR'>, <TokenType.INT: 'INT'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.INT128: 'INT128'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.IS: 'IS'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.KEEP: 'KEEP'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.TABLE: 'TABLE'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.YEAR: 'YEAR'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.RANGE: 'RANGE'>, <TokenType.RECURSIVE: 'RECURSIVE'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.UINT: 'UINT'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.ANY: 'ANY'>, <TokenType.DIV: 'DIV'>, <TokenType.ALL: 'ALL'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>}
    @@ -3111,6 +3123,7 @@ Default: 3
    CREATABLES
    ID_VAR_TOKENS
    INTERVAL_VARS
    +
    ALIAS_TOKENS
    COMMENT_TABLE_ALIAS_TOKENS
    UPDATE_ALIAS_TOKENS
    TRIM_TYPES
    @@ -3133,6 +3146,8 @@ Default: 3
    COLUMN_OPERATORS
    EXPRESSION_PARSERS
    UNARY_PARSERS
    +
    STRING_PARSERS
    +
    NUMERIC_PARSERS
    PRIMARY_PARSERS
    PLACEHOLDER_PARSERS
    ALTER_PARSERS
    @@ -3149,13 +3164,18 @@ Default: 3
    PRE_VOLATILE_TOKENS
    TRANSACTION_KIND
    TRANSACTION_CHARACTERISTICS
    +
    CONFLICT_ACTIONS
    +
    CREATE_SEQUENCE
    +
    ISOLATED_LOADING_OPTIONS
    USABLES
    +
    CAST_ACTIONS
    INSERT_ALTERNATIVES
    CLONE_KEYWORDS
    HISTORICAL_DATA_KIND
    OPCLASS_FOLLOW_KEYWORDS
    OPTYPE_FOLLOW_TOKENS
    TABLE_INDEX_HINT_TOKENS
    +
    VIEW_ATTRIBUTES
    WINDOW_ALIAS_TOKENS
    WINDOW_BEFORE_PAREN_TOKENS
    WINDOW_SIDES
    @@ -3164,6 +3184,7 @@ Default: 3
    ADD_CONSTRAINT_TOKENS
    DISTINCT_TOKENS
    UNNEST_OFFSET_ALIAS_TOKENS
    +
    SELECT_START_TOKENS
    STRICT_CAST
    IDENTIFY_PIVOT_STRINGS
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN
    @@ -3176,6 +3197,7 @@ Default: 3
    NO_PAREN_IF_COMMANDS
    JSON_ARROWS_REQUIRE_JSON_TYPE
    VALUES_FOLLOWED_BY_PAREN
    +
    INTERVAL_SPANS
    error_level
    error_message_context
    max_errors
    @@ -3205,355 +3227,357 @@ Default: 3
    -551    class Generator(generator.Generator):
    
    -552        EXPLICIT_UNION = True
    -553        INTERVAL_ALLOWS_PLURAL_FORM = False
    -554        JOIN_HINTS = False
    -555        QUERY_HINTS = False
    -556        TABLE_HINTS = False
    -557        LIMIT_FETCH = "LIMIT"
    -558        RENAME_TABLE_WITH_DB = False
    -559        NVL2_SUPPORTED = False
    -560        UNNEST_WITH_ORDINALITY = False
    -561        COLLATE_IS_FUNC = True
    -562        LIMIT_ONLY_LITERALS = True
    -563        SUPPORTS_TABLE_ALIAS_COLUMNS = False
    -564        UNPIVOT_ALIASES_ARE_IDENTIFIERS = False
    -565        JSON_KEY_VALUE_PAIR_SEP = ","
    -566        NULL_ORDERING_SUPPORTED = False
    -567        IGNORE_NULLS_IN_FUNC = True
    -568        JSON_PATH_SINGLE_QUOTE_ESCAPE = True
    -569        CAN_IMPLEMENT_ARRAY_ANY = True
    -570        NAMED_PLACEHOLDER_TOKEN = "@"
    -571
    -572        TRANSFORMS = {
    -573            **generator.Generator.TRANSFORMS,
    -574            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
    -575            exp.ArgMax: arg_max_or_min_no_count("MAX_BY"),
    -576            exp.ArgMin: arg_max_or_min_no_count("MIN_BY"),
    -577            exp.ArrayContains: _array_contains_sql,
    -578            exp.ArrayFilter: filter_array_using_unnest,
    -579            exp.ArraySize: rename_func("ARRAY_LENGTH"),
    -580            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
    -581            exp.CollateProperty: lambda self, e: (
    -582                f"DEFAULT COLLATE {self.sql(e, 'this')}"
    -583                if e.args.get("default")
    -584                else f"COLLATE {self.sql(e, 'this')}"
    -585            ),
    -586            exp.Commit: lambda *_: "COMMIT TRANSACTION",
    -587            exp.CountIf: rename_func("COUNTIF"),
    -588            exp.Create: _create_sql,
    -589            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
    -590            exp.DateAdd: date_add_interval_sql("DATE", "ADD"),
    -591            exp.DateDiff: lambda self, e: self.func(
    -592                "DATE_DIFF", e.this, e.expression, e.unit or "DAY"
    -593            ),
    -594            exp.DateFromParts: rename_func("DATE"),
    -595            exp.DateStrToDate: datestrtodate_sql,
    -596            exp.DateSub: date_add_interval_sql("DATE", "SUB"),
    -597            exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
    -598            exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
    -599            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
    -600            exp.FromTimeZone: lambda self, e: self.func(
    -601                "DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'"
    -602            ),
    -603            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
    -604            exp.GroupConcat: rename_func("STRING_AGG"),
    -605            exp.Hex: rename_func("TO_HEX"),
    -606            exp.If: if_sql(false_value="NULL"),
    -607            exp.ILike: no_ilike_sql,
    -608            exp.IntDiv: rename_func("DIV"),
    -609            exp.JSONFormat: rename_func("TO_JSON_STRING"),
    -610            exp.Max: max_or_greatest,
    -611            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
    -612            exp.MD5Digest: rename_func("MD5"),
    -613            exp.Min: min_or_least,
    -614            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    -615            exp.RegexpExtract: lambda self, e: self.func(
    -616                "REGEXP_EXTRACT",
    -617                e.this,
    -618                e.expression,
    -619                e.args.get("position"),
    -620                e.args.get("occurrence"),
    -621            ),
    -622            exp.RegexpReplace: regexp_replace_sql,
    -623            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
    -624            exp.ReturnsProperty: _returnsproperty_sql,
    -625            exp.Rollback: lambda *_: "ROLLBACK TRANSACTION",
    -626            exp.Select: transforms.preprocess(
    -627                [
    -628                    transforms.explode_to_unnest(),
    -629                    _unqualify_unnest,
    -630                    transforms.eliminate_distinct_on,
    -631                    _alias_ordered_group,
    -632                    transforms.eliminate_semi_and_anti_joins,
    -633                ]
    -634            ),
    -635            exp.SHA2: lambda self, e: self.func(
    -636                "SHA256" if e.text("length") == "256" else "SHA512", e.this
    -637            ),
    -638            exp.StabilityProperty: lambda self, e: (
    -639                "DETERMINISTIC" if e.name == "IMMUTABLE" else "NOT DETERMINISTIC"
    -640            ),
    -641            exp.StrToDate: lambda self, e: self.func("PARSE_DATE", self.format_time(e), e.this),
    -642            exp.StrToTime: lambda self, e: self.func(
    -643                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
    +553    class Generator(generator.Generator):
    
    +554        EXPLICIT_UNION = True
    +555        INTERVAL_ALLOWS_PLURAL_FORM = False
    +556        JOIN_HINTS = False
    +557        QUERY_HINTS = False
    +558        TABLE_HINTS = False
    +559        LIMIT_FETCH = "LIMIT"
    +560        RENAME_TABLE_WITH_DB = False
    +561        NVL2_SUPPORTED = False
    +562        UNNEST_WITH_ORDINALITY = False
    +563        COLLATE_IS_FUNC = True
    +564        LIMIT_ONLY_LITERALS = True
    +565        SUPPORTS_TABLE_ALIAS_COLUMNS = False
    +566        UNPIVOT_ALIASES_ARE_IDENTIFIERS = False
    +567        JSON_KEY_VALUE_PAIR_SEP = ","
    +568        NULL_ORDERING_SUPPORTED = False
    +569        IGNORE_NULLS_IN_FUNC = True
    +570        JSON_PATH_SINGLE_QUOTE_ESCAPE = True
    +571        CAN_IMPLEMENT_ARRAY_ANY = True
    +572        SUPPORTS_TO_NUMBER = False
    +573        NAMED_PLACEHOLDER_TOKEN = "@"
    +574
    +575        TRANSFORMS = {
    +576            **generator.Generator.TRANSFORMS,
    +577            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
    +578            exp.ArgMax: arg_max_or_min_no_count("MAX_BY"),
    +579            exp.ArgMin: arg_max_or_min_no_count("MIN_BY"),
    +580            exp.ArrayContains: _array_contains_sql,
    +581            exp.ArrayFilter: filter_array_using_unnest,
    +582            exp.ArraySize: rename_func("ARRAY_LENGTH"),
    +583            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
    +584            exp.CollateProperty: lambda self, e: (
    +585                f"DEFAULT COLLATE {self.sql(e, 'this')}"
    +586                if e.args.get("default")
    +587                else f"COLLATE {self.sql(e, 'this')}"
    +588            ),
    +589            exp.Commit: lambda *_: "COMMIT TRANSACTION",
    +590            exp.CountIf: rename_func("COUNTIF"),
    +591            exp.Create: _create_sql,
    +592            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
    +593            exp.DateAdd: date_add_interval_sql("DATE", "ADD"),
    +594            exp.DateDiff: lambda self, e: self.func(
    +595                "DATE_DIFF", e.this, e.expression, unit_to_var(e)
    +596            ),
    +597            exp.DateFromParts: rename_func("DATE"),
    +598            exp.DateStrToDate: datestrtodate_sql,
    +599            exp.DateSub: date_add_interval_sql("DATE", "SUB"),
    +600            exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
    +601            exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
    +602            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
    +603            exp.FromTimeZone: lambda self, e: self.func(
    +604                "DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'"
    +605            ),
    +606            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
    +607            exp.GroupConcat: rename_func("STRING_AGG"),
    +608            exp.Hex: rename_func("TO_HEX"),
    +609            exp.If: if_sql(false_value="NULL"),
    +610            exp.ILike: no_ilike_sql,
    +611            exp.IntDiv: rename_func("DIV"),
    +612            exp.JSONFormat: rename_func("TO_JSON_STRING"),
    +613            exp.Max: max_or_greatest,
    +614            exp.Mod: rename_func("MOD"),
    +615            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
    +616            exp.MD5Digest: rename_func("MD5"),
    +617            exp.Min: min_or_least,
    +618            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
    +619            exp.RegexpExtract: lambda self, e: self.func(
    +620                "REGEXP_EXTRACT",
    +621                e.this,
    +622                e.expression,
    +623                e.args.get("position"),
    +624                e.args.get("occurrence"),
    +625            ),
    +626            exp.RegexpReplace: regexp_replace_sql,
    +627            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
    +628            exp.ReturnsProperty: _returnsproperty_sql,
    +629            exp.Rollback: lambda *_: "ROLLBACK TRANSACTION",
    +630            exp.Select: transforms.preprocess(
    +631                [
    +632                    transforms.explode_to_unnest(),
    +633                    _unqualify_unnest,
    +634                    transforms.eliminate_distinct_on,
    +635                    _alias_ordered_group,
    +636                    transforms.eliminate_semi_and_anti_joins,
    +637                ]
    +638            ),
    +639            exp.SHA2: lambda self, e: self.func(
    +640                "SHA256" if e.text("length") == "256" else "SHA512", e.this
    +641            ),
    +642            exp.StabilityProperty: lambda self, e: (
    +643                "DETERMINISTIC" if e.name == "IMMUTABLE" else "NOT DETERMINISTIC"
     644            ),
    -645            exp.TimeAdd: date_add_interval_sql("TIME", "ADD"),
    -646            exp.TimeFromParts: rename_func("TIME"),
    -647            exp.TimeSub: date_add_interval_sql("TIME", "SUB"),
    -648            exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"),
    -649            exp.TimestampDiff: rename_func("TIMESTAMP_DIFF"),
    -650            exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"),
    -651            exp.TimeStrToTime: timestrtotime_sql,
    -652            exp.Transaction: lambda *_: "BEGIN TRANSACTION",
    -653            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
    -654            exp.TsOrDsAdd: _ts_or_ds_add_sql,
    -655            exp.TsOrDsDiff: _ts_or_ds_diff_sql,
    -656            exp.TsOrDsToTime: rename_func("TIME"),
    -657            exp.Unhex: rename_func("FROM_HEX"),
    -658            exp.UnixDate: rename_func("UNIX_DATE"),
    -659            exp.UnixToTime: _unix_to_time_sql,
    -660            exp.Values: _derived_table_values_to_unnest,
    -661            exp.VariancePop: rename_func("VAR_POP"),
    -662        }
    -663
    -664        SUPPORTED_JSON_PATH_PARTS = {
    -665            exp.JSONPathKey,
    -666            exp.JSONPathRoot,
    -667            exp.JSONPathSubscript,
    -668        }
    -669
    -670        TYPE_MAPPING = {
    -671            **generator.Generator.TYPE_MAPPING,
    -672            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
    -673            exp.DataType.Type.BIGINT: "INT64",
    -674            exp.DataType.Type.BINARY: "BYTES",
    -675            exp.DataType.Type.BOOLEAN: "BOOL",
    -676            exp.DataType.Type.CHAR: "STRING",
    -677            exp.DataType.Type.DECIMAL: "NUMERIC",
    -678            exp.DataType.Type.DOUBLE: "FLOAT64",
    -679            exp.DataType.Type.FLOAT: "FLOAT64",
    -680            exp.DataType.Type.INT: "INT64",
    -681            exp.DataType.Type.NCHAR: "STRING",
    -682            exp.DataType.Type.NVARCHAR: "STRING",
    -683            exp.DataType.Type.SMALLINT: "INT64",
    -684            exp.DataType.Type.TEXT: "STRING",
    -685            exp.DataType.Type.TIMESTAMP: "DATETIME",
    -686            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
    -687            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
    -688            exp.DataType.Type.TINYINT: "INT64",
    -689            exp.DataType.Type.VARBINARY: "BYTES",
    -690            exp.DataType.Type.VARCHAR: "STRING",
    -691            exp.DataType.Type.VARIANT: "ANY TYPE",
    -692        }
    -693
    -694        PROPERTIES_LOCATION = {
    -695            **generator.Generator.PROPERTIES_LOCATION,
    -696            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
    -697            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    -698        }
    -699
    -700        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
    -701        RESERVED_KEYWORDS = {
    -702            *generator.Generator.RESERVED_KEYWORDS,
    -703            "all",
    -704            "and",
    -705            "any",
    -706            "array",
    -707            "as",
    -708            "asc",
    -709            "assert_rows_modified",
    -710            "at",
    -711            "between",
    -712            "by",
    -713            "case",
    -714            "cast",
    -715            "collate",
    -716            "contains",
    -717            "create",
    -718            "cross",
    -719            "cube",
    -720            "current",
    -721            "default",
    -722            "define",
    -723            "desc",
    -724            "distinct",
    -725            "else",
    -726            "end",
    -727            "enum",
    -728            "escape",
    -729            "except",
    -730            "exclude",
    -731            "exists",
    -732            "extract",
    -733            "false",
    -734            "fetch",
    -735            "following",
    -736            "for",
    -737            "from",
    -738            "full",
    -739            "group",
    -740            "grouping",
    -741            "groups",
    -742            "hash",
    -743            "having",
    -744            "if",
    -745            "ignore",
    -746            "in",
    -747            "inner",
    -748            "intersect",
    -749            "interval",
    -750            "into",
    -751            "is",
    -752            "join",
    -753            "lateral",
    -754            "left",
    -755            "like",
    -756            "limit",
    -757            "lookup",
    -758            "merge",
    -759            "natural",
    -760            "new",
    -761            "no",
    -762            "not",
    -763            "null",
    -764            "nulls",
    -765            "of",
    -766            "on",
    -767            "or",
    -768            "order",
    -769            "outer",
    -770            "over",
    -771            "partition",
    -772            "preceding",
    -773            "proto",
    -774            "qualify",
    -775            "range",
    -776            "recursive",
    -777            "respect",
    -778            "right",
    -779            "rollup",
    -780            "rows",
    -781            "select",
    -782            "set",
    -783            "some",
    -784            "struct",
    -785            "tablesample",
    -786            "then",
    -787            "to",
    -788            "treat",
    -789            "true",
    -790            "unbounded",
    -791            "union",
    -792            "unnest",
    -793            "using",
    -794            "when",
    -795            "where",
    -796            "window",
    -797            "with",
    -798            "within",
    -799        }
    -800
    -801        def table_parts(self, expression: exp.Table) -> str:
    -802            # Depending on the context, `x.y` may not resolve to the same data source as `x`.`y`, so
    -803            # we need to make sure the correct quoting is used in each case.
    -804            #
    -805            # For example, if there is a CTE x that clashes with a schema name, then the former will
    -806            # return the table y in that schema, whereas the latter will return the CTE's y column:
    -807            #
    -808            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x.y`   -> cross join
    -809            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x`.`y` -> implicit unnest
    -810            if expression.meta.get("quoted_table"):
    -811                table_parts = ".".join(p.name for p in expression.parts)
    -812                return self.sql(exp.Identifier(this=table_parts, quoted=True))
    -813
    -814            return super().table_parts(expression)
    -815
    -816        def timetostr_sql(self, expression: exp.TimeToStr) -> str:
    -817            this = expression.this if isinstance(expression.this, exp.TsOrDsToDate) else expression
    -818            return self.func("FORMAT_DATE", self.format_time(expression), this.this)
    +645            exp.StrToDate: lambda self, e: self.func("PARSE_DATE", self.format_time(e), e.this),
    +646            exp.StrToTime: lambda self, e: self.func(
    +647                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
    +648            ),
    +649            exp.TimeAdd: date_add_interval_sql("TIME", "ADD"),
    +650            exp.TimeFromParts: rename_func("TIME"),
    +651            exp.TimeSub: date_add_interval_sql("TIME", "SUB"),
    +652            exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"),
    +653            exp.TimestampDiff: rename_func("TIMESTAMP_DIFF"),
    +654            exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"),
    +655            exp.TimeStrToTime: timestrtotime_sql,
    +656            exp.Transaction: lambda *_: "BEGIN TRANSACTION",
    +657            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
    +658            exp.TsOrDsAdd: _ts_or_ds_add_sql,
    +659            exp.TsOrDsDiff: _ts_or_ds_diff_sql,
    +660            exp.TsOrDsToTime: rename_func("TIME"),
    +661            exp.Unhex: rename_func("FROM_HEX"),
    +662            exp.UnixDate: rename_func("UNIX_DATE"),
    +663            exp.UnixToTime: _unix_to_time_sql,
    +664            exp.Values: _derived_table_values_to_unnest,
    +665            exp.VariancePop: rename_func("VAR_POP"),
    +666        }
    +667
    +668        SUPPORTED_JSON_PATH_PARTS = {
    +669            exp.JSONPathKey,
    +670            exp.JSONPathRoot,
    +671            exp.JSONPathSubscript,
    +672        }
    +673
    +674        TYPE_MAPPING = {
    +675            **generator.Generator.TYPE_MAPPING,
    +676            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
    +677            exp.DataType.Type.BIGINT: "INT64",
    +678            exp.DataType.Type.BINARY: "BYTES",
    +679            exp.DataType.Type.BOOLEAN: "BOOL",
    +680            exp.DataType.Type.CHAR: "STRING",
    +681            exp.DataType.Type.DECIMAL: "NUMERIC",
    +682            exp.DataType.Type.DOUBLE: "FLOAT64",
    +683            exp.DataType.Type.FLOAT: "FLOAT64",
    +684            exp.DataType.Type.INT: "INT64",
    +685            exp.DataType.Type.NCHAR: "STRING",
    +686            exp.DataType.Type.NVARCHAR: "STRING",
    +687            exp.DataType.Type.SMALLINT: "INT64",
    +688            exp.DataType.Type.TEXT: "STRING",
    +689            exp.DataType.Type.TIMESTAMP: "DATETIME",
    +690            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
    +691            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
    +692            exp.DataType.Type.TINYINT: "INT64",
    +693            exp.DataType.Type.VARBINARY: "BYTES",
    +694            exp.DataType.Type.VARCHAR: "STRING",
    +695            exp.DataType.Type.VARIANT: "ANY TYPE",
    +696        }
    +697
    +698        PROPERTIES_LOCATION = {
    +699            **generator.Generator.PROPERTIES_LOCATION,
    +700            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
    +701            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    +702        }
    +703
    +704        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
    +705        RESERVED_KEYWORDS = {
    +706            *generator.Generator.RESERVED_KEYWORDS,
    +707            "all",
    +708            "and",
    +709            "any",
    +710            "array",
    +711            "as",
    +712            "asc",
    +713            "assert_rows_modified",
    +714            "at",
    +715            "between",
    +716            "by",
    +717            "case",
    +718            "cast",
    +719            "collate",
    +720            "contains",
    +721            "create",
    +722            "cross",
    +723            "cube",
    +724            "current",
    +725            "default",
    +726            "define",
    +727            "desc",
    +728            "distinct",
    +729            "else",
    +730            "end",
    +731            "enum",
    +732            "escape",
    +733            "except",
    +734            "exclude",
    +735            "exists",
    +736            "extract",
    +737            "false",
    +738            "fetch",
    +739            "following",
    +740            "for",
    +741            "from",
    +742            "full",
    +743            "group",
    +744            "grouping",
    +745            "groups",
    +746            "hash",
    +747            "having",
    +748            "if",
    +749            "ignore",
    +750            "in",
    +751            "inner",
    +752            "intersect",
    +753            "interval",
    +754            "into",
    +755            "is",
    +756            "join",
    +757            "lateral",
    +758            "left",
    +759            "like",
    +760            "limit",
    +761            "lookup",
    +762            "merge",
    +763            "natural",
    +764            "new",
    +765            "no",
    +766            "not",
    +767            "null",
    +768            "nulls",
    +769            "of",
    +770            "on",
    +771            "or",
    +772            "order",
    +773            "outer",
    +774            "over",
    +775            "partition",
    +776            "preceding",
    +777            "proto",
    +778            "qualify",
    +779            "range",
    +780            "recursive",
    +781            "respect",
    +782            "right",
    +783            "rollup",
    +784            "rows",
    +785            "select",
    +786            "set",
    +787            "some",
    +788            "struct",
    +789            "tablesample",
    +790            "then",
    +791            "to",
    +792            "treat",
    +793            "true",
    +794            "unbounded",
    +795            "union",
    +796            "unnest",
    +797            "using",
    +798            "when",
    +799            "where",
    +800            "window",
    +801            "with",
    +802            "within",
    +803        }
    +804
    +805        def table_parts(self, expression: exp.Table) -> str:
    +806            # Depending on the context, `x.y` may not resolve to the same data source as `x`.`y`, so
    +807            # we need to make sure the correct quoting is used in each case.
    +808            #
    +809            # For example, if there is a CTE x that clashes with a schema name, then the former will
    +810            # return the table y in that schema, whereas the latter will return the CTE's y column:
    +811            #
    +812            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x.y`   -> cross join
    +813            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x`.`y` -> implicit unnest
    +814            if expression.meta.get("quoted_table"):
    +815                table_parts = ".".join(p.name for p in expression.parts)
    +816                return self.sql(exp.Identifier(this=table_parts, quoted=True))
    +817
    +818            return super().table_parts(expression)
     819
    -820        def eq_sql(self, expression: exp.EQ) -> str:
    -821            # Operands of = cannot be NULL in BigQuery
    -822            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
    -823                if not isinstance(expression.parent, exp.Update):
    -824                    return "NULL"
    -825
    -826            return self.binary(expression, "=")
    -827
    -828        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    -829            parent = expression.parent
    -830
    -831            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    -832            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    -833            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    -834                return self.func(
    -835                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    -836                )
    -837
    -838            return super().attimezone_sql(expression)
    -839
    -840        def trycast_sql(self, expression: exp.TryCast) -> str:
    -841            return self.cast_sql(expression, safe_prefix="SAFE_")
    -842
    -843        def array_sql(self, expression: exp.Array) -> str:
    -844            first_arg = seq_get(expression.expressions, 0)
    -845            if isinstance(first_arg, exp.Query):
    -846                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    -847
    -848            return inline_array_sql(self, expression)
    -849
    -850        def bracket_sql(self, expression: exp.Bracket) -> str:
    -851            this = self.sql(expression, "this")
    -852            expressions = expression.expressions
    +820        def timetostr_sql(self, expression: exp.TimeToStr) -> str:
    +821            this = expression.this if isinstance(expression.this, exp.TsOrDsToDate) else expression
    +822            return self.func("FORMAT_DATE", self.format_time(expression), this.this)
    +823
    +824        def eq_sql(self, expression: exp.EQ) -> str:
    +825            # Operands of = cannot be NULL in BigQuery
    +826            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
    +827                if not isinstance(expression.parent, exp.Update):
    +828                    return "NULL"
    +829
    +830            return self.binary(expression, "=")
    +831
    +832        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    +833            parent = expression.parent
    +834
    +835            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    +836            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    +837            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    +838                return self.func(
    +839                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    +840                )
    +841
    +842            return super().attimezone_sql(expression)
    +843
    +844        def trycast_sql(self, expression: exp.TryCast) -> str:
    +845            return self.cast_sql(expression, safe_prefix="SAFE_")
    +846
    +847        def array_sql(self, expression: exp.Array) -> str:
    +848            first_arg = seq_get(expression.expressions, 0)
    +849            if isinstance(first_arg, exp.Query):
    +850                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    +851
    +852            return inline_array_sql(self, expression)
     853
    -854            if len(expressions) == 1:
    -855                arg = expressions[0]
    -856                if arg.type is None:
    -857                    from sqlglot.optimizer.annotate_types import annotate_types
    -858
    -859                    arg = annotate_types(arg)
    -860
    -861                if arg.type and arg.type.this in exp.DataType.TEXT_TYPES:
    -862                    # BQ doesn't support bracket syntax with string values
    -863                    return f"{this}.{arg.name}"
    +854        def bracket_sql(self, expression: exp.Bracket) -> str:
    +855            this = expression.this
    +856            expressions = expression.expressions
    +857
    +858            if len(expressions) == 1 and this and this.is_type(exp.DataType.Type.STRUCT):
    +859                arg = expressions[0]
    +860                if arg.type is None:
    +861                    from sqlglot.optimizer.annotate_types import annotate_types
    +862
    +863                    arg = annotate_types(arg)
     864
    -865            expressions_sql = ", ".join(self.sql(e) for e in expressions)
    -866            offset = expression.args.get("offset")
    -867
    -868            if offset == 0:
    -869                expressions_sql = f"OFFSET({expressions_sql})"
    -870            elif offset == 1:
    -871                expressions_sql = f"ORDINAL({expressions_sql})"
    -872            elif offset is not None:
    -873                self.unsupported(f"Unsupported array offset: {offset}")
    -874
    -875            if expression.args.get("safe"):
    -876                expressions_sql = f"SAFE_{expressions_sql}"
    -877
    -878            return f"{this}[{expressions_sql}]"
    -879
    -880        def in_unnest_op(self, expression: exp.Unnest) -> str:
    -881            return self.sql(expression)
    -882
    -883        def except_op(self, expression: exp.Except) -> str:
    -884            if not expression.args.get("distinct"):
    -885                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    -886            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    -887
    -888        def intersect_op(self, expression: exp.Intersect) -> str:
    -889            if not expression.args.get("distinct"):
    -890                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    -891            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    -892
    -893        def with_properties(self, properties: exp.Properties) -> str:
    -894            return self.properties(properties, prefix=self.seg("OPTIONS"))
    -895
    -896        def version_sql(self, expression: exp.Version) -> str:
    -897            if expression.name == "TIMESTAMP":
    -898                expression.set("this", "SYSTEM_TIME")
    -899            return super().version_sql(expression)
    +865                if arg.type and arg.type.this in exp.DataType.TEXT_TYPES:
    +866                    # BQ doesn't support bracket syntax with string values for structs
    +867                    return f"{self.sql(this)}.{arg.name}"
    +868
    +869            expressions_sql = self.expressions(expression, flat=True)
    +870            offset = expression.args.get("offset")
    +871
    +872            if offset == 0:
    +873                expressions_sql = f"OFFSET({expressions_sql})"
    +874            elif offset == 1:
    +875                expressions_sql = f"ORDINAL({expressions_sql})"
    +876            elif offset is not None:
    +877                self.unsupported(f"Unsupported array offset: {offset}")
    +878
    +879            if expression.args.get("safe"):
    +880                expressions_sql = f"SAFE_{expressions_sql}"
    +881
    +882            return f"{self.sql(this)}[{expressions_sql}]"
    +883
    +884        def in_unnest_op(self, expression: exp.Unnest) -> str:
    +885            return self.sql(expression)
    +886
    +887        def except_op(self, expression: exp.Except) -> str:
    +888            if not expression.args.get("distinct"):
    +889                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    +890            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    +891
    +892        def intersect_op(self, expression: exp.Intersect) -> str:
    +893            if not expression.args.get("distinct"):
    +894                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    +895            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    +896
    +897        def with_properties(self, properties: exp.Properties) -> str:
    +898            return self.properties(properties, prefix=self.seg("OPTIONS"))
    +899
    +900        def version_sql(self, expression: exp.Version) -> str:
    +901            if expression.name == "TIMESTAMP":
    +902                expression.set("this", "SYSTEM_TIME")
    +903            return super().version_sql(expression)
     
    @@ -3570,9 +3594,11 @@ True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
  • normalize: Whether to normalize identifiers to lowercase. Default: False.
  • - pad: The pad size in a formatted string.
  • + pad: The pad size in a formatted string. For example, this affects the indentation of
    a projection in a query, relative to its nesting level. Default: 2.
  • - indent: The indentation size in a formatted string.
  • + indent: The indentation size in a formatted string. For example, this affects the
    indentation of subqueries and filters under a WHERE clause. Default: 2.
  • normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase.
@@ -3811,6 +3837,18 @@ Default: True
+
+    SUPPORTS_TO_NUMBER = False
+
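The pad and indent options described in the bullets above are plain generator options; a minimal editorial sketch (the query text is an illustrative assumption, not from upstream) of passing them through transpile when pretty-printing BigQuery SQL:

    import sqlglot

    sql = "SELECT a, b FROM t WHERE a > 1 AND b IN (SELECT b FROM u)"
    # pretty, pad and indent are forwarded to the Generator, controlling projection
    # padding and nesting indentation in the formatted output.
    print(sqlglot.transpile(sql, write="bigquery", pretty=True, pad=2, indent=2)[0])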
    @@ -3828,7 +3866,7 @@ Default: True
    TRANSFORMS = - {<class 'sqlglot.expressions.JSONPathKey'>: <function <lambda>>, <class 'sqlglot.expressions.JSONPathRoot'>: <function <lambda>>, <class 'sqlglot.expressions.JSONPathSubscript'>: <function <lambda>>, <class 'sqlglot.expressions.AutoRefreshProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InheritsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalSpan'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.JSONExtract'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.JSONExtractScalar'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OutputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.RemoteWithConnectionModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function _returnsproperty_sql>, <class 'sqlglot.expressions.SampleProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetConfigProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlReadWriteProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.Timestamp'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransformModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ApproxDistinct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ArgMax'>: <function arg_max_or_min_no_count.<locals>._arg_max_or_min_sql>, <class 'sqlglot.expressions.ArgMin'>: <function arg_max_or_min_no_count.<locals>._arg_max_or_min_sql>, <class 'sqlglot.expressions.ArrayContains'>: <function _array_contains_sql>, <class 'sqlglot.expressions.ArrayFilter'>: <function filter_array_using_unnest>, <class 'sqlglot.expressions.ArraySize'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Cast'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.CollateProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.Commit'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.CountIf'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Create'>: <function _create_sql>, <class 'sqlglot.expressions.CTE'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.DateDiff'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.DateFromParts'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.DateSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DateTrunc'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.FromTimeZone'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.GenerateSeries'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.GroupConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Hex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.If'>: <function if_sql.<locals>._if_sql>, <class 'sqlglot.expressions.ILike'>: <function no_ilike_sql>, <class 'sqlglot.expressions.IntDiv'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONFormat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.MD5'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.MD5Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.PartitionedByProperty'>: <function 
BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpExtract'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpReplace'>: <function regexp_replace_sql>, <class 'sqlglot.expressions.RegexpLike'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Rollback'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.SHA2'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToDate'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToTime'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TimeAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimeFromParts'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.TimeSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampDiff'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.TimestampSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.Transaction'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.Trim'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function _ts_or_ds_add_sql>, <class 'sqlglot.expressions.TsOrDsDiff'>: <function _ts_or_ds_diff_sql>, <class 'sqlglot.expressions.TsOrDsToTime'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Unhex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.UnixDate'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.UnixToTime'>: <function _unix_to_time_sql>, <class 'sqlglot.expressions.Values'>: <function _derived_table_values_to_unnest>, <class 'sqlglot.expressions.VariancePop'>: <function rename_func.<locals>.<lambda>>} + {<class 'sqlglot.expressions.JSONPathKey'>: <function <lambda>>, <class 'sqlglot.expressions.JSONPathRoot'>: <function <lambda>>, <class 'sqlglot.expressions.JSONPathSubscript'>: <function <lambda>>, <class 'sqlglot.expressions.AutoRefreshProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.BackupProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExcludeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.GlobalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IcebergProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InheritsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalSpan'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.JSONExtract'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.JSONExtractScalar'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OutputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.RemoteWithConnectionModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function _returnsproperty_sql>, <class 'sqlglot.expressions.SampleProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetConfigProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SharingProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlReadWriteProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.Timestamp'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransformModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UnloggedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ViewAttributeProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.WithOperator'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ApproxDistinct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ArgMax'>: <function arg_max_or_min_no_count.<locals>._arg_max_or_min_sql>, <class 'sqlglot.expressions.ArgMin'>: <function arg_max_or_min_no_count.<locals>._arg_max_or_min_sql>, <class 'sqlglot.expressions.ArrayContains'>: <function _array_contains_sql>, <class 'sqlglot.expressions.ArrayFilter'>: <function filter_array_using_unnest>, <class 'sqlglot.expressions.ArraySize'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Cast'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.CollateProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.Commit'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.CountIf'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Create'>: <function _create_sql>, <class 'sqlglot.expressions.CTE'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.DateAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DateDiff'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.DateFromParts'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.DateSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DateTrunc'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.FromTimeZone'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.GenerateSeries'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.GroupConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Hex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.If'>: <function if_sql.<locals>._if_sql>, <class 'sqlglot.expressions.ILike'>: <function no_ilike_sql>, <class 'sqlglot.expressions.IntDiv'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONFormat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.Mod'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.MD5'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.MD5Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.PartitionedByProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpExtract'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpReplace'>: <function regexp_replace_sql>, <class 'sqlglot.expressions.RegexpLike'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Rollback'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.SHA2'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToDate'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToTime'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TimeAdd'>: <function date_add_interval_sql.<locals>.func>, 
<class 'sqlglot.expressions.TimeFromParts'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.TimeSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampDiff'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.TimestampSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.Transaction'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.Trim'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function _ts_or_ds_add_sql>, <class 'sqlglot.expressions.TsOrDsDiff'>: <function _ts_or_ds_diff_sql>, <class 'sqlglot.expressions.TsOrDsToTime'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Unhex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.UnixDate'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.UnixToTime'>: <function _unix_to_time_sql>, <class 'sqlglot.expressions.Values'>: <function _derived_table_values_to_unnest>, <class 'sqlglot.expressions.VariancePop'>: <function rename_func.<locals>.<lambda>>}
    @@ -3867,7 +3905,7 @@ Default: True
    PROPERTIES_LOCATION = - {<class 'sqlglot.expressions.AlgorithmProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.AutoIncrementProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.AutoRefreshProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.BlockCompressionProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CharacterSetProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ChecksumProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CollateProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Cluster'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ClusteredByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DataBlocksizeProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.DefinerProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.DictRange'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DictProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistStyleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.EngineProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExternalProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.FallbackProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.FileFormatProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.FreespaceProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.HeapProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.InheritsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.InputModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.IsolatedLoadingProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.JournalProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.LanguageProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LikeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LocationProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LockProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LockingProperty'>: <Location.POST_ALIAS: 'POST_ALIAS'>, <class 'sqlglot.expressions.LogProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.MaterializedProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeBlockRatioProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.OnProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.OnCommitProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.Order'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.OutputModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedByProperty'>: 
<Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedOfProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PrimaryKey'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Property'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.RemoteWithConnectionModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ReturnsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatDelimitedProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatSerdeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SampleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SchemaCommentProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SerdeProperties'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Set'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SettingsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SetProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.SetConfigProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SortKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SqlReadWriteProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.StabilityProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TemporaryProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.ToTableProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TransientProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.TransformModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.MergeTreeTTL'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.VolatileProperty'>: <Location.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.WithDataProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.WithSystemVersioningProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>} + {<class 'sqlglot.expressions.AlgorithmProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.AutoIncrementProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.AutoRefreshProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.BackupProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.BlockCompressionProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CharacterSetProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ChecksumProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CollateProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Cluster'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ClusteredByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DataBlocksizeProperty'>: <Location.POST_NAME: 
'POST_NAME'>, <class 'sqlglot.expressions.DefinerProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.DictRange'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DictProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistStyleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.EngineProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExternalProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.FallbackProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.FileFormatProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.FreespaceProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.GlobalProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.HeapProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.InheritsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.IcebergProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.InputModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.IsolatedLoadingProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.JournalProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.LanguageProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LikeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LocationProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LockProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LockingProperty'>: <Location.POST_ALIAS: 'POST_ALIAS'>, <class 'sqlglot.expressions.LogProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.MaterializedProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeBlockRatioProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.OnProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.OnCommitProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.Order'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.OutputModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedOfProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PrimaryKey'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Property'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.RemoteWithConnectionModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ReturnsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatDelimitedProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatSerdeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SampleProperty'>: <Location.POST_SCHEMA: 
'POST_SCHEMA'>, <class 'sqlglot.expressions.SchemaCommentProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SerdeProperties'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Set'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SettingsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SetProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.SetConfigProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SharingProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.SequenceProperties'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.SortKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SqlReadWriteProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.StabilityProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TemporaryProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.ToTableProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TransientProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.TransformModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.MergeTreeTTL'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.UnloggedProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.ViewAttributeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.VolatileProperty'>: <Location.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.WithDataProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.WithSystemVersioningProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>}
    @@ -3880,7 +3918,7 @@ Default: True
    RESERVED_KEYWORDS = - {'inner', 'true', 'define', 'is', 'ignore', 'array', 'having', 'null', 'into', 'if', 'unbounded', 'default', 'groups', 'within', 'limit', 'and', 'by', 'create', 'or', 'unnest', 'merge', 'interval', 'proto', 'for', 'select', 'collate', 'like', 'when', 'intersect', 'lateral', 'qualify', 'join', 'left', 'full', 'new', 'cross', 'cube', 'some', 'tablesample', 'no', 'escape', 'fetch', 'exists', 'in', 'following', 'right', 'with', 'except', 'extract', 'any', 'group', 'cast', 'then', 'asc', 'union', 'distinct', 'set', 'from', 'recursive', 'rollup', 'else', 'desc', 'where', 'nulls', 'using', 'natural', 'exclude', 'of', 'struct', 'contains', 'respect', 'enum', 'to', 'treat', 'rows', 'between', 'false', 'case', 'hash', 'at', 'preceding', 'outer', 'order', 'over', 'current', 'lookup', 'window', 'as', 'on', 'range', 'grouping', 'all', 'assert_rows_modified', 'partition', 'end', 'not'} + {'and', 'group', 'partition', 'inner', 'rollup', 'case', 'enum', 'exclude', 'new', 'set', 'of', 'not', 'unbounded', 'cross', 'assert_rows_modified', 'union', 'no', 'fetch', 'qualify', 'over', 'all', 'asc', 'hash', 'lateral', 'left', 'end', 'limit', 'to', 'within', 'treat', 'true', 'contains', 'groups', 'merge', 'following', 'nulls', 'range', 'for', 'where', 'null', 'false', 'intersect', 'window', 'respect', 'using', 'is', 'some', 'or', 'having', 'rows', 'select', 'struct', 'natural', 'at', 'extract', 'right', 'recursive', 'any', 'grouping', 'like', 'outer', 'into', 'preceding', 'create', 'order', 'by', 'join', 'when', 'current', 'exists', 'between', 'as', 'from', 'cube', 'array', 'ignore', 'else', 'on', 'collate', 'except', 'with', 'proto', 'define', 'in', 'escape', 'tablesample', 'cast', 'distinct', 'lookup', 'unnest', 'if', 'desc', 'default', 'full', 'then', 'interval'}
    @@ -3900,20 +3938,20 @@ Default: True
    -
    801        def table_parts(self, expression: exp.Table) -> str:
    -802            # Depending on the context, `x.y` may not resolve to the same data source as `x`.`y`, so
    -803            # we need to make sure the correct quoting is used in each case.
    -804            #
    -805            # For example, if there is a CTE x that clashes with a schema name, then the former will
    -806            # return the table y in that schema, whereas the latter will return the CTE's y column:
    -807            #
    -808            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x.y`   -> cross join
    -809            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x`.`y` -> implicit unnest
    -810            if expression.meta.get("quoted_table"):
    -811                table_parts = ".".join(p.name for p in expression.parts)
    -812                return self.sql(exp.Identifier(this=table_parts, quoted=True))
    -813
    -814            return super().table_parts(expression)
    +            
    805        def table_parts(self, expression: exp.Table) -> str:
    +806            # Depending on the context, `x.y` may not resolve to the same data source as `x`.`y`, so
    +807            # we need to make sure the correct quoting is used in each case.
    +808            #
    +809            # For example, if there is a CTE x that clashes with a schema name, then the former will
    +810            # return the table y in that schema, whereas the latter will return the CTE's y column:
    +811            #
    +812            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x.y`   -> cross join
    +813            # - WITH x AS (SELECT [1, 2] AS y) SELECT * FROM x, `x`.`y` -> implicit unnest
    +814            if expression.meta.get("quoted_table"):
    +815                table_parts = ".".join(p.name for p in expression.parts)
    +816                return self.sql(exp.Identifier(this=table_parts, quoted=True))
    +817
    +818            return super().table_parts(expression)
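A minimal editorial sketch (not part of the upstream source) of what this quoting rule means in practice, using sqlglot's public transpile API; the table names and expected outputs are illustrative assumptions:

    import sqlglot

    # A fully backtick-quoted path is kept as a single quoted identifier, so it still
    # resolves to the same data source (e.g. a table in another project/dataset).
    print(sqlglot.transpile("SELECT * FROM `my-project.my_dataset.my_table`",
                            read="bigquery", write="bigquery")[0])

    # A dotted, unquoted path keeps each part as a separate identifier.
    print(sqlglot.transpile("SELECT * FROM my_project.my_dataset.my_table",
                            read="bigquery", write="bigquery")[0])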
     
    @@ -3931,9 +3969,9 @@ Default: True
    -
    816        def timetostr_sql(self, expression: exp.TimeToStr) -> str:
    -817            this = expression.this if isinstance(expression.this, exp.TsOrDsToDate) else expression
    -818            return self.func("FORMAT_DATE", self.format_time(expression), this.this)
    +            
    820        def timetostr_sql(self, expression: exp.TimeToStr) -> str:
    +821            this = expression.this if isinstance(expression.this, exp.TsOrDsToDate) else expression
    +822            return self.func("FORMAT_DATE", self.format_time(expression), this.this)
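A rough editorial sketch, assuming DuckDB's STRFTIME parses to exp.TimeToStr as in current sqlglot; the expected output is an assumption, not upstream documentation:

    import sqlglot

    # Expected to render via this method as roughly "SELECT FORMAT_DATE('%Y-%m-%d', d)",
    # with the format string mapped through the dialect's TIME_MAPPING.
    print(sqlglot.transpile("SELECT STRFTIME(d, '%Y-%m-%d')", read="duckdb", write="bigquery")[0])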
     
    @@ -3951,13 +3989,13 @@ Default: True
    -
    820        def eq_sql(self, expression: exp.EQ) -> str:
    -821            # Operands of = cannot be NULL in BigQuery
    -822            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
    -823                if not isinstance(expression.parent, exp.Update):
    -824                    return "NULL"
    -825
    -826            return self.binary(expression, "=")
    +            
    824        def eq_sql(self, expression: exp.EQ) -> str:
    +825            # Operands of = cannot be NULL in BigQuery
    +826            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
    +827                if not isinstance(expression.parent, exp.Update):
    +828                    return "NULL"
    +829
    +830            return self.binary(expression, "=")
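A small editorial sketch of the NULL-operand rule above; the query and expected output are illustrative assumptions:

    import sqlglot

    # A comparison against NULL can never be TRUE in BigQuery, so the generator folds
    # the whole EQ node to NULL (expected output is roughly "SELECT NULL FROM t").
    print(sqlglot.transpile("SELECT a = NULL FROM t", read="bigquery", write="bigquery")[0])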
     
    @@ -3975,17 +4013,17 @@ Default: True
    -
    828        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    -829            parent = expression.parent
    -830
    -831            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    -832            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    -833            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    -834                return self.func(
    -835                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    -836                )
    -837
    -838            return super().attimezone_sql(expression)
    +            
    832        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    +833            parent = expression.parent
    +834
    +835            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    +836            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    +837            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
    +838                return self.func(
    +839                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
    +840                )
    +841
    +842            return super().attimezone_sql(expression)
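A hedged editorial sketch of the AT TIME ZONE rewrite, assuming the source dialect parses the clause into exp.AtTimeZone; the column name and expected output are illustrative:

    import sqlglot

    # Outside of CAST(... AS STRING FORMAT ... AT TIME ZONE ...), the clause is expected
    # to become roughly "SELECT TIMESTAMP(DATETIME(ts, 'America/New_York'))".
    print(sqlglot.transpile(
        "SELECT ts AT TIME ZONE 'America/New_York'",
        read="postgres",
        write="bigquery",
    )[0])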
     
    @@ -4003,8 +4041,8 @@ Default: True
    -
    840        def trycast_sql(self, expression: exp.TryCast) -> str:
    -841            return self.cast_sql(expression, safe_prefix="SAFE_")
    +            
    844        def trycast_sql(self, expression: exp.TryCast) -> str:
    +845            return self.cast_sql(expression, safe_prefix="SAFE_")
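A short editorial sketch of the SAFE_ prefix in action; the query is an illustrative assumption:

    import sqlglot

    # TRY_CAST becomes BigQuery's SAFE_CAST, and INT is widened to INT64 by the
    # dialect's type mapping (expected output is roughly
    # "SELECT SAFE_CAST(x AS INT64) FROM t").
    print(sqlglot.transpile("SELECT TRY_CAST(x AS INT) FROM t", read="duckdb", write="bigquery")[0])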
     
    @@ -4022,12 +4060,12 @@ Default: True
    -
    843        def array_sql(self, expression: exp.Array) -> str:
    -844            first_arg = seq_get(expression.expressions, 0)
    -845            if isinstance(first_arg, exp.Query):
    -846                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    -847
    -848            return inline_array_sql(self, expression)
    +            
    847        def array_sql(self, expression: exp.Array) -> str:
    +848            first_arg = seq_get(expression.expressions, 0)
    +849            if isinstance(first_arg, exp.Query):
    +850                return f"ARRAY{self.wrap(self.sql(first_arg))}"
    +851
    +852            return inline_array_sql(self, expression)
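A brief editorial sketch contrasting the two branches above; both statements are illustrative assumptions:

    import sqlglot

    # A subquery argument keeps the ARRAY(...) form, while plain values stay an
    # inline array literal.
    print(sqlglot.transpile("SELECT ARRAY(SELECT 1 UNION ALL SELECT 2)", read="bigquery", write="bigquery")[0])
    print(sqlglot.transpile("SELECT [1, 2, 3]", read="bigquery", write="bigquery")[0])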
     
    @@ -4045,35 +4083,35 @@ Default: True
    -
    850        def bracket_sql(self, expression: exp.Bracket) -> str:
    -851            this = self.sql(expression, "this")
    -852            expressions = expression.expressions
    -853
    -854            if len(expressions) == 1:
    -855                arg = expressions[0]
    -856                if arg.type is None:
    -857                    from sqlglot.optimizer.annotate_types import annotate_types
    -858
    -859                    arg = annotate_types(arg)
    -860
    -861                if arg.type and arg.type.this in exp.DataType.TEXT_TYPES:
    -862                    # BQ doesn't support bracket syntax with string values
    -863                    return f"{this}.{arg.name}"
    +            
    854        def bracket_sql(self, expression: exp.Bracket) -> str:
    +855            this = expression.this
    +856            expressions = expression.expressions
    +857
    +858            if len(expressions) == 1 and this and this.is_type(exp.DataType.Type.STRUCT):
    +859                arg = expressions[0]
    +860                if arg.type is None:
    +861                    from sqlglot.optimizer.annotate_types import annotate_types
    +862
    +863                    arg = annotate_types(arg)
     864
    -865            expressions_sql = ", ".join(self.sql(e) for e in expressions)
    -866            offset = expression.args.get("offset")
    -867
    -868            if offset == 0:
    -869                expressions_sql = f"OFFSET({expressions_sql})"
    -870            elif offset == 1:
    -871                expressions_sql = f"ORDINAL({expressions_sql})"
    -872            elif offset is not None:
    -873                self.unsupported(f"Unsupported array offset: {offset}")
    -874
    -875            if expression.args.get("safe"):
    -876                expressions_sql = f"SAFE_{expressions_sql}"
    -877
    -878            return f"{this}[{expressions_sql}]"
    +865                if arg.type and arg.type.this in exp.DataType.TEXT_TYPES:
    +866                    # BQ doesn't support bracket syntax with string values for structs
    +867                    return f"{self.sql(this)}.{arg.name}"
    +868
    +869            expressions_sql = self.expressions(expression, flat=True)
    +870            offset = expression.args.get("offset")
    +871
    +872            if offset == 0:
    +873                expressions_sql = f"OFFSET({expressions_sql})"
    +874            elif offset == 1:
    +875                expressions_sql = f"ORDINAL({expressions_sql})"
    +876            elif offset is not None:
    +877                self.unsupported(f"Unsupported array offset: {offset}")
    +878
    +879            if expression.args.get("safe"):
    +880                expressions_sql = f"SAFE_{expressions_sql}"
    +881
    +882            return f"{self.sql(this)}[{expressions_sql}]"
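A minimal editorial sketch of the subscript wrappers handled above; the query is an illustrative assumption and is expected to round-trip unchanged:

    import sqlglot

    # OFFSET/ORDINAL and their SAFE_-prefixed variants are preserved when
    # round-tripping BigQuery array subscripts.
    print(sqlglot.transpile("SELECT arr[OFFSET(0)], arr[SAFE_ORDINAL(1)] FROM t",
                            read="bigquery", write="bigquery")[0])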
     
    @@ -4091,8 +4129,8 @@ Default: True
    -
    880        def in_unnest_op(self, expression: exp.Unnest) -> str:
    -881            return self.sql(expression)
    +            
    884        def in_unnest_op(self, expression: exp.Unnest) -> str:
    +885            return self.sql(expression)
     
    @@ -4110,10 +4148,10 @@ Default: True
    -
    883        def except_op(self, expression: exp.Except) -> str:
    -884            if not expression.args.get("distinct"):
    -885                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    -886            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    +            
    887        def except_op(self, expression: exp.Except) -> str:
    +888            if not expression.args.get("distinct"):
    +889                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
    +890            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
     
    @@ -4131,10 +4169,10 @@ Default: True
    -
    888        def intersect_op(self, expression: exp.Intersect) -> str:
    -889            if not expression.args.get("distinct"):
    -890                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    -891            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
    +            
    892        def intersect_op(self, expression: exp.Intersect) -> str:
    +893            if not expression.args.get("distinct"):
    +894                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
    +895            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
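A small editorial sketch covering both except_op and intersect_op above; the queries are illustrative assumptions:

    import sqlglot

    # Plain EXCEPT / INTERSECT (distinct by default in standard SQL) are spelled out
    # as EXCEPT DISTINCT / INTERSECT DISTINCT for BigQuery.
    print(sqlglot.transpile("SELECT a FROM x EXCEPT SELECT a FROM y", write="bigquery")[0])
    print(sqlglot.transpile("SELECT a FROM x INTERSECT SELECT a FROM y", write="bigquery")[0])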
     
    @@ -4152,8 +4190,8 @@ Default: True
    -
    893        def with_properties(self, properties: exp.Properties) -> str:
    -894            return self.properties(properties, prefix=self.seg("OPTIONS"))
    +            
    897        def with_properties(self, properties: exp.Properties) -> str:
    +898            return self.properties(properties, prefix=self.seg("OPTIONS"))
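A hedged editorial sketch of the OPTIONS clause emission; the DDL statement and expected output are illustrative assumptions:

    import sqlglot

    # Table properties are rendered in an OPTIONS(...) clause (expected to round-trip
    # as roughly "CREATE TABLE t (x INT64) OPTIONS (description='demo table')").
    print(sqlglot.transpile(
        "CREATE TABLE t (x INT64) OPTIONS (description='demo table')",
        read="bigquery",
        write="bigquery",
    )[0])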
     
    @@ -4171,15 +4209,27 @@ Default: True
    -
    896        def version_sql(self, expression: exp.Version) -> str:
    -897            if expression.name == "TIMESTAMP":
    -898                expression.set("this", "SYSTEM_TIME")
    -899            return super().version_sql(expression)
    +            
    900        def version_sql(self, expression: exp.Version) -> str:
    +901            if expression.name == "TIMESTAMP":
    +902                expression.set("this", "SYSTEM_TIME")
    +903            return super().version_sql(expression)
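A tentative editorial sketch, assuming the source dialect parses Spark/Databricks-style time travel into an exp.Version node whose name is TIMESTAMP; the query and expected output are assumptions:

    import sqlglot

    # If parsed as exp.Version(this='TIMESTAMP', ...), the clause is renamed to
    # BigQuery's FOR SYSTEM_TIME AS OF (expected output is roughly
    # "SELECT * FROM t FOR SYSTEM_TIME AS OF '2023-01-01'").
    print(sqlglot.transpile("SELECT * FROM t TIMESTAMP AS OF '2023-01-01'",
                            read="spark", write="bigquery")[0])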
     
+
+    AFTER_HAVING_MODIFIER_TRANSFORMS = {'qualify': <function Generator.<lambda>>, 'windows': <function Generator.<lambda>>}
+