sqlglot.dialects.bigquery
1from __future__ import annotations 2 3import logging 4import re 5import typing as t 6 7from sqlglot import exp, generator, parser, tokens, transforms 8from sqlglot._typing import E 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 binary_from_function, 12 datestrtodate_sql, 13 format_time_lambda, 14 inline_array_sql, 15 max_or_greatest, 16 min_or_least, 17 no_ilike_sql, 18 parse_date_delta_with_interval, 19 regexp_replace_sql, 20 rename_func, 21 timestrtotime_sql, 22 ts_or_ds_to_date_sql, 23) 24from sqlglot.helper import seq_get, split_num_words 25from sqlglot.tokens import TokenType 26 27logger = logging.getLogger("sqlglot") 28 29 30def _date_add_sql( 31 data_type: str, kind: str 32) -> t.Callable[[generator.Generator, exp.Expression], str]: 33 def func(self, expression): 34 this = self.sql(expression, "this") 35 unit = expression.args.get("unit") 36 unit = exp.var(unit.name.upper() if unit else "DAY") 37 interval = exp.Interval(this=expression.expression, unit=unit) 38 return f"{data_type}_{kind}({this}, {self.sql(interval)})" 39 40 return func 41 42 43def _derived_table_values_to_unnest(self: generator.Generator, expression: exp.Values) -> str: 44 if not expression.find_ancestor(exp.From, exp.Join): 45 return self.values_sql(expression) 46 47 alias = expression.args.get("alias") 48 49 structs = [ 50 exp.Struct( 51 expressions=[ 52 exp.alias_(value, column_name) 53 for value, column_name in zip( 54 t.expressions, 55 alias.columns 56 if alias and alias.columns 57 else (f"_c{i}" for i in range(len(t.expressions))), 58 ) 59 ] 60 ) 61 for t in expression.find_all(exp.Tuple) 62 ] 63 64 return self.unnest_sql(exp.Unnest(expressions=[exp.Array(expressions=structs)])) 65 66 67def _returnsproperty_sql(self: generator.Generator, expression: exp.ReturnsProperty) -> str: 68 this = expression.this 69 if isinstance(this, exp.Schema): 70 this = f"{this.this} <{self.expressions(this)}>" 71 else: 72 this = self.sql(this) 73 return f"RETURNS {this}" 74 75 76def 
_create_sql(self: generator.Generator, expression: exp.Create) -> str: 77 kind = expression.args["kind"] 78 returns = expression.find(exp.ReturnsProperty) 79 if kind.upper() == "FUNCTION" and returns and returns.args.get("is_table"): 80 expression = expression.copy() 81 expression.set("kind", "TABLE FUNCTION") 82 if isinstance( 83 expression.expression, 84 ( 85 exp.Subquery, 86 exp.Literal, 87 ), 88 ): 89 expression.set("expression", expression.expression.this) 90 91 return self.create_sql(expression) 92 93 return self.create_sql(expression) 94 95 96def _unqualify_unnest(expression: exp.Expression) -> exp.Expression: 97 """Remove references to unnest table aliases since bigquery doesn't allow them. 98 99 These are added by the optimizer's qualify_column step. 100 """ 101 from sqlglot.optimizer.scope import Scope 102 103 if isinstance(expression, exp.Select): 104 for unnest in expression.find_all(exp.Unnest): 105 if isinstance(unnest.parent, (exp.From, exp.Join)) and unnest.alias: 106 for column in Scope(expression).find_all(exp.Column): 107 if column.table == unnest.alias: 108 column.set("table", None) 109 110 return expression 111 112 113# https://issuetracker.google.com/issues/162294746 114# workaround for bigquery bug when grouping by an expression and then ordering 115# WITH x AS (SELECT 1 y) 116# SELECT y + 1 z 117# FROM x 118# GROUP BY x + 1 119# ORDER by z 120def _alias_ordered_group(expression: exp.Expression) -> exp.Expression: 121 if isinstance(expression, exp.Select): 122 group = expression.args.get("group") 123 order = expression.args.get("order") 124 125 if group and order: 126 aliases = { 127 select.this: select.args["alias"] 128 for select in expression.selects 129 if isinstance(select, exp.Alias) 130 } 131 132 for e in group.expressions: 133 alias = aliases.get(e) 134 135 if alias: 136 e.replace(exp.column(alias)) 137 138 return expression 139 140 141def _pushdown_cte_column_names(expression: exp.Expression) -> exp.Expression: 142 """BigQuery 
doesn't allow column names when defining a CTE, so we try to push them down.""" 143 if isinstance(expression, exp.CTE) and expression.alias_column_names: 144 cte_query = expression.this 145 146 if cte_query.is_star: 147 logger.warning( 148 "Can't push down CTE column names for star queries. Run the query through" 149 " the optimizer or use 'qualify' to expand the star projections first." 150 ) 151 return expression 152 153 column_names = expression.alias_column_names 154 expression.args["alias"].set("columns", None) 155 156 for name, select in zip(column_names, cte_query.selects): 157 to_replace = select 158 159 if isinstance(select, exp.Alias): 160 select = select.this 161 162 # Inner aliases are shadowed by the CTE column names 163 to_replace.replace(exp.alias_(select, name)) 164 165 return expression 166 167 168def _parse_timestamp(args: t.List) -> exp.StrToTime: 169 this = format_time_lambda(exp.StrToTime, "bigquery")([seq_get(args, 1), seq_get(args, 0)]) 170 this.set("zone", seq_get(args, 2)) 171 return this 172 173 174def _parse_date(args: t.List) -> exp.Date | exp.DateFromParts: 175 expr_type = exp.DateFromParts if len(args) == 3 else exp.Date 176 return expr_type.from_arg_list(args) 177 178 179def _parse_to_hex(args: t.List) -> exp.Hex | exp.MD5: 180 # TO_HEX(MD5(..)) is common in BigQuery, so it's parsed into MD5 to simplify its transpilation 181 arg = seq_get(args, 0) 182 return exp.MD5(this=arg.this) if isinstance(arg, exp.MD5Digest) else exp.Hex(this=arg) 183 184 185class BigQuery(Dialect): 186 UNNEST_COLUMN_ONLY = True 187 188 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity 189 RESOLVES_IDENTIFIERS_AS_UPPERCASE = None 190 191 # bigquery udfs are case sensitive 192 NORMALIZE_FUNCTIONS = False 193 194 TIME_MAPPING = { 195 "%D": "%m/%d/%y", 196 } 197 198 FORMAT_MAPPING = { 199 "DD": "%d", 200 "MM": "%m", 201 "MON": "%b", 202 "MONTH": "%B", 203 "YYYY": "%Y", 204 "YY": "%y", 205 "HH": "%I", 206 "HH12": "%I", 207 
"HH24": "%H", 208 "MI": "%M", 209 "SS": "%S", 210 "SSSSS": "%f", 211 "TZH": "%z", 212 } 213 214 @classmethod 215 def normalize_identifier(cls, expression: E) -> E: 216 # In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least). 217 # The following check is essentially a heuristic to detect tables based on whether or 218 # not they're qualified. 219 if isinstance(expression, exp.Identifier): 220 parent = expression.parent 221 222 while isinstance(parent, exp.Dot): 223 parent = parent.parent 224 225 if ( 226 not isinstance(parent, exp.UserDefinedFunction) 227 and not (isinstance(parent, exp.Table) and parent.db) 228 and not expression.meta.get("is_table") 229 ): 230 expression.set("this", expression.this.lower()) 231 232 return expression 233 234 class Tokenizer(tokens.Tokenizer): 235 QUOTES = ["'", '"', '"""', "'''"] 236 COMMENTS = ["--", "#", ("/*", "*/")] 237 IDENTIFIERS = ["`"] 238 STRING_ESCAPES = ["\\"] 239 240 HEX_STRINGS = [("0x", ""), ("0X", "")] 241 242 BYTE_STRINGS = [ 243 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B") 244 ] 245 246 RAW_STRINGS = [ 247 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R") 248 ] 249 250 KEYWORDS = { 251 **tokens.Tokenizer.KEYWORDS, 252 "ANY TYPE": TokenType.VARIANT, 253 "BEGIN": TokenType.COMMAND, 254 "BEGIN TRANSACTION": TokenType.BEGIN, 255 "CURRENT_DATETIME": TokenType.CURRENT_DATETIME, 256 "BYTES": TokenType.BINARY, 257 "DECLARE": TokenType.COMMAND, 258 "FLOAT64": TokenType.DOUBLE, 259 "INT64": TokenType.BIGINT, 260 "RECORD": TokenType.STRUCT, 261 "TIMESTAMP": TokenType.TIMESTAMPTZ, 262 "NOT DETERMINISTIC": TokenType.VOLATILE, 263 "UNKNOWN": TokenType.NULL, 264 } 265 KEYWORDS.pop("DIV") 266 267 class Parser(parser.Parser): 268 PREFIXED_PIVOT_COLUMNS = True 269 270 LOG_BASE_FIRST = False 271 LOG_DEFAULTS_TO_LN = True 272 273 FUNCTIONS = { 274 **parser.Parser.FUNCTIONS, 275 "DATE": _parse_date, 276 "DATE_ADD": 
parse_date_delta_with_interval(exp.DateAdd), 277 "DATE_SUB": parse_date_delta_with_interval(exp.DateSub), 278 "DATE_TRUNC": lambda args: exp.DateTrunc( 279 unit=exp.Literal.string(str(seq_get(args, 1))), 280 this=seq_get(args, 0), 281 ), 282 "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd), 283 "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub), 284 "DIV": binary_from_function(exp.IntDiv), 285 "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list, 286 "MD5": exp.MD5Digest.from_arg_list, 287 "TO_HEX": _parse_to_hex, 288 "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")( 289 [seq_get(args, 1), seq_get(args, 0)] 290 ), 291 "PARSE_TIMESTAMP": _parse_timestamp, 292 "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list, 293 "REGEXP_EXTRACT": lambda args: exp.RegexpExtract( 294 this=seq_get(args, 0), 295 expression=seq_get(args, 1), 296 position=seq_get(args, 2), 297 occurrence=seq_get(args, 3), 298 group=exp.Literal.number(1) 299 if re.compile(str(seq_get(args, 1))).groups == 1 300 else None, 301 ), 302 "SPLIT": lambda args: exp.Split( 303 # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split 304 this=seq_get(args, 0), 305 expression=seq_get(args, 1) or exp.Literal.string(","), 306 ), 307 "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd), 308 "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub), 309 "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd), 310 "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub), 311 "TO_JSON_STRING": exp.JSONFormat.from_arg_list, 312 } 313 314 FUNCTION_PARSERS = { 315 **parser.Parser.FUNCTION_PARSERS, 316 "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]), 317 } 318 FUNCTION_PARSERS.pop("TRIM") 319 320 NO_PAREN_FUNCTIONS = { 321 **parser.Parser.NO_PAREN_FUNCTIONS, 322 TokenType.CURRENT_DATETIME: exp.CurrentDatetime, 323 } 324 325 NESTED_TYPE_TOKENS = { 326 *parser.Parser.NESTED_TYPE_TOKENS, 327 
TokenType.TABLE, 328 } 329 330 ID_VAR_TOKENS = { 331 *parser.Parser.ID_VAR_TOKENS, 332 TokenType.VALUES, 333 } 334 335 PROPERTY_PARSERS = { 336 **parser.Parser.PROPERTY_PARSERS, 337 "NOT DETERMINISTIC": lambda self: self.expression( 338 exp.StabilityProperty, this=exp.Literal.string("VOLATILE") 339 ), 340 "OPTIONS": lambda self: self._parse_with_property(), 341 } 342 343 CONSTRAINT_PARSERS = { 344 **parser.Parser.CONSTRAINT_PARSERS, 345 "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()), 346 } 347 348 def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]: 349 this = super()._parse_table_part(schema=schema) 350 351 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names 352 if isinstance(this, exp.Identifier): 353 table_name = this.name 354 while self._match(TokenType.DASH, advance=False) and self._next: 355 self._advance(2) 356 table_name += f"-{self._prev.text}" 357 358 this = exp.Identifier(this=table_name, quoted=this.args.get("quoted")) 359 360 return this 361 362 def _parse_table_parts(self, schema: bool = False) -> exp.Table: 363 table = super()._parse_table_parts(schema=schema) 364 if isinstance(table.this, exp.Identifier) and "." 
in table.name: 365 catalog, db, this, *rest = ( 366 t.cast(t.Optional[exp.Expression], exp.to_identifier(x)) 367 for x in split_num_words(table.name, ".", 3) 368 ) 369 370 if rest and this: 371 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) 372 373 table = exp.Table(this=this, db=db, catalog=catalog) 374 375 return table 376 377 class Generator(generator.Generator): 378 EXPLICIT_UNION = True 379 INTERVAL_ALLOWS_PLURAL_FORM = False 380 JOIN_HINTS = False 381 QUERY_HINTS = False 382 TABLE_HINTS = False 383 LIMIT_FETCH = "LIMIT" 384 RENAME_TABLE_WITH_DB = False 385 ESCAPE_LINE_BREAK = True 386 387 TRANSFORMS = { 388 **generator.Generator.TRANSFORMS, 389 exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"), 390 exp.ArraySize: rename_func("ARRAY_LENGTH"), 391 exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), 392 exp.Create: _create_sql, 393 exp.CTE: transforms.preprocess([_pushdown_cte_column_names]), 394 exp.DateAdd: _date_add_sql("DATE", "ADD"), 395 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", 396 exp.DateFromParts: rename_func("DATE"), 397 exp.DateStrToDate: datestrtodate_sql, 398 exp.DateSub: _date_add_sql("DATE", "SUB"), 399 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), 400 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), 401 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), 402 exp.GenerateSeries: rename_func("GENERATE_ARRAY"), 403 exp.GroupConcat: rename_func("STRING_AGG"), 404 exp.Hex: rename_func("TO_HEX"), 405 exp.ILike: no_ilike_sql, 406 exp.IntDiv: rename_func("DIV"), 407 exp.JSONFormat: rename_func("TO_JSON_STRING"), 408 exp.Max: max_or_greatest, 409 exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)), 410 exp.MD5Digest: rename_func("MD5"), 411 exp.Min: min_or_least, 412 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", 413 
exp.RegexpExtract: lambda self, e: self.func( 414 "REGEXP_EXTRACT", 415 e.this, 416 e.expression, 417 e.args.get("position"), 418 e.args.get("occurrence"), 419 ), 420 exp.RegexpReplace: regexp_replace_sql, 421 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), 422 exp.ReturnsProperty: _returnsproperty_sql, 423 exp.Select: transforms.preprocess( 424 [ 425 transforms.explode_to_unnest, 426 _unqualify_unnest, 427 transforms.eliminate_distinct_on, 428 _alias_ordered_group, 429 ] 430 ), 431 exp.StabilityProperty: lambda self, e: f"DETERMINISTIC" 432 if e.name == "IMMUTABLE" 433 else "NOT DETERMINISTIC", 434 exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})", 435 exp.StrToTime: lambda self, e: self.func( 436 "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone") 437 ), 438 exp.TimeAdd: _date_add_sql("TIME", "ADD"), 439 exp.TimeSub: _date_add_sql("TIME", "SUB"), 440 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), 441 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), 442 exp.TimeStrToTime: timestrtotime_sql, 443 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), 444 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), 445 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), 446 exp.Unhex: rename_func("FROM_HEX"), 447 exp.Values: _derived_table_values_to_unnest, 448 exp.VariancePop: rename_func("VAR_POP"), 449 } 450 451 TYPE_MAPPING = { 452 **generator.Generator.TYPE_MAPPING, 453 exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC", 454 exp.DataType.Type.BIGINT: "INT64", 455 exp.DataType.Type.BINARY: "BYTES", 456 exp.DataType.Type.BOOLEAN: "BOOL", 457 exp.DataType.Type.CHAR: "STRING", 458 exp.DataType.Type.DECIMAL: "NUMERIC", 459 exp.DataType.Type.DOUBLE: "FLOAT64", 460 exp.DataType.Type.FLOAT: "FLOAT64", 461 exp.DataType.Type.INT: "INT64", 462 exp.DataType.Type.NCHAR: "STRING", 463 exp.DataType.Type.NVARCHAR: "STRING", 464 exp.DataType.Type.SMALLINT: "INT64", 465 exp.DataType.Type.TEXT: "STRING", 466 
exp.DataType.Type.TIMESTAMP: "DATETIME", 467 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", 468 exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP", 469 exp.DataType.Type.TINYINT: "INT64", 470 exp.DataType.Type.VARBINARY: "BYTES", 471 exp.DataType.Type.VARCHAR: "STRING", 472 exp.DataType.Type.VARIANT: "ANY TYPE", 473 } 474 475 PROPERTIES_LOCATION = { 476 **generator.Generator.PROPERTIES_LOCATION, 477 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, 478 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 479 } 480 481 # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords 482 RESERVED_KEYWORDS = { 483 *generator.Generator.RESERVED_KEYWORDS, 484 "all", 485 "and", 486 "any", 487 "array", 488 "as", 489 "asc", 490 "assert_rows_modified", 491 "at", 492 "between", 493 "by", 494 "case", 495 "cast", 496 "collate", 497 "contains", 498 "create", 499 "cross", 500 "cube", 501 "current", 502 "default", 503 "define", 504 "desc", 505 "distinct", 506 "else", 507 "end", 508 "enum", 509 "escape", 510 "except", 511 "exclude", 512 "exists", 513 "extract", 514 "false", 515 "fetch", 516 "following", 517 "for", 518 "from", 519 "full", 520 "group", 521 "grouping", 522 "groups", 523 "hash", 524 "having", 525 "if", 526 "ignore", 527 "in", 528 "inner", 529 "intersect", 530 "interval", 531 "into", 532 "is", 533 "join", 534 "lateral", 535 "left", 536 "like", 537 "limit", 538 "lookup", 539 "merge", 540 "natural", 541 "new", 542 "no", 543 "not", 544 "null", 545 "nulls", 546 "of", 547 "on", 548 "or", 549 "order", 550 "outer", 551 "over", 552 "partition", 553 "preceding", 554 "proto", 555 "qualify", 556 "range", 557 "recursive", 558 "respect", 559 "right", 560 "rollup", 561 "rows", 562 "select", 563 "set", 564 "some", 565 "struct", 566 "tablesample", 567 "then", 568 "to", 569 "treat", 570 "true", 571 "unbounded", 572 "union", 573 "unnest", 574 "using", 575 "when", 576 "where", 577 "window", 578 "with", 579 "within", 580 } 581 582 def 
attimezone_sql(self, expression: exp.AtTimeZone) -> str: 583 parent = expression.parent 584 585 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). 586 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. 587 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): 588 return self.func( 589 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) 590 ) 591 592 return super().attimezone_sql(expression) 593 594 def trycast_sql(self, expression: exp.TryCast) -> str: 595 return self.cast_sql(expression, safe_prefix="SAFE_") 596 597 def cte_sql(self, expression: exp.CTE) -> str: 598 if expression.alias_column_names: 599 self.unsupported("Column names in CTE definition are not supported.") 600 return super().cte_sql(expression) 601 602 def array_sql(self, expression: exp.Array) -> str: 603 first_arg = seq_get(expression.expressions, 0) 604 if isinstance(first_arg, exp.Subqueryable): 605 return f"ARRAY{self.wrap(self.sql(first_arg))}" 606 607 return inline_array_sql(self, expression) 608 609 def transaction_sql(self, *_) -> str: 610 return "BEGIN TRANSACTION" 611 612 def commit_sql(self, *_) -> str: 613 return "COMMIT TRANSACTION" 614 615 def rollback_sql(self, *_) -> str: 616 return "ROLLBACK TRANSACTION" 617 618 def in_unnest_op(self, expression: exp.Unnest) -> str: 619 return self.sql(expression) 620 621 def except_op(self, expression: exp.Except) -> str: 622 if not expression.args.get("distinct", False): 623 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") 624 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" 625 626 def intersect_op(self, expression: exp.Intersect) -> str: 627 if not expression.args.get("distinct", False): 628 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") 629 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" 630 631 def 
with_properties(self, properties: exp.Properties) -> str: 632 return self.properties(properties, prefix=self.seg("OPTIONS"))
logger =
<Logger sqlglot (WARNING)>
186class BigQuery(Dialect): 187 UNNEST_COLUMN_ONLY = True 188 189 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity 190 RESOLVES_IDENTIFIERS_AS_UPPERCASE = None 191 192 # bigquery udfs are case sensitive 193 NORMALIZE_FUNCTIONS = False 194 195 TIME_MAPPING = { 196 "%D": "%m/%d/%y", 197 } 198 199 FORMAT_MAPPING = { 200 "DD": "%d", 201 "MM": "%m", 202 "MON": "%b", 203 "MONTH": "%B", 204 "YYYY": "%Y", 205 "YY": "%y", 206 "HH": "%I", 207 "HH12": "%I", 208 "HH24": "%H", 209 "MI": "%M", 210 "SS": "%S", 211 "SSSSS": "%f", 212 "TZH": "%z", 213 } 214 215 @classmethod 216 def normalize_identifier(cls, expression: E) -> E: 217 # In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least). 218 # The following check is essentially a heuristic to detect tables based on whether or 219 # not they're qualified. 220 if isinstance(expression, exp.Identifier): 221 parent = expression.parent 222 223 while isinstance(parent, exp.Dot): 224 parent = parent.parent 225 226 if ( 227 not isinstance(parent, exp.UserDefinedFunction) 228 and not (isinstance(parent, exp.Table) and parent.db) 229 and not expression.meta.get("is_table") 230 ): 231 expression.set("this", expression.this.lower()) 232 233 return expression 234 235 class Tokenizer(tokens.Tokenizer): 236 QUOTES = ["'", '"', '"""', "'''"] 237 COMMENTS = ["--", "#", ("/*", "*/")] 238 IDENTIFIERS = ["`"] 239 STRING_ESCAPES = ["\\"] 240 241 HEX_STRINGS = [("0x", ""), ("0X", "")] 242 243 BYTE_STRINGS = [ 244 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B") 245 ] 246 247 RAW_STRINGS = [ 248 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R") 249 ] 250 251 KEYWORDS = { 252 **tokens.Tokenizer.KEYWORDS, 253 "ANY TYPE": TokenType.VARIANT, 254 "BEGIN": TokenType.COMMAND, 255 "BEGIN TRANSACTION": TokenType.BEGIN, 256 "CURRENT_DATETIME": TokenType.CURRENT_DATETIME, 257 "BYTES": TokenType.BINARY, 258 "DECLARE": TokenType.COMMAND, 259 
"FLOAT64": TokenType.DOUBLE, 260 "INT64": TokenType.BIGINT, 261 "RECORD": TokenType.STRUCT, 262 "TIMESTAMP": TokenType.TIMESTAMPTZ, 263 "NOT DETERMINISTIC": TokenType.VOLATILE, 264 "UNKNOWN": TokenType.NULL, 265 } 266 KEYWORDS.pop("DIV") 267 268 class Parser(parser.Parser): 269 PREFIXED_PIVOT_COLUMNS = True 270 271 LOG_BASE_FIRST = False 272 LOG_DEFAULTS_TO_LN = True 273 274 FUNCTIONS = { 275 **parser.Parser.FUNCTIONS, 276 "DATE": _parse_date, 277 "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd), 278 "DATE_SUB": parse_date_delta_with_interval(exp.DateSub), 279 "DATE_TRUNC": lambda args: exp.DateTrunc( 280 unit=exp.Literal.string(str(seq_get(args, 1))), 281 this=seq_get(args, 0), 282 ), 283 "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd), 284 "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub), 285 "DIV": binary_from_function(exp.IntDiv), 286 "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list, 287 "MD5": exp.MD5Digest.from_arg_list, 288 "TO_HEX": _parse_to_hex, 289 "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")( 290 [seq_get(args, 1), seq_get(args, 0)] 291 ), 292 "PARSE_TIMESTAMP": _parse_timestamp, 293 "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list, 294 "REGEXP_EXTRACT": lambda args: exp.RegexpExtract( 295 this=seq_get(args, 0), 296 expression=seq_get(args, 1), 297 position=seq_get(args, 2), 298 occurrence=seq_get(args, 3), 299 group=exp.Literal.number(1) 300 if re.compile(str(seq_get(args, 1))).groups == 1 301 else None, 302 ), 303 "SPLIT": lambda args: exp.Split( 304 # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split 305 this=seq_get(args, 0), 306 expression=seq_get(args, 1) or exp.Literal.string(","), 307 ), 308 "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd), 309 "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub), 310 "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd), 311 "TIMESTAMP_SUB": 
parse_date_delta_with_interval(exp.TimestampSub), 312 "TO_JSON_STRING": exp.JSONFormat.from_arg_list, 313 } 314 315 FUNCTION_PARSERS = { 316 **parser.Parser.FUNCTION_PARSERS, 317 "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]), 318 } 319 FUNCTION_PARSERS.pop("TRIM") 320 321 NO_PAREN_FUNCTIONS = { 322 **parser.Parser.NO_PAREN_FUNCTIONS, 323 TokenType.CURRENT_DATETIME: exp.CurrentDatetime, 324 } 325 326 NESTED_TYPE_TOKENS = { 327 *parser.Parser.NESTED_TYPE_TOKENS, 328 TokenType.TABLE, 329 } 330 331 ID_VAR_TOKENS = { 332 *parser.Parser.ID_VAR_TOKENS, 333 TokenType.VALUES, 334 } 335 336 PROPERTY_PARSERS = { 337 **parser.Parser.PROPERTY_PARSERS, 338 "NOT DETERMINISTIC": lambda self: self.expression( 339 exp.StabilityProperty, this=exp.Literal.string("VOLATILE") 340 ), 341 "OPTIONS": lambda self: self._parse_with_property(), 342 } 343 344 CONSTRAINT_PARSERS = { 345 **parser.Parser.CONSTRAINT_PARSERS, 346 "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()), 347 } 348 349 def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]: 350 this = super()._parse_table_part(schema=schema) 351 352 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names 353 if isinstance(this, exp.Identifier): 354 table_name = this.name 355 while self._match(TokenType.DASH, advance=False) and self._next: 356 self._advance(2) 357 table_name += f"-{self._prev.text}" 358 359 this = exp.Identifier(this=table_name, quoted=this.args.get("quoted")) 360 361 return this 362 363 def _parse_table_parts(self, schema: bool = False) -> exp.Table: 364 table = super()._parse_table_parts(schema=schema) 365 if isinstance(table.this, exp.Identifier) and "." 
in table.name: 366 catalog, db, this, *rest = ( 367 t.cast(t.Optional[exp.Expression], exp.to_identifier(x)) 368 for x in split_num_words(table.name, ".", 3) 369 ) 370 371 if rest and this: 372 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) 373 374 table = exp.Table(this=this, db=db, catalog=catalog) 375 376 return table 377 378 class Generator(generator.Generator): 379 EXPLICIT_UNION = True 380 INTERVAL_ALLOWS_PLURAL_FORM = False 381 JOIN_HINTS = False 382 QUERY_HINTS = False 383 TABLE_HINTS = False 384 LIMIT_FETCH = "LIMIT" 385 RENAME_TABLE_WITH_DB = False 386 ESCAPE_LINE_BREAK = True 387 388 TRANSFORMS = { 389 **generator.Generator.TRANSFORMS, 390 exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"), 391 exp.ArraySize: rename_func("ARRAY_LENGTH"), 392 exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), 393 exp.Create: _create_sql, 394 exp.CTE: transforms.preprocess([_pushdown_cte_column_names]), 395 exp.DateAdd: _date_add_sql("DATE", "ADD"), 396 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", 397 exp.DateFromParts: rename_func("DATE"), 398 exp.DateStrToDate: datestrtodate_sql, 399 exp.DateSub: _date_add_sql("DATE", "SUB"), 400 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), 401 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), 402 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), 403 exp.GenerateSeries: rename_func("GENERATE_ARRAY"), 404 exp.GroupConcat: rename_func("STRING_AGG"), 405 exp.Hex: rename_func("TO_HEX"), 406 exp.ILike: no_ilike_sql, 407 exp.IntDiv: rename_func("DIV"), 408 exp.JSONFormat: rename_func("TO_JSON_STRING"), 409 exp.Max: max_or_greatest, 410 exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)), 411 exp.MD5Digest: rename_func("MD5"), 412 exp.Min: min_or_least, 413 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", 414 
exp.RegexpExtract: lambda self, e: self.func( 415 "REGEXP_EXTRACT", 416 e.this, 417 e.expression, 418 e.args.get("position"), 419 e.args.get("occurrence"), 420 ), 421 exp.RegexpReplace: regexp_replace_sql, 422 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), 423 exp.ReturnsProperty: _returnsproperty_sql, 424 exp.Select: transforms.preprocess( 425 [ 426 transforms.explode_to_unnest, 427 _unqualify_unnest, 428 transforms.eliminate_distinct_on, 429 _alias_ordered_group, 430 ] 431 ), 432 exp.StabilityProperty: lambda self, e: f"DETERMINISTIC" 433 if e.name == "IMMUTABLE" 434 else "NOT DETERMINISTIC", 435 exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})", 436 exp.StrToTime: lambda self, e: self.func( 437 "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone") 438 ), 439 exp.TimeAdd: _date_add_sql("TIME", "ADD"), 440 exp.TimeSub: _date_add_sql("TIME", "SUB"), 441 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), 442 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), 443 exp.TimeStrToTime: timestrtotime_sql, 444 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), 445 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), 446 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), 447 exp.Unhex: rename_func("FROM_HEX"), 448 exp.Values: _derived_table_values_to_unnest, 449 exp.VariancePop: rename_func("VAR_POP"), 450 } 451 452 TYPE_MAPPING = { 453 **generator.Generator.TYPE_MAPPING, 454 exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC", 455 exp.DataType.Type.BIGINT: "INT64", 456 exp.DataType.Type.BINARY: "BYTES", 457 exp.DataType.Type.BOOLEAN: "BOOL", 458 exp.DataType.Type.CHAR: "STRING", 459 exp.DataType.Type.DECIMAL: "NUMERIC", 460 exp.DataType.Type.DOUBLE: "FLOAT64", 461 exp.DataType.Type.FLOAT: "FLOAT64", 462 exp.DataType.Type.INT: "INT64", 463 exp.DataType.Type.NCHAR: "STRING", 464 exp.DataType.Type.NVARCHAR: "STRING", 465 exp.DataType.Type.SMALLINT: "INT64", 466 exp.DataType.Type.TEXT: "STRING", 467 
exp.DataType.Type.TIMESTAMP: "DATETIME", 468 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", 469 exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP", 470 exp.DataType.Type.TINYINT: "INT64", 471 exp.DataType.Type.VARBINARY: "BYTES", 472 exp.DataType.Type.VARCHAR: "STRING", 473 exp.DataType.Type.VARIANT: "ANY TYPE", 474 } 475 476 PROPERTIES_LOCATION = { 477 **generator.Generator.PROPERTIES_LOCATION, 478 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, 479 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 480 } 481 482 # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords 483 RESERVED_KEYWORDS = { 484 *generator.Generator.RESERVED_KEYWORDS, 485 "all", 486 "and", 487 "any", 488 "array", 489 "as", 490 "asc", 491 "assert_rows_modified", 492 "at", 493 "between", 494 "by", 495 "case", 496 "cast", 497 "collate", 498 "contains", 499 "create", 500 "cross", 501 "cube", 502 "current", 503 "default", 504 "define", 505 "desc", 506 "distinct", 507 "else", 508 "end", 509 "enum", 510 "escape", 511 "except", 512 "exclude", 513 "exists", 514 "extract", 515 "false", 516 "fetch", 517 "following", 518 "for", 519 "from", 520 "full", 521 "group", 522 "grouping", 523 "groups", 524 "hash", 525 "having", 526 "if", 527 "ignore", 528 "in", 529 "inner", 530 "intersect", 531 "interval", 532 "into", 533 "is", 534 "join", 535 "lateral", 536 "left", 537 "like", 538 "limit", 539 "lookup", 540 "merge", 541 "natural", 542 "new", 543 "no", 544 "not", 545 "null", 546 "nulls", 547 "of", 548 "on", 549 "or", 550 "order", 551 "outer", 552 "over", 553 "partition", 554 "preceding", 555 "proto", 556 "qualify", 557 "range", 558 "recursive", 559 "respect", 560 "right", 561 "rollup", 562 "rows", 563 "select", 564 "set", 565 "some", 566 "struct", 567 "tablesample", 568 "then", 569 "to", 570 "treat", 571 "true", 572 "unbounded", 573 "union", 574 "unnest", 575 "using", 576 "when", 577 "where", 578 "window", 579 "with", 580 "within", 581 } 582 583 def 
attimezone_sql(self, expression: exp.AtTimeZone) -> str: 584 parent = expression.parent 585 586 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). 587 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. 588 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): 589 return self.func( 590 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) 591 ) 592 593 return super().attimezone_sql(expression) 594 595 def trycast_sql(self, expression: exp.TryCast) -> str: 596 return self.cast_sql(expression, safe_prefix="SAFE_") 597 598 def cte_sql(self, expression: exp.CTE) -> str: 599 if expression.alias_column_names: 600 self.unsupported("Column names in CTE definition are not supported.") 601 return super().cte_sql(expression) 602 603 def array_sql(self, expression: exp.Array) -> str: 604 first_arg = seq_get(expression.expressions, 0) 605 if isinstance(first_arg, exp.Subqueryable): 606 return f"ARRAY{self.wrap(self.sql(first_arg))}" 607 608 return inline_array_sql(self, expression) 609 610 def transaction_sql(self, *_) -> str: 611 return "BEGIN TRANSACTION" 612 613 def commit_sql(self, *_) -> str: 614 return "COMMIT TRANSACTION" 615 616 def rollback_sql(self, *_) -> str: 617 return "ROLLBACK TRANSACTION" 618 619 def in_unnest_op(self, expression: exp.Unnest) -> str: 620 return self.sql(expression) 621 622 def except_op(self, expression: exp.Except) -> str: 623 if not expression.args.get("distinct", False): 624 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") 625 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" 626 627 def intersect_op(self, expression: exp.Intersect) -> str: 628 if not expression.args.get("distinct", False): 629 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") 630 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" 631 632 def 
with_properties(self, properties: exp.Properties) -> str: 633 return self.properties(properties, prefix=self.seg("OPTIONS"))
FORMAT_MAPPING: Dict[str, str] =
{'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
@classmethod
def normalize_identifier(cls, expression: E) -> E:
    """Lowercase unquoted, unqualified identifiers so they compare case-insensitively.

    In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least).
    The following check is essentially a heuristic to detect tables based on whether or
    not they're qualified.
    """
    if isinstance(expression, exp.Identifier):
        parent = expression.parent

        # Walk up dotted paths (e.g. project.dataset.table) to the enclosing node.
        while isinstance(parent, exp.Dot):
            parent = parent.parent

        if (
            not isinstance(parent, exp.UserDefinedFunction)
            and not (isinstance(parent, exp.Table) and parent.db)
            and not expression.meta.get("is_table")
        ):
            expression.set("this", expression.this.lower())

    return expression
Normalizes an unquoted identifier to either lower or upper case, thus essentially making it case-insensitive. If a dialect treats all identifiers as case-insensitive, they will be normalized regardless of being quoted or not.
tokenizer_class =
<class 'sqlglot.dialects.bigquery.BigQuery.Tokenizer'>
parser_class =
<class 'sqlglot.dialects.bigquery.BigQuery.Parser'>
generator_class =
<class 'sqlglot.dialects.bigquery.BigQuery.Generator'>
FORMAT_TRIE: Dict =
{'D': {'D': {0: True}}, 'M': {'M': {0: True}, 'O': {'N': {0: True, 'T': {'H': {0: True}}}}, 'I': {0: True}}, 'Y': {'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'H': {'H': {0: True, '1': {'2': {0: True}}, '2': {'4': {0: True}}}}, 'S': {'S': {0: True, 'S': {'S': {'S': {0: True}}}}}, 'T': {'Z': {'H': {0: True}}}}
Inherited Members
- sqlglot.dialects.dialect.Dialect
- INDEX_OFFSET
- ALIAS_POST_TABLESAMPLE
- IDENTIFIERS_CAN_START_WITH_DIGIT
- STRICT_STRING_CONCAT
- NULL_ORDERING
- DATE_FORMAT
- DATEINT_FORMAT
- TIME_FORMAT
- get_or_raise
- format_time
- case_sensitive
- can_identify
- quote_identifier
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- parser
- generator
class Tokenizer(tokens.Tokenizer):
    """BigQuery tokenizer: backtick identifiers, b/r string prefixes, BigQuery keywords."""

    QUOTES = ["'", '"', '"""', "'''"]
    COMMENTS = ["--", "#", ("/*", "*/")]
    IDENTIFIERS = ["`"]
    STRING_ESCAPES = ["\\"]

    HEX_STRINGS = [("0x", ""), ("0X", "")]

    # Byte literals: b'..', B"..", including triple-quoted variants.
    BYTE_STRINGS = [
        (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
    ]

    # Raw literals: r'..', R"..", including triple-quoted variants.
    RAW_STRINGS = [
        (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
    ]

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "ANY TYPE": TokenType.VARIANT,
        "BEGIN": TokenType.COMMAND,
        "BEGIN TRANSACTION": TokenType.BEGIN,
        "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
        "BYTES": TokenType.BINARY,
        "DECLARE": TokenType.COMMAND,
        "FLOAT64": TokenType.DOUBLE,
        "INT64": TokenType.BIGINT,
        "RECORD": TokenType.STRUCT,
        # Bare TIMESTAMP is timezone-aware in BigQuery.
        "TIMESTAMP": TokenType.TIMESTAMPTZ,
        "NOT DETERMINISTIC": TokenType.VOLATILE,
        "UNKNOWN": TokenType.NULL,
    }
    # DIV is a function in BigQuery, not an operator keyword.
    KEYWORDS.pop("DIV")
BYTE_STRINGS =
[("b'", "'"), ("B'", "'"), ('b"', '"'), ('B"', '"'), ('b"""', '"""'), ('B"""', '"""'), ("b'''", "'''"), ("B'''", "'''")]
RAW_STRINGS =
[("r'", "'"), ("R'", "'"), ('r"', '"'), ('R"', '"'), ('r"""', '"""'), ('R"""', '"""'), ("r'''", "'''"), ("R'''", "'''")]
KEYWORDS =
{'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 
'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': <TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'IF': <TokenType.IF: 'IF'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 
'KEEP'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NEXT VALUE FOR': <TokenType.NEXT_VALUE_FOR: 'NEXT_VALUE_FOR'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 
'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VALUES': <TokenType.VALUES: 'VALUES'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': <TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.BIGINT: 'BIGINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': 
<TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMESTAMP': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': 
<TokenType.STRUCT: 'STRUCT'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'TRUNCATE': <TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'ANY TYPE': <TokenType.VARIANT: 'VARIANT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'CURRENT_DATETIME': <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, 'BYTES': <TokenType.BINARY: 'BINARY'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'FLOAT64': <TokenType.DOUBLE: 'DOUBLE'>, 'INT64': <TokenType.BIGINT: 'BIGINT'>, 'RECORD': <TokenType.STRUCT: 'STRUCT'>, 'NOT DETERMINISTIC': <TokenType.VOLATILE: 'VOLATILE'>, 'UNKNOWN': <TokenType.NULL: 'NULL'>}
class Parser(parser.Parser):
    """BigQuery parser: custom function mappings, OPTIONS properties and dashed table names."""

    PREFIXED_PIVOT_COLUMNS = True

    LOG_BASE_FIRST = False
    LOG_DEFAULTS_TO_LN = True

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        "DATE": _parse_date,
        "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
        "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
        "DATE_TRUNC": lambda args: exp.DateTrunc(
            unit=exp.Literal.string(str(seq_get(args, 1))),
            this=seq_get(args, 0),
        ),
        "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
        "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
        "DIV": binary_from_function(exp.IntDiv),
        "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
        "MD5": exp.MD5Digest.from_arg_list,
        "TO_HEX": _parse_to_hex,
        # PARSE_DATE(fmt, value) -> StrToDate(value, fmt): swap the arg order.
        "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")(
            [seq_get(args, 1), seq_get(args, 0)]
        ),
        "PARSE_TIMESTAMP": _parse_timestamp,
        "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
        "REGEXP_EXTRACT": lambda args: exp.RegexpExtract(
            this=seq_get(args, 0),
            expression=seq_get(args, 1),
            position=seq_get(args, 2),
            occurrence=seq_get(args, 3),
            # BigQuery implicitly extracts the single capture group, if there is one.
            group=exp.Literal.number(1)
            if re.compile(str(seq_get(args, 1))).groups == 1
            else None,
        ),
        "SPLIT": lambda args: exp.Split(
            # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
            this=seq_get(args, 0),
            expression=seq_get(args, 1) or exp.Literal.string(","),
        ),
        "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
        "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
        "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
        "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
        "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
    }

    FUNCTION_PARSERS = {
        **parser.Parser.FUNCTION_PARSERS,
        "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
    }
    FUNCTION_PARSERS.pop("TRIM")

    NO_PAREN_FUNCTIONS = {
        **parser.Parser.NO_PAREN_FUNCTIONS,
        TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
    }

    NESTED_TYPE_TOKENS = {
        *parser.Parser.NESTED_TYPE_TOKENS,
        TokenType.TABLE,
    }

    ID_VAR_TOKENS = {
        *parser.Parser.ID_VAR_TOKENS,
        TokenType.VALUES,
    }

    PROPERTY_PARSERS = {
        **parser.Parser.PROPERTY_PARSERS,
        "NOT DETERMINISTIC": lambda self: self.expression(
            exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
        ),
        "OPTIONS": lambda self: self._parse_with_property(),
    }

    CONSTRAINT_PARSERS = {
        **parser.Parser.CONSTRAINT_PARSERS,
        "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
    }

    def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
        """Parse a table-name part, gluing dash-separated tokens into one identifier."""
        this = super()._parse_table_part(schema=schema)

        # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
        if isinstance(this, exp.Identifier):
            table_name = this.name
            while self._match(TokenType.DASH, advance=False) and self._next:
                self._advance(2)
                table_name += f"-{self._prev.text}"

            this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))

        return this

    def _parse_table_parts(self, schema: bool = False) -> exp.Table:
        """Split a single dotted identifier (e.g. `proj.dataset.tbl`) into catalog/db/table."""
        table = super()._parse_table_parts(schema=schema)
        if isinstance(table.this, exp.Identifier) and "." in table.name:
            catalog, db, this, *rest = (
                t.cast(t.Optional[exp.Expression], exp.to_identifier(x))
                for x in split_num_words(table.name, ".", 3)
            )

            # Anything past the third part stays attached to the table name as a Dot chain.
            if rest and this:
                this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))

            table = exp.Table(this=this, db=db, catalog=catalog)

        return table
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: Determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
FUNCTIONS =
{'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayJoin'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.CastToStrType'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'COALESCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'IFNULL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'NVL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Concat'>>, 'CONCAT_WS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ConcatWs'>>, 'COUNT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Count'>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <function _parse_date>, 'DATE_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATEDIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 
'DATE_TRUNC': <function BigQuery.Parser.<lambda>>, 'DATETIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Extract'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'GREATEST': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hex'>>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Initcap'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'JSON_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtract'>>, 'JSON_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtractScalar'>>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'LAST_DATE_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDateOfMonth'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Left'>>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log'>>, 'LOG10': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log10'>>, 'LOG2': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Log2'>>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'MONTHS_BETWEEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MonthsBetween'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.OpenJSON'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.ParameterizedAgg'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Posexplode'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <function BigQuery.Parser.<lambda>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_REPLACE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpReplace'>>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Right'>>, 'ROUND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeConcat'>>, 'SAFE_DIVIDE': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.SafeDivide'>>, 'SET_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SetAgg'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SortArray'>>, 'SPLIT': <function BigQuery.Parser.<lambda>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'SUBSTRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeDiff'>>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': <function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TRANSFORM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Transform'>>, 'TRIM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTimeStr'>>, 'UPPER': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Upper'>>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'VAR_MAP': <function parse_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'XOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Xor'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'GLOB': <function Parser.<lambda>>, 'LIKE': <function parse_like>, 'DIV': <function binary_from_function.<locals>.<lambda>>, 'GENERATE_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'TO_HEX': <function _parse_to_hex>, 'PARSE_DATE': <function BigQuery.Parser.<lambda>>, 'PARSE_TIMESTAMP': <function _parse_timestamp>, 'REGEXP_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'TO_JSON_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>}
FUNCTION_PARSERS =
{'ANY_VALUE': <function Parser.<lambda>>, 'CAST': <function Parser.<lambda>>, 'CONCAT': <function Parser.<lambda>>, 'CONVERT': <function Parser.<lambda>>, 'DECODE': <function Parser.<lambda>>, 'EXTRACT': <function Parser.<lambda>>, 'JSON_OBJECT': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATCH': <function Parser.<lambda>>, 'OPENJSON': <function Parser.<lambda>>, 'POSITION': <function Parser.<lambda>>, 'SAFE_CAST': <function Parser.<lambda>>, 'STRING_AGG': <function Parser.<lambda>>, 'SUBSTRING': <function Parser.<lambda>>, 'TRY_CAST': <function Parser.<lambda>>, 'TRY_CONVERT': <function Parser.<lambda>>, 'ARRAY': <function BigQuery.Parser.<lambda>>}
NO_PAREN_FUNCTIONS =
{<TokenType.CURRENT_DATE: 'CURRENT_DATE'>: <class 'sqlglot.expressions.CurrentDate'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>: <class 'sqlglot.expressions.CurrentDatetime'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>: <class 'sqlglot.expressions.CurrentTime'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>: <class 'sqlglot.expressions.CurrentTimestamp'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>: <class 'sqlglot.expressions.CurrentUser'>}
NESTED_TYPE_TOKENS =
{<TokenType.TABLE: 'TABLE'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.MAP: 'MAP'>, <TokenType.NULLABLE: 'NULLABLE'>}
ID_VAR_TOKENS =
{<TokenType.PARTITION: 'PARTITION'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.KEEP: 'KEEP'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.END: 'END'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.UINT128: 'UINT128'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.SUPER: 'SUPER'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.MAP: 'MAP'>, <TokenType.LEFT: 'LEFT'>, <TokenType.DATE: 'DATE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.INT: 'INT'>, <TokenType.SEMI: 'SEMI'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.VALUES: 'VALUES'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.UINT: 'UINT'>, <TokenType.JSONB: 'JSONB'>, <TokenType.IS: 'IS'>, <TokenType.FALSE: 'FALSE'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.TIME: 'TIME'>, <TokenType.DIV: 'DIV'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.ANTI: 'ANTI'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.FILTER: 'FILTER'>, <TokenType.NEXT: 'NEXT'>, <TokenType.TOP: 'TOP'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.DELETE: 'DELETE'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.TABLE: 'TABLE'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.CACHE: 
'CACHE'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.CHAR: 'CHAR'>, <TokenType.IF: 'IF'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.FULL: 'FULL'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.SOME: 'SOME'>, <TokenType.ASC: 'ASC'>, <TokenType.ENUM: 'ENUM'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.ROW: 'ROW'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.TEXT: 'TEXT'>, <TokenType.LOAD: 'LOAD'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.VIEW: 'VIEW'>, <TokenType.RANGE: 'RANGE'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.SET: 'SET'>, <TokenType.MERGE: 'MERGE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.CASE: 'CASE'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.ROWS: 'ROWS'>, <TokenType.INT256: 'INT256'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.VAR: 'VAR'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.INT128: 'INT128'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.ROWVERSION: 'ROWVERSION'>, 
<TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.JSON: 'JSON'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.UUID: 'UUID'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.ANY: 'ANY'>, <TokenType.ALL: 'ALL'>, <TokenType.APPLY: 'APPLY'>, <TokenType.BINARY: 'BINARY'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.INET: 'INET'>, <TokenType.SHOW: 'SHOW'>, <TokenType.TRUE: 'TRUE'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.FIRST: 'FIRST'>, <TokenType.INDEX: 'INDEX'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.BIT: 'BIT'>, <TokenType.DESC: 'DESC'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.XML: 'XML'>}
PROPERTY_PARSERS =
{'ALGORITHM': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'BLOCKCOMPRESSION': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECKSUM': <function Parser.<lambda>>, 'CLUSTER BY': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COPY': <function Parser.<lambda>>, 'DATABLOCKSIZE': <function Parser.<lambda>>, 'DEFINER': <function Parser.<lambda>>, 'DETERMINISTIC': <function Parser.<lambda>>, 'DISTKEY': <function Parser.<lambda>>, 'DISTSTYLE': <function Parser.<lambda>>, 'ENGINE': <function Parser.<lambda>>, 'EXECUTE': <function Parser.<lambda>>, 'EXTERNAL': <function Parser.<lambda>>, 'FALLBACK': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'FREESPACE': <function Parser.<lambda>>, 'IMMUTABLE': <function Parser.<lambda>>, 'JOURNAL': <function Parser.<lambda>>, 'LANGUAGE': <function Parser.<lambda>>, 'LAYOUT': <function Parser.<lambda>>, 'LIFETIME': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'LOCATION': <function Parser.<lambda>>, 'LOCK': <function Parser.<lambda>>, 'LOCKING': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATERIALIZED': <function Parser.<lambda>>, 'MERGEBLOCKRATIO': <function Parser.<lambda>>, 'MULTISET': <function Parser.<lambda>>, 'NO': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'ORDER BY': <function Parser.<lambda>>, 'PARTITION BY': <function Parser.<lambda>>, 'PARTITIONED BY': <function Parser.<lambda>>, 'PARTITIONED_BY': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'RANGE': <function Parser.<lambda>>, 'RETURNS': <function Parser.<lambda>>, 'ROW': <function Parser.<lambda>>, 'ROW_FORMAT': <function Parser.<lambda>>, 'SET': <function Parser.<lambda>>, 'SETTINGS': <function Parser.<lambda>>, 'SORTKEY': <function Parser.<lambda>>, 'SOURCE': <function Parser.<lambda>>, 'STABLE': <function 
Parser.<lambda>>, 'STORED': <function Parser.<lambda>>, 'TBLPROPERTIES': <function Parser.<lambda>>, 'TEMP': <function Parser.<lambda>>, 'TEMPORARY': <function Parser.<lambda>>, 'TO': <function Parser.<lambda>>, 'TRANSIENT': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'USING': <function Parser.<lambda>>, 'VOLATILE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'NOT DETERMINISTIC': <function BigQuery.Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
CONSTRAINT_PARSERS =
{'AUTOINCREMENT': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'CASESPECIFIC': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECK': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COMPRESS': <function Parser.<lambda>>, 'DEFAULT': <function Parser.<lambda>>, 'ENCODE': <function Parser.<lambda>>, 'FOREIGN KEY': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'GENERATED': <function Parser.<lambda>>, 'IDENTITY': <function Parser.<lambda>>, 'INLINE': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'NOT': <function Parser.<lambda>>, 'NULL': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'PATH': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'REFERENCES': <function Parser.<lambda>>, 'TITLE': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'UNIQUE': <function Parser.<lambda>>, 'UPPERCASE': <function Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
SET_TRIE: Dict =
{'GLOBAL': {0: True}, 'LOCAL': {0: True}, 'SESSION': {0: True}, 'TRANSACTION': {0: True}}
FORMAT_MAPPING: Dict[str, str] =
{'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
FORMAT_TRIE: Dict =
{'D': {'D': {0: True}}, 'M': {'M': {0: True}, 'O': {'N': {0: True, 'T': {'H': {0: True}}}}, 'I': {0: True}}, 'Y': {'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'H': {'H': {0: True, '1': {'2': {0: True}}, '2': {'4': {0: True}}}}, 'S': {'S': {0: True, 'S': {'S': {'S': {0: True}}}}}, 'T': {'Z': {'H': {0: True}}}}
Inherited Members
- sqlglot.parser.Parser
- Parser
- ENUM_TYPE_TOKENS
- TYPE_TOKENS
- SUBQUERY_PREDICATES
- RESERVED_KEYWORDS
- DB_CREATABLES
- CREATABLES
- INTERVAL_VARS
- TABLE_ALIAS_TOKENS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- JOIN_HINTS
- LAMBDAS
- COLUMN_OPERATORS
- EXPRESSION_PARSERS
- STATEMENT_PARSERS
- UNARY_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- RANGE_PARSERS
- ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- NO_PAREN_FUNCTION_PARSERS
- FUNCTIONS_WITH_ALIASED_ARGS
- QUERY_MODIFIER_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- MODIFIABLES
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- INSERT_ALTERNATIVES
- CLONE_KINDS
- TABLE_INDEX_HINT_TOKENS
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- ADD_CONSTRAINT_TOKENS
- STRICT_CAST
- CONCAT_NULL_OUTPUTS_STRING
- IDENTIFY_PIVOT_STRINGS
- INDEX_OFFSET
- ALIAS_POST_TABLESAMPLE
- STRICT_STRING_CONCAT
- NULL_ORDERING
- error_level
- error_message_context
- max_errors
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
class Generator(generator.Generator):
    """BigQuery SQL generator.

    Converts sqlglot syntax trees into BigQuery Standard SQL, mapping
    generic expression nodes onto BigQuery's function names, type names,
    and dialect-specific syntax (e.g. UNNEST-based VALUES, SAFE_CAST,
    EXCEPT/INTERSECT DISTINCT).
    """

    # BigQuery requires an explicit ALL/DISTINCT on set operations.
    EXPLICIT_UNION = True
    # BigQuery interval units are singular: INTERVAL 2 DAY, not DAYS.
    INTERVAL_ALLOWS_PLURAL_FORM = False
    JOIN_HINTS = False
    QUERY_HINTS = False
    TABLE_HINTS = False
    LIMIT_FETCH = "LIMIT"
    RENAME_TABLE_WITH_DB = False
    ESCAPE_LINE_BREAK = True

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
        exp.ArraySize: rename_func("ARRAY_LENGTH"),
        exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
        exp.Create: _create_sql,
        exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
        exp.DateAdd: _date_add_sql("DATE", "ADD"),
        exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
        exp.DateFromParts: rename_func("DATE"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.DateSub: _date_add_sql("DATE", "SUB"),
        exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
        exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
        exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
        exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
        exp.GroupConcat: rename_func("STRING_AGG"),
        exp.Hex: rename_func("TO_HEX"),
        exp.ILike: no_ilike_sql,
        exp.IntDiv: rename_func("DIV"),
        exp.JSONFormat: rename_func("TO_JSON_STRING"),
        exp.Max: max_or_greatest,
        # BigQuery's MD5 returns BYTES; wrap in TO_HEX to get the usual hex string.
        exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
        exp.MD5Digest: rename_func("MD5"),
        exp.Min: min_or_least,
        exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
        exp.RegexpExtract: lambda self, e: self.func(
            "REGEXP_EXTRACT",
            e.this,
            e.expression,
            e.args.get("position"),
            e.args.get("occurrence"),
        ),
        exp.RegexpReplace: regexp_replace_sql,
        exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
        exp.ReturnsProperty: _returnsproperty_sql,
        exp.Select: transforms.preprocess(
            [
                transforms.explode_to_unnest,
                _unqualify_unnest,
                transforms.eliminate_distinct_on,
                _alias_ordered_group,
            ]
        ),
        # Fix: constant strings — no f-string prefix needed (was f"DETERMINISTIC").
        exp.StabilityProperty: lambda self, e: "DETERMINISTIC"
        if e.name == "IMMUTABLE"
        else "NOT DETERMINISTIC",
        exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
        exp.StrToTime: lambda self, e: self.func(
            "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
        ),
        exp.TimeAdd: _date_add_sql("TIME", "ADD"),
        exp.TimeSub: _date_add_sql("TIME", "SUB"),
        exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
        exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
        exp.TimeStrToTime: timestrtotime_sql,
        # Fix: was self.func(f"TRIM", ...) — the f-prefix was a no-op.
        exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
        exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"),
        exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
        exp.Unhex: rename_func("FROM_HEX"),
        exp.Values: _derived_table_values_to_unnest,
        exp.VariancePop: rename_func("VAR_POP"),
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
        exp.DataType.Type.BIGINT: "INT64",
        exp.DataType.Type.BINARY: "BYTES",
        exp.DataType.Type.BOOLEAN: "BOOL",
        exp.DataType.Type.CHAR: "STRING",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT64",
        exp.DataType.Type.FLOAT: "FLOAT64",
        exp.DataType.Type.INT: "INT64",
        exp.DataType.Type.NCHAR: "STRING",
        exp.DataType.Type.NVARCHAR: "STRING",
        exp.DataType.Type.SMALLINT: "INT64",
        exp.DataType.Type.TEXT: "STRING",
        exp.DataType.Type.TIMESTAMP: "DATETIME",
        exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
        exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
        exp.DataType.Type.TINYINT: "INT64",
        exp.DataType.Type.VARBINARY: "BYTES",
        exp.DataType.Type.VARCHAR: "STRING",
        exp.DataType.Type.VARIANT: "ANY TYPE",
    }

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
    RESERVED_KEYWORDS = {
        *generator.Generator.RESERVED_KEYWORDS,
        "all",
        "and",
        "any",
        "array",
        "as",
        "asc",
        "assert_rows_modified",
        "at",
        "between",
        "by",
        "case",
        "cast",
        "collate",
        "contains",
        "create",
        "cross",
        "cube",
        "current",
        "default",
        "define",
        "desc",
        "distinct",
        "else",
        "end",
        "enum",
        "escape",
        "except",
        "exclude",
        "exists",
        "extract",
        "false",
        "fetch",
        "following",
        "for",
        "from",
        "full",
        "group",
        "grouping",
        "groups",
        "hash",
        "having",
        "if",
        "ignore",
        "in",
        "inner",
        "intersect",
        "interval",
        "into",
        "is",
        "join",
        "lateral",
        "left",
        "like",
        "limit",
        "lookup",
        "merge",
        "natural",
        "new",
        "no",
        "not",
        "null",
        "nulls",
        "of",
        "on",
        "or",
        "order",
        "outer",
        "over",
        "partition",
        "preceding",
        "proto",
        "qualify",
        "range",
        "recursive",
        "respect",
        "right",
        "rollup",
        "rows",
        "select",
        "set",
        "some",
        "struct",
        "tablesample",
        "then",
        "to",
        "treat",
        "true",
        "unbounded",
        "union",
        "unnest",
        "using",
        "when",
        "where",
        "window",
        "with",
        "within",
    }

    def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
        """Render AT TIME ZONE; BigQuery has no such operator, so emit
        TIMESTAMP(DATETIME(this, zone)) except inside a CAST-to-text with FORMAT."""
        parent = expression.parent

        # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
        # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
        if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
            return self.func(
                "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
            )

        return super().attimezone_sql(expression)

    def trycast_sql(self, expression: exp.TryCast) -> str:
        """TRY_CAST is spelled SAFE_CAST in BigQuery."""
        return self.cast_sql(expression, safe_prefix="SAFE_")

    def cte_sql(self, expression: exp.CTE) -> str:
        """Emit a CTE, warning that BigQuery does not support CTE column lists."""
        if expression.alias_column_names:
            self.unsupported("Column names in CTE definition are not supported.")
        return super().cte_sql(expression)

    def array_sql(self, expression: exp.Array) -> str:
        """Emit ARRAY(<subquery>) for subquery arguments, inline arrays otherwise."""
        first_arg = seq_get(expression.expressions, 0)
        if isinstance(first_arg, exp.Subqueryable):
            return f"ARRAY{self.wrap(self.sql(first_arg))}"

        return inline_array_sql(self, expression)

    def transaction_sql(self, *_) -> str:
        return "BEGIN TRANSACTION"

    def commit_sql(self, *_) -> str:
        return "COMMIT TRANSACTION"

    def rollback_sql(self, *_) -> str:
        return "ROLLBACK TRANSACTION"

    def in_unnest_op(self, expression: exp.Unnest) -> str:
        # No parentheses around UNNEST on the right-hand side of IN.
        return self.sql(expression)

    def except_op(self, expression: exp.Except) -> str:
        """BigQuery only supports EXCEPT DISTINCT; warn on the ALL form."""
        if not expression.args.get("distinct", False):
            self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
        return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"

    def intersect_op(self, expression: exp.Intersect) -> str:
        """BigQuery only supports INTERSECT DISTINCT; warn on the ALL form."""
        if not expression.args.get("distinct", False):
            self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
        return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"

    def with_properties(self, properties: exp.Properties) -> str:
        # Table/view properties are rendered in an OPTIONS(...) clause.
        return self.properties(properties, prefix=self.seg("OPTIONS"))
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether or not to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether or not to normalize identifiers to lowercase. Default: False.
- pad: Determines the pad size in a formatted string. Default: 2.
- indent: Determines the indentation size in a formatted string. Default: 2.
- normalize_functions: Whether or not to normalize all function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default: ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3.
- leading_comma: Determines whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False.
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
TRANSFORMS =
{<class 'sqlglot.expressions.DateAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function 
_returnsproperty_sql>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ApproxDistinct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ArraySize'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Cast'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.Create'>: <function _create_sql>, <class 'sqlglot.expressions.CTE'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.DateDiff'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.DateFromParts'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.DateSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.DateTrunc'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.GenerateSeries'>: <function 
rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.GroupConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Hex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ILike'>: <function no_ilike_sql>, <class 'sqlglot.expressions.IntDiv'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONFormat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.MD5'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.MD5Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.PartitionedByProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpExtract'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpReplace'>: <function regexp_replace_sql>, <class 'sqlglot.expressions.RegexpLike'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.StrToDate'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToTime'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TimeAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimeSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.Trim'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsToDate'>: <function ts_or_ds_to_date_sql.<locals>._ts_or_ds_to_date_sql>, <class 'sqlglot.expressions.Unhex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Values'>: <function 
_derived_table_values_to_unnest>, <class 'sqlglot.expressions.VariancePop'>: <function rename_func.<locals>.<lambda>>}
TYPE_MAPPING =
{<Type.NCHAR: 'NCHAR'>: 'STRING', <Type.NVARCHAR: 'NVARCHAR'>: 'STRING', <Type.MEDIUMTEXT: 'MEDIUMTEXT'>: 'TEXT', <Type.LONGTEXT: 'LONGTEXT'>: 'TEXT', <Type.MEDIUMBLOB: 'MEDIUMBLOB'>: 'BLOB', <Type.LONGBLOB: 'LONGBLOB'>: 'BLOB', <Type.INET: 'INET'>: 'INET', <Type.BIGDECIMAL: 'BIGDECIMAL'>: 'BIGNUMERIC', <Type.BIGINT: 'BIGINT'>: 'INT64', <Type.BINARY: 'BINARY'>: 'BYTES', <Type.BOOLEAN: 'BOOLEAN'>: 'BOOL', <Type.CHAR: 'CHAR'>: 'STRING', <Type.DECIMAL: 'DECIMAL'>: 'NUMERIC', <Type.DOUBLE: 'DOUBLE'>: 'FLOAT64', <Type.FLOAT: 'FLOAT'>: 'FLOAT64', <Type.INT: 'INT'>: 'INT64', <Type.SMALLINT: 'SMALLINT'>: 'INT64', <Type.TEXT: 'TEXT'>: 'STRING', <Type.TIMESTAMP: 'TIMESTAMP'>: 'DATETIME', <Type.TIMESTAMPTZ: 'TIMESTAMPTZ'>: 'TIMESTAMP', <Type.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>: 'TIMESTAMP', <Type.TINYINT: 'TINYINT'>: 'INT64', <Type.VARBINARY: 'VARBINARY'>: 'BYTES', <Type.VARCHAR: 'VARCHAR'>: 'STRING', <Type.VARIANT: 'VARIANT'>: 'ANY TYPE'}
PROPERTIES_LOCATION =
{<class 'sqlglot.expressions.AlgorithmProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.AutoIncrementProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.BlockCompressionProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CharacterSetProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ChecksumProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CollateProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Cluster'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ClusteredByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DataBlocksizeProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.DefinerProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.DictRange'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DictProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistStyleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.EngineProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExternalProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.FallbackProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.FileFormatProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.FreespaceProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.IsolatedLoadingProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.JournalProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 
'sqlglot.expressions.LanguageProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LikeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LocationProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LockingProperty'>: <Location.POST_ALIAS: 'POST_ALIAS'>, <class 'sqlglot.expressions.LogProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.MaterializedProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeBlockRatioProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.OnCommitProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.Order'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PrimaryKey'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Property'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.ReturnsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatDelimitedProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatSerdeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SchemaCommentProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SerdeProperties'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Set'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SettingsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SetProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.SortKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.SqlSecurityProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.StabilityProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TemporaryProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.ToTableProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TransientProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeTreeTTL'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.VolatileProperty'>: <Location.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.WithDataProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <Location.POST_NAME: 'POST_NAME'>}
RESERVED_KEYWORDS =
{'any', 'default', 'following', 'define', 'merge', 'is', 'if', 'asc', 'cube', 'recursive', 'then', 'hash', 'proto', 'intersect', 'enum', 'new', 'for', 'lookup', 'natural', 'case', 'or', 'assert_rows_modified', 'like', 'exclude', 'using', 'having', 'select', 'no', 'some', 'null', 'tablesample', 'partition', 'nulls', 'from', 'on', 'into', 'set', 'except', 'join', 'all', 'and', 'limit', 'rollup', 'group', 'in', 'desc', 'between', 'qualify', 'exists', 'union', 'full', 'outer', 'order', 'fetch', 'inner', 'respect', 'with', 'struct', 'within', 'preceding', 'false', 'cast', 'rows', 'end', 'as', 'array', 'left', 'when', 'else', 'distinct', 'current', 'true', 'window', 'at', 'cross', 'ignore', 'groups', 'escape', 'unbounded', 'interval', 'where', 'extract', 'lateral', 'unnest', 'treat', 'not', 'contains', 'of', 'by', 'right', 'over', 'collate', 'range', 'create', 'to', 'grouping'}
583 def attimezone_sql(self, expression: exp.AtTimeZone) -> str: 584 parent = expression.parent 585 586 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). 587 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. 588 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): 589 return self.func( 590 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) 591 ) 592 593 return super().attimezone_sql(expression)
@classmethod
def
can_identify(text: str, identify: str | bool = 'safe') -> bool:
248 @classmethod 249 def can_identify(cls, text: str, identify: str | bool = "safe") -> bool: 250 """Checks if text can be identified given an identify option. 251 252 Args: 253 text: The text to check. 254 identify: 255 "always" or `True`: Always returns true. 256 "safe": True if the identifier is case-insensitive. 257 258 Returns: 259 Whether or not the given text can be identified. 260 """ 261 if identify is True or identify == "always": 262 return True 263 264 if identify == "safe": 265 return not cls.case_sensitive(text) 266 267 return False
Checks if text can be identified given an identify option.
Arguments:
- text: The text to check.
- identify: "always" or `True`: Always returns true. "safe": True if the identifier is case-insensitive.
Returns:
Whether or not the given text can be identified.
Inherited Members
- sqlglot.generator.Generator
- Generator
- NULL_ORDERING_SUPPORTED
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- TABLESAMPLE_WITH_METHOD
- TABLESAMPLE_SIZE_IS_PERCENT
- GROUPINGS_SEP
- INDEX_ON
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- LIMIT_IS_TOP
- RETURNING_END
- COLUMN_JOIN_MARKS_SUPPORTED
- SELECT_KINDS
- STAR_MAPPING
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- WITH_SEPARATED_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- SENTINEL_LINE_BREAK
- INDEX_OFFSET
- ALIAS_POST_TABLESAMPLE
- IDENTIFIERS_CAN_START_WITH_DIGIT
- STRICT_STRING_CONCAT
- NULL_ORDERING
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- normalize_functions
- unsupported_messages
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- createable_sql
- create_sql
- clone_sql
- describe_sql
- prepend_ctes
- with_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- rawstring_sql
- datatypesize_sql
- datatype_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- inputoutputformat_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- insert_sql
- intersect_sql
- introducer_sql
- pseudotype_sql
- onconflict_sql
- returning_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- setitem_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_having_modifiers
- after_limit_modifiers
- select_sql
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_sql
- safebracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- safeconcat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonobject_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- add_sql
- and_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- safedpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- is_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql