sqlglot.dialects.bigquery
"""BigQuery dialect for sqlglot: tokenizer, parser and generator rules."""

from __future__ import annotations

import logging
import re
import typing as t

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot._typing import E
from sqlglot.dialects.dialect import (
    Dialect,
    datestrtodate_sql,
    format_time_lambda,
    inline_array_sql,
    max_or_greatest,
    min_or_least,
    no_ilike_sql,
    parse_date_delta_with_interval,
    rename_func,
    timestrtotime_sql,
    ts_or_ds_to_date_sql,
)
from sqlglot.helper import seq_get, split_num_words
from sqlglot.tokens import TokenType

logger = logging.getLogger("sqlglot")


def _date_add_sql(
    data_type: str, kind: str
) -> t.Callable[[generator.Generator, exp.Expression], str]:
    """Build a generator callback producing `<data_type>_<kind>(this, INTERVAL ...)` SQL.

    The interval unit defaults to DAY when the expression carries no unit.
    """

    def func(self, expression):
        this = self.sql(expression, "this")
        unit = expression.args.get("unit")
        unit = exp.var(unit.name.upper() if unit else "DAY")
        interval = exp.Interval(this=expression.expression, unit=unit)
        return f"{data_type}_{kind}({this}, {self.sql(interval)})"

    return func


def _derived_table_values_to_unnest(self: generator.Generator, expression: exp.Values) -> str:
    """Render VALUES used as a FROM source as UNNEST(ARRAY<STRUCT<...>>).

    BigQuery has no derived-table VALUES syntax; outside of FROM the
    plain VALUES rendering is kept. Columns without an alias are named
    `_c0`, `_c1`, ... positionally.
    """
    if not isinstance(expression.unnest().parent, exp.From):
        return self.values_sql(expression)

    alias = expression.args.get("alias")

    structs = [
        exp.Struct(
            expressions=[
                exp.alias_(value, column_name)
                for value, column_name in zip(
                    t.expressions,
                    alias.columns
                    if alias and alias.columns
                    else (f"_c{i}" for i in range(len(t.expressions))),
                )
            ]
        )
        for t in expression.find_all(exp.Tuple)
    ]

    return self.unnest_sql(exp.Unnest(expressions=[exp.Array(expressions=structs)]))


def _returnsproperty_sql(self: generator.Generator, expression: exp.ReturnsProperty) -> str:
    """Render a RETURNS clause, using BigQuery's `TABLE <...>` form for schemas."""
    this = expression.this
    if isinstance(this, exp.Schema):
        this = f"{this.this} <{self.expressions(this)}>"
    else:
        this = self.sql(this)
    return f"RETURNS {this}"


def _create_sql(self: generator.Generator, expression: exp.Create) -> str:
    """Render CREATE, mapping table-valued functions to CREATE TABLE FUNCTION.

    A `CREATE FUNCTION ... RETURNS TABLE` becomes `CREATE TABLE FUNCTION`,
    and a subquery / literal body is unwrapped to its inner expression.
    """
    kind = expression.args["kind"]
    returns = expression.find(exp.ReturnsProperty)
    if kind.upper() == "FUNCTION" and returns and returns.args.get("is_table"):
        expression = expression.copy()
        expression.set("kind", "TABLE FUNCTION")
        if isinstance(
            expression.expression,
            (
                exp.Subquery,
                exp.Literal,
            ),
        ):
            expression.set("expression", expression.expression.this)

    return self.create_sql(expression)


def _unqualify_unnest(expression: exp.Expression) -> exp.Expression:
    """Remove references to unnest table aliases since bigquery doesn't allow them.

    These are added by the optimizer's qualify_column step.
    """
    from sqlglot.optimizer.scope import Scope

    if isinstance(expression, exp.Select):
        for unnest in expression.find_all(exp.Unnest):
            if isinstance(unnest.parent, (exp.From, exp.Join)) and unnest.alias:
                for column in Scope(expression).find_all(exp.Column):
                    if column.table == unnest.alias:
                        column.set("table", None)

    return expression


# https://issuetracker.google.com/issues/162294746
# workaround for bigquery bug when grouping by an expression and then ordering
# WITH x AS (SELECT 1 y)
# SELECT y + 1 z
# FROM x
# GROUP BY x + 1
# ORDER by z
def _alias_ordered_group(expression: exp.Expression) -> exp.Expression:
    """Replace GROUP BY expressions with their select-list aliases when available."""
    if isinstance(expression, exp.Select):
        group = expression.args.get("group")
        order = expression.args.get("order")

        if group and order:
            aliases = {
                select.this: select.args["alias"]
                for select in expression.selects
                if isinstance(select, exp.Alias)
            }

            for e in group.expressions:
                alias = aliases.get(e)

                if alias:
                    e.replace(exp.column(alias))

    return expression


def _pushdown_cte_column_names(expression: exp.Expression) -> exp.Expression:
    """BigQuery doesn't allow column names when defining a CTE, so we try to push them down."""
    if isinstance(expression, exp.CTE) and expression.alias_column_names:
        cte_query = expression.this

        if cte_query.is_star:
            logger.warning(
                "Can't push down CTE column names for star queries. Run the query through"
                " the optimizer or use 'qualify' to expand the star projections first."
            )
            return expression

        column_names = expression.alias_column_names
        expression.args["alias"].set("columns", None)

        for name, select in zip(column_names, cte_query.selects):
            to_replace = select

            if isinstance(select, exp.Alias):
                select = select.this

            # Inner aliases are shadowed by the CTE column names
            to_replace.replace(exp.alias_(select, name))

    return expression


def _parse_timestamp(args: t.List) -> exp.StrToTime:
    """Parse PARSE_TIMESTAMP(fmt, value[, zone]) into StrToTime (args swapped)."""
    this = format_time_lambda(exp.StrToTime, "bigquery")([seq_get(args, 1), seq_get(args, 0)])
    this.set("zone", seq_get(args, 2))
    return this


def _parse_date(args: t.List) -> exp.Date | exp.DateFromParts:
    """DATE(y, m, d) parses to DateFromParts; any other arity to Date."""
    expr_type = exp.DateFromParts if len(args) == 3 else exp.Date
    return expr_type.from_arg_list(args)


def _parse_to_hex(args: t.List) -> exp.Hex | exp.MD5:
    # TO_HEX(MD5(..)) is common in BigQuery, so it's parsed into MD5 to simplify its transpilation
    arg = seq_get(args, 0)
    return exp.MD5(this=arg.this) if isinstance(arg, exp.MD5Digest) else exp.Hex(this=arg)


class BigQuery(Dialect):
    UNNEST_COLUMN_ONLY = True

    # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity
    RESOLVES_IDENTIFIERS_AS_UPPERCASE = None

    # bigquery udfs are case sensitive
    NORMALIZE_FUNCTIONS = False

    TIME_MAPPING = {
        "%D": "%m/%d/%y",
    }

    FORMAT_MAPPING = {
        "DD": "%d",
        "MM": "%m",
        "MON": "%b",
        "MONTH": "%B",
        "YYYY": "%Y",
        "YY": "%y",
        "HH": "%I",
        "HH12": "%I",
        "HH24": "%H",
        "MI": "%M",
        "SS": "%S",
        "SSSSS": "%f",
        "TZH": "%z",
    }

    @classmethod
    def normalize_identifier(cls, expression: E) -> E:
        # In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least).
        # The following check is essentially a heuristic to detect tables based on whether or
        # not they're qualified.
        if isinstance(expression, exp.Identifier):
            parent = expression.parent

            while isinstance(parent, exp.Dot):
                parent = parent.parent

            if (
                not isinstance(parent, exp.UserDefinedFunction)
                and not (isinstance(parent, exp.Table) and parent.db)
                and not expression.meta.get("is_table")
            ):
                expression.set("this", expression.this.lower())

        return expression

    class Tokenizer(tokens.Tokenizer):
        QUOTES = ["'", '"', '"""', "'''"]
        COMMENTS = ["--", "#", ("/*", "*/")]
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["\\"]

        HEX_STRINGS = [("0x", ""), ("0X", "")]

        BYTE_STRINGS = [
            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
        ]

        RAW_STRINGS = [
            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
        ]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "ANY TYPE": TokenType.VARIANT,
            "BEGIN": TokenType.COMMAND,
            "BEGIN TRANSACTION": TokenType.BEGIN,
            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
            "BYTES": TokenType.BINARY,
            "DECLARE": TokenType.COMMAND,
            "FLOAT64": TokenType.DOUBLE,
            "INT64": TokenType.BIGINT,
            "RECORD": TokenType.STRUCT,
            "TIMESTAMP": TokenType.TIMESTAMPTZ,
            "NOT DETERMINISTIC": TokenType.VOLATILE,
            "UNKNOWN": TokenType.NULL,
        }
        KEYWORDS.pop("DIV")

    class Parser(parser.Parser):
        PREFIXED_PIVOT_COLUMNS = True

        LOG_BASE_FIRST = False
        LOG_DEFAULTS_TO_LN = True

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            "DATE": _parse_date,
            "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
            "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
            "DATE_TRUNC": lambda args: exp.DateTrunc(
                unit=exp.Literal.string(str(seq_get(args, 1))),
                this=seq_get(args, 0),
            ),
            "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
            "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
            "DIV": lambda args: exp.IntDiv(this=seq_get(args, 0), expression=seq_get(args, 1)),
            "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
            "MD5": exp.MD5Digest.from_arg_list,
            "TO_HEX": _parse_to_hex,
            "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")(
                [seq_get(args, 1), seq_get(args, 0)]
            ),
            "PARSE_TIMESTAMP": _parse_timestamp,
            "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
            "REGEXP_EXTRACT": lambda args: exp.RegexpExtract(
                this=seq_get(args, 0),
                expression=seq_get(args, 1),
                position=seq_get(args, 2),
                occurrence=seq_get(args, 3),
                group=exp.Literal.number(1)
                if re.compile(str(seq_get(args, 1))).groups == 1
                else None,
            ),
            "SPLIT": lambda args: exp.Split(
                # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
                this=seq_get(args, 0),
                expression=seq_get(args, 1) or exp.Literal.string(","),
            ),
            "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
            "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
            "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
            "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
            "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
        }

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,
            "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
        }
        FUNCTION_PARSERS.pop("TRIM")

        NO_PAREN_FUNCTIONS = {
            **parser.Parser.NO_PAREN_FUNCTIONS,
            TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
        }

        NESTED_TYPE_TOKENS = {
            *parser.Parser.NESTED_TYPE_TOKENS,
            TokenType.TABLE,
        }

        ID_VAR_TOKENS = {
            *parser.Parser.ID_VAR_TOKENS,
            TokenType.VALUES,
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,
            "NOT DETERMINISTIC": lambda self: self.expression(
                exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
            ),
            "OPTIONS": lambda self: self._parse_with_property(),
        }

        CONSTRAINT_PARSERS = {
            **parser.Parser.CONSTRAINT_PARSERS,
            "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
        }

        def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
            this = super()._parse_table_part(schema=schema)

            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
            if isinstance(this, exp.Identifier):
                table_name = this.name
                while self._match(TokenType.DASH, advance=False) and self._next:
                    self._advance(2)
                    table_name += f"-{self._prev.text}"

                this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))

            return this

        def _parse_table_parts(self, schema: bool = False) -> exp.Table:
            table = super()._parse_table_parts(schema=schema)
            if isinstance(table.this, exp.Identifier) and "." in table.name:
                catalog, db, this, *rest = (
                    t.cast(t.Optional[exp.Expression], exp.to_identifier(x))
                    for x in split_num_words(table.name, ".", 3)
                )

                if rest and this:
                    this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))

                table = exp.Table(this=this, db=db, catalog=catalog)

            return table

    class Generator(generator.Generator):
        EXPLICIT_UNION = True
        INTERVAL_ALLOWS_PLURAL_FORM = False
        JOIN_HINTS = False
        QUERY_HINTS = False
        TABLE_HINTS = False
        LIMIT_FETCH = "LIMIT"
        RENAME_TABLE_WITH_DB = False
        ESCAPE_LINE_BREAK = True

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
            exp.ArraySize: rename_func("ARRAY_LENGTH"),
            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
            exp.Create: _create_sql,
            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
            exp.DateAdd: _date_add_sql("DATE", "ADD"),
            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
            exp.DateFromParts: rename_func("DATE"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.DateSub: _date_add_sql("DATE", "SUB"),
            exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
            exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
            exp.GroupConcat: rename_func("STRING_AGG"),
            exp.Hex: rename_func("TO_HEX"),
            exp.ILike: no_ilike_sql,
            exp.IntDiv: rename_func("DIV"),
            exp.JSONFormat: rename_func("TO_JSON_STRING"),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
            exp.MD5Digest: rename_func("MD5"),
            exp.Min: min_or_least,
            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
            exp.RegexpExtract: lambda self, e: self.func(
                "REGEXP_EXTRACT",
                e.this,
                e.expression,
                e.args.get("position"),
                e.args.get("occurrence"),
            ),
            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
            exp.ReturnsProperty: _returnsproperty_sql,
            exp.Select: transforms.preprocess(
                [
                    transforms.explode_to_unnest,
                    _unqualify_unnest,
                    transforms.eliminate_distinct_on,
                    _alias_ordered_group,
                ]
            ),
            exp.StabilityProperty: lambda self, e: "DETERMINISTIC"
            if e.name == "IMMUTABLE"
            else "NOT DETERMINISTIC",
            exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
            exp.StrToTime: lambda self, e: self.func(
                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
            ),
            exp.TimeAdd: _date_add_sql("TIME", "ADD"),
            exp.TimeSub: _date_add_sql("TIME", "SUB"),
            exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
            exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
            exp.TimeStrToTime: timestrtotime_sql,
            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
            exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"),
            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
            exp.Unhex: rename_func("FROM_HEX"),
            exp.Values: _derived_table_values_to_unnest,
            exp.VariancePop: rename_func("VAR_POP"),
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
            exp.DataType.Type.BIGINT: "INT64",
            exp.DataType.Type.BINARY: "BYTES",
            exp.DataType.Type.BOOLEAN: "BOOL",
            exp.DataType.Type.CHAR: "STRING",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DOUBLE: "FLOAT64",
            exp.DataType.Type.FLOAT: "FLOAT64",
            exp.DataType.Type.INT: "INT64",
            exp.DataType.Type.NCHAR: "STRING",
            exp.DataType.Type.NVARCHAR: "STRING",
            exp.DataType.Type.SMALLINT: "INT64",
            exp.DataType.Type.TEXT: "STRING",
            exp.DataType.Type.TIMESTAMP: "DATETIME",
            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
            exp.DataType.Type.TINYINT: "INT64",
            exp.DataType.Type.VARBINARY: "BYTES",
            exp.DataType.Type.VARCHAR: "STRING",
            exp.DataType.Type.VARIANT: "ANY TYPE",
        }

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
        RESERVED_KEYWORDS = {
            *generator.Generator.RESERVED_KEYWORDS,
            "all",
            "and",
            "any",
            "array",
            "as",
            "asc",
            "assert_rows_modified",
            "at",
            "between",
            "by",
            "case",
            "cast",
            "collate",
            "contains",
            "create",
            "cross",
            "cube",
            "current",
            "default",
            "define",
            "desc",
            "distinct",
            "else",
            "end",
            "enum",
            "escape",
            "except",
            "exclude",
            "exists",
            "extract",
            "false",
            "fetch",
            "following",
            "for",
            "from",
            "full",
            "group",
            "grouping",
            "groups",
            "hash",
            "having",
            "if",
            "ignore",
            "in",
            "inner",
            "intersect",
            "interval",
            "into",
            "is",
            "join",
            "lateral",
            "left",
            "like",
            "limit",
            "lookup",
            "merge",
            "natural",
            "new",
            "no",
            "not",
            "null",
            "nulls",
            "of",
            "on",
            "or",
            "order",
            "outer",
            "over",
            "partition",
            "preceding",
            "proto",
            "qualify",
            "range",
            "recursive",
            "respect",
            "right",
            "rollup",
            "rows",
            "select",
            "set",
            "some",
            "struct",
            "tablesample",
            "then",
            "to",
            "treat",
            "true",
            "unbounded",
            "union",
            "unnest",
            "using",
            "when",
            "where",
            "window",
            "with",
            "within",
        }

        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
            parent = expression.parent

            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
                return self.func(
                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
                )

            return super().attimezone_sql(expression)

        def trycast_sql(self, expression: exp.TryCast) -> str:
            return self.cast_sql(expression, safe_prefix="SAFE_")

        def cte_sql(self, expression: exp.CTE) -> str:
            if expression.alias_column_names:
                self.unsupported("Column names in CTE definition are not supported.")
            return super().cte_sql(expression)

        def array_sql(self, expression: exp.Array) -> str:
            first_arg = seq_get(expression.expressions, 0)
            if isinstance(first_arg, exp.Subqueryable):
                return f"ARRAY{self.wrap(self.sql(first_arg))}"

            return inline_array_sql(self, expression)

        def transaction_sql(self, *_) -> str:
            return "BEGIN TRANSACTION"

        def commit_sql(self, *_) -> str:
            return "COMMIT TRANSACTION"

        def rollback_sql(self, *_) -> str:
            return "ROLLBACK TRANSACTION"

        def in_unnest_op(self, expression: exp.Unnest) -> str:
            return self.sql(expression)

        def except_op(self, expression: exp.Except) -> str:
            if not expression.args.get("distinct", False):
                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"

        def intersect_op(self, expression: exp.Intersect) -> str:
            if not expression.args.get("distinct", False):
                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"

        def with_properties(self, properties: exp.Properties) -> str:
            return self.properties(properties, prefix=self.seg("OPTIONS"))
logger =
<Logger sqlglot (WARNING)>
184class BigQuery(Dialect): 185 UNNEST_COLUMN_ONLY = True 186 187 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity 188 RESOLVES_IDENTIFIERS_AS_UPPERCASE = None 189 190 # bigquery udfs are case sensitive 191 NORMALIZE_FUNCTIONS = False 192 193 TIME_MAPPING = { 194 "%D": "%m/%d/%y", 195 } 196 197 FORMAT_MAPPING = { 198 "DD": "%d", 199 "MM": "%m", 200 "MON": "%b", 201 "MONTH": "%B", 202 "YYYY": "%Y", 203 "YY": "%y", 204 "HH": "%I", 205 "HH12": "%I", 206 "HH24": "%H", 207 "MI": "%M", 208 "SS": "%S", 209 "SSSSS": "%f", 210 "TZH": "%z", 211 } 212 213 @classmethod 214 def normalize_identifier(cls, expression: E) -> E: 215 # In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least). 216 # The following check is essentially a heuristic to detect tables based on whether or 217 # not they're qualified. 218 if isinstance(expression, exp.Identifier): 219 parent = expression.parent 220 221 while isinstance(parent, exp.Dot): 222 parent = parent.parent 223 224 if ( 225 not isinstance(parent, exp.UserDefinedFunction) 226 and not (isinstance(parent, exp.Table) and parent.db) 227 and not expression.meta.get("is_table") 228 ): 229 expression.set("this", expression.this.lower()) 230 231 return expression 232 233 class Tokenizer(tokens.Tokenizer): 234 QUOTES = ["'", '"', '"""', "'''"] 235 COMMENTS = ["--", "#", ("/*", "*/")] 236 IDENTIFIERS = ["`"] 237 STRING_ESCAPES = ["\\"] 238 239 HEX_STRINGS = [("0x", ""), ("0X", "")] 240 241 BYTE_STRINGS = [ 242 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B") 243 ] 244 245 RAW_STRINGS = [ 246 (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R") 247 ] 248 249 KEYWORDS = { 250 **tokens.Tokenizer.KEYWORDS, 251 "ANY TYPE": TokenType.VARIANT, 252 "BEGIN": TokenType.COMMAND, 253 "BEGIN TRANSACTION": TokenType.BEGIN, 254 "CURRENT_DATETIME": TokenType.CURRENT_DATETIME, 255 "BYTES": TokenType.BINARY, 256 "DECLARE": TokenType.COMMAND, 257 
"FLOAT64": TokenType.DOUBLE, 258 "INT64": TokenType.BIGINT, 259 "RECORD": TokenType.STRUCT, 260 "TIMESTAMP": TokenType.TIMESTAMPTZ, 261 "NOT DETERMINISTIC": TokenType.VOLATILE, 262 "UNKNOWN": TokenType.NULL, 263 } 264 KEYWORDS.pop("DIV") 265 266 class Parser(parser.Parser): 267 PREFIXED_PIVOT_COLUMNS = True 268 269 LOG_BASE_FIRST = False 270 LOG_DEFAULTS_TO_LN = True 271 272 FUNCTIONS = { 273 **parser.Parser.FUNCTIONS, 274 "DATE": _parse_date, 275 "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd), 276 "DATE_SUB": parse_date_delta_with_interval(exp.DateSub), 277 "DATE_TRUNC": lambda args: exp.DateTrunc( 278 unit=exp.Literal.string(str(seq_get(args, 1))), 279 this=seq_get(args, 0), 280 ), 281 "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd), 282 "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub), 283 "DIV": lambda args: exp.IntDiv(this=seq_get(args, 0), expression=seq_get(args, 1)), 284 "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list, 285 "MD5": exp.MD5Digest.from_arg_list, 286 "TO_HEX": _parse_to_hex, 287 "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")( 288 [seq_get(args, 1), seq_get(args, 0)] 289 ), 290 "PARSE_TIMESTAMP": _parse_timestamp, 291 "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list, 292 "REGEXP_EXTRACT": lambda args: exp.RegexpExtract( 293 this=seq_get(args, 0), 294 expression=seq_get(args, 1), 295 position=seq_get(args, 2), 296 occurrence=seq_get(args, 3), 297 group=exp.Literal.number(1) 298 if re.compile(str(seq_get(args, 1))).groups == 1 299 else None, 300 ), 301 "SPLIT": lambda args: exp.Split( 302 # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split 303 this=seq_get(args, 0), 304 expression=seq_get(args, 1) or exp.Literal.string(","), 305 ), 306 "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd), 307 "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub), 308 "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd), 309 "TIMESTAMP_SUB": 
parse_date_delta_with_interval(exp.TimestampSub), 310 "TO_JSON_STRING": exp.JSONFormat.from_arg_list, 311 } 312 313 FUNCTION_PARSERS = { 314 **parser.Parser.FUNCTION_PARSERS, 315 "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]), 316 } 317 FUNCTION_PARSERS.pop("TRIM") 318 319 NO_PAREN_FUNCTIONS = { 320 **parser.Parser.NO_PAREN_FUNCTIONS, 321 TokenType.CURRENT_DATETIME: exp.CurrentDatetime, 322 } 323 324 NESTED_TYPE_TOKENS = { 325 *parser.Parser.NESTED_TYPE_TOKENS, 326 TokenType.TABLE, 327 } 328 329 ID_VAR_TOKENS = { 330 *parser.Parser.ID_VAR_TOKENS, 331 TokenType.VALUES, 332 } 333 334 PROPERTY_PARSERS = { 335 **parser.Parser.PROPERTY_PARSERS, 336 "NOT DETERMINISTIC": lambda self: self.expression( 337 exp.StabilityProperty, this=exp.Literal.string("VOLATILE") 338 ), 339 "OPTIONS": lambda self: self._parse_with_property(), 340 } 341 342 CONSTRAINT_PARSERS = { 343 **parser.Parser.CONSTRAINT_PARSERS, 344 "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()), 345 } 346 347 def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]: 348 this = super()._parse_table_part(schema=schema) 349 350 # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names 351 if isinstance(this, exp.Identifier): 352 table_name = this.name 353 while self._match(TokenType.DASH, advance=False) and self._next: 354 self._advance(2) 355 table_name += f"-{self._prev.text}" 356 357 this = exp.Identifier(this=table_name, quoted=this.args.get("quoted")) 358 359 return this 360 361 def _parse_table_parts(self, schema: bool = False) -> exp.Table: 362 table = super()._parse_table_parts(schema=schema) 363 if isinstance(table.this, exp.Identifier) and "." 
in table.name: 364 catalog, db, this, *rest = ( 365 t.cast(t.Optional[exp.Expression], exp.to_identifier(x)) 366 for x in split_num_words(table.name, ".", 3) 367 ) 368 369 if rest and this: 370 this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest])) 371 372 table = exp.Table(this=this, db=db, catalog=catalog) 373 374 return table 375 376 class Generator(generator.Generator): 377 EXPLICIT_UNION = True 378 INTERVAL_ALLOWS_PLURAL_FORM = False 379 JOIN_HINTS = False 380 QUERY_HINTS = False 381 TABLE_HINTS = False 382 LIMIT_FETCH = "LIMIT" 383 RENAME_TABLE_WITH_DB = False 384 ESCAPE_LINE_BREAK = True 385 386 TRANSFORMS = { 387 **generator.Generator.TRANSFORMS, 388 exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"), 389 exp.ArraySize: rename_func("ARRAY_LENGTH"), 390 exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), 391 exp.Create: _create_sql, 392 exp.CTE: transforms.preprocess([_pushdown_cte_column_names]), 393 exp.DateAdd: _date_add_sql("DATE", "ADD"), 394 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", 395 exp.DateFromParts: rename_func("DATE"), 396 exp.DateStrToDate: datestrtodate_sql, 397 exp.DateSub: _date_add_sql("DATE", "SUB"), 398 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), 399 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), 400 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), 401 exp.GenerateSeries: rename_func("GENERATE_ARRAY"), 402 exp.GroupConcat: rename_func("STRING_AGG"), 403 exp.Hex: rename_func("TO_HEX"), 404 exp.ILike: no_ilike_sql, 405 exp.IntDiv: rename_func("DIV"), 406 exp.JSONFormat: rename_func("TO_JSON_STRING"), 407 exp.Max: max_or_greatest, 408 exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)), 409 exp.MD5Digest: rename_func("MD5"), 410 exp.Min: min_or_least, 411 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", 412 
exp.RegexpExtract: lambda self, e: self.func( 413 "REGEXP_EXTRACT", 414 e.this, 415 e.expression, 416 e.args.get("position"), 417 e.args.get("occurrence"), 418 ), 419 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), 420 exp.ReturnsProperty: _returnsproperty_sql, 421 exp.Select: transforms.preprocess( 422 [ 423 transforms.explode_to_unnest, 424 _unqualify_unnest, 425 transforms.eliminate_distinct_on, 426 _alias_ordered_group, 427 ] 428 ), 429 exp.StabilityProperty: lambda self, e: f"DETERMINISTIC" 430 if e.name == "IMMUTABLE" 431 else "NOT DETERMINISTIC", 432 exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})", 433 exp.StrToTime: lambda self, e: self.func( 434 "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone") 435 ), 436 exp.TimeAdd: _date_add_sql("TIME", "ADD"), 437 exp.TimeSub: _date_add_sql("TIME", "SUB"), 438 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), 439 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), 440 exp.TimeStrToTime: timestrtotime_sql, 441 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), 442 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), 443 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), 444 exp.Unhex: rename_func("FROM_HEX"), 445 exp.Values: _derived_table_values_to_unnest, 446 exp.VariancePop: rename_func("VAR_POP"), 447 } 448 449 TYPE_MAPPING = { 450 **generator.Generator.TYPE_MAPPING, 451 exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC", 452 exp.DataType.Type.BIGINT: "INT64", 453 exp.DataType.Type.BINARY: "BYTES", 454 exp.DataType.Type.BOOLEAN: "BOOL", 455 exp.DataType.Type.CHAR: "STRING", 456 exp.DataType.Type.DECIMAL: "NUMERIC", 457 exp.DataType.Type.DOUBLE: "FLOAT64", 458 exp.DataType.Type.FLOAT: "FLOAT64", 459 exp.DataType.Type.INT: "INT64", 460 exp.DataType.Type.NCHAR: "STRING", 461 exp.DataType.Type.NVARCHAR: "STRING", 462 exp.DataType.Type.SMALLINT: "INT64", 463 exp.DataType.Type.TEXT: "STRING", 464 exp.DataType.Type.TIMESTAMP: "DATETIME", 465 
exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", 466 exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP", 467 exp.DataType.Type.TINYINT: "INT64", 468 exp.DataType.Type.VARBINARY: "BYTES", 469 exp.DataType.Type.VARCHAR: "STRING", 470 exp.DataType.Type.VARIANT: "ANY TYPE", 471 } 472 473 PROPERTIES_LOCATION = { 474 **generator.Generator.PROPERTIES_LOCATION, 475 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, 476 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 477 } 478 479 # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords 480 RESERVED_KEYWORDS = { 481 *generator.Generator.RESERVED_KEYWORDS, 482 "all", 483 "and", 484 "any", 485 "array", 486 "as", 487 "asc", 488 "assert_rows_modified", 489 "at", 490 "between", 491 "by", 492 "case", 493 "cast", 494 "collate", 495 "contains", 496 "create", 497 "cross", 498 "cube", 499 "current", 500 "default", 501 "define", 502 "desc", 503 "distinct", 504 "else", 505 "end", 506 "enum", 507 "escape", 508 "except", 509 "exclude", 510 "exists", 511 "extract", 512 "false", 513 "fetch", 514 "following", 515 "for", 516 "from", 517 "full", 518 "group", 519 "grouping", 520 "groups", 521 "hash", 522 "having", 523 "if", 524 "ignore", 525 "in", 526 "inner", 527 "intersect", 528 "interval", 529 "into", 530 "is", 531 "join", 532 "lateral", 533 "left", 534 "like", 535 "limit", 536 "lookup", 537 "merge", 538 "natural", 539 "new", 540 "no", 541 "not", 542 "null", 543 "nulls", 544 "of", 545 "on", 546 "or", 547 "order", 548 "outer", 549 "over", 550 "partition", 551 "preceding", 552 "proto", 553 "qualify", 554 "range", 555 "recursive", 556 "respect", 557 "right", 558 "rollup", 559 "rows", 560 "select", 561 "set", 562 "some", 563 "struct", 564 "tablesample", 565 "then", 566 "to", 567 "treat", 568 "true", 569 "unbounded", 570 "union", 571 "unnest", 572 "using", 573 "when", 574 "where", 575 "window", 576 "with", 577 "within", 578 } 579 580 def attimezone_sql(self, expression: exp.AtTimeZone) -> str: 
581 parent = expression.parent 582 583 # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]). 584 # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included. 585 if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"): 586 return self.func( 587 "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone")) 588 ) 589 590 return super().attimezone_sql(expression) 591 592 def trycast_sql(self, expression: exp.TryCast) -> str: 593 return self.cast_sql(expression, safe_prefix="SAFE_") 594 595 def cte_sql(self, expression: exp.CTE) -> str: 596 if expression.alias_column_names: 597 self.unsupported("Column names in CTE definition are not supported.") 598 return super().cte_sql(expression) 599 600 def array_sql(self, expression: exp.Array) -> str: 601 first_arg = seq_get(expression.expressions, 0) 602 if isinstance(first_arg, exp.Subqueryable): 603 return f"ARRAY{self.wrap(self.sql(first_arg))}" 604 605 return inline_array_sql(self, expression) 606 607 def transaction_sql(self, *_) -> str: 608 return "BEGIN TRANSACTION" 609 610 def commit_sql(self, *_) -> str: 611 return "COMMIT TRANSACTION" 612 613 def rollback_sql(self, *_) -> str: 614 return "ROLLBACK TRANSACTION" 615 616 def in_unnest_op(self, expression: exp.Unnest) -> str: 617 return self.sql(expression) 618 619 def except_op(self, expression: exp.Except) -> str: 620 if not expression.args.get("distinct", False): 621 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") 622 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" 623 624 def intersect_op(self, expression: exp.Intersect) -> str: 625 if not expression.args.get("distinct", False): 626 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") 627 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" 628 629 def with_properties(self, properties: exp.Properties) -> str: 630 return 
self.properties(properties, prefix=self.seg("OPTIONS"))
FORMAT_MAPPING: Dict[str, str] =
{'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
@classmethod
def normalize_identifier(cls, expression: E) -> E:
    """
    Normalizes an unquoted identifier to either lower or upper case, thus
    essentially making it case-insensitive. If a dialect treats all identifiers
    as case-insensitive, they will be normalized regardless of being quoted or not.
    """
    # In BigQuery, CTEs aren't case-sensitive, but table names are (by default,
    # at least). The checks below are a heuristic: identifiers that look like
    # table references keep their original casing.
    if not isinstance(expression, exp.Identifier):
        return expression

    parent = expression.parent
    while isinstance(parent, exp.Dot):
        parent = parent.parent

    is_udf = isinstance(parent, exp.UserDefinedFunction)
    is_qualified_table = isinstance(parent, exp.Table) and parent.db
    if not is_udf and not is_qualified_table and not expression.meta.get("is_table"):
        expression.set("this", expression.this.lower())

    return expression
tokenizer_class =
<class 'sqlglot.dialects.bigquery.BigQuery.Tokenizer'>
parser_class =
<class 'sqlglot.dialects.bigquery.BigQuery.Parser'>
generator_class =
<class 'sqlglot.dialects.bigquery.BigQuery.Generator'>
FORMAT_TRIE: Dict =
{'D': {'D': {0: True}}, 'M': {'M': {0: True}, 'O': {'N': {0: True, 'T': {'H': {0: True}}}}, 'I': {0: True}}, 'Y': {'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'H': {'H': {0: True, '1': {'2': {0: True}}, '2': {'4': {0: True}}}}, 'S': {'S': {0: True, 'S': {'S': {'S': {0: True}}}}}, 'T': {'Z': {'H': {0: True}}}}
Inherited Members
- sqlglot.dialects.dialect.Dialect
- INDEX_OFFSET
- ALIAS_POST_TABLESAMPLE
- IDENTIFIERS_CAN_START_WITH_DIGIT
- STRICT_STRING_CONCAT
- NULL_ORDERING
- DATE_FORMAT
- DATEINT_FORMAT
- TIME_FORMAT
- get_or_raise
- format_time
- case_sensitive
- can_identify
- quote_identifier
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- parser
- generator
class Tokenizer(tokens.Tokenizer):
    """Lexer settings for BigQuery's SQL flavor."""

    QUOTES = ["'", '"', '"""', "'''"]
    COMMENTS = ["--", "#", ("/*", "*/")]
    IDENTIFIERS = ["`"]
    STRING_ESCAPES = ["\\"]

    HEX_STRINGS = [("0x", ""), ("0X", "")]

    # b'...' / B"..." byte literals — one entry per quote style above.
    BYTE_STRINGS = [
        (prefix + quote, quote) for quote in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
    ]

    # r'...' / R"..." raw string literals.
    RAW_STRINGS = [
        (prefix + quote, quote) for quote in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
    ]

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "ANY TYPE": TokenType.VARIANT,
        "BEGIN": TokenType.COMMAND,
        "BEGIN TRANSACTION": TokenType.BEGIN,
        "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
        "BYTES": TokenType.BINARY,
        "DECLARE": TokenType.COMMAND,
        "FLOAT64": TokenType.DOUBLE,
        "INT64": TokenType.BIGINT,
        "RECORD": TokenType.STRUCT,
        "TIMESTAMP": TokenType.TIMESTAMPTZ,
        "NOT DETERMINISTIC": TokenType.VOLATILE,
        "UNKNOWN": TokenType.NULL,
    }
    # BigQuery exposes DIV() as a function, not a keyword/operator.
    KEYWORDS.pop("DIV")
BYTE_STRINGS =
[("b'", "'"), ("B'", "'"), ('b"', '"'), ('B"', '"'), ('b"""', '"""'), ('B"""', '"""'), ("b'''", "'''"), ("B'''", "'''")]
RAW_STRINGS =
[("r'", "'"), ("R'", "'"), ('r"', '"'), ('R"', '"'), ('r"""', '"""'), ('R"""', '"""'), ("r'''", "'''"), ("R'''", "'''")]
KEYWORDS =
{'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 
'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': <TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'IF': <TokenType.IF: 'IF'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 
'KEEP'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NEXT VALUE FOR': <TokenType.NEXT_VALUE_FOR: 'NEXT_VALUE_FOR'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 
'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VALUES': <TokenType.VALUES: 'VALUES'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': <TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.BIGINT: 'BIGINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': 
<TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMESTAMP': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': 
<TokenType.STRUCT: 'STRUCT'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'TRUNCATE': <TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'ANY TYPE': <TokenType.VARIANT: 'VARIANT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'CURRENT_DATETIME': <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, 'BYTES': <TokenType.BINARY: 'BINARY'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'FLOAT64': <TokenType.DOUBLE: 'DOUBLE'>, 'INT64': <TokenType.BIGINT: 'BIGINT'>, 'RECORD': <TokenType.STRUCT: 'STRUCT'>, 'NOT DETERMINISTIC': <TokenType.VOLATILE: 'VOLATILE'>, 'UNKNOWN': <TokenType.NULL: 'NULL'>}
class Parser(parser.Parser):
    """BigQuery-specific parser settings and overrides."""

    PREFIXED_PIVOT_COLUMNS = True

    LOG_BASE_FIRST = False
    LOG_DEFAULTS_TO_LN = True

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        "DATE": _parse_date,
        "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
        "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
        "DATE_TRUNC": lambda args: exp.DateTrunc(
            unit=exp.Literal.string(str(seq_get(args, 1))),
            this=seq_get(args, 0),
        ),
        "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
        "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
        "DIV": lambda args: exp.IntDiv(this=seq_get(args, 0), expression=seq_get(args, 1)),
        "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
        "MD5": exp.MD5Digest.from_arg_list,
        "TO_HEX": _parse_to_hex,
        "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")(
            [seq_get(args, 1), seq_get(args, 0)]
        ),
        "PARSE_TIMESTAMP": _parse_timestamp,
        "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
        "REGEXP_EXTRACT": lambda args: exp.RegexpExtract(
            this=seq_get(args, 0),
            expression=seq_get(args, 1),
            position=seq_get(args, 2),
            occurrence=seq_get(args, 3),
            # Single-group patterns implicitly extract group 1.
            group=exp.Literal.number(1)
            if re.compile(str(seq_get(args, 1))).groups == 1
            else None,
        ),
        "SPLIT": lambda args: exp.Split(
            # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
            this=seq_get(args, 0),
            expression=seq_get(args, 1) or exp.Literal.string(","),
        ),
        "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
        "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
        "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
        "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
        "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
    }

    FUNCTION_PARSERS = {
        **parser.Parser.FUNCTION_PARSERS,
        "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
    }
    FUNCTION_PARSERS.pop("TRIM")

    NO_PAREN_FUNCTIONS = {
        **parser.Parser.NO_PAREN_FUNCTIONS,
        TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
    }

    NESTED_TYPE_TOKENS = {
        *parser.Parser.NESTED_TYPE_TOKENS,
        TokenType.TABLE,
    }

    ID_VAR_TOKENS = {
        *parser.Parser.ID_VAR_TOKENS,
        TokenType.VALUES,
    }

    PROPERTY_PARSERS = {
        **parser.Parser.PROPERTY_PARSERS,
        "NOT DETERMINISTIC": lambda self: self.expression(
            exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
        ),
        "OPTIONS": lambda self: self._parse_with_property(),
    }

    CONSTRAINT_PARSERS = {
        **parser.Parser.CONSTRAINT_PARSERS,
        "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
    }

    def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
        """Parse one table-name part, gluing dash-separated pieces together."""
        this = super()._parse_table_part(schema=schema)

        # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
        if isinstance(this, exp.Identifier):
            table_name = this.name
            while self._match(TokenType.DASH, advance=False) and self._next:
                self._advance(2)
                table_name += f"-{self._prev.text}"

            this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))

        return this

    def _parse_table_parts(self, schema: bool = False) -> exp.Table:
        """Re-split a dotted single identifier into catalog / db / table parts."""
        table = super()._parse_table_parts(schema=schema)
        if isinstance(table.this, exp.Identifier) and "." in table.name:
            catalog, db, this, *rest = (
                t.cast(t.Optional[exp.Expression], exp.to_identifier(x))
                for x in split_num_words(table.name, ".", 3)
            )

            # More than three parts: the trailing ones become a Dot chain.
            if rest and this:
                this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))

            table = exp.Table(this=this, db=db, catalog=catalog)

        return table
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: Determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
FUNCTIONS =
{'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayJoin'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.CastToStrType'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'COALESCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'IFNULL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'NVL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Concat'>>, 'CONCAT_WS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ConcatWs'>>, 'COUNT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Count'>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <function _parse_date>, 'DATE_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATEDIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 
'DATE_TRUNC': <function BigQuery.Parser.<lambda>>, 'DATETIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Extract'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'GREATEST': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hex'>>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Initcap'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'JSON_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtract'>>, 'JSON_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtractScalar'>>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'LAST_DATE_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDateOfMonth'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Left'>>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log'>>, 'LOG10': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log10'>>, 'LOG2': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Log2'>>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.OpenJSON'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParameterizedAgg'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Posexplode'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <function BigQuery.Parser.<lambda>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Right'>>, 'ROUND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeConcat'>>, 'SAFE_DIVIDE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeDivide'>>, 'SET_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SetAgg'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.SortArray'>>, 'SPLIT': <function BigQuery.Parser.<lambda>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'SUBSTRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeDiff'>>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': 
<function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TRIM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTimeStr'>>, 'UPPER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'VAR_MAP': <function parse_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'GLOB': <function Parser.<lambda>>, 'LIKE': <function parse_like>, 'DIV': <function BigQuery.Parser.<lambda>>, 'GENERATE_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'TO_HEX': <function _parse_to_hex>, 'PARSE_DATE': <function BigQuery.Parser.<lambda>>, 'PARSE_TIMESTAMP': <function _parse_timestamp>, 'REGEXP_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'TO_JSON_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>}
FUNCTION_PARSERS =
{'ANY_VALUE': <function Parser.<lambda>>, 'CAST': <function Parser.<lambda>>, 'CONCAT': <function Parser.<lambda>>, 'CONVERT': <function Parser.<lambda>>, 'DECODE': <function Parser.<lambda>>, 'EXTRACT': <function Parser.<lambda>>, 'JSON_OBJECT': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATCH': <function Parser.<lambda>>, 'OPENJSON': <function Parser.<lambda>>, 'POSITION': <function Parser.<lambda>>, 'SAFE_CAST': <function Parser.<lambda>>, 'STRING_AGG': <function Parser.<lambda>>, 'SUBSTRING': <function Parser.<lambda>>, 'TRY_CAST': <function Parser.<lambda>>, 'TRY_CONVERT': <function Parser.<lambda>>, 'ARRAY': <function BigQuery.Parser.<lambda>>}
NO_PAREN_FUNCTIONS =
{<TokenType.CURRENT_DATE: 'CURRENT_DATE'>: <class 'sqlglot.expressions.CurrentDate'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>: <class 'sqlglot.expressions.CurrentDatetime'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>: <class 'sqlglot.expressions.CurrentTime'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>: <class 'sqlglot.expressions.CurrentTimestamp'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>: <class 'sqlglot.expressions.CurrentUser'>}
NESTED_TYPE_TOKENS =
{<TokenType.MAP: 'MAP'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.NULLABLE: 'NULLABLE'>}
ID_VAR_TOKENS =
{<TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.JSON: 'JSON'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.ALL: 'ALL'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.SHOW: 'SHOW'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.SOME: 'SOME'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.ANTI: 'ANTI'>, <TokenType.CACHE: 'CACHE'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.FALSE: 'FALSE'>, <TokenType.NEXT: 'NEXT'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.INET: 'INET'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.TIME: 'TIME'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.IS: 'IS'>, <TokenType.DESC: 'DESC'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.FULL: 'FULL'>, <TokenType.INT: 'INT'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.IF: 'IF'>, <TokenType.UINT256: 'UINT256'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.TEXT: 'TEXT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.ROW: 'ROW'>, <TokenType.INT256: 'INT256'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, 
<TokenType.OFFSET: 'OFFSET'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.VAR: 'VAR'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.TABLE: 'TABLE'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.LEFT: 'LEFT'>, <TokenType.LOAD: 'LOAD'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.INT128: 'INT128'>, <TokenType.MONEY: 'MONEY'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.XML: 'XML'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.APPLY: 'APPLY'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.VIEW: 'VIEW'>, <TokenType.BINARY: 'BINARY'>, <TokenType.TOP: 'TOP'>, <TokenType.TRUE: 'TRUE'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.CHAR: 'CHAR'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.JSONB: 'JSONB'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.DATE: 'DATE'>, <TokenType.INDEX: 'INDEX'>, <TokenType.ANY: 'ANY'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.ENUM: 'ENUM'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.CASE: 'CASE'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.DIV: 'DIV'>, <TokenType.UINT: 'UINT'>, <TokenType.BIT: 'BIT'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.FILTER: 'FILTER'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.ASC: 'ASC'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.NUMRANGE: 'NUMRANGE'>, 
<TokenType.FIRST: 'FIRST'>, <TokenType.SET: 'SET'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.KEEP: 'KEEP'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.UUID: 'UUID'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.END: 'END'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.ROWS: 'ROWS'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.MAP: 'MAP'>, <TokenType.MERGE: 'MERGE'>, <TokenType.VALUES: 'VALUES'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.SEMI: 'SEMI'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.SUPER: 'SUPER'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>}
PROPERTY_PARSERS =
{'ALGORITHM': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'BLOCKCOMPRESSION': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECKSUM': <function Parser.<lambda>>, 'CLUSTER BY': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COPY': <function Parser.<lambda>>, 'DATABLOCKSIZE': <function Parser.<lambda>>, 'DEFINER': <function Parser.<lambda>>, 'DETERMINISTIC': <function Parser.<lambda>>, 'DISTKEY': <function Parser.<lambda>>, 'DISTSTYLE': <function Parser.<lambda>>, 'ENGINE': <function Parser.<lambda>>, 'EXECUTE': <function Parser.<lambda>>, 'EXTERNAL': <function Parser.<lambda>>, 'FALLBACK': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'FREESPACE': <function Parser.<lambda>>, 'IMMUTABLE': <function Parser.<lambda>>, 'JOURNAL': <function Parser.<lambda>>, 'LANGUAGE': <function Parser.<lambda>>, 'LAYOUT': <function Parser.<lambda>>, 'LIFETIME': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'LOCATION': <function Parser.<lambda>>, 'LOCK': <function Parser.<lambda>>, 'LOCKING': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATERIALIZED': <function Parser.<lambda>>, 'MERGEBLOCKRATIO': <function Parser.<lambda>>, 'MULTISET': <function Parser.<lambda>>, 'NO': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'ORDER BY': <function Parser.<lambda>>, 'PARTITION BY': <function Parser.<lambda>>, 'PARTITIONED BY': <function Parser.<lambda>>, 'PARTITIONED_BY': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'RANGE': <function Parser.<lambda>>, 'RETURNS': <function Parser.<lambda>>, 'ROW': <function Parser.<lambda>>, 'ROW_FORMAT': <function Parser.<lambda>>, 'SET': <function Parser.<lambda>>, 'SETTINGS': <function Parser.<lambda>>, 'SORTKEY': <function Parser.<lambda>>, 'SOURCE': <function Parser.<lambda>>, 'STABLE': <function 
Parser.<lambda>>, 'STORED': <function Parser.<lambda>>, 'TBLPROPERTIES': <function Parser.<lambda>>, 'TEMP': <function Parser.<lambda>>, 'TEMPORARY': <function Parser.<lambda>>, 'TO': <function Parser.<lambda>>, 'TRANSIENT': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'USING': <function Parser.<lambda>>, 'VOLATILE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'NOT DETERMINISTIC': <function BigQuery.Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
CONSTRAINT_PARSERS =
{'AUTOINCREMENT': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'CASESPECIFIC': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECK': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COMPRESS': <function Parser.<lambda>>, 'DEFAULT': <function Parser.<lambda>>, 'ENCODE': <function Parser.<lambda>>, 'FOREIGN KEY': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'GENERATED': <function Parser.<lambda>>, 'IDENTITY': <function Parser.<lambda>>, 'INLINE': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'NOT': <function Parser.<lambda>>, 'NULL': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'PATH': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'REFERENCES': <function Parser.<lambda>>, 'TITLE': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'UNIQUE': <function Parser.<lambda>>, 'UPPERCASE': <function Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
SET_TRIE: Dict =
{'GLOBAL': {0: True}, 'LOCAL': {0: True}, 'SESSION': {0: True}, 'TRANSACTION': {0: True}}
FORMAT_MAPPING: Dict[str, str] =
{'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
FORMAT_TRIE: Dict =
{'D': {'D': {0: True}}, 'M': {'M': {0: True}, 'O': {'N': {0: True, 'T': {'H': {0: True}}}}, 'I': {0: True}}, 'Y': {'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'H': {'H': {0: True, '1': {'2': {0: True}}, '2': {'4': {0: True}}}}, 'S': {'S': {0: True, 'S': {'S': {'S': {0: True}}}}}, 'T': {'Z': {'H': {0: True}}}}
Inherited Members
- sqlglot.parser.Parser
- Parser
- ENUM_TYPE_TOKENS
- TYPE_TOKENS
- SUBQUERY_PREDICATES
- RESERVED_KEYWORDS
- DB_CREATABLES
- CREATABLES
- INTERVAL_VARS
- TABLE_ALIAS_TOKENS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- JOIN_HINTS
- LAMBDAS
- COLUMN_OPERATORS
- EXPRESSION_PARSERS
- STATEMENT_PARSERS
- UNARY_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- RANGE_PARSERS
- ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- NO_PAREN_FUNCTION_PARSERS
- FUNCTIONS_WITH_ALIASED_ARGS
- QUERY_MODIFIER_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- MODIFIABLES
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- INSERT_ALTERNATIVES
- CLONE_KINDS
- TABLE_INDEX_HINT_TOKENS
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- ADD_CONSTRAINT_TOKENS
- STRICT_CAST
- CONCAT_NULL_OUTPUTS_STRING
- IDENTIFY_PIVOT_STRINGS
- INDEX_OFFSET
- ALIAS_POST_TABLESAMPLE
- STRICT_STRING_CONCAT
- NULL_ORDERING
- error_level
- error_message_context
- max_errors
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
class Generator(generator.Generator):
    """Generates BigQuery (GoogleSQL) from a sqlglot syntax tree.

    Overrides the base generator with BigQuery-specific function names,
    type names, reserved keywords, and statement forms (e.g. set-operation
    modifiers, transaction syntax, and ``OPTIONS(...)`` properties).
    """

    EXPLICIT_UNION = True
    INTERVAL_ALLOWS_PLURAL_FORM = False
    JOIN_HINTS = False
    QUERY_HINTS = False
    TABLE_HINTS = False
    LIMIT_FETCH = "LIMIT"
    RENAME_TABLE_WITH_DB = False
    ESCAPE_LINE_BREAK = True

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
        exp.ArraySize: rename_func("ARRAY_LENGTH"),
        # BigQuery parameterized types (e.g. NUMERIC(10, 2)) are not valid in CAST targets.
        exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
        exp.Create: _create_sql,
        exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
        exp.DateAdd: _date_add_sql("DATE", "ADD"),
        exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
        exp.DateFromParts: rename_func("DATE"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.DateSub: _date_add_sql("DATE", "SUB"),
        exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
        exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
        exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
        exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
        exp.GroupConcat: rename_func("STRING_AGG"),
        exp.Hex: rename_func("TO_HEX"),
        exp.ILike: no_ilike_sql,
        exp.IntDiv: rename_func("DIV"),
        exp.JSONFormat: rename_func("TO_JSON_STRING"),
        exp.Max: max_or_greatest,
        # BigQuery's MD5 returns BYTES; wrap in TO_HEX to get the usual hex string.
        exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
        exp.MD5Digest: rename_func("MD5"),
        exp.Min: min_or_least,
        exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
        exp.RegexpExtract: lambda self, e: self.func(
            "REGEXP_EXTRACT",
            e.this,
            e.expression,
            e.args.get("position"),
            e.args.get("occurrence"),
        ),
        exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
        exp.ReturnsProperty: _returnsproperty_sql,
        exp.Select: transforms.preprocess(
            [
                transforms.explode_to_unnest,
                _unqualify_unnest,
                transforms.eliminate_distinct_on,
                _alias_ordered_group,
            ]
        ),
        exp.StabilityProperty: lambda self, e: "DETERMINISTIC"
        if e.name == "IMMUTABLE"
        else "NOT DETERMINISTIC",
        exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
        exp.StrToTime: lambda self, e: self.func(
            "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
        ),
        exp.TimeAdd: _date_add_sql("TIME", "ADD"),
        exp.TimeSub: _date_add_sql("TIME", "SUB"),
        exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
        exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
        exp.TimeStrToTime: timestrtotime_sql,
        exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
        exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"),
        exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
        exp.Unhex: rename_func("FROM_HEX"),
        exp.Values: _derived_table_values_to_unnest,
        exp.VariancePop: rename_func("VAR_POP"),
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
        exp.DataType.Type.BIGINT: "INT64",
        exp.DataType.Type.BINARY: "BYTES",
        exp.DataType.Type.BOOLEAN: "BOOL",
        exp.DataType.Type.CHAR: "STRING",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT64",
        exp.DataType.Type.FLOAT: "FLOAT64",
        exp.DataType.Type.INT: "INT64",
        exp.DataType.Type.NCHAR: "STRING",
        exp.DataType.Type.NVARCHAR: "STRING",
        exp.DataType.Type.SMALLINT: "INT64",
        exp.DataType.Type.TEXT: "STRING",
        exp.DataType.Type.TIMESTAMP: "DATETIME",
        exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
        exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
        exp.DataType.Type.TINYINT: "INT64",
        exp.DataType.Type.VARBINARY: "BYTES",
        exp.DataType.Type.VARCHAR: "STRING",
        exp.DataType.Type.VARIANT: "ANY TYPE",
    }

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
    RESERVED_KEYWORDS = {
        *generator.Generator.RESERVED_KEYWORDS,
        "all",
        "and",
        "any",
        "array",
        "as",
        "asc",
        "assert_rows_modified",
        "at",
        "between",
        "by",
        "case",
        "cast",
        "collate",
        "contains",
        "create",
        "cross",
        "cube",
        "current",
        "default",
        "define",
        "desc",
        "distinct",
        "else",
        "end",
        "enum",
        "escape",
        "except",
        "exclude",
        "exists",
        "extract",
        "false",
        "fetch",
        "following",
        "for",
        "from",
        "full",
        "group",
        "grouping",
        "groups",
        "hash",
        "having",
        "if",
        "ignore",
        "in",
        "inner",
        "intersect",
        "interval",
        "into",
        "is",
        "join",
        "lateral",
        "left",
        "like",
        "limit",
        "lookup",
        "merge",
        "natural",
        "new",
        "no",
        "not",
        "null",
        "nulls",
        "of",
        "on",
        "or",
        "order",
        "outer",
        "over",
        "partition",
        "preceding",
        "proto",
        "qualify",
        "range",
        "recursive",
        "respect",
        "right",
        "rollup",
        "rows",
        "select",
        "set",
        "some",
        "struct",
        "tablesample",
        "then",
        "to",
        "treat",
        "true",
        "unbounded",
        "union",
        "unnest",
        "using",
        "when",
        "where",
        "window",
        "with",
        "within",
    }

    def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
        """Render AT TIME ZONE, converting to TIMESTAMP(DATETIME(..)) where needed."""
        parent = expression.parent

        # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
        # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
        if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
            return self.func(
                "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
            )

        return super().attimezone_sql(expression)

    def trycast_sql(self, expression: exp.TryCast) -> str:
        """TRY_CAST is spelled SAFE_CAST in BigQuery."""
        return self.cast_sql(expression, safe_prefix="SAFE_")

    def cte_sql(self, expression: exp.CTE) -> str:
        """Render a CTE, warning that BigQuery has no CTE column-name list."""
        if expression.alias_column_names:
            self.unsupported("Column names in CTE definition are not supported.")
        return super().cte_sql(expression)

    def array_sql(self, expression: exp.Array) -> str:
        """Render an array; a subquery argument uses ARRAY(<query>) form."""
        first_arg = seq_get(expression.expressions, 0)
        if isinstance(first_arg, exp.Subqueryable):
            return f"ARRAY{self.wrap(self.sql(first_arg))}"

        return inline_array_sql(self, expression)

    def transaction_sql(self, *_) -> str:
        return "BEGIN TRANSACTION"

    def commit_sql(self, *_) -> str:
        return "COMMIT TRANSACTION"

    def rollback_sql(self, *_) -> str:
        return "ROLLBACK TRANSACTION"

    def in_unnest_op(self, expression: exp.Unnest) -> str:
        # BigQuery's `x IN UNNEST(...)` takes the unnest operand unparenthesized.
        return self.sql(expression)

    def except_op(self, expression: exp.Except) -> str:
        """BigQuery requires an explicit DISTINCT/ALL modifier on EXCEPT."""
        distinct = expression.args.get("distinct", False)
        if not distinct:
            self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
        return f"EXCEPT{' DISTINCT' if distinct else ' ALL'}"

    def intersect_op(self, expression: exp.Intersect) -> str:
        """BigQuery requires an explicit DISTINCT/ALL modifier on INTERSECT."""
        distinct = expression.args.get("distinct", False)
        if not distinct:
            self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
        return f"INTERSECT{' DISTINCT' if distinct else ' ALL'}"

    def with_properties(self, properties: exp.Properties) -> str:
        """Table/column properties are emitted as an OPTIONS(...) clause."""
        return self.properties(properties, prefix=self.seg("OPTIONS"))
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether or not to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether or not to normalize identifiers to lowercase. Default: False.
- pad: Determines the pad size in a formatted string. Default: 2.
- indent: Determines the indentation size in a formatted string. Default: 2.
- normalize_functions: Whether or not to normalize all function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default: ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3.
- leading_comma: Determines whether or not the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
TRANSFORMS =
{<class 'sqlglot.expressions.DateAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function 
_returnsproperty_sql>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ApproxDistinct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ArraySize'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Cast'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.Create'>: <function _create_sql>, <class 'sqlglot.expressions.CTE'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.DateDiff'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.DateFromParts'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.DateSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.DateTrunc'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.GenerateSeries'>: <function 
rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.GroupConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Hex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ILike'>: <function no_ilike_sql>, <class 'sqlglot.expressions.IntDiv'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONFormat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.MD5'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.MD5Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.PartitionedByProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpExtract'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpLike'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.StrToDate'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToTime'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TimeAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimeSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.Trim'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsToDate'>: <function ts_or_ds_to_date_sql.<locals>._ts_or_ds_to_date_sql>, <class 'sqlglot.expressions.Unhex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Values'>: <function _derived_table_values_to_unnest>, <class 'sqlglot.expressions.VariancePop'>: 
<function rename_func.<locals>.<lambda>>}
TYPE_MAPPING =
{<Type.NCHAR: 'NCHAR'>: 'STRING', <Type.NVARCHAR: 'NVARCHAR'>: 'STRING', <Type.MEDIUMTEXT: 'MEDIUMTEXT'>: 'TEXT', <Type.LONGTEXT: 'LONGTEXT'>: 'TEXT', <Type.MEDIUMBLOB: 'MEDIUMBLOB'>: 'BLOB', <Type.LONGBLOB: 'LONGBLOB'>: 'BLOB', <Type.INET: 'INET'>: 'INET', <Type.BIGDECIMAL: 'BIGDECIMAL'>: 'BIGNUMERIC', <Type.BIGINT: 'BIGINT'>: 'INT64', <Type.BINARY: 'BINARY'>: 'BYTES', <Type.BOOLEAN: 'BOOLEAN'>: 'BOOL', <Type.CHAR: 'CHAR'>: 'STRING', <Type.DECIMAL: 'DECIMAL'>: 'NUMERIC', <Type.DOUBLE: 'DOUBLE'>: 'FLOAT64', <Type.FLOAT: 'FLOAT'>: 'FLOAT64', <Type.INT: 'INT'>: 'INT64', <Type.SMALLINT: 'SMALLINT'>: 'INT64', <Type.TEXT: 'TEXT'>: 'STRING', <Type.TIMESTAMP: 'TIMESTAMP'>: 'DATETIME', <Type.TIMESTAMPTZ: 'TIMESTAMPTZ'>: 'TIMESTAMP', <Type.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>: 'TIMESTAMP', <Type.TINYINT: 'TINYINT'>: 'INT64', <Type.VARBINARY: 'VARBINARY'>: 'BYTES', <Type.VARCHAR: 'VARCHAR'>: 'STRING', <Type.VARIANT: 'VARIANT'>: 'ANY TYPE'}
PROPERTIES_LOCATION =
{<class 'sqlglot.expressions.AlgorithmProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.AutoIncrementProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.BlockCompressionProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CharacterSetProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ChecksumProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CollateProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Cluster'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ClusteredByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DataBlocksizeProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.DefinerProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.DictRange'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DictProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistStyleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.EngineProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExternalProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.FallbackProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.FileFormatProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.FreespaceProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.IsolatedLoadingProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.JournalProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 
'sqlglot.expressions.LanguageProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LikeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LocationProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LockingProperty'>: <Location.POST_ALIAS: 'POST_ALIAS'>, <class 'sqlglot.expressions.LogProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.MaterializedProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeBlockRatioProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.OnCommitProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.Order'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PrimaryKey'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Property'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.ReturnsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatDelimitedProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatSerdeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SchemaCommentProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SerdeProperties'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Set'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SettingsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SetProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.SortKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.SqlSecurityProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.StabilityProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TemporaryProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.ToTableProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TransientProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeTreeTTL'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.VolatileProperty'>: <Location.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.WithDataProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <Location.POST_NAME: 'POST_NAME'>}
RESERVED_KEYWORDS =
{'exists', 'into', 'by', 'at', 'following', 'lookup', 'with', 'full', 'join', 'escape', 'if', 'except', 'exclude', 'qualify', 'is', 'from', 'order', 'as', 'like', 'partition', 'create', 'asc', 'between', 'enum', 'ignore', 'using', 'then', 'else', 'right', 'all', 'some', 'recursive', 'having', 'nulls', 'over', 'rows', 'in', 'when', 'for', 'tablesample', 'null', 'groups', 'select', 'struct', 'left', 'union', 'distinct', 'to', 'within', 'desc', 'intersect', 'current', 'cast', 'on', 'not', 'unnest', 'of', 'contains', 'outer', 'interval', 'and', 'window', 'assert_rows_modified', 'collate', 'cross', 'any', 'default', 'merge', 'or', 'cube', 'range', 'inner', 'natural', 'treat', 'no', 'rollup', 'true', 'group', 'fetch', 'extract', 'hash', 'end', 'lateral', 'respect', 'set', 'unbounded', 'new', 'limit', 'array', 'false', 'where', 'proto', 'case', 'grouping', 'define', 'preceding'}
    def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
        parent = expression.parent

        # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
        # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
        if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
            return self.func(
                "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
            )

        return super().attimezone_sql(expression)
@classmethod
def
can_identify(text: str, identify: str | bool = 'safe') -> bool:
    @classmethod
    def can_identify(cls, text: str, identify: str | bool = "safe") -> bool:
        """Checks if text can be identified given an identify option.

        Args:
            text: The text to check.
            identify:
                "always" or `True`: Always returns true.
                "safe": True if the identifier is case-insensitive.

        Returns:
            Whether or not the given text can be identified.
        """
        if identify is True or identify == "always":
            return True

        if identify == "safe":
            return not cls.case_sensitive(text)

        return False
Checks if text can be identified given an identify option.
Arguments:
- text: The text to check.
- identify: "always" or `True`: Always returns true. "safe": True if the identifier is case-insensitive.
Returns:
Whether or not the given text can be identified.
Inherited Members
- sqlglot.generator.Generator
- Generator
- NULL_ORDERING_SUPPORTED
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- TABLESAMPLE_WITH_METHOD
- TABLESAMPLE_SIZE_IS_PERCENT
- GROUPINGS_SEP
- INDEX_ON
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- LIMIT_IS_TOP
- RETURNING_END
- COLUMN_JOIN_MARKS_SUPPORTED
- SELECT_KINDS
- STAR_MAPPING
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- WITH_SEPARATED_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- SENTINEL_LINE_BREAK
- INDEX_OFFSET
- ALIAS_POST_TABLESAMPLE
- IDENTIFIERS_CAN_START_WITH_DIGIT
- STRICT_STRING_CONCAT
- NULL_ORDERING
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- normalize_functions
- unsupported_messages
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- createable_sql
- create_sql
- clone_sql
- describe_sql
- prepend_ctes
- with_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- rawstring_sql
- datatypesize_sql
- datatype_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- inputoutputformat_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- insert_sql
- intersect_sql
- introducer_sql
- pseudotype_sql
- onconflict_sql
- returning_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- setitem_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_having_modifiers
- after_limit_modifiers
- select_sql
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_sql
- safebracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- safeconcat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonobject_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- add_sql
- and_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- safedpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- is_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql