
sqlglot.dialects.bigquery

  1from __future__ import annotations
  2
  3import logging
  4import re
  5import typing as t
  6
  7from sqlglot import exp, generator, parser, tokens, transforms
  8from sqlglot._typing import E
  9from sqlglot.dialects.dialect import (
 10    Dialect,
 11    binary_from_function,
 12    datestrtodate_sql,
 13    format_time_lambda,
 14    inline_array_sql,
 15    json_keyvalue_comma_sql,
 16    max_or_greatest,
 17    min_or_least,
 18    no_ilike_sql,
 19    parse_date_delta_with_interval,
 20    regexp_replace_sql,
 21    rename_func,
 22    timestrtotime_sql,
 23    ts_or_ds_to_date_sql,
 24)
 25from sqlglot.helper import seq_get, split_num_words
 26from sqlglot.tokens import TokenType
 27
 28logger = logging.getLogger("sqlglot")
 29
 30
 31def _date_add_sql(
 32    data_type: str, kind: str
 33) -> t.Callable[[BigQuery.Generator, exp.Expression], str]:
 34    def func(self: BigQuery.Generator, expression: exp.Expression) -> str:
 35        this = self.sql(expression, "this")
 36        unit = expression.args.get("unit")
 37        unit = exp.var(unit.name.upper() if unit else "DAY")
 38        interval = exp.Interval(this=expression.expression.copy(), unit=unit)
 39        return f"{data_type}_{kind}({this}, {self.sql(interval)})"
 40
 41    return func
 42
 43
 44def _derived_table_values_to_unnest(self: BigQuery.Generator, expression: exp.Values) -> str:
 45    if not expression.find_ancestor(exp.From, exp.Join):
 46        return self.values_sql(expression)
 47
 48    alias = expression.args.get("alias")
 49
 50    structs = [
 51        exp.Struct(
 52            expressions=[
 53                exp.alias_(value, column_name)
 54                for value, column_name in zip(
 55                    t.expressions,
 56                    alias.columns
 57                    if alias and alias.columns
 58                    else (f"_c{i}" for i in range(len(t.expressions))),
 59                )
 60            ]
 61        )
 62        for t in expression.find_all(exp.Tuple)
 63    ]
 64
 65    return self.unnest_sql(exp.Unnest(expressions=[exp.Array(expressions=structs)]))
 66
 67
 68def _returnsproperty_sql(self: BigQuery.Generator, expression: exp.ReturnsProperty) -> str:
 69    this = expression.this
 70    if isinstance(this, exp.Schema):
 71        this = f"{this.this} <{self.expressions(this)}>"
 72    else:
 73        this = self.sql(this)
 74    return f"RETURNS {this}"
 75
 76
 77def _create_sql(self: BigQuery.Generator, expression: exp.Create) -> str:
 78    kind = expression.args["kind"]
 79    returns = expression.find(exp.ReturnsProperty)
 80
 81    if kind.upper() == "FUNCTION" and returns and returns.args.get("is_table"):
 82        expression = expression.copy()
 83        expression.set("kind", "TABLE FUNCTION")
 84
 85        if isinstance(expression.expression, (exp.Subquery, exp.Literal)):
 86            expression.set("expression", expression.expression.this)
 87
 88        return self.create_sql(expression)
 89
 90    return self.create_sql(expression)
 91
 92
 93def _unqualify_unnest(expression: exp.Expression) -> exp.Expression:
 94    """Remove references to unnest table aliases since bigquery doesn't allow them.
 95
 96    These are added by the optimizer's qualify_column step.
 97    """
 98    from sqlglot.optimizer.scope import find_all_in_scope
 99
100    if isinstance(expression, exp.Select):
101        unnest_aliases = {
102            unnest.alias
103            for unnest in find_all_in_scope(expression, exp.Unnest)
104            if isinstance(unnest.parent, (exp.From, exp.Join))
105        }
106        if unnest_aliases:
107            for column in expression.find_all(exp.Column):
108                if column.table in unnest_aliases:
109                    column.set("table", None)
110                elif column.db in unnest_aliases:
111                    column.set("db", None)
112
113    return expression
114
115
116# https://issuetracker.google.com/issues/162294746
117# workaround for a BigQuery bug when grouping by an expression and then ordering by its alias
118# WITH x AS (SELECT 1 y)
119# SELECT y + 1 z
120# FROM x
121# GROUP BY y + 1
122# ORDER BY z
123def _alias_ordered_group(expression: exp.Expression) -> exp.Expression:
124    if isinstance(expression, exp.Select):
125        group = expression.args.get("group")
126        order = expression.args.get("order")
127
128        if group and order:
129            aliases = {
130                select.this: select.args["alias"]
131                for select in expression.selects
132                if isinstance(select, exp.Alias)
133            }
134
135            for e in group.expressions:
136                alias = aliases.get(e)
137
138                if alias:
139                    e.replace(exp.column(alias))
140
141    return expression
142
143
144def _pushdown_cte_column_names(expression: exp.Expression) -> exp.Expression:
145    """BigQuery doesn't allow column names when defining a CTE, so we try to push them down."""
146    if isinstance(expression, exp.CTE) and expression.alias_column_names:
147        cte_query = expression.this
148
149        if cte_query.is_star:
150            logger.warning(
151                "Can't push down CTE column names for star queries. Run the query through"
152                " the optimizer or use 'qualify' to expand the star projections first."
153            )
154            return expression
155
156        column_names = expression.alias_column_names
157        expression.args["alias"].set("columns", None)
158
159        for name, select in zip(column_names, cte_query.selects):
160            to_replace = select
161
162            if isinstance(select, exp.Alias):
163                select = select.this
164
165            # Inner aliases are shadowed by the CTE column names
166            to_replace.replace(exp.alias_(select, name))
167
168    return expression
169
170
171def _parse_timestamp(args: t.List) -> exp.StrToTime:
172    this = format_time_lambda(exp.StrToTime, "bigquery")([seq_get(args, 1), seq_get(args, 0)])
173    this.set("zone", seq_get(args, 2))
174    return this
175
176
177def _parse_date(args: t.List) -> exp.Date | exp.DateFromParts:
178    expr_type = exp.DateFromParts if len(args) == 3 else exp.Date
179    return expr_type.from_arg_list(args)
180
181
182def _parse_to_hex(args: t.List) -> exp.Hex | exp.MD5:
183    # TO_HEX(MD5(..)) is common in BigQuery, so it's parsed into MD5 to simplify its transpilation
184    arg = seq_get(args, 0)
185    return exp.MD5(this=arg.this) if isinstance(arg, exp.MD5Digest) else exp.Hex(this=arg)
186
187
188class BigQuery(Dialect):
189    UNNEST_COLUMN_ONLY = True
190
191    # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity
192    RESOLVES_IDENTIFIERS_AS_UPPERCASE = None
193
194    # BigQuery UDFs are case-sensitive
195    NORMALIZE_FUNCTIONS = False
196
197    TIME_MAPPING = {
198        "%D": "%m/%d/%y",
199    }
200
201    FORMAT_MAPPING = {
202        "DD": "%d",
203        "MM": "%m",
204        "MON": "%b",
205        "MONTH": "%B",
206        "YYYY": "%Y",
207        "YY": "%y",
208        "HH": "%I",
209        "HH12": "%I",
210        "HH24": "%H",
211        "MI": "%M",
212        "SS": "%S",
213        "SSSSS": "%f",
214        "TZH": "%z",
215    }
216
217    # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement
218    # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table
219    PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE"}
220
221    @classmethod
222    def normalize_identifier(cls, expression: E) -> E:
223        # In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least).
224        # The following check is essentially a heuristic to detect tables based on whether or
225        # not they're qualified.
226        if isinstance(expression, exp.Identifier):
227            parent = expression.parent
228
229            while isinstance(parent, exp.Dot):
230                parent = parent.parent
231
232            if (
233                not isinstance(parent, exp.UserDefinedFunction)
234                and not (isinstance(parent, exp.Table) and parent.db)
235                and not expression.meta.get("is_table")
236            ):
237                expression.set("this", expression.this.lower())
238
239        return expression
240
241    class Tokenizer(tokens.Tokenizer):
242        QUOTES = ["'", '"', '"""', "'''"]
243        COMMENTS = ["--", "#", ("/*", "*/")]
244        IDENTIFIERS = ["`"]
245        STRING_ESCAPES = ["\\"]
246
247        HEX_STRINGS = [("0x", ""), ("0X", "")]
248
249        BYTE_STRINGS = [
250            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
251        ]
252
253        RAW_STRINGS = [
254            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
255        ]
256
257        KEYWORDS = {
258            **tokens.Tokenizer.KEYWORDS,
259            "ANY TYPE": TokenType.VARIANT,
260            "BEGIN": TokenType.COMMAND,
261            "BEGIN TRANSACTION": TokenType.BEGIN,
262            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
263            "BYTES": TokenType.BINARY,
264            "DECLARE": TokenType.COMMAND,
265            "FLOAT64": TokenType.DOUBLE,
266            "INT64": TokenType.BIGINT,
267            "RECORD": TokenType.STRUCT,
268            "TIMESTAMP": TokenType.TIMESTAMPTZ,
269            "NOT DETERMINISTIC": TokenType.VOLATILE,
270            "UNKNOWN": TokenType.NULL,
271            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
272        }
273        KEYWORDS.pop("DIV")
274
275    class Parser(parser.Parser):
276        PREFIXED_PIVOT_COLUMNS = True
277
278        LOG_BASE_FIRST = False
279        LOG_DEFAULTS_TO_LN = True
280
281        SUPPORTS_USER_DEFINED_TYPES = False
282
283        FUNCTIONS = {
284            **parser.Parser.FUNCTIONS,
285            "DATE": _parse_date,
286            "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
287            "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
288            "DATE_TRUNC": lambda args: exp.DateTrunc(
289                unit=exp.Literal.string(str(seq_get(args, 1))),
290                this=seq_get(args, 0),
291            ),
292            "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
293            "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
294            "DIV": binary_from_function(exp.IntDiv),
295            "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
296            "MD5": exp.MD5Digest.from_arg_list,
297            "TO_HEX": _parse_to_hex,
298            "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")(
299                [seq_get(args, 1), seq_get(args, 0)]
300            ),
301            "PARSE_TIMESTAMP": _parse_timestamp,
302            "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
303            "REGEXP_EXTRACT": lambda args: exp.RegexpExtract(
304                this=seq_get(args, 0),
305                expression=seq_get(args, 1),
306                position=seq_get(args, 2),
307                occurrence=seq_get(args, 3),
308                group=exp.Literal.number(1)
309                if re.compile(str(seq_get(args, 1))).groups == 1
310                else None,
311            ),
312            "SHA256": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(256)),
313            "SHA512": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(512)),
314            "SPLIT": lambda args: exp.Split(
315                # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
316                this=seq_get(args, 0),
317                expression=seq_get(args, 1) or exp.Literal.string(","),
318            ),
319            "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
320            "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
321            "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
322            "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
323            "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
324        }
325
326        FUNCTION_PARSERS = {
327            **parser.Parser.FUNCTION_PARSERS,
328            "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
329        }
330        FUNCTION_PARSERS.pop("TRIM")
331
332        NO_PAREN_FUNCTIONS = {
333            **parser.Parser.NO_PAREN_FUNCTIONS,
334            TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
335        }
336
337        NESTED_TYPE_TOKENS = {
338            *parser.Parser.NESTED_TYPE_TOKENS,
339            TokenType.TABLE,
340        }
341
342        ID_VAR_TOKENS = {
343            *parser.Parser.ID_VAR_TOKENS,
344            TokenType.VALUES,
345        }
346
347        PROPERTY_PARSERS = {
348            **parser.Parser.PROPERTY_PARSERS,
349            "NOT DETERMINISTIC": lambda self: self.expression(
350                exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
351            ),
352            "OPTIONS": lambda self: self._parse_with_property(),
353        }
354
355        CONSTRAINT_PARSERS = {
356            **parser.Parser.CONSTRAINT_PARSERS,
357            "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
358        }
359
360        def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
361            this = super()._parse_table_part(schema=schema) or self._parse_number()
362
363            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
364            if isinstance(this, exp.Identifier):
365                table_name = this.name
366                while self._match(TokenType.DASH, advance=False) and self._next:
367                    self._advance(2)
368                    table_name += f"-{self._prev.text}"
369
370                this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))
371            elif isinstance(this, exp.Literal):
372                table_name = this.name
373
374                if (
375                    self._curr
376                    and self._prev.end == self._curr.start - 1
377                    and self._parse_var(any_token=True)
378                ):
379                    table_name += self._prev.text
380
381                this = exp.Identifier(this=table_name, quoted=True)
382
383            return this
384
385        def _parse_table_parts(self, schema: bool = False) -> exp.Table:
386            table = super()._parse_table_parts(schema=schema)
387            if isinstance(table.this, exp.Identifier) and "." in table.name:
388                catalog, db, this, *rest = (
389                    t.cast(t.Optional[exp.Expression], exp.to_identifier(x))
390                    for x in split_num_words(table.name, ".", 3)
391                )
392
393                if rest and this:
394                    this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))
395
396                table = exp.Table(this=this, db=db, catalog=catalog)
397
398            return table
399
400        def _parse_json_object(self) -> exp.JSONObject:
401            json_object = super()._parse_json_object()
402            array_kv_pair = seq_get(json_object.expressions, 0)
403
404            # Converts BQ's "signature 2" of JSON_OBJECT into SQLGlot's canonical representation
405            # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2
406            if (
407                array_kv_pair
408                and isinstance(array_kv_pair.this, exp.Array)
409                and isinstance(array_kv_pair.expression, exp.Array)
410            ):
411                keys = array_kv_pair.this.expressions
412                values = array_kv_pair.expression.expressions
413
414                json_object.set(
415                    "expressions",
416                    [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)],
417                )
418
419            return json_object
420
421    class Generator(generator.Generator):
422        EXPLICIT_UNION = True
423        INTERVAL_ALLOWS_PLURAL_FORM = False
424        JOIN_HINTS = False
425        QUERY_HINTS = False
426        TABLE_HINTS = False
427        LIMIT_FETCH = "LIMIT"
428        RENAME_TABLE_WITH_DB = False
429        ESCAPE_LINE_BREAK = True
430        NVL2_SUPPORTED = False
431
432        TRANSFORMS = {
433            **generator.Generator.TRANSFORMS,
434            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
435            exp.ArraySize: rename_func("ARRAY_LENGTH"),
436            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
437            exp.Create: _create_sql,
438            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
439            exp.DateAdd: _date_add_sql("DATE", "ADD"),
440            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
441            exp.DateFromParts: rename_func("DATE"),
442            exp.DateStrToDate: datestrtodate_sql,
443            exp.DateSub: _date_add_sql("DATE", "SUB"),
444            exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
445            exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
446            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
447            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
448            exp.GroupConcat: rename_func("STRING_AGG"),
449            exp.Hex: rename_func("TO_HEX"),
450            exp.ILike: no_ilike_sql,
451            exp.IntDiv: rename_func("DIV"),
452            exp.JSONFormat: rename_func("TO_JSON_STRING"),
453            exp.JSONKeyValue: json_keyvalue_comma_sql,
454            exp.Max: max_or_greatest,
455            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
456            exp.MD5Digest: rename_func("MD5"),
457            exp.Min: min_or_least,
458            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
459            exp.RegexpExtract: lambda self, e: self.func(
460                "REGEXP_EXTRACT",
461                e.this,
462                e.expression,
463                e.args.get("position"),
464                e.args.get("occurrence"),
465            ),
466            exp.RegexpReplace: regexp_replace_sql,
467            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
468            exp.ReturnsProperty: _returnsproperty_sql,
469            exp.Select: transforms.preprocess(
470                [
471                    transforms.explode_to_unnest,
472                    _unqualify_unnest,
473                    transforms.eliminate_distinct_on,
474                    _alias_ordered_group,
475                ]
476            ),
477            exp.SHA2: lambda self, e: self.func(
478                "SHA256" if e.text("length") == "256" else "SHA512", e.this
479            ),
480            exp.StabilityProperty: lambda self, e: "DETERMINISTIC"
481            if e.name == "IMMUTABLE"
482            else "NOT DETERMINISTIC",
483            exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
484            exp.StrToTime: lambda self, e: self.func(
485                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
486            ),
487            exp.TimeAdd: _date_add_sql("TIME", "ADD"),
488            exp.TimeSub: _date_add_sql("TIME", "SUB"),
489            exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
490            exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
491            exp.TimeStrToTime: timestrtotime_sql,
492            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
493            exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"),
494            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
495            exp.Unhex: rename_func("FROM_HEX"),
496            exp.Values: _derived_table_values_to_unnest,
497            exp.VariancePop: rename_func("VAR_POP"),
498        }
499
500        TYPE_MAPPING = {
501            **generator.Generator.TYPE_MAPPING,
502            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
503            exp.DataType.Type.BIGINT: "INT64",
504            exp.DataType.Type.BINARY: "BYTES",
505            exp.DataType.Type.BOOLEAN: "BOOL",
506            exp.DataType.Type.CHAR: "STRING",
507            exp.DataType.Type.DECIMAL: "NUMERIC",
508            exp.DataType.Type.DOUBLE: "FLOAT64",
509            exp.DataType.Type.FLOAT: "FLOAT64",
510            exp.DataType.Type.INT: "INT64",
511            exp.DataType.Type.NCHAR: "STRING",
512            exp.DataType.Type.NVARCHAR: "STRING",
513            exp.DataType.Type.SMALLINT: "INT64",
514            exp.DataType.Type.TEXT: "STRING",
515            exp.DataType.Type.TIMESTAMP: "DATETIME",
516            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
517            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
518            exp.DataType.Type.TINYINT: "INT64",
519            exp.DataType.Type.VARBINARY: "BYTES",
520            exp.DataType.Type.VARCHAR: "STRING",
521            exp.DataType.Type.VARIANT: "ANY TYPE",
522        }
523
524        PROPERTIES_LOCATION = {
525            **generator.Generator.PROPERTIES_LOCATION,
526            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
527            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
528        }
529
530        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
531        RESERVED_KEYWORDS = {
532            *generator.Generator.RESERVED_KEYWORDS,
533            "all",
534            "and",
535            "any",
536            "array",
537            "as",
538            "asc",
539            "assert_rows_modified",
540            "at",
541            "between",
542            "by",
543            "case",
544            "cast",
545            "collate",
546            "contains",
547            "create",
548            "cross",
549            "cube",
550            "current",
551            "default",
552            "define",
553            "desc",
554            "distinct",
555            "else",
556            "end",
557            "enum",
558            "escape",
559            "except",
560            "exclude",
561            "exists",
562            "extract",
563            "false",
564            "fetch",
565            "following",
566            "for",
567            "from",
568            "full",
569            "group",
570            "grouping",
571            "groups",
572            "hash",
573            "having",
574            "if",
575            "ignore",
576            "in",
577            "inner",
578            "intersect",
579            "interval",
580            "into",
581            "is",
582            "join",
583            "lateral",
584            "left",
585            "like",
586            "limit",
587            "lookup",
588            "merge",
589            "natural",
590            "new",
591            "no",
592            "not",
593            "null",
594            "nulls",
595            "of",
596            "on",
597            "or",
598            "order",
599            "outer",
600            "over",
601            "partition",
602            "preceding",
603            "proto",
604            "qualify",
605            "range",
606            "recursive",
607            "respect",
608            "right",
609            "rollup",
610            "rows",
611            "select",
612            "set",
613            "some",
614            "struct",
615            "tablesample",
616            "then",
617            "to",
618            "treat",
619            "true",
620            "unbounded",
621            "union",
622            "unnest",
623            "using",
624            "when",
625            "where",
626            "window",
627            "with",
628            "within",
629        }
630
631        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
632            parent = expression.parent
633
634            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
635            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
636            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
637                return self.func(
638                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
639                )
640
641            return super().attimezone_sql(expression)
642
643        def cast_sql(self, expression: exp.Cast, safe_prefix: t.Optional[str] = None) -> str:
644            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#json_literals
645            if expression.is_type("json"):
646                return f"JSON {self.sql(expression, 'this')}"
647
648            return super().cast_sql(expression, safe_prefix=safe_prefix)
649
650        def trycast_sql(self, expression: exp.TryCast) -> str:
651            return self.cast_sql(expression, safe_prefix="SAFE_")
652
653        def cte_sql(self, expression: exp.CTE) -> str:
654            if expression.alias_column_names:
655                self.unsupported("Column names in CTE definition are not supported.")
656            return super().cte_sql(expression)
657
658        def array_sql(self, expression: exp.Array) -> str:
659            first_arg = seq_get(expression.expressions, 0)
660            if isinstance(first_arg, exp.Subqueryable):
661                return f"ARRAY{self.wrap(self.sql(first_arg))}"
662
663            return inline_array_sql(self, expression)
664
665        def transaction_sql(self, *_) -> str:
666            return "BEGIN TRANSACTION"
667
668        def commit_sql(self, *_) -> str:
669            return "COMMIT TRANSACTION"
670
671        def rollback_sql(self, *_) -> str:
672            return "ROLLBACK TRANSACTION"
673
674        def in_unnest_op(self, expression: exp.Unnest) -> str:
675            return self.sql(expression)
676
677        def except_op(self, expression: exp.Except) -> str:
678            if not expression.args.get("distinct", False):
679                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
680            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
681
682        def intersect_op(self, expression: exp.Intersect) -> str:
683            if not expression.args.get("distinct", False):
684                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
685            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
686
687        def with_properties(self, properties: exp.Properties) -> str:
688            return self.properties(properties, prefix=self.seg("OPTIONS"))
689
690        def version_sql(self, expression: exp.Version) -> str:
691            if expression.name == "TIMESTAMP":
692                expression = expression.copy()
693                expression.set("this", "SYSTEM_TIME")
694            return super().version_sql(expression)
class BigQuery(sqlglot.dialects.dialect.Dialect):
UNNEST_COLUMN_ONLY = True
RESOLVES_IDENTIFIERS_AS_UPPERCASE: Optional[bool] = None
NORMALIZE_FUNCTIONS: bool | str = False
TIME_MAPPING: Dict[str, str] = {'%D': '%m/%d/%y'}
FORMAT_MAPPING: Dict[str, str] = {'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
PSEUDOCOLUMNS: Set[str] = {'_PARTITIONDATE', '_PARTITIONTIME'}
@classmethod
def normalize_identifier(cls, expression: ~E) -> ~E:
Normalizes an unquoted identifier to either lower or upper case, thus essentially making it case-insensitive. If a dialect treats all identifiers as case-insensitive, they will be normalized regardless of being quoted or not.
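
A minimal sketch of the behaviour described above, assuming sqlglot's optimizer helpers are available (the normalize_identifiers rule applies this classmethod to every identifier in an expression tree):

import sqlglot
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers

# The unquoted, unqualified column "Col" is lowercased, while the qualified
# table name keeps its casing: table names are case-sensitive in BigQuery.
expr = sqlglot.parse_one("SELECT Col FROM Project.Dataset.Tbl", read="bigquery")
print(normalize_identifiers(expr, dialect="bigquery").sql(dialect="bigquery"))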

tokenizer_class = <class 'sqlglot.dialects.bigquery.BigQuery.Tokenizer'>
generator_class = <class 'sqlglot.dialects.bigquery.BigQuery.Generator'>
QUOTE_START = "'"
QUOTE_END = "'"
IDENTIFIER_START = '`'
IDENTIFIER_END = '`'
BIT_START = None
BIT_END = None
HEX_START = '0x'
HEX_END = ''
BYTE_START = "b'"
BYTE_END = "'"
class BigQuery.Tokenizer(sqlglot.tokens.Tokenizer):
QUOTES = ["'", '"', '"""', "'''"]
COMMENTS = ['--', '#', ('/*', '*/')]
IDENTIFIERS = ['`']
STRING_ESCAPES = ['\\']
HEX_STRINGS = [('0x', ''), ('0X', '')]
BYTE_STRINGS = [("b'", "'"), ("B'", "'"), ('b"', '"'), ('B"', '"'), ('b"""', '"""'), ('B"""', '"""'), ("b'''", "'''"), ("B'''", "'''")]
RAW_STRINGS = [("r'", "'"), ("R'", "'"), ('r"', '"'), ('R"', '"'), ('r"""', '"""'), ('R"""', '"""'), ("r'''", "'''"), ("R'''", "'''")]
KEYWORDS = {...}  # the base sqlglot keyword map extended with the BigQuery-specific entries shown in the Tokenizer source above, with "DIV" removed
class BigQuery.Parser(sqlglot.parser.Parser):
276    class Parser(parser.Parser):
277        PREFIXED_PIVOT_COLUMNS = True
278
279        LOG_BASE_FIRST = False
280        LOG_DEFAULTS_TO_LN = True
281
282        SUPPORTS_USER_DEFINED_TYPES = False
283
284        FUNCTIONS = {
285            **parser.Parser.FUNCTIONS,
286            "DATE": _parse_date,
287            "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
288            "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
289            "DATE_TRUNC": lambda args: exp.DateTrunc(
290                unit=exp.Literal.string(str(seq_get(args, 1))),
291                this=seq_get(args, 0),
292            ),
293            "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
294            "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
295            "DIV": binary_from_function(exp.IntDiv),
296            "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
297            "MD5": exp.MD5Digest.from_arg_list,
298            "TO_HEX": _parse_to_hex,
299            "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")(
300                [seq_get(args, 1), seq_get(args, 0)]
301            ),
302            "PARSE_TIMESTAMP": _parse_timestamp,
303            "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
304            "REGEXP_EXTRACT": lambda args: exp.RegexpExtract(
305                this=seq_get(args, 0),
306                expression=seq_get(args, 1),
307                position=seq_get(args, 2),
308                occurrence=seq_get(args, 3),
309                group=exp.Literal.number(1)
310                if re.compile(str(seq_get(args, 1))).groups == 1
311                else None,
312            ),
313            "SHA256": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(256)),
314            "SHA512": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(512)),
315            "SPLIT": lambda args: exp.Split(
316                # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
317                this=seq_get(args, 0),
318                expression=seq_get(args, 1) or exp.Literal.string(","),
319            ),
320            "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
321            "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
322            "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
323            "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
324            "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
325        }
326
327        FUNCTION_PARSERS = {
328            **parser.Parser.FUNCTION_PARSERS,
329            "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
330        }
331        FUNCTION_PARSERS.pop("TRIM")
332
333        NO_PAREN_FUNCTIONS = {
334            **parser.Parser.NO_PAREN_FUNCTIONS,
335            TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
336        }
337
338        NESTED_TYPE_TOKENS = {
339            *parser.Parser.NESTED_TYPE_TOKENS,
340            TokenType.TABLE,
341        }
342
343        ID_VAR_TOKENS = {
344            *parser.Parser.ID_VAR_TOKENS,
345            TokenType.VALUES,
346        }
347
348        PROPERTY_PARSERS = {
349            **parser.Parser.PROPERTY_PARSERS,
350            "NOT DETERMINISTIC": lambda self: self.expression(
351                exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
352            ),
353            "OPTIONS": lambda self: self._parse_with_property(),
354        }
355
356        CONSTRAINT_PARSERS = {
357            **parser.Parser.CONSTRAINT_PARSERS,
358            "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
359        }
360
361        def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
362            this = super()._parse_table_part(schema=schema) or self._parse_number()
363
364            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
365            if isinstance(this, exp.Identifier):
366                table_name = this.name
367                while self._match(TokenType.DASH, advance=False) and self._next:
368                    self._advance(2)
369                    table_name += f"-{self._prev.text}"
370
371                this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))
372            elif isinstance(this, exp.Literal):
373                table_name = this.name
374
375                if (
376                    self._curr
377                    and self._prev.end == self._curr.start - 1
378                    and self._parse_var(any_token=True)
379                ):
380                    table_name += self._prev.text
381
382                this = exp.Identifier(this=table_name, quoted=True)
383
384            return this
385
386        def _parse_table_parts(self, schema: bool = False) -> exp.Table:
387            table = super()._parse_table_parts(schema=schema)
388            if isinstance(table.this, exp.Identifier) and "." in table.name:
389                catalog, db, this, *rest = (
390                    t.cast(t.Optional[exp.Expression], exp.to_identifier(x))
391                    for x in split_num_words(table.name, ".", 3)
392                )
393
394                if rest and this:
395                    this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))
396
397                table = exp.Table(this=this, db=db, catalog=catalog)
398
399            return table
400
401        def _parse_json_object(self) -> exp.JSONObject:
402            json_object = super()._parse_json_object()
403            array_kv_pair = seq_get(json_object.expressions, 0)
404
405            # Converts BQ's "signature 2" of JSON_OBJECT into SQLGlot's canonical representation
406            # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2
407            if (
408                array_kv_pair
409                and isinstance(array_kv_pair.this, exp.Array)
410                and isinstance(array_kv_pair.expression, exp.Array)
411            ):
412                keys = array_kv_pair.this.expressions
413                values = array_kv_pair.expression.expressions
414
415                json_object.set(
416                    "expressions",
417                    [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)],
418                )
419
420            return json_object

Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.

Arguments:
  • error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
  • error_message_context: Determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
  • max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
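
In practice this parser is reached through the top-level sqlglot API rather than instantiated directly. A minimal sketch (the query and variable names are illustrative, not part of the source above) showing how a BigQuery-specific function is normalized into SQLGlot's canonical expression, per the FUNCTIONS mapping in the source listing:

import sqlglot
from sqlglot import exp

# PARSE_DATE(format, value) is mapped to the canonical StrToDate expression
# with its arguments reordered (see the FUNCTIONS entry for "PARSE_DATE").
tree = sqlglot.parse_one("SELECT PARSE_DATE('%Y%m%d', '20230101') AS d", read="bigquery")
assert tree.find(exp.StrToDate) is not None
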
PREFIXED_PIVOT_COLUMNS = True
LOG_BASE_FIRST = False
LOG_DEFAULTS_TO_LN = True
SUPPORTS_USER_DEFINED_TYPES = False
FUNCTIONS = {'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayJoin'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CastToStrType'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'COALESCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'IFNULL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'NVL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Concat'>>, 'CONCAT_WS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ConcatWs'>>, 'COUNT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Count'>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <function _parse_date>, 'DATE_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATEDIFF': <bound method Func.from_arg_list of 
<class 'sqlglot.expressions.DateDiff'>>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 'DATE_TRUNC': <function BigQuery.Parser.<lambda>>, 'DATETIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Extract'>>, 'FIRST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.First'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'GREATEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hex'>>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Initcap'>>, 'IS_NAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'ISNAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'JSON_EXTRACT': <bound 
method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtract'>>, 'JSON_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtractScalar'>>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'LAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Last'>>, 'LAST_DATE_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDateOfMonth'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Left'>>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log'>>, 'LOG10': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log10'>>, 'LOG2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log2'>>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'MONTHS_BETWEEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MonthsBetween'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.OpenJSON'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParameterizedAgg'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of 
<class 'sqlglot.expressions.Posexplode'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <function BigQuery.Parser.<lambda>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_REPLACE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpReplace'>>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Right'>>, 'ROUND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeConcat'>>, 'SAFE_DIVIDE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeDivide'>>, 'SET_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SetAgg'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SortArray'>>, 'SPLIT': <function BigQuery.Parser.<lambda>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STARTS_WITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STARTSWITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToMap'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'STUFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'INSERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'SUBSTRING': <bound 
method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeDiff'>>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': <function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TRANSFORM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Transform'>>, 'TRIM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTimeStr'>>, 'UPPER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'VAR_MAP': <function parse_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound 
method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'XOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Xor'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'GLOB': <function Parser.<lambda>>, 'LIKE': <function parse_like>, 'DIV': <function binary_from_function.<locals>.<lambda>>, 'GENERATE_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'TO_HEX': <function _parse_to_hex>, 'PARSE_DATE': <function BigQuery.Parser.<lambda>>, 'PARSE_TIMESTAMP': <function _parse_timestamp>, 'REGEXP_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'SHA256': <function BigQuery.Parser.<lambda>>, 'SHA512': <function BigQuery.Parser.<lambda>>, 'TO_JSON_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>}
FUNCTION_PARSERS = {'ANY_VALUE': <function Parser.<lambda>>, 'CAST': <function Parser.<lambda>>, 'CONCAT': <function Parser.<lambda>>, 'CONVERT': <function Parser.<lambda>>, 'DECODE': <function Parser.<lambda>>, 'EXTRACT': <function Parser.<lambda>>, 'JSON_OBJECT': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATCH': <function Parser.<lambda>>, 'OPENJSON': <function Parser.<lambda>>, 'POSITION': <function Parser.<lambda>>, 'SAFE_CAST': <function Parser.<lambda>>, 'STRING_AGG': <function Parser.<lambda>>, 'SUBSTRING': <function Parser.<lambda>>, 'TRY_CAST': <function Parser.<lambda>>, 'TRY_CONVERT': <function Parser.<lambda>>, 'ARRAY': <function BigQuery.Parser.<lambda>>}
NO_PAREN_FUNCTIONS = {<TokenType.CURRENT_DATE: 'CURRENT_DATE'>: <class 'sqlglot.expressions.CurrentDate'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>: <class 'sqlglot.expressions.CurrentDatetime'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>: <class 'sqlglot.expressions.CurrentTime'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>: <class 'sqlglot.expressions.CurrentTimestamp'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>: <class 'sqlglot.expressions.CurrentUser'>}
NESTED_TYPE_TOKENS = {<TokenType.TABLE: 'TABLE'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.NESTED: 'NESTED'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.MAP: 'MAP'>}
ID_VAR_TOKENS = {<TokenType.UBIGINT: 'UBIGINT'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.ROW: 'ROW'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.DATE: 'DATE'>, <TokenType.ENUM: 'ENUM'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.MERGE: 'MERGE'>, <TokenType.XML: 'XML'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.TEXT: 'TEXT'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.APPLY: 'APPLY'>, <TokenType.VIEW: 'VIEW'>, <TokenType.ANY: 'ANY'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.TRUE: 'TRUE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.BINARY: 'BINARY'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.BIT: 'BIT'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.INET: 'INET'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.KEEP: 'KEEP'>, <TokenType.LOAD: 'LOAD'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.UINT: 'UINT'>, <TokenType.FIRST: 'FIRST'>, <TokenType.SOME: 'SOME'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.SEMI: 'SEMI'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.DIV: 'DIV'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.YEAR: 'YEAR'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.ROWS: 'ROWS'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.JSONB: 'JSONB'>, <TokenType.FULL: 'FULL'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.FALSE: 'FALSE'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.IS: 'IS'>, <TokenType.SHOW: 'SHOW'>, <TokenType.INDEX: 'INDEX'>, <TokenType.UINT256: 'UINT256'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.VAR: 'VAR'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.TIME: 'TIME'>, <TokenType.ANTI: 'ANTI'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.INT: 'INT'>, <TokenType.VARIANT: 
'VARIANT'>, <TokenType.SUPER: 'SUPER'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.CASE: 'CASE'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.INT256: 'INT256'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.VALUES: 'VALUES'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.END: 'END'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.INT128: 'INT128'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.NEXT: 'NEXT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.UUID: 'UUID'>, <TokenType.LEFT: 'LEFT'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.TOP: 'TOP'>, <TokenType.DESC: 'DESC'>, <TokenType.ASC: 'ASC'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.NULL: 'NULL'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.FILTER: 'FILTER'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.ALL: 'ALL'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.MONEY: 'MONEY'>, <TokenType.SET: 'SET'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.TABLE: 'TABLE'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.MAP: 'MAP'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.CACHE: 'CACHE'>, <TokenType.JSON: 'JSON'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.ENUM8: 'ENUM8'>}
PROPERTY_PARSERS = {'ALGORITHM': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'BLOCKCOMPRESSION': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECKSUM': <function Parser.<lambda>>, 'CLUSTER BY': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COPY': <function Parser.<lambda>>, 'DATABLOCKSIZE': <function Parser.<lambda>>, 'DEFINER': <function Parser.<lambda>>, 'DETERMINISTIC': <function Parser.<lambda>>, 'DISTKEY': <function Parser.<lambda>>, 'DISTSTYLE': <function Parser.<lambda>>, 'ENGINE': <function Parser.<lambda>>, 'EXECUTE': <function Parser.<lambda>>, 'EXTERNAL': <function Parser.<lambda>>, 'FALLBACK': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'FREESPACE': <function Parser.<lambda>>, 'HEAP': <function Parser.<lambda>>, 'IMMUTABLE': <function Parser.<lambda>>, 'JOURNAL': <function Parser.<lambda>>, 'LANGUAGE': <function Parser.<lambda>>, 'LAYOUT': <function Parser.<lambda>>, 'LIFETIME': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'LOCATION': <function Parser.<lambda>>, 'LOCK': <function Parser.<lambda>>, 'LOCKING': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATERIALIZED': <function Parser.<lambda>>, 'MERGEBLOCKRATIO': <function Parser.<lambda>>, 'MULTISET': <function Parser.<lambda>>, 'NO': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'ORDER BY': <function Parser.<lambda>>, 'PARTITION BY': <function Parser.<lambda>>, 'PARTITIONED BY': <function Parser.<lambda>>, 'PARTITIONED_BY': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'RANGE': <function Parser.<lambda>>, 'RETURNS': <function Parser.<lambda>>, 'ROW': <function Parser.<lambda>>, 'ROW_FORMAT': <function Parser.<lambda>>, 'SET': <function Parser.<lambda>>, 'SETTINGS': <function Parser.<lambda>>, 'SORTKEY': <function Parser.<lambda>>, 'SOURCE': <function Parser.<lambda>>, 'STABLE': <function Parser.<lambda>>, 'STORED': <function Parser.<lambda>>, 'TBLPROPERTIES': <function Parser.<lambda>>, 'TEMP': <function Parser.<lambda>>, 'TEMPORARY': <function Parser.<lambda>>, 'TO': <function Parser.<lambda>>, 'TRANSIENT': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'USING': <function Parser.<lambda>>, 'VOLATILE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'NOT DETERMINISTIC': <function BigQuery.Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
CONSTRAINT_PARSERS = {'AUTOINCREMENT': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'CASESPECIFIC': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECK': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COMPRESS': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'NONCLUSTERED': <function Parser.<lambda>>, 'DEFAULT': <function Parser.<lambda>>, 'ENCODE': <function Parser.<lambda>>, 'FOREIGN KEY': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'GENERATED': <function Parser.<lambda>>, 'IDENTITY': <function Parser.<lambda>>, 'INLINE': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'NOT': <function Parser.<lambda>>, 'NULL': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'PATH': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'REFERENCES': <function Parser.<lambda>>, 'TITLE': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'UNIQUE': <function Parser.<lambda>>, 'UPPERCASE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
UNNEST_COLUMN_ONLY: bool = True
NORMALIZE_FUNCTIONS = False
SHOW_TRIE: Dict = {}
SET_TRIE: Dict = {'GLOBAL': {0: True}, 'LOCAL': {0: True}, 'SESSION': {0: True}, 'TRANSACTION': {0: True}}
FORMAT_MAPPING: Dict[str, str] = {'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
FORMAT_TRIE: Dict = {'D': {'D': {0: True}}, 'M': {'M': {0: True}, 'O': {'N': {0: True, 'T': {'H': {0: True}}}}, 'I': {0: True}}, 'Y': {'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'H': {'H': {0: True, '1': {'2': {0: True}}, '2': {'4': {0: True}}}}, 'S': {'S': {0: True, 'S': {'S': {'S': {0: True}}}}}, 'T': {'Z': {'H': {0: True}}}}
TIME_MAPPING: Dict[str, str] = {'%D': '%m/%d/%y'}
TIME_TRIE: Dict = {'%': {'D': {0: True}}}
class BigQuery.Generator(sqlglot.generator.Generator):
422    class Generator(generator.Generator):
423        EXPLICIT_UNION = True
424        INTERVAL_ALLOWS_PLURAL_FORM = False
425        JOIN_HINTS = False
426        QUERY_HINTS = False
427        TABLE_HINTS = False
428        LIMIT_FETCH = "LIMIT"
429        RENAME_TABLE_WITH_DB = False
430        ESCAPE_LINE_BREAK = True
431        NVL2_SUPPORTED = False
432
433        TRANSFORMS = {
434            **generator.Generator.TRANSFORMS,
435            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
436            exp.ArraySize: rename_func("ARRAY_LENGTH"),
437            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
438            exp.Create: _create_sql,
439            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
440            exp.DateAdd: _date_add_sql("DATE", "ADD"),
441            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
442            exp.DateFromParts: rename_func("DATE"),
443            exp.DateStrToDate: datestrtodate_sql,
444            exp.DateSub: _date_add_sql("DATE", "SUB"),
445            exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
446            exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
447            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
448            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
449            exp.GroupConcat: rename_func("STRING_AGG"),
450            exp.Hex: rename_func("TO_HEX"),
451            exp.ILike: no_ilike_sql,
452            exp.IntDiv: rename_func("DIV"),
453            exp.JSONFormat: rename_func("TO_JSON_STRING"),
454            exp.JSONKeyValue: json_keyvalue_comma_sql,
455            exp.Max: max_or_greatest,
456            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
457            exp.MD5Digest: rename_func("MD5"),
458            exp.Min: min_or_least,
459            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
460            exp.RegexpExtract: lambda self, e: self.func(
461                "REGEXP_EXTRACT",
462                e.this,
463                e.expression,
464                e.args.get("position"),
465                e.args.get("occurrence"),
466            ),
467            exp.RegexpReplace: regexp_replace_sql,
468            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
469            exp.ReturnsProperty: _returnsproperty_sql,
470            exp.Select: transforms.preprocess(
471                [
472                    transforms.explode_to_unnest,
473                    _unqualify_unnest,
474                    transforms.eliminate_distinct_on,
475                    _alias_ordered_group,
476                ]
477            ),
478            exp.SHA2: lambda self, e: self.func(
479                f"SHA256" if e.text("length") == "256" else "SHA512", e.this
480            ),
481            exp.StabilityProperty: lambda self, e: f"DETERMINISTIC"
482            if e.name == "IMMUTABLE"
483            else "NOT DETERMINISTIC",
484            exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
485            exp.StrToTime: lambda self, e: self.func(
486                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
487            ),
488            exp.TimeAdd: _date_add_sql("TIME", "ADD"),
489            exp.TimeSub: _date_add_sql("TIME", "SUB"),
490            exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
491            exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
492            exp.TimeStrToTime: timestrtotime_sql,
493            exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
494            exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"),
495            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
496            exp.Unhex: rename_func("FROM_HEX"),
497            exp.Values: _derived_table_values_to_unnest,
498            exp.VariancePop: rename_func("VAR_POP"),
499        }
500
501        TYPE_MAPPING = {
502            **generator.Generator.TYPE_MAPPING,
503            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
504            exp.DataType.Type.BIGINT: "INT64",
505            exp.DataType.Type.BINARY: "BYTES",
506            exp.DataType.Type.BOOLEAN: "BOOL",
507            exp.DataType.Type.CHAR: "STRING",
508            exp.DataType.Type.DECIMAL: "NUMERIC",
509            exp.DataType.Type.DOUBLE: "FLOAT64",
510            exp.DataType.Type.FLOAT: "FLOAT64",
511            exp.DataType.Type.INT: "INT64",
512            exp.DataType.Type.NCHAR: "STRING",
513            exp.DataType.Type.NVARCHAR: "STRING",
514            exp.DataType.Type.SMALLINT: "INT64",
515            exp.DataType.Type.TEXT: "STRING",
516            exp.DataType.Type.TIMESTAMP: "DATETIME",
517            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
518            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
519            exp.DataType.Type.TINYINT: "INT64",
520            exp.DataType.Type.VARBINARY: "BYTES",
521            exp.DataType.Type.VARCHAR: "STRING",
522            exp.DataType.Type.VARIANT: "ANY TYPE",
523        }
524
525        PROPERTIES_LOCATION = {
526            **generator.Generator.PROPERTIES_LOCATION,
527            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
528            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
529        }
530
531        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
532        RESERVED_KEYWORDS = {
533            *generator.Generator.RESERVED_KEYWORDS,
534            "all",
535            "and",
536            "any",
537            "array",
538            "as",
539            "asc",
540            "assert_rows_modified",
541            "at",
542            "between",
543            "by",
544            "case",
545            "cast",
546            "collate",
547            "contains",
548            "create",
549            "cross",
550            "cube",
551            "current",
552            "default",
553            "define",
554            "desc",
555            "distinct",
556            "else",
557            "end",
558            "enum",
559            "escape",
560            "except",
561            "exclude",
562            "exists",
563            "extract",
564            "false",
565            "fetch",
566            "following",
567            "for",
568            "from",
569            "full",
570            "group",
571            "grouping",
572            "groups",
573            "hash",
574            "having",
575            "if",
576            "ignore",
577            "in",
578            "inner",
579            "intersect",
580            "interval",
581            "into",
582            "is",
583            "join",
584            "lateral",
585            "left",
586            "like",
587            "limit",
588            "lookup",
589            "merge",
590            "natural",
591            "new",
592            "no",
593            "not",
594            "null",
595            "nulls",
596            "of",
597            "on",
598            "or",
599            "order",
600            "outer",
601            "over",
602            "partition",
603            "preceding",
604            "proto",
605            "qualify",
606            "range",
607            "recursive",
608            "respect",
609            "right",
610            "rollup",
611            "rows",
612            "select",
613            "set",
614            "some",
615            "struct",
616            "tablesample",
617            "then",
618            "to",
619            "treat",
620            "true",
621            "unbounded",
622            "union",
623            "unnest",
624            "using",
625            "when",
626            "where",
627            "window",
628            "with",
629            "within",
630        }
631
632        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
633            parent = expression.parent
634
635            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
636            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
637            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
638                return self.func(
639                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
640                )
641
642            return super().attimezone_sql(expression)
643
644        def cast_sql(self, expression: exp.Cast, safe_prefix: t.Optional[str] = None) -> str:
645            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#json_literals
646            if expression.is_type("json"):
647                return f"JSON {self.sql(expression, 'this')}"
648
649            return super().cast_sql(expression, safe_prefix=safe_prefix)
650
651        def trycast_sql(self, expression: exp.TryCast) -> str:
652            return self.cast_sql(expression, safe_prefix="SAFE_")
653
654        def cte_sql(self, expression: exp.CTE) -> str:
655            if expression.alias_column_names:
656                self.unsupported("Column names in CTE definition are not supported.")
657            return super().cte_sql(expression)
658
659        def array_sql(self, expression: exp.Array) -> str:
660            first_arg = seq_get(expression.expressions, 0)
661            if isinstance(first_arg, exp.Subqueryable):
662                return f"ARRAY{self.wrap(self.sql(first_arg))}"
663
664            return inline_array_sql(self, expression)
665
666        def transaction_sql(self, *_) -> str:
667            return "BEGIN TRANSACTION"
668
669        def commit_sql(self, *_) -> str:
670            return "COMMIT TRANSACTION"
671
672        def rollback_sql(self, *_) -> str:
673            return "ROLLBACK TRANSACTION"
674
675        def in_unnest_op(self, expression: exp.Unnest) -> str:
676            return self.sql(expression)
677
678        def except_op(self, expression: exp.Except) -> str:
679            if not expression.args.get("distinct", False):
680                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
681            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
682
683        def intersect_op(self, expression: exp.Intersect) -> str:
684            if not expression.args.get("distinct", False):
685                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
686            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
687
688        def with_properties(self, properties: exp.Properties) -> str:
689            return self.properties(properties, prefix=self.seg("OPTIONS"))
690
691        def version_sql(self, expression: exp.Version) -> str:
692            if expression.name == "TIMESTAMP":
693                expression = expression.copy()
694                expression.set("this", "SYSTEM_TIME")
695            return super().version_sql(expression)

Generator converts a given syntax tree to the corresponding SQL string.

Arguments:
  • pretty: Whether or not to format the produced SQL string. Default: False.
  • identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
  • normalize: Whether or not to normalize identifiers to lowercase. Default: False.
  • pad: Determines the pad size in a formatted string. Default: 2.
  • indent: Determines the indentation size in a formatted string. Default: 2.
  • normalize_functions: Whether or not to normalize all function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
  • unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default: ErrorLevel.WARN.
  • max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
  • leading_comma: Determines whether or not the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
  • max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
  • comments: Whether or not to preserve comments in the output SQL code. Default: True
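
As with the parser, this generator is usually driven indirectly, for example via sqlglot.transpile, which applies the TRANSFORMS and TYPE_MAPPING shown above when writing BigQuery SQL. A minimal sketch (the query is illustrative, not part of the source above):

import sqlglot

# VARCHAR is rendered as STRING per TYPE_MAPPING; transpile returns a list of
# SQL strings, one per input statement.
sql = sqlglot.transpile("SELECT CAST(x AS VARCHAR) FROM t", write="bigquery")[0]
print(sql)  # expected, per the mappings above: SELECT CAST(x AS STRING) FROM t
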
EXPLICIT_UNION = True
INTERVAL_ALLOWS_PLURAL_FORM = False
JOIN_HINTS = False
QUERY_HINTS = False
TABLE_HINTS = False
LIMIT_FETCH = 'LIMIT'
RENAME_TABLE_WITH_DB = False
ESCAPE_LINE_BREAK = True
NVL2_SUPPORTED = False
TRANSFORMS = {<class 'sqlglot.expressions.DateAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalDayToSecondSpan'>: 'DAY TO SECOND', <class 'sqlglot.expressions.IntervalYearToMonthSpan'>: 'YEAR TO MONTH', <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function _returnsproperty_sql>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, 
<class 'sqlglot.expressions.ApproxDistinct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ArraySize'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Cast'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.Create'>: <function _create_sql>, <class 'sqlglot.expressions.CTE'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.DateDiff'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.DateFromParts'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.DateSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.DateTrunc'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.GenerateSeries'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.GroupConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Hex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ILike'>: <function no_ilike_sql>, <class 'sqlglot.expressions.IntDiv'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONFormat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONKeyValue'>: <function json_keyvalue_comma_sql>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.MD5'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.MD5Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.PartitionedByProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpExtract'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpReplace'>: <function regexp_replace_sql>, <class 'sqlglot.expressions.RegexpLike'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.SHA2'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToDate'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToTime'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TimeAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimeSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampAdd'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampSub'>: <function _date_add_sql.<locals>.func>, <class 'sqlglot.expressions.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.Trim'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsToDate'>: <function ts_or_ds_to_date_sql.<locals>._ts_or_ds_to_date_sql>, <class 'sqlglot.expressions.Unhex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Values'>: <function _derived_table_values_to_unnest>, <class 'sqlglot.expressions.VariancePop'>: <function rename_func.<locals>.<lambda>>}
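Each entry in TRANSFORMS overrides how one expression type is rendered for BigQuery. As a rough, hedged sketch of the effect (the table and column names are made up and the exact output formatting may vary between sqlglot versions), a MySQL GROUP_CONCAT goes through the exp.GroupConcat transform and is expected to come out as BigQuery's STRING_AGG:

    import sqlglot

    # exp.GroupConcat is rewritten via rename_func when targeting BigQuery,
    # so MySQL's GROUP_CONCAT is expected to come out as STRING_AGG.
    print(sqlglot.transpile("SELECT GROUP_CONCAT(name) FROM t", read="mysql", write="bigquery")[0])
    # e.g. SELECT STRING_AGG(name) FROM t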
TYPE_MAPPING = {<Type.NCHAR: 'NCHAR'>: 'STRING', <Type.NVARCHAR: 'NVARCHAR'>: 'STRING', <Type.MEDIUMTEXT: 'MEDIUMTEXT'>: 'TEXT', <Type.LONGTEXT: 'LONGTEXT'>: 'TEXT', <Type.MEDIUMBLOB: 'MEDIUMBLOB'>: 'BLOB', <Type.LONGBLOB: 'LONGBLOB'>: 'BLOB', <Type.INET: 'INET'>: 'INET', <Type.BIGDECIMAL: 'BIGDECIMAL'>: 'BIGNUMERIC', <Type.BIGINT: 'BIGINT'>: 'INT64', <Type.BINARY: 'BINARY'>: 'BYTES', <Type.BOOLEAN: 'BOOLEAN'>: 'BOOL', <Type.CHAR: 'CHAR'>: 'STRING', <Type.DECIMAL: 'DECIMAL'>: 'NUMERIC', <Type.DOUBLE: 'DOUBLE'>: 'FLOAT64', <Type.FLOAT: 'FLOAT'>: 'FLOAT64', <Type.INT: 'INT'>: 'INT64', <Type.SMALLINT: 'SMALLINT'>: 'INT64', <Type.TEXT: 'TEXT'>: 'STRING', <Type.TIMESTAMP: 'TIMESTAMP'>: 'DATETIME', <Type.TIMESTAMPTZ: 'TIMESTAMPTZ'>: 'TIMESTAMP', <Type.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>: 'TIMESTAMP', <Type.TINYINT: 'TINYINT'>: 'INT64', <Type.VARBINARY: 'VARBINARY'>: 'BYTES', <Type.VARCHAR: 'VARCHAR'>: 'STRING', <Type.VARIANT: 'VARIANT'>: 'ANY TYPE'}
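TYPE_MAPPING rewrites data types when generating BigQuery SQL. A hedged illustration (output formatting may differ slightly between versions):

    import sqlglot

    # VARCHAR -> STRING and BIGINT -> INT64 per TYPE_MAPPING.
    print(sqlglot.transpile("SELECT CAST(a AS VARCHAR)", write="bigquery")[0])  # e.g. SELECT CAST(a AS STRING)
    print(sqlglot.transpile("SELECT CAST(a AS BIGINT)", write="bigquery")[0])   # e.g. SELECT CAST(a AS INT64)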
PROPERTIES_LOCATION = {<class 'sqlglot.expressions.AlgorithmProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.AutoIncrementProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.BlockCompressionProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CharacterSetProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ChecksumProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CollateProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Cluster'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ClusteredByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DataBlocksizeProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.DefinerProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.DictRange'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DictProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistStyleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.EngineProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExternalProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.FallbackProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.FileFormatProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.FreespaceProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.HeapProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.IsolatedLoadingProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.JournalProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.LanguageProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LikeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LocationProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LockingProperty'>: <Location.POST_ALIAS: 'POST_ALIAS'>, <class 'sqlglot.expressions.LogProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.MaterializedProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeBlockRatioProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.OnProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.OnCommitProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.Order'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PrimaryKey'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Property'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.ReturnsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatDelimitedProperty'>: <Location.POST_SCHEMA: 
'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatSerdeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SchemaCommentProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SerdeProperties'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Set'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SettingsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SetProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.SortKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.StabilityProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TemporaryProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.ToTableProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TransientProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeTreeTTL'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.VolatileProperty'>: <Location.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.WithDataProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <Location.POST_NAME: 'POST_NAME'>}
RESERVED_KEYWORDS = {'new', 'or', 'else', 'outer', 'right', 'qualify', 'rollup', 'assert_rows_modified', 'order', 'at', 'for', 'default', 'current', 'treat', 'contains', 'rows', 'where', 'array', 'not', 'within', 'nulls', 'recursive', 'exclude', 'as', 'is', 'extract', 'cross', 'unbounded', 'to', 'some', 'case', 'preceding', 'no', 'intersect', 'having', 'distinct', 'limit', 'null', 'full', 'hash', 'using', 'on', 'merge', 'create', 'define', 'partition', 'left', 'collate', 'lookup', 'into', 'struct', 'window', 'between', 'false', 'cube', 'inner', 'ignore', 'if', 'like', 'group', 'in', 'proto', 'all', 'from', 'over', 'unnest', 'grouping', 'range', 'union', 'except', 'desc', 'and', 'select', 'of', 'enum', 'tablesample', 'set', 'fetch', 'cast', 'then', 'following', 'by', 'lateral', 'respect', 'when', 'groups', 'end', 'escape', 'join', 'any', 'interval', 'true', 'with', 'asc', 'natural', 'exists'}
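Identifiers whose lower-cased name appears in RESERVED_KEYWORDS are backtick-quoted even when identify is not requested. A small, hedged sketch (the column names are made up, and the quoting behavior described is an assumption about how the base generator consults this set):

    from sqlglot import exp

    # "order" is a reserved keyword, so it should be emitted as `order`;
    # a non-reserved name is expected to stay unquoted.
    print(exp.column("order").sql(dialect="bigquery"))   # e.g. `order`
    print(exp.column("amount").sql(dialect="bigquery"))  # e.g. amount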
def attimezone_sql(self, expression: sqlglot.expressions.AtTimeZone) -> str:
632        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
633            parent = expression.parent
634
635            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
636            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
637            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
638                return self.func(
639                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
640                )
641
642            return super().attimezone_sql(expression)
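For example, transpiling a PostgreSQL AT TIME ZONE expression is expected to produce the nested DATETIME/TIMESTAMP form (a hedged sketch; the column and zone names are made up):

    import sqlglot

    # AT TIME ZONE outside of CAST ... FORMAT is rewritten as TIMESTAMP(DATETIME(expr, zone)).
    print(sqlglot.transpile("SELECT ts AT TIME ZONE 'America/New_York'", read="postgres", write="bigquery")[0])
    # e.g. SELECT TIMESTAMP(DATETIME(ts, 'America/New_York'))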
def cast_sql(self, expression: sqlglot.expressions.Cast, safe_prefix: Optional[str] = None) -> str:
644        def cast_sql(self, expression: exp.Cast, safe_prefix: t.Optional[str] = None) -> str:
645            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#json_literals
646            if expression.is_type("json"):
647                return f"JSON {self.sql(expression, 'this')}"
648
649            return super().cast_sql(expression, safe_prefix=safe_prefix)
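In practice, a cast to JSON should render as a BigQuery JSON literal rather than a CAST (a hedged sketch):

    import sqlglot

    # CAST(<string> AS JSON) becomes a JSON '<string>' literal.
    print(sqlglot.transpile("SELECT CAST('[1, 2]' AS JSON)", write="bigquery")[0])
    # e.g. SELECT JSON '[1, 2]'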
def trycast_sql(self, expression: sqlglot.expressions.TryCast) -> str:
651        def trycast_sql(self, expression: exp.TryCast) -> str:
652            return self.cast_sql(expression, safe_prefix="SAFE_")
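A hedged illustration: TRY_CAST is generated through cast_sql with the SAFE_ prefix, which together with TYPE_MAPPING should yield SAFE_CAST(... AS INT64):

    import sqlglot

    # TryCast is rendered as cast_sql with safe_prefix="SAFE_".
    print(sqlglot.transpile("SELECT TRY_CAST(x AS INT)", write="bigquery")[0])
    # e.g. SELECT SAFE_CAST(x AS INT64)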
def cte_sql(self, expression: sqlglot.expressions.CTE) -> str:
654        def cte_sql(self, expression: exp.CTE) -> str:
655            if expression.alias_column_names:
656                self.unsupported("Column names in CTE definition are not supported.")
657            return super().cte_sql(expression)
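Since BigQuery has no WITH cte (a, b) AS (...) column-list syntax, generating such a CTE logs an "unsupported" warning (it can be escalated with unsupported_level=sqlglot.ErrorLevel.RAISE). A hedged sketch:

    import sqlglot

    # The column aliases on the CTE trigger Generator.unsupported(...) for BigQuery.
    sql = "WITH cte (a, b) AS (SELECT 1, 2) SELECT * FROM cte"
    print(sqlglot.transpile(sql, write="bigquery")[0])
    # logs: Column names in CTE definition are not supported.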
def array_sql(self, expression: sqlglot.expressions.Array) -> str:
659        def array_sql(self, expression: exp.Array) -> str:
660            first_arg = seq_get(expression.expressions, 0)
661            if isinstance(first_arg, exp.Subqueryable):
662                return f"ARRAY{self.wrap(self.sql(first_arg))}"
663
664            return inline_array_sql(self, expression)
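A hedged sketch of the two branches: a subquery argument keeps the ARRAY(...) form, while a plain array literal is rendered inline:

    import sqlglot

    # Subquery argument -> ARRAY(<subquery>); literal values -> inline [...].
    print(sqlglot.transpile("SELECT ARRAY(SELECT 1)", write="bigquery")[0])   # e.g. SELECT ARRAY(SELECT 1)
    print(sqlglot.transpile("SELECT ARRAY[1, 2, 3]", write="bigquery")[0])    # e.g. SELECT [1, 2, 3]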
def transaction_sql(self, *_) -> str:
666        def transaction_sql(self, *_) -> str:
667            return "BEGIN TRANSACTION"
def commit_sql(self, *_) -> str:
669        def commit_sql(self, *_) -> str:
670            return "COMMIT TRANSACTION"
def rollback_sql(self, *_) -> str:
672        def rollback_sql(self, *_) -> str:
673            return "ROLLBACK TRANSACTION"
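These three always render with an explicit TRANSACTION keyword. A hedged round-trip sketch:

    import sqlglot

    # BEGIN / COMMIT / ROLLBACK are expected to gain the TRANSACTION keyword.
    for stmt in ("BEGIN", "COMMIT", "ROLLBACK"):
        print(sqlglot.transpile(stmt, read="bigquery", write="bigquery")[0])
    # e.g. BEGIN TRANSACTION / COMMIT TRANSACTION / ROLLBACK TRANSACTION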
def in_unnest_op(self, expression: sqlglot.expressions.Unnest) -> str:
675        def in_unnest_op(self, expression: exp.Unnest) -> str:
676            return self.sql(expression)
def except_op(self, expression: sqlglot.expressions.Except) -> str:
678        def except_op(self, expression: exp.Except) -> str:
679            if not expression.args.get("distinct", False):
680                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
681            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
def intersect_op(self, expression: sqlglot.expressions.Intersect) -> str:
683        def intersect_op(self, expression: exp.Intersect) -> str:
684            if not expression.args.get("distinct", False):
685                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
686            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
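BigQuery requires an explicit set quantifier on EXCEPT/INTERSECT. A hedged sketch: a bare EXCEPT is expected to come out as EXCEPT DISTINCT, while EXCEPT ALL is kept but logs an "unsupported" warning:

    import sqlglot

    # Bare EXCEPT defaults to DISTINCT; EXCEPT ALL is unsupported in BigQuery.
    print(sqlglot.transpile("SELECT a FROM x EXCEPT SELECT a FROM y", write="bigquery")[0])
    # e.g. SELECT a FROM x EXCEPT DISTINCT SELECT a FROM y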
def with_properties(self, properties: sqlglot.expressions.Properties) -> str:
688        def with_properties(self, properties: exp.Properties) -> str:
689            return self.properties(properties, prefix=self.seg("OPTIONS"))
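Properties are emitted as a BigQuery OPTIONS(...) clause. A hedged round-trip sketch (the table definition is made up and the exact whitespace may differ):

    import sqlglot

    # CREATE TABLE properties round-trip through the OPTIONS(...) clause.
    print(sqlglot.transpile("CREATE TABLE t (x INT64) OPTIONS (description='demo')", read="bigquery", write="bigquery")[0])
    # e.g. CREATE TABLE t (x INT64) OPTIONS (description='demo')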
def version_sql(self, expression: sqlglot.expressions.Version) -> str:
691        def version_sql(self, expression: exp.Version) -> str:
692            if expression.name == "TIMESTAMP":
693                expression = expression.copy()
694                expression.set("this", "SYSTEM_TIME")
695            return super().version_sql(expression)
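For example, a Databricks-style TIMESTAMP AS OF time-travel clause is expected to be rewritten to BigQuery's FOR SYSTEM_TIME AS OF (a hedged sketch; it assumes the source dialect parses the clause into exp.Version):

    import sqlglot

    # exp.Version with this="TIMESTAMP" is renamed to SYSTEM_TIME for BigQuery.
    print(sqlglot.transpile("SELECT * FROM t TIMESTAMP AS OF '2023-01-01'", read="databricks", write="bigquery")[0])
    # e.g. SELECT * FROM t FOR SYSTEM_TIME AS OF '2023-01-01'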
INVERSE_TIME_MAPPING: Dict[str, str] = {'%m/%d/%y': '%D'}
INVERSE_TIME_TRIE: Dict = {'%': {'m': {'/': {'%': {'d': {'/': {'%': {'y': {0: True}}}}}}}}}
UNNEST_COLUMN_ONLY = True
NORMALIZE_FUNCTIONS: bool | str = False
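UNNEST_COLUMN_ONLY reflects that a BigQuery UNNEST alias names the produced column rather than a table. A hedged round-trip sketch:

    import sqlglot

    # The alias x refers to the unnested element itself, not a derived table.
    print(sqlglot.transpile("SELECT x FROM UNNEST([1, 2, 3]) AS x", read="bigquery", write="bigquery")[0])
    # e.g. SELECT x FROM UNNEST([1, 2, 3]) AS x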
@classmethod
def can_identify(text: str, identify: str | bool = 'safe') -> bool:
256    @classmethod
257    def can_identify(cls, text: str, identify: str | bool = "safe") -> bool:
258        """Checks if text can be identified given an identify option.
259
260        Args:
261            text: The text to check.
262            identify:
263                "always" or `True`: Always returns true.
264                "safe": True if the identifier is case-insensitive.
265
266        Returns:
267            Whether or not the given text can be identified.
268        """
269        if identify is True or identify == "always":
270            return True
271
272        if identify == "safe":
273            return not cls.case_sensitive(text)
274
275        return False

Checks if text can be identified given an identify option.

Arguments:
  • text: The text to check.
  • identify:
      "always" or True: Always returns true.
      "safe": True if the identifier is case-insensitive.

Returns:
  Whether or not the given text can be identified.
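
A hedged usage sketch of the classmethod above (the identifier name is made up):

    from sqlglot.dialects.bigquery import BigQuery

    # "always" / True unconditionally allow identification; "safe" depends on
    # whether the dialect considers the text case-sensitive.
    print(BigQuery.can_identify("col_a", identify="always"))  # True
    print(BigQuery.can_identify("col_a", identify=True))      # True
    print(BigQuery.can_identify("col_a", identify="safe"))    # True unless case-sensitivity applies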

QUOTE_START = "'"
QUOTE_END = "'"
IDENTIFIER_START = '`'
IDENTIFIER_END = '`'
TOKENIZER_CLASS = <class 'sqlglot.dialects.bigquery.BigQuery.Tokenizer'>
BIT_START: Optional[str] = None
BIT_END: Optional[str] = None
HEX_START: Optional[str] = '0x'
HEX_END: Optional[str] = ''
BYTE_START: Optional[str] = "b'"
BYTE_END: Optional[str] = "'"
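These tokenizer settings show up directly in generated SQL: string literals use single quotes and identifiers use backticks. A hedged sketch transpiling a PostgreSQL quoted identifier:

    import sqlglot

    # A double-quoted PostgreSQL identifier becomes a backtick-quoted BigQuery identifier.
    print(sqlglot.transpile('SELECT "col" FROM t', read="postgres", write="bigquery")[0])
    # e.g. SELECT `col` FROM t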
Inherited Members
sqlglot.generator.Generator
Generator
NULL_ORDERING_SUPPORTED
LOCKING_READS_SUPPORTED
WRAP_DERIVED_VALUES
CREATE_FUNCTION_RETURN_AS
MATCHED_BY_SOURCE
SINGLE_STRING_INTERVAL
TABLESAMPLE_WITH_METHOD
TABLESAMPLE_SIZE_IS_PERCENT
GROUPINGS_SEP
INDEX_ON
QUERY_HINT_SEP
IS_BOOL_ALLOWED
DUPLICATE_KEY_UPDATE_WITH_SET
LIMIT_IS_TOP
RETURNING_END
COLUMN_JOIN_MARKS_SUPPORTED
EXTRACT_ALLOWS_QUOTES
TZ_TO_WITH_TIME_ZONE
SELECT_KINDS
VALUES_AS_TABLE
ALTER_TABLE_ADD_COLUMN_KEYWORD
STAR_MAPPING
TIME_PART_SINGULARS
TOKEN_MAPPING
STRUCT_DELIMITER
PARAMETER_TOKEN
WITH_SEPARATED_COMMENTS
UNWRAPPED_INTERVAL_VALUES
SENTINEL_LINE_BREAK
INDEX_OFFSET
ALIAS_POST_TABLESAMPLE
IDENTIFIERS_CAN_START_WITH_DIGIT
STRICT_STRING_CONCAT
NULL_ORDERING
pretty
identify
normalize
pad
unsupported_level
max_unsupported
leading_comma
max_text_width
comments
normalize_functions
unsupported_messages
generate
unsupported
sep
seg
pad_comment
maybe_comment
wrap
no_identify
normalize_func
indent
sql
uncache_sql
cache_sql
characterset_sql
column_sql
columnposition_sql
columndef_sql
columnconstraint_sql
computedcolumnconstraint_sql
autoincrementcolumnconstraint_sql
compresscolumnconstraint_sql
generatedasidentitycolumnconstraint_sql
notnullcolumnconstraint_sql
primarykeycolumnconstraint_sql
uniquecolumnconstraint_sql
createable_sql
create_sql
clone_sql
describe_sql
prepend_ctes
with_sql
tablealias_sql
bitstring_sql
hexstring_sql
bytestring_sql
rawstring_sql
datatypeparam_sql
datatype_sql
directory_sql
delete_sql
drop_sql
except_sql
fetch_sql
filter_sql
hint_sql
index_sql
identifier_sql
inputoutputformat_sql
national_sql
partition_sql
properties_sql
root_properties
properties
locate_properties
property_sql
likeproperty_sql
fallbackproperty_sql
journalproperty_sql
freespaceproperty_sql
checksumproperty_sql
mergeblockratioproperty_sql
datablocksizeproperty_sql
blockcompressionproperty_sql
isolatedloadingproperty_sql
lockingproperty_sql
withdataproperty_sql
insert_sql
intersect_sql
introducer_sql
pseudotype_sql
objectidentifier_sql
onconflict_sql
returning_sql
rowformatdelimitedproperty_sql
withtablehint_sql
indextablehint_sql
table_sql
tablesample_sql
pivot_sql
tuple_sql
update_sql
values_sql
var_sql
into_sql
from_sql
group_sql
having_sql
connect_sql
prior_sql
join_sql
lambda_sql
lateral_sql
limit_sql
offset_sql
setitem_sql
set_sql
pragma_sql
lock_sql
literal_sql
escape_str
loaddata_sql
null_sql
boolean_sql
order_sql
cluster_sql
distribute_sql
sort_sql
ordered_sql
matchrecognize_sql
query_modifiers
offset_limit_modifiers
after_having_modifiers
after_limit_modifiers
select_sql
schema_sql
schema_columns_sql
star_sql
parameter_sql
sessionparameter_sql
placeholder_sql
subquery_sql
qualify_sql
union_sql
union_op
unnest_sql
where_sql
window_sql
partition_by_sql
windowspec_sql
withingroup_sql
between_sql
bracket_sql
safebracket_sql
all_sql
any_sql
exists_sql
case_sql
constraint_sql
nextvaluefor_sql
extract_sql
trim_sql
safeconcat_sql
check_sql
foreignkey_sql
primarykey_sql
if_sql
matchagainst_sql
jsonkeyvalue_sql
jsonobject_sql
openjsoncolumndef_sql
openjson_sql
in_sql
interval_sql
return_sql
reference_sql
anonymous_sql
paren_sql
neg_sql
not_sql
alias_sql
aliases_sql
add_sql
and_sql
xor_sql
connector_sql
bitwiseand_sql
bitwiseleftshift_sql
bitwisenot_sql
bitwiseor_sql
bitwiserightshift_sql
bitwisexor_sql
currentdate_sql
collate_sql
command_sql
comment_sql
mergetreettlaction_sql
mergetreettl_sql
altercolumn_sql
renametable_sql
altertable_sql
droppartition_sql
addconstraint_sql
distinct_sql
ignorenulls_sql
respectnulls_sql
intdiv_sql
dpipe_sql
safedpipe_sql
div_sql
overlaps_sql
distance_sql
dot_sql
eq_sql
escape_sql
glob_sql
gt_sql
gte_sql
ilike_sql
ilikeany_sql
is_sql
like_sql
likeany_sql
similarto_sql
lt_sql
lte_sql
mod_sql
mul_sql
neq_sql
nullsafeeq_sql
nullsafeneq_sql
or_sql
slice_sql
sub_sql
use_sql
binary
function_fallback_sql
func
format_args
text_width
format_time
expressions
op_expressions
naked_property
set_operation
tag_sql
token_sql
userdefinedfunction_sql
joinhint_sql
kwarg_sql
when_sql
merge_sql
tochar_sql
dictproperty_sql
dictrange_sql
dictsubproperty_sql
oncluster_sql
clusteredbyproperty_sql
anyvalue_sql
querytransform_sql
indexconstraintoption_sql
indexcolumnconstraint_sql
nvl2_sql
comprehension_sql