author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-01-31 05:44:41 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-01-31 05:44:41 +0000
commit     376de8b6892deca7dc5d83035c047f1e13eb67ea (patch)
tree       334a1753cd914294aa99128fac3fb59bf14dc10f /sqlglot/dialects/bigquery.py
parent     Releasing debian version 20.9.0-1. (diff)
Merging upstream version 20.11.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'sqlglot/dialects/bigquery.py')
-rw-r--r--  sqlglot/dialects/bigquery.py  34
1 file changed, 19 insertions, 15 deletions
diff --git a/sqlglot/dialects/bigquery.py b/sqlglot/dialects/bigquery.py
index 0151e6c..771ae1a 100644
--- a/sqlglot/dialects/bigquery.py
+++ b/sqlglot/dialects/bigquery.py
@@ -5,7 +5,6 @@ import re
import typing as t
from sqlglot import exp, generator, parser, tokens, transforms
-from sqlglot._typing import E
from sqlglot.dialects.dialect import (
Dialect,
NormalizationStrategy,
@@ -30,7 +29,7 @@ from sqlglot.helper import seq_get, split_num_words
from sqlglot.tokens import TokenType
if t.TYPE_CHECKING:
- from typing_extensions import Literal
+ from sqlglot._typing import E, Lit
logger = logging.getLogger("sqlglot")
@@ -47,9 +46,11 @@ def _derived_table_values_to_unnest(self: BigQuery.Generator, expression: exp.Va
exp.alias_(value, column_name)
for value, column_name in zip(
t.expressions,
- alias.columns
- if alias and alias.columns
- else (f"_c{i}" for i in range(len(t.expressions))),
+ (
+ alias.columns
+ if alias and alias.columns
+ else (f"_c{i}" for i in range(len(t.expressions)))
+ ),
)
]
)
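
The hunk above only reflows the column-name fallback into parenthesized form; behavior is unchanged: explicit alias columns are used when present, otherwise synthetic names _c0, _c1, ... are generated. A hedged usage sketch of the surrounding VALUES-to-UNNEST rewrite, assuming a recent sqlglot install; the commented output is illustrative and may differ slightly between versions:

import sqlglot

# BigQuery has no VALUES-in-FROM, so the generator rewrites it to an UNNEST of STRUCTs.
# With no alias columns on the derived table, the fallback supplies _c0, _c1, ... field names.
sql = "SELECT * FROM (VALUES (1, 'a')) AS t"
print(sqlglot.transpile(sql, read="duckdb", write="bigquery")[0])
# e.g. SELECT * FROM UNNEST([STRUCT(1 AS _c0, 'a' AS _c1)]) AS t
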
@@ -473,12 +474,10 @@ class BigQuery(Dialect):
return table
@t.overload
- def _parse_json_object(self, agg: Literal[False]) -> exp.JSONObject:
- ...
+ def _parse_json_object(self, agg: Lit[False]) -> exp.JSONObject: ...
@t.overload
- def _parse_json_object(self, agg: Literal[True]) -> exp.JSONObjectAgg:
- ...
+ def _parse_json_object(self, agg: Lit[True]) -> exp.JSONObjectAgg: ...
def _parse_json_object(self, agg=False):
json_object = super()._parse_json_object()
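
The overload stubs above are collapsed onto one line and now use the Lit alias imported from sqlglot._typing instead of typing_extensions.Literal; the typing behavior is unchanged. A self-contained sketch of that Literal-overload pattern, with made-up stand-in names (parse_json_object, JSONObject, JSONObjectAgg here are illustrations, not the real parser API):

import typing as t

class JSONObject: ...
class JSONObjectAgg: ...

@t.overload
def parse_json_object(agg: t.Literal[False]) -> JSONObject: ...
@t.overload
def parse_json_object(agg: t.Literal[True]) -> JSONObjectAgg: ...

def parse_json_object(agg: bool = False):
    # Literal[...] in the overloads lets a type checker infer the precise
    # return type from the value of `agg` at the call site.
    return JSONObjectAgg() if agg else JSONObject()
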
@@ -546,9 +545,11 @@ class BigQuery(Dialect):
exp.ArrayContains: _array_contains_sql,
exp.ArraySize: rename_func("ARRAY_LENGTH"),
exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
- exp.CollateProperty: lambda self, e: f"DEFAULT COLLATE {self.sql(e, 'this')}"
- if e.args.get("default")
- else f"COLLATE {self.sql(e, 'this')}",
+ exp.CollateProperty: lambda self, e: (
+ f"DEFAULT COLLATE {self.sql(e, 'this')}"
+ if e.args.get("default")
+ else f"COLLATE {self.sql(e, 'this')}"
+ ),
exp.CountIf: rename_func("COUNTIF"),
exp.Create: _create_sql,
exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
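
The CollateProperty entry above is only reparenthesized: it still emits DEFAULT COLLATE ... when the property carries the default flag and plain COLLATE ... otherwise. A hedged sketch that builds the node directly and renders it for BigQuery; the keyword construction and the commented output are assumptions read off the lambda, not verified API guarantees:

from sqlglot import exp

# Assumed construction: `this` holds the collation and `default` is the flag
# read via e.args.get("default") in the transform above.
prop = exp.CollateProperty(this=exp.Literal.string("und:ci"), default=True)
print(prop.sql(dialect="bigquery"))  # e.g. DEFAULT COLLATE 'und:ci'

prop = exp.CollateProperty(this=exp.Literal.string("und:ci"))
print(prop.sql(dialect="bigquery"))  # e.g. COLLATE 'und:ci'
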
@@ -560,6 +561,9 @@ class BigQuery(Dialect):
exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
+ exp.FromTimeZone: lambda self, e: self.func(
+ "DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'"
+ ),
exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
exp.GetPath: path_to_jsonpath(),
exp.GroupConcat: rename_func("STRING_AGG"),
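
The new exp.FromTimeZone mapping expresses "interpret this value in the given time zone, then show it in UTC" by nesting TIMESTAMP inside DATETIME. A hedged sketch that builds the node directly, again assuming keyword construction with the `this` and `zone` args used by the lambda; the commented output is illustrative:

from sqlglot import exp

node = exp.FromTimeZone(
    this=exp.column("created_at"),
    zone=exp.Literal.string("America/New_York"),
)
print(node.sql(dialect="bigquery"))
# e.g. DATETIME(TIMESTAMP(created_at, 'America/New_York'), 'UTC')
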
@@ -595,9 +599,9 @@ class BigQuery(Dialect):
exp.SHA2: lambda self, e: self.func(
f"SHA256" if e.text("length") == "256" else "SHA512", e.this
),
- exp.StabilityProperty: lambda self, e: f"DETERMINISTIC"
- if e.name == "IMMUTABLE"
- else "NOT DETERMINISTIC",
+ exp.StabilityProperty: lambda self, e: (
+ f"DETERMINISTIC" if e.name == "IMMUTABLE" else "NOT DETERMINISTIC"
+ ),
exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
exp.StrToTime: lambda self, e: self.func(
"PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")