author     Daniel Baumann <daniel.baumann@progress-linux.org>  2023-06-02 23:59:11 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2023-06-02 23:59:11 +0000
commit     caea5267cb8e1fea3702adbdf6f68fd37d13b3b7 (patch)
tree       f06f1da1ab3b6906beca1c3c7222d28ff00766ac /tests/dialects/test_bigquery.py
parent     Adding upstream version 12.2.0. (diff)
Adding upstream version 15.0.0. (tag: upstream/15.0.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_bigquery.py')
-rw-r--r--  tests/dialects/test_bigquery.py  71
1 file changed, 53 insertions, 18 deletions
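
For orientation before the diff: the TestBigQuery class below leans on the Validator helpers validate_identity and validate_all. A minimal sketch of what those helpers presumably do, assuming they wrap sqlglot.transpile; the actual implementation ships in tests/dialects/test_dialect.py and may differ in detail:

import unittest

from sqlglot import transpile


class Validator(unittest.TestCase):
    dialect = None

    def validate_identity(self, sql):
        # Round-trip `sql` through the dialect and expect it back unchanged.
        self.assertEqual(transpile(sql, read=self.dialect, write=self.dialect)[0], sql)

    def validate_all(self, sql, read=None, write=None):
        # `write` maps target dialect -> SQL expected when generating from `sql`;
        # `read` maps source dialect -> SQL that should produce `sql` when parsed.
        for dialect, expected in (write or {}).items():
            self.assertEqual(transpile(sql, read=self.dialect, write=dialect)[0], expected)
        for dialect, source in (read or {}).items():
            self.assertEqual(transpile(source, read=dialect, write=self.dialect)[0], sql)
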
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index 87bba6f..99d8a3c 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -6,6 +6,14 @@ class TestBigQuery(Validator):
dialect = "bigquery"
def test_bigquery(self):
+ self.validate_identity("SAFE_CAST(x AS STRING)")
+ self.validate_identity("SELECT * FROM a-b-c.mydataset.mytable")
+ self.validate_identity("SELECT * FROM abc-def-ghi")
+ self.validate_identity("SELECT * FROM a-b-c")
+ self.validate_identity("SELECT * FROM my-table")
+ self.validate_identity("SELECT * FROM my-project.mydataset.mytable")
+ self.validate_identity("SELECT * FROM pro-ject_id.c.d CROSS JOIN foo-bar")
+ self.validate_identity("x <> ''")
self.validate_identity("DATE_TRUNC(col, WEEK(MONDAY))")
self.validate_identity("SELECT b'abc'")
self.validate_identity("""SELECT * FROM UNNEST(ARRAY<STRUCT<x INT64>>[1, 2])""")
@@ -13,6 +21,7 @@ class TestBigQuery(Validator):
self.validate_identity("SELECT DISTINCT AS STRUCT 1 AS a, 2 AS b")
self.validate_identity("SELECT AS VALUE STRUCT(1 AS a, 2 AS b)")
self.validate_identity("SELECT STRUCT<ARRAY<STRING>>(['2023-01-17'])")
+ self.validate_identity("SELECT STRUCT<STRING>((SELECT a FROM b.c LIMIT 1)).*")
self.validate_identity("SELECT * FROM q UNPIVOT(values FOR quarter IN (b, c))")
self.validate_identity("""CREATE TABLE x (a STRUCT<values ARRAY<INT64>>)""")
self.validate_identity("""CREATE TABLE x (a STRUCT<b STRING OPTIONS (description='b')>)""")
@@ -22,17 +31,34 @@ class TestBigQuery(Validator):
self.validate_identity(
"SELECT * FROM (SELECT * FROM `t`) AS a UNPIVOT((c) FOR c_name IN (v1, v2))"
)
+ self.validate_identity(
+ "CREATE TABLE IF NOT EXISTS foo AS SELECT * FROM bla EXCEPT DISTINCT (SELECT * FROM bar) LIMIT 0"
+ )
+ self.validate_all('x <> ""', write={"bigquery": "x <> ''"})
+ self.validate_all('x <> """"""', write={"bigquery": "x <> ''"})
+ self.validate_all("x <> ''''''", write={"bigquery": "x <> ''"})
self.validate_all(
"CREATE TEMP TABLE foo AS SELECT 1",
write={"bigquery": "CREATE TEMPORARY TABLE foo AS SELECT 1"},
)
+ self.validate_all(
+ "SELECT * FROM `SOME_PROJECT_ID.SOME_DATASET_ID.INFORMATION_SCHEMA.SOME_VIEW`",
+ write={
+ "bigquery": "SELECT * FROM SOME_PROJECT_ID.SOME_DATASET_ID.INFORMATION_SCHEMA.SOME_VIEW",
+ },
+ )
+ self.validate_all(
+ "SELECT * FROM `my-project.my-dataset.my-table`",
+ write={"bigquery": "SELECT * FROM `my-project`.`my-dataset`.`my-table`"},
+ )
self.validate_all("LEAST(x, y)", read={"sqlite": "MIN(x, y)"})
self.validate_all("CAST(x AS CHAR)", write={"bigquery": "CAST(x AS STRING)"})
self.validate_all("CAST(x AS NCHAR)", write={"bigquery": "CAST(x AS STRING)"})
self.validate_all("CAST(x AS NVARCHAR)", write={"bigquery": "CAST(x AS STRING)"})
self.validate_all("CAST(x AS TIMESTAMP)", write={"bigquery": "CAST(x AS DATETIME)"})
self.validate_all("CAST(x AS TIMESTAMPTZ)", write={"bigquery": "CAST(x AS TIMESTAMP)"})
+ self.validate_all("CAST(x AS RECORD)", write={"bigquery": "CAST(x AS STRUCT)"})
self.validate_all(
"SELECT ARRAY(SELECT AS STRUCT 1 a, 2 b)",
write={
@@ -64,16 +90,6 @@ class TestBigQuery(Validator):
"spark": "'x\\''",
},
)
- self.validate_all(
- r'r"""/\*.*\*/"""',
- write={
- "bigquery": r"'/\*.*\*/'",
- "duckdb": r"'/\*.*\*/'",
- "presto": r"'/\*.*\*/'",
- "hive": r"'/\*.*\*/'",
- "spark": r"'/\*.*\*/'",
- },
- )
with self.assertRaises(ValueError):
transpile("'\\'", read="bigquery")
@@ -87,13 +103,23 @@ class TestBigQuery(Validator):
},
)
self.validate_all(
+ r'r"""/\*.*\*/"""',
+ write={
+ "bigquery": r"r'/\*.*\*/'",
+ "duckdb": r"'/\\*.*\\*/'",
+ "presto": r"'/\\*.*\\*/'",
+ "hive": r"'/\\*.*\\*/'",
+ "spark": r"'/\\*.*\\*/'",
+ },
+ )
+ self.validate_all(
r'R"""/\*.*\*/"""',
write={
- "bigquery": r"'/\*.*\*/'",
- "duckdb": r"'/\*.*\*/'",
- "presto": r"'/\*.*\*/'",
- "hive": r"'/\*.*\*/'",
- "spark": r"'/\*.*\*/'",
+ "bigquery": r"r'/\*.*\*/'",
+ "duckdb": r"'/\\*.*\\*/'",
+ "presto": r"'/\\*.*\\*/'",
+ "hive": r"'/\\*.*\\*/'",
+ "spark": r"'/\\*.*\\*/'",
},
)
self.validate_all(
@@ -234,7 +260,7 @@ class TestBigQuery(Validator):
"DIV(x, y)",
write={
"bigquery": "DIV(x, y)",
- "duckdb": "CAST(x / y AS INT)",
+ "duckdb": "x // y",
},
)
@@ -308,7 +334,7 @@ class TestBigQuery(Validator):
self.validate_all(
"DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)",
write={
- "postgres": "CURRENT_DATE - INTERVAL '1' DAY",
+ "postgres": "CURRENT_DATE - INTERVAL '1 DAY'",
"bigquery": "DATE_SUB(CURRENT_DATE, INTERVAL 1 DAY)",
},
)
@@ -318,7 +344,7 @@ class TestBigQuery(Validator):
"bigquery": "DATE_ADD(CURRENT_DATE, INTERVAL 1 DAY)",
"duckdb": "CURRENT_DATE + INTERVAL 1 DAY",
"mysql": "DATE_ADD(CURRENT_DATE, INTERVAL 1 DAY)",
- "postgres": "CURRENT_DATE + INTERVAL '1' DAY",
+ "postgres": "CURRENT_DATE + INTERVAL '1 DAY'",
"presto": "DATE_ADD('DAY', 1, CURRENT_DATE)",
"hive": "DATE_ADD(CURRENT_DATE, 1)",
"spark": "DATE_ADD(CURRENT_DATE, 1)",
@@ -470,3 +496,12 @@ class TestBigQuery(Validator):
"snowflake": "MERGE INTO dataset.Inventory AS T USING dataset.NewArrivals AS S ON FALSE WHEN NOT MATCHED AND product LIKE '%a%' THEN DELETE WHEN NOT MATCHED AND product LIKE '%b%' THEN DELETE",
},
)
+
+ def test_rename_table(self):
+ self.validate_all(
+ "ALTER TABLE db.t1 RENAME TO db.t2",
+ write={
+ "snowflake": "ALTER TABLE db.t1 RENAME TO db.t2",
+ "bigquery": "ALTER TABLE db.t1 RENAME TO t2",
+ },
+ )
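
The expectations above can be reproduced outside the test suite with sqlglot.transpile; a short usage sketch, with the expected outputs taken from the values pinned in this diff and assuming sqlglot 15.0.0:

from sqlglot import transpile

# Backslashes in BigQuery raw strings are escaped for dialects without raw
# strings (DuckDB here); regenerating for BigQuery keeps the r prefix.
print(transpile(r'r"""/\*.*\*/"""', read="bigquery", write="duckdb")[0])  # '/\\*.*\\*/'

# Dash-separated, unquoted project/dataset/table names now parse as table references.
print(transpile("SELECT * FROM my-project.mydataset.mytable", read="bigquery", write="bigquery")[0])

# DIV(x, y) maps to DuckDB's integer-division operator.
print(transpile("DIV(x, y)", read="bigquery", write="duckdb")[0])  # x // y
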