summary refs log tree commit diff stats
path: root/tests/dialects/test_bigquery.py
diff options
context:
space:
mode:
Diffstat (limited to 'tests/dialects/test_bigquery.py')
-rw-r--r--  tests/dialects/test_bigquery.py  217
1 files changed, 212 insertions, 5 deletions
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index 301cd57..728785c 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -21,6 +21,13 @@ class TestBigQuery(Validator):
def test_bigquery(self):
self.validate_identity(
+ """CREATE TEMPORARY FUNCTION FOO()
+RETURNS STRING
+LANGUAGE js AS
+'return "Hello world!"'""",
+ pretty=True,
+ )
+ self.validate_identity(
"[a, a(1, 2,3,4444444444444444, tttttaoeunthaoentuhaoentuheoantu, toheuntaoheutnahoeunteoahuntaoeh), b(3, 4,5), c, d, tttttttttttttttteeeeeeeeeeeeeett, 12312312312]",
"""[
a,
@@ -279,6 +286,13 @@ class TestBigQuery(Validator):
)
self.validate_all(
+ "SELECT t.c1, h.c2, s.c3 FROM t1 AS t, UNNEST(t.t2) AS h, UNNEST(h.t3) AS s",
+ write={
+ "bigquery": "SELECT t.c1, h.c2, s.c3 FROM t1 AS t, UNNEST(t.t2) AS h, UNNEST(h.t3) AS s",
+ "duckdb": "SELECT t.c1, h.c2, s.c3 FROM t1 AS t, UNNEST(t.t2) AS _t0(h), UNNEST(h.t3) AS _t1(s)",
+ },
+ )
+ self.validate_all(
"PARSE_TIMESTAMP('%Y-%m-%dT%H:%M:%E6S%z', x)",
write={
"bigquery": "PARSE_TIMESTAMP('%Y-%m-%dT%H:%M:%E6S%z', x)",
@@ -289,7 +303,7 @@ class TestBigQuery(Validator):
"SELECT results FROM Coordinates, Coordinates.position AS results",
write={
"bigquery": "SELECT results FROM Coordinates, UNNEST(Coordinates.position) AS results",
- "presto": "SELECT results FROM Coordinates, UNNEST(Coordinates.position) AS _t(results)",
+ "presto": "SELECT results FROM Coordinates, UNNEST(Coordinates.position) AS _t0(results)",
},
)
self.validate_all(
@@ -307,7 +321,7 @@ class TestBigQuery(Validator):
},
write={
"bigquery": "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS results",
- "presto": "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS _t(results)",
+ "presto": "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS _t0(results)",
"redshift": "SELECT results FROM Coordinates AS c, c.position AS results",
},
)
@@ -525,7 +539,7 @@ class TestBigQuery(Validator):
"SELECT * FROM t WHERE EXISTS(SELECT * FROM unnest(nums) AS x WHERE x > 1)",
write={
"bigquery": "SELECT * FROM t WHERE EXISTS(SELECT * FROM UNNEST(nums) AS x WHERE x > 1)",
- "duckdb": "SELECT * FROM t WHERE EXISTS(SELECT * FROM UNNEST(nums) AS _t(x) WHERE x > 1)",
+ "duckdb": "SELECT * FROM t WHERE EXISTS(SELECT * FROM UNNEST(nums) AS _t0(x) WHERE x > 1)",
},
)
self.validate_all(
@@ -618,12 +632,87 @@ class TestBigQuery(Validator):
},
)
self.validate_all(
+ "LOWER(TO_HEX(x))",
+ write={
+ "": "LOWER(HEX(x))",
+ "bigquery": "TO_HEX(x)",
+ "clickhouse": "LOWER(HEX(x))",
+ "duckdb": "LOWER(HEX(x))",
+ "hive": "LOWER(HEX(x))",
+ "mysql": "LOWER(HEX(x))",
+ "spark": "LOWER(HEX(x))",
+ "sqlite": "LOWER(HEX(x))",
+ "presto": "LOWER(TO_HEX(x))",
+ "trino": "LOWER(TO_HEX(x))",
+ },
+ )
+ self.validate_all(
+ "TO_HEX(x)",
+ read={
+ "": "LOWER(HEX(x))",
+ "clickhouse": "LOWER(HEX(x))",
+ "duckdb": "LOWER(HEX(x))",
+ "hive": "LOWER(HEX(x))",
+ "mysql": "LOWER(HEX(x))",
+ "spark": "LOWER(HEX(x))",
+ "sqlite": "LOWER(HEX(x))",
+ "presto": "LOWER(TO_HEX(x))",
+ "trino": "LOWER(TO_HEX(x))",
+ },
+ write={
+ "": "LOWER(HEX(x))",
+ "bigquery": "TO_HEX(x)",
+ "clickhouse": "LOWER(HEX(x))",
+ "duckdb": "LOWER(HEX(x))",
+ "hive": "LOWER(HEX(x))",
+ "mysql": "LOWER(HEX(x))",
+ "presto": "LOWER(TO_HEX(x))",
+ "spark": "LOWER(HEX(x))",
+ "sqlite": "LOWER(HEX(x))",
+ "trino": "LOWER(TO_HEX(x))",
+ },
+ )
+ self.validate_all(
+ "UPPER(TO_HEX(x))",
+ read={
+ "": "HEX(x)",
+ "clickhouse": "HEX(x)",
+ "duckdb": "HEX(x)",
+ "hive": "HEX(x)",
+ "mysql": "HEX(x)",
+ "presto": "TO_HEX(x)",
+ "spark": "HEX(x)",
+ "sqlite": "HEX(x)",
+ "trino": "TO_HEX(x)",
+ },
+ write={
+ "": "HEX(x)",
+ "bigquery": "UPPER(TO_HEX(x))",
+ "clickhouse": "HEX(x)",
+ "duckdb": "HEX(x)",
+ "hive": "HEX(x)",
+ "mysql": "HEX(x)",
+ "presto": "TO_HEX(x)",
+ "spark": "HEX(x)",
+ "sqlite": "HEX(x)",
+ "trino": "TO_HEX(x)",
+ },
+ )
+ self.validate_all(
"MD5(x)",
+ read={
+ "clickhouse": "MD5(x)",
+ "presto": "MD5(x)",
+ "trino": "MD5(x)",
+ },
write={
"": "MD5_DIGEST(x)",
"bigquery": "MD5(x)",
+ "clickhouse": "MD5(x)",
"hive": "UNHEX(MD5(x))",
+ "presto": "MD5(x)",
"spark": "UNHEX(MD5(x))",
+ "trino": "MD5(x)",
},
)
self.validate_all(
@@ -631,25 +720,69 @@ class TestBigQuery(Validator):
read={
"duckdb": "SELECT MD5(some_string)",
"spark": "SELECT MD5(some_string)",
+ "clickhouse": "SELECT LOWER(HEX(MD5(some_string)))",
+ "presto": "SELECT LOWER(TO_HEX(MD5(some_string)))",
+ "trino": "SELECT LOWER(TO_HEX(MD5(some_string)))",
},
write={
"": "SELECT MD5(some_string)",
"bigquery": "SELECT TO_HEX(MD5(some_string))",
"duckdb": "SELECT MD5(some_string)",
+ "clickhouse": "SELECT LOWER(HEX(MD5(some_string)))",
+ "presto": "SELECT LOWER(TO_HEX(MD5(some_string)))",
+ "trino": "SELECT LOWER(TO_HEX(MD5(some_string)))",
+ },
+ )
+ self.validate_all(
+ "SHA1(x)",
+ read={
+ "clickhouse": "SHA1(x)",
+ "presto": "SHA1(x)",
+ "trino": "SHA1(x)",
+ },
+ write={
+ "clickhouse": "SHA1(x)",
+ "bigquery": "SHA1(x)",
+ "": "SHA(x)",
+ "presto": "SHA1(x)",
+ "trino": "SHA1(x)",
+ },
+ )
+ self.validate_all(
+ "SHA1(x)",
+ write={
+ "bigquery": "SHA1(x)",
+ "": "SHA(x)",
},
)
self.validate_all(
"SHA256(x)",
+ read={
+ "clickhouse": "SHA256(x)",
+ "presto": "SHA256(x)",
+ "trino": "SHA256(x)",
+ },
write={
"bigquery": "SHA256(x)",
"spark2": "SHA2(x, 256)",
+ "clickhouse": "SHA256(x)",
+ "presto": "SHA256(x)",
+ "trino": "SHA256(x)",
},
)
self.validate_all(
"SHA512(x)",
+ read={
+ "clickhouse": "SHA512(x)",
+ "presto": "SHA512(x)",
+ "trino": "SHA512(x)",
+ },
write={
+ "clickhouse": "SHA512(x)",
"bigquery": "SHA512(x)",
"spark2": "SHA2(x, 512)",
+ "presto": "SHA512(x)",
+ "trino": "SHA512(x)",
},
)
self.validate_all(
@@ -860,8 +993,8 @@ class TestBigQuery(Validator):
},
write={
"bigquery": "SELECT * FROM UNNEST(['7', '14']) AS x",
- "presto": "SELECT * FROM UNNEST(ARRAY['7', '14']) AS _t(x)",
- "spark": "SELECT * FROM UNNEST(ARRAY('7', '14')) AS _t(x)",
+ "presto": "SELECT * FROM UNNEST(ARRAY['7', '14']) AS _t0(x)",
+ "spark": "SELECT * FROM UNNEST(ARRAY('7', '14')) AS _t0(x)",
},
)
self.validate_all(
@@ -982,6 +1115,69 @@ class TestBigQuery(Validator):
},
)
self.validate_all(
+ "DELETE db.example_table WHERE x = 1",
+ write={
+ "bigquery": "DELETE db.example_table WHERE x = 1",
+ "presto": "DELETE FROM db.example_table WHERE x = 1",
+ },
+ )
+ self.validate_all(
+ "DELETE db.example_table tb WHERE tb.x = 1",
+ write={
+ "bigquery": "DELETE db.example_table AS tb WHERE tb.x = 1",
+ "presto": "DELETE FROM db.example_table WHERE x = 1",
+ },
+ )
+ self.validate_all(
+ "DELETE db.example_table AS tb WHERE tb.x = 1",
+ write={
+ "bigquery": "DELETE db.example_table AS tb WHERE tb.x = 1",
+ "presto": "DELETE FROM db.example_table WHERE x = 1",
+ },
+ )
+ self.validate_all(
+ "DELETE FROM db.example_table WHERE x = 1",
+ write={
+ "bigquery": "DELETE FROM db.example_table WHERE x = 1",
+ "presto": "DELETE FROM db.example_table WHERE x = 1",
+ },
+ )
+ self.validate_all(
+ "DELETE FROM db.example_table tb WHERE tb.x = 1",
+ write={
+ "bigquery": "DELETE FROM db.example_table AS tb WHERE tb.x = 1",
+ "presto": "DELETE FROM db.example_table WHERE x = 1",
+ },
+ )
+ self.validate_all(
+ "DELETE FROM db.example_table AS tb WHERE tb.x = 1",
+ write={
+ "bigquery": "DELETE FROM db.example_table AS tb WHERE tb.x = 1",
+ "presto": "DELETE FROM db.example_table WHERE x = 1",
+ },
+ )
+ self.validate_all(
+ "DELETE FROM db.example_table AS tb WHERE example_table.x = 1",
+ write={
+ "bigquery": "DELETE FROM db.example_table AS tb WHERE example_table.x = 1",
+ "presto": "DELETE FROM db.example_table WHERE x = 1",
+ },
+ )
+ self.validate_all(
+ "DELETE FROM db.example_table WHERE example_table.x = 1",
+ write={
+ "bigquery": "DELETE FROM db.example_table WHERE example_table.x = 1",
+ "presto": "DELETE FROM db.example_table WHERE example_table.x = 1",
+ },
+ )
+ self.validate_all(
+ "DELETE FROM db.t1 AS t1 WHERE NOT t1.c IN (SELECT db.t2.c FROM db.t2)",
+ write={
+ "bigquery": "DELETE FROM db.t1 AS t1 WHERE NOT t1.c IN (SELECT db.t2.c FROM db.t2)",
+ "presto": "DELETE FROM db.t1 WHERE NOT c IN (SELECT c FROM db.t2)",
+ },
+ )
+ self.validate_all(
"SELECT * FROM a WHERE b IN UNNEST([1, 2, 3])",
write={
"bigquery": "SELECT * FROM a WHERE b IN UNNEST([1, 2, 3])",
@@ -1464,3 +1660,14 @@ OPTIONS (
with self.assertRaises(ParseError):
transpile("SELECT JSON_OBJECT('a', 1, 'b') AS json_data", read="bigquery")
+
+ def test_mod(self):
+ for sql in ("MOD(a, b)", "MOD('a', b)", "MOD(5, 2)", "MOD((a + 1) * 8, 5 - 1)"):
+ with self.subTest(f"Testing BigQuery roundtrip of modulo operation: {sql}"):
+ self.validate_identity(sql)
+
+ self.validate_identity("SELECT MOD((SELECT 1), 2)")
+ self.validate_identity(
+ "MOD((a + 1), b)",
+ "MOD(a + 1, b)",
+ )