author     Daniel Baumann <daniel.baumann@progress-linux.org>    2023-05-03 09:12:24 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>    2023-05-03 09:12:24 +0000
commit     98d5537435b2951b36c45f1fda667fa27c165794 (patch)
tree       d26b4dfa6cf91847100fe10a94a04dcc2ad36a86 /tests/dialects/test_presto.py
parent     Adding upstream version 11.5.2. (diff)
Adding upstream version 11.7.1. (upstream/11.7.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_presto.py')
-rw-r--r--  tests/dialects/test_presto.py | 83
1 file changed, 77 insertions(+), 6 deletions(-)
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index 1007899..3080476 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -60,7 +60,7 @@ class TestPresto(Validator):
         self.validate_all(
             "CAST(x AS TIMESTAMP(9) WITH TIME ZONE)",
             write={
-                "bigquery": "CAST(x AS TIMESTAMPTZ)",
+                "bigquery": "CAST(x AS TIMESTAMP)",
                 "duckdb": "CAST(x AS TIMESTAMPTZ(9))",
                 "presto": "CAST(x AS TIMESTAMP(9) WITH TIME ZONE)",
                 "hive": "CAST(x AS TIMESTAMP)",
@@ -106,7 +106,33 @@ class TestPresto(Validator):
             },
         )
 
+    def test_interval_plural_to_singular(self):
+        # Microseconds, weeks and quarters are not supported in Presto/Trino INTERVAL literals
+        unit_to_expected = {
+            "SeCoNds": "second",
+            "minutes": "minute",
+            "hours": "hour",
+            "days": "day",
+            "months": "month",
+            "years": "year",
+        }
+
+        for unit, expected in unit_to_expected.items():
+            self.validate_all(
+                f"SELECT INTERVAL '1' {unit}",
+                write={
+                    "bigquery": f"SELECT INTERVAL '1' {expected}",
+                    "presto": f"SELECT INTERVAL '1' {expected}",
+                    "trino": f"SELECT INTERVAL '1' {expected}",
+                },
+            )
+
     def test_time(self):
+        self.validate_identity("FROM_UNIXTIME(a, b)")
+        self.validate_identity("FROM_UNIXTIME(a, b, c)")
+        self.validate_identity("TRIM(a, b)")
+        self.validate_identity("VAR_POP(a)")
+
         self.validate_all(
             "DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')",
             write={
@@ -158,10 +184,6 @@ class TestPresto(Validator):
                 "spark": "FROM_UNIXTIME(x)",
             },
         )
-        self.validate_identity("FROM_UNIXTIME(a, b)")
-        self.validate_identity("FROM_UNIXTIME(a, b, c)")
-        self.validate_identity("TRIM(a, b)")
-        self.validate_identity("VAR_POP(a)")
         self.validate_all(
             "TO_UNIXTIME(x)",
             write={
@@ -243,7 +265,7 @@ class TestPresto(Validator):
             },
         )
         self.validate_all(
-            "CREATE TABLE test STORED = 'PARQUET' AS SELECT 1",
+            "CREATE TABLE test STORED AS 'PARQUET' AS SELECT 1",
             write={
                 "duckdb": "CREATE TABLE test AS SELECT 1",
                 "presto": "CREATE TABLE test WITH (FORMAT='PARQUET') AS SELECT 1",
@@ -362,6 +384,14 @@ class TestPresto(Validator):
             },
         )
 
+        self.validate_all(
+            "SELECT a FROM x CROSS JOIN UNNEST(ARRAY(y)) AS t (a) CROSS JOIN b",
+            write={
+                "presto": "SELECT a FROM x CROSS JOIN UNNEST(ARRAY[y]) AS t(a) CROSS JOIN b",
+                "hive": "SELECT a FROM x CROSS JOIN b LATERAL VIEW EXPLODE(ARRAY(y)) t AS a",
+            },
+        )
+
     def test_presto(self):
         self.validate_identity("SELECT BOOL_OR(a > 10) FROM asd AS T(a)")
         self.validate_identity("SELECT * FROM (VALUES (1))")
@@ -369,6 +399,9 @@ class TestPresto(Validator):
         self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ")
         self.validate_identity("APPROX_PERCENTILE(a, b, c, d)")
 
+        self.validate_all("INTERVAL '1 day'", write={"trino": "INTERVAL '1' day"})
+        self.validate_all("(5 * INTERVAL '7' day)", read={"": "INTERVAL '5' week"})
+        self.validate_all("(5 * INTERVAL '7' day)", read={"": "INTERVAL '5' WEEKS"})
         self.validate_all(
             "SELECT JSON_OBJECT(KEY 'key1' VALUE 1, KEY 'key2' VALUE TRUE)",
             write={
@@ -643,3 +676,41 @@ class TestPresto(Validator):
                 "presto": "SELECT CAST(ARRAY[1, 23, 456] AS JSON)",
             },
         )
+
+    def test_explode_to_unnest(self):
+        self.validate_all(
+            "SELECT col FROM tbl CROSS JOIN UNNEST(x) AS _u(col)",
+            read={"spark": "SELECT EXPLODE(x) FROM tbl"},
+        )
+        self.validate_all(
+            "SELECT col_2 FROM _u CROSS JOIN UNNEST(col) AS _u_2(col_2)",
+            read={"spark": "SELECT EXPLODE(col) FROM _u"},
+        )
+        self.validate_all(
+            "SELECT exploded FROM schema.tbl CROSS JOIN UNNEST(col) AS _u(exploded)",
+            read={"spark": "SELECT EXPLODE(col) AS exploded FROM schema.tbl"},
+        )
+        self.validate_all(
+            "SELECT col FROM UNNEST(SEQUENCE(1, 2)) AS _u(col)",
+            read={"spark": "SELECT EXPLODE(SEQUENCE(1, 2))"},
+        )
+        self.validate_all(
+            "SELECT col FROM tbl AS t CROSS JOIN UNNEST(t.c) AS _u(col)",
+            read={"spark": "SELECT EXPLODE(t.c) FROM tbl t"},
+        )
+        self.validate_all(
+            "SELECT pos, col FROM UNNEST(SEQUENCE(2, 3)) WITH ORDINALITY AS _u(col, pos)",
+            read={"spark": "SELECT POSEXPLODE(SEQUENCE(2, 3))"},
+        )
+        self.validate_all(
+            "SELECT pos, col FROM tbl CROSS JOIN UNNEST(SEQUENCE(2, 3)) WITH ORDINALITY AS _u(col, pos)",
+            read={"spark": "SELECT POSEXPLODE(SEQUENCE(2, 3)) FROM tbl"},
+        )
+        self.validate_all(
+            "SELECT pos, col FROM tbl AS t CROSS JOIN UNNEST(t.c) WITH ORDINALITY AS _u(col, pos)",
+            read={"spark": "SELECT POSEXPLODE(t.c) FROM tbl t"},
+        )
+        self.validate_all(
+            "SELECT col, pos, pos_2, col_2 FROM _u CROSS JOIN UNNEST(SEQUENCE(2, 3)) WITH ORDINALITY AS _u_2(col_2, pos_2)",
+            read={"spark": "SELECT col, pos, POSEXPLODE(SEQUENCE(2, 3)) FROM _u"},
+        )
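Note (not part of the upstream patch): a minimal sketch of how the behaviours asserted by the new tests can be reproduced with sqlglot's public transpile() API, assuming sqlglot 11.7.1 is installed. The expected output strings are the ones the tests above assert; the input SQL is taken from the same test cases.

    import sqlglot

    # Plural or mixed-case interval units are normalised to singular when generating Presto SQL.
    print(sqlglot.transpile("SELECT INTERVAL '1' days", read="presto", write="presto")[0])
    # -> SELECT INTERVAL '1' day

    # Spark's EXPLODE is rewritten to a CROSS JOIN UNNEST with a generated alias for Presto.
    print(sqlglot.transpile("SELECT EXPLODE(x) FROM tbl", read="spark", write="presto")[0])
    # -> SELECT col FROM tbl CROSS JOIN UNNEST(x) AS _u(col)

    # POSEXPLODE additionally needs WITH ORDINALITY to expose the position column.
    print(sqlglot.transpile("SELECT POSEXPLODE(t.c) FROM tbl t", read="spark", write="presto")[0])
    # -> SELECT pos, col FROM tbl AS t CROSS JOIN UNNEST(t.c) WITH ORDINALITY AS _u(col, pos)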