From f73e9af131151f1e058446361c35b05c4c90bf10 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Thu, 7 Sep 2023 13:39:48 +0200
Subject: Merging upstream version 18.2.0.

Signed-off-by: Daniel Baumann
---
 tests/dialects/test_presto.py | 86 ++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 81 insertions(+), 5 deletions(-)

diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index 5091540..dbca5b3 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -88,7 +88,7 @@ class TestPresto(Validator):
             "CAST(ARRAY[1, 2] AS ARRAY(BIGINT))",
             write={
                 "bigquery": "CAST([1, 2] AS ARRAY<INT64>)",
-                "duckdb": "CAST(LIST_VALUE(1, 2) AS BIGINT[])",
+                "duckdb": "CAST([1, 2] AS BIGINT[])",
                 "presto": "CAST(ARRAY[1, 2] AS ARRAY(BIGINT))",
                 "spark": "CAST(ARRAY(1, 2) AS ARRAY<BIGINT>)",
                 "snowflake": "CAST([1, 2] AS ARRAY)",
@@ -98,7 +98,7 @@ class TestPresto(Validator):
             "CAST(MAP(ARRAY[1], ARRAY[1]) AS MAP(INT,INT))",
             write={
                 "bigquery": "CAST(MAP([1], [1]) AS MAP<INT64, INT64>)",
-                "duckdb": "CAST(MAP(LIST_VALUE(1), LIST_VALUE(1)) AS MAP(INT, INT))",
+                "duckdb": "CAST(MAP([1], [1]) AS MAP(INT, INT))",
                 "presto": "CAST(MAP(ARRAY[1], ARRAY[1]) AS MAP(INTEGER, INTEGER))",
                 "hive": "CAST(MAP(1, 1) AS MAP<INT, INT>)",
                 "spark": "CAST(MAP_FROM_ARRAYS(ARRAY(1), ARRAY(1)) AS MAP<INT, INT>)",
@@ -109,7 +109,7 @@ class TestPresto(Validator):
             "CAST(MAP(ARRAY['a','b','c'], ARRAY[ARRAY[1], ARRAY[2], ARRAY[3]]) AS MAP(VARCHAR, ARRAY(INT)))",
             write={
                 "bigquery": "CAST(MAP(['a', 'b', 'c'], [[1], [2], [3]]) AS MAP<STRING, ARRAY<INT64>>)",
-                "duckdb": "CAST(MAP(LIST_VALUE('a', 'b', 'c'), LIST_VALUE(LIST_VALUE(1), LIST_VALUE(2), LIST_VALUE(3))) AS MAP(TEXT, INT[]))",
+                "duckdb": "CAST(MAP(['a', 'b', 'c'], [[1], [2], [3]]) AS MAP(TEXT, INT[]))",
                 "presto": "CAST(MAP(ARRAY['a', 'b', 'c'], ARRAY[ARRAY[1], ARRAY[2], ARRAY[3]]) AS MAP(VARCHAR, ARRAY(INTEGER)))",
                 "hive": "CAST(MAP('a', ARRAY(1), 'b', ARRAY(2), 'c', ARRAY(3)) AS MAP<STRING, ARRAY<INT>>)",
                 "spark": "CAST(MAP_FROM_ARRAYS(ARRAY('a', 'b', 'c'), ARRAY(ARRAY(1), ARRAY(2), ARRAY(3))) AS MAP<STRING, ARRAY<INT>>)",
@@ -137,6 +137,13 @@ class TestPresto(Validator):
         )
 
     def test_regex(self):
+        self.validate_all(
+            "REGEXP_REPLACE('abcd', '[ab]')",
+            write={
+                "presto": "REGEXP_REPLACE('abcd', '[ab]', '')",
+                "spark": "REGEXP_REPLACE('abcd', '[ab]', '')",
+            },
+        )
         self.validate_all(
             "REGEXP_LIKE(a, 'x')",
             write={
@@ -288,6 +295,13 @@ class TestPresto(Validator):
                 "spark": "DATE_ADD(x, 1)",
             },
         )
+        self.validate_all(
+            "DATE_ADD('DAY', 1 * -1, x)",
+            write={
+                "presto": "DATE_ADD('DAY', 1 * -1, x)",
+            },
+            read={"mysql": "DATE_SUB(x, INTERVAL 1 DAY)"},
+        )
         self.validate_all(
             "NOW()",
             write={
@@ -339,6 +353,11 @@ class TestPresto(Validator):
                 "presto": "SELECT CAST('2012-10-31 00:00' AS TIMESTAMP) AT TIME ZONE 'America/Sao_Paulo'",
             },
         )
+        self.validate_all(
+            "CAST(x AS TIMESTAMP)",
+            write={"presto": "CAST(x AS TIMESTAMP)"},
+            read={"mysql": "CAST(x AS DATETIME)", "clickhouse": "CAST(x AS DATETIME64)"},
+        )
 
     def test_ddl(self):
         self.validate_all(
@@ -480,6 +499,13 @@ class TestPresto(Validator):
 
     @mock.patch("sqlglot.helper.logger")
     def test_presto(self, logger):
+        self.validate_identity(
+            "SELECT * FROM example.testdb.customer_orders FOR VERSION AS OF 8954597067493422955"
+        )
+        self.validate_identity(
+            "SELECT * FROM example.testdb.customer_orders FOR TIMESTAMP AS OF CAST('2022-03-23 09:59:29.803 Europe/Vienna' AS TIMESTAMP)"
+        )
+        self.validate_identity("SELECT * FROM x OFFSET 1 LIMIT 1")
         self.validate_identity("SELECT * FROM x OFFSET 1 FETCH FIRST 1 ROWS ONLY")
         self.validate_identity("SELECT BOOL_OR(a > 10) FROM asd AS T(a)")
 
@@ -487,7 +513,57 @@ class TestPresto(Validator):
 
         self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE")
         self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ")
         self.validate_identity("APPROX_PERCENTILE(a, b, c, d)")
+        self.validate_identity(
+            "SELECT SPLIT_TO_MAP('a:1;b:2;a:3', ';', ':', (k, v1, v2) -> CONCAT(v1, v2))"
+        )
+        self.validate_all(
+            "SELECT ROW(1, 2)",
+            read={
+                "spark": "SELECT STRUCT(1, 2)",
+            },
+            write={
+                "presto": "SELECT ROW(1, 2)",
+                "spark": "SELECT STRUCT(1, 2)",
+            },
+        )
+        self.validate_all(
+            "ARBITRARY(x)",
+            read={
+                "bigquery": "ANY_VALUE(x)",
+                "clickhouse": "any(x)",
+                "databricks": "ANY_VALUE(x)",
+                "doris": "ANY_VALUE(x)",
+                "drill": "ANY_VALUE(x)",
+                "duckdb": "ANY_VALUE(x)",
+                "hive": "FIRST(x)",
+                "mysql": "ANY_VALUE(x)",
+                "oracle": "ANY_VALUE(x)",
+                "redshift": "ANY_VALUE(x)",
+                "snowflake": "ANY_VALUE(x)",
+                "spark": "ANY_VALUE(x)",
+                "spark2": "FIRST(x)",
+            },
+            write={
+                "bigquery": "ANY_VALUE(x)",
+                "clickhouse": "any(x)",
+                "databricks": "ANY_VALUE(x)",
+                "doris": "ANY_VALUE(x)",
+                "drill": "ANY_VALUE(x)",
+                "duckdb": "ANY_VALUE(x)",
+                "hive": "FIRST(x)",
+                "mysql": "ANY_VALUE(x)",
+                "oracle": "ANY_VALUE(x)",
+                "postgres": "MAX(x)",
+                "presto": "ARBITRARY(x)",
+                "redshift": "ANY_VALUE(x)",
+                "snowflake": "ANY_VALUE(x)",
+                "spark": "ANY_VALUE(x)",
+                "spark2": "FIRST(x)",
+                "sqlite": "MAX(x)",
+                "tsql": "MAX(x)",
+            },
+        )
         self.validate_all(
             "STARTS_WITH('abc', 'a')",
             read={"spark": "STARTSWITH('abc', 'a')"},
@@ -596,7 +672,7 @@ class TestPresto(Validator):
             "SELECT ARRAY[1, 2]",
             write={
                 "bigquery": "SELECT [1, 2]",
-                "duckdb": "SELECT LIST_VALUE(1, 2)",
+                "duckdb": "SELECT [1, 2]",
                 "presto": "SELECT ARRAY[1, 2]",
                 "spark": "SELECT ARRAY(1, 2)",
             },
@@ -748,7 +824,7 @@ class TestPresto(Validator):
         self.validate_all(
             """JSON_FORMAT(JSON '"x"')""",
             write={
-                "bigquery": """TO_JSON_STRING(CAST('"x"' AS JSON))""",
+                "bigquery": """TO_JSON_STRING(JSON '"x"')""",
                 "duckdb": """CAST(TO_JSON(CAST('"x"' AS JSON)) AS TEXT)""",
                 "presto": """JSON_FORMAT(CAST('"x"' AS JSON))""",
                 "spark": """REGEXP_EXTRACT(TO_JSON(FROM_JSON('["x"]', SCHEMA_OF_JSON('["x"]'))), '^.(.*).$', 1)""",
--
cgit v1.2.3
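
A quick way to sanity-check the cross-dialect behaviors these new tests pin down is sqlglot's transpile() helper. The sketch below is illustrative only, not part of the patch: the expected strings are copied verbatim from the read/write assertions above and assume sqlglot ~18.2.0 semantics. Roughly, validate_all(sql, read={dialect: stmt}) asserts that parsing stmt in that dialect and regenerating it as Presto yields sql, which is what each assert replays.

    import sqlglot

    # MySQL DATE_SUB becomes a negated Presto DATE_ADD
    # (per the new DATE_ADD test).
    assert sqlglot.transpile(
        "DATE_SUB(x, INTERVAL 1 DAY)", read="mysql", write="presto"
    ) == ["DATE_ADD('DAY', 1 * -1, x)"]

    # MySQL DATETIME maps to Presto TIMESTAMP (per the new CAST test).
    assert sqlglot.transpile(
        "CAST(x AS DATETIME)", read="mysql", write="presto"
    ) == ["CAST(x AS TIMESTAMP)"]

    # BigQuery ANY_VALUE maps to Presto ARBITRARY
    # (per the new ARBITRARY test).
    assert sqlglot.transpile(
        "ANY_VALUE(x)", read="bigquery", write="presto"
    ) == ["ARBITRARY(x)"]

    # Spark STRUCT literals map to Presto ROW (per the new ROW test).
    assert sqlglot.transpile(
        "SELECT STRUCT(1, 2)", read="spark", write="presto"
    ) == ["SELECT ROW(1, 2)"]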