summary refs log tree commit diff stats
path: root/tests/dialects/test_presto.py
diff options
context:
space:
mode:
author    Daniel Baumann <daniel.baumann@progress-linux.org>  2023-09-07 11:39:43 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2023-09-07 11:39:43 +0000
commit   341eb1a6bdf0dd5b015e5140d3b068c6fd3f4d87 (patch)
tree     61fb7eca2238fb5d41d3906f4af41de03abd25ea /tests/dialects/test_presto.py
parent   Adding upstream version 17.12.0. (diff)
download sqlglot-341eb1a6bdf0dd5b015e5140d3b068c6fd3f4d87.tar.xz
         sqlglot-341eb1a6bdf0dd5b015e5140d3b068c6fd3f4d87.zip
Adding upstream version 18.2.0. (tag: upstream/18.2.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_presto.py')
-rw-r--r--  tests/dialects/test_presto.py | 86
1 file changed, 81 insertions(+), 5 deletions(-)
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index 5091540..dbca5b3 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -88,7 +88,7 @@ class TestPresto(Validator):
"CAST(ARRAY[1, 2] AS ARRAY(BIGINT))",
write={
"bigquery": "CAST([1, 2] AS ARRAY<INT64>)",
- "duckdb": "CAST(LIST_VALUE(1, 2) AS BIGINT[])",
+ "duckdb": "CAST([1, 2] AS BIGINT[])",
"presto": "CAST(ARRAY[1, 2] AS ARRAY(BIGINT))",
"spark": "CAST(ARRAY(1, 2) AS ARRAY<BIGINT>)",
"snowflake": "CAST([1, 2] AS ARRAY)",
@@ -98,7 +98,7 @@ class TestPresto(Validator):
"CAST(MAP(ARRAY[1], ARRAY[1]) AS MAP(INT,INT))",
write={
"bigquery": "CAST(MAP([1], [1]) AS MAP<INT64, INT64>)",
- "duckdb": "CAST(MAP(LIST_VALUE(1), LIST_VALUE(1)) AS MAP(INT, INT))",
+ "duckdb": "CAST(MAP([1], [1]) AS MAP(INT, INT))",
"presto": "CAST(MAP(ARRAY[1], ARRAY[1]) AS MAP(INTEGER, INTEGER))",
"hive": "CAST(MAP(1, 1) AS MAP<INT, INT>)",
"spark": "CAST(MAP_FROM_ARRAYS(ARRAY(1), ARRAY(1)) AS MAP<INT, INT>)",
@@ -109,7 +109,7 @@ class TestPresto(Validator):
"CAST(MAP(ARRAY['a','b','c'], ARRAY[ARRAY[1], ARRAY[2], ARRAY[3]]) AS MAP(VARCHAR, ARRAY(INT)))",
write={
"bigquery": "CAST(MAP(['a', 'b', 'c'], [[1], [2], [3]]) AS MAP<STRING, ARRAY<INT64>>)",
- "duckdb": "CAST(MAP(LIST_VALUE('a', 'b', 'c'), LIST_VALUE(LIST_VALUE(1), LIST_VALUE(2), LIST_VALUE(3))) AS MAP(TEXT, INT[]))",
+ "duckdb": "CAST(MAP(['a', 'b', 'c'], [[1], [2], [3]]) AS MAP(TEXT, INT[]))",
"presto": "CAST(MAP(ARRAY['a', 'b', 'c'], ARRAY[ARRAY[1], ARRAY[2], ARRAY[3]]) AS MAP(VARCHAR, ARRAY(INTEGER)))",
"hive": "CAST(MAP('a', ARRAY(1), 'b', ARRAY(2), 'c', ARRAY(3)) AS MAP<STRING, ARRAY<INT>>)",
"spark": "CAST(MAP_FROM_ARRAYS(ARRAY('a', 'b', 'c'), ARRAY(ARRAY(1), ARRAY(2), ARRAY(3))) AS MAP<STRING, ARRAY<INT>>)",
@@ -138,6 +138,13 @@ class TestPresto(Validator):
def test_regex(self):
self.validate_all(
+ "REGEXP_REPLACE('abcd', '[ab]')",
+ write={
+ "presto": "REGEXP_REPLACE('abcd', '[ab]', '')",
+ "spark": "REGEXP_REPLACE('abcd', '[ab]', '')",
+ },
+ )
+ self.validate_all(
"REGEXP_LIKE(a, 'x')",
write={
"duckdb": "REGEXP_MATCHES(a, 'x')",
@@ -289,6 +296,13 @@ class TestPresto(Validator):
},
)
self.validate_all(
+ "DATE_ADD('DAY', 1 * -1, x)",
+ write={
+ "presto": "DATE_ADD('DAY', 1 * -1, x)",
+ },
+ read={"mysql": "DATE_SUB(x, INTERVAL 1 DAY)"},
+ )
+ self.validate_all(
"NOW()",
write={
"presto": "CURRENT_TIMESTAMP",
@@ -339,6 +353,11 @@ class TestPresto(Validator):
"presto": "SELECT CAST('2012-10-31 00:00' AS TIMESTAMP) AT TIME ZONE 'America/Sao_Paulo'",
},
)
+ self.validate_all(
+ "CAST(x AS TIMESTAMP)",
+ write={"presto": "CAST(x AS TIMESTAMP)"},
+ read={"mysql": "CAST(x AS DATETIME)", "clickhouse": "CAST(x AS DATETIME64)"},
+ )
def test_ddl(self):
self.validate_all(
@@ -480,6 +499,13 @@ class TestPresto(Validator):
@mock.patch("sqlglot.helper.logger")
def test_presto(self, logger):
+ self.validate_identity(
+ "SELECT * FROM example.testdb.customer_orders FOR VERSION AS OF 8954597067493422955"
+ )
+ self.validate_identity(
+ "SELECT * FROM example.testdb.customer_orders FOR TIMESTAMP AS OF CAST('2022-03-23 09:59:29.803 Europe/Vienna' AS TIMESTAMP)"
+ )
+
self.validate_identity("SELECT * FROM x OFFSET 1 LIMIT 1")
self.validate_identity("SELECT * FROM x OFFSET 1 FETCH FIRST 1 ROWS ONLY")
self.validate_identity("SELECT BOOL_OR(a > 10) FROM asd AS T(a)")
@@ -487,8 +513,58 @@ class TestPresto(Validator):
self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE")
self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ")
self.validate_identity("APPROX_PERCENTILE(a, b, c, d)")
+ self.validate_identity(
+ "SELECT SPLIT_TO_MAP('a:1;b:2;a:3', ';', ':', (k, v1, v2) -> CONCAT(v1, v2))"
+ )
self.validate_all(
+ "SELECT ROW(1, 2)",
+ read={
+ "spark": "SELECT STRUCT(1, 2)",
+ },
+ write={
+ "presto": "SELECT ROW(1, 2)",
+ "spark": "SELECT STRUCT(1, 2)",
+ },
+ )
+ self.validate_all(
+ "ARBITRARY(x)",
+ read={
+ "bigquery": "ANY_VALUE(x)",
+ "clickhouse": "any(x)",
+ "databricks": "ANY_VALUE(x)",
+ "doris": "ANY_VALUE(x)",
+ "drill": "ANY_VALUE(x)",
+ "duckdb": "ANY_VALUE(x)",
+ "hive": "FIRST(x)",
+ "mysql": "ANY_VALUE(x)",
+ "oracle": "ANY_VALUE(x)",
+ "redshift": "ANY_VALUE(x)",
+ "snowflake": "ANY_VALUE(x)",
+ "spark": "ANY_VALUE(x)",
+ "spark2": "FIRST(x)",
+ },
+ write={
+ "bigquery": "ANY_VALUE(x)",
+ "clickhouse": "any(x)",
+ "databricks": "ANY_VALUE(x)",
+ "doris": "ANY_VALUE(x)",
+ "drill": "ANY_VALUE(x)",
+ "duckdb": "ANY_VALUE(x)",
+ "hive": "FIRST(x)",
+ "mysql": "ANY_VALUE(x)",
+ "oracle": "ANY_VALUE(x)",
+ "postgres": "MAX(x)",
+ "presto": "ARBITRARY(x)",
+ "redshift": "ANY_VALUE(x)",
+ "snowflake": "ANY_VALUE(x)",
+ "spark": "ANY_VALUE(x)",
+ "spark2": "FIRST(x)",
+ "sqlite": "MAX(x)",
+ "tsql": "MAX(x)",
+ },
+ )
+ self.validate_all(
"STARTS_WITH('abc', 'a')",
read={"spark": "STARTSWITH('abc', 'a')"},
write={
@@ -596,7 +672,7 @@ class TestPresto(Validator):
"SELECT ARRAY[1, 2]",
write={
"bigquery": "SELECT [1, 2]",
- "duckdb": "SELECT LIST_VALUE(1, 2)",
+ "duckdb": "SELECT [1, 2]",
"presto": "SELECT ARRAY[1, 2]",
"spark": "SELECT ARRAY(1, 2)",
},
@@ -748,7 +824,7 @@ class TestPresto(Validator):
self.validate_all(
"""JSON_FORMAT(JSON '"x"')""",
write={
- "bigquery": """TO_JSON_STRING(CAST('"x"' AS JSON))""",
+ "bigquery": """TO_JSON_STRING(JSON '"x"')""",
"duckdb": """CAST(TO_JSON(CAST('"x"' AS JSON)) AS TEXT)""",
"presto": """JSON_FORMAT(CAST('"x"' AS JSON))""",
"spark": """REGEXP_EXTRACT(TO_JSON(FROM_JSON('["x"]', SCHEMA_OF_JSON('["x"]'))), '^.(.*).$', 1)""",