summary refs log tree commit diff stats
path: root/tests/dialects/test_presto.py
diff options
context:
space:
mode:
Diffstat (limited to 'tests/dialects/test_presto.py')
-rw-r--r--  tests/dialects/test_presto.py  45
1 file changed, 44 insertions(+), 1 deletion(-)
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index 3080476..15962cc 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -7,6 +7,26 @@ class TestPresto(Validator):
def test_cast(self):
self.validate_all(
+ "FROM_BASE64(x)",
+ read={
+ "hive": "UNBASE64(x)",
+ },
+ write={
+ "hive": "UNBASE64(x)",
+ "presto": "FROM_BASE64(x)",
+ },
+ )
+ self.validate_all(
+ "TO_BASE64(x)",
+ read={
+ "hive": "BASE64(x)",
+ },
+ write={
+ "hive": "BASE64(x)",
+ "presto": "TO_BASE64(x)",
+ },
+ )
+ self.validate_all(
"CAST(a AS ARRAY(INT))",
write={
"bigquery": "CAST(a AS ARRAY<INT64>)",
@@ -105,6 +125,13 @@ class TestPresto(Validator):
"spark": "SIZE(x)",
},
)
+ self.validate_all(
+ "ARRAY_JOIN(x, '-', 'a')",
+ write={
+ "hive": "CONCAT_WS('-', x)",
+ "spark": "ARRAY_JOIN(x, '-', 'a')",
+ },
+ )
def test_interval_plural_to_singular(self):
# Microseconds, weeks and quarters are not supported in Presto/Trino INTERVAL literals
@@ -134,6 +161,14 @@ class TestPresto(Validator):
self.validate_identity("VAR_POP(a)")
self.validate_all(
+ "SELECT FROM_UNIXTIME(col) FROM tbl",
+ write={
+ "presto": "SELECT FROM_UNIXTIME(col) FROM tbl",
+ "spark": "SELECT CAST(FROM_UNIXTIME(col) AS TIMESTAMP) FROM tbl",
+ "trino": "SELECT FROM_UNIXTIME(col) FROM tbl",
+ },
+ )
+ self.validate_all(
"DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')",
write={
"duckdb": "STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
@@ -181,7 +216,7 @@ class TestPresto(Validator):
"duckdb": "TO_TIMESTAMP(x)",
"presto": "FROM_UNIXTIME(x)",
"hive": "FROM_UNIXTIME(x)",
- "spark": "FROM_UNIXTIME(x)",
+ "spark": "CAST(FROM_UNIXTIME(x) AS TIMESTAMP)",
},
)
self.validate_all(
@@ -583,6 +618,14 @@ class TestPresto(Validator):
},
)
+ self.validate_all(
+ "JSON_FORMAT(JSON 'x')",
+ write={
+ "presto": "JSON_FORMAT(CAST('x' AS JSON))",
+ "spark": "TO_JSON('x')",
+ },
+ )
+
def test_encode_decode(self):
self.validate_all(
"TO_UTF8(x)",