diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-02-08 04:14:30 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-02-08 04:14:30 +0000 |
commit | 99980f928b5b7be237d108266072e51aa3bb354e (patch) | |
tree | ce6fff00ea2b834bdbe3d84dcac90df1617d4245 /tests/dialects/test_presto.py | |
parent | Adding upstream version 10.6.0. (diff) | |
download | sqlglot-99980f928b5b7be237d108266072e51aa3bb354e.tar.xz sqlglot-99980f928b5b7be237d108266072e51aa3bb354e.zip |
Adding upstream version 10.6.3.upstream/10.6.3
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_presto.py')
-rw-r--r-- | tests/dialects/test_presto.py | 87 |
1 file changed, 79 insertions, 8 deletions
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py index 5ecd69a..9815dcc 100644 --- a/tests/dialects/test_presto.py +++ b/tests/dialects/test_presto.py @@ -111,7 +111,7 @@ class TestPresto(Validator): "DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')", write={ "duckdb": "STRFTIME(x, '%Y-%m-%d %H:%M:%S')", - "presto": "DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')", + "presto": "DATE_FORMAT(x, '%Y-%m-%d %T')", "hive": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')", "spark": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')", }, @@ -120,7 +120,7 @@ class TestPresto(Validator): "DATE_PARSE(x, '%Y-%m-%d %H:%i:%S')", write={ "duckdb": "STRPTIME(x, '%Y-%m-%d %H:%M:%S')", - "presto": "DATE_PARSE(x, '%Y-%m-%d %H:%i:%S')", + "presto": "DATE_PARSE(x, '%Y-%m-%d %T')", "hive": "CAST(x AS TIMESTAMP)", "spark": "TO_TIMESTAMP(x, 'yyyy-MM-dd HH:mm:ss')", }, @@ -135,6 +135,12 @@ class TestPresto(Validator): }, ) self.validate_all( + "DATE_FORMAT(x, '%T')", + write={ + "hive": "DATE_FORMAT(x, 'HH:mm:ss')", + }, + ) + self.validate_all( "DATE_PARSE(SUBSTR(x, 1, 10), '%Y-%m-%d')", write={ "duckdb": "STRPTIME(SUBSTR(x, 1, 10), '%Y-%m-%d')", @@ -146,7 +152,7 @@ self.validate_all( "FROM_UNIXTIME(x)", write={ - "duckdb": "TO_TIMESTAMP(CAST(x AS BIGINT))", + "duckdb": "TO_TIMESTAMP(x)", "presto": "FROM_UNIXTIME(x)", "hive": "FROM_UNIXTIME(x)", "spark": "FROM_UNIXTIME(x)", @@ -177,11 +183,51 @@ self.validate_all( "NOW()", write={ - "presto": "CURRENT_TIMESTAMP()", + "presto": "CURRENT_TIMESTAMP", "hive": "CURRENT_TIMESTAMP()", }, ) + self.validate_all( + "DAY_OF_WEEK(timestamp '2012-08-08 01:00')", + write={ + "spark": "DAYOFWEEK(CAST('2012-08-08 01:00' AS TIMESTAMP))", + "presto": "DAY_OF_WEEK(CAST('2012-08-08 01:00' AS TIMESTAMP))", + }, + ) + + self.validate_all( + "DAY_OF_MONTH(timestamp '2012-08-08 01:00')", + write={ + "spark": "DAYOFMONTH(CAST('2012-08-08 01:00' AS TIMESTAMP))", + "presto": "DAY_OF_MONTH(CAST('2012-08-08 01:00' AS TIMESTAMP))",
+ }, + ) + + self.validate_all( + "DAY_OF_YEAR(timestamp '2012-08-08 01:00')", + write={ + "spark": "DAYOFYEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))", + "presto": "DAY_OF_YEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))", + }, + ) + + self.validate_all( + "WEEK_OF_YEAR(timestamp '2012-08-08 01:00')", + write={ + "spark": "WEEKOFYEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))", + "presto": "WEEK_OF_YEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))", + }, + ) + + self.validate_all( + "SELECT timestamp '2012-10-31 00:00' AT TIME ZONE 'America/Sao_Paulo'", + write={ + "spark": "SELECT FROM_UTC_TIMESTAMP(CAST('2012-10-31 00:00' AS TIMESTAMP), 'America/Sao_Paulo')", + "presto": "SELECT CAST('2012-10-31 00:00' AS TIMESTAMP) AT TIME ZONE 'America/Sao_Paulo'", + }, + ) + def test_ddl(self): self.validate_all( "CREATE TABLE test WITH (FORMAT = 'PARQUET') AS SELECT 1", @@ -314,6 +360,11 @@ class TestPresto(Validator): def test_presto(self): self.validate_identity("SELECT BOOL_OR(a > 10) FROM asd AS T(a)") + self.validate_identity("SELECT * FROM (VALUES (1))") + self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE") + self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ") + self.validate_identity("APPROX_PERCENTILE(a, b, c, d)") + self.validate_all( 'SELECT a."b" FROM "foo"', write={ @@ -455,10 +506,6 @@ class TestPresto(Validator): "spark": UnsupportedError, }, ) - self.validate_identity("SELECT * FROM (VALUES (1))") - self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE") - self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ") - self.validate_identity("APPROX_PERCENTILE(a, b, c, d)") def test_encode_decode(self): self.validate_all( @@ -529,3 +576,27 @@ class TestPresto(Validator): "presto": "FROM_HEX(x)", }, ) + + def test_json(self): + self.validate_all( + "SELECT CAST(JSON '[1,23,456]' AS ARRAY(INTEGER))", + write={ + "spark": "SELECT FROM_JSON('[1,23,456]', 'ARRAY<INT>')",
+ "presto": "SELECT CAST(CAST('[1,23,456]' AS JSON) AS ARRAY(INTEGER))", + }, + ) + self.validate_all( + """SELECT CAST(JSON '{"k1":1,"k2":23,"k3":456}' AS MAP(VARCHAR, INTEGER))""", + write={ + "spark": 'SELECT FROM_JSON(\'{"k1":1,"k2":23,"k3":456}\', \'MAP<STRING, INT>\')', + "presto": 'SELECT CAST(CAST(\'{"k1":1,"k2":23,"k3":456}\' AS JSON) AS MAP(VARCHAR, INTEGER))', + }, + ) + + self.validate_all( + "SELECT CAST(ARRAY [1, 23, 456] AS JSON)", + write={ + "spark": "SELECT TO_JSON(ARRAY(1, 23, 456))", + "presto": "SELECT CAST(ARRAY[1, 23, 456] AS JSON)", + }, + ) |