summary | refs | log | tree | commit | diff | stats
path: root/tests/dialects/test_duckdb.py
diff options
context:
space:
mode:
author: Daniel Baumann <daniel.baumann@progress-linux.org> 2023-02-08 04:14:30 +0000
committer: Daniel Baumann <daniel.baumann@progress-linux.org> 2023-02-08 04:14:30 +0000
commit: 99980f928b5b7be237d108266072e51aa3bb354e (patch)
tree: ce6fff00ea2b834bdbe3d84dcac90df1617d4245 /tests/dialects/test_duckdb.py
parent: Adding upstream version 10.6.0. (diff)
download: sqlglot-99980f928b5b7be237d108266072e51aa3bb354e.tar.xz
download: sqlglot-99980f928b5b7be237d108266072e51aa3bb354e.zip
Adding upstream version 10.6.3. (tag: upstream/10.6.3)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_duckdb.py')
-rw-r--r-- tests/dialects/test_duckdb.py | 19
1 file changed, 14 insertions, 5 deletions
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index f6446ca..f01a604 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -22,7 +22,7 @@ class TestDuckDB(Validator):
"EPOCH_MS(x)",
write={
"bigquery": "UNIX_TO_TIME(x / 1000)",
- "duckdb": "TO_TIMESTAMP(CAST(x / 1000 AS BIGINT))",
+ "duckdb": "TO_TIMESTAMP(x / 1000)",
"presto": "FROM_UNIXTIME(x / 1000)",
"spark": "FROM_UNIXTIME(x / 1000)",
},
@@ -41,7 +41,7 @@ class TestDuckDB(Validator):
"STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
write={
"duckdb": "STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
- "presto": "DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')",
+ "presto": "DATE_FORMAT(x, '%Y-%m-%d %T')",
"hive": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')",
},
)
@@ -58,9 +58,10 @@ class TestDuckDB(Validator):
self.validate_all(
"TO_TIMESTAMP(x)",
write={
- "duckdb": "CAST(x AS TIMESTAMP)",
- "presto": "CAST(x AS TIMESTAMP)",
- "hive": "CAST(x AS TIMESTAMP)",
+ "bigquery": "UNIX_TO_TIME(x)",
+ "duckdb": "TO_TIMESTAMP(x)",
+ "presto": "FROM_UNIXTIME(x)",
+ "hive": "FROM_UNIXTIME(x)",
},
)
self.validate_all(
@@ -334,6 +335,14 @@ class TestDuckDB(Validator):
},
)
+ self.validate_all(
+ "cast([[1]] as int[][])",
+ write={
+ "duckdb": "CAST(LIST_VALUE(LIST_VALUE(1)) AS INT[][])",
+ "spark": "CAST(ARRAY(ARRAY(1)) AS ARRAY<ARRAY<INT>>)",
+ },
+ )
+
def test_bool_or(self):
self.validate_all(
"SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",