author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-08 08:11:50 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-08 08:11:50 +0000
commit     8978da3b39d7ca3cf83ee30fcc63ffe0e5453fb2 (patch)
tree       2e29f131dff77b31e84c957266de8f18655b6f88  /tests/dialects/test_duckdb.py
parent     Adding upstream version 22.2.0. (diff)
Adding upstream version 23.7.0. (tag: upstream/23.7.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_duckdb.py')
-rw-r--r--  tests/dialects/test_duckdb.py  |  90
1 file changed, 76 insertions(+), 14 deletions(-)
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index 58d1f06..5a7e93e 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -1,5 +1,6 @@
from sqlglot import ErrorLevel, UnsupportedError, exp, parse_one, transpile
from sqlglot.helper import logger as helper_logger
+from sqlglot.optimizer.annotate_types import annotate_types
from tests.dialects.test_dialect import Validator
@@ -7,6 +8,31 @@ class TestDuckDB(Validator):
dialect = "duckdb"
def test_duckdb(self):
+ query = "WITH _data AS (SELECT [{'a': 1, 'b': 2}, {'a': 2, 'b': 3}] AS col) SELECT t.col['b'] FROM _data, UNNEST(_data.col) AS t(col) WHERE t.col['a'] = 1"
+ expr = annotate_types(self.validate_identity(query))
+ self.assertEqual(
+ expr.sql(dialect="bigquery"),
+ "WITH _data AS (SELECT [STRUCT(1 AS a, 2 AS b), STRUCT(2 AS a, 3 AS b)] AS col) SELECT col.b FROM _data, UNNEST(_data.col) AS col WHERE col.a = 1",
+ )
+
+ self.validate_all(
+ "SELECT CAST('2020-01-01' AS DATE) + INTERVAL (day_offset) DAY FROM t",
+ read={
+ "duckdb": "SELECT CAST('2020-01-01' AS DATE) + INTERVAL (day_offset) DAY FROM t",
+ "mysql": "SELECT DATE '2020-01-01' + INTERVAL day_offset DAY FROM t",
+ },
+ )
+ self.validate_all(
+ "SELECT CAST('09:05:03' AS TIME) + INTERVAL 2 HOUR",
+ read={
+ "bigquery": "SELECT TIME_ADD(CAST('09:05:03' AS TIME), INTERVAL 2 HOUR)",
+ "snowflake": "SELECT TIMEADD(HOUR, 2, TO_TIME('09:05:03'))",
+ },
+ write={
+ "duckdb": "SELECT CAST('09:05:03' AS TIME) + INTERVAL '2' HOUR",
+ "snowflake": "SELECT CAST('09:05:03' AS TIME) + INTERVAL '2 HOUR'",
+ },
+ )
self.validate_all(
'STRUCT_PACK("a b" := 1)',
write={
@@ -15,7 +41,25 @@ class TestDuckDB(Validator):
"snowflake": "OBJECT_CONSTRUCT('a b', 1)",
},
)
-
+ self.validate_all(
+ "ARRAY_TO_STRING(arr, delim)",
+ read={
+ "bigquery": "ARRAY_TO_STRING(arr, delim)",
+ "postgres": "ARRAY_TO_STRING(arr, delim)",
+ "presto": "ARRAY_JOIN(arr, delim)",
+ "snowflake": "ARRAY_TO_STRING(arr, delim)",
+ "spark": "ARRAY_JOIN(arr, delim)",
+ },
+ write={
+ "bigquery": "ARRAY_TO_STRING(arr, delim)",
+ "duckdb": "ARRAY_TO_STRING(arr, delim)",
+ "postgres": "ARRAY_TO_STRING(arr, delim)",
+ "presto": "ARRAY_JOIN(arr, delim)",
+ "snowflake": "ARRAY_TO_STRING(arr, delim)",
+ "spark": "ARRAY_JOIN(arr, delim)",
+ "tsql": "STRING_AGG(arr, delim)",
+ },
+ )
self.validate_all(
"SELECT SUM(X) OVER (ORDER BY x)",
write={
@@ -131,13 +175,6 @@ class TestDuckDB(Validator):
},
)
self.validate_all(
- "WITH _data AS (SELECT [{'a': 1, 'b': 2}, {'a': 2, 'b': 3}] AS col) SELECT (SELECT col['b'] FROM UNNEST(col) AS t(col) WHERE col['a'] = 1) FROM _data",
- write={
- "bigquery": "WITH _data AS (SELECT [STRUCT(1 AS a, 2 AS b), STRUCT(2 AS a, 3 AS b)] AS col) SELECT (SELECT col.b FROM UNNEST(col) AS col WHERE col.a = 1) FROM _data",
- "duckdb": "WITH _data AS (SELECT [{'a': 1, 'b': 2}, {'a': 2, 'b': 3}] AS col) SELECT (SELECT col['b'] FROM UNNEST(col) AS t(col) WHERE col['a'] = 1) FROM _data",
- },
- )
- self.validate_all(
"SELECT {'bla': column1, 'foo': column2, 'bar': column3} AS data FROM source_table",
read={
"bigquery": "SELECT STRUCT(column1 AS bla, column2 AS foo, column3 AS bar) AS data FROM source_table",
@@ -201,6 +238,9 @@ class TestDuckDB(Validator):
parse_one("a // b", read="duckdb").assert_is(exp.IntDiv).sql(dialect="duckdb"), "a // b"
)
+ self.validate_identity("SELECT MAP(['key1', 'key2', 'key3'], [10, 20, 30])")
+ self.validate_identity("SELECT MAP {'x': 1}")
+ self.validate_identity("SELECT df1.*, df2.* FROM df1 POSITIONAL JOIN df2")
self.validate_identity("MAKE_TIMESTAMP(1992, 9, 20, 13, 34, 27.123456)")
self.validate_identity("MAKE_TIMESTAMP(1667810584123456)")
self.validate_identity("SELECT EPOCH_MS(10) AS t")
@@ -235,6 +275,18 @@ class TestDuckDB(Validator):
"""SELECT '{"foo": [1, 2, 3]}' -> '$.foo' -> '$[0]'""",
)
self.validate_identity(
+ "SELECT ($$hello)'world$$)",
+ "SELECT ('hello)''world')",
+ )
+ self.validate_identity(
+ "SELECT $$foo$$",
+ "SELECT 'foo'",
+ )
+ self.validate_identity(
+ "SELECT $tag$foo$tag$",
+ "SELECT 'foo'",
+ )
+ self.validate_identity(
"JSON_EXTRACT(x, '$.family')",
"x -> '$.family'",
)
@@ -679,7 +731,19 @@ class TestDuckDB(Validator):
},
)
self.validate_identity(
- "[x.STRING_SPLIT(' ')[1] FOR x IN ['1', '2', 3] IF x.CONTAINS('1')]"
+ "[x.STRING_SPLIT(' ')[i] FOR x IN ['1', '2', 3] IF x.CONTAINS('1')]"
+ )
+ self.validate_identity(
+ """SELECT LIST_VALUE(1)[i]""",
+ """SELECT ([1])[i]""",
+ )
+ self.validate_identity(
+ """{'x': LIST_VALUE(1)[i]}""",
+ """{'x': ([1])[i]}""",
+ )
+ self.validate_identity(
+ """SELECT LIST_APPLY(RANGE(1, 4), i -> {'f1': LIST_VALUE(1, 2, 3)[i], 'f2': LIST_VALUE(1, 2, 3)[i]})""",
+ """SELECT LIST_APPLY(RANGE(1, 4), i -> {'f1': ([1, 2, 3])[i], 'f2': ([1, 2, 3])[i]})""",
)
self.assertEqual(
@@ -689,8 +753,6 @@ class TestDuckDB(Validator):
"WARNING:sqlglot:Applying array index offset (1)",
"WARNING:sqlglot:Applying array index offset (1)",
"WARNING:sqlglot:Applying array index offset (1)",
- "WARNING:sqlglot:Applying array index offset (-1)",
- "WARNING:sqlglot:Applying array index offset (1)",
],
)
@@ -702,7 +764,7 @@ class TestDuckDB(Validator):
"SELECT MAKE_DATE(2016, 12, 25)", read={"bigquery": "SELECT DATE(2016, 12, 25)"}
)
self.validate_all(
- "SELECT CAST(CAST('2016-12-25 23:59:59' AS DATETIME) AS DATE)",
+ "SELECT CAST(CAST('2016-12-25 23:59:59' AS TIMESTAMP) AS DATE)",
read={"bigquery": "SELECT DATE(DATETIME '2016-12-25 23:59:59')"},
)
self.validate_all(
@@ -724,7 +786,7 @@ class TestDuckDB(Validator):
write={"duckdb": "SELECT (90 * INTERVAL '1' DAY)"},
)
self.validate_all(
- "SELECT ((DATE_TRUNC('DAY', CAST(CAST(DATE_TRUNC('DAY', CURRENT_TIMESTAMP) AS DATE) AS TIMESTAMP) + INTERVAL (0 - MOD((DAYOFWEEK(CAST(CAST(DATE_TRUNC('DAY', CURRENT_TIMESTAMP) AS DATE) AS TIMESTAMP)) % 7) - 1 + 7, 7)) DAY) + (7 * INTERVAL (-5) DAY))) AS t1",
+ "SELECT ((DATE_TRUNC('DAY', CAST(CAST(DATE_TRUNC('DAY', CURRENT_TIMESTAMP) AS DATE) AS TIMESTAMP) + INTERVAL (0 - (DAYOFWEEK(CAST(CAST(DATE_TRUNC('DAY', CURRENT_TIMESTAMP) AS DATE) AS TIMESTAMP)) % 7) - 1 + 7 % 7) DAY) + (7 * INTERVAL (-5) DAY))) AS t1",
read={
"presto": "SELECT ((DATE_ADD('week', -5, DATE_TRUNC('DAY', DATE_ADD('day', (0 - MOD((DAY_OF_WEEK(CAST(CAST(DATE_TRUNC('DAY', NOW()) AS DATE) AS TIMESTAMP)) % 7) - 1 + 7, 7)), CAST(CAST(DATE_TRUNC('DAY', NOW()) AS DATE) AS TIMESTAMP)))))) AS t1",
},
@@ -952,7 +1014,7 @@ class TestDuckDB(Validator):
"hive": "CAST(COL AS ARRAY<BIGINT>)",
"spark": "CAST(COL AS ARRAY<BIGINT>)",
"postgres": "CAST(COL AS BIGINT[])",
- "snowflake": "CAST(COL AS ARRAY)",
+ "snowflake": "CAST(COL AS ARRAY(BIGINT))",
},
)
self.validate_all(
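
For reference, a minimal standalone sketch (not part of the patch itself) of the round trip the new annotate_types test at the top of this diff exercises; it assumes a sqlglot version matching this release (23.x) with the duckdb and bigquery dialects available:

    from sqlglot import parse_one
    from sqlglot.optimizer.annotate_types import annotate_types

    # Parse the DuckDB query, annotate expression types so struct field
    # accesses can be resolved, then re-render the tree for BigQuery:
    # bracket access (t.col['b']) becomes dot access (col.b) and the
    # struct literals become STRUCT(...) constructors.
    query = (
        "WITH _data AS (SELECT [{'a': 1, 'b': 2}, {'a': 2, 'b': 3}] AS col) "
        "SELECT t.col['b'] FROM _data, UNNEST(_data.col) AS t(col) "
        "WHERE t.col['a'] = 1"
    )
    expression = annotate_types(parse_one(query, read="duckdb"))
    print(expression.sql(dialect="bigquery"))
    # WITH _data AS (SELECT [STRUCT(1 AS a, 2 AS b), STRUCT(2 AS a, 3 AS b)] AS col)
    # SELECT col.b FROM _data, UNNEST(_data.col) AS col WHERE col.a = 1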