summary | refs | log | tree | commit | diff | stats
path: root/tests/dialects/test_spark.py
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-09-03 07:37:50 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-09-03 07:37:50 +0000
commit8b1fb8d8676e832d02b99bb8e69f6897dd10c0f6 (patch)
treed31426ac93b330e755f0b500704e236b9a62dc15 /tests/dialects/test_spark.py
parentReleasing debian version 25.16.1-1. (diff)
downloadsqlglot-8b1fb8d8676e832d02b99bb8e69f6897dd10c0f6.tar.xz
sqlglot-8b1fb8d8676e832d02b99bb8e69f6897dd10c0f6.zip
Merging upstream version 25.18.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_spark.py')
-rw-r--r--  tests/dialects/test_spark.py  26
1 file changed, 25 insertions(+), 1 deletion(-)
diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index a2d87df..cbaa169 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -10,6 +10,7 @@ class TestSpark(Validator):
dialect = "spark"
def test_ddl(self):
+ self.validate_identity("INSERT OVERWRITE TABLE db1.tb1 TABLE db2.tb2")
self.validate_identity("CREATE TABLE foo AS WITH t AS (SELECT 1 AS col) SELECT col FROM t")
self.validate_identity("CREATE TEMPORARY VIEW test AS SELECT 1")
self.validate_identity("CREATE TABLE foo (col VARCHAR(50))")
@@ -484,7 +485,7 @@ TBLPROPERTIES (
)
self.validate_all(
"SELECT CAST(STRUCT('fooo') AS STRUCT<a: VARCHAR(2)>)",
- write={"spark": "SELECT CAST(STRUCT('fooo') AS STRUCT<a: STRING>)"},
+ write={"spark": "SELECT CAST(STRUCT('fooo' AS col1) AS STRUCT<a: STRING>)"},
)
self.validate_all(
"SELECT CAST(123456 AS VARCHAR(3))",
@@ -710,6 +711,29 @@ TBLPROPERTIES (
)
self.validate_identity("DESCRIBE schema.test PARTITION(ds = '2024-01-01')")
+ self.validate_all(
+ "SELECT ANY_VALUE(col, true), FIRST(col, true), FIRST_VALUE(col, true) OVER ()",
+ write={
+ "duckdb": "SELECT ANY_VALUE(col), FIRST(col), FIRST_VALUE(col IGNORE NULLS) OVER ()"
+ },
+ )
+
+ self.validate_all(
+ "SELECT STRUCT(1, 2)",
+ write={
+ "spark": "SELECT STRUCT(1 AS col1, 2 AS col2)",
+ "presto": "SELECT CAST(ROW(1, 2) AS ROW(col1 INTEGER, col2 INTEGER))",
+ "duckdb": "SELECT {'col1': 1, 'col2': 2}",
+ },
+ )
+ self.validate_all(
+ "SELECT STRUCT(x, 1, y AS col3, STRUCT(5)) FROM t",
+ write={
+ "spark": "SELECT STRUCT(x AS x, 1 AS col2, y AS col3, STRUCT(5 AS col1) AS col4) FROM t",
+ "duckdb": "SELECT {'x': x, 'col2': 1, 'col3': y, 'col4': {'col1': 5}} FROM t",
+ },
+ )
+
def test_bool_or(self):
self.validate_all(
"SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",