path: root/tests/dialects/test_hive.py
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2022-12-02 09:16:29 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2022-12-02 09:16:29 +0000
commit     1a60bbae98d3b530924a6807a55f8250de19ea86 (patch)
tree       87d3000f271a6604fff43db188731229aed918a8 /tests/dialects/test_hive.py
parent     Adding upstream version 10.0.8. (diff)
download   sqlglot-1a60bbae98d3b530924a6807a55f8250de19ea86.tar.xz
           sqlglot-1a60bbae98d3b530924a6807a55f8250de19ea86.zip
Adding upstream version 10.1.3. (upstream/10.1.3)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_hive.py')
-rw-r--r--  tests/dialects/test_hive.py | 5
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/tests/dialects/test_hive.py b/tests/dialects/test_hive.py
index 69c7630..22d7bce 100644
--- a/tests/dialects/test_hive.py
+++ b/tests/dialects/test_hive.py
@@ -139,7 +139,7 @@ class TestHive(Validator):
"CREATE TABLE test STORED AS parquet TBLPROPERTIES ('x'='1', 'Z'='2') AS SELECT 1",
write={
"duckdb": "CREATE TABLE test AS SELECT 1",
- "presto": "CREATE TABLE test WITH (FORMAT='parquet', x='1', Z='2') AS SELECT 1",
+ "presto": "CREATE TABLE test WITH (FORMAT='PARQUET', x='1', Z='2') AS SELECT 1",
"hive": "CREATE TABLE test STORED AS PARQUET TBLPROPERTIES ('x'='1', 'Z'='2') AS SELECT 1",
"spark": "CREATE TABLE test USING PARQUET TBLPROPERTIES ('x'='1', 'Z'='2') AS SELECT 1",
},
@@ -459,6 +459,7 @@ class TestHive(Validator):
"hive": "MAP(a, b, c, d)",
"presto": "MAP(ARRAY[a, c], ARRAY[b, d])",
"spark": "MAP(a, b, c, d)",
+ "snowflake": "OBJECT_CONSTRUCT(a, b, c, d)",
},
write={
"": "MAP(ARRAY(a, c), ARRAY(b, d))",
@@ -467,6 +468,7 @@ class TestHive(Validator):
"presto": "MAP(ARRAY[a, c], ARRAY[b, d])",
"hive": "MAP(a, b, c, d)",
"spark": "MAP(a, b, c, d)",
+ "snowflake": "OBJECT_CONSTRUCT(a, b, c, d)",
},
)
self.validate_all(
@@ -476,6 +478,7 @@ class TestHive(Validator):
"presto": "MAP(ARRAY[a], ARRAY[b])",
"hive": "MAP(a, b)",
"spark": "MAP(a, b)",
+ "snowflake": "OBJECT_CONSTRUCT(a, b)",
},
)
self.validate_all(
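
The hunks above exercise sqlglot's transpilation of Hive SQL: MAP() calls now also validate against a Snowflake write target (OBJECT_CONSTRUCT), and the Presto CTAS storage format is emitted in upper case. A minimal sketch of the behaviour these tests expect, assuming a sqlglot build at or after 10.1.3 is installed:

import sqlglot

# Hive's MAP(k1, v1, k2, v2, ...) takes interleaved keys and values;
# the tests above expect it to become Snowflake's OBJECT_CONSTRUCT.
print(sqlglot.transpile("SELECT MAP(a, b, c, d)", read="hive", write="snowflake"))
# expected, per the test above: ['SELECT OBJECT_CONSTRUCT(a, b, c, d)']

# The Presto CTAS now upper-cases the storage format name.
print(
    sqlglot.transpile(
        "CREATE TABLE test STORED AS parquet TBLPROPERTIES ('x'='1', 'Z'='2') AS SELECT 1",
        read="hive",
        write="presto",
    )
)
# expected, per the test above:
# ["CREATE TABLE test WITH (FORMAT='PARQUET', x='1', Z='2') AS SELECT 1"]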