From 07115fb6b7dc48595d30c3d1568fbeff0388d096 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Mon, 15 Apr 2024 07:02:12 +0200
Subject: Adding upstream version 23.10.0.

Signed-off-by: Daniel Baumann
---
 tests/dialects/test_spark.py | 36 ++++++++++++++++++++++++++++++------
 1 file changed, 30 insertions(+), 6 deletions(-)

(limited to 'tests/dialects/test_spark.py')

diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index 18f1fb7..d2285e0 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -2,6 +2,7 @@ from unittest import mock
 
 from sqlglot import exp, parse_one
 from sqlglot.dialects.dialect import Dialects
+from sqlglot.helper import logger as helper_logger
 from tests.dialects.test_dialect import Validator
 
 
@@ -223,17 +224,16 @@ TBLPROPERTIES (
         )
 
     def test_spark(self):
-        self.validate_identity("any_value(col, true)", "ANY_VALUE(col) IGNORE NULLS")
-        self.validate_identity("first(col, true)", "FIRST(col) IGNORE NULLS")
-        self.validate_identity("first_value(col, true)", "FIRST_VALUE(col) IGNORE NULLS")
-        self.validate_identity("last(col, true)", "LAST(col) IGNORE NULLS")
-        self.validate_identity("last_value(col, true)", "LAST_VALUE(col) IGNORE NULLS")
-
         self.assertEqual(
             parse_one("REFRESH TABLE t", read="spark").assert_is(exp.Refresh).sql(dialect="spark"),
             "REFRESH TABLE t",
         )
 
+        self.validate_identity("any_value(col, true)", "ANY_VALUE(col) IGNORE NULLS")
+        self.validate_identity("first(col, true)", "FIRST(col) IGNORE NULLS")
+        self.validate_identity("first_value(col, true)", "FIRST_VALUE(col) IGNORE NULLS")
+        self.validate_identity("last(col, true)", "LAST(col) IGNORE NULLS")
+        self.validate_identity("last_value(col, true)", "LAST_VALUE(col) IGNORE NULLS")
         self.validate_identity("DESCRIBE EXTENDED db.table")
         self.validate_identity("SELECT * FROM test TABLESAMPLE (50 PERCENT)")
         self.validate_identity("SELECT * FROM test TABLESAMPLE (5 ROWS)")
@@ -284,6 +284,30 @@ TBLPROPERTIES (
             "SELECT STR_TO_MAP('a:1,b:2,c:3', ',', ':')",
         )
 
+        with self.assertLogs(helper_logger):
+            self.validate_all(
+                "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
+                read={
+                    "databricks": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
+                },
+                write={
+                    "databricks": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
+                    "duckdb": "SELECT ([1, 2, 3])[3]",
+                    "spark": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
+                },
+            )
+
+        self.validate_all(
+            "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
+            read={
+                "databricks": "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
+            },
+            write={
+                "databricks": "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
+                "duckdb": "SELECT (MAP([1, 2], ['a', 'b'])[2])[1]",
+                "spark": "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
+            },
+        )
         self.validate_all(
             "SELECT SPLIT('123|789', '\\\\|')",
             read={
-- 
cgit v1.2.3
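
Note on what the new TRY_ELEMENT_AT assertions pin down: the same transpilation is reachable through sqlglot's public transpile() API. The snippet below is a minimal sketch, assuming sqlglot 23.10.0 is installed; the expected strings are copied from the write={} mappings in the diff above, and the warning noted for the array case is inferred from the test's assertLogs(helper_logger) wrapper, not from the library's documentation.

import sqlglot

# Spark's 1-based TRY_ELEMENT_AT(array, index) is rendered as a DuckDB bracket
# access; sqlglot appears to emit a helper-logger warning while adjusting the
# array index here, which is why the test wraps this case in assertLogs.
assert sqlglot.transpile(
    "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)", read="spark", write="duckdb"
) == ["SELECT ([1, 2, 3])[3]"]

# The map variant round-trips through the Spark dialect unchanged.
assert sqlglot.transpile(
    "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)", read="spark", write="spark"
) == ["SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)"]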