path: root/tests/dialects
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2023-02-08 04:14:30 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2023-02-08 04:14:30 +0000
commit     99980f928b5b7be237d108266072e51aa3bb354e
tree       ce6fff00ea2b834bdbe3d84dcac90df1617d4245 /tests/dialects
parent     Adding upstream version 10.6.0.
Adding upstream version 10.6.3. (upstream/10.6.3)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects')
-rw-r--r--  tests/dialects/test_bigquery.py    4
-rw-r--r--  tests/dialects/test_dialect.py    58
-rw-r--r--  tests/dialects/test_duckdb.py     19
-rw-r--r--  tests/dialects/test_hive.py        6
-rw-r--r--  tests/dialects/test_mysql.py      20
-rw-r--r--  tests/dialects/test_postgres.py   20
-rw-r--r--  tests/dialects/test_presto.py     87
-rw-r--r--  tests/dialects/test_spark.py      11
-rw-r--r--  tests/dialects/test_sqlite.py      6
-rw-r--r--  tests/dialects/test_teradata.py    3
10 files changed, 205 insertions, 29 deletions
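
For reference, the updated expectations in this patch can be reproduced outside the test harness with sqlglot's public transpile API. A minimal sketch (the inputs and expected outputs mirror the test cases in this diff):

    import sqlglot

    # BigQuery's CURRENT_TIMESTAMP() now renders without parentheses in Presto.
    print(sqlglot.transpile("SELECT CURRENT_TIMESTAMP()", read="bigquery", write="presto")[0])
    # expected per tests/dialects/test_bigquery.py: SELECT CURRENT_TIMESTAMP

    # The '%H:%i:%S' time-format suffix collapses to '%T' when writing Presto.
    print(sqlglot.transpile("STR_TO_TIME(x, '%Y-%m-%dT%H:%M:%S')", write="presto")[0])
    # expected per tests/dialects/test_dialect.py: DATE_PARSE(x, '%Y-%m-%dT%T')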
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index e5b1c94..241f496 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -170,7 +170,7 @@ class TestBigQuery(Validator):
"bigquery": "CURRENT_TIMESTAMP()",
"duckdb": "CURRENT_TIMESTAMP()",
"postgres": "CURRENT_TIMESTAMP",
- "presto": "CURRENT_TIMESTAMP()",
+ "presto": "CURRENT_TIMESTAMP",
"hive": "CURRENT_TIMESTAMP()",
"spark": "CURRENT_TIMESTAMP()",
},
@@ -181,7 +181,7 @@ class TestBigQuery(Validator):
"bigquery": "CURRENT_TIMESTAMP()",
"duckdb": "CURRENT_TIMESTAMP()",
"postgres": "CURRENT_TIMESTAMP",
- "presto": "CURRENT_TIMESTAMP()",
+ "presto": "CURRENT_TIMESTAMP",
"hive": "CURRENT_TIMESTAMP()",
"spark": "CURRENT_TIMESTAMP()",
},
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index 5a13655..a456415 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -1,6 +1,7 @@
import unittest
from sqlglot import Dialect, Dialects, ErrorLevel, UnsupportedError, parse_one
+from sqlglot.dialects import Hive
class Validator(unittest.TestCase):
@@ -67,6 +68,11 @@ class TestDialect(Validator):
self.assertIsNotNone(Dialect.get_or_raise(dialect))
self.assertIsNotNone(Dialect[dialect.value])
+ def test_get_or_raise(self):
+ self.assertEqual(Dialect.get_or_raise(Hive), Hive)
+ self.assertEqual(Dialect.get_or_raise(Hive()), Hive)
+ self.assertEqual(Dialect.get_or_raise("hive"), Hive)
+
def test_cast(self):
self.validate_all(
"CAST(a AS TEXT)",
@@ -280,6 +286,21 @@ class TestDialect(Validator):
write={"oracle": "CAST(a AS NUMBER)"},
)
+ def test_if_null(self):
+ self.validate_all(
+ "SELECT IFNULL(1, NULL) FROM foo",
+ write={
+ "": "SELECT COALESCE(1, NULL) FROM foo",
+ "redshift": "SELECT COALESCE(1, NULL) FROM foo",
+ "postgres": "SELECT COALESCE(1, NULL) FROM foo",
+ "mysql": "SELECT COALESCE(1, NULL) FROM foo",
+ "duckdb": "SELECT COALESCE(1, NULL) FROM foo",
+ "spark": "SELECT COALESCE(1, NULL) FROM foo",
+ "bigquery": "SELECT COALESCE(1, NULL) FROM foo",
+ "presto": "SELECT COALESCE(1, NULL) FROM foo",
+ },
+ )
+
def test_time(self):
self.validate_all(
"STR_TO_TIME(x, '%Y-%m-%dT%H:%M:%S')",
@@ -287,10 +308,10 @@ class TestDialect(Validator):
"duckdb": "STRPTIME(x, '%Y-%m-%dT%H:%M:%S')",
},
write={
- "mysql": "STR_TO_DATE(x, '%Y-%m-%dT%H:%i:%S')",
+ "mysql": "STR_TO_DATE(x, '%Y-%m-%dT%T')",
"duckdb": "STRPTIME(x, '%Y-%m-%dT%H:%M:%S')",
"hive": "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP(x, 'yyyy-MM-ddTHH:mm:ss')) AS TIMESTAMP)",
- "presto": "DATE_PARSE(x, '%Y-%m-%dT%H:%i:%S')",
+ "presto": "DATE_PARSE(x, '%Y-%m-%dT%T')",
"drill": "TO_TIMESTAMP(x, 'yyyy-MM-dd''T''HH:mm:ss')",
"redshift": "TO_TIMESTAMP(x, 'YYYY-MM-DDTHH:MI:SS')",
"spark": "TO_TIMESTAMP(x, 'yyyy-MM-ddTHH:mm:ss')",
@@ -356,7 +377,7 @@ class TestDialect(Validator):
write={
"duckdb": "EPOCH(CAST('2020-01-01' AS TIMESTAMP))",
"hive": "UNIX_TIMESTAMP('2020-01-01')",
- "presto": "TO_UNIXTIME(DATE_PARSE('2020-01-01', '%Y-%m-%d %H:%i:%S'))",
+ "presto": "TO_UNIXTIME(DATE_PARSE('2020-01-01', '%Y-%m-%d %T'))",
},
)
self.validate_all(
@@ -418,7 +439,7 @@ class TestDialect(Validator):
self.validate_all(
"UNIX_TO_STR(x, y)",
write={
- "duckdb": "STRFTIME(TO_TIMESTAMP(CAST(x AS BIGINT)), y)",
+ "duckdb": "STRFTIME(TO_TIMESTAMP(x), y)",
"hive": "FROM_UNIXTIME(x, y)",
"presto": "DATE_FORMAT(FROM_UNIXTIME(x), y)",
"starrocks": "FROM_UNIXTIME(x, y)",
@@ -427,7 +448,7 @@ class TestDialect(Validator):
self.validate_all(
"UNIX_TO_TIME(x)",
write={
- "duckdb": "TO_TIMESTAMP(CAST(x AS BIGINT))",
+ "duckdb": "TO_TIMESTAMP(x)",
"hive": "FROM_UNIXTIME(x)",
"oracle": "TO_DATE('1970-01-01','YYYY-MM-DD') + (x / 86400)",
"postgres": "TO_TIMESTAMP(x)",
@@ -438,7 +459,7 @@ class TestDialect(Validator):
self.validate_all(
"UNIX_TO_TIME_STR(x)",
write={
- "duckdb": "CAST(TO_TIMESTAMP(CAST(x AS BIGINT)) AS TEXT)",
+ "duckdb": "CAST(TO_TIMESTAMP(x) AS TEXT)",
"hive": "FROM_UNIXTIME(x)",
"presto": "CAST(FROM_UNIXTIME(x) AS VARCHAR)",
},
@@ -575,10 +596,10 @@ class TestDialect(Validator):
},
write={
"drill": "TO_DATE(x, 'yyyy-MM-dd''T''HH:mm:ss')",
- "mysql": "STR_TO_DATE(x, '%Y-%m-%dT%H:%i:%S')",
- "starrocks": "STR_TO_DATE(x, '%Y-%m-%dT%H:%i:%S')",
+ "mysql": "STR_TO_DATE(x, '%Y-%m-%dT%T')",
+ "starrocks": "STR_TO_DATE(x, '%Y-%m-%dT%T')",
"hive": "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP(x, 'yyyy-MM-ddTHH:mm:ss')) AS DATE)",
- "presto": "CAST(DATE_PARSE(x, '%Y-%m-%dT%H:%i:%S') AS DATE)",
+ "presto": "CAST(DATE_PARSE(x, '%Y-%m-%dT%T') AS DATE)",
"spark": "TO_DATE(x, 'yyyy-MM-ddTHH:mm:ss')",
},
)
@@ -709,6 +730,7 @@ class TestDialect(Validator):
"hive": "REDUCE(x, 0, (acc, x) -> acc + x, acc -> acc)",
"presto": "REDUCE(x, 0, (acc, x) -> acc + x, acc -> acc)",
"spark": "AGGREGATE(x, 0, (acc, x) -> acc + x, acc -> acc)",
+ "presto": "REDUCE(x, 0, (acc, x) -> acc + x, acc -> acc)",
},
)
@@ -1381,3 +1403,21 @@ SELECT
"spark": "MERGE INTO a AS b USING c AS d ON b.id = d.id WHEN MATCHED AND EXISTS(SELECT b.name EXCEPT SELECT d.name) THEN UPDATE SET b.name = d.name",
},
)
+
+ def test_substring(self):
+ self.validate_all(
+ "SUBSTR('123456', 2, 3)",
+ write={
+ "bigquery": "SUBSTR('123456', 2, 3)",
+ "oracle": "SUBSTR('123456', 2, 3)",
+ "postgres": "SUBSTR('123456', 2, 3)",
+ },
+ )
+ self.validate_all(
+ "SUBSTRING('123456', 2, 3)",
+ write={
+ "bigquery": "SUBSTRING('123456', 2, 3)",
+ "oracle": "SUBSTR('123456', 2, 3)",
+ "postgres": "SUBSTRING('123456' FROM 2 FOR 3)",
+ },
+ )
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index f6446ca..f01a604 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -22,7 +22,7 @@ class TestDuckDB(Validator):
"EPOCH_MS(x)",
write={
"bigquery": "UNIX_TO_TIME(x / 1000)",
- "duckdb": "TO_TIMESTAMP(CAST(x / 1000 AS BIGINT))",
+ "duckdb": "TO_TIMESTAMP(x / 1000)",
"presto": "FROM_UNIXTIME(x / 1000)",
"spark": "FROM_UNIXTIME(x / 1000)",
},
@@ -41,7 +41,7 @@ class TestDuckDB(Validator):
"STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
write={
"duckdb": "STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
- "presto": "DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')",
+ "presto": "DATE_FORMAT(x, '%Y-%m-%d %T')",
"hive": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')",
},
)
@@ -58,9 +58,10 @@ class TestDuckDB(Validator):
self.validate_all(
"TO_TIMESTAMP(x)",
write={
- "duckdb": "CAST(x AS TIMESTAMP)",
- "presto": "CAST(x AS TIMESTAMP)",
- "hive": "CAST(x AS TIMESTAMP)",
+ "bigquery": "UNIX_TO_TIME(x)",
+ "duckdb": "TO_TIMESTAMP(x)",
+ "presto": "FROM_UNIXTIME(x)",
+ "hive": "FROM_UNIXTIME(x)",
},
)
self.validate_all(
@@ -334,6 +335,14 @@ class TestDuckDB(Validator):
},
)
+ self.validate_all(
+ "cast([[1]] as int[][])",
+ write={
+ "duckdb": "CAST(LIST_VALUE(LIST_VALUE(1)) AS INT[][])",
+ "spark": "CAST(ARRAY(ARRAY(1)) AS ARRAY<ARRAY<INT>>)",
+ },
+ )
+
def test_bool_or(self):
self.validate_all(
"SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",
diff --git a/tests/dialects/test_hive.py b/tests/dialects/test_hive.py
index c41e4f7..1f35d1d 100644
--- a/tests/dialects/test_hive.py
+++ b/tests/dialects/test_hive.py
@@ -259,7 +259,7 @@ class TestHive(Validator):
self.validate_all(
"""from_unixtime(x, "yyyy-MM-dd'T'HH")""",
write={
- "duckdb": "STRFTIME(TO_TIMESTAMP(CAST(x AS BIGINT)), '%Y-%m-%d''T''%H')",
+ "duckdb": "STRFTIME(TO_TIMESTAMP(x), '%Y-%m-%d''T''%H')",
"presto": "DATE_FORMAT(FROM_UNIXTIME(x), '%Y-%m-%d''T''%H')",
"hive": "FROM_UNIXTIME(x, 'yyyy-MM-dd\\'T\\'HH')",
"spark": "FROM_UNIXTIME(x, 'yyyy-MM-dd\\'T\\'HH')",
@@ -269,7 +269,7 @@ class TestHive(Validator):
"DATE_FORMAT('2020-01-01', 'yyyy-MM-dd HH:mm:ss')",
write={
"duckdb": "STRFTIME(CAST('2020-01-01' AS TIMESTAMP), '%Y-%m-%d %H:%M:%S')",
- "presto": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), '%Y-%m-%d %H:%i:%S')",
+ "presto": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), '%Y-%m-%d %T')",
"hive": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss')",
"spark": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss')",
},
@@ -308,7 +308,7 @@ class TestHive(Validator):
"UNIX_TIMESTAMP(x)",
write={
"duckdb": "EPOCH(STRPTIME(x, '%Y-%m-%d %H:%M:%S'))",
- "presto": "TO_UNIXTIME(DATE_PARSE(x, '%Y-%m-%d %H:%i:%S'))",
+ "presto": "TO_UNIXTIME(DATE_PARSE(x, '%Y-%m-%d %T'))",
"hive": "UNIX_TIMESTAMP(x)",
"spark": "UNIX_TIMESTAMP(x)",
"": "STR_TO_UNIX(x, '%Y-%m-%d %H:%M:%S')",
diff --git a/tests/dialects/test_mysql.py b/tests/dialects/test_mysql.py
index ce865e1..3e3b0d3 100644
--- a/tests/dialects/test_mysql.py
+++ b/tests/dialects/test_mysql.py
@@ -196,6 +196,26 @@ class TestMySQL(Validator):
def test_mysql(self):
self.validate_all(
+ "SELECT a FROM tbl FOR UPDATE",
+ write={
+ "": "SELECT a FROM tbl",
+ "mysql": "SELECT a FROM tbl FOR UPDATE",
+ "oracle": "SELECT a FROM tbl FOR UPDATE",
+ "postgres": "SELECT a FROM tbl FOR UPDATE",
+ "tsql": "SELECT a FROM tbl FOR UPDATE",
+ },
+ )
+ self.validate_all(
+ "SELECT a FROM tbl FOR SHARE",
+ write={
+ "": "SELECT a FROM tbl",
+ "mysql": "SELECT a FROM tbl FOR SHARE",
+ "oracle": "SELECT a FROM tbl FOR SHARE",
+ "postgres": "SELECT a FROM tbl FOR SHARE",
+ "tsql": "SELECT a FROM tbl FOR SHARE",
+ },
+ )
+ self.validate_all(
"GROUP_CONCAT(DISTINCT x ORDER BY y DESC)",
write={
"mysql": "GROUP_CONCAT(DISTINCT x ORDER BY y DESC SEPARATOR ',')",
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 8a17b78..5664a2a 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -113,6 +113,22 @@ class TestPostgres(Validator):
self.validate_identity("x ~* 'y'")
self.validate_all(
+ "GENERATE_SERIES(a, b, ' 2 days ')",
+ write={
+ "postgres": "GENERATE_SERIES(a, b, INTERVAL '2' days)",
+ "presto": "SEQUENCE(a, b, INTERVAL '2' days)",
+ "trino": "SEQUENCE(a, b, INTERVAL '2' days)",
+ },
+ )
+ self.validate_all(
+ "GENERATE_SERIES('2019-01-01'::TIMESTAMP, NOW(), '1day')",
+ write={
+ "postgres": "GENERATE_SERIES(CAST('2019-01-01' AS TIMESTAMP), CURRENT_TIMESTAMP, INTERVAL '1' day)",
+ "presto": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' day)",
+ "trino": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' day)",
+ },
+ )
+ self.validate_all(
"END WORK AND NO CHAIN",
write={"postgres": "COMMIT AND NO CHAIN"},
)
@@ -249,7 +265,7 @@ class TestPostgres(Validator):
)
self.validate_all(
"'[1,2,3]'::json->2",
- write={"postgres": "CAST('[1,2,3]' AS JSON) -> '2'"},
+ write={"postgres": "CAST('[1,2,3]' AS JSON) -> 2"},
)
self.validate_all(
"""'{"a":1,"b":2}'::json->'b'""",
@@ -265,7 +281,7 @@ class TestPostgres(Validator):
)
self.validate_all(
"""'[1,2,3]'::json->>2""",
- write={"postgres": "CAST('[1,2,3]' AS JSON) ->> '2'"},
+ write={"postgres": "CAST('[1,2,3]' AS JSON) ->> 2"},
)
self.validate_all(
"""'{"a":1,"b":2}'::json->>'b'""",
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index 5ecd69a..9815dcc 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -111,7 +111,7 @@ class TestPresto(Validator):
"DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')",
write={
"duckdb": "STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
- "presto": "DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')",
+ "presto": "DATE_FORMAT(x, '%Y-%m-%d %T')",
"hive": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')",
"spark": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')",
},
@@ -120,7 +120,7 @@ class TestPresto(Validator):
"DATE_PARSE(x, '%Y-%m-%d %H:%i:%S')",
write={
"duckdb": "STRPTIME(x, '%Y-%m-%d %H:%M:%S')",
- "presto": "DATE_PARSE(x, '%Y-%m-%d %H:%i:%S')",
+ "presto": "DATE_PARSE(x, '%Y-%m-%d %T')",
"hive": "CAST(x AS TIMESTAMP)",
"spark": "TO_TIMESTAMP(x, 'yyyy-MM-dd HH:mm:ss')",
},
@@ -135,6 +135,12 @@ class TestPresto(Validator):
},
)
self.validate_all(
+ "DATE_FORMAT(x, '%T')",
+ write={
+ "hive": "DATE_FORMAT(x, 'HH:mm:ss')",
+ },
+ )
+ self.validate_all(
"DATE_PARSE(SUBSTR(x, 1, 10), '%Y-%m-%d')",
write={
"duckdb": "STRPTIME(SUBSTR(x, 1, 10), '%Y-%m-%d')",
@@ -146,7 +152,7 @@ class TestPresto(Validator):
self.validate_all(
"FROM_UNIXTIME(x)",
write={
- "duckdb": "TO_TIMESTAMP(CAST(x AS BIGINT))",
+ "duckdb": "TO_TIMESTAMP(x)",
"presto": "FROM_UNIXTIME(x)",
"hive": "FROM_UNIXTIME(x)",
"spark": "FROM_UNIXTIME(x)",
@@ -177,11 +183,51 @@ class TestPresto(Validator):
self.validate_all(
"NOW()",
write={
- "presto": "CURRENT_TIMESTAMP()",
+ "presto": "CURRENT_TIMESTAMP",
"hive": "CURRENT_TIMESTAMP()",
},
)
+ self.validate_all(
+ "DAY_OF_WEEK(timestamp '2012-08-08 01:00')",
+ write={
+ "spark": "DAYOFWEEK(CAST('2012-08-08 01:00' AS TIMESTAMP))",
+ "presto": "DAY_OF_WEEK(CAST('2012-08-08 01:00' AS TIMESTAMP))",
+ },
+ )
+
+ self.validate_all(
+ "DAY_OF_MONTH(timestamp '2012-08-08 01:00')",
+ write={
+ "spark": "DAYOFMONTH(CAST('2012-08-08 01:00' AS TIMESTAMP))",
+ "presto": "DAY_OF_MONTH(CAST('2012-08-08 01:00' AS TIMESTAMP))",
+ },
+ )
+
+ self.validate_all(
+ "DAY_OF_YEAR(timestamp '2012-08-08 01:00')",
+ write={
+ "spark": "DAYOFYEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))",
+ "presto": "DAY_OF_YEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))",
+ },
+ )
+
+ self.validate_all(
+ "WEEK_OF_YEAR(timestamp '2012-08-08 01:00')",
+ write={
+ "spark": "WEEKOFYEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))",
+ "presto": "WEEK_OF_YEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))",
+ },
+ )
+
+ self.validate_all(
+ "SELECT timestamp '2012-10-31 00:00' AT TIME ZONE 'America/Sao_Paulo'",
+ write={
+ "spark": "SELECT FROM_UTC_TIMESTAMP(CAST('2012-10-31 00:00' AS TIMESTAMP), 'America/Sao_Paulo')",
+ "presto": "SELECT CAST('2012-10-31 00:00' AS TIMESTAMP) AT TIME ZONE 'America/Sao_Paulo'",
+ },
+ )
+
def test_ddl(self):
self.validate_all(
"CREATE TABLE test WITH (FORMAT = 'PARQUET') AS SELECT 1",
@@ -314,6 +360,11 @@ class TestPresto(Validator):
def test_presto(self):
self.validate_identity("SELECT BOOL_OR(a > 10) FROM asd AS T(a)")
+ self.validate_identity("SELECT * FROM (VALUES (1))")
+ self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE")
+ self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ")
+ self.validate_identity("APPROX_PERCENTILE(a, b, c, d)")
+
self.validate_all(
'SELECT a."b" FROM "foo"',
write={
@@ -455,10 +506,6 @@ class TestPresto(Validator):
"spark": UnsupportedError,
},
)
- self.validate_identity("SELECT * FROM (VALUES (1))")
- self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE")
- self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ")
- self.validate_identity("APPROX_PERCENTILE(a, b, c, d)")
def test_encode_decode(self):
self.validate_all(
@@ -529,3 +576,27 @@ class TestPresto(Validator):
"presto": "FROM_HEX(x)",
},
)
+
+ def test_json(self):
+ self.validate_all(
+ "SELECT CAST(JSON '[1,23,456]' AS ARRAY(INTEGER))",
+ write={
+ "spark": "SELECT FROM_JSON('[1,23,456]', 'ARRAY<INT>')",
+ "presto": "SELECT CAST(CAST('[1,23,456]' AS JSON) AS ARRAY(INTEGER))",
+ },
+ )
+ self.validate_all(
+ """SELECT CAST(JSON '{"k1":1,"k2":23,"k3":456}' AS MAP(VARCHAR, INTEGER))""",
+ write={
+ "spark": 'SELECT FROM_JSON(\'{"k1":1,"k2":23,"k3":456}\', \'MAP<STRING, INT>\')',
+ "presto": 'SELECT CAST(CAST(\'{"k1":1,"k2":23,"k3":456}\' AS JSON) AS MAP(VARCHAR, INTEGER))',
+ },
+ )
+
+ self.validate_all(
+ "SELECT CAST(ARRAY [1, 23, 456] AS JSON)",
+ write={
+ "spark": "SELECT TO_JSON(ARRAY(1, 23, 456))",
+ "presto": "SELECT CAST(ARRAY[1, 23, 456] AS JSON)",
+ },
+ )
diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index 02d43aa..be74a27 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -212,6 +212,17 @@ TBLPROPERTIES (
self.validate_identity("TRIM(BOTH 'SL' FROM 'SSparkSQLS')")
self.validate_identity("TRIM(LEADING 'SL' FROM 'SSparkSQLS')")
self.validate_identity("TRIM(TRAILING 'SL' FROM 'SSparkSQLS')")
+
+ self.validate_all(
+ "AGGREGATE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
+ write={
+ "trino": "REDUCE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
+ "duckdb": "REDUCE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
+ "hive": "REDUCE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
+ "presto": "REDUCE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
+ "spark": "AGGREGATE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
+ },
+ )
self.validate_all(
"TRIM('SL', 'SSparkSQLS')", write={"spark": "TRIM('SL' FROM 'SSparkSQLS')"}
)
diff --git a/tests/dialects/test_sqlite.py b/tests/dialects/test_sqlite.py
index e54a4bc..c4f4a6e 100644
--- a/tests/dialects/test_sqlite.py
+++ b/tests/dialects/test_sqlite.py
@@ -92,3 +92,9 @@ class TestSQLite(Validator):
"sqlite": "SELECT FIRST_VALUE(Name) OVER (PARTITION BY AlbumId ORDER BY Bytes DESC) AS LargestTrack FROM tracks"
},
)
+
+ def test_longvarchar_dtype(self):
+ self.validate_all(
+ "CREATE TABLE foo (bar LONGVARCHAR)",
+ write={"sqlite": "CREATE TABLE foo (bar TEXT)"},
+ )
diff --git a/tests/dialects/test_teradata.py b/tests/dialects/test_teradata.py
index e56de25..9e82961 100644
--- a/tests/dialects/test_teradata.py
+++ b/tests/dialects/test_teradata.py
@@ -21,3 +21,6 @@ class TestTeradata(Validator):
"mysql": "UPDATE A SET col2 = '' FROM schema.tableA AS A, (SELECT col1 FROM schema.tableA GROUP BY col1) AS B WHERE A.col1 = B.col1",
},
)
+
+ def test_create(self):
+ self.validate_identity("CREATE TABLE x (y INT) PRIMARY INDEX (y) PARTITION BY y INDEX (y)")