author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-10-09 06:28:48 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-10-09 06:28:48 +0000
commit     33802ae744af096b1be30c5e2d02e03c8fce4c77 (patch)
tree       13be65e148a9772441401d092259912c630a2adc /tests/dialects
parent     Adding upstream version 25.24.0. (diff)
Adding upstream version 25.24.5. (upstream/25.24.5, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects')
-rw-r--r--  tests/dialects/test_athena.py    |   6
-rw-r--r--  tests/dialects/test_bigquery.py  |  14
-rw-r--r--  tests/dialects/test_duckdb.py    |  32
-rw-r--r--  tests/dialects/test_mysql.py     |  20
-rw-r--r--  tests/dialects/test_oracle.py    |  17
-rw-r--r--  tests/dialects/test_postgres.py  |  12
-rw-r--r--  tests/dialects/test_redshift.py  |   6
-rw-r--r--  tests/dialects/test_spark.py     |  27
-rw-r--r--  tests/dialects/test_sqlite.py    |   1
-rw-r--r--  tests/dialects/test_trino.py     |   6
-rw-r--r--  tests/dialects/test_tsql.py      |   9
11 files changed, 122 insertions, 28 deletions
diff --git a/tests/dialects/test_athena.py b/tests/dialects/test_athena.py
index ca91d4a..ef96938 100644
--- a/tests/dialects/test_athena.py
+++ b/tests/dialects/test_athena.py
@@ -62,8 +62,12 @@ class TestAthena(Validator):
# CTAS goes to the Trino engine, where the table properties cant be encased in single quotes like they can for Hive
# ref: https://docs.aws.amazon.com/athena/latest/ug/create-table-as.html#ctas-table-properties
+ # They're also case sensitive and need to be lowercase, otherwise you get eg "Table properties [FORMAT] are not supported."
self.validate_identity(
- "CREATE TABLE foo WITH (table_type='ICEBERG', external_location='s3://foo/') AS SELECT * FROM a"
+ "CREATE TABLE foo WITH (table_type='ICEBERG', location='s3://foo/', format='orc', partitioning=ARRAY['bucket(id, 5)']) AS SELECT * FROM a"
+ )
+ self.validate_identity(
+ "CREATE TABLE foo WITH (table_type='HIVE', external_location='s3://foo/', format='parquet', partitioned_by=ARRAY['ds']) AS SELECT * FROM a"
)
self.validate_identity(
"CREATE TABLE foo AS WITH foo AS (SELECT a, b FROM bar) SELECT * FROM foo"
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index e2adfea..d854165 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -1985,3 +1985,17 @@ OPTIONS (
self.validate_identity(
"SELECT RANGE(CAST('2022-10-01 14:53:27 America/Los_Angeles' AS TIMESTAMP), CAST('2022-10-01 16:00:00 America/Los_Angeles' AS TIMESTAMP))"
)
+
+ def test_null_ordering(self):
+ # Aggregate functions allow "NULLS FIRST" only with ascending order and
+ # "NULLS LAST" only with descending
+ for sort_order, null_order in (("ASC", "NULLS LAST"), ("DESC", "NULLS FIRST")):
+ self.validate_all(
+ f"SELECT color, ARRAY_AGG(id ORDER BY id {sort_order}) AS ids FROM colors GROUP BY 1",
+ read={
+ "": f"SELECT color, ARRAY_AGG(id ORDER BY id {sort_order} {null_order}) AS ids FROM colors GROUP BY 1"
+ },
+ write={
+ "bigquery": f"SELECT color, ARRAY_AGG(id ORDER BY id {sort_order}) AS ids FROM colors GROUP BY 1",
+ },
+ )
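For reference, a hedged sketch of the transpilation the new test_null_ordering case pins down, using sqlglot's public transpile API:

import sqlglot

generic = "SELECT color, ARRAY_AGG(id ORDER BY id ASC NULLS LAST) AS ids FROM colors GROUP BY 1"
# Per the test, BigQuery aggregates don't allow NULLS LAST with ASC, so the clause is dropped:
# SELECT color, ARRAY_AGG(id ORDER BY id ASC) AS ids FROM colors GROUP BY 1
print(sqlglot.transpile(generic, write="bigquery")[0])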
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index e4788ec..6b58934 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -858,6 +858,28 @@ class TestDuckDB(Validator):
self.validate_identity(
"SELECT COALESCE(*COLUMNS(['a', 'b', 'c'])) AS result FROM (SELECT NULL AS a, 42 AS b, TRUE AS c)"
)
+ self.validate_all(
+ "SELECT UNNEST(foo) AS x",
+ write={
+ "redshift": UnsupportedError,
+ },
+ )
+ self.validate_identity("a ^ b", "POWER(a, b)")
+ self.validate_identity("a ** b", "POWER(a, b)")
+ self.validate_identity("a ~~~ b", "a GLOB b")
+ self.validate_identity("a ~~ b", "a LIKE b")
+ self.validate_identity("a @> b")
+ self.validate_identity("a <@ b", "b @> a")
+ self.validate_identity("a && b").assert_is(exp.ArrayOverlaps)
+ self.validate_identity("a ^@ b", "STARTS_WITH(a, b)")
+ self.validate_identity(
+ "a !~~ b",
+ "NOT a LIKE b",
+ )
+ self.validate_identity(
+ "a !~~* b",
+ "NOT a ILIKE b",
+ )
def test_array_index(self):
with self.assertLogs(helper_logger) as cm:
@@ -967,6 +989,15 @@ class TestDuckDB(Validator):
"spark": "DATE_FORMAT(x, 'yy-M-ss')",
},
)
+
+ self.validate_all(
+ "SHA1(x)",
+ write={
+ "duckdb": "SHA1(x)",
+ "": "SHA(x)",
+ },
+ )
+
self.validate_all(
"STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
write={
@@ -1086,6 +1117,7 @@ class TestDuckDB(Validator):
self.validate_identity("CAST(x AS INT16)", "CAST(x AS SMALLINT)")
self.validate_identity("CAST(x AS NUMERIC(1, 2))", "CAST(x AS DECIMAL(1, 2))")
self.validate_identity("CAST(x AS HUGEINT)", "CAST(x AS INT128)")
+ self.validate_identity("CAST(x AS UHUGEINT)", "CAST(x AS UINT128)")
self.validate_identity("CAST(x AS CHAR)", "CAST(x AS TEXT)")
self.validate_identity("CAST(x AS BPCHAR)", "CAST(x AS TEXT)")
self.validate_identity("CAST(x AS STRING)", "CAST(x AS TEXT)")
diff --git a/tests/dialects/test_mysql.py b/tests/dialects/test_mysql.py
index 835ee7c..0e593ef 100644
--- a/tests/dialects/test_mysql.py
+++ b/tests/dialects/test_mysql.py
@@ -747,16 +747,28 @@ class TestMySQL(Validator):
},
)
self.validate_all(
- "SELECT * FROM x LEFT JOIN y ON x.id = y.id UNION SELECT * FROM x RIGHT JOIN y ON x.id = y.id LIMIT 0",
+ "SELECT * FROM x LEFT JOIN y ON x.id = y.id UNION ALL SELECT * FROM x RIGHT JOIN y ON x.id = y.id WHERE NOT EXISTS(SELECT 1 FROM x WHERE x.id = y.id) ORDER BY 1 LIMIT 0",
read={
- "postgres": "SELECT * FROM x FULL JOIN y ON x.id = y.id LIMIT 0",
+ "postgres": "SELECT * FROM x FULL JOIN y ON x.id = y.id ORDER BY 1 LIMIT 0",
},
)
self.validate_all(
# MySQL doesn't support FULL OUTER joins
- "WITH t1 AS (SELECT 1) SELECT * FROM t1 LEFT OUTER JOIN t2 ON t1.x = t2.x UNION SELECT * FROM t1 RIGHT OUTER JOIN t2 ON t1.x = t2.x",
+ "SELECT * FROM t1 LEFT OUTER JOIN t2 ON t1.x = t2.x UNION ALL SELECT * FROM t1 RIGHT OUTER JOIN t2 ON t1.x = t2.x WHERE NOT EXISTS(SELECT 1 FROM t1 WHERE t1.x = t2.x)",
read={
- "postgres": "WITH t1 AS (SELECT 1) SELECT * FROM t1 FULL OUTER JOIN t2 ON t1.x = t2.x",
+ "postgres": "SELECT * FROM t1 FULL OUTER JOIN t2 ON t1.x = t2.x",
+ },
+ )
+ self.validate_all(
+ "SELECT * FROM t1 LEFT OUTER JOIN t2 USING (x) UNION ALL SELECT * FROM t1 RIGHT OUTER JOIN t2 USING (x) WHERE NOT EXISTS(SELECT 1 FROM t1 WHERE t1.x = t2.x)",
+ read={
+ "postgres": "SELECT * FROM t1 FULL OUTER JOIN t2 USING (x) ",
+ },
+ )
+ self.validate_all(
+ "SELECT * FROM t1 LEFT OUTER JOIN t2 USING (x, y) UNION ALL SELECT * FROM t1 RIGHT OUTER JOIN t2 USING (x, y) WHERE NOT EXISTS(SELECT 1 FROM t1 WHERE t1.x = t2.x AND t1.y = t2.y)",
+ read={
+ "postgres": "SELECT * FROM t1 FULL OUTER JOIN t2 USING (x, y) ",
},
)
self.validate_all(
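A hedged sketch of the FULL OUTER JOIN emulation the updated MySQL expectations describe: MySQL has no FULL OUTER JOIN, so the join is rewritten as a LEFT JOIN unioned (ALL) with an anti-filtered RIGHT JOIN.

import sqlglot

pg = "SELECT * FROM t1 FULL OUTER JOIN t2 ON t1.x = t2.x"
# Expected shape per the test: LEFT OUTER JOIN ... UNION ALL ... RIGHT OUTER JOIN ...
# WHERE NOT EXISTS(SELECT 1 FROM t1 WHERE t1.x = t2.x)
print(sqlglot.transpile(pg, read="postgres", write="mysql")[0])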
diff --git a/tests/dialects/test_oracle.py b/tests/dialects/test_oracle.py
index 8675086..d2bbedc 100644
--- a/tests/dialects/test_oracle.py
+++ b/tests/dialects/test_oracle.py
@@ -67,6 +67,15 @@ class TestOracle(Validator):
"SELECT COUNT(1) INTO V_Temp FROM TABLE(CAST(somelist AS data_list)) WHERE col LIKE '%contact'"
)
self.validate_identity(
+ "SELECT department_id INTO v_department_id FROM departments FETCH FIRST 1 ROWS ONLY"
+ )
+ self.validate_identity(
+ "SELECT department_id BULK COLLECT INTO v_department_ids FROM departments"
+ )
+ self.validate_identity(
+ "SELECT department_id, department_name BULK COLLECT INTO v_department_ids, v_department_names FROM departments"
+ )
+ self.validate_identity(
"SELECT MIN(column_name) KEEP (DENSE_RANK FIRST ORDER BY column_name DESC) FROM table_name"
)
self.validate_identity(
@@ -103,6 +112,14 @@ class TestOracle(Validator):
)
self.validate_all(
+ "SELECT department_id, department_name INTO v_department_id, v_department_name FROM departments FETCH FIRST 1 ROWS ONLY",
+ write={
+ "oracle": "SELECT department_id, department_name INTO v_department_id, v_department_name FROM departments FETCH FIRST 1 ROWS ONLY",
+ "postgres": UnsupportedError,
+ "tsql": UnsupportedError,
+ },
+ )
+ self.validate_all(
"TRUNC(SYSDATE, 'YEAR')",
write={
"clickhouse": "DATE_TRUNC('YEAR', CURRENT_TIMESTAMP())",
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 63266a5..62ae247 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -354,10 +354,10 @@ class TestPostgres(Validator):
self.validate_all(
"SELECT ARRAY[1, 2, 3] @> ARRAY[1, 2]",
read={
- "duckdb": "SELECT ARRAY_HAS_ALL([1, 2, 3], [1, 2])",
+ "duckdb": "SELECT [1, 2, 3] @> [1, 2]",
},
write={
- "duckdb": "SELECT ARRAY_HAS_ALL([1, 2, 3], [1, 2])",
+ "duckdb": "SELECT [1, 2, 3] @> [1, 2]",
"mysql": UnsupportedError,
"postgres": "SELECT ARRAY[1, 2, 3] @> ARRAY[1, 2]",
},
@@ -399,13 +399,6 @@ class TestPostgres(Validator):
},
)
self.validate_all(
- "SELECT ARRAY[1, 2, 3] && ARRAY[1, 2]",
- write={
- "": "SELECT ARRAY_OVERLAPS(ARRAY(1, 2, 3), ARRAY(1, 2))",
- "postgres": "SELECT ARRAY[1, 2, 3] && ARRAY[1, 2]",
- },
- )
- self.validate_all(
"SELECT JSON_EXTRACT_PATH_TEXT(x, k1, k2, k3) FROM t",
read={
"clickhouse": "SELECT JSONExtractString(x, k1, k2, k3) FROM t",
@@ -802,6 +795,7 @@ class TestPostgres(Validator):
)
self.validate_identity("SELECT OVERLAY(a PLACING b FROM 1)")
self.validate_identity("SELECT OVERLAY(a PLACING b FROM 1 FOR 1)")
+ self.validate_identity("ARRAY[1, 2, 3] && ARRAY[1, 2]").assert_is(exp.ArrayOverlaps)
def test_ddl(self):
# Checks that user-defined types are parsed into DataType instead of Identifier
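A small sketch of the two array-operator changes above: the containment operator @> is now kept as an operator when writing DuckDB, and && parses to exp.ArrayOverlaps.

import sqlglot
from sqlglot import exp

# Per the updated expectation: SELECT [1, 2, 3] @> [1, 2]
print(sqlglot.transpile("SELECT ARRAY[1, 2, 3] @> ARRAY[1, 2]", read="postgres", write="duckdb")[0])
assert isinstance(sqlglot.parse_one("ARRAY[1, 2, 3] && ARRAY[1, 2]", read="postgres"), exp.ArrayOverlaps)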
diff --git a/tests/dialects/test_redshift.py b/tests/dialects/test_redshift.py
index 6f561da..01c7f78 100644
--- a/tests/dialects/test_redshift.py
+++ b/tests/dialects/test_redshift.py
@@ -214,6 +214,12 @@ class TestRedshift(Validator):
},
)
self.validate_all(
+ "CREATE TABLE a (b BINARY VARYING(10))",
+ write={
+ "redshift": "CREATE TABLE a (b VARBYTE(10))",
+ },
+ )
+ self.validate_all(
"SELECT 'abc'::CHARACTER",
write={
"redshift": "SELECT CAST('abc' AS CHAR)",
diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index 4fed68c..01859c6 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -2,7 +2,6 @@ from unittest import mock
from sqlglot import exp, parse_one
from sqlglot.dialects.dialect import Dialects
-from sqlglot.helper import logger as helper_logger
from tests.dialects.test_dialect import Validator
@@ -294,19 +293,19 @@ TBLPROPERTIES (
"SELECT STR_TO_MAP('a:1,b:2,c:3')",
"SELECT STR_TO_MAP('a:1,b:2,c:3', ',', ':')",
)
-
- with self.assertLogs(helper_logger):
- self.validate_all(
- "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
- read={
- "databricks": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
- },
- write={
- "databricks": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
- "duckdb": "SELECT ([1, 2, 3])[3]",
- "spark": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
- },
- )
+ self.validate_all(
+ "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
+ read={
+ "databricks": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
+ "presto": "SELECT ELEMENT_AT(ARRAY[1, 2, 3], 2)",
+ },
+ write={
+ "databricks": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
+ "spark": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
+ "duckdb": "SELECT ([1, 2, 3])[2]",
+ "presto": "SELECT ELEMENT_AT(ARRAY[1, 2, 3], 2)",
+ },
+ )
self.validate_all(
"SELECT ARRAY_AGG(x) FILTER (WHERE x = 5) FROM (SELECT 1 UNION ALL SELECT NULL) AS t(x)",
diff --git a/tests/dialects/test_sqlite.py b/tests/dialects/test_sqlite.py
index f2c9802..230c0e8 100644
--- a/tests/dialects/test_sqlite.py
+++ b/tests/dialects/test_sqlite.py
@@ -26,6 +26,7 @@ class TestSQLite(Validator):
"""SELECT item AS "item", some AS "some" FROM data WHERE (item = 'value_1' COLLATE NOCASE) AND (some = 't' COLLATE NOCASE) ORDER BY item ASC LIMIT 1 OFFSET 0"""
)
self.validate_identity("SELECT * FROM GENERATE_SERIES(1, 5)")
+ self.validate_identity("SELECT INSTR(haystack, needle)")
self.validate_all("SELECT LIKE(y, x)", write={"sqlite": "SELECT x LIKE y"})
self.validate_all("SELECT GLOB('*y*', 'xyz')", write={"sqlite": "SELECT 'xyz' GLOB '*y*'"})
diff --git a/tests/dialects/test_trino.py b/tests/dialects/test_trino.py
index 0ebe749..8c73ec1 100644
--- a/tests/dialects/test_trino.py
+++ b/tests/dialects/test_trino.py
@@ -4,6 +4,12 @@ from tests.dialects.test_dialect import Validator
class TestTrino(Validator):
dialect = "trino"
+ def test_trino(self):
+ self.validate_identity("JSON_EXTRACT(content, json_path)")
+ self.validate_identity("JSON_QUERY(content, 'lax $.HY.*')")
+ self.validate_identity("JSON_QUERY(content, 'strict $.HY.*' WITH UNCONDITIONAL WRAPPER)")
+ self.validate_identity("JSON_QUERY(content, 'strict $.HY.*' WITHOUT CONDITIONAL WRAPPER)")
+
def test_trim(self):
self.validate_identity("SELECT TRIM('!' FROM '!foo!')")
self.validate_identity("SELECT TRIM(BOTH '$' FROM '$var$')")
diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index 453cd5a..9be6fcd 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -8,6 +8,11 @@ class TestTSQL(Validator):
dialect = "tsql"
def test_tsql(self):
+ self.validate_identity(
+ "with x as (select 1) select * from x union select * from x order by 1 limit 0",
+ "WITH x AS (SELECT 1 AS [1]) SELECT TOP 0 * FROM (SELECT * FROM x UNION SELECT * FROM x) AS _l_0 ORDER BY 1",
+ )
+
# https://learn.microsoft.com/en-us/previous-versions/sql/sql-server-2008-r2/ms187879(v=sql.105)?redirectedfrom=MSDN
# tsql allows .. which means use the default schema
self.validate_identity("SELECT * FROM a..b")
@@ -46,6 +51,10 @@ class TestTSQL(Validator):
self.validate_identity(
"COPY INTO test_1 FROM 'path' WITH (FORMAT_NAME = test, FILE_TYPE = 'CSV', CREDENTIAL = (IDENTITY='Shared Access Signature', SECRET='token'), FIELDTERMINATOR = ';', ROWTERMINATOR = '0X0A', ENCODING = 'UTF8', DATEFORMAT = 'ymd', MAXERRORS = 10, ERRORFILE = 'errorsfolder', IDENTITY_INSERT = 'ON')"
)
+ self.validate_identity(
+ 'SELECT 1 AS "[x]"',
+ "SELECT 1 AS [[x]]]",
+ )
self.assertEqual(
annotate_types(self.validate_identity("SELECT 1 WHERE EXISTS(SELECT 1)")).sql("tsql"),
"SELECT 1 WHERE EXISTS(SELECT 1)",