author     Daniel Baumann <daniel.baumann@progress-linux.org>  2023-06-16 09:41:15 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2023-06-16 09:41:15 +0000
commit     358a09296d7198a4cc142f1976de8f3eb3318e58 (patch)
tree       762db96c44014dc4db5e9fc7f6709c138589155e /tests/dialects
parent     Adding upstream version 15.2.0. (diff)
download   sqlglot-358a09296d7198a4cc142f1976de8f3eb3318e58.tar.xz
           sqlglot-358a09296d7198a4cc142f1976de8f3eb3318e58.zip

Adding upstream version 16.2.1. (upstream/16.2.1)

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects')
-rw-r--r--  tests/dialects/test_bigquery.py    24
-rw-r--r--  tests/dialects/test_clickhouse.py  14
-rw-r--r--  tests/dialects/test_dialect.py     23
-rw-r--r--  tests/dialects/test_duckdb.py      18
-rw-r--r--  tests/dialects/test_mysql.py        2
-rw-r--r--  tests/dialects/test_oracle.py       2
-rw-r--r--  tests/dialects/test_postgres.py    56
-rw-r--r--  tests/dialects/test_presto.py       2
-rw-r--r--  tests/dialects/test_snowflake.py    8
-rw-r--r--  tests/dialects/test_teradata.py    12
10 files changed, 149 insertions, 12 deletions
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index 05ded11..1c8aa51 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -6,6 +6,9 @@ class TestBigQuery(Validator):
dialect = "bigquery"
def test_bigquery(self):
+ self.validate_identity("DATE(2016, 12, 25)")
+ self.validate_identity("DATE(CAST('2016-12-25 23:59:59' AS DATETIME))")
+ self.validate_identity("SELECT foo IN UNNEST(bar) AS bla")
self.validate_identity("SELECT * FROM x-0.a")
self.validate_identity("SELECT * FROM pivot CROSS JOIN foo")
self.validate_identity("SAFE_CAST(x AS STRING)")
@@ -28,6 +31,9 @@ class TestBigQuery(Validator):
self.validate_identity("""CREATE TABLE x (a STRUCT<values ARRAY<INT64>>)""")
self.validate_identity("""CREATE TABLE x (a STRUCT<b STRING OPTIONS (description='b')>)""")
self.validate_identity(
+ "DATE(CAST('2016-12-25 05:30:00+07' AS DATETIME), 'America/Los_Angeles')"
+ )
+ self.validate_identity(
"""CREATE TABLE x (a STRING OPTIONS (description='x')) OPTIONS (table_expiration_days=1)"""
)
self.validate_identity(
@@ -37,6 +43,19 @@ class TestBigQuery(Validator):
"CREATE TABLE IF NOT EXISTS foo AS SELECT * FROM bla EXCEPT DISTINCT (SELECT * FROM bar) LIMIT 0"
)
+ self.validate_all("SELECT SPLIT(foo)", write={"bigquery": "SELECT SPLIT(foo, ',')"})
+ self.validate_all(
+ "cast(x as date format 'MM/DD/YYYY')",
+ write={
+ "bigquery": "PARSE_DATE('%m/%d/%Y', x)",
+ },
+ )
+ self.validate_all(
+ "cast(x as time format 'YYYY.MM.DD HH:MI:SSTZH')",
+ write={
+ "bigquery": "PARSE_TIMESTAMP('%Y.%m.%d %I:%M:%S%z', x)",
+ },
+ )
self.validate_all("SELECT 1 AS hash", write={"bigquery": "SELECT 1 AS `hash`"})
self.validate_all('x <> ""', write={"bigquery": "x <> ''"})
self.validate_all('x <> """"""', write={"bigquery": "x <> ''"})
@@ -55,11 +74,12 @@ class TestBigQuery(Validator):
"SELECT * FROM `my-project.my-dataset.my-table`",
write={"bigquery": "SELECT * FROM `my-project`.`my-dataset`.`my-table`"},
)
+ self.validate_all("CAST(x AS DATETIME)", read={"": "x::timestamp"})
+ self.validate_identity("CAST(x AS TIMESTAMP)")
self.validate_all("LEAST(x, y)", read={"sqlite": "MIN(x, y)"})
self.validate_all("CAST(x AS CHAR)", write={"bigquery": "CAST(x AS STRING)"})
self.validate_all("CAST(x AS NCHAR)", write={"bigquery": "CAST(x AS STRING)"})
self.validate_all("CAST(x AS NVARCHAR)", write={"bigquery": "CAST(x AS STRING)"})
- self.validate_all("CAST(x AS TIMESTAMP)", write={"bigquery": "CAST(x AS DATETIME)"})
self.validate_all("CAST(x AS TIMESTAMPTZ)", write={"bigquery": "CAST(x AS TIMESTAMP)"})
self.validate_all("CAST(x AS RECORD)", write={"bigquery": "CAST(x AS STRUCT)"})
self.validate_all(
@@ -418,9 +438,11 @@ class TestBigQuery(Validator):
self.validate_all(
"SELECT REGEXP_EXTRACT(abc, 'pattern(group)') FROM table",
write={
+ "bigquery": "SELECT REGEXP_EXTRACT(abc, 'pattern(group)') FROM table",
"duckdb": "SELECT REGEXP_EXTRACT(abc, 'pattern(group)', 1) FROM table",
},
)
+ self.validate_identity("REGEXP_EXTRACT(`foo`, 'bar: (.+?)', 1, 1)")
self.validate_identity("BEGIN A B C D E F")
self.validate_identity("BEGIN TRANSACTION")
self.validate_identity("COMMIT TRANSACTION")
diff --git a/tests/dialects/test_clickhouse.py b/tests/dialects/test_clickhouse.py
index f5372d9..7584c67 100644
--- a/tests/dialects/test_clickhouse.py
+++ b/tests/dialects/test_clickhouse.py
@@ -45,8 +45,22 @@ class TestClickhouse(Validator):
self.validate_identity(
"CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple() AS SELECT * FROM test_data"
)
+ self.validate_identity(
+ "CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 (id UInt8) TO table1 AS SELECT * FROM test_data"
+ )
+ self.validate_identity(
+ "CREATE MATERIALIZED VIEW test_view (id UInt8) TO db.table1 AS SELECT * FROM test_data"
+ )
self.validate_all(
+ "CONCAT(CASE WHEN COALESCE(a, '') IS NULL THEN COALESCE(a, '') ELSE CAST(COALESCE(a, '') AS TEXT) END, CASE WHEN COALESCE(b, '') IS NULL THEN COALESCE(b, '') ELSE CAST(COALESCE(b, '') AS TEXT) END)",
+ read={"postgres": "CONCAT(a, b)"},
+ )
+ self.validate_all(
+ "CONCAT(CASE WHEN a IS NULL THEN a ELSE CAST(a AS TEXT) END, CASE WHEN b IS NULL THEN b ELSE CAST(b AS TEXT) END)",
+ read={"mysql": "CONCAT(a, b)"},
+ )
+ self.validate_all(
r"'Enum8(\'Sunday\' = 0)'", write={"clickhouse": "'Enum8(''Sunday'' = 0)'"}
)
self.validate_all(
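
The ClickHouse additions pin down how CONCAT read from other dialects is expanded;
the same mapping can be checked with sqlglot.transpile (a sketch, assuming sqlglot
16.2.1):

    import sqlglot

    # MySQL CONCAT(a, b) gains explicit NULL checks and TEXT casts in ClickHouse.
    print(sqlglot.transpile("CONCAT(a, b)", read="mysql", write="clickhouse")[0])
    # Expected per the test, as a single line:
    # CONCAT(CASE WHEN a IS NULL THEN a ELSE CAST(a AS TEXT) END,
    #        CASE WHEN b IS NULL THEN b ELSE CAST(b AS TEXT) END)
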
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index 7e20812..8ffdf07 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -1085,6 +1085,14 @@ class TestDialect(Validator):
self.validate_all("LIKE(x, 'z')", write={"": "'z' LIKE x"})
self.validate_all(
+ "CONCAT(a, b, c)",
+ write={
+ "": "CONCAT(a, b, c)",
+ "redshift": "a || b || c",
+ "sqlite": "a || b || c",
+ },
+ )
+ self.validate_all(
"x ILIKE '%y'",
read={
"clickhouse": "x ILIKE '%y'",
@@ -1177,11 +1185,22 @@ class TestDialect(Validator):
self.validate_all(
"CONCAT(a)",
write={
- "mysql": "a",
+ "clickhouse": "a",
+ "presto": "a",
+ "trino": "a",
"tsql": "a",
},
)
self.validate_all(
+ "COALESCE(a, '')",
+ read={
+ "drill": "CONCAT(a)",
+ "duckdb": "CONCAT(a)",
+ "postgres": "CONCAT(a)",
+ "tsql": "CONCAT(a)",
+ },
+ )
+ self.validate_all(
"IF(x > 1, 1, 0)",
write={
"drill": "`IF`(x > 1, 1, 0)",
@@ -1276,7 +1295,7 @@ class TestDialect(Validator):
def test_limit(self):
self.validate_all(
"SELECT * FROM data LIMIT 10, 20",
- write={"sqlite": "SELECT * FROM data LIMIT 10 OFFSET 20"},
+ write={"sqlite": "SELECT * FROM data LIMIT 20 OFFSET 10"},
)
self.validate_all(
"SELECT x FROM y LIMIT 10",
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index ee15d04..f0caafc 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -10,6 +10,20 @@ class TestDuckDB(Validator):
self.validate_identity("SELECT CURRENT_TIMESTAMP")
self.validate_all(
+ "SELECT CAST('2020-01-01' AS DATE) + INTERVAL (-1) DAY",
+ read={"mysql": "SELECT DATE '2020-01-01' + INTERVAL -1 DAY"},
+ )
+ self.validate_all(
+ "SELECT INTERVAL '1 quarter'",
+ write={"duckdb": "SELECT (90 * INTERVAL '1' day)"},
+ )
+ self.validate_all(
+ "SELECT ((DATE_TRUNC('DAY', CAST(CAST(DATE_TRUNC('DAY', CURRENT_TIMESTAMP) AS DATE) AS TIMESTAMP) + INTERVAL (0 - MOD((DAYOFWEEK(CAST(CAST(DATE_TRUNC('DAY', CURRENT_TIMESTAMP) AS DATE) AS TIMESTAMP)) % 7) - 1 + 7, 7)) day) + (7 * INTERVAL (-5) day))) AS t1",
+ read={
+ "presto": "SELECT ((DATE_ADD('week', -5, DATE_TRUNC('DAY', DATE_ADD('day', (0 - MOD((DAY_OF_WEEK(CAST(CAST(DATE_TRUNC('DAY', NOW()) AS DATE) AS TIMESTAMP)) % 7) - 1 + 7, 7)), CAST(CAST(DATE_TRUNC('DAY', NOW()) AS DATE) AS TIMESTAMP)))))) AS t1",
+ },
+ )
+ self.validate_all(
"EPOCH(x)",
read={
"presto": "TO_UNIXTIME(x)",
@@ -51,7 +65,7 @@ class TestDuckDB(Validator):
self.validate_all(
"STRPTIME(x, '%y-%-m')",
write={
- "bigquery": "PARSE_TIMESTAMP('%y-%m', x)",
+ "bigquery": "PARSE_TIMESTAMP('%y-%-m', x)",
"duckdb": "STRPTIME(x, '%y-%-m')",
"presto": "DATE_PARSE(x, '%y-%c')",
"hive": "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP(x, 'yy-M')) AS TIMESTAMP)",
@@ -70,7 +84,7 @@ class TestDuckDB(Validator):
self.validate_all(
"STRPTIME(x, '%-m/%-d/%y %-I:%M %p')",
write={
- "bigquery": "PARSE_TIMESTAMP('%m/%d/%y %I:%M %p', x)",
+ "bigquery": "PARSE_TIMESTAMP('%-m/%-d/%y %-I:%M %p', x)",
"duckdb": "STRPTIME(x, '%-m/%-d/%y %-I:%M %p')",
"presto": "DATE_PARSE(x, '%c/%e/%y %l:%i %p')",
"hive": "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP(x, 'M/d/yy h:mm a')) AS TIMESTAMP)",
diff --git a/tests/dialects/test_mysql.py b/tests/dialects/test_mysql.py
index 4fb6fa5..0b9c8b7 100644
--- a/tests/dialects/test_mysql.py
+++ b/tests/dialects/test_mysql.py
@@ -45,6 +45,8 @@ class TestMySQL(Validator):
)
def test_identity(self):
+ self.validate_identity("CAST(x AS ENUM('a', 'b'))")
+ self.validate_identity("CAST(x AS SET('a', 'b'))")
self.validate_identity("SELECT CURRENT_TIMESTAMP(6)")
self.validate_identity("x ->> '$.name'")
self.validate_identity("SELECT CAST(`a`.`b` AS INT) FROM foo")
diff --git a/tests/dialects/test_oracle.py b/tests/dialects/test_oracle.py
index 12ff699..2c67805 100644
--- a/tests/dialects/test_oracle.py
+++ b/tests/dialects/test_oracle.py
@@ -26,8 +26,8 @@ class TestOracle(Validator):
self.validate_all(
"NVL(NULL, 1)",
write={
+ "": "COALESCE(NULL, 1)",
"oracle": "NVL(NULL, 1)",
- "": "IFNULL(NULL, 1)",
},
)
self.validate_all(
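
The Oracle change swaps the default-dialect rendering of NVL from IFNULL to
COALESCE (a sketch, assuming sqlglot 16.2.1):

    import sqlglot

    # Generate with the default ("") dialect, matching the test's "" write key.
    print(sqlglot.parse_one("NVL(NULL, 1)", read="oracle").sql())
    # Expected per the updated test: COALESCE(NULL, 1)
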
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 972a8c8..4e57b36 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -1,3 +1,5 @@
+from unittest import mock
+
from sqlglot import ParseError, exp, parse_one, transpile
from tests.dialects.test_dialect import Validator
@@ -85,6 +87,39 @@ class TestPostgres(Validator):
read="postgres",
)
+ def test_unnest(self):
+ self.validate_identity(
+ "SELECT * FROM UNNEST(ARRAY[1, 2], ARRAY['foo', 'bar', 'baz']) AS x(a, b)"
+ )
+
+ self.validate_all(
+ "SELECT UNNEST(c) FROM t",
+ write={
+ "hive": "SELECT EXPLODE(c) FROM t",
+ "postgres": "SELECT UNNEST(c) FROM t",
+ "presto": "SELECT col FROM t CROSS JOIN UNNEST(c) AS _u(col)",
+ },
+ )
+ self.validate_all(
+ "SELECT UNNEST(ARRAY[1])",
+ write={
+ "hive": "SELECT EXPLODE(ARRAY(1))",
+ "postgres": "SELECT UNNEST(ARRAY[1])",
+ "presto": "SELECT col FROM UNNEST(ARRAY[1]) AS _u(col)",
+ },
+ )
+
+ @mock.patch("sqlglot.helper.logger")
+ def test_array_offset(self, mock_logger):
+ self.validate_all(
+ "SELECT col[1]",
+ write={
+ "hive": "SELECT col[0]",
+ "postgres": "SELECT col[1]",
+ "presto": "SELECT col[1]",
+ },
+ )
+
def test_postgres(self):
self.validate_identity("CAST(x AS INT4RANGE)")
self.validate_identity("CAST(x AS INT4MULTIRANGE)")
@@ -540,3 +575,24 @@ class TestPostgres(Validator):
"SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",
write={"postgres": "SELECT a, BOOL_OR(b) FROM table GROUP BY a"},
)
+
+ def test_string_concat(self):
+ self.validate_all(
+ "CONCAT(a, b)",
+ write={
+ "": "CONCAT(COALESCE(a, ''), COALESCE(b, ''))",
+ "duckdb": "CONCAT(COALESCE(a, ''), COALESCE(b, ''))",
+ "postgres": "CONCAT(COALESCE(a, ''), COALESCE(b, ''))",
+ "presto": "CONCAT(CAST(COALESCE(a, '') AS VARCHAR), CAST(COALESCE(b, '') AS VARCHAR))",
+ },
+ )
+ self.validate_all(
+ "a || b",
+ write={
+ "": "a || b",
+ "clickhouse": "CONCAT(CAST(a AS TEXT), CAST(b AS TEXT))",
+ "duckdb": "a || b",
+ "postgres": "a || b",
+ "presto": "CONCAT(CAST(a AS VARCHAR), CAST(b AS VARCHAR))",
+ },
+ )
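
The new Postgres test_unnest and test_string_concat cases can be reproduced with
sqlglot.transpile (a sketch, assuming sqlglot 16.2.1):

    import sqlglot

    # UNNEST of a column becomes EXPLODE when targeting Hive.
    print(sqlglot.transpile("SELECT UNNEST(c) FROM t", read="postgres", write="hive")[0])
    # Expected per the test: SELECT EXPLODE(c) FROM t

    # Postgres CONCAT is NULL-safe, so each argument is wrapped in COALESCE.
    print(sqlglot.transpile("CONCAT(a, b)", read="postgres", write="duckdb")[0])
    # Expected per the test: CONCAT(COALESCE(a, ''), COALESCE(b, ''))
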
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index e3d09ef..4f37be5 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -440,6 +440,8 @@ class TestPresto(Validator):
)
def test_presto(self):
+ self.validate_identity("SELECT * FROM x OFFSET 1 LIMIT 1")
+ self.validate_identity("SELECT * FROM x OFFSET 1 FETCH FIRST 1 ROWS ONLY")
self.validate_identity("SELECT BOOL_OR(a > 10) FROM asd AS T(a)")
self.validate_identity("SELECT * FROM (VALUES (1))")
self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE")
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index 941f2aa..426e188 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -30,6 +30,10 @@ class TestSnowflake(Validator):
self.validate_identity("SELECT CONVERT_TIMEZONE('UTC', 'America/Los_Angeles', col)")
self.validate_all("CAST(x AS CHAR VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
+ self.validate_all(
+ "SELECT * FROM (VALUES (0) foo(bar))",
+ write={"snowflake": "SELECT * FROM (VALUES (0)) AS foo(bar)"},
+ )
self.validate_all("CAST(x AS CHARACTER VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
self.validate_all("CAST(x AS NCHAR VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
self.validate_all(
@@ -274,8 +278,8 @@ class TestSnowflake(Validator):
"SELECT TO_TIMESTAMP('2013-04-05 01:02:03')",
write={
"bigquery": "SELECT PARSE_TIMESTAMP('%Y-%m-%d %H:%M:%S', '2013-04-05 01:02:03')",
- "snowflake": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-mm-dd hh24:mi:ss')",
- "spark": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-MM-d HH:mm:ss')",
+ "snowflake": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-mm-DD hh24:mi:ss')",
+ "spark": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-MM-dd HH:mm:ss')",
},
)
self.validate_all(
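
The new Snowflake VALUES case expects the inline derived-table alias to be rewritten
into an explicit AS clause (a sketch, assuming sqlglot 16.2.1):

    import sqlglot

    print(sqlglot.transpile("SELECT * FROM (VALUES (0) foo(bar))", read="snowflake", write="snowflake")[0])
    # Expected per the test: SELECT * FROM (VALUES (0)) AS foo(bar)
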
diff --git a/tests/dialects/test_teradata.py b/tests/dialects/test_teradata.py
index 9f789d0..6906e47 100644
--- a/tests/dialects/test_teradata.py
+++ b/tests/dialects/test_teradata.py
@@ -138,11 +138,15 @@ class TestTeradata(Validator):
def test_cast(self):
self.validate_all(
"CAST('1992-01' AS DATE FORMAT 'YYYY-DD')",
+ read={
+ "bigquery": "CAST('1992-01' AS DATE FORMAT 'YYYY-DD')",
+ },
write={
"teradata": "CAST('1992-01' AS DATE FORMAT 'YYYY-DD')",
- "databricks": "DATE_FORMAT('1992-01', 'YYYY-DD')",
- "mysql": "DATE_FORMAT('1992-01', 'YYYY-DD')",
- "spark": "DATE_FORMAT('1992-01', 'YYYY-DD')",
- "": "TIME_TO_STR('1992-01', 'YYYY-DD')",
+ "bigquery": "PARSE_DATE('%Y-%d', '1992-01')",
+ "databricks": "TO_DATE('1992-01', 'yyyy-dd')",
+ "mysql": "STR_TO_DATE('1992-01', '%Y-%d')",
+ "spark": "TO_DATE('1992-01', 'yyyy-dd')",
+ "": "STR_TO_DATE('1992-01', '%Y-%d')",
},
)
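
The updated Teradata FORMAT-cast expectations now produce real parse/convert calls
instead of DATE_FORMAT strings (a sketch, assuming sqlglot 16.2.1):

    import sqlglot

    sql = "CAST('1992-01' AS DATE FORMAT 'YYYY-DD')"

    print(sqlglot.transpile(sql, read="teradata", write="bigquery")[0])
    # Expected per the test: PARSE_DATE('%Y-%d', '1992-01')

    # Default ("") dialect output, matching the test's "" write key.
    print(sqlglot.parse_one(sql, read="teradata").sql())
    # Expected per the test: STR_TO_DATE('1992-01', '%Y-%d')
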