| author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-07-06 07:28:09 +0000 |
|---|---|---|
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-07-06 07:28:09 +0000 |
| commit | 52f4a5e2260f3e5b919b4e270339afd670bf0b8a (patch) | |
| tree | 5ca419af0e2e409018492b82f5b9847f0112b5fb /tests/dialects | |
| parent | Adding upstream version 16.7.7. (diff) | |
| download | sqlglot-52f4a5e2260f3e5b919b4e270339afd670bf0b8a.tar.xz sqlglot-52f4a5e2260f3e5b919b4e270339afd670bf0b8a.zip | |
Adding upstream version 17.2.0. (upstream/17.2.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects')
-rw-r--r-- | tests/dialects/test_bigquery.py | 34
-rw-r--r-- | tests/dialects/test_dialect.py | 4
-rw-r--r-- | tests/dialects/test_duckdb.py | 17
-rw-r--r-- | tests/dialects/test_mysql.py | 45
-rw-r--r-- | tests/dialects/test_postgres.py | 2
-rw-r--r-- | tests/dialects/test_presto.py | 2
-rw-r--r-- | tests/dialects/test_snowflake.py | 7
-rw-r--r-- | tests/dialects/test_spark.py | 5
-rw-r--r-- | tests/dialects/test_sqlite.py | 9
-rw-r--r-- | tests/dialects/test_tsql.py | 31
10 files changed, 133 insertions, 23 deletions
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index e3fc495..eac3cac 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -29,12 +29,14 @@ class TestBigQuery(Validator):
         with self.assertRaises(ParseError):
             transpile("SELECT * FROM UNNEST(x) AS x(y)", read="bigquery")
 
+        self.validate_identity("SELECT PARSE_TIMESTAMP('%c', 'Thu Dec 25 07:30:00 2008', 'UTC')")
         self.validate_identity("SELECT ANY_VALUE(fruit HAVING MAX sold) FROM fruits")
         self.validate_identity("SELECT ANY_VALUE(fruit HAVING MIN sold) FROM fruits")
         self.validate_identity("SELECT `project-id`.udfs.func(call.dir)")
         self.validate_identity("SELECT CAST(CURRENT_DATE AS STRING FORMAT 'DAY') AS current_day")
         self.validate_identity("SAFE_CAST(encrypted_value AS STRING FORMAT 'BASE64')")
         self.validate_identity("CAST(encrypted_value AS STRING FORMAT 'BASE64')")
+        self.validate_identity("CAST(STRUCT<a INT64>(1) AS STRUCT<a INT64>)")
         self.validate_identity("STRING_AGG(a)")
         self.validate_identity("STRING_AGG(a, ' & ')")
         self.validate_identity("STRING_AGG(DISTINCT a, ' & ')")
@@ -106,6 +108,14 @@ class TestBigQuery(Validator):
         self.validate_all("CAST(x AS TIMESTAMPTZ)", write={"bigquery": "CAST(x AS TIMESTAMP)"})
         self.validate_all("CAST(x AS RECORD)", write={"bigquery": "CAST(x AS STRUCT)"})
         self.validate_all(
+            "SELECT CAST('20201225' AS TIMESTAMP FORMAT 'YYYYMMDD' AT TIME ZONE 'America/New_York')",
+            write={"bigquery": "SELECT PARSE_TIMESTAMP('%Y%m%d', '20201225', 'America/New_York')"},
+        )
+        self.validate_all(
+            "SELECT CAST('20201225' AS TIMESTAMP FORMAT 'YYYYMMDD')",
+            write={"bigquery": "SELECT PARSE_TIMESTAMP('%Y%m%d', '20201225')"},
+        )
+        self.validate_all(
             "SELECT CAST(TIMESTAMP '2008-12-25 00:00:00+00:00' AS STRING FORMAT 'YYYY-MM-DD HH24:MI:SS TZH:TZM') AS date_time_to_string",
             write={
                 "bigquery": "SELECT CAST(CAST('2008-12-25 00:00:00+00:00' AS TIMESTAMP) AS STRING FORMAT 'YYYY-MM-DD HH24:MI:SS TZH:TZM') AS date_time_to_string",
@@ -191,7 +201,7 @@ class TestBigQuery(Validator):
         self.validate_all(
             "r'x\\''",
             write={
-                "bigquery": "r'x\\''",
+                "bigquery": "'x\\''",
                 "hive": "'x\\''",
             },
         )
@@ -199,7 +209,7 @@ class TestBigQuery(Validator):
         self.validate_all(
             "r'x\\y'",
             write={
-                "bigquery": "r'x\\y'",
+                "bigquery": "'x\\\\y'",
                 "hive": "'x\\\\y'",
             },
         )
@@ -215,7 +225,7 @@ class TestBigQuery(Validator):
         self.validate_all(
             r'r"""/\*.*\*/"""',
             write={
-                "bigquery": r"r'/\*.*\*/'",
+                "bigquery": r"'/\\*.*\\*/'",
                 "duckdb": r"'/\\*.*\\*/'",
                 "presto": r"'/\\*.*\\*/'",
                 "hive": r"'/\\*.*\\*/'",
@@ -225,7 +235,7 @@ class TestBigQuery(Validator):
         self.validate_all(
             r'R"""/\*.*\*/"""',
             write={
-                "bigquery": r"r'/\*.*\*/'",
+                "bigquery": r"'/\\*.*\\*/'",
                 "duckdb": r"'/\\*.*\\*/'",
                 "presto": r"'/\\*.*\\*/'",
                 "hive": r"'/\\*.*\\*/'",
@@ -233,6 +243,20 @@ class TestBigQuery(Validator):
             },
         )
         self.validate_all(
+            'r"""a\n"""',
+            write={
+                "bigquery": "'a\\n'",
+                "duckdb": "'a\n'",
+            },
+        )
+        self.validate_all(
+            '"""a\n"""',
+            write={
+                "bigquery": "'a\\n'",
+                "duckdb": "'a\n'",
+            },
+        )
+        self.validate_all(
             "CAST(a AS INT64)",
             write={
                 "bigquery": "CAST(a AS INT64)",
@@ -603,7 +627,7 @@ class TestBigQuery(Validator):
         )
 
     @mock.patch("sqlglot.dialects.bigquery.logger")
-    def test_pushdown_cte_column_names(self, mock_logger):
+    def test_pushdown_cte_column_names(self, logger):
         with self.assertRaises(UnsupportedError):
             transpile(
                 "WITH cte(foo) AS (SELECT * FROM tbl) SELECT foo FROM cte",
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index 78f87ff..21efc6b 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -601,7 +601,6 @@ class TestDialect(Validator):
             "DATE_TRUNC('day', x)",
             read={
                 "bigquery": "DATE_TRUNC(x, day)",
-                "duckdb": "DATE_TRUNC('day', x)",
                 "spark": "TRUNC(x, 'day')",
             },
             write={
@@ -619,6 +618,7 @@ class TestDialect(Validator):
             "TIMESTAMP_TRUNC(x, day)",
             read={
                 "bigquery": "TIMESTAMP_TRUNC(x, day)",
+                "duckdb": "DATE_TRUNC('day', x)",
                 "presto": "DATE_TRUNC('day', x)",
                 "postgres": "DATE_TRUNC('day', x)",
                 "snowflake": "DATE_TRUNC('day', x)",
@@ -1307,7 +1307,7 @@ class TestDialect(Validator):
             write={
                 "sqlite": "SELECT x FROM y LIMIT 10",
                 "oracle": "SELECT x FROM y FETCH FIRST 10 ROWS ONLY",
-                "tsql": "SELECT x FROM y FETCH FIRST 10 ROWS ONLY",
+                "tsql": "SELECT TOP 10 x FROM y",
             },
         )
         self.validate_all(
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index 4065f81..cad1c15 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -10,6 +10,23 @@ class TestDuckDB(Validator):
         self.validate_identity("SELECT CURRENT_TIMESTAMP")
 
         self.validate_all(
+            "SELECT MAKE_DATE(2016, 12, 25)", read={"bigquery": "SELECT DATE(2016, 12, 25)"}
+        )
+        self.validate_all(
+            "SELECT CAST(CAST('2016-12-25 23:59:59' AS DATETIME) AS DATE)",
+            read={"bigquery": "SELECT DATE(DATETIME '2016-12-25 23:59:59')"},
+        )
+        self.validate_all(
+            "SELECT STRPTIME(STRFTIME(CAST(CAST('2016-12-25' AS TIMESTAMPTZ) AS DATE), '%d/%m/%Y') || ' ' || 'America/Los_Angeles', '%d/%m/%Y %Z')",
+            read={
+                "bigquery": "SELECT DATE(TIMESTAMP '2016-12-25', 'America/Los_Angeles')",
+            },
+        )
+        self.validate_all(
+            "SELECT CAST(CAST(STRPTIME('05/06/2020', '%m/%d/%Y') AS DATE) AS DATE)",
+            read={"bigquery": "SELECT DATE(PARSE_DATE('%m/%d/%Y', '05/06/2020'))"},
+        )
+        self.validate_all(
             "SELECT CAST('2020-01-01' AS DATE) + INTERVAL (-1) DAY",
             read={"mysql": "SELECT DATE '2020-01-01' + INTERVAL -1 DAY"},
         )
diff --git a/tests/dialects/test_mysql.py b/tests/dialects/test_mysql.py
index ca2f921..3539ad0 100644
--- a/tests/dialects/test_mysql.py
+++ b/tests/dialects/test_mysql.py
@@ -10,7 +10,28 @@ class TestMySQL(Validator):
         self.validate_identity("UPDATE items SET items.price = 0 WHERE items.id >= 5 LIMIT 10")
         self.validate_identity("DELETE FROM t WHERE a <= 10 LIMIT 10")
         self.validate_identity(
-            "INSERT INTO x VALUES (1, 'a', 2.0) ON DUPLICATE KEY UPDATE SET x.id = 1"
+            "DELETE t1 FROM t1 LEFT JOIN t2 ON t1.id = t2.id WHERE t2.id IS NULL"
+        )
+        self.validate_identity(
+            "DELETE t1, t2 FROM t1 INNER JOIN t2 INNER JOIN t3 WHERE t1.id = t2.id AND t2.id = t3.id"
+        )
+        self.validate_identity(
+            "DELETE FROM t1, t2 USING t1 INNER JOIN t2 INNER JOIN t3 WHERE t1.id = t2.id AND t2.id = t3.id"
+        )
+        self.validate_identity(
+            "INSERT IGNORE INTO subscribers (email) VALUES ('john.doe@gmail.com'), ('jane.smith@ibm.com')"
+        )
+        self.validate_identity(
+            "INSERT INTO t1 (a, b, c) VALUES (1, 2, 3), (4, 5, 6) ON DUPLICATE KEY UPDATE c = VALUES(a) + VALUES(b)"
+        )
+        self.validate_identity(
+            "INSERT INTO t1 (a, b) SELECT c, d FROM t2 UNION SELECT e, f FROM t3 ON DUPLICATE KEY UPDATE b = b + c"
+        )
+        self.validate_identity(
+            "INSERT INTO t1 (a, b, c) VALUES (1, 2, 3) ON DUPLICATE KEY UPDATE c = c + 1"
+        )
+        self.validate_identity(
+            "INSERT INTO x VALUES (1, 'a', 2.0) ON DUPLICATE KEY UPDATE x.id = 1"
         )
 
         self.validate_all(
@@ -48,6 +69,14 @@ class TestMySQL(Validator):
         )
 
     def test_identity(self):
+        self.validate_identity("SELECT /*+ BKA(t1) NO_BKA(t2) */ * FROM t1 INNER JOIN t2")
+        self.validate_identity("SELECT /*+ MERGE(dt) */ * FROM (SELECT * FROM t1) AS dt")
+        self.validate_identity("SELECT /*+ INDEX(t, i) */ c1 FROM t WHERE c2 = 'value'")
+        self.validate_identity("SELECT @a MEMBER OF(@c), @b MEMBER OF(@c)")
+        self.validate_identity("SELECT JSON_ARRAY(4, 5) MEMBER OF('[[3,4],[4,5]]')")
+        self.validate_identity("SELECT CAST('[4,5]' AS JSON) MEMBER OF('[[3,4],[4,5]]')")
+        self.validate_identity("""SELECT 'ab' MEMBER OF('[23, "abc", 17, "ab", 10]')""")
+        self.validate_identity("""SELECT * FROM foo WHERE 'ab' MEMBER OF(content)""")
         self.validate_identity("CAST(x AS ENUM('a', 'b'))")
         self.validate_identity("CAST(x AS SET('a', 'b'))")
         self.validate_identity("SELECT CURRENT_TIMESTAMP(6)")
@@ -61,8 +90,15 @@ class TestMySQL(Validator):
         self.validate_identity("CREATE TABLE A LIKE B")
         self.validate_identity("SELECT * FROM t1, t2 FOR SHARE OF t1, t2 SKIP LOCKED")
         self.validate_identity(
+            """SELECT * FROM foo WHERE 3 MEMBER OF(JSON_EXTRACT(info, '$.value'))"""
+        )
+        self.validate_identity(
             "SELECT * FROM t1, t2, t3 FOR SHARE OF t1 NOWAIT FOR UPDATE OF t2, t3 SKIP LOCKED"
         )
+        self.validate_identity(
+            """SELECT * FROM foo WHERE 3 MEMBER OF(info->'$.value')""",
+            """SELECT * FROM foo WHERE 3 MEMBER OF(JSON_EXTRACT(info, '$.value'))""",
+        )
 
         # Index hints
         self.validate_identity(
@@ -403,6 +439,13 @@ class TestMySQL(Validator):
         self.validate_all("CAST(x AS UNSIGNED)", write={"mysql": "CAST(x AS UNSIGNED)"})
         self.validate_all("CAST(x AS UNSIGNED INTEGER)", write={"mysql": "CAST(x AS UNSIGNED)"})
         self.validate_all(
+            """SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')""",
+            write={
+                "": """SELECT JSON_ARRAY_CONTAINS(17, '[23, "abc", 17, "ab", 10]')""",
+                "mysql": """SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')""",
+            },
+        )
+        self.validate_all(
             "SELECT DATE_ADD('2023-06-23 12:00:00', INTERVAL 2 * 2 MONTH) FROM foo",
             write={
                 "mysql": "SELECT DATE_ADD('2023-06-23 12:00:00', INTERVAL (2 * 2) MONTH) FROM foo",
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index c391052..052d4cc 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -110,7 +110,7 @@ class TestPostgres(Validator):
         )
 
     @mock.patch("sqlglot.helper.logger")
-    def test_array_offset(self, mock_logger):
+    def test_array_offset(self, logger):
         self.validate_all(
             "SELECT col[1]",
             write={
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index 49139f9..45a0cd9 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -442,7 +442,7 @@ class TestPresto(Validator):
         )
 
     @mock.patch("sqlglot.helper.logger")
-    def test_presto(self, mock_logger):
+    def test_presto(self, logger):
         self.validate_identity("SELECT * FROM x OFFSET 1 LIMIT 1")
         self.validate_identity("SELECT * FROM x OFFSET 1 FETCH FIRST 1 ROWS ONLY")
         self.validate_identity("SELECT BOOL_OR(a > 10) FROM asd AS T(a)")
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index e9826a6..f7bab4d 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -6,6 +6,7 @@ class TestSnowflake(Validator):
     dialect = "snowflake"
 
     def test_snowflake(self):
+        self.validate_identity("SELECT SUM(amount) FROM mytable GROUP BY ALL")
         self.validate_identity("WITH x AS (SELECT 1 AS foo) SELECT foo FROM IDENTIFIER('x')")
         self.validate_identity("WITH x AS (SELECT 1 AS foo) SELECT IDENTIFIER('foo') FROM x")
         self.validate_identity("INITCAP('iqamqinterestedqinqthisqtopic', 'q')")
@@ -33,6 +34,9 @@ class TestSnowflake(Validator):
         self.validate_identity(
             'COPY INTO NEW_TABLE ("foo", "bar") FROM (SELECT $1, $2, $3, $4 FROM @%old_table)'
         )
+        self.validate_identity(
+            "SELECT state, city, SUM(retail_price * quantity) AS gross_revenue FROM sales GROUP BY ALL"
+        )
 
         self.validate_all("CAST(x AS BYTEINT)", write={"snowflake": "CAST(x AS INT)"})
         self.validate_all("CAST(x AS CHAR VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
@@ -611,6 +615,9 @@ class TestSnowflake(Validator):
         self.validate_identity(
             "CREATE SCHEMA mytestschema_clone_restore CLONE testschema BEFORE (TIMESTAMP => TO_TIMESTAMP(40 * 365 * 86400))"
         )
+        self.validate_identity(
+            "CREATE OR REPLACE TABLE EXAMPLE_DB.DEMO.USERS (ID DECIMAL(38, 0) NOT NULL, PRIMARY KEY (ID), FOREIGN KEY (CITY_CODE) REFERENCES EXAMPLE_DB.DEMO.CITIES (CITY_CODE))"
+        )
 
         self.validate_all(
             "CREATE OR REPLACE TRANSIENT TABLE a (id INT)",
diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index 8acc48e..25841c5 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -1,3 +1,5 @@
+from unittest import mock
+
 from tests.dialects.test_dialect import Validator
 
 
@@ -148,7 +150,8 @@ TBLPROPERTIES (
             },
         )
 
-    def test_hint(self):
+    @mock.patch("sqlglot.generator.logger")
+    def test_hint(self, logger):
         self.validate_all(
             "SELECT /*+ COALESCE(3) */ * FROM x",
             write={
diff --git a/tests/dialects/test_sqlite.py b/tests/dialects/test_sqlite.py
index 583d5be..10da9b0 100644
--- a/tests/dialects/test_sqlite.py
+++ b/tests/dialects/test_sqlite.py
@@ -20,8 +20,9 @@ class TestSQLite(Validator):
             CREATE TABLE "Track"
             (
                 CONSTRAINT "PK_Track" FOREIGN KEY ("TrackId"),
-                FOREIGN KEY ("AlbumId") REFERENCES "Album" ("AlbumId")
-                    ON DELETE NO ACTION ON UPDATE NO ACTION,
+                FOREIGN KEY ("AlbumId") REFERENCES "Album" (
+                    "AlbumId"
+                ) ON DELETE NO ACTION ON UPDATE NO ACTION,
                 FOREIGN KEY ("AlbumId") ON DELETE CASCADE ON UPDATE RESTRICT,
                 FOREIGN KEY ("AlbumId") ON DELETE SET NULL ON UPDATE SET DEFAULT
             )
@@ -29,7 +30,9 @@ class TestSQLite(Validator):
             write={
                 "sqlite": """CREATE TABLE "Track" (
   CONSTRAINT "PK_Track" FOREIGN KEY ("TrackId"),
-  FOREIGN KEY ("AlbumId") REFERENCES "Album"("AlbumId") ON DELETE NO ACTION ON UPDATE NO ACTION,
+  FOREIGN KEY ("AlbumId") REFERENCES "Album" (
+    "AlbumId"
+  ) ON DELETE NO ACTION ON UPDATE NO ACTION,
   FOREIGN KEY ("AlbumId") ON DELETE CASCADE ON UPDATE RESTRICT,
   FOREIGN KEY ("AlbumId") ON DELETE SET NULL ON UPDATE SET DEFAULT
 )""",
diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index 953d64d..ca6d70c 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -17,17 +17,29 @@ class TestTSQL(Validator):
         self.validate_identity("PRINT @TestVariable")
         self.validate_identity("SELECT Employee_ID, Department_ID FROM @MyTableVar")
         self.validate_identity("INSERT INTO @TestTable VALUES (1, 'Value1', 12, 20)")
-        self.validate_identity(
-            "SELECT x FROM @MyTableVar AS m JOIN Employee ON m.EmployeeID = Employee.EmployeeID"
-        )
         self.validate_identity('SELECT "x"."y" FROM foo')
         self.validate_identity("SELECT * FROM #foo")
         self.validate_identity("SELECT * FROM ##foo")
         self.validate_identity(
+            "SELECT x FROM @MyTableVar AS m JOIN Employee ON m.EmployeeID = Employee.EmployeeID"
+        )
+        self.validate_identity(
             "SELECT DISTINCT DepartmentName, PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY BaseRate) OVER (PARTITION BY DepartmentName) AS MedianCont FROM dbo.DimEmployee"
         )
 
         self.validate_all(
+            "SELECT DATEPART(year, TRY_CAST('2017-01-01' AS DATE))",
+            read={"postgres": "SELECT DATE_PART('year', '2017-01-01'::DATE)"},
+        )
+        self.validate_all(
+            "SELECT DATEPART(month, TRY_CAST('2017-03-01' AS DATE))",
+            read={"postgres": "SELECT DATE_PART('month', '2017-03-01'::DATE)"},
+        )
+        self.validate_all(
+            "SELECT DATEPART(day, TRY_CAST('2017-01-02' AS DATE))",
+            read={"postgres": "SELECT DATE_PART('day', '2017-01-02'::DATE)"},
+        )
+        self.validate_all(
             "SELECT CAST([a].[b] AS SMALLINT) FROM foo",
             write={
                 "tsql": 'SELECT CAST("a"."b" AS SMALLINT) FROM foo',
@@ -281,19 +293,20 @@ WHERE
 
     def test_datename(self):
         self.validate_all(
-            "SELECT DATENAME(mm,'01-01-1970')",
-            write={"spark": "SELECT DATE_FORMAT('01-01-1970', 'MMMM')"},
+            "SELECT DATENAME(mm,'1970-01-01')",
+            write={"spark": "SELECT DATE_FORMAT(CAST('1970-01-01' AS TIMESTAMP), 'MMMM')"},
         )
         self.validate_all(
-            "SELECT DATENAME(dw,'01-01-1970')",
-            write={"spark": "SELECT DATE_FORMAT('01-01-1970', 'EEEE')"},
+            "SELECT DATENAME(dw,'1970-01-01')",
+            write={"spark": "SELECT DATE_FORMAT(CAST('1970-01-01' AS TIMESTAMP), 'EEEE')"},
        )
 
     def test_datepart(self):
         self.validate_all(
-            "SELECT DATEPART(month,'01-01-1970')",
-            write={"spark": "SELECT DATE_FORMAT('01-01-1970', 'MM')"},
+            "SELECT DATEPART(month,'1970-01-01')",
+            write={"spark": "SELECT DATE_FORMAT(CAST('1970-01-01' AS TIMESTAMP), 'MM')"},
         )
+        self.validate_identity("DATEPART(YEAR, x)", "FORMAT(CAST(x AS DATETIME2), 'yyyy')")
 
     def test_convert_date_format(self):
         self.validate_all(
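The behaviour pinned down by the new expectations above can be reproduced through sqlglot's public `transpile` API. Below is a minimal sketch (assuming sqlglot 17.2.0 is installed); the expected outputs in the comments are copied from the assertions added in this diff rather than independently re-verified:

```python
import sqlglot

# BigQuery's DATE(year, month, day) overload now reads into DuckDB's MAKE_DATE
# (expected output taken from the new test_duckdb.py assertion).
print(sqlglot.transpile("SELECT DATE(2016, 12, 25)", read="bigquery", write="duckdb")[0])
# SELECT MAKE_DATE(2016, 12, 25)

# CAST ... FORMAT ... AT TIME ZONE is now generated as PARSE_TIMESTAMP in BigQuery
# (expected output taken from the new test_bigquery.py assertion).
print(
    sqlglot.transpile(
        "SELECT CAST('20201225' AS TIMESTAMP FORMAT 'YYYYMMDD' AT TIME ZONE 'America/New_York')",
        read="bigquery",
        write="bigquery",
    )[0]
)
# SELECT PARSE_TIMESTAMP('%Y%m%d', '20201225', 'America/New_York')

# Postgres DATE_PART now transpiles to T-SQL DATEPART over a TRY_CAST
# (expected output taken from the new test_tsql.py assertion).
print(
    sqlglot.transpile(
        "SELECT DATE_PART('year', '2017-01-01'::DATE)", read="postgres", write="tsql"
    )[0]
)
# SELECT DATEPART(year, TRY_CAST('2017-01-01' AS DATE))
```

For reference, `validate_all(sql, read={...})` in these tests asserts that parsing each `read` entry in its own dialect and generating SQL in the test class's dialect yields `sql`, which is how the read/write pairs in the sketch above were chosen.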