author     Daniel Baumann <daniel.baumann@progress-linux.org>  2023-09-13 09:17:37 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2023-09-13 09:17:37 +0000
commit     22342ec7693d09ebf96484be7c3cd5d8b506f38e (patch)
tree       b8fe2ebfb290eb425dcee4f15fa8cab46e74b40f /tests
parent     Adding upstream version 18.3.0. (diff)
download   sqlglot-22342ec7693d09ebf96484be7c3cd5d8b506f38e.tar.xz
           sqlglot-22342ec7693d09ebf96484be7c3cd5d8b506f38e.zip
Adding upstream version 18.4.1. (tag: upstream/18.4.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests')
-rw-r--r--  tests/dialects/test_clickhouse.py             | 65
-rw-r--r--  tests/dialects/test_duckdb.py                 | 16
-rw-r--r--  tests/dialects/test_mysql.py                  | 25
-rw-r--r--  tests/dialects/test_oracle.py                 | 28
-rw-r--r--  tests/dialects/test_postgres.py               |  3
-rw-r--r--  tests/dialects/test_redshift.py               | 14
-rw-r--r--  tests/fixtures/identity.sql                   |  1
-rw-r--r--  tests/fixtures/optimizer/merge_subqueries.sql | 15
-rw-r--r--  tests/fixtures/optimizer/optimizer.sql        | 36
-rw-r--r--  tests/test_optimizer.py                       |  5
-rw-r--r--  tests/test_parser.py                          | 15
11 files changed, 188 insertions, 35 deletions
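
Most of this patch pins down dialect-specific transpilation behavior. As an illustration only (not part of the patch), the same expectations can be reproduced against sqlglot's public API. The following is a minimal sketch, assuming sqlglot 18.4.1 is importable; the expected strings are copied from the new test assertions in the diff below.

# Illustrative sketch (assumes sqlglot 18.4.1); expected outputs are taken
# verbatim from the new test assertions in this patch.
from sqlglot import parse_one, transpile

# ClickHouse now folds MySQL-style text/blob types into String.
assert (
    parse_one("CAST(x AS TINYTEXT)", read="clickhouse").sql(dialect="clickhouse")
    == "CAST(x AS String)"
)

# DuckDB's FROM-first shorthand now generates a full SELECT.
assert transpile("FROM tbl", read="duckdb", write="duckdb")[0] == "SELECT * FROM tbl"

# For Redshift, CONCAT is expanded into NULL-safe || concatenation.
assert transpile("SELECT CONCAT('abc', 'def')", read="redshift", write="redshift")[0] == (
    "SELECT COALESCE(CAST('abc' AS VARCHAR(MAX)), '') || "
    "COALESCE(CAST('def' AS VARCHAR(MAX)), '')"
)

The validate_identity/validate_all helpers used throughout the tests are, in effect, wrappers around these same parse/generate round-trips.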
diff --git a/tests/dialects/test_clickhouse.py b/tests/dialects/test_clickhouse.py
index ab2379d..2cda0dc 100644
--- a/tests/dialects/test_clickhouse.py
+++ b/tests/dialects/test_clickhouse.py
@@ -6,30 +6,19 @@ class TestClickhouse(Validator):
     dialect = "clickhouse"
 
     def test_clickhouse(self):
-        self.validate_all(
-            "DATE_ADD('day', 1, x)",
-            read={
-                "clickhouse": "dateAdd(day, 1, x)",
-                "presto": "DATE_ADD('day', 1, x)",
-            },
-            write={
-                "clickhouse": "DATE_ADD('day', 1, x)",
-                "presto": "DATE_ADD('day', 1, x)",
-                "": "DATE_ADD(x, 1, 'day')",
-            },
-        )
-        self.validate_all(
-            "DATE_DIFF('day', a, b)",
-            read={
-                "clickhouse": "dateDiff('day', a, b)",
-                "presto": "DATE_DIFF('day', a, b)",
-            },
-            write={
-                "clickhouse": "DATE_DIFF('day', a, b)",
-                "presto": "DATE_DIFF('day', a, b)",
-                "": "DATEDIFF(b, a, day)",
-            },
-        )
+        string_types = [
+            "BLOB",
+            "LONGBLOB",
+            "LONGTEXT",
+            "MEDIUMBLOB",
+            "MEDIUMTEXT",
+            "TINYBLOB",
+            "TINYTEXT",
+            "VARCHAR(255)",
+        ]
+
+        for string_type in string_types:
+            self.validate_identity(f"CAST(x AS {string_type})", "CAST(x AS String)")
 
         expr = parse_one("count(x)")
         self.assertEqual(expr.sql(dialect="clickhouse"), "COUNT(x)")
@@ -72,8 +61,8 @@ class TestClickhouse(Validator):
         self.validate_identity("position(haystack, needle)")
         self.validate_identity("position(haystack, needle, position)")
         self.validate_identity("CAST(x AS DATETIME)")
-        self.validate_identity("CAST(x AS VARCHAR(255))", "CAST(x AS String)")
-        self.validate_identity("CAST(x AS BLOB)", "CAST(x AS String)")
+        self.validate_identity("CAST(x as MEDIUMINT)", "CAST(x AS Int32)")
+
         self.validate_identity(
             'SELECT CAST(tuple(1 AS "a", 2 AS "b", 3.0 AS "c").2 AS Nullable(String))'
         )
@@ -94,6 +83,30 @@ class TestClickhouse(Validator):
         )
 
         self.validate_all(
+            "DATE_ADD('day', 1, x)",
+            read={
+                "clickhouse": "dateAdd(day, 1, x)",
+                "presto": "DATE_ADD('day', 1, x)",
+            },
+            write={
+                "clickhouse": "DATE_ADD('day', 1, x)",
+                "presto": "DATE_ADD('day', 1, x)",
+                "": "DATE_ADD(x, 1, 'day')",
+            },
+        )
+        self.validate_all(
+            "DATE_DIFF('day', a, b)",
+            read={
+                "clickhouse": "dateDiff('day', a, b)",
+                "presto": "DATE_DIFF('day', a, b)",
+            },
+            write={
+                "clickhouse": "DATE_DIFF('day', a, b)",
+                "presto": "DATE_DIFF('day', a, b)",
+                "": "DATEDIFF(b, a, day)",
+            },
+        )
+        self.validate_all(
             "SELECT xor(1, 0)",
             read={
                 "clickhouse": "SELECT xor(1, 0)",
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index aca0d7a..c5ee679 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -19,6 +19,7 @@ class TestDuckDB(Validator):
             parse_one("a // b", read="duckdb").assert_is(exp.IntDiv).sql(dialect="duckdb"), "a // b"
         )
 
+        self.validate_identity("VAR_POP(a)")
         self.validate_identity("SELECT * FROM foo ASOF LEFT JOIN bar ON a = b")
         self.validate_identity("PIVOT Cities ON Year USING SUM(Population)")
         self.validate_identity("PIVOT Cities ON Year USING FIRST(Population)")
@@ -34,6 +35,9 @@ class TestDuckDB(Validator):
         self.validate_identity("SELECT (x, x + 1, y) FROM (SELECT 1 AS x, 'a' AS y)")
         self.validate_identity("SELECT a.x FROM (SELECT {'x': 1, 'y': 2, 'z': 3} AS a)")
         self.validate_identity("ATTACH DATABASE ':memory:' AS new_database")
+        self.validate_identity("FROM x SELECT x UNION SELECT 1", "SELECT x FROM x UNION SELECT 1")
+        self.validate_identity("FROM (FROM tbl)", "SELECT * FROM (SELECT * FROM tbl)")
+        self.validate_identity("FROM tbl", "SELECT * FROM tbl")
         self.validate_identity(
             "SELECT {'yes': 'duck', 'maybe': 'goose', 'huh': NULL, 'no': 'heron'}"
         )
@@ -53,14 +57,20 @@ class TestDuckDB(Validator):
             "SELECT * FROM (PIVOT Cities ON Year USING SUM(Population) GROUP BY Country) AS pivot_alias"
         )
 
-        self.validate_identity("FROM x SELECT x UNION SELECT 1", "SELECT x FROM x UNION SELECT 1")
-        self.validate_all("FROM (FROM tbl)", write={"duckdb": "SELECT * FROM (SELECT * FROM tbl)"})
-        self.validate_all("FROM tbl", write={"duckdb": "SELECT * FROM tbl"})
         self.validate_all("0b1010", write={"": "0 AS b1010"})
         self.validate_all("0x1010", write={"": "0 AS x1010"})
         self.validate_all("x ~ y", write={"duckdb": "REGEXP_MATCHES(x, y)"})
         self.validate_all("SELECT * FROM 'x.y'", write={"duckdb": 'SELECT * FROM "x.y"'})
         self.validate_all(
+            "VAR_POP(x)",
+            read={
+                "": "VARIANCE_POP(x)",
+            },
+            write={
+                "": "VARIANCE_POP(x)",
+            },
+        )
+        self.validate_all(
             "DATE_DIFF('day', CAST(b AS DATE), CAST(a AS DATE))",
             read={
                 "duckdb": "DATE_DIFF('day', CAST(b AS DATE), CAST(a AS DATE))",
diff --git a/tests/dialects/test_mysql.py b/tests/dialects/test_mysql.py
index 1f1c2e9..6104e3f 100644
--- a/tests/dialects/test_mysql.py
+++ b/tests/dialects/test_mysql.py
@@ -27,6 +27,9 @@ class TestMySQL(Validator):
         self.validate_identity("CREATE TABLE foo (a BIGINT, FULLTEXT INDEX (b))")
         self.validate_identity("CREATE TABLE foo (a BIGINT, SPATIAL INDEX (b))")
         self.validate_identity(
+            "CREATE TABLE `x` (`username` VARCHAR(200), PRIMARY KEY (`username`(16)))"
+        )
+        self.validate_identity(
             "UPDATE items SET items.price = 0 WHERE items.id >= 5 ORDER BY items.id LIMIT 10"
         )
         self.validate_identity(
@@ -204,21 +207,21 @@ class TestMySQL(Validator):
         self.validate_identity("CAST(x AS MEDIUMINT) + CAST(y AS YEAR(4))")
 
         self.validate_all(
-            "CAST(x AS MEDIUMTEXT) + CAST(y AS LONGTEXT)",
+            "CAST(x AS MEDIUMTEXT) + CAST(y AS LONGTEXT) + CAST(z AS TINYTEXT)",
             read={
-                "mysql": "CAST(x AS MEDIUMTEXT) + CAST(y AS LONGTEXT)",
+                "mysql": "CAST(x AS MEDIUMTEXT) + CAST(y AS LONGTEXT) + CAST(z AS TINYTEXT)",
             },
             write={
-                "spark": "CAST(x AS TEXT) + CAST(y AS TEXT)",
+                "spark": "CAST(x AS TEXT) + CAST(y AS TEXT) + CAST(z AS TEXT)",
             },
         )
         self.validate_all(
-            "CAST(x AS MEDIUMBLOB) + CAST(y AS LONGBLOB)",
+            "CAST(x AS MEDIUMBLOB) + CAST(y AS LONGBLOB) + CAST(z AS TINYBLOB)",
             read={
-                "mysql": "CAST(x AS MEDIUMBLOB) + CAST(y AS LONGBLOB)",
+                "mysql": "CAST(x AS MEDIUMBLOB) + CAST(y AS LONGBLOB) + CAST(z AS TINYBLOB)",
             },
             write={
-                "spark": "CAST(x AS BLOB) + CAST(y AS BLOB)",
+                "spark": "CAST(x AS BLOB) + CAST(y AS BLOB) + CAST(z AS BLOB)",
             },
         )
         self.validate_all("CAST(x AS TIMESTAMP)", write={"mysql": "CAST(x AS DATETIME)"})
@@ -240,6 +243,15 @@ class TestMySQL(Validator):
         )
 
     def test_escape(self):
+        self.validate_identity("""'"abc"'""")
+        self.validate_identity(
+            r"'\'a'",
+            "'''a'",
+        )
+        self.validate_identity(
+            '''"'abc'"''',
+            "'''abc'''",
+        )
         self.validate_all(
             r"'a \' b '' '",
             write={
@@ -525,6 +537,7 @@ class TestMySQL(Validator):
                 "mysql": "SELECT DATE(DATE_SUB(`dt`, INTERVAL (DAYOFMONTH(`dt`) - 1) DAY)) AS __timestamp FROM tableT",
             },
         )
+        self.validate_identity("SELECT name FROM temp WHERE name = ? FOR UPDATE")
         self.validate_all(
             "SELECT a FROM tbl FOR UPDATE",
             write={
diff --git a/tests/dialects/test_oracle.py b/tests/dialects/test_oracle.py
index 01a9ca3..2dfd179 100644
--- a/tests/dialects/test_oracle.py
+++ b/tests/dialects/test_oracle.py
@@ -6,6 +6,7 @@ class TestOracle(Validator):
     dialect = "oracle"
 
     def test_oracle(self):
+        self.validate_identity("SELECT JSON_OBJECT(k1: v1 FORMAT JSON, k2: v2 FORMAT JSON)")
         self.validate_identity("SELECT JSON_OBJECT('name': first_name || ' ' || last_name) FROM t")
         self.validate_identity("COALESCE(c1, c2, c3)")
         self.validate_identity("SELECT * FROM TABLE(foo)")
@@ -26,6 +27,15 @@ class TestOracle(Validator):
         self.validate_identity("SELECT * FROM table_name SAMPLE (25) s")
         self.validate_identity("SELECT * FROM V$SESSION")
         self.validate_identity(
+            "SELECT JSON_ARRAYAGG(JSON_OBJECT('RNK': RNK, 'RATING_CODE': RATING_CODE, 'DATE_VALUE': DATE_VALUE, 'AGENT_ID': AGENT_ID RETURNING CLOB) RETURNING CLOB) AS JSON_DATA FROM tablename"
+        )
+        self.validate_identity(
+            "SELECT JSON_ARRAY(FOO() FORMAT JSON, BAR() NULL ON NULL RETURNING CLOB STRICT)"
+        )
+        self.validate_identity(
+            "SELECT JSON_ARRAYAGG(FOO() FORMAT JSON ORDER BY bar NULL ON NULL RETURNING CLOB STRICT)"
+        )
+        self.validate_identity(
             "SELECT COUNT(1) INTO V_Temp FROM TABLE(CAST(somelist AS data_list)) WHERE col LIKE '%contact'"
         )
         self.validate_identity(
@@ -190,3 +200,21 @@ MATCH_RECOGNIZE (
 ) MR""",
             pretty=True,
         )
+
+    def test_json_table(self):
+        self.validate_identity(
+            "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS (foo PATH 'bar'))"
+        )
+        self.validate_identity(
+            "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS foo PATH 'bar')",
+            "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS (foo PATH 'bar'))",
+        )
+        self.validate_identity(
+            """SELECT
+  CASE WHEN DBMS_LOB.GETLENGTH(info) < 32000 THEN DBMS_LOB.SUBSTR(info) END AS info_txt,
+  info AS info_clob
+FROM schemaname.tablename ar
+INNER JOIN JSON_TABLE(:emps, '$[*]' COLUMNS (empno NUMBER PATH '$')) jt
+  ON ar.empno = jt.empno""",
+            pretty=True,
+        )
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 21196b7..151f3af 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -133,6 +133,9 @@ class TestPostgres(Validator):
         alter_table_only = """ALTER TABLE ONLY "Album" ADD CONSTRAINT "FK_AlbumArtistId" FOREIGN KEY ("ArtistId") REFERENCES "Artist" ("ArtistId") ON DELETE NO ACTION ON UPDATE NO ACTION"""
         expr = parse_one(alter_table_only)
 
+        # Checks that user-defined types are parsed into DataType instead of Identifier
+        parse_one("CREATE TABLE t (a udt)").this.expressions[0].args["kind"].assert_is(exp.DataType)
+
         self.assertIsInstance(expr, exp.AlterTable)
         self.assertEqual(expr.sql(dialect="postgres"), alter_table_only)
diff --git a/tests/dialects/test_redshift.py b/tests/dialects/test_redshift.py
index aea8b69..e261c01 100644
--- a/tests/dialects/test_redshift.py
+++ b/tests/dialects/test_redshift.py
@@ -360,3 +360,17 @@ class TestRedshift(Validator):
                 "redshift": "CREATE OR REPLACE VIEW v1 AS SELECT cola, colb FROM t1 WITH NO SCHEMA BINDING",
             },
         )
+
+    def test_concat(self):
+        self.validate_all(
+            "SELECT CONCAT('abc', 'def')",
+            write={
+                "redshift": "SELECT COALESCE(CAST('abc' AS VARCHAR(MAX)), '') || COALESCE(CAST('def' AS VARCHAR(MAX)), '')",
+            },
+        )
+        self.validate_all(
+            "SELECT CONCAT_WS('DELIM', 'abc', 'def', 'ghi')",
+            write={
+                "redshift": "SELECT COALESCE(CAST('abc' AS VARCHAR(MAX)), '') || 'DELIM' || COALESCE(CAST('def' AS VARCHAR(MAX)), '') || 'DELIM' || COALESCE(CAST('ghi' AS VARCHAR(MAX)), '')",
+            },
+        )
diff --git a/tests/fixtures/identity.sql b/tests/fixtures/identity.sql
index 0953fee..f999620 100644
--- a/tests/fixtures/identity.sql
+++ b/tests/fixtures/identity.sql
@@ -717,6 +717,7 @@ UPDATE tbl_name SET foo = 123, bar = 345
 UPDATE db.tbl_name SET foo = 123 WHERE tbl_name.bar = 234
 UPDATE db.tbl_name SET foo = 123, foo_1 = 234 WHERE tbl_name.bar = 234
 UPDATE products SET price = price * 1.10 WHERE price <= 99.99 RETURNING name, price AS new_price
+UPDATE t1 AS a, t2 AS b, t3 AS c LEFT JOIN t4 AS d ON c.id = d.id SET a.id = 1
 TRUNCATE TABLE x
 OPTIMIZE TABLE y
 VACUUM FREEZE my_table
diff --git a/tests/fixtures/optimizer/merge_subqueries.sql b/tests/fixtures/optimizer/merge_subqueries.sql
index b89e2fb..eec8a73 100644
--- a/tests/fixtures/optimizer/merge_subqueries.sql
+++ b/tests/fixtures/optimizer/merge_subqueries.sql
@@ -310,6 +310,21 @@ FROM t1;
 SELECT x.a AS a, x.b AS b, ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.a) AS row_num FROM x AS x;
 
+# title: Don't merge window functions, inner table is aliased in outer query
+with t1 as (
+  SELECT
+    ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.a) as row_num
+  FROM
+    x
+)
+SELECT
+  t2.row_num
+FROM
+  t1 AS t2
+WHERE
+  t2.row_num = 2;
+WITH t1 AS (SELECT ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.a) AS row_num FROM x AS x) SELECT t2.row_num AS row_num FROM t1 AS t2 WHERE t2.row_num = 2;
+
 # title: Values Test
 # dialect: spark
 WITH t1 AS (
diff --git a/tests/fixtures/optimizer/optimizer.sql b/tests/fixtures/optimizer/optimizer.sql
index 18ee804..4a994c1 100644
--- a/tests/fixtures/optimizer/optimizer.sql
+++ b/tests/fixtures/optimizer/optimizer.sql
@@ -987,3 +987,39 @@ SELECT
 FROM "SALES" AS "SALES"
 WHERE
   "SALES"."INSERT_TS" > '2023-08-07 21:03:35.590 -0700';
+
+# title: using join without select *
+# execute: false
+with
+  alias1 as (select * from table1),
+  alias2 as (select * from table2),
+  alias3 as (
+    select
+      cid,
+      min(od) as m_od,
+      count(odi) as c_od,
+    from alias2
+    group by 1
+  )
+select
+  alias1.cid,
+  alias3.m_od,
+  coalesce(alias3.c_od, 0) as c_od,
+from alias1
+left join alias3 using (cid);
+WITH "alias3" AS (
+  SELECT
+    "table2"."cid" AS "cid",
+    MIN("table2"."od") AS "m_od",
+    COUNT("table2"."odi") AS "c_od"
+  FROM "table2" AS "table2"
+  GROUP BY
+    "table2"."cid"
+)
+SELECT
+  "table1"."cid" AS "cid",
+  "alias3"."m_od" AS "m_od",
+  COALESCE("alias3"."c_od", 0) AS "c_od"
+FROM "table1" AS "table1"
+LEFT JOIN "alias3"
+  ON "table1"."cid" = "alias3"."cid";
diff --git a/tests/test_optimizer.py b/tests/test_optimizer.py
index e001c1f..4415e03 100644
--- a/tests/test_optimizer.py
+++ b/tests/test_optimizer.py
@@ -858,3 +858,8 @@ FROM READ_CSV('tests/fixtures/optimizer/tpc-h/nation.csv.gz', 'delimiter', '|')
             ),
             parse_one('SELECT "a"."a" AS "a", "a"."b" AS "b" FROM "a" AS "a"'),
         )
+
+    def test_semistructured(self):
+        query = parse_one("select a.b:c from d", read="snowflake")
+        qualified = optimizer.qualify.qualify(query)
+        self.assertEqual(qualified.expressions[0].alias, "c")
diff --git a/tests/test_parser.py b/tests/test_parser.py
index 7135dd8..ad9b941 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -719,3 +719,18 @@ class TestParser(unittest.TestCase):
 
         self.assertEqual(ast.find(exp.Interval).this.sql(), "'71'")
         self.assertEqual(ast.find(exp.Interval).unit.assert_is(exp.Var).sql(), "days")
+
+    def test_parse_concat_ws(self):
+        ast = parse_one("CONCAT_WS(' ', 'John', 'Doe')")
+
+        self.assertEqual(ast.sql(), "CONCAT_WS(' ', 'John', 'Doe')")
+        self.assertEqual(ast.expressions[0].sql(), "' '")
+        self.assertEqual(ast.expressions[1].sql(), "'John'")
+        self.assertEqual(ast.expressions[2].sql(), "'Doe'")
+
+        # Ensure we can parse without argument when error level is ignore
+        ast = parse(
+            "CONCAT_WS()",
+            error_level=ErrorLevel.IGNORE,
+        )
+        self.assertEqual(ast[0].sql(), "CONCAT_WS()")