| author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-03-03 14:11:03 +0000 |
|---|---|---|
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-03-03 14:11:03 +0000 |
| commit | 5d0ea770947ae1da51537ff75b14b48218d729aa (patch) | |
| tree | 86e66fc69feeae4c4b23749e7779f6731791edcc /tests/dialects | |
| parent | Adding upstream version 21.1.2. (diff) | |
Adding upstream version 22.2.0. (upstream/22.2.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | tests/dialects/test_bigquery.py | 164 |
| -rw-r--r-- | tests/dialects/test_clickhouse.py | 20 |
| -rw-r--r-- | tests/dialects/test_databricks.py | 5 |
| -rw-r--r-- | tests/dialects/test_dialect.py | 13 |
| -rw-r--r-- | tests/dialects/test_doris.py | 10 |
| -rw-r--r-- | tests/dialects/test_duckdb.py | 61 |
| -rw-r--r-- | tests/dialects/test_hive.py | 9 |
| -rw-r--r-- | tests/dialects/test_mysql.py | 53 |
| -rw-r--r-- | tests/dialects/test_oracle.py | 22 |
| -rw-r--r-- | tests/dialects/test_postgres.py | 45 |
| -rw-r--r-- | tests/dialects/test_presto.py | 16 |
| -rw-r--r-- | tests/dialects/test_redshift.py | 13 |
| -rw-r--r-- | tests/dialects/test_snowflake.py | 167 |
| -rw-r--r-- | tests/dialects/test_spark.py | 1 |
| -rw-r--r-- | tests/dialects/test_sqlite.py | 12 |
| -rw-r--r-- | tests/dialects/test_tsql.py | 142 |
16 files changed, 585 insertions, 168 deletions
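One of the behavioral changes exercised by the first hunk below is that named BigQuery STRUCTs now transpile to DuckDB struct literals. A minimal sketch of reproducing that assertion with sqlglot's public `transpile` API, assuming sqlglot >= 22.2.0 is installed:

```python
# Minimal sketch, assuming sqlglot >= 22.2.0: named BigQuery STRUCTs
# transpile to DuckDB struct literals, per the test_bigquery hunk below.
import sqlglot

sql = "SELECT STRUCT(1 AS a, 'abc' AS b)"
print(sqlglot.transpile(sql, read="bigquery", write="duckdb")[0])
# Expected by the new test: SELECT {'a': 1, 'b': 'abc'}
```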
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index 8c47948..0d94d19 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -5,7 +5,9 @@ from sqlglot import (
     ParseError,
     TokenError,
     UnsupportedError,
+    exp,
     parse,
+    parse_one,
     transpile,
 )
 from sqlglot.helper import logger as helper_logger
@@ -18,6 +20,51 @@ class TestBigQuery(Validator):
     maxDiff = None

     def test_bigquery(self):
+        self.validate_all(
+            "SELECT STRUCT(1, 2, 3), STRUCT(), STRUCT('abc'), STRUCT(1, t.str_col), STRUCT(1 as a, 'abc' AS b), STRUCT(str_col AS abc)",
+            write={
+                "bigquery": "SELECT STRUCT(1, 2, 3), STRUCT(), STRUCT('abc'), STRUCT(1, t.str_col), STRUCT(1 AS a, 'abc' AS b), STRUCT(str_col AS abc)",
+                "duckdb": "SELECT {'_0': 1, '_1': 2, '_2': 3}, {}, {'_0': 'abc'}, {'_0': 1, '_1': t.str_col}, {'a': 1, 'b': 'abc'}, {'abc': str_col}",
+                "hive": "SELECT STRUCT(1, 2, 3), STRUCT(), STRUCT('abc'), STRUCT(1, t.str_col), STRUCT(1, 'abc'), STRUCT(str_col)",
+                "spark2": "SELECT STRUCT(1, 2, 3), STRUCT(), STRUCT('abc'), STRUCT(1, t.str_col), STRUCT(1 AS a, 'abc' AS b), STRUCT(str_col AS abc)",
+                "spark": "SELECT STRUCT(1, 2, 3), STRUCT(), STRUCT('abc'), STRUCT(1, t.str_col), STRUCT(1 AS a, 'abc' AS b), STRUCT(str_col AS abc)",
+                "snowflake": "SELECT OBJECT_CONSTRUCT('_0', 1, '_1', 2, '_2', 3), OBJECT_CONSTRUCT(), OBJECT_CONSTRUCT('_0', 'abc'), OBJECT_CONSTRUCT('_0', 1, '_1', t.str_col), OBJECT_CONSTRUCT('a', 1, 'b', 'abc'), OBJECT_CONSTRUCT('abc', str_col)",
+                # fallback to unnamed without type inference
+                "trino": "SELECT ROW(1, 2, 3), ROW(), ROW('abc'), ROW(1, t.str_col), CAST(ROW(1, 'abc') AS ROW(a INTEGER, b VARCHAR)), ROW(str_col)",
+            },
+        )
+        self.validate_all(
+            "PARSE_TIMESTAMP('%Y-%m-%dT%H:%M:%E6S%z', x)",
+            write={
+                "bigquery": "PARSE_TIMESTAMP('%Y-%m-%dT%H:%M:%E6S%z', x)",
+                "duckdb": "STRPTIME(x, '%Y-%m-%dT%H:%M:%S.%f%z')",
+            },
+        )
+
+        table = parse_one("x-0._y.z", dialect="bigquery", into=exp.Table)
+        self.assertEqual(table.catalog, "x-0")
+        self.assertEqual(table.db, "_y")
+        self.assertEqual(table.name, "z")
+
+        table = parse_one("x-0._y", dialect="bigquery", into=exp.Table)
+        self.assertEqual(table.db, "x-0")
+        self.assertEqual(table.name, "_y")
+
+        self.validate_identity("SELECT * FROM x-0.y")
+        self.assertEqual(exp.to_table("`x.y.z`", dialect="bigquery").sql(), '"x"."y"."z"')
+        self.assertEqual(exp.to_table("`x.y.z`", dialect="bigquery").sql("bigquery"), "`x.y.z`")
+        self.assertEqual(exp.to_table("`x`.`y`", dialect="bigquery").sql("bigquery"), "`x`.`y`")
+
+        select_with_quoted_udf = self.validate_identity("SELECT `p.d.UdF`(data) FROM `p.d.t`")
+        self.assertEqual(select_with_quoted_udf.selects[0].name, "p.d.UdF")
+
+        self.validate_identity("SELECT `p.d.UdF`(data).* FROM `p.d.t`")
+        self.validate_identity("SELECT * FROM `my-project.my-dataset.my-table`")
+        self.validate_identity("CREATE OR REPLACE TABLE `a.b.c` CLONE `a.b.d`")
+        self.validate_identity("SELECT x, 1 AS y GROUP BY 1 ORDER BY 1")
+        self.validate_identity("SELECT * FROM x.*")
+        self.validate_identity("SELECT * FROM x.y*")
+        self.validate_identity("CASE A WHEN 90 THEN 'red' WHEN 50 THEN 'blue' ELSE 'green' END")
         self.validate_identity("CREATE SCHEMA x DEFAULT COLLATE 'en'")
         self.validate_identity("CREATE TABLE x (y INT64) DEFAULT COLLATE 'en'")
         self.validate_identity("PARSE_JSON('{}', wide_number_mode => 'exact')")
@@ -90,6 +137,16 @@ class TestBigQuery(Validator):
         self.validate_identity("LOG(n, b)")
         self.validate_identity("SELECT COUNT(x RESPECT NULLS)")
         self.validate_identity("SELECT LAST_VALUE(x IGNORE NULLS) OVER y AS x")
+        self.validate_identity("SELECT ARRAY((SELECT AS STRUCT 1 AS a, 2 AS b))")
+        self.validate_identity("SELECT ARRAY((SELECT AS STRUCT 1 AS a, 2 AS b) LIMIT 10)")
+        self.validate_identity("CAST(x AS CHAR)", "CAST(x AS STRING)")
+        self.validate_identity("CAST(x AS NCHAR)", "CAST(x AS STRING)")
+        self.validate_identity("CAST(x AS NVARCHAR)", "CAST(x AS STRING)")
+        self.validate_identity("CAST(x AS TIMESTAMPTZ)", "CAST(x AS TIMESTAMP)")
+        self.validate_identity("CAST(x AS RECORD)", "CAST(x AS STRUCT)")
+        self.validate_identity(
+            "SELECT * FROM `SOME_PROJECT_ID.SOME_DATASET_ID.INFORMATION_SCHEMA.SOME_VIEW`"
+        )
         self.validate_identity(
             "SELECT * FROM test QUALIFY a IS DISTINCT FROM b WINDOW c AS (PARTITION BY d)"
         )
@@ -121,6 +178,10 @@ class TestBigQuery(Validator):
             """SELECT JSON_EXTRACT_SCALAR('5')""", """SELECT JSON_EXTRACT_SCALAR('5', '$')"""
         )
         self.validate_identity(
+            "SELECT ARRAY(SELECT AS STRUCT 1 a, 2 b)",
+            "SELECT ARRAY(SELECT AS STRUCT 1 AS a, 2 AS b)",
+        )
+        self.validate_identity(
             "select array_contains([1, 2, 3], 1)",
             "SELECT EXISTS(SELECT 1 FROM UNNEST([1, 2, 3]) AS _col WHERE _col = 1)",
         )
@@ -169,10 +230,6 @@ class TestBigQuery(Validator):
             """SELECT PARSE_JSON('"foo"') AS json_data""",
         )
         self.validate_identity(
-            "CREATE OR REPLACE TABLE `a.b.c` CLONE `a.b.d`",
-            "CREATE OR REPLACE TABLE a.b.c CLONE a.b.d",
-        )
-        self.validate_identity(
             "SELECT * FROM UNNEST(x) WITH OFFSET EXCEPT DISTINCT SELECT * FROM UNNEST(y) WITH OFFSET",
             "SELECT * FROM UNNEST(x) WITH OFFSET AS offset EXCEPT DISTINCT SELECT * FROM UNNEST(y) WITH OFFSET AS offset",
         )
@@ -186,6 +243,39 @@ class TestBigQuery(Validator):
         )

         self.validate_all(
+            "PARSE_TIMESTAMP('%Y-%m-%dT%H:%M:%E6S%z', x)",
+            write={
+                "bigquery": "PARSE_TIMESTAMP('%Y-%m-%dT%H:%M:%E6S%z', x)",
+                "duckdb": "STRPTIME(x, '%Y-%m-%dT%H:%M:%S.%f%z')",
+            },
+        )
+        self.validate_all(
+            "SELECT results FROM Coordinates, Coordinates.position AS results",
+            write={
+                "bigquery": "SELECT results FROM Coordinates, UNNEST(Coordinates.position) AS results",
+                "presto": "SELECT results FROM Coordinates, UNNEST(Coordinates.position) AS _t(results)",
+            },
+        )
+        self.validate_all(
+            "SELECT results FROM Coordinates, `Coordinates.position` AS results",
+            write={
+                "bigquery": "SELECT results FROM Coordinates, `Coordinates.position` AS results",
+                "presto": 'SELECT results FROM Coordinates, "Coordinates"."position" AS results',
+            },
+        )
+        self.validate_all(
+            "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS results",
+            read={
+                "presto": "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS _t(results)",
+                "redshift": "SELECT results FROM Coordinates AS c, c.position AS results",
+            },
+            write={
+                "bigquery": "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS results",
+                "presto": "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS _t(results)",
+                "redshift": "SELECT results FROM Coordinates AS c, c.position AS results",
+            },
+        )
+        self.validate_all(
             "TIMESTAMP(x)",
             write={
                 "bigquery": "TIMESTAMP(x)",
@@ -434,8 +524,8 @@ class TestBigQuery(Validator):
         self.validate_all(
             "CREATE OR REPLACE TABLE `a.b.c` COPY `a.b.d`",
             write={
-                "bigquery": "CREATE OR REPLACE TABLE a.b.c COPY a.b.d",
-                "snowflake": "CREATE OR REPLACE TABLE a.b.c CLONE a.b.d",
+                "bigquery": "CREATE OR REPLACE TABLE `a.b.c` COPY `a.b.d`",
+                "snowflake": 'CREATE OR REPLACE TABLE "a"."b"."c" CLONE "a"."b"."d"',
             },
         )
         (
@@ -475,11 +565,6 @@ class TestBigQuery(Validator):
             ),
         )
         self.validate_all("LEAST(x, y)", read={"sqlite": "MIN(x, y)"})
-        self.validate_all("CAST(x AS CHAR)", write={"bigquery": "CAST(x AS STRING)"})
-        self.validate_all("CAST(x AS NCHAR)", write={"bigquery": "CAST(x AS STRING)"})
-        self.validate_all("CAST(x AS NVARCHAR)", write={"bigquery": "CAST(x AS STRING)"})
-        self.validate_all("CAST(x AS TIMESTAMPTZ)", write={"bigquery": "CAST(x AS TIMESTAMP)"})
-        self.validate_all("CAST(x AS RECORD)", write={"bigquery": "CAST(x AS STRUCT)"})
         self.validate_all(
             'SELECT TIMESTAMP_ADD(TIMESTAMP "2008-12-25 15:30:00+00", INTERVAL 10 MINUTE)',
             write={
@@ -566,11 +651,11 @@ class TestBigQuery(Validator):
             read={"spark": "select posexplode_outer([])"},
         )
         self.validate_all(
-            "SELECT AS STRUCT ARRAY(SELECT AS STRUCT b FROM x) AS y FROM z",
+            "SELECT AS STRUCT ARRAY(SELECT AS STRUCT 1 AS b FROM x) AS y FROM z",
             write={
-                "": "SELECT AS STRUCT ARRAY(SELECT AS STRUCT b FROM x) AS y FROM z",
-                "bigquery": "SELECT AS STRUCT ARRAY(SELECT AS STRUCT b FROM x) AS y FROM z",
-                "duckdb": "SELECT {'y': ARRAY(SELECT {'b': b} FROM x)} FROM z",
+                "": "SELECT AS STRUCT ARRAY(SELECT AS STRUCT 1 AS b FROM x) AS y FROM z",
+                "bigquery": "SELECT AS STRUCT ARRAY(SELECT AS STRUCT 1 AS b FROM x) AS y FROM z",
+                "duckdb": "SELECT {'y': ARRAY(SELECT {'b': 1} FROM x)} FROM z",
             },
         )
         self.validate_all(
@@ -585,25 +670,9 @@ class TestBigQuery(Validator):
                 "bigquery": "PARSE_TIMESTAMP('%Y.%m.%d %I:%M:%S%z', x)",
             },
         )
-        self.validate_all(
+        self.validate_identity(
             "CREATE TEMP TABLE foo AS SELECT 1",
-            write={"bigquery": "CREATE TEMPORARY TABLE foo AS SELECT 1"},
-        )
-        self.validate_all(
-            "SELECT * FROM `SOME_PROJECT_ID.SOME_DATASET_ID.INFORMATION_SCHEMA.SOME_VIEW`",
-            write={
-                "bigquery": "SELECT * FROM SOME_PROJECT_ID.SOME_DATASET_ID.INFORMATION_SCHEMA.SOME_VIEW",
-            },
-        )
-        self.validate_all(
-            "SELECT * FROM `my-project.my-dataset.my-table`",
-            write={"bigquery": "SELECT * FROM `my-project`.`my-dataset`.`my-table`"},
-        )
-        self.validate_all(
-            "SELECT ARRAY(SELECT AS STRUCT 1 a, 2 b)",
-            write={
-                "bigquery": "SELECT ARRAY(SELECT AS STRUCT 1 AS a, 2 AS b)",
-            },
+            "CREATE TEMPORARY TABLE foo AS SELECT 1",
         )
         self.validate_all(
             "REGEXP_CONTAINS('foo', '.*')",
@@ -1088,6 +1157,35 @@ WHERE
         self.assertIn("unsupported syntax", cm.output[0])

+        with self.assertLogs(helper_logger):
+            statements = parse(
+                """
+                BEGIN
+                    DECLARE MY_VAR INT64 DEFAULT 1;
+                    SET MY_VAR = (SELECT 0);
+
+                    IF MY_VAR = 1 THEN SELECT 'TRUE';
+                    ELSEIF MY_VAR = 0 THEN SELECT 'FALSE';
+                    ELSE SELECT 'NULL';
+                    END IF;
+                END
+                """,
+                read="bigquery",
+            )
+
+            expected_statements = (
+                "BEGIN DECLARE MY_VAR INT64 DEFAULT 1",
+                "SET MY_VAR = (SELECT 0)",
+                "IF MY_VAR = 1 THEN SELECT 'TRUE'",
+                "ELSEIF MY_VAR = 0 THEN SELECT 'FALSE'",
+                "ELSE SELECT 'NULL'",
+                "END IF",
+                "END",
+            )
+
+            for actual, expected in zip(statements, expected_statements):
+                self.assertEqual(actual.sql(dialect="bigquery"), expected)
+
         with self.assertLogs(helper_logger) as cm:
             self.validate_identity(
                 "SELECT * FROM t AS t(c1, c2)",
diff --git a/tests/dialects/test_clickhouse.py b/tests/dialects/test_clickhouse.py
index 0148812..edf3da1 100644
--- a/tests/dialects/test_clickhouse.py
+++ b/tests/dialects/test_clickhouse.py
@@ -6,6 +6,21 @@ class TestClickhouse(Validator):
     dialect = "clickhouse"

     def test_clickhouse(self):
+        self.validate_all(
+            "SELECT * FROM x PREWHERE y = 1 WHERE z = 2",
+            write={
+                "": "SELECT * FROM x WHERE z = 2",
+                "clickhouse": "SELECT * FROM x PREWHERE y = 1 WHERE z = 2",
+            },
+        )
+        self.validate_all(
+            "SELECT * FROM x AS prewhere",
+            read={
+                "clickhouse": "SELECT * FROM x AS prewhere",
+                "duckdb": "SELECT * FROM x prewhere",
+            },
+        )
+
         self.validate_identity("SELECT * FROM x LIMIT 1 UNION ALL SELECT * FROM y")

         string_types = [
@@ -77,6 +92,7 @@ class TestClickhouse(Validator):
         self.validate_identity("""SELECT JSONExtractString('{"x": {"y": 1}}', 'x', 'y')""")
         self.validate_identity("SELECT * FROM table LIMIT 1 BY a, b")
         self.validate_identity("SELECT * FROM table LIMIT 2 OFFSET 1 BY a, b")
+
         self.validate_identity(
             "SELECT $1$foo$1$",
             "SELECT 'foo'",
@@ -134,6 +150,9 @@ class TestClickhouse(Validator):
         self.validate_identity(
             "CREATE MATERIALIZED VIEW test_view (id UInt8) TO db.table1 AS SELECT * FROM test_data"
         )
+        self.validate_identity("TRUNCATE TABLE t1 ON CLUSTER test_cluster")
+        self.validate_identity("TRUNCATE DATABASE db")
+        self.validate_identity("TRUNCATE DATABASE db ON CLUSTER test_cluster")

         self.validate_all(
             "SELECT arrayJoin([1,2,3])",
@@ -373,6 +392,7 @@ class TestClickhouse(Validator):

     def test_cte(self):
         self.validate_identity("WITH 'x' AS foo SELECT foo")
+        self.validate_identity("WITH ['c'] AS field_names SELECT field_names")
         self.validate_identity("WITH SUM(bytes) AS foo SELECT foo FROM system.parts")
         self.validate_identity("WITH (SELECT foo) AS bar SELECT bar + 5")
         self.validate_identity("WITH test1 AS (SELECT i + 1, j + 1 FROM test1) SELECT * FROM test1")
diff --git a/tests/dialects/test_databricks.py b/tests/dialects/test_databricks.py
index 8222170..94f2dc2 100644
--- a/tests/dialects/test_databricks.py
+++ b/tests/dialects/test_databricks.py
@@ -38,6 +38,11 @@ class TestDatabricks(Validator):
             "CREATE FUNCTION add_one(x INT) RETURNS INT LANGUAGE PYTHON AS $FOO$def add_one(x):\n return x+1$FOO$"
         )

+        self.validate_identity("TRUNCATE TABLE t1 PARTITION(age = 10, name = 'test', address)")
+        self.validate_identity(
+            "TRUNCATE TABLE t1 PARTITION(age = 10, name = 'test', city LIKE 'LA')"
+        )
+
         self.validate_all(
             "CREATE TABLE foo (x INT GENERATED ALWAYS AS (YEAR(y)))",
             write={
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index b50fec8..5faed51 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -1108,6 +1108,11 @@ class TestDialect(Validator):
         )

     def test_order_by(self):
+        self.validate_identity(
+            "SELECT c FROM t ORDER BY a, b,",
+            "SELECT c FROM t ORDER BY a, b",
+        )
+
         self.validate_all(
             "SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname ASC NULLS LAST, lname",
             write={
@@ -1777,7 +1782,7 @@ class TestDialect(Validator):
             "CREATE TABLE t (c CHAR, nc NCHAR, v1 VARCHAR, v2 VARCHAR2, nv NVARCHAR, nv2 NVARCHAR2)",
             write={
                 "duckdb": "CREATE TABLE t (c TEXT, nc TEXT, v1 TEXT, v2 TEXT, nv TEXT, nv2 TEXT)",
-                "hive": "CREATE TABLE t (c CHAR, nc CHAR, v1 STRING, v2 STRING, nv STRING, nv2 STRING)",
+                "hive": "CREATE TABLE t (c STRING, nc STRING, v1 STRING, v2 STRING, nv STRING, nv2 STRING)",
                 "oracle": "CREATE TABLE t (c CHAR, nc NCHAR, v1 VARCHAR2, v2 VARCHAR2, nv NVARCHAR2, nv2 NVARCHAR2)",
                 "postgres": "CREATE TABLE t (c CHAR, nc CHAR, v1 VARCHAR, v2 VARCHAR, nv VARCHAR, nv2 VARCHAR)",
                 "sqlite": "CREATE TABLE t (c TEXT, nc TEXT, v1 TEXT, v2 TEXT, nv TEXT, nv2 TEXT)",
@@ -2301,3 +2306,9 @@ SELECT
                 "tsql": UnsupportedError,
             },
         )
+
+    def test_truncate(self):
+        self.validate_identity("TRUNCATE TABLE table")
+        self.validate_identity("TRUNCATE TABLE db.schema.test")
+        self.validate_identity("TRUNCATE TABLE IF EXISTS db.schema.test")
+        self.validate_identity("TRUNCATE TABLE t1, t2, t3")
diff --git a/tests/dialects/test_doris.py b/tests/dialects/test_doris.py
index 5ae23ad..035289b 100644
--- a/tests/dialects/test_doris.py
+++ b/tests/dialects/test_doris.py
@@ -26,6 +26,16 @@ class TestDoris(Validator):
                 "doris": "SELECT ARRAY_SUM(x -> x * x, ARRAY(2, 3))",
             },
         )
+        self.validate_all(
+            "MONTHS_ADD(d, n)",
+            read={
+                "oracle": "ADD_MONTHS(d, n)",
+            },
+            write={
+                "doris": "MONTHS_ADD(d, n)",
+                "oracle": "ADD_MONTHS(d, n)",
+            },
+        )

     def test_identity(self):
         self.validate_identity("COALECSE(a, b, c, d)")
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index 9c48f69..58d1f06 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -7,9 +7,14 @@ class TestDuckDB(Validator):
     dialect = "duckdb"

     def test_duckdb(self):
-        struct_pack = parse_one('STRUCT_PACK("a b" := 1)', read="duckdb")
-        self.assertIsInstance(struct_pack.expressions[0].this, exp.Identifier)
-        self.assertEqual(struct_pack.sql(dialect="duckdb"), "{'a b': 1}")
+        self.validate_all(
+            'STRUCT_PACK("a b" := 1)',
+            write={
+                "duckdb": "{'a b': 1}",
+                "spark": "STRUCT(1 AS `a b`)",
+                "snowflake": "OBJECT_CONSTRUCT('a b', 1)",
+            },
+        )

         self.validate_all(
             "SELECT SUM(X) OVER (ORDER BY x)",
@@ -52,8 +57,21 @@ class TestDuckDB(Validator):
             exp.select("*").from_("t").offset(exp.select("5").subquery()).sql(dialect="duckdb"),
         )

-        for struct_value in ("{'a': 1}", "struct_pack(a := 1)"):
-            self.validate_all(struct_value, write={"presto": UnsupportedError})
+        self.validate_all(
+            "{'a': 1, 'b': '2'}", write={"presto": "CAST(ROW(1, '2') AS ROW(a INTEGER, b VARCHAR))"}
+        )
+        self.validate_all(
+            "struct_pack(a := 1, b := 2)",
+            write={"presto": "CAST(ROW(1, 2) AS ROW(a INTEGER, b INTEGER))"},
+        )
+
+        self.validate_all(
+            "struct_pack(a := 1, b := x)",
+            write={
+                "duckdb": "{'a': 1, 'b': x}",
+                "presto": UnsupportedError,
+            },
+        )

         for join_type in ("SEMI", "ANTI"):
             exists = "EXISTS" if join_type == "SEMI" else "NOT EXISTS"
@@ -171,7 +189,6 @@ class TestDuckDB(Validator):
             },
         )

-        self.validate_identity("SELECT i FROM RANGE(5) AS _(i) ORDER BY i ASC")
         self.validate_identity("INSERT INTO x BY NAME SELECT 1 AS y")
         self.validate_identity("SELECT 1 AS x UNION ALL BY NAME SELECT 2 AS x")
         self.validate_identity("SELECT SUM(x) FILTER (x = 1)", "SELECT SUM(x) FILTER(WHERE x = 1)")
@@ -209,6 +226,10 @@ class TestDuckDB(Validator):
         self.validate_identity("FROM (FROM tbl)", "SELECT * FROM (SELECT * FROM tbl)")
         self.validate_identity("FROM tbl", "SELECT * FROM tbl")
         self.validate_identity("x -> '$.family'")
+        self.validate_identity("CREATE TABLE color (name ENUM('RED', 'GREEN', 'BLUE'))")
+        self.validate_identity(
+            "SELECT * FROM x LEFT JOIN UNNEST(y)", "SELECT * FROM x LEFT JOIN UNNEST(y) ON TRUE"
+        )
         self.validate_identity(
             """SELECT '{"foo": [1, 2, 3]}' -> 'foo' -> 0""",
             """SELECT '{"foo": [1, 2, 3]}' -> '$.foo' -> '$[0]'""",
@@ -623,6 +644,27 @@ class TestDuckDB(Validator):
             },
         )

+        self.validate_identity("SELECT * FROM RANGE(1, 5, 10)")
+        self.validate_identity("SELECT * FROM GENERATE_SERIES(2, 13, 4)")
+
+        self.validate_all(
+            "WITH t AS (SELECT i, i * i * i * i * i AS i5 FROM RANGE(1, 5) t(i)) SELECT * FROM t",
+            write={
+                "duckdb": "WITH t AS (SELECT i, i * i * i * i * i AS i5 FROM RANGE(1, 5) AS t(i)) SELECT * FROM t",
+                "sqlite": "WITH t AS (SELECT i, i * i * i * i * i AS i5 FROM (SELECT value AS i FROM GENERATE_SERIES(1, 5)) AS t) SELECT * FROM t",
+            },
+        )
+
+        self.validate_identity(
+            """SELECT i FROM RANGE(5) AS _(i) ORDER BY i ASC""",
+            """SELECT i FROM RANGE(0, 5) AS _(i) ORDER BY i ASC""",
+        )
+
+        self.validate_identity(
+            """SELECT i FROM GENERATE_SERIES(12) AS _(i) ORDER BY i ASC""",
+            """SELECT i FROM GENERATE_SERIES(0, 12) AS _(i) ORDER BY i ASC""",
+        )
+
     def test_array_index(self):
         with self.assertLogs(helper_logger) as cm:
             self.validate_all(
@@ -994,3 +1036,10 @@ class TestDuckDB(Validator):
             read={"bigquery": "IS_INF(x)"},
             write={"bigquery": "IS_INF(x)", "duckdb": "ISINF(x)"},
         )
+
+    def test_parameter_token(self):
+        self.validate_all(
+            "SELECT $foo",
+            read={"bigquery": "SELECT @foo"},
+            write={"bigquery": "SELECT @foo", "duckdb": "SELECT $foo"},
+        )
diff --git a/tests/dialects/test_hive.py b/tests/dialects/test_hive.py
index ea28f29..b892dd6 100644
--- a/tests/dialects/test_hive.py
+++ b/tests/dialects/test_hive.py
@@ -440,6 +440,9 @@ class TestHive(Validator):
         self.validate_identity(
             "SELECT key, value, GROUPING__ID, COUNT(*) FROM T1 GROUP BY key, value WITH ROLLUP"
         )
+        self.validate_identity(
+            "TRUNCATE TABLE t1 PARTITION(age = 10, name = 'test', address = 'abc')"
+        )

         self.validate_all(
             "SELECT ${hiveconf:some_var}",
@@ -612,12 +615,6 @@ class TestHive(Validator):
             },
         )
         self.validate_all(
-            "STRUCT(a = b, c = d)",
-            read={
-                "snowflake": "OBJECT_CONSTRUCT(a, b, c, d)",
-            },
-        )
-        self.validate_all(
             "MAP(a, b, c, d)",
             read={
                 "": "VAR_MAP(a, b, c, d)",
diff --git a/tests/dialects/test_mysql.py b/tests/dialects/test_mysql.py
index fd27a1e..5f23c44 100644
--- a/tests/dialects/test_mysql.py
+++ b/tests/dialects/test_mysql.py
@@ -29,6 +29,7 @@ class TestMySQL(Validator):
         self.validate_identity("CREATE TABLE foo (a BIGINT, INDEX USING BTREE (b))")
         self.validate_identity("CREATE TABLE foo (a BIGINT, FULLTEXT INDEX (b))")
         self.validate_identity("CREATE TABLE foo (a BIGINT, SPATIAL INDEX (b))")
+        self.validate_identity("ALTER TABLE t1 ADD COLUMN x INT, ALGORITHM=INPLACE, LOCK=EXCLUSIVE")
         self.validate_identity(
             "CREATE TABLE `oauth_consumer` (`key` VARCHAR(32) NOT NULL, UNIQUE `OAUTH_CONSUMER_KEY` (`key`))"
         )
@@ -68,6 +69,26 @@ class TestMySQL(Validator):
         self.validate_identity(
             "CREATE OR REPLACE VIEW my_view AS SELECT column1 AS `boo`, column2 AS `foo` FROM my_table WHERE column3 = 'some_value' UNION SELECT q.* FROM fruits_table, JSON_TABLE(Fruits, '$[*]' COLUMNS(id VARCHAR(255) PATH '$.$id', value VARCHAR(255) PATH '$.value')) AS q",
         )
+        self.validate_identity(
+            "CREATE TABLE `foo` (`id` char(36) NOT NULL DEFAULT (uuid()), PRIMARY KEY (`id`), UNIQUE KEY `id` (`id`))",
+            "CREATE TABLE `foo` (`id` CHAR(36) NOT NULL DEFAULT (UUID()), PRIMARY KEY (`id`), UNIQUE `id` (`id`))",
+        )
+        self.validate_identity(
+            "CREATE TABLE IF NOT EXISTS industry_info (a BIGINT(20) NOT NULL AUTO_INCREMENT, b BIGINT(20) NOT NULL, c VARCHAR(1000), PRIMARY KEY (a), UNIQUE KEY d (b), KEY e (b))",
+            "CREATE TABLE IF NOT EXISTS industry_info (a BIGINT(20) NOT NULL AUTO_INCREMENT, b BIGINT(20) NOT NULL, c VARCHAR(1000), PRIMARY KEY (a), UNIQUE d (b), INDEX e (b))",
+        )
+        self.validate_identity(
+            "CREATE TABLE test (ts TIMESTAMP, ts_tz TIMESTAMPTZ, ts_ltz TIMESTAMPLTZ)",
+            "CREATE TABLE test (ts DATETIME, ts_tz TIMESTAMP, ts_ltz TIMESTAMP)",
+        )
+        self.validate_identity(
+            "ALTER TABLE test_table ALTER COLUMN test_column SET DATA TYPE LONGTEXT",
+            "ALTER TABLE test_table MODIFY COLUMN test_column LONGTEXT",
+        )
+        self.validate_identity(
+            "CREATE TABLE t (c DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP) DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC",
+            "CREATE TABLE t (c DATETIME DEFAULT CURRENT_TIMESTAMP() ON UPDATE CURRENT_TIMESTAMP()) DEFAULT CHARACTER SET=utf8 ROW_FORMAT=DYNAMIC",
+        )
         self.validate_all(
             "CREATE TABLE z (a INT) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARACTER SET=utf8 COLLATE=utf8_bin COMMENT='x'",
@@ -79,12 +100,6 @@ class TestMySQL(Validator):
             },
         )
         self.validate_all(
-            "CREATE TABLE t (c DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP) DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC",
-            write={
-                "mysql": "CREATE TABLE t (c DATETIME DEFAULT CURRENT_TIMESTAMP() ON UPDATE CURRENT_TIMESTAMP()) DEFAULT CHARACTER SET=utf8 ROW_FORMAT=DYNAMIC",
-            },
-        )
-        self.validate_all(
             "CREATE TABLE x (id int not null auto_increment, primary key (id))",
             write={
                 "sqlite": "CREATE TABLE x (id INTEGER NOT NULL AUTOINCREMENT PRIMARY KEY)",
@@ -96,33 +111,9 @@ class TestMySQL(Validator):
                 "sqlite": "CREATE TABLE x (id INTEGER NOT NULL)",
             },
         )
-        self.validate_all(
-            "CREATE TABLE `foo` (`id` char(36) NOT NULL DEFAULT (uuid()), PRIMARY KEY (`id`), UNIQUE KEY `id` (`id`))",
-            write={
-                "mysql": "CREATE TABLE `foo` (`id` CHAR(36) NOT NULL DEFAULT (UUID()), PRIMARY KEY (`id`), UNIQUE `id` (`id`))",
-            },
-        )
-        self.validate_all(
-            "CREATE TABLE IF NOT EXISTS industry_info (a BIGINT(20) NOT NULL AUTO_INCREMENT, b BIGINT(20) NOT NULL, c VARCHAR(1000), PRIMARY KEY (a), UNIQUE KEY d (b), KEY e (b))",
-            write={
-                "mysql": "CREATE TABLE IF NOT EXISTS industry_info (a BIGINT(20) NOT NULL AUTO_INCREMENT, b BIGINT(20) NOT NULL, c VARCHAR(1000), PRIMARY KEY (a), UNIQUE d (b), INDEX e (b))",
-            },
-        )
-        self.validate_all(
-            "CREATE TABLE test (ts TIMESTAMP, ts_tz TIMESTAMPTZ, ts_ltz TIMESTAMPLTZ)",
-            write={
-                "mysql": "CREATE TABLE test (ts DATETIME, ts_tz TIMESTAMP, ts_ltz TIMESTAMP)",
-            },
-        )
-        self.validate_all(
-            "ALTER TABLE test_table ALTER COLUMN test_column SET DATA TYPE LONGTEXT",
-            write={
-                "mysql": "ALTER TABLE test_table MODIFY COLUMN test_column LONGTEXT",
-            },
-        )
-        self.validate_identity("ALTER TABLE test_table ALTER COLUMN test_column SET DEFAULT 1")

     def test_identity(self):
+        self.validate_identity("ALTER TABLE test_table ALTER COLUMN test_column SET DEFAULT 1")
         self.validate_identity("SELECT DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:00.0000')")
         self.validate_identity("SELECT @var1 := 1, @var2")
         self.validate_identity("UNLOCK TABLES")
diff --git a/tests/dialects/test_oracle.py b/tests/dialects/test_oracle.py
index bc8f8bb..9438507 100644
--- a/tests/dialects/test_oracle.py
+++ b/tests/dialects/test_oracle.py
@@ -1,4 +1,4 @@
-from sqlglot import exp, parse_one
+from sqlglot import exp
 from sqlglot.errors import UnsupportedError
 from tests.dialects.test_dialect import Validator

@@ -7,11 +7,18 @@ class TestOracle(Validator):
     dialect = "oracle"

     def test_oracle(self):
-        self.validate_identity("REGEXP_REPLACE('source', 'search')")
-        parse_one("ALTER TABLE tbl_name DROP FOREIGN KEY fk_symbol", dialect="oracle").assert_is(
-            exp.AlterTable
+        self.validate_all(
+            "SELECT CONNECT_BY_ROOT x y",
+            write={
+                "": "SELECT CONNECT_BY_ROOT(x) AS y",
+                "oracle": "SELECT CONNECT_BY_ROOT x AS y",
+            },
         )
+        self.parse_one("ALTER TABLE tbl_name DROP FOREIGN KEY fk_symbol").assert_is(exp.AlterTable)
+        self.validate_identity("CREATE GLOBAL TEMPORARY TABLE t AS SELECT * FROM orders")
+        self.validate_identity("CREATE PRIVATE TEMPORARY TABLE t AS SELECT * FROM orders")
+        self.validate_identity("REGEXP_REPLACE('source', 'search')")
         self.validate_identity("TIMESTAMP(3) WITH TIME ZONE")
         self.validate_identity("CURRENT_TIMESTAMP(precision)")
         self.validate_identity("ALTER TABLE tbl_name DROP FOREIGN KEY fk_symbol")
@@ -89,6 +96,13 @@ class TestOracle(Validator):
         self.validate_identity("SELECT TO_CHAR(-100, 'L99', 'NL_CURRENCY = '' AusDollars '' ')")

         self.validate_all(
+            "TO_CHAR(x)",
+            write={
+                "doris": "CAST(x AS STRING)",
+                "oracle": "TO_CHAR(x)",
+            },
+        )
+        self.validate_all(
             "SELECT TO_CHAR(TIMESTAMP '1999-12-01 10:00:00')",
             write={
                 "oracle": "SELECT TO_CHAR(CAST('1999-12-01 10:00:00' AS TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS')",
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index d1ecb2a..1d0ea8b 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -8,8 +8,10 @@ class TestPostgres(Validator):
     dialect = "postgres"

     def test_postgres(self):
+        self.validate_identity("1.x", "1. AS x")
         self.validate_identity("|/ x", "SQRT(x)")
         self.validate_identity("||/ x", "CBRT(x)")
+
         expr = parse_one(
             "SELECT * FROM r CROSS JOIN LATERAL UNNEST(ARRAY[1]) AS s(location)", read="postgres"
         )
@@ -82,6 +84,7 @@ class TestPostgres(Validator):
         self.validate_identity("CAST(1 AS DECIMAL) / CAST(2 AS DECIMAL) * -100")
         self.validate_identity("EXEC AS myfunc @id = 123", check_command_warning=True)
         self.validate_identity("SELECT CURRENT_USER")
+        self.validate_identity("SELECT * FROM ONLY t1")
         self.validate_identity(
             """LAST_VALUE("col1") OVER (ORDER BY "col2" RANGE BETWEEN INTERVAL '1 DAY' PRECEDING AND '1 month' FOLLOWING)"""
         )
@@ -164,6 +167,9 @@ class TestPostgres(Validator):
             "SELECT 'Dianne''s horse'",
         )
         self.validate_identity(
+            "COMMENT ON TABLE mytable IS $$doc this$$", "COMMENT ON TABLE mytable IS 'doc this'"
+        )
+        self.validate_identity(
             "UPDATE MYTABLE T1 SET T1.COL = 13",
             "UPDATE MYTABLE AS T1 SET T1.COL = 13",
         )
@@ -320,6 +326,7 @@ class TestPostgres(Validator):
             "MERGE INTO x USING (SELECT id) AS y ON a = b WHEN MATCHED THEN UPDATE SET x.a = y.b WHEN NOT MATCHED THEN INSERT (a, b) VALUES (y.a, y.b)",
             "MERGE INTO x USING (SELECT id) AS y ON a = b WHEN MATCHED THEN UPDATE SET a = y.b WHEN NOT MATCHED THEN INSERT (a, b) VALUES (y.a, y.b)",
         )
+        self.validate_identity("SELECT * FROM t1*", "SELECT * FROM t1")

         self.validate_all(
             "SELECT JSON_EXTRACT_PATH_TEXT(x, k1, k2, k3) FROM t",
@@ -653,6 +660,12 @@ class TestPostgres(Validator):
         self.validate_identity("CREATE TABLE t (c CHAR(2) UNIQUE NOT NULL) INHERITS (t1)")
         self.validate_identity("CREATE TABLE s.t (c CHAR(2) UNIQUE NOT NULL) INHERITS (s.t1, s.t2)")
         self.validate_identity("CREATE FUNCTION x(INT) RETURNS INT SET search_path = 'public'")
+        self.validate_identity("TRUNCATE TABLE t1 CONTINUE IDENTITY")
+        self.validate_identity("TRUNCATE TABLE t1 RESTART IDENTITY")
+        self.validate_identity("TRUNCATE TABLE t1 CASCADE")
+        self.validate_identity("TRUNCATE TABLE t1 RESTRICT")
+        self.validate_identity("TRUNCATE TABLE t1 CONTINUE IDENTITY CASCADE")
+        self.validate_identity("TRUNCATE TABLE t1 RESTART IDENTITY RESTRICT")
         self.validate_identity(
             "CREATE TABLE cust_part3 PARTITION OF customers FOR VALUES WITH (MODULUS 3, REMAINDER 2)"
         )
@@ -785,6 +798,10 @@ class TestPostgres(Validator):
         self.validate_identity(
             "CREATE INDEX index_ci_pipelines_on_project_idandrefandiddesc ON public.ci_pipelines USING btree(project_id, ref, id DESC)"
         )
+        self.validate_identity(
+            "TRUNCATE TABLE ONLY t1, t2*, ONLY t3, t4, t5* RESTART IDENTITY CASCADE",
+            "TRUNCATE TABLE ONLY t1, t2, ONLY t3, t4, t5 RESTART IDENTITY CASCADE",
+        )

         with self.assertRaises(ParseError):
             transpile("CREATE TABLE products (price DECIMAL CHECK price > 0)", read="postgres")
@@ -911,3 +928,31 @@ class TestPostgres(Validator):
         """See https://github.com/tobymao/sqlglot/pull/2404 for details."""
         self.assertIsInstance(parse_one("'thomas' ~ '.*thomas.*'", read="postgres"), exp.Binary)
         self.assertIsInstance(parse_one("'thomas' ~* '.*thomas.*'", read="postgres"), exp.Binary)
+
+    def test_unnest_json_array(self):
+        trino_input = """
+        WITH t(boxcrate) AS (
+          SELECT JSON '[{"boxes": [{"name": "f1", "type": "plant", "color": "red"}]}]'
+        )
+        SELECT
+          JSON_EXTRACT_SCALAR(boxes,'$.name') AS name,
+          JSON_EXTRACT_SCALAR(boxes,'$.type') AS type,
+          JSON_EXTRACT_SCALAR(boxes,'$.color') AS color
+        FROM t
+        CROSS JOIN UNNEST(CAST(boxcrate AS array(json))) AS x(tbox)
+        CROSS JOIN UNNEST(CAST(json_extract(tbox, '$.boxes') AS array(json))) AS y(boxes)
+        """
+
+        expected_postgres = """WITH t(boxcrate) AS (
+  SELECT
+    CAST('[{"boxes": [{"name": "f1", "type": "plant", "color": "red"}]}]' AS JSON)
+)
+SELECT
+  JSON_EXTRACT_PATH_TEXT(boxes, 'name') AS name,
+  JSON_EXTRACT_PATH_TEXT(boxes, 'type') AS type,
+  JSON_EXTRACT_PATH_TEXT(boxes, 'color') AS color
+FROM t
+CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(boxcrate AS JSON)) AS x(tbox)
+CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(JSON_EXTRACT_PATH(tbox, 'boxes') AS JSON)) AS y(boxes)"""
+
+        self.validate_all(expected_postgres, read={"trino": trino_input}, pretty=True)
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index d3d1a76..2ea595e 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -647,6 +647,7 @@ class TestPresto(Validator):
             """JSON '"foo"'""",
             write={
                 "bigquery": """PARSE_JSON('"foo"')""",
+                "postgres": """CAST('"foo"' AS JSON)""",
                 "presto": """JSON_PARSE('"foo"')""",
                 "snowflake": """PARSE_JSON('"foo"')""",
             },
@@ -1142,3 +1143,18 @@ MATCH_RECOGNIZE (
                 "presto": "DATE_FORMAT(ts, '%y')",
             },
         )
+
+    def test_signum(self):
+        self.validate_all(
+            "SIGN(x)",
+            read={
+                "presto": "SIGN(x)",
+                "spark": "SIGNUM(x)",
+                "starrocks": "SIGN(x)",
+            },
+            write={
+                "presto": "SIGN(x)",
+                "spark": "SIGN(x)",
+                "starrocks": "SIGN(x)",
+            },
+        )
diff --git a/tests/dialects/test_redshift.py b/tests/dialects/test_redshift.py
index 33cfa0c..506f429 100644
--- a/tests/dialects/test_redshift.py
+++ b/tests/dialects/test_redshift.py
@@ -515,6 +515,11 @@ FROM (
         )

     def test_column_unnesting(self):
+        self.validate_identity("SELECT c.*, o FROM bloo AS c, c.c_orders AS o")
+        self.validate_identity(
+            "SELECT c.*, o, l FROM bloo AS c, c.c_orders AS o, o.o_lineitems AS l"
+        )
+
         ast = parse_one("SELECT * FROM t.t JOIN t.c1 ON c1.c2 = t.c3", read="redshift")
         ast.args["from"].this.assert_is(exp.Table)
         ast.args["joins"][0].this.assert_is(exp.Table)
@@ -522,7 +527,7 @@ FROM (
         ast = parse_one("SELECT * FROM t AS t CROSS JOIN t.c1", read="redshift")
         ast.args["from"].this.assert_is(exp.Table)
-        ast.args["joins"][0].this.assert_is(exp.Column)
+        ast.args["joins"][0].this.assert_is(exp.Unnest)
         self.assertEqual(ast.sql("redshift"), "SELECT * FROM t AS t CROSS JOIN t.c1")

         ast = parse_one(
@@ -530,9 +535,9 @@ FROM (
         )
         joins = ast.args["joins"]
         ast.args["from"].this.assert_is(exp.Table)
-        joins[0].this.this.assert_is(exp.Column)
-        joins[1].this.this.assert_is(exp.Column)
-        joins[2].this.this.assert_is(exp.Dot)
+        joins[0].this.assert_is(exp.Unnest)
+        joins[1].this.assert_is(exp.Unnest)
+        joins[2].this.assert_is(exp.Unnest).expressions[0].assert_is(exp.Dot)
         self.assertEqual(
             ast.sql("redshift"), "SELECT * FROM x AS a, a.b AS c, c.d.e AS f, f.g.h.i.j.k AS l"
         )
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index 4e4feb3..e48f811 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -40,6 +40,7 @@ WHERE
 )""",
         )

+        self.validate_identity("ALTER TABLE authors ADD CONSTRAINT c1 UNIQUE (id, email)")
         self.validate_identity("RM @parquet_stage", check_command_warning=True)
         self.validate_identity("REMOVE @parquet_stage", check_command_warning=True)
         self.validate_identity("SELECT TIMESTAMP_FROM_PARTS(d, t)")
@@ -84,6 +85,7 @@ WHERE
         self.validate_identity(
             "SELECT a FROM test PIVOT(SUM(x) FOR y IN ('z', 'q')) AS x TABLESAMPLE (0.1)"
         )
+        self.validate_identity("x:from", "GET_PATH(x, 'from')")
         self.validate_identity(
             "value:values::string",
             "CAST(GET_PATH(value, 'values') AS TEXT)",
@@ -371,15 +373,17 @@ WHERE
             write={"snowflake": "SELECT * FROM (VALUES (0)) AS foo(bar)"},
         )
         self.validate_all(
-            "OBJECT_CONSTRUCT(a, b, c, d)",
+            "OBJECT_CONSTRUCT('a', b, 'c', d)",
             read={
-                "": "STRUCT(a as b, c as d)",
+                "": "STRUCT(b as a, d as c)",
             },
             write={
                 "duckdb": "{'a': b, 'c': d}",
-                "snowflake": "OBJECT_CONSTRUCT(a, b, c, d)",
+                "snowflake": "OBJECT_CONSTRUCT('a', b, 'c', d)",
             },
         )
+        self.validate_identity("OBJECT_CONSTRUCT(a, b, c, d)")
+
         self.validate_all(
             "SELECT i, p, o FROM qt QUALIFY ROW_NUMBER() OVER (PARTITION BY p ORDER BY o) = 1",
             write={
@@ -1461,26 +1465,22 @@ MATCH_RECOGNIZE (
             pretty=True,
         )

-    def test_show(self):
-        # Parsed as Show
-        self.validate_identity("SHOW PRIMARY KEYS")
-        self.validate_identity("SHOW PRIMARY KEYS IN ACCOUNT")
-        self.validate_identity("SHOW PRIMARY KEYS IN DATABASE")
-        self.validate_identity("SHOW PRIMARY KEYS IN DATABASE foo")
-        self.validate_identity("SHOW PRIMARY KEYS IN TABLE")
-        self.validate_identity("SHOW PRIMARY KEYS IN TABLE foo")
-        self.validate_identity(
-            'SHOW PRIMARY KEYS IN "TEST"."PUBLIC"."customers"',
-            'SHOW PRIMARY KEYS IN TABLE "TEST"."PUBLIC"."customers"',
-        )
-        self.validate_identity(
-            'SHOW TERSE PRIMARY KEYS IN "TEST"."PUBLIC"."customers"',
-            'SHOW PRIMARY KEYS IN TABLE "TEST"."PUBLIC"."customers"',
-        )
+    def test_show_users(self):
+        self.validate_identity("SHOW USERS")
+        self.validate_identity("SHOW TERSE USERS")
+        self.validate_identity("SHOW USERS LIKE '_foo%' STARTS WITH 'bar' LIMIT 5 FROM 'baz'")
+
+    def test_show_schemas(self):
         self.validate_identity(
             "show terse schemas in database db1 starts with 'a' limit 10 from 'b'",
             "SHOW TERSE SCHEMAS IN DATABASE db1 STARTS WITH 'a' LIMIT 10 FROM 'b'",
         )
+
+        ast = parse_one("SHOW SCHEMAS IN DATABASE db1", read="snowflake")
+        self.assertEqual(ast.args.get("scope_kind"), "DATABASE")
+        self.assertEqual(ast.find(exp.Table).sql(dialect="snowflake"), "db1")
+
+    def test_show_objects(self):
         self.validate_identity(
             "show terse objects in schema db1.schema1 starts with 'a' limit 10 from 'b'",
             "SHOW TERSE OBJECTS IN SCHEMA db1.schema1 STARTS WITH 'a' LIMIT 10 FROM 'b'",
@@ -1489,6 +1489,23 @@ MATCH_RECOGNIZE (
             "show terse objects in db1.schema1 starts with 'a' limit 10 from 'b'",
             "SHOW TERSE OBJECTS IN SCHEMA db1.schema1 STARTS WITH 'a' LIMIT 10 FROM 'b'",
         )
+
+        ast = parse_one("SHOW OBJECTS IN db1.schema1", read="snowflake")
+        self.assertEqual(ast.args.get("scope_kind"), "SCHEMA")
+        self.assertEqual(ast.find(exp.Table).sql(dialect="snowflake"), "db1.schema1")
+
+    def test_show_columns(self):
+        self.validate_identity("SHOW COLUMNS")
+        self.validate_identity("SHOW COLUMNS IN TABLE dt_test")
+        self.validate_identity("SHOW COLUMNS LIKE '_foo%' IN TABLE dt_test")
+        self.validate_identity("SHOW COLUMNS IN VIEW")
+        self.validate_identity("SHOW COLUMNS LIKE '_foo%' IN VIEW dt_test")
+
+        ast = parse_one("SHOW COLUMNS LIKE '_testing%' IN dt_test", read="snowflake")
+        self.assertEqual(ast.find(exp.Table).sql(dialect="snowflake"), "dt_test")
+        self.assertEqual(ast.find(exp.Literal).sql(dialect="snowflake"), "'_testing%'")
+
+    def test_show_tables(self):
         self.validate_identity(
             "SHOW TABLES LIKE 'line%' IN tpch.public",
             "SHOW TABLES LIKE 'line%' IN SCHEMA tpch.public",
@@ -1506,47 +1523,97 @@ MATCH_RECOGNIZE (
             "SHOW TERSE TABLES IN SCHEMA db1.schema1 STARTS WITH 'a' LIMIT 10 FROM 'b'",
         )

-        ast = parse_one('SHOW PRIMARY KEYS IN "TEST"."PUBLIC"."customers"', read="snowflake")
-        table = ast.find(exp.Table)
+        ast = parse_one("SHOW TABLES IN db1.schema1", read="snowflake")
+        self.assertEqual(ast.find(exp.Table).sql(dialect="snowflake"), "db1.schema1")
+
+    def test_show_primary_keys(self):
+        self.validate_identity("SHOW PRIMARY KEYS")
+        self.validate_identity("SHOW PRIMARY KEYS IN ACCOUNT")
+        self.validate_identity("SHOW PRIMARY KEYS IN DATABASE")
+        self.validate_identity("SHOW PRIMARY KEYS IN DATABASE foo")
+        self.validate_identity("SHOW PRIMARY KEYS IN TABLE")
+        self.validate_identity("SHOW PRIMARY KEYS IN TABLE foo")
+        self.validate_identity(
+            'SHOW PRIMARY KEYS IN "TEST"."PUBLIC"."foo"',
+            'SHOW PRIMARY KEYS IN TABLE "TEST"."PUBLIC"."foo"',
+        )
+        self.validate_identity(
+            'SHOW TERSE PRIMARY KEYS IN "TEST"."PUBLIC"."foo"',
+            'SHOW PRIMARY KEYS IN TABLE "TEST"."PUBLIC"."foo"',
+        )

-        self.assertEqual(table.sql(dialect="snowflake"), '"TEST"."PUBLIC"."customers"')
+        ast = parse_one('SHOW PRIMARY KEYS IN "TEST"."PUBLIC"."foo"', read="snowflake")
+        self.assertEqual(ast.find(exp.Table).sql(dialect="snowflake"), '"TEST"."PUBLIC"."foo"')

-        self.validate_identity("SHOW COLUMNS")
-        self.validate_identity("SHOW COLUMNS IN TABLE dt_test")
-        self.validate_identity("SHOW COLUMNS LIKE '_foo%' IN TABLE dt_test")
-        self.validate_identity("SHOW COLUMNS IN VIEW")
-        self.validate_identity("SHOW COLUMNS LIKE '_foo%' IN VIEW dt_test")
+    def test_show_views(self):
+        self.validate_identity("SHOW TERSE VIEWS")
+        self.validate_identity("SHOW VIEWS")
+        self.validate_identity("SHOW VIEWS LIKE 'foo%'")
+        self.validate_identity("SHOW VIEWS IN ACCOUNT")
+        self.validate_identity("SHOW VIEWS IN DATABASE")
+        self.validate_identity("SHOW VIEWS IN DATABASE foo")
+        self.validate_identity("SHOW VIEWS IN SCHEMA foo")
+        self.validate_identity(
+            "SHOW VIEWS IN foo",
+            "SHOW VIEWS IN SCHEMA foo",
+        )

-        self.validate_identity("SHOW USERS")
-        self.validate_identity("SHOW TERSE USERS")
-        self.validate_identity("SHOW USERS LIKE '_foo%' STARTS WITH 'bar' LIMIT 5 FROM 'baz'")
+        ast = parse_one("SHOW VIEWS IN db1.schema1", read="snowflake")
+        self.assertEqual(ast.find(exp.Table).sql(dialect="snowflake"), "db1.schema1")

-        ast = parse_one("SHOW COLUMNS LIKE '_testing%' IN dt_test", read="snowflake")
-        table = ast.find(exp.Table)
-        literal = ast.find(exp.Literal)
+    def test_show_unique_keys(self):
+        self.validate_identity("SHOW UNIQUE KEYS")
+        self.validate_identity("SHOW UNIQUE KEYS IN ACCOUNT")
+        self.validate_identity("SHOW UNIQUE KEYS IN DATABASE")
+        self.validate_identity("SHOW UNIQUE KEYS IN DATABASE foo")
+        self.validate_identity("SHOW UNIQUE KEYS IN TABLE")
+        self.validate_identity("SHOW UNIQUE KEYS IN TABLE foo")
+        self.validate_identity(
+            'SHOW UNIQUE KEYS IN "TEST"."PUBLIC"."foo"',
+            'SHOW UNIQUE KEYS IN SCHEMA "TEST"."PUBLIC"."foo"',
+        )
+        self.validate_identity(
+            'SHOW TERSE UNIQUE KEYS IN "TEST"."PUBLIC"."foo"',
+            'SHOW UNIQUE KEYS IN SCHEMA "TEST"."PUBLIC"."foo"',
+        )

-        self.assertEqual(table.sql(dialect="snowflake"), "dt_test")
+        ast = parse_one('SHOW UNIQUE KEYS IN "TEST"."PUBLIC"."foo"', read="snowflake")
+        self.assertEqual(ast.find(exp.Table).sql(dialect="snowflake"), '"TEST"."PUBLIC"."foo"')

-        self.assertEqual(literal.sql(dialect="snowflake"), "'_testing%'")
+    def test_show_imported_keys(self):
+        self.validate_identity("SHOW IMPORTED KEYS")
+        self.validate_identity("SHOW IMPORTED KEYS IN ACCOUNT")
+        self.validate_identity("SHOW IMPORTED KEYS IN DATABASE")
+        self.validate_identity("SHOW IMPORTED KEYS IN DATABASE foo")
+        self.validate_identity("SHOW IMPORTED KEYS IN TABLE")
+        self.validate_identity("SHOW IMPORTED KEYS IN TABLE foo")
+        self.validate_identity(
+            'SHOW IMPORTED KEYS IN "TEST"."PUBLIC"."foo"',
+            'SHOW IMPORTED KEYS IN SCHEMA "TEST"."PUBLIC"."foo"',
+        )
+        self.validate_identity(
+            'SHOW TERSE IMPORTED KEYS IN "TEST"."PUBLIC"."foo"',
+            'SHOW IMPORTED KEYS IN SCHEMA "TEST"."PUBLIC"."foo"',
+        )

-        ast = parse_one("SHOW SCHEMAS IN DATABASE db1", read="snowflake")
-        self.assertEqual(ast.args.get("scope_kind"), "DATABASE")
-        table = ast.find(exp.Table)
-        self.assertEqual(table.sql(dialect="snowflake"), "db1")
+        ast = parse_one('SHOW IMPORTED KEYS IN "TEST"."PUBLIC"."foo"', read="snowflake")
+        self.assertEqual(ast.find(exp.Table).sql(dialect="snowflake"), '"TEST"."PUBLIC"."foo"')

-        ast = parse_one("SHOW OBJECTS IN db1.schema1", read="snowflake")
-        self.assertEqual(ast.args.get("scope_kind"), "SCHEMA")
-        table = ast.find(exp.Table)
-        self.assertEqual(table.sql(dialect="snowflake"), "db1.schema1")
+    def test_show_sequences(self):
+        self.validate_identity("SHOW TERSE SEQUENCES")
+        self.validate_identity("SHOW SEQUENCES")
+        self.validate_identity("SHOW SEQUENCES LIKE '_foo%' IN ACCOUNT")
+        self.validate_identity("SHOW SEQUENCES LIKE '_foo%' IN DATABASE")
+        self.validate_identity("SHOW SEQUENCES LIKE '_foo%' IN DATABASE foo")
+        self.validate_identity("SHOW SEQUENCES LIKE '_foo%' IN SCHEMA")
+        self.validate_identity("SHOW SEQUENCES LIKE '_foo%' IN SCHEMA foo")
+        self.validate_identity(
+            "SHOW SEQUENCES LIKE '_foo%' IN foo",
+            "SHOW SEQUENCES LIKE '_foo%' IN SCHEMA foo",
+        )

-        ast = parse_one("SHOW TABLES IN db1.schema1", read="snowflake")
+        ast = parse_one("SHOW SEQUENCES IN dt_test", read="snowflake")
         self.assertEqual(ast.args.get("scope_kind"), "SCHEMA")
-        table = ast.find(exp.Table)
-        self.assertEqual(table.sql(dialect="snowflake"), "db1.schema1")
-
-        users_exp = self.validate_identity("SHOW USERS")
-        self.assertTrue(isinstance(users_exp, exp.Show))
-        self.assertEqual(users_exp.this, "USERS")

     def test_storage_integration(self):
         self.validate_identity(
diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index 196735b..1cf1ede 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -16,6 +16,7 @@ class TestSpark(Validator):
         self.validate_identity(
             "CREATE TABLE foo (col STRING) CLUSTERED BY (col) SORTED BY (col) INTO 10 BUCKETS"
         )
+        self.validate_identity("TRUNCATE TABLE t1 PARTITION(age = 10, name = 'test', address)")

         self.validate_all(
             "CREATE TABLE db.example_table (col_a struct<struct_col_a:int, struct_col_b:string>)",
diff --git a/tests/dialects/test_sqlite.py b/tests/dialects/test_sqlite.py
index f7a3dd7..2421987 100644
--- a/tests/dialects/test_sqlite.py
+++ b/tests/dialects/test_sqlite.py
@@ -1,5 +1,7 @@
 from tests.dialects.test_dialect import Validator

+from sqlglot.helper import logger as helper_logger
+

 class TestSQLite(Validator):
     dialect = "sqlite"
@@ -76,6 +78,7 @@ class TestSQLite(Validator):
         self.validate_identity(
             """SELECT item AS "item", some AS "some" FROM data WHERE (item = 'value_1' COLLATE NOCASE) AND (some = 't' COLLATE NOCASE) ORDER BY item ASC LIMIT 1 OFFSET 0"""
         )
+        self.validate_identity("SELECT * FROM GENERATE_SERIES(1, 5)")

         self.validate_all("SELECT LIKE(y, x)", write={"sqlite": "SELECT x LIKE y"})
         self.validate_all("SELECT GLOB('*y*', 'xyz')", write={"sqlite": "SELECT 'xyz' GLOB '*y*'"})
@@ -178,3 +181,12 @@ class TestSQLite(Validator):
             "CREATE TABLE foo (bar LONGVARCHAR)",
             write={"sqlite": "CREATE TABLE foo (bar TEXT)"},
         )
+
+    def test_warnings(self):
+        with self.assertLogs(helper_logger) as cm:
+            self.validate_identity(
+                "SELECT * FROM t AS t(c1, c2)",
+                "SELECT * FROM t AS t",
+            )
+
+            self.assertIn("Named columns are not supported in table alias.", cm.output[0])
diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index a304a9e..ed474fd 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -1,6 +1,7 @@
 from sqlglot import exp, parse, parse_one
 from sqlglot.parser import logger as parser_logger
 from tests.dialects.test_dialect import Validator
+from sqlglot.errors import ParseError


 class TestTSQL(Validator):
@@ -27,6 +28,7 @@ class TestTSQL(Validator):
         self.validate_identity("SELECT * FROM t WHERE NOT c", "SELECT * FROM t WHERE NOT c <> 0")
         self.validate_identity("1 AND true", "1 <> 0 AND (1 = 1)")
         self.validate_identity("CAST(x AS int) OR y", "CAST(x AS INTEGER) <> 0 OR y <> 0")
+        self.validate_identity("TRUNCATE TABLE t1 WITH (PARTITIONS(1, 2 TO 5, 10 TO 20, 84))")

         self.validate_all(
             "SELECT IIF(cond <> 0, 'True', 'False')",
@@ -142,7 +144,7 @@ class TestTSQL(Validator):
                 "tsql": "CREATE TABLE #mytemptable (a INTEGER)",
                 "snowflake": "CREATE TEMPORARY TABLE mytemptable (a INT)",
                 "duckdb": "CREATE TEMPORARY TABLE mytemptable (a INT)",
-                "oracle": "CREATE TEMPORARY TABLE mytemptable (a NUMBER)",
+                "oracle": "CREATE GLOBAL TEMPORARY TABLE mytemptable (a NUMBER)",
                 "hive": "CREATE TEMPORARY TABLE mytemptable (a INT)",
                 "spark2": "CREATE TEMPORARY TABLE mytemptable (a INT) USING PARQUET",
                 "spark": "CREATE TEMPORARY TABLE mytemptable (a INT) USING PARQUET",
@@ -281,7 +283,7 @@ class TestTSQL(Validator):
             "CONVERT(INT, CONVERT(NUMERIC, '444.75'))",
             write={
                 "mysql": "CAST(CAST('444.75' AS DECIMAL) AS SIGNED)",
-                "tsql": "CAST(CAST('444.75' AS NUMERIC) AS INTEGER)",
+                "tsql": "CONVERT(INTEGER, CONVERT(NUMERIC, '444.75'))",
             },
         )
         self.validate_all(
@@ -356,6 +358,76 @@ class TestTSQL(Validator):
         self.validate_identity("HASHBYTES('MD2', 'x')")
         self.validate_identity("LOG(n, b)")

+    def test_option(self):
+        possible_options = [
+            "HASH GROUP",
+            "ORDER GROUP",
+            "CONCAT UNION",
+            "HASH UNION",
+            "MERGE UNION",
+            "LOOP JOIN",
+            "MERGE JOIN",
+            "HASH JOIN",
+            "DISABLE_OPTIMIZED_PLAN_FORCING",
+            "EXPAND VIEWS",
+            "FAST 15",
+            "FORCE ORDER",
+            "FORCE EXTERNALPUSHDOWN",
+            "DISABLE EXTERNALPUSHDOWN",
+            "FORCE SCALEOUTEXECUTION",
+            "DISABLE SCALEOUTEXECUTION",
+            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
+            "KEEP PLAN",
+            "KEEPFIXED PLAN",
+            "MAX_GRANT_PERCENT = 5",
+            "MIN_GRANT_PERCENT = 10",
+            "MAXDOP 13",
+            "MAXRECURSION 8",
+            "NO_PERFORMANCE_SPOOL",
+            "OPTIMIZE FOR UNKNOWN",
+            "PARAMETERIZATION SIMPLE",
+            "PARAMETERIZATION FORCED",
+            "QUERYTRACEON 99",
+            "RECOMPILE",
+            "ROBUST PLAN",
+            "USE PLAN N'<xml_plan>'",
+            "LABEL = 'MyLabel'",
+        ]
+
+        possible_statements = [
+            # These should be un-commented once support for the OPTION clause is added for DELETE, MERGE and UPDATE
+            # "DELETE FROM Table1",
+            # "MERGE INTO Locations AS T USING locations_stage AS S ON T.LocationID = S.LocationID WHEN MATCHED THEN UPDATE SET LocationName = S.LocationName",
+            # "UPDATE Customers SET ContactName = 'Alfred Schmidt', City = 'Frankfurt' WHERE CustomerID = 1",
+            "SELECT * FROM Table1",
+            "SELECT * FROM Table1 WHERE id = 2",
+        ]
+
+        for statement in possible_statements:
+            for option in possible_options:
+                query = f"{statement} OPTION({option})"
+                result = self.validate_identity(query)
+                options = result.args.get("options")
+                self.assertIsInstance(options, list, f"When parsing query {query}")
+                is_query_options = map(lambda o: isinstance(o, exp.QueryOption), options)
+                self.assertTrue(all(is_query_options), f"When parsing query {query}")
+
+            self.validate_identity(
+                f"{statement} OPTION(RECOMPILE, USE PLAN N'<xml_plan>', MAX_GRANT_PERCENT = 5)"
+            )
+
+        raising_queries = [
+            # Missing parentheses
+            "SELECT * FROM Table1 OPTION HASH GROUP",
+            # Must be followed by 'PLAN'
+            "SELECT * FROM Table1 OPTION(KEEPFIXED)",
+            # Missing commas
+            "SELECT * FROM Table1 OPTION(HASH GROUP HASH GROUP)",
+        ]
+        for query in raising_queries:
+            with self.assertRaises(ParseError, msg=f"When running '{query}'"):
+                self.parse_one(query)
+
     def test_types(self):
         self.validate_identity("CAST(x AS XML)")
         self.validate_identity("CAST(x AS UNIQUEIDENTIFIER)")
@@ -525,7 +597,7 @@ class TestTSQL(Validator):
             "CAST(x as NCHAR(1))",
             write={
                 "spark": "CAST(x AS CHAR(1))",
-                "tsql": "CAST(x AS CHAR(1))",
+                "tsql": "CAST(x AS NCHAR(1))",
             },
         )

@@ -533,7 +605,7 @@ class TestTSQL(Validator):
             "CAST(x as NVARCHAR(2))",
             write={
                 "spark": "CAST(x AS VARCHAR(2))",
-                "tsql": "CAST(x AS VARCHAR(2))",
+                "tsql": "CAST(x AS NVARCHAR(2))",
             },
         )

@@ -692,12 +764,7 @@ class TestTSQL(Validator):
             "SELECT * INTO foo.bar.baz FROM (SELECT * FROM a.b.c) AS temp",
             read={
                 "": "CREATE TABLE foo.bar.baz AS SELECT * FROM a.b.c",
-            },
-        )
-        self.validate_all(
-            "SELECT * INTO foo.bar.baz FROM (SELECT * FROM a.b.c) AS temp",
-            read={
-                "": "CREATE TABLE foo.bar.baz AS (SELECT * FROM a.b.c)",
+                "duckdb": "CREATE TABLE foo.bar.baz AS (SELECT * FROM a.b.c)",
             },
         )
         self.validate_all(
@@ -759,11 +826,6 @@ class TestTSQL(Validator):
         )

     def test_transaction(self):
-        # BEGIN { TRAN | TRANSACTION }
-        #    [ { transaction_name | @tran_name_variable }
-        #      [ WITH MARK [ 'description' ] ]
-        #    ]
-        # [ ; ]
         self.validate_identity("BEGIN TRANSACTION")
         self.validate_all("BEGIN TRAN", write={"tsql": "BEGIN TRANSACTION"})
         self.validate_identity("BEGIN TRANSACTION transaction_name")
@@ -771,8 +833,6 @@ class TestTSQL(Validator):
         self.validate_identity("BEGIN TRANSACTION transaction_name WITH MARK 'description'")

     def test_commit(self):
-        # COMMIT [ { TRAN | TRANSACTION } [ transaction_name | @tran_name_variable ] ] [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] [ ; ]
-
         self.validate_all("COMMIT", write={"tsql": "COMMIT TRANSACTION"})
         self.validate_all("COMMIT TRAN", write={"tsql": "COMMIT TRANSACTION"})
         self.validate_identity("COMMIT TRANSACTION")
@@ -787,11 +847,6 @@ class TestTSQL(Validator):
         )

     def test_rollback(self):
-        # Applies to SQL Server and Azure SQL Database
-        # ROLLBACK { TRAN | TRANSACTION }
-        #     [ transaction_name | @tran_name_variable
-        #     | savepoint_name | @savepoint_variable ]
-        # [ ; ]
         self.validate_all("ROLLBACK", write={"tsql": "ROLLBACK TRANSACTION"})
         self.validate_all("ROLLBACK TRAN", write={"tsql": "ROLLBACK TRANSACTION"})
         self.validate_identity("ROLLBACK TRANSACTION")
@@ -911,7 +966,7 @@ WHERE

         expected_sqls = [
             "CREATE PROC [dbo].[transform_proc] AS DECLARE @CurrentDate VARCHAR(20)",
-            "SET @CurrentDate = CAST(FORMAT(GETDATE(), 'yyyy-MM-dd HH:mm:ss') AS VARCHAR(20))",
+            "SET @CurrentDate = CONVERT(VARCHAR(20), GETDATE(), 120)",
             "CREATE TABLE [target_schema].[target_table] (a INTEGER) WITH (DISTRIBUTION=REPLICATE, HEAP)",
         ]

@@ -1090,155 +1145,173 @@ WHERE
             },
         )

-    def test_convert_date_format(self):
+    def test_convert(self):
         self.validate_all(
             "CONVERT(NVARCHAR(200), x)",
             write={
                 "spark": "CAST(x AS VARCHAR(200))",
+                "tsql": "CONVERT(NVARCHAR(200), x)",
             },
         )
         self.validate_all(
             "CONVERT(NVARCHAR, x)",
             write={
                 "spark": "CAST(x AS VARCHAR(30))",
+                "tsql": "CONVERT(NVARCHAR, x)",
             },
         )
         self.validate_all(
             "CONVERT(NVARCHAR(MAX), x)",
             write={
                 "spark": "CAST(x AS STRING)",
+                "tsql": "CONVERT(NVARCHAR(MAX), x)",
             },
         )
         self.validate_all(
             "CONVERT(VARCHAR(200), x)",
             write={
                 "spark": "CAST(x AS VARCHAR(200))",
+                "tsql": "CONVERT(VARCHAR(200), x)",
             },
         )
         self.validate_all(
             "CONVERT(VARCHAR, x)",
             write={
                 "spark": "CAST(x AS VARCHAR(30))",
+                "tsql": "CONVERT(VARCHAR, x)",
             },
         )
         self.validate_all(
             "CONVERT(VARCHAR(MAX), x)",
             write={
                 "spark": "CAST(x AS STRING)",
+                "tsql": "CONVERT(VARCHAR(MAX), x)",
             },
         )
         self.validate_all(
             "CONVERT(CHAR(40), x)",
             write={
                 "spark": "CAST(x AS CHAR(40))",
+                "tsql": "CONVERT(CHAR(40), x)",
             },
         )
         self.validate_all(
             "CONVERT(CHAR, x)",
             write={
                 "spark": "CAST(x AS CHAR(30))",
+                "tsql": "CONVERT(CHAR, x)",
             },
         )
         self.validate_all(
             "CONVERT(NCHAR(40), x)",
             write={
                 "spark": "CAST(x AS CHAR(40))",
+                "tsql": "CONVERT(NCHAR(40), x)",
             },
         )
         self.validate_all(
             "CONVERT(NCHAR, x)",
             write={
                 "spark": "CAST(x AS CHAR(30))",
+                "tsql": "CONVERT(NCHAR, x)",
             },
         )
         self.validate_all(
             "CONVERT(VARCHAR, x, 121)",
             write={
                 "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
+                "tsql": "CONVERT(VARCHAR, x, 121)",
             },
         )
         self.validate_all(
             "CONVERT(VARCHAR(40), x, 121)",
             write={
                 "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(40))",
+                "tsql": "CONVERT(VARCHAR(40), x, 121)",
             },
         )
         self.validate_all(
             "CONVERT(VARCHAR(MAX), x, 121)",
             write={
-                "spark": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+                "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS STRING)",
+                "tsql": "CONVERT(VARCHAR(MAX), x, 121)",
             },
         )
         self.validate_all(
             "CONVERT(NVARCHAR, x, 121)",
             write={
                 "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
+                "tsql": "CONVERT(NVARCHAR, x, 121)",
             },
         )
         self.validate_all(
             "CONVERT(NVARCHAR(40), x, 121)",
             write={
                 "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(40))",
+                "tsql": "CONVERT(NVARCHAR(40), x, 121)",
             },
         )
         self.validate_all(
             "CONVERT(NVARCHAR(MAX), x, 121)",
             write={
-                "spark": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+                "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS STRING)",
+                "tsql": "CONVERT(NVARCHAR(MAX), x, 121)",
             },
         )
         self.validate_all(
             "CONVERT(DATE, x, 121)",
             write={
                 "spark": "TO_DATE(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+                "tsql": "CONVERT(DATE, x, 121)",
             },
         )
         self.validate_all(
             "CONVERT(DATETIME, x, 121)",
             write={
                 "spark": "TO_TIMESTAMP(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+                "tsql": "CONVERT(DATETIME2, x, 121)",
             },
         )
         self.validate_all(
             "CONVERT(DATETIME2, x, 121)",
             write={
                 "spark": "TO_TIMESTAMP(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+                "tsql": "CONVERT(DATETIME2, x, 121)",
             },
         )
         self.validate_all(
             "CONVERT(INT, x)",
             write={
                 "spark": "CAST(x AS INT)",
+                "tsql": "CONVERT(INTEGER, x)",
             },
         )
         self.validate_all(
             "CONVERT(INT, x, 121)",
             write={
                 "spark": "CAST(x AS INT)",
+                "tsql": "CONVERT(INTEGER, x, 121)",
             },
         )
         self.validate_all(
             "TRY_CONVERT(NVARCHAR, x, 121)",
             write={
                 "spark": "TRY_CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
+                "tsql": "TRY_CONVERT(NVARCHAR, x, 121)",
             },
         )
         self.validate_all(
             "TRY_CONVERT(INT, x)",
             write={
                 "spark": "TRY_CAST(x AS INT)",
+                "tsql": "TRY_CONVERT(INTEGER, x)",
             },
         )
         self.validate_all(
             "TRY_CAST(x AS INT)",
             write={
                 "spark": "TRY_CAST(x AS INT)",
-            },
-        )
-        self.validate_all(
-            "CAST(x AS INT)",
-            write={
-                "spark": "CAST(x AS INT)",
+                "tsql": "TRY_CAST(x AS INTEGER)",
             },
         )
         self.validate_all(
@@ -1246,6 +1319,7 @@ WHERE
             write={
                 "mysql": "SELECT CAST(DATE_FORMAT(testdb.dbo.test.x, '%Y-%m-%d %T') AS CHAR(10)) AS y FROM testdb.dbo.test",
                 "spark": "SELECT CAST(DATE_FORMAT(testdb.dbo.test.x, 'yyyy-MM-dd HH:mm:ss') AS VARCHAR(10)) AS y FROM testdb.dbo.test",
+                "tsql": "SELECT CONVERT(VARCHAR(10), testdb.dbo.test.x, 120) AS y FROM testdb.dbo.test",
             },
         )
         self.validate_all(
@@ -1253,12 +1327,14 @@ WHERE
             write={
                 "mysql": "SELECT CAST(y.x AS CHAR(10)) AS z FROM testdb.dbo.test AS y",
                 "spark": "SELECT CAST(y.x AS VARCHAR(10)) AS z FROM testdb.dbo.test AS y",
+                "tsql": "SELECT CONVERT(VARCHAR(10), y.x) AS z FROM testdb.dbo.test AS y",
             },
         )
         self.validate_all(
             "SELECT CAST((SELECT x FROM y) AS VARCHAR) AS test",
             write={
                 "spark": "SELECT CAST((SELECT x FROM y) AS STRING) AS test",
+                "tsql": "SELECT CAST((SELECT x FROM y) AS VARCHAR) AS test",
             },
         )
@@ -1654,7 +1730,7 @@ FROM OPENJSON(@json) WITH (
     Date DATETIME2 '$.Order.Date',
     Customer VARCHAR(200) '$.AccountNumber',
     Quantity INTEGER '$.Item.Quantity',
-    [Order] VARCHAR(MAX) AS JSON
+    [Order] NVARCHAR(MAX) AS JSON
 )"""
             },
             pretty=True,