From 38e6461a8afbd7cb83709ddb998f03d40ba87755 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Tue, 23 Jan 2024 06:06:14 +0100
Subject: Merging upstream version 20.9.0.

Signed-off-by: Daniel Baumann
---
 tests/dialects/test_tsql.py | 286 +++++++++++++++++++++++++++++---------------
 1 file changed, 188 insertions(+), 98 deletions(-)

(limited to 'tests/dialects/test_tsql.py')

diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index a2569ab..7cf9971 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -6,10 +6,17 @@ class TestTSQL(Validator):
     dialect = "tsql"
 
     def test_tsql(self):
+        self.validate_identity("ROUND(x, 1, 0)")
+        self.validate_identity("EXEC MyProc @id=7, @name='Lochristi'")
         # https://learn.microsoft.com/en-us/previous-versions/sql/sql-server-2008-r2/ms187879(v=sql.105)?redirectedfrom=MSDN
         # tsql allows .. which means use the default schema
         self.validate_identity("SELECT * FROM a..b")
+        self.validate_identity("SELECT TRIM(' test ') AS Result")
+        self.validate_identity("SELECT TRIM('.,! ' FROM ' # test .') AS Result")
+        self.validate_identity("SELECT * FROM t TABLESAMPLE (10 PERCENT)")
+        self.validate_identity("SELECT * FROM t TABLESAMPLE (20 ROWS)")
+        self.validate_identity("SELECT * FROM t TABLESAMPLE (10 PERCENT) REPEATABLE (123)")
         self.validate_identity("SELECT CONCAT(column1, column2)")
         self.validate_identity("SELECT TestSpecialChar.Test# FROM TestSpecialChar")
         self.validate_identity("SELECT TestSpecialChar.Test@ FROM TestSpecialChar")
@@ -20,6 +27,40 @@ class TestTSQL(Validator):
         self.validate_identity("1 AND true", "1 <> 0 AND (1 = 1)")
         self.validate_identity("CAST(x AS int) OR y", "CAST(x AS INTEGER) <> 0 OR y <> 0")
 
+        self.validate_all(
+            "SELECT TRIM(BOTH 'a' FROM a)",
+            read={
+                "mysql": "SELECT TRIM(BOTH 'a' FROM a)",
+            },
+            write={
+                "mysql": "SELECT TRIM(BOTH 'a' FROM a)",
+                "tsql": "SELECT TRIM(BOTH 'a' FROM a)",
+            },
+        )
+        self.validate_all(
+            "SELECT TIMEFROMPARTS(23, 59, 59, 0, 0)",
+            read={
+                "duckdb": "SELECT MAKE_TIME(23, 59, 59)",
+                "mysql": "SELECT MAKETIME(23, 59, 59)",
+                "postgres": "SELECT MAKE_TIME(23, 59, 59)",
+                "snowflake": "SELECT TIME_FROM_PARTS(23, 59, 59)",
+            },
+            write={
+                "tsql": "SELECT TIMEFROMPARTS(23, 59, 59, 0, 0)",
+            },
+        )
+        self.validate_all(
+            "SELECT DATETIMEFROMPARTS(2013, 4, 5, 12, 00, 00, 0)",
+            read={
+                # The nanoseconds are ignored since T-SQL doesn't support that precision
+                "snowflake": "SELECT TIMESTAMP_FROM_PARTS(2013, 4, 5, 12, 00, 00, 987654321)"
+            },
+            write={
+                "duckdb": "SELECT MAKE_TIMESTAMP(2013, 4, 5, 12, 00, 00 + (0 / 1000.0))",
+                "snowflake": "SELECT TIMESTAMP_FROM_PARTS(2013, 4, 5, 12, 00, 00, 0 * 1000000)",
+                "tsql": "SELECT DATETIMEFROMPARTS(2013, 4, 5, 12, 00, 00, 0)",
+            },
+        )
         self.validate_all(
             "SELECT TOP 1 * FROM (SELECT x FROM t1 UNION ALL SELECT x FROM t2) AS _l_0",
             read={
@@ -32,6 +73,33 @@ class TestTSQL(Validator):
                 "duckdb": "CREATE TABLE foo AS WITH t(c) AS (SELECT 1) SELECT c FROM t",
             },
         )
+        self.validate_all(
+            "WITH t(c) AS (SELECT 1) SELECT * INTO foo FROM (SELECT c AS c FROM t) AS temp",
+            write={
+                "duckdb": "CREATE TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp",
+                "postgres": "WITH t(c) AS (SELECT 1) SELECT * INTO foo FROM (SELECT c AS c FROM t) AS temp",
+                "oracle": "WITH t(c) AS (SELECT 1) SELECT * INTO foo FROM (SELECT c AS c FROM t) temp",
+            },
+        )
+        self.validate_all(
+            "WITH t(c) AS (SELECT 1) SELECT * INTO TEMP UNLOGGED foo FROM (SELECT c AS c FROM t) AS temp",
+            write={
+                "duckdb": "CREATE TEMPORARY TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp",
+                "postgres": "WITH t(c) AS (SELECT 1) SELECT * INTO TEMPORARY foo FROM (SELECT c AS c FROM t) AS temp",
+            },
+        )
+        self.validate_all(
+            "WITH t(c) AS (SELECT 1) SELECT * INTO UNLOGGED foo FROM (SELECT c AS c FROM t) AS temp",
+            write={
+                "duckdb": "CREATE TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp",
+            },
+        )
+        self.validate_all(
+            "WITH t(c) AS (SELECT 1) SELECT * INTO UNLOGGED foo FROM (SELECT c AS c FROM t) AS temp",
+            write={
+                "duckdb": "CREATE TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp",
+            },
+        )
         self.validate_all(
             "WITH y AS (SELECT 2 AS c) INSERT INTO t SELECT * FROM y",
             read={
@@ -104,7 +172,8 @@ class TestTSQL(Validator):
         )
 
         self.validate_identity(
-            'CREATE TABLE x (CONSTRAINT "pk_mytable" UNIQUE NONCLUSTERED (a DESC)) ON b (c)'
+            'CREATE TABLE x (CONSTRAINT "pk_mytable" UNIQUE NONCLUSTERED (a DESC)) ON b (c)',
+            "CREATE TABLE x (CONSTRAINT [pk_mytable] UNIQUE NONCLUSTERED (a DESC)) ON b (c)",
         )
 
         self.validate_all(
@@ -117,7 +186,7 @@ class TestTSQL(Validator):
             ) ON [SECONDARY]
             """,
             write={
-                "tsql": 'CREATE TABLE x ("zip_cd" VARCHAR(5) NULL NOT FOR REPLICATION, "zip_cd_mkey" VARCHAR(5) NOT NULL, CONSTRAINT "pk_mytable" PRIMARY KEY CLUSTERED ("zip_cd_mkey" ASC) WITH (PAD_INDEX=ON, STATISTICS_NORECOMPUTE=OFF) ON "INDEX") ON "SECONDARY"',
+                "tsql": "CREATE TABLE x ([zip_cd] VARCHAR(5) NULL NOT FOR REPLICATION, [zip_cd_mkey] VARCHAR(5) NOT NULL, CONSTRAINT [pk_mytable] PRIMARY KEY CLUSTERED ([zip_cd_mkey] ASC) WITH (PAD_INDEX=ON, STATISTICS_NORECOMPUTE=OFF) ON [INDEX]) ON [SECONDARY]",
                 "spark2": "CREATE TABLE x (`zip_cd` VARCHAR(5), `zip_cd_mkey` VARCHAR(5) NOT NULL, CONSTRAINT `pk_mytable` PRIMARY KEY (`zip_cd_mkey`))",
             },
         )
@@ -136,7 +205,8 @@ class TestTSQL(Validator):
         )
 
         self.validate_identity(
-            "CREATE TABLE [db].[tbl]([a] [int])", 'CREATE TABLE "db"."tbl" ("a" INTEGER)'
+            "CREATE TABLE [db].[tbl]([a] [int])",
+            "CREATE TABLE [db].[tbl] ([a] INTEGER)",
         )
 
         projection = parse_one("SELECT a = 1", read="tsql").selects[0]
@@ -172,7 +242,6 @@ class TestTSQL(Validator):
         self.validate_identity("PRINT @TestVariable")
         self.validate_identity("SELECT Employee_ID, Department_ID FROM @MyTableVar")
         self.validate_identity("INSERT INTO @TestTable VALUES (1, 'Value1', 12, 20)")
-        self.validate_identity('SELECT "x"."y" FROM foo')
         self.validate_identity("SELECT * FROM #foo")
         self.validate_identity("SELECT * FROM ##foo")
         self.validate_identity("SELECT a = 1", "SELECT 1 AS a")
@@ -185,11 +254,15 @@ class TestTSQL(Validator):
         self.validate_identity(
             "SELECT DISTINCT DepartmentName, PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY BaseRate) OVER (PARTITION BY DepartmentName) AS MedianCont FROM dbo.DimEmployee"
         )
+        self.validate_identity(
+            'SELECT "x"."y" FROM foo',
+            "SELECT [x].[y] FROM foo",
+        )
 
         self.validate_all(
             "SELECT CAST([a].[b] AS SMALLINT) FROM foo",
             write={
-                "tsql": 'SELECT CAST("a"."b" AS SMALLINT) FROM foo',
+                "tsql": "SELECT CAST([a].[b] AS SMALLINT) FROM foo",
                 "spark": "SELECT CAST(`a`.`b` AS SMALLINT) FROM foo",
             },
         )
@@ -227,13 +300,6 @@ class TestTSQL(Validator):
                 "postgres": "STRING_AGG(x, '|')",
             },
         )
-        self.validate_all(
-            "SELECT CAST([a].[b] AS SMALLINT) FROM foo",
-            write={
-                "tsql": 'SELECT CAST("a"."b" AS SMALLINT) FROM foo',
-                "spark": "SELECT CAST(`a`.`b` AS SMALLINT) FROM foo",
-            },
-        )
         self.validate_all(
             "HASHBYTES('SHA1', x)",
             read={
@@ -583,7 +649,11 @@ class TestTSQL(Validator):
             'CREATE TABLE "dbo"."benchmark" ('
             '"name" CHAR(7) NOT NULL, '
             '"internal_id" VARCHAR(10) NOT NULL, '
-            f'UNIQUE {clusterd_keyword} ("internal_id" ASC))'
+            f'UNIQUE {clusterd_keyword} ("internal_id" ASC))',
+            "CREATE TABLE [dbo].[benchmark] ("
+            "[name] CHAR(7) NOT NULL, "
+            "[internal_id] VARCHAR(10) NOT NULL, "
+            f"UNIQUE {clusterd_keyword} ([internal_id] ASC))",
         )
 
         self.validate_identity(
@@ -802,13 +872,13 @@ WHERE
 """
 
         expected_sqls = [
-            'CREATE PROCEDURE "TRANSF"."SP_Merge_Sales_Real" @Loadid INTEGER, @NumberOfRows INTEGER AS BEGIN SET XACT_ABORT ON',
+            "CREATE PROCEDURE [TRANSF].[SP_Merge_Sales_Real] @Loadid INTEGER, @NumberOfRows INTEGER AS BEGIN SET XACT_ABORT ON",
             "DECLARE @DWH_DateCreated DATETIME = CONVERT(DATETIME, getdate(), 104)",
             "DECLARE @DWH_DateModified DATETIME = CONVERT(DATETIME, getdate(), 104)",
             "DECLARE @DWH_IdUserCreated INTEGER = SUSER_ID (SYSTEM_USER)",
             "DECLARE @DWH_IdUserModified INTEGER = SUSER_ID (SYSTEM_USER)",
             "DECLARE @SalesAmountBefore float",
-            'SELECT @SalesAmountBefore = SUM(SalesAmount) FROM TRANSF."Pre_Merge_Sales_Real" AS S',
+            "SELECT @SalesAmountBefore = SUM(SalesAmount) FROM TRANSF.[Pre_Merge_Sales_Real] AS S",
             "END",
         ]
 
@@ -827,9 +897,9 @@ WHERE
 """
 
         expected_sqls = [
-            'CREATE PROC "dbo"."transform_proc" AS DECLARE @CurrentDate VARCHAR(20)',
+            "CREATE PROC [dbo].[transform_proc] AS DECLARE @CurrentDate VARCHAR(20)",
             "SET @CurrentDate = CAST(FORMAT(GETDATE(), 'yyyy-MM-dd HH:mm:ss') AS VARCHAR(20))",
-            'CREATE TABLE "target_schema"."target_table" (a INTEGER) WITH (DISTRIBUTION=REPLICATE, HEAP)',
+            "CREATE TABLE [target_schema].[target_table] (a INTEGER) WITH (DISTRIBUTION=REPLICATE, HEAP)",
         ]
 
         for expr, expected_sql in zip(parse(sql, read="tsql"), expected_sqls):
@@ -901,7 +971,7 @@ WHERE
     def test_datepart(self):
         self.validate_identity(
             "DATEPART(QUARTER, x)",
-            "DATEPART(quarter, CAST(x AS DATETIME2))",
+            "DATEPART(QUARTER, CAST(x AS DATETIME2))",
         )
         self.validate_identity(
             "DATEPART(YEAR, x)",
             "FORMAT(CAST(x AS DATETIME2), 'yyyy')",
         )
         self.validate_identity(
             "DATEPART(HOUR, date_and_time)",
-            "DATEPART(hour, CAST(date_and_time AS DATETIME2))",
+            "DATEPART(HOUR, CAST(date_and_time AS DATETIME2))",
         )
         self.validate_identity(
             "DATEPART(WEEKDAY, date_and_time)",
-            "DATEPART(dw, CAST(date_and_time AS DATETIME2))",
+            "DATEPART(DW, CAST(date_and_time AS DATETIME2))",
         )
         self.validate_identity(
             "DATEPART(DW, date_and_time)",
-            "DATEPART(dw, CAST(date_and_time AS DATETIME2))",
+            "DATEPART(DW, CAST(date_and_time AS DATETIME2))",
         )
 
         self.validate_all(
@@ -929,9 +999,9 @@ WHERE
             },
         )
         self.validate_all(
-            "SELECT DATEPART(year, CAST('2017-01-01' AS DATE))",
+            "SELECT DATEPART(YEAR, CAST('2017-01-01' AS DATE))",
             read={
-                "postgres": "SELECT DATE_PART('year', '2017-01-01'::DATE)",
+                "postgres": "SELECT DATE_PART('YEAR', '2017-01-01'::DATE)",
             },
             write={
                 "postgres": "SELECT TO_CHAR(CAST(CAST('2017-01-01' AS DATE) AS TIMESTAMP), 'YYYY')",
             },
         )
@@ -1135,7 +1205,7 @@ WHERE
         )
 
     def test_add_date(self):
-        self.validate_identity("SELECT DATEADD(year, 1, '2017/08/25')")
+        self.validate_identity("SELECT DATEADD(YEAR, 1, '2017/08/25')")
 
         self.validate_all(
             "DATEADD(year, 50, '2006-07-31')",
@@ -1153,52 +1223,52 @@ WHERE
             "SELECT DATEADD(wk, 1, '2017/08/25')",
             write={
                 "spark": "SELECT DATE_ADD('2017/08/25', 7)",
-                "databricks": "SELECT DATEADD(week, 1, '2017/08/25')",
+                "databricks": "SELECT DATEADD(WEEK, 1, '2017/08/25')",
             },
         )
 
     def test_date_diff(self):
-        self.validate_identity("SELECT DATEDIFF(hour, 1.5, '2021-01-01')")
+        self.validate_identity("SELECT DATEDIFF(HOUR, 1.5, '2021-01-01')")
 
         self.validate_all(
             "SELECT DATEDIFF(quarter, 0, '2021-01-01')",
             write={
-                "tsql": "SELECT DATEDIFF(quarter, CAST('1900-01-01' AS DATETIME2), CAST('2021-01-01' AS DATETIME2))",
-                "spark": "SELECT DATEDIFF(quarter, CAST('1900-01-01' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
-                "duckdb": "SELECT DATE_DIFF('quarter', CAST('1900-01-01' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
+                "tsql": "SELECT DATEDIFF(QUARTER, CAST('1900-01-01' AS DATETIME2), CAST('2021-01-01' AS DATETIME2))",
+                "spark": "SELECT DATEDIFF(QUARTER, CAST('1900-01-01' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
+                "duckdb": "SELECT DATE_DIFF('QUARTER', CAST('1900-01-01' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
             },
         )
         self.validate_all(
             "SELECT DATEDIFF(day, 1, '2021-01-01')",
             write={
-                "tsql": "SELECT DATEDIFF(day, CAST('1900-01-02' AS DATETIME2), CAST('2021-01-01' AS DATETIME2))",
-                "spark": "SELECT DATEDIFF(day, CAST('1900-01-02' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
-                "duckdb": "SELECT DATE_DIFF('day', CAST('1900-01-02' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
+                "tsql": "SELECT DATEDIFF(DAY, CAST('1900-01-02' AS DATETIME2), CAST('2021-01-01' AS DATETIME2))",
+                "spark": "SELECT DATEDIFF(DAY, CAST('1900-01-02' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
+                "duckdb": "SELECT DATE_DIFF('DAY', CAST('1900-01-02' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
             },
         )
         self.validate_all(
             "SELECT DATEDIFF(year, '2020-01-01', '2021-01-01')",
             write={
-                "tsql": "SELECT DATEDIFF(year, CAST('2020-01-01' AS DATETIME2), CAST('2021-01-01' AS DATETIME2))",
-                "spark": "SELECT DATEDIFF(year, CAST('2020-01-01' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
+                "tsql": "SELECT DATEDIFF(YEAR, CAST('2020-01-01' AS DATETIME2), CAST('2021-01-01' AS DATETIME2))",
+                "spark": "SELECT DATEDIFF(YEAR, CAST('2020-01-01' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
                 "spark2": "SELECT CAST(MONTHS_BETWEEN(CAST('2021-01-01' AS TIMESTAMP), CAST('2020-01-01' AS TIMESTAMP)) / 12 AS INT)",
             },
         )
         self.validate_all(
             "SELECT DATEDIFF(mm, 'start', 'end')",
             write={
-                "databricks": "SELECT DATEDIFF(month, CAST('start' AS TIMESTAMP), CAST('end' AS TIMESTAMP))",
+                "databricks": "SELECT DATEDIFF(MONTH, CAST('start' AS TIMESTAMP), CAST('end' AS TIMESTAMP))",
                 "spark2": "SELECT CAST(MONTHS_BETWEEN(CAST('end' AS TIMESTAMP), CAST('start' AS TIMESTAMP)) AS INT)",
-                "tsql": "SELECT DATEDIFF(month, CAST('start' AS DATETIME2), CAST('end' AS DATETIME2))",
+                "tsql": "SELECT DATEDIFF(MONTH, CAST('start' AS DATETIME2), CAST('end' AS DATETIME2))",
             },
         )
         self.validate_all(
             "SELECT DATEDIFF(quarter, 'start', 'end')",
             write={
-                "databricks": "SELECT DATEDIFF(quarter, CAST('start' AS TIMESTAMP), CAST('end' AS TIMESTAMP))",
-                "spark": "SELECT DATEDIFF(quarter, CAST('start' AS TIMESTAMP), CAST('end' AS TIMESTAMP))",
+                "databricks": "SELECT DATEDIFF(QUARTER, CAST('start' AS TIMESTAMP), CAST('end' AS TIMESTAMP))",
+                "spark": "SELECT DATEDIFF(QUARTER, CAST('start' AS TIMESTAMP), CAST('end' AS TIMESTAMP))",
                 "spark2": "SELECT CAST(MONTHS_BETWEEN(CAST('end' AS TIMESTAMP), CAST('start' AS TIMESTAMP)) / 3 AS INT)",
-                "tsql": "SELECT DATEDIFF(quarter, CAST('start' AS DATETIME2), CAST('end' AS DATETIME2))",
+                "tsql": "SELECT DATEDIFF(QUARTER, CAST('start' AS DATETIME2), CAST('end' AS DATETIME2))",
             },
         )
 
@@ -1220,39 +1290,45 @@ WHERE
         self.validate_all(
             "SELECT x.a, x.b, t.v, t.y FROM x CROSS APPLY (SELECT v, y FROM t) t(v, y)",
             write={
-                "spark": "SELECT x.a, x.b, t.v, t.y FROM x, LATERAL (SELECT v, y FROM t) AS t(v, y)",
FROM t) AS t(v, y)", + "tsql": "SELECT x.a, x.b, t.v, t.y FROM x CROSS APPLY (SELECT v, y FROM t) AS t(v, y)", }, ) self.validate_all( "SELECT x.a, x.b, t.v, t.y FROM x OUTER APPLY (SELECT v, y FROM t) t(v, y)", write={ "spark": "SELECT x.a, x.b, t.v, t.y FROM x LEFT JOIN LATERAL (SELECT v, y FROM t) AS t(v, y)", + "tsql": "SELECT x.a, x.b, t.v, t.y FROM x OUTER APPLY (SELECT v, y FROM t) AS t(v, y)", }, ) self.validate_all( "SELECT x.a, x.b, t.v, t.y, s.v, s.y FROM x OUTER APPLY (SELECT v, y FROM t) t(v, y) OUTER APPLY (SELECT v, y FROM t) s(v, y) LEFT JOIN z ON z.id = s.id", write={ "spark": "SELECT x.a, x.b, t.v, t.y, s.v, s.y FROM x LEFT JOIN LATERAL (SELECT v, y FROM t) AS t(v, y) LEFT JOIN LATERAL (SELECT v, y FROM t) AS s(v, y) LEFT JOIN z ON z.id = s.id", + "tsql": "SELECT x.a, x.b, t.v, t.y, s.v, s.y FROM x OUTER APPLY (SELECT v, y FROM t) AS t(v, y) OUTER APPLY (SELECT v, y FROM t) AS s(v, y) LEFT JOIN z ON z.id = s.id", }, ) def test_lateral_table_valued_function(self): self.validate_all( - "SELECT t.x, y.z FROM x CROSS APPLY tvfTest(t.x)y(z)", + "SELECT t.x, y.z FROM x CROSS APPLY tvfTest(t.x) y(z)", write={ - "spark": "SELECT t.x, y.z FROM x, LATERAL TVFTEST(t.x) AS y(z)", + "spark": "SELECT t.x, y.z FROM x INNER JOIN LATERAL TVFTEST(t.x) AS y(z)", + "tsql": "SELECT t.x, y.z FROM x CROSS APPLY TVFTEST(t.x) AS y(z)", }, ) self.validate_all( "SELECT t.x, y.z FROM x OUTER APPLY tvfTest(t.x)y(z)", write={ "spark": "SELECT t.x, y.z FROM x LEFT JOIN LATERAL TVFTEST(t.x) AS y(z)", + "tsql": "SELECT t.x, y.z FROM x OUTER APPLY TVFTEST(t.x) AS y(z)", }, ) self.validate_all( "SELECT t.x, y.z FROM x OUTER APPLY a.b.tvfTest(t.x)y(z)", write={ "spark": "SELECT t.x, y.z FROM x LEFT JOIN LATERAL a.b.TVFTEST(t.x) AS y(z)", + "tsql": "SELECT t.x, y.z FROM x OUTER APPLY a.b.TVFTEST(t.x) AS y(z)", }, ) @@ -1284,27 +1360,45 @@ WHERE self.validate_all( "SELECT FORMAT(1000000.01,'###,###.###')", - write={"spark": "SELECT FORMAT_NUMBER(1000000.01, '###,###.###')"}, + write={ + "spark": "SELECT FORMAT_NUMBER(1000000.01, '###,###.###')", + "tsql": "SELECT FORMAT(1000000.01, '###,###.###')", + }, ) self.validate_all( "SELECT FORMAT(1234567, 'f')", - write={"spark": "SELECT FORMAT_NUMBER(1234567, 'f')"}, + write={ + "spark": "SELECT FORMAT_NUMBER(1234567, 'f')", + "tsql": "SELECT FORMAT(1234567, 'f')", + }, ) self.validate_all( "SELECT FORMAT('01-01-1991', 'dd.mm.yyyy')", - write={"spark": "SELECT DATE_FORMAT('01-01-1991', 'dd.mm.yyyy')"}, + write={ + "spark": "SELECT DATE_FORMAT('01-01-1991', 'dd.mm.yyyy')", + "tsql": "SELECT FORMAT('01-01-1991', 'dd.mm.yyyy')", + }, ) self.validate_all( "SELECT FORMAT(date_col, 'dd.mm.yyyy')", - write={"spark": "SELECT DATE_FORMAT(date_col, 'dd.mm.yyyy')"}, + write={ + "spark": "SELECT DATE_FORMAT(date_col, 'dd.mm.yyyy')", + "tsql": "SELECT FORMAT(date_col, 'dd.mm.yyyy')", + }, ) self.validate_all( "SELECT FORMAT(date_col, 'm')", - write={"spark": "SELECT DATE_FORMAT(date_col, 'MMMM d')"}, + write={ + "spark": "SELECT DATE_FORMAT(date_col, 'MMMM d')", + "tsql": "SELECT FORMAT(date_col, 'MMMM d')", + }, ) self.validate_all( "SELECT FORMAT(num_col, 'c')", - write={"spark": "SELECT FORMAT_NUMBER(num_col, 'c')"}, + write={ + "spark": "SELECT FORMAT_NUMBER(num_col, 'c')", + "tsql": "SELECT FORMAT(num_col, 'c')", + }, ) def test_string(self): @@ -1324,11 +1418,36 @@ WHERE def test_eomonth(self): self.validate_all( "EOMONTH(GETDATE())", - write={"spark": "LAST_DAY(CURRENT_TIMESTAMP())"}, + read={ + "spark": "LAST_DAY(CURRENT_TIMESTAMP())", + }, + write={ + "bigquery": 
"LAST_DAY(CAST(CURRENT_TIMESTAMP() AS DATE))", + "clickhouse": "LAST_DAY(CAST(CURRENT_TIMESTAMP() AS DATE))", + "duckdb": "LAST_DAY(CAST(CURRENT_TIMESTAMP AS DATE))", + "mysql": "LAST_DAY(DATE(CURRENT_TIMESTAMP()))", + "postgres": "CAST(DATE_TRUNC('MONTH', CAST(CURRENT_TIMESTAMP AS DATE)) + INTERVAL '1 MONTH' - INTERVAL '1 DAY' AS DATE)", + "presto": "LAST_DAY_OF_MONTH(CAST(CAST(CURRENT_TIMESTAMP AS TIMESTAMP) AS DATE))", + "redshift": "LAST_DAY(CAST(SYSDATE AS DATE))", + "snowflake": "LAST_DAY(CAST(CURRENT_TIMESTAMP() AS DATE))", + "spark": "LAST_DAY(TO_DATE(CURRENT_TIMESTAMP()))", + "tsql": "EOMONTH(CAST(GETDATE() AS DATE))", + }, ) self.validate_all( "EOMONTH(GETDATE(), -1)", - write={"spark": "LAST_DAY(ADD_MONTHS(CURRENT_TIMESTAMP(), -1))"}, + write={ + "bigquery": "LAST_DAY(DATE_ADD(CAST(CURRENT_TIMESTAMP() AS DATE), INTERVAL -1 MONTH))", + "clickhouse": "LAST_DAY(DATE_ADD(MONTH, -1, CAST(CURRENT_TIMESTAMP() AS DATE)))", + "duckdb": "LAST_DAY(CAST(CURRENT_TIMESTAMP AS DATE) + INTERVAL (-1) MONTH)", + "mysql": "LAST_DAY(DATE_ADD(CURRENT_TIMESTAMP(), INTERVAL -1 MONTH))", + "postgres": "CAST(DATE_TRUNC('MONTH', CAST(CURRENT_TIMESTAMP AS DATE) + INTERVAL '-1 MONTH') + INTERVAL '1 MONTH' - INTERVAL '1 DAY' AS DATE)", + "presto": "LAST_DAY_OF_MONTH(DATE_ADD('MONTH', CAST(-1 AS BIGINT), CAST(CAST(CURRENT_TIMESTAMP AS TIMESTAMP) AS DATE)))", + "redshift": "LAST_DAY(DATEADD(MONTH, -1, CAST(SYSDATE AS DATE)))", + "snowflake": "LAST_DAY(DATEADD(MONTH, -1, CAST(CURRENT_TIMESTAMP() AS DATE)))", + "spark": "LAST_DAY(ADD_MONTHS(TO_DATE(CURRENT_TIMESTAMP()), -1))", + "tsql": "EOMONTH(DATEADD(MONTH, -1, CAST(GETDATE() AS DATE)))", + }, ) def test_identifier_prefixes(self): @@ -1384,61 +1503,32 @@ WHERE def test_temporal_table(self): self.validate_identity( - """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON)""" + """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON)""", + "CREATE TABLE test ([data] CHAR(7), [valid_from] DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, [valid_to] DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ([valid_from], [valid_to])) WITH(SYSTEM_VERSIONING=ON)", ) self.validate_identity( - """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START HIDDEN NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END HIDDEN NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history", DATA_CONSISTENCY_CHECK=ON))""" + """CREATE TABLE test ([data] CHAR(7), [valid_from] DATETIME2(2) GENERATED ALWAYS AS ROW START HIDDEN NOT NULL, [valid_to] DATETIME2(2) GENERATED ALWAYS AS ROW END HIDDEN NOT NULL, PERIOD FOR SYSTEM_TIME ([valid_from], [valid_to])) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE=[dbo].[benchmark_history], DATA_CONSISTENCY_CHECK=ON))""" ) self.validate_identity( - """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history", DATA_CONSISTENCY_CHECK=ON))""" + 
"""CREATE TABLE test ([data] CHAR(7), [valid_from] DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, [valid_to] DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ([valid_from], [valid_to])) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE=[dbo].[benchmark_history], DATA_CONSISTENCY_CHECK=ON))""" ) self.validate_identity( - """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history", DATA_CONSISTENCY_CHECK=OFF))""" + """CREATE TABLE test ([data] CHAR(7), [valid_from] DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, [valid_to] DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ([valid_from], [valid_to])) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE=[dbo].[benchmark_history], DATA_CONSISTENCY_CHECK=OFF))""" ) self.validate_identity( - """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history"))""" + """CREATE TABLE test ([data] CHAR(7), [valid_from] DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, [valid_to] DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ([valid_from], [valid_to])) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE=[dbo].[benchmark_history]))""" ) self.validate_identity( - """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history"))""" + """CREATE TABLE test ([data] CHAR(7), [valid_from] DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, [valid_to] DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ([valid_from], [valid_to])) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE=[dbo].[benchmark_history]))""" ) def test_system_time(self): - self.validate_all( - "SELECT [x] FROM [a].[b] FOR SYSTEM_TIME AS OF 'foo'", - write={ - "tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME AS OF 'foo'""", - }, - ) - self.validate_all( - "SELECT [x] FROM [a].[b] FOR SYSTEM_TIME AS OF 'foo' AS alias", - write={ - "tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME AS OF 'foo' AS alias""", - }, - ) - self.validate_all( - "SELECT [x] FROM [a].[b] FOR SYSTEM_TIME FROM c TO d", - write={ - "tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME FROM c TO d""", - }, - ) - self.validate_all( - "SELECT [x] FROM [a].[b] FOR SYSTEM_TIME BETWEEN c AND d", - write={ - "tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME BETWEEN c AND d""", - }, - ) - self.validate_all( - "SELECT [x] FROM [a].[b] FOR SYSTEM_TIME CONTAINED IN (c, d)", - write={ - "tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME CONTAINED IN (c, d)""", - }, - ) - self.validate_all( - "SELECT [x] FROM [a].[b] FOR SYSTEM_TIME ALL AS alias", - write={ - "tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME ALL AS alias""", - }, - ) + self.validate_identity("SELECT [x] FROM [a].[b] FOR SYSTEM_TIME AS OF 'foo'") + self.validate_identity("SELECT [x] FROM [a].[b] FOR SYSTEM_TIME AS OF 'foo' AS alias") + self.validate_identity("SELECT [x] FROM [a].[b] FOR SYSTEM_TIME FROM c TO d") + self.validate_identity("SELECT [x] FROM 
[a].[b] FOR SYSTEM_TIME BETWEEN c AND d") + self.validate_identity("SELECT [x] FROM [a].[b] FOR SYSTEM_TIME CONTAINED IN (c, d)") + self.validate_identity("SELECT [x] FROM [a].[b] FOR SYSTEM_TIME ALL AS alias") def test_current_user(self): self.validate_all( @@ -1491,7 +1581,7 @@ WHERE self.validate_all( """SELECT [key], value FROM OPENJSON(@json,'$.path.to."sub-object"')""", write={ - "tsql": """SELECT "key", value FROM OPENJSON(@json, '$.path.to."sub-object"')""", + "tsql": """SELECT [key], value FROM OPENJSON(@json, '$.path.to."sub-object"')""", }, ) self.validate_all( @@ -1520,7 +1610,7 @@ FROM OPENJSON(@json) WITH ( Date DATETIME2 '$.Order.Date', Customer VARCHAR(200) '$.AccountNumber', Quantity INTEGER '$.Item.Quantity', - "Order" VARCHAR(MAX) AS JSON + [Order] VARCHAR(MAX) AS JSON )""" }, pretty=True, @@ -1547,19 +1637,19 @@ FROM OPENJSON(@json) WITH ( def test_qualify_derived_table_outputs(self): self.validate_identity( "WITH t AS (SELECT 1) SELECT * FROM t", - 'WITH t AS (SELECT 1 AS "1") SELECT * FROM t', + "WITH t AS (SELECT 1 AS [1]) SELECT * FROM t", ) self.validate_identity( 'WITH t AS (SELECT "c") SELECT * FROM t', - 'WITH t AS (SELECT "c" AS "c") SELECT * FROM t', + "WITH t AS (SELECT [c] AS [c]) SELECT * FROM t", ) self.validate_identity( "SELECT * FROM (SELECT 1) AS subq", - 'SELECT * FROM (SELECT 1 AS "1") AS subq', + "SELECT * FROM (SELECT 1 AS [1]) AS subq", ) self.validate_identity( 'SELECT * FROM (SELECT "c") AS subq', - 'SELECT * FROM (SELECT "c" AS "c") AS subq', + "SELECT * FROM (SELECT [c] AS [c]) AS subq", ) self.validate_all( -- cgit v1.2.3
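Note (not part of the patch): every validate_identity/validate_all case above drives sqlglot's public transpile API; validate_all checks one SQL string against several dialects, and validate_identity is a tsql-to-tsql round trip. A minimal sketch of that API, assuming sqlglot 20.9.0 is installed; the TOP/LIMIT query is illustrative, not taken from the diff:

    import sqlglot

    # Cross-dialect translation, as the write={...} entries assert.
    # transpile() returns a list with one string per input statement.
    print(sqlglot.transpile("SELECT TOP 1 * FROM t", read="tsql", write="duckdb")[0])
    # -> SELECT * FROM t LIMIT 1

    # Round trip within T-SQL, as validate_identity asserts
    # (the a..b default-schema form is tested in the patch itself).
    print(sqlglot.transpile("SELECT * FROM a..b", read="tsql", write="tsql")[0])
    # -> SELECT * FROM a..b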