author      Daniel Baumann <daniel.baumann@progress-linux.org>   2022-10-25 16:01:43 +0000
committer   Daniel Baumann <daniel.baumann@progress-linux.org>   2022-10-25 16:01:43 +0000
commit      29f9a15cce138301cd5a84a1fd4060494a3a65b6
tree        c593be2f0b0fdc60a43983aa547b34a441170e59 /tests/dialects/test_tsql.py
parent      Adding upstream version 9.0.1.
Adding upstream version 9.0.3.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_tsql.py')
 tests/dialects/test_tsql.py | 223 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 223 insertions(+), 0 deletions(-)
diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index 9a6bc36..2a20163 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -71,3 +71,226 @@ class TestTSQL(Validator):
                 "spark": "LOCATE('sub', 'testsubstring')",
             },
         )
+
+    def test_len(self):
+        self.validate_all("LEN(x)", write={"spark": "LENGTH(x)"})
+
+    def test_replicate(self):
+        self.validate_all("REPLICATE('x', 2)", write={"spark": "REPEAT('x', 2)"})
+
+    def test_isnull(self):
+        self.validate_all("ISNULL(x, y)", write={"spark": "COALESCE(x, y)"})
+
+    def test_jsonvalue(self):
+        self.validate_all(
+            "JSON_VALUE(r.JSON, '$.Attr_INT')",
+            write={"spark": "GET_JSON_OBJECT(r.JSON, '$.Attr_INT')"},
+        )
+
+    def test_datefromparts(self):
+        self.validate_all(
+            "SELECT DATEFROMPARTS('2020', 10, 01)",
+            write={"spark": "SELECT MAKE_DATE('2020', 10, 01)"},
+        )
+
+    def test_datename(self):
+        self.validate_all(
+            "SELECT DATENAME(mm,'01-01-1970')",
+            write={"spark": "SELECT DATE_FORMAT('01-01-1970', 'MMMM')"},
+        )
+        self.validate_all(
+            "SELECT DATENAME(dw,'01-01-1970')",
+            write={"spark": "SELECT DATE_FORMAT('01-01-1970', 'EEEE')"},
+        )
+
+    def test_datepart(self):
+        self.validate_all(
+            "SELECT DATEPART(month,'01-01-1970')",
+            write={"spark": "SELECT DATE_FORMAT('01-01-1970', 'MM')"},
+        )
+
+    def test_convert_date_format(self):
+        self.validate_all(
+            "CONVERT(NVARCHAR(200), x)",
+            write={
+                "spark": "CAST(x AS VARCHAR(200))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(NVARCHAR, x)",
+            write={
+                "spark": "CAST(x AS VARCHAR(30))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(NVARCHAR(MAX), x)",
+            write={
+                "spark": "CAST(x AS STRING)",
+            },
+        )
+        self.validate_all(
+            "CONVERT(VARCHAR(200), x)",
+            write={
+                "spark": "CAST(x AS VARCHAR(200))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(VARCHAR, x)",
+            write={
+                "spark": "CAST(x AS VARCHAR(30))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(VARCHAR(MAX), x)",
+            write={
+                "spark": "CAST(x AS STRING)",
+            },
+        )
+        self.validate_all(
+            "CONVERT(CHAR(40), x)",
+            write={
+                "spark": "CAST(x AS CHAR(40))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(CHAR, x)",
+            write={
+                "spark": "CAST(x AS CHAR(30))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(NCHAR(40), x)",
+            write={
+                "spark": "CAST(x AS CHAR(40))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(NCHAR, x)",
+            write={
+                "spark": "CAST(x AS CHAR(30))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(VARCHAR, x, 121)",
+            write={
+                "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(VARCHAR(40), x, 121)",
+            write={
+                "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(40))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(VARCHAR(MAX), x, 121)",
+            write={
+                "spark": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+            },
+        )
+        self.validate_all(
+            "CONVERT(NVARCHAR, x, 121)",
+            write={
+                "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(NVARCHAR(40), x, 121)",
+            write={
+                "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(40))",
+            },
+        )
+        self.validate_all(
+            "CONVERT(NVARCHAR(MAX), x, 121)",
+            write={
+                "spark": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+            },
+        )
+        self.validate_all(
+            "CONVERT(DATE, x, 121)",
+            write={
+                "spark": "TO_DATE(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+            },
+        )
+        self.validate_all(
+            "CONVERT(DATETIME, x, 121)",
+            write={
+                "spark": "TO_TIMESTAMP(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+            },
+        )
+        self.validate_all(
+            "CONVERT(DATETIME2, x, 121)",
+            write={
+                "spark": "TO_TIMESTAMP(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
+            },
+        )
+        self.validate_all(
+            "CONVERT(INT, x)",
+            write={
+                "spark": "CAST(x AS INT)",
+            },
+        )
+        self.validate_all(
+            "CONVERT(INT, x, 121)",
+            write={
+                "spark": "CAST(x AS INT)",
+            },
+        )
+        self.validate_all(
+            "TRY_CONVERT(NVARCHAR, x, 121)",
+            write={
+                "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
+            },
+        )
+        self.validate_all(
+            "TRY_CONVERT(INT, x)",
+            write={
+                "spark": "CAST(x AS INT)",
+            },
+        )
+        self.validate_all(
+            "TRY_CAST(x AS INT)",
+            write={
+                "spark": "CAST(x AS INT)",
+            },
+        )
+        self.validate_all(
+            "CAST(x AS INT)",
+            write={
+                "spark": "CAST(x AS INT)",
+            },
+        )
+
+    def test_add_date(self):
+        self.validate_identity("SELECT DATEADD(year, 1, '2017/08/25')")
+        self.validate_all(
+            "SELECT DATEADD(year, 1, '2017/08/25')", write={"spark": "SELECT ADD_MONTHS('2017/08/25', 12)"}
+        )
+        self.validate_all("SELECT DATEADD(qq, 1, '2017/08/25')", write={"spark": "SELECT ADD_MONTHS('2017/08/25', 3)"})
+        self.validate_all("SELECT DATEADD(wk, 1, '2017/08/25')", write={"spark": "SELECT DATE_ADD('2017/08/25', 7)"})
+
+    def test_date_diff(self):
+        self.validate_identity("SELECT DATEDIFF(year, '2020/01/01', '2021/01/01')")
+        self.validate_all(
+            "SELECT DATEDIFF(year, '2020/01/01', '2021/01/01')",
+            write={
+                "tsql": "SELECT DATEDIFF(year, '2020/01/01', '2021/01/01')",
+                "spark": "SELECT MONTHS_BETWEEN('2021/01/01', '2020/01/01') / 12",
+            },
+        )
+        self.validate_all(
+            "SELECT DATEDIFF(month, 'start','end')",
+            write={"spark": "SELECT MONTHS_BETWEEN('end', 'start')", "tsql": "SELECT DATEDIFF(month, 'start', 'end')"},
+        )
+        self.validate_all(
+            "SELECT DATEDIFF(quarter, 'start', 'end')", write={"spark": "SELECT MONTHS_BETWEEN('end', 'start') / 3"}
+        )
+
+    def test_iif(self):
+        self.validate_identity("SELECT IIF(cond, 'True', 'False')")
+        self.validate_all(
+            "SELECT IIF(cond, 'True', 'False');",
+            write={
+                "spark": "SELECT IF(cond, 'True', 'False')",
+            },
+        )