author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-02-08 05:38:39 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-02-08 05:38:39 +0000
commit     aedf35026379f52d7e2b4c1f957691410a758089 (patch)
tree       86540364259b66741173d2333387b78d6f9c31e2 /tests/dialects/test_tsql.py
parent     Adding upstream version 20.11.0. (diff)
download   sqlglot-aedf35026379f52d7e2b4c1f957691410a758089.tar.xz
           sqlglot-aedf35026379f52d7e2b4c1f957691410a758089.zip

Adding upstream version 21.0.1. (tag: upstream/21.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_tsql.py')
-rw-r--r--  tests/dialects/test_tsql.py | 32 ++++++++++++++++++++++++++------
1 file changed, 26 insertions(+), 6 deletions(-)
diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index 101d356..c8c0d82 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -886,7 +886,7 @@ WHERE
             "END",
         ]
 
-        with self.assertLogs(parser_logger) as cm:
+        with self.assertLogs(parser_logger):
             for expr, expected_sql in zip(parse(sql, read="tsql"), expected_sqls):
                 self.assertEqual(expr.sql(dialect="tsql"), expected_sql)
@@ -907,7 +907,7 @@ WHERE
             "CREATE TABLE [target_schema].[target_table] (a INTEGER) WITH (DISTRIBUTION=REPLICATE, HEAP)",
         ]
 
-        with self.assertLogs(parser_logger) as cm:
+        with self.assertLogs(parser_logger):
             for expr, expected_sql in zip(parse(sql, read="tsql"), expected_sqls):
                 self.assertEqual(expr.sql(dialect="tsql"), expected_sql)
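Both hunks drop the unused `as cm` binding: the tests only assert that the parser logs a warning while parsing these batches and never inspect the captured records. A minimal sketch of the same parse-and-round-trip pattern, using a hypothetical two-statement batch in place of the file's real `sql`/`expected_sqls` fixtures:

    from sqlglot import parse

    # Hypothetical stand-in batch; the real fixtures are defined earlier in the test.
    sql = "SELECT 1; SELECT 2"
    expected_sqls = ["SELECT 1", "SELECT 2"]

    # parse() splits the batch into one expression per statement; each is then
    # rendered back to T-SQL and compared against the expected text.
    for expr, expected_sql in zip(parse(sql, read="tsql"), expected_sqls):
        assert expr.sql(dialect="tsql") == expected_sql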
@@ -941,6 +941,16 @@ WHERE
         self.validate_all(
             "LEN(x)", read={"": "LENGTH(x)"}, write={"spark": "LENGTH(CAST(x AS STRING))"}
         )
+        self.validate_all(
+            "RIGHT(x, 1)",
+            read={"": "RIGHT(CAST(x AS STRING), 1)"},
+            write={"spark": "RIGHT(CAST(x AS STRING), 1)"},
+        )
+        self.validate_all(
+            "LEFT(x, 1)",
+            read={"": "LEFT(CAST(x AS STRING), 1)"},
+            write={"spark": "LEFT(CAST(x AS STRING), 1)"},
+        )
         self.validate_all("LEN(1)", write={"tsql": "LEN(1)", "spark": "LENGTH(CAST(1 AS STRING))"})
         self.validate_all("LEN('x')", write={"tsql": "LEN('x')", "spark": "LENGTH('x')"})
@@ -950,10 +960,20 @@ WHERE
     def test_isnull(self):
         self.validate_all("ISNULL(x, y)", write={"spark": "COALESCE(x, y)"})
 
-    def test_jsonvalue(self):
+    def test_json(self):
+        self.validate_all(
+            "JSON_QUERY(r.JSON, '$.Attr_INT')",
+            write={
+                "spark": "GET_JSON_OBJECT(r.JSON, '$.Attr_INT')",
+                "tsql": "ISNULL(JSON_QUERY(r.JSON, '$.Attr_INT'), JSON_VALUE(r.JSON, '$.Attr_INT'))",
+            },
+        )
         self.validate_all(
             "JSON_VALUE(r.JSON, '$.Attr_INT')",
-            write={"spark": "GET_JSON_OBJECT(r.JSON, '$.Attr_INT')"},
+            write={
+                "spark": "GET_JSON_OBJECT(r.JSON, '$.Attr_INT')",
+                "tsql": "ISNULL(JSON_QUERY(r.JSON, '$.Attr_INT'), JSON_VALUE(r.JSON, '$.Attr_INT'))",
+            },
         )
 
     def test_datefromparts(self):
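The renamed test now covers both JSON extraction functions. A sketch of the behavior it asserts, again via transpile, with the expected outputs copied from the hunk above:

    from sqlglot import transpile

    sql = "JSON_VALUE(r.JSON, '$.Attr_INT')"

    # Spark uses a single function for both scalar and object JSON extraction.
    assert transpile(sql, read="tsql", write="spark") == ["GET_JSON_OBJECT(r.JSON, '$.Attr_INT')"]

    # The T-SQL round trip now emits ISNULL(JSON_QUERY(...), JSON_VALUE(...)),
    # covering both object and scalar values at the given path.
    assert transpile(sql, read="tsql", write="tsql") == [
        "ISNULL(JSON_QUERY(r.JSON, '$.Attr_INT'), JSON_VALUE(r.JSON, '$.Attr_INT'))"
    ]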
@@ -1438,7 +1458,7 @@ WHERE
                 "mysql": "LAST_DAY(DATE(CURRENT_TIMESTAMP()))",
                 "postgres": "CAST(DATE_TRUNC('MONTH', CAST(CURRENT_TIMESTAMP AS DATE)) + INTERVAL '1 MONTH' - INTERVAL '1 DAY' AS DATE)",
                 "presto": "LAST_DAY_OF_MONTH(CAST(CAST(CURRENT_TIMESTAMP AS TIMESTAMP) AS DATE))",
-                "redshift": "LAST_DAY(CAST(SYSDATE AS DATE))",
+                "redshift": "LAST_DAY(CAST(GETDATE() AS DATE))",
                 "snowflake": "LAST_DAY(CAST(CURRENT_TIMESTAMP() AS DATE))",
                 "spark": "LAST_DAY(TO_DATE(CURRENT_TIMESTAMP()))",
                 "tsql": "EOMONTH(CAST(GETDATE() AS DATE))",
@@ -1453,7 +1473,7 @@ WHERE
                 "mysql": "LAST_DAY(DATE_ADD(CURRENT_TIMESTAMP(), INTERVAL -1 MONTH))",
                 "postgres": "CAST(DATE_TRUNC('MONTH', CAST(CURRENT_TIMESTAMP AS DATE) + INTERVAL '-1 MONTH') + INTERVAL '1 MONTH' - INTERVAL '1 DAY' AS DATE)",
                 "presto": "LAST_DAY_OF_MONTH(DATE_ADD('MONTH', CAST(-1 AS BIGINT), CAST(CAST(CURRENT_TIMESTAMP AS TIMESTAMP) AS DATE)))",
-                "redshift": "LAST_DAY(DATEADD(MONTH, -1, CAST(SYSDATE AS DATE)))",
+                "redshift": "LAST_DAY(DATEADD(MONTH, -1, CAST(GETDATE() AS DATE)))",
                 "snowflake": "LAST_DAY(DATEADD(MONTH, -1, CAST(CURRENT_TIMESTAMP() AS DATE)))",
                 "spark": "LAST_DAY(ADD_MONTHS(TO_DATE(CURRENT_TIMESTAMP()), -1))",
                 "tsql": "EOMONTH(DATEADD(MONTH, -1, CAST(GETDATE() AS DATE)))",