author    | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-01-23 05:06:10 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-01-23 05:06:10 +0000
commit    | 258c7df9cab21a4978c100568907ac1cb7fd6ee0 (patch)
tree      | a98c4c9fc7433833be72543de5d99d15b9927442 /tests/dialects/test_dialect.py
parent    | Adding upstream version 20.4.0. (diff)
download  | sqlglot-258c7df9cab21a4978c100568907ac1cb7fd6ee0.tar.xz
          | sqlglot-258c7df9cab21a4978c100568907ac1cb7fd6ee0.zip
Adding upstream version 20.9.0. (upstream/20.9.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_dialect.py')
-rw-r--r-- | tests/dialects/test_dialect.py | 95
1 file changed, 65 insertions, 30 deletions
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index a49d067..3cf4ddc 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -104,6 +104,16 @@ class TestDialect(Validator):
             "Please use the correct format: 'dialect [, k1 = v2 [, ...]]'.",
         )

+        with self.assertRaises(ValueError) as cm:
+            Dialect.get_or_raise("myqsl")
+
+        self.assertEqual(str(cm.exception), "Unknown dialect 'myqsl'. Did you mean mysql?")
+
+        with self.assertRaises(ValueError) as cm:
+            Dialect.get_or_raise("asdfjasodiufjsd")
+
+        self.assertEqual(str(cm.exception), "Unknown dialect 'asdfjasodiufjsd'.")
+
     def test_compare_dialects(self):
         bigquery_class = Dialect["bigquery"]
         bigquery_object = BigQuery()
@@ -767,32 +777,32 @@ class TestDialect(Validator):
                 "duckdb": "x + INTERVAL 1 DAY",
                 "hive": "DATE_ADD(x, 1)",
                 "mysql": "DATE_ADD(x, INTERVAL 1 DAY)",
-                "presto": "DATE_ADD('day', 1, x)",
+                "presto": "DATE_ADD('DAY', 1, x)",
                 "spark": "DATE_ADD(x, 1)",
                 "starrocks": "DATE_ADD(x, INTERVAL 1 DAY)",
                 "doris": "DATE_ADD(x, INTERVAL 1 DAY)",
             },
         )
         self.validate_all(
-            "DATE_TRUNC('day', x)",
+            "DATE_TRUNC('DAY', x)",
             read={
                 "bigquery": "DATE_TRUNC(x, day)",
                 "spark": "TRUNC(x, 'day')",
             },
             write={
-                "bigquery": "DATE_TRUNC(x, day)",
-                "duckdb": "DATE_TRUNC('day', x)",
+                "bigquery": "DATE_TRUNC(x, DAY)",
+                "duckdb": "DATE_TRUNC('DAY', x)",
                 "mysql": "DATE(x)",
-                "presto": "DATE_TRUNC('day', x)",
-                "postgres": "DATE_TRUNC('day', x)",
-                "snowflake": "DATE_TRUNC('day', x)",
-                "starrocks": "DATE_TRUNC('day', x)",
-                "spark": "TRUNC(x, 'day')",
-                "doris": "DATE_TRUNC(x, 'day')",
+                "presto": "DATE_TRUNC('DAY', x)",
+                "postgres": "DATE_TRUNC('DAY', x)",
+                "snowflake": "DATE_TRUNC('DAY', x)",
+                "starrocks": "DATE_TRUNC('DAY', x)",
+                "spark": "TRUNC(x, 'DAY')",
+                "doris": "DATE_TRUNC(x, 'DAY')",
             },
         )
         self.validate_all(
-            "TIMESTAMP_TRUNC(x, day)",
+            "TIMESTAMP_TRUNC(x, DAY)",
             read={
                 "bigquery": "TIMESTAMP_TRUNC(x, day)",
                 "duckdb": "DATE_TRUNC('day', x)",
@@ -805,18 +815,18 @@ class TestDialect(Validator):
             },
         )
         self.validate_all(
-            "DATE_TRUNC('day', CAST(x AS DATE))",
+            "DATE_TRUNC('DAY', CAST(x AS DATE))",
             read={
-                "presto": "DATE_TRUNC('day', x::DATE)",
-                "snowflake": "DATE_TRUNC('day', x::DATE)",
+                "presto": "DATE_TRUNC('DAY', x::DATE)",
+                "snowflake": "DATE_TRUNC('DAY', x::DATE)",
             },
         )
         self.validate_all(
-            "TIMESTAMP_TRUNC(CAST(x AS DATE), day)",
+            "TIMESTAMP_TRUNC(CAST(x AS DATE), DAY)",
             read={"postgres": "DATE_TRUNC('day', x::DATE)"},
         )
         self.validate_all(
-            "TIMESTAMP_TRUNC(CAST(x AS DATE), day)",
+            "TIMESTAMP_TRUNC(CAST(x AS DATE), DAY)",
             read={"starrocks": "DATE_TRUNC('day', x::DATE)"},
         )
         self.validate_all(
@@ -850,23 +860,23 @@ class TestDialect(Validator):
             },
         )
         self.validate_all(
-            "DATE_TRUNC('year', x)",
+            "DATE_TRUNC('YEAR', x)",
             read={
                 "bigquery": "DATE_TRUNC(x, year)",
                 "spark": "TRUNC(x, 'year')",
             },
             write={
-                "bigquery": "DATE_TRUNC(x, year)",
+                "bigquery": "DATE_TRUNC(x, YEAR)",
                 "mysql": "STR_TO_DATE(CONCAT(YEAR(x), ' 1 1'), '%Y %c %e')",
-                "postgres": "DATE_TRUNC('year', x)",
-                "snowflake": "DATE_TRUNC('year', x)",
-                "starrocks": "DATE_TRUNC('year', x)",
-                "spark": "TRUNC(x, 'year')",
-                "doris": "DATE_TRUNC(x, 'year')",
+                "postgres": "DATE_TRUNC('YEAR', x)",
+                "snowflake": "DATE_TRUNC('YEAR', x)",
+                "starrocks": "DATE_TRUNC('YEAR', x)",
+                "spark": "TRUNC(x, 'YEAR')",
+                "doris": "DATE_TRUNC(x, 'YEAR')",
             },
         )
         self.validate_all(
-            "TIMESTAMP_TRUNC(x, year)",
+            "TIMESTAMP_TRUNC(x, YEAR)",
             read={
                 "bigquery": "TIMESTAMP_TRUNC(x, year)",
                 "postgres": "DATE_TRUNC(year, x)",
@@ -875,9 +885,9 @@ class TestDialect(Validator):
                 "starrocks": "DATE_TRUNC('year', x)",
             },
             write={
-                "bigquery": "TIMESTAMP_TRUNC(x, year)",
-                "spark": "DATE_TRUNC('year', x)",
-                "doris": "DATE_TRUNC(x, 'year')",
+                "bigquery": "TIMESTAMP_TRUNC(x, YEAR)",
+                "spark": "DATE_TRUNC('YEAR', x)",
+                "doris": "DATE_TRUNC(x, 'YEAR')",
             },
         )
         self.validate_all(
@@ -952,7 +962,7 @@ class TestDialect(Validator):
                 "drill": "DATE_ADD(CAST('2020-01-01' AS DATE), INTERVAL 1 DAY)",
                 "duckdb": "CAST('2020-01-01' AS DATE) + INTERVAL 1 DAY",
                 "hive": "DATE_ADD(CAST('2020-01-01' AS DATE), 1)",
-                "presto": "DATE_ADD('day', 1, CAST('2020-01-01' AS DATE))",
+                "presto": "DATE_ADD('DAY', 1, CAST('2020-01-01' AS DATE))",
                 "spark": "DATE_ADD(CAST('2020-01-01' AS DATE), 1)",
             },
         )
@@ -1003,7 +1013,7 @@ class TestDialect(Validator):
             )
             self.validate_all(
                 f"{unit}(TS_OR_DS_TO_DATE(x))",
-                read={
+                write={
                     dialect: f"{unit}(x)"
                     for dialect in (
                         "mysql",
@@ -1011,7 +1021,10 @@ class TestDialect(Validator):
                         "starrocks",
                     )
                 },
-                write={
+            )
+            self.validate_all(
+                f"{unit}(CAST(x AS DATE))",
+                read={
                     dialect: f"{unit}(x)"
                     for dialect in (
                         "mysql",
@@ -1671,6 +1684,28 @@ class TestDialect(Validator):

     def test_alias(self):
         self.validate_all(
+            'SELECT 1 AS "foo"',
+            read={
+                "mysql": "SELECT 1 'foo'",
+                "sqlite": "SELECT 1 'foo'",
+                "tsql": "SELECT 1 'foo'",
+            },
+        )
+
+        for dialect in (
+            "presto",
+            "hive",
+            "postgres",
+            "clickhouse",
+            "bigquery",
+            "snowflake",
+            "duckdb",
+        ):
+            with self.subTest(f"string alias: {dialect}"):
+                with self.assertRaises(ParseError):
+                    parse_one("SELECT 1 'foo'", dialect=dialect)
+
+        self.validate_all(
             "SELECT a AS b FROM x GROUP BY b",
             write={
                 "drill": "SELECT a AS b FROM x GROUP BY b",
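The updated tests pin down three behaviors of this sqlglot version: Dialect.get_or_raise rejects unknown dialect names with a "Did you mean ...?" suggestion for close misspellings, single-quoted string aliases such as SELECT 1 'foo' parse only under MySQL, SQLite, and T-SQL, and date/time truncation units are generated in upper case. A minimal sketch of how this looks outside the test suite (assumes sqlglot 20.9.0 or later is installed; the inputs and expected strings are taken from the tests above, and the postgres-to-presto transpile pairing is an assumption composed from the read/write expectations):

    import sqlglot
    from sqlglot import parse_one
    from sqlglot.dialects.dialect import Dialect
    from sqlglot.errors import ParseError

    # Unknown dialect names raise ValueError; close misspellings get a suggestion.
    try:
        Dialect.get_or_raise("myqsl")
    except ValueError as e:
        print(e)  # Unknown dialect 'myqsl'. Did you mean mysql?

    # String aliases are accepted only by MySQL, SQLite, and T-SQL readers.
    print(parse_one("SELECT 1 'foo'", dialect="mysql").sql())  # SELECT 1 AS "foo"
    try:
        parse_one("SELECT 1 'foo'", dialect="postgres")
    except ParseError:
        print("postgres rejects string aliases")

    # Truncation units now come out upper-cased, per the updated expectations.
    print(sqlglot.transpile("DATE_TRUNC('day', x)", read="postgres", write="presto")[0])
    # DATE_TRUNC('DAY', x)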