author     Daniel Baumann <daniel.baumann@progress-linux.org> 2024-02-08 05:38:39 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org> 2024-02-08 05:38:39 +0000
commit     aedf35026379f52d7e2b4c1f957691410a758089 (patch)
tree       86540364259b66741173d2333387b78d6f9c31e2 /tests/dialects/test_dialect.py
parent     Adding upstream version 20.11.0. (diff)
Adding upstream version 21.0.1. (tag: upstream/21.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_dialect.py')
-rw-r--r--  tests/dialects/test_dialect.py | 211
1 file changed, 174 insertions(+), 37 deletions(-)
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index 22e7d49..fd9dbdb 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -378,6 +378,31 @@ class TestDialect(Validator):
read={"postgres": "INET '127.0.0.1/32'"},
)
+ def test_ddl(self):
+ self.validate_all(
+ "CREATE TABLE a LIKE b",
+ write={
+ "": "CREATE TABLE a LIKE b",
+ "bigquery": "CREATE TABLE a LIKE b",
+ "clickhouse": "CREATE TABLE a AS b",
+ "databricks": "CREATE TABLE a LIKE b",
+ "doris": "CREATE TABLE a LIKE b",
+ "drill": "CREATE TABLE a AS SELECT * FROM b LIMIT 0",
+ "duckdb": "CREATE TABLE a AS SELECT * FROM b LIMIT 0",
+ "hive": "CREATE TABLE a LIKE b",
+ "mysql": "CREATE TABLE a LIKE b",
+ "oracle": "CREATE TABLE a LIKE b",
+ "postgres": "CREATE TABLE a (LIKE b)",
+ "presto": "CREATE TABLE a (LIKE b)",
+ "redshift": "CREATE TABLE a (LIKE b)",
+ "snowflake": "CREATE TABLE a LIKE b",
+ "spark": "CREATE TABLE a LIKE b",
+ "sqlite": "CREATE TABLE a AS SELECT * FROM b LIMIT 0",
+ "trino": "CREATE TABLE a (LIKE b)",
+ "tsql": "SELECT TOP 0 * INTO a FROM b AS temp",
+ },
+ )
+
def test_heredoc_strings(self):
for dialect in ("clickhouse", "postgres", "redshift"):
# Invalid matching tag
@@ -1097,61 +1122,173 @@ class TestDialect(Validator):
def test_json(self):
self.validate_all(
- "JSON_EXTRACT(x, 'y')",
- read={
- "mysql": "JSON_EXTRACT(x, 'y')",
- "postgres": "x->'y'",
- "presto": "JSON_EXTRACT(x, 'y')",
- "starrocks": "x -> 'y'",
- "doris": "x -> 'y'",
- },
+ """JSON_EXTRACT(x, '$["a b"]')""",
write={
- "mysql": "JSON_EXTRACT(x, 'y')",
- "oracle": "JSON_EXTRACT(x, 'y')",
- "postgres": "x -> 'y'",
- "presto": "JSON_EXTRACT(x, 'y')",
- "starrocks": "x -> 'y'",
- "doris": "x -> 'y'",
+ "": """JSON_EXTRACT(x, '$["a b"]')""",
+ "bigquery": """JSON_EXTRACT(x, '$[\\'a b\\']')""",
+ "clickhouse": "JSONExtractString(x, 'a b')",
+ "duckdb": """x -> '$."a b"'""",
+ "mysql": """JSON_EXTRACT(x, '$."a b"')""",
+ "postgres": "JSON_EXTRACT_PATH(x, 'a b')",
+ "presto": """JSON_EXTRACT(x, '$["a b"]')""",
+ "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'a b')",
+ "snowflake": """GET_PATH(x, '["a b"]')""",
+ "spark": """GET_JSON_OBJECT(x, '$[\\'a b\\']')""",
+ "sqlite": """x -> '$."a b"'""",
+ "trino": """JSON_EXTRACT(x, '$["a b"]')""",
+ "tsql": """ISNULL(JSON_QUERY(x, '$."a b"'), JSON_VALUE(x, '$."a b"'))""",
},
)
self.validate_all(
- "JSON_EXTRACT_SCALAR(x, 'y')",
+ "JSON_EXTRACT(x, '$.y')",
read={
- "postgres": "x ->> 'y'",
- "presto": "JSON_EXTRACT_SCALAR(x, 'y')",
+ "bigquery": "JSON_EXTRACT(x, '$.y')",
+ "duckdb": "x -> 'y'",
+ "doris": "x -> '$.y'",
+ "mysql": "JSON_EXTRACT(x, '$.y')",
+ "postgres": "x->'y'",
+ "presto": "JSON_EXTRACT(x, '$.y')",
+ "snowflake": "GET_PATH(x, 'y')",
+ "sqlite": "x -> '$.y'",
+ "starrocks": "x -> '$.y'",
},
write={
- "postgres": "x ->> 'y'",
- "presto": "JSON_EXTRACT_SCALAR(x, 'y')",
+ "bigquery": "JSON_EXTRACT(x, '$.y')",
+ "clickhouse": "JSONExtractString(x, 'y')",
+ "doris": "x -> '$.y'",
+ "duckdb": "x -> '$.y'",
+ "mysql": "JSON_EXTRACT(x, '$.y')",
+ "oracle": "JSON_EXTRACT(x, '$.y')",
+ "postgres": "JSON_EXTRACT_PATH(x, 'y')",
+ "presto": "JSON_EXTRACT(x, '$.y')",
+ "snowflake": "GET_PATH(x, 'y')",
+ "spark": "GET_JSON_OBJECT(x, '$.y')",
+ "sqlite": "x -> '$.y'",
+ "starrocks": "x -> '$.y'",
+ "tsql": "ISNULL(JSON_QUERY(x, '$.y'), JSON_VALUE(x, '$.y'))",
},
)
self.validate_all(
- "JSON_EXTRACT_SCALAR(stream_data, '$.data.results')",
+ "JSON_EXTRACT_SCALAR(x, '$.y')",
read={
- "hive": "GET_JSON_OBJECT(stream_data, '$.data.results')",
- "mysql": "stream_data ->> '$.data.results'",
+ "bigquery": "JSON_EXTRACT_SCALAR(x, '$.y')",
+ "clickhouse": "JSONExtractString(x, 'y')",
+ "duckdb": "x ->> 'y'",
+ "postgres": "x ->> 'y'",
+ "presto": "JSON_EXTRACT_SCALAR(x, '$.y')",
+ "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+ "spark": "GET_JSON_OBJECT(x, '$.y')",
+ "snowflake": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+ "sqlite": "x ->> '$.y'",
},
write={
- "hive": "GET_JSON_OBJECT(stream_data, '$.data.results')",
- "mysql": "stream_data ->> '$.data.results'",
+ "bigquery": "JSON_EXTRACT_SCALAR(x, '$.y')",
+ "clickhouse": "JSONExtractString(x, 'y')",
+ "duckdb": "x ->> '$.y'",
+ "postgres": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+ "presto": "JSON_EXTRACT_SCALAR(x, '$.y')",
+ "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+ "snowflake": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+ "spark": "GET_JSON_OBJECT(x, '$.y')",
+ "sqlite": "x ->> '$.y'",
+ "tsql": "ISNULL(JSON_QUERY(x, '$.y'), JSON_VALUE(x, '$.y'))",
},
)
self.validate_all(
- "JSONB_EXTRACT(x, 'y')",
+ "JSON_EXTRACT(x, '$.y[0].z')",
read={
- "postgres": "x#>'y'",
- },
- write={
- "postgres": "x #> 'y'",
- },
- )
- self.validate_all(
- "JSONB_EXTRACT_SCALAR(x, 'y')",
+ "bigquery": "JSON_EXTRACT(x, '$.y[0].z')",
+ "duckdb": "x -> '$.y[0].z'",
+ "doris": "x -> '$.y[0].z'",
+ "mysql": "JSON_EXTRACT(x, '$.y[0].z')",
+ "presto": "JSON_EXTRACT(x, '$.y[0].z')",
+ "snowflake": "GET_PATH(x, 'y[0].z')",
+ "sqlite": "x -> '$.y[0].z'",
+ "starrocks": "x -> '$.y[0].z'",
+ },
+ write={
+ "bigquery": "JSON_EXTRACT(x, '$.y[0].z')",
+ "clickhouse": "JSONExtractString(x, 'y', 1, 'z')",
+ "doris": "x -> '$.y[0].z'",
+ "duckdb": "x -> '$.y[0].z'",
+ "mysql": "JSON_EXTRACT(x, '$.y[0].z')",
+ "oracle": "JSON_EXTRACT(x, '$.y[0].z')",
+ "postgres": "JSON_EXTRACT_PATH(x, 'y', '0', 'z')",
+ "presto": "JSON_EXTRACT(x, '$.y[0].z')",
+ "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y', '0', 'z')",
+ "snowflake": "GET_PATH(x, 'y[0].z')",
+ "spark": "GET_JSON_OBJECT(x, '$.y[0].z')",
+ "sqlite": "x -> '$.y[0].z'",
+ "starrocks": "x -> '$.y[0].z'",
+ "tsql": "ISNULL(JSON_QUERY(x, '$.y[0].z'), JSON_VALUE(x, '$.y[0].z'))",
+ },
+ )
+ self.validate_all(
+ "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
read={
- "postgres": "x#>>'y'",
- },
- write={
- "postgres": "x #>> 'y'",
+ "bigquery": "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
+ "clickhouse": "JSONExtractString(x, 'y', 1, 'z')",
+ "duckdb": "x ->> '$.y[0].z'",
+ "presto": "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
+ "snowflake": "JSON_EXTRACT_PATH_TEXT(x, 'y[0].z')",
+ "spark": 'GET_JSON_OBJECT(x, "$.y[0].z")',
+ "sqlite": "x ->> '$.y[0].z'",
+ },
+ write={
+ "bigquery": "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
+ "clickhouse": "JSONExtractString(x, 'y', 1, 'z')",
+ "duckdb": "x ->> '$.y[0].z'",
+ "postgres": "JSON_EXTRACT_PATH_TEXT(x, 'y', '0', 'z')",
+ "presto": "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
+ "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y', '0', 'z')",
+ "snowflake": "JSON_EXTRACT_PATH_TEXT(x, 'y[0].z')",
+ "spark": "GET_JSON_OBJECT(x, '$.y[0].z')",
+ "sqlite": "x ->> '$.y[0].z'",
+ "tsql": "ISNULL(JSON_QUERY(x, '$.y[0].z'), JSON_VALUE(x, '$.y[0].z'))",
+ },
+ )
+ self.validate_all(
+ "JSON_EXTRACT(x, '$.y[*]')",
+ write={
+ "bigquery": UnsupportedError,
+ "clickhouse": UnsupportedError,
+ "duckdb": "x -> '$.y[*]'",
+ "mysql": "JSON_EXTRACT(x, '$.y[*]')",
+ "postgres": UnsupportedError,
+ "presto": "JSON_EXTRACT(x, '$.y[*]')",
+ "redshift": UnsupportedError,
+ "snowflake": UnsupportedError,
+ "spark": "GET_JSON_OBJECT(x, '$.y[*]')",
+ "sqlite": UnsupportedError,
+ "tsql": UnsupportedError,
+ },
+ )
+ self.validate_all(
+ "JSON_EXTRACT(x, '$.y[*]')",
+ write={
+ "bigquery": "JSON_EXTRACT(x, '$.y')",
+ "clickhouse": "JSONExtractString(x, 'y')",
+ "postgres": "JSON_EXTRACT_PATH(x, 'y')",
+ "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+ "snowflake": "GET_PATH(x, 'y')",
+ "sqlite": "x -> '$.y'",
+ "tsql": "ISNULL(JSON_QUERY(x, '$.y'), JSON_VALUE(x, '$.y'))",
+ },
+ )
+ self.validate_all(
+ "JSON_EXTRACT(x, '$.y.*')",
+ write={
+ "bigquery": UnsupportedError,
+ "clickhouse": UnsupportedError,
+ "duckdb": "x -> '$.y.*'",
+ "mysql": "JSON_EXTRACT(x, '$.y.*')",
+ "postgres": UnsupportedError,
+ "presto": "JSON_EXTRACT(x, '$.y.*')",
+ "redshift": UnsupportedError,
+ "snowflake": UnsupportedError,
+ "spark": UnsupportedError,
+ "sqlite": UnsupportedError,
+ "tsql": UnsupportedError,
},
)
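
The expected strings above can be reproduced outside the test suite with sqlglot's public transpile() API. The sketch below is not part of the patch; its commented outputs are copied verbatim from the expected values in the new test cases. Note also that for the wildcard paths ('$.y[*]', '$.y.*') the tests pair an UnsupportedError expectation with the lossy fallback each dialect emits when generation is allowed to degrade.

# Minimal sketch (not part of the patch): bare input strings are parsed
# with the default ("") dialect, matching what validate_all does above.
import sqlglot

# DDL: CREATE TABLE ... LIKE has no direct DuckDB form, so it is rewritten
# as a zero-row CTAS, per the expected value in test_ddl.
print(sqlglot.transpile("CREATE TABLE a LIKE b", write="duckdb")[0])
# CREATE TABLE a AS SELECT * FROM b LIMIT 0

# JSON: the '$.y' JSONPath is mapped onto each target's native syntax.
print(sqlglot.transpile("JSON_EXTRACT(x, '$.y')", write="postgres")[0])
# JSON_EXTRACT_PATH(x, 'y')
print(sqlglot.transpile("JSON_EXTRACT_SCALAR(x, '$.y')", write="tsql")[0])
# ISNULL(JSON_QUERY(x, '$.y'), JSON_VALUE(x, '$.y'))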