author    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-09-30 04:54:49 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-09-30 04:54:49 +0000
commit    3baf961bf84d7a67c838d6bc09cc5f0fe90a35b4 (patch)
tree      dc974c69ee45867df73e46342261aa658eb50392 /tests/dialects
parent    Releasing debian version 25.21.3-1. (diff)
Merging upstream version 25.24.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects')
-rw-r--r--  tests/dialects/test_athena.py      60
-rw-r--r--  tests/dialects/test_bigquery.py    27
-rw-r--r--  tests/dialects/test_clickhouse.py  20
-rw-r--r--  tests/dialects/test_databricks.py   6
-rw-r--r--  tests/dialects/test_dialect.py      4
-rw-r--r--  tests/dialects/test_duckdb.py      31
-rw-r--r--  tests/dialects/test_mysql.py       15
-rw-r--r--  tests/dialects/test_oracle.py      18
-rw-r--r--  tests/dialects/test_postgres.py     5
-rw-r--r--  tests/dialects/test_presto.py      21
-rw-r--r--  tests/dialects/test_redshift.py    33
-rw-r--r--  tests/dialects/test_snowflake.py   19
-rw-r--r--  tests/dialects/test_starrocks.py    6
-rw-r--r--  tests/dialects/test_tsql.py         7
14 files changed, 255 insertions, 17 deletions
diff --git a/tests/dialects/test_athena.py b/tests/dialects/test_athena.py
index bf54914..ca91d4a 100644
--- a/tests/dialects/test_athena.py
+++ b/tests/dialects/test_athena.py
@@ -1,3 +1,4 @@
+from sqlglot import exp
from tests.dialects.test_dialect import Validator
@@ -68,6 +69,23 @@ class TestAthena(Validator):
"CREATE TABLE foo AS WITH foo AS (SELECT a, b FROM bar) SELECT * FROM foo"
)
+ # ALTER TABLE ADD COLUMN is not supported; it needs to be generated as ALTER TABLE ADD COLUMNS
+ self.validate_identity(
+ "ALTER TABLE `foo`.`bar` ADD COLUMN `end_ts` BIGINT",
+ write_sql="ALTER TABLE `foo`.`bar` ADD COLUMNS (`end_ts` BIGINT)",
+ )
+
+ def test_dml(self):
+ self.validate_all(
+ "SELECT CAST(ds AS VARCHAR) AS ds FROM (VALUES ('2022-01-01')) AS t(ds)",
+ read={"": "SELECT CAST(ds AS STRING) AS ds FROM (VALUES ('2022-01-01')) AS t(ds)"},
+ write={
+ "hive": "SELECT CAST(ds AS STRING) AS ds FROM (VALUES ('2022-01-01')) AS t(ds)",
+ "trino": "SELECT CAST(ds AS VARCHAR) AS ds FROM (VALUES ('2022-01-01')) AS t(ds)",
+ "athena": "SELECT CAST(ds AS VARCHAR) AS ds FROM (VALUES ('2022-01-01')) AS t(ds)",
+ },
+ )
+
def test_ddl_quoting(self):
self.validate_identity("CREATE SCHEMA `foo`")
self.validate_identity("CREATE SCHEMA foo")
@@ -111,6 +129,10 @@ class TestAthena(Validator):
'CREATE VIEW `foo` AS SELECT "id" FROM `tbl`',
write_sql='CREATE VIEW "foo" AS SELECT "id" FROM "tbl"',
)
+ self.validate_identity(
+ "DROP VIEW IF EXISTS `foo`.`bar`",
+ write_sql='DROP VIEW IF EXISTS "foo"."bar"',
+ )
self.validate_identity(
'ALTER TABLE "foo" ADD COLUMNS ("id" STRING)',
@@ -128,6 +150,8 @@ class TestAthena(Validator):
write_sql='CREATE TABLE "foo" AS WITH "foo" AS (SELECT "a", "b" FROM "bar") SELECT * FROM "foo"',
)
+ self.validate_identity("DESCRIBE foo.bar", write_sql="DESCRIBE `foo`.`bar`", identify=True)
+
def test_dml_quoting(self):
self.validate_identity("SELECT a AS foo FROM tbl")
self.validate_identity('SELECT "a" AS "foo" FROM "tbl"')
@@ -167,3 +191,39 @@ class TestAthena(Validator):
write_sql='WITH "foo" AS (SELECT "a", "b" FROM "bar") SELECT * FROM "foo"',
identify=True,
)
+
+ def test_ctas(self):
+ # Hive tables use 'external_location' to specify the table location; Iceberg tables use 'location'
+ # The 'table_type' property determines whether it's a Hive or an Iceberg table
+ # ref: https://docs.aws.amazon.com/athena/latest/ug/create-table-as.html#ctas-table-properties
+ ctas_hive = exp.Create(
+ this=exp.to_table("foo.bar"),
+ kind="TABLE",
+ properties=exp.Properties(
+ expressions=[
+ exp.FileFormatProperty(this=exp.Literal.string("parquet")),
+ exp.LocationProperty(this=exp.Literal.string("s3://foo")),
+ ]
+ ),
+ expression=exp.select("1"),
+ )
+ self.assertEqual(
+ ctas_hive.sql(dialect=self.dialect, identify=True),
+ "CREATE TABLE \"foo\".\"bar\" WITH (format='parquet', external_location='s3://foo') AS SELECT 1",
+ )
+
+ ctas_iceberg = exp.Create(
+ this=exp.to_table("foo.bar"),
+ kind="TABLE",
+ properties=exp.Properties(
+ expressions=[
+ exp.Property(this=exp.var("table_type"), value=exp.Literal.string("iceberg")),
+ exp.LocationProperty(this=exp.Literal.string("s3://foo")),
+ ]
+ ),
+ expression=exp.select("1"),
+ )
+ self.assertEqual(
+ ctas_iceberg.sql(dialect=self.dialect, identify=True),
+ "CREATE TABLE \"foo\".\"bar\" WITH (table_type='iceberg', location='s3://foo') AS SELECT 1",
+ )
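
The Athena hunks above pin down two generator behaviors: ALTER TABLE ADD COLUMN is regenerated in Athena's ADD COLUMNS form, and CTAS location properties diverge between Hive and Iceberg tables (external_location vs. location, keyed off table_type). A minimal sketch of the same assertions through sqlglot's public API, assuming the upstream 25.24.0 behavior these tests encode:

import sqlglot
from sqlglot import exp

# ADD COLUMN is rewritten to Athena's ADD COLUMNS syntax
print(sqlglot.transpile("ALTER TABLE `foo`.`bar` ADD COLUMN `end_ts` BIGINT", read="athena", write="athena")[0])
# ALTER TABLE `foo`.`bar` ADD COLUMNS (`end_ts` BIGINT)

# a FileFormatProperty marks the CTAS as a Hive table, so the location
# property is emitted as 'external_location' rather than Iceberg's 'location'
ctas = exp.Create(
    this=exp.to_table("foo.bar"),
    kind="TABLE",
    properties=exp.Properties(
        expressions=[
            exp.FileFormatProperty(this=exp.Literal.string("parquet")),
            exp.LocationProperty(this=exp.Literal.string("s3://foo")),
        ]
    ),
    expression=exp.select("1"),
)
print(ctas.sql(dialect="athena", identify=True))
# CREATE TABLE "foo"."bar" WITH (format='parquet', external_location='s3://foo') AS SELECT 1
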
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index b883896..e2adfea 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -22,6 +22,8 @@ class TestBigQuery(Validator):
maxDiff = None
def test_bigquery(self):
+ self.validate_identity("REGEXP_EXTRACT(x, '(?<)')")
+
self.validate_all(
"EXTRACT(HOUR FROM DATETIME(2008, 12, 25, 15, 30, 00))",
write={
@@ -1502,6 +1504,8 @@ WHERE
},
)
+ self.validate_identity("SELECT * FROM a-b c", "SELECT * FROM a-b AS c")
+
def test_errors(self):
with self.assertRaises(TokenError):
transpile("'\\'", read="bigquery")
@@ -1958,3 +1962,26 @@ OPTIONS (
"duckdb": "WITH Races AS (SELECT '800M' AS race) SELECT race, participant FROM Races AS r CROSS JOIN (SELECT UNNEST([{'name': 'Rudisha', 'laps': [23.4, 26.3, 26.4, 26.1]}], max_depth => 2)) AS participant",
},
)
+
+ def test_range_type(self):
+ for type, value in (
+ ("RANGE<DATE>", "'[2020-01-01, 2020-12-31)'"),
+ ("RANGE<DATE>", "'[UNBOUNDED, 2020-12-31)'"),
+ ("RANGE<DATETIME>", "'[2020-01-01 12:00:00, 2020-12-31 12:00:00)'"),
+ ("RANGE<TIMESTAMP>", "'[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)'"),
+ ):
+ with self.subTest(f"Testing BigQuery's RANGE<T> type: {type} {value}"):
+ self.validate_identity(f"SELECT {type} {value}", f"SELECT CAST({value} AS {type})")
+
+ self.assertEqual(self.parse_one(type), exp.DataType.build(type, dialect="bigquery"))
+
+ self.validate_identity(
+ "SELECT RANGE(CAST('2022-12-01' AS DATE), CAST('2022-12-31' AS DATE))"
+ )
+ self.validate_identity("SELECT RANGE(NULL, CAST('2022-12-31' AS DATE))")
+ self.validate_identity(
+ "SELECT RANGE(CAST('2022-10-01 14:53:27' AS DATETIME), CAST('2022-10-01 16:00:00' AS DATETIME))"
+ )
+ self.validate_identity(
+ "SELECT RANGE(CAST('2022-10-01 14:53:27 America/Los_Angeles' AS TIMESTAMP), CAST('2022-10-01 16:00:00 America/Los_Angeles' AS TIMESTAMP))"
+ )
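
The RANGE<T> coverage asserts two things: the typed literal form is canonicalized to a CAST, and the bare type string parses into the same node that exp.DataType.build produces. A sketch of that round-trip outside the test harness, with expected outputs copied from the assertions above:

import sqlglot
from sqlglot import exp

print(sqlglot.transpile("SELECT RANGE<DATE> '[2020-01-01, 2020-12-31)'", read="bigquery", write="bigquery")[0])
# SELECT CAST('[2020-01-01, 2020-12-31)' AS RANGE<DATE>)

# the parsed type and the built type compare equal, per the assertEqual above
assert sqlglot.parse_one("RANGE<DATE>", read="bigquery") == exp.DataType.build("RANGE<DATE>", dialect="bigquery")
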
diff --git a/tests/dialects/test_clickhouse.py b/tests/dialects/test_clickhouse.py
index d203141..b4fc587 100644
--- a/tests/dialects/test_clickhouse.py
+++ b/tests/dialects/test_clickhouse.py
@@ -139,10 +139,10 @@ class TestClickhouse(Validator):
"CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple() AS SELECT * FROM test_data"
)
self.validate_identity(
- "CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 (id UInt8) TO table1 AS SELECT * FROM test_data"
+ "CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 TO table1 AS SELECT * FROM test_data"
)
self.validate_identity(
- "CREATE MATERIALIZED VIEW test_view (id UInt8) TO db.table1 AS SELECT * FROM test_data"
+ "CREATE MATERIALIZED VIEW test_view TO db.table1 (id UInt8) AS SELECT * FROM test_data"
)
self.validate_identity(
"CREATE TABLE t (foo String CODEC(LZ4HC(9), ZSTD, DELTA), size String ALIAS formatReadableSize(size_bytes), INDEX idx1 a TYPE bloom_filter(0.001) GRANULARITY 1, INDEX idx2 a TYPE set(100) GRANULARITY 2, INDEX idx3 a TYPE minmax GRANULARITY 3)"
@@ -519,6 +519,17 @@ class TestClickhouse(Validator):
self.validate_identity("SELECT TRIM(LEADING '(' FROM '( Hello, world! )')")
self.validate_identity("current_timestamp").assert_is(exp.Column)
+ self.validate_identity("SELECT * APPLY(sum) FROM columns_transformers")
+ self.validate_identity("SELECT COLUMNS('[jk]') APPLY(toString) FROM columns_transformers")
+ self.validate_identity(
+ "SELECT COLUMNS('[jk]') APPLY(toString) APPLY(length) APPLY(max) FROM columns_transformers"
+ )
+ self.validate_identity("SELECT * APPLY(sum), COLUMNS('col') APPLY(sum) APPLY(avg) FROM t")
+ self.validate_identity(
+ "SELECT * FROM ABC WHERE hasAny(COLUMNS('.*field') APPLY(toUInt64) APPLY(to), (SELECT groupUniqArray(toUInt64(field))))"
+ )
+ self.validate_identity("SELECT col apply", "SELECT col AS apply")
+
def test_clickhouse_values(self):
values = exp.select("*").from_(
exp.values([exp.tuple_(1, 2, 3)], alias="subq", columns=["a", "b", "c"])
@@ -670,6 +681,7 @@ class TestClickhouse(Validator):
"CREATE TABLE foo ENGINE=Memory AS (SELECT * FROM db.other_table) COMMENT 'foo'",
)
+ self.validate_identity("CREATE MATERIALIZED VIEW a.b TO a.c (c Int32) AS SELECT * FROM a.d")
self.validate_identity("""CREATE TABLE ip_data (ip4 IPv4, ip6 IPv6) ENGINE=TinyLog()""")
self.validate_identity("""CREATE TABLE dates (dt1 Date32) ENGINE=TinyLog()""")
self.validate_identity("CREATE TABLE named_tuples (a Tuple(select String, i Int64))")
@@ -1084,3 +1096,7 @@ LIFETIME(MIN 0 MAX 0)""",
self.assertEqual(
convert(date(2020, 1, 1)).sql(dialect=self.dialect), "toDate('2020-01-01')"
)
+
+ def test_grant(self):
+ self.validate_identity("GRANT SELECT(x, y) ON db.table TO john WITH GRANT OPTION")
+ self.validate_identity("GRANT INSERT(x, y) ON db.table TO john")
diff --git a/tests/dialects/test_databricks.py b/tests/dialects/test_databricks.py
index c2965e8..65e8d5d 100644
--- a/tests/dialects/test_databricks.py
+++ b/tests/dialects/test_databricks.py
@@ -271,3 +271,9 @@ class TestDatabricks(Validator):
self.validate_identity(
"CREATE OR REFRESH STREAMING TABLE csv_data (id INT, ts TIMESTAMP, event STRING) AS SELECT * FROM STREAM READ_FILES('s3://bucket/path', format => 'csv', schema => 'id int, ts timestamp, event string')"
)
+
+ def test_grant(self):
+ self.validate_identity("GRANT CREATE ON SCHEMA my_schema TO `alf@melmak.et`")
+ self.validate_identity("GRANT SELECT ON TABLE sample_data TO `alf@melmak.et`")
+ self.validate_identity("GRANT ALL PRIVILEGES ON TABLE forecasts TO finance")
+ self.validate_identity("GRANT SELECT ON TABLE t TO `fab9e00e-ca35-11ec-9d64-0242ac120002`")
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index 19effb3..96ce600 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -2221,9 +2221,9 @@ SELECT
self.validate_all(
"SUBSTR('123456', 2, 3)",
write={
- "bigquery": "SUBSTR('123456', 2, 3)",
+ "bigquery": "SUBSTRING('123456', 2, 3)",
"oracle": "SUBSTR('123456', 2, 3)",
- "postgres": "SUBSTR('123456', 2, 3)",
+ "postgres": "SUBSTRING('123456' FROM 2 FOR 3)",
},
)
self.validate_all(
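
This change tightens cross-dialect SUBSTR handling: BigQuery now receives SUBSTRING, and Postgres the SQL-standard FROM ... FOR form, while Oracle keeps SUBSTR. A quick sketch via sqlglot.transpile, with outputs copied from the expectations above:

import sqlglot

for dialect in ("bigquery", "oracle", "postgres"):
    print(dialect, "=>", sqlglot.transpile("SUBSTR('123456', 2, 3)", write=dialect)[0])
# bigquery => SUBSTRING('123456', 2, 3)
# oracle   => SUBSTR('123456', 2, 3)
# postgres => SUBSTRING('123456' FROM 2 FOR 3)
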
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index 84c82f1..e4788ec 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -256,6 +256,7 @@ class TestDuckDB(Validator):
parse_one("a // b", read="duckdb").assert_is(exp.IntDiv).sql(dialect="duckdb"), "a // b"
)
+ self.validate_identity("CREATE TABLE tbl1 (u UNION(num INT, str TEXT))")
self.validate_identity("INSERT INTO x BY NAME SELECT 1 AS y")
self.validate_identity("SELECT 1 AS x UNION ALL BY NAME SELECT 2 AS x")
self.validate_identity("SELECT SUM(x) FILTER (x = 1)", "SELECT SUM(x) FILTER(WHERE x = 1)")
@@ -294,6 +295,9 @@ class TestDuckDB(Validator):
self.validate_identity("SUMMARIZE tbl").assert_is(exp.Summarize)
self.validate_identity("SUMMARIZE SELECT * FROM tbl").assert_is(exp.Summarize)
self.validate_identity("CREATE TABLE tbl_summary AS SELECT * FROM (SUMMARIZE tbl)")
+ self.validate_identity("UNION_VALUE(k1 := 1)").find(exp.PropertyEQ).this.assert_is(
+ exp.Identifier
+ )
self.validate_identity(
"SELECT species, island, COUNT(*) FROM t GROUP BY GROUPING SETS (species), GROUPING SETS (island)"
)
@@ -310,6 +314,13 @@ class TestDuckDB(Validator):
"SELECT * FROM x LEFT JOIN UNNEST(y)", "SELECT * FROM x LEFT JOIN UNNEST(y) ON TRUE"
)
self.validate_identity(
+ """SELECT '{ "family": "anatidae", "species": [ "duck", "goose", "swan", null ] }' ->> ['$.family', '$.species']""",
+ )
+ self.validate_identity(
+ """SELECT JSON_EXTRACT_STRING('{ "family": "anatidae", "species": [ "duck", "goose", "swan", null ] }', ['$.family', '$.species'])""",
+ """SELECT '{ "family": "anatidae", "species": [ "duck", "goose", "swan", null ] }' ->> ['$.family', '$.species']""",
+ )
+ self.validate_identity(
"SELECT col FROM t WHERE JSON_EXTRACT_STRING(col, '$.id') NOT IN ('b')",
"SELECT col FROM t WHERE NOT (col ->> '$.id') IN ('b')",
)
@@ -524,8 +535,8 @@ class TestDuckDB(Validator):
write={
"duckdb": "STR_SPLIT(x, 'a')",
"presto": "SPLIT(x, 'a')",
- "hive": "SPLIT(x, CONCAT('\\\\Q', 'a'))",
- "spark": "SPLIT(x, CONCAT('\\\\Q', 'a'))",
+ "hive": "SPLIT(x, CONCAT('\\\\Q', 'a', '\\\\E'))",
+ "spark": "SPLIT(x, CONCAT('\\\\Q', 'a', '\\\\E'))",
},
)
self.validate_all(
@@ -533,8 +544,8 @@ class TestDuckDB(Validator):
write={
"duckdb": "STR_SPLIT(x, 'a')",
"presto": "SPLIT(x, 'a')",
- "hive": "SPLIT(x, CONCAT('\\\\Q', 'a'))",
- "spark": "SPLIT(x, CONCAT('\\\\Q', 'a'))",
+ "hive": "SPLIT(x, CONCAT('\\\\Q', 'a', '\\\\E'))",
+ "spark": "SPLIT(x, CONCAT('\\\\Q', 'a', '\\\\E'))",
},
)
self.validate_all(
@@ -835,6 +846,18 @@ class TestDuckDB(Validator):
"SELECT id, STRUCT_PACK(*COLUMNS('m\\d')) AS measurements FROM many_measurements",
"""SELECT id, {'_0': *COLUMNS('m\\d')} AS measurements FROM many_measurements""",
)
+ self.validate_identity("SELECT COLUMNS(c -> c LIKE '%num%') FROM numbers")
+ self.validate_identity(
+ "SELECT MIN(COLUMNS(* REPLACE (number + id AS number))), COUNT(COLUMNS(* EXCLUDE (number))) FROM numbers"
+ )
+ self.validate_identity("SELECT COLUMNS(*) + COLUMNS(*) FROM numbers")
+ self.validate_identity("SELECT COLUMNS('(id|numbers?)') FROM numbers")
+ self.validate_identity(
+ "SELECT COALESCE(COLUMNS(['a', 'b', 'c'])) AS result FROM (SELECT NULL AS a, 42 AS b, TRUE AS c)"
+ )
+ self.validate_identity(
+ "SELECT COALESCE(*COLUMNS(['a', 'b', 'c'])) AS result FROM (SELECT NULL AS a, 42 AS b, TRUE AS c)"
+ )
def test_array_index(self):
with self.assertLogs(helper_logger) as cm:
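
Several hunks in this file (and the matching ones in test_presto.py and test_starrocks.py) adjust the Hive/Spark output of string SPLIT: the literal separator is regex-quoted between \\Q and \\E, and the updated expectations add the previously missing closing \\E, so anything concatenated after the separator is no longer swallowed by the quoting. A minimal reproduction, assuming the upstream 25.24.0 generator:

import sqlglot

print(sqlglot.transpile("STR_SPLIT(x, 'a')", read="duckdb", write="spark")[0])
# SPLIT(x, CONCAT('\\Q', 'a', '\\E'))
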
diff --git a/tests/dialects/test_mysql.py b/tests/dialects/test_mysql.py
index 2f1f7ba..835ee7c 100644
--- a/tests/dialects/test_mysql.py
+++ b/tests/dialects/test_mysql.py
@@ -1278,3 +1278,18 @@ COMMENT='客户账户表'"""
self.validate_identity(
f"""SELECT JSON_VALUE({json_doc}, '$.price' RETURNING DECIMAL(4, 2) {on_option} ON EMPTY {on_option} ON ERROR) AS price"""
)
+
+ def test_grant(self):
+ grant_cmds = [
+ "GRANT 'role1', 'role2' TO 'user1'@'localhost', 'user2'@'localhost'",
+ "GRANT SELECT ON world.* TO 'role3'",
+ "GRANT SELECT ON db2.invoice TO 'jeffrey'@'localhost'",
+ "GRANT INSERT ON `d%`.* TO u",
+ "GRANT ALL ON test.* TO ''@'localhost'",
+ "GRANT SELECT (col1), INSERT (col1, col2) ON mydb.mytbl TO 'someuser'@'somehost'",
+ "GRANT SELECT, INSERT, UPDATE ON *.* TO u2",
+ ]
+
+ for sql in grant_cmds:
+ with self.subTest(f"Testing MySQL's GRANT command statement: {sql}"):
+ self.validate_identity(sql, check_command_warning=True)
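
Most of the new GRANT coverage here leans on validate_identity(..., check_command_warning=True): GRANT variants the parser does not yet model fall back to a generic exp.Command node, the raw SQL round-trips verbatim, and a warning is logged (which the helper asserts). A hedged sketch of that fallback from the public API:

import sqlglot
from sqlglot import exp

stmt = sqlglot.parse_one("GRANT SELECT ON world.* TO 'role3'", read="mysql")
assert isinstance(stmt, exp.Command)  # unsupported syntax parses as a catch-all Command
assert stmt.sql(dialect="mysql") == "GRANT SELECT ON world.* TO 'role3'"  # text is preserved as-is

By contrast, the ClickHouse, Databricks, Oracle, Redshift, Snowflake, and T-SQL files in this patch also add GRANT forms that validate without the warning flag, i.e. forms the parser now models natively.
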
diff --git a/tests/dialects/test_oracle.py b/tests/dialects/test_oracle.py
index 28a8da9..8675086 100644
--- a/tests/dialects/test_oracle.py
+++ b/tests/dialects/test_oracle.py
@@ -546,3 +546,21 @@ WHERE
self.validate_identity(
f"SELECT * FROM t WHERE JSON_EXISTS(name{format_json}, '$[1].middle'{passing}{on_cond})"
)
+
+ def test_grant(self):
+ grant_cmds = [
+ "GRANT purchases_reader_role TO george, maria",
+ "GRANT USAGE ON TYPE price TO finance_role",
+ "GRANT USAGE ON DERBY AGGREGATE types.maxPrice TO sales_role",
+ ]
+
+ for sql in grant_cmds:
+ with self.subTest(f"Testing Oracles's GRANT command statement: {sql}"):
+ self.validate_identity(sql, check_command_warning=True)
+
+ self.validate_identity("GRANT SELECT ON TABLE t TO maria, harry")
+ self.validate_identity("GRANT SELECT ON TABLE s.v TO PUBLIC")
+ self.validate_identity("GRANT SELECT ON TABLE t TO purchases_reader_role")
+ self.validate_identity("GRANT UPDATE, TRIGGER ON TABLE t TO anita, zhi")
+ self.validate_identity("GRANT EXECUTE ON PROCEDURE p TO george")
+ self.validate_identity("GRANT USAGE ON SEQUENCE order_id TO sales_role")
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index ab06683..63266a5 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -797,6 +797,11 @@ class TestPostgres(Validator):
self.validate_identity(
"MERGE INTO target_table USING source_table AS source ON target.id = source.id WHEN MATCHED THEN DO NOTHING WHEN NOT MATCHED THEN DO NOTHING RETURNING MERGE_ACTION(), *"
)
+ self.validate_identity(
+ "SELECT 1 FROM ((VALUES (1)) AS vals(id) LEFT OUTER JOIN tbl ON vals.id = tbl.id)"
+ )
+ self.validate_identity("SELECT OVERLAY(a PLACING b FROM 1)")
+ self.validate_identity("SELECT OVERLAY(a PLACING b FROM 1 FOR 1)")
def test_ddl(self):
# Checks that user-defined types are parsed into DataType instead of Identifier
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index 9c61f62..3d5fbfe 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -158,8 +158,8 @@ class TestPresto(Validator):
write={
"duckdb": "STR_SPLIT(x, 'a.')",
"presto": "SPLIT(x, 'a.')",
- "hive": "SPLIT(x, CONCAT('\\\\Q', 'a.'))",
- "spark": "SPLIT(x, CONCAT('\\\\Q', 'a.'))",
+ "hive": "SPLIT(x, CONCAT('\\\\Q', 'a.', '\\\\E'))",
+ "spark": "SPLIT(x, CONCAT('\\\\Q', 'a.', '\\\\E'))",
},
)
self.validate_all(
@@ -276,10 +276,19 @@ class TestPresto(Validator):
self.validate_all(
"DATE_PARSE(SUBSTR(x, 1, 10), '%Y-%m-%d')",
write={
- "duckdb": "STRPTIME(SUBSTR(x, 1, 10), '%Y-%m-%d')",
- "presto": "DATE_PARSE(SUBSTR(x, 1, 10), '%Y-%m-%d')",
- "hive": "CAST(SUBSTR(x, 1, 10) AS TIMESTAMP)",
- "spark": "TO_TIMESTAMP(SUBSTR(x, 1, 10), 'yyyy-MM-dd')",
+ "duckdb": "STRPTIME(SUBSTRING(x, 1, 10), '%Y-%m-%d')",
+ "presto": "DATE_PARSE(SUBSTRING(x, 1, 10), '%Y-%m-%d')",
+ "hive": "CAST(SUBSTRING(x, 1, 10) AS TIMESTAMP)",
+ "spark": "TO_TIMESTAMP(SUBSTRING(x, 1, 10), 'yyyy-MM-dd')",
+ },
+ )
+ self.validate_all(
+ "DATE_PARSE(SUBSTRING(x, 1, 10), '%Y-%m-%d')",
+ write={
+ "duckdb": "STRPTIME(SUBSTRING(x, 1, 10), '%Y-%m-%d')",
+ "presto": "DATE_PARSE(SUBSTRING(x, 1, 10), '%Y-%m-%d')",
+ "hive": "CAST(SUBSTRING(x, 1, 10) AS TIMESTAMP)",
+ "spark": "TO_TIMESTAMP(SUBSTRING(x, 1, 10), 'yyyy-MM-dd')",
},
)
self.validate_all(
diff --git a/tests/dialects/test_redshift.py b/tests/dialects/test_redshift.py
index c13b61a..6f561da 100644
--- a/tests/dialects/test_redshift.py
+++ b/tests/dialects/test_redshift.py
@@ -626,3 +626,36 @@ FROM (
"TIME_TO_STR(a, '%Y-%m-%d %H:%M:%S.%f')",
write={"redshift": "TO_CHAR(a, 'YYYY-MM-DD HH24:MI:SS.US')"},
)
+
+ def test_grant(self):
+ grant_cmds = [
+ "GRANT SELECT ON ALL TABLES IN SCHEMA qa_tickit TO fred",
+ "GRANT USAGE ON DATASHARE salesshare TO NAMESPACE '13b8833d-17c6-4f16-8fe4-1a018f5ed00d'",
+ "GRANT USAGE FOR SCHEMAS IN DATABASE Sales_db TO ROLE Sales",
+ "GRANT EXECUTE FOR FUNCTIONS IN SCHEMA Sales_schema TO bob",
+ "GRANT SELECT FOR TABLES IN DATABASE Sales_db TO alice WITH GRANT OPTION",
+ "GRANT ALL FOR TABLES IN SCHEMA ShareSchema DATABASE ShareDb TO ROLE Sales",
+ "GRANT ASSUMEROLE ON 'arn:aws:iam::123456789012:role/Redshift-Exfunc' TO reg_user1 FOR EXTERNAL FUNCTION",
+ "GRANT ROLE sample_role1 TO ROLE sample_role2",
+ ]
+
+ for sql in grant_cmds:
+ with self.subTest(f"Testing Redshift's GRANT command statement: {sql}"):
+ self.validate_identity(sql, check_command_warning=True)
+
+ self.validate_identity("GRANT SELECT ON TABLE sales TO fred")
+ self.validate_identity("GRANT ALL ON SCHEMA qa_tickit TO GROUP qa_users")
+ self.validate_identity("GRANT ALL ON TABLE qa_tickit.sales TO GROUP qa_users")
+ self.validate_identity(
+ "GRANT ALL ON TABLE qa_tickit.sales TO GROUP qa_users, GROUP ro_users"
+ )
+ self.validate_identity("GRANT ALL ON view_date TO view_user")
+ self.validate_identity(
+ "GRANT SELECT(cust_name, cust_phone), UPDATE(cust_contact_preference) ON cust_profile TO GROUP sales_group"
+ )
+ self.validate_identity(
+ "GRANT ALL(cust_name, cust_phone, cust_contact_preference) ON cust_profile TO GROUP sales_admin"
+ )
+ self.validate_identity("GRANT USAGE ON DATABASE sales_db TO Bob")
+ self.validate_identity("GRANT USAGE ON SCHEMA sales_schema TO ROLE Analyst_role")
+ self.validate_identity("GRANT SELECT ON sales_db.sales_schema.tickit_sales_redshift TO Bob")
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index 40413c8..6cde86b 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -755,6 +755,8 @@ WHERE
write={
"spark": "SELECT COLLECT_LIST(DISTINCT a)",
"snowflake": "SELECT ARRAY_AGG(DISTINCT a)",
+ "duckdb": "SELECT ARRAY_AGG(DISTINCT a) FILTER(WHERE a IS NOT NULL)",
+ "presto": "SELECT ARRAY_AGG(DISTINCT a) FILTER(WHERE a IS NOT NULL)",
},
)
self.validate_all(
@@ -2175,3 +2177,20 @@ SINGLE = TRUE""",
"""SELECT 1 FROM some_table CHANGES (INFORMATION => APPEND_ONLY) AT (TIMESTAMP => TO_TIMESTAMP_TZ('2024-07-01 00:00:00+00:00')) END (TIMESTAMP => TO_TIMESTAMP_TZ('2024-07-01 14:28:59.999999+00:00'))""",
"""SELECT 1 FROM some_table CHANGES (INFORMATION => APPEND_ONLY) AT (TIMESTAMP => CAST('2024-07-01 00:00:00+00:00' AS TIMESTAMPTZ)) END (TIMESTAMP => CAST('2024-07-01 14:28:59.999999+00:00' AS TIMESTAMPTZ))""",
)
+
+ def test_grant(self):
+ grant_cmds = [
+ "GRANT SELECT ON FUTURE TABLES IN DATABASE d1 TO ROLE r1",
+ "GRANT INSERT, DELETE ON FUTURE TABLES IN SCHEMA d1.s1 TO ROLE r2",
+ "GRANT SELECT ON ALL TABLES IN SCHEMA mydb.myschema to ROLE analyst",
+ "GRANT SELECT, INSERT ON FUTURE TABLES IN SCHEMA mydb.myschema TO ROLE role1",
+ "GRANT CREATE MATERIALIZED VIEW ON SCHEMA mydb.myschema TO DATABASE ROLE mydb.dr1",
+ ]
+
+ for sql in grant_cmds:
+ with self.subTest(f"Testing Snowflake's GRANT command statement: {sql}"):
+ self.validate_identity(sql, check_command_warning=True)
+
+ self.validate_identity(
+ "GRANT ALL PRIVILEGES ON FUNCTION mydb.myschema.ADD5(number) TO ROLE analyst"
+ )
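
The added duckdb and presto expectations for ARRAY_AGG(DISTINCT a) append FILTER(WHERE a IS NOT NULL), presumably so the targets match Snowflake's NULL-dropping aggregation semantics. A sketch, with the expected output copied from the test:

import sqlglot

print(sqlglot.transpile("SELECT ARRAY_AGG(DISTINCT a)", read="snowflake", write="duckdb")[0])
# SELECT ARRAY_AGG(DISTINCT a) FILTER(WHERE a IS NOT NULL)
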
diff --git a/tests/dialects/test_starrocks.py b/tests/dialects/test_starrocks.py
index ee4dc90..44c54a6 100644
--- a/tests/dialects/test_starrocks.py
+++ b/tests/dialects/test_starrocks.py
@@ -85,8 +85,8 @@ class TestStarrocks(Validator):
r"""SELECT id, t.type, t.scores FROM example_table, unnest(split(type, ";"), scores) AS t(type,scores)""",
write={
"postgres": "SELECT id, t.type, t.scores FROM example_table, UNNEST(SPLIT(type, ';'), scores) AS t(type, scores)",
- "spark": r"""SELECT id, t.type, t.scores FROM example_table LATERAL VIEW INLINE(ARRAYS_ZIP(SPLIT(type, CONCAT('\\Q', ';')), scores)) t AS type, scores""",
- "databricks": r"""SELECT id, t.type, t.scores FROM example_table LATERAL VIEW INLINE(ARRAYS_ZIP(SPLIT(type, CONCAT('\\Q', ';')), scores)) t AS type, scores""",
+ "spark": r"""SELECT id, t.type, t.scores FROM example_table LATERAL VIEW INLINE(ARRAYS_ZIP(SPLIT(type, CONCAT('\\Q', ';', '\\E')), scores)) t AS type, scores""",
+ "databricks": r"""SELECT id, t.type, t.scores FROM example_table LATERAL VIEW INLINE(ARRAYS_ZIP(SPLIT(type, CONCAT('\\Q', ';', '\\E')), scores)) t AS type, scores""",
"starrocks": r"""SELECT id, t.type, t.scores FROM example_table, UNNEST(SPLIT(type, ';'), scores) AS t(type, scores)""",
"hive": UnsupportedError,
},
@@ -95,7 +95,7 @@ class TestStarrocks(Validator):
self.validate_all(
r"""SELECT id, t.type, t.scores FROM example_table_2 CROSS JOIN LATERAL unnest(split(type, ";"), scores) AS t(type,scores)""",
write={
- "spark": r"""SELECT id, t.type, t.scores FROM example_table_2 LATERAL VIEW INLINE(ARRAYS_ZIP(SPLIT(type, CONCAT('\\Q', ';')), scores)) t AS type, scores""",
+ "spark": r"""SELECT id, t.type, t.scores FROM example_table_2 LATERAL VIEW INLINE(ARRAYS_ZIP(SPLIT(type, CONCAT('\\Q', ';', '\\E')), scores)) t AS type, scores""",
"starrocks": r"""SELECT id, t.type, t.scores FROM example_table_2 CROSS JOIN LATERAL UNNEST(SPLIT(type, ';'), scores) AS t(type, scores)""",
"hive": UnsupportedError,
},
diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index 7114750..453cd5a 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -1998,3 +1998,10 @@ FROM OPENJSON(@json) WITH (
"tsql": "SELECT COUNT(1) FROM x",
},
)
+
+ def test_grant(self):
+ self.validate_identity("GRANT EXECUTE ON TestProc TO User2")
+ self.validate_identity("GRANT EXECUTE ON TestProc TO TesterRole WITH GRANT OPTION")
+ self.validate_identity(
+ "GRANT EXECUTE ON TestProc TO User2 AS TesterRole", check_command_warning=True
+ )