Diffstat (limited to 'tests/dialects')
-rw-r--r--  tests/dialects/test_duckdb.py    |  8 ++++++++
-rw-r--r--  tests/dialects/test_mysql.py     |  3 +++
-rw-r--r--  tests/dialects/test_oracle.py    | 15 ++++++++++++---
-rw-r--r--  tests/dialects/test_redshift.py  | 12 ++++++++++++
-rw-r--r--  tests/dialects/test_spark.py     | 11 +++++++++++
-rw-r--r--  tests/dialects/test_teradata.py  |  8 ++++++++
-rw-r--r--  tests/dialects/test_tsql.py      |  6 +++---
7 files changed, 57 insertions(+), 6 deletions(-)
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index 240f6f9..54553b3 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -790,3 +790,11 @@ class TestDuckDB(Validator):
"duckdb": "ALTER TABLE db.t1 RENAME TO t2",
},
)
+
+ def test_timestamps_with_units(self):
+ self.validate_all(
+ "SELECT w::TIMESTAMP_S, x::TIMESTAMP_MS, y::TIMESTAMP_US, z::TIMESTAMP_NS",
+ write={
+ "duckdb": "SELECT CAST(w AS TIMESTAMP_S), CAST(x AS TIMESTAMP_MS), CAST(y AS TIMESTAMP), CAST(z AS TIMESTAMP_NS)",
+ },
+ )
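A minimal standalone sketch of the DuckDB timestamp-unit mapping exercised by the new test, assuming a sqlglot build that includes this change and its public transpile API:

import sqlglot

# TIMESTAMP_S, TIMESTAMP_MS and TIMESTAMP_NS round-trip; TIMESTAMP_US normalizes to TIMESTAMP.
sql = "SELECT w::TIMESTAMP_S, x::TIMESTAMP_MS, y::TIMESTAMP_US, z::TIMESTAMP_NS"
print(sqlglot.transpile(sql, read="duckdb", write="duckdb")[0])
# SELECT CAST(w AS TIMESTAMP_S), CAST(x AS TIMESTAMP_MS), CAST(y AS TIMESTAMP), CAST(z AS TIMESTAMP_NS)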
diff --git a/tests/dialects/test_mysql.py b/tests/dialects/test_mysql.py
index 14a864b..b9d1d26 100644
--- a/tests/dialects/test_mysql.py
+++ b/tests/dialects/test_mysql.py
@@ -65,6 +65,9 @@ class TestMySQL(Validator):
self.validate_identity(
"INSERT INTO x VALUES (1, 'a', 2.0) ON DUPLICATE KEY UPDATE x.id = 1"
)
+ self.validate_identity(
+ "CREATE OR REPLACE VIEW my_view AS SELECT column1 AS `boo`, column2 AS `foo` FROM my_table WHERE column3 = 'some_value' UNION SELECT q.* FROM fruits_table, JSON_TABLE(Fruits, '$[*]' COLUMNS(id VARCHAR(255) PATH '$.$id', value VARCHAR(255) PATH '$.value')) AS q",
+ )
self.validate_all(
"CREATE TABLE z (a INT) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARACTER SET=utf8 COLLATE=utf8_bin COMMENT='x'",
diff --git a/tests/dialects/test_oracle.py b/tests/dialects/test_oracle.py
index 5572ec1..d92eea5 100644
--- a/tests/dialects/test_oracle.py
+++ b/tests/dialects/test_oracle.py
@@ -234,21 +234,30 @@ MATCH_RECOGNIZE (
def test_json_table(self):
self.validate_identity(
- "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS (foo PATH 'bar'))"
+ "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS(foo PATH 'bar'))"
)
self.validate_identity(
"SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS foo PATH 'bar')",
- "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS (foo PATH 'bar'))",
+ "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS(foo PATH 'bar'))",
)
self.validate_identity(
"""SELECT
CASE WHEN DBMS_LOB.GETLENGTH(info) < 32000 THEN DBMS_LOB.SUBSTR(info) END AS info_txt,
info AS info_clob
FROM schemaname.tablename ar
-INNER JOIN JSON_TABLE(:emps, '$[*]' COLUMNS (empno NUMBER PATH '$')) jt
+INNER JOIN JSON_TABLE(:emps, '$[*]' COLUMNS(empno NUMBER PATH '$')) jt
ON ar.empno = jt.empno""",
pretty=True,
)
+ self.validate_identity(
+ """SELECT
+ *
+FROM JSON_TABLE(res, '$.info[*]' COLUMNS(
+ tempid NUMBER PATH '$.tempid',
+ NESTED PATH '$.calid[*]' COLUMNS(last_dt PATH '$.last_dt ')
+)) src""",
+ pretty=True,
+ )
def test_connect_by(self):
start = "START WITH last_name = 'King'"
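A minimal sketch of the Oracle COLUMNS spacing normalization covered by the tests above, assuming sqlglot.transpile:

import sqlglot

# COLUMNS written without parentheses (or with a space before them) is rendered as COLUMNS(...).
sql = "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS foo PATH 'bar')"
print(sqlglot.transpile(sql, read="oracle", write="oracle")[0])
# SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS(foo PATH 'bar'))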
diff --git a/tests/dialects/test_redshift.py b/tests/dialects/test_redshift.py
index 9f2761f..f182feb 100644
--- a/tests/dialects/test_redshift.py
+++ b/tests/dialects/test_redshift.py
@@ -6,6 +6,18 @@ class TestRedshift(Validator):
dialect = "redshift"
def test_redshift(self):
+ self.validate_all(
+ "SELECT APPROXIMATE COUNT(DISTINCT y)",
+ read={
+ "spark": "SELECT APPROX_COUNT_DISTINCT(y)",
+ },
+ write={
+ "redshift": "SELECT APPROXIMATE COUNT(DISTINCT y)",
+ "spark": "SELECT APPROX_COUNT_DISTINCT(y)",
+ },
+ )
+ self.validate_identity("SELECT APPROXIMATE AS y")
+
self.validate_identity(
"SELECT 'a''b'",
"SELECT 'a\\'b'",
diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index 0148e55..9bb9d79 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -361,7 +361,18 @@ TBLPROPERTIES (
"SELECT CAST(123456 AS VARCHAR(3))",
write={
"": "SELECT TRY_CAST(123456 AS TEXT)",
+ "databricks": "SELECT TRY_CAST(123456 AS STRING)",
"spark": "SELECT CAST(123456 AS STRING)",
+ "spark2": "SELECT CAST(123456 AS STRING)",
+ },
+ )
+ self.validate_all(
+ "SELECT TRY_CAST('a' AS INT)",
+ write={
+ "": "SELECT TRY_CAST('a' AS INT)",
+ "databricks": "SELECT TRY_CAST('a' AS INT)",
+ "spark": "SELECT TRY_CAST('a' AS INT)",
+ "spark2": "SELECT CAST('a' AS INT)",
},
)
self.validate_all(
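A minimal sketch of the per-dialect TRY_CAST behaviour covered above, where spark and databricks keep TRY_CAST while spark2 (which lacks it) falls back to CAST, assuming sqlglot.transpile:

import sqlglot

sql = "SELECT TRY_CAST('a' AS INT)"
print(sqlglot.transpile(sql, read="spark", write="databricks")[0])  # SELECT TRY_CAST('a' AS INT)
print(sqlglot.transpile(sql, read="spark", write="spark2")[0])      # SELECT CAST('a' AS INT)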
diff --git a/tests/dialects/test_teradata.py b/tests/dialects/test_teradata.py
index f3615ff..9dbac8c 100644
--- a/tests/dialects/test_teradata.py
+++ b/tests/dialects/test_teradata.py
@@ -48,6 +48,14 @@ class TestTeradata(Validator):
self.validate_identity("HELP STATISTICS personnel.employee FROM my_qcd")
def test_create(self):
+ self.validate_identity(
+ "REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b",
+ "CREATE OR REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b",
+ )
+ self.validate_identity(
+ "REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b",
+ "CREATE OR REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b",
+ )
self.validate_identity("CREATE TABLE x (y INT) PRIMARY INDEX (y) PARTITION BY y INDEX (y)")
self.validate_identity("CREATE TABLE x (y INT) PARTITION BY y INDEX (y)")
self.validate_identity(
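A minimal sketch of the Teradata REPLACE VIEW normalization asserted above, assuming sqlglot.transpile:

import sqlglot

# Teradata's REPLACE VIEW is parsed and rendered as CREATE OR REPLACE VIEW.
sql = "REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b"
print(sqlglot.transpile(sql, read="teradata", write="teradata")[0])
# CREATE OR REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b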
diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index fbd913d..f9a720a 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -970,19 +970,19 @@ WHERE
self.validate_all(
"TRY_CONVERT(NVARCHAR, x, 121)",
write={
- "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
+ "spark": "TRY_CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
},
)
self.validate_all(
"TRY_CONVERT(INT, x)",
write={
- "spark": "CAST(x AS INT)",
+ "spark": "TRY_CAST(x AS INT)",
},
)
self.validate_all(
"TRY_CAST(x AS INT)",
write={
- "spark": "CAST(x AS INT)",
+ "spark": "TRY_CAST(x AS INT)",
},
)
self.validate_all(
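A minimal sketch of the T-SQL to Spark change above, where TRY_CONVERT and TRY_CAST now emit Spark's TRY_CAST rather than a plain CAST, assuming sqlglot.transpile:

import sqlglot

print(sqlglot.transpile("TRY_CONVERT(INT, x)", read="tsql", write="spark")[0])  # TRY_CAST(x AS INT)
print(sqlglot.transpile("TRY_CAST(x AS INT)", read="tsql", write="spark")[0])   # TRY_CAST(x AS INT)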