Diffstat:
 tests/dialects/test_dialect.py   |  8
 tests/dialects/test_drill.py     | 13
 tests/dialects/test_duckdb.py    | 97
 tests/dialects/test_postgres.py  | 39
 tests/dialects/test_redshift.py  |  1
 tests/dialects/test_snowflake.py | 78
 tests/dialects/test_sqlite.py    | 14
 7 files changed, 142 insertions(+), 108 deletions(-)
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index 69563cb..21f6be6 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -98,7 +98,7 @@ class TestDialect(Validator):
"bigquery": "CAST(a AS BINARY)",
"clickhouse": "CAST(a AS BINARY(4))",
"drill": "CAST(a AS VARBINARY(4))",
- "duckdb": "CAST(a AS BINARY(4))",
+ "duckdb": "CAST(a AS BLOB(4))",
"mysql": "CAST(a AS BINARY(4))",
"hive": "CAST(a AS BINARY(4))",
"oracle": "CAST(a AS BLOB(4))",
@@ -116,7 +116,7 @@ class TestDialect(Validator):
write={
"bigquery": "CAST(a AS VARBINARY)",
"clickhouse": "CAST(a AS VARBINARY(4))",
- "duckdb": "CAST(a AS VARBINARY(4))",
+ "duckdb": "CAST(a AS BLOB(4))",
"mysql": "CAST(a AS VARBINARY(4))",
"hive": "CAST(a AS BINARY(4))",
"oracle": "CAST(a AS BLOB(4))",
@@ -1208,7 +1208,7 @@ class TestDialect(Validator):
self.validate_all(
"CREATE TABLE t (c CHAR, nc NCHAR, v1 VARCHAR, v2 VARCHAR2, nv NVARCHAR, nv2 NVARCHAR2)",
write={
- "duckdb": "CREATE TABLE t (c CHAR, nc CHAR, v1 TEXT, v2 TEXT, nv TEXT, nv2 TEXT)",
+ "duckdb": "CREATE TABLE t (c TEXT, nc TEXT, v1 TEXT, v2 TEXT, nv TEXT, nv2 TEXT)",
"hive": "CREATE TABLE t (c CHAR, nc CHAR, v1 STRING, v2 STRING, nv STRING, nv2 STRING)",
"oracle": "CREATE TABLE t (c CHAR, nc CHAR, v1 VARCHAR2, v2 VARCHAR2, nv NVARCHAR2, nv2 NVARCHAR2)",
"postgres": "CREATE TABLE t (c CHAR, nc CHAR, v1 VARCHAR, v2 VARCHAR, nv VARCHAR, nv2 VARCHAR)",
@@ -1249,7 +1249,7 @@ class TestDialect(Validator):
self.validate_all(
"CREATE TABLE t (b1 BINARY, b2 BINARY(1024), c1 TEXT, c2 TEXT(1024))",
write={
- "duckdb": "CREATE TABLE t (b1 BINARY, b2 BINARY(1024), c1 TEXT, c2 TEXT(1024))",
+ "duckdb": "CREATE TABLE t (b1 BLOB, b2 BLOB(1024), c1 TEXT, c2 TEXT(1024))",
"hive": "CREATE TABLE t (b1 BINARY, b2 BINARY(1024), c1 STRING, c2 STRING(1024))",
"oracle": "CREATE TABLE t (b1 BLOB, b2 BLOB(1024), c1 CLOB, c2 CLOB(1024))",
"postgres": "CREATE TABLE t (b1 BYTEA, b2 BYTEA(1024), c1 TEXT, c2 TEXT(1024))",
diff --git a/tests/dialects/test_drill.py b/tests/dialects/test_drill.py
index a196013..e41bd34 100644
--- a/tests/dialects/test_drill.py
+++ b/tests/dialects/test_drill.py
@@ -51,3 +51,16 @@ class TestDrill(Validator):
"drill": "SELECT * FROM table(dfs.`test_data.xlsx`(type => 'excel', sheetName => 'secondSheet'))",
},
)
+
+ def test_validate_pivot(self):
+ self.validate_all(
+ "SELECT * FROM (SELECT education_level, salary, marital_status, "
+ "EXTRACT(year FROM age(birth_date)) age FROM cp.`employee.json`) PIVOT (avg(salary) AS "
+ "avg_salary, avg(age) AS avg_age FOR marital_status IN ('M' married, 'S' single))",
+ write={
+ "drill": "SELECT * FROM (SELECT education_level, salary, marital_status, "
+ "EXTRACT(year FROM age(birth_date)) AS age FROM cp.`employee.json`) "
+ "PIVOT(AVG(salary) AS avg_salary, AVG(age) AS avg_age FOR marital_status "
+ "IN ('M' AS married, 'S' AS single))"
+ },
+ )
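The new Drill test round-trips a PIVOT query and asserts that implicit aliases are normalized with AS and aggregate names are uppercased. A rough equivalent outside the test harness, with the expected normalization taken from the assertion above:

# Sketch: round-trip the Drill PIVOT example; output should match the
# normalized string asserted in test_validate_pivot above.
import sqlglot

sql = (
    "SELECT * FROM (SELECT education_level, salary, marital_status, "
    "EXTRACT(year FROM age(birth_date)) age FROM cp.`employee.json`) "
    "PIVOT (avg(salary) AS avg_salary, avg(age) AS avg_age "
    "FOR marital_status IN ('M' married, 'S' single))"
)
print(sqlglot.transpile(sql, read="drill", write="drill")[0])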
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index a1a0090..c314163 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -21,10 +21,10 @@ class TestDuckDB(Validator):
self.validate_all(
"EPOCH_MS(x)",
write={
- "bigquery": "UNIX_TO_TIME(CAST(x / 1000 AS INT64))",
+ "bigquery": "UNIX_TO_TIME(x / 1000)",
"duckdb": "TO_TIMESTAMP(x / 1000)",
"presto": "FROM_UNIXTIME(x / 1000)",
- "spark": "FROM_UNIXTIME(CAST(x / 1000 AS INT))",
+ "spark": "FROM_UNIXTIME(x / 1000)",
},
)
self.validate_all(
@@ -120,28 +120,17 @@ class TestDuckDB(Validator):
self.validate_identity("SELECT ROW(x, x + 1, y) FROM (SELECT 1 AS x, 'a' AS y)")
self.validate_identity("SELECT (x, x + 1, y) FROM (SELECT 1 AS x, 'a' AS y)")
self.validate_identity("SELECT a.x FROM (SELECT {'x': 1, 'y': 2, 'z': 3} AS a)")
+ self.validate_identity("ATTACH DATABASE ':memory:' AS new_database")
self.validate_identity(
"SELECT a['x space'] FROM (SELECT {'x space': 1, 'y': 2, 'z': 3} AS a)"
)
+
self.validate_all(
"CREATE TABLE IF NOT EXISTS table (cola INT, colb STRING) USING ICEBERG PARTITIONED BY (colb)",
write={
"duckdb": "CREATE TABLE IF NOT EXISTS table (cola INT, colb TEXT)",
},
)
-
- self.validate_all(
- "COL::BIGINT[]",
- write={
- "duckdb": "CAST(COL AS BIGINT[])",
- "presto": "CAST(COL AS ARRAY(BIGINT))",
- "hive": "CAST(COL AS ARRAY<BIGINT>)",
- "spark": "CAST(COL AS ARRAY<LONG>)",
- "postgres": "CAST(COL AS BIGINT[])",
- "snowflake": "CAST(COL AS ARRAY)",
- },
- )
-
self.validate_all(
"LIST_VALUE(0, 1, 2)",
read={
@@ -212,7 +201,6 @@ class TestDuckDB(Validator):
"spark": "x.`y`.`abc`",
},
)
-
self.validate_all(
"QUANTILE(x, 0.5)",
write={
@@ -222,14 +210,6 @@ class TestDuckDB(Validator):
"spark": "PERCENTILE(x, 0.5)",
},
)
-
- self.validate_all(
- "CAST(x AS DATE)",
- write={
- "duckdb": "CAST(x AS DATE)",
- "": "CAST(x AS DATE)",
- },
- )
self.validate_all(
"UNNEST(x)",
read={
@@ -240,7 +220,6 @@ class TestDuckDB(Validator):
"spark": "EXPLODE(x)",
},
)
-
self.validate_all(
"1d",
write={
@@ -249,13 +228,6 @@ class TestDuckDB(Validator):
},
)
self.validate_all(
- "CAST(1 AS DOUBLE)",
- read={
- "hive": "1d",
- "spark": "1d",
- },
- )
- self.validate_all(
"POWER(CAST(2 AS SMALLINT), 3)",
read={
"hive": "POW(2S, 3)",
@@ -351,8 +323,6 @@ class TestDuckDB(Validator):
},
)
- self.validate_identity("ATTACH DATABASE ':memory:' AS new_database")
-
with self.assertRaises(UnsupportedError):
transpile(
"SELECT a FROM b PIVOT(SUM(x) FOR y IN ('z', 'q'))",
@@ -372,13 +342,44 @@ class TestDuckDB(Validator):
self.validate_identity("ARRAY(SELECT id FROM t)")
def test_cast(self):
+ self.validate_identity("CAST(x AS REAL)")
+ self.validate_identity("CAST(x AS UINTEGER)")
+ self.validate_identity("CAST(x AS UBIGINT)")
+ self.validate_identity("CAST(x AS USMALLINT)")
+ self.validate_identity("CAST(x AS UTINYINT)")
+ self.validate_identity("CAST(x AS TEXT)")
+
+ self.validate_all("CAST(x AS NUMERIC)", write={"duckdb": "CAST(x AS DOUBLE)"})
+ self.validate_all("CAST(x AS CHAR)", write={"duckdb": "CAST(x AS TEXT)"})
+ self.validate_all("CAST(x AS BPCHAR)", write={"duckdb": "CAST(x AS TEXT)"})
+ self.validate_all("CAST(x AS STRING)", write={"duckdb": "CAST(x AS TEXT)"})
+ self.validate_all("CAST(x AS INT1)", write={"duckdb": "CAST(x AS TINYINT)"})
+ self.validate_all("CAST(x AS FLOAT4)", write={"duckdb": "CAST(x AS REAL)"})
+ self.validate_all("CAST(x AS FLOAT)", write={"duckdb": "CAST(x AS REAL)"})
+ self.validate_all("CAST(x AS INT4)", write={"duckdb": "CAST(x AS INT)"})
+ self.validate_all("CAST(x AS INTEGER)", write={"duckdb": "CAST(x AS INT)"})
+ self.validate_all("CAST(x AS SIGNED)", write={"duckdb": "CAST(x AS INT)"})
+ self.validate_all("CAST(x AS BLOB)", write={"duckdb": "CAST(x AS BLOB)"})
+ self.validate_all("CAST(x AS BYTEA)", write={"duckdb": "CAST(x AS BLOB)"})
+ self.validate_all("CAST(x AS BINARY)", write={"duckdb": "CAST(x AS BLOB)"})
+ self.validate_all("CAST(x AS VARBINARY)", write={"duckdb": "CAST(x AS BLOB)"})
+ self.validate_all("CAST(x AS LOGICAL)", write={"duckdb": "CAST(x AS BOOLEAN)"})
+ self.validate_all(
+ "CAST(x AS BIT)",
+ read={
+ "duckdb": "CAST(x AS BITSTRING)",
+ },
+ write={
+ "duckdb": "CAST(x AS BIT)",
+ "tsql": "CAST(x AS BIT)",
+ },
+ )
self.validate_all(
"123::CHARACTER VARYING",
write={
"duckdb": "CAST(123 AS TEXT)",
},
)
-
self.validate_all(
"cast([[1]] as int[][])",
write={
@@ -386,10 +387,34 @@ class TestDuckDB(Validator):
"spark": "CAST(ARRAY(ARRAY(1)) AS ARRAY<ARRAY<INT>>)",
},
)
-
self.validate_all(
"CAST(x AS DATE) + INTERVAL (7 * -1) DAY", read={"spark": "DATE_SUB(x, 7)"}
)
+ self.validate_all(
+ "CAST(1 AS DOUBLE)",
+ read={
+ "hive": "1d",
+ "spark": "1d",
+ },
+ )
+ self.validate_all(
+ "CAST(x AS DATE)",
+ write={
+ "duckdb": "CAST(x AS DATE)",
+ "": "CAST(x AS DATE)",
+ },
+ )
+ self.validate_all(
+ "COL::BIGINT[]",
+ write={
+ "duckdb": "CAST(COL AS BIGINT[])",
+ "presto": "CAST(COL AS ARRAY(BIGINT))",
+ "hive": "CAST(COL AS ARRAY<BIGINT>)",
+ "spark": "CAST(COL AS ARRAY<LONG>)",
+ "postgres": "CAST(COL AS BIGINT[])",
+ "snowflake": "CAST(COL AS ARRAY)",
+ },
+ )
def test_bool_or(self):
self.validate_all(
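The test_cast block consolidated above asserts DuckDB's type-alias normalizations (FLOAT to REAL, BYTEA/BINARY/VARBINARY to BLOB, LOGICAL to BOOLEAN, ::-style casts rewritten as CAST). A short sketch exercising a few of them, with expected outputs copied from the assertions and therefore tied to the sqlglot version under test:

# Sketch: a handful of the DuckDB cast normalizations asserted in test_cast.
import sqlglot

cases = {
    "CAST(x AS FLOAT)": "CAST(x AS REAL)",      # FLOAT is single precision in DuckDB
    "CAST(x AS VARBINARY)": "CAST(x AS BLOB)",  # binary types collapse to BLOB
    "CAST(x AS LOGICAL)": "CAST(x AS BOOLEAN)", # LOGICAL is a BOOLEAN alias
    "COL::BIGINT[]": "CAST(COL AS BIGINT[])",   # :: casts are rewritten as CAST
}
for sql, expected in cases.items():
    got = sqlglot.transpile(sql, read="duckdb", write="duckdb")[0]
    assert got == expected, (sql, got)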
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 0881a89..c8dea95 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -71,6 +71,9 @@ class TestPostgres(Validator):
)
def test_postgres(self):
+ self.validate_identity("SELECT ARRAY[1, 2, 3] @> ARRAY[1, 2]")
+ self.validate_identity("SELECT ARRAY[1, 2, 3] <@ ARRAY[1, 2]")
+ self.validate_identity("SELECT ARRAY[1, 2, 3] && ARRAY[1, 2]")
self.validate_identity("$x")
self.validate_identity("SELECT ARRAY[1, 2, 3]")
self.validate_identity("SELECT ARRAY(SELECT 1)")
@@ -115,40 +118,6 @@ class TestPostgres(Validator):
self.validate_identity("x ~* 'y'")
self.validate_all(
- "1 / 2",
- read={
- "drill": "1 / 2",
- "duckdb": "1 / 2",
- "postgres": "1 / 2",
- "presto": "1 / 2",
- "redshift": "1 / 2",
- "sqlite": "1 / 2",
- "teradata": "1 / 2",
- "trino": "1 / 2",
- "tsql": "1 / 2",
- },
- write={
- "drill": "1 / 2",
- "duckdb": "1 / 2",
- "postgres": "1 / 2",
- "presto": "1 / 2",
- "redshift": "1 / 2",
- "sqlite": "1 / 2",
- "teradata": "1 / 2",
- "trino": "1 / 2",
- "tsql": "1 / 2",
- "bigquery": "CAST(1 / 2 AS INT64)",
- "clickhouse": "CAST(1 / 2 AS Int32)",
- "databricks": "CAST(1 / 2 AS INT)",
- "hive": "CAST(1 / 2 AS INT)",
- "mysql": "CAST(1 / 2 AS INT)",
- "oracle": "CAST(1 / 2 AS NUMBER)",
- "snowflake": "CAST(1 / 2 AS INT)",
- "spark": "CAST(1 / 2 AS INT)",
- "starrocks": "CAST(1 / 2 AS INT)",
- },
- )
- self.validate_all(
"SELECT (DATE '2016-01-10', DATE '2016-02-01') OVERLAPS (DATE '2016-01-20', DATE '2016-02-10')",
write={
"postgres": "SELECT (CAST('2016-01-10' AS DATE), CAST('2016-02-01' AS DATE)) OVERLAPS (CAST('2016-01-20' AS DATE), CAST('2016-02-10' AS DATE))",
@@ -196,7 +165,7 @@ class TestPostgres(Validator):
self.validate_all(
"CREATE TABLE x (a UUID, b BYTEA)",
write={
- "duckdb": "CREATE TABLE x (a UUID, b VARBINARY)",
+ "duckdb": "CREATE TABLE x (a UUID, b BLOB)",
"presto": "CREATE TABLE x (a UUID, b VARBINARY)",
"hive": "CREATE TABLE x (a UUID, b BINARY)",
"spark": "CREATE TABLE x (a UUID, b BINARY)",
diff --git a/tests/dialects/test_redshift.py b/tests/dialects/test_redshift.py
index ad83b99..f5b8a43 100644
--- a/tests/dialects/test_redshift.py
+++ b/tests/dialects/test_redshift.py
@@ -5,6 +5,7 @@ class TestRedshift(Validator):
dialect = "redshift"
def test_redshift(self):
+ self.validate_all("CONVERT(INTEGER, x)", write={"redshift": "CAST(x AS INTEGER)"})
self.validate_all(
'create table "group" ("col" char(10))',
write={
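The single Redshift addition checks that CONVERT(type, expr) is normalized to a standard CAST; roughly:

# Sketch: Redshift CONVERT(INTEGER, x) should generate as CAST(x AS INTEGER).
import sqlglot

print(sqlglot.transpile("CONVERT(INTEGER, x)", read="redshift", write="redshift")[0])
# expected: CAST(x AS INTEGER)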
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index c28c58d..1ac910c 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -18,48 +18,60 @@ class TestSnowflake(Validator):
self.validate_identity("COMMENT IF EXISTS ON TABLE foo IS 'bar'")
self.validate_all(
- "1 / 2",
+ "TO_CHAR(x, y)",
read={
- "bigquery": "1 / 2",
- "clickhouse": "1 / 2",
- "databricks": "1 / 2",
- "hive": "1 / 2",
- "mysql": "1 / 2",
- "oracle": "1 / 2",
- "snowflake": "1 / 2",
- "spark": "1 / 2",
- "starrocks": "1 / 2",
- },
- write={
- "bigquery": "1 / 2",
- "clickhouse": "1 / 2",
- "databricks": "1 / 2",
- "hive": "1 / 2",
- "mysql": "1 / 2",
- "oracle": "1 / 2",
- "snowflake": "1 / 2",
- "spark": "1 / 2",
- "starrocks": "1 / 2",
- "drill": "CAST(1 AS DOUBLE) / 2",
- "duckdb": "CAST(1 AS DOUBLE) / 2",
- "postgres": "CAST(1 AS DOUBLE PRECISION) / 2",
- "presto": "CAST(1 AS DOUBLE) / 2",
- "redshift": "CAST(1 AS DOUBLE PRECISION) / 2",
- "sqlite": "CAST(1 AS REAL) / 2",
- "teradata": "CAST(1 AS DOUBLE) / 2",
- "trino": "CAST(1 AS DOUBLE) / 2",
- "tsql": "CAST(1 AS DOUBLE) / 2",
+ "": "TO_CHAR(x, y)",
+ "snowflake": "TO_VARCHAR(x, y)",
+ },
+ write={
+ "": "CAST(x AS TEXT)",
+ "databricks": "TO_CHAR(x, y)",
+ "drill": "TO_CHAR(x, y)",
+ "oracle": "TO_CHAR(x, y)",
+ "postgres": "TO_CHAR(x, y)",
+ "snowflake": "TO_CHAR(x, y)",
+ "teradata": "TO_CHAR(x, y)",
+ },
+ )
+ self.validate_all(
+ "SQUARE(x)",
+ write={
+ "bigquery": "POWER(x, 2)",
+ "clickhouse": "POWER(x, 2)",
+ "databricks": "POWER(x, 2)",
+ "drill": "POW(x, 2)",
+ "duckdb": "POWER(x, 2)",
+ "hive": "POWER(x, 2)",
+ "mysql": "POWER(x, 2)",
+ "oracle": "POWER(x, 2)",
+ "postgres": "x ^ 2",
+ "presto": "POWER(x, 2)",
+ "redshift": "POWER(x, 2)",
+ "snowflake": "POWER(x, 2)",
+ "spark": "POWER(x, 2)",
+ "sqlite": "POWER(x, 2)",
+ "starrocks": "POWER(x, 2)",
+ "trino": "POWER(x, 2)",
+ "tsql": "POWER(x, 2)",
+ },
+ )
+ self.validate_all(
+ "POWER(x, 2)",
+ read={
+ "oracle": "SQUARE(x)",
+ "snowflake": "SQUARE(x)",
+ "tsql": "SQUARE(x)",
},
)
self.validate_all(
"DIV0(foo, bar)",
write={
"snowflake": "IFF(bar = 0, 0, foo / bar)",
- "sqlite": "CASE WHEN bar = 0 THEN 0 ELSE CAST(foo AS REAL) / bar END",
- "presto": "IF(bar = 0, 0, CAST(foo AS DOUBLE) / bar)",
+ "sqlite": "CASE WHEN bar = 0 THEN 0 ELSE foo / bar END",
+ "presto": "IF(bar = 0, 0, foo / bar)",
"spark": "IF(bar = 0, 0, foo / bar)",
"hive": "IF(bar = 0, 0, foo / bar)",
- "duckdb": "CASE WHEN bar = 0 THEN 0 ELSE CAST(foo AS DOUBLE) / bar END",
+ "duckdb": "CASE WHEN bar = 0 THEN 0 ELSE foo / bar END",
},
)
self.validate_all(
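The Snowflake diff replaces the integer-division test with TO_CHAR/TO_VARCHAR and SQUARE coverage, and drops the explicit DOUBLE casts from the DIV0 expectations. A small sketch of the new behavior, with expected strings copied from the assertions above:

# Sketch: SQUARE expands to POWER(x, 2) (x ^ 2 for Postgres), and Snowflake's
# TO_VARCHAR is read back as TO_CHAR.
import sqlglot

print(sqlglot.transpile("SQUARE(x)", read="snowflake", write="postgres")[0])
# expected: x ^ 2

print(sqlglot.transpile("TO_VARCHAR(x, y)", read="snowflake", write="snowflake")[0])
# expected: TO_CHAR(x, y)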
diff --git a/tests/dialects/test_sqlite.py b/tests/dialects/test_sqlite.py
index a3e4cc9..f889445 100644
--- a/tests/dialects/test_sqlite.py
+++ b/tests/dialects/test_sqlite.py
@@ -89,6 +89,20 @@ class TestSQLite(Validator):
write={"snowflake": "LEAST(x, y, z)"},
)
+ def test_datediff(self):
+ self.validate_all(
+ "DATEDIFF(a, b, 'day')",
+ write={"sqlite": "CAST((JULIANDAY(a) - JULIANDAY(b)) AS INTEGER)"},
+ )
+ self.validate_all(
+ "DATEDIFF(a, b, 'hour')",
+ write={"sqlite": "CAST((JULIANDAY(a) - JULIANDAY(b)) * 24.0 AS INTEGER)"},
+ )
+ self.validate_all(
+ "DATEDIFF(a, b, 'year')",
+ write={"sqlite": "CAST((JULIANDAY(a) - JULIANDAY(b)) / 365.0 AS INTEGER)"},
+ )
+
def test_hexadecimal_literal(self):
self.validate_all(
"SELECT 0XCC",