Diffstat (limited to 'tests/dialects')
-rw-r--r--  tests/dialects/test_bigquery.py    | 42
-rw-r--r--  tests/dialects/test_clickhouse.py  |  9
-rw-r--r--  tests/dialects/test_dialect.py     |  6
-rw-r--r--  tests/dialects/test_hive.py        | 20
-rw-r--r--  tests/dialects/test_postgres.py    |  4
-rw-r--r--  tests/dialects/test_presto.py      |  6
-rw-r--r--  tests/dialects/test_snowflake.py   | 30
-rw-r--r--  tests/dialects/test_spark.py       |  1
-rw-r--r--  tests/dialects/test_tsql.py        | 24
9 files changed, 134 insertions(+), 8 deletions(-)
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index 258e47f..c61a2f3 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -125,7 +125,7 @@ class TestBigQuery(Validator):
             },
         )
         self.validate_all(
-            "CURRENT_DATE",
+            "CURRENT_TIMESTAMP()",
             read={
                 "tsql": "GETDATE()",
             },
@@ -300,6 +300,14 @@ class TestBigQuery(Validator):
             },
         )
         self.validate_all(
+            "SELECT cola, colb, colc FROM (VALUES (1, 'test', NULL)) AS tab(cola, colb, colc)",
+            write={
+                "spark": "SELECT cola, colb, colc FROM VALUES (1, 'test', NULL) AS tab(cola, colb, colc)",
+                "bigquery": "SELECT cola, colb, colc FROM UNNEST([STRUCT(1 AS cola, 'test' AS colb, NULL AS colc)])",
+                "snowflake": "SELECT cola, colb, colc FROM (VALUES (1, 'test', NULL)) AS tab(cola, colb, colc)",
+            },
+        )
+        self.validate_all(
             "SELECT * FROM (SELECT a, b, c FROM test) PIVOT(SUM(b) d, COUNT(*) e FOR c IN ('x', 'y'))",
             write={
                 "bigquery": "SELECT * FROM (SELECT a, b, c FROM test) PIVOT(SUM(b) AS d, COUNT(*) AS e FOR c IN ('x', 'y'))",
@@ -324,3 +332,35 @@ class TestBigQuery(Validator):
             "SELECT a, GROUP_CONCAT(b) FROM table GROUP BY a",
             write={"bigquery": "SELECT a, STRING_AGG(b) FROM table GROUP BY a"},
         )
+
+    def test_remove_precision_parameterized_types(self):
+        self.validate_all(
+            "SELECT CAST(1 AS NUMERIC(10, 2))",
+            write={
+                "bigquery": "SELECT CAST(1 AS NUMERIC)",
+            },
+        )
+        self.validate_all(
+            "CREATE TABLE test (a NUMERIC(10, 2))",
+            write={
+                "bigquery": "CREATE TABLE test (a NUMERIC(10, 2))",
+            },
+        )
+        self.validate_all(
+            "SELECT CAST('1' AS STRING(10)) UNION ALL SELECT CAST('2' AS STRING(10))",
+            write={
+                "bigquery": "SELECT CAST('1' AS STRING) UNION ALL SELECT CAST('2' AS STRING)",
+            },
+        )
+        self.validate_all(
+            "SELECT cola FROM (SELECT CAST('1' AS STRING(10)) AS cola UNION ALL SELECT CAST('2' AS STRING(10)) AS cola)",
+            write={
+                "bigquery": "SELECT cola FROM (SELECT CAST('1' AS STRING) AS cola UNION ALL SELECT CAST('2' AS STRING) AS cola)",
+            },
+        )
+        self.validate_all(
+            "INSERT INTO test (cola, colb) VALUES (CAST(7 AS STRING(10)), CAST(14 AS STRING(10)))",
+            write={
+                "bigquery": "INSERT INTO test (cola, colb) VALUES (CAST(7 AS STRING), CAST(14 AS STRING))",
+            },
+        )
diff --git a/tests/dialects/test_clickhouse.py b/tests/dialects/test_clickhouse.py
index c95c967..109e9f3 100644
--- a/tests/dialects/test_clickhouse.py
+++ b/tests/dialects/test_clickhouse.py
@@ -14,6 +14,9 @@ class TestClickhouse(Validator):
         self.validate_identity("SELECT * FROM foo LEFT ASOF JOIN bla")
         self.validate_identity("SELECT * FROM foo ASOF JOIN bla")
         self.validate_identity("SELECT * FROM foo ANY JOIN bla")
+        self.validate_identity("SELECT quantile(0.5)(a)")
+        self.validate_identity("SELECT quantiles(0.5)(a) AS x FROM t")
+        self.validate_identity("SELECT * FROM foo WHERE x GLOBAL IN (SELECT * FROM bar)")
 
         self.validate_all(
             "SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname ASC NULLS LAST, lname",
@@ -38,3 +41,9 @@ class TestClickhouse(Validator):
             "SELECT x #! comment",
             write={"": "SELECT x /* comment */"},
         )
+        self.validate_all(
+            "SELECT quantileIf(0.5)(a, true)",
+            write={
+                "clickhouse": "SELECT quantileIf(0.5)(a, TRUE)",
+            },
+        )
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index ced7102..284a30d 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -85,7 +85,7 @@ class TestDialect(Validator):
         self.validate_all(
             "CAST(a AS BINARY(4))",
             write={
-                "bigquery": "CAST(a AS BINARY(4))",
+                "bigquery": "CAST(a AS BINARY)",
                 "clickhouse": "CAST(a AS BINARY(4))",
                 "drill": "CAST(a AS VARBINARY(4))",
                 "duckdb": "CAST(a AS BINARY(4))",
@@ -104,7 +104,7 @@ class TestDialect(Validator):
         self.validate_all(
             "CAST(a AS VARBINARY(4))",
             write={
-                "bigquery": "CAST(a AS VARBINARY(4))",
+                "bigquery": "CAST(a AS VARBINARY)",
                 "clickhouse": "CAST(a AS VARBINARY(4))",
                 "duckdb": "CAST(a AS VARBINARY(4))",
                 "mysql": "CAST(a AS VARBINARY(4))",
@@ -181,7 +181,7 @@ class TestDialect(Validator):
         self.validate_all(
             "CAST(a AS VARCHAR(3))",
             write={
-                "bigquery": "CAST(a AS STRING(3))",
+                "bigquery": "CAST(a AS STRING)",
                 "drill": "CAST(a AS VARCHAR(3))",
                 "duckdb": "CAST(a AS TEXT(3))",
                 "mysql": "CAST(a AS VARCHAR(3))",
diff --git a/tests/dialects/test_hive.py b/tests/dialects/test_hive.py
index a7f3b8f..bbf00b1 100644
--- a/tests/dialects/test_hive.py
+++ b/tests/dialects/test_hive.py
@@ -339,6 +339,24 @@ class TestHive(Validator):
 
     def test_hive(self):
         self.validate_all(
+            "SELECT A.1a AS b FROM test_a AS A",
+            write={
+                "spark": "SELECT A.1a AS b FROM test_a AS A",
+            },
+        )
+        self.validate_all(
+            "SELECT 1_a AS a FROM test_table",
+            write={
+                "spark": "SELECT 1_a AS a FROM test_table",
+            },
+        )
+        self.validate_all(
+            "SELECT a_b AS 1_a FROM test_table",
+            write={
+                "spark": "SELECT a_b AS 1_a FROM test_table",
+            },
+        )
+        self.validate_all(
             "PERCENTILE(x, 0.5)",
             write={
                 "duckdb": "QUANTILE(x, 0.5)",
@@ -411,7 +429,7 @@ class TestHive(Validator):
             "INITCAP('new york')",
             write={
                 "duckdb": "INITCAP('new york')",
-                "presto": "REGEXP_REPLACE('new york', '(\w)(\w*)', x -> UPPER(x[1]) || LOWER(x[2]))",
+                "presto": r"REGEXP_REPLACE('new york', '(\w)(\w*)', x -> UPPER(x[1]) || LOWER(x[2]))",
                 "hive": "INITCAP('new york')",
                 "spark": "INITCAP('new york')",
             },
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 1e048d5..583d349 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -122,6 +122,10 @@ class TestPostgres(Validator):
             "TO_TIMESTAMP(123::DOUBLE PRECISION)",
             write={"postgres": "TO_TIMESTAMP(CAST(123 AS DOUBLE PRECISION))"},
         )
+        self.validate_all(
+            "SELECT to_timestamp(123)::time without time zone",
+            write={"postgres": "SELECT CAST(TO_TIMESTAMP(123) AS TIME)"},
+        )
 
         self.validate_identity(
             "CREATE TABLE A (LIKE B INCLUDING CONSTRAINT INCLUDING COMPRESSION EXCLUDING COMMENTS)"
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index 70e1059..ee535e9 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -60,11 +60,11 @@ class TestPresto(Validator):
         self.validate_all(
             "CAST(x AS TIMESTAMP(9) WITH TIME ZONE)",
             write={
-                "bigquery": "CAST(x AS TIMESTAMPTZ(9))",
+                "bigquery": "CAST(x AS TIMESTAMPTZ)",
                 "duckdb": "CAST(x AS TIMESTAMPTZ(9))",
                 "presto": "CAST(x AS TIMESTAMP(9) WITH TIME ZONE)",
-                "hive": "CAST(x AS TIMESTAMPTZ(9))",
-                "spark": "CAST(x AS TIMESTAMPTZ(9))",
+                "hive": "CAST(x AS TIMESTAMPTZ)",
+                "spark": "CAST(x AS TIMESTAMPTZ)",
             },
         )
 
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index df62c6c..0e9ce9b 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -523,3 +523,33 @@ FROM persons AS p, LATERAL FLATTEN(input => p.c, path => 'contact') AS f, LATERA
                 "spark": "SELECT `c0`, `c1` FROM (VALUES (1, 2), (3, 4)) AS `t0`(`c0`, `c1`)",
             },
         )
+
+    def test_describe_table(self):
+        self.validate_all(
+            "DESCRIBE TABLE db.table",
+            write={
+                "snowflake": "DESCRIBE TABLE db.table",
+                "spark": "DESCRIBE db.table",
+            },
+        )
+        self.validate_all(
+            "DESCRIBE db.table",
+            write={
+                "snowflake": "DESCRIBE TABLE db.table",
+                "spark": "DESCRIBE db.table",
+            },
+        )
+        self.validate_all(
+            "DESC TABLE db.table",
+            write={
+                "snowflake": "DESCRIBE TABLE db.table",
+                "spark": "DESCRIBE db.table",
+            },
+        )
+        self.validate_all(
+            "DESC VIEW db.table",
+            write={
+                "snowflake": "DESCRIBE VIEW db.table",
+                "spark": "DESCRIBE db.table",
+            },
+        )
diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index 7395e72..f287a89 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -207,6 +207,7 @@ TBLPROPERTIES (
         )
 
     def test_spark(self):
+        self.validate_identity("SELECT UNIX_TIMESTAMP()")
         self.validate_all(
             "ARRAY_SORT(x, (left, right) -> -1)",
             write={
diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index b4ac094..b74c05f 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -6,6 +6,8 @@ class TestTSQL(Validator):
 
     def test_tsql(self):
         self.validate_identity('SELECT "x"."y" FROM foo')
+        self.validate_identity("SELECT * FROM #foo")
+        self.validate_identity("SELECT * FROM ##foo")
         self.validate_identity(
             "SELECT DISTINCT DepartmentName, PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY BaseRate) OVER (PARTITION BY DepartmentName) AS MedianCont FROM dbo.DimEmployee"
         )
@@ -71,6 +73,12 @@ class TestTSQL(Validator):
                 "tsql": "CAST(x AS DATETIME2)",
             },
         )
+        self.validate_all(
+            "CAST(x AS DATETIME2(6))",
+            write={
+                "hive": "CAST(x AS TIMESTAMP)",
+            },
+        )
 
     def test_charindex(self):
         self.validate_all(
@@ -300,6 +308,12 @@ class TestTSQL(Validator):
                 "spark": "SELECT CAST(y.x AS VARCHAR(10)) AS z FROM testdb.dbo.test AS y",
             },
         )
+        self.validate_all(
+            "SELECT CAST((SELECT x FROM y) AS VARCHAR) AS test",
+            write={
+                "spark": "SELECT CAST((SELECT x FROM y) AS STRING) AS test",
+            },
+        )
 
     def test_add_date(self):
         self.validate_identity("SELECT DATEADD(year, 1, '2017/08/25')")
@@ -441,3 +455,13 @@ class TestTSQL(Validator):
             "SELECT '''test'''",
             write={"spark": r"SELECT '\'test\''"},
         )
+
+    def test_eomonth(self):
+        self.validate_all(
+            "EOMONTH(GETDATE())",
+            write={"spark": "LAST_DAY(CURRENT_TIMESTAMP())"},
+        )
+        self.validate_all(
+            "EOMONTH(GETDATE(), -1)",
+            write={"spark": "LAST_DAY(ADD_MONTHS(CURRENT_TIMESTAMP(), -1))"},
+        )
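
Note: the validate_identity / validate_all helpers in these tests exercise sqlglot's dialect-to-dialect transpilation. A minimal sketch (not part of this commit) of how two of the asserted round trips can be reproduced with the public API, assuming a sqlglot build that includes these changes:

# Reproduce two assertions from the tests above with sqlglot's public transpile API.
import sqlglot

# T-SQL EOMONTH/GETDATE -> Spark, as asserted in test_tsql.py (test_eomonth)
print(sqlglot.transpile("EOMONTH(GETDATE(), -1)", read="tsql", write="spark")[0])
# expected per the test: LAST_DAY(ADD_MONTHS(CURRENT_TIMESTAMP(), -1))

# BigQuery drops precision parameters inside CAST,
# as asserted in test_bigquery.py (test_remove_precision_parameterized_types)
print(sqlglot.transpile("SELECT CAST(1 AS NUMERIC(10, 2))", read="bigquery", write="bigquery")[0])
# expected per the test: SELECT CAST(1 AS NUMERIC)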