Diffstat (limited to 'tests/dialects/test_bigquery.py')
-rw-r--r-- | tests/dialects/test_bigquery.py | 137
1 file changed, 109 insertions(+), 28 deletions(-)
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index 2c8ac7b..340630c 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -18,6 +18,64 @@ class TestBigQuery(Validator):
     maxDiff = None
 
     def test_bigquery(self):
+        self.validate_all(
+            "SELECT SUM(x IGNORE NULLS) AS x",
+            read={
+                "bigquery": "SELECT SUM(x IGNORE NULLS) AS x",
+                "duckdb": "SELECT SUM(x IGNORE NULLS) AS x",
+                "postgres": "SELECT SUM(x) IGNORE NULLS AS x",
+                "spark": "SELECT SUM(x) IGNORE NULLS AS x",
+                "snowflake": "SELECT SUM(x) IGNORE NULLS AS x",
+            },
+            write={
+                "bigquery": "SELECT SUM(x IGNORE NULLS) AS x",
+                "duckdb": "SELECT SUM(x IGNORE NULLS) AS x",
+                "postgres": "SELECT SUM(x) IGNORE NULLS AS x",
+                "spark": "SELECT SUM(x) IGNORE NULLS AS x",
+                "snowflake": "SELECT SUM(x) IGNORE NULLS AS x",
+            },
+        )
+        self.validate_all(
+            "SELECT SUM(x RESPECT NULLS) AS x",
+            read={
+                "bigquery": "SELECT SUM(x RESPECT NULLS) AS x",
+                "duckdb": "SELECT SUM(x RESPECT NULLS) AS x",
+                "postgres": "SELECT SUM(x) RESPECT NULLS AS x",
+                "spark": "SELECT SUM(x) RESPECT NULLS AS x",
+                "snowflake": "SELECT SUM(x) RESPECT NULLS AS x",
+            },
+            write={
+                "bigquery": "SELECT SUM(x RESPECT NULLS) AS x",
+                "duckdb": "SELECT SUM(x RESPECT NULLS) AS x",
+                "postgres": "SELECT SUM(x) RESPECT NULLS AS x",
+                "spark": "SELECT SUM(x) RESPECT NULLS AS x",
+                "snowflake": "SELECT SUM(x) RESPECT NULLS AS x",
+            },
+        )
+        self.validate_all(
+            "SELECT PERCENTILE_CONT(x, 0.5 RESPECT NULLS) OVER ()",
+            write={
+                "duckdb": "SELECT QUANTILE_CONT(x, 0.5 RESPECT NULLS) OVER ()",
+                "spark": "SELECT PERCENTILE_CONT(x, 0.5) RESPECT NULLS OVER ()",
+            },
+        )
+        self.validate_all(
+            "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a, b DESC LIMIT 10) AS x",
+            write={
+                "duckdb": "SELECT ARRAY_AGG(DISTINCT x ORDER BY a NULLS FIRST, b DESC LIMIT 10 IGNORE NULLS) AS x",
+                "spark": "SELECT COLLECT_LIST(DISTINCT x ORDER BY a, b DESC LIMIT 10) IGNORE NULLS AS x",
+            },
+        )
+        self.validate_all(
+            "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a, b DESC LIMIT 1, 10) AS x",
+            write={
+                "duckdb": "SELECT ARRAY_AGG(DISTINCT x ORDER BY a NULLS FIRST, b DESC LIMIT 1, 10 IGNORE NULLS) AS x",
+                "spark": "SELECT COLLECT_LIST(DISTINCT x ORDER BY a, b DESC LIMIT 1, 10) IGNORE NULLS AS x",
+            },
+        )
+        self.validate_identity("SELECT COUNT(x RESPECT NULLS)")
+        self.validate_identity("SELECT LAST_VALUE(x IGNORE NULLS) OVER y AS x")
+
         self.validate_identity(
             "create or replace view test (tenant_id OPTIONS(description='Test description on table creation')) select 1 as tenant_id, 1 as customer_id;",
             "CREATE OR REPLACE VIEW test (tenant_id OPTIONS (description='Test description on table creation')) AS SELECT 1 AS tenant_id, 1 AS customer_id",
@@ -358,11 +416,26 @@ class TestBigQuery(Validator):
             "SELECT TIMESTAMP_DIFF(TIMESTAMP_SECONDS(60), TIMESTAMP_SECONDS(0), minute)",
             write={
                 "bigquery": "SELECT TIMESTAMP_DIFF(TIMESTAMP_SECONDS(60), TIMESTAMP_SECONDS(0), MINUTE)",
+                "databricks": "SELECT TIMESTAMPDIFF(MINUTE, CAST(FROM_UNIXTIME(0) AS TIMESTAMP), CAST(FROM_UNIXTIME(60) AS TIMESTAMP))",
                 "duckdb": "SELECT DATE_DIFF('MINUTE', TO_TIMESTAMP(0), TO_TIMESTAMP(60))",
                 "snowflake": "SELECT TIMESTAMPDIFF(MINUTE, TO_TIMESTAMP(0), TO_TIMESTAMP(60))",
             },
         )
         self.validate_all(
+            "TIMESTAMP_DIFF(a, b, MONTH)",
+            read={
+                "bigquery": "TIMESTAMP_DIFF(a, b, month)",
+                "databricks": "TIMESTAMPDIFF(month, b, a)",
+                "mysql": "TIMESTAMPDIFF(month, b, a)",
+            },
+            write={
+                "databricks": "TIMESTAMPDIFF(MONTH, b, a)",
+                "mysql": "TIMESTAMPDIFF(MONTH, b, a)",
+                "snowflake": "TIMESTAMPDIFF(MONTH, b, a)",
+            },
+        )
+
+        self.validate_all(
             "SELECT TIMESTAMP_MICROS(x)",
             read={
                 "duckdb": "SELECT MAKE_TIMESTAMP(x)",
@@ -419,34 +492,42 @@ class TestBigQuery(Validator):
                 "snowflake": "CREATE OR REPLACE TABLE a.b.c CLONE a.b.d",
             },
         )
-        self.validate_all(
-            "SELECT DATETIME_DIFF('2023-01-01T00:00:00', '2023-01-01T05:00:00', MILLISECOND)",
-            write={
-                "bigquery": "SELECT DATETIME_DIFF('2023-01-01T00:00:00', '2023-01-01T05:00:00', MILLISECOND)",
-                "databricks": "SELECT TIMESTAMPDIFF(MILLISECOND, '2023-01-01T05:00:00', '2023-01-01T00:00:00')",
-            },
-        ),
-        self.validate_all(
-            "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
-            write={
-                "bigquery": "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
-                "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1, '2023-01-01T00:00:00')",
-            },
-        ),
-        self.validate_all(
-            "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
-            write={
-                "bigquery": "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
-                "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1 * -1, '2023-01-01T00:00:00')",
-            },
-        ),
-        self.validate_all(
-            "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
-            write={
-                "bigquery": "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
-                "databricks": "SELECT DATE_TRUNC('HOUR', '2023-01-01T01:01:01')",
-            },
-        ),
+        (
+            self.validate_all(
+                "SELECT DATETIME_DIFF('2023-01-01T00:00:00', '2023-01-01T05:00:00', MILLISECOND)",
+                write={
+                    "bigquery": "SELECT DATETIME_DIFF('2023-01-01T00:00:00', '2023-01-01T05:00:00', MILLISECOND)",
+                    "databricks": "SELECT TIMESTAMPDIFF(MILLISECOND, '2023-01-01T05:00:00', '2023-01-01T00:00:00')",
+                },
+            ),
+        )
+        (
+            self.validate_all(
+                "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
+                write={
+                    "bigquery": "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
+                    "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1, '2023-01-01T00:00:00')",
+                },
+            ),
+        )
+        (
+            self.validate_all(
+                "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
+                write={
+                    "bigquery": "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
+                    "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1 * -1, '2023-01-01T00:00:00')",
+                },
+            ),
+        )
+        (
+            self.validate_all(
+                "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
+                write={
+                    "bigquery": "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
+                    "databricks": "SELECT DATE_TRUNC('HOUR', '2023-01-01T01:01:01')",
+                },
+            ),
+        )
         self.validate_all("LEAST(x, y)", read={"sqlite": "MIN(x, y)"})
         self.validate_all("CAST(x AS CHAR)", write={"bigquery": "CAST(x AS STRING)"})
         self.validate_all("CAST(x AS NCHAR)", write={"bigquery": "CAST(x AS STRING)"})