Diffstat
-rw-r--r-- | tests/dialects/test_bigquery.py | 349 |
1 files changed, 316 insertions, 33 deletions
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index ae8ed16..e2adfea 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -1,4 +1,6 @@
 from unittest import mock
+import datetime
+import pytz
 
 from sqlglot import (
     ErrorLevel,
@@ -20,6 +22,8 @@ class TestBigQuery(Validator):
     maxDiff = None
 
     def test_bigquery(self):
+        self.validate_identity("REGEXP_EXTRACT(x, '(?<)')")
+
         self.validate_all(
             "EXTRACT(HOUR FROM DATETIME(2008, 12, 25, 15, 30, 00))",
             write={
@@ -103,8 +107,9 @@ LANGUAGE js AS
         select_with_quoted_udf = self.validate_identity("SELECT `p.d.UdF`(data) FROM `p.d.t`")
         self.assertEqual(select_with_quoted_udf.selects[0].name, "p.d.UdF")
 
+        self.validate_identity("SELECT * FROM READ_CSV('bla.csv')")
+        self.validate_identity("CAST(x AS STRUCT<list ARRAY<INT64>>)")
         self.validate_identity("assert.true(1 = 1)")
-        self.validate_identity("SELECT ARRAY_TO_STRING(list, '--') AS text")
         self.validate_identity("SELECT jsondoc['some_key']")
         self.validate_identity("SELECT `p.d.UdF`(data).* FROM `p.d.t`")
         self.validate_identity("SELECT * FROM `my-project.my-dataset.my-table`")
@@ -137,7 +142,6 @@ LANGUAGE js AS
         self.validate_identity("SELECT CAST(CURRENT_DATE AS STRING FORMAT 'DAY') AS current_day")
         self.validate_identity("SAFE_CAST(encrypted_value AS STRING FORMAT 'BASE64')")
         self.validate_identity("CAST(encrypted_value AS STRING FORMAT 'BASE64')")
-        self.validate_identity("CAST(STRUCT<a INT64>(1) AS STRUCT<a INT64>)")
         self.validate_identity("STRING_AGG(a)")
         self.validate_identity("STRING_AGG(a, ' & ')")
         self.validate_identity("STRING_AGG(DISTINCT a, ' & ')")
@@ -161,12 +165,9 @@ LANGUAGE js AS
         self.validate_identity("x <> ''")
         self.validate_identity("DATE_TRUNC(col, WEEK(MONDAY))")
         self.validate_identity("SELECT b'abc'")
-        self.validate_identity("""SELECT * FROM UNNEST(ARRAY<STRUCT<x INT64>>[])""")
         self.validate_identity("SELECT AS STRUCT 1 AS a, 2 AS b")
         self.validate_identity("SELECT DISTINCT AS STRUCT 1 AS a, 2 AS b")
         self.validate_identity("SELECT AS VALUE STRUCT(1 AS a, 2 AS b)")
-        self.validate_identity("SELECT STRUCT<ARRAY<STRING>>(['2023-01-17'])")
-        self.validate_identity("SELECT STRUCT<STRING>((SELECT a FROM b.c LIMIT 1)).*")
         self.validate_identity("SELECT * FROM q UNPIVOT(values FOR quarter IN (b, c))")
         self.validate_identity("""CREATE TABLE x (a STRUCT<values ARRAY<INT64>>)""")
         self.validate_identity("""CREATE TABLE x (a STRUCT<b STRING OPTIONS (description='b')>)""")
@@ -193,6 +194,9 @@ LANGUAGE js AS
         self.validate_identity("CAST(x AS TIMESTAMPTZ)", "CAST(x AS TIMESTAMP)")
         self.validate_identity("CAST(x AS RECORD)", "CAST(x AS STRUCT)")
         self.validate_identity(
+            "MERGE INTO dataset.NewArrivals USING (SELECT * FROM UNNEST([('microwave', 10, 'warehouse #1'), ('dryer', 30, 'warehouse #1'), ('oven', 20, 'warehouse #2')])) ON FALSE WHEN NOT MATCHED THEN INSERT ROW WHEN NOT MATCHED BY SOURCE THEN DELETE"
+        )
+        self.validate_identity(
             "SELECT * FROM `SOME_PROJECT_ID.SOME_DATASET_ID.INFORMATION_SCHEMA.SOME_VIEW`"
         )
         self.validate_identity(
@@ -292,8 +296,26 @@ LANGUAGE js AS
             r"REGEXP_EXTRACT(svc_plugin_output, r'\\\((.*)')",
             r"REGEXP_EXTRACT(svc_plugin_output, '\\\\\\((.*)')",
         )
+        self.validate_identity(
+            "SELECT CAST(1 AS BYTEINT)",
+            "SELECT CAST(1 AS INT64)",
+        )
 
         self.validate_all(
+            "SAFE_CAST(some_date AS DATE FORMAT 'DD MONTH YYYY')",
+            write={
+                "bigquery": "SAFE_CAST(some_date AS DATE FORMAT 'DD MONTH YYYY')",
+                "duckdb": "CAST(TRY_STRPTIME(some_date, '%d %B %Y') AS DATE)",
+            },
+        )
+        self.validate_all(
+            "SAFE_CAST(some_date AS DATE FORMAT 'YYYY-MM-DD') AS some_date",
+            write={
+                "bigquery": "SAFE_CAST(some_date AS DATE FORMAT 'YYYY-MM-DD') AS some_date",
+                "duckdb": "CAST(TRY_STRPTIME(some_date, '%Y-%m-%d') AS DATE) AS some_date",
+            },
+        )
+        self.validate_all(
             "SELECT t.c1, h.c2, s.c3 FROM t1 AS t, UNNEST(t.t2) AS h, UNNEST(h.t3) AS s",
             write={
                 "bigquery": "SELECT t.c1, h.c2, s.c3 FROM t1 AS t, UNNEST(t.t2) AS h, UNNEST(h.t3) AS s",
@@ -352,7 +374,7 @@ LANGUAGE js AS
             },
             write={
                 "bigquery": "SELECT SUM(x IGNORE NULLS) AS x",
-                "duckdb": "SELECT SUM(x IGNORE NULLS) AS x",
+                "duckdb": "SELECT SUM(x) AS x",
                 "postgres": "SELECT SUM(x) IGNORE NULLS AS x",
                 "spark": "SELECT SUM(x) IGNORE NULLS AS x",
                 "snowflake": "SELECT SUM(x) IGNORE NULLS AS x",
@@ -387,7 +409,7 @@ LANGUAGE js AS
             "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a, b DESC LIMIT 10) AS x",
             write={
                 "bigquery": "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a, b DESC LIMIT 10) AS x",
-                "duckdb": "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a NULLS FIRST, b DESC LIMIT 10) AS x",
+                "duckdb": "SELECT ARRAY_AGG(DISTINCT x ORDER BY a NULLS FIRST, b DESC LIMIT 10) AS x",
                 "spark": "SELECT COLLECT_LIST(DISTINCT x ORDER BY a, b DESC LIMIT 10) IGNORE NULLS AS x",
             },
         )
@@ -395,7 +417,7 @@ LANGUAGE js AS
             "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a, b DESC LIMIT 1, 10) AS x",
             write={
                 "bigquery": "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a, b DESC LIMIT 1, 10) AS x",
-                "duckdb": "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a NULLS FIRST, b DESC LIMIT 1, 10) AS x",
+                "duckdb": "SELECT ARRAY_AGG(DISTINCT x ORDER BY a NULLS FIRST, b DESC LIMIT 1, 10) AS x",
                 "spark": "SELECT COLLECT_LIST(DISTINCT x ORDER BY a, b DESC LIMIT 1, 10) IGNORE NULLS AS x",
             },
         )
@@ -431,7 +453,7 @@ LANGUAGE js AS
             write={
                 "bigquery": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE), MONTH)",
                 "duckdb": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE))",
-                "clickhouse": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE))",
+                "clickhouse": "SELECT LAST_DAY(CAST('2008-11-25' AS Nullable(DATE)))",
                 "mysql": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE))",
                 "oracle": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE))",
                 "postgres": "SELECT CAST(DATE_TRUNC('MONTH', CAST('2008-11-25' AS DATE)) + INTERVAL '1 MONTH' - INTERVAL '1 DAY' AS DATE)",
@@ -496,6 +518,20 @@ LANGUAGE js AS
             },
         )
         self.validate_all(
+            "SELECT FORMAT_DATETIME('%Y%m%d %H:%M:%S', DATETIME '2023-12-25 15:30:00')",
+            write={
+                "bigquery": "SELECT FORMAT_DATETIME('%Y%m%d %H:%M:%S', CAST('2023-12-25 15:30:00' AS DATETIME))",
+                "duckdb": "SELECT STRFTIME(CAST('2023-12-25 15:30:00' AS TIMESTAMP), '%Y%m%d %H:%M:%S')",
+            },
+        )
+        self.validate_all(
+            "SELECT FORMAT_DATETIME('%x', '2023-12-25 15:30:00')",
+            write={
+                "bigquery": "SELECT FORMAT_DATETIME('%x', '2023-12-25 15:30:00')",
+                "duckdb": "SELECT STRFTIME(CAST('2023-12-25 15:30:00' AS TIMESTAMP), '%x')",
+            },
+        )
+        self.validate_all(
             "SELECT COUNTIF(x)",
             read={
                 "clickhouse": "SELECT countIf(x)",
@@ -599,8 +635,9 @@ LANGUAGE js AS
             self.validate_all(
                 "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
                 write={
-                    "bigquery": "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
-                    "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1, '2023-01-01T00:00:00')",
+                    "bigquery": "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL '1' MILLISECOND)",
+                    "databricks": "SELECT TIMESTAMPADD(MILLISECOND, '1', '2023-01-01T00:00:00')",
+                    "duckdb": "SELECT CAST('2023-01-01T00:00:00' AS DATETIME) + INTERVAL '1' MILLISECOND",
                 },
             ),
         )
@@ -608,8 +645,9 @@ LANGUAGE js AS
            self.validate_all(
                "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
                write={
-                    "bigquery": "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
-                    "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1 * -1, '2023-01-01T00:00:00')",
+                    "bigquery": "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL '1' MILLISECOND)",
+                    "databricks": "SELECT TIMESTAMPADD(MILLISECOND, '1' * -1, '2023-01-01T00:00:00')",
+                    "duckdb": "SELECT CAST('2023-01-01T00:00:00' AS DATETIME) - INTERVAL '1' MILLISECOND",
                },
            ),
        )
@@ -619,6 +657,7 @@ LANGUAGE js AS
                write={
                    "bigquery": "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
                    "databricks": "SELECT DATE_TRUNC('HOUR', '2023-01-01T01:01:01')",
+                    "duckdb": "SELECT DATE_TRUNC('HOUR', CAST('2023-01-01T01:01:01' AS DATETIME))",
                },
            ),
        )
@@ -626,17 +665,24 @@ LANGUAGE js AS
         self.validate_all(
             'SELECT TIMESTAMP_ADD(TIMESTAMP "2008-12-25 15:30:00+00", INTERVAL 10 MINUTE)',
             write={
-                "bigquery": "SELECT TIMESTAMP_ADD(CAST('2008-12-25 15:30:00+00' AS TIMESTAMP), INTERVAL 10 MINUTE)",
-                "databricks": "SELECT DATE_ADD(MINUTE, 10, CAST('2008-12-25 15:30:00+00' AS TIMESTAMP))",
-                "mysql": "SELECT DATE_ADD(TIMESTAMP('2008-12-25 15:30:00+00'), INTERVAL 10 MINUTE)",
-                "spark": "SELECT DATE_ADD(MINUTE, 10, CAST('2008-12-25 15:30:00+00' AS TIMESTAMP))",
+                "bigquery": "SELECT TIMESTAMP_ADD(CAST('2008-12-25 15:30:00+00' AS TIMESTAMP), INTERVAL '10' MINUTE)",
+                "databricks": "SELECT DATE_ADD(MINUTE, '10', CAST('2008-12-25 15:30:00+00' AS TIMESTAMP))",
+                "mysql": "SELECT DATE_ADD(TIMESTAMP('2008-12-25 15:30:00+00'), INTERVAL '10' MINUTE)",
+                "spark": "SELECT DATE_ADD(MINUTE, '10', CAST('2008-12-25 15:30:00+00' AS TIMESTAMP))",
             },
         )
         self.validate_all(
             'SELECT TIMESTAMP_SUB(TIMESTAMP "2008-12-25 15:30:00+00", INTERVAL 10 MINUTE)',
             write={
-                "bigquery": "SELECT TIMESTAMP_SUB(CAST('2008-12-25 15:30:00+00' AS TIMESTAMP), INTERVAL 10 MINUTE)",
-                "mysql": "SELECT DATE_SUB(TIMESTAMP('2008-12-25 15:30:00+00'), INTERVAL 10 MINUTE)",
+                "bigquery": "SELECT TIMESTAMP_SUB(CAST('2008-12-25 15:30:00+00' AS TIMESTAMP), INTERVAL '10' MINUTE)",
+                "mysql": "SELECT DATE_SUB(TIMESTAMP('2008-12-25 15:30:00+00'), INTERVAL '10' MINUTE)",
+            },
+        )
+        self.validate_all(
+            "SELECT TIME_ADD(CAST('09:05:03' AS TIME), INTERVAL 2 HOUR)",
+            write={
+                "bigquery": "SELECT TIME_ADD(CAST('09:05:03' AS TIME), INTERVAL '2' HOUR)",
+                "duckdb": "SELECT CAST('09:05:03' AS TIME) + INTERVAL '2' HOUR",
             },
         )
         self.validate_all(
@@ -779,6 +825,7 @@ LANGUAGE js AS
                 "presto": "SHA256(x)",
                 "redshift": "SHA2(x, 256)",
                 "trino": "SHA256(x)",
+                "duckdb": "SHA256(x)",
             },
         )
         self.validate_all(
@@ -1005,7 +1052,7 @@ LANGUAGE js AS
             write={
                 "bigquery": "SELECT * FROM UNNEST(['7', '14']) AS x",
                 "presto": "SELECT * FROM UNNEST(ARRAY['7', '14']) AS _t0(x)",
-                "spark": "SELECT * FROM UNNEST(ARRAY('7', '14')) AS _t0(x)",
+                "spark": "SELECT * FROM EXPLODE(ARRAY('7', '14')) AS _t0(x)",
             },
         )
         self.validate_all(
@@ -1192,29 +1239,28 @@ LANGUAGE js AS
             "SELECT * FROM a WHERE b IN UNNEST([1, 2, 3])",
             write={
                 "bigquery": "SELECT * FROM a WHERE b IN UNNEST([1, 2, 3])",
-                "mysql": "SELECT * FROM a WHERE b IN (SELECT UNNEST(ARRAY(1, 2, 3)))",
                 "presto": "SELECT * FROM a WHERE b IN (SELECT UNNEST(ARRAY[1, 2, 3]))",
-                "hive": "SELECT * FROM a WHERE b IN (SELECT UNNEST(ARRAY(1, 2, 3)))",
-                "spark": "SELECT * FROM a WHERE b IN (SELECT UNNEST(ARRAY(1, 2, 3)))",
+                "hive": "SELECT * FROM a WHERE b IN (SELECT EXPLODE(ARRAY(1, 2, 3)))",
+                "spark": "SELECT * FROM a WHERE b IN (SELECT EXPLODE(ARRAY(1, 2, 3)))",
             },
         )
         self.validate_all(
             "DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)",
             write={
                 "postgres": "CURRENT_DATE - INTERVAL '1 DAY'",
-                "bigquery": "DATE_SUB(CURRENT_DATE, INTERVAL 1 DAY)",
+                "bigquery": "DATE_SUB(CURRENT_DATE, INTERVAL '1' DAY)",
             },
         )
         self.validate_all(
-            "DATE_ADD(CURRENT_DATE(), INTERVAL 1 DAY)",
+            "DATE_ADD(CURRENT_DATE(), INTERVAL -1 DAY)",
             write={
-                "bigquery": "DATE_ADD(CURRENT_DATE, INTERVAL 1 DAY)",
-                "duckdb": "CURRENT_DATE + INTERVAL 1 DAY",
-                "mysql": "DATE_ADD(CURRENT_DATE, INTERVAL 1 DAY)",
-                "postgres": "CURRENT_DATE + INTERVAL '1 DAY'",
-                "presto": "DATE_ADD('DAY', 1, CURRENT_DATE)",
-                "hive": "DATE_ADD(CURRENT_DATE, 1)",
-                "spark": "DATE_ADD(CURRENT_DATE, 1)",
+                "bigquery": "DATE_ADD(CURRENT_DATE, INTERVAL '-1' DAY)",
+                "duckdb": "CURRENT_DATE + INTERVAL '-1' DAY",
+                "mysql": "DATE_ADD(CURRENT_DATE, INTERVAL '-1' DAY)",
+                "postgres": "CURRENT_DATE + INTERVAL '-1 DAY'",
+                "presto": "DATE_ADD('DAY', CAST('-1' AS BIGINT), CURRENT_DATE)",
+                "hive": "DATE_ADD(CURRENT_DATE, '-1')",
+                "spark": "DATE_ADD(CURRENT_DATE, '-1')",
             },
         )
         self.validate_all(
@@ -1240,6 +1286,13 @@ LANGUAGE js AS
             },
         )
         self.validate_all(
+            "DATE_DIFF('2021-01-01', '2020-01-01', DAY)",
+            write={
+                "bigquery": "DATE_DIFF('2021-01-01', '2020-01-01', DAY)",
+                "duckdb": "DATE_DIFF('DAY', CAST('2020-01-01' AS DATE), CAST('2021-01-01' AS DATE))",
+            },
+        )
+        self.validate_all(
             "CURRENT_DATE('UTC')",
             write={
                 "mysql": "CURRENT_DATE AT TIME ZONE 'UTC'",
@@ -1345,6 +1398,113 @@ WHERE
                 "bigquery": "SELECT CAST(x AS DATETIME)",
             },
         )
+        self.validate_all(
+            "SELECT TIME(foo, 'America/Los_Angeles')",
+            write={
+                "duckdb": "SELECT CAST(CAST(foo AS TIMESTAMPTZ) AT TIME ZONE 'America/Los_Angeles' AS TIME)",
+                "bigquery": "SELECT TIME(foo, 'America/Los_Angeles')",
+            },
+        )
+        self.validate_all(
+            "SELECT DATETIME('2020-01-01')",
+            write={
+                "duckdb": "SELECT CAST('2020-01-01' AS TIMESTAMP)",
+                "bigquery": "SELECT DATETIME('2020-01-01')",
+            },
+        )
+        self.validate_all(
+            "SELECT DATETIME('2020-01-01', TIME '23:59:59')",
+            write={
+                "duckdb": "SELECT CAST(CAST('2020-01-01' AS DATE) + CAST('23:59:59' AS TIME) AS TIMESTAMP)",
+                "bigquery": "SELECT DATETIME('2020-01-01', CAST('23:59:59' AS TIME))",
+            },
+        )
+        self.validate_all(
+            "SELECT DATETIME('2020-01-01', 'America/Los_Angeles')",
+            write={
+                "duckdb": "SELECT CAST(CAST('2020-01-01' AS TIMESTAMPTZ) AT TIME ZONE 'America/Los_Angeles' AS TIMESTAMP)",
+                "bigquery": "SELECT DATETIME('2020-01-01', 'America/Los_Angeles')",
+            },
+        )
+        self.validate_all(
+            "SELECT LENGTH(foo)",
+            read={
+                "bigquery": "SELECT LENGTH(foo)",
+                "snowflake": "SELECT LENGTH(foo)",
+            },
+            write={
+                "duckdb": "SELECT CASE TYPEOF(foo) WHEN 'VARCHAR' THEN LENGTH(CAST(foo AS TEXT)) WHEN 'BLOB' THEN OCTET_LENGTH(CAST(foo AS BLOB)) END",
+                "snowflake": "SELECT LENGTH(foo)",
+                "": "SELECT LENGTH(foo)",
+            },
+        )
+        self.validate_all(
+            "SELECT TIME_DIFF('12:00:00', '12:30:00', MINUTE)",
+            write={
+                "duckdb": "SELECT DATE_DIFF('MINUTE', CAST('12:30:00' AS TIME), CAST('12:00:00' AS TIME))",
+                "bigquery": "SELECT TIME_DIFF('12:00:00', '12:30:00', MINUTE)",
+            },
+        )
+        self.validate_all(
+            "ARRAY_CONCAT([1, 2], [3, 4], [5, 6])",
+            write={
+                "bigquery": "ARRAY_CONCAT([1, 2], [3, 4], [5, 6])",
+                "duckdb": "ARRAY_CONCAT([1, 2], ARRAY_CONCAT([3, 4], [5, 6]))",
+                "postgres": "ARRAY_CAT(ARRAY[1, 2], ARRAY_CAT(ARRAY[3, 4], ARRAY[5, 6]))",
+                "redshift": "ARRAY_CONCAT(ARRAY(1, 2), ARRAY_CONCAT(ARRAY(3, 4), ARRAY(5, 6)))",
+                "snowflake": "ARRAY_CAT([1, 2], ARRAY_CAT([3, 4], [5, 6]))",
+                "hive": "CONCAT(ARRAY(1, 2), ARRAY(3, 4), ARRAY(5, 6))",
+                "spark2": "CONCAT(ARRAY(1, 2), ARRAY(3, 4), ARRAY(5, 6))",
+                "spark": "CONCAT(ARRAY(1, 2), ARRAY(3, 4), ARRAY(5, 6))",
+                "databricks": "CONCAT(ARRAY(1, 2), ARRAY(3, 4), ARRAY(5, 6))",
+                "presto": "CONCAT(ARRAY[1, 2], ARRAY[3, 4], ARRAY[5, 6])",
+                "trino": "CONCAT(ARRAY[1, 2], ARRAY[3, 4], ARRAY[5, 6])",
+            },
+        )
+        self.validate_all(
+            "SELECT GENERATE_DATE_ARRAY('2016-10-05', '2016-10-08')",
+            write={
+                "duckdb": "SELECT CAST(GENERATE_SERIES(CAST('2016-10-05' AS DATE), CAST('2016-10-08' AS DATE), INTERVAL 1 DAY) AS DATE[])",
+                "bigquery": "SELECT GENERATE_DATE_ARRAY('2016-10-05', '2016-10-08', INTERVAL 1 DAY)",
+            },
+        )
+        self.validate_all(
+            "SELECT GENERATE_DATE_ARRAY('2016-10-05', '2016-10-08', INTERVAL '1' MONTH)",
+            write={
+                "duckdb": "SELECT CAST(GENERATE_SERIES(CAST('2016-10-05' AS DATE), CAST('2016-10-08' AS DATE), INTERVAL '1' MONTH) AS DATE[])",
+                "bigquery": "SELECT GENERATE_DATE_ARRAY('2016-10-05', '2016-10-08', INTERVAL '1' MONTH)",
+            },
+        )
+        self.validate_all(
+            "SELECT GENERATE_TIMESTAMP_ARRAY('2016-10-05 00:00:00', '2016-10-07 00:00:00', INTERVAL '1' DAY)",
+            write={
+                "duckdb": "SELECT GENERATE_SERIES(CAST('2016-10-05 00:00:00' AS TIMESTAMP), CAST('2016-10-07 00:00:00' AS TIMESTAMP), INTERVAL '1' DAY)",
+                "bigquery": "SELECT GENERATE_TIMESTAMP_ARRAY('2016-10-05 00:00:00', '2016-10-07 00:00:00', INTERVAL '1' DAY)",
+            },
+        )
+        self.validate_all(
+            "SELECT PARSE_DATE('%A %b %e %Y', 'Thursday Dec 25 2008')",
+            write={
+                "bigquery": "SELECT PARSE_DATE('%A %b %e %Y', 'Thursday Dec 25 2008')",
+                "duckdb": "SELECT CAST(STRPTIME('Thursday Dec 25 2008', '%A %b %-d %Y') AS DATE)",
+            },
+        )
+        self.validate_all(
+            "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--') AS text",
+            write={
+                "bigquery": "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--') AS text",
+                "duckdb": "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--') AS text",
+            },
+        )
+        self.validate_all(
+            "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--', 'MISSING') AS text",
+            write={
+                "bigquery": "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--', 'MISSING') AS text",
+                "duckdb": "SELECT ARRAY_TO_STRING(LIST_TRANSFORM(['cake', 'pie', NULL], x -> COALESCE(x, 'MISSING')), '--') AS text",
+            },
+        )
+
+        self.validate_identity("SELECT * FROM a-b c", "SELECT * FROM a-b AS c")
 
     def test_errors(self):
         with self.assertRaises(TokenError):
@@ -1372,6 +1532,12 @@ WHERE
             transpile("DATE_ADD(x, day)", read="bigquery")
 
     def test_warnings(self):
+        with self.assertLogs(parser_logger) as cm:
+            self.validate_identity(
+                "/* some comment */ DECLARE foo DATE DEFAULT DATE_SUB(current_date, INTERVAL 2 day)"
+            )
+            self.assertIn("contains unsupported syntax", cm.output[0])
+
         with self.assertLogs(helper_logger) as cm:
             self.validate_identity(
                 "WITH cte(c) AS (SELECT * FROM t) SELECT * FROM cte",
@@ -1552,7 +1718,7 @@ WHERE
             "SELECT * FROM GAP_FILL(TABLE device_data, ts_column => 'time', bucket_width => INTERVAL '1' MINUTE, value_columns => [('signal', 'null')], origin => CAST('2023-11-01 09:30:01' AS DATETIME)) ORDER BY time"
         )
         self.validate_identity(
-            "SELECT * FROM GAP_FILL(TABLE (SELECT * FROM UNNEST(ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>[STRUCT(1, CAST('2023-11-01 09:34:01' AS DATETIME), 74, 'INACTIVE'), STRUCT(2, CAST('2023-11-01 09:36:00' AS DATETIME), 77, 'ACTIVE'), STRUCT(3, CAST('2023-11-01 09:37:00' AS DATETIME), 78, 'ACTIVE'), STRUCT(4, CAST('2023-11-01 09:38:01' AS DATETIME), 80, 'ACTIVE')])), ts_column => 'time', bucket_width => INTERVAL '1' MINUTE, value_columns => [('signal', 'linear')]) ORDER BY time"
+            "SELECT * FROM GAP_FILL(TABLE device_data, ts_column => 'time', bucket_width => INTERVAL '1' MINUTE, value_columns => [('signal', 'locf')]) ORDER BY time"
         )
 
     def test_models(self):
@@ -1702,3 +1868,120 @@ OPTIONS (
             "MOD((a + 1), b)",
             "MOD(a + 1, b)",
         )
+
+    def test_inline_constructor(self):
+        self.validate_identity(
+            """SELECT STRUCT<ARRAY<STRING>>(["2023-01-17"])""",
+            """SELECT CAST(STRUCT(['2023-01-17']) AS STRUCT<ARRAY<STRING>>)""",
+        )
+        self.validate_identity(
+            """SELECT STRUCT<STRING>((SELECT 'foo')).*""",
+            """SELECT CAST(STRUCT((SELECT 'foo')) AS STRUCT<STRING>).*""",
+        )
+
+        self.validate_all(
+            "SELECT ARRAY<INT>[1, 2, 3]",
+            write={
+                "bigquery": "SELECT CAST([1, 2, 3] AS ARRAY<INT64>)",
+                "duckdb": "SELECT CAST([1, 2, 3] AS INT[])",
+            },
+        )
+        self.validate_all(
+            "CAST(STRUCT<a INT64>(1) AS STRUCT<a INT64>)",
+            write={
+                "bigquery": "CAST(CAST(STRUCT(1) AS STRUCT<a INT64>) AS STRUCT<a INT64>)",
+                "duckdb": "CAST(CAST(ROW(1) AS STRUCT(a BIGINT)) AS STRUCT(a BIGINT))",
+            },
+        )
+        self.validate_all(
+            "SELECT * FROM UNNEST(ARRAY<STRUCT<x INT64>>[])",
+            write={
+                "bigquery": "SELECT * FROM UNNEST(CAST([] AS ARRAY<STRUCT<x INT64>>))",
+                "duckdb": "SELECT * FROM (SELECT UNNEST(CAST([] AS STRUCT(x BIGINT)[]), max_depth => 2))",
+            },
+        )
+        self.validate_all(
+            "SELECT * FROM UNNEST(ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>[STRUCT(1, DATETIME '2023-11-01 09:34:01', 74, 'INACTIVE'),STRUCT(4, DATETIME '2023-11-01 09:38:01', 80, 'ACTIVE')])",
+            write={
+                "bigquery": "SELECT * FROM UNNEST(CAST([STRUCT(1, CAST('2023-11-01 09:34:01' AS DATETIME), 74, 'INACTIVE'), STRUCT(4, CAST('2023-11-01 09:38:01' AS DATETIME), 80, 'ACTIVE')] AS ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>))",
+                "duckdb": "SELECT * FROM (SELECT UNNEST(CAST([ROW(1, CAST('2023-11-01 09:34:01' AS TIMESTAMP), 74, 'INACTIVE'), ROW(4, CAST('2023-11-01 09:38:01' AS TIMESTAMP), 80, 'ACTIVE')] AS STRUCT(device_id BIGINT, time TIMESTAMP, signal BIGINT, state TEXT)[]), max_depth => 2))",
+            },
+        )
+        self.validate_all(
+            "SELECT STRUCT<a INT64, b STRUCT<c STRING>>(1, STRUCT('c_str'))",
+            write={
+                "bigquery": "SELECT CAST(STRUCT(1, STRUCT('c_str')) AS STRUCT<a INT64, b STRUCT<c STRING>>)",
+                "duckdb": "SELECT CAST(ROW(1, ROW('c_str')) AS STRUCT(a BIGINT, b STRUCT(c TEXT)))",
+            },
+        )
+
+    def test_convert(self):
+        for value, expected in [
+            (datetime.datetime(2023, 1, 1), "CAST('2023-01-01 00:00:00' AS DATETIME)"),
+            (datetime.datetime(2023, 1, 1, 12, 13, 14), "CAST('2023-01-01 12:13:14' AS DATETIME)"),
+            (
+                datetime.datetime(2023, 1, 1, 12, 13, 14, tzinfo=datetime.timezone.utc),
+                "CAST('2023-01-01 12:13:14+00:00' AS TIMESTAMP)",
+            ),
+            (
+                pytz.timezone("America/Los_Angeles").localize(
+                    datetime.datetime(2023, 1, 1, 12, 13, 14)
+                ),
+                "CAST('2023-01-01 12:13:14-08:00' AS TIMESTAMP)",
+            ),
+        ]:
+            with self.subTest(value):
+                self.assertEqual(exp.convert(value).sql(dialect=self.dialect), expected)
+
+    def test_unnest(self):
+        self.validate_all(
+            "SELECT name, laps FROM UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps), STRUCT('Makhloufi' AS name, [24.5, 25.4, 26.6, 26.1] AS laps)])",
+            write={
+                "bigquery": "SELECT name, laps FROM UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps), STRUCT('Makhloufi' AS name, [24.5, 25.4, 26.6, 26.1] AS laps)])",
+                "duckdb": "SELECT name, laps FROM (SELECT UNNEST([{'name': 'Rudisha', 'laps': [23.4, 26.3, 26.4, 26.1]}, {'name': 'Makhloufi', 'laps': [24.5, 25.4, 26.6, 26.1]}], max_depth => 2))",
+            },
+        )
+        self.validate_all(
+            "WITH Races AS (SELECT '800M' AS race) SELECT race, name, laps FROM Races AS r CROSS JOIN UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)])",
+            write={
+                "bigquery": "WITH Races AS (SELECT '800M' AS race) SELECT race, name, laps FROM Races AS r CROSS JOIN UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)])",
+                "duckdb": "WITH Races AS (SELECT '800M' AS race) SELECT race, name, laps FROM Races AS r CROSS JOIN (SELECT UNNEST([{'name': 'Rudisha', 'laps': [23.4, 26.3, 26.4, 26.1]}], max_depth => 2))",
+            },
+        )
+        self.validate_all(
+            "SELECT participant FROM UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)]) AS participant",
+            write={
+                "bigquery": "SELECT participant FROM UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)]) AS participant",
+                "duckdb": "SELECT participant FROM (SELECT UNNEST([{'name': 'Rudisha', 'laps': [23.4, 26.3, 26.4, 26.1]}], max_depth => 2)) AS participant",
+            },
+        )
+        self.validate_all(
+            "WITH Races AS (SELECT '800M' AS race) SELECT race, participant FROM Races AS r CROSS JOIN UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)]) AS participant",
+            write={
+                "bigquery": "WITH Races AS (SELECT '800M' AS race) SELECT race, participant FROM Races AS r CROSS JOIN UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)]) AS participant",
+                "duckdb": "WITH Races AS (SELECT '800M' AS race) SELECT race, participant FROM Races AS r CROSS JOIN (SELECT UNNEST([{'name': 'Rudisha', 'laps': [23.4, 26.3, 26.4, 26.1]}], max_depth => 2)) AS participant",
+            },
+        )
+
+    def test_range_type(self):
+        for type, value in (
+            ("RANGE<DATE>", "'[2020-01-01, 2020-12-31)'"),
+            ("RANGE<DATE>", "'[UNBOUNDED, 2020-12-31)'"),
+            ("RANGE<DATETIME>", "'[2020-01-01 12:00:00, 2020-12-31 12:00:00)'"),
+            ("RANGE<TIMESTAMP>", "'[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)'"),
+        ):
+            with self.subTest(f"Testing BigQuery's RANGE<T> type: {type} {value}"):
+                self.validate_identity(f"SELECT {type} {value}", f"SELECT CAST({value} AS {type})")
+
+                self.assertEqual(self.parse_one(type), exp.DataType.build(type, dialect="bigquery"))
+
+        self.validate_identity(
+            "SELECT RANGE(CAST('2022-12-01' AS DATE), CAST('2022-12-31' AS DATE))"
+        )
+        self.validate_identity("SELECT RANGE(NULL, CAST('2022-12-31' AS DATE))")
+        self.validate_identity(
+            "SELECT RANGE(CAST('2022-10-01 14:53:27' AS DATETIME), CAST('2022-10-01 16:00:00' AS DATETIME))"
+        )
+        self.validate_identity(
+            "SELECT RANGE(CAST('2022-10-01 14:53:27 America/Los_Angeles' AS TIMESTAMP), CAST('2022-10-01 16:00:00 America/Los_Angeles' AS TIMESTAMP))"
+        )
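
Aside (not part of the commit): the new DuckDB expectations in these tests can be reproduced with sqlglot's public transpile API; a minimal sketch, assuming a sqlglot version that includes this change:

# Illustrative only -- reproduces one of the BigQuery -> DuckDB conversions asserted above.
import sqlglot

sql = "SAFE_CAST(some_date AS DATE FORMAT 'DD MONTH YYYY')"
print(sqlglot.transpile(sql, read="bigquery", write="duckdb")[0])
# Expected per the test case above:
# CAST(TRY_STRPTIME(some_date, '%d %B %Y') AS DATE)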