author      Daniel Baumann <daniel.baumann@progress-linux.org>  2024-02-20 09:37:57 +0000
committer   Daniel Baumann <daniel.baumann@progress-linux.org>  2024-02-20 09:37:57 +0000
commit      5c70c63284a8ff61607db1a51ac2829b74f71c1c (patch)
tree        10a81ffbd8da8cae58e292848cbdd0550d08721d /tests/dialects/test_bigquery.py
parent      Adding upstream version 21.1.1. (diff)
download    sqlglot-5c70c63284a8ff61607db1a51ac2829b74f71c1c.tar.xz
            sqlglot-5c70c63284a8ff61607db1a51ac2829b74f71c1c.zip

Adding upstream version 21.1.2. (upstream/21.1.2)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_bigquery.py')
-rw-r--r--  tests/dialects/test_bigquery.py  | 203
1 file changed, 121 insertions(+), 82 deletions(-)
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index f231179..8c47948 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -18,88 +18,6 @@ class TestBigQuery(Validator):
maxDiff = None
def test_bigquery(self):
- with self.assertLogs(helper_logger) as cm:
- statements = parse(
- """
- BEGIN
- DECLARE 1;
- IF from_date IS NULL THEN SET x = 1;
- END IF;
- END
- """,
- read="bigquery",
- )
- self.assertIn("unsupported syntax", cm.output[0])
-
- for actual, expected in zip(
- statements, ("BEGIN DECLARE 1", "IF from_date IS NULL THEN SET x = 1", "END IF", "END")
- ):
- self.assertEqual(actual.sql(dialect="bigquery"), expected)
-
- with self.assertLogs(helper_logger) as cm:
- self.validate_identity(
- "SELECT * FROM t AS t(c1, c2)",
- "SELECT * FROM t AS t",
- )
-
- self.assertEqual(
- cm.output, ["WARNING:sqlglot:Named columns are not supported in table alias."]
- )
-
- with self.assertLogs(helper_logger) as cm:
- self.validate_all(
- "SELECT a[1], b[OFFSET(1)], c[ORDINAL(1)], d[SAFE_OFFSET(1)], e[SAFE_ORDINAL(1)]",
- write={
- "duckdb": "SELECT a[2], b[2], c[1], d[2], e[1]",
- "bigquery": "SELECT a[1], b[OFFSET(1)], c[ORDINAL(1)], d[SAFE_OFFSET(1)], e[SAFE_ORDINAL(1)]",
- "presto": "SELECT a[2], b[2], c[1], ELEMENT_AT(d, 2), ELEMENT_AT(e, 1)",
- },
- )
-
- self.validate_all(
- "a[0]",
- read={
- "bigquery": "a[0]",
- "duckdb": "a[1]",
- "presto": "a[1]",
- },
- )
-
- with self.assertRaises(TokenError):
- transpile("'\\'", read="bigquery")
-
- # Reference: https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#set_operators
- with self.assertRaises(UnsupportedError):
- transpile(
- "SELECT * FROM a INTERSECT ALL SELECT * FROM b",
- write="bigquery",
- unsupported_level=ErrorLevel.RAISE,
- )
-
- with self.assertRaises(UnsupportedError):
- transpile(
- "SELECT * FROM a EXCEPT ALL SELECT * FROM b",
- write="bigquery",
- unsupported_level=ErrorLevel.RAISE,
- )
-
- with self.assertRaises(ParseError):
- transpile("SELECT * FROM UNNEST(x) AS x(y)", read="bigquery")
-
- with self.assertRaises(ParseError):
- transpile("DATE_ADD(x, day)", read="bigquery")
-
- with self.assertLogs(parser_logger) as cm:
- for_in_stmts = parse(
- "FOR record IN (SELECT word FROM shakespeare) DO SELECT record.word; END FOR;",
- read="bigquery",
- )
- self.assertEqual(
- [s.sql(dialect="bigquery") for s in for_in_stmts],
- ["FOR record IN (SELECT word FROM shakespeare) DO SELECT record.word", "END FOR"],
- )
- assert "'END FOR'" in cm.output[0]
-
self.validate_identity("CREATE SCHEMA x DEFAULT COLLATE 'en'")
self.validate_identity("CREATE TABLE x (y INT64) DEFAULT COLLATE 'en'")
self.validate_identity("PARSE_JSON('{}', wide_number_mode => 'exact')")
@@ -1086,6 +1004,127 @@ WHERE
pretty=True,
)
+ def test_errors(self):
+ with self.assertRaises(TokenError):
+ transpile("'\\'", read="bigquery")
+
+ # Reference: https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#set_operators
+ with self.assertRaises(UnsupportedError):
+ transpile(
+ "SELECT * FROM a INTERSECT ALL SELECT * FROM b",
+ write="bigquery",
+ unsupported_level=ErrorLevel.RAISE,
+ )
+
+ with self.assertRaises(UnsupportedError):
+ transpile(
+ "SELECT * FROM a EXCEPT ALL SELECT * FROM b",
+ write="bigquery",
+ unsupported_level=ErrorLevel.RAISE,
+ )
+
+ with self.assertRaises(ParseError):
+ transpile("SELECT * FROM UNNEST(x) AS x(y)", read="bigquery")
+
+ with self.assertRaises(ParseError):
+ transpile("DATE_ADD(x, day)", read="bigquery")
+
+ def test_warnings(self):
+ with self.assertLogs(helper_logger) as cm:
+ self.validate_identity(
+ "WITH cte(c) AS (SELECT * FROM t) SELECT * FROM cte",
+ "WITH cte AS (SELECT * FROM t) SELECT * FROM cte",
+ )
+
+ self.assertIn("Can't push down CTE column names for star queries.", cm.output[0])
+ self.assertIn("Named columns are not supported in table alias.", cm.output[1])
+
+ with self.assertLogs(helper_logger) as cm:
+ self.validate_identity(
+ "SELECT * FROM t AS t(c1, c2)",
+ "SELECT * FROM t AS t",
+ )
+
+ self.assertIn("Named columns are not supported in table alias.", cm.output[0])
+
+ with self.assertLogs(helper_logger) as cm:
+ statements = parse(
+ """
+ BEGIN
+ DECLARE 1;
+ IF from_date IS NULL THEN SET x = 1;
+ END IF;
+ END
+ """,
+ read="bigquery",
+ )
+
+ for actual, expected in zip(
+ statements,
+ ("BEGIN DECLARE 1", "IF from_date IS NULL THEN SET x = 1", "END IF", "END"),
+ ):
+ self.assertEqual(actual.sql(dialect="bigquery"), expected)
+
+ self.assertIn("unsupported syntax", cm.output[0])
+
+ with self.assertLogs(helper_logger) as cm:
+ statements = parse(
+ """
+ BEGIN CALL `project_id.dataset_id.stored_procedure_id`();
+ EXCEPTION WHEN ERROR THEN INSERT INTO `project_id.dataset_id.table_id` SELECT @@error.message, CURRENT_TIMESTAMP();
+ END
+ """,
+ read="bigquery",
+ )
+
+ expected_statements = (
+ "BEGIN CALL `project_id.dataset_id.stored_procedure_id`()",
+ "EXCEPTION WHEN ERROR THEN INSERT INTO `project_id.dataset_id.table_id` SELECT @@error.message, CURRENT_TIMESTAMP()",
+ "END",
+ )
+
+ for actual, expected in zip(statements, expected_statements):
+ self.assertEqual(actual.sql(dialect="bigquery"), expected)
+
+ self.assertIn("unsupported syntax", cm.output[0])
+
+ with self.assertLogs(helper_logger) as cm:
+ self.validate_identity(
+ "SELECT * FROM t AS t(c1, c2)",
+ "SELECT * FROM t AS t",
+ )
+
+ self.assertIn("Named columns are not supported in table alias.", cm.output[0])
+
+ with self.assertLogs(helper_logger):
+ self.validate_all(
+ "SELECT a[1], b[OFFSET(1)], c[ORDINAL(1)], d[SAFE_OFFSET(1)], e[SAFE_ORDINAL(1)]",
+ write={
+ "duckdb": "SELECT a[2], b[2], c[1], d[2], e[1]",
+ "bigquery": "SELECT a[1], b[OFFSET(1)], c[ORDINAL(1)], d[SAFE_OFFSET(1)], e[SAFE_ORDINAL(1)]",
+ "presto": "SELECT a[2], b[2], c[1], ELEMENT_AT(d, 2), ELEMENT_AT(e, 1)",
+ },
+ )
+ self.validate_all(
+ "a[0]",
+ read={
+ "bigquery": "a[0]",
+ "duckdb": "a[1]",
+ "presto": "a[1]",
+ },
+ )
+
+ with self.assertLogs(parser_logger) as cm:
+ for_in_stmts = parse(
+ "FOR record IN (SELECT word FROM shakespeare) DO SELECT record.word; END FOR;",
+ read="bigquery",
+ )
+ self.assertEqual(
+ [s.sql(dialect="bigquery") for s in for_in_stmts],
+ ["FOR record IN (SELECT word FROM shakespeare) DO SELECT record.word", "END FOR"],
+ )
+ self.assertIn("'END FOR'", cm.output[0])
+
def test_user_defined_functions(self):
self.validate_identity(
"CREATE TEMPORARY FUNCTION a(x FLOAT64, y FLOAT64) RETURNS FLOAT64 NOT DETERMINISTIC LANGUAGE js AS 'return x*y;'"