From 6ea73443a6a81d2477f8eab80dcf1a9206a44d5e Mon Sep 17 00:00:00 2001
From: Daniel Baumann <daniel.baumann@progress-linux.org>
Date: Tue, 23 Jul 2024 13:12:26 +0200
Subject: Adding upstream version 25.6.1.

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
---
 tests/dialects/test_bigquery.py              | 58 +++++++++++++++++++++++++---
 tests/dialects/test_clickhouse.py            |  8 ++++
 tests/dialects/test_databricks.py            | 16 ++++++++
 tests/dialects/test_dialect.py               | 41 +++++++++++++++++++-
 tests/dialects/test_duckdb.py                |  8 +++-
 tests/dialects/test_postgres.py              | 13 +++++++
 tests/dialects/test_presto.py                |  8 ++++
 tests/dialects/test_spark.py                 |  2 +
 tests/fixtures/identity.sql                  |  9 +----
 tests/fixtures/optimizer/annotate_types.sql  | 12 ++++++
 tests/fixtures/optimizer/qualify_columns.sql | 30 ++++++++++++--
 tests/fixtures/optimizer/qualify_tables.sql  |  3 ++
 12 files changed, 188 insertions(+), 20 deletions(-)

diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index 803ac11..16a89b8 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -138,7 +138,6 @@ LANGUAGE js AS
         self.validate_identity("SELECT CAST(CURRENT_DATE AS STRING FORMAT 'DAY') AS current_day")
         self.validate_identity("SAFE_CAST(encrypted_value AS STRING FORMAT 'BASE64')")
         self.validate_identity("CAST(encrypted_value AS STRING FORMAT 'BASE64')")
-        self.validate_identity("CAST(STRUCT(1) AS STRUCT<a INT64>)")
         self.validate_identity("STRING_AGG(a)")
         self.validate_identity("STRING_AGG(a, ' & ')")
         self.validate_identity("STRING_AGG(DISTINCT a, ' & ')")
@@ -162,12 +161,9 @@ LANGUAGE js AS
         self.validate_identity("x <> ''")
         self.validate_identity("DATE_TRUNC(col, WEEK(MONDAY))")
         self.validate_identity("SELECT b'abc'")
-        self.validate_identity("""SELECT * FROM UNNEST(ARRAY<STRUCT<x INT64>>[])""")
         self.validate_identity("SELECT AS STRUCT 1 AS a, 2 AS b")
         self.validate_identity("SELECT DISTINCT AS STRUCT 1 AS a, 2 AS b")
         self.validate_identity("SELECT AS VALUE STRUCT(1 AS a, 2 AS b)")
-        self.validate_identity("SELECT STRUCT<ARRAY<STRING>>(['2023-01-17'])")
-        self.validate_identity("SELECT STRUCT<STRING>((SELECT a FROM b.c LIMIT 1)).*")
         self.validate_identity("SELECT * FROM q UNPIVOT(values FOR quarter IN (b, c))")
         self.validate_identity("""CREATE TABLE x (a STRUCT<values ARRAY<INT64>>)""")
         self.validate_identity("""CREATE TABLE x (a STRUCT<b STRING OPTIONS (description='b')>)""")
@@ -1427,6 +1423,12 @@ WHERE
             transpile("DATE_ADD(x, day)", read="bigquery")
 
     def test_warnings(self):
+        with self.assertLogs(parser_logger) as cm:
+            self.validate_identity(
+                "/* some comment */ DECLARE foo DATE DEFAULT DATE_SUB(current_date, INTERVAL 2 day)"
+            )
+            self.assertIn("contains unsupported syntax", cm.output[0])
+
         with self.assertLogs(helper_logger) as cm:
             self.validate_identity(
                 "WITH cte(c) AS (SELECT * FROM t) SELECT * FROM cte",
@@ -1607,7 +1609,7 @@ WHERE
             "SELECT * FROM GAP_FILL(TABLE device_data, ts_column => 'time', bucket_width => INTERVAL '1' MINUTE, value_columns => [('signal', 'null')], origin => CAST('2023-11-01 09:30:01' AS DATETIME)) ORDER BY time"
         )
         self.validate_identity(
-            "SELECT * FROM GAP_FILL(TABLE (SELECT * FROM UNNEST(ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>[STRUCT(1, CAST('2023-11-01 09:34:01' AS DATETIME), 74, 'INACTIVE'), STRUCT(2, CAST('2023-11-01 09:36:00' AS DATETIME), 77, 'ACTIVE'), STRUCT(3, CAST('2023-11-01 09:37:00' AS DATETIME), 78, 'ACTIVE'), STRUCT(4, CAST('2023-11-01 09:38:01' AS DATETIME), 80, 'ACTIVE')])), ts_column => 'time', bucket_width => INTERVAL '1' MINUTE, value_columns => [('signal', 'linear')]) ORDER BY time"
+            "SELECT * FROM GAP_FILL(TABLE device_data, ts_column => 'time', bucket_width => INTERVAL '1' MINUTE, value_columns => [('signal', 'locf')]) ORDER BY time"
         )
 
     def test_models(self):
@@ -1757,3 +1759,49 @@ OPTIONS (
             "MOD((a + 1), b)",
             "MOD(a + 1, b)",
         )
+
+    def test_inline_constructor(self):
+        self.validate_identity(
+            """SELECT STRUCT<ARRAY<STRING>>(["2023-01-17"])""",
+            """SELECT CAST(STRUCT(['2023-01-17']) AS STRUCT<ARRAY<STRING>>)""",
+        )
+        self.validate_identity(
+            """SELECT STRUCT<STRING>((SELECT 'foo')).*""",
+            """SELECT CAST(STRUCT((SELECT 'foo')) AS STRUCT<STRING>).*""",
+        )
+
+        self.validate_all(
+            "SELECT ARRAY<INT>[1, 2, 3]",
+            write={
+                "bigquery": "SELECT CAST([1, 2, 3] AS ARRAY<INT64>)",
+                "duckdb": "SELECT CAST([1, 2, 3] AS INT[])",
+            },
+        )
+        self.validate_all(
+            "CAST(STRUCT(1) AS STRUCT<a INT64>)",
+            write={
+                "bigquery": "CAST(CAST(STRUCT(1) AS STRUCT<a INT64>) AS STRUCT<a INT64>)",
+                "duckdb": "CAST(CAST(ROW(1) AS STRUCT(a BIGINT)) AS STRUCT(a BIGINT))",
+            },
+        )
+        self.validate_all(
+            "SELECT * FROM UNNEST(ARRAY<STRUCT<x INT64>>[])",
+            write={
+                "bigquery": "SELECT * FROM UNNEST(CAST([] AS ARRAY<STRUCT<x INT64>>))",
+                "duckdb": "SELECT * FROM UNNEST(CAST([] AS STRUCT(x BIGINT)[]))",
+            },
+        )
+        self.validate_all(
+            "SELECT * FROM UNNEST(ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>[STRUCT(1, DATETIME '2023-11-01 09:34:01', 74, 'INACTIVE'),STRUCT(4, DATETIME '2023-11-01 09:38:01', 80, 'ACTIVE')])",
+            write={
+                "bigquery": "SELECT * FROM UNNEST(CAST([STRUCT(1, CAST('2023-11-01 09:34:01' AS DATETIME), 74, 'INACTIVE'), STRUCT(4, CAST('2023-11-01 09:38:01' AS DATETIME), 80, 'ACTIVE')] AS ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>))",
+                "duckdb": "SELECT * FROM UNNEST(CAST([ROW(1, CAST('2023-11-01 09:34:01' AS TIMESTAMP), 74, 'INACTIVE'), ROW(4, CAST('2023-11-01 09:38:01' AS TIMESTAMP), 80, 'ACTIVE')] AS STRUCT(device_id BIGINT, time TIMESTAMP, signal BIGINT, state TEXT)[]))",
+            },
+        )
+        self.validate_all(
+            "SELECT STRUCT<a INT64, b STRUCT<c STRING>>(1, STRUCT('c_str'))",
+            write={
+                "bigquery": "SELECT CAST(STRUCT(1, STRUCT('c_str')) AS STRUCT<a INT64, b STRUCT<c STRING>>)",
+                "duckdb": "SELECT CAST(ROW(1, ROW('c_str')) AS STRUCT(a BIGINT, b STRUCT(c TEXT)))",
+            },
+        )
diff --git a/tests/dialects/test_clickhouse.py b/tests/dialects/test_clickhouse.py
index ef84d48..8f8c7f0 100644
--- a/tests/dialects/test_clickhouse.py
+++ b/tests/dialects/test_clickhouse.py
@@ -83,6 +83,9 @@ class TestClickhouse(Validator):
         self.validate_identity("TRUNCATE TABLE t1 ON CLUSTER test_cluster")
         self.validate_identity("TRUNCATE DATABASE db")
         self.validate_identity("TRUNCATE DATABASE db ON CLUSTER test_cluster")
+        self.validate_identity(
+            "SELECT number, COUNT() OVER (PARTITION BY number % 3) AS partition_count FROM numbers(10) WINDOW window_name AS (PARTITION BY number) QUALIFY partition_count = 4 ORDER BY number"
+        )
         self.validate_identity(
             "SELECT id, quantileGK(100, 0.95)(reading) OVER (PARTITION BY id ORDER BY id RANGE BETWEEN 30000 PRECEDING AND CURRENT ROW) AS window FROM table"
         )
@@ -448,6 +451,10 @@ class TestClickhouse(Validator):
         self.validate_identity("ALTER TABLE visits REPLACE PARTITION ID '201901' FROM visits_tmp")
         self.validate_identity("ALTER TABLE visits ON CLUSTER test_cluster DROP COLUMN col1")
 
+        self.assertIsInstance(
+            parse_one("Tuple(select Int64)", into=exp.DataType, read="clickhouse"), exp.DataType
+        )
+
     def test_cte(self):
         self.validate_identity("WITH 'x' AS foo SELECT foo")
         self.validate_identity("WITH ['c'] AS field_names SELECT field_names")
@@ -545,6 +552,7 @@ class TestClickhouse(Validator):
         self.validate_identity(
             "CREATE TABLE foo (x UInt32) TTL time_column + INTERVAL '1' MONTH DELETE WHERE column = 'value'"
         )
+        self.validate_identity("CREATE TABLE named_tuples (a Tuple(select String, i Int64))")
 
         self.validate_all(
             """
diff --git a/tests/dialects/test_databricks.py b/tests/dialects/test_databricks.py
index 471830f..86612cc 100644
--- a/tests/dialects/test_databricks.py
+++ b/tests/dialects/test_databricks.py
@@ -98,6 +98,22 @@ class TestDatabricks(Validator):
             read="databricks",
         )
 
+        self.validate_all(
+            "CREATE OR REPLACE FUNCTION func(a BIGINT, b BIGINT) RETURNS TABLE (a INT) RETURN SELECT a",
+            write={
+                "databricks": "CREATE OR REPLACE FUNCTION func(a BIGINT, b BIGINT) RETURNS TABLE (a INT) RETURN SELECT a",
+                "duckdb": "CREATE OR REPLACE FUNCTION func(a, b) AS TABLE SELECT a",
+            },
+        )
+
+        self.validate_all(
+            "CREATE OR REPLACE FUNCTION func(a BIGINT, b BIGINT) RETURNS BIGINT RETURN a",
+            write={
+                "databricks": "CREATE OR REPLACE FUNCTION func(a BIGINT, b BIGINT) RETURNS BIGINT RETURN a",
+                "duckdb": "CREATE OR REPLACE FUNCTION func(a, b) AS a",
+            },
+        )
+
     # https://docs.databricks.com/sql/language-manual/functions/colonsign.html
     def test_json(self):
         self.validate_identity("SELECT c1:price, c1:price.foo, c1:price.bar[1]")
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index c0afb2f..e31b114 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -1164,6 +1164,13 @@ class TestDialect(Validator):
             },
         )
 
+        order_by_all_sql = "SELECT * FROM t ORDER BY ALL"
+        self.validate_identity(order_by_all_sql).find(exp.Ordered).this.assert_is(exp.Column)
+
+        for dialect in ("duckdb", "spark", "databricks"):
+            with self.subTest(f"Testing ORDER BY ALL in {dialect}"):
+                parse_one(order_by_all_sql, read=dialect).find(exp.Ordered).this.assert_is(exp.Var)
+
     def test_json(self):
         self.validate_all(
             """JSON_EXTRACT(x, '$["a b"]')""",
@@ -2267,7 +2274,7 @@ SELECT
             write={
                 "duckdb": "SELECT * FROM t QUALIFY COUNT(*) OVER () > 1",
                 "snowflake": "SELECT * FROM t QUALIFY COUNT(*) OVER () > 1",
-                "clickhouse": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
+                "clickhouse": "SELECT * FROM t QUALIFY COUNT(*) OVER () > 1",
                 "mysql": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
                 "oracle": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) _t WHERE _w > 1",
                 "postgres": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
@@ -2279,7 +2286,7 @@ SELECT
             write={
                 "duckdb": 'SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id" FROM t QUALIFY COUNT(*) OVER () > 1',
                 "snowflake": 'SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id" FROM t QUALIFY COUNT(*) OVER () > 1',
-                "clickhouse": 'SELECT "user id", some_id, other_id, "2 nd id" FROM (SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id", COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1',
+                "clickhouse": 'SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id" FROM t QUALIFY COUNT(*) OVER () > 1',
                 "mysql": "SELECT `user id`, some_id, other_id, `2 nd id` FROM (SELECT `user id`, some_id, 1 AS other_id, 2 AS `2 nd id`, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
                 "oracle": 'SELECT "user id", some_id, other_id, "2 nd id" FROM (SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id", COUNT(*) OVER () AS _w FROM t) _t WHERE _w > 1',
                 "postgres": 'SELECT "user id", some_id, other_id, "2 nd id" FROM (SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id", COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1',
@@ -2566,3 +2573,33 @@ FROM subquery2""",
             """SELECT partition.d FROM t PARTITION (d)""",
             """SELECT partition.d FROM t AS PARTITION(d)""",
         )
+
+    def test_string_functions(self):
+        for pad_func in ("LPAD", "RPAD"):
+            ch_alias = "LEFTPAD" if pad_func == "LPAD" else "RIGHTPAD"
+            for fill_pattern in ("", ", ' '"):
+                with self.subTest(f"Testing {pad_func}() with pattern {fill_pattern}"):
+                    self.validate_all(
+                        f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                        read={
+                            "snowflake": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "databricks": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "spark": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "postgres": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "clickhouse": f"SELECT {ch_alias}('bar', 5{fill_pattern})",
+                        },
+                        write={
+                            "": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "spark": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "postgres": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "clickhouse": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "snowflake": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "databricks": f"SELECT {pad_func}('bar', 5{fill_pattern})",
+                            "duckdb": f"SELECT {pad_func}('bar', 5, ' ')",
+                            "mysql": f"SELECT {pad_func}('bar', 5, ' ')",
+                            "hive": f"SELECT {pad_func}('bar', 5, ' ')",
+                            "spark2": f"SELECT {pad_func}('bar', 5, ' ')",
+                            "presto": f"SELECT {pad_func}('bar', 5, ' ')",
+                            "trino": f"SELECT {pad_func}('bar', 5, ' ')",
+                        },
+                    )
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index e0b0131..bcabd20 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -361,6 +361,10 @@ class TestDuckDB(Validator):
         self.validate_identity(
             "SELECT * FROM (PIVOT Cities ON Year USING SUM(Population) GROUP BY Country) AS pivot_alias"
         )
+        self.validate_identity(
+            # QUALIFY comes after WINDOW
+            "SELECT schema_name, function_name, ROW_NUMBER() OVER my_window AS function_rank FROM DUCKDB_FUNCTIONS() WINDOW my_window AS (PARTITION BY schema_name ORDER BY function_name) QUALIFY ROW_NUMBER() OVER my_window < 3"
+        )
 
         self.validate_identity("DATE_SUB('YEAR', col, '2020-01-01')").assert_is(exp.Anonymous)
         self.validate_identity("DATESUB('YEAR', col, '2020-01-01')").assert_is(exp.Anonymous)
@@ -1038,11 +1042,11 @@ class TestDuckDB(Validator):
         )
         self.validate_identity(
             "CAST([[STRUCT_PACK(a := 1)]] AS STRUCT(a BIGINT)[][])",
-            "CAST([[{'a': 1}]] AS STRUCT(a BIGINT)[][])",
+            "CAST([[ROW(1)]] AS STRUCT(a BIGINT)[][])",
         )
         self.validate_identity(
             "CAST([STRUCT_PACK(a := 1)] AS STRUCT(a BIGINT)[])",
-            "CAST([{'a': 1}] AS STRUCT(a BIGINT)[])",
+            "CAST([ROW(1)] AS STRUCT(a BIGINT)[])",
         )
 
         self.validate_all(
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 816a283..16191c5 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -977,6 +977,10 @@ class TestPostgres(Validator):
             },
         )
 
+        self.validate_identity("CREATE TABLE tbl (col INT UNIQUE NULLS NOT DISTINCT DEFAULT 9.99)")
+        self.validate_identity("CREATE TABLE tbl (col UUID UNIQUE DEFAULT GEN_RANDOM_UUID())")
+        self.validate_identity("CREATE TABLE tbl (col UUID, UNIQUE NULLS NOT DISTINCT (col))")
+
         with self.assertRaises(ParseError):
             transpile("CREATE TABLE products (price DECIMAL CHECK price > 0)", read="postgres")
         with self.assertRaises(ParseError):
@@ -1130,3 +1134,12 @@ CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(boxcrate AS JSON)) AS x(tbox)
 CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(JSON_EXTRACT_PATH(tbox, 'boxes') AS JSON)) AS y(boxes)"""
 
         self.validate_all(expected_postgres, read={"trino": trino_input}, pretty=True)
+
+    def test_rows_from(self):
+        self.validate_identity("""SELECT * FROM ROWS FROM (FUNC1(col1, col2))""")
+        self.validate_identity(
+            """SELECT * FROM ROWS FROM (FUNC1(col1) AS alias1("col1" TEXT), FUNC2(col2) AS alias2("col2" INT)) WITH ORDINALITY"""
+        )
+        self.validate_identity(
+            """SELECT * FROM table1, ROWS FROM (FUNC1(col1) AS alias1("col1" TEXT)) WITH ORDINALITY AS alias3("col3" INT, "col4" TEXT)"""
+        )
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index dbe3abc..a56d4a8 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -405,6 +405,14 @@ class TestPresto(Validator):
         )
         self.validate_identity("DATE_ADD('DAY', 1, y)")
 
+        self.validate_all(
+            "SELECT DATE_ADD('MINUTE', 30, col)",
+            write={
+                "presto": "SELECT DATE_ADD('MINUTE', 30, col)",
+                "trino": "SELECT DATE_ADD('MINUTE', 30, col)",
+            },
+        )
+
     def test_ddl(self):
         self.validate_all(
             "CREATE TABLE test WITH (FORMAT = 'PARQUET') AS SELECT 1",
diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index 4e62b32..fc3146f 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -684,6 +684,8 @@ TBLPROPERTIES (
             write={
                 "spark": "SELECT DATE_ADD(MONTH, 20, col)",
                 "databricks": "SELECT DATE_ADD(MONTH, 20, col)",
+                "presto": "SELECT DATE_ADD('MONTH', 20, col)",
+                "trino": "SELECT DATE_ADD('MONTH', 20, col)",
             },
         )
 
diff --git a/tests/fixtures/identity.sql b/tests/fixtures/identity.sql
index 433c23d..5b1decd 100644
--- a/tests/fixtures/identity.sql
+++ b/tests/fixtures/identity.sql
@@ -53,6 +53,7 @@ x >> 1 | 1 & 1 ^ 1
 x || y
 x[ : ]
 x[1 : ]
+x[ : 2]
 x[1 : 2]
 x[-4 : -1]
 1 - -1
@@ -777,14 +778,6 @@ ALTER TABLE table1 RENAME COLUMN IF EXISTS c1 TO c2
 SELECT div.a FROM test_table AS div
 WITH view AS (SELECT 1 AS x) SELECT * FROM view
 ARRAY<STRUCT<INT, DOUBLE, ARRAY<BIGINT>>>
-ARRAY<INT>[1, 2, 3]
-ARRAY<INT>[]
-STRUCT<x INT, y TEXT>
-STRUCT<VARCHAR(10)>("bla")
-STRUCT<x VARCHAR(10)>("bla")
-STRUCT<INT>(5)
-STRUCT<DATE>("2011-05-05")
-STRUCT<x INT, y TEXT>(1, t.str_col)
 STRUCT<int INT>
 SELECT CAST(NULL AS ARRAY<INT>) IS NULL AS array_is_null
 ALTER TABLE "schema"."tablename" ADD CONSTRAINT "CHK_Name" CHECK (NOT "IdDwh" IS NULL AND "IdDwh" <> (0))
diff --git a/tests/fixtures/optimizer/annotate_types.sql b/tests/fixtures/optimizer/annotate_types.sql
index e781765..589f0ec 100644
--- a/tests/fixtures/optimizer/annotate_types.sql
+++ b/tests/fixtures/optimizer/annotate_types.sql
@@ -55,3 +55,15 @@ STRUCT<col INT>;
 # dialect: presto
 ROW(1, 2.5, 'foo');
 STRUCT<INT, DOUBLE, VARCHAR>;
+
+# dialect: bigquery
+EXTRACT(date from x);
+DATE;
+
+# dialect: bigquery
+EXTRACT(time from x);
+TIME;
+
+# dialect: bigquery
+EXTRACT(day from x);
+INT;
diff --git a/tests/fixtures/optimizer/qualify_columns.sql b/tests/fixtures/optimizer/qualify_columns.sql
index 836bcf2..444a8e0 100644
--- a/tests/fixtures/optimizer/qualify_columns.sql
+++ b/tests/fixtures/optimizer/qualify_columns.sql
@@ -385,14 +385,14 @@ WITH player AS (SELECT player.name, player.asset.info FROM players) SELECT * FRO
 WITH player AS (SELECT players.player.name AS name, players.player.asset.info AS info FROM players AS players) SELECT player.name AS name, player.info AS info FROM player AS player;
 
 --------------------------------------
--- Except and Replace
+-- Except, Replace, Rename
 --------------------------------------
 # execute: false
-SELECT * REPLACE(a AS d) FROM x;
+SELECT * RENAME(a AS d) FROM x;
 SELECT x.a AS d, x.b AS b FROM x AS x;
 
 # execute: false
-SELECT * EXCEPT(b) REPLACE(a AS d) FROM x;
+SELECT * EXCEPT(b) RENAME(a AS d) FROM x;
 SELECT x.a AS d FROM x AS x;
 
 SELECT x.* EXCEPT(a), y.* FROM x, y;
@@ -416,6 +416,30 @@ SELECT x.a AS a, x.b AS b, y.b AS b FROM x AS x LEFT JOIN x AS y ON x.a = y.a;
 SELECT COALESCE(CAST(t1.a AS VARCHAR), '') AS a, t2.* EXCEPT (a) FROM x AS t1, x AS t2;
 SELECT COALESCE(CAST(t1.a AS VARCHAR), '') AS a, t2.b AS b FROM x AS t1, x AS t2;
 
+# execute: false
+SELECT * REPLACE(2 AS a) FROM x;
+SELECT 2 AS a, x.b AS b FROM x AS x;
+
+# execute: false
+SELECT * EXCEPT (a, b) REPLACE (a AS a) FROM x;
+SELECT * EXCEPT (a, b) REPLACE (x.a AS a) FROM x AS x;
+
+# execute: false
+SELECT * REPLACE(COALESCE(b, a) AS a, a as b) FROM x;
+SELECT COALESCE(x.b, x.a) AS a, x.a AS b FROM x AS x;
+
+# execute: false
+SELECT * REPLACE(1 AS a) RENAME(b as alias_b) FROM x;
+SELECT 1 AS a, x.b AS alias_b FROM x AS x;
+
+# execute: false
+SELECT * EXCEPT(a) REPLACE(COALESCE(a, b) AS b) RENAME(b AS new_b) FROM x;
+SELECT COALESCE(x.a, x.b) AS new_b FROM x AS x;
+
+# execute: false
+SELECT * REPLACE(1 AS a, a AS b) RENAME(b AS new_b) FROM x;
+SELECT 1 AS a, x.a AS new_b FROM x AS x;
+
 --------------------------------------
 -- Using
 --------------------------------------
diff --git a/tests/fixtures/optimizer/qualify_tables.sql b/tests/fixtures/optimizer/qualify_tables.sql
index 30bf834..4b7d33d 100644
--- a/tests/fixtures/optimizer/qualify_tables.sql
+++ b/tests/fixtures/optimizer/qualify_tables.sql
@@ -177,3 +177,6 @@ SELECT a.id, a_2.id FROM c.db1.a AS a JOIN c.db2.a AS a_2 ON a.id = a_2.id;
 
 SELECT cat.db1.a.id, db2.a.id FROM cat.db1.a JOIN db2.a ON cat.db1.a.id = db2.a.id;
 SELECT a.id, a_2.id FROM cat.db1.a AS a JOIN c.db2.a AS a_2 ON a.id = a_2.id;
+
+COPY INTO (SELECT * FROM x) TO 'data' WITH (FORMAT 'CSV');
+COPY INTO (SELECT * FROM c.db.x AS x) TO 'data' WITH (FORMAT 'CSV');