-rw-r--r--  tests/dialects/test_postgres.py  212
1 file changed, 168 insertions(+), 44 deletions(-)
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 74753be..62ae247 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -8,9 +8,15 @@ class TestPostgres(Validator):
dialect = "postgres"
def test_postgres(self):
- self.validate_identity(
- 'CREATE TABLE x (a TEXT COLLATE "de_DE")', "CREATE TABLE x (a TEXT COLLATE de_DE)"
+ self.validate_all(
+ "x ? y",
+ write={
+ "": "JSONB_CONTAINS(x, y)",
+ "postgres": "x ? y",
+ },
)
+
+ self.validate_identity("SHA384(x)")
self.validate_identity("1.x", "1. AS x")
self.validate_identity("|/ x", "SQRT(x)")
self.validate_identity("||/ x", "CBRT(x)")
@@ -22,7 +28,7 @@ class TestPostgres(Validator):
alter_table_only = """ALTER TABLE ONLY "Album" ADD CONSTRAINT "FK_AlbumArtistId" FOREIGN KEY ("ArtistId") REFERENCES "Artist" ("ArtistId") ON DELETE NO ACTION ON UPDATE NO ACTION"""
expr = self.parse_one(alter_table_only)
- self.assertIsInstance(expr, exp.AlterTable)
+ self.assertIsInstance(expr, exp.Alter)
self.assertEqual(expr.sql(dialect="postgres"), alter_table_only)
self.validate_identity("STRING_TO_ARRAY('xx~^~yy~^~zz', '~^~', 'yy')")
@@ -67,10 +73,6 @@ class TestPostgres(Validator):
self.validate_identity("SELECT CURRENT_USER")
self.validate_identity("SELECT * FROM ONLY t1")
self.validate_identity(
- "SELECT ARRAY[1, 2, 3] <@ ARRAY[1, 2]",
- "SELECT ARRAY[1, 2] @> ARRAY[1, 2, 3]",
- )
- self.validate_identity(
"""UPDATE "x" SET "y" = CAST('0 days 60.000000 seconds' AS INTERVAL) WHERE "x"."id" IN (2, 3)"""
)
self.validate_identity(
@@ -127,6 +129,14 @@ class TestPostgres(Validator):
"ORDER BY 2, 3"
)
self.validate_identity(
+ "/*+ some comment*/ SELECT b.foo, b.bar FROM baz AS b",
+ "/* + some comment */ SELECT b.foo, b.bar FROM baz AS b",
+ )
+ self.validate_identity(
+ "SELECT ARRAY[1, 2, 3] <@ ARRAY[1, 2]",
+ "SELECT ARRAY[1, 2] @> ARRAY[1, 2, 3]",
+ )
+ self.validate_identity(
"SELECT ARRAY[]::INT[] AS foo",
"SELECT CAST(ARRAY[] AS INT[]) AS foo",
)
@@ -344,10 +354,10 @@ class TestPostgres(Validator):
self.validate_all(
"SELECT ARRAY[1, 2, 3] @> ARRAY[1, 2]",
read={
- "duckdb": "SELECT ARRAY_HAS_ALL([1, 2, 3], [1, 2])",
+ "duckdb": "SELECT [1, 2, 3] @> [1, 2]",
},
write={
- "duckdb": "SELECT ARRAY_HAS_ALL([1, 2, 3], [1, 2])",
+ "duckdb": "SELECT [1, 2, 3] @> [1, 2]",
"mysql": UnsupportedError,
"postgres": "SELECT ARRAY[1, 2, 3] @> ARRAY[1, 2]",
},
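The updated duckdb expectations reflect that DuckDB supports the `@>` containment operator natively, so it no longer has to be rewritten as ARRAY_HAS_ALL. A small sketch, assuming sqlglot's top-level transpile helper:

    import sqlglot

    sql = "SELECT ARRAY[1, 2, 3] @> ARRAY[1, 2]"
    print(sqlglot.transpile(sql, read="postgres", write="duckdb")[0])
    # SELECT [1, 2, 3] @> [1, 2]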
@@ -389,13 +399,6 @@ class TestPostgres(Validator):
},
)
self.validate_all(
- "SELECT ARRAY[1, 2, 3] && ARRAY[1, 2]",
- write={
- "": "SELECT ARRAY_OVERLAPS(ARRAY(1, 2, 3), ARRAY(1, 2))",
- "postgres": "SELECT ARRAY[1, 2, 3] && ARRAY[1, 2]",
- },
- )
- self.validate_all(
"SELECT JSON_EXTRACT_PATH_TEXT(x, k1, k2, k3) FROM t",
read={
"clickhouse": "SELECT JSONExtractString(x, k1, k2, k3) FROM t",
@@ -539,37 +542,53 @@ class TestPostgres(Validator):
},
)
self.validate_all(
- "GENERATE_SERIES(a, b, ' 2 days ')",
+ "SELECT GENERATE_SERIES(1, 5)",
write={
- "postgres": "GENERATE_SERIES(a, b, INTERVAL '2 DAYS')",
- "presto": "SEQUENCE(a, b, INTERVAL '2' DAY)",
- "trino": "SEQUENCE(a, b, INTERVAL '2' DAY)",
+ "bigquery": UnsupportedError,
+ "postgres": "SELECT GENERATE_SERIES(1, 5)",
},
)
self.validate_all(
- "GENERATE_SERIES('2019-01-01'::TIMESTAMP, NOW(), '1day')",
+ "WITH dates AS (SELECT GENERATE_SERIES('2020-01-01'::DATE, '2024-01-01'::DATE, '1 day'::INTERVAL) AS date), date_table AS (SELECT DISTINCT DATE_TRUNC('MONTH', date) AS date FROM dates) SELECT * FROM date_table",
write={
- "postgres": "GENERATE_SERIES(CAST('2019-01-01' AS TIMESTAMP), CURRENT_TIMESTAMP, INTERVAL '1 DAY')",
- "presto": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' DAY)",
- "trino": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' DAY)",
+ "duckdb": "WITH dates AS (SELECT UNNEST(GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2024-01-01' AS DATE), CAST('1 day' AS INTERVAL))) AS date), date_table AS (SELECT DISTINCT DATE_TRUNC('MONTH', date) AS date FROM dates) SELECT * FROM date_table",
+ "postgres": "WITH dates AS (SELECT GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2024-01-01' AS DATE), CAST('1 day' AS INTERVAL)) AS date), date_table AS (SELECT DISTINCT DATE_TRUNC('MONTH', date) AS date FROM dates) SELECT * FROM date_table",
},
)
self.validate_all(
- "GENERATE_SERIES(a, b)",
+ "GENERATE_SERIES(a, b, ' 2 days ')",
+ write={
+ "postgres": "GENERATE_SERIES(a, b, INTERVAL '2 DAYS')",
+ "presto": "UNNEST(SEQUENCE(a, b, INTERVAL '2' DAY))",
+ "trino": "UNNEST(SEQUENCE(a, b, INTERVAL '2' DAY))",
+ },
+ )
+ self.validate_all(
+ "GENERATE_SERIES('2019-01-01'::TIMESTAMP, NOW(), '1day')",
write={
- "postgres": "GENERATE_SERIES(a, b)",
- "presto": "SEQUENCE(a, b)",
- "trino": "SEQUENCE(a, b)",
- "tsql": "GENERATE_SERIES(a, b)",
+ "databricks": "EXPLODE(SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP() AS TIMESTAMP), INTERVAL '1' DAY))",
+ "hive": "EXPLODE(SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP() AS TIMESTAMP), INTERVAL '1' DAY))",
+ "postgres": "GENERATE_SERIES(CAST('2019-01-01' AS TIMESTAMP), CURRENT_TIMESTAMP, INTERVAL '1 DAY')",
+ "presto": "UNNEST(SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' DAY))",
+ "spark": "EXPLODE(SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP() AS TIMESTAMP), INTERVAL '1' DAY))",
+ "spark2": "EXPLODE(SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP() AS TIMESTAMP), INTERVAL '1' DAY))",
+ "trino": "UNNEST(SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' DAY))",
},
)
self.validate_all(
- "GENERATE_SERIES(a, b)",
+ "SELECT * FROM GENERATE_SERIES(a, b)",
read={
- "postgres": "GENERATE_SERIES(a, b)",
- "presto": "SEQUENCE(a, b)",
- "trino": "SEQUENCE(a, b)",
- "tsql": "GENERATE_SERIES(a, b)",
+ "tsql": "SELECT * FROM GENERATE_SERIES(a, b)",
+ },
+ write={
+ "databricks": "SELECT * FROM EXPLODE(SEQUENCE(a, b))",
+ "hive": "SELECT * FROM EXPLODE(SEQUENCE(a, b))",
+ "postgres": "SELECT * FROM GENERATE_SERIES(a, b)",
+ "presto": "SELECT * FROM UNNEST(SEQUENCE(a, b))",
+ "spark": "SELECT * FROM EXPLODE(SEQUENCE(a, b))",
+ "spark2": "SELECT * FROM EXPLODE(SEQUENCE(a, b))",
+ "trino": "SELECT * FROM UNNEST(SEQUENCE(a, b))",
+ "tsql": "SELECT * FROM GENERATE_SERIES(a, b)",
},
)
self.validate_all(
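The reshuffled GENERATE_SERIES cases in the hunk above capture that a set-returning call in a FROM clause now transpiles to each engine's table-function idiom: UNNEST(SEQUENCE(...)) on Presto/Trino and EXPLODE(SEQUENCE(...)) on Hive/Spark/Databricks. A sketch of the FROM-clause case, with outputs taken from the expected values above:

    import sqlglot

    sql = "SELECT * FROM GENERATE_SERIES(a, b)"
    print(sqlglot.transpile(sql, read="postgres", write="trino")[0])
    # SELECT * FROM UNNEST(SEQUENCE(a, b))
    print(sqlglot.transpile(sql, read="postgres", write="spark")[0])
    # SELECT * FROM EXPLODE(SEQUENCE(a, b))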
@@ -650,17 +669,25 @@ class TestPostgres(Validator):
},
)
self.validate_all(
- """'{"a":1,"b":2}'::json->'b'""",
+ "TRIM(BOTH 'as' FROM 'as string as')",
write={
- "postgres": """CAST('{"a":1,"b":2}' AS JSON) -> 'b'""",
- "redshift": """JSON_EXTRACT_PATH_TEXT('{"a":1,"b":2}', 'b')""",
+ "postgres": "TRIM(BOTH 'as' FROM 'as string as')",
+ "spark": "TRIM(BOTH 'as' FROM 'as string as')",
},
)
+ self.validate_identity(
+ """SELECT TRIM(LEADING ' XXX ' COLLATE "de_DE")""",
+ """SELECT LTRIM(' XXX ' COLLATE "de_DE")""",
+ )
+ self.validate_identity(
+ """SELECT TRIM(TRAILING ' XXX ' COLLATE "de_DE")""",
+ """SELECT RTRIM(' XXX ' COLLATE "de_DE")""",
+ )
self.validate_all(
- "TRIM(BOTH 'as' FROM 'as string as')",
+ """'{"a":1,"b":2}'::json->'b'""",
write={
- "postgres": "TRIM(BOTH 'as' FROM 'as string as')",
- "spark": "TRIM(BOTH 'as' FROM 'as string as')",
+ "postgres": """CAST('{"a":1,"b":2}' AS JSON) -> 'b'""",
+ "redshift": """JSON_EXTRACT_PATH_TEXT('{"a":1,"b":2}', 'b')""",
},
)
self.validate_all(
@@ -724,6 +751,52 @@ class TestPostgres(Validator):
self.validate_identity("cast(a as FLOAT8)", "CAST(a AS DOUBLE PRECISION)")
self.validate_identity("cast(a as FLOAT4)", "CAST(a AS REAL)")
+ self.validate_all(
+ "1 / DIV(4, 2)",
+ read={
+ "postgres": "1 / DIV(4, 2)",
+ },
+ write={
+ "sqlite": "1 / CAST(CAST(CAST(4 AS REAL) / 2 AS INTEGER) AS REAL)",
+ "duckdb": "1 / CAST(4 // 2 AS DECIMAL)",
+ "bigquery": "1 / CAST(DIV(4, 2) AS NUMERIC)",
+ },
+ )
+ self.validate_all(
+ "CAST(DIV(4, 2) AS DECIMAL(5, 3))",
+ read={
+ "duckdb": "CAST(4 // 2 AS DECIMAL(5, 3))",
+ },
+ write={
+ "duckdb": "CAST(CAST(4 // 2 AS DECIMAL) AS DECIMAL(5, 3))",
+ "postgres": "CAST(DIV(4, 2) AS DECIMAL(5, 3))",
+ },
+ )
+
+ self.validate_all(
+ "SELECT TO_DATE('01/01/2000', 'MM/DD/YYYY')",
+ write={
+ "duckdb": "SELECT CAST(STRPTIME('01/01/2000', '%m/%d/%Y') AS DATE)",
+ "postgres": "SELECT TO_DATE('01/01/2000', 'MM/DD/YYYY')",
+ },
+ )
+
+ self.validate_identity(
+ 'SELECT js, js IS JSON AS "json?", js IS JSON VALUE AS "scalar?", js IS JSON SCALAR AS "scalar?", js IS JSON OBJECT AS "object?", js IS JSON ARRAY AS "array?" FROM t'
+ )
+ self.validate_identity(
+ 'SELECT js, js IS JSON ARRAY WITH UNIQUE KEYS AS "array w. UK?", js IS JSON ARRAY WITHOUT UNIQUE KEYS AS "array w/o UK?", js IS JSON ARRAY UNIQUE KEYS AS "array w UK 2?" FROM t'
+ )
+ self.validate_identity(
+ "MERGE INTO target_table USING source_table AS source ON target.id = source.id WHEN MATCHED THEN DO NOTHING WHEN NOT MATCHED THEN DO NOTHING RETURNING MERGE_ACTION(), *"
+ )
+ self.validate_identity(
+ "SELECT 1 FROM ((VALUES (1)) AS vals(id) LEFT OUTER JOIN tbl ON vals.id = tbl.id)"
+ )
+ self.validate_identity("SELECT OVERLAY(a PLACING b FROM 1)")
+ self.validate_identity("SELECT OVERLAY(a PLACING b FROM 1 FOR 1)")
+ self.validate_identity("ARRAY[1, 2, 3] && ARRAY[1, 2]").assert_is(exp.ArrayOverlaps)
+
def test_ddl(self):
# Checks that user-defined types are parsed into DataType instead of Identifier
self.parse_one("CREATE TABLE t (a udt)").this.expressions[0].args["kind"].assert_is(
@@ -740,6 +813,9 @@ class TestPostgres(Validator):
cdef.args["kind"].assert_is(exp.DataType)
self.assertEqual(expr.sql(dialect="postgres"), "CREATE TABLE t (x INTERVAL DAY)")
+ self.validate_identity('ALTER INDEX "IX_Ratings_Column1" RENAME TO "IX_Ratings_Column2"')
+ self.validate_identity('CREATE TABLE x (a TEXT COLLATE "de_DE")')
+ self.validate_identity('CREATE TABLE x (a TEXT COLLATE pg_catalog."default")')
self.validate_identity("CREATE TABLE t (col INT[3][5])")
self.validate_identity("CREATE TABLE t (col INT[3])")
self.validate_identity("CREATE INDEX IF NOT EXISTS ON t(c)")
@@ -773,6 +849,9 @@ class TestPostgres(Validator):
self.validate_identity("ALTER TABLE t1 SET TABLESPACE tablespace")
self.validate_identity("ALTER TABLE t1 SET (fillfactor = 5, autovacuum_enabled = TRUE)")
self.validate_identity(
+ "ALTER TABLE tested_table ADD CONSTRAINT unique_example UNIQUE (column_name) NOT VALID"
+ )
+ self.validate_identity(
"CREATE FUNCTION pymax(a INT, b INT) RETURNS INT LANGUAGE plpython3u AS $$\n if a > b:\n return a\n return b\n$$",
)
self.validate_identity(
@@ -942,6 +1021,42 @@ class TestPostgres(Validator):
},
)
+ self.validate_identity("CREATE TABLE tbl (col INT UNIQUE NULLS NOT DISTINCT DEFAULT 9.99)")
+ self.validate_identity("CREATE TABLE tbl (col UUID UNIQUE DEFAULT GEN_RANDOM_UUID())")
+ self.validate_identity("CREATE TABLE tbl (col UUID, UNIQUE NULLS NOT DISTINCT (col))")
+
+ self.validate_identity("CREATE INDEX CONCURRENTLY ix_table_id ON tbl USING btree(id)")
+ self.validate_identity(
+ "CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_table_id ON tbl USING btree(id)"
+ )
+ self.validate_identity("DROP INDEX ix_table_id")
+ self.validate_identity("DROP INDEX IF EXISTS ix_table_id")
+ self.validate_identity("DROP INDEX CONCURRENTLY ix_table_id")
+ self.validate_identity("DROP INDEX CONCURRENTLY IF EXISTS ix_table_id")
+
+ self.validate_identity(
+ """
+ CREATE TABLE IF NOT EXISTS public.rental
+ (
+ inventory_id INT NOT NULL,
+ CONSTRAINT rental_customer_id_fkey FOREIGN KEY (customer_id)
+ REFERENCES public.customer (customer_id) MATCH FULL
+ ON UPDATE CASCADE
+ ON DELETE RESTRICT,
+ CONSTRAINT rental_inventory_id_fkey FOREIGN KEY (inventory_id)
+ REFERENCES public.inventory (inventory_id) MATCH PARTIAL
+ ON UPDATE CASCADE
+ ON DELETE RESTRICT,
+ CONSTRAINT rental_staff_id_fkey FOREIGN KEY (staff_id)
+ REFERENCES public.staff (staff_id) MATCH SIMPLE
+ ON UPDATE CASCADE
+ ON DELETE RESTRICT,
+ INITIALLY IMMEDIATE
+ )
+ """,
+ "CREATE TABLE IF NOT EXISTS public.rental (inventory_id INT NOT NULL, CONSTRAINT rental_customer_id_fkey FOREIGN KEY (customer_id) REFERENCES public.customer (customer_id) MATCH FULL ON UPDATE CASCADE ON DELETE RESTRICT, CONSTRAINT rental_inventory_id_fkey FOREIGN KEY (inventory_id) REFERENCES public.inventory (inventory_id) MATCH PARTIAL ON UPDATE CASCADE ON DELETE RESTRICT, CONSTRAINT rental_staff_id_fkey FOREIGN KEY (staff_id) REFERENCES public.staff (staff_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE RESTRICT, INITIALLY IMMEDIATE)",
+ )
+
with self.assertRaises(ParseError):
transpile("CREATE TABLE products (price DECIMAL CHECK price > 0)", read="postgres")
with self.assertRaises(ParseError):
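The DDL additions above (CONCURRENTLY, UNIQUE NULLS NOT DISTINCT, MATCH FULL/PARTIAL/SIMPLE) are plain identity checks: parse with the postgres dialect and expect the same SQL back, modulo the normalization shown for the pretty-printed CREATE TABLE. A sketch of what validate_identity boils down to, assuming sqlglot.parse_one:

    import sqlglot

    ddl = "CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_table_id ON tbl USING btree(id)"
    assert sqlglot.parse_one(ddl, read="postgres").sql(dialect="postgres") == ddl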
@@ -988,10 +1103,10 @@ class TestPostgres(Validator):
self.assertEqual(
cm.output,
[
- "WARNING:sqlglot:Applying array index offset (-1)",
- "WARNING:sqlglot:Applying array index offset (1)",
- "WARNING:sqlglot:Applying array index offset (1)",
- "WARNING:sqlglot:Applying array index offset (1)",
+ "INFO:sqlglot:Applying array index offset (-1)",
+ "INFO:sqlglot:Applying array index offset (1)",
+ "INFO:sqlglot:Applying array index offset (1)",
+ "INFO:sqlglot:Applying array index offset (1)",
],
)
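Since the array-index-offset messages move from WARNING to INFO here, callers that relied on seeing them under a default WARNING-level logging setup now have to opt in explicitly. A sketch using the standard logging module (the "sqlglot" logger name is taken from the expected records above):

    import logging

    # Opt back in to the offset messages now that they are INFO, not WARNING.
    logging.basicConfig(level=logging.INFO)
    logging.getLogger("sqlglot").setLevel(logging.INFO)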
@@ -1095,3 +1210,12 @@ CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(boxcrate AS JSON)) AS x(tbox)
CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(JSON_EXTRACT_PATH(tbox, 'boxes') AS JSON)) AS y(boxes)"""
self.validate_all(expected_postgres, read={"trino": trino_input}, pretty=True)
+
+ def test_rows_from(self):
+ self.validate_identity("""SELECT * FROM ROWS FROM (FUNC1(col1, col2))""")
+ self.validate_identity(
+ """SELECT * FROM ROWS FROM (FUNC1(col1) AS alias1("col1" TEXT), FUNC2(col2) AS alias2("col2" INT)) WITH ORDINALITY"""
+ )
+ self.validate_identity(
+ """SELECT * FROM table1, ROWS FROM (FUNC1(col1) AS alias1("col1" TEXT)) WITH ORDINALITY AS alias3("col3" INT, "col4" TEXT)"""
+ )