Diffstat (limited to 'tests/dialects/test_postgres.py')
-rw-r--r--  tests/dialects/test_postgres.py  147
1 file changed, 98 insertions, 49 deletions
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 1d0ea8b..7a41cef 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -1,4 +1,4 @@
-from sqlglot import ParseError, UnsupportedError, exp, parse_one, transpile
+from sqlglot import ParseError, UnsupportedError, exp, transpile
from sqlglot.helper import logger as helper_logger
from tests.dialects.test_dialect import Validator
@@ -12,27 +12,12 @@ class TestPostgres(Validator):
self.validate_identity("|/ x", "SQRT(x)")
self.validate_identity("||/ x", "CBRT(x)")
- expr = parse_one(
- "SELECT * FROM r CROSS JOIN LATERAL UNNEST(ARRAY[1]) AS s(location)", read="postgres"
- )
+ expr = self.parse_one("SELECT * FROM r CROSS JOIN LATERAL UNNEST(ARRAY[1]) AS s(location)")
unnest = expr.args["joins"][0].this.this
unnest.assert_is(exp.Unnest)
alter_table_only = """ALTER TABLE ONLY "Album" ADD CONSTRAINT "FK_AlbumArtistId" FOREIGN KEY ("ArtistId") REFERENCES "Artist" ("ArtistId") ON DELETE NO ACTION ON UPDATE NO ACTION"""
- expr = parse_one(alter_table_only, read="postgres")
-
- # Checks that user-defined types are parsed into DataType instead of Identifier
- parse_one("CREATE TABLE t (a udt)", read="postgres").this.expressions[0].args[
- "kind"
- ].assert_is(exp.DataType)
-
- # Checks that OID is parsed into a DataType (ObjectIdentifier)
- self.assertIsInstance(
- parse_one("CREATE TABLE public.propertydata (propertyvalue oid)", read="postgres").find(
- exp.DataType
- ),
- exp.ObjectIdentifier,
- )
+ expr = self.parse_one(alter_table_only)
self.assertIsInstance(expr, exp.AlterTable)
self.assertEqual(expr.sql(dialect="postgres"), alter_table_only)
@@ -55,13 +40,6 @@ class TestPostgres(Validator):
self.validate_identity("CAST(x AS DATEMULTIRANGE)")
self.validate_identity("SELECT ARRAY[1, 2, 3] @> ARRAY[1, 2]")
self.validate_identity("SELECT ARRAY[1, 2, 3] <@ ARRAY[1, 2]")
- self.validate_all(
- "SELECT ARRAY[1, 2, 3] && ARRAY[1, 2]",
- write={
- "": "SELECT ARRAY_OVERLAPS(ARRAY(1, 2, 3), ARRAY(1, 2))",
- "postgres": "SELECT ARRAY[1, 2, 3] && ARRAY[1, 2]",
- },
- )
self.validate_identity("x$")
self.validate_identity("SELECT ARRAY[1, 2, 3]")
self.validate_identity("SELECT ARRAY(SELECT 1)")
@@ -86,6 +64,12 @@ class TestPostgres(Validator):
self.validate_identity("SELECT CURRENT_USER")
self.validate_identity("SELECT * FROM ONLY t1")
self.validate_identity(
+ """UPDATE "x" SET "y" = CAST('0 days 60.000000 seconds' AS INTERVAL) WHERE "x"."id" IN (2, 3)"""
+ )
+ self.validate_identity(
+ "WITH t1 AS MATERIALIZED (SELECT 1), t2 AS NOT MATERIALIZED (SELECT 2) SELECT * FROM t1, t2"
+ )
+ self.validate_identity(
"""LAST_VALUE("col1") OVER (ORDER BY "col2" RANGE BETWEEN INTERVAL '1 DAY' PRECEDING AND '1 month' FOLLOWING)"""
)
self.validate_identity(
@@ -107,9 +91,6 @@ class TestPostgres(Validator):
"SELECT SUM(x) OVER a, SUM(y) OVER b FROM c WINDOW a AS (PARTITION BY d), b AS (PARTITION BY e)"
)
self.validate_identity(
- "CREATE TABLE A (LIKE B INCLUDING CONSTRAINT INCLUDING COMPRESSION EXCLUDING COMMENTS)"
- )
- self.validate_identity(
"SELECT CASE WHEN SUBSTRING('abcdefg' FROM 1) IN ('ab') THEN 1 ELSE 0 END"
)
self.validate_identity(
@@ -167,6 +148,10 @@ class TestPostgres(Validator):
"SELECT 'Dianne''s horse'",
)
self.validate_identity(
+ "SELECT $$The price is $9.95$$ AS msg",
+ "SELECT 'The price is $9.95' AS msg",
+ )
+ self.validate_identity(
"COMMENT ON TABLE mytable IS $$doc this$$", "COMMENT ON TABLE mytable IS 'doc this'"
)
self.validate_identity(
@@ -329,6 +314,36 @@ class TestPostgres(Validator):
self.validate_identity("SELECT * FROM t1*", "SELECT * FROM t1")
self.validate_all(
+ 'SELECT * FROM "test_table" ORDER BY RANDOM() LIMIT 5',
+ write={
+ "bigquery": "SELECT * FROM `test_table` ORDER BY RAND() NULLS LAST LIMIT 5",
+ "duckdb": 'SELECT * FROM "test_table" ORDER BY RANDOM() LIMIT 5',
+ "postgres": 'SELECT * FROM "test_table" ORDER BY RANDOM() LIMIT 5',
+ "tsql": "SELECT TOP 5 * FROM [test_table] ORDER BY RAND()",
+ },
+ )
+ self.validate_all(
+ "SELECT (data -> 'en-US') AS acat FROM my_table",
+ write={
+ "duckdb": """SELECT (data -> '$."en-US"') AS acat FROM my_table""",
+ "postgres": "SELECT (data -> 'en-US') AS acat FROM my_table",
+ },
+ )
+ self.validate_all(
+ "SELECT (data ->> 'en-US') AS acat FROM my_table",
+ write={
+ "duckdb": """SELECT (data ->> '$."en-US"') AS acat FROM my_table""",
+ "postgres": "SELECT (data ->> 'en-US') AS acat FROM my_table",
+ },
+ )
+ self.validate_all(
+ "SELECT ARRAY[1, 2, 3] && ARRAY[1, 2]",
+ write={
+ "": "SELECT ARRAY_OVERLAPS(ARRAY(1, 2, 3), ARRAY(1, 2))",
+ "postgres": "SELECT ARRAY[1, 2, 3] && ARRAY[1, 2]",
+ },
+ )
+ self.validate_all(
"SELECT JSON_EXTRACT_PATH_TEXT(x, k1, k2, k3) FROM t",
read={
"clickhouse": "SELECT JSONExtractString(x, k1, k2, k3) FROM t",
@@ -518,15 +533,6 @@ class TestPostgres(Validator):
},
)
self.validate_all(
- "CREATE TABLE x (a UUID, b BYTEA)",
- write={
- "duckdb": "CREATE TABLE x (a UUID, b BLOB)",
- "presto": "CREATE TABLE x (a UUID, b VARBINARY)",
- "hive": "CREATE TABLE x (a UUID, b BINARY)",
- "spark": "CREATE TABLE x (a UUID, b BINARY)",
- },
- )
- self.validate_all(
"SELECT * FROM x FETCH 1 ROW",
write={
"postgres": "SELECT * FROM x FETCH FIRST 1 ROWS ONLY",
@@ -635,11 +641,30 @@ class TestPostgres(Validator):
"postgres": "x / y ^ z",
},
)
-
- self.assertIsInstance(parse_one("id::UUID", read="postgres"), exp.Cast)
+ self.validate_all(
+ "CAST(x AS NAME)",
+ read={
+ "redshift": "CAST(x AS NAME)",
+ },
+ write={
+ "postgres": "CAST(x AS NAME)",
+ "redshift": "CAST(x AS NAME)",
+ },
+ )
+ self.assertIsInstance(self.parse_one("id::UUID"), exp.Cast)
def test_ddl(self):
- expr = parse_one("CREATE TABLE t (x INTERVAL day)", read="postgres")
+ # Checks that user-defined types are parsed into DataType instead of Identifier
+ self.parse_one("CREATE TABLE t (a udt)").this.expressions[0].args["kind"].assert_is(
+ exp.DataType
+ )
+
+ # Checks that OID is parsed into a DataType (ObjectIdentifier)
+ self.assertIsInstance(
+ self.parse_one("CREATE TABLE p.t (c oid)").find(exp.DataType), exp.ObjectIdentifier
+ )
+
+ expr = self.parse_one("CREATE TABLE t (x INTERVAL day)")
cdef = expr.find(exp.ColumnDef)
cdef.args["kind"].assert_is(exp.DataType)
self.assertEqual(expr.sql(dialect="postgres"), "CREATE TABLE t (x INTERVAL DAY)")
@@ -667,6 +692,21 @@ class TestPostgres(Validator):
self.validate_identity("TRUNCATE TABLE t1 CONTINUE IDENTITY CASCADE")
self.validate_identity("TRUNCATE TABLE t1 RESTART IDENTITY RESTRICT")
self.validate_identity(
+ "CREATE TABLE t (vid INT NOT NULL, CONSTRAINT ht_vid_nid_fid_idx EXCLUDE (INT4RANGE(vid, nid) WITH &&, INT4RANGE(fid, fid, '[]') WITH &&))"
+ )
+ self.validate_identity(
+ "CREATE TABLE t (i INT, PRIMARY KEY (i), EXCLUDE USING gist(col varchar_pattern_ops DESC NULLS LAST WITH &&) WITH (sp1=1, sp2=2))"
+ )
+ self.validate_identity(
+ "CREATE TABLE t (i INT, EXCLUDE USING btree(INT4RANGE(vid, nid, '[]') ASC NULLS FIRST WITH &&) INCLUDE (col1, col2))"
+ )
+ self.validate_identity(
+ "CREATE TABLE t (i INT, EXCLUDE USING gin(col1 WITH &&, col2 WITH ||) USING INDEX TABLESPACE tablespace WHERE (id > 5))"
+ )
+ self.validate_identity(
+ "CREATE TABLE A (LIKE B INCLUDING CONSTRAINT INCLUDING COMPRESSION EXCLUDING COMMENTS)"
+ )
+ self.validate_identity(
"CREATE TABLE cust_part3 PARTITION OF customers FOR VALUES WITH (MODULUS 3, REMAINDER 2)"
)
self.validate_identity(
@@ -691,13 +731,13 @@ class TestPostgres(Validator):
"CREATE INDEX index_issues_on_title_trigram ON public.issues USING gin(title public.gin_trgm_ops)"
)
self.validate_identity(
- "INSERT INTO x VALUES (1, 'a', 2.0) ON CONFLICT (id) DO NOTHING RETURNING *"
+ "INSERT INTO x VALUES (1, 'a', 2.0) ON CONFLICT(id) DO NOTHING RETURNING *"
)
self.validate_identity(
- "INSERT INTO x VALUES (1, 'a', 2.0) ON CONFLICT (id) DO UPDATE SET x.id = 1 RETURNING *"
+ "INSERT INTO x VALUES (1, 'a', 2.0) ON CONFLICT(id) DO UPDATE SET x.id = 1 RETURNING *"
)
self.validate_identity(
- "INSERT INTO x VALUES (1, 'a', 2.0) ON CONFLICT (id) DO UPDATE SET x.id = excluded.id RETURNING *"
+ "INSERT INTO x VALUES (1, 'a', 2.0) ON CONFLICT(id) DO UPDATE SET x.id = excluded.id RETURNING *"
)
self.validate_identity(
"INSERT INTO x VALUES (1, 'a', 2.0) ON CONFLICT ON CONSTRAINT pkey DO NOTHING RETURNING *"
@@ -740,8 +780,7 @@ class TestPostgres(Validator):
check_command_warning=True,
)
self.validate_identity(
- "CREATE UNLOGGED TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp",
- check_command_warning=True,
+ "CREATE UNLOGGED TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp"
)
self.validate_identity(
"CREATE FUNCTION x(INT) RETURNS INT SET search_path TO 'public'",
@@ -793,7 +832,7 @@ class TestPostgres(Validator):
]))
)
""",
- "CREATE INDEX index_ci_builds_on_commit_id_and_artifacts_expireatandidpartial ON public.ci_builds USING btree(commit_id, artifacts_expire_at, id) WHERE ((CAST((type) AS TEXT) = CAST('Ci::Build' AS TEXT)) AND ((retried = FALSE) OR (retried IS NULL)) AND (CAST((name) AS TEXT) = ANY (ARRAY[CAST((CAST('sast' AS VARCHAR)) AS TEXT), CAST((CAST('dependency_scanning' AS VARCHAR)) AS TEXT), CAST((CAST('sast:container' AS VARCHAR)) AS TEXT), CAST((CAST('container_scanning' AS VARCHAR)) AS TEXT), CAST((CAST('dast' AS VARCHAR)) AS TEXT)])))",
+ "CREATE INDEX index_ci_builds_on_commit_id_and_artifacts_expireatandidpartial ON public.ci_builds USING btree(commit_id, artifacts_expire_at, id) WHERE ((CAST((type) AS TEXT) = CAST('Ci::Build' AS TEXT)) AND ((retried = FALSE) OR (retried IS NULL)) AND (CAST((name) AS TEXT) = ANY(ARRAY[CAST((CAST('sast' AS VARCHAR)) AS TEXT), CAST((CAST('dependency_scanning' AS VARCHAR)) AS TEXT), CAST((CAST('sast:container' AS VARCHAR)) AS TEXT), CAST((CAST('container_scanning' AS VARCHAR)) AS TEXT), CAST((CAST('dast' AS VARCHAR)) AS TEXT)])))",
)
self.validate_identity(
"CREATE INDEX index_ci_pipelines_on_project_idandrefandiddesc ON public.ci_pipelines USING btree(project_id, ref, id DESC)"
@@ -803,6 +842,16 @@ class TestPostgres(Validator):
"TRUNCATE TABLE ONLY t1, t2, ONLY t3, t4, t5 RESTART IDENTITY CASCADE",
)
+ self.validate_all(
+ "CREATE TABLE x (a UUID, b BYTEA)",
+ write={
+ "duckdb": "CREATE TABLE x (a UUID, b BLOB)",
+ "presto": "CREATE TABLE x (a UUID, b VARBINARY)",
+ "hive": "CREATE TABLE x (a UUID, b BINARY)",
+ "spark": "CREATE TABLE x (a UUID, b BINARY)",
+ },
+ )
+
with self.assertRaises(ParseError):
transpile("CREATE TABLE products (price DECIMAL CHECK price > 0)", read="postgres")
with self.assertRaises(ParseError):
@@ -857,7 +906,7 @@ class TestPostgres(Validator):
)
def test_operator(self):
- expr = parse_one("1 OPERATOR(+) 2 OPERATOR(*) 3", read="postgres")
+ expr = self.parse_one("1 OPERATOR(+) 2 OPERATOR(*) 3")
expr.left.assert_is(exp.Operator)
expr.left.left.assert_is(exp.Literal)
@@ -926,8 +975,8 @@ class TestPostgres(Validator):
def test_regexp_binary(self):
"""See https://github.com/tobymao/sqlglot/pull/2404 for details."""
- self.assertIsInstance(parse_one("'thomas' ~ '.*thomas.*'", read="postgres"), exp.Binary)
- self.assertIsInstance(parse_one("'thomas' ~* '.*thomas.*'", read="postgres"), exp.Binary)
+ self.assertIsInstance(self.parse_one("'thomas' ~ '.*thomas.*'"), exp.Binary)
+ self.assertIsInstance(self.parse_one("'thomas' ~* '.*thomas.*'"), exp.Binary)
def test_unnest_json_array(self):
trino_input = """