Diffstat (limited to 'tests/dialects/test_postgres.py')
-rw-r--r-- | tests/dialects/test_postgres.py | 37 |
1 file changed, 26 insertions, 11 deletions
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 9155696..f46eeba 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -1,4 +1,4 @@
-from sqlglot import ParseError, exp, parse_one, transpile
+from sqlglot import ParseError, UnsupportedError, exp, parse_one, transpile
 from sqlglot.helper import logger as helper_logger
 from tests.dialects.test_dialect import Validator

@@ -11,7 +11,7 @@ class TestPostgres(Validator):
         expr = parse_one("CREATE TABLE t (x INTERVAL day)", read="postgres")
         cdef = expr.find(exp.ColumnDef)
         cdef.args["kind"].assert_is(exp.DataType)
-        self.assertEqual(expr.sql(dialect="postgres"), "CREATE TABLE t (x INTERVAL day)")
+        self.assertEqual(expr.sql(dialect="postgres"), "CREATE TABLE t (x INTERVAL DAY)")

         self.validate_identity("CREATE INDEX idx_x ON x USING BTREE(x, y) WHERE (NOT y IS NULL)")
         self.validate_identity("CREATE TABLE test (elems JSONB[])")
@@ -74,6 +74,12 @@ class TestPostgres(Validator):
             "CREATE TABLE test (x TIMESTAMP WITHOUT TIME ZONE[][])",
             "CREATE TABLE test (x TIMESTAMP[][])",
         )
+        self.validate_identity(
+            "CREATE UNLOGGED TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp"
+        )
+        self.validate_identity(
+            "WITH t(c) AS (SELECT 1) SELECT * INTO UNLOGGED foo FROM (SELECT c AS c FROM t) AS temp"
+        )

         self.validate_all(
             "CREATE OR REPLACE FUNCTION function_name (input_a character varying DEFAULT NULL::character varying)",
@@ -158,7 +164,7 @@ class TestPostgres(Validator):
             write={
                 "hive": "SELECT EXPLODE(c) FROM t",
                 "postgres": "SELECT UNNEST(c) FROM t",
-                "presto": "SELECT IF(_u.pos = _u_2.pos_2, _u_2.col) AS col FROM t, UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(c)))) AS _u(pos) CROSS JOIN UNNEST(c) WITH ORDINALITY AS _u_2(col, pos_2) WHERE _u.pos = _u_2.pos_2 OR (_u.pos > CARDINALITY(c) AND _u_2.pos_2 = CARDINALITY(c))",
+                "presto": "SELECT IF(_u.pos = _u_2.pos_2, _u_2.col) AS col FROM t CROSS JOIN UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(c)))) AS _u(pos) CROSS JOIN UNNEST(c) WITH ORDINALITY AS _u_2(col, pos_2) WHERE _u.pos = _u_2.pos_2 OR (_u.pos > CARDINALITY(c) AND _u_2.pos_2 = CARDINALITY(c))",
             },
         )
         self.validate_all(
@@ -262,6 +268,7 @@ class TestPostgres(Validator):
         self.validate_identity(
             "SELECT * FROM JSON_ARRAY_ELEMENTS('[1,true, [2,false]]') WITH ORDINALITY AS kv_json(a, b)"
         )
+        self.validate_identity("SELECT * FROM t TABLESAMPLE SYSTEM (50) REPEATABLE (55)")
         self.validate_identity("x @@ y")
         self.validate_identity("CAST(x AS MONEY)")
         self.validate_identity("CAST(x AS INT4RANGE)")
@@ -277,7 +284,7 @@ class TestPostgres(Validator):
         self.validate_identity("CAST(x AS DATERANGE)")
         self.validate_identity("CAST(x AS DATEMULTIRANGE)")
         self.validate_identity(
-            """LAST_VALUE("col1") OVER (ORDER BY "col2" RANGE BETWEEN INTERVAL '1 day' PRECEDING AND '1 month' FOLLOWING)"""
+            """LAST_VALUE("col1") OVER (ORDER BY "col2" RANGE BETWEEN INTERVAL '1 DAY' PRECEDING AND '1 month' FOLLOWING)"""
         )
         self.validate_identity("SELECT ARRAY[1, 2, 3] @> ARRAY[1, 2]")
         self.validate_identity("SELECT ARRAY[1, 2, 3] <@ ARRAY[1, 2]")
@@ -326,6 +333,13 @@ class TestPostgres(Validator):
         )

         self.validate_all(
+            "SELECT * FROM t TABLESAMPLE SYSTEM (50)",
+            write={
+                "postgres": "SELECT * FROM t TABLESAMPLE SYSTEM (50)",
+                "redshift": UnsupportedError,
+            },
+        )
+        self.validate_all(
             "SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY amount)",
             write={
                 "databricks": "SELECT PERCENTILE_APPROX(amount, 0.5)",
@@ -387,17 +401,17 @@ class TestPostgres(Validator):
         self.validate_all(
             "GENERATE_SERIES(a, b, ' 2 days ')",
             write={
-                "postgres": "GENERATE_SERIES(a, b, INTERVAL '2 days')",
-                "presto": "SEQUENCE(a, b, INTERVAL '2' day)",
-                "trino": "SEQUENCE(a, b, INTERVAL '2' day)",
+                "postgres": "GENERATE_SERIES(a, b, INTERVAL '2 DAYS')",
+                "presto": "SEQUENCE(a, b, INTERVAL '2' DAY)",
+                "trino": "SEQUENCE(a, b, INTERVAL '2' DAY)",
             },
         )
         self.validate_all(
             "GENERATE_SERIES('2019-01-01'::TIMESTAMP, NOW(), '1day')",
             write={
-                "postgres": "GENERATE_SERIES(CAST('2019-01-01' AS TIMESTAMP), CURRENT_TIMESTAMP, INTERVAL '1 day')",
-                "presto": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' day)",
-                "trino": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' day)",
+                "postgres": "GENERATE_SERIES(CAST('2019-01-01' AS TIMESTAMP), CURRENT_TIMESTAMP, INTERVAL '1 DAY')",
+                "presto": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' DAY)",
+                "trino": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' DAY)",
             },
         )
         self.validate_all(
@@ -657,7 +671,7 @@ class TestPostgres(Validator):
         )
         self.validate_all(
             "'45 days'::interval day",
-            write={"postgres": "CAST('45 days' AS INTERVAL day)"},
+            write={"postgres": "CAST('45 days' AS INTERVAL DAY)"},
         )
         self.validate_all(
             "'x' 'y' 'z'",
@@ -726,6 +740,7 @@ class TestPostgres(Validator):
             """merge into x as x using (select id) as y on a = b WHEN matched then update set X."A" = y.b""",
             write={
                 "postgres": """MERGE INTO x AS x USING (SELECT id) AS y ON a = b WHEN MATCHED THEN UPDATE SET "A" = y.b""",
+                "trino": """MERGE INTO x AS x USING (SELECT id) AS y ON a = b WHEN MATCHED THEN UPDATE SET "A" = y.b""",
                 "snowflake": """MERGE INTO x AS x USING (SELECT id) AS y ON a = b WHEN MATCHED THEN UPDATE SET X."A" = y.b""",
             },
         )