author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-08 08:11:50 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-08 08:11:50 +0000
commit     8978da3b39d7ca3cf83ee30fcc63ffe0e5453fb2 (patch)
tree       2e29f131dff77b31e84c957266de8f18655b6f88 /tests/dialects/test_snowflake.py
parent     Adding upstream version 22.2.0. (diff)
Adding upstream version 23.7.0. (tag: upstream/23.7.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/dialects/test_snowflake.py')
-rw-r--r--   tests/dialects/test_snowflake.py | 175
1 file changed, 159 insertions(+), 16 deletions(-)
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index e48f811..a41d35a 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -40,6 +40,19 @@ WHERE
 )""",
         )
+        self.validate_identity("SELECT TIMEADD(HOUR, 2, CAST('09:05:03' AS TIME))")
+        self.validate_identity("SELECT CAST(OBJECT_CONSTRUCT('a', 1) AS MAP(VARCHAR, INT))")
+        self.validate_identity("SELECT CAST(OBJECT_CONSTRUCT('a', 1) AS OBJECT(a CHAR NOT NULL))")
+        self.validate_identity("SELECT CAST([1, 2, 3] AS ARRAY(INT))")
+        self.validate_identity("SELECT CAST(obj AS OBJECT(x CHAR) RENAME FIELDS)")
+        self.validate_identity("SELECT CAST(obj AS OBJECT(x CHAR, y VARCHAR) ADD FIELDS)")
+        self.validate_identity("SELECT TO_TIMESTAMP(123.4)").selects[0].assert_is(exp.Anonymous)
+        self.validate_identity("SELECT TO_TIME(x) FROM t")
+        self.validate_identity("SELECT TO_TIMESTAMP(x) FROM t")
+        self.validate_identity("SELECT TO_TIMESTAMP_NTZ(x) FROM t")
+        self.validate_identity("SELECT TO_TIMESTAMP_LTZ(x) FROM t")
+        self.validate_identity("SELECT TO_TIMESTAMP_TZ(x) FROM t")
+        self.validate_identity("TO_DECIMAL(expr, fmt, precision, scale)")
         self.validate_identity("ALTER TABLE authors ADD CONSTRAINT c1 UNIQUE (id, email)")
         self.validate_identity("RM @parquet_stage", check_command_warning=True)
         self.validate_identity("REMOVE @parquet_stage", check_command_warning=True)
@@ -59,7 +72,6 @@ WHERE
         self.validate_identity("INITCAP('iqamqinterestedqinqthisqtopic', 'q')")
         self.validate_identity("CAST(x AS GEOMETRY)")
         self.validate_identity("OBJECT_CONSTRUCT(*)")
-        self.validate_identity("SELECT TO_DATE('2019-02-28') + INTERVAL '1 day, 1 year'")
         self.validate_identity("SELECT CAST('2021-01-01' AS DATE) + INTERVAL '1 DAY'")
         self.validate_identity("SELECT HLL(*)")
         self.validate_identity("SELECT HLL(a)")
@@ -77,18 +89,29 @@ WHERE
         self.validate_identity("ALTER TABLE foo UNSET DATA_RETENTION_TIME_IN_DAYS, CHANGE_TRACKING")
         self.validate_identity("COMMENT IF EXISTS ON TABLE foo IS 'bar'")
         self.validate_identity("SELECT CONVERT_TIMEZONE('UTC', 'America/Los_Angeles', col)")
-        self.validate_identity("REGEXP_REPLACE('target', 'pattern', '\n')")
         self.validate_identity("ALTER TABLE a SWAP WITH b")
+        self.validate_identity("SELECT MATCH_CONDITION")
         self.validate_identity(
             'DESCRIBE TABLE "SNOWFLAKE_SAMPLE_DATA"."TPCDS_SF100TCL"."WEB_SITE" type=stage'
         )
         self.validate_identity(
             "SELECT a FROM test PIVOT(SUM(x) FOR y IN ('z', 'q')) AS x TABLESAMPLE (0.1)"
         )
+        self.validate_identity(
+            "SELECT * FROM DATA AS DATA_L ASOF JOIN DATA AS DATA_R MATCH_CONDITION (DATA_L.VAL > DATA_R.VAL) ON DATA_L.ID = DATA_R.ID"
+        )
+        self.validate_identity(
+            "REGEXP_REPLACE('target', 'pattern', '\n')",
+            "REGEXP_REPLACE('target', 'pattern', '\\n')",
+        )
+        self.validate_identity(
+            "SELECT a:from::STRING, a:from || ' test' ",
+            "SELECT CAST(GET_PATH(a, 'from') AS TEXT), GET_PATH(a, 'from') || ' test'",
+        )
         self.validate_identity("x:from", "GET_PATH(x, 'from')")
         self.validate_identity(
-            "value:values::string",
-            "CAST(GET_PATH(value, 'values') AS TEXT)",
+            "value:values::string::int",
+            "CAST(CAST(GET_PATH(value, 'values') AS TEXT) AS INT)",
         )
         self.validate_identity(
             """SELECT GET_PATH(PARSE_JSON('{"y": [{"z": 1}]}'), 'y[0]:z')""",
@@ -132,7 +155,11 @@ WHERE
         )
         self.validate_identity(
             "v:attr[0]:name",
-            "GET_PATH(GET_PATH(v, 'attr[0]'), 'name')",
+            "GET_PATH(v, 'attr[0].name')",
+        )
+        self.validate_identity(
+            "a.x:from.b:c.d::int",
+            "CAST(GET_PATH(a.x, 'from.b.c.d') AS INT)",
         )
         self.validate_identity(
             """SELECT PARSE_JSON('{"food":{"fruit":"banana"}}'):food.fruit::VARCHAR""",
@@ -190,10 +217,6 @@ WHERE
             "SELECT CEIL(5.3)",
         )
         self.validate_identity(
-            "SELECT TO_TIMESTAMP(x) FROM t",
-            "SELECT CAST(x AS TIMESTAMPNTZ) FROM t",
-        )
-        self.validate_identity(
             "CAST(x AS BYTEINT)",
             "CAST(x AS INT)",
         )
@@ -380,6 +403,7 @@ WHERE
             write={
                 "duckdb": "{'a': b, 'c': d}",
                 "snowflake": "OBJECT_CONSTRUCT('a', b, 'c', d)",
+                "": "STRUCT(b AS a, d AS c)",
             },
         )
         self.validate_identity("OBJECT_CONSTRUCT(a, b, c, d)")
@@ -419,6 +443,46 @@ WHERE
                 "sqlite": "SELECT MIN(c1), MIN(c2) FROM test",
             },
         )
+        for suffix in (
+            "",
+            " OVER ()",
+        ):
+            self.validate_all(
+                f"SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x){suffix}",
+                read={
+                    "snowflake": f"SELECT MEDIAN(x){suffix}",
+                    "postgres": f"SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x){suffix}",
+                },
+                write={
+                    "": f"SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x NULLS LAST){suffix}",
+                    "duckdb": f"SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x){suffix}",
+                    "postgres": f"SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x){suffix}",
+                    "snowflake": f"SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x){suffix}",
+                },
+            )
+            self.validate_all(
+                f"SELECT MEDIAN(x){suffix}",
+                write={
+                    "": f"SELECT PERCENTILE_CONT(x, 0.5){suffix}",
+                    "duckdb": f"SELECT QUANTILE_CONT(x, 0.5){suffix}",
+                    "postgres": f"SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x){suffix}",
+                    "snowflake": f"SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x){suffix}",
+                },
+            )
+            for func in (
+                "CORR",
+                "COVAR_POP",
+                "COVAR_SAMP",
+            ):
+                self.validate_all(
+                    f"SELECT {func}(y, x){suffix}",
+                    write={
+                        "": f"SELECT {func}(y, x){suffix}",
+                        "duckdb": f"SELECT {func}(y, x){suffix}",
+                        "postgres": f"SELECT {func}(y, x){suffix}",
+                        "snowflake": f"SELECT {func}(y, x){suffix}",
+                    },
+                )
         self.validate_all(
             "TO_CHAR(x, y)",
             read={
@@ -560,9 +624,9 @@ WHERE
         self.validate_all(
             '''SELECT PARSE_JSON('{"a": {"b c": "foo"}}'):a:"b c"''',
             write={
-                "duckdb": """SELECT JSON('{"a": {"b c": "foo"}}') -> '$.a' -> '$."b c"'""",
-                "mysql": """SELECT JSON_EXTRACT(JSON_EXTRACT('{"a": {"b c": "foo"}}', '$.a'), '$."b c"')""",
-                "snowflake": """SELECT GET_PATH(GET_PATH(PARSE_JSON('{"a": {"b c": "foo"}}'), 'a'), '["b c"]')""",
+                "duckdb": """SELECT JSON('{"a": {"b c": "foo"}}') -> '$.a."b c"'""",
+                "mysql": """SELECT JSON_EXTRACT('{"a": {"b c": "foo"}}', '$.a."b c"')""",
+                "snowflake": """SELECT GET_PATH(PARSE_JSON('{"a": {"b c": "foo"}}'), 'a["b c"]')""",
             },
         )
         self.validate_all(
@@ -623,9 +687,16 @@ WHERE
         self.validate_all(
             "SELECT TO_TIMESTAMP('2013-04-05 01:02:03')",
             write={
-                "bigquery": "SELECT PARSE_TIMESTAMP('%Y-%m-%d %H:%M:%S', '2013-04-05 01:02:03')",
-                "snowflake": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-mm-DD hh24:mi:ss')",
-                "spark": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-MM-dd HH:mm:ss')",
+                "bigquery": "SELECT CAST('2013-04-05 01:02:03' AS DATETIME)",
+                "snowflake": "SELECT CAST('2013-04-05 01:02:03' AS TIMESTAMPNTZ)",
+                "spark": "SELECT CAST('2013-04-05 01:02:03' AS TIMESTAMP)",
+            },
+        )
+        self.validate_all(
+            "SELECT TO_TIME('12:05:00')",
+            write={
+                "bigquery": "SELECT CAST('12:05:00' AS TIME)",
+                "snowflake": "SELECT CAST('12:05:00' AS TIME)",
             },
         )
         self.validate_all(
@@ -667,9 +738,13 @@ WHERE
         )
         self.validate_all(
            "ARRAY_TO_STRING(x, '')",
+            read={
+                "duckdb": "ARRAY_TO_STRING(x, '')",
+            },
             write={
                 "spark": "ARRAY_JOIN(x, '')",
                 "snowflake": "ARRAY_TO_STRING(x, '')",
+                "duckdb": "ARRAY_TO_STRING(x, '')",
             },
         )
         self.validate_all(
@@ -930,6 +1005,9 @@ WHERE
         )
         self.validate_all(
             "DATEADD(DAY, 5, CAST('2008-12-25' AS DATE))",
+            read={
+                "snowflake": "TIMESTAMPADD(DAY, 5, CAST('2008-12-25' AS DATE))",
+            },
             write={
                 "bigquery": "DATE_ADD(CAST('2008-12-25' AS DATE), INTERVAL 5 DAY)",
                 "snowflake": "DATEADD(DAY, 5, CAST('2008-12-25' AS DATE))",
@@ -952,6 +1030,46 @@ WHERE
         self.validate_identity("DATE_PART(yyy, x)", "DATE_PART(YEAR, x)")
         self.validate_identity("DATE_TRUNC(yr, x)", "DATE_TRUNC('YEAR', x)")
 
+        self.validate_identity("TO_DATE('12345')").assert_is(exp.Anonymous)
+
+        self.validate_identity(
+            "SELECT TO_DATE('2019-02-28') + INTERVAL '1 day, 1 year'",
+            "SELECT CAST('2019-02-28' AS DATE) + INTERVAL '1 day, 1 year'",
+        )
+
+        self.validate_identity("DATE(x)").assert_is(exp.Anonymous)
+        self.validate_identity("TO_DATE(x)").assert_is(exp.Anonymous)
+        self.validate_identity("TRY_TO_DATE(x)").assert_is(exp.Anonymous)
+
+        self.validate_all(
+            "TO_DATE(x, 'MM-DD-YYYY')",
+            write={
+                "snowflake": "TO_DATE(x, 'mm-DD-yyyy')",
+                "duckdb": "CAST(STRPTIME(x, '%m-%d-%Y') AS DATE)",
+            },
+        )
+        self.validate_all(
+            "DATE('01-01-2000', 'MM-DD-YYYY')",
+            write={
+                "snowflake": "TO_DATE('01-01-2000', 'mm-DD-yyyy')",
+                "duckdb": "CAST(STRPTIME('01-01-2000', '%m-%d-%Y') AS DATE)",
+            },
+        )
+        self.validate_all(
+            "TO_DATE('01-01-2000', 'MM-DD-YYYY')",
+            write={
+                "snowflake": "TO_DATE('01-01-2000', 'mm-DD-yyyy')",
+                "duckdb": "CAST(STRPTIME('01-01-2000', '%m-%d-%Y') AS DATE)",
+            },
+        )
+        self.validate_all(
+            "TRY_TO_DATE('01-01-2000', 'MM-DD-YYYY')",
+            write={
+                "snowflake": "TRY_TO_DATE('01-01-2000', 'mm-DD-yyyy')",
+                "duckdb": "CAST(STRPTIME('01-01-2000', '%m-%d-%Y') AS DATE)",
+            },
+        )
+
     def test_semi_structured_types(self):
         self.validate_identity("SELECT CAST(a AS VARIANT)")
         self.validate_identity("SELECT CAST(a AS ARRAY)")
@@ -1047,6 +1165,9 @@ WHERE
         self.validate_identity("CREATE TABLE IDENTIFIER('foo') (COLUMN1 VARCHAR, COLUMN2 VARCHAR)")
         self.validate_identity("CREATE TABLE IDENTIFIER($foo) (col1 VARCHAR, col2 VARCHAR)")
         self.validate_identity(
+            "DROP function my_udf (OBJECT(city VARCHAR, zipcode DECIMAL, val ARRAY(BOOLEAN)))"
+        )
+        self.validate_identity(
             "CREATE TABLE orders_clone_restore CLONE orders AT (TIMESTAMP => TO_TIMESTAMP_TZ('04/05/2013 01:02:03', 'mm/dd/yyyy hh24:mi:ss'))"
         )
         self.validate_identity(
@@ -1061,6 +1182,17 @@ WHERE
         self.validate_identity(
             "CREATE OR REPLACE TABLE EXAMPLE_DB.DEMO.USERS (ID DECIMAL(38, 0) NOT NULL, PRIMARY KEY (ID), FOREIGN KEY (CITY_CODE) REFERENCES EXAMPLE_DB.DEMO.CITIES (CITY_CODE))"
         )
+        self.validate_identity(
+            "CREATE ICEBERG TABLE my_iceberg_table (amount ARRAY(INT)) CATALOG='SNOWFLAKE' EXTERNAL_VOLUME='my_external_volume' BASE_LOCATION='my/relative/path/from/extvol'"
+        )
+        self.validate_identity(
+            "CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL, val ARRAY(BOOLEAN))) RETURNS VARCHAR AS $$ SELECT 'foo' $$",
+            "CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL, val ARRAY(BOOLEAN))) RETURNS VARCHAR AS ' SELECT \\'foo\\' '",
+        )
+        self.validate_identity(
+            "CREATE OR REPLACE FUNCTION my_udtf(foo BOOLEAN) RETURNS TABLE(col1 ARRAY(INT)) AS $$ WITH t AS (SELECT CAST([1, 2, 3] AS ARRAY(INT)) AS c) SELECT c FROM t $$",
+            "CREATE OR REPLACE FUNCTION my_udtf(foo BOOLEAN) RETURNS TABLE (col1 ARRAY(INT)) AS ' WITH t AS (SELECT CAST([1, 2, 3] AS ARRAY(INT)) AS c) SELECT c FROM t '",
+        )
 
         self.validate_all(
             "CREATE TABLE orders_clone CLONE orders",
@@ -1292,7 +1424,6 @@ FROM persons AS p, LATERAL FLATTEN(input => p.c, path => 'contact') AS _flattene
                 "spark": "SELECT `c0`, `c1` FROM (VALUES (1, 2), (3, 4)) AS `t0`(`c0`, `c1`)",
             },
         )
-
         self.validate_all(
             """SELECT $1 AS "_1" FROM VALUES ('a'), ('b')""",
             write={
@@ -1300,6 +1431,18 @@ FROM persons AS p, LATERAL FLATTEN(input => p.c, path => 'contact') AS _flattene
                 "spark": """SELECT ${1} AS `_1` FROM VALUES ('a'), ('b')""",
             },
         )
+        self.validate_all(
+            "SELECT * FROM (SELECT OBJECT_CONSTRUCT('a', 1) AS x) AS t",
+            read={
+                "duckdb": "SELECT * FROM (VALUES ({'a': 1})) AS t(x)",
+            },
+        )
+        self.validate_all(
+            "SELECT * FROM (SELECT OBJECT_CONSTRUCT('a', 1) AS x UNION ALL SELECT OBJECT_CONSTRUCT('a', 2)) AS t",
+            read={
+                "duckdb": "SELECT * FROM (VALUES ({'a': 1}), ({'a': 2})) AS t(x)",
+            },
+        )
 
     def test_describe_table(self):
         self.validate_all(