Diffstat (limited to 'tests/dialects/test_snowflake.py')
-rw-r--r--  tests/dialects/test_snowflake.py | 88
1 file changed, 68 insertions(+), 20 deletions(-)
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index f8c2ea1..ba85719 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -2,6 +2,7 @@ from unittest import mock
from sqlglot import UnsupportedError, exp, parse_one
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
+from sqlglot.optimizer.qualify_columns import quote_identifiers
from tests.dialects.test_dialect import Validator
@@ -48,6 +49,8 @@ WHERE
)""",
)
+ self.validate_identity("SELECT number").selects[0].assert_is(exp.Column)
+ self.validate_identity("INTERVAL '4 years, 5 months, 3 hours'")
self.validate_identity("ALTER TABLE table1 CLUSTER BY (name DESC)")
self.validate_identity("SELECT rename, replace")
self.validate_identity("SELECT TIMEADD(HOUR, 2, CAST('09:05:03' AS TIME))")
@@ -111,6 +114,10 @@ WHERE
"SELECT * FROM DATA AS DATA_L ASOF JOIN DATA AS DATA_R MATCH_CONDITION (DATA_L.VAL > DATA_R.VAL) ON DATA_L.ID = DATA_R.ID"
)
self.validate_identity(
+ "CURRENT_TIMESTAMP - INTERVAL '1 w' AND (1 = 1)",
+ "CURRENT_TIMESTAMP() - INTERVAL '1 WEEK' AND (1 = 1)",
+ )
+ self.validate_identity(
"REGEXP_REPLACE('target', 'pattern', '\n')",
"REGEXP_REPLACE('target', 'pattern', '\\n')",
)
@@ -1186,6 +1193,25 @@ WHERE
)
def test_ddl(self):
+ for constraint_prefix in ("WITH ", ""):
+     with self.subTest(f"Constraint prefix: {constraint_prefix}"):
+         self.validate_identity(
+             f"CREATE TABLE t (id INT {constraint_prefix}MASKING POLICY p)",
+             "CREATE TABLE t (id INT MASKING POLICY p)",
+         )
+         self.validate_identity(
+             f"CREATE TABLE t (id INT {constraint_prefix}MASKING POLICY p USING (c1, c2, c3))",
+             "CREATE TABLE t (id INT MASKING POLICY p USING (c1, c2, c3))",
+         )
+         self.validate_identity(
+             f"CREATE TABLE t (id INT {constraint_prefix}PROJECTION POLICY p)",
+             "CREATE TABLE t (id INT PROJECTION POLICY p)",
+         )
+         self.validate_identity(
+             f"CREATE TABLE t (id INT {constraint_prefix}TAG (key1='value_1', key2='value_2'))",
+             "CREATE TABLE t (id INT TAG (key1='value_1', key2='value_2'))",
+         )
+
self.validate_identity(
"""create external table et2(
col1 date as (parse_json(metadata$external_table_partition):COL1::date),
@@ -1211,6 +1237,9 @@ WHERE
"CREATE OR REPLACE TAG IF NOT EXISTS cost_center COMMENT='cost_center tag'"
).this.assert_is(exp.Identifier)
self.validate_identity(
+ "ALTER TABLE db_name.schmaName.tblName ADD COLUMN COLUMN_1 VARCHAR NOT NULL TAG (key1='value_1')"
+ )
+ self.validate_identity(
"DROP FUNCTION my_udf (OBJECT(city VARCHAR, zipcode DECIMAL(38, 0), val ARRAY(BOOLEAN)))"
)
self.validate_identity(
@@ -1283,6 +1312,20 @@ WHERE
write={"snowflake": "CREATE TABLE a (b INT)"},
)
+ for action in ("SET", "DROP"):
+     with self.subTest(f"ALTER COLUMN {action} NOT NULL"):
+         self.validate_all(
+             f"""
+             ALTER TABLE a
+             ALTER COLUMN my_column {action} NOT NULL;
+             """,
+             write={
+                 "snowflake": f"ALTER TABLE a ALTER COLUMN my_column {action} NOT NULL",
+                 "duckdb": f"ALTER TABLE a ALTER COLUMN my_column {action} NOT NULL",
+                 "postgres": f"ALTER TABLE a ALTER COLUMN my_column {action} NOT NULL",
+             },
+         )
+
def test_user_defined_functions(self):
self.validate_all(
"CREATE FUNCTION a(x DATE, y BIGINT) RETURNS ARRAY LANGUAGE JAVASCRIPT AS $$ SELECT 1 $$",
@@ -1880,22 +1923,21 @@ STORAGE_ALLOWED_LOCATIONS=('s3://mybucket1/path1/', 's3://mybucket2/path2/')""",
def test_copy(self):
self.validate_identity("COPY INTO test (c1) FROM (SELECT $1.c1 FROM @mystage)")
self.validate_identity(
- """COPY INTO temp FROM @random_stage/path/ FILE_FORMAT = (TYPE = CSV FIELD_DELIMITER = '|' NULL_IF = () FIELD_OPTIONALLY_ENCLOSED_BY = '"' TIMESTAMP_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' DATE_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' BINARY_FORMAT = BASE64) VALIDATION_MODE = 'RETURN_3_ROWS'"""
+ """COPY INTO temp FROM @random_stage/path/ FILE_FORMAT = (TYPE=CSV FIELD_DELIMITER='|' NULL_IF=('str1', 'str2') FIELD_OPTIONALLY_ENCLOSED_BY='"' TIMESTAMP_FORMAT='TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' DATE_FORMAT='TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' BINARY_FORMAT=BASE64) VALIDATION_MODE = 'RETURN_3_ROWS'"""
)
self.validate_identity(
- """COPY INTO load1 FROM @%load1/data1/ FILES = ('test1.csv', 'test2.csv') FORCE = TRUE"""
+ """COPY INTO load1 FROM @%load1/data1/ CREDENTIALS = (AWS_KEY_ID='id' AWS_SECRET_KEY='key' AWS_TOKEN='token') FILES = ('test1.csv', 'test2.csv') FORCE = TRUE"""
)
self.validate_identity(
- """COPY INTO mytable FROM 'azure://myaccount.blob.core.windows.net/mycontainer/data/files' CREDENTIALS = (AZURE_SAS_TOKEN = 'token') ENCRYPTION = (TYPE = 'AZURE_CSE' MASTER_KEY = 'kPx...') FILE_FORMAT = (FORMAT_NAME = my_csv_format)"""
+ """COPY INTO mytable FROM 'azure://myaccount.blob.core.windows.net/mycontainer/data/files' CREDENTIALS = (AZURE_SAS_TOKEN='token') ENCRYPTION = (TYPE='AZURE_CSE' MASTER_KEY='kPx...') FILE_FORMAT = (FORMAT_NAME=my_csv_format)"""
)
self.validate_identity(
- """COPY INTO mytable (col1, col2) FROM 's3://mybucket/data/files' FILES = ('file1', 'file2') PATTERN = 'pattern' FILE_FORMAT = (FORMAT_NAME = my_csv_format NULL_IF = ('str1', 'str2')) PARSE_HEADER = TRUE"""
+ """COPY INTO mytable (col1, col2) FROM 's3://mybucket/data/files' STORAGE_INTEGRATION = "storage" ENCRYPTION = (TYPE='NONE' MASTER_KEY='key') FILES = ('file1', 'file2') PATTERN = 'pattern' FILE_FORMAT = (FORMAT_NAME=my_csv_format NULL_IF=('')) PARSE_HEADER = TRUE"""
)
self.validate_all(
"""COPY INTO 's3://example/data.csv'
FROM EXTRA.EXAMPLE.TABLE
- credentials = (x)
- STORAGE_INTEGRATION = S3_INTEGRATION
+ CREDENTIALS = ()
FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = ('') FIELD_OPTIONALLY_ENCLOSED_BY = '"')
HEADER = TRUE
OVERWRITE = TRUE
@@ -1904,22 +1946,20 @@ STORAGE_ALLOWED_LOCATIONS=('s3://mybucket1/path1/', 's3://mybucket2/path2/')""",
write={
"": """COPY INTO 's3://example/data.csv'
FROM EXTRA.EXAMPLE.TABLE
-CREDENTIALS = (x) WITH (
- STORAGE_INTEGRATION S3_INTEGRATION,
- FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = (
+CREDENTIALS = () WITH (
+ FILE_FORMAT = (TYPE=CSV COMPRESSION=NONE NULL_IF=(
''
- ) FIELD_OPTIONALLY_ENCLOSED_BY = '"'),
+ ) FIELD_OPTIONALLY_ENCLOSED_BY='"'),
HEADER TRUE,
OVERWRITE TRUE,
SINGLE TRUE
)""",
"snowflake": """COPY INTO 's3://example/data.csv'
FROM EXTRA.EXAMPLE.TABLE
-CREDENTIALS = (x)
-STORAGE_INTEGRATION = S3_INTEGRATION
-FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = (
+CREDENTIALS = ()
+FILE_FORMAT = (TYPE=CSV COMPRESSION=NONE NULL_IF=(
''
-) FIELD_OPTIONALLY_ENCLOSED_BY = '"')
+) FIELD_OPTIONALLY_ENCLOSED_BY='"')
HEADER = TRUE
OVERWRITE = TRUE
SINGLE = TRUE""",
@@ -1929,19 +1969,27 @@ SINGLE = TRUE""",
self.validate_all(
"""COPY INTO 's3://example/data.csv'
FROM EXTRA.EXAMPLE.TABLE
- credentials = (x)
STORAGE_INTEGRATION = S3_INTEGRATION
- FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = ('') FIELD_OPTIONALLY_ENCLOSED_BY = '"')
+ FILE_FORMAT = (TYPE=CSV COMPRESSION=NONE NULL_IF=('') FIELD_OPTIONALLY_ENCLOSED_BY='"')
HEADER = TRUE
OVERWRITE = TRUE
SINGLE = TRUE
""",
write={
- "": """COPY INTO 's3://example/data.csv' FROM EXTRA.EXAMPLE.TABLE CREDENTIALS = (x) WITH (STORAGE_INTEGRATION S3_INTEGRATION, FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = ('') FIELD_OPTIONALLY_ENCLOSED_BY = '"'), HEADER TRUE, OVERWRITE TRUE, SINGLE TRUE)""",
- "snowflake": """COPY INTO 's3://example/data.csv' FROM EXTRA.EXAMPLE.TABLE CREDENTIALS = (x) STORAGE_INTEGRATION = S3_INTEGRATION FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = ('') FIELD_OPTIONALLY_ENCLOSED_BY = '"') HEADER = TRUE OVERWRITE = TRUE SINGLE = TRUE""",
+ "": """COPY INTO 's3://example/data.csv' FROM EXTRA.EXAMPLE.TABLE STORAGE_INTEGRATION = S3_INTEGRATION WITH (FILE_FORMAT = (TYPE=CSV COMPRESSION=NONE NULL_IF=('') FIELD_OPTIONALLY_ENCLOSED_BY='"'), HEADER TRUE, OVERWRITE TRUE, SINGLE TRUE)""",
+ "snowflake": """COPY INTO 's3://example/data.csv' FROM EXTRA.EXAMPLE.TABLE STORAGE_INTEGRATION = S3_INTEGRATION FILE_FORMAT = (TYPE=CSV COMPRESSION=NONE NULL_IF=('') FIELD_OPTIONALLY_ENCLOSED_BY='"') HEADER = TRUE OVERWRITE = TRUE SINGLE = TRUE""",
},
)
+ copy_ast = parse_one(
+ """COPY INTO 's3://example/contacts.csv' FROM db.tbl STORAGE_INTEGRATION = PROD_S3_SIDETRADE_INTEGRATION FILE_FORMAT = (FORMAT_NAME=my_csv_format TYPE=CSV COMPRESSION=NONE NULL_IF=('') FIELD_OPTIONALLY_ENCLOSED_BY='"') MATCH_BY_COLUMN_NAME = CASE_SENSITIVE OVERWRITE = TRUE SINGLE = TRUE INCLUDE_METADATA = (col1 = METADATA$START_SCAN_TIME)""",
+ read="snowflake",
+ )
+ self.assertEqual(
+ quote_identifiers(copy_ast, dialect="snowflake").sql(dialect="snowflake"),
+ """COPY INTO 's3://example/contacts.csv' FROM "db"."tbl" STORAGE_INTEGRATION = "PROD_S3_SIDETRADE_INTEGRATION" FILE_FORMAT = (FORMAT_NAME="my_csv_format" TYPE=CSV COMPRESSION=NONE NULL_IF=('') FIELD_OPTIONALLY_ENCLOSED_BY='"') MATCH_BY_COLUMN_NAME = CASE_SENSITIVE OVERWRITE = TRUE SINGLE = TRUE INCLUDE_METADATA = ("col1" = "METADATA$START_SCAN_TIME")""",
+ )
+
def test_querying_semi_structured_data(self):
self.validate_identity("SELECT $1")
self.validate_identity("SELECT $1.elem")
@@ -1958,10 +2006,10 @@ SINGLE = TRUE""",
self.validate_identity("ALTER TABLE table1 SET TAG foo.bar = 'baz'")
self.validate_identity("ALTER TABLE IF EXISTS foo SET TAG a = 'a', b = 'b', c = 'c'")
self.validate_identity(
- """ALTER TABLE tbl SET STAGE_FILE_FORMAT = (TYPE = CSV FIELD_DELIMITER = '|' NULL_IF = () FIELD_OPTIONALLY_ENCLOSED_BY = '"' TIMESTAMP_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' DATE_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' BINARY_FORMAT = BASE64)""",
+ """ALTER TABLE tbl SET STAGE_FILE_FORMAT = (TYPE=CSV FIELD_DELIMITER='|' NULL_IF=('') FIELD_OPTIONALLY_ENCLOSED_BY='"' TIMESTAMP_FORMAT='TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' DATE_FORMAT='TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' BINARY_FORMAT=BASE64)""",
)
self.validate_identity(
- """ALTER TABLE tbl SET STAGE_COPY_OPTIONS = (ON_ERROR = SKIP_FILE SIZE_LIMIT = 5 PURGE = TRUE MATCH_BY_COLUMN_NAME = CASE_SENSITIVE)"""
+ """ALTER TABLE tbl SET STAGE_COPY_OPTIONS = (ON_ERROR=SKIP_FILE SIZE_LIMIT=5 PURGE=TRUE MATCH_BY_COLUMN_NAME=CASE_SENSITIVE)"""
)
self.validate_identity("ALTER TABLE foo UNSET TAG a, b, c")