Diffstat (limited to 'tests/dialects')
-rw-r--r--  tests/dialects/test_bigquery.py   19
-rw-r--r--  tests/dialects/test_dialect.py    36
-rw-r--r--  tests/dialects/test_hive.py        4
-rw-r--r--  tests/dialects/test_postgres.py    4
-rw-r--r--  tests/dialects/test_redshift.py   26
-rw-r--r--  tests/dialects/test_snowflake.py   9
6 files changed, 96 insertions(+), 2 deletions(-)
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index cc44311..1d60ec6 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -7,6 +7,11 @@ class TestBigQuery(Validator):
def test_bigquery(self):
self.validate_all(
+ "REGEXP_CONTAINS('foo', '.*')",
+ read={"bigquery": "REGEXP_CONTAINS('foo', '.*')"},
+ write={"mysql": "REGEXP_LIKE('foo', '.*')"},
+ ),
+ self.validate_all(
'"""x"""',
write={
"bigquery": "'x'",
@@ -94,6 +99,20 @@ class TestBigQuery(Validator):
"spark": "SELECT * FROM UNNEST(ARRAY('7', '14')) AS (x)",
},
)
+ self.validate_all(
+ "SELECT ARRAY(SELECT x FROM UNNEST([0, 1]) AS x)",
+ write={"bigquery": "SELECT ARRAY(SELECT x FROM UNNEST([0, 1]) AS x)"},
+ )
+ self.validate_all(
+ "SELECT ARRAY(SELECT DISTINCT x FROM UNNEST(some_numbers) AS x) AS unique_numbers",
+ write={
+ "bigquery": "SELECT ARRAY(SELECT DISTINCT x FROM UNNEST(some_numbers) AS x) AS unique_numbers"
+ },
+ )
+ self.validate_all(
+ "SELECT ARRAY(SELECT * FROM foo JOIN bla ON x = y)",
+ write={"bigquery": "SELECT ARRAY(SELECT * FROM foo JOIN bla ON x = y)"},
+ )
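
The added cases above exercise BigQuery's REGEXP_CONTAINS (rewritten as REGEXP_LIKE for MySQL) and ARRAY(SELECT ...) subqueries that should round-trip unchanged. A minimal sketch of the same conversions outside the test harness, assuming sqlglot's top-level transpile(sql, read=..., write=...) API; only the SQL strings and expected outputs come from the tests themselves:

    import sqlglot

    # REGEXP_CONTAINS parsed from BigQuery is rendered as REGEXP_LIKE for MySQL.
    print(sqlglot.transpile("REGEXP_CONTAINS('foo', '.*')", read="bigquery", write="mysql"))
    # ["REGEXP_LIKE('foo', '.*')"]

    # An ARRAY subquery over UNNEST is expected to come back unchanged in BigQuery.
    print(sqlglot.transpile("SELECT ARRAY(SELECT x FROM UNNEST([0, 1]) AS x)",
                            read="bigquery", write="bigquery")[0])
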
self.validate_all(
"x IS unknown",
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index 6033570..ee67bf1 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -1318,3 +1318,39 @@ SELECT
"BEGIN IMMEDIATE TRANSACTION",
write={"sqlite": "BEGIN IMMEDIATE TRANSACTION"},
)
+
+ def test_merge(self):
+ self.validate_all(
+ """
+ MERGE INTO target USING source ON target.id = source.id
+ WHEN NOT MATCHED THEN INSERT (id) values (source.id)
+ """,
+ write={
+ "bigquery": "MERGE INTO target USING source ON target.id = source.id WHEN NOT MATCHED THEN INSERT (id) VALUES (source.id)",
+ "snowflake": "MERGE INTO target USING source ON target.id = source.id WHEN NOT MATCHED THEN INSERT (id) VALUES (source.id)",
+ "spark": "MERGE INTO target USING source ON target.id = source.id WHEN NOT MATCHED THEN INSERT (id) VALUES (source.id)",
+ },
+ )
+ self.validate_all(
+ """
+ MERGE INTO target USING source ON target.id = source.id
+ WHEN MATCHED AND source.is_deleted = 1 THEN DELETE
+ WHEN MATCHED THEN UPDATE SET val = source.val
+ WHEN NOT MATCHED THEN INSERT (id, val) VALUES (source.id, source.val)
+ """,
+ write={
+ "bigquery": "MERGE INTO target USING source ON target.id = source.id WHEN MATCHED AND source.is_deleted = 1 THEN DELETE WHEN MATCHED THEN UPDATE SET val = source.val WHEN NOT MATCHED THEN INSERT (id, val) VALUES (source.id, source.val)",
+ "snowflake": "MERGE INTO target USING source ON target.id = source.id WHEN MATCHED AND source.is_deleted = 1 THEN DELETE WHEN MATCHED THEN UPDATE SET val = source.val WHEN NOT MATCHED THEN INSERT (id, val) VALUES (source.id, source.val)",
+ "spark": "MERGE INTO target USING source ON target.id = source.id WHEN MATCHED AND source.is_deleted = 1 THEN DELETE WHEN MATCHED THEN UPDATE SET val = source.val WHEN NOT MATCHED THEN INSERT (id, val) VALUES (source.id, source.val)",
+ },
+ )
+ self.validate_all(
+ """
+ MERGE INTO target USING source ON target.id = source.id
+ WHEN MATCHED THEN UPDATE *
+ WHEN NOT MATCHED THEN INSERT *
+ """,
+ write={
+ "spark": "MERGE INTO target USING source ON target.id = source.id WHEN MATCHED THEN UPDATE * WHEN NOT MATCHED THEN INSERT *",
+ },
+ )
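
The new test_merge cases feed a multi-line MERGE statement to the default dialect and expect a single normalized line (uppercased VALUES, collapsed whitespace) when writing BigQuery, Snowflake or Spark. A short sketch of that flow, again assuming sqlglot.transpile; the loop and variable names are illustrative only:

    import sqlglot

    merge_sql = """
        MERGE INTO target USING source ON target.id = source.id
        WHEN NOT MATCHED THEN INSERT (id) values (source.id)
    """

    # Each target dialect should produce the same normalized single-line MERGE.
    for dialect in ("bigquery", "snowflake", "spark"):
        print(sqlglot.transpile(merge_sql, write=dialect)[0])
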
diff --git a/tests/dialects/test_hive.py b/tests/dialects/test_hive.py
index 22d7bce..5ac8714 100644
--- a/tests/dialects/test_hive.py
+++ b/tests/dialects/test_hive.py
@@ -145,6 +145,10 @@ class TestHive(Validator):
},
)
+ self.validate_identity(
+ """CREATE EXTERNAL TABLE x (y INT) ROW FORMAT SERDE 'serde' ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' WITH SERDEPROPERTIES ('input.regex'='')""",
+ )
+
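
The new Hive case is an identity check: the CREATE EXTERNAL TABLE statement with ROW FORMAT SERDE, DELIMITED FIELDS and SERDEPROPERTIES should survive a parse/generate round-trip untouched. Roughly what validate_identity asserts, sketched with sqlglot.transpile using Hive as both read and write dialect (the variable name is illustrative):

    import sqlglot

    ddl = (
        "CREATE EXTERNAL TABLE x (y INT) ROW FORMAT SERDE 'serde' "
        "ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' WITH SERDEPROPERTIES ('input.regex'='')"
    )
    # Identity: parsing with Hive and generating Hive SQL reproduces the input.
    assert sqlglot.transpile(ddl, read="hive", write="hive")[0] == ddl
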
def test_lateral_view(self):
self.validate_all(
"SELECT a, b FROM x LATERAL VIEW EXPLODE(y) t AS a LATERAL VIEW EXPLODE(z) u AS b",
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index cd6117c..962b28b 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -256,3 +256,7 @@ class TestPostgres(Validator):
"SELECT $$Dianne's horse$$",
write={"postgres": "SELECT 'Dianne''s horse'"},
)
+ self.validate_all(
+ "UPDATE MYTABLE T1 SET T1.COL = 13",
+ write={"postgres": "UPDATE MYTABLE AS T1 SET T1.COL = 13"},
+ )
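
The Postgres addition documents alias normalization on UPDATE: a bare table alias gains an explicit AS when the statement is regenerated. Sketched with sqlglot.transpile; the SQL strings are the ones from the test, the call itself is illustrative:

    import sqlglot

    # "MYTABLE T1" is rewritten with an explicit alias keyword: "MYTABLE AS T1".
    print(sqlglot.transpile("UPDATE MYTABLE T1 SET T1.COL = 13",
                            read="postgres", write="postgres")[0])
    # UPDATE MYTABLE AS T1 SET T1.COL = 13
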
diff --git a/tests/dialects/test_redshift.py b/tests/dialects/test_redshift.py
index 1943ee3..3034df5 100644
--- a/tests/dialects/test_redshift.py
+++ b/tests/dialects/test_redshift.py
@@ -56,8 +56,27 @@ class TestRedshift(Validator):
"redshift": 'SELECT a, b FROM (SELECT a, b, ROW_NUMBER() OVER (PARTITION BY a ORDER BY c DESC) AS "_row_number" FROM x) WHERE "_row_number" = 1',
},
)
+ self.validate_all(
+ "DECODE(x, a, b, c, d)",
+ write={
+ "": "MATCHES(x, a, b, c, d)",
+ "oracle": "DECODE(x, a, b, c, d)",
+ "snowflake": "DECODE(x, a, b, c, d)",
+ },
+ )
+ self.validate_all(
+ "NVL(a, b, c, d)",
+ write={
+ "redshift": "COALESCE(a, b, c, d)",
+ "mysql": "COALESCE(a, b, c, d)",
+ "postgres": "COALESCE(a, b, c, d)",
+ },
+ )
def test_identity(self):
+ self.validate_identity(
+ "SELECT DECODE(COL1, 'replace_this', 'with_this', 'replace_that', 'with_that')"
+ )
self.validate_identity("CAST('bla' AS SUPER)")
self.validate_identity("CREATE TABLE real1 (realcol REAL)")
self.validate_identity("CAST('foo' AS HLLSKETCH)")
@@ -70,9 +89,9 @@ class TestRedshift(Validator):
self.validate_identity(
"SELECT COUNT(*) FROM event WHERE eventname LIKE '%Ring%' OR eventname LIKE '%Die%'"
)
- self.validate_identity("CREATE TABLE SOUP DISTKEY(soup1) SORTKEY(soup2) DISTSTYLE AUTO")
+ self.validate_identity("CREATE TABLE SOUP DISTKEY(soup1) SORTKEY(soup2) DISTSTYLE ALL")
self.validate_identity(
- "CREATE TABLE sales (salesid INTEGER NOT NULL) DISTKEY(listid) COMPOUND SORTKEY(listid, sellerid)"
+ "CREATE TABLE sales (salesid INTEGER NOT NULL) DISTKEY(listid) COMPOUND SORTKEY(listid, sellerid) DISTSTYLE AUTO"
)
self.validate_identity(
"COPY customer FROM 's3://mybucket/customer' IAM_ROLE 'arn:aws:iam::0123456789012:role/MyRedshiftRole'"
@@ -80,3 +99,6 @@ class TestRedshift(Validator):
self.validate_identity(
"UNLOAD ('select * from venue') TO 's3://mybucket/unload/' IAM_ROLE 'arn:aws:iam::0123456789012:role/MyRedshiftRole'"
)
+ self.validate_identity(
+ "CREATE TABLE SOUP (SOUP1 VARCHAR(50) NOT NULL ENCODE ZSTD, SOUP2 VARCHAR(70) NULL ENCODE DELTA)"
+ )
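
The Redshift hunks cover function mappings (DECODE kept for Oracle and Snowflake, rendered as MATCHES for the default "" dialect; variadic NVL flattened into COALESCE) plus new DISTSTYLE, SORTKEY and ENCODE identities. A sketch of the two function mappings, assuming sqlglot.transpile:

    import sqlglot

    # DECODE parsed from Redshift is preserved when writing Snowflake.
    print(sqlglot.transpile("DECODE(x, a, b, c, d)", read="redshift", write="snowflake")[0])
    # DECODE(x, a, b, c, d)

    # NVL with more than two arguments becomes COALESCE for Postgres (and MySQL, Redshift).
    print(sqlglot.transpile("NVL(a, b, c, d)", read="redshift", write="postgres")[0])
    # COALESCE(a, b, c, d)
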
diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py
index baca269..bca5aaa 100644
--- a/tests/dialects/test_snowflake.py
+++ b/tests/dialects/test_snowflake.py
@@ -500,3 +500,12 @@ FROM persons AS p, LATERAL FLATTEN(input => p.c, path => 'contact') f, LATERAL F
},
pretty=True,
)
+
+ def test_minus(self):
+ self.validate_all(
+ "SELECT 1 EXCEPT SELECT 1",
+ read={
+ "oracle": "SELECT 1 MINUS SELECT 1",
+ "snowflake": "SELECT 1 MINUS SELECT 1",
+ },
+ )
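
The new test_minus case reads the MINUS set operator from Oracle (and Snowflake itself) and expects it to be written as EXCEPT in Snowflake output. Sketched with sqlglot.transpile; the dialect names and SQL come from the test, the call is illustrative:

    import sqlglot

    # MINUS parsed from Oracle is rendered as EXCEPT for Snowflake.
    print(sqlglot.transpile("SELECT 1 MINUS SELECT 1", read="oracle", write="snowflake")[0])
    # SELECT 1 EXCEPT SELECT 1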