author     Daniel Baumann <daniel.baumann@progress-linux.org>  2023-07-10 05:36:25 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2023-07-10 05:36:25 +0000
commit     3517257d82008871c863f71a402742556dacc5cf (patch)
tree       65704dee85c7aa830d01426c9bc3080a80c4e863 /tests/dialects
parent     Adding upstream version 17.2.0. (diff)
download   sqlglot-upstream/17.3.0.tar.xz
           sqlglot-upstream/17.3.0.zip

Adding upstream version 17.3.0. (upstream/17.3.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat
-rw-r--r--  tests/dialects/test_bigquery.py |  25
-rw-r--r--  tests/dialects/test_dialect.py  |   2
-rw-r--r--  tests/dialects/test_duckdb.py   |   2
-rw-r--r--  tests/dialects/test_hive.py     |   8
-rw-r--r--  tests/dialects/test_postgres.py |   1
-rw-r--r--  tests/dialects/test_presto.py   |   2
-rw-r--r--  tests/dialects/test_spark.py    |   7
-rw-r--r--  tests/dialects/test_sqlite.py   |   2
-rw-r--r--  tests/dialects/test_tsql.py     | 209
9 files changed, 246 insertions, 12 deletions
diff --git a/tests/dialects/test_bigquery.py b/tests/dialects/test_bigquery.py
index eac3cac..3939ba0 100644
--- a/tests/dialects/test_bigquery.py
+++ b/tests/dialects/test_bigquery.py
@@ -108,6 +108,27 @@ class TestBigQuery(Validator):
         self.validate_all("CAST(x AS TIMESTAMPTZ)", write={"bigquery": "CAST(x AS TIMESTAMP)"})
         self.validate_all("CAST(x AS RECORD)", write={"bigquery": "CAST(x AS STRUCT)"})
         self.validate_all(
+            "MD5(x)",
+            write={
+                "": "MD5_DIGEST(x)",
+                "bigquery": "MD5(x)",
+                "hive": "UNHEX(MD5(x))",
+                "spark": "UNHEX(MD5(x))",
+            },
+        )
+        self.validate_all(
+            "SELECT TO_HEX(MD5(some_string))",
+            read={
+                "duckdb": "SELECT MD5(some_string)",
+                "spark": "SELECT MD5(some_string)",
+            },
+            write={
+                "": "SELECT MD5(some_string)",
+                "bigquery": "SELECT TO_HEX(MD5(some_string))",
+                "duckdb": "SELECT MD5(some_string)",
+            },
+        )
+        self.validate_all(
             "SELECT CAST('20201225' AS TIMESTAMP FORMAT 'YYYYMMDD' AT TIME ZONE 'America/New_York')",
             write={"bigquery": "SELECT PARSE_TIMESTAMP('%Y%m%d', '20201225', 'America/New_York')"},
         )
@@ -263,7 +284,7 @@ class TestBigQuery(Validator):
                 "duckdb": "CAST(a AS BIGINT)",
                 "presto": "CAST(a AS BIGINT)",
                 "hive": "CAST(a AS BIGINT)",
-                "spark": "CAST(a AS LONG)",
+                "spark": "CAST(a AS BIGINT)",
             },
         )
         self.validate_all(
@@ -413,7 +434,7 @@ class TestBigQuery(Validator):
                 "duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a BIGINT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))",
                 "presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a BIGINT, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))",
                 "hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a BIGINT, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
-                "spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: LONG, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
+                "spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: BIGINT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
             },
         )
         self.validate_all(
diff --git a/tests/dialects/test_dialect.py b/tests/dialects/test_dialect.py
index 21efc6b..05738cf 100644
--- a/tests/dialects/test_dialect.py
+++ b/tests/dialects/test_dialect.py
@@ -218,7 +218,7 @@ class TestDialect(Validator):
                 "presto": "CAST(a AS SMALLINT)",
                 "redshift": "CAST(a AS SMALLINT)",
                 "snowflake": "CAST(a AS SMALLINT)",
-                "spark": "CAST(a AS SHORT)",
+                "spark": "CAST(a AS SMALLINT)",
                 "sqlite": "CAST(a AS INTEGER)",
                 "starrocks": "CAST(a AS SMALLINT)",
             },
diff --git a/tests/dialects/test_duckdb.py b/tests/dialects/test_duckdb.py
index cad1c15..336f47d 100644
--- a/tests/dialects/test_duckdb.py
+++ b/tests/dialects/test_duckdb.py
@@ -532,7 +532,7 @@ class TestDuckDB(Validator):
                 "duckdb": "CAST(COL AS BIGINT[])",
                 "presto": "CAST(COL AS ARRAY(BIGINT))",
                 "hive": "CAST(COL AS ARRAY<BIGINT>)",
-                "spark": "CAST(COL AS ARRAY<LONG>)",
+                "spark": "CAST(COL AS ARRAY<BIGINT>)",
                 "postgres": "CAST(COL AS BIGINT[])",
                 "snowflake": "CAST(COL AS ARRAY)",
             },
diff --git a/tests/dialects/test_hive.py b/tests/dialects/test_hive.py
index c9bcf16..0503f6a 100644
--- a/tests/dialects/test_hive.py
+++ b/tests/dialects/test_hive.py
@@ -73,7 +73,7 @@ class TestHive(Validator):
                 "duckdb": "TRY_CAST(1 AS SMALLINT)",
                 "presto": "TRY_CAST(1 AS SMALLINT)",
                 "hive": "CAST(1 AS SMALLINT)",
-                "spark": "CAST(1 AS SHORT)",
+                "spark": "CAST(1 AS SMALLINT)",
             },
         )
         self.validate_all(
@@ -82,7 +82,7 @@ class TestHive(Validator):
                 "duckdb": "TRY_CAST(1 AS SMALLINT)",
                 "presto": "TRY_CAST(1 AS SMALLINT)",
                 "hive": "CAST(1 AS SMALLINT)",
-                "spark": "CAST(1 AS SHORT)",
+                "spark": "CAST(1 AS SMALLINT)",
             },
         )
         self.validate_all(
@@ -91,7 +91,7 @@ class TestHive(Validator):
                 "duckdb": "TRY_CAST(1 AS TINYINT)",
                 "presto": "TRY_CAST(1 AS TINYINT)",
                 "hive": "CAST(1 AS TINYINT)",
-                "spark": "CAST(1 AS BYTE)",
+                "spark": "CAST(1 AS TINYINT)",
             },
         )
         self.validate_all(
@@ -100,7 +100,7 @@ class TestHive(Validator):
                 "duckdb": "TRY_CAST(1 AS BIGINT)",
                 "presto": "TRY_CAST(1 AS BIGINT)",
                 "hive": "CAST(1 AS BIGINT)",
-                "spark": "CAST(1 AS LONG)",
+                "spark": "CAST(1 AS BIGINT)",
             },
         )
         self.validate_all(
diff --git a/tests/dialects/test_postgres.py b/tests/dialects/test_postgres.py
index 052d4cc..605dfff 100644
--- a/tests/dialects/test_postgres.py
+++ b/tests/dialects/test_postgres.py
@@ -121,6 +121,7 @@ class TestPostgres(Validator):
         )
 
     def test_postgres(self):
+        self.validate_identity("CAST(x AS MONEY)")
         self.validate_identity("CAST(x AS INT4RANGE)")
         self.validate_identity("CAST(x AS INT4MULTIRANGE)")
         self.validate_identity("CAST(x AS INT8RANGE)")
diff --git a/tests/dialects/test_presto.py b/tests/dialects/test_presto.py
index 45a0cd9..ddfa9e8 100644
--- a/tests/dialects/test_presto.py
+++ b/tests/dialects/test_presto.py
@@ -65,7 +65,7 @@ class TestPresto(Validator):
                 "bigquery": "CAST([1, 2] AS ARRAY<INT64>)",
                 "duckdb": "CAST(LIST_VALUE(1, 2) AS BIGINT[])",
                 "presto": "CAST(ARRAY[1, 2] AS ARRAY(BIGINT))",
-                "spark": "CAST(ARRAY(1, 2) AS ARRAY<LONG>)",
+                "spark": "CAST(ARRAY(1, 2) AS ARRAY<BIGINT>)",
                 "snowflake": "CAST([1, 2] AS ARRAY)",
             },
         )
diff --git a/tests/dialects/test_spark.py b/tests/dialects/test_spark.py
index 25841c5..32be23e 100644
--- a/tests/dialects/test_spark.py
+++ b/tests/dialects/test_spark.py
@@ -233,6 +233,13 @@ TBLPROPERTIES (
         self.validate_identity("SPLIT(str, pattern, lim)")
 
         self.validate_all(
+            "UNHEX(MD5(x))",
+            write={
+                "bigquery": "FROM_HEX(TO_HEX(MD5(x)))",
+                "spark": "UNHEX(MD5(x))",
+            },
+        )
+        self.validate_all(
             "SELECT * FROM ((VALUES 1))", write={"spark": "SELECT * FROM (VALUES (1))"}
         )
         self.validate_all(
diff --git a/tests/dialects/test_sqlite.py b/tests/dialects/test_sqlite.py
index 10da9b0..4cf0832 100644
--- a/tests/dialects/test_sqlite.py
+++ b/tests/dialects/test_sqlite.py
@@ -95,7 +95,7 @@ class TestSQLite(Validator):
             "SELECT CAST([a].[b] AS SMALLINT) FROM foo",
             write={
                 "sqlite": 'SELECT CAST("a"."b" AS INTEGER) FROM foo',
-                "spark": "SELECT CAST(`a`.`b` AS SHORT) FROM foo",
+                "spark": "SELECT CAST(`a`.`b` AS SMALLINT) FROM foo",
             },
         )
         self.validate_all(
diff --git a/tests/dialects/test_tsql.py b/tests/dialects/test_tsql.py
index ca6d70c..5426859 100644
--- a/tests/dialects/test_tsql.py
+++ b/tests/dialects/test_tsql.py
@@ -43,7 +43,7 @@ class TestTSQL(Validator):
             "SELECT CAST([a].[b] AS SMALLINT) FROM foo",
             write={
                 "tsql": 'SELECT CAST("a"."b" AS SMALLINT) FROM foo',
-                "spark": "SELECT CAST(`a`.`b` AS SHORT) FROM foo",
+                "spark": "SELECT CAST(`a`.`b` AS SMALLINT) FROM foo",
             },
         )
         self.validate_all(
@@ -84,7 +84,7 @@ class TestTSQL(Validator):
             "SELECT CAST([a].[b] AS SMALLINT) FROM foo",
             write={
                 "tsql": 'SELECT CAST("a"."b" AS SMALLINT) FROM foo',
-                "spark": "SELECT CAST(`a`.`b` AS SHORT) FROM foo",
+                "spark": "SELECT CAST(`a`.`b` AS SMALLINT) FROM foo",
             },
         )
         self.validate_all(
@@ -155,6 +155,211 @@ class TestTSQL(Validator):
             },
         )
 
+    def test__types_ints(self):
+        self.validate_all(
+            "CAST(X AS INT)",
+            write={
+                "hive": "CAST(X AS INT)",
+                "spark2": "CAST(X AS INT)",
+                "spark": "CAST(X AS INT)",
+                "tsql": "CAST(X AS INTEGER)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(X AS BIGINT)",
+            write={
+                "hive": "CAST(X AS BIGINT)",
+                "spark2": "CAST(X AS BIGINT)",
+                "spark": "CAST(X AS BIGINT)",
+                "tsql": "CAST(X AS BIGINT)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(X AS SMALLINT)",
+            write={
+                "hive": "CAST(X AS SMALLINT)",
+                "spark2": "CAST(X AS SMALLINT)",
+                "spark": "CAST(X AS SMALLINT)",
+                "tsql": "CAST(X AS SMALLINT)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(X AS TINYINT)",
+            write={
+                "hive": "CAST(X AS TINYINT)",
+                "spark2": "CAST(X AS TINYINT)",
+                "spark": "CAST(X AS TINYINT)",
+                "tsql": "CAST(X AS TINYINT)",
+            },
+        )
+
+    def test_types_decimals(self):
+        self.validate_all(
+            "CAST(x as FLOAT)",
+            write={
+                "spark": "CAST(x AS FLOAT)",
+                "tsql": "CAST(x AS FLOAT)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as DOUBLE)",
+            write={
+                "spark": "CAST(x AS DOUBLE)",
+                "tsql": "CAST(x AS DOUBLE)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as DECIMAL(15, 4))",
+            write={
+                "spark": "CAST(x AS DECIMAL(15, 4))",
+                "tsql": "CAST(x AS NUMERIC(15, 4))",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as NUMERIC(13,3))",
+            write={
+                "spark": "CAST(x AS DECIMAL(13, 3))",
+                "tsql": "CAST(x AS NUMERIC(13, 3))",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as MONEY)",
+            write={
+                "spark": "CAST(x AS DECIMAL(15, 4))",
+                "tsql": "CAST(x AS MONEY)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as SMALLMONEY)",
+            write={
+                "spark": "CAST(x AS DECIMAL(6, 4))",
+                "tsql": "CAST(x AS SMALLMONEY)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as REAL)",
+            write={
+                "spark": "CAST(x AS FLOAT)",
+                "tsql": "CAST(x AS FLOAT)",
+            },
+        )
+
+    def test_types_string(self):
+        self.validate_all(
+            "CAST(x as CHAR(1))",
+            write={
+                "spark": "CAST(x AS CHAR(1))",
+                "tsql": "CAST(x AS CHAR(1))",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as VARCHAR(2))",
+            write={
+                "spark": "CAST(x AS VARCHAR(2))",
+                "tsql": "CAST(x AS VARCHAR(2))",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as NCHAR(1))",
+            write={
+                "spark": "CAST(x AS CHAR(1))",
+                "tsql": "CAST(x AS CHAR(1))",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as NVARCHAR(2))",
+            write={
+                "spark": "CAST(x AS VARCHAR(2))",
+                "tsql": "CAST(x AS VARCHAR(2))",
+            },
+        )
+
+    def test_types_date(self):
+        self.validate_all(
+            "CAST(x as DATE)",
+            write={
+                "spark": "CAST(x AS DATE)",
+                "tsql": "CAST(x AS DATE)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as DATE)",
+            write={
+                "spark": "CAST(x AS DATE)",
+                "tsql": "CAST(x AS DATE)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as TIME(4))",
+            write={
+                "spark": "CAST(x AS TIMESTAMP)",
+                "tsql": "CAST(x AS TIMESTAMP(4))",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as DATETIME2)",
+            write={
+                "spark": "CAST(x AS TIMESTAMP)",
+                "tsql": "CAST(x AS DATETIME2)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as DATETIMEOFFSET)",
+            write={
+                "spark": "CAST(x AS TIMESTAMP)",
+                "tsql": "CAST(x AS TIMESTAMPTZ)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as SMALLDATETIME)",
+            write={
+                "spark": "CAST(x AS TIMESTAMP)",
+                "tsql": "CAST(x AS DATETIME2)",
+            },
+        )
+
+    def test_types_bin(self):
+        self.validate_all(
+            "CAST(x as BIT)",
+            write={
+                "spark": "CAST(x AS BOOLEAN)",
+                "tsql": "CAST(x AS BIT)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as UNIQUEIDENTIFIER)",
+            write={
+                "spark": "CAST(x AS STRING)",
+                "tsql": "CAST(x AS UNIQUEIDENTIFIER)",
+            },
+        )
+
+        self.validate_all(
+            "CAST(x as VARBINARY)",
+            write={
+                "spark": "CAST(x AS BINARY)",
+                "tsql": "CAST(x AS VARBINARY)",
+            },
+        )
+
     def test_udf(self):
         self.validate_identity(
             "CREATE PROCEDURE foo @a INTEGER, @b INTEGER AS SELECT @a = SUM(bla) FROM baz AS bar"