summaryrefslogtreecommitdiffstats
path: root/sqlglot/dialects/spark2.py
diff options
context:
space:
mode:
Diffstat (limited to 'sqlglot/dialects/spark2.py')
-rw-r--r-- sqlglot/dialects/spark2.py | 24
1 file changed, 22 insertions(+), 2 deletions(-)
diff --git a/sqlglot/dialects/spark2.py b/sqlglot/dialects/spark2.py
index da84bd8..aa09f53 100644
--- a/sqlglot/dialects/spark2.py
+++ b/sqlglot/dialects/spark2.py
@@ -48,8 +48,11 @@ def _unix_to_time_sql(self: Spark2.Generator, expression: exp.UnixToTime) -> str
return f"TIMESTAMP_MILLIS({timestamp})"
if scale == exp.UnixToTime.MICROS:
return f"TIMESTAMP_MICROS({timestamp})"
+ if scale == exp.UnixToTime.NANOS:
+ return f"TIMESTAMP_SECONDS({timestamp} / 1000000000)"
- raise ValueError("Improper scale for timestamp")
+ self.unsupported(f"Unsupported scale for timestamp: {scale}.")
+ return ""
def _unalias_pivot(expression: exp.Expression) -> exp.Expression:
@@ -119,7 +122,11 @@ class Spark2(Hive):
"DOUBLE": _parse_as_cast("double"),
"FLOAT": _parse_as_cast("float"),
"FROM_UTC_TIMESTAMP": lambda args: exp.AtTimeZone(
- this=exp.Cast(this=seq_get(args, 0), to=exp.DataType.build("timestamp")),
+ this=exp.cast_unless(
+ seq_get(args, 0) or exp.Var(this=""),
+ exp.DataType.build("timestamp"),
+ exp.DataType.build("timestamp"),
+ ),
zone=seq_get(args, 1),
),
"IIF": exp.If.from_arg_list,
@@ -224,6 +231,19 @@ class Spark2(Hive):
WRAP_DERIVED_VALUES = False
CREATE_FUNCTION_RETURN_AS = False
+ def struct_sql(self, expression: exp.Struct) -> str:
+ args = []
+ for arg in expression.expressions:
+ if isinstance(arg, self.KEY_VALUE_DEFINITONS):
+ if isinstance(arg, exp.Bracket):
+ args.append(exp.alias_(arg.this, arg.expressions[0].name))
+ else:
+ args.append(exp.alias_(arg.expression, arg.this.name))
+ else:
+ args.append(arg)
+
+ return self.func("STRUCT", *args)
+
def temporary_storage_provider(self, expression: exp.Create) -> exp.Create:
# spark2, spark, Databricks require a storage provider for temporary tables
provider = exp.FileFormatProperty(this=exp.Literal.string("parquet"))