Diffstat (limited to 'sqlglot/dialects/spark2.py')
-rw-r--r--  sqlglot/dialects/spark2.py | 14
1 file changed, 3 insertions(+), 11 deletions(-)
diff --git a/sqlglot/dialects/spark2.py b/sqlglot/dialects/spark2.py
index aa09f53..e27ba18 100644
--- a/sqlglot/dialects/spark2.py
+++ b/sqlglot/dialects/spark2.py
@@ -48,11 +48,8 @@ def _unix_to_time_sql(self: Spark2.Generator, expression: exp.UnixToTime) -> str
return f"TIMESTAMP_MILLIS({timestamp})"
if scale == exp.UnixToTime.MICROS:
return f"TIMESTAMP_MICROS({timestamp})"
- if scale == exp.UnixToTime.NANOS:
- return f"TIMESTAMP_SECONDS({timestamp} / 1000000000)"
- self.unsupported(f"Unsupported scale for timestamp: {scale}.")
- return ""
+ return f"TIMESTAMP_SECONDS({timestamp} / POW(10, {scale}))"
def _unalias_pivot(expression: exp.Expression) -> exp.Expression:
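Note: the first hunk drops the hard-coded NANOS branch and the unsupported() fallback; any scale other than MILLIS/MICROS is now divided by POW(10, scale), treating the scale as a base-10 exponent (the removed NANOS branch divided by 1000000000, i.e. 10^9). A minimal sketch of the resulting SQL, assuming the scale is stored as a numeric literal exponent such as 9 for nanoseconds:

    from sqlglot import exp
    from sqlglot.dialects.spark2 import Spark2

    # Build a UnixToTime node with an explicit scale of 9 (nanoseconds, assumed
    # to be the exponent form implied by the POW(10, scale) fallback).
    node = exp.UnixToTime(this=exp.column("ts"), scale=exp.Literal.number(9))
    print(Spark2().generate(node))
    # expected: TIMESTAMP_SECONDS(ts / POW(10, 9))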
@@ -93,12 +90,7 @@ def _unqualify_pivot_columns(expression: exp.Expression) -> exp.Expression:
        SELECT * FROM tbl PIVOT(SUM(tbl.sales) FOR quarter IN ('Q1', 'Q1'))
    """
    if isinstance(expression, exp.Pivot):
-        expression.args["field"].transform(
-            lambda node: exp.column(node.output_name, quoted=node.this.quoted)
-            if isinstance(node, exp.Column)
-            else node,
-            copy=False,
-        )
+        expression.set("field", transforms.unqualify_columns(expression.args["field"]))
    return expression
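Note: the second hunk swaps the hand-rolled transform lambda for the shared transforms.unqualify_columns helper, which strips the table qualifier from every column inside the PIVOT field. The docstring's doctest still describes the behaviour; a rough equivalent, assuming the spark2 dialect name renders the same way as the spark example in the docstring:

    from sqlglot import parse_one

    sql = "SELECT * FROM tbl PIVOT(SUM(tbl.sales) FOR tbl.quarter IN ('Q1', 'Q2'))"
    # The qualified pivot column tbl.quarter should come back unqualified.
    print(parse_one(sql).sql(dialect="spark2"))
    # e.g. SELECT * FROM tbl PIVOT(SUM(tbl.sales) FOR quarter IN (...))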
@@ -234,7 +226,7 @@ class Spark2(Hive):
        def struct_sql(self, expression: exp.Struct) -> str:
            args = []
            for arg in expression.expressions:
-                if isinstance(arg, self.KEY_VALUE_DEFINITONS):
+                if isinstance(arg, self.KEY_VALUE_DEFINITIONS):
                    if isinstance(arg, exp.Bracket):
                        args.append(exp.alias_(arg.this, arg.expressions[0].name))
                    else:
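Note: the last hunk only corrects the spelling of the generator's KEY_VALUE_DEFINITIONS tuple in the isinstance check that turns struct key/value pairs into aliased arguments. A rough way to exercise struct_sql, assuming a DuckDB-style struct literal goes through this code path:

    import sqlglot

    # Key/value pairs in the struct literal should come out as aliased STRUCT
    # arguments; the exact rendering below is an assumption, not taken from the diff.
    print(sqlglot.transpile("SELECT {'a': 1, 'b': 2}", read="duckdb", write="spark2")[0])
    # e.g. SELECT STRUCT(1 AS a, 2 AS b)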