sqlglot.dialects.presto
"""Presto/Trino dialect: tokenizer keywords, function parsers and SQL
generation rules for transpiling between sqlglot's generic AST and Presto SQL.
"""

from __future__ import annotations

import typing as t

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    date_trunc_to_time,
    format_time_lambda,
    if_sql,
    no_ilike_sql,
    no_pivot_sql,
    no_safe_divide_sql,
    rename_func,
    struct_extract_sql,
    timestamptrunc_sql,
    timestrtotime_sql,
)
from sqlglot.dialects.mysql import MySQL
from sqlglot.errors import UnsupportedError
from sqlglot.helper import seq_get
from sqlglot.tokens import TokenType


def _approx_distinct_sql(self: generator.Generator, expression: exp.ApproxDistinct) -> str:
    """Render APPROX_DISTINCT, appending the optional accuracy argument if present."""
    accuracy = expression.args.get("accuracy")
    accuracy = ", " + self.sql(accuracy) if accuracy else ""
    return f"APPROX_DISTINCT({self.sql(expression, 'this')}{accuracy})"


def _datatype_sql(self: generator.Generator, expression: exp.DataType) -> str:
    """Render a data type; TIMESTAMPTZ maps to TIMESTAMP (see TYPE_MAPPING), so the
    zone information is re-added here as a `WITH TIME ZONE` suffix."""
    sql = self.datatype_sql(expression)
    if expression.this == exp.DataType.Type.TIMESTAMPTZ:
        sql = f"{sql} WITH TIME ZONE"
    return sql


def _explode_to_unnest_sql(self: generator.Generator, expression: exp.Lateral) -> str:
    """Rewrite LATERAL EXPLODE/POSEXPLODE as a CROSS JOIN UNNEST, which is the
    Presto equivalent (POSEXPLODE adds WITH ORDINALITY)."""
    if isinstance(expression.this, (exp.Explode, exp.Posexplode)):
        return self.sql(
            exp.Join(
                this=exp.Unnest(
                    expressions=[expression.this.this],
                    alias=expression.args.get("alias"),
                    ordinality=isinstance(expression.this, exp.Posexplode),
                ),
                kind="cross",
            )
        )
    return self.lateral_sql(expression)


def _initcap_sql(self: generator.Generator, expression: exp.Initcap) -> str:
    """Emulate INITCAP with REGEXP_REPLACE: uppercase the first letter of each
    word and lowercase the rest."""
    regex = r"(\w)(\w*)"
    return f"REGEXP_REPLACE({self.sql(expression, 'this')}, '{regex}', x -> UPPER(x[1]) || LOWER(x[2]))"


def _decode_sql(self: generator.Generator, expression: exp.Decode) -> str:
    """Render DECODE as FROM_UTF8; Presto only supports the UTF-8 charset."""
    _ensure_utf8(expression.args["charset"])
    return self.func("FROM_UTF8", expression.this, expression.args.get("replace"))


def _encode_sql(self: generator.Generator, expression: exp.Encode) -> str:
    """Render ENCODE as TO_UTF8; Presto only supports the UTF-8 charset."""
    _ensure_utf8(expression.args["charset"])
    return f"TO_UTF8({self.sql(expression, 'this')})"


def _no_sort_array(self: generator.Generator, expression: exp.SortArray) -> str:
    """Render SORT_ARRAY as ARRAY_SORT, using a reversing comparator lambda for
    descending order."""
    if expression.args.get("asc") == exp.false():
        comparator = "(a, b) -> CASE WHEN a < b THEN 1 WHEN a > b THEN -1 ELSE 0 END"
    else:
        comparator = None
    return self.func("ARRAY_SORT", expression.this, comparator)


def _schema_sql(self: generator.Generator, expression: exp.Schema) -> str:
    """Render a schema; inside a property (e.g. partitioned_by) it becomes an
    ARRAY of column-name strings, and property schemas are merged into the
    main schema otherwise."""
    if isinstance(expression.parent, exp.Property):
        columns = ", ".join(f"'{c.name}'" for c in expression.expressions)
        return f"ARRAY[{columns}]"

    if expression.parent:
        for schema in expression.parent.find_all(exp.Schema):
            if isinstance(schema.parent, exp.Property):
                expression = expression.copy()
                expression.expressions.extend(schema.expressions)

    return self.schema_sql(expression)


def _quantile_sql(self: generator.Generator, expression: exp.Quantile) -> str:
    """Approximate exact quantiles with APPROX_PERCENTILE and warn, since Presto
    has no exact quantile function."""
    self.unsupported("Presto does not support exact quantiles")
    return f"APPROX_PERCENTILE({self.sql(expression, 'this')}, {self.sql(expression, 'quantile')})"


def _str_to_time_sql(
    self: generator.Generator, expression: exp.StrToDate | exp.StrToTime | exp.TsOrDsToDate
) -> str:
    """Render string-to-time conversions as DATE_PARSE with the dialect's format."""
    return f"DATE_PARSE({self.sql(expression, 'this')}, {self.format_time(expression)})"


def _ts_or_ds_to_date_sql(self: generator.Generator, expression: exp.TsOrDsToDate) -> str:
    """Cast a timestamp-or-datestring to DATE, parsing with a custom format when
    one is given, otherwise truncating to the first 10 chars (YYYY-MM-DD)."""
    time_format = self.format_time(expression)
    if time_format and time_format not in (Presto.time_format, Presto.date_format):
        return f"CAST({_str_to_time_sql(self, expression)} AS DATE)"
    return f"CAST(SUBSTR(CAST({self.sql(expression, 'this')} AS VARCHAR), 1, 10) AS DATE)"


def _ts_or_ds_add_sql(self: generator.Generator, expression: exp.TsOrDsAdd) -> str:
    """Render TsOrDsAdd as DATE_ADD, first normalizing the operand to a DATE by
    parsing its first 10 characters (CURRENT_DATE needs no normalization)."""
    this = expression.this

    if not isinstance(this, exp.CurrentDate):
        this = self.func(
            "DATE_PARSE",
            self.func(
                "SUBSTR",
                this if this.is_string else exp.cast(this, "VARCHAR"),
                exp.Literal.number(1),
                exp.Literal.number(10),
            ),
            Presto.date_format,
        )

    return self.func(
        "DATE_ADD",
        exp.Literal.string(expression.text("unit") or "day"),
        expression.expression,
        this,
    )


def _ensure_utf8(charset: exp.Literal) -> None:
    """Raise UnsupportedError unless the charset literal is utf-8 (case-insensitive)."""
    if charset.name.lower() != "utf-8":
        raise UnsupportedError(f"Unsupported charset {charset}")


def _approx_percentile(args: t.List) -> exp.Expression:
    """Parse APPROX_PERCENTILE, mapping its optional weight/accuracy arguments
    by arity (4 args: weighted + accuracy; 3 args: accuracy only)."""
    if len(args) == 4:
        return exp.ApproxQuantile(
            this=seq_get(args, 0),
            weight=seq_get(args, 1),
            quantile=seq_get(args, 2),
            accuracy=seq_get(args, 3),
        )
    if len(args) == 3:
        return exp.ApproxQuantile(
            this=seq_get(args, 0),
            quantile=seq_get(args, 1),
            accuracy=seq_get(args, 2),
        )
    return exp.ApproxQuantile.from_arg_list(args)


def _from_unixtime(args: t.List) -> exp.Expression:
    """Parse FROM_UNIXTIME by arity: 3 args carry hour/minute offsets, 2 args
    carry a time zone, otherwise fall back to the default arg list."""
    if len(args) == 3:
        return exp.UnixToTime(
            this=seq_get(args, 0),
            hours=seq_get(args, 1),
            minutes=seq_get(args, 2),
        )
    if len(args) == 2:
        return exp.UnixToTime(
            this=seq_get(args, 0),
            zone=seq_get(args, 1),
        )
    return exp.UnixToTime.from_arg_list(args)


def _unnest_sequence(expression: exp.Expression) -> exp.Expression:
    """Transform: wrap a GENERATE_SERIES used as a table into UNNEST(SEQUENCE(...)),
    preserving the table alias as a column alias on a `_u` table alias."""
    if isinstance(expression, exp.Table):
        if isinstance(expression.this, exp.GenerateSeries):
            unnest = exp.Unnest(expressions=[expression.this])

            if expression.alias:
                return exp.alias_(
                    unnest,
                    alias="_u",
                    table=[expression.alias],
                    copy=False,
                )
            return unnest
    return expression


class Presto(Dialect):
    # Presto arrays are 1-indexed and NULLs sort last; time formats follow MySQL.
    index_offset = 1
    null_ordering = "nulls_are_last"
    time_format = MySQL.time_format
    time_mapping = MySQL.time_mapping

    class Tokenizer(tokens.Tokenizer):
        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "START": TokenType.BEGIN,
            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
            "ROW": TokenType.STRUCT,
        }

    class Parser(parser.Parser):
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            "APPROX_DISTINCT": exp.ApproxDistinct.from_arg_list,
            "APPROX_PERCENTILE": _approx_percentile,
            "CARDINALITY": exp.ArraySize.from_arg_list,
            "CONTAINS": exp.ArrayContains.from_arg_list,
            # Presto's DATE_ADD/DATE_DIFF take (unit, amount, value); the
            # generic AST stores them as (this=value, expression=amount, unit).
            "DATE_ADD": lambda args: exp.DateAdd(
                this=seq_get(args, 2),
                expression=seq_get(args, 1),
                unit=seq_get(args, 0),
            ),
            "DATE_DIFF": lambda args: exp.DateDiff(
                this=seq_get(args, 2),
                expression=seq_get(args, 1),
                unit=seq_get(args, 0),
            ),
            "DATE_FORMAT": format_time_lambda(exp.TimeToStr, "presto"),
            "DATE_PARSE": format_time_lambda(exp.StrToTime, "presto"),
            "DATE_TRUNC": date_trunc_to_time,
            "FROM_HEX": exp.Unhex.from_arg_list,
            "FROM_UNIXTIME": _from_unixtime,
            "FROM_UTF8": lambda args: exp.Decode(
                this=seq_get(args, 0), replace=seq_get(args, 1), charset=exp.Literal.string("utf-8")
            ),
            "NOW": exp.CurrentTimestamp.from_arg_list,
            "SEQUENCE": exp.GenerateSeries.from_arg_list,
            "STRPOS": lambda args: exp.StrPosition(
                this=seq_get(args, 0),
                substr=seq_get(args, 1),
                instance=seq_get(args, 2),
            ),
            "TO_UNIXTIME": exp.TimeToUnix.from_arg_list,
            "TO_HEX": exp.Hex.from_arg_list,
            "TO_UTF8": lambda args: exp.Encode(
                this=seq_get(args, 0), charset=exp.Literal.string("utf-8")
            ),
        }
        # In Presto TRIM is an ordinary function call, not special syntax.
        FUNCTION_PARSERS = parser.Parser.FUNCTION_PARSERS.copy()
        FUNCTION_PARSERS.pop("TRIM")

    class Generator(generator.Generator):
        INTERVAL_ALLOWS_PLURAL_FORM = False
        JOIN_HINTS = False
        TABLE_HINTS = False
        STRUCT_DELIMITER = ("(", ")")

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.LocationProperty: exp.Properties.Location.UNSUPPORTED,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.FLOAT: "REAL",
            exp.DataType.Type.BINARY: "VARBINARY",
            exp.DataType.Type.TEXT: "VARCHAR",
            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
            exp.DataType.Type.STRUCT: "ROW",
        }

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.ApproxDistinct: _approx_distinct_sql,
            exp.ApproxQuantile: rename_func("APPROX_PERCENTILE"),
            exp.Array: lambda self, e: f"ARRAY[{self.expressions(e, flat=True)}]",
            exp.ArrayConcat: rename_func("CONCAT"),
            exp.ArrayContains: rename_func("CONTAINS"),
            exp.ArraySize: rename_func("CARDINALITY"),
            exp.BitwiseAnd: lambda self, e: f"BITWISE_AND({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseLeftShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_LEFT({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseNot: lambda self, e: f"BITWISE_NOT({self.sql(e, 'this')})",
            exp.BitwiseOr: lambda self, e: f"BITWISE_OR({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseRightShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_RIGHT({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseXor: lambda self, e: f"BITWISE_XOR({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
            exp.DataType: _datatype_sql,
            exp.DateAdd: lambda self, e: self.func(
                "DATE_ADD", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this
            ),
            exp.DateDiff: lambda self, e: self.func(
                "DATE_DIFF", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this
            ),
            exp.DateStrToDate: lambda self, e: f"CAST(DATE_PARSE({self.sql(e, 'this')}, {Presto.date_format}) AS DATE)",
            exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Presto.dateint_format}) AS INT)",
            exp.Decode: _decode_sql,
            exp.DiToDate: lambda self, e: f"CAST(DATE_PARSE(CAST({self.sql(e, 'this')} AS VARCHAR), {Presto.dateint_format}) AS DATE)",
            exp.Encode: _encode_sql,
            exp.FileFormatProperty: lambda self, e: f"FORMAT='{e.name.upper()}'",
            exp.Group: transforms.preprocess([transforms.unalias_group]),
            exp.Hex: rename_func("TO_HEX"),
            exp.If: if_sql,
            exp.ILike: no_ilike_sql,
            exp.Initcap: _initcap_sql,
            exp.Lateral: _explode_to_unnest_sql,
            exp.Levenshtein: rename_func("LEVENSHTEIN_DISTANCE"),
            exp.LogicalAnd: rename_func("BOOL_AND"),
            exp.LogicalOr: rename_func("BOOL_OR"),
            exp.Pivot: no_pivot_sql,
            exp.Quantile: _quantile_sql,
            exp.SafeDivide: no_safe_divide_sql,
            exp.Schema: _schema_sql,
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_qualify,
                    transforms.eliminate_distinct_on,
                    transforms.explode_to_unnest,
                ]
            ),
            exp.SortArray: _no_sort_array,
            exp.StrPosition: rename_func("STRPOS"),
            exp.StrToDate: lambda self, e: f"CAST({_str_to_time_sql(self, e)} AS DATE)",
            exp.StrToTime: _str_to_time_sql,
            exp.StrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {self.format_time(e)}))",
            exp.StructExtract: struct_extract_sql,
            exp.Table: transforms.preprocess([_unnest_sequence]),
            exp.TimestampTrunc: timestamptrunc_sql,
            exp.TimeStrToDate: timestrtotime_sql,
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeStrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {Presto.time_format}))",
            exp.TimeToStr: lambda self, e: f"DATE_FORMAT({self.sql(e, 'this')}, {self.format_time(e)})",
            exp.TimeToUnix: rename_func("TO_UNIXTIME"),
            exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS VARCHAR), '-', ''), 1, 8) AS INT)",
            exp.TsOrDsAdd: _ts_or_ds_add_sql,
            exp.TsOrDsToDate: _ts_or_ds_to_date_sql,
            exp.Unhex: rename_func("FROM_HEX"),
            exp.UnixToStr: lambda self, e: f"DATE_FORMAT(FROM_UNIXTIME({self.sql(e, 'this')}), {self.format_time(e)})",
            exp.UnixToTime: rename_func("FROM_UNIXTIME"),
            exp.UnixToTimeStr: lambda self, e: f"CAST(FROM_UNIXTIME({self.sql(e, 'this')}) AS VARCHAR)",
            exp.VariancePop: rename_func("VAR_POP"),
            exp.With: transforms.preprocess([transforms.add_recursive_cte_column_names]),
            exp.WithinGroup: transforms.preprocess(
                [transforms.remove_within_group_for_percentiles]
            ),
        }

        def interval_sql(self, expression: exp.Interval) -> str:
            """Rewrite week intervals as `n * INTERVAL '7' day`, since Presto
            intervals have no WEEK unit."""
            unit = self.sql(expression, "unit")
            if expression.this and unit.lower().startswith("week"):
                return f"({expression.this.name} * INTERVAL '7' day)"
            return super().interval_sql(expression)

        def transaction_sql(self, expression: exp.Transaction) -> str:
            """Render transactions as START TRANSACTION with optional modes."""
            modes = expression.args.get("modes")
            modes = f" {', '.join(modes)}" if modes else ""
            return f"START TRANSACTION{modes}"

        def generateseries_sql(self, expression: exp.GenerateSeries) -> str:
            """Render GENERATE_SERIES as SEQUENCE, casting the non-cast endpoint
            to TIMESTAMP when the other endpoint is cast to TIMESTAMP so both
            bounds have the same type."""
            start = expression.args["start"]
            end = expression.args["end"]
            step = expression.args.get("step")

            if isinstance(start, exp.Cast):
                target_type = start.to
            elif isinstance(end, exp.Cast):
                target_type = end.to
            else:
                target_type = None

            if target_type and target_type.is_type(exp.DataType.Type.TIMESTAMP):
                to = target_type.copy()

                # Bug fix: only dereference `start.to` when `start` is a Cast;
                # otherwise (only `end` was cast) accessing `.to` on a non-Cast
                # node raises AttributeError.
                if isinstance(start, exp.Cast) and target_type is start.to:
                    end = exp.Cast(this=end, to=to)
                else:
                    start = exp.Cast(this=start, to=to)

            return self.func("SEQUENCE", start, end, step)
185class Presto(Dialect): 186 index_offset = 1 187 null_ordering = "nulls_are_last" 188 time_format = MySQL.time_format 189 time_mapping = MySQL.time_mapping 190 191 class Tokenizer(tokens.Tokenizer): 192 KEYWORDS = { 193 **tokens.Tokenizer.KEYWORDS, 194 "START": TokenType.BEGIN, 195 "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE, 196 "ROW": TokenType.STRUCT, 197 } 198 199 class Parser(parser.Parser): 200 FUNCTIONS = { 201 **parser.Parser.FUNCTIONS, 202 "APPROX_DISTINCT": exp.ApproxDistinct.from_arg_list, 203 "APPROX_PERCENTILE": _approx_percentile, 204 "CARDINALITY": exp.ArraySize.from_arg_list, 205 "CONTAINS": exp.ArrayContains.from_arg_list, 206 "DATE_ADD": lambda args: exp.DateAdd( 207 this=seq_get(args, 2), 208 expression=seq_get(args, 1), 209 unit=seq_get(args, 0), 210 ), 211 "DATE_DIFF": lambda args: exp.DateDiff( 212 this=seq_get(args, 2), 213 expression=seq_get(args, 1), 214 unit=seq_get(args, 0), 215 ), 216 "DATE_FORMAT": format_time_lambda(exp.TimeToStr, "presto"), 217 "DATE_PARSE": format_time_lambda(exp.StrToTime, "presto"), 218 "DATE_TRUNC": date_trunc_to_time, 219 "FROM_HEX": exp.Unhex.from_arg_list, 220 "FROM_UNIXTIME": _from_unixtime, 221 "FROM_UTF8": lambda args: exp.Decode( 222 this=seq_get(args, 0), replace=seq_get(args, 1), charset=exp.Literal.string("utf-8") 223 ), 224 "NOW": exp.CurrentTimestamp.from_arg_list, 225 "SEQUENCE": exp.GenerateSeries.from_arg_list, 226 "STRPOS": lambda args: exp.StrPosition( 227 this=seq_get(args, 0), 228 substr=seq_get(args, 1), 229 instance=seq_get(args, 2), 230 ), 231 "TO_UNIXTIME": exp.TimeToUnix.from_arg_list, 232 "TO_HEX": exp.Hex.from_arg_list, 233 "TO_UTF8": lambda args: exp.Encode( 234 this=seq_get(args, 0), charset=exp.Literal.string("utf-8") 235 ), 236 } 237 FUNCTION_PARSERS = parser.Parser.FUNCTION_PARSERS.copy() 238 FUNCTION_PARSERS.pop("TRIM") 239 240 class Generator(generator.Generator): 241 INTERVAL_ALLOWS_PLURAL_FORM = False 242 JOIN_HINTS = False 243 TABLE_HINTS = False 244 STRUCT_DELIMITER = ("(", 
")") 245 246 PROPERTIES_LOCATION = { 247 **generator.Generator.PROPERTIES_LOCATION, 248 exp.LocationProperty: exp.Properties.Location.UNSUPPORTED, 249 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 250 } 251 252 TYPE_MAPPING = { 253 **generator.Generator.TYPE_MAPPING, 254 exp.DataType.Type.INT: "INTEGER", 255 exp.DataType.Type.FLOAT: "REAL", 256 exp.DataType.Type.BINARY: "VARBINARY", 257 exp.DataType.Type.TEXT: "VARCHAR", 258 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", 259 exp.DataType.Type.STRUCT: "ROW", 260 } 261 262 TRANSFORMS = { 263 **generator.Generator.TRANSFORMS, 264 exp.ApproxDistinct: _approx_distinct_sql, 265 exp.ApproxQuantile: rename_func("APPROX_PERCENTILE"), 266 exp.Array: lambda self, e: f"ARRAY[{self.expressions(e, flat=True)}]", 267 exp.ArrayConcat: rename_func("CONCAT"), 268 exp.ArrayContains: rename_func("CONTAINS"), 269 exp.ArraySize: rename_func("CARDINALITY"), 270 exp.BitwiseAnd: lambda self, e: f"BITWISE_AND({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 271 exp.BitwiseLeftShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_LEFT({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 272 exp.BitwiseNot: lambda self, e: f"BITWISE_NOT({self.sql(e, 'this')})", 273 exp.BitwiseOr: lambda self, e: f"BITWISE_OR({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 274 exp.BitwiseRightShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_RIGHT({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 275 exp.BitwiseXor: lambda self, e: f"BITWISE_XOR({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 276 exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP", 277 exp.DataType: _datatype_sql, 278 exp.DateAdd: lambda self, e: self.func( 279 "DATE_ADD", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this 280 ), 281 exp.DateDiff: lambda self, e: self.func( 282 "DATE_DIFF", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this 283 ), 284 exp.DateStrToDate: lambda self, e: f"CAST(DATE_PARSE({self.sql(e, 'this')}, 
{Presto.date_format}) AS DATE)", 285 exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Presto.dateint_format}) AS INT)", 286 exp.Decode: _decode_sql, 287 exp.DiToDate: lambda self, e: f"CAST(DATE_PARSE(CAST({self.sql(e, 'this')} AS VARCHAR), {Presto.dateint_format}) AS DATE)", 288 exp.Encode: _encode_sql, 289 exp.FileFormatProperty: lambda self, e: f"FORMAT='{e.name.upper()}'", 290 exp.Group: transforms.preprocess([transforms.unalias_group]), 291 exp.Hex: rename_func("TO_HEX"), 292 exp.If: if_sql, 293 exp.ILike: no_ilike_sql, 294 exp.Initcap: _initcap_sql, 295 exp.Lateral: _explode_to_unnest_sql, 296 exp.Levenshtein: rename_func("LEVENSHTEIN_DISTANCE"), 297 exp.LogicalAnd: rename_func("BOOL_AND"), 298 exp.LogicalOr: rename_func("BOOL_OR"), 299 exp.Pivot: no_pivot_sql, 300 exp.Quantile: _quantile_sql, 301 exp.SafeDivide: no_safe_divide_sql, 302 exp.Schema: _schema_sql, 303 exp.Select: transforms.preprocess( 304 [ 305 transforms.eliminate_qualify, 306 transforms.eliminate_distinct_on, 307 transforms.explode_to_unnest, 308 ] 309 ), 310 exp.SortArray: _no_sort_array, 311 exp.StrPosition: rename_func("STRPOS"), 312 exp.StrToDate: lambda self, e: f"CAST({_str_to_time_sql(self, e)} AS DATE)", 313 exp.StrToTime: _str_to_time_sql, 314 exp.StrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {self.format_time(e)}))", 315 exp.StructExtract: struct_extract_sql, 316 exp.Table: transforms.preprocess([_unnest_sequence]), 317 exp.TimestampTrunc: timestamptrunc_sql, 318 exp.TimeStrToDate: timestrtotime_sql, 319 exp.TimeStrToTime: timestrtotime_sql, 320 exp.TimeStrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {Presto.time_format}))", 321 exp.TimeToStr: lambda self, e: f"DATE_FORMAT({self.sql(e, 'this')}, {self.format_time(e)})", 322 exp.TimeToUnix: rename_func("TO_UNIXTIME"), 323 exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS VARCHAR), '-', ''), 1, 8) AS INT)", 324 
exp.TsOrDsAdd: _ts_or_ds_add_sql, 325 exp.TsOrDsToDate: _ts_or_ds_to_date_sql, 326 exp.Unhex: rename_func("FROM_HEX"), 327 exp.UnixToStr: lambda self, e: f"DATE_FORMAT(FROM_UNIXTIME({self.sql(e, 'this')}), {self.format_time(e)})", 328 exp.UnixToTime: rename_func("FROM_UNIXTIME"), 329 exp.UnixToTimeStr: lambda self, e: f"CAST(FROM_UNIXTIME({self.sql(e, 'this')}) AS VARCHAR)", 330 exp.VariancePop: rename_func("VAR_POP"), 331 exp.With: transforms.preprocess([transforms.add_recursive_cte_column_names]), 332 exp.WithinGroup: transforms.preprocess( 333 [transforms.remove_within_group_for_percentiles] 334 ), 335 } 336 337 def interval_sql(self, expression: exp.Interval) -> str: 338 unit = self.sql(expression, "unit") 339 if expression.this and unit.lower().startswith("week"): 340 return f"({expression.this.name} * INTERVAL '7' day)" 341 return super().interval_sql(expression) 342 343 def transaction_sql(self, expression: exp.Transaction) -> str: 344 modes = expression.args.get("modes") 345 modes = f" {', '.join(modes)}" if modes else "" 346 return f"START TRANSACTION{modes}" 347 348 def generateseries_sql(self, expression: exp.GenerateSeries) -> str: 349 start = expression.args["start"] 350 end = expression.args["end"] 351 step = expression.args.get("step") 352 353 if isinstance(start, exp.Cast): 354 target_type = start.to 355 elif isinstance(end, exp.Cast): 356 target_type = end.to 357 else: 358 target_type = None 359 360 if target_type and target_type.is_type(exp.DataType.Type.TIMESTAMP): 361 to = target_type.copy() 362 363 if target_type is start.to: 364 end = exp.Cast(this=end, to=to) 365 else: 366 start = exp.Cast(this=start, to=to) 367 368 return self.func("SEQUENCE", start, end, step)
191 class Tokenizer(tokens.Tokenizer): 192 KEYWORDS = { 193 **tokens.Tokenizer.KEYWORDS, 194 "START": TokenType.BEGIN, 195 "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE, 196 "ROW": TokenType.STRUCT, 197 }
Inherited Members
199 class Parser(parser.Parser): 200 FUNCTIONS = { 201 **parser.Parser.FUNCTIONS, 202 "APPROX_DISTINCT": exp.ApproxDistinct.from_arg_list, 203 "APPROX_PERCENTILE": _approx_percentile, 204 "CARDINALITY": exp.ArraySize.from_arg_list, 205 "CONTAINS": exp.ArrayContains.from_arg_list, 206 "DATE_ADD": lambda args: exp.DateAdd( 207 this=seq_get(args, 2), 208 expression=seq_get(args, 1), 209 unit=seq_get(args, 0), 210 ), 211 "DATE_DIFF": lambda args: exp.DateDiff( 212 this=seq_get(args, 2), 213 expression=seq_get(args, 1), 214 unit=seq_get(args, 0), 215 ), 216 "DATE_FORMAT": format_time_lambda(exp.TimeToStr, "presto"), 217 "DATE_PARSE": format_time_lambda(exp.StrToTime, "presto"), 218 "DATE_TRUNC": date_trunc_to_time, 219 "FROM_HEX": exp.Unhex.from_arg_list, 220 "FROM_UNIXTIME": _from_unixtime, 221 "FROM_UTF8": lambda args: exp.Decode( 222 this=seq_get(args, 0), replace=seq_get(args, 1), charset=exp.Literal.string("utf-8") 223 ), 224 "NOW": exp.CurrentTimestamp.from_arg_list, 225 "SEQUENCE": exp.GenerateSeries.from_arg_list, 226 "STRPOS": lambda args: exp.StrPosition( 227 this=seq_get(args, 0), 228 substr=seq_get(args, 1), 229 instance=seq_get(args, 2), 230 ), 231 "TO_UNIXTIME": exp.TimeToUnix.from_arg_list, 232 "TO_HEX": exp.Hex.from_arg_list, 233 "TO_UTF8": lambda args: exp.Encode( 234 this=seq_get(args, 0), charset=exp.Literal.string("utf-8") 235 ), 236 } 237 FUNCTION_PARSERS = parser.Parser.FUNCTION_PARSERS.copy() 238 FUNCTION_PARSERS.pop("TRIM")
Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer
and produces
a parsed syntax tree.
Arguments:
- error_level: the desired error level. Default: ErrorLevel.RAISE
- error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
- index_offset: Index offset for arrays, e.g. ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
- alias_post_tablesample: If the table alias comes after tablesample. Default: False
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
- null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
Inherited Members
240 class Generator(generator.Generator): 241 INTERVAL_ALLOWS_PLURAL_FORM = False 242 JOIN_HINTS = False 243 TABLE_HINTS = False 244 STRUCT_DELIMITER = ("(", ")") 245 246 PROPERTIES_LOCATION = { 247 **generator.Generator.PROPERTIES_LOCATION, 248 exp.LocationProperty: exp.Properties.Location.UNSUPPORTED, 249 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 250 } 251 252 TYPE_MAPPING = { 253 **generator.Generator.TYPE_MAPPING, 254 exp.DataType.Type.INT: "INTEGER", 255 exp.DataType.Type.FLOAT: "REAL", 256 exp.DataType.Type.BINARY: "VARBINARY", 257 exp.DataType.Type.TEXT: "VARCHAR", 258 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", 259 exp.DataType.Type.STRUCT: "ROW", 260 } 261 262 TRANSFORMS = { 263 **generator.Generator.TRANSFORMS, 264 exp.ApproxDistinct: _approx_distinct_sql, 265 exp.ApproxQuantile: rename_func("APPROX_PERCENTILE"), 266 exp.Array: lambda self, e: f"ARRAY[{self.expressions(e, flat=True)}]", 267 exp.ArrayConcat: rename_func("CONCAT"), 268 exp.ArrayContains: rename_func("CONTAINS"), 269 exp.ArraySize: rename_func("CARDINALITY"), 270 exp.BitwiseAnd: lambda self, e: f"BITWISE_AND({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 271 exp.BitwiseLeftShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_LEFT({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 272 exp.BitwiseNot: lambda self, e: f"BITWISE_NOT({self.sql(e, 'this')})", 273 exp.BitwiseOr: lambda self, e: f"BITWISE_OR({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 274 exp.BitwiseRightShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_RIGHT({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 275 exp.BitwiseXor: lambda self, e: f"BITWISE_XOR({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 276 exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP", 277 exp.DataType: _datatype_sql, 278 exp.DateAdd: lambda self, e: self.func( 279 "DATE_ADD", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this 280 ), 281 exp.DateDiff: lambda self, e: self.func( 282 "DATE_DIFF", 
exp.Literal.string(e.text("unit") or "day"), e.expression, e.this 283 ), 284 exp.DateStrToDate: lambda self, e: f"CAST(DATE_PARSE({self.sql(e, 'this')}, {Presto.date_format}) AS DATE)", 285 exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Presto.dateint_format}) AS INT)", 286 exp.Decode: _decode_sql, 287 exp.DiToDate: lambda self, e: f"CAST(DATE_PARSE(CAST({self.sql(e, 'this')} AS VARCHAR), {Presto.dateint_format}) AS DATE)", 288 exp.Encode: _encode_sql, 289 exp.FileFormatProperty: lambda self, e: f"FORMAT='{e.name.upper()}'", 290 exp.Group: transforms.preprocess([transforms.unalias_group]), 291 exp.Hex: rename_func("TO_HEX"), 292 exp.If: if_sql, 293 exp.ILike: no_ilike_sql, 294 exp.Initcap: _initcap_sql, 295 exp.Lateral: _explode_to_unnest_sql, 296 exp.Levenshtein: rename_func("LEVENSHTEIN_DISTANCE"), 297 exp.LogicalAnd: rename_func("BOOL_AND"), 298 exp.LogicalOr: rename_func("BOOL_OR"), 299 exp.Pivot: no_pivot_sql, 300 exp.Quantile: _quantile_sql, 301 exp.SafeDivide: no_safe_divide_sql, 302 exp.Schema: _schema_sql, 303 exp.Select: transforms.preprocess( 304 [ 305 transforms.eliminate_qualify, 306 transforms.eliminate_distinct_on, 307 transforms.explode_to_unnest, 308 ] 309 ), 310 exp.SortArray: _no_sort_array, 311 exp.StrPosition: rename_func("STRPOS"), 312 exp.StrToDate: lambda self, e: f"CAST({_str_to_time_sql(self, e)} AS DATE)", 313 exp.StrToTime: _str_to_time_sql, 314 exp.StrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {self.format_time(e)}))", 315 exp.StructExtract: struct_extract_sql, 316 exp.Table: transforms.preprocess([_unnest_sequence]), 317 exp.TimestampTrunc: timestamptrunc_sql, 318 exp.TimeStrToDate: timestrtotime_sql, 319 exp.TimeStrToTime: timestrtotime_sql, 320 exp.TimeStrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {Presto.time_format}))", 321 exp.TimeToStr: lambda self, e: f"DATE_FORMAT({self.sql(e, 'this')}, {self.format_time(e)})", 322 exp.TimeToUnix: 
rename_func("TO_UNIXTIME"), 323 exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS VARCHAR), '-', ''), 1, 8) AS INT)", 324 exp.TsOrDsAdd: _ts_or_ds_add_sql, 325 exp.TsOrDsToDate: _ts_or_ds_to_date_sql, 326 exp.Unhex: rename_func("FROM_HEX"), 327 exp.UnixToStr: lambda self, e: f"DATE_FORMAT(FROM_UNIXTIME({self.sql(e, 'this')}), {self.format_time(e)})", 328 exp.UnixToTime: rename_func("FROM_UNIXTIME"), 329 exp.UnixToTimeStr: lambda self, e: f"CAST(FROM_UNIXTIME({self.sql(e, 'this')}) AS VARCHAR)", 330 exp.VariancePop: rename_func("VAR_POP"), 331 exp.With: transforms.preprocess([transforms.add_recursive_cte_column_names]), 332 exp.WithinGroup: transforms.preprocess( 333 [transforms.remove_within_group_for_percentiles] 334 ), 335 } 336 337 def interval_sql(self, expression: exp.Interval) -> str: 338 unit = self.sql(expression, "unit") 339 if expression.this and unit.lower().startswith("week"): 340 return f"({expression.this.name} * INTERVAL '7' day)" 341 return super().interval_sql(expression) 342 343 def transaction_sql(self, expression: exp.Transaction) -> str: 344 modes = expression.args.get("modes") 345 modes = f" {', '.join(modes)}" if modes else "" 346 return f"START TRANSACTION{modes}" 347 348 def generateseries_sql(self, expression: exp.GenerateSeries) -> str: 349 start = expression.args["start"] 350 end = expression.args["end"] 351 step = expression.args.get("step") 352 353 if isinstance(start, exp.Cast): 354 target_type = start.to 355 elif isinstance(end, exp.Cast): 356 target_type = end.to 357 else: 358 target_type = None 359 360 if target_type and target_type.is_type(exp.DataType.Type.TIMESTAMP): 361 to = target_type.copy() 362 363 if target_type is start.to: 364 end = exp.Cast(this=end, to=to) 365 else: 366 start = exp.Cast(this=start, to=to) 367 368 return self.func("SEQUENCE", start, end, step)
Generator interprets the given syntax tree and produces a SQL string as an output.
Arguments:
- time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the output the target time format
- time_trie (trie): a trie of the time_mapping keys
- pretty (bool): if set to True the returned string will be formatted. Default: False.
- quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
- quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
- identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
- identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
- bit_start (str): specifies which starting character to use to delimit bit literals. Default: None.
- bit_end (str): specifies which ending character to use to delimit bit literals. Default: None.
- hex_start (str): specifies which starting character to use to delimit hex literals. Default: None.
- hex_end (str): specifies which ending character to use to delimit hex literals. Default: None.
- byte_start (str): specifies which starting character to use to delimit byte literals. Default: None.
- byte_end (str): specifies which ending character to use to delimit byte literals. Default: None.
- raw_start (str): specifies which starting character to use to delimit raw literals. Default: None.
- raw_end (str): specifies which ending character to use to delimit raw literals. Default: None.
- identify (bool | str): 'always': always quote identifiers. 'safe': only quote identifiers that contain no uppercase characters. True defaults to 'always'.
- normalize (bool): if set to True all identifiers will lower cased
- string_escape (str): specifies a string escape character. Default: '.
- identifier_escape (str): specifies an identifier escape character. Default: ".
- pad (int): determines padding in a formatted string. Default: 2.
- indent (int): determines the size of indentation in a formatted string. Default: 4.
- unnest_column_only (bool): if true unnest table aliases are considered only as column aliases
- normalize_functions (str): normalize function names, "upper", "lower", or None Default: "upper"
- alias_post_tablesample (bool): if the table alias comes after tablesample Default: False
- unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
- max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma (bool): if the comma is leading or trailing in select statements. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
def generateseries_sql(self, expression: exp.GenerateSeries) -> str:
    """Render GENERATE_SERIES as Presto's SEQUENCE(start, end[, step]).

    If exactly one endpoint carries a CAST to TIMESTAMP, the other endpoint
    is wrapped in a matching CAST so both SEQUENCE bounds share the type.

    Args:
        expression: the GENERATE_SERIES node; reads its "start", "end",
            and optional "step" args.

    Returns:
        The SEQUENCE function call SQL string.
    """
    start = expression.args["start"]
    end = expression.args["end"]
    step = expression.args.get("step")  # optional third argument

    # Pick the cast target type, preferring the start endpoint's cast.
    if isinstance(start, exp.Cast):
        target_type = start.to
    elif isinstance(end, exp.Cast):
        target_type = end.to
    else:
        target_type = None

    if target_type and target_type.is_type(exp.DataType.Type.TIMESTAMP):
        to = target_type.copy()

        # Cast whichever endpoint is NOT already cast to the target type.
        # The isinstance guard fixes a crash in the original: when only
        # `end` was a Cast, `start.to` raised AttributeError on a non-Cast
        # node before the identity comparison could run.
        if isinstance(start, exp.Cast) and target_type is start.to:
            end = exp.Cast(this=end, to=to)
        else:
            start = exp.Cast(this=start, to=to)

    return self.func("SEQUENCE", start, end, step)
Inherited Members
- sqlglot.generator.Generator
- Generator
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- create_sql
- clone_sql
- describe_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- rawstring_sql
- datatypesize_sql
- datatype_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- inputoutputformat_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- pseudotype_sql
- onconflict_sql
- returning_sql
- rowformatdelimitedproperty_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- setitem_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- after_having_modifiers
- after_limit_modifiers
- select_sql
- schema_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- concat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- unique_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonobject_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- attimezone_sql
- add_sql
- and_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- commit_sql
- rollback_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- is_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- trycast_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql