sqlglot.dialects.hive
from __future__ import annotations

import typing as t

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    approx_count_distinct_sql,
    create_with_partitions_sql,
    format_time_lambda,
    if_sql,
    locate_to_strposition,
    max_or_greatest,
    min_or_least,
    no_ilike_sql,
    no_recursive_cte_sql,
    no_safe_divide_sql,
    no_trycast_sql,
    rename_func,
    strposition_to_locate_sql,
    struct_extract_sql,
    timestrtotime_sql,
    var_map_sql,
)
from sqlglot.helper import seq_get
from sqlglot.parser import parse_var_map
from sqlglot.tokens import TokenType

# Maps a date-delta unit to the Hive function used for it and the multiplier
# that converts one such unit into that function's native unit.
# (FuncType, Multiplier)
DATE_DELTA_INTERVAL = {
    "YEAR": ("ADD_MONTHS", 12),
    "MONTH": ("ADD_MONTHS", 1),
    "QUARTER": ("ADD_MONTHS", 3),
    "WEEK": ("DATE_ADD", 7),
    "DAY": ("DATE_ADD", 1),
}

# Suffix applied to a UNIX_TIMESTAMP second-difference to express it in the
# given sub-day unit (empty string means "already in seconds").
TIME_DIFF_FACTOR = {
    "MILLISECOND": " * 1000",
    "SECOND": "",
    "MINUTE": " / 60",
    "HOUR": " / 3600",
}

# Units whose difference is computed via MONTHS_BETWEEN rather than DATEDIFF.
DIFF_MONTH_SWITCH = ("YEAR", "QUARTER", "MONTH")


def _add_date_sql(self: generator.Generator, expression: exp.DateAdd) -> str:
    """Render DATE_ADD as the matching Hive function (DATE_ADD / ADD_MONTHS),
    scaling a literal increment by the unit's multiplier."""
    unit = expression.text("unit").upper()
    func, multiplier = DATE_DELTA_INTERVAL.get(unit, ("DATE_ADD", 1))
    modified_increment = (
        int(expression.text("expression")) * multiplier
        if expression.expression.is_number
        else expression.expression
    )
    # Normalize to a numeric literal so non-literal increments pass through
    # unchanged while literal ones carry the unit multiplier.
    modified_increment = exp.Literal.number(modified_increment)
    return self.func(func, expression.this, modified_increment.this)


def _date_diff_sql(self: generator.Generator, expression: exp.DateDiff) -> str:
    """Render DATE_DIFF for Hive.

    Sub-day units are computed as a UNIX_TIMESTAMP second difference scaled by
    TIME_DIFF_FACTOR; month-like units use MONTHS_BETWEEN; everything else
    falls back to DATEDIFF, divided by the unit multiplier when it exceeds 1.
    """
    unit = expression.text("unit").upper()

    factor = TIME_DIFF_FACTOR.get(unit)
    if factor is not None:
        left = self.sql(expression, "this")
        right = self.sql(expression, "expression")
        sec_diff = f"UNIX_TIMESTAMP({left}) - UNIX_TIMESTAMP({right})"
        return f"({sec_diff}){factor}" if factor else sec_diff

    sql_func = "MONTHS_BETWEEN" if unit in DIFF_MONTH_SWITCH else "DATEDIFF"
    _, multiplier = DATE_DELTA_INTERVAL.get(unit, ("", 1))
    multiplier_sql = f" / {multiplier}" if multiplier > 1 else ""
    diff_sql = f"{sql_func}({self.format_args(expression.this, expression.expression)})"
    return f"{diff_sql}{multiplier_sql}"


def _array_sort(self: generator.Generator, expression: exp.ArraySort) -> str:
    """Render ARRAY_SORT as Hive's SORT_ARRAY, which takes no comparator."""
    if expression.expression:
        self.unsupported("Hive SORT_ARRAY does not support a comparator")
    return f"SORT_ARRAY({self.sql(expression, 'this')})"


def _property_sql(self: generator.Generator, expression: exp.Property) -> str:
    """Render a table property as 'key'=value (Hive quotes property names)."""
    return f"'{expression.name}'={self.sql(expression, 'value')}"


def _str_to_unix(self: generator.Generator, expression: exp.StrToUnix) -> str:
    """Render STR_TO_UNIX as UNIX_TIMESTAMP, omitting the default format."""
    return self.func("UNIX_TIMESTAMP", expression.this, _time_format(self, expression))


def _str_to_date(self: generator.Generator, expression: exp.StrToDate) -> str:
    """Render STR_TO_DATE as a CAST to DATE, round-tripping through
    UNIX_TIMESTAMP when the format is not one Hive casts natively."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    if time_format not in (Hive.time_format, Hive.date_format):
        this = f"FROM_UNIXTIME(UNIX_TIMESTAMP({this}, {time_format}))"
    return f"CAST({this} AS DATE)"


def _str_to_time(self: generator.Generator, expression: exp.StrToTime) -> str:
    """Render STR_TO_TIME as a CAST to TIMESTAMP, round-tripping through
    UNIX_TIMESTAMP when the format is not one Hive casts natively."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    if time_format not in (Hive.time_format, Hive.date_format):
        this = f"FROM_UNIXTIME(UNIX_TIMESTAMP({this}, {time_format}))"
    return f"CAST({this} AS TIMESTAMP)"


def _time_format(
    self: generator.Generator, expression: exp.UnixToStr | exp.StrToUnix
) -> t.Optional[str]:
    """Return the expression's time format, or None when it equals Hive's
    default so the argument can be dropped from the generated call."""
    time_format = self.format_time(expression)
    if time_format == Hive.time_format:
        return None
    return time_format


def _time_to_str(self: generator.Generator, expression: exp.TimeToStr) -> str:
    """Render TIME_TO_STR as Hive's DATE_FORMAT."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    return f"DATE_FORMAT({this}, {time_format})"


def _to_date_sql(self: generator.Generator, expression: exp.TsOrDsToDate) -> str:
    """Render TS_OR_DS_TO_DATE as TO_DATE, passing the format only when it
    differs from Hive's defaults."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    if time_format and time_format not in (Hive.time_format, Hive.date_format):
        return f"TO_DATE({this}, {time_format})"
    return f"TO_DATE({this})"


def _unnest_to_explode_sql(self: generator.Generator, expression: exp.Join) -> str:
    """Rewrite a JOIN over UNNEST into Hive LATERAL VIEW EXPLODE/POSEXPLODE,
    pairing each unnested expression with its column alias."""
    unnest = expression.this
    if isinstance(unnest, exp.Unnest):
        alias = unnest.args.get("alias")
        udtf = exp.Posexplode if unnest.args.get("ordinality") else exp.Explode
        return "".join(
            self.sql(
                exp.Lateral(
                    this=udtf(this=expression),
                    view=True,
                    alias=exp.TableAlias(this=alias.this, columns=[column]),  # type: ignore
                )
            )
            for expression, column in zip(unnest.expressions, alias.columns if alias else [])
        )
    return self.join_sql(expression)


def _index_sql(self: generator.Generator, expression: exp.Index) -> str:
    """Render CREATE INDEX's target clause using Hive's ON TABLE syntax."""
    this = self.sql(expression, "this")
    table = self.sql(expression, "table")
    columns = self.sql(expression, "columns")
    return f"{this} ON TABLE {table} {columns}"


class Hive(Dialect):
    """The Apache Hive SQL dialect."""

    # In Hive, a table alias follows the TABLESAMPLE clause.
    alias_post_tablesample = True

    # Mapping from Hive (java.text.SimpleDateFormat) tokens to Python
    # strftime-style tokens.
    time_mapping = {
        "y": "%Y",
        "Y": "%Y",
        "YYYY": "%Y",
        "yyyy": "%Y",
        "YY": "%y",
        "yy": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "hh": "%I",
        "h": "%-I",
        "mm": "%M",
        "m": "%-M",
        "ss": "%S",
        "s": "%-S",
        "SSSSSS": "%f",
        "a": "%p",
        "DD": "%j",
        "D": "%-j",
        "E": "%a",
        "EE": "%a",
        "EEE": "%a",
        "EEEE": "%A",
    }

    # Default Hive format strings (kept quoted, as they appear in SQL).
    date_format = "'yyyy-MM-dd'"
    dateint_format = "'yyyyMMdd'"
    time_format = "'yyyy-MM-dd HH:mm:ss'"

    class Tokenizer(tokens.Tokenizer):
        QUOTES = ["'", '"']
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["\\"]
        ENCODE = "utf-8"

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "ADD ARCHIVE": TokenType.COMMAND,
            "ADD ARCHIVES": TokenType.COMMAND,
            "ADD FILE": TokenType.COMMAND,
            "ADD FILES": TokenType.COMMAND,
            "ADD JAR": TokenType.COMMAND,
            "ADD JARS": TokenType.COMMAND,
            "MSCK REPAIR": TokenType.COMMAND,
            "WITH SERDEPROPERTIES": TokenType.SERDE_PROPERTIES,
        }

        # Hive numeric literal suffixes and the types they denote (e.g. 1L).
        NUMERIC_LITERALS = {
            "L": "BIGINT",
            "S": "SMALLINT",
            "Y": "TINYINT",
            "D": "DOUBLE",
            "F": "FLOAT",
            "BD": "DECIMAL",
        }

        IDENTIFIER_CAN_START_WITH_DIGIT = True

    class Parser(parser.Parser):
        # Hive's CAST returns NULL on failure instead of raising.
        STRICT_CAST = False

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list,
            "COLLECT_LIST": exp.ArrayAgg.from_arg_list,
            # Hive's DATE_ADD/DATE_SUB always operate in days.
            "DATE_ADD": lambda args: exp.TsOrDsAdd(
                this=seq_get(args, 0),
                expression=seq_get(args, 1),
                unit=exp.Literal.string("DAY"),
            ),
            "DATEDIFF": lambda args: exp.DateDiff(
                this=exp.TsOrDsToDate(this=seq_get(args, 0)),
                expression=exp.TsOrDsToDate(this=seq_get(args, 1)),
            ),
            # DATE_SUB(x, n) is modeled as DATE_ADD(x, -n).
            "DATE_SUB": lambda args: exp.TsOrDsAdd(
                this=seq_get(args, 0),
                expression=exp.Mul(
                    this=seq_get(args, 1),
                    expression=exp.Literal.number(-1),
                ),
                unit=exp.Literal.string("DAY"),
            ),
            "DATE_FORMAT": lambda args: format_time_lambda(exp.TimeToStr, "hive")(
                [
                    exp.TimeStrToTime(this=seq_get(args, 0)),
                    seq_get(args, 1),
                ]
            ),
            "DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))),
            "FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True),
            "GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list,
            "LOCATE": locate_to_strposition,
            # One-argument LOG is the natural logarithm.
            "LOG": (
                lambda args: exp.Log.from_arg_list(args)
                if len(args) > 1
                else exp.Ln.from_arg_list(args)
            ),
            "MAP": parse_var_map,
            "MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)),
            "PERCENTILE": exp.Quantile.from_arg_list,
            "PERCENTILE_APPROX": exp.ApproxQuantile.from_arg_list,
            "COLLECT_SET": exp.SetAgg.from_arg_list,
            "SIZE": exp.ArraySize.from_arg_list,
            "SPLIT": exp.RegexpSplit.from_arg_list,
            "TO_DATE": format_time_lambda(exp.TsOrDsToDate, "hive"),
            "UNIX_TIMESTAMP": format_time_lambda(exp.StrToUnix, "hive", True),
            "YEAR": lambda args: exp.Year(this=exp.TsOrDsToDate.from_arg_list(args)),
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
            "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties(
                expressions=self._parse_wrapped_csv(self._parse_property)
            ),
        }

    class Generator(generator.Generator):
        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            exp.DataType.Type.TEXT: "STRING",
            exp.DataType.Type.DATETIME: "TIMESTAMP",
            exp.DataType.Type.VARBINARY: "BINARY",
            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
        }

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            **transforms.UNALIAS_GROUP,  # type: ignore
            **transforms.ELIMINATE_QUALIFY,  # type: ignore
            exp.Property: _property_sql,
            exp.ApproxDistinct: approx_count_distinct_sql,
            exp.ArrayConcat: rename_func("CONCAT"),
            exp.ArraySize: rename_func("SIZE"),
            exp.ArraySort: _array_sort,
            exp.With: no_recursive_cte_sql,
            exp.DateAdd: _add_date_sql,
            exp.DateDiff: _date_diff_sql,
            exp.DateStrToDate: rename_func("TO_DATE"),
            exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.dateint_format}) AS INT)",
            exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.dateint_format})",
            exp.FileFormatProperty: lambda self, e: f"STORED AS {e.name.upper()}",
            exp.If: if_sql,
            exp.Index: _index_sql,
            exp.ILike: no_ilike_sql,
            exp.Join: _unnest_to_explode_sql,
            exp.JSONExtract: rename_func("GET_JSON_OBJECT"),
            exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"),
            exp.Map: var_map_sql,
            exp.Max: max_or_greatest,
            exp.Min: min_or_least,
            exp.VarMap: var_map_sql,
            exp.Create: create_with_partitions_sql,
            exp.Quantile: rename_func("PERCENTILE"),
            exp.ApproxQuantile: rename_func("PERCENTILE_APPROX"),
            exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"),
            exp.RegexpSplit: rename_func("SPLIT"),
            exp.SafeDivide: no_safe_divide_sql,
            exp.SchemaCommentProperty: lambda self, e: self.naked_property(e),
            exp.SetAgg: rename_func("COLLECT_SET"),
            # \Q quotes the delimiter so SPLIT treats it literally, not as a regex.
            exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))",
            exp.StrPosition: strposition_to_locate_sql,
            exp.StrToDate: _str_to_date,
            exp.StrToTime: _str_to_time,
            exp.StrToUnix: _str_to_unix,
            exp.StructExtract: struct_extract_sql,
            exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}",
            exp.TimeStrToDate: rename_func("TO_DATE"),
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TimeToStr: _time_to_str,
            exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)",
            exp.TsOrDsAdd: lambda self, e: f"DATE_ADD({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.TsOrDsToDate: _to_date_sql,
            exp.TryCast: no_trycast_sql,
            exp.UnixToStr: lambda self, e: self.func(
                "FROM_UNIXTIME", e.this, _time_format(self, e)
            ),
            exp.UnixToTime: rename_func("FROM_UNIXTIME"),
            exp.UnixToTimeStr: rename_func("FROM_UNIXTIME"),
            exp.PartitionedByProperty: lambda self, e: f"PARTITIONED BY {self.sql(e, 'this')}",
            exp.RowFormatSerdeProperty: lambda self, e: f"ROW FORMAT SERDE {self.sql(e, 'this')}",
            exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"),
            exp.NumberToStr: rename_func("FORMAT_NUMBER"),
            exp.LastDateOfMonth: rename_func("LAST_DAY"),
        }

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA,
            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
            exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA,
        }

        LIMIT_FETCH = "LIMIT"

        def arrayagg_sql(self, expression: exp.ArrayAgg) -> str:
            """Render ARRAY_AGG as COLLECT_LIST, dropping any ORDER BY since
            Hive's COLLECT_LIST does not support ordering."""
            return self.func(
                "COLLECT_LIST",
                expression.this.this if isinstance(expression.this, exp.Order) else expression.this,
            )

        def with_properties(self, properties: exp.Properties) -> str:
            """Render trailing table properties under a TBLPROPERTIES clause."""
            return self.properties(
                properties,
                prefix=self.seg("TBLPROPERTIES"),
            )

        def datatype_sql(self, expression: exp.DataType) -> str:
            """Render data types, widening unsized VARCHAR/NVARCHAR to STRING
            and stripping precision from temporal types (unsupported in Hive)."""
            if (
                expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR)
                and not expression.expressions
            ):
                expression = exp.DataType.build("text")
            elif expression.this in exp.DataType.TEMPORAL_TYPES:
                expression = exp.DataType.build(expression.this)
            return super().datatype_sql(expression)
156class Hive(Dialect): 157 alias_post_tablesample = True 158 159 time_mapping = { 160 "y": "%Y", 161 "Y": "%Y", 162 "YYYY": "%Y", 163 "yyyy": "%Y", 164 "YY": "%y", 165 "yy": "%y", 166 "MMMM": "%B", 167 "MMM": "%b", 168 "MM": "%m", 169 "M": "%-m", 170 "dd": "%d", 171 "d": "%-d", 172 "HH": "%H", 173 "H": "%-H", 174 "hh": "%I", 175 "h": "%-I", 176 "mm": "%M", 177 "m": "%-M", 178 "ss": "%S", 179 "s": "%-S", 180 "SSSSSS": "%f", 181 "a": "%p", 182 "DD": "%j", 183 "D": "%-j", 184 "E": "%a", 185 "EE": "%a", 186 "EEE": "%a", 187 "EEEE": "%A", 188 } 189 190 date_format = "'yyyy-MM-dd'" 191 dateint_format = "'yyyyMMdd'" 192 time_format = "'yyyy-MM-dd HH:mm:ss'" 193 194 class Tokenizer(tokens.Tokenizer): 195 QUOTES = ["'", '"'] 196 IDENTIFIERS = ["`"] 197 STRING_ESCAPES = ["\\"] 198 ENCODE = "utf-8" 199 200 KEYWORDS = { 201 **tokens.Tokenizer.KEYWORDS, 202 "ADD ARCHIVE": TokenType.COMMAND, 203 "ADD ARCHIVES": TokenType.COMMAND, 204 "ADD FILE": TokenType.COMMAND, 205 "ADD FILES": TokenType.COMMAND, 206 "ADD JAR": TokenType.COMMAND, 207 "ADD JARS": TokenType.COMMAND, 208 "MSCK REPAIR": TokenType.COMMAND, 209 "WITH SERDEPROPERTIES": TokenType.SERDE_PROPERTIES, 210 } 211 212 NUMERIC_LITERALS = { 213 "L": "BIGINT", 214 "S": "SMALLINT", 215 "Y": "TINYINT", 216 "D": "DOUBLE", 217 "F": "FLOAT", 218 "BD": "DECIMAL", 219 } 220 221 IDENTIFIER_CAN_START_WITH_DIGIT = True 222 223 class Parser(parser.Parser): 224 STRICT_CAST = False 225 226 FUNCTIONS = { 227 **parser.Parser.FUNCTIONS, # type: ignore 228 "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list, 229 "COLLECT_LIST": exp.ArrayAgg.from_arg_list, 230 "DATE_ADD": lambda args: exp.TsOrDsAdd( 231 this=seq_get(args, 0), 232 expression=seq_get(args, 1), 233 unit=exp.Literal.string("DAY"), 234 ), 235 "DATEDIFF": lambda args: exp.DateDiff( 236 this=exp.TsOrDsToDate(this=seq_get(args, 0)), 237 expression=exp.TsOrDsToDate(this=seq_get(args, 1)), 238 ), 239 "DATE_SUB": lambda args: exp.TsOrDsAdd( 240 this=seq_get(args, 0), 241 
expression=exp.Mul( 242 this=seq_get(args, 1), 243 expression=exp.Literal.number(-1), 244 ), 245 unit=exp.Literal.string("DAY"), 246 ), 247 "DATE_FORMAT": lambda args: format_time_lambda(exp.TimeToStr, "hive")( 248 [ 249 exp.TimeStrToTime(this=seq_get(args, 0)), 250 seq_get(args, 1), 251 ] 252 ), 253 "DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))), 254 "FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True), 255 "GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list, 256 "LOCATE": locate_to_strposition, 257 "LOG": ( 258 lambda args: exp.Log.from_arg_list(args) 259 if len(args) > 1 260 else exp.Ln.from_arg_list(args) 261 ), 262 "MAP": parse_var_map, 263 "MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)), 264 "PERCENTILE": exp.Quantile.from_arg_list, 265 "PERCENTILE_APPROX": exp.ApproxQuantile.from_arg_list, 266 "COLLECT_SET": exp.SetAgg.from_arg_list, 267 "SIZE": exp.ArraySize.from_arg_list, 268 "SPLIT": exp.RegexpSplit.from_arg_list, 269 "TO_DATE": format_time_lambda(exp.TsOrDsToDate, "hive"), 270 "UNIX_TIMESTAMP": format_time_lambda(exp.StrToUnix, "hive", True), 271 "YEAR": lambda args: exp.Year(this=exp.TsOrDsToDate.from_arg_list(args)), 272 } 273 274 PROPERTY_PARSERS = { 275 **parser.Parser.PROPERTY_PARSERS, # type: ignore 276 "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties( 277 expressions=self._parse_wrapped_csv(self._parse_property) 278 ), 279 } 280 281 class Generator(generator.Generator): 282 TYPE_MAPPING = { 283 **generator.Generator.TYPE_MAPPING, # type: ignore 284 exp.DataType.Type.TEXT: "STRING", 285 exp.DataType.Type.DATETIME: "TIMESTAMP", 286 exp.DataType.Type.VARBINARY: "BINARY", 287 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", 288 } 289 290 TRANSFORMS = { 291 **generator.Generator.TRANSFORMS, # type: ignore 292 **transforms.UNALIAS_GROUP, # type: ignore 293 **transforms.ELIMINATE_QUALIFY, # type: ignore 294 exp.Property: _property_sql, 295 exp.ApproxDistinct: 
approx_count_distinct_sql, 296 exp.ArrayConcat: rename_func("CONCAT"), 297 exp.ArraySize: rename_func("SIZE"), 298 exp.ArraySort: _array_sort, 299 exp.With: no_recursive_cte_sql, 300 exp.DateAdd: _add_date_sql, 301 exp.DateDiff: _date_diff_sql, 302 exp.DateStrToDate: rename_func("TO_DATE"), 303 exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.dateint_format}) AS INT)", 304 exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.dateint_format})", 305 exp.FileFormatProperty: lambda self, e: f"STORED AS {e.name.upper()}", 306 exp.If: if_sql, 307 exp.Index: _index_sql, 308 exp.ILike: no_ilike_sql, 309 exp.Join: _unnest_to_explode_sql, 310 exp.JSONExtract: rename_func("GET_JSON_OBJECT"), 311 exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"), 312 exp.Map: var_map_sql, 313 exp.Max: max_or_greatest, 314 exp.Min: min_or_least, 315 exp.VarMap: var_map_sql, 316 exp.Create: create_with_partitions_sql, 317 exp.Quantile: rename_func("PERCENTILE"), 318 exp.ApproxQuantile: rename_func("PERCENTILE_APPROX"), 319 exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"), 320 exp.RegexpSplit: rename_func("SPLIT"), 321 exp.SafeDivide: no_safe_divide_sql, 322 exp.SchemaCommentProperty: lambda self, e: self.naked_property(e), 323 exp.SetAgg: rename_func("COLLECT_SET"), 324 exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))", 325 exp.StrPosition: strposition_to_locate_sql, 326 exp.StrToDate: _str_to_date, 327 exp.StrToTime: _str_to_time, 328 exp.StrToUnix: _str_to_unix, 329 exp.StructExtract: struct_extract_sql, 330 exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}", 331 exp.TimeStrToDate: rename_func("TO_DATE"), 332 exp.TimeStrToTime: timestrtotime_sql, 333 exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"), 334 exp.TimeToStr: _time_to_str, 335 exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"), 336 exp.TsOrDiToDi: lambda self, e: 
f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)", 337 exp.TsOrDsAdd: lambda self, e: f"DATE_ADD({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 338 exp.TsOrDsToDate: _to_date_sql, 339 exp.TryCast: no_trycast_sql, 340 exp.UnixToStr: lambda self, e: self.func( 341 "FROM_UNIXTIME", e.this, _time_format(self, e) 342 ), 343 exp.UnixToTime: rename_func("FROM_UNIXTIME"), 344 exp.UnixToTimeStr: rename_func("FROM_UNIXTIME"), 345 exp.PartitionedByProperty: lambda self, e: f"PARTITIONED BY {self.sql(e, 'this')}", 346 exp.RowFormatSerdeProperty: lambda self, e: f"ROW FORMAT SERDE {self.sql(e, 'this')}", 347 exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"), 348 exp.NumberToStr: rename_func("FORMAT_NUMBER"), 349 exp.LastDateOfMonth: rename_func("LAST_DAY"), 350 } 351 352 PROPERTIES_LOCATION = { 353 **generator.Generator.PROPERTIES_LOCATION, # type: ignore 354 exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA, 355 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, 356 exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA, 357 } 358 359 LIMIT_FETCH = "LIMIT" 360 361 def arrayagg_sql(self, expression: exp.ArrayAgg) -> str: 362 return self.func( 363 "COLLECT_LIST", 364 expression.this.this if isinstance(expression.this, exp.Order) else expression.this, 365 ) 366 367 def with_properties(self, properties: exp.Properties) -> str: 368 return self.properties( 369 properties, 370 prefix=self.seg("TBLPROPERTIES"), 371 ) 372 373 def datatype_sql(self, expression: exp.DataType) -> str: 374 if ( 375 expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR) 376 and not expression.expressions 377 ): 378 expression = exp.DataType.build("text") 379 elif expression.this in exp.DataType.TEMPORAL_TYPES: 380 expression = exp.DataType.build(expression.this) 381 return super().datatype_sql(expression)
194 class Tokenizer(tokens.Tokenizer): 195 QUOTES = ["'", '"'] 196 IDENTIFIERS = ["`"] 197 STRING_ESCAPES = ["\\"] 198 ENCODE = "utf-8" 199 200 KEYWORDS = { 201 **tokens.Tokenizer.KEYWORDS, 202 "ADD ARCHIVE": TokenType.COMMAND, 203 "ADD ARCHIVES": TokenType.COMMAND, 204 "ADD FILE": TokenType.COMMAND, 205 "ADD FILES": TokenType.COMMAND, 206 "ADD JAR": TokenType.COMMAND, 207 "ADD JARS": TokenType.COMMAND, 208 "MSCK REPAIR": TokenType.COMMAND, 209 "WITH SERDEPROPERTIES": TokenType.SERDE_PROPERTIES, 210 } 211 212 NUMERIC_LITERALS = { 213 "L": "BIGINT", 214 "S": "SMALLINT", 215 "Y": "TINYINT", 216 "D": "DOUBLE", 217 "F": "FLOAT", 218 "BD": "DECIMAL", 219 } 220 221 IDENTIFIER_CAN_START_WITH_DIGIT = True
Inherited Members
223 class Parser(parser.Parser): 224 STRICT_CAST = False 225 226 FUNCTIONS = { 227 **parser.Parser.FUNCTIONS, # type: ignore 228 "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list, 229 "COLLECT_LIST": exp.ArrayAgg.from_arg_list, 230 "DATE_ADD": lambda args: exp.TsOrDsAdd( 231 this=seq_get(args, 0), 232 expression=seq_get(args, 1), 233 unit=exp.Literal.string("DAY"), 234 ), 235 "DATEDIFF": lambda args: exp.DateDiff( 236 this=exp.TsOrDsToDate(this=seq_get(args, 0)), 237 expression=exp.TsOrDsToDate(this=seq_get(args, 1)), 238 ), 239 "DATE_SUB": lambda args: exp.TsOrDsAdd( 240 this=seq_get(args, 0), 241 expression=exp.Mul( 242 this=seq_get(args, 1), 243 expression=exp.Literal.number(-1), 244 ), 245 unit=exp.Literal.string("DAY"), 246 ), 247 "DATE_FORMAT": lambda args: format_time_lambda(exp.TimeToStr, "hive")( 248 [ 249 exp.TimeStrToTime(this=seq_get(args, 0)), 250 seq_get(args, 1), 251 ] 252 ), 253 "DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))), 254 "FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True), 255 "GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list, 256 "LOCATE": locate_to_strposition, 257 "LOG": ( 258 lambda args: exp.Log.from_arg_list(args) 259 if len(args) > 1 260 else exp.Ln.from_arg_list(args) 261 ), 262 "MAP": parse_var_map, 263 "MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)), 264 "PERCENTILE": exp.Quantile.from_arg_list, 265 "PERCENTILE_APPROX": exp.ApproxQuantile.from_arg_list, 266 "COLLECT_SET": exp.SetAgg.from_arg_list, 267 "SIZE": exp.ArraySize.from_arg_list, 268 "SPLIT": exp.RegexpSplit.from_arg_list, 269 "TO_DATE": format_time_lambda(exp.TsOrDsToDate, "hive"), 270 "UNIX_TIMESTAMP": format_time_lambda(exp.StrToUnix, "hive", True), 271 "YEAR": lambda args: exp.Year(this=exp.TsOrDsToDate.from_arg_list(args)), 272 } 273 274 PROPERTY_PARSERS = { 275 **parser.Parser.PROPERTY_PARSERS, # type: ignore 276 "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties( 277 
expressions=self._parse_wrapped_csv(self._parse_property) 278 ), 279 }
Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer
and produces
a parsed syntax tree.
Arguments:
- error_level: the desired error level. Default: ErrorLevel.RAISE
- error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
- index_offset: Index offset for arrays eg ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
- alias_post_tablesample: If the table alias comes after tablesample. Default: False
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
- null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
Inherited Members
281 class Generator(generator.Generator): 282 TYPE_MAPPING = { 283 **generator.Generator.TYPE_MAPPING, # type: ignore 284 exp.DataType.Type.TEXT: "STRING", 285 exp.DataType.Type.DATETIME: "TIMESTAMP", 286 exp.DataType.Type.VARBINARY: "BINARY", 287 exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP", 288 } 289 290 TRANSFORMS = { 291 **generator.Generator.TRANSFORMS, # type: ignore 292 **transforms.UNALIAS_GROUP, # type: ignore 293 **transforms.ELIMINATE_QUALIFY, # type: ignore 294 exp.Property: _property_sql, 295 exp.ApproxDistinct: approx_count_distinct_sql, 296 exp.ArrayConcat: rename_func("CONCAT"), 297 exp.ArraySize: rename_func("SIZE"), 298 exp.ArraySort: _array_sort, 299 exp.With: no_recursive_cte_sql, 300 exp.DateAdd: _add_date_sql, 301 exp.DateDiff: _date_diff_sql, 302 exp.DateStrToDate: rename_func("TO_DATE"), 303 exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.dateint_format}) AS INT)", 304 exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.dateint_format})", 305 exp.FileFormatProperty: lambda self, e: f"STORED AS {e.name.upper()}", 306 exp.If: if_sql, 307 exp.Index: _index_sql, 308 exp.ILike: no_ilike_sql, 309 exp.Join: _unnest_to_explode_sql, 310 exp.JSONExtract: rename_func("GET_JSON_OBJECT"), 311 exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"), 312 exp.Map: var_map_sql, 313 exp.Max: max_or_greatest, 314 exp.Min: min_or_least, 315 exp.VarMap: var_map_sql, 316 exp.Create: create_with_partitions_sql, 317 exp.Quantile: rename_func("PERCENTILE"), 318 exp.ApproxQuantile: rename_func("PERCENTILE_APPROX"), 319 exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"), 320 exp.RegexpSplit: rename_func("SPLIT"), 321 exp.SafeDivide: no_safe_divide_sql, 322 exp.SchemaCommentProperty: lambda self, e: self.naked_property(e), 323 exp.SetAgg: rename_func("COLLECT_SET"), 324 exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))", 325 exp.StrPosition: 
strposition_to_locate_sql, 326 exp.StrToDate: _str_to_date, 327 exp.StrToTime: _str_to_time, 328 exp.StrToUnix: _str_to_unix, 329 exp.StructExtract: struct_extract_sql, 330 exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}", 331 exp.TimeStrToDate: rename_func("TO_DATE"), 332 exp.TimeStrToTime: timestrtotime_sql, 333 exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"), 334 exp.TimeToStr: _time_to_str, 335 exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"), 336 exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)", 337 exp.TsOrDsAdd: lambda self, e: f"DATE_ADD({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 338 exp.TsOrDsToDate: _to_date_sql, 339 exp.TryCast: no_trycast_sql, 340 exp.UnixToStr: lambda self, e: self.func( 341 "FROM_UNIXTIME", e.this, _time_format(self, e) 342 ), 343 exp.UnixToTime: rename_func("FROM_UNIXTIME"), 344 exp.UnixToTimeStr: rename_func("FROM_UNIXTIME"), 345 exp.PartitionedByProperty: lambda self, e: f"PARTITIONED BY {self.sql(e, 'this')}", 346 exp.RowFormatSerdeProperty: lambda self, e: f"ROW FORMAT SERDE {self.sql(e, 'this')}", 347 exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"), 348 exp.NumberToStr: rename_func("FORMAT_NUMBER"), 349 exp.LastDateOfMonth: rename_func("LAST_DAY"), 350 } 351 352 PROPERTIES_LOCATION = { 353 **generator.Generator.PROPERTIES_LOCATION, # type: ignore 354 exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA, 355 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, 356 exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA, 357 } 358 359 LIMIT_FETCH = "LIMIT" 360 361 def arrayagg_sql(self, expression: exp.ArrayAgg) -> str: 362 return self.func( 363 "COLLECT_LIST", 364 expression.this.this if isinstance(expression.this, exp.Order) else expression.this, 365 ) 366 367 def with_properties(self, properties: exp.Properties) -> str: 368 return self.properties( 369 properties, 370 
prefix=self.seg("TBLPROPERTIES"), 371 ) 372 373 def datatype_sql(self, expression: exp.DataType) -> str: 374 if ( 375 expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR) 376 and not expression.expressions 377 ): 378 expression = exp.DataType.build("text") 379 elif expression.this in exp.DataType.TEMPORAL_TYPES: 380 expression = exp.DataType.build(expression.this) 381 return super().datatype_sql(expression)
Generator interprets the given syntax tree and produces a SQL string as an output.
Arguments:
- time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the output the target time format
- time_trie (trie): a trie of the time_mapping keys
- pretty (bool): if set to True the returned string will be formatted. Default: False.
- quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
- quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
- identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
- identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
- identify (bool | str): 'always': always quote, 'safe': quote identifiers if they don't contain an upcase, True defaults to always.
- normalize (bool): if set to True all identifiers will be lower cased
- string_escape (str): specifies a string escape character. Default: '.
- identifier_escape (str): specifies an identifier escape character. Default: ".
- pad (int): determines padding in a formatted string. Default: 2.
- indent (int): determines the size of indentation in a formatted string. Default: 4.
- unnest_column_only (bool): if true unnest table aliases are considered only as column aliases
- normalize_functions (str): normalize function names, "upper", "lower", or None. Default: "upper"
- alias_post_tablesample (bool): if the table alias comes after tablesample Default: False
- unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
- max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma (bool): if the comma is leading or trailing in select statements. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
373 def datatype_sql(self, expression: exp.DataType) -> str: 374 if ( 375 expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR) 376 and not expression.expressions 377 ): 378 expression = exp.DataType.build("text") 379 elif expression.this in exp.DataType.TEMPORAL_TYPES: 380 expression = exp.DataType.build(expression.this) 381 return super().datatype_sql(expression)
Inherited Members
- sqlglot.generator.Generator
- Generator
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- create_sql
- describe_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- afterjournalproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- pseudotype_sql
- returning_sql
- rowformatdelimitedproperty_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- setitem_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- select_sql
- schema_sql
- star_sql
- structkwarg_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- window_spec_sql
- withingroup_sql
- between_sql
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- extract_sql
- trim_sql
- concat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- unique_sql
- if_sql
- jsonkeyvalue_sql
- jsonobject_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- attimezone_sql
- add_sql
- and_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- transaction_sql
- commit_sql
- rollback_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- is_sql
- like_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- trycast_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql