sqlglot.dialects.hive
"""Hive dialect for sqlglot: tokenizer, parser and generator overrides for HiveQL."""

from __future__ import annotations

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    approx_count_distinct_sql,
    create_with_partitions_sql,
    format_time_lambda,
    if_sql,
    locate_to_strposition,
    min_or_least,
    no_ilike_sql,
    no_recursive_cte_sql,
    no_safe_divide_sql,
    no_trycast_sql,
    rename_func,
    strposition_to_locate_sql,
    struct_extract_sql,
    timestrtotime_sql,
    var_map_sql,
)
from sqlglot.helper import seq_get
from sqlglot.parser import parse_var_map
from sqlglot.tokens import TokenType

# Maps a date unit to the Hive function used to add it and the multiplier that
# converts one unit into that function's native step.
# (FuncType, Multiplier)
DATE_DELTA_INTERVAL = {
    "YEAR": ("ADD_MONTHS", 12),
    "MONTH": ("ADD_MONTHS", 1),
    "QUARTER": ("ADD_MONTHS", 3),
    "WEEK": ("DATE_ADD", 7),
    "DAY": ("DATE_ADD", 1),
}

# Units whose difference is computed via MONTHS_BETWEEN rather than DATEDIFF.
DIFF_MONTH_SWITCH = ("YEAR", "QUARTER", "MONTH")


def _add_date_sql(self, expression):
    """Render exp.DateAdd as Hive ADD_MONTHS/DATE_ADD, scaling the increment.

    A literal numeric increment is multiplied by the unit's multiplier (e.g.
    2 WEEK -> DATE_ADD(..., 14)); non-literal increments are passed through.
    """
    unit = expression.text("unit").upper()
    func, multiplier = DATE_DELTA_INTERVAL.get(unit, ("DATE_ADD", 1))
    modified_increment = (
        int(expression.text("expression")) * multiplier
        if expression.expression.is_number
        else expression.expression
    )
    # Wrap in a literal so both branches expose a uniform `.this` below.
    modified_increment = exp.Literal.number(modified_increment)
    return self.func(func, expression.this, modified_increment.this)


def _date_diff_sql(self, expression):
    """Render exp.DateDiff, using MONTHS_BETWEEN for month-based units.

    For multi-step units (e.g. WEEK) the raw difference is divided by the
    unit's multiplier.
    """
    unit = expression.text("unit").upper()
    sql_func = "MONTHS_BETWEEN" if unit in DIFF_MONTH_SWITCH else "DATEDIFF"
    _, multiplier = DATE_DELTA_INTERVAL.get(unit, ("", 1))
    multiplier_sql = f" / {multiplier}" if multiplier > 1 else ""
    diff_sql = f"{sql_func}({self.format_args(expression.this, expression.expression)})"
    return f"{diff_sql}{multiplier_sql}"


def _array_sort(self, expression):
    """Render exp.ArraySort as SORT_ARRAY; Hive cannot take a comparator."""
    if expression.expression:
        self.unsupported("Hive SORT_ARRAY does not support a comparator")
    return f"SORT_ARRAY({self.sql(expression, 'this')})"


def _property_sql(self, expression):
    """Render a property as 'name'=value (Hive TBLPROPERTIES style)."""
    return f"'{expression.name}'={self.sql(expression, 'value')}"


def _str_to_unix(self, expression):
    """Render exp.StrToUnix as UNIX_TIMESTAMP, omitting the default format."""
    return self.func("UNIX_TIMESTAMP", expression.this, _time_format(self, expression))


def _str_to_date(self, expression):
    """Render exp.StrToDate; non-standard formats round-trip through UNIX_TIMESTAMP."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    if time_format not in (Hive.time_format, Hive.date_format):
        this = f"FROM_UNIXTIME(UNIX_TIMESTAMP({this}, {time_format}))"
    return f"CAST({this} AS DATE)"


def _str_to_time(self, expression):
    """Render exp.StrToTime; non-standard formats round-trip through UNIX_TIMESTAMP."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    if time_format not in (Hive.time_format, Hive.date_format):
        this = f"FROM_UNIXTIME(UNIX_TIMESTAMP({this}, {time_format}))"
    return f"CAST({this} AS TIMESTAMP)"


def _time_format(self, expression):
    """Return the expression's time format, or None when it is Hive's default."""
    time_format = self.format_time(expression)
    if time_format == Hive.time_format:
        return None
    return time_format


def _time_to_str(self, expression):
    """Render exp.TimeToStr as DATE_FORMAT(value, format)."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    return f"DATE_FORMAT({this}, {time_format})"


def _to_date_sql(self, expression):
    """Render exp.TsOrDsToDate as TO_DATE, with a format only when non-default."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    if time_format and time_format not in (Hive.time_format, Hive.date_format):
        return f"TO_DATE({this}, {time_format})"
    return f"TO_DATE({this})"


def _unnest_to_explode_sql(self, expression):
    """Rewrite a join over UNNEST as Hive LATERAL VIEW EXPLODE/POSEXPLODE.

    One LATERAL VIEW clause is emitted per (unnested expression, alias column)
    pair; non-UNNEST joins fall through to the default join renderer.
    """
    unnest = expression.this
    if isinstance(unnest, exp.Unnest):
        alias = unnest.args.get("alias")
        # "WITH ORDINALITY" maps to POSEXPLODE (adds the position column).
        udtf = exp.Posexplode if unnest.args.get("ordinality") else exp.Explode
        return "".join(
            self.sql(
                exp.Lateral(
                    this=udtf(this=expression),
                    view=True,
                    alias=exp.TableAlias(this=alias.this, columns=[column]),
                )
            )
            for expression, column in zip(unnest.expressions, alias.columns if alias else [])
        )
    return self.join_sql(expression)


def _index_sql(self, expression):
    """Render exp.Index as `name ON TABLE table (columns)`."""
    this = self.sql(expression, "this")
    table = self.sql(expression, "table")
    columns = self.sql(expression, "columns")
    return f"{this} ON TABLE {table} {columns}"


class Hive(Dialect):
    """Hive dialect: HiveQL-specific tokenizing, parsing and SQL generation."""

    alias_post_tablesample = True

    # Hive (Java SimpleDateFormat-style) time tokens -> Python strftime directives.
    time_mapping = {
        "y": "%Y",
        "Y": "%Y",
        "YYYY": "%Y",
        "yyyy": "%Y",
        "YY": "%y",
        "yy": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "hh": "%I",
        "h": "%-I",
        "mm": "%M",
        "m": "%-M",
        "ss": "%S",
        "s": "%-S",
        "SSSSSS": "%f",
        "a": "%p",
        "DD": "%j",
        "D": "%-j",
        "E": "%a",
        "EE": "%a",
        "EEE": "%a",
        "EEEE": "%A",
    }

    # Default Hive-format strings (note: quoted, ready for SQL interpolation).
    date_format = "'yyyy-MM-dd'"
    dateint_format = "'yyyyMMdd'"
    time_format = "'yyyy-MM-dd HH:mm:ss'"

    class Tokenizer(tokens.Tokenizer):
        QUOTES = ["'", '"']
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["\\"]
        ENCODE = "utf-8"

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            # Hive resource/maintenance statements are passed through as raw commands.
            "ADD ARCHIVE": TokenType.COMMAND,
            "ADD ARCHIVES": TokenType.COMMAND,
            "ADD FILE": TokenType.COMMAND,
            "ADD FILES": TokenType.COMMAND,
            "ADD JAR": TokenType.COMMAND,
            "ADD JARS": TokenType.COMMAND,
            "MSCK REPAIR": TokenType.COMMAND,
            "WITH SERDEPROPERTIES": TokenType.SERDE_PROPERTIES,
        }

        # Hive numeric literal suffixes and the types they denote (e.g. 1L -> BIGINT).
        NUMERIC_LITERALS = {
            "L": "BIGINT",
            "S": "SMALLINT",
            "Y": "TINYINT",
            "D": "DOUBLE",
            "F": "FLOAT",
            "BD": "DECIMAL",
        }

        IDENTIFIER_CAN_START_WITH_DIGIT = True

    class Parser(parser.Parser):
        STRICT_CAST = False

        # Hive function name -> AST builder overrides.
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list,
            "COLLECT_LIST": exp.ArrayAgg.from_arg_list,
            "DATE_ADD": lambda args: exp.TsOrDsAdd(
                this=seq_get(args, 0),
                expression=seq_get(args, 1),
                unit=exp.Literal.string("DAY"),
            ),
            "DATEDIFF": lambda args: exp.DateDiff(
                this=exp.TsOrDsToDate(this=seq_get(args, 0)),
                expression=exp.TsOrDsToDate(this=seq_get(args, 1)),
            ),
            # DATE_SUB(x, n) is modeled as TsOrDsAdd(x, n * -1, DAY).
            "DATE_SUB": lambda args: exp.TsOrDsAdd(
                this=seq_get(args, 0),
                expression=exp.Mul(
                    this=seq_get(args, 1),
                    expression=exp.Literal.number(-1),
                ),
                unit=exp.Literal.string("DAY"),
            ),
            "DATE_FORMAT": lambda args: format_time_lambda(exp.TimeToStr, "hive")(
                [
                    exp.TimeStrToTime(this=seq_get(args, 0)),
                    seq_get(args, 1),
                ]
            ),
            "DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))),
            "FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True),
            "GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list,
            "LOCATE": locate_to_strposition,
            # Hive's single-argument LOG is the natural log.
            "LOG": (
                lambda args: exp.Log.from_arg_list(args)
                if len(args) > 1
                else exp.Ln.from_arg_list(args)
            ),
            "MAP": parse_var_map,
            "MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)),
            "PERCENTILE": exp.Quantile.from_arg_list,
            "PERCENTILE_APPROX": exp.ApproxQuantile.from_arg_list,
            "COLLECT_SET": exp.SetAgg.from_arg_list,
            "SIZE": exp.ArraySize.from_arg_list,
            "SPLIT": exp.RegexpSplit.from_arg_list,
            "TO_DATE": format_time_lambda(exp.TsOrDsToDate, "hive"),
            "UNIX_TIMESTAMP": format_time_lambda(exp.StrToUnix, "hive", True),
            "YEAR": lambda args: exp.Year(this=exp.TsOrDsToDate.from_arg_list(args)),
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
            "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties(
                expressions=self._parse_wrapped_csv(self._parse_property)
            ),
        }

        INTEGER_DIVISION = False

    class Generator(generator.Generator):
        INTEGER_DIVISION = False

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            exp.DataType.Type.TEXT: "STRING",
            exp.DataType.Type.DATETIME: "TIMESTAMP",
            exp.DataType.Type.VARBINARY: "BINARY",
        }

        # AST node -> Hive SQL rendering overrides.
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            **transforms.UNALIAS_GROUP,  # type: ignore
            exp.Property: _property_sql,
            exp.ApproxDistinct: approx_count_distinct_sql,
            exp.ArrayAgg: rename_func("COLLECT_LIST"),
            exp.ArrayConcat: rename_func("CONCAT"),
            exp.ArraySize: rename_func("SIZE"),
            exp.ArraySort: _array_sort,
            exp.With: no_recursive_cte_sql,
            exp.DateAdd: _add_date_sql,
            exp.DateDiff: _date_diff_sql,
            exp.DateStrToDate: rename_func("TO_DATE"),
            exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.dateint_format}) AS INT)",
            exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.dateint_format})",
            exp.FileFormatProperty: lambda self, e: f"STORED AS {e.name.upper()}",
            exp.If: if_sql,
            exp.Index: _index_sql,
            exp.ILike: no_ilike_sql,
            exp.Join: _unnest_to_explode_sql,
            exp.JSONExtract: rename_func("GET_JSON_OBJECT"),
            exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"),
            exp.Map: var_map_sql,
            exp.Min: min_or_least,
            exp.VarMap: var_map_sql,
            exp.Create: create_with_partitions_sql,
            exp.Quantile: rename_func("PERCENTILE"),
            exp.ApproxQuantile: rename_func("PERCENTILE_APPROX"),
            exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"),
            exp.RegexpSplit: rename_func("SPLIT"),
            exp.SafeDivide: no_safe_divide_sql,
            exp.SchemaCommentProperty: lambda self, e: self.naked_property(e),
            exp.SetAgg: rename_func("COLLECT_SET"),
            # \\Q quotes the delimiter so SPLIT treats it literally, not as a regex.
            exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))",
            exp.StrPosition: strposition_to_locate_sql,
            exp.StrToDate: _str_to_date,
            exp.StrToTime: _str_to_time,
            exp.StrToUnix: _str_to_unix,
            exp.StructExtract: struct_extract_sql,
            exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}",
            exp.TimeStrToDate: rename_func("TO_DATE"),
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TimeToStr: _time_to_str,
            exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)",
            exp.TsOrDsAdd: lambda self, e: f"DATE_ADD({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.TsOrDsToDate: _to_date_sql,
            exp.TryCast: no_trycast_sql,
            exp.UnixToStr: lambda self, e: self.func(
                "FROM_UNIXTIME", e.this, _time_format(self, e)
            ),
            exp.UnixToTime: rename_func("FROM_UNIXTIME"),
            exp.UnixToTimeStr: rename_func("FROM_UNIXTIME"),
            exp.PartitionedByProperty: lambda self, e: f"PARTITIONED BY {self.sql(e, 'this')}",
            exp.RowFormatSerdeProperty: lambda self, e: f"ROW FORMAT SERDE {self.sql(e, 'this')}",
            exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"),
            exp.NumberToStr: rename_func("FORMAT_NUMBER"),
            exp.LastDateOfMonth: rename_func("LAST_DAY"),
        }

        # Properties that must be rendered after the schema in CREATE statements.
        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA,
            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
            exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA,
        }

        def with_properties(self, properties):
            """Render WITH-style properties under Hive's TBLPROPERTIES clause."""
            return self.properties(
                properties,
                prefix=self.seg("TBLPROPERTIES"),
            )

        def datatype_sql(self, expression):
            """Render a data type, normalizing types Hive spells differently.

            Unsized VARCHAR/NVARCHAR becomes TEXT (rendered as STRING via
            TYPE_MAPPING); temporal types are rebuilt without parameters.
            """
            if (
                expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR)
                and not expression.expressions
            ):
                expression = exp.DataType.build("text")
            elif expression.this in exp.DataType.TEMPORAL_TYPES:
                expression = exp.DataType.build(expression.this)
            return super().datatype_sql(expression)
class Hive(Dialect):
    """Hive dialect: HiveQL-specific tokenizing, parsing and SQL generation."""

    alias_post_tablesample = True

    # Hive (Java SimpleDateFormat-style) time tokens -> Python strftime directives.
    time_mapping = {
        "y": "%Y",
        "Y": "%Y",
        "YYYY": "%Y",
        "yyyy": "%Y",
        "YY": "%y",
        "yy": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "hh": "%I",
        "h": "%-I",
        "mm": "%M",
        "m": "%-M",
        "ss": "%S",
        "s": "%-S",
        "SSSSSS": "%f",
        "a": "%p",
        "DD": "%j",
        "D": "%-j",
        "E": "%a",
        "EE": "%a",
        "EEE": "%a",
        "EEEE": "%A",
    }

    # Default Hive-format strings (note: quoted, ready for SQL interpolation).
    date_format = "'yyyy-MM-dd'"
    dateint_format = "'yyyyMMdd'"
    time_format = "'yyyy-MM-dd HH:mm:ss'"

    class Tokenizer(tokens.Tokenizer):
        QUOTES = ["'", '"']
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["\\"]
        ENCODE = "utf-8"

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            # Hive resource/maintenance statements are passed through as raw commands.
            "ADD ARCHIVE": TokenType.COMMAND,
            "ADD ARCHIVES": TokenType.COMMAND,
            "ADD FILE": TokenType.COMMAND,
            "ADD FILES": TokenType.COMMAND,
            "ADD JAR": TokenType.COMMAND,
            "ADD JARS": TokenType.COMMAND,
            "MSCK REPAIR": TokenType.COMMAND,
            "WITH SERDEPROPERTIES": TokenType.SERDE_PROPERTIES,
        }

        # Hive numeric literal suffixes and the types they denote (e.g. 1L -> BIGINT).
        NUMERIC_LITERALS = {
            "L": "BIGINT",
            "S": "SMALLINT",
            "Y": "TINYINT",
            "D": "DOUBLE",
            "F": "FLOAT",
            "BD": "DECIMAL",
        }

        IDENTIFIER_CAN_START_WITH_DIGIT = True

    class Parser(parser.Parser):
        STRICT_CAST = False

        # Hive function name -> AST builder overrides.
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list,
            "COLLECT_LIST": exp.ArrayAgg.from_arg_list,
            "DATE_ADD": lambda args: exp.TsOrDsAdd(
                this=seq_get(args, 0),
                expression=seq_get(args, 1),
                unit=exp.Literal.string("DAY"),
            ),
            "DATEDIFF": lambda args: exp.DateDiff(
                this=exp.TsOrDsToDate(this=seq_get(args, 0)),
                expression=exp.TsOrDsToDate(this=seq_get(args, 1)),
            ),
            # DATE_SUB(x, n) is modeled as TsOrDsAdd(x, n * -1, DAY).
            "DATE_SUB": lambda args: exp.TsOrDsAdd(
                this=seq_get(args, 0),
                expression=exp.Mul(
                    this=seq_get(args, 1),
                    expression=exp.Literal.number(-1),
                ),
                unit=exp.Literal.string("DAY"),
            ),
            "DATE_FORMAT": lambda args: format_time_lambda(exp.TimeToStr, "hive")(
                [
                    exp.TimeStrToTime(this=seq_get(args, 0)),
                    seq_get(args, 1),
                ]
            ),
            "DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))),
            "FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True),
            "GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list,
            "LOCATE": locate_to_strposition,
            # Hive's single-argument LOG is the natural log.
            "LOG": (
                lambda args: exp.Log.from_arg_list(args)
                if len(args) > 1
                else exp.Ln.from_arg_list(args)
            ),
            "MAP": parse_var_map,
            "MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)),
            "PERCENTILE": exp.Quantile.from_arg_list,
            "PERCENTILE_APPROX": exp.ApproxQuantile.from_arg_list,
            "COLLECT_SET": exp.SetAgg.from_arg_list,
            "SIZE": exp.ArraySize.from_arg_list,
            "SPLIT": exp.RegexpSplit.from_arg_list,
            "TO_DATE": format_time_lambda(exp.TsOrDsToDate, "hive"),
            "UNIX_TIMESTAMP": format_time_lambda(exp.StrToUnix, "hive", True),
            "YEAR": lambda args: exp.Year(this=exp.TsOrDsToDate.from_arg_list(args)),
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
            "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties(
                expressions=self._parse_wrapped_csv(self._parse_property)
            ),
        }

        INTEGER_DIVISION = False

    class Generator(generator.Generator):
        INTEGER_DIVISION = False

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            exp.DataType.Type.TEXT: "STRING",
            exp.DataType.Type.DATETIME: "TIMESTAMP",
            exp.DataType.Type.VARBINARY: "BINARY",
        }

        # AST node -> Hive SQL rendering overrides.
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            **transforms.UNALIAS_GROUP,  # type: ignore
            exp.Property: _property_sql,
            exp.ApproxDistinct: approx_count_distinct_sql,
            exp.ArrayAgg: rename_func("COLLECT_LIST"),
            exp.ArrayConcat: rename_func("CONCAT"),
            exp.ArraySize: rename_func("SIZE"),
            exp.ArraySort: _array_sort,
            exp.With: no_recursive_cte_sql,
            exp.DateAdd: _add_date_sql,
            exp.DateDiff: _date_diff_sql,
            exp.DateStrToDate: rename_func("TO_DATE"),
            exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.dateint_format}) AS INT)",
            exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.dateint_format})",
            exp.FileFormatProperty: lambda self, e: f"STORED AS {e.name.upper()}",
            exp.If: if_sql,
            exp.Index: _index_sql,
            exp.ILike: no_ilike_sql,
            exp.Join: _unnest_to_explode_sql,
            exp.JSONExtract: rename_func("GET_JSON_OBJECT"),
            exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"),
            exp.Map: var_map_sql,
            exp.Min: min_or_least,
            exp.VarMap: var_map_sql,
            exp.Create: create_with_partitions_sql,
            exp.Quantile: rename_func("PERCENTILE"),
            exp.ApproxQuantile: rename_func("PERCENTILE_APPROX"),
            exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"),
            exp.RegexpSplit: rename_func("SPLIT"),
            exp.SafeDivide: no_safe_divide_sql,
            exp.SchemaCommentProperty: lambda self, e: self.naked_property(e),
            exp.SetAgg: rename_func("COLLECT_SET"),
            # \\Q quotes the delimiter so SPLIT treats it literally, not as a regex.
            exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))",
            exp.StrPosition: strposition_to_locate_sql,
            exp.StrToDate: _str_to_date,
            exp.StrToTime: _str_to_time,
            exp.StrToUnix: _str_to_unix,
            exp.StructExtract: struct_extract_sql,
            exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}",
            exp.TimeStrToDate: rename_func("TO_DATE"),
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TimeToStr: _time_to_str,
            exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)",
            exp.TsOrDsAdd: lambda self, e: f"DATE_ADD({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.TsOrDsToDate: _to_date_sql,
            exp.TryCast: no_trycast_sql,
            exp.UnixToStr: lambda self, e: self.func(
                "FROM_UNIXTIME", e.this, _time_format(self, e)
            ),
            exp.UnixToTime: rename_func("FROM_UNIXTIME"),
            exp.UnixToTimeStr: rename_func("FROM_UNIXTIME"),
            exp.PartitionedByProperty: lambda self, e: f"PARTITIONED BY {self.sql(e, 'this')}",
            exp.RowFormatSerdeProperty: lambda self, e: f"ROW FORMAT SERDE {self.sql(e, 'this')}",
            exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"),
            exp.NumberToStr: rename_func("FORMAT_NUMBER"),
            exp.LastDateOfMonth: rename_func("LAST_DAY"),
        }

        # Properties that must be rendered after the schema in CREATE statements.
        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA,
            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
            exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA,
        }

        def with_properties(self, properties):
            """Render WITH-style properties under Hive's TBLPROPERTIES clause."""
            return self.properties(
                properties,
                prefix=self.seg("TBLPROPERTIES"),
            )

        def datatype_sql(self, expression):
            """Render a data type, normalizing types Hive spells differently.

            Unsized VARCHAR/NVARCHAR becomes TEXT (rendered as STRING via
            TYPE_MAPPING); temporal types are rebuilt without parameters.
            """
            if (
                expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR)
                and not expression.expressions
            ):
                expression = exp.DataType.build("text")
            elif expression.this in exp.DataType.TEMPORAL_TYPES:
                expression = exp.DataType.build(expression.this)
            return super().datatype_sql(expression)
Inherited Members
class Tokenizer(tokens.Tokenizer):
    """Hive tokenizer: backtick identifiers, backslash escapes, command keywords."""

    QUOTES = ["'", '"']
    IDENTIFIERS = ["`"]
    STRING_ESCAPES = ["\\"]
    ENCODE = "utf-8"

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        # Hive resource/maintenance statements are passed through as raw commands.
        "ADD ARCHIVE": TokenType.COMMAND,
        "ADD ARCHIVES": TokenType.COMMAND,
        "ADD FILE": TokenType.COMMAND,
        "ADD FILES": TokenType.COMMAND,
        "ADD JAR": TokenType.COMMAND,
        "ADD JARS": TokenType.COMMAND,
        "MSCK REPAIR": TokenType.COMMAND,
        "WITH SERDEPROPERTIES": TokenType.SERDE_PROPERTIES,
    }

    # Hive numeric literal suffixes and the types they denote (e.g. 1L -> BIGINT).
    NUMERIC_LITERALS = {
        "L": "BIGINT",
        "S": "SMALLINT",
        "Y": "TINYINT",
        "D": "DOUBLE",
        "F": "FLOAT",
        "BD": "DECIMAL",
    }

    IDENTIFIER_CAN_START_WITH_DIGIT = True
Inherited Members
class Parser(parser.Parser):
    """Hive parser: maps Hive function names onto sqlglot expression builders."""

    STRICT_CAST = False

    # Hive function name -> AST builder overrides.
    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,  # type: ignore
        "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list,
        "COLLECT_LIST": exp.ArrayAgg.from_arg_list,
        "DATE_ADD": lambda args: exp.TsOrDsAdd(
            this=seq_get(args, 0),
            expression=seq_get(args, 1),
            unit=exp.Literal.string("DAY"),
        ),
        "DATEDIFF": lambda args: exp.DateDiff(
            this=exp.TsOrDsToDate(this=seq_get(args, 0)),
            expression=exp.TsOrDsToDate(this=seq_get(args, 1)),
        ),
        # DATE_SUB(x, n) is modeled as TsOrDsAdd(x, n * -1, DAY).
        "DATE_SUB": lambda args: exp.TsOrDsAdd(
            this=seq_get(args, 0),
            expression=exp.Mul(
                this=seq_get(args, 1),
                expression=exp.Literal.number(-1),
            ),
            unit=exp.Literal.string("DAY"),
        ),
        "DATE_FORMAT": lambda args: format_time_lambda(exp.TimeToStr, "hive")(
            [
                exp.TimeStrToTime(this=seq_get(args, 0)),
                seq_get(args, 1),
            ]
        ),
        "DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))),
        "FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True),
        "GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list,
        "LOCATE": locate_to_strposition,
        # Hive's single-argument LOG is the natural log.
        "LOG": (
            lambda args: exp.Log.from_arg_list(args)
            if len(args) > 1
            else exp.Ln.from_arg_list(args)
        ),
        "MAP": parse_var_map,
        "MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)),
        "PERCENTILE": exp.Quantile.from_arg_list,
        "PERCENTILE_APPROX": exp.ApproxQuantile.from_arg_list,
        "COLLECT_SET": exp.SetAgg.from_arg_list,
        "SIZE": exp.ArraySize.from_arg_list,
        "SPLIT": exp.RegexpSplit.from_arg_list,
        "TO_DATE": format_time_lambda(exp.TsOrDsToDate, "hive"),
        "UNIX_TIMESTAMP": format_time_lambda(exp.StrToUnix, "hive", True),
        "YEAR": lambda args: exp.Year(this=exp.TsOrDsToDate.from_arg_list(args)),
    }

    PROPERTY_PARSERS = {
        **parser.Parser.PROPERTY_PARSERS,  # type: ignore
        "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties(
            expressions=self._parse_wrapped_csv(self._parse_property)
        ),
    }

    INTEGER_DIVISION = False
Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer
and produces
a parsed syntax tree.
Arguments:
- error_level: the desired error level. Default: ErrorLevel.RAISE
- error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
- index_offset: Index offset for arrays, e.g. ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
- alias_post_tablesample: If the table alias comes after tablesample. Default: False
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
- null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
Inherited Members
class Generator(generator.Generator):
    """Hive generator: renders sqlglot AST nodes as HiveQL."""

    INTEGER_DIVISION = False

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,  # type: ignore
        exp.DataType.Type.TEXT: "STRING",
        exp.DataType.Type.DATETIME: "TIMESTAMP",
        exp.DataType.Type.VARBINARY: "BINARY",
    }

    # AST node -> Hive SQL rendering overrides.
    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,  # type: ignore
        **transforms.UNALIAS_GROUP,  # type: ignore
        exp.Property: _property_sql,
        exp.ApproxDistinct: approx_count_distinct_sql,
        exp.ArrayAgg: rename_func("COLLECT_LIST"),
        exp.ArrayConcat: rename_func("CONCAT"),
        exp.ArraySize: rename_func("SIZE"),
        exp.ArraySort: _array_sort,
        exp.With: no_recursive_cte_sql,
        exp.DateAdd: _add_date_sql,
        exp.DateDiff: _date_diff_sql,
        exp.DateStrToDate: rename_func("TO_DATE"),
        exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.dateint_format}) AS INT)",
        exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.dateint_format})",
        exp.FileFormatProperty: lambda self, e: f"STORED AS {e.name.upper()}",
        exp.If: if_sql,
        exp.Index: _index_sql,
        exp.ILike: no_ilike_sql,
        exp.Join: _unnest_to_explode_sql,
        exp.JSONExtract: rename_func("GET_JSON_OBJECT"),
        exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"),
        exp.Map: var_map_sql,
        exp.Min: min_or_least,
        exp.VarMap: var_map_sql,
        exp.Create: create_with_partitions_sql,
        exp.Quantile: rename_func("PERCENTILE"),
        exp.ApproxQuantile: rename_func("PERCENTILE_APPROX"),
        exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"),
        exp.RegexpSplit: rename_func("SPLIT"),
        exp.SafeDivide: no_safe_divide_sql,
        exp.SchemaCommentProperty: lambda self, e: self.naked_property(e),
        exp.SetAgg: rename_func("COLLECT_SET"),
        # \\Q quotes the delimiter so SPLIT treats it literally, not as a regex.
        exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))",
        exp.StrPosition: strposition_to_locate_sql,
        exp.StrToDate: _str_to_date,
        exp.StrToTime: _str_to_time,
        exp.StrToUnix: _str_to_unix,
        exp.StructExtract: struct_extract_sql,
        exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}",
        exp.TimeStrToDate: rename_func("TO_DATE"),
        exp.TimeStrToTime: timestrtotime_sql,
        exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
        exp.TimeToStr: _time_to_str,
        exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"),
        exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)",
        exp.TsOrDsAdd: lambda self, e: f"DATE_ADD({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
        exp.TsOrDsToDate: _to_date_sql,
        exp.TryCast: no_trycast_sql,
        exp.UnixToStr: lambda self, e: self.func(
            "FROM_UNIXTIME", e.this, _time_format(self, e)
        ),
        exp.UnixToTime: rename_func("FROM_UNIXTIME"),
        exp.UnixToTimeStr: rename_func("FROM_UNIXTIME"),
        exp.PartitionedByProperty: lambda self, e: f"PARTITIONED BY {self.sql(e, 'this')}",
        exp.RowFormatSerdeProperty: lambda self, e: f"ROW FORMAT SERDE {self.sql(e, 'this')}",
        exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"),
        exp.NumberToStr: rename_func("FORMAT_NUMBER"),
        exp.LastDateOfMonth: rename_func("LAST_DAY"),
    }

    # Properties that must be rendered after the schema in CREATE statements.
    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
        exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA,
        exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
        exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA,
    }

    def with_properties(self, properties):
        """Render WITH-style properties under Hive's TBLPROPERTIES clause."""
        return self.properties(
            properties,
            prefix=self.seg("TBLPROPERTIES"),
        )

    def datatype_sql(self, expression):
        """Render a data type, normalizing types Hive spells differently.

        Unsized VARCHAR/NVARCHAR becomes TEXT (rendered as STRING via
        TYPE_MAPPING); temporal types are rebuilt without parameters.
        """
        if (
            expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR)
            and not expression.expressions
        ):
            expression = exp.DataType.build("text")
        elif expression.this in exp.DataType.TEMPORAL_TYPES:
            expression = exp.DataType.build(expression.this)
        return super().datatype_sql(expression)
Generator interprets the given syntax tree and produces a SQL string as an output.
Arguments:
- time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the output the target time format
- time_trie (trie): a trie of the time_mapping keys
- pretty (bool): if set to True the returned string will be formatted. Default: False.
- quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
- quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
- identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
- identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
- identify (bool): if set to True all identifiers will be delimited by the corresponding character.
- normalize (bool): if set to True all identifiers will be lower-cased
- string_escape (str): specifies a string escape character. Default: '.
- identifier_escape (str): specifies an identifier escape character. Default: ".
- pad (int): determines padding in a formatted string. Default: 2.
- indent (int): determines the size of indentation in a formatted string. Default: 4.
- unnest_column_only (bool): if true unnest table aliases are considered only as column aliases
- normalize_functions (str): normalize function names, "upper", "lower", or None. Default: "upper"
- alias_post_tablesample (bool): if the table alias comes after tablesample Default: False
- unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
- max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma (bool): if the comma is leading or trailing in select statements. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
def
datatype_sql(self, expression):
def datatype_sql(self, expression):
    """Render a data type, normalizing types Hive spells differently.

    An unsized VARCHAR/NVARCHAR is rebuilt as TEXT (which TYPE_MAPPING
    renders as STRING); temporal types are rebuilt bare, dropping any
    parameters. Everything else is delegated to the base generator.
    """
    kind = expression.this
    bare_varchar = kind in (
        exp.DataType.Type.VARCHAR,
        exp.DataType.Type.NVARCHAR,
    ) and not expression.expressions
    if bare_varchar:
        expression = exp.DataType.build("text")
    elif kind in exp.DataType.TEMPORAL_TYPES:
        # Rebuild from the bare type to strip any parameters.
        expression = exp.DataType.build(kind)
    return super().datatype_sql(expression)
Inherited Members
- sqlglot.generator.Generator
- Generator
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- create_sql
- describe_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- afterjournalproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- pseudotype_sql
- returning_sql
- rowformatdelimitedproperty_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- lock_sql
- literal_sql
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- select_sql
- schema_sql
- star_sql
- structkwarg_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- window_spec_sql
- withingroup_sql
- between_sql
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- extract_sql
- trim_sql
- concat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- unique_sql
- if_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- attimezone_sql
- add_sql
- and_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- transaction_sql
- commit_sql
- rollback_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- div_sql
- floatdiv_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- is_sql
- like_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- trycast_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql