sqlglot.dialects.hive
"""Hive dialect for sqlglot.

Bundles the tokenizer, parser and generator rules needed to read and write
HiveQL, plus module-level helper functions that render individual expression
nodes as Hive SQL.  The helpers take ``self`` (a ``Generator`` instance,
since they are installed in ``Hive.Generator.TRANSFORMS``) and the expression
node to render, and return a SQL string.
"""

from __future__ import annotations

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    approx_count_distinct_sql,
    create_with_partitions_sql,
    format_time_lambda,
    if_sql,
    locate_to_strposition,
    no_ilike_sql,
    no_recursive_cte_sql,
    no_safe_divide_sql,
    no_trycast_sql,
    rename_func,
    strposition_to_locate_sql,
    struct_extract_sql,
    timestrtotime_sql,
    var_map_sql,
)
from sqlglot.helper import seq_get
from sqlglot.parser import parse_var_map
from sqlglot.tokens import TokenType

# Maps a date-delta unit to (FuncType, Multiplier): the Hive function used to
# apply the delta and the factor that converts one unit into that function's
# native unit (months for ADD_MONTHS, days for DATE_ADD).
DATE_DELTA_INTERVAL = {
    "YEAR": ("ADD_MONTHS", 12),
    "MONTH": ("ADD_MONTHS", 1),
    "QUARTER": ("ADD_MONTHS", 3),
    "WEEK": ("DATE_ADD", 7),
    "DAY": ("DATE_ADD", 1),
}

# Units whose difference is computed with MONTHS_BETWEEN instead of DATEDIFF.
DIFF_MONTH_SWITCH = ("YEAR", "QUARTER", "MONTH")


def _add_date_sql(self, expression):
    """Render a DateAdd as ADD_MONTHS/DATE_ADD, scaling the increment by the
    unit's multiplier (e.g. YEAR -> ADD_MONTHS with increment * 12)."""
    unit = expression.text("unit").upper()
    func, multiplier = DATE_DELTA_INTERVAL.get(unit, ("DATE_ADD", 1))
    # Literal numeric increments are pre-multiplied; anything else (a column,
    # an arbitrary expression) is passed through unscaled.
    modified_increment = (
        int(expression.text("expression")) * multiplier
        if expression.expression.is_number
        else expression.expression
    )
    # NOTE(review): non-numeric increments are also wrapped in Literal.number
    # here and immediately unwrapped via ``.this`` below — confirm this
    # round-trips correctly for expression (non-literal) increments.
    modified_increment = exp.Literal.number(modified_increment)
    return f"{func}({self.format_args(expression.this, modified_increment.this)})"


def _date_diff_sql(self, expression):
    """Render a DateDiff as MONTHS_BETWEEN for month-based units, otherwise
    DATEDIFF, dividing by the unit multiplier when it is larger than one
    (e.g. WEEK -> DATEDIFF(...) / 7, YEAR -> MONTHS_BETWEEN(...) / 12)."""
    unit = expression.text("unit").upper()
    sql_func = "MONTHS_BETWEEN" if unit in DIFF_MONTH_SWITCH else "DATEDIFF"
    _, multiplier = DATE_DELTA_INTERVAL.get(unit, ("", 1))
    multiplier_sql = f" / {multiplier}" if multiplier > 1 else ""
    diff_sql = f"{sql_func}({self.format_args(expression.this, expression.expression)})"
    return f"{diff_sql}{multiplier_sql}"


def _array_sort(self, expression):
    """Render an ArraySort as SORT_ARRAY; Hive cannot take a comparator, so a
    second argument is reported as unsupported and dropped."""
    if expression.expression:
        self.unsupported("Hive SORT_ARRAY does not support a comparator")
    return f"SORT_ARRAY({self.sql(expression, 'this')})"


def _property_sql(self, expression):
    """Render a Property as 'name'=value."""
    return f"'{expression.name}'={self.sql(expression, 'value')}"


def _str_to_unix(self, expression):
    """Render a StrToUnix as UNIX_TIMESTAMP, omitting the format argument when
    it matches Hive's default time format (see ``_time_format``)."""
    return f"UNIX_TIMESTAMP({self.format_args(expression.this, _time_format(self, expression))})"


def _str_to_date(self, expression):
    """Render a StrToDate as CAST(... AS DATE), round-tripping through
    UNIX_TIMESTAMP/FROM_UNIXTIME when a non-default format must be parsed."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    if time_format not in (Hive.time_format, Hive.date_format):
        this = f"FROM_UNIXTIME(UNIX_TIMESTAMP({this}, {time_format}))"
    return f"CAST({this} AS DATE)"


def _str_to_time(self, expression):
    """Render a StrToTime as CAST(... AS TIMESTAMP), round-tripping through
    UNIX_TIMESTAMP/FROM_UNIXTIME when a non-default format must be parsed."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    if time_format not in (Hive.time_format, Hive.date_format):
        this = f"FROM_UNIXTIME(UNIX_TIMESTAMP({this}, {time_format}))"
    return f"CAST({this} AS TIMESTAMP)"


def _time_format(self, expression):
    """Return the expression's time format, or None when it equals Hive's
    default so callers can omit the argument entirely."""
    time_format = self.format_time(expression)
    if time_format == Hive.time_format:
        return None
    return time_format


def _time_to_str(self, expression):
    """Render a TimeToStr as DATE_FORMAT(this, format)."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    return f"DATE_FORMAT({this}, {time_format})"


def _to_date_sql(self, expression):
    """Render a TsOrDsToDate as TO_DATE, passing the format only when it
    differs from Hive's default time/date formats."""
    this = self.sql(expression, "this")
    time_format = self.format_time(expression)
    if time_format and time_format not in (Hive.time_format, Hive.date_format):
        return f"TO_DATE({this}, {time_format})"
    return f"TO_DATE({this})"


def _unnest_to_explode_sql(self, expression):
    """Render a Join over an Unnest as LATERAL VIEW EXPLODE/POSEXPLODE (one
    lateral view per unnested expression/column pair); any other join falls
    back to the default join rendering."""
    unnest = expression.this
    if isinstance(unnest, exp.Unnest):
        alias = unnest.args.get("alias")
        # POSEXPLODE also emits the element position when ORDINALITY was used.
        udtf = exp.Posexplode if unnest.args.get("ordinality") else exp.Explode
        # NOTE: the comprehension rebinds ``expression`` to each unnested
        # expression, shadowing the Join argument from here on.
        return "".join(
            self.sql(
                exp.Lateral(
                    this=udtf(this=expression),
                    view=True,
                    alias=exp.TableAlias(this=alias.this, columns=[column]),
                )
            )
            for expression, column in zip(unnest.expressions, alias.columns if alias else [])
        )
    return self.join_sql(expression)


def _index_sql(self, expression):
    """Render an Index using Hive's ``... ON TABLE table (columns)`` syntax."""
    this = self.sql(expression, "this")
    table = self.sql(expression, "table")
    columns = self.sql(expression, "columns")
    return f"{this} ON TABLE {table} {columns}"


class Hive(Dialect):
    """The Hive SQL dialect: HiveQL tokenizing, parsing and generation."""

    # In Hive the table alias comes after TABLESAMPLE.
    alias_post_tablesample = True

    # Maps Hive (Java SimpleDateFormat-style) time-format tokens to python
    # strftime tokens.
    time_mapping = {
        "y": "%Y",
        "Y": "%Y",
        "YYYY": "%Y",
        "yyyy": "%Y",
        "YY": "%y",
        "yy": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "hh": "%I",
        "h": "%-I",
        "mm": "%M",
        "m": "%-M",
        "ss": "%S",
        "s": "%-S",
        "SSSSSS": "%f",
        "a": "%p",
        "DD": "%j",
        "D": "%-j",
        "E": "%a",
        "EE": "%a",
        "EEE": "%a",
        "EEEE": "%A",
    }

    # Hive's default date/time format strings (quoted, in Hive token syntax).
    date_format = "'yyyy-MM-dd'"
    dateint_format = "'yyyyMMdd'"
    time_format = "'yyyy-MM-dd HH:mm:ss'"

    class Tokenizer(tokens.Tokenizer):
        """HiveQL tokenizer: backtick identifiers, backslash escapes, and
        Hive-specific commands/keywords."""

        QUOTES = ["'", '"']
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["\\"]
        ENCODE = "utf-8"

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            # ADD FILE/JAR/ARCHIVE and MSCK REPAIR are opaque commands, not
            # parsed statements.
            "ADD ARCHIVE": TokenType.COMMAND,
            "ADD ARCHIVES": TokenType.COMMAND,
            "ADD FILE": TokenType.COMMAND,
            "ADD FILES": TokenType.COMMAND,
            "ADD JAR": TokenType.COMMAND,
            "ADD JARS": TokenType.COMMAND,
            "MSCK REPAIR": TokenType.COMMAND,
            "WITH SERDEPROPERTIES": TokenType.SERDE_PROPERTIES,
        }

        # Numeric literal suffixes mapped to their Hive types (e.g. 1L -> BIGINT).
        NUMERIC_LITERALS = {
            "L": "BIGINT",
            "S": "SMALLINT",
            "Y": "TINYINT",
            "D": "DOUBLE",
            "F": "FLOAT",
            "BD": "DECIMAL",
        }

        IDENTIFIER_CAN_START_WITH_DIGIT = True

    class Parser(parser.Parser):
        """HiveQL parser: maps Hive function names onto sqlglot expressions."""

        # Hive's CAST does not raise on failure, so it is not a strict cast.
        STRICT_CAST = False

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list,
            "COLLECT_LIST": exp.ArrayAgg.from_arg_list,
            # Hive DATE_ADD/DATE_SUB always operate in days; DATE_SUB is
            # modeled as an addition of the negated increment.
            "DATE_ADD": lambda args: exp.TsOrDsAdd(
                this=seq_get(args, 0),
                expression=seq_get(args, 1),
                unit=exp.Literal.string("DAY"),
            ),
            "DATEDIFF": lambda args: exp.DateDiff(
                this=exp.TsOrDsToDate(this=seq_get(args, 0)),
                expression=exp.TsOrDsToDate(this=seq_get(args, 1)),
            ),
            "DATE_SUB": lambda args: exp.TsOrDsAdd(
                this=seq_get(args, 0),
                expression=exp.Mul(
                    this=seq_get(args, 1),
                    expression=exp.Literal.number(-1),
                ),
                unit=exp.Literal.string("DAY"),
            ),
            "DATE_FORMAT": lambda args: format_time_lambda(exp.TimeToStr, "hive")(
                [
                    exp.TimeStrToTime(this=seq_get(args, 0)),
                    seq_get(args, 1),
                ]
            ),
            "DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))),
            "FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True),
            "GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list,
            "LOCATE": locate_to_strposition,
            # Hive's LOG is natural log with one argument, LOG(base, x) with two.
            "LOG": (
                lambda args: exp.Log.from_arg_list(args)
                if len(args) > 1
                else exp.Ln.from_arg_list(args)
            ),
            "MAP": parse_var_map,
            "MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)),
            "PERCENTILE": exp.Quantile.from_arg_list,
            "PERCENTILE_APPROX": exp.ApproxQuantile.from_arg_list,
            "COLLECT_SET": exp.SetAgg.from_arg_list,
            "SIZE": exp.ArraySize.from_arg_list,
            "SPLIT": exp.RegexpSplit.from_arg_list,
            "TO_DATE": format_time_lambda(exp.TsOrDsToDate, "hive"),
            "UNIX_TIMESTAMP": format_time_lambda(exp.StrToUnix, "hive", True),
            "YEAR": lambda args: exp.Year(this=exp.TsOrDsToDate.from_arg_list(args)),
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
            "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties(
                expressions=self._parse_wrapped_csv(self._parse_property)
            ),
        }

    class Generator(generator.Generator):
        """HiveQL generator: maps sqlglot expressions back to Hive SQL text."""

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            exp.DataType.Type.TEXT: "STRING",
            exp.DataType.Type.DATETIME: "TIMESTAMP",
            exp.DataType.Type.VARBINARY: "BINARY",
        }

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            **transforms.UNALIAS_GROUP,  # type: ignore
            exp.Property: _property_sql,
            exp.ApproxDistinct: approx_count_distinct_sql,
            exp.ArrayAgg: rename_func("COLLECT_LIST"),
            exp.ArrayConcat: rename_func("CONCAT"),
            exp.ArraySize: rename_func("SIZE"),
            exp.ArraySort: _array_sort,
            exp.With: no_recursive_cte_sql,
            exp.DateAdd: _add_date_sql,
            exp.DateDiff: _date_diff_sql,
            exp.DateStrToDate: rename_func("TO_DATE"),
            exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.dateint_format}) AS INT)",
            exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.dateint_format})",
            exp.FileFormatProperty: lambda self, e: f"STORED AS {e.name.upper()}",
            exp.If: if_sql,
            exp.Index: _index_sql,
            exp.ILike: no_ilike_sql,
            exp.Join: _unnest_to_explode_sql,
            exp.JSONExtract: rename_func("GET_JSON_OBJECT"),
            exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"),
            exp.Map: var_map_sql,
            exp.VarMap: var_map_sql,
            exp.Create: create_with_partitions_sql,
            exp.Quantile: rename_func("PERCENTILE"),
            exp.ApproxQuantile: rename_func("PERCENTILE_APPROX"),
            exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"),
            exp.RegexpSplit: rename_func("SPLIT"),
            exp.SafeDivide: no_safe_divide_sql,
            exp.SchemaCommentProperty: lambda self, e: self.naked_property(e),
            exp.SetAgg: rename_func("COLLECT_SET"),
            # \Q quotes the separator so SPLIT treats it literally, not as a regex.
            exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))",
            exp.StrPosition: strposition_to_locate_sql,
            exp.StrToDate: _str_to_date,
            exp.StrToTime: _str_to_time,
            exp.StrToUnix: _str_to_unix,
            exp.StructExtract: struct_extract_sql,
            exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}",
            exp.TimeStrToDate: rename_func("TO_DATE"),
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TimeToStr: _time_to_str,
            exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)",
            exp.TsOrDsAdd: lambda self, e: f"DATE_ADD({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.TsOrDsToDate: _to_date_sql,
            exp.TryCast: no_trycast_sql,
            exp.UnixToStr: lambda self, e: f"FROM_UNIXTIME({self.format_args(e.this, _time_format(self, e))})",
            exp.UnixToTime: rename_func("FROM_UNIXTIME"),
            exp.UnixToTimeStr: rename_func("FROM_UNIXTIME"),
            exp.PartitionedByProperty: lambda self, e: f"PARTITIONED BY {self.sql(e, 'this')}",
            exp.RowFormatSerdeProperty: lambda self, e: f"ROW FORMAT SERDE {self.sql(e, 'this')}",
            exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"),
            exp.NumberToStr: rename_func("FORMAT_NUMBER"),
            exp.LastDateOfMonth: rename_func("LAST_DAY"),
        }

        # These properties belong after the schema in Hive CREATE statements.
        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA_ROOT,
            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA_ROOT,
            exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA_ROOT,
        }

        def with_properties(self, properties):
            """Render table properties under Hive's TBLPROPERTIES prefix."""
            return self.properties(
                properties,
                prefix=self.seg("TBLPROPERTIES"),
            )

        def datatype_sql(self, expression):
            """Render a data type, with two Hive adjustments: unsized
            VARCHAR/NVARCHAR become STRING (via the TEXT mapping), and
            temporal types are rebuilt without their arguments."""
            if (
                expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR)
                and not expression.expressions
            ):
                expression = exp.DataType.build("text")
            elif expression.this in exp.DataType.TEMPORAL_TYPES:
                expression = exp.DataType.build(expression.this)
            return super().datatype_sql(expression)
135class Hive(Dialect): 136 alias_post_tablesample = True 137 138 time_mapping = { 139 "y": "%Y", 140 "Y": "%Y", 141 "YYYY": "%Y", 142 "yyyy": "%Y", 143 "YY": "%y", 144 "yy": "%y", 145 "MMMM": "%B", 146 "MMM": "%b", 147 "MM": "%m", 148 "M": "%-m", 149 "dd": "%d", 150 "d": "%-d", 151 "HH": "%H", 152 "H": "%-H", 153 "hh": "%I", 154 "h": "%-I", 155 "mm": "%M", 156 "m": "%-M", 157 "ss": "%S", 158 "s": "%-S", 159 "SSSSSS": "%f", 160 "a": "%p", 161 "DD": "%j", 162 "D": "%-j", 163 "E": "%a", 164 "EE": "%a", 165 "EEE": "%a", 166 "EEEE": "%A", 167 } 168 169 date_format = "'yyyy-MM-dd'" 170 dateint_format = "'yyyyMMdd'" 171 time_format = "'yyyy-MM-dd HH:mm:ss'" 172 173 class Tokenizer(tokens.Tokenizer): 174 QUOTES = ["'", '"'] 175 IDENTIFIERS = ["`"] 176 STRING_ESCAPES = ["\\"] 177 ENCODE = "utf-8" 178 179 KEYWORDS = { 180 **tokens.Tokenizer.KEYWORDS, 181 "ADD ARCHIVE": TokenType.COMMAND, 182 "ADD ARCHIVES": TokenType.COMMAND, 183 "ADD FILE": TokenType.COMMAND, 184 "ADD FILES": TokenType.COMMAND, 185 "ADD JAR": TokenType.COMMAND, 186 "ADD JARS": TokenType.COMMAND, 187 "MSCK REPAIR": TokenType.COMMAND, 188 "WITH SERDEPROPERTIES": TokenType.SERDE_PROPERTIES, 189 } 190 191 NUMERIC_LITERALS = { 192 "L": "BIGINT", 193 "S": "SMALLINT", 194 "Y": "TINYINT", 195 "D": "DOUBLE", 196 "F": "FLOAT", 197 "BD": "DECIMAL", 198 } 199 200 IDENTIFIER_CAN_START_WITH_DIGIT = True 201 202 class Parser(parser.Parser): 203 STRICT_CAST = False 204 205 FUNCTIONS = { 206 **parser.Parser.FUNCTIONS, # type: ignore 207 "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list, 208 "COLLECT_LIST": exp.ArrayAgg.from_arg_list, 209 "DATE_ADD": lambda args: exp.TsOrDsAdd( 210 this=seq_get(args, 0), 211 expression=seq_get(args, 1), 212 unit=exp.Literal.string("DAY"), 213 ), 214 "DATEDIFF": lambda args: exp.DateDiff( 215 this=exp.TsOrDsToDate(this=seq_get(args, 0)), 216 expression=exp.TsOrDsToDate(this=seq_get(args, 1)), 217 ), 218 "DATE_SUB": lambda args: exp.TsOrDsAdd( 219 this=seq_get(args, 0), 220 
expression=exp.Mul( 221 this=seq_get(args, 1), 222 expression=exp.Literal.number(-1), 223 ), 224 unit=exp.Literal.string("DAY"), 225 ), 226 "DATE_FORMAT": lambda args: format_time_lambda(exp.TimeToStr, "hive")( 227 [ 228 exp.TimeStrToTime(this=seq_get(args, 0)), 229 seq_get(args, 1), 230 ] 231 ), 232 "DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))), 233 "FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True), 234 "GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list, 235 "LOCATE": locate_to_strposition, 236 "LOG": ( 237 lambda args: exp.Log.from_arg_list(args) 238 if len(args) > 1 239 else exp.Ln.from_arg_list(args) 240 ), 241 "MAP": parse_var_map, 242 "MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)), 243 "PERCENTILE": exp.Quantile.from_arg_list, 244 "PERCENTILE_APPROX": exp.ApproxQuantile.from_arg_list, 245 "COLLECT_SET": exp.SetAgg.from_arg_list, 246 "SIZE": exp.ArraySize.from_arg_list, 247 "SPLIT": exp.RegexpSplit.from_arg_list, 248 "TO_DATE": format_time_lambda(exp.TsOrDsToDate, "hive"), 249 "UNIX_TIMESTAMP": format_time_lambda(exp.StrToUnix, "hive", True), 250 "YEAR": lambda args: exp.Year(this=exp.TsOrDsToDate.from_arg_list(args)), 251 } 252 253 PROPERTY_PARSERS = { 254 **parser.Parser.PROPERTY_PARSERS, # type: ignore 255 "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties( 256 expressions=self._parse_wrapped_csv(self._parse_property) 257 ), 258 } 259 260 class Generator(generator.Generator): 261 TYPE_MAPPING = { 262 **generator.Generator.TYPE_MAPPING, # type: ignore 263 exp.DataType.Type.TEXT: "STRING", 264 exp.DataType.Type.DATETIME: "TIMESTAMP", 265 exp.DataType.Type.VARBINARY: "BINARY", 266 } 267 268 TRANSFORMS = { 269 **generator.Generator.TRANSFORMS, # type: ignore 270 **transforms.UNALIAS_GROUP, # type: ignore 271 exp.Property: _property_sql, 272 exp.ApproxDistinct: approx_count_distinct_sql, 273 exp.ArrayAgg: rename_func("COLLECT_LIST"), 274 exp.ArrayConcat: rename_func("CONCAT"), 275 
exp.ArraySize: rename_func("SIZE"), 276 exp.ArraySort: _array_sort, 277 exp.With: no_recursive_cte_sql, 278 exp.DateAdd: _add_date_sql, 279 exp.DateDiff: _date_diff_sql, 280 exp.DateStrToDate: rename_func("TO_DATE"), 281 exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.dateint_format}) AS INT)", 282 exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.dateint_format})", 283 exp.FileFormatProperty: lambda self, e: f"STORED AS {e.name.upper()}", 284 exp.If: if_sql, 285 exp.Index: _index_sql, 286 exp.ILike: no_ilike_sql, 287 exp.Join: _unnest_to_explode_sql, 288 exp.JSONExtract: rename_func("GET_JSON_OBJECT"), 289 exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"), 290 exp.Map: var_map_sql, 291 exp.VarMap: var_map_sql, 292 exp.Create: create_with_partitions_sql, 293 exp.Quantile: rename_func("PERCENTILE"), 294 exp.ApproxQuantile: rename_func("PERCENTILE_APPROX"), 295 exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"), 296 exp.RegexpSplit: rename_func("SPLIT"), 297 exp.SafeDivide: no_safe_divide_sql, 298 exp.SchemaCommentProperty: lambda self, e: self.naked_property(e), 299 exp.SetAgg: rename_func("COLLECT_SET"), 300 exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))", 301 exp.StrPosition: strposition_to_locate_sql, 302 exp.StrToDate: _str_to_date, 303 exp.StrToTime: _str_to_time, 304 exp.StrToUnix: _str_to_unix, 305 exp.StructExtract: struct_extract_sql, 306 exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}", 307 exp.TimeStrToDate: rename_func("TO_DATE"), 308 exp.TimeStrToTime: timestrtotime_sql, 309 exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"), 310 exp.TimeToStr: _time_to_str, 311 exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"), 312 exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)", 313 exp.TsOrDsAdd: lambda self, e: f"DATE_ADD({self.sql(e, 'this')}, 
{self.sql(e, 'expression')})", 314 exp.TsOrDsToDate: _to_date_sql, 315 exp.TryCast: no_trycast_sql, 316 exp.UnixToStr: lambda self, e: f"FROM_UNIXTIME({self.format_args(e.this, _time_format(self, e))})", 317 exp.UnixToTime: rename_func("FROM_UNIXTIME"), 318 exp.UnixToTimeStr: rename_func("FROM_UNIXTIME"), 319 exp.PartitionedByProperty: lambda self, e: f"PARTITIONED BY {self.sql(e, 'this')}", 320 exp.RowFormatSerdeProperty: lambda self, e: f"ROW FORMAT SERDE {self.sql(e, 'this')}", 321 exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"), 322 exp.NumberToStr: rename_func("FORMAT_NUMBER"), 323 exp.LastDateOfMonth: rename_func("LAST_DAY"), 324 } 325 326 PROPERTIES_LOCATION = { 327 **generator.Generator.PROPERTIES_LOCATION, # type: ignore 328 exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA_ROOT, 329 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA_ROOT, 330 exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA_ROOT, 331 } 332 333 def with_properties(self, properties): 334 return self.properties( 335 properties, 336 prefix=self.seg("TBLPROPERTIES"), 337 ) 338 339 def datatype_sql(self, expression): 340 if ( 341 expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR) 342 and not expression.expressions 343 ): 344 expression = exp.DataType.build("text") 345 elif expression.this in exp.DataType.TEMPORAL_TYPES: 346 expression = exp.DataType.build(expression.this) 347 return super().datatype_sql(expression)
Inherited Members
173 class Tokenizer(tokens.Tokenizer): 174 QUOTES = ["'", '"'] 175 IDENTIFIERS = ["`"] 176 STRING_ESCAPES = ["\\"] 177 ENCODE = "utf-8" 178 179 KEYWORDS = { 180 **tokens.Tokenizer.KEYWORDS, 181 "ADD ARCHIVE": TokenType.COMMAND, 182 "ADD ARCHIVES": TokenType.COMMAND, 183 "ADD FILE": TokenType.COMMAND, 184 "ADD FILES": TokenType.COMMAND, 185 "ADD JAR": TokenType.COMMAND, 186 "ADD JARS": TokenType.COMMAND, 187 "MSCK REPAIR": TokenType.COMMAND, 188 "WITH SERDEPROPERTIES": TokenType.SERDE_PROPERTIES, 189 } 190 191 NUMERIC_LITERALS = { 192 "L": "BIGINT", 193 "S": "SMALLINT", 194 "Y": "TINYINT", 195 "D": "DOUBLE", 196 "F": "FLOAT", 197 "BD": "DECIMAL", 198 } 199 200 IDENTIFIER_CAN_START_WITH_DIGIT = True
Inherited Members
202 class Parser(parser.Parser): 203 STRICT_CAST = False 204 205 FUNCTIONS = { 206 **parser.Parser.FUNCTIONS, # type: ignore 207 "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list, 208 "COLLECT_LIST": exp.ArrayAgg.from_arg_list, 209 "DATE_ADD": lambda args: exp.TsOrDsAdd( 210 this=seq_get(args, 0), 211 expression=seq_get(args, 1), 212 unit=exp.Literal.string("DAY"), 213 ), 214 "DATEDIFF": lambda args: exp.DateDiff( 215 this=exp.TsOrDsToDate(this=seq_get(args, 0)), 216 expression=exp.TsOrDsToDate(this=seq_get(args, 1)), 217 ), 218 "DATE_SUB": lambda args: exp.TsOrDsAdd( 219 this=seq_get(args, 0), 220 expression=exp.Mul( 221 this=seq_get(args, 1), 222 expression=exp.Literal.number(-1), 223 ), 224 unit=exp.Literal.string("DAY"), 225 ), 226 "DATE_FORMAT": lambda args: format_time_lambda(exp.TimeToStr, "hive")( 227 [ 228 exp.TimeStrToTime(this=seq_get(args, 0)), 229 seq_get(args, 1), 230 ] 231 ), 232 "DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))), 233 "FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True), 234 "GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list, 235 "LOCATE": locate_to_strposition, 236 "LOG": ( 237 lambda args: exp.Log.from_arg_list(args) 238 if len(args) > 1 239 else exp.Ln.from_arg_list(args) 240 ), 241 "MAP": parse_var_map, 242 "MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)), 243 "PERCENTILE": exp.Quantile.from_arg_list, 244 "PERCENTILE_APPROX": exp.ApproxQuantile.from_arg_list, 245 "COLLECT_SET": exp.SetAgg.from_arg_list, 246 "SIZE": exp.ArraySize.from_arg_list, 247 "SPLIT": exp.RegexpSplit.from_arg_list, 248 "TO_DATE": format_time_lambda(exp.TsOrDsToDate, "hive"), 249 "UNIX_TIMESTAMP": format_time_lambda(exp.StrToUnix, "hive", True), 250 "YEAR": lambda args: exp.Year(this=exp.TsOrDsToDate.from_arg_list(args)), 251 } 252 253 PROPERTY_PARSERS = { 254 **parser.Parser.PROPERTY_PARSERS, # type: ignore 255 "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties( 256 
expressions=self._parse_wrapped_csv(self._parse_property) 257 ), 258 }
Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer
and produces
a parsed syntax tree.
Arguments:
- error_level: the desired error level. Default: ErrorLevel.RAISE
- error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
- index_offset: Index offset for arrays eg ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
- alias_post_tablesample: If the table alias comes after tablesample. Default: False
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
- null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
Inherited Members
260 class Generator(generator.Generator): 261 TYPE_MAPPING = { 262 **generator.Generator.TYPE_MAPPING, # type: ignore 263 exp.DataType.Type.TEXT: "STRING", 264 exp.DataType.Type.DATETIME: "TIMESTAMP", 265 exp.DataType.Type.VARBINARY: "BINARY", 266 } 267 268 TRANSFORMS = { 269 **generator.Generator.TRANSFORMS, # type: ignore 270 **transforms.UNALIAS_GROUP, # type: ignore 271 exp.Property: _property_sql, 272 exp.ApproxDistinct: approx_count_distinct_sql, 273 exp.ArrayAgg: rename_func("COLLECT_LIST"), 274 exp.ArrayConcat: rename_func("CONCAT"), 275 exp.ArraySize: rename_func("SIZE"), 276 exp.ArraySort: _array_sort, 277 exp.With: no_recursive_cte_sql, 278 exp.DateAdd: _add_date_sql, 279 exp.DateDiff: _date_diff_sql, 280 exp.DateStrToDate: rename_func("TO_DATE"), 281 exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.dateint_format}) AS INT)", 282 exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.dateint_format})", 283 exp.FileFormatProperty: lambda self, e: f"STORED AS {e.name.upper()}", 284 exp.If: if_sql, 285 exp.Index: _index_sql, 286 exp.ILike: no_ilike_sql, 287 exp.Join: _unnest_to_explode_sql, 288 exp.JSONExtract: rename_func("GET_JSON_OBJECT"), 289 exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"), 290 exp.Map: var_map_sql, 291 exp.VarMap: var_map_sql, 292 exp.Create: create_with_partitions_sql, 293 exp.Quantile: rename_func("PERCENTILE"), 294 exp.ApproxQuantile: rename_func("PERCENTILE_APPROX"), 295 exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"), 296 exp.RegexpSplit: rename_func("SPLIT"), 297 exp.SafeDivide: no_safe_divide_sql, 298 exp.SchemaCommentProperty: lambda self, e: self.naked_property(e), 299 exp.SetAgg: rename_func("COLLECT_SET"), 300 exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))", 301 exp.StrPosition: strposition_to_locate_sql, 302 exp.StrToDate: _str_to_date, 303 exp.StrToTime: _str_to_time, 304 exp.StrToUnix: 
_str_to_unix, 305 exp.StructExtract: struct_extract_sql, 306 exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}", 307 exp.TimeStrToDate: rename_func("TO_DATE"), 308 exp.TimeStrToTime: timestrtotime_sql, 309 exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"), 310 exp.TimeToStr: _time_to_str, 311 exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"), 312 exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)", 313 exp.TsOrDsAdd: lambda self, e: f"DATE_ADD({self.sql(e, 'this')}, {self.sql(e, 'expression')})", 314 exp.TsOrDsToDate: _to_date_sql, 315 exp.TryCast: no_trycast_sql, 316 exp.UnixToStr: lambda self, e: f"FROM_UNIXTIME({self.format_args(e.this, _time_format(self, e))})", 317 exp.UnixToTime: rename_func("FROM_UNIXTIME"), 318 exp.UnixToTimeStr: rename_func("FROM_UNIXTIME"), 319 exp.PartitionedByProperty: lambda self, e: f"PARTITIONED BY {self.sql(e, 'this')}", 320 exp.RowFormatSerdeProperty: lambda self, e: f"ROW FORMAT SERDE {self.sql(e, 'this')}", 321 exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"), 322 exp.NumberToStr: rename_func("FORMAT_NUMBER"), 323 exp.LastDateOfMonth: rename_func("LAST_DAY"), 324 } 325 326 PROPERTIES_LOCATION = { 327 **generator.Generator.PROPERTIES_LOCATION, # type: ignore 328 exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA_ROOT, 329 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA_ROOT, 330 exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA_ROOT, 331 } 332 333 def with_properties(self, properties): 334 return self.properties( 335 properties, 336 prefix=self.seg("TBLPROPERTIES"), 337 ) 338 339 def datatype_sql(self, expression): 340 if ( 341 expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR) 342 and not expression.expressions 343 ): 344 expression = exp.DataType.build("text") 345 elif expression.this in exp.DataType.TEMPORAL_TYPES: 346 expression = 
exp.DataType.build(expression.this) 347 return super().datatype_sql(expression)
Generator interprets the given syntax tree and produces a SQL string as an output.
Arguments:
- time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the output the target time format
- time_trie (trie): a trie of the time_mapping keys
- pretty (bool): if set to True the returned string will be formatted. Default: False.
- quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
- quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
- identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
- identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
- identify (bool): if set to True all identifiers will be delimited by the corresponding character.
- normalize (bool): if set to True all identifiers will be lowercased
- string_escape (str): specifies a string escape character. Default: '.
- identifier_escape (str): specifies an identifier escape character. Default: ".
- pad (int): determines padding in a formatted string. Default: 2.
- indent (int): determines the size of indentation in a formatted string. Default: 4.
- unnest_column_only (bool): if true unnest table aliases are considered only as column aliases
- normalize_functions (str): normalize function names, "upper", "lower", or None Default: "upper"
- alias_post_tablesample (bool): if the table alias comes after tablesample Default: False
- unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
- max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma (bool): if the comma is leading or trailing in select statements Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
def
datatype_sql(self, expression):
339 def datatype_sql(self, expression): 340 if ( 341 expression.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.NVARCHAR) 342 and not expression.expressions 343 ): 344 expression = exp.DataType.build("text") 345 elif expression.this in exp.DataType.TEMPORAL_TYPES: 346 expression = exp.DataType.build(expression.this) 347 return super().datatype_sql(expression)
Inherited Members
- sqlglot.generator.Generator
- Generator
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- checkcolumnconstraint_sql
- commentcolumnconstraint_sql
- collatecolumnconstraint_sql
- encodecolumnconstraint_sql
- defaultcolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- create_sql
- describe_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- afterjournalproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- pseudotype_sql
- rowformatdelimitedproperty_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- lock_sql
- literal_sql
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- select_sql
- schema_sql
- star_sql
- structkwarg_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- window_spec_sql
- withingroup_sql
- between_sql
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- extract_sql
- trim_sql
- concat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- unique_sql
- if_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- attimezone_sql
- add_sql
- and_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- transaction_sql
- commit_sql
- rollback_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- div_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- is_sql
- like_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- trycast_sql
- use_sql
- binary
- function_fallback_sql
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- userdefinedfunctionkwarg_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql