From 58c11f70074708344e433080e47621091a6dcd84 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Fri, 7 Apr 2023 14:35:01 +0200 Subject: Adding upstream version 11.5.2. Signed-off-by: Daniel Baumann --- docs/sqlglot/dialects/bigquery.html | 602 +++++++++++++++++++----------------- 1 file changed, 312 insertions(+), 290 deletions(-) (limited to 'docs/sqlglot/dialects/bigquery.html') diff --git a/docs/sqlglot/dialects/bigquery.html b/docs/sqlglot/dialects/bigquery.html index 7a0daa5..c1b1dce 100644 --- a/docs/sqlglot/dialects/bigquery.html +++ b/docs/sqlglot/dialects/bigquery.html @@ -3,7 +3,7 @@ - + sqlglot.dialects.bigquery API documentation @@ -313,98 +313,104 @@ 213 ), 214 } 215 -216 class Generator(generator.Generator): -217 TRANSFORMS = { -218 **generator.Generator.TRANSFORMS, # type: ignore -219 **transforms.REMOVE_PRECISION_PARAMETERIZED_TYPES, # type: ignore -220 exp.ArraySize: rename_func("ARRAY_LENGTH"), -221 exp.DateAdd: _date_add_sql("DATE", "ADD"), -222 exp.DateSub: _date_add_sql("DATE", "SUB"), -223 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), -224 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), -225 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", -226 exp.DateStrToDate: datestrtodate_sql, -227 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), -228 exp.GroupConcat: rename_func("STRING_AGG"), -229 exp.ILike: no_ilike_sql, -230 exp.IntDiv: rename_func("DIV"), -231 exp.Max: max_or_greatest, -232 exp.Min: min_or_least, -233 exp.Select: transforms.preprocess( -234 [_unqualify_unnest], transforms.delegate("select_sql") -235 ), -236 exp.StrToTime: lambda self, e: f"PARSE_TIMESTAMP({self.format_time(e)}, {self.sql(e, 'this')})", -237 exp.TimeAdd: _date_add_sql("TIME", "ADD"), -238 exp.TimeSub: _date_add_sql("TIME", "SUB"), -239 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), -240 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), -241 exp.TimeStrToTime: timestrtotime_sql, -242 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), -243 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), -244 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", -245 exp.VariancePop: rename_func("VAR_POP"), -246 exp.Values: _derived_table_values_to_unnest, -247 exp.ReturnsProperty: _returnsproperty_sql, -248 exp.Create: _create_sql, -249 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), -250 exp.VolatilityProperty: lambda self, e: f"DETERMINISTIC" -251 if e.name == "IMMUTABLE" -252 else "NOT DETERMINISTIC", -253 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), -254 } -255 -256 TYPE_MAPPING = { -257 **generator.Generator.TYPE_MAPPING, # type: ignore -258 exp.DataType.Type.BIGINT: "INT64", -259 exp.DataType.Type.BOOLEAN: "BOOL", -260 exp.DataType.Type.CHAR: "STRING", -261 exp.DataType.Type.DECIMAL: "NUMERIC", -262 exp.DataType.Type.DOUBLE: "FLOAT64", -263 exp.DataType.Type.FLOAT: "FLOAT64", -264 exp.DataType.Type.INT: "INT64", -265 exp.DataType.Type.NCHAR: "STRING", -266 exp.DataType.Type.NVARCHAR: "STRING", -267 exp.DataType.Type.SMALLINT: "INT64", -268 exp.DataType.Type.TEXT: "STRING", -269 exp.DataType.Type.TINYINT: "INT64", -270 exp.DataType.Type.VARCHAR: "STRING", -271 } -272 PROPERTIES_LOCATION = { -273 **generator.Generator.PROPERTIES_LOCATION, # type: ignore -274 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, -275 } -276 -277 EXPLICIT_UNION = True -278 LIMIT_FETCH = "LIMIT" -279 -280 def array_sql(self, 
expression: exp.Array) -> str: -281 first_arg = seq_get(expression.expressions, 0) -282 if isinstance(first_arg, exp.Subqueryable): -283 return f"ARRAY{self.wrap(self.sql(first_arg))}" -284 -285 return inline_array_sql(self, expression) -286 -287 def transaction_sql(self, *_) -> str: -288 return "BEGIN TRANSACTION" -289 -290 def commit_sql(self, *_) -> str: -291 return "COMMIT TRANSACTION" +216 LOG_BASE_FIRST = False +217 LOG_DEFAULTS_TO_LN = True +218 +219 class Generator(generator.Generator): +220 TRANSFORMS = { +221 **generator.Generator.TRANSFORMS, # type: ignore +222 **transforms.REMOVE_PRECISION_PARAMETERIZED_TYPES, # type: ignore +223 exp.ArraySize: rename_func("ARRAY_LENGTH"), +224 exp.AtTimeZone: lambda self, e: self.func( +225 "TIMESTAMP", self.func("DATETIME", e.this, e.args.get("zone")) +226 ), +227 exp.DateAdd: _date_add_sql("DATE", "ADD"), +228 exp.DateSub: _date_add_sql("DATE", "SUB"), +229 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), +230 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), +231 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", +232 exp.DateStrToDate: datestrtodate_sql, +233 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), +234 exp.GroupConcat: rename_func("STRING_AGG"), +235 exp.ILike: no_ilike_sql, +236 exp.IntDiv: rename_func("DIV"), +237 exp.Max: max_or_greatest, +238 exp.Min: min_or_least, +239 exp.Select: transforms.preprocess( +240 [_unqualify_unnest], transforms.delegate("select_sql") +241 ), +242 exp.StrToTime: lambda self, e: f"PARSE_TIMESTAMP({self.format_time(e)}, {self.sql(e, 'this')})", +243 exp.TimeAdd: _date_add_sql("TIME", "ADD"), +244 exp.TimeSub: _date_add_sql("TIME", "SUB"), +245 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), +246 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), +247 exp.TimeStrToTime: timestrtotime_sql, +248 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), +249 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), +250 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", +251 exp.VariancePop: rename_func("VAR_POP"), +252 exp.Values: _derived_table_values_to_unnest, +253 exp.ReturnsProperty: _returnsproperty_sql, +254 exp.Create: _create_sql, +255 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), +256 exp.VolatilityProperty: lambda self, e: f"DETERMINISTIC" +257 if e.name == "IMMUTABLE" +258 else "NOT DETERMINISTIC", +259 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), +260 } +261 +262 TYPE_MAPPING = { +263 **generator.Generator.TYPE_MAPPING, # type: ignore +264 exp.DataType.Type.BIGINT: "INT64", +265 exp.DataType.Type.BOOLEAN: "BOOL", +266 exp.DataType.Type.CHAR: "STRING", +267 exp.DataType.Type.DECIMAL: "NUMERIC", +268 exp.DataType.Type.DOUBLE: "FLOAT64", +269 exp.DataType.Type.FLOAT: "FLOAT64", +270 exp.DataType.Type.INT: "INT64", +271 exp.DataType.Type.NCHAR: "STRING", +272 exp.DataType.Type.NVARCHAR: "STRING", +273 exp.DataType.Type.SMALLINT: "INT64", +274 exp.DataType.Type.TEXT: "STRING", +275 exp.DataType.Type.TINYINT: "INT64", +276 exp.DataType.Type.VARCHAR: "STRING", +277 } +278 PROPERTIES_LOCATION = { +279 **generator.Generator.PROPERTIES_LOCATION, # type: ignore +280 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, +281 } +282 +283 EXPLICIT_UNION = True +284 LIMIT_FETCH = "LIMIT" +285 +286 def array_sql(self, expression: exp.Array) -> str: +287 first_arg = seq_get(expression.expressions, 0) +288 if isinstance(first_arg, 
exp.Subqueryable): +289 return f"ARRAY{self.wrap(self.sql(first_arg))}" +290 +291 return inline_array_sql(self, expression) 292 -293 def rollback_sql(self, *_) -> str: -294 return "ROLLBACK TRANSACTION" +293 def transaction_sql(self, *_) -> str: +294 return "BEGIN TRANSACTION" 295 -296 def in_unnest_op(self, expression: exp.Unnest) -> str: -297 return self.sql(expression) +296 def commit_sql(self, *_) -> str: +297 return "COMMIT TRANSACTION" 298 -299 def except_op(self, expression: exp.Except) -> str: -300 if not expression.args.get("distinct", False): -301 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") -302 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" -303 -304 def intersect_op(self, expression: exp.Intersect) -> str: -305 if not expression.args.get("distinct", False): -306 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") -307 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +299 def rollback_sql(self, *_) -> str: +300 return "ROLLBACK TRANSACTION" +301 +302 def in_unnest_op(self, expression: exp.Unnest) -> str: +303 return self.sql(expression) +304 +305 def except_op(self, expression: exp.Except) -> str: +306 if not expression.args.get("distinct", False): +307 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") +308 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +309 +310 def intersect_op(self, expression: exp.Intersect) -> str: +311 if not expression.args.get("distinct", False): +312 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") +313 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" @@ -517,98 +523,104 @@ 214 ), 215 } 216 -217 class Generator(generator.Generator): -218 TRANSFORMS = { -219 **generator.Generator.TRANSFORMS, # type: ignore -220 **transforms.REMOVE_PRECISION_PARAMETERIZED_TYPES, # type: ignore -221 exp.ArraySize: rename_func("ARRAY_LENGTH"), -222 exp.DateAdd: _date_add_sql("DATE", "ADD"), -223 exp.DateSub: _date_add_sql("DATE", "SUB"), -224 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), -225 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), -226 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", -227 exp.DateStrToDate: datestrtodate_sql, -228 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), -229 exp.GroupConcat: rename_func("STRING_AGG"), -230 exp.ILike: no_ilike_sql, -231 exp.IntDiv: rename_func("DIV"), -232 exp.Max: max_or_greatest, -233 exp.Min: min_or_least, -234 exp.Select: transforms.preprocess( -235 [_unqualify_unnest], transforms.delegate("select_sql") -236 ), -237 exp.StrToTime: lambda self, e: f"PARSE_TIMESTAMP({self.format_time(e)}, {self.sql(e, 'this')})", -238 exp.TimeAdd: _date_add_sql("TIME", "ADD"), -239 exp.TimeSub: _date_add_sql("TIME", "SUB"), -240 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), -241 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), -242 exp.TimeStrToTime: timestrtotime_sql, -243 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), -244 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), -245 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", -246 exp.VariancePop: rename_func("VAR_POP"), -247 exp.Values: _derived_table_values_to_unnest, -248 exp.ReturnsProperty: _returnsproperty_sql, -249 exp.Create: _create_sql, -250 exp.Trim: lambda self, e: self.func(f"TRIM", 
e.this, e.expression), -251 exp.VolatilityProperty: lambda self, e: f"DETERMINISTIC" -252 if e.name == "IMMUTABLE" -253 else "NOT DETERMINISTIC", -254 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), -255 } -256 -257 TYPE_MAPPING = { -258 **generator.Generator.TYPE_MAPPING, # type: ignore -259 exp.DataType.Type.BIGINT: "INT64", -260 exp.DataType.Type.BOOLEAN: "BOOL", -261 exp.DataType.Type.CHAR: "STRING", -262 exp.DataType.Type.DECIMAL: "NUMERIC", -263 exp.DataType.Type.DOUBLE: "FLOAT64", -264 exp.DataType.Type.FLOAT: "FLOAT64", -265 exp.DataType.Type.INT: "INT64", -266 exp.DataType.Type.NCHAR: "STRING", -267 exp.DataType.Type.NVARCHAR: "STRING", -268 exp.DataType.Type.SMALLINT: "INT64", -269 exp.DataType.Type.TEXT: "STRING", -270 exp.DataType.Type.TINYINT: "INT64", -271 exp.DataType.Type.VARCHAR: "STRING", -272 } -273 PROPERTIES_LOCATION = { -274 **generator.Generator.PROPERTIES_LOCATION, # type: ignore -275 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, -276 } -277 -278 EXPLICIT_UNION = True -279 LIMIT_FETCH = "LIMIT" -280 -281 def array_sql(self, expression: exp.Array) -> str: -282 first_arg = seq_get(expression.expressions, 0) -283 if isinstance(first_arg, exp.Subqueryable): -284 return f"ARRAY{self.wrap(self.sql(first_arg))}" -285 -286 return inline_array_sql(self, expression) -287 -288 def transaction_sql(self, *_) -> str: -289 return "BEGIN TRANSACTION" -290 -291 def commit_sql(self, *_) -> str: -292 return "COMMIT TRANSACTION" +217 LOG_BASE_FIRST = False +218 LOG_DEFAULTS_TO_LN = True +219 +220 class Generator(generator.Generator): +221 TRANSFORMS = { +222 **generator.Generator.TRANSFORMS, # type: ignore +223 **transforms.REMOVE_PRECISION_PARAMETERIZED_TYPES, # type: ignore +224 exp.ArraySize: rename_func("ARRAY_LENGTH"), +225 exp.AtTimeZone: lambda self, e: self.func( +226 "TIMESTAMP", self.func("DATETIME", e.this, e.args.get("zone")) +227 ), +228 exp.DateAdd: _date_add_sql("DATE", "ADD"), +229 exp.DateSub: _date_add_sql("DATE", "SUB"), +230 exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"), +231 exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"), +232 exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})", +233 exp.DateStrToDate: datestrtodate_sql, +234 exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")), +235 exp.GroupConcat: rename_func("STRING_AGG"), +236 exp.ILike: no_ilike_sql, +237 exp.IntDiv: rename_func("DIV"), +238 exp.Max: max_or_greatest, +239 exp.Min: min_or_least, +240 exp.Select: transforms.preprocess( +241 [_unqualify_unnest], transforms.delegate("select_sql") +242 ), +243 exp.StrToTime: lambda self, e: f"PARSE_TIMESTAMP({self.format_time(e)}, {self.sql(e, 'this')})", +244 exp.TimeAdd: _date_add_sql("TIME", "ADD"), +245 exp.TimeSub: _date_add_sql("TIME", "SUB"), +246 exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"), +247 exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"), +248 exp.TimeStrToTime: timestrtotime_sql, +249 exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"), +250 exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"), +251 exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}", +252 exp.VariancePop: rename_func("VAR_POP"), +253 exp.Values: _derived_table_values_to_unnest, +254 exp.ReturnsProperty: _returnsproperty_sql, +255 exp.Create: _create_sql, +256 exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression), +257 exp.VolatilityProperty: lambda self, e: f"DETERMINISTIC" +258 if e.name == "IMMUTABLE" +259 
else "NOT DETERMINISTIC", +260 exp.RegexpLike: rename_func("REGEXP_CONTAINS"), +261 } +262 +263 TYPE_MAPPING = { +264 **generator.Generator.TYPE_MAPPING, # type: ignore +265 exp.DataType.Type.BIGINT: "INT64", +266 exp.DataType.Type.BOOLEAN: "BOOL", +267 exp.DataType.Type.CHAR: "STRING", +268 exp.DataType.Type.DECIMAL: "NUMERIC", +269 exp.DataType.Type.DOUBLE: "FLOAT64", +270 exp.DataType.Type.FLOAT: "FLOAT64", +271 exp.DataType.Type.INT: "INT64", +272 exp.DataType.Type.NCHAR: "STRING", +273 exp.DataType.Type.NVARCHAR: "STRING", +274 exp.DataType.Type.SMALLINT: "INT64", +275 exp.DataType.Type.TEXT: "STRING", +276 exp.DataType.Type.TINYINT: "INT64", +277 exp.DataType.Type.VARCHAR: "STRING", +278 } +279 PROPERTIES_LOCATION = { +280 **generator.Generator.PROPERTIES_LOCATION, # type: ignore +281 exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA, +282 } +283 +284 EXPLICIT_UNION = True +285 LIMIT_FETCH = "LIMIT" +286 +287 def array_sql(self, expression: exp.Array) -> str: +288 first_arg = seq_get(expression.expressions, 0) +289 if isinstance(first_arg, exp.Subqueryable): +290 return f"ARRAY{self.wrap(self.sql(first_arg))}" +291 +292 return inline_array_sql(self, expression) 293 -294 def rollback_sql(self, *_) -> str: -295 return "ROLLBACK TRANSACTION" +294 def transaction_sql(self, *_) -> str: +295 return "BEGIN TRANSACTION" 296 -297 def in_unnest_op(self, expression: exp.Unnest) -> str: -298 return self.sql(expression) +297 def commit_sql(self, *_) -> str: +298 return "COMMIT TRANSACTION" 299 -300 def except_op(self, expression: exp.Except) -> str: -301 if not expression.args.get("distinct", False): -302 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") -303 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" -304 -305 def intersect_op(self, expression: exp.Intersect) -> str: -306 if not expression.args.get("distinct", False): -307 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") -308 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +300 def rollback_sql(self, *_) -> str: +301 return "ROLLBACK TRANSACTION" +302 +303 def in_unnest_op(self, expression: exp.Unnest) -> str: +304 return self.sql(expression) +305 +306 def except_op(self, expression: exp.Except) -> str: +307 if not expression.args.get("distinct", False): +308 self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery") +309 return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" +310 +311 def intersect_op(self, expression: exp.Intersect) -> str: +312 if not expression.args.get("distinct", False): +313 self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery") +314 return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}" @@ -752,6 +764,9 @@ 213 exp.VolatilityProperty, this=exp.Literal.string("VOLATILE") 214 ), 215 } +216 +217 LOG_BASE_FIRST = False +218 LOG_DEFAULTS_TO_LN = True @@ -808,98 +823,101 @@ Default: "nulls_are_small" -
217    class Generator(generator.Generator):
-218        TRANSFORMS = {
-219            **generator.Generator.TRANSFORMS,  # type: ignore
-220            **transforms.REMOVE_PRECISION_PARAMETERIZED_TYPES,  # type: ignore
-221            exp.ArraySize: rename_func("ARRAY_LENGTH"),
-222            exp.DateAdd: _date_add_sql("DATE", "ADD"),
-223            exp.DateSub: _date_add_sql("DATE", "SUB"),
-224            exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
-225            exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
-226            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
-227            exp.DateStrToDate: datestrtodate_sql,
-228            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
-229            exp.GroupConcat: rename_func("STRING_AGG"),
-230            exp.ILike: no_ilike_sql,
-231            exp.IntDiv: rename_func("DIV"),
-232            exp.Max: max_or_greatest,
-233            exp.Min: min_or_least,
-234            exp.Select: transforms.preprocess(
-235                [_unqualify_unnest], transforms.delegate("select_sql")
-236            ),
-237            exp.StrToTime: lambda self, e: f"PARSE_TIMESTAMP({self.format_time(e)}, {self.sql(e, 'this')})",
-238            exp.TimeAdd: _date_add_sql("TIME", "ADD"),
-239            exp.TimeSub: _date_add_sql("TIME", "SUB"),
-240            exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
-241            exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
-242            exp.TimeStrToTime: timestrtotime_sql,
-243            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
-244            exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"),
-245            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
-246            exp.VariancePop: rename_func("VAR_POP"),
-247            exp.Values: _derived_table_values_to_unnest,
-248            exp.ReturnsProperty: _returnsproperty_sql,
-249            exp.Create: _create_sql,
-250            exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
-251            exp.VolatilityProperty: lambda self, e: f"DETERMINISTIC"
-252            if e.name == "IMMUTABLE"
-253            else "NOT DETERMINISTIC",
-254            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
-255        }
-256
-257        TYPE_MAPPING = {
-258            **generator.Generator.TYPE_MAPPING,  # type: ignore
-259            exp.DataType.Type.BIGINT: "INT64",
-260            exp.DataType.Type.BOOLEAN: "BOOL",
-261            exp.DataType.Type.CHAR: "STRING",
-262            exp.DataType.Type.DECIMAL: "NUMERIC",
-263            exp.DataType.Type.DOUBLE: "FLOAT64",
-264            exp.DataType.Type.FLOAT: "FLOAT64",
-265            exp.DataType.Type.INT: "INT64",
-266            exp.DataType.Type.NCHAR: "STRING",
-267            exp.DataType.Type.NVARCHAR: "STRING",
-268            exp.DataType.Type.SMALLINT: "INT64",
-269            exp.DataType.Type.TEXT: "STRING",
-270            exp.DataType.Type.TINYINT: "INT64",
-271            exp.DataType.Type.VARCHAR: "STRING",
-272        }
-273        PROPERTIES_LOCATION = {
-274            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
-275            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
-276        }
-277
-278        EXPLICIT_UNION = True
-279        LIMIT_FETCH = "LIMIT"
-280
-281        def array_sql(self, expression: exp.Array) -> str:
-282            first_arg = seq_get(expression.expressions, 0)
-283            if isinstance(first_arg, exp.Subqueryable):
-284                return f"ARRAY{self.wrap(self.sql(first_arg))}"
-285
-286            return inline_array_sql(self, expression)
-287
-288        def transaction_sql(self, *_) -> str:
-289            return "BEGIN TRANSACTION"
-290
-291        def commit_sql(self, *_) -> str:
-292            return "COMMIT TRANSACTION"
+220    class Generator(generator.Generator):
+221        TRANSFORMS = {
+222            **generator.Generator.TRANSFORMS,  # type: ignore
+223            **transforms.REMOVE_PRECISION_PARAMETERIZED_TYPES,  # type: ignore
+224            exp.ArraySize: rename_func("ARRAY_LENGTH"),
+225            exp.AtTimeZone: lambda self, e: self.func(
+226                "TIMESTAMP", self.func("DATETIME", e.this, e.args.get("zone"))
+227            ),
+228            exp.DateAdd: _date_add_sql("DATE", "ADD"),
+229            exp.DateSub: _date_add_sql("DATE", "SUB"),
+230            exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
+231            exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
+232            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
+233            exp.DateStrToDate: datestrtodate_sql,
+234            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
+235            exp.GroupConcat: rename_func("STRING_AGG"),
+236            exp.ILike: no_ilike_sql,
+237            exp.IntDiv: rename_func("DIV"),
+238            exp.Max: max_or_greatest,
+239            exp.Min: min_or_least,
+240            exp.Select: transforms.preprocess(
+241                [_unqualify_unnest], transforms.delegate("select_sql")
+242            ),
+243            exp.StrToTime: lambda self, e: f"PARSE_TIMESTAMP({self.format_time(e)}, {self.sql(e, 'this')})",
+244            exp.TimeAdd: _date_add_sql("TIME", "ADD"),
+245            exp.TimeSub: _date_add_sql("TIME", "SUB"),
+246            exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
+247            exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
+248            exp.TimeStrToTime: timestrtotime_sql,
+249            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
+250            exp.TsOrDsAdd: _date_add_sql("DATE", "ADD"),
+251            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
+252            exp.VariancePop: rename_func("VAR_POP"),
+253            exp.Values: _derived_table_values_to_unnest,
+254            exp.ReturnsProperty: _returnsproperty_sql,
+255            exp.Create: _create_sql,
+256            exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
+257            exp.VolatilityProperty: lambda self, e: f"DETERMINISTIC"
+258            if e.name == "IMMUTABLE"
+259            else "NOT DETERMINISTIC",
+260            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
+261        }
+262
+263        TYPE_MAPPING = {
+264            **generator.Generator.TYPE_MAPPING,  # type: ignore
+265            exp.DataType.Type.BIGINT: "INT64",
+266            exp.DataType.Type.BOOLEAN: "BOOL",
+267            exp.DataType.Type.CHAR: "STRING",
+268            exp.DataType.Type.DECIMAL: "NUMERIC",
+269            exp.DataType.Type.DOUBLE: "FLOAT64",
+270            exp.DataType.Type.FLOAT: "FLOAT64",
+271            exp.DataType.Type.INT: "INT64",
+272            exp.DataType.Type.NCHAR: "STRING",
+273            exp.DataType.Type.NVARCHAR: "STRING",
+274            exp.DataType.Type.SMALLINT: "INT64",
+275            exp.DataType.Type.TEXT: "STRING",
+276            exp.DataType.Type.TINYINT: "INT64",
+277            exp.DataType.Type.VARCHAR: "STRING",
+278        }
+279        PROPERTIES_LOCATION = {
+280            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
+281            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
+282        }
+283
+284        EXPLICIT_UNION = True
+285        LIMIT_FETCH = "LIMIT"
+286
+287        def array_sql(self, expression: exp.Array) -> str:
+288            first_arg = seq_get(expression.expressions, 0)
+289            if isinstance(first_arg, exp.Subqueryable):
+290                return f"ARRAY{self.wrap(self.sql(first_arg))}"
+291
+292            return inline_array_sql(self, expression)
 293
-294        def rollback_sql(self, *_) -> str:
-295            return "ROLLBACK TRANSACTION"
+294        def transaction_sql(self, *_) -> str:
+295            return "BEGIN TRANSACTION"
 296
-297        def in_unnest_op(self, expression: exp.Unnest) -> str:
-298            return self.sql(expression)
+297        def commit_sql(self, *_) -> str:
+298            return "COMMIT TRANSACTION"
 299
-300        def except_op(self, expression: exp.Except) -> str:
-301            if not expression.args.get("distinct", False):
-302                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
-303            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
-304
-305        def intersect_op(self, expression: exp.Intersect) -> str:
-306            if not expression.args.get("distinct", False):
-307                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
-308            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
+300        def rollback_sql(self, *_) -> str:
+301            return "ROLLBACK TRANSACTION"
+302
+303        def in_unnest_op(self, expression: exp.Unnest) -> str:
+304            return self.sql(expression)
+305
+306        def except_op(self, expression: exp.Except) -> str:
+307            if not expression.args.get("distinct", False):
+308                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
+309            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
+310
+311        def intersect_op(self, expression: exp.Intersect) -> str:
+312            if not expression.args.get("distinct", False):
+313                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
+314            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
 
@@ -958,12 +976,12 @@ Default: True
-281        def array_sql(self, expression: exp.Array) -> str:
-282            first_arg = seq_get(expression.expressions, 0)
-283            if isinstance(first_arg, exp.Subqueryable):
-284                return f"ARRAY{self.wrap(self.sql(first_arg))}"
-285
-286            return inline_array_sql(self, expression)
+287        def array_sql(self, expression: exp.Array) -> str:
+288            first_arg = seq_get(expression.expressions, 0)
+289            if isinstance(first_arg, exp.Subqueryable):
+290                return f"ARRAY{self.wrap(self.sql(first_arg))}"
+291
+292            return inline_array_sql(self, expression)
 
@@ -981,8 +999,8 @@ Default: True
-288        def transaction_sql(self, *_) -> str:
-289            return "BEGIN TRANSACTION"
+294        def transaction_sql(self, *_) -> str:
+295            return "BEGIN TRANSACTION"
 
@@ -1000,8 +1018,8 @@ Default: True
-291        def commit_sql(self, *_) -> str:
-292            return "COMMIT TRANSACTION"
+297        def commit_sql(self, *_) -> str:
+298            return "COMMIT TRANSACTION"
 
@@ -1019,8 +1037,8 @@ Default: True
-294        def rollback_sql(self, *_) -> str:
-295            return "ROLLBACK TRANSACTION"
+300        def rollback_sql(self, *_) -> str:
+301            return "ROLLBACK TRANSACTION"
 
@@ -1038,8 +1056,8 @@ Default: True
-297        def in_unnest_op(self, expression: exp.Unnest) -> str:
-298            return self.sql(expression)
+303        def in_unnest_op(self, expression: exp.Unnest) -> str:
+304            return self.sql(expression)
 
@@ -1057,10 +1075,10 @@ Default: True
-300        def except_op(self, expression: exp.Except) -> str:
-301            if not expression.args.get("distinct", False):
-302                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
-303            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
+306        def except_op(self, expression: exp.Except) -> str:
+307            if not expression.args.get("distinct", False):
+308                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
+309            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
 
@@ -1078,10 +1096,10 @@ Default: True
-305        def intersect_op(self, expression: exp.Intersect) -> str:
-306            if not expression.args.get("distinct", False):
-307                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
-308            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
+311        def intersect_op(self, expression: exp.Intersect) -> str:
+312            if not expression.args.get("distinct", False):
+313                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
+314            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
 
@@ -1108,6 +1126,7 @@ Default: True
cache_sql
characterset_sql
column_sql
+columnposition_sql
columndef_sql
columnconstraint_sql
autoincrementcolumnconstraint_sql
@@ -1223,6 +1242,7 @@ Default: True
primarykey_sql
unique_sql
if_sql
+matchagainst_sql
jsonkeyvalue_sql
jsonobject_sql
in_sql
@@ -1270,8 +1290,10 @@ Default: True
gt_sql
gte_sql
ilike_sql
+ilikeany_sql
is_sql
like_sql
+likeany_sql
similarto_sql
lt_sql
lte_sql
-- cgit v1.2.3
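
A minimal usage sketch, not part of the patch above: the 11.5.2 hunks add LOG_BASE_FIRST = False, LOG_DEFAULTS_TO_LN = True, and an exp.AtTimeZone transform to the BigQuery dialect. The snippet below illustrates how those settings would surface through sqlglot's public transpile() API; the table and column names are placeholders, and the exact output strings depend on the target dialect and sqlglot version, so treat it as illustrative only.

import sqlglot

# LOG_DEFAULTS_TO_LN = True: a single-argument LOG(x) read as BigQuery SQL is
# interpreted as the natural logarithm rather than log base 10.
print(sqlglot.transpile("SELECT LOG(x) FROM t", read="bigquery", write="duckdb")[0])

# LOG_BASE_FIRST = False: BigQuery's LOG(value, base) keeps the base as the
# second argument when parsed, unlike dialects where the base comes first.
print(sqlglot.transpile("SELECT LOG(x, 2) FROM t", read="bigquery", write="postgres")[0])

# exp.AtTimeZone: generating BigQuery SQL rewrites AT TIME ZONE into
# TIMESTAMP(DATETIME(<expr>, <zone>)), per the transform added in this version.
print(sqlglot.transpile("SELECT ts AT TIME ZONE 'UTC' FROM t", read="postgres", write="bigquery")[0])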