author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
commit    26a029d407be480d791972afb5975cf62c9360a6 (upstream/124.0.1)
tree      f435a8308119effd964b339f76abb83a57c29483 /third_party/rust/time-macros
parent    Initial commit.

Adding upstream version 124.0.1.

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/rust/time-macros')
-rw-r--r--  third_party/rust/time-macros/.cargo-checksum.json | 1
-rw-r--r--  third_party/rust/time-macros/Cargo.toml | 45
-rw-r--r--  third_party/rust/time-macros/LICENSE-Apache | 202
-rw-r--r--  third_party/rust/time-macros/LICENSE-MIT | 19
-rw-r--r--  third_party/rust/time-macros/src/date.rs | 137
-rw-r--r--  third_party/rust/time-macros/src/datetime.rs | 57
-rw-r--r--  third_party/rust/time-macros/src/error.rs | 123
-rw-r--r--  third_party/rust/time-macros/src/format_description/ast.rs | 253
-rw-r--r--  third_party/rust/time-macros/src/format_description/format_item.rs | 442
-rw-r--r--  third_party/rust/time-macros/src/format_description/lexer.rs | 248
-rw-r--r--  third_party/rust/time-macros/src/format_description/mod.rs | 171
-rw-r--r--  third_party/rust/time-macros/src/format_description/public/component.rs | 49
-rw-r--r--  third_party/rust/time-macros/src/format_description/public/mod.rs | 54
-rw-r--r--  third_party/rust/time-macros/src/format_description/public/modifier.rs | 247
-rw-r--r--  third_party/rust/time-macros/src/helpers/mod.rs | 127
-rw-r--r--  third_party/rust/time-macros/src/helpers/string.rs | 188
-rw-r--r--  third_party/rust/time-macros/src/lib.rs | 277
-rw-r--r--  third_party/rust/time-macros/src/offset.rs | 96
-rw-r--r--  third_party/rust/time-macros/src/quote.rs | 139
-rw-r--r--  third_party/rust/time-macros/src/serde_format_description.rs | 172
-rw-r--r--  third_party/rust/time-macros/src/time.rs | 119
-rw-r--r--  third_party/rust/time-macros/src/to_tokens.rs | 78
22 files changed, 3244 insertions, 0 deletions
diff --git a/third_party/rust/time-macros/.cargo-checksum.json b/third_party/rust/time-macros/.cargo-checksum.json
new file mode 100644
index 0000000000..6f3847898a
--- /dev/null
+++ b/third_party/rust/time-macros/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"97dbc36d7e8c8658e151c1cfe57397a116135a0d0efc97aacd339142da5d1c96","LICENSE-Apache":"b8929fea28678da67251fb2daf9438f67503814211051861612441806d8edb05","LICENSE-MIT":"04620bf27e4a643dd47bf27652320c205acdb776c1f9f24bb8c3bfaba10804c5","src/date.rs":"ffcd3d0998ec67abb43a3f8eccc6104172f5061b855312b89d53bb82fece2460","src/datetime.rs":"5c7f6e07dc2f0dcfcd86216664df53bc008dbc86f346df57a9ff57f52fe43bc6","src/error.rs":"b3dea92631092068dd73e57e1cbf548f7ae85762826dcdea7fd6454bf357a50a","src/format_description/ast.rs":"8ba87e3249766b89c42b040f623d3134aeec46b78208fdfee825ed0eeeb4591a","src/format_description/format_item.rs":"03ff10699383e5ad08fe690199d45288f13363337abbc811a70b03a8b1703ab1","src/format_description/lexer.rs":"e7db7b6431f00c81b8d15a162088a1622ecd65bfb58d4e642c3c93a8dd5ae4ad","src/format_description/mod.rs":"f48c0ff590bc74529f06a98f60a6af5814bc30d1456bf0b81ac334c0b3f41bba","src/format_description/public/component.rs":"e2c2c8a189e2eb9f9354ff1d9d8edeafa34303e91dc58457df373e7e61c38b78","src/format_description/public/mod.rs":"5260592b310ea9e30808d30c92ea94c7bf1bdb171250a1342279e927d2528d73","src/format_description/public/modifier.rs":"37661e1f7cd9fd11a82f5a1ce6d5971686afa91e6feebc7b9d32df297e8b667f","src/helpers/mod.rs":"a8f8ed59a72b239d7a530357d212873f2e75ea924ec19a6d5d6e24a2baa8100c","src/helpers/string.rs":"3af2d0c701ca978c705922a272e76506dbdf0f376d44ed9ae7283086c67852ba","src/lib.rs":"200678edc14d5920abc0516717b8e010667e58da8bdc65c1cb583fdde0353089","src/offset.rs":"4b9c001a954c1f121a572f5675073f7a4e46d00cc9eb77736bfea2df94ffd05b","src/quote.rs":"634a12b95236e4ab2b8ab70a1a4a2629113c3ce3cf6defefc7ffeb81544c1d89","src/serde_format_description.rs":"db5fb2dc94e01c5114cab3484e68334516d53c4642f31dae0d66f1183253a17c","src/time.rs":"d762e8f22f749d9546d5d2a78b8a0380510be27b4cd3fed375695d7982d8396e","src/to_tokens.rs":"6636ea489c7484bad9b39f72d6956a04c95ce82d8462b12079cc03db778fd263"},"package":"96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4"} \ No newline at end of file
diff --git a/third_party/rust/time-macros/Cargo.toml b/third_party/rust/time-macros/Cargo.toml
new file mode 100644
index 0000000000..1c86d657d5
--- /dev/null
+++ b/third_party/rust/time-macros/Cargo.toml
@@ -0,0 +1,45 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.65.0"
+name = "time-macros"
+version = "0.2.10"
+authors = [
+ "Jacob Pratt <open-source@jhpratt.dev>",
+ "Time contributors",
+]
+description = """
+ Procedural macros for the time crate.
+ This crate is an implementation detail and should not be relied upon directly.
+"""
+keywords = [
+ "date",
+ "time",
+ "calendar",
+ "duration",
+]
+categories = ["date-and-time"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/time-rs/time"
+
+[lib]
+proc-macro = true
+
+[dependencies.time-core]
+version = "=0.1.1"
+
+[features]
+formatting = []
+large-dates = []
+parsing = []
+serde = []
diff --git a/third_party/rust/time-macros/LICENSE-Apache b/third_party/rust/time-macros/LICENSE-Apache
new file mode 100644
index 0000000000..7646f21e37
--- /dev/null
+++ b/third_party/rust/time-macros/LICENSE-Apache
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2022 Jacob Pratt et al.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/third_party/rust/time-macros/LICENSE-MIT b/third_party/rust/time-macros/LICENSE-MIT
new file mode 100644
index 0000000000..a11a755732
--- /dev/null
+++ b/third_party/rust/time-macros/LICENSE-MIT
@@ -0,0 +1,19 @@
+Copyright (c) 2022 Jacob Pratt et al.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/rust/time-macros/src/date.rs b/third_party/rust/time-macros/src/date.rs
new file mode 100644
index 0000000000..574ef8ce6f
--- /dev/null
+++ b/third_party/rust/time-macros/src/date.rs
@@ -0,0 +1,137 @@
+use std::iter::Peekable;
+
+use proc_macro::{token_stream, TokenTree};
+use time_core::util::{days_in_year, weeks_in_year};
+
+use crate::helpers::{
+ consume_any_ident, consume_number, consume_punct, days_in_year_month, ymd_to_yo, ywd_to_yo,
+};
+use crate::to_tokens::ToTokenTree;
+use crate::Error;
+
+#[cfg(feature = "large-dates")]
+const MAX_YEAR: i32 = 999_999;
+#[cfg(not(feature = "large-dates"))]
+const MAX_YEAR: i32 = 9_999;
+
+pub(crate) struct Date {
+ pub(crate) year: i32,
+ pub(crate) ordinal: u16,
+}
+
+pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<Date, Error> {
+ let (year_sign_span, year_sign, explicit_sign) = if let Ok(span) = consume_punct('-', chars) {
+ (Some(span), -1, true)
+ } else if let Ok(span) = consume_punct('+', chars) {
+ (Some(span), 1, true)
+ } else {
+ (None, 1, false)
+ };
+ let (year_span, mut year) = consume_number::<i32>("year", chars)?;
+ year *= year_sign;
+ if year.abs() > MAX_YEAR {
+ return Err(Error::InvalidComponent {
+ name: "year",
+ value: year.to_string(),
+ span_start: Some(year_sign_span.unwrap_or(year_span)),
+ span_end: Some(year_span),
+ });
+ }
+ if !explicit_sign && year.abs() >= 10_000 {
+ return Err(Error::Custom {
+ message: "years with more than four digits must have an explicit sign".into(),
+ span_start: Some(year_sign_span.unwrap_or(year_span)),
+ span_end: Some(year_span),
+ });
+ }
+
+ consume_punct('-', chars)?;
+
+ // year-week-day
+ if let Ok(w_span) = consume_any_ident(&["W"], chars) {
+ let (week_span, week) = consume_number::<u8>("week", chars)?;
+ consume_punct('-', chars)?;
+ let (day_span, day) = consume_number::<u8>("day", chars)?;
+
+ if week > weeks_in_year(year) {
+ return Err(Error::InvalidComponent {
+ name: "week",
+ value: week.to_string(),
+ span_start: Some(w_span),
+ span_end: Some(week_span),
+ });
+ }
+ if day == 0 || day > 7 {
+ return Err(Error::InvalidComponent {
+ name: "day",
+ value: day.to_string(),
+ span_start: Some(day_span),
+ span_end: Some(day_span),
+ });
+ }
+
+ let (year, ordinal) = ywd_to_yo(year, week, day);
+
+ return Ok(Date { year, ordinal });
+ }
+
+ // We don't yet know whether it's year-month-day or year-ordinal.
+ let (month_or_ordinal_span, month_or_ordinal) =
+ consume_number::<u16>("month or ordinal", chars)?;
+
+ // year-month-day
+ #[allow(clippy::branches_sharing_code)] // clarity
+ if consume_punct('-', chars).is_ok() {
+ let (month_span, month) = (month_or_ordinal_span, month_or_ordinal);
+ let (day_span, day) = consume_number::<u8>("day", chars)?;
+
+ if month == 0 || month > 12 {
+ return Err(Error::InvalidComponent {
+ name: "month",
+ value: month.to_string(),
+ span_start: Some(month_span),
+ span_end: Some(month_span),
+ });
+ }
+ let month = month as _;
+ if day == 0 || day > days_in_year_month(year, month) {
+ return Err(Error::InvalidComponent {
+ name: "day",
+ value: day.to_string(),
+ span_start: Some(day_span),
+ span_end: Some(day_span),
+ });
+ }
+
+ let (year, ordinal) = ymd_to_yo(year, month, day);
+
+ Ok(Date { year, ordinal })
+ }
+ // year-ordinal
+ else {
+ let (ordinal_span, ordinal) = (month_or_ordinal_span, month_or_ordinal);
+
+ if ordinal == 0 || ordinal > days_in_year(year) {
+ return Err(Error::InvalidComponent {
+ name: "ordinal",
+ value: ordinal.to_string(),
+ span_start: Some(ordinal_span),
+ span_end: Some(ordinal_span),
+ });
+ }
+
+ Ok(Date { year, ordinal })
+ }
+}
+
+impl ToTokenTree for Date {
+ fn into_token_tree(self) -> TokenTree {
+ quote_group! {{
+ const DATE: ::time::Date = ::time::Date::__from_ordinal_date_unchecked(
+ #(self.year),
+ #(self.ordinal),
+ );
+ DATE
+ }}
+ }
+}
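
For orientation, a minimal usage sketch of the `date!` macro this file implements (not part of the diff; it assumes the `time` crate with its `macros` feature enabled). The accepted forms mirror the parser above, and the expansion is the `const DATE` block emitted by `into_token_tree`:

    use time::macros::date;

    fn main() {
        // Each literal is validated at compile time by `parse` above and
        // expands to `::time::Date::__from_ordinal_date_unchecked(year, ordinal)`.
        let ymd = date!(2024-04-19); // year-month-day
        let yo = date!(2024-110);    // year-ordinal (day 110 of 2024)
        assert_eq!(ymd, yo);
        // The year-week-day form is also accepted; `W` must be its own token,
        // e.g. `date!(2024-W 16-5)`.
    }
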
diff --git a/third_party/rust/time-macros/src/datetime.rs b/third_party/rust/time-macros/src/datetime.rs
new file mode 100644
index 0000000000..2d41e9a532
--- /dev/null
+++ b/third_party/rust/time-macros/src/datetime.rs
@@ -0,0 +1,57 @@
+use std::iter::Peekable;
+
+use proc_macro::{token_stream, Ident, Span, TokenTree};
+
+use crate::date::Date;
+use crate::error::Error;
+use crate::offset::Offset;
+use crate::time::Time;
+use crate::to_tokens::ToTokenTree;
+use crate::{date, offset, time};
+
+pub(crate) struct DateTime {
+ date: Date,
+ time: Time,
+ offset: Option<Offset>,
+}
+
+pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<DateTime, Error> {
+ let date = date::parse(chars)?;
+ let time = time::parse(chars)?;
+ let offset = match offset::parse(chars) {
+ Ok(offset) => Some(offset),
+ Err(Error::UnexpectedEndOfInput | Error::MissingComponent { name: "sign", .. }) => None,
+ Err(err) => return Err(err),
+ };
+
+ if let Some(token) = chars.peek() {
+ return Err(Error::UnexpectedToken {
+ tree: token.clone(),
+ });
+ }
+
+ Ok(DateTime { date, time, offset })
+}
+
+impl ToTokenTree for DateTime {
+ fn into_token_tree(self) -> TokenTree {
+ let (type_name, maybe_offset) = match self.offset {
+ Some(offset) => (
+ Ident::new("OffsetDateTime", Span::mixed_site()),
+ quote!(.assume_offset(#(offset))),
+ ),
+ None => (
+ Ident::new("PrimitiveDateTime", Span::mixed_site()),
+ quote!(),
+ ),
+ };
+
+ quote_group! {{
+ const DATE_TIME: ::time::#(type_name) = ::time::PrimitiveDateTime::new(
+ #(self.date),
+ #(self.time),
+ ) #S(maybe_offset);
+ DATE_TIME
+ }}
+ }
+}
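
A corresponding sketch for the `datetime!` macro (again assuming the `time` crate with the `macros` feature): the produced type follows the `type_name` selection in `into_token_tree` above.

    use time::{macros::datetime, OffsetDateTime, PrimitiveDateTime};

    fn main() {
        // No offset component: the macro emits a `PrimitiveDateTime`.
        let naive: PrimitiveDateTime = datetime!(2024-04-19 00:47:55);
        // With an offset, `.assume_offset(...)` is appended, yielding an
        // `OffsetDateTime`.
        let aware: OffsetDateTime = datetime!(2024-04-19 00:47:55 UTC);
        assert_eq!(naive.date(), aware.date());
    }
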
diff --git a/third_party/rust/time-macros/src/error.rs b/third_party/rust/time-macros/src/error.rs
new file mode 100644
index 0000000000..849317f15f
--- /dev/null
+++ b/third_party/rust/time-macros/src/error.rs
@@ -0,0 +1,123 @@
+use std::borrow::Cow;
+use std::fmt;
+
+use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+trait WithSpan {
+ fn with_span(self, span: Span) -> Self;
+}
+
+impl WithSpan for TokenTree {
+ fn with_span(mut self, span: Span) -> Self {
+ self.set_span(span);
+ self
+ }
+}
+
+pub(crate) enum Error {
+ MissingComponent {
+ name: &'static str,
+ span_start: Option<Span>,
+ span_end: Option<Span>,
+ },
+ InvalidComponent {
+ name: &'static str,
+ value: String,
+ span_start: Option<Span>,
+ span_end: Option<Span>,
+ },
+ #[cfg(any(feature = "formatting", feature = "parsing"))]
+ ExpectedString {
+ span_start: Option<Span>,
+ span_end: Option<Span>,
+ },
+ UnexpectedToken {
+ tree: TokenTree,
+ },
+ UnexpectedEndOfInput,
+ Custom {
+ message: Cow<'static, str>,
+ span_start: Option<Span>,
+ span_end: Option<Span>,
+ },
+}
+
+impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::MissingComponent { name, .. } => write!(f, "missing component: {name}"),
+ Self::InvalidComponent { name, value, .. } => {
+ write!(f, "invalid component: {name} was {value}")
+ }
+ #[cfg(any(feature = "formatting", feature = "parsing"))]
+ Self::ExpectedString { .. } => f.write_str("expected string literal"),
+ Self::UnexpectedToken { tree } => write!(f, "unexpected token: {tree}"),
+ Self::UnexpectedEndOfInput => f.write_str("unexpected end of input"),
+ Self::Custom { message, .. } => f.write_str(message),
+ }
+ }
+}
+
+impl Error {
+ fn span_start(&self) -> Span {
+ match self {
+ Self::MissingComponent { span_start, .. }
+ | Self::InvalidComponent { span_start, .. }
+ | Self::Custom { span_start, .. } => *span_start,
+ #[cfg(any(feature = "formatting", feature = "parsing"))]
+ Self::ExpectedString { span_start, .. } => *span_start,
+ Self::UnexpectedToken { tree } => Some(tree.span()),
+ Self::UnexpectedEndOfInput => Some(Span::mixed_site()),
+ }
+ .unwrap_or_else(Span::mixed_site)
+ }
+
+ fn span_end(&self) -> Span {
+ match self {
+ Self::MissingComponent { span_end, .. }
+ | Self::InvalidComponent { span_end, .. }
+ | Self::Custom { span_end, .. } => *span_end,
+ #[cfg(any(feature = "formatting", feature = "parsing"))]
+ Self::ExpectedString { span_end, .. } => *span_end,
+ Self::UnexpectedToken { tree, .. } => Some(tree.span()),
+ Self::UnexpectedEndOfInput => Some(Span::mixed_site()),
+ }
+ .unwrap_or_else(|| self.span_start())
+ }
+
+ pub(crate) fn to_compile_error(&self) -> TokenStream {
+ let (start, end) = (self.span_start(), self.span_end());
+
+ [
+ TokenTree::from(Punct::new(':', Spacing::Joint)).with_span(start),
+ TokenTree::from(Punct::new(':', Spacing::Alone)).with_span(start),
+ TokenTree::from(Ident::new("core", start)),
+ TokenTree::from(Punct::new(':', Spacing::Joint)).with_span(start),
+ TokenTree::from(Punct::new(':', Spacing::Alone)).with_span(start),
+ TokenTree::from(Ident::new("compile_error", start)),
+ TokenTree::from(Punct::new('!', Spacing::Alone)).with_span(start),
+ TokenTree::from(Group::new(
+ Delimiter::Parenthesis,
+ TokenStream::from(
+ TokenTree::from(Literal::string(&self.to_string())).with_span(end),
+ ),
+ ))
+ .with_span(end),
+ ]
+ .iter()
+ .cloned()
+ .collect()
+ }
+
+ /// Like `to_compile_error`, but for use in macros that produce items.
+ #[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
+ pub(crate) fn to_compile_error_standalone(&self) -> TokenStream {
+ let end = self.span_end();
+ self.to_compile_error()
+ .into_iter()
+ .chain(std::iter::once(
+ TokenTree::from(Punct::new(';', Spacing::Alone)).with_span(end),
+ ))
+ .collect()
+ }
+}
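
Each of these error variants ultimately reaches the user as a spanned `::core::compile_error!("...")` invocation built by `to_compile_error`, with the message text taken from the `Display` impl. An illustrative, intentionally non-compiling invocation (shown only in a comment):

    fn main() {
        // let _ = time::macros::date!(2024-13-01);
        // ^ would expand, via `to_compile_error`, to roughly
        //       ::core::compile_error!("invalid component: month was 13")
        //   with both spans pointing at the offending tokens.
    }
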
diff --git a/third_party/rust/time-macros/src/format_description/ast.rs b/third_party/rust/time-macros/src/format_description/ast.rs
new file mode 100644
index 0000000000..b75056bc2f
--- /dev/null
+++ b/third_party/rust/time-macros/src/format_description/ast.rs
@@ -0,0 +1,253 @@
+use std::boxed::Box;
+use std::iter;
+
+use super::{lexer, unused, Error, Location, Spanned, SpannedValue, Unused};
+
+pub(super) enum Item<'a> {
+ Literal(Spanned<&'a [u8]>),
+ EscapedBracket {
+ _first: Unused<Location>,
+ _second: Unused<Location>,
+ },
+ Component {
+ _opening_bracket: Unused<Location>,
+ _leading_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
+ name: Spanned<&'a [u8]>,
+ modifiers: Box<[Modifier<'a>]>,
+ _trailing_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
+ _closing_bracket: Unused<Location>,
+ },
+ Optional {
+ opening_bracket: Location,
+ _leading_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
+ _optional_kw: Unused<Spanned<&'a [u8]>>,
+ _whitespace: Unused<Spanned<&'a [u8]>>,
+ nested_format_description: NestedFormatDescription<'a>,
+ closing_bracket: Location,
+ },
+ First {
+ opening_bracket: Location,
+ _leading_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
+ _first_kw: Unused<Spanned<&'a [u8]>>,
+ _whitespace: Unused<Spanned<&'a [u8]>>,
+ nested_format_descriptions: Box<[NestedFormatDescription<'a>]>,
+ closing_bracket: Location,
+ },
+}
+
+pub(super) struct NestedFormatDescription<'a> {
+ pub(super) _opening_bracket: Unused<Location>,
+ pub(super) items: Box<[Item<'a>]>,
+ pub(super) _closing_bracket: Unused<Location>,
+ pub(super) _trailing_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
+}
+
+pub(super) struct Modifier<'a> {
+ pub(super) _leading_whitespace: Unused<Spanned<&'a [u8]>>,
+ pub(super) key: Spanned<&'a [u8]>,
+ pub(super) _colon: Unused<Location>,
+ pub(super) value: Spanned<&'a [u8]>,
+}
+
+pub(super) fn parse<
+ 'item: 'iter,
+ 'iter,
+ I: Iterator<Item = Result<lexer::Token<'item>, Error>>,
+ const VERSION: u8,
+>(
+ tokens: &'iter mut lexer::Lexed<I>,
+) -> impl Iterator<Item = Result<Item<'item>, Error>> + 'iter {
+ assert!(version!(1..=2));
+ parse_inner::<_, false, VERSION>(tokens)
+}
+
+fn parse_inner<
+ 'item,
+ I: Iterator<Item = Result<lexer::Token<'item>, Error>>,
+ const NESTED: bool,
+ const VERSION: u8,
+>(
+ tokens: &mut lexer::Lexed<I>,
+) -> impl Iterator<Item = Result<Item<'item>, Error>> + '_ {
+ iter::from_fn(move || {
+ if NESTED && tokens.peek_closing_bracket().is_some() {
+ return None;
+ }
+
+ let next = match tokens.next()? {
+ Ok(token) => token,
+ Err(err) => return Some(Err(err)),
+ };
+
+ Some(match next {
+ lexer::Token::Literal(Spanned { value: _, span: _ }) if NESTED => {
+ bug!("literal should not be present in nested description")
+ }
+ lexer::Token::Literal(value) => Ok(Item::Literal(value)),
+ lexer::Token::Bracket {
+ kind: lexer::BracketKind::Opening,
+ location,
+ } => {
+ if version!(..=1) {
+ if let Some(second_location) = tokens.next_if_opening_bracket() {
+ Ok(Item::EscapedBracket {
+ _first: unused(location),
+ _second: unused(second_location),
+ })
+ } else {
+ parse_component::<_, VERSION>(location, tokens)
+ }
+ } else {
+ parse_component::<_, VERSION>(location, tokens)
+ }
+ }
+ lexer::Token::Bracket {
+ kind: lexer::BracketKind::Closing,
+ location: _,
+ } if NESTED => {
+ bug!("closing bracket should be caught by the `if` statement")
+ }
+ lexer::Token::Bracket {
+ kind: lexer::BracketKind::Closing,
+ location: _,
+ } => {
+ bug!("closing bracket should have been consumed by `parse_component`")
+ }
+ lexer::Token::ComponentPart { kind: _, value } if NESTED => Ok(Item::Literal(value)),
+ lexer::Token::ComponentPart { kind: _, value: _ } => {
+ bug!("component part should have been consumed by `parse_component`")
+ }
+ })
+ })
+}
+
+fn parse_component<'a, I: Iterator<Item = Result<lexer::Token<'a>, Error>>, const VERSION: u8>(
+ opening_bracket: Location,
+ tokens: &mut lexer::Lexed<I>,
+) -> Result<Item<'a>, Error> {
+ let leading_whitespace = tokens.next_if_whitespace();
+
+ let Some(name) = tokens.next_if_not_whitespace() else {
+ let span = match leading_whitespace {
+ Some(Spanned { value: _, span }) => span,
+ None => opening_bracket.to(opening_bracket),
+ };
+ return Err(span.error("expected component name"));
+ };
+
+ if *name == b"optional" {
+ let Some(whitespace) = tokens.next_if_whitespace() else {
+ return Err(name.span.error("expected whitespace after `optional`"));
+ };
+
+ let nested = parse_nested::<_, VERSION>(whitespace.span.end, tokens)?;
+
+ let Some(closing_bracket) = tokens.next_if_closing_bracket() else {
+ return Err(opening_bracket.error("unclosed bracket"));
+ };
+
+ return Ok(Item::Optional {
+ opening_bracket,
+ _leading_whitespace: unused(leading_whitespace),
+ _optional_kw: unused(name),
+ _whitespace: unused(whitespace),
+ nested_format_description: nested,
+ closing_bracket,
+ });
+ }
+
+ if *name == b"first" {
+ let Some(whitespace) = tokens.next_if_whitespace() else {
+ return Err(name.span.error("expected whitespace after `first`"));
+ };
+
+ let mut nested_format_descriptions = Vec::new();
+ while let Ok(description) = parse_nested::<_, VERSION>(whitespace.span.end, tokens) {
+ nested_format_descriptions.push(description);
+ }
+
+ let Some(closing_bracket) = tokens.next_if_closing_bracket() else {
+ return Err(opening_bracket.error("unclosed bracket"));
+ };
+
+ return Ok(Item::First {
+ opening_bracket,
+ _leading_whitespace: unused(leading_whitespace),
+ _first_kw: unused(name),
+ _whitespace: unused(whitespace),
+ nested_format_descriptions: nested_format_descriptions.into_boxed_slice(),
+ closing_bracket,
+ });
+ }
+
+ let mut modifiers = Vec::new();
+ let trailing_whitespace = loop {
+ let Some(whitespace) = tokens.next_if_whitespace() else {
+ break None;
+ };
+
+ if let Some(location) = tokens.next_if_opening_bracket() {
+ return Err(location
+ .to(location)
+ .error("modifier must be of the form `key:value`"));
+ }
+
+ let Some(Spanned { value, span }) = tokens.next_if_not_whitespace() else {
+ break Some(whitespace);
+ };
+
+ let Some(colon_index) = value.iter().position(|&b| b == b':') else {
+ return Err(span.error("modifier must be of the form `key:value`"));
+ };
+ let key = &value[..colon_index];
+ let value = &value[colon_index + 1..];
+
+ if key.is_empty() {
+ return Err(span.shrink_to_start().error("expected modifier key"));
+ }
+ if value.is_empty() {
+ return Err(span.shrink_to_end().error("expected modifier value"));
+ }
+
+ modifiers.push(Modifier {
+ _leading_whitespace: unused(whitespace),
+ key: key.spanned(span.shrink_to_before(colon_index as _)),
+ _colon: unused(span.start.offset(colon_index as _)),
+ value: value.spanned(span.shrink_to_after(colon_index as _)),
+ });
+ };
+
+ let Some(closing_bracket) = tokens.next_if_closing_bracket() else {
+ return Err(opening_bracket.error("unclosed bracket"));
+ };
+
+ Ok(Item::Component {
+ _opening_bracket: unused(opening_bracket),
+ _leading_whitespace: unused(leading_whitespace),
+ name,
+ modifiers: modifiers.into_boxed_slice(),
+ _trailing_whitespace: unused(trailing_whitespace),
+ _closing_bracket: unused(closing_bracket),
+ })
+}
+
+fn parse_nested<'a, I: Iterator<Item = Result<lexer::Token<'a>, Error>>, const VERSION: u8>(
+ last_location: Location,
+ tokens: &mut lexer::Lexed<I>,
+) -> Result<NestedFormatDescription<'a>, Error> {
+ let Some(opening_bracket) = tokens.next_if_opening_bracket() else {
+ return Err(last_location.error("expected opening bracket"));
+ };
+ let items = parse_inner::<_, true, VERSION>(tokens).collect::<Result<_, _>>()?;
+ let Some(closing_bracket) = tokens.next_if_closing_bracket() else {
+ return Err(opening_bracket.error("unclosed bracket"));
+ };
+ let trailing_whitespace = tokens.next_if_whitespace();
+
+ Ok(NestedFormatDescription {
+ _opening_bracket: unused(opening_bracket),
+ items,
+ _closing_bracket: unused(closing_bracket),
+ _trailing_whitespace: unused(trailing_whitespace),
+ })
+}
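
The `optional` (and `first`) keywords handled by `parse_component` and `parse_nested` above wrap whole nested format descriptions. A minimal sketch of the surface syntax, assuming the user-facing `format_description!` macro defined in src/lib.rs (listed in the diffstat but outside this excerpt):

    use time::macros::format_description;

    fn main() {
        // `[optional [...]]` takes a nested description; when parsing, the
        // nested portion may be missing from the input entirely.
        let fmt = format_description!("[hour]:[minute][optional [:[second]]]");
        let _ = fmt;
    }
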
diff --git a/third_party/rust/time-macros/src/format_description/format_item.rs b/third_party/rust/time-macros/src/format_description/format_item.rs
new file mode 100644
index 0000000000..6a8cf555ee
--- /dev/null
+++ b/third_party/rust/time-macros/src/format_description/format_item.rs
@@ -0,0 +1,442 @@
+use std::boxed::Box;
+use std::num::NonZeroU16;
+use std::str::{self, FromStr};
+
+use super::{ast, unused, Error, Span, Spanned, Unused};
+
+pub(super) fn parse<'a>(
+ ast_items: impl Iterator<Item = Result<ast::Item<'a>, Error>>,
+) -> impl Iterator<Item = Result<Item<'a>, Error>> {
+ ast_items.map(|ast_item| ast_item.and_then(Item::from_ast))
+}
+
+pub(super) enum Item<'a> {
+ Literal(&'a [u8]),
+ Component(Component),
+ Optional {
+ value: Box<[Self]>,
+ _span: Unused<Span>,
+ },
+ First {
+ value: Box<[Box<[Self]>]>,
+ _span: Unused<Span>,
+ },
+}
+
+impl Item<'_> {
+ pub(super) fn from_ast(ast_item: ast::Item<'_>) -> Result<Item<'_>, Error> {
+ Ok(match ast_item {
+ ast::Item::Component {
+ _opening_bracket: _,
+ _leading_whitespace: _,
+ name,
+ modifiers,
+ _trailing_whitespace: _,
+ _closing_bracket: _,
+ } => Item::Component(component_from_ast(&name, &modifiers)?),
+ ast::Item::Literal(Spanned { value, span: _ }) => Item::Literal(value),
+ ast::Item::EscapedBracket {
+ _first: _,
+ _second: _,
+ } => Item::Literal(b"["),
+ ast::Item::Optional {
+ opening_bracket,
+ _leading_whitespace: _,
+ _optional_kw: _,
+ _whitespace: _,
+ nested_format_description,
+ closing_bracket,
+ } => {
+ let items = nested_format_description
+ .items
+ .into_vec()
+ .into_iter()
+ .map(Item::from_ast)
+ .collect::<Result<_, _>>()?;
+ Item::Optional {
+ value: items,
+ _span: unused(opening_bracket.to(closing_bracket)),
+ }
+ }
+ ast::Item::First {
+ opening_bracket,
+ _leading_whitespace: _,
+ _first_kw: _,
+ _whitespace: _,
+ nested_format_descriptions,
+ closing_bracket,
+ } => {
+ let items = nested_format_descriptions
+ .into_vec()
+ .into_iter()
+ .map(|nested_format_description| {
+ nested_format_description
+ .items
+ .into_vec()
+ .into_iter()
+ .map(Item::from_ast)
+ .collect()
+ })
+ .collect::<Result<_, _>>()?;
+ Item::First {
+ value: items,
+ _span: unused(opening_bracket.to(closing_bracket)),
+ }
+ }
+ })
+ }
+}
+
+impl From<Item<'_>> for crate::format_description::public::OwnedFormatItem {
+ fn from(item: Item<'_>) -> Self {
+ match item {
+ Item::Literal(literal) => Self::Literal(literal.to_vec().into_boxed_slice()),
+ Item::Component(component) => Self::Component(component.into()),
+ Item::Optional { value, _span: _ } => Self::Optional(Box::new(value.into())),
+ Item::First { value, _span: _ } => {
+ Self::First(value.into_vec().into_iter().map(Into::into).collect())
+ }
+ }
+ }
+}
+
+impl<'a> From<Box<[Item<'a>]>> for crate::format_description::public::OwnedFormatItem {
+ fn from(items: Box<[Item<'a>]>) -> Self {
+ let items = items.into_vec();
+ if items.len() == 1 {
+ if let Ok([item]) = <[_; 1]>::try_from(items) {
+ item.into()
+ } else {
+ bug!("the length was just checked to be 1")
+ }
+ } else {
+ Self::Compound(items.into_iter().map(Self::from).collect())
+ }
+ }
+}
+
+macro_rules! component_definition {
+ (@if_required required then { $($then:tt)* } $(else { $($else:tt)* })?) => { $($then)* };
+ (@if_required then { $($then:tt)* } $(else { $($else:tt)* })?) => { $($($else)*)? };
+ (@if_from_str from_str then { $($then:tt)* } $(else { $($else:tt)* })?) => { $($then)* };
+ (@if_from_str then { $($then:tt)* } $(else { $($else:tt)* })?) => { $($($else)*)? };
+
+ ($vis:vis enum $name:ident {
+ $($variant:ident = $parse_variant:literal {$(
+ $(#[$required:tt])?
+ $field:ident = $parse_field:literal:
+ Option<$(#[$from_str:tt])? $field_type:ty>
+ => $target_field:ident
+ ),* $(,)?}),* $(,)?
+ }) => {
+ $vis enum $name {
+ $($variant($variant),)*
+ }
+
+ $($vis struct $variant {
+ $($field: Option<$field_type>),*
+ })*
+
+ $(impl $variant {
+ fn with_modifiers(
+ modifiers: &[ast::Modifier<'_>],
+ _component_span: Span,
+ ) -> Result<Self, Error>
+ {
+ let mut this = Self {
+ $($field: None),*
+ };
+
+ for modifier in modifiers {
+ $(#[allow(clippy::string_lit_as_bytes)]
+ if modifier.key.eq_ignore_ascii_case($parse_field.as_bytes()) {
+ this.$field = component_definition!(@if_from_str $($from_str)?
+ then {
+ parse_from_modifier_value::<$field_type>(&modifier.value)?
+ } else {
+ <$field_type>::from_modifier_value(&modifier.value)?
+ });
+ continue;
+ })*
+ return Err(modifier.key.span.error("invalid modifier key"));
+ }
+
+ $(component_definition! { @if_required $($required)? then {
+ if this.$field.is_none() {
+ return Err(_component_span.error("missing required modifier"));
+ }
+ }})*
+
+ Ok(this)
+ }
+ })*
+
+ impl From<$name> for crate::format_description::public::Component {
+ fn from(component: $name) -> Self {
+ match component {$(
+ $name::$variant($variant { $($field),* }) => {
+ $crate::format_description::public::Component::$variant(
+ super::public::modifier::$variant {$(
+ $target_field: component_definition! { @if_required $($required)?
+ then {
+ match $field {
+ Some(value) => value.into(),
+ None => bug!("required modifier was not set"),
+ }
+ } else {
+ $field.unwrap_or_default().into()
+ }
+ }
+ ),*}
+ )
+ }
+ )*}
+ }
+ }
+
+ fn component_from_ast(
+ name: &Spanned<&[u8]>,
+ modifiers: &[ast::Modifier<'_>],
+ ) -> Result<Component, Error> {
+ $(#[allow(clippy::string_lit_as_bytes)]
+ if name.eq_ignore_ascii_case($parse_variant.as_bytes()) {
+ return Ok(Component::$variant($variant::with_modifiers(&modifiers, name.span)?));
+ })*
+ Err(name.span.error("invalid component"))
+ }
+ }
+}
+
+component_definition! {
+ pub(super) enum Component {
+ Day = "day" {
+ padding = "padding": Option<Padding> => padding,
+ },
+ Hour = "hour" {
+ padding = "padding": Option<Padding> => padding,
+ base = "repr": Option<HourBase> => is_12_hour_clock,
+ },
+ Ignore = "ignore" {
+ #[required]
+ count = "count": Option<#[from_str] NonZeroU16> => count,
+ },
+ Minute = "minute" {
+ padding = "padding": Option<Padding> => padding,
+ },
+ Month = "month" {
+ padding = "padding": Option<Padding> => padding,
+ repr = "repr": Option<MonthRepr> => repr,
+ case_sensitive = "case_sensitive": Option<MonthCaseSensitive> => case_sensitive,
+ },
+ OffsetHour = "offset_hour" {
+ sign_behavior = "sign": Option<SignBehavior> => sign_is_mandatory,
+ padding = "padding": Option<Padding> => padding,
+ },
+ OffsetMinute = "offset_minute" {
+ padding = "padding": Option<Padding> => padding,
+ },
+ OffsetSecond = "offset_second" {
+ padding = "padding": Option<Padding> => padding,
+ },
+ Ordinal = "ordinal" {
+ padding = "padding": Option<Padding> => padding,
+ },
+ Period = "period" {
+ case = "case": Option<PeriodCase> => is_uppercase,
+ case_sensitive = "case_sensitive": Option<PeriodCaseSensitive> => case_sensitive,
+ },
+ Second = "second" {
+ padding = "padding": Option<Padding> => padding,
+ },
+ Subsecond = "subsecond" {
+ digits = "digits": Option<SubsecondDigits> => digits,
+ },
+ UnixTimestamp = "unix_timestamp" {
+ precision = "precision": Option<UnixTimestampPrecision> => precision,
+ sign_behavior = "sign": Option<SignBehavior> => sign_is_mandatory,
+ },
+ Weekday = "weekday" {
+ repr = "repr": Option<WeekdayRepr> => repr,
+ one_indexed = "one_indexed": Option<WeekdayOneIndexed> => one_indexed,
+ case_sensitive = "case_sensitive": Option<WeekdayCaseSensitive> => case_sensitive,
+ },
+ WeekNumber = "week_number" {
+ padding = "padding": Option<Padding> => padding,
+ repr = "repr": Option<WeekNumberRepr> => repr,
+ },
+ Year = "year" {
+ padding = "padding": Option<Padding> => padding,
+ repr = "repr": Option<YearRepr> => repr,
+ base = "base": Option<YearBase> => iso_week_based,
+ sign_behavior = "sign": Option<SignBehavior> => sign_is_mandatory,
+ },
+ }
+}
+
+macro_rules! target_ty {
+ ($name:ident $type:ty) => {
+ $type
+ };
+ ($name:ident) => {
+ super::public::modifier::$name
+ };
+}
+
+/// Get the target value for a given enum.
+macro_rules! target_value {
+ ($name:ident $variant:ident $value:expr) => {
+ $value
+ };
+ ($name:ident $variant:ident) => {
+ super::public::modifier::$name::$variant
+ };
+}
+
+macro_rules! modifier {
+ ($(
+ enum $name:ident $(($target_ty:ty))? {
+ $(
+ $(#[$attr:meta])?
+ $variant:ident $(($target_value:expr))? = $parse_variant:literal
+ ),* $(,)?
+ }
+ )+) => {$(
+ #[derive(Default)]
+ enum $name {
+ $($(#[$attr])? $variant),*
+ }
+
+ impl $name {
+ /// Parse the modifier from its string representation.
+ fn from_modifier_value(value: &Spanned<&[u8]>) -> Result<Option<Self>, Error> {
+ $(if value.eq_ignore_ascii_case($parse_variant) {
+ return Ok(Some(Self::$variant));
+ })*
+ Err(value.span.error("invalid modifier value"))
+ }
+ }
+
+ impl From<$name> for target_ty!($name $($target_ty)?) {
+ fn from(modifier: $name) -> Self {
+ match modifier {
+ $($name::$variant => target_value!($name $variant $($target_value)?)),*
+ }
+ }
+ }
+ )+};
+}
+
+modifier! {
+ enum HourBase(bool) {
+ Twelve(true) = b"12",
+ #[default]
+ TwentyFour(false) = b"24",
+ }
+
+ enum MonthCaseSensitive(bool) {
+ False(false) = b"false",
+ #[default]
+ True(true) = b"true",
+ }
+
+ enum MonthRepr {
+ #[default]
+ Numerical = b"numerical",
+ Long = b"long",
+ Short = b"short",
+ }
+
+ enum Padding {
+ Space = b"space",
+ #[default]
+ Zero = b"zero",
+ None = b"none",
+ }
+
+ enum PeriodCase(bool) {
+ Lower(false) = b"lower",
+ #[default]
+ Upper(true) = b"upper",
+ }
+
+ enum PeriodCaseSensitive(bool) {
+ False(false) = b"false",
+ #[default]
+ True(true) = b"true",
+ }
+
+ enum SignBehavior(bool) {
+ #[default]
+ Automatic(false) = b"automatic",
+ Mandatory(true) = b"mandatory",
+ }
+
+ enum SubsecondDigits {
+ One = b"1",
+ Two = b"2",
+ Three = b"3",
+ Four = b"4",
+ Five = b"5",
+ Six = b"6",
+ Seven = b"7",
+ Eight = b"8",
+ Nine = b"9",
+ #[default]
+ OneOrMore = b"1+",
+ }
+
+ enum UnixTimestampPrecision {
+ #[default]
+ Second = b"second",
+ Millisecond = b"millisecond",
+ Microsecond = b"microsecond",
+ Nanosecond = b"nanosecond",
+ }
+
+ enum WeekNumberRepr {
+ #[default]
+ Iso = b"iso",
+ Sunday = b"sunday",
+ Monday = b"monday",
+ }
+
+ enum WeekdayCaseSensitive(bool) {
+ False(false) = b"false",
+ #[default]
+ True(true) = b"true",
+ }
+
+ enum WeekdayOneIndexed(bool) {
+ False(false) = b"false",
+ #[default]
+ True(true) = b"true",
+ }
+
+ enum WeekdayRepr {
+ Short = b"short",
+ #[default]
+ Long = b"long",
+ Sunday = b"sunday",
+ Monday = b"monday",
+ }
+
+ enum YearBase(bool) {
+ #[default]
+ Calendar(false) = b"calendar",
+ IsoWeek(true) = b"iso_week",
+ }
+
+ enum YearRepr {
+ #[default]
+ Full = b"full",
+ LastTwo = b"last_two",
+ }
+}
+
+fn parse_from_modifier_value<T: FromStr>(value: &Spanned<&[u8]>) -> Result<Option<T>, Error> {
+ str::from_utf8(value)
+ .ok()
+ .and_then(|val| val.parse::<T>().ok())
+ .map(|val| Some(val))
+ .ok_or_else(|| value.span.error("invalid modifier value"))
+}
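
The `component_definition!` and `modifier!` tables above define the full set of component names and `key:value` modifiers the macro accepts. A short sketch exercising a few of them (same assumptions as the earlier examples):

    use time::macros::format_description;

    fn main() {
        // Keys and values are matched case-insensitively by `with_modifiers`
        // and `from_modifier_value`; unknown ones are compile-time errors.
        let fmt = format_description!(
            "[hour repr:12]:[minute padding:zero] [period case:upper]"
        );
        let _ = fmt;
    }
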
diff --git a/third_party/rust/time-macros/src/format_description/lexer.rs b/third_party/rust/time-macros/src/format_description/lexer.rs
new file mode 100644
index 0000000000..2c927cb94d
--- /dev/null
+++ b/third_party/rust/time-macros/src/format_description/lexer.rs
@@ -0,0 +1,248 @@
+use core::iter;
+
+use super::{Error, Location, Spanned, SpannedValue};
+
+pub(super) struct Lexed<I: Iterator> {
+ iter: core::iter::Peekable<I>,
+}
+
+impl<I: Iterator> Iterator for Lexed<I> {
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.next()
+ }
+}
+
+impl<'iter, 'token: 'iter, I: Iterator<Item = Result<Token<'token>, Error>> + 'iter> Lexed<I> {
+ pub(super) fn peek(&mut self) -> Option<&I::Item> {
+ self.iter.peek()
+ }
+
+ pub(super) fn next_if_whitespace(&mut self) -> Option<Spanned<&'token [u8]>> {
+ if let Some(&Ok(Token::ComponentPart {
+ kind: ComponentKind::Whitespace,
+ value,
+ })) = self.peek()
+ {
+ self.next(); // consume
+ Some(value)
+ } else {
+ None
+ }
+ }
+
+ pub(super) fn next_if_not_whitespace(&mut self) -> Option<Spanned<&'token [u8]>> {
+ if let Some(&Ok(Token::ComponentPart {
+ kind: ComponentKind::NotWhitespace,
+ value,
+ })) = self.peek()
+ {
+ self.next();
+ Some(value)
+ } else {
+ None
+ }
+ }
+
+ pub(super) fn next_if_opening_bracket(&mut self) -> Option<Location> {
+ if let Some(&Ok(Token::Bracket {
+ kind: BracketKind::Opening,
+ location,
+ })) = self.peek()
+ {
+ self.next();
+ Some(location)
+ } else {
+ None
+ }
+ }
+
+ pub(super) fn peek_closing_bracket(&'iter mut self) -> Option<&'iter Location> {
+ if let Some(Ok(Token::Bracket {
+ kind: BracketKind::Closing,
+ location,
+ })) = self.peek()
+ {
+ Some(location)
+ } else {
+ None
+ }
+ }
+
+ pub(super) fn next_if_closing_bracket(&mut self) -> Option<Location> {
+ if let Some(&Ok(Token::Bracket {
+ kind: BracketKind::Closing,
+ location,
+ })) = self.peek()
+ {
+ self.next();
+ Some(location)
+ } else {
+ None
+ }
+ }
+}
+
+pub(super) enum Token<'a> {
+ Literal(Spanned<&'a [u8]>),
+ Bracket {
+ kind: BracketKind,
+ location: Location,
+ },
+ ComponentPart {
+ kind: ComponentKind,
+ value: Spanned<&'a [u8]>,
+ },
+}
+
+pub(super) enum BracketKind {
+ Opening,
+ Closing,
+}
+
+pub(super) enum ComponentKind {
+ #[allow(clippy::missing_docs_in_private_items)]
+ Whitespace,
+ #[allow(clippy::missing_docs_in_private_items)]
+ NotWhitespace,
+}
+
+fn attach_location<'item>(
+ iter: impl Iterator<Item = &'item u8>,
+ proc_span: proc_macro::Span,
+) -> impl Iterator<Item = (&'item u8, Location)> {
+ let mut byte_pos = 0;
+
+ iter.map(move |byte| {
+ let location = Location {
+ byte: byte_pos,
+ proc_span,
+ };
+ byte_pos += 1;
+ (byte, location)
+ })
+}
+
+#[allow(clippy::unused_peekable)] // false positive
+pub(super) fn lex<const VERSION: u8>(
+ mut input: &[u8],
+ proc_span: proc_macro::Span,
+) -> Lexed<impl Iterator<Item = Result<Token<'_>, Error>>> {
+ assert!(version!(1..=2));
+
+ let mut depth: u8 = 0;
+ let mut iter = attach_location(input.iter(), proc_span).peekable();
+ let mut second_bracket_location = None;
+
+ let iter = iter::from_fn(move || {
+ if version!(..=1) {
+ if let Some(location) = second_bracket_location.take() {
+ return Some(Ok(Token::Bracket {
+ kind: BracketKind::Opening,
+ location,
+ }));
+ }
+ }
+
+ Some(Ok(match iter.next()? {
+ (b'\\', backslash_loc) if version!(2..) => match iter.next() {
+ Some((b'\\' | b'[' | b']', char_loc)) => {
+ let char = &input[1..2];
+ input = &input[2..];
+ if depth == 0 {
+ Token::Literal(char.spanned(backslash_loc.to(char_loc)))
+ } else {
+ Token::ComponentPart {
+ kind: ComponentKind::NotWhitespace,
+ value: char.spanned(backslash_loc.to(char_loc)),
+ }
+ }
+ }
+ Some((_, loc)) => {
+ return Some(Err(loc.error("invalid escape sequence")));
+ }
+ None => {
+ return Some(Err(backslash_loc.error("unexpected end of input")));
+ }
+ },
+ (b'[', location) if version!(..=1) => {
+ if let Some((_, second_location)) = iter.next_if(|&(&byte, _)| byte == b'[') {
+ second_bracket_location = Some(second_location);
+ input = &input[2..];
+ } else {
+ depth += 1;
+ input = &input[1..];
+ }
+
+ Token::Bracket {
+ kind: BracketKind::Opening,
+ location,
+ }
+ }
+ (b'[', location) => {
+ depth += 1;
+ input = &input[1..];
+
+ Token::Bracket {
+ kind: BracketKind::Opening,
+ location,
+ }
+ }
+ (b']', location) if depth > 0 => {
+ depth -= 1;
+ input = &input[1..];
+
+ Token::Bracket {
+ kind: BracketKind::Closing,
+ location,
+ }
+ }
+ (_, start_location) if depth == 0 => {
+ let mut bytes = 1;
+ let mut end_location = start_location;
+
+ while let Some((_, location)) =
+ iter.next_if(|&(&byte, _)| !((version!(2..) && byte == b'\\') || byte == b'['))
+ {
+ end_location = location;
+ bytes += 1;
+ }
+
+ let value = &input[..bytes];
+ input = &input[bytes..];
+
+ Token::Literal(value.spanned(start_location.to(end_location)))
+ }
+ (byte, start_location) => {
+ let mut bytes = 1;
+ let mut end_location = start_location;
+ let is_whitespace = byte.is_ascii_whitespace();
+
+ while let Some((_, location)) = iter.next_if(|&(byte, _)| {
+ !matches!(byte, b'\\' | b'[' | b']')
+ && is_whitespace == byte.is_ascii_whitespace()
+ }) {
+ end_location = location;
+ bytes += 1;
+ }
+
+ let value = &input[..bytes];
+ input = &input[bytes..];
+
+ Token::ComponentPart {
+ kind: if is_whitespace {
+ ComponentKind::Whitespace
+ } else {
+ ComponentKind::NotWhitespace
+ },
+ value: value.spanned(start_location.to(end_location)),
+ }
+ }
+ }))
+ });
+
+ Lexed {
+ iter: iter.peekable(),
+ }
+}
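
The lexer supports two syntax versions: in version 1 a literal `[` is written by doubling it (`[[`), while version 2 switches to backslash escapes, as the `(b'\\', ..)` arm above shows. A hedged sketch of both, assuming the `version = 2` argument accepted by the macro entry point in src/lib.rs (outside this excerpt):

    use time::macros::format_description;

    fn main() {
        // Version 1 (the default): `[[` yields a literal `[`.
        let v1 = format_description!("[[year] [year]");
        // Version 2: `\[`, `\]` and `\\` are escapes; `[[` is no longer special.
        let v2 = format_description!(version = 2, r"\[year] [year]");
        let _ = (v1, v2);
    }
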
diff --git a/third_party/rust/time-macros/src/format_description/mod.rs b/third_party/rust/time-macros/src/format_description/mod.rs
new file mode 100644
index 0000000000..fde1272f6a
--- /dev/null
+++ b/third_party/rust/time-macros/src/format_description/mod.rs
@@ -0,0 +1,171 @@
+//! Parser for format descriptions.
+
+use std::vec::Vec;
+
+macro_rules! version {
+ ($range:expr) => {
+ $range.contains(&VERSION)
+ };
+}
+
+mod ast;
+mod format_item;
+mod lexer;
+mod public;
+
+pub(crate) fn parse_with_version(
+ version: Option<crate::FormatDescriptionVersion>,
+ s: &[u8],
+ proc_span: proc_macro::Span,
+) -> Result<Vec<crate::format_description::public::OwnedFormatItem>, crate::Error> {
+ match version {
+ Some(crate::FormatDescriptionVersion::V1) | None => parse::<1>(s, proc_span),
+ Some(crate::FormatDescriptionVersion::V2) => parse::<2>(s, proc_span),
+ }
+}
+
+fn parse<const VERSION: u8>(
+ s: &[u8],
+ proc_span: proc_macro::Span,
+) -> Result<Vec<crate::format_description::public::OwnedFormatItem>, crate::Error> {
+ let mut lexed = lexer::lex::<VERSION>(s, proc_span);
+ let ast = ast::parse::<_, VERSION>(&mut lexed);
+ let format_items = format_item::parse(ast);
+ Ok(format_items
+ .map(|res| res.map(Into::into))
+ .collect::<Result<_, _>>()?)
+}
+
+#[derive(Clone, Copy)]
+struct Location {
+ byte: u32,
+ proc_span: proc_macro::Span,
+}
+
+impl Location {
+ fn to(self, end: Self) -> Span {
+ Span { start: self, end }
+ }
+
+ #[must_use = "this does not modify the original value"]
+ fn offset(&self, offset: u32) -> Self {
+ Self {
+ byte: self.byte + offset,
+ proc_span: self.proc_span,
+ }
+ }
+
+ fn error(self, message: &'static str) -> Error {
+ Error {
+ message,
+ _span: unused(Span {
+ start: self,
+ end: self,
+ }),
+ proc_span: self.proc_span,
+ }
+ }
+}
+
+#[derive(Clone, Copy)]
+struct Span {
+ #[allow(clippy::missing_docs_in_private_items)]
+ start: Location,
+ #[allow(clippy::missing_docs_in_private_items)]
+ end: Location,
+}
+
+impl Span {
+ #[must_use = "this does not modify the original value"]
+ const fn shrink_to_start(&self) -> Self {
+ Self {
+ start: self.start,
+ end: self.start,
+ }
+ }
+
+ #[must_use = "this does not modify the original value"]
+ const fn shrink_to_end(&self) -> Self {
+ Self {
+ start: self.end,
+ end: self.end,
+ }
+ }
+
+ #[must_use = "this does not modify the original value"]
+ const fn shrink_to_before(&self, pos: u32) -> Self {
+ Self {
+ start: self.start,
+ end: Location {
+ byte: self.start.byte + pos - 1,
+ proc_span: self.start.proc_span,
+ },
+ }
+ }
+
+ #[must_use = "this does not modify the original value"]
+ fn shrink_to_after(&self, pos: u32) -> Self {
+ Self {
+ start: Location {
+ byte: self.start.byte + pos + 1,
+ proc_span: self.start.proc_span,
+ },
+ end: self.end,
+ }
+ }
+
+ fn error(self, message: &'static str) -> Error {
+ Error {
+ message,
+ _span: unused(self),
+ proc_span: self.start.proc_span,
+ }
+ }
+}
+
+#[derive(Clone, Copy)]
+struct Spanned<T> {
+ value: T,
+ span: Span,
+}
+
+impl<T> core::ops::Deref for Spanned<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.value
+ }
+}
+
+trait SpannedValue: Sized {
+ fn spanned(self, span: Span) -> Spanned<Self>;
+}
+
+impl<T> SpannedValue for T {
+ fn spanned(self, span: Span) -> Spanned<Self> {
+ Spanned { value: self, span }
+ }
+}
+
+struct Error {
+ message: &'static str,
+ _span: Unused<Span>,
+ proc_span: proc_macro::Span,
+}
+
+impl From<Error> for crate::Error {
+ fn from(error: Error) -> Self {
+ Self::Custom {
+ message: error.message.into(),
+ span_start: Some(error.proc_span),
+ span_end: Some(error.proc_span),
+ }
+ }
+}
+
+struct Unused<T>(core::marker::PhantomData<T>);
+
+#[allow(clippy::missing_const_for_fn)] // false positive
+fn unused<T>(_: T) -> Unused<T> {
+ Unused(core::marker::PhantomData)
+}
diff --git a/third_party/rust/time-macros/src/format_description/public/component.rs b/third_party/rust/time-macros/src/format_description/public/component.rs
new file mode 100644
index 0000000000..4737c6ce5c
--- /dev/null
+++ b/third_party/rust/time-macros/src/format_description/public/component.rs
@@ -0,0 +1,49 @@
+use proc_macro::{Ident, Span, TokenStream};
+
+use super::modifier;
+use crate::to_tokens::ToTokenStream;
+
+macro_rules! declare_component {
+ ($($name:ident)*) => {
+ pub(crate) enum Component {$(
+ $name(modifier::$name),
+ )*}
+
+ impl ToTokenStream for Component {
+ fn append_to(self, ts: &mut TokenStream) {
+ let mut mts = TokenStream::new();
+
+ let component = match self {$(
+ Self::$name(modifier) => {
+ modifier.append_to(&mut mts);
+ stringify!($name)
+ }
+ )*};
+ let component = Ident::new(component, Span::mixed_site());
+
+ quote_append! { ts
+ ::time::format_description::Component::#(component)(#S(mts))
+ }
+ }
+ }
+ };
+}
+
+declare_component! {
+ Day
+ Month
+ Ordinal
+ Weekday
+ WeekNumber
+ Year
+ Hour
+ Minute
+ Period
+ Second
+ Subsecond
+ OffsetHour
+ OffsetMinute
+ OffsetSecond
+ Ignore
+ UnixTimestamp
+}
diff --git a/third_party/rust/time-macros/src/format_description/public/mod.rs b/third_party/rust/time-macros/src/format_description/public/mod.rs
new file mode 100644
index 0000000000..ccb0b6e2a3
--- /dev/null
+++ b/third_party/rust/time-macros/src/format_description/public/mod.rs
@@ -0,0 +1,54 @@
+mod component;
+pub(super) mod modifier;
+
+use proc_macro::{Literal, TokenStream};
+
+pub(crate) use self::component::Component;
+use crate::to_tokens::ToTokenStream;
+
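+// Owned parse tree of a format description. `append_to` emits the equivalent borrowed
+// `::time::format_description::FormatItem` expression.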
+#[allow(variant_size_differences)]
+pub(crate) enum OwnedFormatItem {
+ Literal(Box<[u8]>),
+ Component(Component),
+ Compound(Box<[Self]>),
+ Optional(Box<Self>),
+ First(Box<[Self]>),
+}
+
+impl ToTokenStream for OwnedFormatItem {
+ fn append_to(self, ts: &mut TokenStream) {
+ match self {
+ Self::Literal(bytes) => quote_append! { ts
+ ::time::format_description::FormatItem::Literal {
+ 0: #(Literal::byte_string(bytes.as_ref()))
+ }
+ },
+ Self::Component(component) => quote_append! { ts
+ ::time::format_description::FormatItem::Component { 0: #S(component) }
+ },
+ Self::Compound(items) => {
+ let items = items
+ .into_vec()
+ .into_iter()
+ .map(|item| quote! { #S(item), })
+ .collect::<TokenStream>();
+ quote_append! { ts
+ ::time::format_description::FormatItem::Compound { 0: &[#S(items)] }
+ }
+ }
+ Self::Optional(item) => quote_append! {ts
+ ::time::format_description::FormatItem::Optional { 0: &#S(*item) }
+ },
+ Self::First(items) => {
+ let items = items
+ .into_vec()
+ .into_iter()
+ .map(|item| quote! { #S(item), })
+ .collect::<TokenStream>();
+ quote_append! { ts
+ ::time::format_description::FormatItem::First { 0: &[#S(items)] }
+ }
+ }
+ }
+ }
+}
diff --git a/third_party/rust/time-macros/src/format_description/public/modifier.rs b/third_party/rust/time-macros/src/format_description/public/modifier.rs
new file mode 100644
index 0000000000..e39c6bf552
--- /dev/null
+++ b/third_party/rust/time-macros/src/format_description/public/modifier.rs
@@ -0,0 +1,247 @@
+use std::num::NonZeroU16;
+
+use proc_macro::{Ident, Span, TokenStream, TokenTree};
+
+use crate::to_tokens::{ToTokenStream, ToTokenTree};
+
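+// For a struct, emit a block that starts from `modifier::$struct_name::default()` and
+// assigns each field before yielding the value; for an enum, emit the path to the
+// matching `modifier::$enum_name` variant.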
+macro_rules! to_tokens {
+ (
+ $(#[$struct_attr:meta])*
+ $struct_vis:vis struct $struct_name:ident {$(
+ $(#[$field_attr:meta])*
+ $field_vis:vis $field_name:ident : $field_ty:ty
+ ),+ $(,)?}
+ ) => {
+ $(#[$struct_attr])*
+ $struct_vis struct $struct_name {$(
+ $(#[$field_attr])*
+ $field_vis $field_name: $field_ty
+ ),+}
+
+ impl ToTokenTree for $struct_name {
+ fn into_token_tree(self) -> TokenTree {
+ let mut tokens = TokenStream::new();
+ let Self {$($field_name),+} = self;
+
+ quote_append! { tokens
+ let mut value = ::time::format_description::modifier::$struct_name::default();
+ };
+ $(
+ quote_append!(tokens value.$field_name =);
+ $field_name.append_to(&mut tokens);
+ quote_append!(tokens ;);
+ )+
+ quote_append!(tokens value);
+
+ proc_macro::TokenTree::Group(proc_macro::Group::new(
+ proc_macro::Delimiter::Brace,
+ tokens,
+ ))
+ }
+ }
+ };
+
+ (
+ $(#[$enum_attr:meta])*
+ $enum_vis:vis enum $enum_name:ident {$(
+ $(#[$variant_attr:meta])*
+ $variant_name:ident
+ ),+ $(,)?}
+ ) => {
+ $(#[$enum_attr])*
+ $enum_vis enum $enum_name {$(
+ $(#[$variant_attr])*
+ $variant_name
+ ),+}
+
+ impl ToTokenStream for $enum_name {
+ fn append_to(self, ts: &mut TokenStream) {
+ quote_append! { ts
+ ::time::format_description::modifier::$enum_name::
+ };
+ let name = match self {
+ $(Self::$variant_name => stringify!($variant_name)),+
+ };
+ ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
+ }
+ }
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Day {
+ pub(crate) padding: Padding,
+ }
+}
+
+to_tokens! {
+ pub(crate) enum MonthRepr {
+ Numerical,
+ Long,
+ Short,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Month {
+ pub(crate) padding: Padding,
+ pub(crate) repr: MonthRepr,
+ pub(crate) case_sensitive: bool,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Ordinal {
+ pub(crate) padding: Padding,
+ }
+}
+
+to_tokens! {
+ pub(crate) enum WeekdayRepr {
+ Short,
+ Long,
+ Sunday,
+ Monday,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Weekday {
+ pub(crate) repr: WeekdayRepr,
+ pub(crate) one_indexed: bool,
+ pub(crate) case_sensitive: bool,
+ }
+}
+
+to_tokens! {
+ pub(crate) enum WeekNumberRepr {
+ Iso,
+ Sunday,
+ Monday,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct WeekNumber {
+ pub(crate) padding: Padding,
+ pub(crate) repr: WeekNumberRepr,
+ }
+}
+
+to_tokens! {
+ pub(crate) enum YearRepr {
+ Full,
+ LastTwo,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Year {
+ pub(crate) padding: Padding,
+ pub(crate) repr: YearRepr,
+ pub(crate) iso_week_based: bool,
+ pub(crate) sign_is_mandatory: bool,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Hour {
+ pub(crate) padding: Padding,
+ pub(crate) is_12_hour_clock: bool,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Minute {
+ pub(crate) padding: Padding,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Period {
+ pub(crate) is_uppercase: bool,
+ pub(crate) case_sensitive: bool,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Second {
+ pub(crate) padding: Padding,
+ }
+}
+
+to_tokens! {
+ pub(crate) enum SubsecondDigits {
+ One,
+ Two,
+ Three,
+ Four,
+ Five,
+ Six,
+ Seven,
+ Eight,
+ Nine,
+ OneOrMore,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct Subsecond {
+ pub(crate) digits: SubsecondDigits,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct OffsetHour {
+ pub(crate) sign_is_mandatory: bool,
+ pub(crate) padding: Padding,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct OffsetMinute {
+ pub(crate) padding: Padding,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct OffsetSecond {
+ pub(crate) padding: Padding,
+ }
+}
+
+to_tokens! {
+ pub(crate) enum Padding {
+ Space,
+ Zero,
+ None,
+ }
+}
+
+pub(crate) struct Ignore {
+ pub(crate) count: NonZeroU16,
+}
+
+impl ToTokenTree for Ignore {
+ fn into_token_tree(self) -> TokenTree {
+ quote_group! {{
+ ::time::format_description::modifier::Ignore::count(#(self.count))
+ }}
+ }
+}
+
+to_tokens! {
+ pub(crate) enum UnixTimestampPrecision {
+ Second,
+ Millisecond,
+ Microsecond,
+ Nanosecond,
+ }
+}
+
+to_tokens! {
+ pub(crate) struct UnixTimestamp {
+ pub(crate) precision: UnixTimestampPrecision,
+ pub(crate) sign_is_mandatory: bool,
+ }
+}
diff --git a/third_party/rust/time-macros/src/helpers/mod.rs b/third_party/rust/time-macros/src/helpers/mod.rs
new file mode 100644
index 0000000000..56300b3e65
--- /dev/null
+++ b/third_party/rust/time-macros/src/helpers/mod.rs
@@ -0,0 +1,127 @@
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+mod string;
+
+use std::iter::Peekable;
+use std::str::FromStr;
+
+use proc_macro::{token_stream, Span, TokenTree};
+use time_core::util::{days_in_year, is_leap_year};
+
+use crate::Error;
+
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+pub(crate) fn get_string_literal(
+ mut tokens: impl Iterator<Item = TokenTree>,
+) -> Result<(Span, Vec<u8>), Error> {
+ match (tokens.next(), tokens.next()) {
+ (Some(TokenTree::Literal(literal)), None) => string::parse(&literal),
+ (Some(tree), None) => Err(Error::ExpectedString {
+ span_start: Some(tree.span()),
+ span_end: Some(tree.span()),
+ }),
+ (_, Some(tree)) => Err(Error::UnexpectedToken { tree }),
+ (None, None) => Err(Error::ExpectedString {
+ span_start: None,
+ span_end: None,
+ }),
+ }
+}
+
+pub(crate) fn consume_number<T: FromStr>(
+ component_name: &'static str,
+ chars: &mut Peekable<token_stream::IntoIter>,
+) -> Result<(Span, T), Error> {
+ let (span, digits) = match chars.next() {
+ Some(TokenTree::Literal(literal)) => (literal.span(), literal.to_string()),
+ Some(tree) => return Err(Error::UnexpectedToken { tree }),
+ None => return Err(Error::UnexpectedEndOfInput),
+ };
+
+ if let Ok(value) = digits.replace('_', "").parse() {
+ Ok((span, value))
+ } else {
+ Err(Error::InvalidComponent {
+ name: component_name,
+ value: digits,
+ span_start: Some(span),
+ span_end: Some(span),
+ })
+ }
+}
+
+pub(crate) fn consume_any_ident(
+ idents: &[&str],
+ chars: &mut Peekable<token_stream::IntoIter>,
+) -> Result<Span, Error> {
+ match chars.peek() {
+ Some(TokenTree::Ident(char)) if idents.contains(&char.to_string().as_str()) => {
+ let ret = Ok(char.span());
+ drop(chars.next());
+ ret
+ }
+ Some(tree) => Err(Error::UnexpectedToken { tree: tree.clone() }),
+ None => Err(Error::UnexpectedEndOfInput),
+ }
+}
+
+pub(crate) fn consume_punct(
+ c: char,
+ chars: &mut Peekable<token_stream::IntoIter>,
+) -> Result<Span, Error> {
+ match chars.peek() {
+ Some(TokenTree::Punct(punct)) if *punct == c => {
+ let ret = Ok(punct.span());
+ drop(chars.next());
+ ret
+ }
+ Some(tree) => Err(Error::UnexpectedToken { tree: tree.clone() }),
+ None => Err(Error::UnexpectedEndOfInput),
+ }
+}
+
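+// Weekday of the `ordinal`-th day of January in `year`, zero-indexed from Monday
+// (0 = Monday, …, 6 = Sunday).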
+fn jan_weekday(year: i32, ordinal: i32) -> u8 {
+ macro_rules! div_floor {
+ ($a:expr, $b:expr) => {{
+ let (_quotient, _remainder) = ($a / $b, $a % $b);
+ if (_remainder > 0 && $b < 0) || (_remainder < 0 && $b > 0) {
+ _quotient - 1
+ } else {
+ _quotient
+ }
+ }};
+ }
+
+ let adj_year = year - 1;
+ ((ordinal + adj_year + div_floor!(adj_year, 4) - div_floor!(adj_year, 100)
+ + div_floor!(adj_year, 400)
+ + 6)
+ .rem_euclid(7)) as _
+}
+
+pub(crate) fn days_in_year_month(year: i32, month: u8) -> u8 {
+ [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month as usize - 1]
+ + (month == 2 && is_leap_year(year)) as u8
+}
+
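+// Convert an ISO (year, week, weekday) triple to a (year, day-of-year) pair, rolling
+// into the previous or next year when the week straddles a year boundary.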
+pub(crate) fn ywd_to_yo(year: i32, week: u8, iso_weekday_number: u8) -> (i32, u16) {
+ let (ordinal, overflow) = (u16::from(week) * 7 + u16::from(iso_weekday_number))
+ .overflowing_sub(u16::from(jan_weekday(year, 4)) + 4);
+
+ if overflow || ordinal == 0 {
+ return (year - 1, (ordinal.wrapping_add(days_in_year(year - 1))));
+ }
+
+ let days_in_cur_year = days_in_year(year);
+ if ordinal > days_in_cur_year {
+ (year + 1, ordinal - days_in_cur_year)
+ } else {
+ (year, ordinal)
+ }
+}
+
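+// Convert a (year, month, day) calendar date to a (year, day-of-year) pair.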
+pub(crate) fn ymd_to_yo(year: i32, month: u8, day: u8) -> (i32, u16) {
+ let ordinal = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334][month as usize - 1]
+ + (month > 2 && is_leap_year(year)) as u16;
+
+ (year, ordinal + u16::from(day))
+}
diff --git a/third_party/rust/time-macros/src/helpers/string.rs b/third_party/rust/time-macros/src/helpers/string.rs
new file mode 100644
index 0000000000..6b478f60dc
--- /dev/null
+++ b/third_party/rust/time-macros/src/helpers/string.rs
@@ -0,0 +1,188 @@
+use std::ops::{Index, RangeFrom};
+
+use proc_macro::Span;
+
+use crate::Error;
+
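+// Decode a string, byte-string, or raw (byte-)string literal token into its contents
+// as bytes.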
+pub(crate) fn parse(token: &proc_macro::Literal) -> Result<(Span, Vec<u8>), Error> {
+ let span = token.span();
+ let repr = token.to_string();
+
+ match repr.as_bytes() {
+ [b'"', ..] => Ok((span, parse_lit_str_cooked(&repr[1..]))),
+ [b'b', b'"', rest @ ..] => Ok((span, parse_lit_byte_str_cooked(rest))),
+ [b'r', rest @ ..] | [b'b', b'r', rest @ ..] => Ok((span, parse_lit_str_raw(rest))),
+ _ => Err(Error::ExpectedString {
+ span_start: Some(span),
+ span_end: Some(span),
+ }),
+ }
+}
+
+fn byte(s: impl AsRef<[u8]>, idx: usize) -> u8 {
+ s.as_ref().get(idx).copied().unwrap_or_default()
+}
+
+fn parse_lit_str_cooked(mut s: &str) -> Vec<u8> {
+ let mut content = String::new();
+ 'outer: loop {
+ let ch = match byte(s, 0) {
+ b'"' => break,
+ b'\\' => {
+ let b = byte(s, 1);
+ s = &s[2..];
+ match b {
+ b'x' => {
+ let (byte, rest) = backslash_x(s);
+ s = rest;
+ char::from_u32(u32::from(byte)).expect("byte was just validated")
+ }
+ b'u' => {
+ let (chr, rest) = backslash_u(s);
+ s = rest;
+ chr
+ }
+ b'n' => '\n',
+ b'r' => '\r',
+ b't' => '\t',
+ b'\\' => '\\',
+ b'0' => '\0',
+ b'\'' => '\'',
+ b'"' => '"',
+ b'\r' | b'\n' => loop {
+ let ch = s.chars().next().unwrap_or_default();
+ if ch.is_whitespace() {
+ s = &s[ch.len_utf8()..];
+ } else {
+ continue 'outer;
+ }
+ },
+ _ => bug!("invalid escape"),
+ }
+ }
+ b'\r' => {
+ // A bare CR is not permitted in a literal, so this must be a CR-LF pair; normalize it to LF.
+ s = &s[2..];
+ '\n'
+ }
+ _ => {
+ let ch = s.chars().next().unwrap_or_default();
+ s = &s[ch.len_utf8()..];
+ ch
+ }
+ };
+ content.push(ch);
+ }
+
+ content.into_bytes()
+}
+
+fn parse_lit_str_raw(s: &[u8]) -> Vec<u8> {
+ let mut pounds = 0;
+ while byte(s, pounds) == b'#' {
+ pounds += 1;
+ }
+ let close = s
+ .iter()
+ .rposition(|&b| b == b'"')
+ .expect("had a string without trailing \"");
+
+ s[pounds + 1..close].to_owned()
+}
+
+fn parse_lit_byte_str_cooked(mut v: &[u8]) -> Vec<u8> {
+ let mut out = Vec::new();
+ 'outer: loop {
+ let byte = match byte(v, 0) {
+ b'"' => break,
+ b'\\' => {
+ let b = byte(v, 1);
+ v = &v[2..];
+ match b {
+ b'x' => {
+ let (byte, rest) = backslash_x(v);
+ v = rest;
+ byte
+ }
+ b'n' => b'\n',
+ b'r' => b'\r',
+ b't' => b'\t',
+ b'\\' => b'\\',
+ b'0' => b'\0',
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b'\r' | b'\n' => loop {
+ let byte = byte(v, 0);
+ let ch = char::from_u32(u32::from(byte)).expect("invalid byte");
+ if ch.is_whitespace() {
+ v = &v[1..];
+ } else {
+ continue 'outer;
+ }
+ },
+ _ => bug!("invalid escape"),
+ }
+ }
+ b'\r' => {
+ // A bare CR is not permitted in a literal, so this must be a CR-LF pair; normalize it to LF.
+ v = &v[2..];
+ b'\n'
+ }
+ b => {
+ v = &v[1..];
+ b
+ }
+ };
+ out.push(byte);
+ }
+
+ out
+}
+
+fn backslash_x<S>(s: &S) -> (u8, &S)
+where
+ S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized,
+{
+ let mut ch = 0;
+ let b0 = byte(s, 0);
+ let b1 = byte(s, 1);
+ // Either nibble may be a hex digit (e.g. `\xAB` in a byte-string literal), so decode
+ // both the same way rather than assuming the first is a decimal digit.
+ ch += 0x10
+ * match b0 {
+ b'0'..=b'9' => b0 - b'0',
+ b'a'..=b'f' => 10 + (b0 - b'a'),
+ b'A'..=b'F' => 10 + (b0 - b'A'),
+ _ => bug!("invalid hex escape"),
+ };
+ ch += match b1 {
+ b'0'..=b'9' => b1 - b'0',
+ b'a'..=b'f' => 10 + (b1 - b'a'),
+ b'A'..=b'F' => 10 + (b1 - b'A'),
+ _ => bug!("invalid hex escape"),
+ };
+ (ch, &s[2..])
+}
+
+fn backslash_u(mut s: &str) -> (char, &str) {
+ s = &s[1..];
+
+ let mut ch = 0;
+ let mut digits = 0;
+ loop {
+ let b = byte(s, 0);
+ let digit = match b {
+ b'0'..=b'9' => b - b'0',
+ b'a'..=b'f' => 10 + b - b'a',
+ b'A'..=b'F' => 10 + b - b'A',
+ b'_' if digits > 0 => {
+ s = &s[1..];
+ continue;
+ }
+ b'}' if digits != 0 => break,
+ _ => bug!("invalid unicode escape"),
+ };
+ ch *= 0x10;
+ ch += u32::from(digit);
+ digits += 1;
+ s = &s[1..];
+ }
+ s = &s[1..];
+
+ (
+ char::from_u32(ch).expect("invalid unicode escape passed by compiler"),
+ s,
+ )
+}
diff --git a/third_party/rust/time-macros/src/lib.rs b/third_party/rust/time-macros/src/lib.rs
new file mode 100644
index 0000000000..0e8568cdad
--- /dev/null
+++ b/third_party/rust/time-macros/src/lib.rs
@@ -0,0 +1,277 @@
+#![deny(
+ anonymous_parameters,
+ clippy::all,
+ illegal_floating_point_literal_pattern,
+ late_bound_lifetime_arguments,
+ path_statements,
+ patterns_in_fns_without_body,
+ rust_2018_idioms,
+ trivial_casts,
+ trivial_numeric_casts,
+ unreachable_pub,
+ unsafe_code,
+ unused_extern_crates
+)]
+#![warn(
+ clippy::dbg_macro,
+ clippy::decimal_literal_representation,
+ clippy::get_unwrap,
+ clippy::nursery,
+ clippy::print_stdout,
+ clippy::todo,
+ clippy::unimplemented,
+ clippy::unnested_or_patterns,
+ clippy::unwrap_used,
+ clippy::use_debug,
+ single_use_lifetimes,
+ unused_qualifications,
+ variant_size_differences
+)]
+#![allow(
+ clippy::missing_const_for_fn, // useless in proc macro
+ clippy::redundant_pub_crate, // suggests bad style
+ clippy::option_if_let_else, // suggests terrible code
+)]
+#[allow(unused_macros)]
+macro_rules! bug {
+ () => { compile_error!("provide an error message to help fix a possible bug") };
+ ($descr:literal $($rest:tt)?) => {
+ unreachable!(concat!("internal error: ", $descr) $($rest)?)
+ }
+}
+
+#[macro_use]
+mod quote;
+
+mod date;
+mod datetime;
+mod error;
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+mod format_description;
+mod helpers;
+mod offset;
+#[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
+mod serde_format_description;
+mod time;
+mod to_tokens;
+
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+use std::iter::Peekable;
+
+use proc_macro::TokenStream;
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+use proc_macro::{Ident, TokenTree};
+
+use self::error::Error;
+
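+// Generate the entry points for `date!`, `datetime!`, `offset!`, and `time!`. Each one
+// parses its input (e.g. `date!(2020-01-01)`, `time!(12:00 am)`, `offset!(UTC)`,
+// `datetime!(2020-01-01 0:00 UTC)`) and emits the value as a `const`-constructed
+// expression, or a compile error on invalid input.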
+macro_rules! impl_macros {
+ ($($name:ident)*) => {$(
+ #[proc_macro]
+ pub fn $name(input: TokenStream) -> TokenStream {
+ use crate::to_tokens::ToTokenTree;
+
+ let mut iter = input.into_iter().peekable();
+ match $name::parse(&mut iter) {
+ Ok(value) => match iter.peek() {
+ Some(tree) => Error::UnexpectedToken { tree: tree.clone() }.to_compile_error(),
+ None => TokenStream::from(value.into_token_tree()),
+ },
+ Err(err) => err.to_compile_error(),
+ }
+ }
+ )*};
+}
+
+impl_macros![date datetime offset time];
+
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+enum FormatDescriptionVersion {
+ V1,
+ V2,
+}
+
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+enum VersionOrModuleName {
+ Version(FormatDescriptionVersion),
+ ModuleName(Ident),
+}
+
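+// Parse an optional leading `version = 1` / `version = 2` argument and its trailing
+// comma, returning `None` if the input does not start with a `version` identifier.
+// When `NO_EQUALS_IS_MOD_NAME` is true, a bare `version` identifier not followed by
+// `=` is returned as a module name instead (for `serde_format_description!`).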
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+fn parse_format_description_version<const NO_EQUALS_IS_MOD_NAME: bool>(
+ iter: &mut Peekable<proc_macro::token_stream::IntoIter>,
+) -> Result<Option<VersionOrModuleName>, Error> {
+ let version_ident = match iter.peek() {
+ Some(TokenTree::Ident(ident)) if ident.to_string() == "version" => match iter.next() {
+ Some(TokenTree::Ident(ident)) => ident,
+ _ => unreachable!(),
+ },
+ _ => return Ok(None),
+ };
+ match iter.peek() {
+ Some(TokenTree::Punct(punct)) if punct.as_char() == '=' => iter.next(),
+ _ if NO_EQUALS_IS_MOD_NAME => {
+ return Ok(Some(VersionOrModuleName::ModuleName(version_ident)));
+ }
+ Some(token) => {
+ return Err(Error::Custom {
+ message: "expected `=`".into(),
+ span_start: Some(token.span()),
+ span_end: Some(token.span()),
+ });
+ }
+ None => {
+ return Err(Error::Custom {
+ message: "expected `=`".into(),
+ span_start: None,
+ span_end: None,
+ });
+ }
+ };
+ let version_literal = match iter.next() {
+ Some(TokenTree::Literal(literal)) => literal,
+ Some(token) => {
+ return Err(Error::Custom {
+ message: "expected 1 or 2".into(),
+ span_start: Some(token.span()),
+ span_end: Some(token.span()),
+ });
+ }
+ None => {
+ return Err(Error::Custom {
+ message: "expected 1 or 2".into(),
+ span_start: None,
+ span_end: None,
+ });
+ }
+ };
+ let version = match version_literal.to_string().as_str() {
+ "1" => FormatDescriptionVersion::V1,
+ "2" => FormatDescriptionVersion::V2,
+ _ => {
+ return Err(Error::Custom {
+ message: "invalid format description version".into(),
+ span_start: Some(version_literal.span()),
+ span_end: Some(version_literal.span()),
+ });
+ }
+ };
+ helpers::consume_punct(',', iter)?;
+
+ Ok(Some(VersionOrModuleName::Version(version)))
+}
+
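+// Entry point for `format_description!` (re-exported as `time::macros::format_description!`),
+// e.g. `format_description!("[year]-[month]-[day]")`, optionally prefixed with
+// `version = 2,`. The expansion evaluates to a `&[FormatItem<'_>]` constant.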
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+#[proc_macro]
+pub fn format_description(input: TokenStream) -> TokenStream {
+ (|| {
+ let mut input = input.into_iter().peekable();
+ let version = match parse_format_description_version::<false>(&mut input)? {
+ Some(VersionOrModuleName::Version(version)) => Some(version),
+ None => None,
+ // This branch can never be taken here, as `false` is provided as the const parameter.
+ Some(VersionOrModuleName::ModuleName(_)) => bug!("branch should never occur"),
+ };
+ let (span, string) = helpers::get_string_literal(input)?;
+ let items = format_description::parse_with_version(version, &string, span)?;
+
+ Ok(quote! {{
+ const DESCRIPTION: &[::time::format_description::FormatItem<'_>] = &[#S(
+ items
+ .into_iter()
+ .map(|item| quote! { #S(item), })
+ .collect::<TokenStream>()
+ )];
+ DESCRIPTION
+ }})
+ })()
+ .unwrap_or_else(|err: Error| err.to_compile_error())
+}
+
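+// Entry point for the serde helper (re-exported as `time::serde::format_description!`),
+// e.g. `time::serde::format_description!(my_format, OffsetDateTime, "[hour]:[minute]")`,
+// which generates a `my_format` module usable with `#[serde(with = "my_format")]`.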
+#[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
+#[proc_macro]
+pub fn serde_format_description(input: TokenStream) -> TokenStream {
+ (|| {
+ let mut tokens = input.into_iter().peekable();
+
+ // First, the optional format description version.
+ let version = parse_format_description_version::<true>(&mut tokens)?;
+ let (version, mod_name) = match version {
+ Some(VersionOrModuleName::ModuleName(module_name)) => (None, Some(module_name)),
+ Some(VersionOrModuleName::Version(version)) => (Some(version), None),
+ None => (None, None),
+ };
+
+ // Next, an identifier (the desired module name)
+ // Only parse this if it wasn't parsed when attempting to get the version.
+ let mod_name = match mod_name {
+ Some(mod_name) => mod_name,
+ None => match tokens.next() {
+ Some(TokenTree::Ident(ident)) => Ok(ident),
+ Some(tree) => Err(Error::UnexpectedToken { tree }),
+ None => Err(Error::UnexpectedEndOfInput),
+ }?,
+ };
+
+ // Followed by a comma
+ helpers::consume_punct(',', &mut tokens)?;
+
+ // Then, the type to create serde serializers for (e.g., `OffsetDateTime`).
+ let formattable = match tokens.next() {
+ Some(tree @ TokenTree::Ident(_)) => Ok(tree),
+ Some(tree) => Err(Error::UnexpectedToken { tree }),
+ None => Err(Error::UnexpectedEndOfInput),
+ }?;
+
+ // Another comma
+ helpers::consume_punct(',', &mut tokens)?;
+
+ // We now have two options. The user can either provide a format description as a string or
+ // they can provide a path to a format description. If the latter, all remaining tokens are
+ // assumed to be part of the path.
+ let (format, format_description_display) = match tokens.peek() {
+ // string literal
+ Some(TokenTree::Literal(_)) => {
+ let (span, format_string) = helpers::get_string_literal(tokens)?;
+ let items = format_description::parse_with_version(version, &format_string, span)?;
+ let items: TokenStream =
+ items.into_iter().map(|item| quote! { #S(item), }).collect();
+ let items = quote! {
+ const ITEMS: &[::time::format_description::FormatItem<'_>] = &[#S(items)];
+ ITEMS
+ };
+
+ (items, String::from_utf8_lossy(&format_string).into_owned())
+ }
+ // path
+ Some(_) => {
+ let tokens = tokens.collect::<TokenStream>();
+ let tokens_string = tokens.to_string();
+ (
+ quote! {{
+ // We can't just do `super::path` because the path could be an absolute
+ // path. In that case, we'd be generating `super::::path`, which is invalid.
+ // Even if we took that into account, it's not possible to know if it's an
+ // external crate, which would just require emitting `path` directly. By
+ // taking this approach, we can leave it to the compiler to do the actual
+ // resolution.
+ mod __path_hack {
+ pub(super) use super::super::*;
+ pub(super) use #S(tokens) as FORMAT;
+ }
+ __path_hack::FORMAT
+ }},
+ tokens_string,
+ )
+ }
+ None => return Err(Error::UnexpectedEndOfInput),
+ };
+
+ Ok(serde_format_description::build(
+ mod_name,
+ formattable,
+ format,
+ format_description_display,
+ ))
+ })()
+ .unwrap_or_else(|err: Error| err.to_compile_error_standalone())
+}
diff --git a/third_party/rust/time-macros/src/offset.rs b/third_party/rust/time-macros/src/offset.rs
new file mode 100644
index 0000000000..62d7a223da
--- /dev/null
+++ b/third_party/rust/time-macros/src/offset.rs
@@ -0,0 +1,96 @@
+use std::iter::Peekable;
+
+use proc_macro::{token_stream, Span, TokenTree};
+use time_core::convert::*;
+
+use crate::helpers::{consume_any_ident, consume_number, consume_punct};
+use crate::to_tokens::ToTokenTree;
+use crate::Error;
+
+pub(crate) struct Offset {
+ pub(crate) hours: i8,
+ pub(crate) minutes: i8,
+ pub(crate) seconds: i8,
+}
+
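+// Parse `utc`/`UTC` or a signed `±H[:MM[:SS]]` offset, e.g. `+5`, `-8:30`, or `+1:02:03`.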
+pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<Offset, Error> {
+ if consume_any_ident(&["utc", "UTC"], chars).is_ok() {
+ return Ok(Offset {
+ hours: 0,
+ minutes: 0,
+ seconds: 0,
+ });
+ }
+
+ let sign = if consume_punct('+', chars).is_ok() {
+ 1
+ } else if consume_punct('-', chars).is_ok() {
+ -1
+ } else if let Some(tree) = chars.next() {
+ return Err(Error::UnexpectedToken { tree });
+ } else {
+ return Err(Error::MissingComponent {
+ name: "sign",
+ span_start: None,
+ span_end: None,
+ });
+ };
+
+ let (hours_span, hours) = consume_number::<i8>("hour", chars)?;
+ let (mut minutes_span, mut minutes) = (Span::mixed_site(), 0);
+ let (mut seconds_span, mut seconds) = (Span::mixed_site(), 0);
+
+ if consume_punct(':', chars).is_ok() {
+ let min = consume_number::<i8>("minute", chars)?;
+ minutes_span = min.0;
+ minutes = min.1;
+
+ if consume_punct(':', chars).is_ok() {
+ let sec = consume_number::<i8>("second", chars)?;
+ seconds_span = sec.0;
+ seconds = sec.1;
+ }
+ }
+
+ if hours >= 24 {
+ Err(Error::InvalidComponent {
+ name: "hour",
+ value: hours.to_string(),
+ span_start: Some(hours_span),
+ span_end: Some(hours_span),
+ })
+ } else if minutes >= Minute.per(Hour) as _ {
+ Err(Error::InvalidComponent {
+ name: "minute",
+ value: minutes.to_string(),
+ span_start: Some(minutes_span),
+ span_end: Some(minutes_span),
+ })
+ } else if seconds >= Second.per(Minute) as _ {
+ Err(Error::InvalidComponent {
+ name: "second",
+ value: seconds.to_string(),
+ span_start: Some(seconds_span),
+ span_end: Some(seconds_span),
+ })
+ } else {
+ Ok(Offset {
+ hours: sign * hours,
+ minutes: sign * minutes,
+ seconds: sign * seconds,
+ })
+ }
+}
+
+impl ToTokenTree for Offset {
+ fn into_token_tree(self) -> TokenTree {
+ quote_group! {{
+ const OFFSET: ::time::UtcOffset = ::time::UtcOffset::__from_hms_unchecked(
+ #(self.hours),
+ #(self.minutes),
+ #(self.seconds),
+ );
+ OFFSET
+ }}
+ }
+}
diff --git a/third_party/rust/time-macros/src/quote.rs b/third_party/rust/time-macros/src/quote.rs
new file mode 100644
index 0000000000..4d3dcbca03
--- /dev/null
+++ b/third_party/rust/time-macros/src/quote.rs
@@ -0,0 +1,139 @@
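+// A minimal `quote!`-style macro family built directly on the compiler's `proc_macro`
+// API. In the interpolation syntax below, `#(expr)` splices a `ToTokenTree` and
+// `#S(expr)` splices a `ToTokenStream`.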
+macro_rules! quote {
+ () => (::proc_macro::TokenStream::new());
+ ($($x:tt)*) => {{
+ let mut ts = ::proc_macro::TokenStream::new();
+ let ts_mut = &mut ts;
+ quote_inner!(ts_mut $($x)*);
+ ts
+ }};
+}
+
+#[cfg(any(feature = "formatting", feature = "parsing"))]
+macro_rules! quote_append {
+ ($ts:ident $($x:tt)*) => {{
+ quote_inner!($ts $($x)*);
+ }};
+}
+
+macro_rules! quote_group {
+ ({ $($x:tt)* }) => {
+ ::proc_macro::TokenTree::Group(::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Brace,
+ quote!($($x)*)
+ ))
+ };
+}
+
+macro_rules! sym {
+ ($ts:ident $x:tt $y:tt) => {
+ $ts.extend([
+ ::proc_macro::TokenTree::from(::proc_macro::Punct::new(
+ $x,
+ ::proc_macro::Spacing::Joint,
+ )),
+ ::proc_macro::TokenTree::from(::proc_macro::Punct::new(
+ $y,
+ ::proc_macro::Spacing::Alone,
+ )),
+ ]);
+ };
+ ($ts:ident $x:tt) => {
+ $ts.extend([::proc_macro::TokenTree::from(::proc_macro::Punct::new(
+ $x,
+ ::proc_macro::Spacing::Alone,
+ ))]);
+ };
+}
+
+macro_rules! quote_inner {
+ // Base case
+ ($ts:ident) => {};
+
+ // Single or double symbols
+ ($ts:ident :: $($tail:tt)*) => { sym!($ts ':' ':'); quote_inner!($ts $($tail)*); };
+ ($ts:ident .. $($tail:tt)*) => { sym!($ts '.' '.'); quote_inner!($ts $($tail)*); };
+ ($ts:ident : $($tail:tt)*) => { sym!($ts ':'); quote_inner!($ts $($tail)*); };
+ ($ts:ident = $($tail:tt)*) => { sym!($ts '='); quote_inner!($ts $($tail)*); };
+ ($ts:ident ; $($tail:tt)*) => { sym!($ts ';'); quote_inner!($ts $($tail)*); };
+ ($ts:ident , $($tail:tt)*) => { sym!($ts ','); quote_inner!($ts $($tail)*); };
+ ($ts:ident . $($tail:tt)*) => { sym!($ts '.'); quote_inner!($ts $($tail)*); };
+ ($ts:ident & $($tail:tt)*) => { sym!($ts '&'); quote_inner!($ts $($tail)*); };
+ ($ts:ident << $($tail:tt)*) => { sym!($ts '<' '<'); quote_inner!($ts $($tail)*); };
+ ($ts:ident < $($tail:tt)*) => { sym!($ts '<'); quote_inner!($ts $($tail)*); };
+ ($ts:ident >> $($tail:tt)*) => { sym!($ts '>' '>'); quote_inner!($ts $($tail)*); };
+ ($ts:ident > $($tail:tt)*) => { sym!($ts '>'); quote_inner!($ts $($tail)*); };
+ ($ts:ident -> $($tail:tt)*) => { sym!($ts '-' '>'); quote_inner!($ts $($tail)*); };
+ ($ts:ident ? $($tail:tt)*) => { sym!($ts '?'); quote_inner!($ts $($tail)*); };
+ ($ts:ident ! $($tail:tt)*) => { sym!($ts '!'); quote_inner!($ts $($tail)*); };
+ ($ts:ident | $($tail:tt)*) => { sym!($ts '|'); quote_inner!($ts $($tail)*); };
+ ($ts:ident * $($tail:tt)*) => { sym!($ts '*'); quote_inner!($ts $($tail)*); };
+ ($ts:ident + $($tail:tt)*) => { sym!($ts '+'); quote_inner!($ts $($tail)*); };
+
+ // Identifier
+ ($ts:ident $i:ident $($tail:tt)*) => {
+ $ts.extend([::proc_macro::TokenTree::from(::proc_macro::Ident::new(
+ &stringify!($i),
+ ::proc_macro::Span::mixed_site(),
+ ))]);
+ quote_inner!($ts $($tail)*);
+ };
+
+ // Literal
+ ($ts:ident 0 $($tail:tt)*) => {
+ $ts.extend([::proc_macro::TokenTree::from(::proc_macro::Literal::usize_unsuffixed(0))]);
+ quote_inner!($ts $($tail)*);
+ };
+ ($ts:ident $l:literal $($tail:tt)*) => {
+ $ts.extend([::proc_macro::TokenTree::from(::proc_macro::Literal::string(&$l))]);
+ quote_inner!($ts $($tail)*);
+ };
+
+ // Lifetime
+ ($ts:ident $l:lifetime $($tail:tt)*) => {
+ $ts.extend([
+ ::proc_macro::TokenTree::from(
+ ::proc_macro::Punct::new('\'', ::proc_macro::Spacing::Joint)
+ ),
+ ::proc_macro::TokenTree::from(::proc_macro::Ident::new(
+ stringify!($l).trim_start_matches(|c| c == '\''),
+ ::proc_macro::Span::mixed_site(),
+ )),
+ ]);
+ quote_inner!($ts $($tail)*);
+ };
+
+ // Groups
+ ($ts:ident ($($inner:tt)*) $($tail:tt)*) => {
+ $ts.extend([::proc_macro::TokenTree::Group(::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Parenthesis,
+ quote!($($inner)*)
+ ))]);
+ quote_inner!($ts $($tail)*);
+ };
+ ($ts:ident [$($inner:tt)*] $($tail:tt)*) => {
+ $ts.extend([::proc_macro::TokenTree::Group(::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Bracket,
+ quote!($($inner)*)
+ ))]);
+ quote_inner!($ts $($tail)*);
+ };
+ ($ts:ident {$($inner:tt)*} $($tail:tt)*) => {
+ $ts.extend([::proc_macro::TokenTree::Group(::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Brace,
+ quote!($($inner)*)
+ ))]);
+ quote_inner!($ts $($tail)*);
+ };
+
+ // Interpolated values
+ // TokenTree by default
+ ($ts:ident #($e:expr) $($tail:tt)*) => {
+ $ts.extend([$crate::to_tokens::ToTokenTree::into_token_tree($e)]);
+ quote_inner!($ts $($tail)*);
+ };
+ // Allow a TokenStream by request. It's more expensive, so avoid if possible.
+ ($ts:ident #S($e:expr) $($tail:tt)*) => {
+ $crate::to_tokens::ToTokenStream::append_to($e, $ts);
+ quote_inner!($ts $($tail)*);
+ };
+}
diff --git a/third_party/rust/time-macros/src/serde_format_description.rs b/third_party/rust/time-macros/src/serde_format_description.rs
new file mode 100644
index 0000000000..34b99f6790
--- /dev/null
+++ b/third_party/rust/time-macros/src/serde_format_description.rs
@@ -0,0 +1,172 @@
+use proc_macro::{Ident, TokenStream, TokenTree};
+
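+// Build the module emitted by `serde_format_description!`: feature-gated `serialize`
+// and `deserialize` functions for `ty` using the given format, plus an `option`
+// submodule handling `Option<ty>`.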
+pub(crate) fn build(
+ mod_name: Ident,
+ ty: TokenTree,
+ format: TokenStream,
+ format_description_display: String,
+) -> TokenStream {
+ let ty_s = &*ty.to_string();
+
+ let visitor = if cfg!(feature = "parsing") {
+ quote! {
+ struct Visitor;
+ struct OptionVisitor;
+
+ impl<'a> ::serde::de::Visitor<'a> for Visitor {
+ type Value = __TimeSerdeType;
+
+ fn expecting(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ write!(
+ f,
+ concat!(
+ "a(n) `",
+ #(ty_s),
+ "` in the format \"{}\"",
+ ),
+ #(format_description_display.as_str())
+ )
+ }
+
+ fn visit_str<E: ::serde::de::Error>(
+ self,
+ value: &str
+ ) -> Result<__TimeSerdeType, E> {
+ __TimeSerdeType::parse(value, &description()).map_err(E::custom)
+ }
+ }
+
+ impl<'a> ::serde::de::Visitor<'a> for OptionVisitor {
+ type Value = Option<__TimeSerdeType>;
+
+ fn expecting(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ write!(
+ f,
+ concat!(
+ "an `Option<",
+ #(ty_s),
+ ">` in the format \"{}\"",
+ ),
+ #(format_description_display.as_str())
+ )
+ }
+
+ fn visit_some<D: ::serde::de::Deserializer<'a>>(
+ self,
+ deserializer: D
+ ) -> Result<Option<__TimeSerdeType>, D::Error> {
+ deserializer
+ .deserialize_str(Visitor)
+ .map(Some)
+ }
+
+ fn visit_none<E: ::serde::de::Error>(
+ self
+ ) -> Result<Option<__TimeSerdeType>, E> {
+ Ok(None)
+ }
+ }
+ }
+ } else {
+ quote!()
+ };
+
+ let serialize_primary = if cfg!(feature = "formatting") {
+ quote! {
+ pub fn serialize<S: ::serde::Serializer>(
+ datetime: &__TimeSerdeType,
+ serializer: S,
+ ) -> Result<S::Ok, S::Error> {
+ use ::serde::Serialize;
+ datetime
+ .format(&description())
+ .map_err(::time::error::Format::into_invalid_serde_value::<S>)?
+ .serialize(serializer)
+ }
+ }
+ } else {
+ quote!()
+ };
+
+ let deserialize_primary = if cfg!(feature = "parsing") {
+ quote! {
+ pub fn deserialize<'a, D: ::serde::Deserializer<'a>>(
+ deserializer: D
+ ) -> Result<__TimeSerdeType, D::Error> {
+ use ::serde::Deserialize;
+ deserializer.deserialize_str(Visitor)
+ }
+ }
+ } else {
+ quote!()
+ };
+
+ let serialize_option = if cfg!(feature = "formatting") {
+ quote! {
+ pub fn serialize<S: ::serde::Serializer>(
+ option: &Option<__TimeSerdeType>,
+ serializer: S,
+ ) -> Result<S::Ok, S::Error> {
+ use ::serde::Serialize;
+ option.map(|datetime| datetime.format(&description()))
+ .transpose()
+ .map_err(::time::error::Format::into_invalid_serde_value::<S>)?
+ .serialize(serializer)
+ }
+ }
+ } else {
+ quote!()
+ };
+
+ let deserialize_option = if cfg!(feature = "parsing") {
+ quote! {
+ pub fn deserialize<'a, D: ::serde::Deserializer<'a>>(
+ deserializer: D
+ ) -> Result<Option<__TimeSerdeType>, D::Error> {
+ use ::serde::Deserialize;
+ deserializer.deserialize_option(OptionVisitor)
+ }
+ }
+ } else {
+ quote!()
+ };
+
+ let deserialize_option_imports = if cfg!(feature = "parsing") {
+ quote! {
+ use super::{OptionVisitor, Visitor};
+ }
+ } else {
+ quote!()
+ };
+
+ let fd_traits = match (cfg!(feature = "formatting"), cfg!(feature = "parsing")) {
+ (false, false) => {
+ bug!("serde_format_description::build called without formatting or parsing enabled")
+ }
+ (false, true) => quote! { ::time::parsing::Parsable },
+ (true, false) => quote! { ::time::formatting::Formattable },
+ (true, true) => quote! { ::time::formatting::Formattable + ::time::parsing::Parsable },
+ };
+
+ quote! {
+ mod #(mod_name) {
+ use ::time::#(ty) as __TimeSerdeType;
+
+ const fn description() -> impl #S(fd_traits) {
+ #S(format)
+ }
+
+ #S(visitor)
+ #S(serialize_primary)
+ #S(deserialize_primary)
+
+ pub(super) mod option {
+ use super::{description, __TimeSerdeType};
+ #S(deserialize_option_imports)
+
+ #S(serialize_option)
+ #S(deserialize_option)
+ }
+ }
+ }
+}
diff --git a/third_party/rust/time-macros/src/time.rs b/third_party/rust/time-macros/src/time.rs
new file mode 100644
index 0000000000..96314de1f4
--- /dev/null
+++ b/third_party/rust/time-macros/src/time.rs
@@ -0,0 +1,119 @@
+use std::iter::Peekable;
+
+use proc_macro::{token_stream, Span, TokenTree};
+use time_core::convert::*;
+
+use crate::helpers::{consume_any_ident, consume_number, consume_punct};
+use crate::to_tokens::ToTokenTree;
+use crate::Error;
+
+enum Period {
+ Am,
+ Pm,
+ _24,
+}
+
+pub(crate) struct Time {
+ pub(crate) hour: u8,
+ pub(crate) minute: u8,
+ pub(crate) second: u8,
+ pub(crate) nanosecond: u32,
+}
+
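+// Parse a time such as `12:00 am`, `23:59`, or `0:00:01.5` (fractional seconds are
+// allowed and are converted to nanoseconds).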
+pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<Time, Error> {
+ fn consume_period(chars: &mut Peekable<token_stream::IntoIter>) -> (Option<Span>, Period) {
+ if let Ok(span) = consume_any_ident(&["am", "AM"], chars) {
+ (Some(span), Period::Am)
+ } else if let Ok(span) = consume_any_ident(&["pm", "PM"], chars) {
+ (Some(span), Period::Pm)
+ } else {
+ (None, Period::_24)
+ }
+ }
+
+ let (hour_span, hour) = consume_number("hour", chars)?;
+
+ let ((minute_span, minute), (second_span, second), (period_span, period)) =
+ match consume_period(chars) {
+ // Nothing but the 12-hour clock hour and AM/PM
+ (period_span @ Some(_), period) => (
+ (Span::mixed_site(), 0),
+ (Span::mixed_site(), 0.),
+ (period_span, period),
+ ),
+ (None, _) => {
+ consume_punct(':', chars)?;
+ let (minute_span, minute) = consume_number::<u8>("minute", chars)?;
+ let (second_span, second): (_, f64) = if consume_punct(':', chars).is_ok() {
+ consume_number("second", chars)?
+ } else {
+ (Span::mixed_site(), 0.)
+ };
+ let (period_span, period) = consume_period(chars);
+ (
+ (minute_span, minute),
+ (second_span, second),
+ (period_span, period),
+ )
+ }
+ };
+
+ let hour = match (hour, period) {
+ (0, Period::Am | Period::Pm) => {
+ return Err(Error::InvalidComponent {
+ name: "hour",
+ value: hour.to_string(),
+ span_start: Some(hour_span),
+ span_end: Some(period_span.unwrap_or(hour_span)),
+ });
+ }
+ (12, Period::Am) => 0,
+ (12, Period::Pm) => 12,
+ (hour, Period::Am | Period::_24) => hour,
+ (hour, Period::Pm) => hour + 12,
+ };
+
+ if hour >= Hour.per(Day) {
+ Err(Error::InvalidComponent {
+ name: "hour",
+ value: hour.to_string(),
+ span_start: Some(hour_span),
+ span_end: Some(period_span.unwrap_or(hour_span)),
+ })
+ } else if minute >= Minute.per(Hour) {
+ Err(Error::InvalidComponent {
+ name: "minute",
+ value: minute.to_string(),
+ span_start: Some(minute_span),
+ span_end: Some(minute_span),
+ })
+ } else if second >= Second.per(Minute) as _ {
+ Err(Error::InvalidComponent {
+ name: "second",
+ value: second.to_string(),
+ span_start: Some(second_span),
+ span_end: Some(second_span),
+ })
+ } else {
+ Ok(Time {
+ hour,
+ minute,
+ second: second.trunc() as _,
+ nanosecond: (second.fract() * Nanosecond.per(Second) as f64).round() as _,
+ })
+ }
+}
+
+impl ToTokenTree for Time {
+ fn into_token_tree(self) -> TokenTree {
+ quote_group! {{
+ const TIME: ::time::Time = ::time::Time::__from_hms_nanos_unchecked(
+ #(self.hour),
+ #(self.minute),
+ #(self.second),
+ #(self.nanosecond),
+ );
+ TIME
+ }}
+ }
+}
diff --git a/third_party/rust/time-macros/src/to_tokens.rs b/third_party/rust/time-macros/src/to_tokens.rs
new file mode 100644
index 0000000000..7e73211533
--- /dev/null
+++ b/third_party/rust/time-macros/src/to_tokens.rs
@@ -0,0 +1,78 @@
+use std::num::NonZeroU16;
+
+use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
+
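+// Conversion traits used by the `quote!` macros: `ToTokenTree` for single-token
+// interpolation (`#(expr)`) and `ToTokenStream` for multi-token interpolation
+// (`#S(expr)`).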
+pub(crate) trait ToTokenStream: Sized {
+ fn append_to(self, ts: &mut TokenStream);
+}
+
+pub(crate) trait ToTokenTree: Sized {
+ fn into_token_tree(self) -> TokenTree;
+}
+
+impl<T: ToTokenTree> ToTokenStream for T {
+ fn append_to(self, ts: &mut TokenStream) {
+ ts.extend([self.into_token_tree()])
+ }
+}
+
+impl ToTokenTree for bool {
+ fn into_token_tree(self) -> TokenTree {
+ let lit = if self { "true" } else { "false" };
+ TokenTree::Ident(Ident::new(lit, Span::mixed_site()))
+ }
+}
+
+impl ToTokenStream for TokenStream {
+ fn append_to(self, ts: &mut TokenStream) {
+ ts.extend(self)
+ }
+}
+
+impl ToTokenTree for TokenTree {
+ fn into_token_tree(self) -> TokenTree {
+ self
+ }
+}
+
+impl ToTokenTree for &str {
+ fn into_token_tree(self) -> TokenTree {
+ TokenTree::Literal(Literal::string(self))
+ }
+}
+
+impl ToTokenTree for NonZeroU16 {
+ fn into_token_tree(self) -> TokenTree {
+ quote_group! {{
+ unsafe { ::core::num::NonZeroU16::new_unchecked(#(self.get())) }
+ }}
+ }
+}
+
+macro_rules! impl_for_tree_types {
+ ($($type:ty)*) => {$(
+ impl ToTokenTree for $type {
+ fn into_token_tree(self) -> TokenTree {
+ TokenTree::from(self)
+ }
+ }
+ )*};
+}
+impl_for_tree_types![Ident Literal Group Punct];
+
+macro_rules! impl_for_int {
+ ($($type:ty => $method:ident)*) => {$(
+ impl ToTokenTree for $type {
+ fn into_token_tree(self) -> TokenTree {
+ TokenTree::from(Literal::$method(self))
+ }
+ }
+ )*};
+}
+impl_for_int! {
+ i8 => i8_unsuffixed
+ u8 => u8_unsuffixed
+ u16 => u16_unsuffixed
+ i32 => i32_unsuffixed
+ u32 => u32_unsuffixed
+}