Diffstat (limited to 'vendor/indoc/src')
-rw-r--r--  vendor/indoc/src/error.rs      47
-rw-r--r--  vendor/indoc/src/expr.rs       56
-rw-r--r--  vendor/indoc/src/lib.rs       403
-rw-r--r--  vendor/indoc/src/unindent.rs  131
4 files changed, 637 insertions, 0 deletions
diff --git a/vendor/indoc/src/error.rs b/vendor/indoc/src/error.rs
new file mode 100644
index 000000000..7c5badb25
--- /dev/null
+++ b/vendor/indoc/src/error.rs
@@ -0,0 +1,47 @@
+use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::iter::FromIterator;
+
+pub type Result<T> = std::result::Result<T, Error>;
+
+pub struct Error {
+    begin: Span,
+    end: Span,
+    msg: String,
+}
+
+impl Error {
+    pub fn new(span: Span, msg: &str) -> Self {
+        Self::new2(span, span, msg)
+    }
+
+    pub fn new2(begin: Span, end: Span, msg: &str) -> Self {
+        Error {
+            begin,
+            end,
+            msg: msg.to_owned(),
+        }
+    }
+
+    pub fn to_compile_error(&self) -> TokenStream {
+        // compile_error! { $msg }
+        TokenStream::from_iter(vec![
+            TokenTree::Ident(Ident::new("compile_error", self.begin)),
+            TokenTree::Punct({
+                let mut punct = Punct::new('!', Spacing::Alone);
+                punct.set_span(self.begin);
+                punct
+            }),
+            TokenTree::Group({
+                let mut group = Group::new(Delimiter::Brace, {
+                    TokenStream::from_iter(vec![TokenTree::Literal({
+                        let mut string = Literal::string(&self.msg);
+                        string.set_span(self.end);
+                        string
+                    })])
+                });
+                group.set_span(self.end);
+                group
+            }),
+        ])
+    }
+}
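
The error.rs shown above assembles its diagnostic tokens by hand from `proc_macro` primitives rather than relying on the syn/quote stack. For orientation only, a hedged sketch of the equivalent expansion written with the `proc-macro2` and `quote` crates (an assumption made purely for illustration; indoc itself does not use them):

```rust
// Sketch: the same `compile_error! { "msg" }` stream that
// Error::to_compile_error assembles token by token, expressed with
// quote_spanned! so the span still controls where rustc reports the error.
use proc_macro2::{Span, TokenStream};
use quote::quote_spanned;

fn to_compile_error(span: Span, msg: &str) -> TokenStream {
    quote_spanned!(span=> compile_error! { #msg })
}

fn main() {
    let tokens = to_compile_error(Span::call_site(), "argument must be a single string literal");
    println!("{}", tokens);
}
```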
diff --git a/vendor/indoc/src/expr.rs b/vendor/indoc/src/expr.rs
new file mode 100644
index 000000000..e802e2015
--- /dev/null
+++ b/vendor/indoc/src/expr.rs
@@ -0,0 +1,56 @@
+use crate::error::{Error, Result};
+use proc_macro::token_stream::IntoIter as TokenIter;
+use proc_macro::{Spacing, Span, TokenStream, TokenTree};
+use std::iter;
+
+pub struct Expr(TokenStream);
+
+pub fn parse(input: &mut TokenIter) -> Result<Expr> {
+    #[derive(PartialEq)]
+    enum Lookbehind {
+        JointColon,
+        DoubleColon,
+        Other,
+    }
+
+    let mut expr = TokenStream::new();
+    let mut lookbehind = Lookbehind::Other;
+    let mut angle_bracket_depth = 0;
+
+    loop {
+        match input.next() {
+            Some(TokenTree::Punct(punct)) => {
+                let ch = punct.as_char();
+                let spacing = punct.spacing();
+                expr.extend(iter::once(TokenTree::Punct(punct)));
+                lookbehind = match ch {
+                    ',' if angle_bracket_depth == 0 => return Ok(Expr(expr)),
+                    ':' if lookbehind == Lookbehind::JointColon => Lookbehind::DoubleColon,
+                    ':' if spacing == Spacing::Joint => Lookbehind::JointColon,
+                    '<' if lookbehind == Lookbehind::DoubleColon => {
+                        angle_bracket_depth += 1;
+                        Lookbehind::Other
+                    }
+                    '>' if angle_bracket_depth > 0 => {
+                        angle_bracket_depth -= 1;
+                        Lookbehind::Other
+                    }
+                    _ => Lookbehind::Other,
+                };
+            }
+            Some(token) => expr.extend(iter::once(token)),
+            None => {
+                return Err(Error::new(
+                    Span::call_site(),
+                    "unexpected end of macro input",
+                ))
+            }
+        }
+    }
+}
+
+impl Expr {
+    pub fn into_tokens(self) -> TokenStream {
+        self.0
+    }
+}
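
expr.rs exists for `writedoc!`, whose leading destination expression is consumed verbatim up to the first comma at angle-bracket depth zero; the `Lookbehind` state recognizes `::<` so commas inside turbofish generics do not end the expression early. A standalone sketch of that rule over a plain character stream (an illustrative, hypothetical helper, not part of indoc's API):

```rust
// Hypothetical, simplified re-implementation of the "stop at the first
// top-level comma" rule on chars instead of proc_macro tokens. A `::<`
// turbofish raises the depth, so commas inside generic arguments are kept
// as part of the expression.
fn split_first_arg(src: &str) -> (&str, &str) {
    let mut depth = 0usize;
    let mut prev = ['\0', '\0'];
    for (i, ch) in src.char_indices() {
        match ch {
            ',' if depth == 0 => return (&src[..i], &src[i + 1..]),
            '<' if prev == [':', ':'] => depth += 1,
            '>' if depth > 0 => depth -= 1,
            _ => {}
        }
        prev = [prev[1], ch];
    }
    (src, "")
}

fn main() {
    let (dest, rest) = split_first_arg(r#"Vec::<(u8, u8)>::new(), "{x}""#);
    assert_eq!(dest, "Vec::<(u8, u8)>::new()");
    assert_eq!(rest, r#" "{x}""#);
}
```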
diff --git a/vendor/indoc/src/lib.rs b/vendor/indoc/src/lib.rs
new file mode 100644
index 000000000..9c200adc7
--- /dev/null
+++ b/vendor/indoc/src/lib.rs
@@ -0,0 +1,403 @@
+//! [![github]](https://github.com/dtolnay/indoc)&ensp;[![crates-io]](https://crates.io/crates/indoc)&ensp;[![docs-rs]](https://docs.rs/indoc)
+//!
+//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
+//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
+//!
+//! <br>
+//!
+//! This crate provides a procedural macro for indented string literals. The
+//! `indoc!()` macro takes a multiline string literal and un-indents it at
+//! compile time so the leftmost non-space character is in the first column.
+//!
+//! ```toml
+//! [dependencies]
+//! indoc = "1.0"
+//! ```
+//!
+//! <br>
+//!
+//! # Using indoc
+//!
+//! ```
+//! use indoc::indoc;
+//!
+//! fn main() {
+//!     let testing = indoc! {"
+//!         def hello():
+//!             print('Hello, world!')
+//!
+//!         hello()
+//!     "};
+//!     let expected = "def hello():\n    print('Hello, world!')\n\nhello()\n";
+//!     assert_eq!(testing, expected);
+//! }
+//! ```
+//!
+//! Indoc also works with raw string literals:
+//!
+//! ```
+//! use indoc::indoc;
+//!
+//! fn main() {
+//!     let testing = indoc! {r#"
+//!         def hello():
+//!             print("Hello, world!")
+//!
+//!         hello()
+//!     "#};
+//!     let expected = "def hello():\n    print(\"Hello, world!\")\n\nhello()\n";
+//!     assert_eq!(testing, expected);
+//! }
+//! ```
+//!
+//! And byte string literals:
+//!
+//! ```
+//! use indoc::indoc;
+//!
+//! fn main() {
+//!     let testing = indoc! {b"
+//!         def hello():
+//!             print('Hello, world!')
+//!
+//!         hello()
+//!     "};
+//!     let expected = b"def hello():\n    print('Hello, world!')\n\nhello()\n";
+//!     assert_eq!(testing[..], expected[..]);
+//! }
+//! ```
+//!
+//! <br><br>
+//!
+//! # Formatting macros
+//!
+//! The indoc crate exports four additional macros to substitute conveniently
+//! for the standard library's formatting macros:
+//!
+//! - `formatdoc!($fmt, ...)`&ensp;&mdash;&ensp;equivalent to `format!(indoc!($fmt), ...)`
+//! - `printdoc!($fmt, ...)`&ensp;&mdash;&ensp;equivalent to `print!(indoc!($fmt), ...)`
+//! - `eprintdoc!($fmt, ...)`&ensp;&mdash;&ensp;equivalent to `eprint!(indoc!($fmt), ...)`
+//! - `writedoc!($dest, $fmt, ...)`&ensp;&mdash;&ensp;equivalent to `write!($dest, indoc!($fmt), ...)`
+//!
+//! ```
+//! use indoc::printdoc;
+//!
+//! fn main() {
+//! printdoc! {"
+//! GET {url}
+//! Accept: {mime}
+//! ",
+//! url = "http://localhost:8080",
+//! mime = "application/json",
+//! }
+//! }
+//! ```
+//!
+//! <br><br>
+//!
+//! # Explanation
+//!
+//! The following rules characterize the behavior of the `indoc!()` macro:
+//!
+//! 1. Count the leading spaces of each line, ignoring the first line and any
+//!    lines that are empty or contain spaces only.
+//! 2. Take the minimum.
+//! 3. If the first line is empty, i.e. the string begins with a newline, remove
+//!    the first line.
+//! 4. Remove the computed number of spaces from the beginning of each line.
+
+#![allow(
+    clippy::module_name_repetitions,
+    clippy::needless_doctest_main,
+    clippy::needless_pass_by_value,
+    clippy::trivially_copy_pass_by_ref,
+    clippy::type_complexity
+)]
+
+mod error;
+mod expr;
+mod unindent;
+
+use crate::error::{Error, Result};
+use crate::expr::Expr;
+use crate::unindent::unindent;
+use proc_macro::token_stream::IntoIter as TokenIter;
+use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::iter::{self, FromIterator};
+use std::str::FromStr;
+
+#[derive(Copy, Clone, PartialEq)]
+enum Macro {
+    Indoc,
+    Format,
+    Print,
+    Eprint,
+    Write,
+}
+
+/// Unindent and produce `&'static str`.
+///
+/// # Example
+///
+/// ```
+/// # use indoc::indoc;
+/// #
+/// // The type of `program` is &'static str
+/// let program = indoc! {"
+/// def hello():
+/// print('Hello, world!')
+///
+/// hello()
+/// "};
+/// print!("{}", program);
+/// ```
+///
+/// ```text
+/// def hello():
+/// print('Hello, world!')
+///
+/// hello()
+/// ```
+#[proc_macro]
+pub fn indoc(input: TokenStream) -> TokenStream {
+ expand(input, Macro::Indoc)
+}
+
+/// Unindent and call `format!`.
+///
+/// Argument syntax is the same as for [`std::format!`].
+///
+/// # Example
+///
+/// ```
+/// # use indoc::formatdoc;
+/// #
+/// let request = formatdoc! {"
+/// GET {url}
+/// Accept: {mime}
+/// ",
+/// url = "http://localhost:8080",
+/// mime = "application/json",
+/// };
+/// println!("{}", request);
+/// ```
+///
+/// ```text
+/// GET http://localhost:8080
+/// Accept: application/json
+/// ```
+#[proc_macro]
+pub fn formatdoc(input: TokenStream) -> TokenStream {
+ expand(input, Macro::Format)
+}
+
+/// Unindent and call `print!`.
+///
+/// Argument syntax is the same as for [`std::print!`].
+///
+/// # Example
+///
+/// ```
+/// # use indoc::printdoc;
+/// #
+/// printdoc! {"
+/// GET {url}
+/// Accept: {mime}
+/// ",
+/// url = "http://localhost:8080",
+/// mime = "application/json",
+/// }
+/// ```
+///
+/// ```text
+/// GET http://localhost:8080
+/// Accept: application/json
+/// ```
+#[proc_macro]
+pub fn printdoc(input: TokenStream) -> TokenStream {
+ expand(input, Macro::Print)
+}
+
+/// Unindent and call `eprint!`.
+///
+/// Argument syntax is the same as for [`std::eprint!`].
+///
+/// # Example
+///
+/// ```
+/// # use indoc::eprintdoc;
+/// #
+/// eprintdoc! {"
+/// GET {url}
+/// Accept: {mime}
+/// ",
+/// url = "http://localhost:8080",
+/// mime = "application/json",
+/// }
+/// ```
+///
+/// ```text
+/// GET http://localhost:8080
+/// Accept: application/json
+/// ```
+#[proc_macro]
+pub fn eprintdoc(input: TokenStream) -> TokenStream {
+ expand(input, Macro::Eprint)
+}
+
+/// Unindent and call `write!`.
+///
+/// Argument syntax is the same as for [`std::write!`].
+///
+/// # Example
+///
+/// ```
+/// # use indoc::writedoc;
+/// # use std::io::Write;
+/// #
+/// let _ = writedoc!(
+///     std::io::stdout(),
+///     "
+///         GET {url}
+///         Accept: {mime}
+///     ",
+///     url = "http://localhost:8080",
+///     mime = "application/json",
+/// );
+/// ```
+///
+/// ```text
+/// GET http://localhost:8080
+/// Accept: application/json
+/// ```
+#[proc_macro]
+pub fn writedoc(input: TokenStream) -> TokenStream {
+    expand(input, Macro::Write)
+}
+
+fn expand(input: TokenStream, mode: Macro) -> TokenStream {
+    match try_expand(input, mode) {
+        Ok(tokens) => tokens,
+        Err(err) => err.to_compile_error(),
+    }
+}
+
+fn try_expand(input: TokenStream, mode: Macro) -> Result<TokenStream> {
+    let mut input = input.into_iter();
+
+    let prefix = if mode == Macro::Write {
+        Some(expr::parse(&mut input)?)
+    } else {
+        None
+    };
+
+    let first = input.next().ok_or_else(|| {
+        Error::new(
+            Span::call_site(),
+            "unexpected end of macro invocation, expected format string",
+        )
+    })?;
+
+    let unindented_lit = lit_indoc(first, mode)?;
+
+    let macro_name = match mode {
+        Macro::Indoc => {
+            require_empty_or_trailing_comma(&mut input)?;
+            return Ok(TokenStream::from(TokenTree::Literal(unindented_lit)));
+        }
+        Macro::Format => "format",
+        Macro::Print => "print",
+        Macro::Eprint => "eprint",
+        Macro::Write => "write",
+    };
+
+    // #macro_name! { #unindented_lit #args }
+    Ok(TokenStream::from_iter(vec![
+        TokenTree::Ident(Ident::new(macro_name, Span::call_site())),
+        TokenTree::Punct(Punct::new('!', Spacing::Alone)),
+        TokenTree::Group(Group::new(
+            Delimiter::Brace,
+            prefix
+                .map_or_else(TokenStream::new, Expr::into_tokens)
+                .into_iter()
+                .chain(iter::once(TokenTree::Literal(unindented_lit)))
+                .chain(input)
+                .collect(),
+        )),
+    ]))
+}
+
+fn lit_indoc(token: TokenTree, mode: Macro) -> Result<Literal> {
+    let span = token.span();
+    let mut single_token = Some(token);
+
+    while let Some(TokenTree::Group(group)) = single_token {
+        single_token = if group.delimiter() == Delimiter::None {
+            let mut token_iter = group.stream().into_iter();
+            token_iter.next().xor(token_iter.next())
+        } else {
+            None
+        };
+    }
+
+    let single_token =
+        single_token.ok_or_else(|| Error::new(span, "argument must be a single string literal"))?;
+
+    let repr = single_token.to_string();
+    let is_string = repr.starts_with('"') || repr.starts_with('r');
+    let is_byte_string = repr.starts_with("b\"") || repr.starts_with("br");
+
+    if !is_string && !is_byte_string {
+        return Err(Error::new(span, "argument must be a single string literal"));
+    }
+
+    if is_byte_string && mode != Macro::Indoc {
+        return Err(Error::new(
+            span,
+            "byte strings are not supported in formatting macros",
+        ));
+    }
+
+    let begin = repr.find('"').unwrap() + 1;
+    let end = repr.rfind('"').unwrap();
+    let repr = format!(
+        "{open}{content}{close}",
+        open = &repr[..begin],
+        content = unindent(&repr[begin..end]),
+        close = &repr[end..],
+    );
+
+    match TokenStream::from_str(&repr)
+        .unwrap()
+        .into_iter()
+        .next()
+        .unwrap()
+    {
+        TokenTree::Literal(mut lit) => {
+            lit.set_span(span);
+            Ok(lit)
+        }
+        _ => unreachable!(),
+    }
+}
+
+fn require_empty_or_trailing_comma(input: &mut TokenIter) -> Result<()> {
+    let first = match input.next() {
+        Some(TokenTree::Punct(punct)) if punct.as_char() == ',' => match input.next() {
+            Some(second) => second,
+            None => return Ok(()),
+        },
+        Some(first) => first,
+        None => return Ok(()),
+    };
+    let last = input.last();
+
+    let begin_span = first.span();
+    let end_span = last.as_ref().map_or(begin_span, TokenTree::span);
+    let msg = format!(
+        "unexpected {token} in macro invocation; indoc argument must be a single string literal",
+        token = if last.is_some() { "tokens" } else { "token" }
+    );
+    Err(Error::new2(begin_span, end_span, &msg))
+}
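
The four-step procedure in lib.rs's "Explanation" section is easiest to see on a concrete input. A minimal usage sketch against the published `indoc` API, using only behavior documented above:

```rust
use indoc::indoc;

fn main() {
    // Lines after the first are indented by 8 and 12 spaces; whitespace-only
    // lines are ignored when taking the minimum (rules 1-2), the leading
    // blank first line is dropped (rule 3), and 8 spaces are stripped from
    // every remaining line (rule 4).
    let s = indoc! {"
        if true:
            print('hi')
    "};
    assert_eq!(s, "if true:\n    print('hi')\n");
}
```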
diff --git a/vendor/indoc/src/unindent.rs b/vendor/indoc/src/unindent.rs
new file mode 100644
index 000000000..11d19d222
--- /dev/null
+++ b/vendor/indoc/src/unindent.rs
@@ -0,0 +1,131 @@
+use std::iter::Peekable;
+use std::slice::Split;
+
+pub fn unindent(s: &str) -> String {
+    let bytes = s.as_bytes();
+    let unindented = unindent_bytes(bytes);
+    String::from_utf8(unindented).unwrap()
+}
+
+// Compute the maximal number of spaces that can be removed from every line, and
+// remove them.
+pub fn unindent_bytes(s: &[u8]) -> Vec<u8> {
+    // Document may start either on the same line as opening quote or
+    // on the next line
+    let ignore_first_line = s.starts_with(b"\n") || s.starts_with(b"\r\n");
+
+    // Largest number of spaces that can be removed from every
+    // non-whitespace-only line after the first
+    let spaces = s
+        .lines()
+        .skip(1)
+        .filter_map(count_spaces)
+        .min()
+        .unwrap_or(0);
+
+    let mut result = Vec::with_capacity(s.len());
+    for (i, line) in s.lines().enumerate() {
+        if i > 1 || (i == 1 && !ignore_first_line) {
+            result.push(b'\n');
+        }
+        if i == 0 {
+            // Do not un-indent anything on same line as opening quote
+            result.extend_from_slice(line);
+        } else if line.len() > spaces {
+            // Whitespace-only lines may have fewer than the number of spaces
+            // being removed
+            result.extend_from_slice(&line[spaces..]);
+        }
+    }
+    result
+}
+
+pub trait Unindent {
+    type Output;
+
+    fn unindent(&self) -> Self::Output;
+}
+
+impl Unindent for str {
+    type Output = String;
+
+    fn unindent(&self) -> Self::Output {
+        unindent(self)
+    }
+}
+
+impl Unindent for String {
+    type Output = String;
+
+    fn unindent(&self) -> Self::Output {
+        unindent(self)
+    }
+}
+
+impl Unindent for [u8] {
+    type Output = Vec<u8>;
+
+    fn unindent(&self) -> Self::Output {
+        unindent_bytes(self)
+    }
+}
+
+impl<'a, T: ?Sized + Unindent> Unindent for &'a T {
+    type Output = T::Output;
+
+    fn unindent(&self) -> Self::Output {
+        (**self).unindent()
+    }
+}
+
+// Number of leading spaces in the line, or None if the line is entirely spaces.
+fn count_spaces(line: &[u8]) -> Option<usize> {
+    for (i, ch) in line.iter().enumerate() {
+        if *ch != b' ' && *ch != b'\t' {
+            return Some(i);
+        }
+    }
+    None
+}
+
+// Based on core::str::StrExt.
+trait BytesExt {
+    fn lines(&self) -> Lines;
+}
+
+impl BytesExt for [u8] {
+    fn lines(&self) -> Lines {
+        fn is_newline(b: &u8) -> bool {
+            *b == b'\n'
+        }
+        let bytestring = if self.starts_with(b"\r\n") {
+            &self[1..]
+        } else {
+            self
+        };
+        Lines {
+            split: bytestring.split(is_newline as fn(&u8) -> bool).peekable(),
+        }
+    }
+}
+
+struct Lines<'a> {
+    split: Peekable<Split<'a, u8, fn(&u8) -> bool>>,
+}
+
+impl<'a> Iterator for Lines<'a> {
+    type Item = &'a [u8];
+
+    fn next(&mut self) -> Option<Self::Item> {
+        match self.split.next() {
+            None => None,
+            Some(fragment) => {
+                if fragment.is_empty() && self.split.peek().is_none() {
+                    None
+                } else {
+                    Some(fragment)
+                }
+            }
+        }
+    }
+}
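
Within the proc-macro crate the `Unindent` trait above is internal (a proc-macro crate can only export macros); lib.rs only uses the free `unindent` function, from `lit_indoc`. A minimal sketch of the same algorithm over `&str` (illustrative only; it omits the `\r\n` and byte-string handling of the real code):

```rust
// Illustrative sketch, not indoc's public API: the same minimum-indent
// computation as unindent_bytes. The first line is ignored when taking the
// minimum, text on the opening-quote line is never unindented, and
// whitespace-only lines collapse to empty lines.
fn unindent(s: &str) -> String {
    // A leading newline means the content starts on the line after the quote.
    let ignore_first_line = s.starts_with('\n') || s.starts_with("\r\n");

    // Smallest number of leading spaces over non-blank lines after the first.
    let spaces = s
        .lines()
        .skip(1)
        .filter(|line| !line.chars().all(|c| c == ' ' || c == '\t'))
        .map(|line| line.chars().take_while(|&c| c == ' ').count())
        .min()
        .unwrap_or(0);

    let mut out = String::new();
    for (i, line) in s.lines().enumerate() {
        if i > 1 || (i == 1 && !ignore_first_line) {
            out.push('\n');
        }
        if i == 0 {
            out.push_str(line); // opening-quote line keeps its indentation
        } else if line.len() > spaces {
            out.push_str(&line[spaces..]);
        }
    }
    out
}

fn main() {
    assert_eq!(unindent("\n    a\n      b\n    "), "a\n  b\n");
}
```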