summaryrefslogtreecommitdiffstats
path: root/vendor/pest/src/iterators
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 12:02:58 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 12:02:58 +0000
commit698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree173a775858bd501c378080a10dca74132f05bc50 /vendor/pest/src/iterators
parentInitial commit. (diff)
downloadrustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.tar.xz
rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.zip
Adding upstream version 1.64.0+dfsg1.upstream/1.64.0+dfsg1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/pest/src/iterators')
-rw-r--r--vendor/pest/src/iterators/flat_pairs.rs177
-rw-r--r--vendor/pest/src/iterators/mod.rs22
-rw-r--r--vendor/pest/src/iterators/pair.rs389
-rw-r--r--vendor/pest/src/iterators/pairs.rs419
-rw-r--r--vendor/pest/src/iterators/queueable_token.rs27
-rw-r--r--vendor/pest/src/iterators/tokens.rs144
6 files changed, 1178 insertions, 0 deletions
diff --git a/vendor/pest/src/iterators/flat_pairs.rs b/vendor/pest/src/iterators/flat_pairs.rs
new file mode 100644
index 000000000..bb5bbb185
--- /dev/null
+++ b/vendor/pest/src/iterators/flat_pairs.rs
@@ -0,0 +1,177 @@
+// pest. The Elegant Parser
+// Copyright (c) 2018 Dragoș Tiselice
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
+use std::fmt;
+use std::rc::Rc;
+
+use super::pair::{self, Pair};
+use super::queueable_token::QueueableToken;
+use super::tokens::{self, Tokens};
+use RuleType;
+
+/// An iterator over [`Pair`]s. It is created by [`Pairs::flatten`].
+///
+/// [`Pair`]: struct.Pair.html
+/// [`Pairs::flatten`]: struct.Pairs.html#method.flatten
+pub struct FlatPairs<'i, R> {
+ /// # Safety
+ ///
+ /// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
+ queue: Rc<Vec<QueueableToken<R>>>,
+ input: &'i str,
+ // Token-queue index from which forward iteration resumes.
+ start: usize,
+ // Exclusive token-queue index bounding iteration from the back.
+ end: usize,
+}
+
+/// # Safety
+///
+/// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
+// Crate-internal constructor (called from `Pairs::flatten`); `unsafe` because
+// the caller must uphold the character-boundary invariant stated above.
+pub unsafe fn new<R: RuleType>(
+ queue: Rc<Vec<QueueableToken<R>>>,
+ input: &str,
+ start: usize,
+ end: usize,
+) -> FlatPairs<R> {
+ FlatPairs {
+ queue,
+ input,
+ start,
+ end,
+ }
+}
+
+impl<'i, R: RuleType> FlatPairs<'i, R> {
+ /// Returns the `Tokens` for these pairs.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// a
+ /// }
+ ///
+ /// let input = "";
+ /// let pairs = pest::state(input, |state| {
+ /// // generating Token pair with Rule::a ...
+ /// # state.rule(Rule::a, |s| Ok(s))
+ /// }).unwrap();
+ /// let tokens: Vec<_> = pairs.flatten().tokens().collect();
+ ///
+ /// assert_eq!(tokens.len(), 2);
+ /// ```
+ #[inline]
+ pub fn tokens(self) -> Tokens<'i, R> {
+ tokens::new(self.queue, self.input, self.start, self.end)
+ }
+
+ // Advances `start` past the current token up to the next `Start` token,
+ // skipping `End` tokens so each pair is yielded exactly once.
+ fn next_start(&mut self) {
+ self.start += 1;
+
+ while self.start < self.end && !self.is_start(self.start) {
+ self.start += 1;
+ }
+ }
+
+ // Moves `end` backwards onto the previous `Start` token, for reverse
+ // iteration. Callers (`next_back`) check `end > start` before calling.
+ fn next_start_from_end(&mut self) {
+ self.end -= 1;
+
+ while self.end >= self.start && !self.is_start(self.end) {
+ self.end -= 1;
+ }
+ }
+
+ // Whether the queue entry at `index` opens a pair.
+ fn is_start(&self, index: usize) -> bool {
+ match self.queue[index] {
+ QueueableToken::Start { .. } => true,
+ QueueableToken::End { .. } => false,
+ }
+ }
+}
+
+impl<'i, R: RuleType> Iterator for FlatPairs<'i, R> {
+ type Item = Pair<'i, R>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.start >= self.end {
+ return None;
+ }
+
+ // SAFETY: this struct carries the same boundary invariant that
+ // `pair::new` requires on `queue`/`input`.
+ let pair = unsafe { pair::new(Rc::clone(&self.queue), self.input, self.start) };
+
+ self.next_start();
+
+ Some(pair)
+ }
+}
+
+impl<'i, R: RuleType> DoubleEndedIterator for FlatPairs<'i, R> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if self.end <= self.start {
+ return None;
+ }
+
+ // Move `end` onto the previous `Start` token, then yield the pair there.
+ self.next_start_from_end();
+
+ // SAFETY: same invariant as in `next` above.
+ let pair = unsafe { pair::new(Rc::clone(&self.queue), self.input, self.end) };
+
+ Some(pair)
+ }
+}
+
+impl<'i, R: RuleType> fmt::Debug for FlatPairs<'i, R> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Materializes the remaining pairs from a clone, so formatting does
+ // not advance this iterator.
+ f.debug_struct("FlatPairs")
+ .field("pairs", &self.clone().collect::<Vec<_>>())
+ .finish()
+ }
+}
+
+// Manual `Clone`: the token queue is shared via `Rc`, so a clone is a cheap
+// refcount bump plus three `usize`/`&str` copies.
+impl<'i, R: Clone> Clone for FlatPairs<'i, R> {
+ fn clone(&self) -> FlatPairs<'i, R> {
+ FlatPairs {
+ queue: Rc::clone(&self.queue),
+ input: self.input,
+ start: self.start,
+ end: self.end,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::super::macros::tests::*;
+ use super::super::super::Parser;
+
+ #[test]
+ fn iter_for_flat_pairs() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+
+ assert_eq!(
+ pairs.flatten().map(|p| p.as_rule()).collect::<Vec<Rule>>(),
+ vec![Rule::a, Rule::b, Rule::c]
+ );
+ }
+
+ #[test]
+ fn double_ended_iter_for_flat_pairs() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+ assert_eq!(
+ pairs
+ .flatten()
+ .rev()
+ .map(|p| p.as_rule())
+ .collect::<Vec<Rule>>(),
+ vec![Rule::c, Rule::b, Rule::a]
+ );
+ }
+}
diff --git a/vendor/pest/src/iterators/mod.rs b/vendor/pest/src/iterators/mod.rs
new file mode 100644
index 000000000..1a7896371
--- /dev/null
+++ b/vendor/pest/src/iterators/mod.rs
@@ -0,0 +1,22 @@
+// pest. The Elegant Parser
+// Copyright (c) 2018 Dragoș Tiselice
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
+//! Types and iterators for parser output.
+
+// Implementation modules stay private; only the iterator types below are
+// re-exported. `QueueableToken` is crate-internal plumbing.
+mod flat_pairs;
+mod pair;
+pub(crate) mod pairs;
+mod queueable_token;
+mod tokens;
+
+pub use self::flat_pairs::FlatPairs;
+pub use self::pair::Pair;
+pub use self::pairs::Pairs;
+pub(crate) use self::queueable_token::QueueableToken;
+pub use self::tokens::Tokens;
diff --git a/vendor/pest/src/iterators/pair.rs b/vendor/pest/src/iterators/pair.rs
new file mode 100644
index 000000000..88844a353
--- /dev/null
+++ b/vendor/pest/src/iterators/pair.rs
@@ -0,0 +1,389 @@
+// pest. The Elegant Parser
+// Copyright (c) 2018 Dragoș Tiselice
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::ptr;
+use std::rc::Rc;
+use std::str;
+
+#[cfg(feature = "pretty-print")]
+use serde::ser::SerializeStruct;
+
+use super::pairs::{self, Pairs};
+use super::queueable_token::QueueableToken;
+use super::tokens::{self, Tokens};
+use span::{self, Span};
+use RuleType;
+
+/// A matching pair of [`Token`]s and everything between them.
+///
+/// A matching `Token` pair is formed by a `Token::Start` and a subsequent `Token::End` with the
+/// same `Rule`, with the condition that all `Token`s between them can form such pairs as well.
+/// This is similar to the [brace matching problem](https://en.wikipedia.org/wiki/Brace_matching) in
+/// editors.
+///
+/// [`Token`]: ../enum.Token.html
+#[derive(Clone)]
+pub struct Pair<'i, R> {
+ /// # Safety
+ ///
+ /// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
+ queue: Rc<Vec<QueueableToken<R>>>,
+ input: &'i str,
+ /// Token index into `queue`.
+ start: usize,
+}
+
+/// # Safety
+///
+/// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
+// Crate-internal constructor used by the iterator types. `start` must index a
+// `QueueableToken::Start` entry (its stored end index locates the match).
+pub unsafe fn new<R: RuleType>(
+ queue: Rc<Vec<QueueableToken<R>>>,
+ input: &str,
+ start: usize,
+) -> Pair<R> {
+ Pair {
+ queue,
+ input,
+ start,
+ }
+}
+
+impl<'i, R: RuleType> Pair<'i, R> {
+ /// Returns the `Rule` of the `Pair`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// a
+ /// }
+ ///
+ /// let input = "";
+ /// let pair = pest::state(input, |state| {
+ /// // generating Token pair with Rule::a ...
+ /// # state.rule(Rule::a, |s| Ok(s))
+ /// }).unwrap().next().unwrap();
+ ///
+ /// assert_eq!(pair.as_rule(), Rule::a);
+ /// ```
+ #[inline]
+ pub fn as_rule(&self) -> R {
+ // The rule is stored only on the `End` token; read it through the
+ // matching-end index kept in this pair's `Start` token.
+ match self.queue[self.pair()] {
+ QueueableToken::End { rule, .. } => rule,
+ _ => unreachable!(),
+ }
+ }
+
+ /// Captures a slice from the `&str` defined by the token `Pair`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// ab
+ /// }
+ ///
+ /// let input = "ab";
+ /// let pair = pest::state(input, |state| {
+ /// // generating Token pair with Rule::ab ...
+ /// # state.rule(Rule::ab, |s| s.match_string("ab"))
+ /// }).unwrap().next().unwrap();
+ ///
+ /// assert_eq!(pair.as_str(), "ab");
+ /// ```
+ #[inline]
+ pub fn as_str(&self) -> &'i str {
+ let start = self.pos(self.start);
+ let end = self.pos(self.pair());
+
+ // Generated positions always come from Positions and are UTF-8 borders.
+ &self.input[start..end]
+ }
+
+ /// Returns the `Span` defined by the `Pair`, consuming it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// ab
+ /// }
+ ///
+ /// let input = "ab";
+ /// let pair = pest::state(input, |state| {
+ /// // generating Token pair with Rule::ab ...
+ /// # state.rule(Rule::ab, |s| s.match_string("ab"))
+ /// }).unwrap().next().unwrap();
+ ///
+ /// assert_eq!(pair.into_span().as_str(), "ab");
+ /// ```
+ #[inline]
+ #[deprecated(since = "2.0.0", note = "Please use `as_span` instead")]
+ pub fn into_span(self) -> Span<'i> {
+ self.as_span()
+ }
+
+ /// Returns the `Span` defined by the `Pair`, **without** consuming it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// ab
+ /// }
+ ///
+ /// let input = "ab";
+ /// let pair = pest::state(input, |state| {
+ /// // generating Token pair with Rule::ab ...
+ /// # state.rule(Rule::ab, |s| s.match_string("ab"))
+ /// }).unwrap().next().unwrap();
+ ///
+ /// assert_eq!(pair.as_span().as_str(), "ab");
+ /// ```
+ #[inline]
+ pub fn as_span(&self) -> Span<'i> {
+ let start = self.pos(self.start);
+ let end = self.pos(self.pair());
+
+ // Generated positions always come from Positions and are UTF-8 borders.
+ unsafe { span::Span::new_unchecked(self.input, start, end) }
+ }
+
+ /// Returns the inner `Pairs` between the `Pair`, consuming it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// a
+ /// }
+ ///
+ /// let input = "";
+ /// let pair = pest::state(input, |state| {
+ /// // generating Token pair with Rule::a ...
+ /// # state.rule(Rule::a, |s| Ok(s))
+ /// }).unwrap().next().unwrap();
+ ///
+ /// assert!(pair.into_inner().next().is_none());
+ /// ```
+ #[inline]
+ pub fn into_inner(self) -> Pairs<'i, R> {
+ let pair = self.pair();
+
+ // `start + 1` skips this pair's own `Start` token; the matching `End`
+ // index is the exclusive bound, so only children are covered.
+ pairs::new(self.queue, self.input, self.start + 1, pair)
+ }
+
+ /// Returns the `Tokens` for the `Pair`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// a
+ /// }
+ ///
+ /// let input = "";
+ /// let pair = pest::state(input, |state| {
+ /// // generating Token pair with Rule::a ...
+ /// # state.rule(Rule::a, |s| Ok(s))
+ /// }).unwrap().next().unwrap();
+ /// let tokens: Vec<_> = pair.tokens().collect();
+ ///
+ /// assert_eq!(tokens.len(), 2);
+ /// ```
+ #[inline]
+ pub fn tokens(self) -> Tokens<'i, R> {
+ let end = self.pair();
+
+ // `end + 1` so the pair's own `End` token is included in the range.
+ tokens::new(self.queue, self.input, self.start, end + 1)
+ }
+
+ /// Generates a string that stores the lexical information of `self` in
+ /// a pretty-printed JSON format.
+ #[cfg(feature = "pretty-print")]
+ pub fn to_json(&self) -> String {
+ ::serde_json::to_string_pretty(self).expect("Failed to pretty-print Pair to json.")
+ }
+
+ // Index of the matching `End` token, found in O(1) via the index stored
+ // on the `Start` token at parse time.
+ fn pair(&self) -> usize {
+ match self.queue[self.start] {
+ QueueableToken::Start {
+ end_token_index, ..
+ } => end_token_index,
+ _ => unreachable!(),
+ }
+ }
+
+ // Byte offset into `input` recorded for the queue entry at `index`.
+ fn pos(&self, index: usize) -> usize {
+ match self.queue[index] {
+ QueueableToken::Start { input_pos, .. } | QueueableToken::End { input_pos, .. } => {
+ input_pos
+ }
+ }
+ }
+}
+
+impl<'i, R: RuleType> Pairs<'i, R> {
+ /// Create a new `Pairs` iterator containing just the single `Pair`.
+ pub fn single(pair: Pair<'i, R>) -> Self {
+ // The matching `End` token's index is an exclusive bound that still
+ // admits this pair's `Start`, so iteration yields exactly one pair.
+ let end = pair.pair();
+ pairs::new(pair.queue, pair.input, pair.start, end)
+ }
+}
+
+impl<'i, R: RuleType> fmt::Debug for Pair<'i, R> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Recursively formats children by cloning; does not consume `self`.
+ f.debug_struct("Pair")
+ .field("rule", &self.as_rule())
+ .field("span", &self.as_span())
+ .field("inner", &self.clone().into_inner().collect::<Vec<_>>())
+ .finish()
+ }
+}
+
+// Renders `rule(start, end)` for leaves and `rule(start, end, [children])`
+// for pairs with inner pairs.
+impl<'i, R: RuleType> fmt::Display for Pair<'i, R> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let rule = self.as_rule();
+ let start = self.pos(self.start);
+ let end = self.pos(self.pair());
+ let mut pairs = self.clone().into_inner().peekable();
+
+ if pairs.peek().is_none() {
+ write!(f, "{:?}({}, {})", rule, start, end)
+ } else {
+ write!(
+ f,
+ "{:?}({}, {}, [{}])",
+ rule,
+ start,
+ end,
+ pairs
+ .map(|pair| format!("{}", pair))
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ }
+ }
+}
+
+// NOTE: equality is identity-based — same queue allocation (`Rc::ptr_eq`),
+// same input pointer, same token index — not structural equality.
+impl<'i, R: PartialEq> PartialEq for Pair<'i, R> {
+ fn eq(&self, other: &Pair<'i, R>) -> bool {
+ Rc::ptr_eq(&self.queue, &other.queue)
+ && ptr::eq(self.input, other.input)
+ && self.start == other.start
+ }
+}
+
+impl<'i, R: Eq> Eq for Pair<'i, R> {}
+
+// Hashes the same pointers/index that `PartialEq` compares, keeping the
+// `Hash`/`Eq` contract consistent.
+impl<'i, R: Hash> Hash for Pair<'i, R> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ (&*self.queue as *const Vec<QueueableToken<R>>).hash(state);
+ (self.input as *const str).hash(state);
+ self.start.hash(state);
+ }
+}
+
+#[cfg(feature = "pretty-print")]
+impl<'i, R: RuleType> ::serde::Serialize for Pair<'i, R> {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: ::serde::Serializer,
+ {
+ let start = self.pos(self.start);
+ let end = self.pos(self.pair());
+ let rule = format!("{:?}", self.as_rule());
+ let inner = self.clone().into_inner();
+
+ let mut ser = serializer.serialize_struct("Pairs", 3)?;
+ ser.serialize_field("pos", &(start, end))?;
+ ser.serialize_field("rule", &rule)?;
+
+ // Leaf pairs serialize their matched text; non-leaf pairs serialize
+ // the nested `Pairs` recursively.
+ if inner.peek().is_none() {
+ ser.serialize_field("inner", &self.as_str())?;
+ } else {
+ ser.serialize_field("inner", &inner)?;
+ }
+
+ ser.end()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use macros::tests::*;
+ use parser::Parser;
+
+ #[test]
+ #[cfg(feature = "pretty-print")]
+ fn test_pretty_print() {
+ let pair = AbcParser::parse(Rule::a, "abcde").unwrap().next().unwrap();
+
+ let expected = r#"{
+ "pos": [
+ 0,
+ 3
+ ],
+ "rule": "a",
+ "inner": {
+ "pos": [
+ 1,
+ 2
+ ],
+ "pairs": [
+ {
+ "pos": [
+ 1,
+ 2
+ ],
+ "rule": "b",
+ "inner": "b"
+ }
+ ]
+ }
+}"#;
+
+ assert_eq!(expected, pair.to_json());
+ }
+
+ #[test]
+ fn pair_into_inner() {
+ let pair = AbcParser::parse(Rule::a, "abcde").unwrap().next().unwrap(); // the tokens a(b())
+
+ let pairs = pair.into_inner(); // the tokens b()
+
+ assert_eq!(2, pairs.tokens().count());
+ }
+}
diff --git a/vendor/pest/src/iterators/pairs.rs b/vendor/pest/src/iterators/pairs.rs
new file mode 100644
index 000000000..abae123e1
--- /dev/null
+++ b/vendor/pest/src/iterators/pairs.rs
@@ -0,0 +1,419 @@
+// pest. The Elegant Parser
+// Copyright (c) 2018 Dragoș Tiselice
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::ptr;
+use std::rc::Rc;
+use std::str;
+
+#[cfg(feature = "pretty-print")]
+use serde::ser::SerializeStruct;
+
+use super::flat_pairs::{self, FlatPairs};
+use super::pair::{self, Pair};
+use super::queueable_token::QueueableToken;
+use super::tokens::{self, Tokens};
+use RuleType;
+
+/// An iterator over [`Pair`]s. It is created by [`pest::state`] and [`Pair::into_inner`].
+///
+/// [`Pair`]: struct.Pair.html
+/// [`pest::state`]: ../fn.state.html
+/// [`Pair::into_inner`]: struct.Pair.html#method.into_inner
+#[derive(Clone)]
+pub struct Pairs<'i, R> {
+ queue: Rc<Vec<QueueableToken<R>>>,
+ input: &'i str,
+ // Token-queue index of the next pair's `Start` token.
+ start: usize,
+ // Exclusive token-queue bound for this iterator.
+ end: usize,
+}
+
+// Crate-internal constructor. Unlike `flat_pairs::new`/`pair::new` this is not
+// marked `unsafe`; callers pass positions produced by the parser itself.
+pub fn new<R: RuleType>(
+ queue: Rc<Vec<QueueableToken<R>>>,
+ input: &str,
+ start: usize,
+ end: usize,
+) -> Pairs<R> {
+ Pairs {
+ queue,
+ input,
+ start,
+ end,
+ }
+}
+
+impl<'i, R: RuleType> Pairs<'i, R> {
+ /// Captures a slice from the `&str` defined by the starting position of the first token `Pair`
+ /// and the ending position of the last token `Pair` of the `Pairs`. This also captures
+ /// the input between those two token `Pair`s.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// a,
+ /// b
+ /// }
+ ///
+ /// let input = "a b";
+ /// let pairs = pest::state(input, |state| {
+ /// // generating Token pairs with Rule::a and Rule::b ...
+ /// # state.rule(Rule::a, |s| s.match_string("a")).and_then(|s| s.skip(1))
+ /// # .and_then(|s| s.rule(Rule::b, |s| s.match_string("b")))
+ /// }).unwrap();
+ ///
+ /// assert_eq!(pairs.as_str(), "a b");
+ /// ```
+ #[inline]
+ pub fn as_str(&self) -> &'i str {
+ // Empty ranges yield "" rather than indexing the queue out of range.
+ if self.start < self.end {
+ let start = self.pos(self.start);
+ let end = self.pos(self.end - 1);
+ // Generated positions always come from Positions and are UTF-8 borders.
+ &self.input[start..end]
+ } else {
+ ""
+ }
+ }
+
+ /// Captures inner token `Pair`s and concatenates resulting `&str`s. This does not capture
+ /// the input between token `Pair`s.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// a,
+ /// b
+ /// }
+ ///
+ /// let input = "a b";
+ /// let pairs = pest::state(input, |state| {
+ /// // generating Token pairs with Rule::a and Rule::b ...
+ /// # state.rule(Rule::a, |s| s.match_string("a")).and_then(|s| s.skip(1))
+ /// # .and_then(|s| s.rule(Rule::b, |s| s.match_string("b")))
+ /// }).unwrap();
+ ///
+ /// assert_eq!(pairs.concat(), "ab");
+ /// ```
+ #[inline]
+ pub fn concat(&self) -> String {
+ // Iterates a clone so `self`'s position is untouched.
+ self.clone()
+ .fold(String::new(), |string, pair| string + pair.as_str())
+ }
+
+ /// Flattens the `Pairs`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// a,
+ /// b
+ /// }
+ ///
+ /// let input = "";
+ /// let pairs = pest::state(input, |state| {
+ /// // generating nested Token pair with Rule::b inside Rule::a
+ /// # state.rule(Rule::a, |state| {
+ /// # state.rule(Rule::b, |s| Ok(s))
+ /// # })
+ /// }).unwrap();
+ /// let tokens: Vec<_> = pairs.flatten().tokens().collect();
+ ///
+ /// assert_eq!(tokens.len(), 4);
+ /// ```
+ #[inline]
+ pub fn flatten(self) -> FlatPairs<'i, R> {
+ // SAFETY: forwards the boundary invariant this `Pairs` already holds.
+ unsafe { flat_pairs::new(self.queue, self.input, self.start, self.end) }
+ }
+
+ /// Returns the `Tokens` for the `Pairs`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use std::rc::Rc;
+ /// # use pest;
+ /// # #[allow(non_camel_case_types)]
+ /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ /// enum Rule {
+ /// a
+ /// }
+ ///
+ /// let input = "";
+ /// let pairs = pest::state(input, |state| {
+ /// // generating Token pair with Rule::a ...
+ /// # state.rule(Rule::a, |s| Ok(s))
+ /// }).unwrap();
+ /// let tokens: Vec<_> = pairs.tokens().collect();
+ ///
+ /// assert_eq!(tokens.len(), 2);
+ /// ```
+ #[inline]
+ pub fn tokens(self) -> Tokens<'i, R> {
+ tokens::new(self.queue, self.input, self.start, self.end)
+ }
+
+ /// Peek at the first inner `Pair` without changing the position of this iterator.
+ #[inline]
+ pub fn peek(&self) -> Option<Pair<'i, R>> {
+ if self.start < self.end {
+ Some(unsafe { pair::new(Rc::clone(&self.queue), self.input, self.start) })
+ } else {
+ None
+ }
+ }
+
+ /// Generates a string that stores the lexical information of `self` in
+ /// a pretty-printed JSON format.
+ #[cfg(feature = "pretty-print")]
+ pub fn to_json(&self) -> String {
+ ::serde_json::to_string_pretty(self).expect("Failed to pretty-print Pairs to json.")
+ }
+
+ // Index of the `End` token matching the pair that begins at `start`,
+ // read in O(1) from the `Start` entry.
+ fn pair(&self) -> usize {
+ match self.queue[self.start] {
+ QueueableToken::Start {
+ end_token_index, ..
+ } => end_token_index,
+ _ => unreachable!(),
+ }
+ }
+
+ // Index of the `Start` token matching the last pair in range (the `End`
+ // entry just before `end`); used by reverse iteration.
+ fn pair_from_end(&self) -> usize {
+ match self.queue[self.end - 1] {
+ QueueableToken::End {
+ start_token_index, ..
+ } => start_token_index,
+ _ => unreachable!(),
+ }
+ }
+
+ // Byte offset into `input` recorded for the queue entry at `index`.
+ fn pos(&self, index: usize) -> usize {
+ match self.queue[index] {
+ QueueableToken::Start { input_pos, .. } | QueueableToken::End { input_pos, .. } => {
+ input_pos
+ }
+ }
+ }
+}
+
+impl<'i, R: RuleType> Iterator for Pairs<'i, R> {
+ type Item = Pair<'i, R>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let pair = self.peek()?;
+ // Jump past this pair's matching `End` token, skipping its children,
+ // so only sibling pairs are yielded.
+ self.start = self.pair() + 1;
+ Some(pair)
+ }
+}
+
+impl<'i, R: RuleType> DoubleEndedIterator for Pairs<'i, R> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if self.end <= self.start {
+ return None;
+ }
+
+ // Move `end` onto the last pair's `Start` token and yield that pair.
+ self.end = self.pair_from_end();
+
+ // SAFETY: same boundary invariant as in `peek`/`pair::new`.
+ let pair = unsafe { pair::new(Rc::clone(&self.queue), self.input, self.end) };
+
+ Some(pair)
+ }
+}
+
+impl<'i, R: RuleType> fmt::Debug for Pairs<'i, R> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Formats a clone, leaving this iterator's position untouched.
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+// Renders as a comma-separated list of the `Display` form of each pair.
+impl<'i, R: RuleType> fmt::Display for Pairs<'i, R> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(
+ f,
+ "[{}]",
+ self.clone()
+ .map(|pair| format!("{}", pair))
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ }
+}
+
+// NOTE: identity-based equality (shared queue allocation + input pointer +
+// index range), mirroring `Pair`'s `PartialEq`; not structural.
+impl<'i, R: PartialEq> PartialEq for Pairs<'i, R> {
+ fn eq(&self, other: &Pairs<'i, R>) -> bool {
+ Rc::ptr_eq(&self.queue, &other.queue)
+ && ptr::eq(self.input, other.input)
+ && self.start == other.start
+ && self.end == other.end
+ }
+}
+
+impl<'i, R: Eq> Eq for Pairs<'i, R> {}
+
+// Hashes exactly what `PartialEq` compares, keeping `Hash`/`Eq` consistent.
+impl<'i, R: Hash> Hash for Pairs<'i, R> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ (&*self.queue as *const Vec<QueueableToken<R>>).hash(state);
+ (self.input as *const str).hash(state);
+ self.start.hash(state);
+ self.end.hash(state);
+ }
+}
+
+#[cfg(feature = "pretty-print")]
+impl<'i, R: RuleType> ::serde::Serialize for Pairs<'i, R> {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: ::serde::Serializer,
+ {
+ // NOTE(review): `self.end - 1` underflows when `start == end == 0`
+ // (an empty `Pairs`) — this appears to assume a non-empty range;
+ // confirm against callers before relying on it.
+ let start = self.pos(self.start);
+ let end = self.pos(self.end - 1);
+ let pairs = self.clone().collect::<Vec<_>>();
+
+ let mut ser = serializer.serialize_struct("Pairs", 2)?;
+ ser.serialize_field("pos", &(start, end))?;
+ ser.serialize_field("pairs", &pairs)?;
+ ser.end()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::super::macros::tests::*;
+ use super::super::super::Parser;
+
+ #[test]
+ #[cfg(feature = "pretty-print")]
+ fn test_pretty_print() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+
+ let expected = r#"{
+ "pos": [
+ 0,
+ 5
+ ],
+ "pairs": [
+ {
+ "pos": [
+ 0,
+ 3
+ ],
+ "rule": "a",
+ "inner": {
+ "pos": [
+ 1,
+ 2
+ ],
+ "pairs": [
+ {
+ "pos": [
+ 1,
+ 2
+ ],
+ "rule": "b",
+ "inner": "b"
+ }
+ ]
+ }
+ },
+ {
+ "pos": [
+ 4,
+ 5
+ ],
+ "rule": "c",
+ "inner": "e"
+ }
+ ]
+}"#;
+
+ assert_eq!(expected, pairs.to_json());
+ }
+
+ #[test]
+ fn as_str() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+
+ assert_eq!(pairs.as_str(), "abcde");
+ }
+
+ #[test]
+ fn as_str_empty() {
+ let mut pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+
+ assert_eq!(pairs.nth(1).unwrap().into_inner().as_str(), "");
+ }
+
+ #[test]
+ fn concat() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+
+ assert_eq!(pairs.concat(), "abce");
+ }
+
+ #[test]
+ fn pairs_debug() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+
+ #[rustfmt::skip]
+ assert_eq!(
+ format!("{:?}", pairs),
+ "[\
+ Pair { rule: a, span: Span { str: \"abc\", start: 0, end: 3 }, inner: [\
+ Pair { rule: b, span: Span { str: \"b\", start: 1, end: 2 }, inner: [] }\
+ ] }, \
+ Pair { rule: c, span: Span { str: \"e\", start: 4, end: 5 }, inner: [] }\
+ ]"
+ .to_owned()
+ );
+ }
+
+ #[test]
+ fn pairs_display() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+
+ assert_eq!(
+ format!("{}", pairs),
+ "[a(0, 3, [b(1, 2)]), c(4, 5)]".to_owned()
+ );
+ }
+
+ #[test]
+ fn iter_for_pairs() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+ assert_eq!(
+ pairs.map(|p| p.as_rule()).collect::<Vec<Rule>>(),
+ vec![Rule::a, Rule::c]
+ );
+ }
+
+ #[test]
+ fn double_ended_iter_for_pairs() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+ assert_eq!(
+ pairs.rev().map(|p| p.as_rule()).collect::<Vec<Rule>>(),
+ vec![Rule::c, Rule::a]
+ );
+ }
+}
diff --git a/vendor/pest/src/iterators/queueable_token.rs b/vendor/pest/src/iterators/queueable_token.rs
new file mode 100644
index 000000000..7d56749bb
--- /dev/null
+++ b/vendor/pest/src/iterators/queueable_token.rs
@@ -0,0 +1,27 @@
+// pest. The Elegant Parser
+// Copyright (c) 2018 Dragoș Tiselice
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
+// This structure serves to improve performance over Token objects in two ways:
+//
+// * it is smaller than a Token, leading to both less memory use when stored in the queue but also
+// increased speed when pushing to the queue
+// * it finds its pair in O(1) time instead of O(N), since pair positions are known at parse time
+// and can easily be stored instead of recomputed
+#[derive(Debug)]
+pub enum QueueableToken<R> {
+ /// Opens a pair. `end_token_index` locates the matching `End` entry in
+ /// O(1); `input_pos` is the byte offset of the match start in the input.
+ Start {
+ end_token_index: usize,
+ input_pos: usize,
+ },
+ /// Closes a pair. Carries the matched `rule` (stored only here, not on
+ /// `Start`), the matching `Start` entry's index, and the end byte offset.
+ End {
+ start_token_index: usize,
+ rule: R,
+ input_pos: usize,
+ },
+}
diff --git a/vendor/pest/src/iterators/tokens.rs b/vendor/pest/src/iterators/tokens.rs
new file mode 100644
index 000000000..59b75c520
--- /dev/null
+++ b/vendor/pest/src/iterators/tokens.rs
@@ -0,0 +1,144 @@
+// pest. The Elegant Parser
+// Copyright (c) 2018 Dragoș Tiselice
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
+use std::fmt;
+use std::rc::Rc;
+use std::str;
+
+use super::queueable_token::QueueableToken;
+use position;
+use token::Token;
+use RuleType;
+
+/// An iterator over [`Token`]s. It is created by [`Pair::tokens`] and [`Pairs::tokens`].
+///
+/// [`Token`]: ../enum.Token.html
+/// [`Pair::tokens`]: struct.Pair.html#method.tokens
+/// [`Pairs::tokens`]: struct.Pairs.html#method.tokens
+#[derive(Clone)]
+pub struct Tokens<'i, R> {
+ /// # Safety:
+ ///
+ /// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
+ queue: Rc<Vec<QueueableToken<R>>>,
+ input: &'i str,
+ start: usize,
+ end: usize,
+}
+
+// TODO(safety): QueueableTokens must be valid indices into input.
+// Debug builds verify that invariant eagerly for the entire queue below.
+pub fn new<R: RuleType>(
+ queue: Rc<Vec<QueueableToken<R>>>,
+ input: &str,
+ start: usize,
+ end: usize,
+) -> Tokens<R> {
+ if cfg!(debug_assertions) {
+ for tok in queue.iter() {
+ match *tok {
+ QueueableToken::Start { input_pos, .. } | QueueableToken::End { input_pos, .. } => {
+ // `str::get` returns `None` for out-of-bounds or
+ // non-char-boundary indices, so one call checks both.
+ assert!(
+ input.get(input_pos..).is_some(),
+ "💥 UNSAFE `Tokens` CREATED 💥"
+ )
+ }
+ }
+ }
+ }
+
+ Tokens {
+ queue,
+ input,
+ start,
+ end,
+ }
+}
+
+impl<'i, R: RuleType> Tokens<'i, R> {
+ // Converts the queue entry at `index` into a public `Token` with a real
+ // `Position` into the input.
+ fn create_token(&self, index: usize) -> Token<'i, R> {
+ match self.queue[index] {
+ QueueableToken::Start {
+ end_token_index,
+ input_pos,
+ } => {
+ // `Start` entries do not store the rule; read it from the
+ // paired `End` entry via the stored index.
+ let rule = match self.queue[end_token_index] {
+ QueueableToken::End { rule, .. } => rule,
+ _ => unreachable!(),
+ };
+
+ Token::Start {
+ rule,
+ // QueueableTokens are safely created.
+ pos: unsafe { position::Position::new_unchecked(self.input, input_pos) },
+ }
+ }
+ QueueableToken::End {
+ rule, input_pos, ..
+ } => {
+ Token::End {
+ rule,
+ // QueueableTokens are safely created.
+ pos: unsafe { position::Position::new_unchecked(self.input, input_pos) },
+ }
+ }
+ }
+ }
+}
+
+impl<'i, R: RuleType> Iterator for Tokens<'i, R> {
+ type Item = Token<'i, R>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.start >= self.end {
+ return None;
+ }
+
+ // Unlike `Pairs`/`FlatPairs`, every token in range is yielded, so
+ // the cursor simply advances by one.
+ let token = self.create_token(self.start);
+
+ self.start += 1;
+
+ Some(token)
+ }
+}
+
+impl<'i, R: RuleType> DoubleEndedIterator for Tokens<'i, R> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if self.end <= self.start {
+ return None;
+ }
+
+ // Yield the token just before the exclusive `end` bound.
+ let token = self.create_token(self.end - 1);
+
+ self.end -= 1;
+
+ Some(token)
+ }
+}
+
+impl<'i, R: RuleType> fmt::Debug for Tokens<'i, R> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Lists remaining tokens from a clone; does not advance this iterator.
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::super::macros::tests::*;
+ use super::super::super::Parser;
+ use super::Token;
+
+ #[test]
+ fn double_ended_iter_for_tokens() {
+ let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
+ let mut tokens = pairs.clone().tokens().collect::<Vec<Token<Rule>>>();
+ tokens.reverse();
+ let reverse_tokens = pairs.tokens().rev().collect::<Vec<Token<Rule>>>();
+ assert_eq!(tokens, reverse_tokens);
+ }
+}