summaryrefslogtreecommitdiffstats
path: root/vendor/pest_generator
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/pest_generator')
-rw-r--r--vendor/pest_generator/.cargo-checksum.json2
-rw-r--r--vendor/pest_generator/Cargo.toml10
-rw-r--r--vendor/pest_generator/_README.md13
-rw-r--r--vendor/pest_generator/src/generator.rs145
-rw-r--r--vendor/pest_generator/src/lib.rs50
5 files changed, 170 insertions, 50 deletions
diff --git a/vendor/pest_generator/.cargo-checksum.json b/vendor/pest_generator/.cargo-checksum.json
index 82b6b9b7f..30e72c2cf 100644
--- a/vendor/pest_generator/.cargo-checksum.json
+++ b/vendor/pest_generator/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"ab9bb3fd1ba288c4e5519cf6b9f1d8fa8b891723c64a10c3eb5c66b78b2bc11b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"b49ff086f7c9e37c8bb432f445168061d74bdaddf713d968b15f5316f940f285","src/docs.rs":"041b2c24377955dfdb6c29991b1f1dd7d7191431d8e5eaa245325253b250f702","src/generator.rs":"0984ca845f4edbf993cdb55c518b26f37c5d243f406e1d553b149ea95d7bce89","src/lib.rs":"46f82cefa053acf431884342b9b9e84d639fe864f018567d9e212ea0aa781c56","src/macros.rs":"897d9004449b1c219f17c079630a790f3de1a27f61bc6a03cd777a163a6a1fba","tests/base.pest":"30f6965031bc52937114f60233a327e41ccc43429ae41a8e40c7b7c8006c466f","tests/test.pest":"f3fea8154a9a26c773ab8392685039d0d84bd845587bb2d42b970946f7967ee8"},"package":"b3e8cba4ec22bada7fc55ffe51e2deb6a0e0db2d0b7ab0b103acc80d2510c190"} \ No newline at end of file
+{"files":{"Cargo.toml":"ad7558bd6b4e3996834c611afe58bcf5b6b0448e8f664def2c1e2cc77bc6eae9","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"bde746653575153be4ae26ce950963ced5034449e352e60cfd8825260c666c16","src/docs.rs":"041b2c24377955dfdb6c29991b1f1dd7d7191431d8e5eaa245325253b250f702","src/generator.rs":"e49e3655da7d0ebc2b1f03dcec47111d469e52164b6e0f904a1b723d9348c921","src/lib.rs":"034624c6d8ad89b365f81ab04ad96a3d67909ba3485008355f21429a42b5e02c","src/macros.rs":"897d9004449b1c219f17c079630a790f3de1a27f61bc6a03cd777a163a6a1fba","tests/base.pest":"30f6965031bc52937114f60233a327e41ccc43429ae41a8e40c7b7c8006c466f","tests/test.pest":"f3fea8154a9a26c773ab8392685039d0d84bd845587bb2d42b970946f7967ee8"},"package":"68bd1206e71118b5356dae5ddc61c8b11e28b09ef6a31acbd15ea48a28e0c227"} \ No newline at end of file
diff --git a/vendor/pest_generator/Cargo.toml b/vendor/pest_generator/Cargo.toml
index dc4fe6cea..08d6e76a7 100644
--- a/vendor/pest_generator/Cargo.toml
+++ b/vendor/pest_generator/Cargo.toml
@@ -11,9 +11,9 @@
[package]
edition = "2021"
-rust-version = "1.60"
+rust-version = "1.61"
name = "pest_generator"
-version = "2.7.0"
+version = "2.7.5"
authors = ["Dragoș Tiselice <dragostiselice@gmail.com>"]
description = "pest code generator"
homepage = "https://pest.rs/"
@@ -24,15 +24,15 @@ keywords = [
"generator",
]
categories = ["parsing"]
-license = "MIT/Apache-2.0"
+license = "MIT OR Apache-2.0"
repository = "https://github.com/pest-parser/pest"
[dependencies.pest]
-version = "2.7.0"
+version = "2.7.5"
default-features = false
[dependencies.pest_meta]
-version = "2.7.0"
+version = "2.7.5"
[dependencies.proc-macro2]
version = "1.0"
diff --git a/vendor/pest_generator/_README.md b/vendor/pest_generator/_README.md
index 2bfc5664a..6d91eafa2 100644
--- a/vendor/pest_generator/_README.md
+++ b/vendor/pest_generator/_README.md
@@ -11,7 +11,7 @@
[![pest Continuous Integration](https://github.com/pest-parser/pest/actions/workflows/ci.yml/badge.svg)](https://github.com/pest-parser/pest/actions/workflows/ci.yml)
[![codecov](https://codecov.io/gh/pest-parser/pest/branch/master/graph/badge.svg)](https://codecov.io/gh/pest-parser/pest)
-<a href="https://blog.rust-lang.org/2021/11/01/Rust-1.60.0.html"><img alt="Rustc Version 1.60.0+" src="https://img.shields.io/badge/rustc-1.60.0%2B-lightgrey.svg"/></a>
+<a href="https://blog.rust-lang.org/2022/05/19/Rust-1.61.0.html"><img alt="Rustc Version 1.61.0+" src="https://img.shields.io/badge/rustc-1.61.0%2B-lightgrey.svg"/></a>
[![Crates.io](https://img.shields.io/crates/d/pest.svg)](https://crates.io/crates/pest)
[![Crates.io](https://img.shields.io/crates/v/pest.svg)](https://crates.io/crates/pest)
@@ -93,10 +93,7 @@ The grammar can be used to derive a `Parser` implementation automatically.
Parsing returns an iterator of nested token pairs:
```rust
-extern crate pest;
-#[macro_use]
-extern crate pest_derive;
-
+use pest_derive::Parser;
use pest::Parser;
#[derive(Parser)]
@@ -104,7 +101,7 @@ use pest::Parser;
struct IdentParser;
fn main() {
-    let pairs = IdentParser::parse(Rule::ident_list, "a1 b2").unwrap_or_else(|e| panic!("{}", e));
+ let pairs = IdentParser::parse(Rule::ident_list, "a1 b2").unwrap_or_else(|e| panic!("{}", e));
// Because ident_list is silent, the iterator will contain idents
for pair in pairs {
@@ -200,11 +197,11 @@ You can find more projects and ecosystem tools in the [awesome-pest](https://git
* [caith](https://github.com/Geobert/caith) (a dice roller crate)
* [Melody](https://github.com/yoav-lavi/melody)
* [json5-nodes](https://github.com/jlyonsmith/json5-nodes)
+* [prisma](https://github.com/prisma/prisma)
## Minimum Supported Rust Version (MSRV)
-This library should always compile with default features on **Rust 1.60.0**
-or **Rust 1.61** with `const_prec_climber`.
+This library should always compile with default features on **Rust 1.61.0**.
## no_std support
diff --git a/vendor/pest_generator/src/generator.rs b/vendor/pest_generator/src/generator.rs
index e36e9eb87..d301e43a2 100644
--- a/vendor/pest_generator/src/generator.rs
+++ b/vendor/pest_generator/src/generator.rs
@@ -11,17 +11,17 @@ use std::path::PathBuf;
use proc_macro2::TokenStream;
use quote::{ToTokens, TokenStreamExt};
-use syn::{self, Generics, Ident};
+use syn::{self, Ident};
use pest::unicode::unicode_property_names;
use pest_meta::ast::*;
use pest_meta::optimizer::*;
use crate::docs::DocComment;
+use crate::ParsedDerive;
pub(crate) fn generate(
- name: Ident,
- generics: &Generics,
+ parsed_derive: ParsedDerive,
paths: Vec<PathBuf>,
rules: Vec<OptimizedRule>,
defaults: Vec<&str>,
@@ -29,14 +29,14 @@ pub(crate) fn generate(
include_grammar: bool,
) -> TokenStream {
let uses_eoi = defaults.iter().any(|name| *name == "EOI");
-
+ let name = parsed_derive.name;
let builtins = generate_builtin_rules();
let include_fix = if include_grammar {
generate_include(&name, paths)
} else {
quote!()
};
- let rule_enum = generate_enum(&rules, doc_comment, uses_eoi);
+ let rule_enum = generate_enum(&rules, doc_comment, uses_eoi, parsed_derive.non_exhaustive);
let patterns = generate_patterns(&rules, uses_eoi);
let skip = generate_skip(&rules);
@@ -49,7 +49,7 @@ pub(crate) fn generate(
}
}));
- let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+ let (impl_generics, ty_generics, where_clause) = parsed_derive.generics.split_for_impl();
let result = result_type();
@@ -197,8 +197,13 @@ fn generate_include(name: &Ident, paths: Vec<PathBuf>) -> TokenStream {
}
}
-fn generate_enum(rules: &[OptimizedRule], doc_comment: &DocComment, uses_eoi: bool) -> TokenStream {
- let rules = rules.iter().map(|rule| {
+fn generate_enum(
+ rules: &[OptimizedRule],
+ doc_comment: &DocComment,
+ uses_eoi: bool,
+ non_exhaustive: bool,
+) -> TokenStream {
+ let rule_variants = rules.iter().map(|rule| {
let rule_name = format_ident!("r#{}", rule.name);
match doc_comment.line_docs.get(&rule.name) {
@@ -213,26 +218,49 @@ fn generate_enum(rules: &[OptimizedRule], doc_comment: &DocComment, uses_eoi: bo
});
let grammar_doc = &doc_comment.grammar_doc;
+ let mut result = quote! {
+ #[doc = #grammar_doc]
+ #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
+ #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ };
+ if non_exhaustive {
+ result.append_all(quote! {
+ #[non_exhaustive]
+ });
+ }
+ result.append_all(quote! {
+ pub enum Rule
+ });
if uses_eoi {
- quote! {
- #[doc = #grammar_doc]
- #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
- #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
- pub enum Rule {
+ result.append_all(quote! {
+ {
+ #[doc = "End-of-input"]
EOI,
- #( #rules ),*
+ #( #rule_variants ),*
}
- }
+ });
} else {
- quote! {
- #[doc = #grammar_doc]
- #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
- #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
- pub enum Rule {
- #( #rules ),*
+ result.append_all(quote! {
+ {
+ #( #rule_variants ),*
+ }
+ })
+ };
+
+ let rules = rules.iter().map(|rule| {
+ let rule_name = format_ident!("r#{}", rule.name);
+ quote! { #rule_name }
+ });
+
+ result.append_all(quote! {
+ impl Rule {
+ pub fn all_rules() -> &'static[Rule] {
+ &[ #(Rule::#rules), * ]
}
}
- }
+ });
+
+ result
}
fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
@@ -496,6 +524,26 @@ fn generate_expr(expr: OptimizedExpr) -> TokenStream {
})
}
}
+ #[cfg(feature = "grammar-extras")]
+ OptimizedExpr::RepOnce(expr) => {
+ let expr = generate_expr(*expr);
+
+ quote! {
+ state.sequence(|state| {
+ #expr.and_then(|state| {
+ state.repeat(|state| {
+ state.sequence(|state| {
+ super::hidden::skip(
+ state
+ ).and_then(|state| {
+ #expr
+ })
+ })
+ })
+ })
+ })
+ }
+ }
OptimizedExpr::Skip(strings) => {
quote! {
let strings = [#(#strings),*];
@@ -520,8 +568,14 @@ fn generate_expr(expr: OptimizedExpr) -> TokenStream {
#[cfg(feature = "grammar-extras")]
OptimizedExpr::NodeTag(expr, tag) => {
let expr = generate_expr(*expr);
+ let tag_cow = {
+ #[cfg(feature = "std")]
+ quote! { ::std::borrow::Cow::Borrowed(#tag) }
+ #[cfg(not(feature = "std"))]
+ quote! { ::alloc::borrow::Cow::Borrowed(#tag) }
+ };
quote! {
- #expr.and_then(|state| state.tag_node(alloc::borrow::Cow::Borrowed(#tag)))
+ #expr.and_then(|state| state.tag_node(#tag_cow))
}
}
}
@@ -635,6 +689,22 @@ fn generate_expr_atomic(expr: OptimizedExpr) -> TokenStream {
})
}
}
+ #[cfg(feature = "grammar-extras")]
+ OptimizedExpr::RepOnce(expr) => {
+ let expr = generate_expr_atomic(*expr);
+
+ quote! {
+ state.sequence(|state| {
+ #expr.and_then(|state| {
+ state.repeat(|state| {
+ state.sequence(|state| {
+ #expr
+ })
+ })
+ })
+ })
+ }
+ }
OptimizedExpr::Skip(strings) => {
quote! {
let strings = [#(#strings),*];
@@ -659,8 +729,14 @@ fn generate_expr_atomic(expr: OptimizedExpr) -> TokenStream {
#[cfg(feature = "grammar-extras")]
OptimizedExpr::NodeTag(expr, tag) => {
let expr = generate_expr_atomic(*expr);
+ let tag_cow = {
+ #[cfg(feature = "std")]
+ quote! { ::std::borrow::Cow::Borrowed(#tag) }
+ #[cfg(not(feature = "std"))]
+ quote! { ::alloc::borrow::Cow::Borrowed(#tag) }
+ };
quote! {
- #expr.and_then(|state| state.tag_node(alloc::borrow::Cow::Borrowed(#tag)))
+ #expr.and_then(|state| state.tag_node(#tag_cow))
}
}
}
@@ -708,6 +784,7 @@ mod tests {
use proc_macro2::Span;
use std::collections::HashMap;
+ use syn::Generics;
#[test]
fn rule_enum_simple() {
@@ -726,7 +803,7 @@ mod tests {
};
assert_eq!(
- generate_enum(&rules, doc_comment, false).to_string(),
+ generate_enum(&rules, doc_comment, false, false).to_string(),
quote! {
#[doc = "Rule doc\nhello"]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
@@ -735,6 +812,11 @@ mod tests {
#[doc = "This is rule comment"]
r#f
}
+ impl Rule {
+ pub fn all_rules() -> &'static [Rule] {
+ &[Rule::r#f]
+ }
+ }
}
.to_string()
);
@@ -1047,9 +1129,13 @@ mod tests {
let base_path = current_dir.join("base.pest").to_str().unwrap().to_string();
let test_path = current_dir.join("test.pest").to_str().unwrap().to_string();
-
+ let parsed_derive = ParsedDerive {
+ name,
+ generics,
+ non_exhaustive: false,
+ };
assert_eq!(
- generate(name, &generics, vec![PathBuf::from("base.pest"), PathBuf::from("test.pest")], rules, defaults, doc_comment, true).to_string(),
+ generate(parsed_derive, vec![PathBuf::from("base.pest"), PathBuf::from("test.pest")], rules, defaults, doc_comment, true).to_string(),
quote! {
#[allow(non_upper_case_globals)]
const _PEST_GRAMMAR_MyParser: [&'static str; 2usize] = [include_str!(#base_path), include_str!(#test_path)];
@@ -1062,6 +1148,11 @@ mod tests {
#[doc = "If statement"]
r#if
}
+ impl Rule {
+ pub fn all_rules() -> &'static [Rule] {
+ &[Rule::r#a, Rule::r#if]
+ }
+ }
#[allow(clippy::all)]
impl ::pest::Parser<Rule> for MyParser {
diff --git a/vendor/pest_generator/src/lib.rs b/vendor/pest_generator/src/lib.rs
index 98c726525..cbd13eaf5 100644
--- a/vendor/pest_generator/src/lib.rs
+++ b/vendor/pest_generator/src/lib.rs
@@ -42,7 +42,7 @@ use pest_meta::{optimizer, unwrap_or_report, validator};
/// "include_str" statement (done in pest_derive, but turned off in the local bootstrap).
pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let ast: DeriveInput = syn::parse2(input).unwrap();
- let (name, generics, contents) = parse_derive(ast);
+ let (parsed_derive, contents) = parse_derive(ast);
let mut data = String::new();
let mut paths = vec![];
@@ -97,8 +97,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let optimized = optimizer::optimize(ast);
generator::generate(
- name,
- &generics,
+ parsed_derive,
paths,
optimized,
defaults,
@@ -120,7 +119,13 @@ enum GrammarSource {
Inline(String),
}
-fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
+struct ParsedDerive {
+ pub(crate) name: Ident,
+ pub(crate) generics: Generics,
+ pub(crate) non_exhaustive: bool,
+}
+
+fn parse_derive(ast: DeriveInput) -> (ParsedDerive, Vec<GrammarSource>) {
let name = ast.ident;
let generics = ast.generics;
@@ -142,7 +147,19 @@ fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
grammar_sources.push(get_attribute(attr))
}
- (name, generics, grammar_sources)
+ let non_exhaustive = ast
+ .attrs
+ .iter()
+ .any(|attr| attr.meta.path().is_ident("non_exhaustive"));
+
+ (
+ ParsedDerive {
+ name,
+ generics,
+ non_exhaustive,
+ },
+ grammar_sources,
+ )
}
fn get_attribute(attr: &Attribute) -> GrammarSource {
@@ -177,7 +194,7 @@ mod tests {
pub struct MyParser<'a, T>;
";
let ast = syn::parse_str(definition).unwrap();
- let (_, _, filenames) = parse_derive(ast);
+ let (_, filenames) = parse_derive(ast);
assert_eq!(filenames, [GrammarSource::Inline("GRAMMAR".to_string())]);
}
@@ -189,8 +206,9 @@ mod tests {
pub struct MyParser<'a, T>;
";
let ast = syn::parse_str(definition).unwrap();
- let (_, _, filenames) = parse_derive(ast);
+ let (parsed_derive, filenames) = parse_derive(ast);
assert_eq!(filenames, [GrammarSource::File("myfile.pest".to_string())]);
+ assert!(!parsed_derive.non_exhaustive);
}
#[test]
@@ -202,7 +220,7 @@ mod tests {
pub struct MyParser<'a, T>;
";
let ast = syn::parse_str(definition).unwrap();
- let (_, _, filenames) = parse_derive(ast);
+ let (_, filenames) = parse_derive(ast);
assert_eq!(
filenames,
[
@@ -213,6 +231,19 @@ mod tests {
}
#[test]
+ fn derive_nonexhaustive() {
+ let definition = "
+ #[non_exhaustive]
+ #[grammar = \"myfile.pest\"]
+ pub struct MyParser<'a, T>;
+ ";
+ let ast = syn::parse_str(definition).unwrap();
+ let (parsed_derive, filenames) = parse_derive(ast);
+ assert_eq!(filenames, [GrammarSource::File("myfile.pest".to_string())]);
+ assert!(parsed_derive.non_exhaustive);
+ }
+
+ #[test]
#[should_panic(expected = "grammar attribute must be a string")]
fn derive_wrong_arg() {
let definition = "
@@ -242,6 +273,7 @@ mod tests {
fn test_generate_doc() {
let input = quote! {
#[derive(Parser)]
+ #[non_exhaustive]
#[grammar = "../tests/test.pest"]
pub struct TestParser;
};
@@ -252,7 +284,7 @@ mod tests {
#[doc = "A parser for JSON file.\nAnd this is a example for JSON parser.\n\n indent-4-space\n"]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
-
+ #[non_exhaustive]
pub enum Rule {
#[doc = "Matches foo str, e.g.: `foo`"]
r#foo,