author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:13 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:13 +0000
commit     218caa410aa38c29984be31a5229b9fa717560ee (patch)
tree       c54bd55eeb6e4c508940a30e94c0032fbd45d677 /vendor/pest_generator/src
parent     Releasing progress-linux version 1.67.1+dfsg1-1~progress7.99u1. (diff)
Merging upstream version 1.68.2+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/pest_generator/src')
-rw-r--r--  vendor/pest_generator/src/generator.rs | 79
-rw-r--r--  vendor/pest_generator/src/lib.rs       | 70
-rw-r--r--  vendor/pest_generator/src/macros.rs    |  4
3 files changed, 79 insertions, 74 deletions
diff --git a/vendor/pest_generator/src/generator.rs b/vendor/pest_generator/src/generator.rs
index 0d3051e27..fc1263d86 100644
--- a/vendor/pest_generator/src/generator.rs
+++ b/vendor/pest_generator/src/generator.rs
@@ -9,13 +9,13 @@
use std::path::PathBuf;
-use proc_macro2::{Span, TokenStream};
+use proc_macro2::TokenStream;
use quote::{ToTokens, TokenStreamExt};
use syn::{self, Generics, Ident};
+use pest::unicode::unicode_property_names;
use pest_meta::ast::*;
use pest_meta::optimizer::*;
-use pest_meta::UNICODE_PROPERTY_NAMES;
pub fn generate(
name: Ident,
@@ -153,13 +153,13 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
let box_ty = box_type();
- for property in UNICODE_PROPERTY_NAMES {
+ for property in unicode_property_names() {
let property_ident: Ident = syn::parse_str(property).unwrap();
// insert manually for #property substitution
builtins.push((property, quote! {
#[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- fn #property_ident(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ fn #property_ident(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.match_char_by(::pest::unicode::#property_ident)
}
}));
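The hunk above swaps the removed UNICODE_PROPERTY_NAMES constant for pest's unicode_property_names() iterator and adds the anonymous '_ lifetime to the generated signature. For orientation, roughly what one expanded built-in looks like when box_type() resolves to the std Box; the stand-in Rule enum, the ALPHABETIC property, and the main driver are illustrative assumptions so the sketch compiles on its own (it assumes the pest crate with default features).

    // Stand-in Rule enum; pest's blanket impl makes any Copy+Debug+Eq+Hash+Ord type a RuleType.
    #[allow(dead_code, non_camel_case_types)]
    #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
    enum Rule {
        dummy,
    }

    // Roughly the expansion the quote! block above produces for one Unicode property.
    #[inline]
    #[allow(dead_code, non_snake_case, unused_variables)]
    fn ALPHABETIC(
        state: Box<pest::ParserState<'_, Rule>>,
    ) -> pest::ParseResult<Box<pest::ParserState<'_, Rule>>> {
        state.match_char_by(pest::unicode::ALPHABETIC)
    }

    fn main() {
        // pest::state drives a rule function over an input, just as the generated parser does.
        assert!(pest::state("abc", ALPHABETIC).is_ok());
    }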
@@ -169,7 +169,7 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
// Needed because Cargo doesn't watch for changes in grammars.
fn generate_include(name: &Ident, path: &str) -> TokenStream {
- let const_name = Ident::new(&format!("_PEST_GRAMMAR_{}", name), Span::call_site());
+ let const_name = format_ident!("_PEST_GRAMMAR_{}", name);
// Need to make this relative to the current directory since the path to the file
// is derived from the CARGO_MANIFEST_DIR environment variable
let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
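The format_ident! form above is shorthand for the removed Ident::new(&format!(...), Span::call_site()) pair. A minimal standalone sketch, assuming proc-macro2 and quote as dependencies, showing the two produce equal identifiers:

    use proc_macro2::{Ident, Span};
    use quote::format_ident;

    fn main() {
        let name = "MyParser";
        let old = Ident::new(&format!("_PEST_GRAMMAR_{}", name), Span::call_site());
        let new = format_ident!("_PEST_GRAMMAR_{}", name); // span defaults to call_site()
        assert_eq!(old, new); // Ident equality compares the text, not the span
    }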
@@ -182,9 +182,7 @@ fn generate_include(name: &Ident, path: &str) -> TokenStream {
}
fn generate_enum(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
- let rules = rules
- .iter()
- .map(|rule| Ident::new(rule.name.as_str(), Span::call_site()));
+ let rules = rules.iter().map(|rule| format_ident!("r#{}", rule.name));
if uses_eoi {
quote! {
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
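The r# prefix in format_ident!("r#{}", rule.name) emits raw identifiers, so grammar rules whose names collide with Rust keywords (like the `if` rule added to the tests further down) still become valid enum variants and functions. A plain-Rust illustration of what the generated enum relies on:

    // Raw identifiers let a keyword act as a variant name; `r#a` on a non-keyword
    // is also legal and simply means `a`.
    #[allow(dead_code, non_camel_case_types)]
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    enum Rule {
        r#a,
        r#if,
    }

    fn main() {
        assert_ne!(Rule::r#a, Rule::r#if);
        assert_eq!(Rule::r#a, Rule::a); // the raw and plain forms name the same variant
    }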
@@ -209,7 +207,7 @@ fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
let mut rules: Vec<TokenStream> = rules
.iter()
.map(|rule| {
- let rule = Ident::new(rule.name.as_str(), Span::call_site());
+ let rule = format_ident!("r#{}", rule.name);
quote! {
Rule::#rule => rules::#rule(state)
}
@@ -228,10 +226,10 @@ fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
}
fn generate_rule(rule: OptimizedRule) -> TokenStream {
- let name = Ident::new(&rule.name, Span::call_site());
+ let name = format_ident!("r#{}", rule.name);
let expr = if rule.ty == RuleType::Atomic || rule.ty == RuleType::CompoundAtomic {
generate_expr_atomic(rule.expr)
- } else if name == "WHITESPACE" || name == "COMMENT" {
+ } else if rule.name == "WHITESPACE" || rule.name == "COMMENT" {
let atomic = generate_expr_atomic(rule.expr);
quote! {
@@ -249,7 +247,7 @@ fn generate_rule(rule: OptimizedRule) -> TokenStream {
RuleType::Normal => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.rule(Rule::#name, |state| {
#expr
})
@@ -258,14 +256,14 @@ fn generate_rule(rule: OptimizedRule) -> TokenStream {
RuleType::Silent => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
#expr
}
},
RuleType::Atomic => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.rule(Rule::#name, |state| {
state.atomic(::pest::Atomicity::Atomic, |state| {
#expr
@@ -276,7 +274,7 @@ fn generate_rule(rule: OptimizedRule) -> TokenStream {
RuleType::CompoundAtomic => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.atomic(::pest::Atomicity::CompoundAtomic, |state| {
state.rule(Rule::#name, |state| {
#expr
@@ -287,7 +285,7 @@ fn generate_rule(rule: OptimizedRule) -> TokenStream {
RuleType::NonAtomic => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.atomic(::pest::Atomicity::NonAtomic, |state| {
state.rule(Rule::#name, |state| {
#expr
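All of the ParserState<'_, Rule> changes in this file serve the rust_2018_idioms lint group enabled in lib.rs below: eliding the lifetime parameter of a type in a signature now triggers elided_lifetimes_in_paths. A minimal standalone sketch of the same fix (the names here are illustrative, not pest's):

    #![warn(rust_2018_idioms)]

    struct State<'i> {
        input: &'i str,
    }

    // Writing `Box<State>` here would trip elided_lifetimes_in_paths;
    // the anonymous lifetime `'_` makes the elision explicit.
    fn step(state: Box<State<'_>>) -> Box<State<'_>> {
        state
    }

    fn main() {
        println!("{}", step(Box::new(State { input: "ab" })).input);
    }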
@@ -364,7 +362,7 @@ fn generate_expr(expr: OptimizedExpr) -> TokenStream {
}
}
OptimizedExpr::Ident(ident) => {
- let ident = Ident::new(&ident, Span::call_site());
+ let ident = format_ident!("r#{}", ident);
quote! { self::#ident(state) }
}
OptimizedExpr::PeekSlice(start, end_) => {
@@ -510,7 +508,7 @@ fn generate_expr_atomic(expr: OptimizedExpr) -> TokenStream {
}
}
OptimizedExpr::Ident(ident) => {
- let ident = Ident::new(&ident, Span::call_site());
+ let ident = format_ident!("r#{}", ident);
quote! { self::#ident(state) }
}
OptimizedExpr::PeekSlice(start, end_) => {
@@ -659,6 +657,8 @@ fn option_type() -> TokenStream {
#[cfg(test)]
mod tests {
+ use proc_macro2::Span;
+
use super::*;
#[test]
@@ -675,7 +675,7 @@ mod tests {
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
- f
+ r#f
}
}
.to_string()
@@ -863,7 +863,7 @@ mod tests {
assert_eq!(
generate_expr(expr).to_string(),
quote! {
- self::a(state).or_else(|state| {
+ self::r#a(state).or_else(|state| {
state.sequence(|state| {
state.match_range('a'..'b').and_then(|state| {
super::hidden::skip(state)
@@ -929,7 +929,7 @@ mod tests {
assert_eq!(
generate_expr_atomic(expr).to_string(),
quote! {
- self::a(state).or_else(|state| {
+ self::r#a(state).or_else(|state| {
state.sequence(|state| {
state.match_range('a'..'b').and_then(|state| {
state.lookahead(false, |state| {
@@ -960,11 +960,20 @@ mod tests {
fn generate_complete() {
let name = Ident::new("MyParser", Span::call_site());
let generics = Generics::default();
- let rules = vec![OptimizedRule {
- name: "a".to_owned(),
- ty: RuleType::Silent,
- expr: OptimizedExpr::Str("b".to_owned()),
- }];
+
+ let rules = vec![
+ OptimizedRule {
+ name: "a".to_owned(),
+ ty: RuleType::Silent,
+ expr: OptimizedExpr::Str("b".to_owned()),
+ },
+ OptimizedRule {
+ name: "if".to_owned(),
+ ty: RuleType::Silent,
+ expr: OptimizedExpr::Ident("a".to_owned()),
+ },
+ ];
+
let defaults = vec!["ANY"];
let result = result_type();
let box_ty = box_type();
@@ -980,7 +989,8 @@ mod tests {
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
- a
+ r#a,
+ r#if
}
#[allow(clippy::all)]
@@ -999,7 +1009,7 @@ mod tests {
#[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- pub fn skip(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn skip(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
Ok(state)
}
}
@@ -1009,13 +1019,19 @@ mod tests {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn a(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn r#a(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.match_string("b")
}
#[inline]
+ #[allow(non_snake_case, unused_variables)]
+ pub fn r#if(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
+ self::r#a(state)
+ }
+
+ #[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- pub fn ANY(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn ANY(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.skip(1)
}
}
@@ -1025,7 +1041,8 @@ mod tests {
::pest::state(input, |state| {
match rule {
- Rule::a => rules::a(state)
+ Rule::r#a => rules::r#a(state),
+ Rule::r#if => rules::r#if(state)
}
})
}
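For context on how this generated code is consumed downstream, a hedged sketch of a crate using pest_derive 2.x; the inline grammar, rule name, and input are illustrative assumptions, not taken from this patch:

    use pest::Parser;
    use pest_derive::Parser;

    #[derive(Parser)]
    #[grammar_inline = "field = { ASCII_DIGIT+ }"]
    struct NumberParser;

    fn main() {
        // `Rule` and `NumberParser::parse` are produced by the derive, i.e. by the
        // code paths shown in generator.rs above.
        let pairs = NumberParser::parse(Rule::field, "42").expect("parse failed");
        for pair in pairs {
            println!("{:?} -> {}", pair.as_rule(), pair.as_str());
        }
    }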
diff --git a/vendor/pest_generator/src/lib.rs b/vendor/pest_generator/src/lib.rs
index 27b4d8168..2a1203e4a 100644
--- a/vendor/pest_generator/src/lib.rs
+++ b/vendor/pest_generator/src/lib.rs
@@ -7,17 +7,19 @@
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
-#![doc(html_root_url = "https://docs.rs/pest_derive")]
+#![doc(
+ html_root_url = "https://docs.rs/pest_derive",
+ html_logo_url = "https://raw.githubusercontent.com/pest-parser/pest/master/pest-logo.svg",
+ html_favicon_url = "https://raw.githubusercontent.com/pest-parser/pest/master/pest-logo.svg"
+)]
+#![warn(missing_docs, rust_2018_idioms, unused_qualifications)]
#![recursion_limit = "256"]
+//! # pest generator
+//!
+//! This crate generates code from ASTs (which is used in the `pest_derive` crate).
-extern crate pest;
-extern crate pest_meta;
-
-extern crate proc_macro;
-extern crate proc_macro2;
#[macro_use]
extern crate quote;
-extern crate syn;
use std::env;
use std::fs::File;
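With the crate on the 2018 edition (or later), the deleted extern crate items are implied by Cargo.toml; only #[macro_use] extern crate quote; survives, and even that is a style choice, since the macros can be imported directly. A small standalone aside (not this crate's code), assuming quote and proc-macro2 as dependencies:

    use quote::{format_ident, quote};

    fn main() {
        let variant = format_ident!("r#{}", "if"); // raw identifier, as in generator.rs
        let tokens = quote! { pub enum Rule { #variant } };
        println!("{}", tokens); // prints the token stream, e.g. `pub enum Rule { r#if }`
    }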
@@ -31,9 +33,12 @@ use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};
mod macros;
mod generator;
-use pest_meta::parser::{self, Rule};
+use pest_meta::parser::{self, rename_meta_rule, Rule};
use pest_meta::{optimizer, unwrap_or_report, validator};
+/// Processes the derive/proc macro input and generates the corresponding parser based
+/// on the parsed grammar. If `include_grammar` is set to true, it'll generate an explicit
+/// "include_str" statement (done in pest_derive, but turned off in the local bootstrap).
pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let ast: DeriveInput = syn::parse2(input).unwrap();
let (name, generics, content) = parse_derive(ast);
@@ -41,7 +46,21 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let (data, path) = match content {
GrammarSource::File(ref path) => {
let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
- let path = Path::new(&root).join("src/").join(&path);
+
+ // Check whether we can find a file at the path relative to the CARGO_MANIFEST_DIR
+ // first.
+ //
+ // If we cannot find the expected file over there, fallback to the
+ // `CARGO_MANIFEST_DIR/src`, which is the old default and kept for convenience
+ // reasons.
+ // TODO: This could be refactored once `std::path::absolute()` gets stabilized.
+ // https://doc.rust-lang.org/std/path/fn.absolute.html
+ let path = if Path::new(&root).join(path).exists() {
+ Path::new(&root).join(path)
+ } else {
+ Path::new(&root).join("src/").join(path)
+ };
+
let file_name = match path.file_name() {
Some(file_name) => file_name,
None => panic!("grammar attribute should point to a file"),
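The new lookup above can be read as a small helper: try the path relative to CARGO_MANIFEST_DIR first, then fall back to the old src/-relative default. A standalone sketch of the same logic (the function name is illustrative, not the crate's API):

    use std::path::{Path, PathBuf};

    fn resolve_grammar_path(manifest_dir: &str, grammar: &str) -> PathBuf {
        let direct = Path::new(manifest_dir).join(grammar);
        if direct.exists() {
            direct
        } else {
            // Old default, kept for backwards compatibility.
            Path::new(manifest_dir).join("src/").join(grammar)
        }
    }

    fn main() {
        let root = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
        println!("{}", resolve_grammar_path(&root, "grammar.pest").display());
    }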
@@ -58,37 +77,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let pairs = match parser::parse(Rule::grammar_rules, &data) {
Ok(pairs) => pairs,
- Err(error) => panic!(
- "error parsing \n{}",
- error.renamed_rules(|rule| match *rule {
- Rule::grammar_rule => "rule".to_owned(),
- Rule::_push => "PUSH".to_owned(),
- Rule::assignment_operator => "`=`".to_owned(),
- Rule::silent_modifier => "`_`".to_owned(),
- Rule::atomic_modifier => "`@`".to_owned(),
- Rule::compound_atomic_modifier => "`$`".to_owned(),
- Rule::non_atomic_modifier => "`!`".to_owned(),
- Rule::opening_brace => "`{`".to_owned(),
- Rule::closing_brace => "`}`".to_owned(),
- Rule::opening_brack => "`[`".to_owned(),
- Rule::closing_brack => "`]`".to_owned(),
- Rule::opening_paren => "`(`".to_owned(),
- Rule::positive_predicate_operator => "`&`".to_owned(),
- Rule::negative_predicate_operator => "`!`".to_owned(),
- Rule::sequence_operator => "`&`".to_owned(),
- Rule::choice_operator => "`|`".to_owned(),
- Rule::optional_operator => "`?`".to_owned(),
- Rule::repeat_operator => "`*`".to_owned(),
- Rule::repeat_once_operator => "`+`".to_owned(),
- Rule::comma => "`,`".to_owned(),
- Rule::closing_paren => "`)`".to_owned(),
- Rule::quote => "`\"`".to_owned(),
- Rule::insensitive_string => "`^`".to_owned(),
- Rule::range_operator => "`..`".to_owned(),
- Rule::single_quote => "`'`".to_owned(),
- other_rule => format!("{:?}", other_rule),
- })
- ),
+ Err(error) => panic!("error parsing \n{}", error.renamed_rules(rename_meta_rule)),
};
let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
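rename_meta_rule replaces the long inline closure: it maps each meta-grammar Rule to a user-facing token name before the error is displayed, and now lives in pest_meta::parser so other consumers can reuse it. A hedged sketch of the general renamed_rules shape on any pest error (the helper below and its unit rule type are illustrative only, assuming the pest crate):

    fn report<R: pest::RuleType>(error: pest::error::Error<R>) -> String {
        // renamed_rules consumes the error and rewrites how each rule is shown
        // when the error is formatted.
        error.renamed_rules(|rule| format!("{:?}", rule)).to_string()
    }

    fn main() {
        let pos = pest::Position::from_start("input");
        let err: pest::error::Error<()> = pest::error::Error::new_from_pos(
            pest::error::ErrorVariant::CustomError { message: "demo".into() },
            pos,
        );
        println!("{}", report(err));
    }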
@@ -155,7 +144,6 @@ fn get_attribute(attr: &Attribute) -> GrammarSource {
mod tests {
use super::parse_derive;
use super::GrammarSource;
- use syn;
#[test]
fn derive_inline_file() {
diff --git a/vendor/pest_generator/src/macros.rs b/vendor/pest_generator/src/macros.rs
index 37ef531f3..377f66e60 100644
--- a/vendor/pest_generator/src/macros.rs
+++ b/vendor/pest_generator/src/macros.rs
@@ -19,7 +19,7 @@ macro_rules! generate_rule {
quote! {
#[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- pub fn $name(state: ::std::boxed::Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<::std::boxed::Box<::pest::ParserState<Rule>>> {
+ pub fn $name(state: ::std::boxed::Box<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<::std::boxed::Box<::pest::ParserState<'_, Rule>>> {
$pattern
}
}
@@ -32,7 +32,7 @@ macro_rules! generate_rule {
quote! {
#[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- pub fn $name(state: ::alloc::boxed::Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<::alloc::boxed::Box<::pest::ParserState<Rule>>> {
+ pub fn $name(state: ::alloc::boxed::Box<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<::alloc::boxed::Box<::pest::ParserState<'_, Rule>>> {
$pattern
}
}