path: root/compiler/rustc_ast
Diffstat (limited to 'compiler/rustc_ast')
-rw-r--r--   compiler/rustc_ast/src/ast.rs               184
-rw-r--r--   compiler/rustc_ast/src/attr/mod.rs           10
-rw-r--r--   compiler/rustc_ast/src/lib.rs                10
-rw-r--r--   compiler/rustc_ast/src/mut_visit.rs          39
-rw-r--r--   compiler/rustc_ast/src/node_id.rs             2
-rw-r--r--   compiler/rustc_ast/src/token.rs              67
-rw-r--r--   compiler/rustc_ast/src/tokenstream.rs       141
-rw-r--r--   compiler/rustc_ast/src/util/classify.rs      35
-rw-r--r--   compiler/rustc_ast/src/util/literal.rs       74
-rw-r--r--   compiler/rustc_ast/src/visit.rs               8
10 files changed, 389 insertions, 181 deletions
diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs
index c85ff6f5c..a121b5a9b 100644
--- a/compiler/rustc_ast/src/ast.rs
+++ b/compiler/rustc_ast/src/ast.rs
@@ -301,7 +301,7 @@ pub enum TraitBoundModifier {
Maybe,
/// `~const Trait`
- MaybeConst,
+ MaybeConst(Span),
/// `~const !Trait`
//
@@ -317,8 +317,7 @@ pub enum TraitBoundModifier {
impl TraitBoundModifier {
pub fn to_constness(self) -> Const {
match self {
- // FIXME(effects) span
- Self::MaybeConst => Const::Yes(DUMMY_SP),
+ Self::MaybeConst(span) => Const::Yes(span),
_ => Const::No,
}
}
@@ -646,6 +645,7 @@ impl Pat {
// These patterns do not contain subpatterns, skip.
PatKind::Wild
| PatKind::Rest
+ | PatKind::Never
| PatKind::Lit(_)
| PatKind::Range(..)
| PatKind::Ident(..)
@@ -658,6 +658,37 @@ impl Pat {
pub fn is_rest(&self) -> bool {
matches!(self.kind, PatKind::Rest)
}
+
+ /// Whether this could be a never pattern, taking into account that a macro invocation can
+ /// return a never pattern. Used to inform errors during parsing.
+ pub fn could_be_never_pattern(&self) -> bool {
+ let mut could_be_never_pattern = false;
+ self.walk(&mut |pat| match &pat.kind {
+ PatKind::Never | PatKind::MacCall(_) => {
+ could_be_never_pattern = true;
+ false
+ }
+ PatKind::Or(s) => {
+ could_be_never_pattern = s.iter().all(|p| p.could_be_never_pattern());
+ false
+ }
+ _ => true,
+ });
+ could_be_never_pattern
+ }
+
+ /// Whether this contains a `!` pattern. This in particular means that a feature gate error will
+ /// be raised if the feature is off. Used to avoid gating the feature twice.
+ pub fn contains_never_pattern(&self) -> bool {
+ let mut contains_never_pattern = false;
+ self.walk(&mut |pat| {
+ if matches!(pat.kind, PatKind::Never) {
+ contains_never_pattern = true;
+ }
+ true
+ });
+ contains_never_pattern
+ }
}
/// A single field in a struct pattern.
@@ -796,6 +827,9 @@ pub enum PatKind {
/// only one rest pattern may occur in the pattern sequences.
Rest,
+ /// A never pattern `!`
+ Never,
+
/// Parentheses in patterns used for grouping (i.e., `(PAT)`).
Paren(P<Pat>),
@@ -818,7 +852,7 @@ pub enum BorrowKind {
Raw,
}
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy)]
+#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub enum BinOpKind {
/// The `+` operator (addition)
Add,
@@ -859,9 +893,9 @@ pub enum BinOpKind {
}
impl BinOpKind {
- pub fn to_string(&self) -> &'static str {
+ pub fn as_str(&self) -> &'static str {
use BinOpKind::*;
- match *self {
+ match self {
Add => "+",
Sub => "-",
Mul => "*",
@@ -882,19 +916,25 @@ impl BinOpKind {
Gt => ">",
}
}
- pub fn lazy(&self) -> bool {
+
+ pub fn is_lazy(&self) -> bool {
matches!(self, BinOpKind::And | BinOpKind::Or)
}
pub fn is_comparison(&self) -> bool {
use BinOpKind::*;
- // Note for developers: please keep this as is;
+ // Note for developers: please keep this match exhaustive;
// we want compilation to fail if another variant is added.
match *self {
Eq | Lt | Le | Ne | Gt | Ge => true,
And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false,
}
}
+
+ /// Returns `true` if the binary operator takes its arguments by value.
+ pub fn is_by_value(self) -> bool {
+ !self.is_comparison()
+ }
}
pub type BinOp = Spanned<BinOpKind>;
@@ -902,7 +942,7 @@ pub type BinOp = Spanned<BinOpKind>;
/// Unary operator.
///
/// Note that `&data` is not an operator, it's an `AddrOf` expression.
-#[derive(Clone, Encodable, Decodable, Debug, Copy)]
+#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub enum UnOp {
/// The `*` operator for dereferencing
Deref,
@@ -913,13 +953,18 @@ pub enum UnOp {
}
impl UnOp {
- pub fn to_string(op: UnOp) -> &'static str {
- match op {
+ pub fn as_str(&self) -> &'static str {
+ match self {
UnOp::Deref => "*",
UnOp::Not => "!",
UnOp::Neg => "-",
}
}
+
+ /// Returns `true` if the unary operator takes its argument by value.
+ pub fn is_by_value(self) -> bool {
+ matches!(self, Self::Neg | Self::Not)
+ }
}
/// A statement
@@ -1066,8 +1111,8 @@ pub struct Arm {
pub pat: P<Pat>,
/// Match arm guard, e.g. `n > 10` in `match foo { n if n > 10 => {}, _ => {} }`
pub guard: Option<P<Expr>>,
- /// Match arm body.
- pub body: P<Expr>,
+ /// Match arm body. Omitted if the pattern is a never pattern.
+ pub body: Option<P<Expr>>,
pub span: Span,
pub id: NodeId,
pub is_placeholder: bool,
@@ -1297,7 +1342,7 @@ pub struct Closure {
pub binder: ClosureBinder,
pub capture_clause: CaptureBy,
pub constness: Const,
- pub asyncness: Async,
+ pub coroutine_kind: Option<CoroutineKind>,
pub movability: Movability,
pub fn_decl: P<FnDecl>,
pub body: P<Expr>,
@@ -1502,6 +1547,7 @@ pub enum ExprKind {
pub enum GenBlockKind {
Async,
Gen,
+ AsyncGen,
}
impl fmt::Display for GenBlockKind {
@@ -1515,6 +1561,7 @@ impl GenBlockKind {
match self {
GenBlockKind::Async => "async",
GenBlockKind::Gen => "gen",
+ GenBlockKind::AsyncGen => "async gen",
}
}
}
@@ -2237,6 +2284,18 @@ pub enum InlineAsmOperand {
},
}
+impl InlineAsmOperand {
+ pub fn reg(&self) -> Option<&InlineAsmRegOrRegClass> {
+ match self {
+ Self::In { reg, .. }
+ | Self::Out { reg, .. }
+ | Self::InOut { reg, .. }
+ | Self::SplitInOut { reg, .. } => Some(reg),
+ Self::Const { .. } | Self::Sym { .. } => None,
+ }
+ }
+}
+
/// Inline assembly.
///
/// E.g., `asm!("NOP");`.
@@ -2380,28 +2439,47 @@ pub enum Unsafe {
No,
}
+/// Describes what kind of coroutine markers, if any, a function has.
+///
+/// Coroutine markers are things that cause the function to generate a coroutine, such as `async`,
+/// which makes the function return `impl Future`, or `gen`, which makes the function return `impl
+/// Iterator`.
#[derive(Copy, Clone, Encodable, Decodable, Debug)]
-pub enum Async {
- Yes { span: Span, closure_id: NodeId, return_impl_trait_id: NodeId },
- No,
-}
-
-#[derive(Copy, Clone, Encodable, Decodable, Debug)]
-pub enum Gen {
- Yes { span: Span, closure_id: NodeId, return_impl_trait_id: NodeId },
- No,
+pub enum CoroutineKind {
+ /// `async`, which returns an `impl Future`
+ Async { span: Span, closure_id: NodeId, return_impl_trait_id: NodeId },
+ /// `gen`, which returns an `impl Iterator`
+ Gen { span: Span, closure_id: NodeId, return_impl_trait_id: NodeId },
+ /// `async gen`, which returns an `impl AsyncIterator`
+ AsyncGen { span: Span, closure_id: NodeId, return_impl_trait_id: NodeId },
}
-impl Async {
+impl CoroutineKind {
pub fn is_async(self) -> bool {
- matches!(self, Async::Yes { .. })
+ matches!(self, CoroutineKind::Async { .. })
+ }
+
+ pub fn is_gen(self) -> bool {
+ matches!(self, CoroutineKind::Gen { .. })
+ }
+
+ pub fn closure_id(self) -> NodeId {
+ match self {
+ CoroutineKind::Async { closure_id, .. }
+ | CoroutineKind::Gen { closure_id, .. }
+ | CoroutineKind::AsyncGen { closure_id, .. } => closure_id,
+ }
}
- /// In this case this is an `async` return, the `NodeId` for the generated `impl Trait` item.
- pub fn opt_return_id(self) -> Option<(NodeId, Span)> {
+ /// The `NodeId` for the `impl Trait` item generated for an `async`, `gen`, or `async gen`
+ /// return.
+ pub fn return_id(self) -> (NodeId, Span) {
match self {
- Async::Yes { return_impl_trait_id, span, .. } => Some((return_impl_trait_id, span)),
- Async::No => None,
+ CoroutineKind::Async { return_impl_trait_id, span, .. }
+ | CoroutineKind::Gen { return_impl_trait_id, span, .. }
+ | CoroutineKind::AsyncGen { return_impl_trait_id, span, .. } => {
+ (return_impl_trait_id, span)
+ }
}
}
}
@@ -2574,7 +2652,7 @@ pub enum AttrStyle {
}
rustc_index::newtype_index! {
- #[custom_encodable]
+ #[orderable]
#[debug_format = "AttrId({})"]
pub struct AttrId {}
}
@@ -2710,7 +2788,11 @@ pub enum VariantData {
/// Struct variant.
///
/// E.g., `Bar { .. }` as in `enum Foo { Bar { .. } }`.
- Struct(ThinVec<FieldDef>, bool),
+ Struct {
+ fields: ThinVec<FieldDef>,
+ // FIXME: investigate making this an `Option<ErrorGuaranteed>`
+ recovered: bool,
+ },
/// Tuple variant.
///
/// E.g., `Bar(..)` as in `enum Foo { Bar(..) }`.
@@ -2725,7 +2807,7 @@ impl VariantData {
/// Return the fields of this variant.
pub fn fields(&self) -> &[FieldDef] {
match self {
- VariantData::Struct(fields, ..) | VariantData::Tuple(fields, _) => fields,
+ VariantData::Struct { fields, .. } | VariantData::Tuple(fields, _) => fields,
_ => &[],
}
}
@@ -2733,7 +2815,7 @@ impl VariantData {
/// Return the `NodeId` of this variant's constructor, if it has one.
pub fn ctor_node_id(&self) -> Option<NodeId> {
match *self {
- VariantData::Struct(..) => None,
+ VariantData::Struct { .. } => None,
VariantData::Tuple(_, id) | VariantData::Unit(id) => Some(id),
}
}
@@ -2767,6 +2849,28 @@ impl Item {
pub fn span_with_attributes(&self) -> Span {
self.attrs.iter().fold(self.span, |acc, attr| acc.to(attr.span))
}
+
+ pub fn opt_generics(&self) -> Option<&Generics> {
+ match &self.kind {
+ ItemKind::ExternCrate(_)
+ | ItemKind::Use(_)
+ | ItemKind::Mod(_, _)
+ | ItemKind::ForeignMod(_)
+ | ItemKind::GlobalAsm(_)
+ | ItemKind::MacCall(_)
+ | ItemKind::MacroDef(_) => None,
+ ItemKind::Static(_) => None,
+ ItemKind::Const(i) => Some(&i.generics),
+ ItemKind::Fn(i) => Some(&i.generics),
+ ItemKind::TyAlias(i) => Some(&i.generics),
+ ItemKind::TraitAlias(generics, _)
+ | ItemKind::Enum(_, generics)
+ | ItemKind::Struct(_, generics)
+ | ItemKind::Union(_, generics) => Some(&generics),
+ ItemKind::Trait(i) => Some(&i.generics),
+ ItemKind::Impl(i) => Some(&i.generics),
+ }
+ }
}
/// `extern` qualifier on a function item or function type.
@@ -2805,8 +2909,8 @@ impl Extern {
pub struct FnHeader {
/// The `unsafe` keyword, if any
pub unsafety: Unsafe,
- /// The `async` keyword, if any
- pub asyncness: Async,
+ /// Whether this is `async`, `gen`, or nothing.
+ pub coroutine_kind: Option<CoroutineKind>,
/// The `const` keyword, if any
pub constness: Const,
/// The `extern` keyword and corresponding ABI string, if any
@@ -2816,9 +2920,9 @@ pub struct FnHeader {
impl FnHeader {
/// Does this function header have any qualifiers or is it empty?
pub fn has_qualifiers(&self) -> bool {
- let Self { unsafety, asyncness, constness, ext } = self;
+ let Self { unsafety, coroutine_kind, constness, ext } = self;
matches!(unsafety, Unsafe::Yes(_))
- || asyncness.is_async()
+ || coroutine_kind.is_some()
|| matches!(constness, Const::Yes(_))
|| !matches!(ext, Extern::None)
}
@@ -2828,7 +2932,7 @@ impl Default for FnHeader {
fn default() -> FnHeader {
FnHeader {
unsafety: Unsafe::No,
- asyncness: Async::No,
+ coroutine_kind: None,
constness: Const::No,
ext: Extern::None,
}
@@ -3151,11 +3255,11 @@ mod size_asserts {
static_assert_size!(Block, 32);
static_assert_size!(Expr, 72);
static_assert_size!(ExprKind, 40);
- static_assert_size!(Fn, 152);
+ static_assert_size!(Fn, 160);
static_assert_size!(ForeignItem, 96);
static_assert_size!(ForeignItemKind, 24);
static_assert_size!(GenericArg, 24);
- static_assert_size!(GenericBound, 56);
+ static_assert_size!(GenericBound, 64);
static_assert_size!(Generics, 40);
static_assert_size!(Impl, 136);
static_assert_size!(Item, 136);
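For orientation, the ast.rs changes above track new surface syntax: `PatKind::Never` and the now-optional `Arm::body` model never-pattern match arms written without a body, while `CoroutineKind` and `GenBlockKind::AsyncGen` cover `gen` and `async gen` alongside `async`. The following is a minimal sketch of that syntax, not part of the diff; it assumes a nightly toolchain with the incomplete `never_patterns` gate and the `gen_blocks` gate (edition 2024 for the `gen` keyword):

    #![feature(never_patterns, gen_blocks)]
    #![allow(incomplete_features)]

    enum Void {}

    fn into_ok(res: Result<u32, Void>) -> u32 {
        match res {
            Ok(n) => n,
            // A never pattern arm has no body, which is why `Arm::body`
            // becomes `Option<P<Expr>>` in this diff.
            Err(!),
        }
    }

    // A `gen` block lowers through `GenBlockKind::Gen`; `async gen` blocks use
    // the new `GenBlockKind::AsyncGen` and `CoroutineKind::AsyncGen`.
    fn countdown() -> impl Iterator<Item = u32> {
        gen {
            yield 3_u32;
            yield 2;
            yield 1;
        }
    }

    fn main() {
        assert_eq!(into_ok(Ok(7)), 7);
        assert_eq!(countdown().sum::<u32>(), 6);
    }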
diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index be7d1b207..98138cedb 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -342,7 +342,7 @@ impl MetaItem {
let span = span.with_hi(segments.last().unwrap().ident.span.hi());
Path { span, segments, tokens: None }
}
- Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &**nt {
+ Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &nt.0 {
token::Nonterminal::NtMeta(item) => return item.meta(item.path.span),
token::Nonterminal::NtPath(path) => (**path).clone(),
_ => return None,
@@ -387,11 +387,11 @@ impl MetaItemKind {
tokens: &mut impl Iterator<Item = &'a TokenTree>,
) -> Option<MetaItemKind> {
match tokens.next() {
- Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
+ Some(TokenTree::Delimited(.., Delimiter::Invisible, inner_tokens)) => {
MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
}
Some(TokenTree::Token(token, _)) => {
- MetaItemLit::from_token(&token).map(MetaItemKind::NameValue)
+ MetaItemLit::from_token(token).map(MetaItemKind::NameValue)
}
_ => None,
}
@@ -401,7 +401,7 @@ impl MetaItemKind {
tokens: &mut iter::Peekable<impl Iterator<Item = &'a TokenTree>>,
) -> Option<MetaItemKind> {
match tokens.peek() {
- Some(TokenTree::Delimited(_, Delimiter::Parenthesis, inner_tokens)) => {
+ Some(TokenTree::Delimited(.., Delimiter::Parenthesis, inner_tokens)) => {
let inner_tokens = inner_tokens.clone();
tokens.next();
MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List)
@@ -524,7 +524,7 @@ impl NestedMetaItem {
tokens.next();
return Some(NestedMetaItem::Lit(lit));
}
- Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
+ Some(TokenTree::Delimited(.., Delimiter::Invisible, inner_tokens)) => {
tokens.next();
return NestedMetaItem::from_tokens(&mut inner_tokens.trees().peekable());
}
diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs
index c1f6ad6a2..7e713a49a 100644
--- a/compiler/rustc_ast/src/lib.rs
+++ b/compiler/rustc_ast/src/lib.rs
@@ -8,9 +8,9 @@
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
test(attr(deny(warnings)))
)]
-#![cfg_attr(not(bootstrap), doc(rust_logo))]
-#![cfg_attr(not(bootstrap), allow(internal_features))]
-#![cfg_attr(not(bootstrap), feature(rustdoc_internals))]
+#![doc(rust_logo)]
+#![allow(internal_features)]
+#![feature(rustdoc_internals)]
#![feature(associated_type_bounds)]
#![feature(box_patterns)]
#![feature(const_trait_impl)]
@@ -59,9 +59,7 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
/// Requirements for a `StableHashingContext` to be used in this crate.
/// This is a hack to allow using the `HashStable_Generic` derive macro
/// instead of implementing everything in `rustc_middle`.
-pub trait HashStableContext:
- rustc_type_ir::HashStableContext + rustc_span::HashStableContext
-{
+pub trait HashStableContext: rustc_span::HashStableContext {
fn hash_attr(&mut self, _: &ast::Attribute, hasher: &mut StableHasher);
}
diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs
index 7c0a78253..557ae02a8 100644
--- a/compiler/rustc_ast/src/mut_visit.rs
+++ b/compiler/rustc_ast/src/mut_visit.rs
@@ -7,10 +7,10 @@
//! a `MutVisitor` renaming item names in a module will miss all of those
//! that are created by the expansion of a macro.
+use crate::ast::*;
use crate::ptr::P;
use crate::token::{self, Token};
use crate::tokenstream::*;
-use crate::{ast::*, StaticItem};
use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
use rustc_data_structures::stack::ensure_sufficient_stack;
@@ -121,8 +121,8 @@ pub trait MutVisitor: Sized {
noop_visit_fn_decl(d, self);
}
- fn visit_asyncness(&mut self, a: &mut Async) {
- noop_visit_asyncness(a, self);
+ fn visit_coroutine_kind(&mut self, a: &mut CoroutineKind) {
+ noop_visit_coroutine_kind(a, self);
}
fn visit_closure_binder(&mut self, b: &mut ClosureBinder) {
@@ -453,7 +453,7 @@ pub fn noop_flat_map_arm<T: MutVisitor>(mut arm: Arm, vis: &mut T) -> SmallVec<[
vis.visit_id(id);
vis.visit_pat(pat);
visit_opt(guard, |guard| vis.visit_expr(guard));
- vis.visit_expr(body);
+ visit_opt(body, |body| vis.visit_expr(body));
vis.visit_span(span);
smallvec![arm]
}
@@ -682,7 +682,7 @@ pub fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
AttrTokenTree::Token(token, _) => {
visit_token(token, vis);
}
- AttrTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
+ AttrTokenTree::Delimited(DelimSpan { open, close }, _spacing, _delim, tts) => {
vis.visit_span(open);
vis.visit_span(close);
visit_attr_tts(tts, vis);
@@ -709,7 +709,7 @@ pub fn visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
TokenTree::Token(token, _) => {
visit_token(token, vis);
}
- TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
+ TokenTree::Delimited(DelimSpan { open, close }, _spacing, _delim, tts) => {
vis.visit_span(open);
vis.visit_span(close);
visit_tts(tts, vis);
@@ -764,7 +764,10 @@ pub fn visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
return; // Avoid visiting the span for the second time.
}
token::Interpolated(nt) => {
- visit_nonterminal(Lrc::make_mut(nt), vis);
+ let nt = Lrc::make_mut(nt);
+ let (nt, sp) = (&mut nt.0, &mut nt.1);
+ vis.visit_span(sp);
+ visit_nonterminal(nt, vis);
}
_ => {}
}
@@ -868,13 +871,15 @@ pub fn noop_visit_closure_binder<T: MutVisitor>(binder: &mut ClosureBinder, vis:
}
}
-pub fn noop_visit_asyncness<T: MutVisitor>(asyncness: &mut Async, vis: &mut T) {
- match asyncness {
- Async::Yes { span: _, closure_id, return_impl_trait_id } => {
+pub fn noop_visit_coroutine_kind<T: MutVisitor>(coroutine_kind: &mut CoroutineKind, vis: &mut T) {
+ match coroutine_kind {
+ CoroutineKind::Async { span, closure_id, return_impl_trait_id }
+ | CoroutineKind::Gen { span, closure_id, return_impl_trait_id }
+ | CoroutineKind::AsyncGen { span, closure_id, return_impl_trait_id } => {
+ vis.visit_span(span);
vis.visit_id(closure_id);
vis.visit_id(return_impl_trait_id);
}
- Async::No => {}
}
}
@@ -971,7 +976,7 @@ pub fn noop_visit_where_predicate<T: MutVisitor>(pred: &mut WherePredicate, vis:
pub fn noop_visit_variant_data<T: MutVisitor>(vdata: &mut VariantData, vis: &mut T) {
match vdata {
- VariantData::Struct(fields, ..) => {
+ VariantData::Struct { fields, .. } => {
fields.flat_map_in_place(|field| vis.flat_map_field_def(field));
}
VariantData::Tuple(fields, id) => {
@@ -1167,9 +1172,9 @@ fn visit_const_item<T: MutVisitor>(
}
pub fn noop_visit_fn_header<T: MutVisitor>(header: &mut FnHeader, vis: &mut T) {
- let FnHeader { unsafety, asyncness, constness, ext: _ } = header;
+ let FnHeader { unsafety, coroutine_kind, constness, ext: _ } = header;
visit_constness(constness, vis);
- vis.visit_asyncness(asyncness);
+ coroutine_kind.as_mut().map(|coroutine_kind| vis.visit_coroutine_kind(coroutine_kind));
visit_unsafety(unsafety, vis);
}
@@ -1246,7 +1251,7 @@ pub fn noop_visit_pat<T: MutVisitor>(pat: &mut P<Pat>, vis: &mut T) {
let Pat { id, kind, span, tokens } = pat.deref_mut();
vis.visit_id(id);
match kind {
- PatKind::Wild | PatKind::Rest => {}
+ PatKind::Wild | PatKind::Rest | PatKind::Never => {}
PatKind::Ident(_binding_mode, ident, sub) => {
vis.visit_ident(ident);
visit_opt(sub, |sub| vis.visit_pat(sub));
@@ -1403,7 +1408,7 @@ pub fn noop_visit_expr<T: MutVisitor>(
binder,
capture_clause,
constness,
- asyncness,
+ coroutine_kind,
movability: _,
fn_decl,
body,
@@ -1412,7 +1417,7 @@ pub fn noop_visit_expr<T: MutVisitor>(
}) => {
vis.visit_closure_binder(binder);
visit_constness(constness, vis);
- vis.visit_asyncness(asyncness);
+ coroutine_kind.as_mut().map(|coroutine_kind| vis.visit_coroutine_kind(coroutine_kind));
vis.visit_capture_by(capture_clause);
vis.visit_fn_decl(fn_decl);
vis.visit_expr(body);
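The mut_visit.rs changes above replace `visit_asyncness` with a single `visit_coroutine_kind` hook that is only invoked when a `coroutine_kind` is present. A hedged sketch of a downstream visitor using the new hook, assuming a nightly toolchain with the `rustc-dev` component and `rustc_private`; it uses only the names that appear in this diff:

    #![feature(rustc_private)]
    extern crate rustc_ast;

    use rustc_ast::ast::CoroutineKind;
    use rustc_ast::mut_visit::{noop_visit_coroutine_kind, MutVisitor};

    // Counts `async`, `gen`, and `async gen` markers while walking a crate.
    struct CountCoroutines {
        count: usize,
    }

    impl MutVisitor for CountCoroutines {
        fn visit_coroutine_kind(&mut self, kind: &mut CoroutineKind) {
            // All three variants arrive through this one method now.
            self.count += 1;
            noop_visit_coroutine_kind(kind, self);
        }
    }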
diff --git a/compiler/rustc_ast/src/node_id.rs b/compiler/rustc_ast/src/node_id.rs
index d16741757..1cd244953 100644
--- a/compiler/rustc_ast/src/node_id.rs
+++ b/compiler/rustc_ast/src/node_id.rs
@@ -8,6 +8,8 @@ rustc_index::newtype_index! {
/// This is later turned into [`DefId`] and `HirId` for the HIR.
///
/// [`DefId`]: rustc_span::def_id::DefId
+ #[encodable]
+ #[orderable]
#[debug_format = "NodeId({})"]
pub struct NodeId {
/// The [`NodeId`] used to represent the root of the crate.
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index 914c97a14..b0cd2ec98 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -13,7 +13,7 @@ use rustc_macros::HashStable_Generic;
use rustc_span::symbol::{kw, sym};
#[allow(hidden_glob_reexports)]
use rustc_span::symbol::{Ident, Symbol};
-use rustc_span::{self, edition::Edition, Span, DUMMY_SP};
+use rustc_span::{edition::Edition, Span, DUMMY_SP};
use std::borrow::Cow;
use std::fmt;
@@ -110,7 +110,7 @@ impl Lit {
Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
Literal(token_lit) => Some(token_lit),
Interpolated(ref nt)
- if let NtExpr(expr) | NtLiteral(expr) = &**nt
+ if let NtExpr(expr) | NtLiteral(expr) = &nt.0
&& let ast::ExprKind::Lit(token_lit) = expr.kind =>
{
Some(token_lit)
@@ -238,9 +238,9 @@ pub enum TokenKind {
EqEq,
/// `!=`
Ne,
- /// `>`
- Ge,
/// `>=`
+ Ge,
+ /// `>`
Gt,
/// `&&`
AndAnd,
@@ -314,7 +314,7 @@ pub enum TokenKind {
/// - It prevents `Token` from implementing `Copy`.
/// It adds complexity and likely slows things down. Please don't add new
/// occurrences of this token kind!
- Interpolated(Lrc<Nonterminal>),
+ Interpolated(Lrc<(Nonterminal, Span)>),
/// A doc comment token.
/// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
@@ -388,7 +388,8 @@ impl TokenKind {
match *self {
Comma => Some(vec![Dot, Lt, Semi]),
Semi => Some(vec![Colon, Comma]),
- FatArrow => Some(vec![Eq, RArrow]),
+ Colon => Some(vec![Semi]),
+ FatArrow => Some(vec![Eq, RArrow, Ge, Gt]),
_ => None,
}
}
@@ -421,7 +422,7 @@ impl Token {
/// if they keep spans or perform edition checks.
pub fn uninterpolated_span(&self) -> Span {
match &self.kind {
- Interpolated(nt) => nt.span(),
+ Interpolated(nt) => nt.0.use_span(),
_ => self.span,
}
}
@@ -464,7 +465,7 @@ impl Token {
ModSep | // global path
Lifetime(..) | // labeled loop
Pound => true, // expression attributes
- Interpolated(ref nt) => matches!(**nt, NtLiteral(..) |
+ Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) |
NtExpr(..) |
NtBlock(..) |
NtPath(..)),
@@ -488,7 +489,7 @@ impl Token {
| DotDot | DotDotDot | DotDotEq // ranges
| Lt | BinOp(Shl) // associated path
| ModSep => true, // global path
- Interpolated(ref nt) => matches!(**nt, NtLiteral(..) |
+ Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) |
NtPat(..) |
NtBlock(..) |
NtPath(..)),
@@ -511,7 +512,7 @@ impl Token {
Lifetime(..) | // lifetime bound in trait object
Lt | BinOp(Shl) | // associated path
ModSep => true, // global path
- Interpolated(ref nt) => matches!(**nt, NtTy(..) | NtPath(..)),
+ Interpolated(ref nt) => matches!(&nt.0, NtTy(..) | NtPath(..)),
// For anonymous structs or unions, which only appear in specific positions
// (type of struct fields or union fields), we don't consider them as regular types
_ => false,
@@ -522,7 +523,7 @@ impl Token {
pub fn can_begin_const_arg(&self) -> bool {
match self.kind {
OpenDelim(Delimiter::Brace) => true,
- Interpolated(ref nt) => matches!(**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
+ Interpolated(ref nt) => matches!(&nt.0, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
_ => self.can_begin_literal_maybe_minus(),
}
}
@@ -576,7 +577,7 @@ impl Token {
match self.uninterpolate().kind {
Literal(..) | BinOp(Minus) => true,
Ident(name, false) if name.is_bool_lit() => true,
- Interpolated(ref nt) => match &**nt {
+ Interpolated(ref nt) => match &nt.0 {
NtLiteral(_) => true,
NtExpr(e) => match &e.kind {
ast::ExprKind::Lit(_) => true,
@@ -597,9 +598,9 @@ impl Token {
/// otherwise returns the original token.
pub fn uninterpolate(&self) -> Cow<'_, Token> {
match &self.kind {
- Interpolated(nt) => match **nt {
+ Interpolated(nt) => match &nt.0 {
NtIdent(ident, is_raw) => {
- Cow::Owned(Token::new(Ident(ident.name, is_raw), ident.span))
+ Cow::Owned(Token::new(Ident(ident.name, *is_raw), ident.span))
}
NtLifetime(ident) => Cow::Owned(Token::new(Lifetime(ident.name), ident.span)),
_ => Cow::Borrowed(self),
@@ -614,8 +615,8 @@ impl Token {
// We avoid using `Token::uninterpolate` here because it's slow.
match &self.kind {
&Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)),
- Interpolated(nt) => match **nt {
- NtIdent(ident, is_raw) => Some((ident, is_raw)),
+ Interpolated(nt) => match &nt.0 {
+ NtIdent(ident, is_raw) => Some((*ident, *is_raw)),
_ => None,
},
_ => None,
@@ -628,8 +629,8 @@ impl Token {
// We avoid using `Token::uninterpolate` here because it's slow.
match &self.kind {
&Lifetime(name) => Some(Ident::new(name, self.span)),
- Interpolated(nt) => match **nt {
- NtLifetime(ident) => Some(ident),
+ Interpolated(nt) => match &nt.0 {
+ NtLifetime(ident) => Some(*ident),
_ => None,
},
_ => None,
@@ -655,7 +656,7 @@ impl Token {
/// Returns `true` if the token is an interpolated path.
fn is_path(&self) -> bool {
if let Interpolated(nt) = &self.kind
- && let NtPath(..) = **nt
+ && let NtPath(..) = &nt.0
{
return true;
}
@@ -668,7 +669,7 @@ impl Token {
/// (which happens while parsing the result of macro expansion)?
pub fn is_whole_expr(&self) -> bool {
if let Interpolated(nt) = &self.kind
- && let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtBlock(_) = **nt
+ && let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtBlock(_) = &nt.0
{
return true;
}
@@ -679,7 +680,7 @@ impl Token {
/// Is the token an interpolated block (`$b:block`)?
pub fn is_whole_block(&self) -> bool {
if let Interpolated(nt) = &self.kind
- && let NtBlock(..) = **nt
+ && let NtBlock(..) = &nt.0
{
return true;
}
@@ -755,6 +756,11 @@ impl Token {
)
}
+ /// Returns `true` if the token is an integer literal.
+ pub fn is_integer_lit(&self) -> bool {
+ matches!(self.kind, Literal(Lit { kind: LitKind::Integer, .. }))
+ }
+
/// Returns `true` if the token is a non-raw identifier for which `pred` holds.
pub fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool {
match self.ident() {
@@ -927,7 +933,7 @@ impl fmt::Display for NonterminalKind {
}
impl Nonterminal {
- pub fn span(&self) -> Span {
+ pub fn use_span(&self) -> Span {
match self {
NtItem(item) => item.span,
NtBlock(block) => block.span,
@@ -941,6 +947,23 @@ impl Nonterminal {
NtVis(vis) => vis.span,
}
}
+
+ pub fn descr(&self) -> &'static str {
+ match self {
+ NtItem(..) => "item",
+ NtBlock(..) => "block",
+ NtStmt(..) => "statement",
+ NtPat(..) => "pattern",
+ NtExpr(..) => "expression",
+ NtLiteral(..) => "literal",
+ NtTy(..) => "type",
+ NtIdent(..) => "identifier",
+ NtLifetime(..) => "lifetime",
+ NtMeta(..) => "attribute",
+ NtPath(..) => "path",
+ NtVis(..) => "visibility",
+ }
+ }
}
impl PartialEq for Nonterminal {
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index 23b8f9c12..4c0c49658 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -46,7 +46,7 @@ pub enum TokenTree {
/// delimiters are implicitly represented by `Delimited`.
Token(Token, Spacing),
/// A delimited sequence of token trees.
- Delimited(DelimSpan, Delimiter, TokenStream),
+ Delimited(DelimSpan, DelimSpacing, Delimiter, TokenStream),
}
// Ensure all fields of `TokenTree` are `DynSend` and `DynSync`.
@@ -62,11 +62,11 @@ where
}
impl TokenTree {
- /// Checks if this `TokenTree` is equal to the other, regardless of span information.
+ /// Checks if this `TokenTree` is equal to the other, regardless of span/spacing information.
pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
match (self, other) {
(TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
- (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
+ (TokenTree::Delimited(.., delim, tts), TokenTree::Delimited(.., delim2, tts2)) => {
delim == delim2 && tts.eq_unspanned(tts2)
}
_ => false,
@@ -99,6 +99,11 @@ impl TokenTree {
TokenTree::Token(Token::new(kind, span), Spacing::Joint)
}
+ /// Create a `TokenTree::Token` with joint-hidden spacing.
+ pub fn token_joint_hidden(kind: TokenKind, span: Span) -> TokenTree {
+ TokenTree::Token(Token::new(kind, span), Spacing::JointHidden)
+ }
+
pub fn uninterpolate(&self) -> Cow<'_, TokenTree> {
match self {
TokenTree::Token(token, spacing) => match token.uninterpolate() {
@@ -183,7 +188,7 @@ pub struct AttrTokenStream(pub Lrc<Vec<AttrTokenTree>>);
#[derive(Clone, Debug, Encodable, Decodable)]
pub enum AttrTokenTree {
Token(Token, Spacing),
- Delimited(DelimSpan, Delimiter, AttrTokenStream),
+ Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
/// Stores the attributes for an attribute target,
/// along with the tokens for that attribute target.
/// See `AttributesData` for more information
@@ -208,9 +213,14 @@ impl AttrTokenStream {
AttrTokenTree::Token(inner, spacing) => {
smallvec![TokenTree::Token(inner.clone(), *spacing)].into_iter()
}
- AttrTokenTree::Delimited(span, delim, stream) => {
- smallvec![TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),]
- .into_iter()
+ AttrTokenTree::Delimited(span, spacing, delim, stream) => {
+ smallvec![TokenTree::Delimited(
+ *span,
+ *spacing,
+ *delim,
+ stream.to_tokenstream()
+ ),]
+ .into_iter()
}
AttrTokenTree::Attributes(data) => {
let idx = data
@@ -230,7 +240,7 @@ impl AttrTokenStream {
let mut found = false;
// Check the last two trees (to account for a trailing semi)
for tree in target_tokens.iter_mut().rev().take(2) {
- if let TokenTree::Delimited(span, delim, delim_tokens) = tree {
+ if let TokenTree::Delimited(span, spacing, delim, delim_tokens) = tree {
// Inner attributes are only supported on extern blocks, functions,
// impls, and modules. All of these have their inner attributes
// placed at the beginning of the rightmost outermost braced group:
@@ -250,7 +260,7 @@ impl AttrTokenStream {
stream.push_stream(inner_attr.tokens());
}
stream.push_stream(delim_tokens.clone());
- *tree = TokenTree::Delimited(*span, *delim, stream);
+ *tree = TokenTree::Delimited(*span, *spacing, *delim, stream);
found = true;
break;
}
@@ -303,21 +313,64 @@ pub struct AttributesData {
#[derive(Clone, Debug, Default, Encodable, Decodable)]
pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);
-/// Similar to `proc_macro::Spacing`, but for tokens.
-///
-/// Note that all `ast::TokenTree::Token` instances have a `Spacing`, but when
-/// we convert to `proc_macro::TokenTree` for proc macros only `Punct`
-/// `TokenTree`s have a `proc_macro::Spacing`.
+/// Indicates whether a token can join with the following token to form a
+/// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
+/// guide pretty-printing, which is where the `JointHidden` value (which isn't
+/// part of `proc_macro::Spacing`) comes in useful.
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub enum Spacing {
- /// The token is not immediately followed by an operator token (as
- /// determined by `Token::is_op`). E.g. a `+` token is `Alone` in `+ =`,
- /// `+/*foo*/=`, `+ident`, and `+()`.
+ /// The token cannot join with the following token to form a compound
+ /// token.
+ ///
+ /// In token streams parsed from source code, the compiler will use `Alone`
+ /// for any token immediately followed by whitespace, a non-doc comment, or
+ /// EOF.
+ ///
+ /// When constructing token streams within the compiler, use this for each
+ /// token that (a) should be pretty-printed with a space after it, or (b)
+ /// is the last token in the stream. (In the latter case the choice of
+ /// spacing doesn't matter because it is never used for the last token. We
+ /// arbitrarily use `Alone`.)
+ ///
+ /// Converts to `proc_macro::Spacing::Alone`, and
+ /// `proc_macro::Spacing::Alone` converts back to this.
Alone,
- /// The token is immediately followed by an operator token. E.g. a `+`
- /// token is `Joint` in `+=` and `++`.
+ /// The token can join with the following token to form a compound token.
+ ///
+ /// In token streams parsed from source code, the compiler will use `Joint`
+ /// for any token immediately followed by punctuation (as determined by
+ /// `Token::is_punct`).
+ ///
+ /// When constructing token streams within the compiler, use this for each
+ /// token that (a) should be pretty-printed without a space after it, and
+ /// (b) is followed by a punctuation token.
+ ///
+ /// Converts to `proc_macro::Spacing::Joint`, and
+ /// `proc_macro::Spacing::Joint` converts back to this.
Joint,
+
+ /// The token can join with the following token to form a compound token,
+ /// but this will not be visible at the proc macro level. (This is what the
+ /// `Hidden` means; see below.)
+ ///
+ /// In token streams parsed from source code, the compiler will use
+ /// `JointHidden` for any token immediately followed by anything not
+ /// covered by the `Alone` and `Joint` cases: an identifier, lifetime,
+ /// literal, delimiter, or doc comment.
+ ///
+ /// When constructing token streams, use this for each token that (a)
+ /// should be pretty-printed without a space after it, and (b) is followed
+ /// by a non-punctuation token.
+ ///
+ /// Converts to `proc_macro::Spacing::Alone`, but
+ /// `proc_macro::Spacing::Alone` converts back to `token::Spacing::Alone`.
+ /// Because of that, pretty-printing of `TokenStream`s produced by proc
+ /// macros is unavoidably uglier (with more whitespace between tokens) than
+ /// pretty-printing of `TokenStream`s produced by other means (i.e. parsed
+ /// source code, internally constructed token streams, and token streams
+ /// produced by declarative macros).
+ JointHidden,
}
impl TokenStream {
@@ -421,21 +474,14 @@ impl TokenStream {
self
}
- /// Create a token stream containing a single token with alone spacing.
+ /// Create a token stream containing a single token with alone spacing. The
+ /// spacing used for the final token in a constructed stream doesn't matter
+ /// because it's never used. In practice we arbitrarily use
+ /// `Spacing::Alone`.
pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
TokenStream::new(vec![TokenTree::token_alone(kind, span)])
}
- /// Create a token stream containing a single token with joint spacing.
- pub fn token_joint(kind: TokenKind, span: Span) -> TokenStream {
- TokenStream::new(vec![TokenTree::token_joint(kind, span)])
- }
-
- /// Create a token stream containing a single `Delimited`.
- pub fn delimited(span: DelimSpan, delim: Delimiter, tts: TokenStream) -> TokenStream {
- TokenStream::new(vec![TokenTree::Delimited(span, delim, tts)])
- }
-
pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
let Some(tokens) = node.tokens() else {
panic!("missing tokens for node at {:?}: {:?}", node.span(), node);
@@ -477,13 +523,14 @@ impl TokenStream {
fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree {
match &token.kind {
- token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = **nt => {
+ token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = nt.0 => {
TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing)
}
token::Interpolated(nt) => TokenTree::Delimited(
DelimSpan::from_single(token.span),
+ DelimSpacing::new(Spacing::JointHidden, spacing),
Delimiter::Invisible,
- TokenStream::from_nonterminal_ast(nt).flattened(),
+ TokenStream::from_nonterminal_ast(&nt.0).flattened(),
),
_ => TokenTree::Token(token.clone(), spacing),
}
@@ -492,8 +539,8 @@ impl TokenStream {
fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
match tree {
TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing),
- TokenTree::Delimited(span, delim, tts) => {
- TokenTree::Delimited(*span, *delim, tts.flattened())
+ TokenTree::Delimited(span, spacing, delim, tts) => {
+ TokenTree::Delimited(*span, *spacing, *delim, tts.flattened())
}
}
}
@@ -503,7 +550,7 @@ impl TokenStream {
fn can_skip(stream: &TokenStream) -> bool {
stream.trees().all(|tree| match tree {
TokenTree::Token(token, _) => !matches!(token.kind, token::Interpolated(_)),
- TokenTree::Delimited(_, _, inner) => can_skip(inner),
+ TokenTree::Delimited(.., inner) => can_skip(inner),
})
}
@@ -517,7 +564,7 @@ impl TokenStream {
// If `vec` is not empty, try to glue `tt` onto its last token. The return
// value indicates if gluing took place.
fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
- if let Some(TokenTree::Token(last_tok, Spacing::Joint)) = vec.last()
+ if let Some(TokenTree::Token(last_tok, Spacing::Joint | Spacing::JointHidden)) = vec.last()
&& let TokenTree::Token(tok, spacing) = tt
&& let Some(glued_tok) = last_tok.glue(tok)
{
@@ -592,9 +639,10 @@ impl TokenStream {
&TokenTree::Token(..) => i += 1,
- &TokenTree::Delimited(sp, delim, ref delim_stream) => {
+ &TokenTree::Delimited(sp, spacing, delim, ref delim_stream) => {
if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
- let new_tt = TokenTree::Delimited(sp, delim, desugared_delim_stream);
+ let new_tt =
+ TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
Lrc::make_mut(&mut stream.0)[i] = new_tt;
modified = true;
}
@@ -622,10 +670,11 @@ impl TokenStream {
num_of_hashes = cmp::max(num_of_hashes, count);
}
- // `/// foo` becomes `doc = r"foo"`.
+ // `/// foo` becomes `[doc = r"foo"]`.
let delim_span = DelimSpan::from_single(span);
let body = TokenTree::Delimited(
delim_span,
+ DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
Delimiter::Bracket,
[
TokenTree::token_alone(token::Ident(sym::doc, false), span),
@@ -641,7 +690,7 @@ impl TokenStream {
if attr_style == AttrStyle::Inner {
vec![
- TokenTree::token_alone(token::Pound, span),
+ TokenTree::token_joint(token::Pound, span),
TokenTree::token_alone(token::Not, span),
body,
]
@@ -738,6 +787,18 @@ impl DelimSpan {
}
}
+#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
+pub struct DelimSpacing {
+ pub open: Spacing,
+ pub close: Spacing,
+}
+
+impl DelimSpacing {
+ pub fn new(open: Spacing, close: Spacing) -> DelimSpacing {
+ DelimSpacing { open, close }
+ }
+}
+
// Some types are used a lot. Make sure they don't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
mod size_asserts {
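The expanded `Spacing` documentation above spells out how `Alone`, `Joint`, and the new `JointHidden` map to `proc_macro::Spacing`, with `JointHidden` surfacing as `Alone` on the proc-macro side. A small sketch of that proc-macro side using only the stable `proc_macro` API (the macro name is made up for illustration; build it in a crate with `proc-macro = true`):

    use proc_macro::{Punct, Spacing, TokenStream, TokenTree};

    /// Expands to the two-token sequence `+` `=`.
    #[proc_macro]
    pub fn plus_eq(_input: TokenStream) -> TokenStream {
        // `Joint` lets the `+` glue with the following `=` to form `+=`;
        // `Alone` on the same `+` would pretty-print as `+ =`.
        let plus = TokenTree::Punct(Punct::new('+', Spacing::Joint));
        let eq = TokenTree::Punct(Punct::new('=', Spacing::Alone));
        [plus, eq].into_iter().collect()
    }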
diff --git a/compiler/rustc_ast/src/util/classify.rs b/compiler/rustc_ast/src/util/classify.rs
index 821fca665..4dece0797 100644
--- a/compiler/rustc_ast/src/util/classify.rs
+++ b/compiler/rustc_ast/src/util/classify.rs
@@ -40,15 +40,44 @@ pub fn expr_trailing_brace(mut expr: &ast::Expr) -> Option<&ast::Expr> {
| Range(_, Some(e), _)
| Ret(Some(e))
| Unary(_, e)
- | Yield(Some(e)) => {
+ | Yield(Some(e))
+ | Yeet(Some(e))
+ | Become(e) => {
expr = e;
}
Closure(closure) => {
expr = &closure.body;
}
Gen(..) | Block(..) | ForLoop(..) | If(..) | Loop(..) | Match(..) | Struct(..)
- | TryBlock(..) | While(..) => break Some(expr),
- _ => break None,
+ | TryBlock(..) | While(..) | ConstBlock(_) => break Some(expr),
+
+ // FIXME: These can end in `}`, but changing these would break stable code.
+ InlineAsm(_) | OffsetOf(_, _) | MacCall(_) | IncludedBytes(_) | FormatArgs(_) => {
+ break None;
+ }
+
+ Break(_, None)
+ | Range(_, None, _)
+ | Ret(None)
+ | Yield(None)
+ | Array(_)
+ | Call(_, _)
+ | MethodCall(_)
+ | Tup(_)
+ | Lit(_)
+ | Cast(_, _)
+ | Type(_, _)
+ | Await(_, _)
+ | Field(_, _)
+ | Index(_, _, _)
+ | Underscore
+ | Path(_, _)
+ | Continue(_)
+ | Repeat(_, _)
+ | Paren(_)
+ | Try(_)
+ | Yeet(None)
+ | Err => break None,
}
}
}
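classify.rs answers questions such as "does this expression end in `}`?", which the parser uses for statement-boundary decisions and diagnostics; the rewritten match above becomes exhaustive and picks up `ConstBlock`, `Yeet`, and `Become`. A plain stable-Rust illustration of the underlying distinction, not tied to any compiler-internal API: a brace-terminated expression in statement position is a complete statement, so the tokens after it start a new expression.

    fn as_two_statements(x: u32) -> i32 {
        // `match` ends in `}`, so it is parsed as a full statement here and the
        // trailing `-1` is a separate tail expression: this returns -1.
        match x {
            0 => {}
            _ => {}
        }
        -1
    }

    fn as_one_expression(x: u32) -> i32 {
        // Parenthesizing removes the trailing brace, so the same tokens parse
        // as a single subtraction: `(match ...) - 1`.
        (match x {
            0 => 10,
            _ => 20,
        }) - 1
    }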
diff --git a/compiler/rustc_ast/src/util/literal.rs b/compiler/rustc_ast/src/util/literal.rs
index 50eb92125..92b9adf1d 100644
--- a/compiler/rustc_ast/src/util/literal.rs
+++ b/compiler/rustc_ast/src/util/literal.rs
@@ -77,6 +77,8 @@ impl LitKind {
// new symbol because the string in the LitKind is different to the
// string in the token.
let s = symbol.as_str();
+ // Vanilla strings are so common that we optimize for the case where no chars
+ // requiring special behaviour are present.
let symbol = if s.contains(['\\', '\r']) {
let mut buf = String::with_capacity(s.len());
let mut error = Ok(());
@@ -104,27 +106,20 @@ impl LitKind {
LitKind::Str(symbol, ast::StrStyle::Cooked)
}
token::StrRaw(n) => {
- // Ditto.
- let s = symbol.as_str();
- let symbol =
- if s.contains('\r') {
- let mut buf = String::with_capacity(s.len());
- let mut error = Ok(());
- unescape_literal(s, Mode::RawStr, &mut |_, unescaped_char| {
- match unescaped_char {
- Ok(c) => buf.push(c),
- Err(err) => {
- if err.is_fatal() {
- error = Err(LitError::LexerError);
- }
- }
+ // Raw strings have no escapes, so we only need to check for invalid chars, and we
+ // can reuse the symbol on success.
+ let mut error = Ok(());
+ unescape_literal(symbol.as_str(), Mode::RawStr, &mut |_, unescaped_char| {
+ match unescaped_char {
+ Ok(_) => {}
+ Err(err) => {
+ if err.is_fatal() {
+ error = Err(LitError::LexerError);
}
- });
- error?;
- Symbol::intern(&buf)
- } else {
- symbol
- };
+ }
+ }
+ });
+ error?;
LitKind::Str(symbol, ast::StrStyle::Raw(n))
}
token::ByteStr => {
@@ -143,25 +138,19 @@ impl LitKind {
LitKind::ByteStr(buf.into(), StrStyle::Cooked)
}
token::ByteStrRaw(n) => {
+ // Raw strings have no escapes, so we only need to check for invalid chars, and we
+ // can convert the symbol directly to a `Lrc<[u8]>` on success.
let s = symbol.as_str();
- let bytes = if s.contains('\r') {
- let mut buf = Vec::with_capacity(s.len());
- let mut error = Ok(());
- unescape_literal(s, Mode::RawByteStr, &mut |_, c| match c {
- Ok(c) => buf.push(byte_from_char(c)),
- Err(err) => {
- if err.is_fatal() {
- error = Err(LitError::LexerError);
- }
+ let mut error = Ok(());
+ unescape_literal(s, Mode::RawByteStr, &mut |_, c| match c {
+ Ok(_) => {}
+ Err(err) => {
+ if err.is_fatal() {
+ error = Err(LitError::LexerError);
}
- });
- error?;
- buf
- } else {
- symbol.to_string().into_bytes()
- };
-
- LitKind::ByteStr(bytes.into(), StrStyle::Raw(n))
+ }
+ });
+ error?;
+ LitKind::ByteStr(s.to_owned().into_bytes().into(), StrStyle::Raw(n))
}
token::CStr => {
let s = symbol.as_str();
@@ -172,7 +161,6 @@ impl LitKind {
error = Err(LitError::NulInCStr(span));
}
Ok(CStrUnit::Byte(b)) => buf.push(b),
- Ok(CStrUnit::Char(c)) if c.len_utf8() == 1 => buf.push(c as u8),
Ok(CStrUnit::Char(c)) => {
buf.extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes())
}
@@ -187,18 +175,15 @@ impl LitKind {
LitKind::CStr(buf.into(), StrStyle::Cooked)
}
token::CStrRaw(n) => {
+ // Raw strings have no escapes, so we only need to check for invalid chars, and we
+ // can convert the symbol directly to a `Lrc<[u8]>` on success.
let s = symbol.as_str();
- let mut buf = Vec::with_capacity(s.len());
let mut error = Ok(());
unescape_c_string(s, Mode::RawCStr, &mut |span, c| match c {
Ok(CStrUnit::Byte(0) | CStrUnit::Char('\0')) => {
error = Err(LitError::NulInCStr(span));
}
- Ok(CStrUnit::Byte(b)) => buf.push(b),
- Ok(CStrUnit::Char(c)) if c.len_utf8() == 1 => buf.push(c as u8),
- Ok(CStrUnit::Char(c)) => {
- buf.extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes())
- }
+ Ok(_) => {}
Err(err) => {
if err.is_fatal() {
error = Err(LitError::LexerError);
@@ -206,6 +191,7 @@ impl LitKind {
}
});
error?;
+ let mut buf = s.to_owned().into_bytes();
buf.push(0);
LitKind::CStr(buf.into(), StrStyle::Raw(n))
}
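The rewritten raw-literal arms above rely on the property that raw string literals contain no escape sequences, so the source text already is the value and only invalid characters (such as a bare carriage return) need to be rejected. A small stable-Rust illustration of that property:

    fn main() {
        assert_eq!("\n".len(), 1); // cooked string: `\n` unescapes to one byte
        assert_eq!(r"\n".len(), 2); // raw string: a backslash and an `n`
        assert_eq!(br"\x41", b"\\x41"); // raw byte string: no `\x` processing
    }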
diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs
index 1caa39e2d..27f1b84f3 100644
--- a/compiler/rustc_ast/src/visit.rs
+++ b/compiler/rustc_ast/src/visit.rs
@@ -13,7 +13,7 @@
//! instance, a walker looking for item names in a module will miss all of
//! those that are created by the expansion of a macro.
-use crate::{ast::*, StaticItem};
+use crate::ast::*;
use rustc_span::symbol::Ident;
use rustc_span::Span;
@@ -559,7 +559,7 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) {
walk_list!(visitor, visit_expr, lower_bound);
walk_list!(visitor, visit_expr, upper_bound);
}
- PatKind::Wild | PatKind::Rest => {}
+ PatKind::Wild | PatKind::Rest | PatKind::Never => {}
PatKind::Tuple(elems) | PatKind::Slice(elems) | PatKind::Or(elems) => {
walk_list!(visitor, visit_pat, elems);
}
@@ -861,7 +861,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
ExprKind::Closure(box Closure {
binder,
capture_clause,
- asyncness: _,
+ coroutine_kind: _,
constness: _,
movability: _,
fn_decl,
@@ -951,7 +951,7 @@ pub fn walk_param<'a, V: Visitor<'a>>(visitor: &mut V, param: &'a Param) {
pub fn walk_arm<'a, V: Visitor<'a>>(visitor: &mut V, arm: &'a Arm) {
visitor.visit_pat(&arm.pat);
walk_list!(visitor, visit_expr, &arm.guard);
- visitor.visit_expr(&arm.body);
+ walk_list!(visitor, visit_expr, &arm.body);
walk_list!(visitor, visit_attribute, &arm.attrs);
}