summaryrefslogtreecommitdiffstats
path: root/vendor/rowan/src
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/rowan/src')
-rw-r--r--vendor/rowan/src/api.rs487
-rw-r--r--vendor/rowan/src/arc.rs463
-rw-r--r--vendor/rowan/src/ast.rs206
-rw-r--r--vendor/rowan/src/cow_mut.rs30
-rw-r--r--vendor/rowan/src/cursor.rs1284
-rw-r--r--vendor/rowan/src/green.rs42
-rw-r--r--vendor/rowan/src/green/builder.rs119
-rw-r--r--vendor/rowan/src/green/element.rs89
-rw-r--r--vendor/rowan/src/green/node.rs361
-rw-r--r--vendor/rowan/src/green/node_cache.rs157
-rw-r--r--vendor/rowan/src/green/token.rs145
-rw-r--r--vendor/rowan/src/lib.rs41
-rw-r--r--vendor/rowan/src/serde_impls.rs66
-rw-r--r--vendor/rowan/src/sll.rs129
-rw-r--r--vendor/rowan/src/syntax_text.rs311
-rw-r--r--vendor/rowan/src/utility_types.rs180
16 files changed, 4110 insertions, 0 deletions
diff --git a/vendor/rowan/src/api.rs b/vendor/rowan/src/api.rs
new file mode 100644
index 000000000..605fcc50c
--- /dev/null
+++ b/vendor/rowan/src/api.rs
@@ -0,0 +1,487 @@
+use std::{borrow::Cow, fmt, iter, marker::PhantomData, ops::Range};
+
+use crate::{
+ cursor, green::GreenTokenData, Direction, GreenNode, GreenNodeData, GreenToken, NodeOrToken,
+ SyntaxKind, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent,
+};
+
+pub trait Language: Sized + Copy + fmt::Debug + Eq + Ord + std::hash::Hash {
+ type Kind: Sized + Copy + fmt::Debug + Eq + Ord + std::hash::Hash;
+
+ fn kind_from_raw(raw: SyntaxKind) -> Self::Kind;
+ fn kind_to_raw(kind: Self::Kind) -> SyntaxKind;
+}
+
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct SyntaxNode<L: Language> {
+ raw: cursor::SyntaxNode,
+ _p: PhantomData<L>,
+}
+
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct SyntaxToken<L: Language> {
+ raw: cursor::SyntaxToken,
+ _p: PhantomData<L>,
+}
+
+pub type SyntaxElement<L> = NodeOrToken<SyntaxNode<L>, SyntaxToken<L>>;
+
+impl<L: Language> fmt::Debug for SyntaxNode<L> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if f.alternate() {
+ let mut level = 0;
+ for event in self.preorder_with_tokens() {
+ match event {
+ WalkEvent::Enter(element) => {
+ for _ in 0..level {
+ write!(f, " ")?;
+ }
+ match element {
+ NodeOrToken::Node(node) => writeln!(f, "{:?}", node)?,
+ NodeOrToken::Token(token) => writeln!(f, "{:?}", token)?,
+ }
+ level += 1;
+ }
+ WalkEvent::Leave(_) => level -= 1,
+ }
+ }
+ assert_eq!(level, 0);
+ Ok(())
+ } else {
+ write!(f, "{:?}@{:?}", self.kind(), self.text_range())
+ }
+ }
+}
+
+impl<L: Language> fmt::Display for SyntaxNode<L> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.raw, f)
+ }
+}
+
+impl<L: Language> fmt::Debug for SyntaxToken<L> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{:?}@{:?}", self.kind(), self.text_range())?;
+ if self.text().len() < 25 {
+ return write!(f, " {:?}", self.text());
+ }
+ let text = self.text();
+ for idx in 21..25 {
+ if text.is_char_boundary(idx) {
+ let text = format!("{} ...", &text[..idx]);
+ return write!(f, " {:?}", text);
+ }
+ }
+ unreachable!()
+ }
+}
+
+impl<L: Language> fmt::Display for SyntaxToken<L> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.raw, f)
+ }
+}
+
+impl<L: Language> From<SyntaxNode<L>> for SyntaxElement<L> {
+ fn from(node: SyntaxNode<L>) -> SyntaxElement<L> {
+ NodeOrToken::Node(node)
+ }
+}
+
+impl<L: Language> From<SyntaxToken<L>> for SyntaxElement<L> {
+ fn from(token: SyntaxToken<L>) -> SyntaxElement<L> {
+ NodeOrToken::Token(token)
+ }
+}
+
+impl<L: Language> SyntaxNode<L> {
+ pub fn new_root(green: GreenNode) -> SyntaxNode<L> {
+ SyntaxNode::from(cursor::SyntaxNode::new_root(green))
+ }
+ /// Returns a green tree, equal to the green tree this node
+ /// belongs to, except with this node substituted. The complexity
+ /// of the operation is proportional to the depth of the tree.
+ pub fn replace_with(&self, replacement: GreenNode) -> GreenNode {
+ self.raw.replace_with(replacement)
+ }
+
+ pub fn kind(&self) -> L::Kind {
+ L::kind_from_raw(self.raw.kind())
+ }
+
+ pub fn text_range(&self) -> TextRange {
+ self.raw.text_range()
+ }
+
+ pub fn index(&self) -> usize {
+ self.raw.index()
+ }
+
+ pub fn text(&self) -> SyntaxText {
+ self.raw.text()
+ }
+
+ pub fn green(&self) -> Cow<'_, GreenNodeData> {
+ self.raw.green()
+ }
+
+ pub fn parent(&self) -> Option<SyntaxNode<L>> {
+ self.raw.parent().map(Self::from)
+ }
+
+ pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode<L>> {
+ self.raw.ancestors().map(SyntaxNode::from)
+ }
+
+ pub fn children(&self) -> SyntaxNodeChildren<L> {
+ SyntaxNodeChildren { raw: self.raw.children(), _p: PhantomData }
+ }
+
+ pub fn children_with_tokens(&self) -> SyntaxElementChildren<L> {
+ SyntaxElementChildren { raw: self.raw.children_with_tokens(), _p: PhantomData }
+ }
+
+ pub fn first_child(&self) -> Option<SyntaxNode<L>> {
+ self.raw.first_child().map(Self::from)
+ }
+ pub fn last_child(&self) -> Option<SyntaxNode<L>> {
+ self.raw.last_child().map(Self::from)
+ }
+
+ pub fn first_child_or_token(&self) -> Option<SyntaxElement<L>> {
+ self.raw.first_child_or_token().map(NodeOrToken::from)
+ }
+ pub fn last_child_or_token(&self) -> Option<SyntaxElement<L>> {
+ self.raw.last_child_or_token().map(NodeOrToken::from)
+ }
+
+ pub fn next_sibling(&self) -> Option<SyntaxNode<L>> {
+ self.raw.next_sibling().map(Self::from)
+ }
+ pub fn prev_sibling(&self) -> Option<SyntaxNode<L>> {
+ self.raw.prev_sibling().map(Self::from)
+ }
+
+ pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
+ self.raw.next_sibling_or_token().map(NodeOrToken::from)
+ }
+ pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
+ self.raw.prev_sibling_or_token().map(NodeOrToken::from)
+ }
+
+ /// Return the leftmost token in the subtree of this node.
+ pub fn first_token(&self) -> Option<SyntaxToken<L>> {
+ self.raw.first_token().map(SyntaxToken::from)
+ }
+ /// Return the rightmost token in the subtree of this node.
+ pub fn last_token(&self) -> Option<SyntaxToken<L>> {
+ self.raw.last_token().map(SyntaxToken::from)
+ }
+
+ pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = SyntaxNode<L>> {
+ self.raw.siblings(direction).map(SyntaxNode::from)
+ }
+
+ pub fn siblings_with_tokens(
+ &self,
+ direction: Direction,
+ ) -> impl Iterator<Item = SyntaxElement<L>> {
+ self.raw.siblings_with_tokens(direction).map(SyntaxElement::from)
+ }
+
+ pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode<L>> {
+ self.raw.descendants().map(SyntaxNode::from)
+ }
+
+ pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElement<L>> {
+ self.raw.descendants_with_tokens().map(NodeOrToken::from)
+ }
+
+ /// Traverse the subtree rooted at the current node (including the current
+ /// node) in preorder, excluding tokens.
+ pub fn preorder(&self) -> Preorder<L> {
+ Preorder { raw: self.raw.preorder(), _p: PhantomData }
+ }
+
+ /// Traverse the subtree rooted at the current node (including the current
+ /// node) in preorder, including tokens.
+ pub fn preorder_with_tokens(&self) -> PreorderWithTokens<L> {
+ PreorderWithTokens { raw: self.raw.preorder_with_tokens(), _p: PhantomData }
+ }
+
+ /// Find a token in the subtree corresponding to this node, which covers the offset.
+ /// Precondition: offset must be within the node's range.
+ pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<L>> {
+ self.raw.token_at_offset(offset).map(SyntaxToken::from)
+ }
+
+ /// Return the deepest node or token in the current subtree that fully
+ /// contains the range. If the range is empty and is contained in two leaf
+ /// nodes, either one can be returned. Precondition: range must be contained
+ /// within the current node.
+ pub fn covering_element(&self, range: TextRange) -> SyntaxElement<L> {
+ NodeOrToken::from(self.raw.covering_element(range))
+ }
+
+ /// Finds a [`SyntaxElement`] which intersects with a given `range`. If
+ /// there are several intersecting elements, any one can be returned.
+ ///
+ /// The method uses binary search internally, so its complexity is
+ /// `O(log(N))` where `N = self.children_with_tokens().count()`.
+ pub fn child_or_token_at_range(&self, range: TextRange) -> Option<SyntaxElement<L>> {
+ self.raw.child_or_token_at_range(range).map(SyntaxElement::from)
+ }
+
+ /// Returns an independent copy of the subtree rooted at this node.
+ ///
+ /// The parent of the returned node will be `None`, the start offset will be
+ /// zero, but, otherwise, it'll be equivalent to the source node.
+ pub fn clone_subtree(&self) -> SyntaxNode<L> {
+ SyntaxNode::from(self.raw.clone_subtree())
+ }
+
+ pub fn clone_for_update(&self) -> SyntaxNode<L> {
+ SyntaxNode::from(self.raw.clone_for_update())
+ }
+
+ pub fn detach(&self) {
+ self.raw.detach()
+ }
+
+ pub fn splice_children(&self, to_delete: Range<usize>, to_insert: Vec<SyntaxElement<L>>) {
+ let to_insert = to_insert.into_iter().map(cursor::SyntaxElement::from).collect::<Vec<_>>();
+ self.raw.splice_children(to_delete, to_insert)
+ }
+}
+
+impl<L: Language> SyntaxToken<L> {
+ /// Returns a green tree, equal to the green tree this token
+ /// belongs to, except with this token substituted. The complexity
+ /// of the operation is proportional to the depth of the tree.
+ pub fn replace_with(&self, new_token: GreenToken) -> GreenNode {
+ self.raw.replace_with(new_token)
+ }
+
+ pub fn kind(&self) -> L::Kind {
+ L::kind_from_raw(self.raw.kind())
+ }
+
+ pub fn text_range(&self) -> TextRange {
+ self.raw.text_range()
+ }
+
+ pub fn index(&self) -> usize {
+ self.raw.index()
+ }
+
+ pub fn text(&self) -> &str {
+ self.raw.text()
+ }
+
+ pub fn green(&self) -> &GreenTokenData {
+ self.raw.green()
+ }
+
+ pub fn parent(&self) -> Option<SyntaxNode<L>> {
+ self.raw.parent().map(SyntaxNode::from)
+ }
+
+ /// Iterator over all the ancestors of this token excluding itself.
+ #[deprecated = "use `SyntaxToken::parent_ancestors` instead"]
+ pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode<L>> {
+ self.parent_ancestors()
+ }
+
+ /// Iterator over all the ancestors of this token excluding itself.
+ pub fn parent_ancestors(&self) -> impl Iterator<Item = SyntaxNode<L>> {
+ self.raw.ancestors().map(SyntaxNode::from)
+ }
+
+ pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
+ self.raw.next_sibling_or_token().map(NodeOrToken::from)
+ }
+ pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
+ self.raw.prev_sibling_or_token().map(NodeOrToken::from)
+ }
+
+ pub fn siblings_with_tokens(
+ &self,
+ direction: Direction,
+ ) -> impl Iterator<Item = SyntaxElement<L>> {
+ self.raw.siblings_with_tokens(direction).map(SyntaxElement::from)
+ }
+
+ /// Next token in the tree (i.e., not necessarily a sibling).
+ pub fn next_token(&self) -> Option<SyntaxToken<L>> {
+ self.raw.next_token().map(SyntaxToken::from)
+ }
+ /// Previous token in the tree (i.e., not necessarily a sibling).
+ pub fn prev_token(&self) -> Option<SyntaxToken<L>> {
+ self.raw.prev_token().map(SyntaxToken::from)
+ }
+
+ pub fn detach(&self) {
+ self.raw.detach()
+ }
+}
+
+impl<L: Language> SyntaxElement<L> {
+ pub fn text_range(&self) -> TextRange {
+ match self {
+ NodeOrToken::Node(it) => it.text_range(),
+ NodeOrToken::Token(it) => it.text_range(),
+ }
+ }
+
+ pub fn index(&self) -> usize {
+ match self {
+ NodeOrToken::Node(it) => it.index(),
+ NodeOrToken::Token(it) => it.index(),
+ }
+ }
+
+ pub fn kind(&self) -> L::Kind {
+ match self {
+ NodeOrToken::Node(it) => it.kind(),
+ NodeOrToken::Token(it) => it.kind(),
+ }
+ }
+
+ pub fn parent(&self) -> Option<SyntaxNode<L>> {
+ match self {
+ NodeOrToken::Node(it) => it.parent(),
+ NodeOrToken::Token(it) => it.parent(),
+ }
+ }
+
+ pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode<L>> {
+ let first = match self {
+ NodeOrToken::Node(it) => Some(it.clone()),
+ NodeOrToken::Token(it) => it.parent(),
+ };
+ iter::successors(first, SyntaxNode::parent)
+ }
+
+ pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
+ match self {
+ NodeOrToken::Node(it) => it.next_sibling_or_token(),
+ NodeOrToken::Token(it) => it.next_sibling_or_token(),
+ }
+ }
+ pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
+ match self {
+ NodeOrToken::Node(it) => it.prev_sibling_or_token(),
+ NodeOrToken::Token(it) => it.prev_sibling_or_token(),
+ }
+ }
+ pub fn detach(&self) {
+ match self {
+ NodeOrToken::Node(it) => it.detach(),
+ NodeOrToken::Token(it) => it.detach(),
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct SyntaxNodeChildren<L: Language> {
+ raw: cursor::SyntaxNodeChildren,
+ _p: PhantomData<L>,
+}
+
+impl<L: Language> Iterator for SyntaxNodeChildren<L> {
+ type Item = SyntaxNode<L>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.raw.next().map(SyntaxNode::from)
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct SyntaxElementChildren<L: Language> {
+ raw: cursor::SyntaxElementChildren,
+ _p: PhantomData<L>,
+}
+
+impl<L: Language> Iterator for SyntaxElementChildren<L> {
+ type Item = SyntaxElement<L>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.raw.next().map(NodeOrToken::from)
+ }
+}
+
+pub struct Preorder<L: Language> {
+ raw: cursor::Preorder,
+ _p: PhantomData<L>,
+}
+
+impl<L: Language> Preorder<L> {
+ pub fn skip_subtree(&mut self) {
+ self.raw.skip_subtree()
+ }
+}
+
+impl<L: Language> Iterator for Preorder<L> {
+ type Item = WalkEvent<SyntaxNode<L>>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.raw.next().map(|it| it.map(SyntaxNode::from))
+ }
+}
+
+pub struct PreorderWithTokens<L: Language> {
+ raw: cursor::PreorderWithTokens,
+ _p: PhantomData<L>,
+}
+
+impl<L: Language> PreorderWithTokens<L> {
+ pub fn skip_subtree(&mut self) {
+ self.raw.skip_subtree()
+ }
+}
+
+impl<L: Language> Iterator for PreorderWithTokens<L> {
+ type Item = WalkEvent<SyntaxElement<L>>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.raw.next().map(|it| it.map(SyntaxElement::from))
+ }
+}
+
+impl<L: Language> From<cursor::SyntaxNode> for SyntaxNode<L> {
+ fn from(raw: cursor::SyntaxNode) -> SyntaxNode<L> {
+ SyntaxNode { raw, _p: PhantomData }
+ }
+}
+
+impl<L: Language> From<SyntaxNode<L>> for cursor::SyntaxNode {
+ fn from(node: SyntaxNode<L>) -> cursor::SyntaxNode {
+ node.raw
+ }
+}
+
+impl<L: Language> From<cursor::SyntaxToken> for SyntaxToken<L> {
+ fn from(raw: cursor::SyntaxToken) -> SyntaxToken<L> {
+ SyntaxToken { raw, _p: PhantomData }
+ }
+}
+
+impl<L: Language> From<SyntaxToken<L>> for cursor::SyntaxToken {
+ fn from(token: SyntaxToken<L>) -> cursor::SyntaxToken {
+ token.raw
+ }
+}
+
+impl<L: Language> From<cursor::SyntaxElement> for SyntaxElement<L> {
+ fn from(raw: cursor::SyntaxElement) -> SyntaxElement<L> {
+ match raw {
+ NodeOrToken::Node(it) => NodeOrToken::Node(it.into()),
+ NodeOrToken::Token(it) => NodeOrToken::Token(it.into()),
+ }
+ }
+}
+
+impl<L: Language> From<SyntaxElement<L>> for cursor::SyntaxElement {
+ fn from(element: SyntaxElement<L>) -> cursor::SyntaxElement {
+ match element {
+ NodeOrToken::Node(it) => NodeOrToken::Node(it.into()),
+ NodeOrToken::Token(it) => NodeOrToken::Token(it.into()),
+ }
+ }
+}
diff --git a/vendor/rowan/src/arc.rs b/vendor/rowan/src/arc.rs
new file mode 100644
index 000000000..348831290
--- /dev/null
+++ b/vendor/rowan/src/arc.rs
@@ -0,0 +1,463 @@
+//! Vendored and stripped down version of triomphe
+use std::{
+ alloc::{self, Layout},
+ cmp::Ordering,
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+ mem::{self, ManuallyDrop},
+ ops::Deref,
+ ptr,
+ sync::atomic::{
+ self,
+ Ordering::{Acquire, Relaxed, Release},
+ },
+};
+
+use memoffset::offset_of;
+
+/// A soft limit on the amount of references that may be made to an `Arc`.
+///
+/// Going above this limit will abort your program (although not
+/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
+/// The object allocated by an Arc<T>
+#[repr(C)]
+pub(crate) struct ArcInner<T: ?Sized> {
+ pub(crate) count: atomic::AtomicUsize,
+ pub(crate) data: T,
+}
+
+unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
+unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
+
+/// An atomically reference counted shared pointer
+///
+/// See the documentation for [`Arc`] in the standard library. Unlike the
+/// standard library `Arc`, this `Arc` does not support weak reference counting.
+///
+/// [`Arc`]: https://doc.rust-lang.org/stable/std/sync/struct.Arc.html
+#[repr(transparent)]
+pub(crate) struct Arc<T: ?Sized> {
+ pub(crate) p: ptr::NonNull<ArcInner<T>>,
+ pub(crate) phantom: PhantomData<T>,
+}
+
+unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
+unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
+
+impl<T> Arc<T> {
+ /// Reconstruct the Arc<T> from a raw pointer obtained from into_raw()
+ ///
+ /// Note: This raw pointer will be offset in the allocation and must be preceded
+ /// by the atomic count.
+ ///
+ /// It is recommended to use OffsetArc for this
+ #[inline]
+ pub(crate) unsafe fn from_raw(ptr: *const T) -> Self {
+ // To find the corresponding pointer to the `ArcInner` we need
+ // to subtract the offset of the `data` field from the pointer.
+ let ptr = (ptr as *const u8).sub(offset_of!(ArcInner<T>, data));
+ Arc { p: ptr::NonNull::new_unchecked(ptr as *mut ArcInner<T>), phantom: PhantomData }
+ }
+}
+
+impl<T: ?Sized> Arc<T> {
+ #[inline]
+ fn inner(&self) -> &ArcInner<T> {
+ // This unsafety is ok because while this arc is alive we're guaranteed
+ // that the inner pointer is valid. Furthermore, we know that the
+ // `ArcInner` structure itself is `Sync` because the inner data is
+ // `Sync` as well, so we're ok loaning out an immutable pointer to these
+ // contents.
+ unsafe { &*self.ptr() }
+ }
+
+ // Non-inlined part of `drop`. Just invokes the destructor.
+ #[inline(never)]
+ unsafe fn drop_slow(&mut self) {
+ let _ = Box::from_raw(self.ptr());
+ }
+
+ /// Test pointer equality between the two Arcs, i.e. they must be the _same_
+ /// allocation
+ #[inline]
+ pub(crate) fn ptr_eq(this: &Self, other: &Self) -> bool {
+ this.ptr() == other.ptr()
+ }
+
+ pub(crate) fn ptr(&self) -> *mut ArcInner<T> {
+ self.p.as_ptr()
+ }
+}
+
+impl<T: ?Sized> Clone for Arc<T> {
+ #[inline]
+ fn clone(&self) -> Self {
+ // Using a relaxed ordering is alright here, as knowledge of the
+ // original reference prevents other threads from erroneously deleting
+ // the object.
+ //
+ // As explained in the [Boost documentation][1], Increasing the
+ // reference counter can always be done with memory_order_relaxed: New
+ // references to an object can only be formed from an existing
+ // reference, and passing an existing reference from one thread to
+ // another must already provide any required synchronization.
+ //
+ // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
+ let old_size = self.inner().count.fetch_add(1, Relaxed);
+
+ // However we need to guard against massive refcounts in case someone
+ // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+ // and users will use-after free. We racily saturate to `isize::MAX` on
+ // the assumption that there aren't ~2 billion threads incrementing
+ // the reference count at once. This branch will never be taken in
+ // any realistic program.
+ //
+ // We abort because such a program is incredibly degenerate, and we
+ // don't care to support it.
+ if old_size > MAX_REFCOUNT {
+ std::process::abort();
+ }
+
+ unsafe { Arc { p: ptr::NonNull::new_unchecked(self.ptr()), phantom: PhantomData } }
+ }
+}
+
+impl<T: ?Sized> Deref for Arc<T> {
+ type Target = T;
+
+ #[inline]
+ fn deref(&self) -> &T {
+ &self.inner().data
+ }
+}
+
+impl<T: ?Sized> Arc<T> {
+ /// Provides mutable access to the contents _if_ the `Arc` is uniquely owned.
+ #[inline]
+ pub(crate) fn get_mut(this: &mut Self) -> Option<&mut T> {
+ if this.is_unique() {
+ unsafe {
+ // See make_mut() for documentation of the threadsafety here.
+ Some(&mut (*this.ptr()).data)
+ }
+ } else {
+ None
+ }
+ }
+
+ /// Whether or not the `Arc` is uniquely owned (is the refcount 1?).
+ pub(crate) fn is_unique(&self) -> bool {
+ // See the extensive discussion in [1] for why this needs to be Acquire.
+ //
+ // [1] https://github.com/servo/servo/issues/21186
+ self.inner().count.load(Acquire) == 1
+ }
+}
+
+impl<T: ?Sized> Drop for Arc<T> {
+ #[inline]
+ fn drop(&mut self) {
+ // Because `fetch_sub` is already atomic, we do not need to synchronize
+ // with other threads unless we are going to delete the object.
+ if self.inner().count.fetch_sub(1, Release) != 1 {
+ return;
+ }
+
+ // FIXME(bholley): Use the updated comment when [2] is merged.
+ //
+ // This load is needed to prevent reordering of use of the data and
+ // deletion of the data. Because it is marked `Release`, the decreasing
+ // of the reference count synchronizes with this `Acquire` load. This
+ // means that use of the data happens before decreasing the reference
+ // count, which happens before this load, which happens before the
+ // deletion of the data.
+ //
+ // As explained in the [Boost documentation][1],
+ //
+ // > It is important to enforce any possible access to the object in one
+ // > thread (through an existing reference) to *happen before* deleting
+ // > the object in a different thread. This is achieved by a "release"
+ // > operation after dropping a reference (any access to the object
+ // > through this reference must obviously happened before), and an
+ // > "acquire" operation before deleting the object.
+ //
+ // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
+ // [2]: https://github.com/rust-lang/rust/pull/41714
+ self.inner().count.load(Acquire);
+
+ unsafe {
+ self.drop_slow();
+ }
+ }
+}
+
+impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
+ fn eq(&self, other: &Arc<T>) -> bool {
+ Self::ptr_eq(self, other) || *(*self) == *(*other)
+ }
+
+ fn ne(&self, other: &Arc<T>) -> bool {
+ !Self::ptr_eq(self, other) && *(*self) != *(*other)
+ }
+}
+
+impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
+ fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
+ (**self).partial_cmp(&**other)
+ }
+
+ fn lt(&self, other: &Arc<T>) -> bool {
+ *(*self) < *(*other)
+ }
+
+ fn le(&self, other: &Arc<T>) -> bool {
+ *(*self) <= *(*other)
+ }
+
+ fn gt(&self, other: &Arc<T>) -> bool {
+ *(*self) > *(*other)
+ }
+
+ fn ge(&self, other: &Arc<T>) -> bool {
+ *(*self) >= *(*other)
+ }
+}
+
+impl<T: ?Sized + Ord> Ord for Arc<T> {
+ fn cmp(&self, other: &Arc<T>) -> Ordering {
+ (**self).cmp(&**other)
+ }
+}
+
+impl<T: ?Sized + Eq> Eq for Arc<T> {}
+
+impl<T: ?Sized + Hash> Hash for Arc<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ (**self).hash(state)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Hash, PartialOrd)]
+#[repr(C)]
+pub(crate) struct HeaderSlice<H, T: ?Sized> {
+ pub(crate) header: H,
+ length: usize,
+ slice: T,
+}
+
+impl<H, T> HeaderSlice<H, [T]> {
+ pub(crate) fn slice(&self) -> &[T] {
+ &self.slice
+ }
+}
+
+impl<H, T> Deref for HeaderSlice<H, [T; 0]> {
+ type Target = HeaderSlice<H, [T]>;
+
+ fn deref(&self) -> &Self::Target {
+ unsafe {
+ let len = self.length;
+ let fake_slice: *const [T] =
+ ptr::slice_from_raw_parts(self as *const _ as *const T, len);
+ &*(fake_slice as *const HeaderSlice<H, [T]>)
+ }
+ }
+}
+
+/// A "thin" `Arc` containing dynamically sized data
+///
+/// This is functionally equivalent to `Arc<(H, [T])>`
+///
+/// When you create an `Arc` containing a dynamically sized type
+/// like `HeaderSlice<H, [T]>`, the `Arc` is represented on the stack
+/// as a "fat pointer", where the length of the slice is stored
+/// alongside the `Arc`'s pointer. In some situations you may wish to
+/// have a thin pointer instead, perhaps for FFI compatibility
+/// or space efficiency.
+///
+/// Note that we use `[T; 0]` in order to have the right alignment for `T`.
+///
+/// `ThinArc` solves this by storing the length in the allocation itself,
+/// via `HeaderSlice`.
+#[repr(transparent)]
+pub(crate) struct ThinArc<H, T> {
+ ptr: ptr::NonNull<ArcInner<HeaderSlice<H, [T; 0]>>>,
+ phantom: PhantomData<(H, T)>,
+}
+
+unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
+unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}
+
+// Synthesize a fat pointer from a thin pointer.
+fn thin_to_thick<H, T>(
+ thin: *mut ArcInner<HeaderSlice<H, [T; 0]>>,
+) -> *mut ArcInner<HeaderSlice<H, [T]>> {
+ let len = unsafe { (*thin).data.length };
+ let fake_slice: *mut [T] = ptr::slice_from_raw_parts_mut(thin as *mut T, len);
+ // Transplants metadata.
+ fake_slice as *mut ArcInner<HeaderSlice<H, [T]>>
+}
+
+impl<H, T> ThinArc<H, T> {
+ /// Temporarily converts |self| into a bonafide Arc and exposes it to the
+ /// provided callback. The refcount is not modified.
+ #[inline]
+ pub(crate) fn with_arc<F, U>(&self, f: F) -> U
+ where
+ F: FnOnce(&Arc<HeaderSlice<H, [T]>>) -> U,
+ {
+ // Synthesize transient Arc, which never touches the refcount of the ArcInner.
+ let transient = unsafe {
+ ManuallyDrop::new(Arc {
+ p: ptr::NonNull::new_unchecked(thin_to_thick(self.ptr.as_ptr())),
+ phantom: PhantomData,
+ })
+ };
+
+ // Expose the transient Arc to the callback, which may clone it if it wants.
+ let result = f(&transient);
+
+ // Forward the result.
+ result
+ }
+
+ /// Creates a `ThinArc` for a HeaderSlice using the given header struct and
+ /// iterator to generate the slice.
+ pub(crate) fn from_header_and_iter<I>(header: H, mut items: I) -> Self
+ where
+ I: Iterator<Item = T> + ExactSizeIterator,
+ {
+ assert_ne!(mem::size_of::<T>(), 0, "Need to think about ZST");
+
+ let num_items = items.len();
+
+ // Offset of the start of the slice in the allocation.
+ let inner_to_data_offset = offset_of!(ArcInner<HeaderSlice<H, [T; 0]>>, data);
+ let data_to_slice_offset = offset_of!(HeaderSlice<H, [T; 0]>, slice);
+ let slice_offset = inner_to_data_offset + data_to_slice_offset;
+
+ // Compute the size of the real payload.
+ let slice_size = mem::size_of::<T>().checked_mul(num_items).expect("size overflows");
+ let usable_size = slice_offset.checked_add(slice_size).expect("size overflows");
+
+ // Round up size to alignment.
+ let align = mem::align_of::<ArcInner<HeaderSlice<H, [T; 0]>>>();
+ let size = usable_size.wrapping_add(align - 1) & !(align - 1);
+ assert!(size >= usable_size, "size overflows");
+ let layout = Layout::from_size_align(size, align).expect("invalid layout");
+
+ let ptr: *mut ArcInner<HeaderSlice<H, [T; 0]>>;
+ unsafe {
+ let buffer = alloc::alloc(layout);
+
+ if buffer.is_null() {
+ alloc::handle_alloc_error(layout);
+ }
+
+ // // Synthesize the fat pointer. We do this by claiming we have a direct
+ // // pointer to a [T], and then changing the type of the borrow. The key
+ // // point here is that the length portion of the fat pointer applies
+ // // only to the number of elements in the dynamically-sized portion of
+ // // the type, so the value will be the same whether it points to a [T]
+ // // or something else with a [T] as its last member.
+ // let fake_slice: &mut [T] = slice::from_raw_parts_mut(buffer as *mut T, num_items);
+ // ptr = fake_slice as *mut [T] as *mut ArcInner<HeaderSlice<H, [T]>>;
+ ptr = buffer as *mut _;
+
+ let count = atomic::AtomicUsize::new(1);
+
+ // Write the data.
+ //
+ // Note that any panics here (i.e. from the iterator) are safe, since
+ // we'll just leak the uninitialized memory.
+ ptr::write(ptr::addr_of_mut!((*ptr).count), count);
+ ptr::write(ptr::addr_of_mut!((*ptr).data.header), header);
+ ptr::write(ptr::addr_of_mut!((*ptr).data.length), num_items);
+ if num_items != 0 {
+ let mut current = ptr::addr_of_mut!((*ptr).data.slice) as *mut T;
+ debug_assert_eq!(current as usize - buffer as usize, slice_offset);
+ for _ in 0..num_items {
+ ptr::write(
+ current,
+ items.next().expect("ExactSizeIterator over-reported length"),
+ );
+ current = current.offset(1);
+ }
+ assert!(items.next().is_none(), "ExactSizeIterator under-reported length");
+
+ // We should have consumed the buffer exactly.
+ debug_assert_eq!(current as *mut u8, buffer.add(usable_size));
+ }
+ assert!(items.next().is_none(), "ExactSizeIterator under-reported length");
+ }
+
+ ThinArc { ptr: unsafe { ptr::NonNull::new_unchecked(ptr) }, phantom: PhantomData }
+ }
+}
+
+impl<H, T> Deref for ThinArc<H, T> {
+ type Target = HeaderSlice<H, [T]>;
+
+ #[inline]
+ fn deref(&self) -> &Self::Target {
+ unsafe { &(*thin_to_thick(self.ptr.as_ptr())).data }
+ }
+}
+
+impl<H, T> Clone for ThinArc<H, T> {
+ #[inline]
+ fn clone(&self) -> Self {
+ ThinArc::with_arc(self, |a| Arc::into_thin(a.clone()))
+ }
+}
+
+impl<H, T> Drop for ThinArc<H, T> {
+ #[inline]
+ fn drop(&mut self) {
+ let _ = Arc::from_thin(ThinArc { ptr: self.ptr, phantom: PhantomData });
+ }
+}
+
+impl<H, T> Arc<HeaderSlice<H, [T]>> {
+ /// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the refcount
+ /// is not modified.
+ #[inline]
+ pub(crate) fn into_thin(a: Self) -> ThinArc<H, T> {
+ assert_eq!(a.length, a.slice.len(), "Length needs to be correct for ThinArc to work");
+ let fat_ptr: *mut ArcInner<HeaderSlice<H, [T]>> = a.ptr();
+ mem::forget(a);
+ let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
+ ThinArc {
+ ptr: unsafe {
+ ptr::NonNull::new_unchecked(thin_ptr as *mut ArcInner<HeaderSlice<H, [T; 0]>>)
+ },
+ phantom: PhantomData,
+ }
+ }
+
+ /// Converts a `ThinArc` into an `Arc`. This consumes the `ThinArc`, so the refcount
+ /// is not modified.
+ #[inline]
+ pub(crate) fn from_thin(a: ThinArc<H, T>) -> Self {
+ let ptr = thin_to_thick(a.ptr.as_ptr());
+ mem::forget(a);
+ unsafe { Arc { p: ptr::NonNull::new_unchecked(ptr), phantom: PhantomData } }
+ }
+}
+
+impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
+ #[inline]
+ fn eq(&self, other: &ThinArc<H, T>) -> bool {
+ **self == **other
+ }
+}
+
+impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}
+
+impl<H: Hash, T: Hash> Hash for ThinArc<H, T> {
+ fn hash<HSR: Hasher>(&self, state: &mut HSR) {
+ (**self).hash(state)
+ }
+}
diff --git a/vendor/rowan/src/ast.rs b/vendor/rowan/src/ast.rs
new file mode 100644
index 000000000..ea848e08a
--- /dev/null
+++ b/vendor/rowan/src/ast.rs
@@ -0,0 +1,206 @@
+//! Working with abstract syntax trees.
+//!
+//! In rowan, syntax trees are transient objects. That means that we create
+//! trees when we need them, and tear them down to save memory. In this
+//! architecture, hanging on to a particular syntax node for a long time is
+//! ill-advised, as that keeps the whole tree resident.
+//!
+//! Instead, we provide a [`SyntaxNodePtr`] type, which stores information about
+//! the _location_ of a particular syntax node in a tree. It's a small type
+//! which can be cheaply stored, and which can be resolved to a real
+//! [`SyntaxNode`] when necessary.
+//!
+//! We also provide an [`AstNode`] trait for typed AST wrapper APIs over rowan
+//! nodes.
+
+use std::{
+ fmt,
+ hash::{Hash, Hasher},
+ iter::successors,
+ marker::PhantomData,
+};
+
+use crate::{Language, SyntaxNode, SyntaxNodeChildren, TextRange};
+
+/// The main trait to go from untyped [`SyntaxNode`] to a typed AST. The
+/// conversion itself has zero runtime cost: AST and syntax nodes have exactly
+/// the same representation: a pointer to the tree root and a pointer to the
+/// node itself.
+pub trait AstNode {
+ type Language: Language;
+
+ fn can_cast(kind: <Self::Language as Language>::Kind) -> bool
+ where
+ Self: Sized;
+
+ fn cast(node: SyntaxNode<Self::Language>) -> Option<Self>
+ where
+ Self: Sized;
+
+ fn syntax(&self) -> &SyntaxNode<Self::Language>;
+
+ fn clone_for_update(&self) -> Self
+ where
+ Self: Sized,
+ {
+ Self::cast(self.syntax().clone_for_update()).unwrap()
+ }
+
+ fn clone_subtree(&self) -> Self
+ where
+ Self: Sized,
+ {
+ Self::cast(self.syntax().clone_subtree()).unwrap()
+ }
+}
+
+/// A "pointer" to a [`SyntaxNode`], via location in the source code.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SyntaxNodePtr<L: Language> {
+ kind: L::Kind,
+ range: TextRange,
+}
+
+impl<L: Language> SyntaxNodePtr<L> {
+ /// Returns a [`SyntaxNodePtr`] for the node.
+ pub fn new(node: &SyntaxNode<L>) -> Self {
+ Self { kind: node.kind(), range: node.text_range() }
+ }
+
+ /// "Dereferences" the pointer to get the [`SyntaxNode`] it points to.
+ ///
+ /// Panics if node is not found, so make sure that `root` syntax tree is
+ /// equivalent (is built from the same text) to the tree which was
+ /// originally used to get this [`SyntaxNodePtr`].
+ ///
+ /// Also panics if `root` is not actually a root (i.e. it has a parent).
+ ///
+ /// The complexity is linear in the depth of the tree and logarithmic in
+ /// tree width. As most trees are shallow, thinking about this as
+ /// `O(log(N))` in the size of the tree is not too wrong!
+ pub fn to_node(&self, root: &SyntaxNode<L>) -> SyntaxNode<L> {
+ assert!(root.parent().is_none());
+ successors(Some(root.clone()), |node| {
+ node.child_or_token_at_range(self.range).and_then(|it| it.into_node())
+ })
+ .find(|it| it.text_range() == self.range && it.kind() == self.kind)
+ .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
+ }
+
+ /// Casts this to an [`AstPtr`] to the given node type if possible.
+ pub fn cast<N: AstNode<Language = L>>(self) -> Option<AstPtr<N>> {
+ if !N::can_cast(self.kind) {
+ return None;
+ }
+ Some(AstPtr { raw: self })
+ }
+
+ /// Returns the kind of the syntax node this points to.
+ pub fn kind(&self) -> L::Kind {
+ self.kind
+ }
+
+ /// Returns the range of the syntax node this points to.
+ pub fn text_range(&self) -> TextRange {
+ self.range
+ }
+}
+
+/// Like [`SyntaxNodePtr`], but remembers the type of node.
+pub struct AstPtr<N: AstNode> {
+ raw: SyntaxNodePtr<N::Language>,
+}
+
+impl<N: AstNode> AstPtr<N> {
+ /// Returns an [`AstPtr`] for the node.
+ pub fn new(node: &N) -> Self {
+ Self { raw: SyntaxNodePtr::new(node.syntax()) }
+ }
+
+ /// Given the root node containing the node `n` that `self` is a pointer to,
+ /// returns `n`. See [`SyntaxNodePtr::to_node`].
+ pub fn to_node(&self, root: &SyntaxNode<N::Language>) -> N {
+ N::cast(self.raw.to_node(root)).unwrap()
+ }
+
+ /// Returns the underlying [`SyntaxNodePtr`].
+ pub fn syntax_node_ptr(&self) -> SyntaxNodePtr<N::Language> {
+ self.raw.clone()
+ }
+
+ /// Casts this to an [`AstPtr`] to the given node type if possible.
+ pub fn cast<U: AstNode<Language = N::Language>>(self) -> Option<AstPtr<U>> {
+ if !U::can_cast(self.raw.kind) {
+ return None;
+ }
+ Some(AstPtr { raw: self.raw })
+ }
+}
+
+impl<N: AstNode> fmt::Debug for AstPtr<N> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("AstPtr").field("raw", &self.raw).finish()
+ }
+}
+
+impl<N: AstNode> Clone for AstPtr<N> {
+ fn clone(&self) -> Self {
+ Self { raw: self.raw.clone() }
+ }
+}
+
+impl<N: AstNode> PartialEq for AstPtr<N> {
+ fn eq(&self, other: &AstPtr<N>) -> bool {
+ self.raw == other.raw
+ }
+}
+
+impl<N: AstNode> Eq for AstPtr<N> {}
+
+impl<N: AstNode> Hash for AstPtr<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.raw.hash(state)
+ }
+}
+
+impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr<N::Language> {
+ fn from(ptr: AstPtr<N>) -> SyntaxNodePtr<N::Language> {
+ ptr.raw
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct AstChildren<N: AstNode> {
+ inner: SyntaxNodeChildren<N::Language>,
+ ph: PhantomData<N>,
+}
+
+impl<N: AstNode> AstChildren<N> {
+ fn new(parent: &SyntaxNode<N::Language>) -> Self {
+ AstChildren { inner: parent.children(), ph: PhantomData }
+ }
+}
+
+impl<N: AstNode> Iterator for AstChildren<N> {
+ type Item = N;
+ fn next(&mut self) -> Option<N> {
+ self.inner.find_map(N::cast)
+ }
+}
+
+pub mod support {
+ use super::{AstChildren, AstNode};
+ use crate::{Language, SyntaxNode, SyntaxToken};
+
+ pub fn child<N: AstNode>(parent: &SyntaxNode<N::Language>) -> Option<N> {
+ parent.children().find_map(N::cast)
+ }
+
+ pub fn children<N: AstNode>(parent: &SyntaxNode<N::Language>) -> AstChildren<N> {
+ AstChildren::new(parent)
+ }
+
+ pub fn token<L: Language>(parent: &SyntaxNode<L>, kind: L::Kind) -> Option<SyntaxToken<L>> {
+ parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind)
+ }
+}
diff --git a/vendor/rowan/src/cow_mut.rs b/vendor/rowan/src/cow_mut.rs
new file mode 100644
index 000000000..c50e25b70
--- /dev/null
+++ b/vendor/rowan/src/cow_mut.rs
@@ -0,0 +1,30 @@
+#[derive(Debug)]
+pub(crate) enum CowMut<'a, T> {
+ Owned(T),
+ Borrowed(&'a mut T),
+}
+
+impl<T> std::ops::Deref for CowMut<'_, T> {
+ type Target = T;
+ fn deref(&self) -> &T {
+ match self {
+ CowMut::Owned(it) => it,
+ CowMut::Borrowed(it) => *it,
+ }
+ }
+}
+
+impl<T> std::ops::DerefMut for CowMut<'_, T> {
+ fn deref_mut(&mut self) -> &mut T {
+ match self {
+ CowMut::Owned(it) => it,
+ CowMut::Borrowed(it) => *it,
+ }
+ }
+}
+
+impl<T: Default> Default for CowMut<'_, T> {
+ fn default() -> Self {
+ CowMut::Owned(T::default())
+ }
+}
diff --git a/vendor/rowan/src/cursor.rs b/vendor/rowan/src/cursor.rs
new file mode 100644
index 000000000..39b0c9624
--- /dev/null
+++ b/vendor/rowan/src/cursor.rs
@@ -0,0 +1,1284 @@
+//! Implementation of the cursors -- API for convenient access to syntax trees.
+//!
+//! Functional programmers will recognize that this module implements a zipper
+//! for a purely functional (green) tree.
+//!
+//! A cursor node (`SyntaxNode`) points to a `GreenNode` and a parent
+//! `SyntaxNode`. This allows cursor to provide iteration over both ancestors
+//! and descendants, as well as cheap access to the absolute offset of the node in
+//! file.
+//!
+//! By default `SyntaxNode`s are immutable, but you can get a mutable copy of
+//! the tree by calling `clone_for_update`. Mutation is based on interior
+//! mutability and doesn't need `&mut`. You can have two `SyntaxNode`s pointing
+//! at different parts of the same tree; mutations via the first node will be
+//! reflected in the other.
+
+// Implementation notes:
+//
+// The implementation is utterly and horribly unsafe. This whole module is an
+// unsafety boundary. It is believed that the API here is, in principle, sound,
+// but the implementation might have bugs.
+//
+// The core type is `NodeData` -- a heap-allocated reference counted object,
+// which points to a green node or a green token, and to the parent `NodeData`.
+// Publicly-exposed `SyntaxNode` and `SyntaxToken` own a reference to
+// `NodeData`.
+//
+// `NodeData`s are transient, and are created and destroyed during tree
+// traversals. In general, only currently referenced nodes and their ancestors
+// are alive at any given moment.
+//
+// More specifically, `NodeData`'s ref count is equal to the number of
+// outstanding `SyntaxNode` and `SyntaxToken` plus the number of children with
+// non-zero ref counts. For example, if the user has only a single `SyntaxNode`
+// pointing somewhere in the middle of the tree, then all `NodeData` on the path
+// from that point towards the root have ref count equal to one.
+//
+// `NodeData` which doesn't have a parent (is a root) owns the corresponding
+// green node or token, and is responsible for freeing it.
+//
+// That's mostly it for the immutable subset of the API. Mutation is fun though,
+// you'll like it!
+//
+// Mutability is a run-time property of a tree of `NodeData`. The whole tree is
+// either mutable or immutable. `clone_for_update` clones the whole tree of
+// `NodeData`s, making it mutable (note that the green tree is re-used).
+//
+// If the tree is mutable, then all live `NodeData` are additionally linked to
+// each other via intrusive linked lists. Specifically, there are two pointers to
+// siblings, as well as a pointer to the first child. Note that only live nodes
+// are considered. If the user only has `SyntaxNode`s for the first and last
+// children of some particular node, then their `NodeData` will point at each
+// other.
+//
+// The links are used to propagate mutations across the tree. Specifically, each
+// `NodeData` remembers its index in parent. When the node is detached from or
+// attached to the tree, we need to adjust the indices of all subsequent
+// siblings. That's what makes the `for c in node.children() { c.detach() }`
+// pattern work despite the apparent iterator invalidation.
+//
+// This code is encapsulated into the sorted linked list (`sll`) module.
+//
+// The actual mutation consists of functionally "mutating" (creating a
+// structurally shared copy) the green node, and then re-spinning the tree. This
+// is a delicate process: `NodeData` point directly to the green nodes, so we
+// must make sure that those nodes don't move. Additionally, during mutation a
+// node might become or might stop being a root, so we must take care to not
+// double free / leak its green node.
+//
+// Because we can change green nodes using only shared references, handing out
+// references into green nodes in the public API would be unsound. We don't do
+// that, but we do use such references internally a lot. Additionally, for
+// tokens the underlying green token actually is immutable, so we can, and do
+// return `&str`.
+//
+// Invariants [must not leak outside of the module]:
+// - Mutability is the property of the whole tree. Intermixing elements that
+// differ in mutability is not allowed.
+// - Mutability property is persistent.
+// - References to the green elements' data are not exposed into public API
+// when the tree is mutable.
+// - TBD
+
+use std::{
+ borrow::Cow,
+ cell::Cell,
+ fmt,
+ hash::{Hash, Hasher},
+ iter,
+ mem::{self, ManuallyDrop},
+ ops::Range,
+ ptr, slice,
+};
+
+use countme::Count;
+
+use crate::{
+ green::{GreenChild, GreenElementRef, GreenNodeData, GreenTokenData, SyntaxKind},
+ sll,
+ utility_types::Delta,
+ Direction, GreenNode, GreenToken, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset,
+ WalkEvent,
+};
+
+enum Green {
+ Node { ptr: Cell<ptr::NonNull<GreenNodeData>> },
+ Token { ptr: ptr::NonNull<GreenTokenData> },
+}
+
+struct _SyntaxElement;
+
+struct NodeData {
+ _c: Count<_SyntaxElement>,
+
+ rc: Cell<u32>,
+ parent: Cell<Option<ptr::NonNull<NodeData>>>,
+ index: Cell<u32>,
+ green: Green,
+
+ /// Invariant: never changes after NodeData is created.
+ mutable: bool,
+ /// Absolute offset for immutable nodes, unused for mutable nodes.
+ offset: TextSize,
+ // The following links only have meaning when `mutable` is true.
+ first: Cell<*const NodeData>,
+ /// Invariant: never null if mutable.
+ next: Cell<*const NodeData>,
+ /// Invariant: never null if mutable.
+ prev: Cell<*const NodeData>,
+}
+
+unsafe impl sll::Elem for NodeData {
+ fn prev(&self) -> &Cell<*const Self> {
+ &self.prev
+ }
+ fn next(&self) -> &Cell<*const Self> {
+ &self.next
+ }
+ fn key(&self) -> &Cell<u32> {
+ &self.index
+ }
+}
+
+pub type SyntaxElement = NodeOrToken<SyntaxNode, SyntaxToken>;
+
+pub struct SyntaxNode {
+ ptr: ptr::NonNull<NodeData>,
+}
+
+impl Clone for SyntaxNode {
+ #[inline]
+ fn clone(&self) -> Self {
+ self.data().inc_rc();
+ SyntaxNode { ptr: self.ptr }
+ }
+}
+
+impl Drop for SyntaxNode {
+ #[inline]
+ fn drop(&mut self) {
+ if self.data().dec_rc() {
+ unsafe { free(self.ptr) }
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct SyntaxToken {
+ ptr: ptr::NonNull<NodeData>,
+}
+
+impl Clone for SyntaxToken {
+ #[inline]
+ fn clone(&self) -> Self {
+ self.data().inc_rc();
+ SyntaxToken { ptr: self.ptr }
+ }
+}
+
+impl Drop for SyntaxToken {
+ #[inline]
+ fn drop(&mut self) {
+ if self.data().dec_rc() {
+ unsafe { free(self.ptr) }
+ }
+ }
+}
+
+#[inline(never)]
+unsafe fn free(mut data: ptr::NonNull<NodeData>) {
+ loop {
+ debug_assert_eq!(data.as_ref().rc.get(), 0);
+ debug_assert!(data.as_ref().first.get().is_null());
+ let node = Box::from_raw(data.as_ptr());
+ match node.parent.take() {
+ Some(parent) => {
+ debug_assert!(parent.as_ref().rc.get() > 0);
+ if node.mutable {
+ sll::unlink(&parent.as_ref().first, &*node)
+ }
+ if parent.as_ref().dec_rc() {
+ data = parent;
+ } else {
+ break;
+ }
+ }
+ None => {
+ match &node.green {
+ Green::Node { ptr } => {
+ let _ = GreenNode::from_raw(ptr.get());
+ }
+ Green::Token { ptr } => {
+ let _ = GreenToken::from_raw(*ptr);
+ }
+ }
+ break;
+ }
+ }
+ }
+}
+
+impl NodeData {
+ #[inline]
+ fn new(
+ parent: Option<SyntaxNode>,
+ index: u32,
+ offset: TextSize,
+ green: Green,
+ mutable: bool,
+ ) -> ptr::NonNull<NodeData> {
+ let parent = ManuallyDrop::new(parent);
+ let res = NodeData {
+ _c: Count::new(),
+ rc: Cell::new(1),
+ parent: Cell::new(parent.as_ref().map(|it| it.ptr)),
+ index: Cell::new(index),
+ green,
+
+ mutable,
+ offset,
+ first: Cell::new(ptr::null()),
+ next: Cell::new(ptr::null()),
+ prev: Cell::new(ptr::null()),
+ };
+ unsafe {
+ if mutable {
+ let res_ptr: *const NodeData = &res;
+ match sll::init((*res_ptr).parent().map(|it| &it.first), res_ptr.as_ref().unwrap())
+ {
+ sll::AddToSllResult::AlreadyInSll(node) => {
+ if cfg!(debug_assertions) {
+ assert_eq!((*node).index(), (*res_ptr).index());
+ match ((*node).green(), (*res_ptr).green()) {
+ (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
+ assert!(ptr::eq(lhs, rhs))
+ }
+ (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => {
+ assert!(ptr::eq(lhs, rhs))
+ }
+ it => {
+ panic!("node/token confusion: {:?}", it)
+ }
+ }
+ }
+
+ ManuallyDrop::into_inner(parent);
+ let res = node as *mut NodeData;
+ (*res).inc_rc();
+ return ptr::NonNull::new_unchecked(res);
+ }
+ it => {
+ let res = Box::into_raw(Box::new(res));
+ it.add_to_sll(res);
+ return ptr::NonNull::new_unchecked(res);
+ }
+ }
+ }
+ ptr::NonNull::new_unchecked(Box::into_raw(Box::new(res)))
+ }
+ }
+
+ #[inline]
+ fn inc_rc(&self) {
+ let rc = match self.rc.get().checked_add(1) {
+ Some(it) => it,
+ None => std::process::abort(),
+ };
+ self.rc.set(rc)
+ }
+
+ #[inline]
+ fn dec_rc(&self) -> bool {
+ let rc = self.rc.get() - 1;
+ self.rc.set(rc);
+ rc == 0
+ }
+
+ #[inline]
+ fn key(&self) -> (ptr::NonNull<()>, TextSize) {
+ let ptr = match &self.green {
+ Green::Node { ptr } => ptr.get().cast(),
+ Green::Token { ptr } => ptr.cast(),
+ };
+ (ptr, self.offset())
+ }
+
+ #[inline]
+ fn parent_node(&self) -> Option<SyntaxNode> {
+ let parent = self.parent()?;
+ debug_assert!(matches!(parent.green, Green::Node { .. }));
+ parent.inc_rc();
+ Some(SyntaxNode { ptr: ptr::NonNull::from(parent) })
+ }
+
+ #[inline]
+ fn parent(&self) -> Option<&NodeData> {
+ self.parent.get().map(|it| unsafe { &*it.as_ptr() })
+ }
+
+ #[inline]
+ fn green(&self) -> GreenElementRef<'_> {
+ match &self.green {
+ Green::Node { ptr } => GreenElementRef::Node(unsafe { &*ptr.get().as_ptr() }),
+ Green::Token { ptr } => GreenElementRef::Token(unsafe { &*ptr.as_ref() }),
+ }
+ }
+ #[inline]
+ fn green_siblings(&self) -> slice::Iter<GreenChild> {
+ match &self.parent().map(|it| &it.green) {
+ Some(Green::Node { ptr }) => unsafe { &*ptr.get().as_ptr() }.children().raw,
+ Some(Green::Token { .. }) => {
+ debug_assert!(false);
+ [].iter()
+ }
+ None => [].iter(),
+ }
+ }
+ #[inline]
+ fn index(&self) -> u32 {
+ self.index.get()
+ }
+
+ #[inline]
+ fn offset(&self) -> TextSize {
+ if self.mutable {
+ self.offset_mut()
+ } else {
+ self.offset
+ }
+ }
+
+ #[cold]
+ fn offset_mut(&self) -> TextSize {
+ let mut res = TextSize::from(0);
+
+ let mut node = self;
+ while let Some(parent) = node.parent() {
+ let green = parent.green().into_node().unwrap();
+ res += green.children().raw.nth(node.index() as usize).unwrap().rel_offset();
+ node = parent;
+ }
+
+ res
+ }
+
+ #[inline]
+ fn text_range(&self) -> TextRange {
+ let offset = self.offset();
+ let len = self.green().text_len();
+ TextRange::at(offset, len)
+ }
+
+ #[inline]
+ fn kind(&self) -> SyntaxKind {
+ self.green().kind()
+ }
+
+ fn next_sibling(&self) -> Option<SyntaxNode> {
+ let mut siblings = self.green_siblings().enumerate();
+ let index = self.index() as usize;
+
+ siblings.nth(index);
+ siblings.find_map(|(index, child)| {
+ child.as_ref().into_node().and_then(|green| {
+ let parent = self.parent_node()?;
+ let offset = parent.offset() + child.rel_offset();
+ Some(SyntaxNode::new_child(green, parent, index as u32, offset))
+ })
+ })
+ }
+ fn prev_sibling(&self) -> Option<SyntaxNode> {
+ let mut rev_siblings = self.green_siblings().enumerate().rev();
+ let index = rev_siblings.len() - (self.index() as usize);
+
+ rev_siblings.nth(index);
+ rev_siblings.find_map(|(index, child)| {
+ child.as_ref().into_node().and_then(|green| {
+ let parent = self.parent_node()?;
+ let offset = parent.offset() + child.rel_offset();
+ Some(SyntaxNode::new_child(green, parent, index as u32, offset))
+ })
+ })
+ }
+
+ fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
+ let mut siblings = self.green_siblings().enumerate();
+ let index = self.index() as usize + 1;
+
+ siblings.nth(index).and_then(|(index, child)| {
+ let parent = self.parent_node()?;
+ let offset = parent.offset() + child.rel_offset();
+ Some(SyntaxElement::new(child.as_ref(), parent, index as u32, offset))
+ })
+ }
+ fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
+ let mut siblings = self.green_siblings().enumerate();
+ let index = self.index().checked_sub(1)? as usize;
+
+ siblings.nth(index).and_then(|(index, child)| {
+ let parent = self.parent_node()?;
+ let offset = parent.offset() + child.rel_offset();
+ Some(SyntaxElement::new(child.as_ref(), parent, index as u32, offset))
+ })
+ }
+
+ fn detach(&self) {
+ assert!(self.mutable);
+ assert!(self.rc.get() > 0);
+ let parent_ptr = match self.parent.take() {
+ Some(parent) => parent,
+ None => return,
+ };
+
+ unsafe {
+ sll::adjust(self, self.index() + 1, Delta::Sub(1));
+ let parent = parent_ptr.as_ref();
+ sll::unlink(&parent.first, self);
+
+ // Add strong ref to green
+ match self.green().to_owned() {
+ NodeOrToken::Node(it) => {
+ GreenNode::into_raw(it);
+ }
+ NodeOrToken::Token(it) => {
+ GreenToken::into_raw(it);
+ }
+ }
+
+ match parent.green() {
+ NodeOrToken::Node(green) => {
+ let green = green.remove_child(self.index() as usize);
+ parent.respine(green)
+ }
+ NodeOrToken::Token(_) => unreachable!(),
+ }
+
+ if parent.dec_rc() {
+ free(parent_ptr)
+ }
+ }
+ }
+ fn attach_child(&self, index: usize, child: &NodeData) {
+ assert!(self.mutable && child.mutable && child.parent().is_none());
+ assert!(self.rc.get() > 0 && child.rc.get() > 0);
+
+ unsafe {
+ child.index.set(index as u32);
+ child.parent.set(Some(self.into()));
+ self.inc_rc();
+
+ if !self.first.get().is_null() {
+ sll::adjust(&*self.first.get(), index as u32, Delta::Add(1));
+ }
+
+ match sll::link(&self.first, child) {
+ sll::AddToSllResult::AlreadyInSll(_) => {
+ panic!("Child already in sorted linked list")
+ }
+ it => it.add_to_sll(child),
+ }
+
+ match self.green() {
+ NodeOrToken::Node(green) => {
+ // Child is root, so it owns the green node. Steal it!
+ let child_green = match &child.green {
+ Green::Node { ptr } => GreenNode::from_raw(ptr.get()).into(),
+ Green::Token { ptr } => GreenToken::from_raw(*ptr).into(),
+ };
+
+ let green = green.insert_child(index, child_green);
+ self.respine(green);
+ }
+ NodeOrToken::Token(_) => unreachable!(),
+ }
+ }
+ }
+ unsafe fn respine(&self, mut new_green: GreenNode) {
+ let mut node = self;
+ loop {
+ let old_green = match &node.green {
+ Green::Node { ptr } => ptr.replace(ptr::NonNull::from(&*new_green)),
+ Green::Token { .. } => unreachable!(),
+ };
+ match node.parent() {
+ Some(parent) => match parent.green() {
+ NodeOrToken::Node(parent_green) => {
+ new_green =
+ parent_green.replace_child(node.index() as usize, new_green.into());
+ node = parent;
+ }
+ _ => unreachable!(),
+ },
+ None => {
+ mem::forget(new_green);
+ let _ = GreenNode::from_raw(old_green);
+ break;
+ }
+ }
+ }
+ }
+}
+
+impl SyntaxNode {
+ pub fn new_root(green: GreenNode) -> SyntaxNode {
+ let green = GreenNode::into_raw(green);
+ let green = Green::Node { ptr: Cell::new(green) };
+ SyntaxNode { ptr: NodeData::new(None, 0, 0.into(), green, false) }
+ }
+
+ pub fn new_root_mut(green: GreenNode) -> SyntaxNode {
+ let green = GreenNode::into_raw(green);
+ let green = Green::Node { ptr: Cell::new(green) };
+ SyntaxNode { ptr: NodeData::new(None, 0, 0.into(), green, true) }
+ }
+
+ fn new_child(
+ green: &GreenNodeData,
+ parent: SyntaxNode,
+ index: u32,
+ offset: TextSize,
+ ) -> SyntaxNode {
+ let mutable = parent.data().mutable;
+ let green = Green::Node { ptr: Cell::new(green.into()) };
+ SyntaxNode { ptr: NodeData::new(Some(parent), index, offset, green, mutable) }
+ }
+
+ pub fn clone_for_update(&self) -> SyntaxNode {
+ assert!(!self.data().mutable);
+ match self.parent() {
+ Some(parent) => {
+ let parent = parent.clone_for_update();
+ SyntaxNode::new_child(self.green_ref(), parent, self.data().index(), self.offset())
+ }
+ None => SyntaxNode::new_root_mut(self.green_ref().to_owned()),
+ }
+ }
+
+ pub fn clone_subtree(&self) -> SyntaxNode {
+ SyntaxNode::new_root(self.green().into())
+ }
+
+ #[inline]
+ fn data(&self) -> &NodeData {
+ unsafe { self.ptr.as_ref() }
+ }
+
+ pub fn replace_with(&self, replacement: GreenNode) -> GreenNode {
+ assert_eq!(self.kind(), replacement.kind());
+ match &self.parent() {
+ None => replacement,
+ Some(parent) => {
+ let new_parent = parent
+ .green_ref()
+ .replace_child(self.data().index() as usize, replacement.into());
+ parent.replace_with(new_parent)
+ }
+ }
+ }
+
+ #[inline]
+ pub fn kind(&self) -> SyntaxKind {
+ self.data().kind()
+ }
+
+ #[inline]
+ fn offset(&self) -> TextSize {
+ self.data().offset()
+ }
+
+ #[inline]
+ pub fn text_range(&self) -> TextRange {
+ self.data().text_range()
+ }
+
+ #[inline]
+ pub fn index(&self) -> usize {
+ self.data().index() as usize
+ }
+
+ #[inline]
+ pub fn text(&self) -> SyntaxText {
+ SyntaxText::new(self.clone())
+ }
+
+ #[inline]
+ pub fn green(&self) -> Cow<'_, GreenNodeData> {
+ let green_ref = self.green_ref();
+ match self.data().mutable {
+ false => Cow::Borrowed(green_ref),
+ true => Cow::Owned(green_ref.to_owned()),
+ }
+ }
+ #[inline]
+ fn green_ref(&self) -> &GreenNodeData {
+ self.data().green().into_node().unwrap()
+ }
+
+ #[inline]
+ pub fn parent(&self) -> Option<SyntaxNode> {
+ self.data().parent_node()
+ }
+
+ #[inline]
+ pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
+ iter::successors(Some(self.clone()), SyntaxNode::parent)
+ }
+
+ #[inline]
+ pub fn children(&self) -> SyntaxNodeChildren {
+ SyntaxNodeChildren::new(self.clone())
+ }
+
+ #[inline]
+ pub fn children_with_tokens(&self) -> SyntaxElementChildren {
+ SyntaxElementChildren::new(self.clone())
+ }
+
+ pub fn first_child(&self) -> Option<SyntaxNode> {
+ self.green_ref().children().raw.enumerate().find_map(|(index, child)| {
+ child.as_ref().into_node().map(|green| {
+ SyntaxNode::new_child(
+ green,
+ self.clone(),
+ index as u32,
+ self.offset() + child.rel_offset(),
+ )
+ })
+ })
+ }
+ pub fn last_child(&self) -> Option<SyntaxNode> {
+ self.green_ref().children().raw.enumerate().rev().find_map(|(index, child)| {
+ child.as_ref().into_node().map(|green| {
+ SyntaxNode::new_child(
+ green,
+ self.clone(),
+ index as u32,
+ self.offset() + child.rel_offset(),
+ )
+ })
+ })
+ }
+
+ pub fn first_child_or_token(&self) -> Option<SyntaxElement> {
+ self.green_ref().children().raw.next().map(|child| {
+ SyntaxElement::new(child.as_ref(), self.clone(), 0, self.offset() + child.rel_offset())
+ })
+ }
+ pub fn last_child_or_token(&self) -> Option<SyntaxElement> {
+ self.green_ref().children().raw.enumerate().next_back().map(|(index, child)| {
+ SyntaxElement::new(
+ child.as_ref(),
+ self.clone(),
+ index as u32,
+ self.offset() + child.rel_offset(),
+ )
+ })
+ }
+
+ pub fn next_sibling(&self) -> Option<SyntaxNode> {
+ self.data().next_sibling()
+ }
+ pub fn prev_sibling(&self) -> Option<SyntaxNode> {
+ self.data().prev_sibling()
+ }
+
+ pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
+ self.data().next_sibling_or_token()
+ }
+ pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
+ self.data().prev_sibling_or_token()
+ }
+
+ pub fn first_token(&self) -> Option<SyntaxToken> {
+ self.first_child_or_token()?.first_token()
+ }
+ pub fn last_token(&self) -> Option<SyntaxToken> {
+ self.last_child_or_token()?.last_token()
+ }
+
+ #[inline]
+ pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = SyntaxNode> {
+ iter::successors(Some(self.clone()), move |node| match direction {
+ Direction::Next => node.next_sibling(),
+ Direction::Prev => node.prev_sibling(),
+ })
+ }
+
+ #[inline]
+ pub fn siblings_with_tokens(
+ &self,
+ direction: Direction,
+ ) -> impl Iterator<Item = SyntaxElement> {
+ let me: SyntaxElement = self.clone().into();
+ iter::successors(Some(me), move |el| match direction {
+ Direction::Next => el.next_sibling_or_token(),
+ Direction::Prev => el.prev_sibling_or_token(),
+ })
+ }
+
+ #[inline]
+ pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode> {
+ self.preorder().filter_map(|event| match event {
+ WalkEvent::Enter(node) => Some(node),
+ WalkEvent::Leave(_) => None,
+ })
+ }
+
+ #[inline]
+ pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElement> {
+ self.preorder_with_tokens().filter_map(|event| match event {
+ WalkEvent::Enter(it) => Some(it),
+ WalkEvent::Leave(_) => None,
+ })
+ }
+
+ #[inline]
+ pub fn preorder(&self) -> Preorder {
+ Preorder::new(self.clone())
+ }
+
+ #[inline]
+ pub fn preorder_with_tokens(&self) -> PreorderWithTokens {
+ PreorderWithTokens::new(self.clone())
+ }
+
+ pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
+ // TODO: this could be faster if we first drill-down to node, and only
+ // then switch to token search. We should also replace explicit
+ // recursion with a loop.
+ let range = self.text_range();
+ assert!(
+ range.start() <= offset && offset <= range.end(),
+ "Bad offset: range {:?} offset {:?}",
+ range,
+ offset
+ );
+ if range.is_empty() {
+ return TokenAtOffset::None;
+ }
+
+ let mut children = self.children_with_tokens().filter(|child| {
+ let child_range = child.text_range();
+ !child_range.is_empty()
+ && (child_range.start() <= offset && offset <= child_range.end())
+ });
+
+ let left = children.next().unwrap();
+ let right = children.next();
+ assert!(children.next().is_none());
+
+ if let Some(right) = right {
+ match (left.token_at_offset(offset), right.token_at_offset(offset)) {
+ (TokenAtOffset::Single(left), TokenAtOffset::Single(right)) => {
+ TokenAtOffset::Between(left, right)
+ }
+ _ => unreachable!(),
+ }
+ } else {
+ left.token_at_offset(offset)
+ }
+ }
+
+ pub fn covering_element(&self, range: TextRange) -> SyntaxElement {
+ let mut res: SyntaxElement = self.clone().into();
+ loop {
+ assert!(
+ res.text_range().contains_range(range),
+ "Bad range: node range {:?}, range {:?}",
+ res.text_range(),
+ range,
+ );
+ res = match &res {
+ NodeOrToken::Token(_) => return res,
+ NodeOrToken::Node(node) => match node.child_or_token_at_range(range) {
+ Some(it) => it,
+ None => return res,
+ },
+ };
+ }
+ }
+
+ pub fn child_or_token_at_range(&self, range: TextRange) -> Option<SyntaxElement> {
+ let rel_range = range - self.offset();
+ self.green_ref().child_at_range(rel_range).map(|(index, rel_offset, green)| {
+ SyntaxElement::new(green, self.clone(), index as u32, self.offset() + rel_offset)
+ })
+ }
+
+ pub fn splice_children(&self, to_delete: Range<usize>, to_insert: Vec<SyntaxElement>) {
+ assert!(self.data().mutable, "immutable tree: {}", self);
+ for (i, child) in self.children_with_tokens().enumerate() {
+ if to_delete.contains(&i) {
+ child.detach();
+ }
+ }
+ let mut index = to_delete.start;
+ for child in to_insert {
+ self.attach_child(index, child);
+ index += 1;
+ }
+ }
+
+ pub fn detach(&self) {
+ assert!(self.data().mutable, "immutable tree: {}", self);
+ self.data().detach()
+ }
+
+ fn attach_child(&self, index: usize, child: SyntaxElement) {
+ assert!(self.data().mutable, "immutable tree: {}", self);
+ child.detach();
+ let data = match &child {
+ NodeOrToken::Node(it) => it.data(),
+ NodeOrToken::Token(it) => it.data(),
+ };
+ self.data().attach_child(index, data)
+ }
+}
+
+impl SyntaxToken {
+ fn new(
+ green: &GreenTokenData,
+ parent: SyntaxNode,
+ index: u32,
+ offset: TextSize,
+ ) -> SyntaxToken {
+ let mutable = parent.data().mutable;
+ let green = Green::Token { ptr: green.into() };
+ SyntaxToken { ptr: NodeData::new(Some(parent), index, offset, green, mutable) }
+ }
+
+ #[inline]
+ fn data(&self) -> &NodeData {
+ unsafe { self.ptr.as_ref() }
+ }
+
+ pub fn replace_with(&self, replacement: GreenToken) -> GreenNode {
+ assert_eq!(self.kind(), replacement.kind());
+ let parent = self.parent().unwrap();
+ let me: u32 = self.data().index();
+
+ let new_parent = parent.green_ref().replace_child(me as usize, replacement.into());
+ parent.replace_with(new_parent)
+ }
+
+ #[inline]
+ pub fn kind(&self) -> SyntaxKind {
+ self.data().kind()
+ }
+
+ #[inline]
+ pub fn text_range(&self) -> TextRange {
+ self.data().text_range()
+ }
+
+ #[inline]
+ pub fn index(&self) -> usize {
+ self.data().index() as usize
+ }
+
+ #[inline]
+ pub fn text(&self) -> &str {
+ match self.data().green().as_token() {
+ Some(it) => it.text(),
+ None => {
+ debug_assert!(
+ false,
+ "corrupted tree: a node thinks it is a token: {:?}",
+ self.data().green().as_node().unwrap().to_string()
+ );
+ ""
+ }
+ }
+ }
+
+ #[inline]
+ pub fn green(&self) -> &GreenTokenData {
+ self.data().green().into_token().unwrap()
+ }
+
+ #[inline]
+ pub fn parent(&self) -> Option<SyntaxNode> {
+ self.data().parent_node()
+ }
+
+ #[inline]
+ pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
+ std::iter::successors(self.parent(), SyntaxNode::parent)
+ }
+
+ pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
+ self.data().next_sibling_or_token()
+ }
+ pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
+ self.data().prev_sibling_or_token()
+ }
+
+ #[inline]
+ pub fn siblings_with_tokens(
+ &self,
+ direction: Direction,
+ ) -> impl Iterator<Item = SyntaxElement> {
+ let me: SyntaxElement = self.clone().into();
+ iter::successors(Some(me), move |el| match direction {
+ Direction::Next => el.next_sibling_or_token(),
+ Direction::Prev => el.prev_sibling_or_token(),
+ })
+ }
+
+ pub fn next_token(&self) -> Option<SyntaxToken> {
+ match self.next_sibling_or_token() {
+ Some(element) => element.first_token(),
+ None => self
+ .ancestors()
+ .find_map(|it| it.next_sibling_or_token())
+ .and_then(|element| element.first_token()),
+ }
+ }
+ pub fn prev_token(&self) -> Option<SyntaxToken> {
+ match self.prev_sibling_or_token() {
+ Some(element) => element.last_token(),
+ None => self
+ .ancestors()
+ .find_map(|it| it.prev_sibling_or_token())
+ .and_then(|element| element.last_token()),
+ }
+ }
+
+ pub fn detach(&self) {
+ assert!(self.data().mutable, "immutable tree: {}", self);
+ self.data().detach()
+ }
+}
+
+// Cursor-level `SyntaxElement`: either a `SyntaxNode` or a `SyntaxToken`.
+// Most methods simply dispatch to whichever variant is held.
+impl SyntaxElement {
+    // Wrap a green node-or-token as a red element under `parent`, at the
+    // given child `index` and absolute text `offset`.
+    fn new(
+        element: GreenElementRef<'_>,
+        parent: SyntaxNode,
+        index: u32,
+        offset: TextSize,
+    ) -> SyntaxElement {
+        match element {
+            NodeOrToken::Node(node) => {
+                SyntaxNode::new_child(node, parent, index as u32, offset).into()
+            }
+            NodeOrToken::Token(token) => {
+                SyntaxToken::new(token, parent, index as u32, offset).into()
+            }
+        }
+    }
+
+    // Absolute range of the text covered by this element.
+    #[inline]
+    pub fn text_range(&self) -> TextRange {
+        match self {
+            NodeOrToken::Node(it) => it.text_range(),
+            NodeOrToken::Token(it) => it.text_range(),
+        }
+    }
+
+    // Position of this element among its parent's children.
+    #[inline]
+    pub fn index(&self) -> usize {
+        match self {
+            NodeOrToken::Node(it) => it.index(),
+            NodeOrToken::Token(it) => it.index(),
+        }
+    }
+
+    #[inline]
+    pub fn kind(&self) -> SyntaxKind {
+        match self {
+            NodeOrToken::Node(it) => it.kind(),
+            NodeOrToken::Token(it) => it.kind(),
+        }
+    }
+
+    #[inline]
+    pub fn parent(&self) -> Option<SyntaxNode> {
+        match self {
+            NodeOrToken::Node(it) => it.parent(),
+            NodeOrToken::Token(it) => it.parent(),
+        }
+    }
+
+    // Ancestor chain: for a node it starts with the node itself, for a
+    // token it starts with the token's parent.
+    #[inline]
+    pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
+        let first = match self {
+            NodeOrToken::Node(it) => Some(it.clone()),
+            NodeOrToken::Token(it) => it.parent(),
+        };
+        iter::successors(first, SyntaxNode::parent)
+    }
+
+    // A token is its own first/last token; a node delegates to its subtree.
+    pub fn first_token(&self) -> Option<SyntaxToken> {
+        match self {
+            NodeOrToken::Node(it) => it.first_token(),
+            NodeOrToken::Token(it) => Some(it.clone()),
+        }
+    }
+    pub fn last_token(&self) -> Option<SyntaxToken> {
+        match self {
+            NodeOrToken::Node(it) => it.last_token(),
+            NodeOrToken::Token(it) => Some(it.clone()),
+        }
+    }
+
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
+        match self {
+            NodeOrToken::Node(it) => it.next_sibling_or_token(),
+            NodeOrToken::Token(it) => it.next_sibling_or_token(),
+        }
+    }
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
+        match self {
+            NodeOrToken::Node(it) => it.prev_sibling_or_token(),
+            NodeOrToken::Token(it) => it.prev_sibling_or_token(),
+        }
+    }
+
+    // Precondition (asserted): `offset` must lie within this element's range,
+    // end-inclusive.
+    fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
+        assert!(self.text_range().start() <= offset && offset <= self.text_range().end());
+        match self {
+            NodeOrToken::Token(token) => TokenAtOffset::Single(token.clone()),
+            NodeOrToken::Node(node) => node.token_at_offset(offset),
+        }
+    }
+
+    pub fn detach(&self) {
+        match self {
+            NodeOrToken::Node(it) => it.detach(),
+            NodeOrToken::Token(it) => it.detach(),
+        }
+    }
+}
+
+// region: impls
+
+// Identity semantics for hash & eq
+// (two handles are equal iff they point at the same position in the same
+// tree, as captured by `NodeData::key()` — not structural equality).
+impl PartialEq for SyntaxNode {
+    #[inline]
+    fn eq(&self, other: &SyntaxNode) -> bool {
+        self.data().key() == other.data().key()
+    }
+}
+
+impl Eq for SyntaxNode {}
+
+impl Hash for SyntaxNode {
+    #[inline]
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.data().key().hash(state);
+    }
+}
+
+impl fmt::Debug for SyntaxNode {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SyntaxNode")
+            .field("kind", &self.kind())
+            .field("text_range", &self.text_range())
+            .finish()
+    }
+}
+
+// `Display` renders the node's source text by streaming every token in the
+// subtree straight into the formatter, without building an intermediate
+// String.
+impl fmt::Display for SyntaxNode {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.preorder_with_tokens()
+            .filter_map(|event| match event {
+                WalkEvent::Enter(NodeOrToken::Token(token)) => Some(token),
+                _ => None,
+            })
+            .try_for_each(|it| fmt::Display::fmt(&it, f))
+    }
+}
+
+// Identity semantics for hash & eq
+impl PartialEq for SyntaxToken {
+    #[inline]
+    fn eq(&self, other: &SyntaxToken) -> bool {
+        self.data().key() == other.data().key()
+    }
+}
+
+impl Eq for SyntaxToken {}
+
+impl Hash for SyntaxToken {
+    #[inline]
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.data().key().hash(state);
+    }
+}
+
+impl fmt::Display for SyntaxToken {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(self.text(), f)
+    }
+}
+
+impl From<SyntaxNode> for SyntaxElement {
+    #[inline]
+    fn from(node: SyntaxNode) -> SyntaxElement {
+        NodeOrToken::Node(node)
+    }
+}
+
+impl From<SyntaxToken> for SyntaxElement {
+    #[inline]
+    fn from(token: SyntaxToken) -> SyntaxElement {
+        NodeOrToken::Token(token)
+    }
+}
+
+// endregion
+
+// region: iterators
+
+/// Iterator over the direct child *nodes* of a node (tokens are skipped).
+#[derive(Clone, Debug)]
+pub struct SyntaxNodeChildren {
+    // The next node to yield; `None` once exhausted.
+    next: Option<SyntaxNode>,
+}
+
+impl SyntaxNodeChildren {
+    fn new(parent: SyntaxNode) -> SyntaxNodeChildren {
+        SyntaxNodeChildren { next: parent.first_child() }
+    }
+}
+
+impl Iterator for SyntaxNodeChildren {
+    type Item = SyntaxNode;
+    fn next(&mut self) -> Option<SyntaxNode> {
+        // Yield the stashed node and advance to its next sibling node.
+        self.next.take().map(|next| {
+            self.next = next.next_sibling();
+            next
+        })
+    }
+}
+
+/// Iterator over all direct children of a node, both nodes and tokens.
+#[derive(Clone, Debug)]
+pub struct SyntaxElementChildren {
+    // The next element to yield; `None` once exhausted.
+    next: Option<SyntaxElement>,
+}
+
+impl SyntaxElementChildren {
+    fn new(parent: SyntaxNode) -> SyntaxElementChildren {
+        SyntaxElementChildren { next: parent.first_child_or_token() }
+    }
+}
+
+impl Iterator for SyntaxElementChildren {
+    type Item = SyntaxElement;
+    fn next(&mut self) -> Option<SyntaxElement> {
+        // Yield the stashed element and advance to its next sibling.
+        self.next.take().map(|next| {
+            self.next = next.next_sibling_or_token();
+            next
+        })
+    }
+}
+
+/// Preorder traversal over the *nodes* of a subtree, yielding paired
+/// `Enter`/`Leave` events.
+pub struct Preorder {
+    // Root of the traversal; its `Leave` event terminates iteration.
+    start: SyntaxNode,
+    // The event to yield on the next call to `next`.
+    next: Option<WalkEvent<SyntaxNode>>,
+    skip_subtree: bool,
+}
+
+impl Preorder {
+    fn new(start: SyntaxNode) -> Preorder {
+        let next = Some(WalkEvent::Enter(start.clone()));
+        Preorder { start, next, skip_subtree: false }
+    }
+
+    // Request that the most recently entered node's subtree be skipped;
+    // takes effect at the start of the next `next` call.
+    pub fn skip_subtree(&mut self) {
+        self.skip_subtree = true;
+    }
+
+    // Convert a pending `Enter` (the first child we were about to descend
+    // into) back into the `Leave` of its parent, skipping the subtree.
+    #[cold]
+    fn do_skip(&mut self) {
+        self.next = self.next.take().map(|next| match next {
+            WalkEvent::Enter(first_child) => WalkEvent::Leave(first_child.parent().unwrap()),
+            WalkEvent::Leave(parent) => WalkEvent::Leave(parent),
+        })
+    }
+}
+
+impl Iterator for Preorder {
+    type Item = WalkEvent<SyntaxNode>;
+
+    fn next(&mut self) -> Option<WalkEvent<SyntaxNode>> {
+        if self.skip_subtree {
+            self.do_skip();
+            self.skip_subtree = false;
+        }
+        let next = self.next.take();
+        // Compute the event that follows `next`: `Enter` descends to the
+        // first child (or becomes its own `Leave`); `Leave` moves to the
+        // next sibling, or bubbles up to the parent's `Leave`. Leaving
+        // `start` ends the traversal.
+        self.next = next.as_ref().and_then(|next| {
+            Some(match next {
+                WalkEvent::Enter(node) => match node.first_child() {
+                    Some(child) => WalkEvent::Enter(child),
+                    None => WalkEvent::Leave(node.clone()),
+                },
+                WalkEvent::Leave(node) => {
+                    if node == &self.start {
+                        return None;
+                    }
+                    match node.next_sibling() {
+                        Some(sibling) => WalkEvent::Enter(sibling),
+                        None => WalkEvent::Leave(node.parent()?),
+                    }
+                }
+            })
+        });
+        next
+    }
+}
+
+/// Like `Preorder`, but yields tokens as well as nodes. Tokens have no
+/// children, so their `Enter` is immediately followed by their `Leave`.
+pub struct PreorderWithTokens {
+    // Root of the traversal; its `Leave` event terminates iteration.
+    start: SyntaxElement,
+    next: Option<WalkEvent<SyntaxElement>>,
+    skip_subtree: bool,
+}
+
+impl PreorderWithTokens {
+    fn new(start: SyntaxNode) -> PreorderWithTokens {
+        let next = Some(WalkEvent::Enter(start.clone().into()));
+        PreorderWithTokens { start: start.into(), next, skip_subtree: false }
+    }
+
+    pub fn skip_subtree(&mut self) {
+        self.skip_subtree = true;
+    }
+
+    // Same as `Preorder::do_skip`: turn a pending `Enter` into the `Leave`
+    // of its parent.
+    #[cold]
+    fn do_skip(&mut self) {
+        self.next = self.next.take().map(|next| match next {
+            WalkEvent::Enter(first_child) => WalkEvent::Leave(first_child.parent().unwrap().into()),
+            WalkEvent::Leave(parent) => WalkEvent::Leave(parent),
+        })
+    }
+}
+
+impl Iterator for PreorderWithTokens {
+    type Item = WalkEvent<SyntaxElement>;
+
+    fn next(&mut self) -> Option<WalkEvent<SyntaxElement>> {
+        if self.skip_subtree {
+            self.do_skip();
+            self.skip_subtree = false;
+        }
+        let next = self.next.take();
+        self.next = next.as_ref().and_then(|next| {
+            Some(match next {
+                WalkEvent::Enter(el) => match el {
+                    NodeOrToken::Node(node) => match node.first_child_or_token() {
+                        Some(child) => WalkEvent::Enter(child),
+                        None => WalkEvent::Leave(node.clone().into()),
+                    },
+                    // Tokens are leaves: enter is followed directly by leave.
+                    NodeOrToken::Token(token) => WalkEvent::Leave(token.clone().into()),
+                },
+                WalkEvent::Leave(el) if el == &self.start => return None,
+                WalkEvent::Leave(el) => match el.next_sibling_or_token() {
+                    Some(sibling) => WalkEvent::Enter(sibling),
+                    None => WalkEvent::Leave(el.parent()?.into()),
+                },
+            })
+        });
+        next
+    }
+}
+// endregion
diff --git a/vendor/rowan/src/green.rs b/vendor/rowan/src/green.rs
new file mode 100644
index 000000000..e2b5b4a4e
--- /dev/null
+++ b/vendor/rowan/src/green.rs
@@ -0,0 +1,42 @@
+mod node;
+mod token;
+mod element;
+mod builder;
+mod node_cache;
+
+use self::element::GreenElement;
+
+pub(crate) use self::{element::GreenElementRef, node::GreenChild};
+
+pub use self::{
+ builder::{Checkpoint, GreenNodeBuilder},
+ node::{Children, GreenNode, GreenNodeData},
+ node_cache::NodeCache,
+ token::{GreenToken, GreenTokenData},
+};
+
+/// SyntaxKind is a type tag for each token or node.
+///
+/// It is a plain `u16` newtype; the meaning of each value is assigned by
+/// the consuming language implementation.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct SyntaxKind(pub u16);
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // Compile-time check that the green types may be shared across threads.
+    #[test]
+    fn assert_send_sync() {
+        fn f<T: Send + Sync>() {}
+        f::<GreenNode>();
+        f::<GreenToken>();
+        f::<GreenElement>();
+    }
+
+    // Not an assertion — prints type sizes for manual inspection.
+    #[test]
+    fn test_size_of() {
+        use std::mem::size_of;
+
+        eprintln!("GreenNode {}", size_of::<GreenNode>());
+        eprintln!("GreenToken {}", size_of::<GreenToken>());
+        eprintln!("GreenElement {}", size_of::<GreenElement>());
+    }
+}
diff --git a/vendor/rowan/src/green/builder.rs b/vendor/rowan/src/green/builder.rs
new file mode 100644
index 000000000..1dbc8bb08
--- /dev/null
+++ b/vendor/rowan/src/green/builder.rs
@@ -0,0 +1,119 @@
+use crate::{
+ cow_mut::CowMut,
+ green::{node_cache::NodeCache, GreenElement, GreenNode, SyntaxKind},
+ NodeOrToken,
+};
+
+/// A checkpoint for maybe wrapping a node. See `GreenNodeBuilder::checkpoint` for details.
+// Stores an index into `GreenNodeBuilder::children`.
+#[derive(Clone, Copy, Debug)]
+pub struct Checkpoint(usize);
+
+/// A builder for a green tree.
+#[derive(Default, Debug)]
+pub struct GreenNodeBuilder<'cache> {
+    cache: CowMut<'cache, NodeCache>,
+    // Stack of in-progress nodes: (kind, index of its first child in `children`).
+    parents: Vec<(SyntaxKind, usize)>,
+    // Flat list of completed children; the u64 is the interning hash produced
+    // by `NodeCache` (0 means "not interned").
+    children: Vec<(u64, GreenElement)>,
+}
+
+impl GreenNodeBuilder<'_> {
+    /// Creates new builder.
+    pub fn new() -> GreenNodeBuilder<'static> {
+        GreenNodeBuilder::default()
+    }
+
+    /// Reusing `NodeCache` between different `GreenNodeBuilder`s saves memory.
+    /// It allows to structurally share underlying trees.
+    pub fn with_cache(cache: &mut NodeCache) -> GreenNodeBuilder<'_> {
+        GreenNodeBuilder {
+            cache: CowMut::Borrowed(cache),
+            parents: Vec::new(),
+            children: Vec::new(),
+        }
+    }
+
+    /// Adds new token to the current branch.
+    #[inline]
+    pub fn token(&mut self, kind: SyntaxKind, text: &str) {
+        let (hash, token) = self.cache.token(kind, text);
+        self.children.push((hash, token.into()));
+    }
+
+    /// Start new node and make it current.
+    #[inline]
+    pub fn start_node(&mut self, kind: SyntaxKind) {
+        // Children pushed from here on belong to this node.
+        let len = self.children.len();
+        self.parents.push((kind, len));
+    }
+
+    /// Finish current branch and restore previous
+    /// branch as current.
+    #[inline]
+    pub fn finish_node(&mut self) {
+        // Drain this node's children off the tail of `children` and replace
+        // them with the (possibly interned) finished node.
+        let (kind, first_child) = self.parents.pop().unwrap();
+        let (hash, node) = self.cache.node(kind, &mut self.children, first_child);
+        self.children.push((hash, node.into()));
+    }
+
+    /// Prepare for maybe wrapping the next node.
+    /// The way wrapping works is that you first of all get a checkpoint,
+    /// then you place all tokens you want to wrap, and then *maybe* call
+    /// `start_node_at`.
+    /// Example:
+    /// ```rust
+    /// # use rowan::{GreenNodeBuilder, SyntaxKind};
+    /// # const PLUS: SyntaxKind = SyntaxKind(0);
+    /// # const OPERATION: SyntaxKind = SyntaxKind(1);
+    /// # struct Parser;
+    /// # impl Parser {
+    /// #     fn peek(&self) -> Option<SyntaxKind> { None }
+    /// #     fn parse_expr(&mut self) {}
+    /// # }
+    /// # let mut builder = GreenNodeBuilder::new();
+    /// # let mut parser = Parser;
+    /// let checkpoint = builder.checkpoint();
+    /// parser.parse_expr();
+    /// if parser.peek() == Some(PLUS) {
+    ///     // 1 + 2 = Add(1, 2)
+    ///     builder.start_node_at(checkpoint, OPERATION);
+    ///     parser.parse_expr();
+    ///     builder.finish_node();
+    /// }
+    /// ```
+    #[inline]
+    pub fn checkpoint(&self) -> Checkpoint {
+        Checkpoint(self.children.len())
+    }
+
+    /// Wrap the previous branch marked by `checkpoint` in a new branch and
+    /// make it current.
+    #[inline]
+    pub fn start_node_at(&mut self, checkpoint: Checkpoint, kind: SyntaxKind) {
+        let Checkpoint(checkpoint) = checkpoint;
+        assert!(
+            checkpoint <= self.children.len(),
+            "checkpoint no longer valid, was finish_node called early?"
+        );
+
+        // The checkpoint must not reach back into an already-open node.
+        if let Some(&(_, first_child)) = self.parents.last() {
+            assert!(
+                checkpoint >= first_child,
+                "checkpoint no longer valid, was an unmatched start_node_at called?"
+            );
+        }
+
+        self.parents.push((kind, checkpoint));
+    }
+
+    /// Complete tree building. Make sure that
+    /// `start_node_at` and `finish_node` calls
+    /// are paired!
+    #[inline]
+    pub fn finish(mut self) -> GreenNode {
+        // Exactly one root element must remain, and it must be a node.
+        assert_eq!(self.children.len(), 1);
+        match self.children.pop().unwrap().1 {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(_) => panic!(),
+        }
+    }
+}
diff --git a/vendor/rowan/src/green/element.rs b/vendor/rowan/src/green/element.rs
new file mode 100644
index 000000000..2d1ce1f61
--- /dev/null
+++ b/vendor/rowan/src/green/element.rs
@@ -0,0 +1,89 @@
+use std::borrow::Cow;
+
+use crate::{
+ green::{GreenNode, GreenToken, SyntaxKind},
+ GreenNodeData, NodeOrToken, TextSize,
+};
+
+use super::GreenTokenData;
+
+// Owned and borrowed node-or-token sums over the green (immutable) layer.
+pub(super) type GreenElement = NodeOrToken<GreenNode, GreenToken>;
+pub(crate) type GreenElementRef<'a> = NodeOrToken<&'a GreenNodeData, &'a GreenTokenData>;
+
+impl From<GreenNode> for GreenElement {
+    #[inline]
+    fn from(node: GreenNode) -> GreenElement {
+        NodeOrToken::Node(node)
+    }
+}
+
+impl<'a> From<&'a GreenNode> for GreenElementRef<'a> {
+    #[inline]
+    fn from(node: &'a GreenNode) -> GreenElementRef<'a> {
+        NodeOrToken::Node(node)
+    }
+}
+
+impl From<GreenToken> for GreenElement {
+    #[inline]
+    fn from(token: GreenToken) -> GreenElement {
+        NodeOrToken::Token(token)
+    }
+}
+
+impl From<Cow<'_, GreenNodeData>> for GreenElement {
+    #[inline]
+    fn from(cow: Cow<'_, GreenNodeData>) -> Self {
+        NodeOrToken::Node(cow.into_owned())
+    }
+}
+
+impl<'a> From<&'a GreenToken> for GreenElementRef<'a> {
+    #[inline]
+    fn from(token: &'a GreenToken) -> GreenElementRef<'a> {
+        NodeOrToken::Token(token)
+    }
+}
+
+impl GreenElementRef<'_> {
+    // Upgrade a borrowed element to an owned one (bumps the refcount via
+    // the `ToOwned` impls of the underlying data types).
+    pub fn to_owned(self) -> GreenElement {
+        match self {
+            NodeOrToken::Node(it) => NodeOrToken::Node(it.to_owned()),
+            NodeOrToken::Token(it) => NodeOrToken::Token(it.to_owned()),
+        }
+    }
+}
+
+impl GreenElement {
+    /// Returns kind of this element.
+    #[inline]
+    pub fn kind(&self) -> SyntaxKind {
+        self.as_deref().kind()
+    }
+
+    /// Returns the length of the text covered by this element.
+    #[inline]
+    pub fn text_len(&self) -> TextSize {
+        self.as_deref().text_len()
+    }
+}
+
+impl GreenElementRef<'_> {
+    /// Returns kind of this element.
+    #[inline]
+    pub fn kind(&self) -> SyntaxKind {
+        match self {
+            NodeOrToken::Node(it) => it.kind(),
+            NodeOrToken::Token(it) => it.kind(),
+        }
+    }
+
+    /// Returns the length of the text covered by this element.
+    #[inline]
+    pub fn text_len(self) -> TextSize {
+        match self {
+            NodeOrToken::Node(it) => it.text_len(),
+            NodeOrToken::Token(it) => it.text_len(),
+        }
+    }
+}
diff --git a/vendor/rowan/src/green/node.rs b/vendor/rowan/src/green/node.rs
new file mode 100644
index 000000000..e94dc01cf
--- /dev/null
+++ b/vendor/rowan/src/green/node.rs
@@ -0,0 +1,361 @@
+use std::{
+ borrow::{Borrow, Cow},
+ fmt,
+ iter::{self, FusedIterator},
+ mem::{self, ManuallyDrop},
+ ops, ptr, slice,
+};
+
+use countme::Count;
+
+use crate::{
+ arc::{Arc, HeaderSlice, ThinArc},
+ green::{GreenElement, GreenElementRef, SyntaxKind},
+ utility_types::static_assert,
+ GreenToken, NodeOrToken, TextRange, TextSize,
+};
+
+// Fixed-size header stored in front of the child slice of a green node.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub(super) struct GreenNodeHead {
+    kind: SyntaxKind,
+    text_len: TextSize,
+    _c: Count<GreenNode>,
+}
+
+// A child plus its text offset relative to the parent's start.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub(crate) enum GreenChild {
+    Node { rel_offset: TextSize, node: GreenNode },
+    Token { rel_offset: TextSize, token: GreenToken },
+}
+#[cfg(target_pointer_width = "64")]
+static_assert!(mem::size_of::<GreenChild>() == mem::size_of::<usize>() * 2);
+
+// `Repr` is the real (unsized) layout; `ReprThin` is the same layout with a
+// zero-length array so it can sit behind a thin pointer.
+type Repr = HeaderSlice<GreenNodeHead, [GreenChild]>;
+type ReprThin = HeaderSlice<GreenNodeHead, [GreenChild; 0]>;
+#[repr(transparent)]
+pub struct GreenNodeData {
+    data: ReprThin,
+}
+
+// Structural equality: same header and same children.
+impl PartialEq for GreenNodeData {
+    fn eq(&self, other: &Self) -> bool {
+        self.header() == other.header() && self.slice() == other.slice()
+    }
+}
+
+/// Internal node in the immutable tree.
+/// It has other nodes and tokens as children.
+#[derive(Clone, PartialEq, Eq, Hash)]
+#[repr(transparent)]
+pub struct GreenNode {
+    ptr: ThinArc<GreenNodeHead, GreenChild>,
+}
+
+impl ToOwned for GreenNodeData {
+    type Owned = GreenNode;
+
+    #[inline]
+    fn to_owned(&self) -> GreenNode {
+        // Reconstruct the owning Arc from `&self` and clone it; ManuallyDrop
+        // keeps the temporary from decrementing a refcount it never owned.
+        unsafe {
+            let green = GreenNode::from_raw(ptr::NonNull::from(self));
+            let green = ManuallyDrop::new(green);
+            GreenNode::clone(&green)
+        }
+    }
+}
+
+impl Borrow<GreenNodeData> for GreenNode {
+    #[inline]
+    fn borrow(&self) -> &GreenNodeData {
+        &*self
+    }
+}
+
+impl From<Cow<'_, GreenNodeData>> for GreenNode {
+    #[inline]
+    fn from(cow: Cow<'_, GreenNodeData>) -> Self {
+        cow.into_owned()
+    }
+}
+
+impl fmt::Debug for GreenNodeData {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("GreenNode")
+            .field("kind", &self.kind())
+            .field("text_len", &self.text_len())
+            .field("n_children", &self.children().len())
+            .finish()
+    }
+}
+
+impl fmt::Debug for GreenNode {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let data: &GreenNodeData = &*self;
+        fmt::Debug::fmt(data, f)
+    }
+}
+
+impl fmt::Display for GreenNode {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let data: &GreenNodeData = &*self;
+        fmt::Display::fmt(data, f)
+    }
+}
+
+// Display renders the concatenated text of all children.
+impl fmt::Display for GreenNodeData {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        for child in self.children() {
+            write!(f, "{}", child)?;
+        }
+        Ok(())
+    }
+}
+
+impl GreenNodeData {
+    #[inline]
+    fn header(&self) -> &GreenNodeHead {
+        &self.data.header
+    }
+
+    #[inline]
+    fn slice(&self) -> &[GreenChild] {
+        self.data.slice()
+    }
+
+    /// Kind of this node.
+    #[inline]
+    pub fn kind(&self) -> SyntaxKind {
+        self.header().kind
+    }
+
+    /// Returns the length of the text covered by this node.
+    #[inline]
+    pub fn text_len(&self) -> TextSize {
+        self.header().text_len
+    }
+
+    /// Children of this node.
+    #[inline]
+    pub fn children(&self) -> Children<'_> {
+        Children { raw: self.slice().iter() }
+    }
+
+    // Find the child whose (node-relative) range fully contains `rel_range`.
+    // Returns the child's index, relative offset, and a borrowed element,
+    // or `None` if no single child covers the range. Children are sorted by
+    // offset, so a binary search suffices.
+    pub(crate) fn child_at_range(
+        &self,
+        rel_range: TextRange,
+    ) -> Option<(usize, TextSize, GreenElementRef<'_>)> {
+        let idx = self
+            .slice()
+            .binary_search_by(|it| {
+                let child_range = it.rel_range();
+                TextRange::ordering(child_range, rel_range)
+            })
+            // XXX: this handles empty ranges
+            .unwrap_or_else(|it| it.saturating_sub(1));
+        let child = &self.slice().get(idx).filter(|it| it.rel_range().contains_range(rel_range))?;
+        Some((idx, child.rel_offset(), child.as_ref()))
+    }
+
+    // Green trees are immutable; all "mutations" below build a new node.
+    #[must_use]
+    pub fn replace_child(&self, index: usize, new_child: GreenElement) -> GreenNode {
+        let mut replacement = Some(new_child);
+        let children = self.children().enumerate().map(|(i, child)| {
+            if i == index {
+                replacement.take().unwrap()
+            } else {
+                child.to_owned()
+            }
+        });
+        GreenNode::new(self.kind(), children)
+    }
+    #[must_use]
+    pub fn insert_child(&self, index: usize, new_child: GreenElement) -> GreenNode {
+        // https://github.com/rust-lang/rust/issues/34433
+        self.splice_children(index..index, iter::once(new_child))
+    }
+    #[must_use]
+    pub fn remove_child(&self, index: usize) -> GreenNode {
+        self.splice_children(index..=index, iter::empty())
+    }
+    #[must_use]
+    pub fn splice_children<R, I>(&self, range: R, replace_with: I) -> GreenNode
+    where
+        R: ops::RangeBounds<usize>,
+        I: IntoIterator<Item = GreenElement>,
+    {
+        let mut children: Vec<_> = self.children().map(|it| it.to_owned()).collect();
+        children.splice(range, replace_with);
+        GreenNode::new(self.kind(), children)
+    }
+}
+
+impl ops::Deref for GreenNode {
+    type Target = GreenNodeData;
+
+    #[inline]
+    fn deref(&self) -> &GreenNodeData {
+        // Reinterpret the unsized `Repr` as the thin `ReprThin`;
+        // `#[repr(transparent)]` makes GreenNodeData layout-compatible.
+        unsafe {
+            let repr: &Repr = &self.ptr;
+            let repr: &ReprThin = &*(repr as *const Repr as *const ReprThin);
+            mem::transmute::<&ReprThin, &GreenNodeData>(repr)
+        }
+    }
+}
+
+impl GreenNode {
+    /// Creates new Node.
+    #[inline]
+    pub fn new<I>(kind: SyntaxKind, children: I) -> GreenNode
+    where
+        I: IntoIterator<Item = GreenElement>,
+        I::IntoIter: ExactSizeIterator,
+    {
+        // Accumulate relative offsets (and the total length) while the
+        // children are streamed into the allocation.
+        let mut text_len: TextSize = 0.into();
+        let children = children.into_iter().map(|el| {
+            let rel_offset = text_len;
+            text_len += el.text_len();
+            match el {
+                NodeOrToken::Node(node) => GreenChild::Node { rel_offset, node },
+                NodeOrToken::Token(token) => GreenChild::Token { rel_offset, token },
+            }
+        });
+
+        let data = ThinArc::from_header_and_iter(
+            GreenNodeHead { kind, text_len: 0.into(), _c: Count::new() },
+            children,
+        );
+
+        // XXX: fixup `text_len` after construction, because we can't iterate
+        // `children` twice.
+        let data = {
+            let mut data = Arc::from_thin(data);
+            Arc::get_mut(&mut data).unwrap().header.text_len = text_len;
+            Arc::into_thin(data)
+        };
+
+        GreenNode { ptr: data }
+    }
+
+    // Leak the node as a raw pointer without dropping the refcount;
+    // reversed by `from_raw`.
+    #[inline]
+    pub(crate) fn into_raw(this: GreenNode) -> ptr::NonNull<GreenNodeData> {
+        let green = ManuallyDrop::new(this);
+        let green: &GreenNodeData = &*green;
+        ptr::NonNull::from(&*green)
+    }
+
+    #[inline]
+    pub(crate) unsafe fn from_raw(ptr: ptr::NonNull<GreenNodeData>) -> GreenNode {
+        let arc = Arc::from_raw(&ptr.as_ref().data as *const ReprThin);
+        let arc = mem::transmute::<Arc<ReprThin>, ThinArc<GreenNodeHead, GreenChild>>(arc);
+        GreenNode { ptr: arc }
+    }
+}
+
+impl GreenChild {
+    // Borrow as a node-or-token reference, discarding the offset.
+    #[inline]
+    pub(crate) fn as_ref(&self) -> GreenElementRef {
+        match self {
+            GreenChild::Node { node, .. } => NodeOrToken::Node(node),
+            GreenChild::Token { token, .. } => NodeOrToken::Token(token),
+        }
+    }
+    // Text offset of this child relative to its parent's start.
+    #[inline]
+    pub(crate) fn rel_offset(&self) -> TextSize {
+        match self {
+            GreenChild::Node { rel_offset, .. } | GreenChild::Token { rel_offset, .. } => {
+                *rel_offset
+            }
+        }
+    }
+    // Parent-relative range covered by this child.
+    #[inline]
+    fn rel_range(&self) -> TextRange {
+        let len = self.as_ref().text_len();
+        TextRange::at(self.rel_offset(), len)
+    }
+}
+
+/// Iterator over the children of a green node, yielding borrowed elements.
+#[derive(Debug, Clone)]
+pub struct Children<'a> {
+    pub(crate) raw: slice::Iter<'a, GreenChild>,
+}
+
+// NB: forward everything stable that iter::Slice specializes as of Rust 1.39.0
+impl ExactSizeIterator for Children<'_> {
+    #[inline(always)]
+    fn len(&self) -> usize {
+        self.raw.len()
+    }
+}
+
+impl<'a> Iterator for Children<'a> {
+    type Item = GreenElementRef<'a>;
+
+    #[inline]
+    fn next(&mut self) -> Option<GreenElementRef<'a>> {
+        self.raw.next().map(GreenChild::as_ref)
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.raw.size_hint()
+    }
+
+    #[inline]
+    fn count(self) -> usize
+    where
+        Self: Sized,
+    {
+        self.raw.count()
+    }
+
+    #[inline]
+    fn nth(&mut self, n: usize) -> Option<Self::Item> {
+        self.raw.nth(n).map(GreenChild::as_ref)
+    }
+
+    #[inline]
+    fn last(mut self) -> Option<Self::Item>
+    where
+        Self: Sized,
+    {
+        self.next_back()
+    }
+
+    #[inline]
+    fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
+    where
+        Fold: FnMut(Acc, Self::Item) -> Acc,
+    {
+        let mut accum = init;
+        while let Some(x) = self.next() {
+            accum = f(accum, x);
+        }
+        accum
+    }
+}
+
+impl<'a> DoubleEndedIterator for Children<'a> {
+    #[inline]
+    fn next_back(&mut self) -> Option<Self::Item> {
+        self.raw.next_back().map(GreenChild::as_ref)
+    }
+
+    #[inline]
+    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
+        self.raw.nth_back(n).map(GreenChild::as_ref)
+    }
+
+    #[inline]
+    fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
+    where
+        Fold: FnMut(Acc, Self::Item) -> Acc,
+    {
+        let mut accum = init;
+        while let Some(x) = self.next_back() {
+            accum = f(accum, x);
+        }
+        accum
+    }
+}
+
+impl FusedIterator for Children<'_> {}
diff --git a/vendor/rowan/src/green/node_cache.rs b/vendor/rowan/src/green/node_cache.rs
new file mode 100644
index 000000000..c73f3e696
--- /dev/null
+++ b/vendor/rowan/src/green/node_cache.rs
@@ -0,0 +1,157 @@
+use hashbrown::hash_map::RawEntryMut;
+use rustc_hash::FxHasher;
+use std::hash::{BuildHasherDefault, Hash, Hasher};
+
+use crate::{
+ green::GreenElementRef, GreenNode, GreenNodeData, GreenToken, GreenTokenData, NodeOrToken,
+ SyntaxKind,
+};
+
+use super::element::GreenElement;
+
+type HashMap<K, V> = hashbrown::HashMap<K, V, BuildHasherDefault<FxHasher>>;
+
+// Wrapper that deliberately does NOT implement `Hash`, so map entries can
+// only be reached via the manually supplied hashes below.
+#[derive(Debug)]
+struct NoHash<T>(T);
+
+/// Interner for GreenTokens and GreenNodes
+// XXX: the impl is a bit tricky. As usual when writing interners, we want to
+// store all values in one HashSet.
+//
+// However, hashing trees is fun: hash of the tree is recursively defined. We
+// maintain an invariant -- if the tree is interned, then all of its children
+// are interned as well.
+//
+// That means that computing the hash naively is wasteful -- we just *know*
+// hashes of children, and we can re-use those.
+//
+// So here we use *raw* API of hashbrown and provide the hashes manually,
+// instead of going via a `Hash` impl. Our manual `Hash` and the
+// `#[derive(Hash)]` are actually different! At some point we had a fun bug,
+// where we accidentally mixed the two hashes, which made the cache much less
+// efficient.
+//
+// To fix that, we additionally wrap the data in `NoHash` wrapper, to make sure
+// we don't accidentally use the wrong hash!
+#[derive(Default, Debug)]
+pub struct NodeCache {
+    nodes: HashMap<NoHash<GreenNode>, ()>,
+    tokens: HashMap<NoHash<GreenToken>, ()>,
+}
+
+// Hash of a token: kind + text.
+fn token_hash(token: &GreenTokenData) -> u64 {
+    let mut h = FxHasher::default();
+    token.kind().hash(&mut h);
+    token.text().hash(&mut h);
+    h.finish()
+}
+
+// Recursive hash of a node: kind + hashes of all children.
+fn node_hash(node: &GreenNodeData) -> u64 {
+    let mut h = FxHasher::default();
+    node.kind().hash(&mut h);
+    for child in node.children() {
+        match child {
+            NodeOrToken::Node(it) => node_hash(it),
+            NodeOrToken::Token(it) => token_hash(it),
+        }
+        .hash(&mut h)
+    }
+    h.finish()
+}
+
+// Pointer identity of a green element, used to compare interned children
+// cheaply (interned children are shared, so pointer equality suffices).
+fn element_id(elem: GreenElementRef<'_>) -> *const () {
+    match elem {
+        NodeOrToken::Node(it) => it as *const GreenNodeData as *const (),
+        NodeOrToken::Token(it) => it as *const GreenTokenData as *const (),
+    }
+}
+
+impl NodeCache {
+    // Build (and possibly intern) a node of `kind` from the tail of
+    // `children` starting at `first_child`. Returns the node together with
+    // its cache hash; a hash of 0 means the node was not interned.
+    pub(crate) fn node(
+        &mut self,
+        kind: SyntaxKind,
+        children: &mut Vec<(u64, GreenElement)>,
+        first_child: usize,
+    ) -> (u64, GreenNode) {
+        let build_node = move |children: &mut Vec<(u64, GreenElement)>| {
+            GreenNode::new(kind, children.drain(first_child..).map(|(_, it)| it))
+        };
+
+        // Only small nodes (<= 3 children) are interned; larger ones are
+        // unlikely to repeat, so skip the cache entirely.
+        let children_ref = &children[first_child..];
+        if children_ref.len() > 3 {
+            let node = build_node(children);
+            return (0, node);
+        }
+
+        // A single uncached (hash == 0) child poisons the node: it can't be
+        // deduplicated either.
+        let hash = {
+            let mut h = FxHasher::default();
+            kind.hash(&mut h);
+            for &(hash, _) in children_ref {
+                if hash == 0 {
+                    let node = build_node(children);
+                    return (0, node);
+                }
+                hash.hash(&mut h);
+            }
+            h.finish()
+        };
+
+        // Green nodes are fully immutable, so it's ok to deduplicate them.
+        // This is the same optimization that Roslyn does
+        // https://github.com/KirillOsenkov/Bliki/wiki/Roslyn-Immutable-Trees
+        //
+        // For example, all `#[inline]` in this file share the same green node!
+        // For `libsyntax/parse/parser.rs`, measurements show that deduping saves
+        // 17% of the memory for green nodes!
+        let entry = self.nodes.raw_entry_mut().from_hash(hash, |node| {
+            node.0.kind() == kind && node.0.children().len() == children_ref.len() && {
+                let lhs = node.0.children();
+                let rhs = children_ref.iter().map(|(_, it)| it.as_deref());
+
+                // Children are themselves interned, so pointer identity is
+                // enough to compare them.
+                let lhs = lhs.map(element_id);
+                let rhs = rhs.map(element_id);
+
+                lhs.eq(rhs)
+            }
+        });
+
+        let node = match entry {
+            RawEntryMut::Occupied(entry) => {
+                // Cache hit: discard the pending children, reuse the node.
+                drop(children.drain(first_child..));
+                entry.key().0.clone()
+            }
+            RawEntryMut::Vacant(entry) => {
+                let node = build_node(children);
+                entry.insert_with_hasher(hash, NoHash(node.clone()), (), |n| node_hash(&n.0));
+                node
+            }
+        };
+
+        (hash, node)
+    }
+
+    // Intern a token by (kind, text); always returns a non-zero-meaningful
+    // hash computed from both.
+    pub(crate) fn token(&mut self, kind: SyntaxKind, text: &str) -> (u64, GreenToken) {
+        let hash = {
+            let mut h = FxHasher::default();
+            kind.hash(&mut h);
+            text.hash(&mut h);
+            h.finish()
+        };
+
+        let entry = self
+            .tokens
+            .raw_entry_mut()
+            .from_hash(hash, |token| token.0.kind() == kind && token.0.text() == text);
+
+        let token = match entry {
+            RawEntryMut::Occupied(entry) => entry.key().0.clone(),
+            RawEntryMut::Vacant(entry) => {
+                let token = GreenToken::new(kind, text);
+                entry.insert_with_hasher(hash, NoHash(token.clone()), (), |t| token_hash(&t.0));
+                token
+            }
+        };
+
+        (hash, token)
+    }
+}
diff --git a/vendor/rowan/src/green/token.rs b/vendor/rowan/src/green/token.rs
new file mode 100644
index 000000000..1a4548a43
--- /dev/null
+++ b/vendor/rowan/src/green/token.rs
@@ -0,0 +1,145 @@
+use std::{
+ borrow::Borrow,
+ fmt,
+ mem::{self, ManuallyDrop},
+ ops, ptr,
+};
+
+use countme::Count;
+
+use crate::{
+ arc::{Arc, HeaderSlice, ThinArc},
+ green::SyntaxKind,
+ TextSize,
+};
+
+// Fixed-size header stored in front of the token's UTF-8 text bytes.
+#[derive(PartialEq, Eq, Hash)]
+struct GreenTokenHead {
+    kind: SyntaxKind,
+    _c: Count<GreenToken>,
+}
+
+// `Repr` is the real (unsized) layout; `ReprThin` is the same layout with a
+// zero-length array so it can sit behind a thin pointer.
+type Repr = HeaderSlice<GreenTokenHead, [u8]>;
+type ReprThin = HeaderSlice<GreenTokenHead, [u8; 0]>;
+#[repr(transparent)]
+pub struct GreenTokenData {
+    data: ReprThin,
+}
+
+// Structural equality: same kind and same text.
+impl PartialEq for GreenTokenData {
+    fn eq(&self, other: &Self) -> bool {
+        self.kind() == other.kind() && self.text() == other.text()
+    }
+}
+
+/// Leaf node in the immutable tree.
+#[derive(PartialEq, Eq, Hash, Clone)]
+#[repr(transparent)]
+pub struct GreenToken {
+    ptr: ThinArc<GreenTokenHead, u8>,
+}
+
+impl ToOwned for GreenTokenData {
+    type Owned = GreenToken;
+
+    #[inline]
+    fn to_owned(&self) -> GreenToken {
+        // Reconstruct the owning Arc from `&self` and clone it; ManuallyDrop
+        // keeps the temporary from decrementing a refcount it never owned.
+        unsafe {
+            let green = GreenToken::from_raw(ptr::NonNull::from(self));
+            let green = ManuallyDrop::new(green);
+            GreenToken::clone(&green)
+        }
+    }
+}
+
+impl Borrow<GreenTokenData> for GreenToken {
+    #[inline]
+    fn borrow(&self) -> &GreenTokenData {
+        &*self
+    }
+}
+
+impl fmt::Debug for GreenTokenData {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("GreenToken")
+            .field("kind", &self.kind())
+            .field("text", &self.text())
+            .finish()
+    }
+}
+
+impl fmt::Debug for GreenToken {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let data: &GreenTokenData = &*self;
+        fmt::Debug::fmt(data, f)
+    }
+}
+
+impl fmt::Display for GreenToken {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let data: &GreenTokenData = &*self;
+        fmt::Display::fmt(data, f)
+    }
+}
+
+impl fmt::Display for GreenTokenData {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", self.text())
+    }
+}
+
+impl GreenTokenData {
+    /// Kind of this Token.
+    #[inline]
+    pub fn kind(&self) -> SyntaxKind {
+        self.data.header.kind
+    }
+
+    /// Text of this Token.
+    #[inline]
+    pub fn text(&self) -> &str {
+        // SAFETY: the byte slice was copied from a `&str` in
+        // `GreenToken::new`, so it is valid UTF-8.
+        unsafe { std::str::from_utf8_unchecked(self.data.slice()) }
+    }
+
+    /// Returns the length of the text covered by this token.
+    #[inline]
+    pub fn text_len(&self) -> TextSize {
+        TextSize::of(self.text())
+    }
+}
+
+impl GreenToken {
+    /// Creates new Token.
+    #[inline]
+    pub fn new(kind: SyntaxKind, text: &str) -> GreenToken {
+        let head = GreenTokenHead { kind, _c: Count::new() };
+        let ptr = ThinArc::from_header_and_iter(head, text.bytes());
+        GreenToken { ptr }
+    }
+    // Leak the token as a raw pointer without dropping the refcount;
+    // reversed by `from_raw`.
+    #[inline]
+    pub(crate) fn into_raw(this: GreenToken) -> ptr::NonNull<GreenTokenData> {
+        let green = ManuallyDrop::new(this);
+        let green: &GreenTokenData = &*green;
+        ptr::NonNull::from(&*green)
+    }
+
+    #[inline]
+    pub(crate) unsafe fn from_raw(ptr: ptr::NonNull<GreenTokenData>) -> GreenToken {
+        let arc = Arc::from_raw(&ptr.as_ref().data as *const ReprThin);
+        let arc = mem::transmute::<Arc<ReprThin>, ThinArc<GreenTokenHead, u8>>(arc);
+        GreenToken { ptr: arc }
+    }
+}
+
+impl ops::Deref for GreenToken {
+    type Target = GreenTokenData;
+
+    #[inline]
+    fn deref(&self) -> &GreenTokenData {
+        // Same thin-pointer reinterpretation as `GreenNode`'s Deref.
+        unsafe {
+            let repr: &Repr = &self.ptr;
+            let repr: &ReprThin = &*(repr as *const Repr as *const ReprThin);
+            mem::transmute::<&ReprThin, &GreenTokenData>(repr)
+        }
+    }
+}
diff --git a/vendor/rowan/src/lib.rs b/vendor/rowan/src/lib.rs
new file mode 100644
index 000000000..bb8f30d02
--- /dev/null
+++ b/vendor/rowan/src/lib.rs
@@ -0,0 +1,41 @@
//! A generic library for lossless syntax trees.
//! See `examples/s_expressions.rs` for a tutorial.
#![forbid(
    // missing_debug_implementations,
    unconditional_recursion,
    future_incompatible,
    // missing_docs,
)]
#![deny(unsafe_code)]

// `unsafe_code` is denied crate-wide; the handful of modules that genuinely
// need it opt back in explicitly, keeping the unsafe surface easy to audit.
#[allow(unsafe_code)]
mod green;
#[allow(unsafe_code)]
pub mod cursor;

pub mod api;
mod syntax_text;
mod utility_types;

mod cow_mut;
#[allow(unsafe_code)]
mod sll;
#[allow(unsafe_code)]
mod arc;
#[cfg(feature = "serde1")]
mod serde_impls;
pub mod ast;

// Re-export the text types so downstream crates don't need their own
// `text_size` dependency.
pub use text_size::{TextLen, TextRange, TextSize};

// Public API surface: typed wrappers (`api`), green-tree building blocks
// (`green`), and assorted utility types.
pub use crate::{
    api::{
        Language, SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
    },
    green::{
        Checkpoint, Children, GreenNode, GreenNodeBuilder, GreenNodeData, GreenToken,
        GreenTokenData, NodeCache, SyntaxKind,
    },
    syntax_text::SyntaxText,
    utility_types::{Direction, NodeOrToken, TokenAtOffset, WalkEvent},
};
diff --git a/vendor/rowan/src/serde_impls.rs b/vendor/rowan/src/serde_impls.rs
new file mode 100644
index 000000000..aea5d88b7
--- /dev/null
+++ b/vendor/rowan/src/serde_impls.rs
@@ -0,0 +1,66 @@
+use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer};
+use std::fmt;
+
+use crate::{
+ api::{Language, SyntaxNode, SyntaxToken},
+ NodeOrToken,
+};
+
/// Serializes any `Display` type as a plain string.
struct SerDisplay<T>(T);
impl<T: fmt::Display> Serialize for SerDisplay<T> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.collect_str(&self.0)
    }
}
+
/// Adapter that routes `Display` through a type's `Debug` impl — used below
/// for syntax kinds, which typically only derive `Debug`.
struct DisplayDebug<T>(T);
impl<T: fmt::Debug> fmt::Display for DisplayDebug<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.0, f)
    }
}
+
impl<L: Language> Serialize for SyntaxNode<L> {
    // A node serializes as a map: { "kind", "text_range", "children" },
    // recursing into the tree via `Children`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut state = serializer.serialize_map(Some(3))?;
        state.serialize_entry("kind", &SerDisplay(DisplayDebug(self.kind())))?;
        state.serialize_entry("text_range", &self.text_range())?;
        state.serialize_entry("children", &Children(self))?;
        state.end()
    }
}
+
impl<L: Language> Serialize for SyntaxToken<L> {
    // A token serializes as a map: { "kind", "text_range", "text" } — it is a
    // leaf, so there are no children.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut state = serializer.serialize_map(Some(3))?;
        state.serialize_entry("kind", &SerDisplay(DisplayDebug(self.kind())))?;
        state.serialize_entry("text_range", &self.text_range())?;
        state.serialize_entry("text", &self.text())?;
        state.end()
    }
}
+
/// Serializes a node's direct children (nodes and tokens alike) as a sequence.
struct Children<T>(T);

impl<L: Language> Serialize for Children<&'_ SyntaxNode<L>> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Length unknown up front, hence `None`; serde streams the elements.
        let mut state = serializer.serialize_seq(None)?;
        self.0.children_with_tokens().try_for_each(|element| match element {
            NodeOrToken::Node(it) => state.serialize_element(&it),
            NodeOrToken::Token(it) => state.serialize_element(&it),
        })?;
        state.end()
    }
}
diff --git a/vendor/rowan/src/sll.rs b/vendor/rowan/src/sll.rs
new file mode 100644
index 000000000..87d2f1f31
--- /dev/null
+++ b/vendor/rowan/src/sll.rs
@@ -0,0 +1,129 @@
+//! Sorted Linked List
+
+use std::{cell::Cell, cmp::Ordering, ptr};
+
+use crate::utility_types::Delta;
/// An intrusive element of a circular, key-sorted linked list.
///
/// # Safety
/// Implementors must keep `prev`/`next` pointing at live list elements (or at
/// the element itself — the self-linked "unlinked" state that `add_to_sll` and
/// `unlink` establish) for as long as the element participates in a list.
pub(crate) unsafe trait Elem {
    fn prev(&self) -> &Cell<*const Self>;
    fn next(&self) -> &Cell<*const Self>;
    fn key(&self) -> &Cell<u32>;
}
+
/// Where a new element should be inserted, as computed by `init`/`link`.
/// The actual mutation is deferred to `add_to_sll`.
pub(crate) enum AddToSllResult<'a, E: Elem> {
    // No list head was supplied at all.
    NoHead,
    // The list is empty: the head cell should point at the new element.
    EmptyHead(&'a Cell<*const E>),
    // The new element sorts before the current head and becomes the new head.
    SmallerThanHead(&'a Cell<*const E>),
    // The new element is inserted immediately after this (non-head) element.
    SmallerThanNotHead(*const E),
    // An element with the same key is already in the list; nothing to insert.
    AlreadyInSll(*const E),
}
+
impl<'a, E: Elem> AddToSllResult<'a, E> {
    /// Applies the insertion decision to `elem_ptr`, splicing it into the
    /// circular list (or leaving it self-linked for the no-op cases).
    pub(crate) fn add_to_sll(&self, elem_ptr: *const E) {
        unsafe {
            // Start from the self-linked ("unlinked") state; for `NoHead` and
            // `AlreadyInSll` this is also the final state.
            (*elem_ptr).prev().set(elem_ptr);
            (*elem_ptr).next().set(elem_ptr);

            match self {
                // Case 1: empty head, replace it.
                AddToSllResult::EmptyHead(head) => head.set(elem_ptr),

                // Case 2: we are smaller than the head, replace it.
                AddToSllResult::SmallerThanHead(head) => {
                    // Splice between the old head and its predecessor (the
                    // list tail, since the list is circular).
                    let old_head = head.get();
                    let prev = (*old_head).prev().replace(elem_ptr);
                    (*prev).next().set(elem_ptr);
                    (*elem_ptr).next().set(old_head);
                    (*elem_ptr).prev().set(prev);
                    head.set(elem_ptr);
                }

                // Case 3: insert in place found by looping
                AddToSllResult::SmallerThanNotHead(curr) => {
                    let next = (**curr).next().replace(elem_ptr);
                    (*next).prev().set(elem_ptr);
                    (*elem_ptr).prev().set(*curr);
                    (*elem_ptr).next().set(next);
                }
                AddToSllResult::NoHead | AddToSllResult::AlreadyInSll(_) => (),
            }
        }
    }
}
+
+#[cold]
+pub(crate) fn init<'a, E: Elem>(
+ head: Option<&'a Cell<*const E>>,
+ elem: &E,
+) -> AddToSllResult<'a, E> {
+ if let Some(head) = head {
+ link(head, elem)
+ } else {
+ AddToSllResult::NoHead
+ }
+}
+
/// Removes `elem` from the circular list, moving `head` along if `elem` was
/// the head (and clearing it if `elem` was the only element).
#[cold]
pub(crate) fn unlink<E: Elem>(head: &Cell<*const E>, elem: &E) {
    debug_assert!(!head.get().is_null(), "invalid linked list head");

    let elem_ptr: *const E = elem;

    // Extract the neighbors while restoring `elem`'s self-linked state.
    let prev = elem.prev().replace(elem_ptr);
    let next = elem.next().replace(elem_ptr);
    unsafe {
        debug_assert_eq!((*prev).next().get(), elem_ptr, "invalid linked list links");
        debug_assert_eq!((*next).prev().get(), elem_ptr, "invalid linked list links");
        (*prev).next().set(next);
        (*next).prev().set(prev);
    }

    // `next == elem_ptr` means `elem` was the sole element: the list is empty.
    if head.get() == elem_ptr {
        head.set(if next == elem_ptr { ptr::null() } else { next })
    }
}
+
/// Finds where `elem` belongs in the non-empty-or-empty list rooted at `head`,
/// without mutating the list (the caller applies the result via `add_to_sll`).
#[cold]
pub(crate) fn link<'a, E: Elem>(head: &'a Cell<*const E>, elem: &E) -> AddToSllResult<'a, E> {
    unsafe {
        let old_head = head.get();
        // Case 1: empty head, replace it.
        if old_head.is_null() {
            return AddToSllResult::EmptyHead(head);
        }

        // Case 2: we are smaller than the head, replace it.
        if elem.key() < (*old_head).key() {
            return AddToSllResult::SmallerThanHead(head);
        }

        // Case 3: loop *backward* until we find insertion place. Because of
        // Case 2, we can't loop beyond the head.
        let mut curr = (*old_head).prev().get();
        loop {
            match (*curr).key().cmp(elem.key()) {
                Ordering::Less => return AddToSllResult::SmallerThanNotHead(curr),
                Ordering::Equal => return AddToSllResult::AlreadyInSll(curr),
                Ordering::Greater => curr = (*curr).prev().get(),
            }
        }
    }
}
+
/// Shifts by `by` the key of every element whose key is `>= from`, walking the
/// whole circular list starting (and ending) at `elem`.
pub(crate) fn adjust<E: Elem>(elem: &E, from: u32, by: Delta<u32>) {
    let elem_ptr: *const E = elem;

    unsafe {
        let mut curr = elem_ptr;
        loop {
            let mut key = (*curr).key().get();
            if key >= from {
                // `Delta` applies via `AddAssign` (see `utility_types::impls!`).
                key += by;
                (*curr).key().set(key);
            }
            curr = (*curr).next().get();
            // Back at the starting element: the full circle has been visited.
            if curr == elem_ptr {
                break;
            }
        }
    }
}
diff --git a/vendor/rowan/src/syntax_text.rs b/vendor/rowan/src/syntax_text.rs
new file mode 100644
index 000000000..3ab3f6cb0
--- /dev/null
+++ b/vendor/rowan/src/syntax_text.rs
@@ -0,0 +1,311 @@
+use std::fmt;
+
+use crate::{
+ cursor::{SyntaxNode, SyntaxToken},
+ TextRange, TextSize,
+};
+
/// A view of the text covered by a syntax (sub)tree, exposed as a sequence of
/// chunks borrowed from the underlying tokens — no contiguous `String` is
/// ever materialized unless explicitly requested.
#[derive(Clone)]
pub struct SyntaxText {
    node: SyntaxNode,
    // Absolute text range this view covers; always contained in
    // `node.text_range()`.
    range: TextRange,
}
+
impl SyntaxText {
    /// Builds the text view covering the whole of `node`.
    pub(crate) fn new(node: SyntaxNode) -> SyntaxText {
        let range = node.text_range();
        SyntaxText { node, range }
    }

    /// Length of the text, in bytes.
    pub fn len(&self) -> TextSize {
        self.range.len()
    }

    /// Returns `true` if the text is empty.
    pub fn is_empty(&self) -> bool {
        self.range.is_empty()
    }

    /// Returns `true` if any chunk contains `c`.
    pub fn contains_char(&self, c: char) -> bool {
        // `Err(())` is used purely as an early-exit signal on the first hit.
        self.try_for_each_chunk(|chunk| if chunk.contains(c) { Err(()) } else { Ok(()) }).is_err()
    }

    /// Byte offset of the first occurrence of `c`, if any.
    pub fn find_char(&self, c: char) -> Option<TextSize> {
        // `acc` tracks the absolute offset of the current chunk's start.
        let mut acc: TextSize = 0.into();
        let res = self.try_for_each_chunk(|chunk| {
            if let Some(pos) = chunk.find(c) {
                let pos: TextSize = (pos as u32).into();
                // Early-exit carrying the absolute offset of the match.
                return Err(acc + pos);
            }
            acc += TextSize::of(chunk);
            Ok(())
        });
        found(res)
    }

    /// The `char` starting at byte `offset`, if the offset is in bounds.
    pub fn char_at(&self, offset: TextSize) -> Option<char> {
        // NOTE(review): this `.into()` is a no-op (`TextSize` -> `TextSize`).
        let offset = offset.into();
        let mut start: TextSize = 0.into();
        let res = self.try_for_each_chunk(|chunk| {
            let end = start + TextSize::of(chunk);
            if start <= offset && offset < end {
                let off: usize = u32::from(offset - start) as usize;
                return Err(chunk[off..].chars().next().unwrap());
            }
            start = end;
            Ok(())
        });
        found(res)
    }

    /// Sub-view for `range`, given relative to this view. Accepts `a..b`,
    /// `a..`, `..b`, `..`, and `TextRange`.
    ///
    /// # Panics
    /// Panics if the range is inverted or falls outside this view.
    pub fn slice<R: private::SyntaxTextRange>(&self, range: R) -> SyntaxText {
        let start = range.start().unwrap_or_default();
        let end = range.end().unwrap_or(self.len());
        assert!(start <= end);
        let len = end - start;
        // Translate the relative range into absolute coordinates.
        let start = self.range.start() + start;
        let end = start + len;
        assert!(
            start <= end,
            "invalid slice, range: {:?}, slice: {:?}",
            self.range,
            (range.start(), range.end()),
        );
        let range = TextRange::new(start, end);
        assert!(
            self.range.contains_range(range),
            "invalid slice, range: {:?}, slice: {:?}",
            self.range,
            range,
        );
        SyntaxText { node: self.node.clone(), range }
    }

    /// Folds over the text chunks, short-circuiting on the first `Err`.
    pub fn try_fold_chunks<T, F, E>(&self, init: T, mut f: F) -> Result<T, E>
    where
        F: FnMut(T, &str) -> Result<T, E>,
    {
        self.tokens_with_ranges()
            .try_fold(init, move |acc, (token, range)| f(acc, &token.text()[range]))
    }

    /// Visits each chunk in order, short-circuiting on the first `Err`.
    pub fn try_for_each_chunk<F: FnMut(&str) -> Result<(), E>, E>(
        &self,
        mut f: F,
    ) -> Result<(), E> {
        self.try_fold_chunks((), move |(), chunk| f(chunk))
    }

    /// Visits each chunk in order, infallibly.
    pub fn for_each_chunk<F: FnMut(&str)>(&self, mut f: F) {
        // `Void` is uninhabited, so the `Err` arm is statically unreachable.
        enum Void {}
        match self.try_for_each_chunk(|chunk| Ok::<(), Void>(f(chunk))) {
            Ok(()) => (),
            Err(void) => match void {},
        }
    }

    /// Tokens overlapping this view, each paired with the token-relative
    /// sub-range of its text that lies inside the view.
    fn tokens_with_ranges(&self) -> impl Iterator<Item = (SyntaxToken, TextRange)> {
        let text_range = self.range;
        self.node.descendants_with_tokens().filter_map(|element| element.into_token()).filter_map(
            move |token| {
                let token_range = token.text_range();
                let range = text_range.intersect(token_range)?;
                Some((token, range - token_range.start()))
            },
        )
    }
}
+
/// Converts the "early exit" result of a chunk traversal into an `Option`:
/// traversals above signal a found value by returning it inside `Err`.
fn found<T>(res: Result<(), T>) -> Option<T> {
    res.err()
}
+
+impl fmt::Debug for SyntaxText {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Debug::fmt(&self.to_string(), f)
+ }
+}
+
impl fmt::Display for SyntaxText {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Streams each chunk straight into the formatter; a formatter error
        // aborts the traversal via the `Err` short-circuit.
        self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f))
    }
}
+
impl From<SyntaxText> for String {
    fn from(text: SyntaxText) -> String {
        // Materializes all chunks into one contiguous string via `Display`.
        text.to_string()
    }
}
+
impl PartialEq<str> for SyntaxText {
    fn eq(&self, mut rhs: &str) -> bool {
        // Strip each chunk off the front of `rhs`: the texts are equal iff
        // every chunk is a prefix of what remains and nothing is left over.
        self.try_for_each_chunk(|chunk| {
            if !rhs.starts_with(chunk) {
                return Err(());
            }
            rhs = &rhs[chunk.len()..];
            Ok(())
        })
        .is_ok()
            && rhs.is_empty()
    }
}
+
// Symmetric and by-reference forwarders so comparisons work in both
// directions and with `&str` operands.
impl PartialEq<SyntaxText> for str {
    fn eq(&self, rhs: &SyntaxText) -> bool {
        rhs == self
    }
}

impl PartialEq<&'_ str> for SyntaxText {
    fn eq(&self, rhs: &&str) -> bool {
        self == *rhs
    }
}

impl PartialEq<SyntaxText> for &'_ str {
    fn eq(&self, rhs: &SyntaxText) -> bool {
        rhs == self
    }
}
+
impl PartialEq for SyntaxText {
    fn eq(&self, other: &SyntaxText) -> bool {
        // Fast path: texts of different lengths can never be equal.
        if self.range.len() != other.range.len() {
            return false;
        }
        let mut lhs = self.tokens_with_ranges();
        let mut rhs = other.tokens_with_ranges();
        // `zip_texts` yields `Some(())` on a definite mismatch and `None`
        // once either side is exhausted; any leftover chunks on the other
        // side must then be empty for the texts to be equal.
        zip_texts(&mut lhs, &mut rhs).is_none()
            && lhs.all(|it| it.1.is_empty())
            && rhs.all(|it| it.1.is_empty())
    }
}
+
/// Advances both chunk streams in lock step, comparing the overlapping text.
///
/// Returns `Some(())` as soon as a definite mismatch is found, and `None`
/// when either stream runs out; the caller is responsible for checking that
/// the other stream holds no remaining text.
fn zip_texts<I: Iterator<Item = (SyntaxToken, TextRange)>>(xs: &mut I, ys: &mut I) -> Option<()> {
    let mut x = xs.next()?;
    let mut y = ys.next()?;
    loop {
        // Skip fully-consumed (empty-range) chunks on either side.
        while x.1.is_empty() {
            x = xs.next()?;
        }
        while y.1.is_empty() {
            y = ys.next()?;
        }
        let x_text = &x.0.text()[x.1];
        let y_text = &y.0.text()[y.1];
        // The shorter chunk must be a prefix of the longer one, otherwise the
        // texts differ at this position.
        if !(x_text.starts_with(y_text) || y_text.starts_with(x_text)) {
            return Some(());
        }
        // Consume the compared prefix from both sides.
        let advance = std::cmp::min(x.1.len(), y.1.len());
        x.1 = TextRange::new(x.1.start() + advance, x.1.end());
        y.1 = TextRange::new(y.1.start() + advance, y.1.end());
    }
}
+
// The `PartialEq` above is full structural text equality (reflexive,
// symmetric, transitive), so `Eq` holds.
impl Eq for SyntaxText {}
+
// Sealed-trait module: `SyntaxTextRange` can be used as a bound by
// `SyntaxText::slice` but cannot be implemented outside this file, which
// pins down the exact set of accepted range types.
mod private {
    use std::ops;

    use crate::{TextRange, TextSize};

    /// A range argument for `SyntaxText::slice`: optional start/end bounds,
    /// with `None` meaning "from the beginning" / "to the end".
    pub trait SyntaxTextRange {
        fn start(&self) -> Option<TextSize>;
        fn end(&self) -> Option<TextSize>;
    }

    impl SyntaxTextRange for TextRange {
        fn start(&self) -> Option<TextSize> {
            Some(TextRange::start(*self))
        }
        fn end(&self) -> Option<TextSize> {
            Some(TextRange::end(*self))
        }
    }

    impl SyntaxTextRange for ops::Range<TextSize> {
        fn start(&self) -> Option<TextSize> {
            Some(self.start)
        }
        fn end(&self) -> Option<TextSize> {
            Some(self.end)
        }
    }

    impl SyntaxTextRange for ops::RangeFrom<TextSize> {
        fn start(&self) -> Option<TextSize> {
            Some(self.start)
        }
        fn end(&self) -> Option<TextSize> {
            None
        }
    }

    impl SyntaxTextRange for ops::RangeTo<TextSize> {
        fn start(&self) -> Option<TextSize> {
            None
        }
        fn end(&self) -> Option<TextSize> {
            Some(self.end)
        }
    }

    impl SyntaxTextRange for ops::RangeFull {
        fn start(&self) -> Option<TextSize> {
            None
        }
        fn end(&self) -> Option<TextSize> {
            None
        }
    }
}
+
#[cfg(test)]
mod tests {
    use crate::{green::SyntaxKind, GreenNodeBuilder};

    use super::*;

    /// Builds a single-node tree containing one token per chunk, so the
    /// resulting `SyntaxText` is split exactly at the chunk boundaries.
    fn build_tree(chunks: &[&str]) -> SyntaxNode {
        let mut builder = GreenNodeBuilder::new();
        builder.start_node(SyntaxKind(62));
        for &chunk in chunks.iter() {
            builder.token(SyntaxKind(92), chunk.into())
        }
        builder.finish_node();
        SyntaxNode::new_root(builder.finish())
    }

    #[test]
    fn test_text_equality() {
        // Chunked equality must agree with equality of the flattened strings,
        // no matter how the same text is split across tokens.
        fn do_check(t1: &[&str], t2: &[&str]) {
            let t1 = build_tree(t1).text();
            let t2 = build_tree(t2).text();
            let expected = t1.to_string() == t2.to_string();
            let actual = t1 == t2;
            assert_eq!(expected, actual, "`{}` (SyntaxText) `{}` (SyntaxText)", t1, t2);
            let actual = t1 == &*t2.to_string();
            assert_eq!(expected, actual, "`{}` (SyntaxText) `{}` (&str)", t1, t2);
        }
        // Check both orders, since `eq` walks the two sides asymmetrically.
        fn check(t1: &[&str], t2: &[&str]) {
            do_check(t1, t2);
            do_check(t2, t1)
        }

        check(&[""], &[""]);
        check(&["a"], &[""]);
        check(&["a"], &["a"]);
        check(&["abc"], &["def"]);
        check(&["hello", "world"], &["hello", "world"]);
        check(&["hellowo", "rld"], &["hell", "oworld"]);
        check(&["hel", "lowo", "rld"], &["helloworld"]);
        check(&["{", "abc", "}"], &["{", "123", "}"]);
        check(&["{", "abc", "}", "{"], &["{", "123", "}"]);
        check(&["{", "abc", "}"], &["{", "123", "}", "{"]);
        check(&["{", "abc", "}ab"], &["{", "abc", "}", "ab"]);
    }
}
diff --git a/vendor/rowan/src/utility_types.rs b/vendor/rowan/src/utility_types.rs
new file mode 100644
index 000000000..817add72e
--- /dev/null
+++ b/vendor/rowan/src/utility_types.rs
@@ -0,0 +1,180 @@
+use std::{
+ fmt,
+ ops::{AddAssign, Deref},
+};
+use text_size::TextSize;
+
/// Either an interior node or a leaf token of a syntax tree.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum NodeOrToken<N, T> {
    Node(N),
    Token(T),
}

impl<N, T> NodeOrToken<N, T> {
    /// Consumes `self`, yielding the node; `None` if this is a token.
    pub fn into_node(self) -> Option<N> {
        if let NodeOrToken::Node(node) = self {
            Some(node)
        } else {
            None
        }
    }

    /// Consumes `self`, yielding the token; `None` if this is a node.
    pub fn into_token(self) -> Option<T> {
        if let NodeOrToken::Token(token) = self {
            Some(token)
        } else {
            None
        }
    }

    /// Borrows the node; `None` if this is a token.
    pub fn as_node(&self) -> Option<&N> {
        if let NodeOrToken::Node(node) = self {
            Some(node)
        } else {
            None
        }
    }

    /// Borrows the token; `None` if this is a node.
    pub fn as_token(&self) -> Option<&T> {
        if let NodeOrToken::Token(token) = self {
            Some(token)
        } else {
            None
        }
    }
}
+
impl<N: Deref, T: Deref> NodeOrToken<N, T> {
    /// Borrows through the contained smart pointer, yielding its `Deref`
    /// target (the `&*` triggers deref coercion at the constructor call).
    pub(crate) fn as_deref(&self) -> NodeOrToken<&N::Target, &T::Target> {
        match self {
            NodeOrToken::Node(node) => NodeOrToken::Node(&*node),
            NodeOrToken::Token(token) => NodeOrToken::Token(&*token),
        }
    }
}
+
+impl<N: fmt::Display, T: fmt::Display> fmt::Display for NodeOrToken<N, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ NodeOrToken::Node(node) => fmt::Display::fmt(node, f),
+ NodeOrToken::Token(token) => fmt::Display::fmt(token, f),
+ }
+ }
+}
+
/// Direction for sibling traversal.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Direction {
    // Toward following siblings.
    Next,
    // Toward preceding siblings.
    Prev,
}
+
/// `WalkEvent` describes tree walking process.
#[derive(Debug, Copy, Clone)]
pub enum WalkEvent<T> {
    /// Fired before traversing the node.
    Enter(T),
    /// Fired after the node is traversed.
    Leave(T),
}

impl<T> WalkEvent<T> {
    /// Applies `f` to the payload while preserving whether this is an
    /// `Enter` or a `Leave` event.
    pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> WalkEvent<U> {
        let (make, value): (fn(U) -> WalkEvent<U>, T) = match self {
            WalkEvent::Enter(it) => (WalkEvent::Enter, it),
            WalkEvent::Leave(it) => (WalkEvent::Leave, it),
        };
        make(f(value))
    }
}
+
/// There might be zero, one or two leaves at a given offset.
#[derive(Clone, Debug)]
pub enum TokenAtOffset<T> {
    /// No leaves at offset -- possible for the empty file.
    None,
    /// Only a single leaf at offset.
    Single(T),
    /// Offset is exactly between two leaves.
    Between(T, T),
}

impl<T> TokenAtOffset<T> {
    /// Applies `f` to every contained leaf, preserving the variant.
    pub fn map<F: Fn(T) -> U, U>(self, f: F) -> TokenAtOffset<U> {
        match self {
            TokenAtOffset::None => TokenAtOffset::None,
            TokenAtOffset::Single(leaf) => TokenAtOffset::Single(f(leaf)),
            TokenAtOffset::Between(left, right) => TokenAtOffset::Between(f(left), f(right)),
        }
    }

    /// Convert to option, preferring the right leaf in case of a tie.
    pub fn right_biased(self) -> Option<T> {
        match self {
            TokenAtOffset::None => None,
            TokenAtOffset::Single(leaf) | TokenAtOffset::Between(_, leaf) => Some(leaf),
        }
    }

    /// Convert to option, preferring the left leaf in case of a tie.
    pub fn left_biased(self) -> Option<T> {
        match self {
            TokenAtOffset::None => None,
            TokenAtOffset::Single(leaf) | TokenAtOffset::Between(leaf, _) => Some(leaf),
        }
    }
}

impl<T> Iterator for TokenAtOffset<T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        // Take the current state out (leaving `None`), then put back
        // whatever remains to be yielded.
        match std::mem::replace(self, TokenAtOffset::None) {
            TokenAtOffset::None => None,
            TokenAtOffset::Single(leaf) => Some(leaf),
            TokenAtOffset::Between(first, second) => {
                *self = TokenAtOffset::Single(second);
                Some(first)
            }
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // The remaining count is known exactly, enabling `ExactSizeIterator`.
        let remaining = match self {
            TokenAtOffset::None => 0,
            TokenAtOffset::Single(_) => 1,
            TokenAtOffset::Between(_, _) => 2,
        };
        (remaining, Some(remaining))
    }
}

impl<T> ExactSizeIterator for TokenAtOffset<T> {}
+
// Compile-time assertion: `$expr` must be nonzero, otherwise the constant
// division by zero fails the build.
macro_rules! _static_assert {
    ($expr:expr) => {
        const _: i32 = 0 / $expr as i32;
    };
}

// Re-exported under the intended name (the underscore avoids a clash between
// the macro's textual name and the use-path).
pub(crate) use _static_assert as static_assert;
+
/// A signed adjustment applied to an unsigned quantity via `+=` (see the
/// `AddAssign` impls generated below).
#[derive(Copy, Clone, Debug)]
pub(crate) enum Delta<T> {
    Add(T),
    Sub(T),
}

// This won't be coherent :-(
// impl<T: AddAssign + SubAssign> AddAssign<Delta<T>> for T
// Instead, generate the impl for each concrete type we need.
macro_rules! impls {
    ($($ty:ident)*) => {$(
        impl AddAssign<Delta<$ty>> for $ty {
            fn add_assign(&mut self, rhs: Delta<$ty>) {
                match rhs {
                    Delta::Add(amt) => *self += amt,
                    Delta::Sub(amt) => *self -= amt,
                }
            }
        }
    )*};
}
impls!(u32 TextSize);