diff --git a/src/arc.rs b/src/arc.rs index e5d6319..05e015a 100644 --- a/src/arc.rs +++ b/src/arc.rs @@ -96,7 +96,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// See [`std::sync::Arc`]. #[repr(C)] -pub struct Arc { +pub struct Arc { p: NonNull>, } @@ -104,7 +104,7 @@ pub struct Arc { /// /// This lets us build arcs that we can mutate before /// freezing, without needing to change the allocation -pub struct UniqueArc(Arc); +pub struct UniqueArc(Arc); impl UniqueArc { #[inline] @@ -601,7 +601,7 @@ impl HeaderWithLength { } type HeaderSliceWithLength = HeaderSlice, T>; -pub struct ThinArc { +pub struct ThinArc { ptr: *mut ArcInner>, } @@ -620,7 +620,7 @@ fn thin_to_thick( fake_slice as *mut ArcInner> } -impl ThinArc { +impl ThinArc { /// Temporarily converts |self| into a bonafide Arc and exposes it to the /// provided callback. The refcount is not modified. #[inline] @@ -663,21 +663,21 @@ impl Deref for ThinArc { } } -impl Clone for ThinArc { +impl Clone for ThinArc { #[inline] fn clone(&self) -> Self { ThinArc::with_arc(self, |a| Arc::into_thin(a.clone())) } } -impl Drop for ThinArc { +impl Drop for ThinArc { #[inline] fn drop(&mut self) { let _ = Arc::from_thin(ThinArc { ptr: self.ptr }); } } -impl Arc> { +impl Arc> { /// Converts an Arc into a ThinArc. This consumes the Arc, so the refcount /// is not modified. #[inline] @@ -708,14 +708,14 @@ impl Arc> { } } -impl PartialEq for ThinArc { +impl PartialEq for ThinArc { #[inline] fn eq(&self, other: &ThinArc) -> bool { ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| *a == *b)) } } -impl Eq for ThinArc {} +impl Eq for ThinArc {} /// An Arc, except it holds a pointer to the T instead of to the /// entire ArcInner. @@ -734,14 +734,14 @@ impl Eq for ThinArc {} /// but we can also convert it to a "regular" Arc by removing the offset #[derive(Eq)] #[repr(C)] -pub struct RawOffsetArc { +pub struct RawOffsetArc { ptr: NonNull, } -unsafe impl Send for RawOffsetArc {} -unsafe impl Sync for RawOffsetArc {} +unsafe impl Send for RawOffsetArc {} +unsafe impl Sync for RawOffsetArc {} -impl Deref for RawOffsetArc { +impl Deref for RawOffsetArc { type Target = T; fn deref(&self) -> &Self::Target { @@ -749,20 +749,20 @@ impl Deref for RawOffsetArc { } } -impl Clone for RawOffsetArc { +impl Clone for RawOffsetArc { #[inline] fn clone(&self) -> Self { Arc::into_raw_offset(self.clone_arc()) } } -impl Drop for RawOffsetArc { +impl Drop for RawOffsetArc { fn drop(&mut self) { let _ = Arc::from_raw_offset(RawOffsetArc { ptr: self.ptr.clone() }); } } -impl fmt::Debug for RawOffsetArc { +impl fmt::Debug for RawOffsetArc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&**self, f) } @@ -778,7 +778,7 @@ impl PartialEq for RawOffsetArc { } } -impl RawOffsetArc { +impl RawOffsetArc { /// Temporarily converts |self| into a bonafide Arc and exposes it to the /// provided callback. The refcount is not modified. #[inline] @@ -837,7 +837,7 @@ impl RawOffsetArc { } } -impl Arc { +impl Arc { /// Converts an Arc into a RawOffsetArc. This consumes the Arc, so the refcount /// is not modified. #[inline] @@ -903,7 +903,6 @@ impl<'a, T> ArcBorrow<'a, T> { pub fn with_arc(&self, f: F) -> U where F: FnOnce(&Arc) -> U, - T: 'static, { // Synthesize transient Arc, which never touches the refcount. 
let transient = unsafe { NoDrop::new(Arc::from_raw(self.0)) }; diff --git a/src/lib.rs b/src/lib.rs index 6644868..d74450f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -59,7 +59,6 @@ pub mod syntax; #[cfg(feature = "serde1")] mod serde_impls; -mod syntax_text; #[allow(missing_docs)] mod utility_types; @@ -76,8 +75,7 @@ pub use text_size::{TextLen, TextRange, TextSize}; pub use crate::{ arc::Arc, green::{Checkpoint, Children, GreenNode, GreenNodeBuilder, GreenToken, NodeCache, SyntaxKind}, - syntax::{SyntaxElement, SyntaxElementChildren, SyntaxElementRef, SyntaxNode, SyntaxNodeChildren, SyntaxToken}, - syntax_text::SyntaxText, + syntax::*, utility_types::{Direction, NodeOrToken, TokenAtOffset, WalkEvent}, }; diff --git a/src/serde_impls.rs b/src/serde_impls.rs index a30373f..93b5441 100644 --- a/src/serde_impls.rs +++ b/src/serde_impls.rs @@ -2,7 +2,7 @@ use crate::{ interning::{IntoResolver, Resolver}, - GreenNodeBuilder, Language, NodeOrToken, SyntaxKind, SyntaxNode, WalkEvent, + GreenNodeBuilder, Language, NodeOrToken, ResolvedNode, SyntaxKind, SyntaxNode, WalkEvent, }; use serde::{ de::{Error, SeqAccess, Visitor}, @@ -11,9 +11,6 @@ use serde::{ }; use std::{collections::VecDeque, fmt, marker::PhantomData}; -type Rodeo = lasso::Rodeo; -type RodeoResolver = lasso::RodeoResolver; - /// Expands to the first expression, if there's /// no expression following, otherwise return the second expression. /// @@ -86,21 +83,21 @@ enum Event<'text> { } /// Make a `SyntaxNode` serializable but without serializing the data. -pub(crate) struct SerializeWithResolver<'node, 'resolver, L: Language, D: 'static, RN: 'static, R> { - pub(crate) node: &'node SyntaxNode, +pub(crate) struct SerializeWithResolver<'node, 'resolver, L: Language, D: 'static, R: ?Sized> { + pub(crate) node: &'node SyntaxNode, pub(crate) resolver: &'resolver R, } /// Make a `SyntaxNode` serializable which will include the data for serialization. 
-pub(crate) struct SerializeWithData<'node, 'resolver, L: Language, D: 'static, RN: 'static, R> { - pub(crate) node: &'node SyntaxNode, +pub(crate) struct SerializeWithData<'node, 'resolver, L: Language, D: 'static, R: ?Sized> { + pub(crate) node: &'node SyntaxNode, pub(crate) resolver: &'resolver R, } -impl Serialize for SerializeWithData<'_, '_, L, D, RN, R> +impl Serialize for SerializeWithData<'_, '_, L, D, R> where L: Language, - R: Resolver, + R: Resolver + ?Sized, D: Serialize, { fn serialize(&self, serializer: S) -> Result @@ -112,10 +109,10 @@ where } } -impl Serialize for SerializeWithResolver<'_, '_, L, D, RN, R> +impl Serialize for SerializeWithResolver<'_, '_, L, D, R> where L: Language, - R: Resolver, + R: Resolver + ?Sized, { fn serialize(&self, serializer: S) -> Result where @@ -125,11 +122,10 @@ where } } -impl Serialize for SyntaxNode +impl Serialize for ResolvedNode where L: Language, D: Serialize, - R: Resolver, { fn serialize(&self, serializer: S) -> Result where @@ -143,7 +139,7 @@ where } } -impl<'de, L, D> Deserialize<'de> for SyntaxNode +impl<'de, L, D> Deserialize<'de> for ResolvedNode where L: Language, D: Deserialize<'de>, @@ -163,7 +159,7 @@ where DE: serde::Deserializer<'de>, { struct EventVisitor { - _marker: PhantomData>, + _marker: PhantomData ResolvedNode>, } impl<'de, L, D> Visitor<'de> for EventVisitor @@ -171,7 +167,7 @@ where L: Language, D: Deserialize<'de>, { - type Value = (SyntaxNode, VecDeque); + type Value = (ResolvedNode, VecDeque); fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a list of tree events") @@ -196,12 +192,12 @@ where } let (tree, resolver) = builder.finish(); - let tree = SyntaxNode::new_root_with_resolver(tree, resolver.unwrap().into_resolver()); + let tree = ResolvedNode::new_root_with_resolver(tree, resolver.unwrap().into_resolver()); Ok((tree, data_indices)) } } - struct ProcessedEvents(SyntaxNode, VecDeque); + struct ProcessedEvents(ResolvedNode, VecDeque); impl<'de, L, D> Deserialize<'de> for ProcessedEvents where L: Language, diff --git a/src/syntax/element.rs b/src/syntax/element.rs new file mode 100644 index 0000000..682de17 --- /dev/null +++ b/src/syntax/element.rs @@ -0,0 +1,254 @@ +use std::sync::atomic::AtomicU32; + +use lasso::Resolver; +use text_size::{TextRange, TextSize}; + +use super::*; +use crate::{green::GreenElementRef, Language, NodeOrToken, SyntaxKind, TokenAtOffset}; + +/// An element of the tree, can be either a node or a token. +pub type SyntaxElement = NodeOrToken, SyntaxToken>; + +impl From> for SyntaxElement { + fn from(node: SyntaxNode) -> SyntaxElement { + NodeOrToken::Node(node) + } +} + +impl From> for SyntaxElement { + fn from(token: SyntaxToken) -> SyntaxElement { + NodeOrToken::Token(token) + } +} + +impl SyntaxElement { + #[allow(missing_docs)] + pub fn display(&self, resolver: &impl Resolver) -> String { + match self { + NodeOrToken::Node(it) => it.display(resolver), + NodeOrToken::Token(it) => it.display(resolver), + } + } +} + +/// A reference to an element of the tree, can be either a reference to a node or one to a token. 
+pub type SyntaxElementRef<'a, L, D = ()> = NodeOrToken<&'a SyntaxNode, &'a SyntaxToken>; + +impl<'a, L: Language, D> From<&'a SyntaxNode> for SyntaxElementRef<'a, L, D> { + fn from(node: &'a SyntaxNode) -> Self { + NodeOrToken::Node(node) + } +} + +impl<'a, L: Language, D> From<&'a SyntaxToken> for SyntaxElementRef<'a, L, D> { + fn from(token: &'a SyntaxToken) -> Self { + NodeOrToken::Token(token) + } +} + +impl<'a, L: Language, D> From<&'a SyntaxElement> for SyntaxElementRef<'a, L, D> { + fn from(element: &'a SyntaxElement) -> Self { + match element { + NodeOrToken::Node(it) => Self::Node(it), + NodeOrToken::Token(it) => Self::Token(it), + } + } +} + +impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> { + #[allow(missing_docs)] + pub fn display(&self, resolver: &impl Resolver) -> String { + match self { + NodeOrToken::Node(it) => it.display(resolver), + NodeOrToken::Token(it) => it.display(resolver), + } + } +} + +impl SyntaxElement { + pub(super) fn new( + element: GreenElementRef<'_>, + parent: &SyntaxNode, + index: u32, + offset: TextSize, + ref_count: *mut AtomicU32, + ) -> SyntaxElement { + match element { + NodeOrToken::Node(node) => SyntaxNode::new_child(node, parent, index as u32, offset, ref_count).into(), + NodeOrToken::Token(_) => SyntaxToken::new(parent, index as u32, offset).into(), + } + } + + /// The range this element covers in the source text, in bytes. + #[inline] + pub fn text_range(&self) -> TextRange { + match self { + NodeOrToken::Node(it) => it.text_range(), + NodeOrToken::Token(it) => it.text_range(), + } + } + + /// The internal representation of the kind of this element. + #[inline] + pub fn syntax_kind(&self) -> SyntaxKind { + match self { + NodeOrToken::Node(it) => it.syntax_kind(), + NodeOrToken::Token(it) => it.syntax_kind(), + } + } + + /// The kind of this element in terms of your language. + #[inline] + pub fn kind(&self) -> L::Kind { + match self { + NodeOrToken::Node(it) => it.kind(), + NodeOrToken::Token(it) => it.kind(), + } + } + + /// The parent node of this element, except if this element is the root. + #[inline] + pub fn parent(&self) -> Option<&SyntaxNode> { + match self { + NodeOrToken::Node(it) => it.parent(), + NodeOrToken::Token(it) => Some(it.parent()), + } + } + + /// Returns an iterator along the chain of parents of this node. + #[inline] + pub fn ancestors(&self) -> impl Iterator> { + match self { + NodeOrToken::Node(it) => it.ancestors(), + NodeOrToken::Token(it) => it.parent().ancestors(), + } + } + + /// Return the leftmost token in the subtree of this element. + #[inline] + pub fn first_token(&self) -> Option<&SyntaxToken> { + match self { + NodeOrToken::Node(it) => it.first_token(), + NodeOrToken::Token(it) => Some(it), + } + } + + /// Return the rightmost token in the subtree of this element. + #[inline] + pub fn last_token(&self) -> Option<&SyntaxToken> { + match self { + NodeOrToken::Node(it) => it.last_token(), + NodeOrToken::Token(it) => Some(it), + } + } + + /// The tree element to the right of this one, i.e. the next child of this element's parent after this element. + #[inline] + pub fn next_sibling_or_token(&self) -> Option> { + match self { + NodeOrToken::Node(it) => it.next_sibling_or_token(), + NodeOrToken::Token(it) => it.next_sibling_or_token(), + } + } + + /// The tree element to the left of this one, i.e. the previous child of this element's parent after this element. 
+ #[inline] + pub fn prev_sibling_or_token(&self) -> Option> { + match self { + NodeOrToken::Node(it) => it.prev_sibling_or_token(), + NodeOrToken::Token(it) => it.prev_sibling_or_token(), + } + } +} + +impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> { + /// The range this element covers in the source text, in bytes. + #[inline] + pub fn text_range(&self) -> TextRange { + match self { + NodeOrToken::Node(it) => it.text_range(), + NodeOrToken::Token(it) => it.text_range(), + } + } + + /// The internal representation of the kind of this element. + #[inline] + pub fn syntax_kind(&self) -> SyntaxKind { + match self { + NodeOrToken::Node(it) => it.syntax_kind(), + NodeOrToken::Token(it) => it.syntax_kind(), + } + } + + /// The kind of this element in terms of your language. + #[inline] + pub fn kind(&self) -> L::Kind { + match self { + NodeOrToken::Node(it) => it.kind(), + NodeOrToken::Token(it) => it.kind(), + } + } + + /// The parent node of this element, except if this element is the root. + #[inline] + pub fn parent(&self) -> Option<&'a SyntaxNode> { + match self { + NodeOrToken::Node(it) => it.parent(), + NodeOrToken::Token(it) => Some(it.parent()), + } + } + + /// Returns an iterator along the chain of parents of this node. + #[inline] + pub fn ancestors(&self) -> impl Iterator> { + match self { + NodeOrToken::Node(it) => it.ancestors(), + NodeOrToken::Token(it) => it.parent().ancestors(), + } + } + + /// Return the leftmost token in the subtree of this element. + #[inline] + pub fn first_token(&self) -> Option<&'a SyntaxToken> { + match self { + NodeOrToken::Node(it) => it.first_token(), + NodeOrToken::Token(it) => Some(it), + } + } + + /// Return the rightmost token in the subtree of this element. + #[inline] + pub fn last_token(&self) -> Option<&'a SyntaxToken> { + match self { + NodeOrToken::Node(it) => it.last_token(), + NodeOrToken::Token(it) => Some(it), + } + } + + /// The tree element to the right of this one, i.e. the next child of this element's parent after this element. + #[inline] + pub fn next_sibling_or_token(&self) -> Option> { + match self { + NodeOrToken::Node(it) => it.next_sibling_or_token(), + NodeOrToken::Token(it) => it.next_sibling_or_token(), + } + } + + /// The tree element to the left of this one, i.e. the previous child of this element's parent after this element. + #[inline] + pub fn prev_sibling_or_token(&self) -> Option> { + match self { + NodeOrToken::Node(it) => it.prev_sibling_or_token(), + NodeOrToken::Token(it) => it.prev_sibling_or_token(), + } + } + + #[inline] + pub(super) fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset> { + assert!(self.text_range().start() <= offset && offset <= self.text_range().end()); + match self { + NodeOrToken::Token(token) => TokenAtOffset::Single((*token).clone()), + NodeOrToken::Node(node) => node.token_at_offset(offset), + } + } +} diff --git a/src/syntax/mod.rs b/src/syntax/mod.rs new file mode 100644 index 0000000..08fbbd4 --- /dev/null +++ b/src/syntax/mod.rs @@ -0,0 +1,33 @@ +//! Implementation of the outer, "red" tree. +//! +//! Inner [`SyntaxNode`]s represent only structural information, but can hold additional, user-defined data. +//! Leaf [`SyntaxToken`]s represent individual pieces of source text. +//! Use [`SyntaxNode::new_root`] and [`SyntaxNode::new_root_with_resolver`] to construct a syntax +//! tree on top of a green tree. 
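// A minimal usage sketch of the flow the module docs describe (building a green tree,
// then wrapping it in a red tree). `TestLang`, `ROOT` and `TOKEN` are illustrative
// stand-ins mirroring the doc examples further down in this diff, not items added by
// the patch itself.
use cstree::{GreenNodeBuilder, Language, SyntaxKind, SyntaxNode};

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum TestLang {}

const ROOT: SyntaxKind = SyntaxKind(0);
const TOKEN: SyntaxKind = SyntaxKind(1);

impl Language for TestLang {
    // This sketch simply passes raw kinds through unchanged.
    type Kind = SyntaxKind;

    fn kind_from_raw(raw: SyntaxKind) -> Self::Kind {
        raw
    }

    fn kind_to_raw(kind: Self::Kind) -> SyntaxKind {
        kind
    }
}

fn build_tree() -> SyntaxNode<TestLang> {
    let mut builder = GreenNodeBuilder::new();
    builder.start_node(ROOT);
    builder.token(TOKEN, "content");
    builder.finish_node();
    let (green, _interner) = builder.finish();
    // `new_root` leaves the interner outside the tree; `new_root_with_resolver`
    // attaches it and returns a `ResolvedNode` instead (see `syntax/resolved.rs`).
    SyntaxNode::new_root(green)
}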
+ +mod element; +pub use element::{SyntaxElement, SyntaxElementRef}; +mod node; +pub use node::{SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren}; +mod token; +pub use token::SyntaxToken; +mod resolved; +pub use resolved::{ResolvedElement, ResolvedElementRef, ResolvedNode, ResolvedToken}; + +mod text; +pub use text::SyntaxText; + +// A note on `#[inline]` usage in this module: +// In `rowan`, there are two layers of `SyntaxXY`s: the `cursor` layer and the `api` layer. +// The `cursor` layer handles all of the actual methods on the tree, while the `api` layer is +// generic over the `Language` of the tree and otherwise forwards its implementation to the `cursor` +// layer. +// Here, we have unified the `cursor` and the `api` layer into the `syntax` layer. +// This means that all of our types here are generic over a `Language`, including the +// implementations which, in `rowan`, are part of the `cursor` layer. +// Very apparently, this makes the compiler less willing to inline. Almost every "regular use" +// method in this file has some kind of `#[inline]` annotation to counteract that. This is _NOT_ +// just for fun, not inlining decreases tree traversal speed by approx. 50% at the time of writing +// this. +// +// - DQ 01/2021 diff --git a/src/syntax.rs b/src/syntax/node.rs similarity index 60% rename from src/syntax.rs rename to src/syntax/node.rs index 65c7c22..18c8d54 100644 --- a/src/syntax.rs +++ b/src/syntax/node.rs @@ -1,60 +1,42 @@ -//! Implementation of the outer, "red" tree. -//! -//! Inner [`SyntaxNode`]s represent only structural information, but can hold additional, user-defined data. -//! Leaf [`SyntaxToken`]s represent individual pieces of source text. -//! Use [`SyntaxNode::new_root`] and [`SyntaxNode::new_root_with_resolver`] to construct a syntax -//! tree on top of a green tree. - -use std::{ - cell::UnsafeCell, - fmt::{self, Write}, - hash::{Hash, Hasher}, - iter, ptr, - sync::atomic::{AtomicU32, Ordering}, -}; - +use super::*; #[cfg(feature = "serde1")] use crate::serde_impls::{SerializeWithData, SerializeWithResolver}; -use parking_lot::RwLock; - use crate::{ arc::Arc, green::{GreenElementRef, SyntaxKind}, interning::Resolver, - Children, Direction, GreenNode, GreenToken, Language, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, - WalkEvent, + *, +}; +use parking_lot::RwLock; +use std::{ + cell::UnsafeCell, + fmt::Write, + hash::{Hash, Hasher}, + iter, ptr, + sync::{ + atomic::{AtomicU32, Ordering}, + Arc as StdArc, + }, }; - -// A note on `#[inline]` usage in this file: -// In `rowan`, there are two layers of `SyntaxXY`s: the `cursor` layer and the `api` layer. -// The `cursor` layer handles all of the actual methods on the tree, while the `api` layer is -// generic over the `Language` of the tree and otherwise forwards its implementation to the `cursor` -// layer. -// Here, we have unified the `cursor` and the `api` layer into the `syntax` layer. -// This means that all of our types here are generic over a `Language`, including the -// implementations which, in `rowan`, are part of the `cursor` layer. -// Very apparently, this makes the compiler less willing to inline. Almost every "regular use" -// method in this file has some kind of `#[inline]` annotation to counteract that. This is _NOT_ -// just for fun, not inlining decreases tree traversal speed by approx. 50% at the time of writing -// this. -// -// - DQ 01/2021 /// Inner syntax tree node. /// Syntax nodes can be shared between threads. 
/// Every syntax tree is reference counted as a whole and nodes are pointer-sized, so copying /// individual nodes is relatively cheap. #[repr(transparent)] -pub struct SyntaxNode { - data: *mut NodeData, +pub struct SyntaxNode { + data: *mut NodeData, } -unsafe impl Send for SyntaxNode {} -unsafe impl Sync for SyntaxNode {} +unsafe impl Send for SyntaxNode {} +unsafe impl Sync for SyntaxNode {} -impl SyntaxNode { +impl SyntaxNode { #[allow(missing_docs)] - pub fn debug(&self, resolver: &impl Resolver, recursive: bool) -> String { + pub fn debug(&self, resolver: &R, recursive: bool) -> String + where + R: Resolver + ?Sized, + { // NOTE: `fmt::Write` methods on `String` never fail let mut res = String::new(); if recursive { @@ -87,7 +69,10 @@ impl SyntaxNode { } #[allow(missing_docs)] - pub fn display(&self, resolver: &impl Resolver) -> String { + pub fn display(&self, resolver: &R) -> String + where + R: Resolver + ?Sized, + { let mut res = String::new(); self.preorder_with_tokens() .filter_map(|event| match event { @@ -98,9 +83,32 @@ impl SyntaxNode { .unwrap(); res } + + /// If there is a resolver associated with this tree, returns it. + pub fn resolver(&self) -> Option<&StdArc> { + match &self.root().data().kind { + Kind::Root(_, resolver) => resolver.as_ref(), + _ => unreachable!(), + } + } + + /// Turns this node into a [`ResolvedNode`], but only if there is a resolver associated with this tree. + #[inline] + pub fn try_resolved(&self) -> Option<&ResolvedNode> { + // safety: we only coerce if `resolver` exists + self.resolver().map(|_| unsafe { ResolvedNode::coerce_ref(self) }) + } + + /// Turns this node into a [`ResolvedNode`]. + /// # Panics + /// If there is no resolver associated with this tree. + #[inline] + pub fn resolved(&self) -> &ResolvedNode { + self.try_resolved().expect("tried to resolve a node without resolver") + } } -impl Clone for SyntaxNode { +impl Clone for SyntaxNode { fn clone(&self) -> Self { // safety:: the ref count is only dropped when there are no more external references (see below) // since we are currently cloning such a reference, there is still at least one @@ -110,7 +118,7 @@ impl Clone for SyntaxNode { } } -impl Drop for SyntaxNode { +impl Drop for SyntaxNode { fn drop(&mut self) { // safety:: the ref count is only dropped when there are no more external references (see below) // and all nodes but the root have been dropped. @@ -135,9 +143,9 @@ impl Drop for SyntaxNode { } } -impl SyntaxNode { +impl SyntaxNode { #[inline] - fn data(&self) -> &NodeData { + fn data(&self) -> &NodeData { unsafe { &*self.data } } @@ -146,16 +154,19 @@ impl SyntaxNode { /// references). #[inline] #[allow(clippy::mut_from_ref)] - unsafe fn data_mut(&self) -> &mut NodeData { + unsafe fn data_mut(&self) -> &mut NodeData { &mut *self.data } #[inline] - fn clone_uncounted(&self) -> Self { + pub(super) fn clone_uncounted(&self) -> Self { Self { data: self.data } } - fn root(&self) -> &SyntaxNode { + /// The root of the tree this node belongs to. + /// + /// If this node is the root, returns `self`. 
+ pub fn root(&self) -> &SyntaxNode { let mut current = self; while let Some(parent) = current.parent() { current = parent; @@ -194,149 +205,31 @@ impl SyntaxNode { } // Identity semantics for hash & eq -impl PartialEq for SyntaxNode { - fn eq(&self, other: &SyntaxNode) -> bool { +impl PartialEq for SyntaxNode { + fn eq(&self, other: &SyntaxNode) -> bool { self.data == other.data } } -impl Eq for SyntaxNode {} +impl Eq for SyntaxNode {} -impl Hash for SyntaxNode { +impl Hash for SyntaxNode { fn hash(&self, state: &mut H) { ptr::hash(self.data, state); } } -/// Syntax tree token. -pub struct SyntaxToken { - parent: SyntaxNode, - index: u32, - offset: TextSize, -} - -impl Clone for SyntaxToken { - fn clone(&self) -> Self { - Self { - parent: self.parent.clone(), - index: self.index, - offset: self.offset, - } - } -} - -impl Hash for SyntaxToken { - fn hash(&self, state: &mut H) { - self.parent.hash(state); - self.index.hash(state); - self.offset.hash(state); - } -} - -impl PartialEq for SyntaxToken { - fn eq(&self, other: &SyntaxToken) -> bool { - self.parent == other.parent && self.index == other.index && self.offset == other.offset - } -} - -impl Eq for SyntaxToken {} - -impl SyntaxToken { - #[allow(missing_docs)] - pub fn debug(&self, resolver: &impl Resolver) -> String { - let mut res = String::new(); - write!(res, "{:?}@{:?}", self.kind(), self.text_range()).unwrap(); - if self.resolve_text(resolver).len() < 25 { - write!(res, " {:?}", self.resolve_text(resolver)).unwrap(); - return res; - } - let text = self.resolve_text(resolver); - for idx in 21..25 { - if text.is_char_boundary(idx) { - let text = format!("{} ...", &text[..idx]); - write!(res, " {:?}", text).unwrap(); - return res; - } - } - unreachable!() - } - - #[allow(missing_docs)] - pub fn display(&self, resolver: &impl Resolver) -> String { - self.resolve_text(resolver).to_string() - } -} - -/// An element of the tree, can be either a node or a token. -pub type SyntaxElement = NodeOrToken, SyntaxToken>; - -impl From> for SyntaxElement { - fn from(node: SyntaxNode) -> SyntaxElement { - NodeOrToken::Node(node) - } -} - -impl From> for SyntaxElement { - fn from(token: SyntaxToken) -> SyntaxElement { - NodeOrToken::Token(token) - } -} - -impl SyntaxElement { - #[allow(missing_docs)] - pub fn display(&self, resolver: &impl Resolver) -> String { - match self { - NodeOrToken::Node(it) => it.display(resolver), - NodeOrToken::Token(it) => it.display(resolver), - } - } -} - -/// A reference to an element of the tree, can be either a reference to a node or one to a token. 
-pub type SyntaxElementRef<'a, L, D = (), R = ()> = NodeOrToken<&'a SyntaxNode, &'a SyntaxToken>; - -impl<'a, L: Language, D, R> From<&'a SyntaxNode> for SyntaxElementRef<'a, L, D, R> { - fn from(node: &'a SyntaxNode) -> Self { - NodeOrToken::Node(node) - } -} - -impl<'a, L: Language, D, R> From<&'a SyntaxToken> for SyntaxElementRef<'a, L, D, R> { - fn from(token: &'a SyntaxToken) -> Self { - NodeOrToken::Token(token) - } -} - -impl<'a, L: Language, D, R> From<&'a SyntaxElement> for SyntaxElementRef<'a, L, D, R> { - fn from(element: &'a SyntaxElement) -> Self { - match element { - NodeOrToken::Node(it) => Self::Node(it), - NodeOrToken::Token(it) => Self::Token(it), - } - } -} - -impl<'a, L: Language, D, R> SyntaxElementRef<'a, L, D, R> { - #[allow(missing_docs)] - pub fn display(&self, resolver: &impl Resolver) -> String { - match self { - NodeOrToken::Node(it) => it.display(resolver), - NodeOrToken::Token(it) => it.display(resolver), - } - } -} - -enum Kind { - Root(GreenNode, Arc), +enum Kind { + Root(GreenNode, Option>), Child { - parent: SyntaxNode, + parent: SyntaxNode, index: u32, offset: TextSize, }, } -impl Kind { - fn as_child(&self) -> Option<(&SyntaxNode, u32, TextSize)> { +impl Kind { + fn as_child(&self) -> Option<(&SyntaxNode, u32, TextSize)> { match self { Kind::Child { parent, index, offset } => Some((parent, *index, *offset)), _ => None, @@ -344,18 +237,18 @@ impl Kind { } } -struct NodeData { - kind: Kind, +pub(super) struct NodeData { + kind: Kind, green: ptr::NonNull, ref_count: *mut AtomicU32, data: RwLock>>, - children: Vec>>>, + children: Vec>>>, child_locks: Vec>, } -impl NodeData { +impl NodeData { fn new( - kind: Kind, + kind: Kind, green: ptr::NonNull, ref_count: *mut AtomicU32, n_children: usize, @@ -375,7 +268,7 @@ impl NodeData { } } -impl SyntaxNode { +impl SyntaxNode { /// Build a new syntax tree on top of a green tree. 
/// /// # Example @@ -409,21 +302,20 @@ impl SyntaxNode { /// let root: SyntaxNode = SyntaxNode::new_root(green); /// assert_eq!(root.kind(), SyntaxKind::ROOT); /// ``` + #[inline] pub fn new_root(green: GreenNode) -> Self { - Self::make_new_root(green, ()) + Self::make_new_root(green, None) } -} -impl SyntaxNode { - fn new(data: *mut NodeData) -> Self { + pub(super) fn new(data: *mut NodeData) -> Self { Self { data } } - fn make_new_root(green: GreenNode, resolver: R) -> Self { + fn make_new_root(green: GreenNode, resolver: Option>) -> Self { let ref_count = Box::new(AtomicU32::new(1)); let n_children = green.children().count(); let data = NodeData::new( - Kind::Root(green, Arc::new(resolver)), + Kind::Root(green, resolver), ptr::NonNull::dangling(), Box::into_raw(ref_count), n_children, @@ -443,7 +335,7 @@ impl SyntaxNode { /// /// # Example /// ``` - /// # use cstree::{*, interning::TokenInterner}; + /// # use cstree::*; /// # #[allow(non_camel_case_types)] /// #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] /// #[repr(u16)] @@ -468,25 +360,32 @@ impl SyntaxNode { /// } /// # const ROOT: cstree::SyntaxKind = cstree::SyntaxKind(0); /// # const TOKEN: cstree::SyntaxKind = cstree::SyntaxKind(1); - /// # type SyntaxNode = cstree::SyntaxNode; + /// # type SyntaxNode = cstree::SyntaxNode; /// let mut builder = GreenNodeBuilder::new(); /// builder.start_node(ROOT); /// builder.token(TOKEN, "content"); /// builder.finish_node(); /// let (green, resolver) = builder.finish(); - /// let root: SyntaxNode = SyntaxNode::new_root_with_resolver(green, resolver.unwrap()); + /// let root: ResolvedNode = SyntaxNode::new_root_with_resolver(green, resolver.unwrap()); /// assert_eq!(root.text(), "content"); /// ``` - pub fn new_root_with_resolver(green: GreenNode, resolver: R) -> Self - where - R: Resolver, - { - Self::make_new_root(green, resolver) + #[inline] + pub fn new_root_with_resolver(green: GreenNode, resolver: impl Resolver + 'static) -> ResolvedNode { + let ptr: StdArc = StdArc::new(resolver); + ResolvedNode { + syntax: SyntaxNode::make_new_root(green, Some(ptr)), + } } // Technically, unsafe, but private so that's OK. // Safety: `green` must be a descendent of `parent.green` - fn new_child(green: &GreenNode, parent: &Self, index: u32, offset: TextSize, ref_count: *mut AtomicU32) -> Self { + pub(super) fn new_child( + green: &GreenNode, + parent: &Self, + index: u32, + offset: TextSize, + ref_count: *mut AtomicU32, + ) -> Self { let n_children = green.children().count(); let data = NodeData::new( Kind::Child { @@ -534,16 +433,8 @@ impl SyntaxNode { *ptr = None; } - /// If there is a resolver associated with this tree, returns it. 
- pub fn resolver(&self) -> &Arc { - match &self.root().data().kind { - Kind::Root(_, resolver) => resolver, - _ => unreachable!(), - } - } - #[inline] - fn read(&self, index: usize) -> Option> { + fn read(&self, index: usize) -> Option> { // safety: children are pre-allocated and indices are determined internally let _read = unsafe { self.data().child_locks.get_unchecked(index).read() }; // safety: mutable accesses to the slot only occur below and have to take the lock @@ -551,7 +442,7 @@ impl SyntaxNode { slot.as_ref().map(|elem| elem.into()) } - fn try_write(&self, index: usize, elem: SyntaxElement) { + fn try_write(&self, index: usize, elem: SyntaxElement) { // safety: children are pre-allocated and indices are determined internally let _write = unsafe { self.data().child_locks.get_unchecked(index).write() }; // safety: we are the only writer and there are no readers as evidenced by the write lock @@ -586,7 +477,7 @@ impl SyntaxNode { // by one. // safety: as above - let ref_count = unsafe { &*token.parent.data().ref_count }; + let ref_count = unsafe { &*token.parent().data().ref_count }; ref_count.fetch_add(1, Ordering::AcqRel); drop(token); } @@ -595,7 +486,7 @@ impl SyntaxNode { } #[inline(always)] - fn get_or_add_node(&self, node: &GreenNode, index: usize, offset: TextSize) -> SyntaxElementRef<'_, L, D, R> { + fn get_or_add_node(&self, node: &GreenNode, index: usize, offset: TextSize) -> SyntaxElementRef<'_, L, D> { if let Some(elem) = self.read(index) { debug_assert_eq!(elem.text_range().start(), offset); return elem; @@ -613,7 +504,7 @@ impl SyntaxNode { element: GreenElementRef<'_>, index: usize, offset: TextSize, - ) -> SyntaxElementRef<'_, L, D, R> { + ) -> SyntaxElementRef<'_, L, D> { if let Some(elem) = self.read(index) { debug_assert_eq!(elem.text_range().start(), offset); return elem; @@ -673,7 +564,7 @@ impl SyntaxNode { /// by this node, i.e. the combined text of all token leafs of the subtree originating in this /// node. #[inline] - pub fn resolve_text<'n, 'i, I>(&'n self, resolver: &'i I) -> SyntaxText<'n, 'i, I, L, D, R> + pub fn resolve_text<'n, 'i, I>(&'n self, resolver: &'i I) -> SyntaxText<'n, 'i, I, L, D> where I: Resolver + ?Sized, { @@ -688,7 +579,7 @@ impl SyntaxNode { /// The parent node of this node, except if this node is the root. #[inline] - pub fn parent(&self) -> Option<&SyntaxNode> { + pub fn parent(&self) -> Option<&SyntaxNode> { match &self.data().kind { Kind::Root(_, _) => None, Kind::Child { parent, .. } => Some(parent), @@ -711,7 +602,7 @@ impl SyntaxNode { /// Returns an iterator along the chain of parents of this node. #[inline] - pub fn ancestors(&self) -> impl Iterator> { + pub fn ancestors(&self) -> impl Iterator> { iter::successors(Some(self), |&node| node.parent()) } @@ -719,13 +610,13 @@ impl SyntaxNode { /// /// If you want to also consider leafs, see [`children_with_tokens`](SyntaxNode::children_with_tokens). #[inline] - pub fn children(&self) -> SyntaxNodeChildren<'_, L, D, R> { + pub fn children(&self) -> SyntaxNodeChildren<'_, L, D> { SyntaxNodeChildren::new(self) } /// Returns an iterator over child elements of this node, including tokens. #[inline] - pub fn children_with_tokens(&self) -> SyntaxElementChildren<'_, L, D, R> { + pub fn children_with_tokens(&self) -> SyntaxElementChildren<'_, L, D> { SyntaxElementChildren::new(self) } @@ -734,14 +625,14 @@ impl SyntaxNode { /// If you want to also consider leafs, see [`first_child_or_token`](SyntaxNode::first_child_or_token). 
#[inline] #[allow(clippy::map_clone)] - pub fn first_child(&self) -> Option<&SyntaxNode> { + pub fn first_child(&self) -> Option<&SyntaxNode> { let (node, (index, offset)) = filter_nodes(self.green().children_from(0, self.text_range().start())).next()?; self.get_or_add_node(node, index, offset).as_node().map(|node| *node) } /// The first child element of this node, if any, including tokens. #[inline] - pub fn first_child_or_token(&self) -> Option> { + pub fn first_child_or_token(&self) -> Option> { let (element, (index, offset)) = self.green().children_from(0, self.text_range().start()).next()?; Some(self.get_or_add_element(element, index, offset)) } @@ -751,7 +642,7 @@ impl SyntaxNode { /// If you want to also consider leafs, see [`last_child_or_token`](SyntaxNode::last_child_or_token). #[inline] #[allow(clippy::map_clone)] - pub fn last_child(&self) -> Option<&SyntaxNode> { + pub fn last_child(&self) -> Option<&SyntaxNode> { let (node, (index, offset)) = filter_nodes( self.green() .children_to(self.green().children().len(), self.text_range().end()), @@ -762,7 +653,7 @@ impl SyntaxNode { /// The last child element of this node, if any, including tokens. #[inline] - pub fn last_child_or_token(&self) -> Option> { + pub fn last_child_or_token(&self) -> Option> { let (element, (index, offset)) = self .green() .children_to(self.green().children().len(), self.text_range().end()) @@ -776,7 +667,7 @@ impl SyntaxNode { /// /// If you want to also consider leafs, see [`next_child_or_token_after`](SyntaxNode::next_child_or_token_after). #[inline] - pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode> { + pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode> { let (node, (index, offset)) = filter_nodes(self.green().children_from(n + 1, offset)).next()?; self.get_or_add_node(node, index, offset).as_node().copied() } @@ -784,7 +675,7 @@ impl SyntaxNode { /// The first child element of this node starting at the (n + 1)-st, if any. /// If this method returns `Some`, the contained node is the (n + 1)-st child of this node. #[inline] - pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option> { + pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option> { let (element, (index, offset)) = self.green().children_from(n + 1, offset).next()?; Some(self.get_or_add_element(element, index, offset)) } @@ -795,7 +686,7 @@ impl SyntaxNode { /// /// If you want to also consider leafs, see [`prev_child_or_token_before`](SyntaxNode::prev_child_or_token_before). #[inline] - pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode> { + pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode> { let (node, (index, offset)) = filter_nodes(self.green().children_to(n, offset)).next()?; self.get_or_add_node(node, index, offset).as_node().copied() } @@ -803,7 +694,7 @@ impl SyntaxNode { /// The last child node of this node up to the nth, if any. /// If this method returns `Some`, the contained node is the (n - 1)-st child. #[inline] - pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option> { + pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option> { let (element, (index, offset)) = self.green().children_to(n, offset).next()?; Some(self.get_or_add_element(element, index, offset)) } @@ -812,7 +703,7 @@ impl SyntaxNode { /// /// If you want to also consider leafs, see [`next_sibling_or_token`](SyntaxNode::next_sibling_or_token). 
#[inline] - pub fn next_sibling(&self) -> Option<&SyntaxNode> { + pub fn next_sibling(&self) -> Option<&SyntaxNode> { let (parent, index, _) = self.data().kind.as_child()?; let (node, (index, offset)) = filter_nodes( @@ -826,7 +717,7 @@ impl SyntaxNode { /// The tree element to the right of this one, i.e. the next child of this node's parent after this node. #[inline] - pub fn next_sibling_or_token(&self) -> Option> { + pub fn next_sibling_or_token(&self) -> Option> { let (parent, index, _) = self.data().kind.as_child()?; let (element, (index, offset)) = parent @@ -840,7 +731,7 @@ impl SyntaxNode { /// /// If you want to also consider leafs, see [`prev_sibling_or_token`](SyntaxNode::prev_sibling_or_token). #[inline] - pub fn prev_sibling(&self) -> Option<&SyntaxNode> { + pub fn prev_sibling(&self) -> Option<&SyntaxNode> { let (parent, index, _) = self.data().kind.as_child()?; let (node, (index, offset)) = @@ -850,7 +741,7 @@ impl SyntaxNode { /// The tree element to the left of this one, i.e. the previous child of this node's parent before this node. #[inline] - pub fn prev_sibling_or_token(&self) -> Option> { + pub fn prev_sibling_or_token(&self) -> Option> { let (parent, index, _) = self.data().kind.as_child()?; let (element, (index, offset)) = parent @@ -862,13 +753,13 @@ impl SyntaxNode { /// Return the leftmost token in the subtree of this node #[inline] - pub fn first_token(&self) -> Option<&SyntaxToken> { + pub fn first_token(&self) -> Option<&SyntaxToken> { self.first_child_or_token()?.first_token() } /// Return the rightmost token in the subtree of this node #[inline] - pub fn last_token(&self) -> Option<&SyntaxToken> { + pub fn last_token(&self) -> Option<&SyntaxToken> { self.last_child_or_token()?.last_token() } @@ -878,7 +769,7 @@ impl SyntaxNode { /// /// If you want to also consider leafs, see [`siblings_with_tokens`](SyntaxNode::siblings_with_tokens). #[inline] - pub fn siblings(&self, direction: Direction) -> impl Iterator> { + pub fn siblings(&self, direction: Direction) -> impl Iterator> { iter::successors(Some(self), move |node| match direction { Direction::Next => node.next_sibling(), Direction::Prev => node.prev_sibling(), @@ -889,8 +780,8 @@ impl SyntaxNode { /// node's parent's children from this node on to the left or the right. /// The first item in the iterator will always be this node. #[inline] - pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator> { - let me: SyntaxElementRef<'_, L, D, R> = self.into(); + pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator> { + let me: SyntaxElementRef<'_, L, D> = self.into(); iter::successors(Some(me), move |el| match direction { Direction::Next => el.next_sibling_or_token(), Direction::Prev => el.prev_sibling_or_token(), @@ -901,7 +792,7 @@ impl SyntaxNode { /// /// If you want to also consider leafs, see [`descendants_with_tokens`](SyntaxNode::descendants_with_tokens). #[inline] - pub fn descendants(&self) -> impl Iterator> { + pub fn descendants(&self) -> impl Iterator> { self.preorder().filter_map(|event| match event { WalkEvent::Enter(node) => Some(node), WalkEvent::Leave(_) => None, @@ -910,7 +801,7 @@ impl SyntaxNode { /// Returns an iterator over all elements in the subtree starting at this node, including this node. 
#[inline] - pub fn descendants_with_tokens(&self) -> impl Iterator> { + pub fn descendants_with_tokens(&self) -> impl Iterator> { self.preorder_with_tokens().filter_map(|event| match event { WalkEvent::Enter(it) => Some(it), WalkEvent::Leave(_) => None, @@ -920,7 +811,7 @@ impl SyntaxNode { /// Traverse the subtree rooted at the current node (including the current /// node) in preorder, excluding tokens. #[inline(always)] - pub fn preorder(&self) -> impl Iterator>> { + pub fn preorder(&self) -> impl Iterator>> { iter::successors(Some(WalkEvent::Enter(self)), move |pos| { let next = match pos { WalkEvent::Enter(node) => match node.first_child() { @@ -944,7 +835,7 @@ impl SyntaxNode { /// Traverse the subtree rooted at the current node (including the current /// node) in preorder, including tokens. #[inline(always)] - pub fn preorder_with_tokens(&self) -> impl Iterator>> { + pub fn preorder_with_tokens(&self) -> impl Iterator>> { let me = self.into(); iter::successors(Some(WalkEvent::Enter(me)), move |pos| { let next = match pos { @@ -971,7 +862,7 @@ impl SyntaxNode { /// Find a token in the subtree corresponding to this node, which covers the offset. /// Precondition: offset must be withing node's range. - pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset> { + pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset> { // TODO: this could be faster if we first drill-down to node, and only // then switch to token search. We should also replace explicit // recursion with a loop. @@ -1009,8 +900,8 @@ impl SyntaxNode { /// contains the range. If the range is empty and is contained in two leaf /// nodes, either one can be returned. Precondition: range must be contained /// withing the current node - pub fn covering_element(&self, range: TextRange) -> SyntaxElementRef<'_, L, D, R> { - let mut res: SyntaxElementRef<'_, L, D, R> = self.into(); + pub fn covering_element(&self, range: TextRange) -> SyntaxElementRef<'_, L, D> { + let mut res: SyntaxElementRef<'_, L, D> = self.into(); loop { assert!( res.text_range().contains_range(range), @@ -1034,37 +925,11 @@ impl SyntaxNode { } } -impl SyntaxNode -where - R: Resolver, -{ - /// Uses the resolver associated with this tree to return an efficient representation of all - /// source text covered by this node, i.e. the combined text of all token leafs of the subtree - /// originating in this node. - #[inline] - pub fn text(&self) -> SyntaxText<'_, '_, R, L, D, R> { - SyntaxText::new(self, self.resolver().as_ref()) - } -} - #[cfg(feature = "serde1")] -impl SyntaxNode +impl SyntaxNode where L: Language, { - /// Return an anonymous object that can be used to serialize this node, - /// including the data for each node. - pub fn as_serialize_with_data(&self) -> impl serde::Serialize + '_ - where - R: Resolver, - D: serde::Serialize, - { - SerializeWithData { - node: self, - resolver: self.resolver().as_ref(), - } - } - /// Return an anonymous object that can be used to serialize this node, /// including the data and by using an external resolver. 
pub fn as_serialize_with_data_with_resolver<'node>( @@ -1087,373 +952,6 @@ where } } -impl fmt::Debug for SyntaxNode -where - R: Resolver, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", Self::debug(self, self.resolver().as_ref(), f.alternate())) - } -} - -impl fmt::Display for SyntaxNode -where - R: Resolver, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", Self::display(self, self.resolver().as_ref())) - } -} - -impl SyntaxToken { - fn new(parent: &SyntaxNode, index: u32, offset: TextSize) -> SyntaxToken { - Self { - parent: parent.clone_uncounted(), - index, - offset, - } - } - - /// Returns a green tree, equal to the green tree this token - /// belongs two, except with this token substitute. The complexity - /// of operation is proportional to the depth of the tree - pub fn replace_with(&self, replacement: GreenToken) -> GreenNode { - assert_eq!(self.syntax_kind(), replacement.kind()); - let mut replacement = Some(replacement); - let parent = self.parent(); - let me = self.index; - - let children = parent.green().children().enumerate().map(|(i, child)| { - if i as u32 == me { - replacement.take().unwrap().into() - } else { - child.cloned() - } - }); - let new_parent = GreenNode::new(parent.syntax_kind(), children); - parent.replace_with(new_parent) - } - - /// The internal representation of the kind of this token. - #[inline] - pub fn syntax_kind(&self) -> SyntaxKind { - self.green().kind() - } - - /// The kind of this token in terms of your language. - #[inline] - pub fn kind(&self) -> L::Kind { - L::kind_from_raw(self.syntax_kind()) - } - - /// The range this token covers in the source text, in bytes. - #[inline] - pub fn text_range(&self) -> TextRange { - TextRange::at(self.offset, self.green().text_len()) - } - - /// Uses the provided resolver to return the source text of this token. - #[inline] - pub fn resolve_text<'i, I>(&self, resolver: &'i I) -> &'i str - where - I: Resolver + ?Sized, - { - self.green().text(resolver) - } - - /// Returns the unterlying green tree token of this token. - pub fn green(&self) -> &GreenToken { - self.parent - .green() - .children() - .nth(self.index as usize) - .unwrap() - .as_token() - .unwrap() - } - - /// The parent node of this token. - #[inline] - pub fn parent(&self) -> &SyntaxNode { - &self.parent - } - - /// Returns an iterator along the chain of parents of this token. - #[inline] - pub fn ancestors(&self) -> impl Iterator> { - self.parent().ancestors() - } - - /// The tree element to the right of this one, i.e. the next child of this token's parent after this token. - #[inline] - pub fn next_sibling_or_token(&self) -> Option> { - self.parent() - .next_child_or_token_after(self.index as usize, self.text_range().end()) - } - - /// The tree element to the left of this one, i.e. the previous child of this token's parent after this token. - #[inline] - pub fn prev_sibling_or_token(&self) -> Option> { - self.parent() - .prev_child_or_token_before(self.index as usize, self.text_range().start()) - } - - /// Returns an iterator over all siblings of this token in the given `direction`, i.e. all of this - /// token's parent's children from this token on to the left or the right. - /// The first item in the iterator will always be this token. 
- #[inline] - pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator> { - let me: SyntaxElementRef<'_, L, D, R> = self.into(); - iter::successors(Some(me), move |el| match direction { - Direction::Next => el.next_sibling_or_token(), - Direction::Prev => el.prev_sibling_or_token(), - }) - } - - /// Returns the next token in the tree. - /// This is not necessary a direct sibling of this token, but will always be further right in the tree. - pub fn next_token(&self) -> Option<&SyntaxToken> { - match self.next_sibling_or_token() { - Some(element) => element.first_token(), - None => self - .parent() - .ancestors() - .find_map(|it| it.next_sibling_or_token()) - .and_then(|element| element.first_token()), - } - } - - /// Returns the previous token in the tree. - /// This is not necessary a direct sibling of this token, but will always be further left in the tree. - pub fn prev_token(&self) -> Option<&SyntaxToken> { - match self.prev_sibling_or_token() { - Some(element) => element.last_token(), - None => self - .parent() - .ancestors() - .find_map(|it| it.prev_sibling_or_token()) - .and_then(|element| element.last_token()), - } - } -} - -impl SyntaxToken -where - R: Resolver, -{ - /// Uses the resolver associated with this tree to return the source text of this token. - #[inline] - pub fn text(&self) -> &str { - self.green().text(self.parent().resolver().as_ref()) - } -} - -impl fmt::Debug for SyntaxToken -where - R: Resolver, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", Self::debug(self, self.parent().resolver().as_ref())) - } -} - -impl fmt::Display for SyntaxToken -where - R: Resolver, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", Self::display(self, self.parent().resolver().as_ref())) - } -} - -impl SyntaxElement { - fn new( - element: GreenElementRef<'_>, - parent: &SyntaxNode, - index: u32, - offset: TextSize, - ref_count: *mut AtomicU32, - ) -> SyntaxElement { - match element { - NodeOrToken::Node(node) => SyntaxNode::new_child(node, parent, index as u32, offset, ref_count).into(), - NodeOrToken::Token(_) => SyntaxToken::new(parent, index as u32, offset).into(), - } - } - - /// The range this element covers in the source text, in bytes. - #[inline] - pub fn text_range(&self) -> TextRange { - match self { - NodeOrToken::Node(it) => it.text_range(), - NodeOrToken::Token(it) => it.text_range(), - } - } - - /// The internal representation of the kind of this element. - #[inline] - pub fn syntax_kind(&self) -> SyntaxKind { - match self { - NodeOrToken::Node(it) => it.syntax_kind(), - NodeOrToken::Token(it) => it.syntax_kind(), - } - } - - /// The kind of this element in terms of your language. - #[inline] - pub fn kind(&self) -> L::Kind { - match self { - NodeOrToken::Node(it) => it.kind(), - NodeOrToken::Token(it) => it.kind(), - } - } - - /// The parent node of this element, except if this element is the root. - #[inline] - pub fn parent(&self) -> Option<&SyntaxNode> { - match self { - NodeOrToken::Node(it) => it.parent(), - NodeOrToken::Token(it) => Some(it.parent()), - } - } - - /// Returns an iterator along the chain of parents of this node. - #[inline] - pub fn ancestors(&self) -> impl Iterator> { - match self { - NodeOrToken::Node(it) => it.ancestors(), - NodeOrToken::Token(it) => it.parent().ancestors(), - } - } - - /// Return the leftmost token in the subtree of this element. 
- #[inline] - pub fn first_token(&self) -> Option<&SyntaxToken> { - match self { - NodeOrToken::Node(it) => it.first_token(), - NodeOrToken::Token(it) => Some(it), - } - } - - /// Return the rightmost token in the subtree of this element. - #[inline] - pub fn last_token(&self) -> Option<&SyntaxToken> { - match self { - NodeOrToken::Node(it) => it.last_token(), - NodeOrToken::Token(it) => Some(it), - } - } - - /// The tree element to the right of this one, i.e. the next child of this element's parent after this element. - #[inline] - pub fn next_sibling_or_token(&self) -> Option> { - match self { - NodeOrToken::Node(it) => it.next_sibling_or_token(), - NodeOrToken::Token(it) => it.next_sibling_or_token(), - } - } - - /// The tree element to the left of this one, i.e. the previous child of this element's parent after this element. - #[inline] - pub fn prev_sibling_or_token(&self) -> Option> { - match self { - NodeOrToken::Node(it) => it.prev_sibling_or_token(), - NodeOrToken::Token(it) => it.prev_sibling_or_token(), - } - } -} - -impl<'a, L: Language, D, R> SyntaxElementRef<'a, L, D, R> { - /// The range this element covers in the source text, in bytes. - #[inline] - pub fn text_range(&self) -> TextRange { - match self { - NodeOrToken::Node(it) => it.text_range(), - NodeOrToken::Token(it) => it.text_range(), - } - } - - /// The internal representation of the kind of this element. - #[inline] - pub fn syntax_kind(&self) -> SyntaxKind { - match self { - NodeOrToken::Node(it) => it.syntax_kind(), - NodeOrToken::Token(it) => it.syntax_kind(), - } - } - - /// The kind of this element in terms of your language. - #[inline] - pub fn kind(&self) -> L::Kind { - match self { - NodeOrToken::Node(it) => it.kind(), - NodeOrToken::Token(it) => it.kind(), - } - } - - /// The parent node of this element, except if this element is the root. - #[inline] - pub fn parent(&self) -> Option<&'a SyntaxNode> { - match self { - NodeOrToken::Node(it) => it.parent(), - NodeOrToken::Token(it) => Some(it.parent()), - } - } - - /// Returns an iterator along the chain of parents of this node. - #[inline] - pub fn ancestors(&self) -> impl Iterator> { - match self { - NodeOrToken::Node(it) => it.ancestors(), - NodeOrToken::Token(it) => it.parent().ancestors(), - } - } - - /// Return the leftmost token in the subtree of this element. - #[inline] - pub fn first_token(&self) -> Option<&'a SyntaxToken> { - match self { - NodeOrToken::Node(it) => it.first_token(), - NodeOrToken::Token(it) => Some(it), - } - } - - /// Return the rightmost token in the subtree of this element. - #[inline] - pub fn last_token(&self) -> Option<&'a SyntaxToken> { - match self { - NodeOrToken::Node(it) => it.last_token(), - NodeOrToken::Token(it) => Some(it), - } - } - - /// The tree element to the right of this one, i.e. the next child of this element's parent after this element. - #[inline] - pub fn next_sibling_or_token(&self) -> Option> { - match self { - NodeOrToken::Node(it) => it.next_sibling_or_token(), - NodeOrToken::Token(it) => it.next_sibling_or_token(), - } - } - - /// The tree element to the left of this one, i.e. the previous child of this element's parent after this element. 
- #[inline] - pub fn prev_sibling_or_token(&self) -> Option> { - match self { - NodeOrToken::Node(it) => it.prev_sibling_or_token(), - NodeOrToken::Token(it) => it.prev_sibling_or_token(), - } - } - - #[inline] - fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset> { - assert!(self.text_range().start() <= offset && offset <= self.text_range().end()); - match self { - NodeOrToken::Token(token) => TokenAtOffset::Single((*token).clone()), - NodeOrToken::Node(node) => node.token_at_offset(offset), - } - } -} - #[derive(Clone, Debug)] struct Iter<'n> { green: Children<'n>, @@ -1462,7 +960,7 @@ struct Iter<'n> { } impl<'n> Iter<'n> { - fn new(parent: &'n SyntaxNode) -> Self { + fn new(parent: &'n SyntaxNode) -> Self { let offset = parent.text_range().start(); let green: Children<'_> = parent.green().children(); Iter { @@ -1486,14 +984,14 @@ impl<'n> Iter<'n> { /// An iterator over the child nodes of a [`SyntaxNode`]. #[derive(Clone)] -pub struct SyntaxNodeChildren<'n, L: Language, D: 'static = (), R: 'static = ()> { +pub struct SyntaxNodeChildren<'n, L: Language, D: 'static = ()> { inner: Iter<'n>, - parent: &'n SyntaxNode, + parent: &'n SyntaxNode, } -impl<'n, L: Language, D, R> SyntaxNodeChildren<'n, L, D, R> { +impl<'n, L: Language, D> SyntaxNodeChildren<'n, L, D> { #[inline] - fn new(parent: &'n SyntaxNode) -> Self { + fn new(parent: &'n SyntaxNode) -> Self { Self { inner: Iter::new(parent), parent, @@ -1501,8 +999,8 @@ impl<'n, L: Language, D, R> SyntaxNodeChildren<'n, L, D, R> { } } -impl<'n, L: Language, D, R> Iterator for SyntaxNodeChildren<'n, L, D, R> { - type Item = &'n SyntaxNode; +impl<'n, L: Language, D> Iterator for SyntaxNodeChildren<'n, L, D> { + type Item = &'n SyntaxNode; #[inline(always)] fn next(&mut self) -> Option { @@ -1517,14 +1015,14 @@ impl<'n, L: Language, D, R> Iterator for SyntaxNodeChildren<'n, L, D, R> { /// An iterator over the children of a [`SyntaxNode`]. #[derive(Clone)] -pub struct SyntaxElementChildren<'n, L: Language, D: 'static = (), R: 'static = ()> { +pub struct SyntaxElementChildren<'n, L: Language, D: 'static = ()> { inner: Iter<'n>, - parent: &'n SyntaxNode, + parent: &'n SyntaxNode, } -impl<'n, L: Language, D, R> SyntaxElementChildren<'n, L, D, R> { +impl<'n, L: Language, D> SyntaxElementChildren<'n, L, D> { #[inline] - fn new(parent: &'n SyntaxNode) -> Self { + fn new(parent: &'n SyntaxNode) -> Self { Self { inner: Iter::new(parent), parent, @@ -1532,8 +1030,8 @@ impl<'n, L: Language, D, R> SyntaxElementChildren<'n, L, D, R> { } } -impl<'n, L: Language, D, R> Iterator for SyntaxElementChildren<'n, L, D, R> { - type Item = SyntaxElementRef<'n, L, D, R>; +impl<'n, L: Language, D> Iterator for SyntaxElementChildren<'n, L, D> { + type Item = SyntaxElementRef<'n, L, D>; #[inline(always)] fn next(&mut self) -> Option { diff --git a/src/syntax/resolved.rs b/src/syntax/resolved.rs new file mode 100644 index 0000000..548f0e3 --- /dev/null +++ b/src/syntax/resolved.rs @@ -0,0 +1,735 @@ +//! Nodes, tokens, elements and their references which are guaranteed to belong to trees with +//! associated [`Resolver`]s(lasso::Resolver). +//! +//! This means they can implement `Debug` and `Display` and be (de-)serializable by default. 
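// A sketch of what the resolved variants provide, following the
// `new_root_with_resolver` doc example earlier in this diff. `TestLang`, `ROOT` and
// `TOKEN` are the illustrative definitions from the sketch in `syntax/mod.rs` above.
use cstree::{GreenNodeBuilder, ResolvedNode, SyntaxNode};

fn resolved_tree() -> ResolvedNode<TestLang> {
    let mut builder = GreenNodeBuilder::new();
    builder.start_node(ROOT);
    builder.token(TOKEN, "content");
    builder.finish_node();
    let (green, interner) = builder.finish();
    // Attaching the interner as the tree's resolver yields a `ResolvedNode`, which can
    // resolve its own text and therefore implements `Display` and `Debug` directly.
    SyntaxNode::new_root_with_resolver(green, interner.unwrap())
}

fn print_it() {
    let root = resolved_tree();
    assert_eq!(root.text(), "content"); // no external resolver needed
    println!("{}", root);               // `Display` works without passing a resolver
    let _plain: &SyntaxNode<TestLang> = root.syntax(); // drop back down to the plain view
}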
+ +use std::{ + fmt, + ops::{Deref, DerefMut}, + sync::Arc as StdArc, +}; + +use lasso::Resolver; +use text_size::{TextRange, TextSize}; + +use crate::{ + Direction, GreenNode, Language, NodeOrToken, SyntaxElementRef, SyntaxKind, SyntaxNode, SyntaxText, SyntaxToken, + TokenAtOffset, WalkEvent, +}; + +/// Syntax tree node that is guaranteed to belong to a tree that contains an associated +/// [`Resolver`](lasso::Resolver). +/// # See also +/// [`SyntaxNode`] +/// [`SyntaxNode::new_root_with_resolver`] +#[derive(Clone)] +#[repr(transparent)] +pub struct ResolvedNode { + pub(super) syntax: SyntaxNode, +} + +impl ResolvedNode { + /// # Safety: + /// `syntax` must belong to a tree that contains an associated inline resolver. + pub(super) unsafe fn coerce_ref(syntax: &SyntaxNode) -> &Self { + &*(syntax as *const _ as *const Self) + } + + /// Returns this node as a [`SyntaxNode`]. + pub fn syntax(&self) -> &SyntaxNode { + &self.syntax + } +} + +impl Deref for ResolvedNode { + type Target = SyntaxNode; + + fn deref(&self) -> &Self::Target { + &self.syntax + } +} + +impl DerefMut for ResolvedNode { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.syntax + } +} + +/// Syntax tree token that is guaranteed to belong to a tree that contains an associated +/// [`Resolver`](lasso::Resolver). +/// # See also +/// [`SyntaxToken`]] +#[repr(transparent)] +pub struct ResolvedToken { + syntax: SyntaxToken, +} + +impl ResolvedToken { + /// # Safety: + /// `syntax` must belong to a tree that contains an associated inline resolver. + pub(super) unsafe fn coerce_ref(syntax: &SyntaxToken) -> &Self { + &*(syntax as *const _ as *const Self) + } + + /// Returns this token as a [`SyntaxToken`]. + pub fn syntax(&self) -> &SyntaxToken { + &self.syntax + } +} + +impl Deref for ResolvedToken { + type Target = SyntaxToken; + + fn deref(&self) -> &Self::Target { + &self.syntax + } +} + +impl DerefMut for ResolvedToken { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.syntax + } +} + +/// An element of the tree that is guaranteed to belong to a tree that contains an associated +/// [`Resolver`](lasso::Resolver), can be either a node or a token. +/// # See also +/// [`SyntaxElement`](crate::SyntaxElement) +pub type ResolvedElement = NodeOrToken, ResolvedToken>; + +impl From> for ResolvedElement { + fn from(node: ResolvedNode) -> ResolvedElement { + NodeOrToken::Node(node) + } +} + +impl From> for ResolvedElement { + fn from(token: ResolvedToken) -> ResolvedElement { + NodeOrToken::Token(token) + } +} + +impl ResolvedElement { + #[allow(missing_docs)] + pub fn display(&self, resolver: &impl Resolver) -> String { + match self { + NodeOrToken::Node(it) => it.display(resolver), + NodeOrToken::Token(it) => it.display(resolver), + } + } +} + +/// A reference to an element of the tree that is guaranteed to belong to a tree that contains an +/// associated [`Resolver`](lasso::Resolver), can be either a reference to a node or one to a token. +/// # See also +/// [`SyntaxElementRef`] +pub type ResolvedElementRef<'a, L, D = ()> = NodeOrToken<&'a ResolvedNode, &'a ResolvedToken>; + +impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> { + /// # Safety: + /// `syntax` must belong to a tree that contains an associated inline resolver. 
+ pub(super) unsafe fn coerce_ref(syntax: SyntaxElementRef<'a, L, D>) -> Self { + match syntax { + NodeOrToken::Node(node) => Self::Node(ResolvedNode::coerce_ref(node)), + NodeOrToken::Token(token) => Self::Token(ResolvedToken::coerce_ref(token)), + } + } +} + +impl<'a, L: Language, D> From<&'a ResolvedNode> for ResolvedElementRef<'a, L, D> { + fn from(node: &'a ResolvedNode) -> Self { + NodeOrToken::Node(node) + } +} + +impl<'a, L: Language, D> From<&'a ResolvedToken> for ResolvedElementRef<'a, L, D> { + fn from(token: &'a ResolvedToken) -> Self { + NodeOrToken::Token(token) + } +} + +impl<'a, L: Language, D> From<&'a ResolvedElement> for ResolvedElementRef<'a, L, D> { + fn from(element: &'a ResolvedElement) -> Self { + match element { + NodeOrToken::Node(it) => Self::Node(it), + NodeOrToken::Token(it) => Self::Token(it), + } + } +} + +impl ResolvedNode { + /// Uses the resolver associated with this tree to return an efficient representation of all + /// source text covered by this node, i.e. the combined text of all token leafs of the subtree + /// originating in this node. + #[inline] + pub fn text(&self) -> SyntaxText<'_, '_, dyn Resolver, L, D> { + SyntaxText::new(self, &**self.resolver()) + } +} + +impl fmt::Debug for ResolvedNode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.debug(&**self.resolver(), f.alternate())) + } +} + +impl fmt::Display for ResolvedNode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.display(&**self.resolver())) + } +} + +impl ResolvedToken { + /// Uses the resolver associated with this tree to return the source text of this token. + #[inline] + pub fn text(&self) -> &str { + self.green().text(&**self.parent().resolver()) + } +} + +impl fmt::Debug for ResolvedToken { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.debug(&**self.parent().resolver())) + } +} + +impl fmt::Display for ResolvedToken { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.display(&**self.parent().resolver())) + } +} + +#[cfg(feature = "serde1")] +impl ResolvedNode +where + L: Language, +{ + /// Return an anonymous object that can be used to serialize this node, + /// including the data for each node. + pub fn as_serialize_with_data(&self) -> impl serde::Serialize + '_ + where + D: serde::Serialize, + { + crate::serde_impls::SerializeWithData { + node: self, + resolver: self.resolver().as_ref(), + } + } +} + +/* It follows: wrapping all _traversal_ methods so they return `ResolvedXY`s */ +macro_rules! forward { + // safety: if we're starting from a `ResolvedXY`, then the tree must have a resolver + ($e:expr) => { + ($e).map(|e| unsafe { Self::coerce_ref(e) }) + }; +} + +macro_rules! forward_as_elem { + // safety: if we're starting from a `ResolvedXY`, then the tree must have a resolver + ($e:expr) => { + ($e).map(|e| unsafe { ResolvedElementRef::coerce_ref(e) }) + }; +} + +macro_rules! forward_token { + // safety: if we're starting from a `ResolvedXY`, then the tree must have a resolver + ($e:expr) => { + ($e).map(|e| unsafe { ResolvedToken::coerce_ref(e) }) + }; +} + +macro_rules! forward_node { + // safety: if we're starting from a `ResolvedXY`, then the tree must have a resolver + ($e:expr) => { + ($e).map(|e| unsafe { ResolvedNode::coerce_ref(e) }) + }; +} + +impl ResolvedNode { + /// If there is a resolver associated with this tree, returns it. 
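// The `coerce_ref` conversions and the `forward_*!` macros above all rely on the
// `#[repr(transparent)]` layout guarantee: each `Resolved*` wrapper contains exactly one
// field of the corresponding `Syntax*` type, so a shared reference to the inner type can be
// reinterpreted as a reference to the wrapper. A self-contained sketch of the same pattern
// (the `Wrapper` type here is illustrative, not part of this crate):
//
//     #[repr(transparent)]
//     struct Wrapper(u32);
//
//     fn coerce(inner: &u32) -> &Wrapper {
//         // Sound only because `Wrapper` is `repr(transparent)` over `u32`.
//         unsafe { &*(inner as *const u32 as *const Wrapper) }
//     }
//
// What the compiler cannot check is the semantic precondition stated on each `coerce_ref`:
// the node or token must really come from a tree that carries a resolver.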
+ pub fn resolver(&self) -> &StdArc { + self.syntax.resolver().unwrap() + } + + /// See [`SyntaxNode::new_root_with_resolver`]. + #[inline] + pub fn new_root_with_resolver(green: GreenNode, resolver: impl Resolver + 'static) -> Self { + SyntaxNode::new_root_with_resolver(green, resolver) + } + + /// Always returns `Some(self)`. + /// + /// This method mostly exists to allow the convenience of being agnostic over [`SyntaxNode`] vs [`ResolvedNode`]. + #[inline] + pub fn try_resolved(&self) -> Option<&ResolvedNode> { + Some(self) + } + + /// Always returns `self`. + /// + /// This method mostly exists to allow the convenience of being agnostic over [`SyntaxNode`] vs [`ResolvedNode`]. + #[inline] + pub fn resolved(&self) -> &ResolvedNode { + self + } + + /// The root of the tree this node belongs to. + /// + /// If this node is the root, returns `self`. + #[inline] + pub fn root(&self) -> &SyntaxNode { + unsafe { Self::coerce_ref(self.syntax.root()) } + } + + /// The parent node of this node, except if this node is the root. + #[inline] + pub fn parent(&self) -> Option<&Self> { + forward!(self.syntax.parent()) + } + + /// Returns an iterator along the chain of parents of this node. + #[inline] + pub fn ancestors(&self) -> impl Iterator { + forward!(self.syntax.ancestors()) + } + + /// Returns an iterator over all nodes that are children of this node. + /// + /// If you want to also consider leafs, see [`children_with_tokens`](ResolvedNode::children_with_tokens). + #[inline] + pub fn children(&self) -> impl Iterator { + forward!(self.syntax.children()) + } + + /// Returns an iterator over child elements of this node, including tokens. + #[inline] + pub fn children_with_tokens(&self) -> impl Iterator> { + forward_as_elem!(self.syntax.children_with_tokens()) + } + + /// The first child node of this node, if any. + /// + /// If you want to also consider leafs, see [`first_child_or_token`](ResolvedNode::first_child_or_token). + #[inline] + pub fn first_child(&self) -> Option<&ResolvedNode> { + forward!(self.syntax.first_child()) + } + + /// The first child element of this node, if any, including tokens. + #[inline] + pub fn first_child_or_token(&self) -> Option> { + forward_as_elem!(self.syntax.first_child_or_token()) + } + + /// The last child node of this node, if any. + /// + /// If you want to also consider leafs, see [`last_child_or_token`](ResolvedNode::last_child_or_token). + #[inline] + pub fn last_child(&self) -> Option<&ResolvedNode> { + forward!(self.syntax.last_child()) + } + + /// The last child element of this node, if any, including tokens. + #[inline] + pub fn last_child_or_token(&self) -> Option> { + forward_as_elem!(self.syntax.last_child_or_token()) + } + + /// The first child node of this node starting at the (n + 1)-st, if any. + /// Note that even if this method returns `Some`, the contained node may not actually be the (n + + /// 1)-st child, but the next child from there that is a node. + /// + /// If you want to also consider leafs, see [`next_child_or_token_after`](ResolvedNode::next_child_or_token_after). + #[inline] + pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&ResolvedNode> { + forward!(self.syntax.next_child_after(n, offset)) + } + + /// The first child element of this node starting at the (n + 1)-st, if any. + /// If this method returns `Some`, the contained node is the (n + 1)-st child of this node. 
+ #[inline] + pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option> { + forward_as_elem!(self.syntax.next_child_or_token_after(n, offset)) + } + + /// The last child node of this node up to the nth, if any. + /// Note that even if this method returns `Some`, the contained node may not actually be the (n - + /// 1)-st child, but the previous child from there that is a node. + /// + /// If you want to also consider leafs, see + /// [`prev_child_or_token_before`](ResolvedNode::prev_child_or_token_before). + #[inline] + pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&ResolvedNode> { + forward!(self.syntax.prev_child_before(n, offset)) + } + + /// The last child node of this node up to the nth, if any. + /// If this method returns `Some`, the contained node is the (n - 1)-st child. + #[inline] + pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option> { + forward_as_elem!(self.syntax.prev_child_or_token_before(n, offset)) + } + + /// The node to the right of this one, i.e. the next child node (!) of this node's parent after this node. + /// + /// If you want to also consider leafs, see [`next_sibling_or_token`](ResolvedNode::next_sibling_or_token). + #[inline] + pub fn next_sibling(&self) -> Option<&ResolvedNode> { + forward!(self.syntax.next_sibling()) + } + + /// The tree element to the right of this one, i.e. the next child of this node's parent after this node. + #[inline] + pub fn next_sibling_or_token(&self) -> Option> { + forward_as_elem!(self.syntax.next_sibling_or_token()) + } + + /// The node to the left of this one, i.e. the previous child node (!) of this node's parent before this node. + /// + /// If you want to also consider leafs, see [`prev_sibling_or_token`](ResolvedNode::prev_sibling_or_token). + #[inline] + pub fn prev_sibling(&self) -> Option<&ResolvedNode> { + forward!(self.syntax.prev_sibling()) + } + + /// The tree element to the left of this one, i.e. the previous child of this node's parent before this node. + #[inline] + pub fn prev_sibling_or_token(&self) -> Option> { + forward_as_elem!(self.syntax.prev_sibling_or_token()) + } + + /// Return the leftmost token in the subtree of this node + #[inline] + pub fn first_token(&self) -> Option<&ResolvedToken> { + forward_token!(self.syntax.first_token()) + } + + /// Return the rightmost token in the subtree of this node + #[inline] + pub fn last_token(&self) -> Option<&ResolvedToken> { + forward_token!(self.syntax.last_token()) + } + + /// Returns an iterator over all sibling nodes of this node in the given `direction`, i.e. all of + /// this node's parent's child nodes (!) from this node on to the left or the right. The first + /// item in the iterator will always be this node. + /// + /// If you want to also consider leafs, see [`siblings_with_tokens`](ResolvedNode::siblings_with_tokens). + #[inline] + pub fn siblings(&self, direction: Direction) -> impl Iterator> { + forward!(self.syntax.siblings(direction)) + } + + /// Returns an iterator over all siblings of this node in the given `direction`, i.e. all of this + /// node's parent's children from this node on to the left or the right. + /// The first item in the iterator will always be this node. + #[inline] + pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator> { + forward_as_elem!(self.syntax.siblings_with_tokens(direction)) + } + + /// Returns an iterator over all nodes (!) in the subtree starting at this node, including this node. 
+ ///
+ /// If you want to also consider leafs, see [`descendants_with_tokens`](ResolvedNode::descendants_with_tokens).
+ #[inline]
+ pub fn descendants(&self) -> impl Iterator> {
+ forward!(self.syntax.descendants())
+ }
+
+ /// Returns an iterator over all elements in the subtree starting at this node, including this node.
+ #[inline]
+ pub fn descendants_with_tokens(&self) -> impl Iterator> {
+ forward_as_elem!(self.syntax.descendants_with_tokens())
+ }
+
+ /// Traverse the subtree rooted at the current node (including the current
+ /// node) in preorder, excluding tokens.
+ #[inline(always)]
+ pub fn preorder(&self) -> impl Iterator>> {
+ self.syntax
+ .preorder()
+ .map(|event| event.map(|node| unsafe { Self::coerce_ref(node) }))
+ }
+
+ /// Traverse the subtree rooted at the current node (including the current
+ /// node) in preorder, including tokens.
+ #[inline(always)]
+ pub fn preorder_with_tokens(&self) -> impl Iterator>> {
+ self.syntax
+ .preorder_with_tokens()
+ .map(|event| event.map(|elem| unsafe { ResolvedElementRef::coerce_ref(elem) }))
+ }
+
+ /// Find a token in the subtree corresponding to this node, which covers the offset.
+ /// Precondition: the offset must be within this node's range.
+ pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset> {
+ self.syntax
+ .token_at_offset(offset)
+ .map(|token| ResolvedToken { syntax: token })
+ }
+
+ /// Return the deepest node or token in the current subtree that fully
+ /// contains the range. If the range is empty and is contained in two leaf
+ /// nodes, either one can be returned. Precondition: the range must be contained
+ /// within the current node.
+ pub fn covering_element(&self, range: TextRange) -> ResolvedElementRef<'_, L, D> {
+ unsafe { ResolvedElementRef::coerce_ref(self.syntax.covering_element(range)) }
+ }
+}
+
+impl ResolvedToken {
+ /// Always returns `Some(self)`.
+ ///
+ /// This method mostly exists to allow the convenience of being agnostic over [`SyntaxToken`] vs [`ResolvedToken`].
+ #[inline]
+ pub fn try_resolved(&self) -> Option<&ResolvedToken> {
+ Some(self)
+ }
+
+ /// Always returns `self`.
+ ///
+ /// This method mostly exists to allow the convenience of being agnostic over [`SyntaxToken`] vs [`ResolvedToken`].
+ #[inline]
+ pub fn resolved(&self) -> &ResolvedToken {
+ self
+ }
+
+ /// The parent node of this token.
+ #[inline]
+ pub fn parent(&self) -> &ResolvedNode {
+ unsafe { ResolvedNode::coerce_ref(self.syntax.parent()) }
+ }
+
+ /// Returns an iterator along the chain of parents of this token.
+ #[inline]
+ pub fn ancestors(&self) -> impl Iterator> {
+ forward_node!(self.syntax.ancestors())
+ }
+
+ /// The tree element to the right of this one, i.e. the next child of this token's parent after this token.
+ #[inline]
+ pub fn next_sibling_or_token(&self) -> Option> {
+ forward_as_elem!(self.syntax.next_sibling_or_token())
+ }
+
+ /// The tree element to the left of this one, i.e. the previous child of this token's parent before this token.
+ #[inline]
+ pub fn prev_sibling_or_token(&self) -> Option> {
+ forward_as_elem!(self.syntax.prev_sibling_or_token())
+ }
+
+ /// Returns an iterator over all siblings of this token in the given `direction`, i.e. all of this
+ /// token's parent's children from this token on to the left or the right.
+ /// The first item in the iterator will always be this token.
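// A brief usage sketch for the position-based queries on `ResolvedNode` above
// (`token_at_offset` and `covering_element`); `root` is assumed to be a `ResolvedNode`
// built as in the crate's tests, and `TokenAtOffset` iterates over at most two tokens:
//
//     for token in root.token_at_offset(TextSize::from(3)) {
//         println!("token at offset 3: {}", token.text());
//     }
//
//     let covering = root.covering_element(TextRange::new(1.into(), 4.into()));
//     println!("covering element spans {:?}", covering.text_range());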
+ #[inline] + pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator> { + forward_as_elem!(self.syntax.siblings_with_tokens(direction)) + } + + /// Returns the next token in the tree. + /// This is not necessary a direct sibling of this token, but will always be further right in the tree. + pub fn next_token(&self) -> Option<&ResolvedToken> { + forward!(self.syntax.next_token()) + } + + /// Returns the previous token in the tree. + /// This is not necessary a direct sibling of this token, but will always be further left in the tree. + pub fn prev_token(&self) -> Option<&ResolvedToken> { + forward!(self.syntax.prev_token()) + } +} + +impl ResolvedElement { + /// The range this element covers in the source text, in bytes. + #[inline] + pub fn text_range(&self) -> TextRange { + match self { + NodeOrToken::Node(it) => it.text_range(), + NodeOrToken::Token(it) => it.text_range(), + } + } + + /// The internal representation of the kind of this element. + #[inline] + pub fn syntax_kind(&self) -> SyntaxKind { + match self { + NodeOrToken::Node(it) => it.syntax_kind(), + NodeOrToken::Token(it) => it.syntax_kind(), + } + } + + /// The kind of this element in terms of your language. + #[inline] + pub fn kind(&self) -> L::Kind { + match self { + NodeOrToken::Node(it) => it.kind(), + NodeOrToken::Token(it) => it.kind(), + } + } + + /// The parent node of this element, except if this element is the root. + #[inline] + pub fn parent(&self) -> Option<&ResolvedNode> { + match self { + NodeOrToken::Node(it) => it.parent(), + NodeOrToken::Token(it) => Some(it.parent()), + } + } + + /// Returns an iterator along the chain of parents of this node. + #[inline] + pub fn ancestors(&self) -> impl Iterator> { + match self { + NodeOrToken::Node(it) => it.ancestors(), + NodeOrToken::Token(it) => it.parent().ancestors(), + } + } + + /// Return the leftmost token in the subtree of this element. + #[inline] + pub fn first_token(&self) -> Option<&ResolvedToken> { + match self { + NodeOrToken::Node(it) => it.first_token(), + NodeOrToken::Token(it) => Some(it), + } + } + + /// Return the rightmost token in the subtree of this element. + #[inline] + pub fn last_token(&self) -> Option<&ResolvedToken> { + match self { + NodeOrToken::Node(it) => it.last_token(), + NodeOrToken::Token(it) => Some(it), + } + } + + /// The tree element to the right of this one, i.e. the next child of this element's parent after this element. + #[inline] + pub fn next_sibling_or_token(&self) -> Option> { + match self { + NodeOrToken::Node(it) => it.next_sibling_or_token(), + NodeOrToken::Token(it) => it.next_sibling_or_token(), + } + } + + /// The tree element to the left of this one, i.e. the previous child of this element's parent after this element. + #[inline] + pub fn prev_sibling_or_token(&self) -> Option> { + match self { + NodeOrToken::Node(it) => it.prev_sibling_or_token(), + NodeOrToken::Token(it) => it.prev_sibling_or_token(), + } + } +} + +impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> { + /// The range this element covers in the source text, in bytes. + #[inline] + pub fn text_range(&self) -> TextRange { + match self { + NodeOrToken::Node(it) => it.text_range(), + NodeOrToken::Token(it) => it.text_range(), + } + } + + /// The internal representation of the kind of this element. + #[inline] + pub fn syntax_kind(&self) -> SyntaxKind { + match self { + NodeOrToken::Node(it) => it.syntax_kind(), + NodeOrToken::Token(it) => it.syntax_kind(), + } + } + + /// The kind of this element in terms of your language. 
+ #[inline] + pub fn kind(&self) -> L::Kind { + match self { + NodeOrToken::Node(it) => it.kind(), + NodeOrToken::Token(it) => it.kind(), + } + } + + /// The parent node of this element, except if this element is the root. + #[inline] + pub fn parent(&self) -> Option<&'a ResolvedNode> { + match self { + NodeOrToken::Node(it) => it.parent(), + NodeOrToken::Token(it) => Some(it.parent()), + } + } + + /// Returns an iterator along the chain of parents of this node. + #[inline] + pub fn ancestors(&self) -> impl Iterator> { + match self { + NodeOrToken::Node(it) => it.ancestors(), + NodeOrToken::Token(it) => it.parent().ancestors(), + } + } + + /// Return the leftmost token in the subtree of this element. + #[inline] + pub fn first_token(&self) -> Option<&'a ResolvedToken> { + match self { + NodeOrToken::Node(it) => it.first_token(), + NodeOrToken::Token(it) => Some(it), + } + } + + /// Return the rightmost token in the subtree of this element. + #[inline] + pub fn last_token(&self) -> Option<&'a ResolvedToken> { + match self { + NodeOrToken::Node(it) => it.last_token(), + NodeOrToken::Token(it) => Some(it), + } + } + + /// The tree element to the right of this one, i.e. the next child of this element's parent after this element. + #[inline] + pub fn next_sibling_or_token(&self) -> Option> { + match self { + NodeOrToken::Node(it) => it.next_sibling_or_token(), + NodeOrToken::Token(it) => it.next_sibling_or_token(), + } + } + + /// The tree element to the left of this one, i.e. the previous child of this element's parent after this element. + #[inline] + pub fn prev_sibling_or_token(&self) -> Option> { + match self { + NodeOrToken::Node(it) => it.prev_sibling_or_token(), + NodeOrToken::Token(it) => it.prev_sibling_or_token(), + } + } +} + +#[test] +fn assert_send_sync() { + use crate::SyntaxKind; + + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] + enum L {} + #[derive(Debug)] + enum Kind { + Var, + } + impl Language for L { + type Kind = Kind; + + fn kind_from_raw(_: SyntaxKind) -> Self::Kind { + Kind::Var + } + + fn kind_to_raw(_: Self::Kind) -> SyntaxKind { + SyntaxKind(0) + } + } + fn f() {} + f::>(); + f::>(); + f::>(); + f::>(); +} diff --git a/src/syntax_text.rs b/src/syntax/text.rs similarity index 88% rename from src/syntax_text.rs rename to src/syntax/text.rs index 79bb2d7..41f7ddf 100644 --- a/src/syntax_text.rs +++ b/src/syntax/text.rs @@ -35,9 +35,10 @@ use crate::{interning::Resolver, Language, SyntaxNode, SyntaxToken, TextRange, T /// # cstree::SyntaxKind(kind as u16) /// # } /// # } -/// # type SyntaxNode = cstree::SyntaxNode>; +/// # type SyntaxNode = cstree::SyntaxNode; +/// # type ResolvedNode = cstree::ResolvedNode; /// # -/// # fn parse_float_literal(s: &str) -> SyntaxNode { +/// # fn parse_float_literal(s: &str) -> ResolvedNode { /// # const LITERAL: cstree::SyntaxKind = cstree::SyntaxKind(0); /// # let mut builder = GreenNodeBuilder::new(); /// # builder.start_node(LITERAL); @@ -57,14 +58,14 @@ use crate::{interning::Resolver, Language, SyntaxNode, SyntaxToken, TextRange, T /// assert_eq!(sub, "748"); /// ``` #[derive(Clone)] -pub struct SyntaxText<'n, 'i, I: ?Sized, L: Language, D: 'static = (), R: 'static = ()> { - node: &'n SyntaxNode, +pub struct SyntaxText<'n, 'i, I: ?Sized, L: Language, D: 'static = ()> { + node: &'n SyntaxNode, range: TextRange, resolver: &'i I, } -impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D, R> { - pub(crate) fn new(node: &'n SyntaxNode, resolver: &'i I) -> Self { +impl<'n, 'i, I: Resolver + 
?Sized, L: Language, D> SyntaxText<'n, 'i, I, L, D> { + pub(crate) fn new(node: &'n SyntaxNode, resolver: &'i I) -> Self { let range = node.text_range(); SyntaxText { node, range, resolver } } @@ -190,7 +191,7 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D } } - fn tokens_with_ranges(&self) -> impl Iterator, TextRange)> { + fn tokens_with_ranges(&self) -> impl Iterator, TextRange)> { let text_range = self.range; self.node .descendants_with_tokens() @@ -210,25 +211,25 @@ fn found(res: Result<(), T>) -> Option { } } -impl fmt::Debug for SyntaxText<'_, '_, I, L, D, R> { +impl fmt::Debug for SyntaxText<'_, '_, I, L, D> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&self.to_string(), f) } } -impl fmt::Display for SyntaxText<'_, '_, I, L, D, R> { +impl fmt::Display for SyntaxText<'_, '_, I, L, D> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f)) } } -impl From> for String { - fn from(text: SyntaxText<'_, '_, I, L, D, R>) -> String { +impl From> for String { + fn from(text: SyntaxText<'_, '_, I, L, D>) -> String { text.to_string() } } -impl PartialEq for SyntaxText<'_, '_, I, L, D, R> { +impl PartialEq for SyntaxText<'_, '_, I, L, D> { fn eq(&self, mut rhs: &str) -> bool { self.try_for_each_chunk(|chunk| { if !rhs.starts_with(chunk) { @@ -242,33 +243,33 @@ impl PartialEq for SyntaxText<'_, } } -impl PartialEq> for str { - fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D, R>) -> bool { +impl PartialEq> for str { + fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D>) -> bool { rhs == self } } -impl PartialEq<&'_ str> for SyntaxText<'_, '_, I, L, D, R> { +impl PartialEq<&'_ str> for SyntaxText<'_, '_, I, L, D> { fn eq(&self, rhs: &&str) -> bool { self == *rhs } } -impl PartialEq> for &'_ str { - fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D, R>) -> bool { +impl PartialEq> for &'_ str { + fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D>) -> bool { rhs == self } } -impl<'n1, 'i1, 'n2, 'i2, I1, I2, L1, L2, D1, D2, R1, R2> PartialEq> - for SyntaxText<'n1, 'i1, I1, L1, D1, R1> +impl<'n1, 'i1, 'n2, 'i2, I1, I2, L1, L2, D1, D2> PartialEq> + for SyntaxText<'n1, 'i1, I1, L1, D1> where L1: Language, L2: Language, I1: Resolver + ?Sized, I2: Resolver + ?Sized, { - fn eq(&self, other: &SyntaxText<'_, '_, I2, L2, D2, R2>) -> bool { + fn eq(&self, other: &SyntaxText<'_, '_, I2, L2, D2>) -> bool { if self.range.len() != other.range.len() { return false; } @@ -280,21 +281,19 @@ where } } -fn zip_texts<'it1, 'it2, It1, It2, I1, I2, L1, L2, D1, D2, R1, R2>( +fn zip_texts<'it1, 'it2, It1, It2, I1, I2, L1, L2, D1, D2>( xs: &mut It1, ys: &mut It2, resolver_x: &I1, resolver_y: &I2, ) -> Option<()> where - It1: Iterator, TextRange)>, - It2: Iterator, TextRange)>, + It1: Iterator, TextRange)>, + It2: Iterator, TextRange)>, I1: Resolver + ?Sized, I2: Resolver + ?Sized, D1: 'static, D2: 'static, - R1: 'static, - R2: 'static, L1: Language + 'it1, L2: Language + 'it2, { diff --git a/src/syntax/token.rs b/src/syntax/token.rs new file mode 100644 index 0000000..b7008bd --- /dev/null +++ b/src/syntax/token.rs @@ -0,0 +1,225 @@ +use std::{ + fmt::Write, + hash::{Hash, Hasher}, + iter, +}; + +use lasso::Resolver; +use text_size::{TextRange, TextSize}; + +use super::*; +use crate::{Direction, GreenNode, GreenToken, Language, SyntaxKind}; + +/// Syntax tree token. 
+pub struct SyntaxToken {
+ parent: SyntaxNode,
+ index: u32,
+ offset: TextSize,
+}
+
+impl Clone for SyntaxToken {
+ fn clone(&self) -> Self {
+ Self {
+ parent: self.parent.clone(),
+ index: self.index,
+ offset: self.offset,
+ }
+ }
+}
+
+impl Hash for SyntaxToken {
+ fn hash(&self, state: &mut H) {
+ self.parent.hash(state);
+ self.index.hash(state);
+ self.offset.hash(state);
+ }
+}
+
+impl PartialEq for SyntaxToken {
+ fn eq(&self, other: &SyntaxToken) -> bool {
+ self.parent == other.parent && self.index == other.index && self.offset == other.offset
+ }
+}
+
+impl Eq for SyntaxToken {}
+
+impl SyntaxToken {
+ #[allow(missing_docs)]
+ pub fn debug(&self, resolver: &R) -> String
+ where
+ R: Resolver + ?Sized,
+ {
+ let mut res = String::new();
+ write!(res, "{:?}@{:?}", self.kind(), self.text_range()).unwrap();
+ if self.resolve_text(resolver).len() < 25 {
+ write!(res, " {:?}", self.resolve_text(resolver)).unwrap();
+ return res;
+ }
+ let text = self.resolve_text(resolver);
+ for idx in 21..25 {
+ if text.is_char_boundary(idx) {
+ let text = format!("{} ...", &text[..idx]);
+ write!(res, " {:?}", text).unwrap();
+ return res;
+ }
+ }
+ unreachable!()
+ }
+
+ #[allow(missing_docs)]
+ pub fn display(&self, resolver: &R) -> String
+ where
+ R: Resolver + ?Sized,
+ {
+ self.resolve_text(resolver).to_string()
+ }
+
+ /// Turns this token into a [`ResolvedToken`], but only if there is a resolver associated with this tree.
+ #[inline]
+ pub fn try_resolved(&self) -> Option<&ResolvedToken> {
+ // safety: we only coerce if `resolver` exists
+ self.parent()
+ .resolver()
+ .map(|_| unsafe { ResolvedToken::coerce_ref(self) })
+ }
+
+ /// Turns this token into a [`ResolvedToken`].
+ /// # Panics
+ /// If there is no resolver associated with this tree.
+ #[inline]
+ pub fn resolved(&self) -> &ResolvedToken {
+ self.try_resolved().expect("tried to resolve a token without a resolver")
+ }
+}
+
+impl SyntaxToken {
+ pub(super) fn new(parent: &SyntaxNode, index: u32, offset: TextSize) -> SyntaxToken {
+ Self {
+ parent: parent.clone_uncounted(),
+ index,
+ offset,
+ }
+ }
+
+ /// Returns a green tree, equal to the green tree this token
+ /// belongs to, except with this token substituted. The complexity
+ /// of the operation is proportional to the depth of the tree.
+ pub fn replace_with(&self, replacement: GreenToken) -> GreenNode {
+ assert_eq!(self.syntax_kind(), replacement.kind());
+ let mut replacement = Some(replacement);
+ let parent = self.parent();
+ let me = self.index;
+
+ let children = parent.green().children().enumerate().map(|(i, child)| {
+ if i as u32 == me {
+ replacement.take().unwrap().into()
+ } else {
+ child.cloned()
+ }
+ });
+ let new_parent = GreenNode::new(parent.syntax_kind(), children);
+ parent.replace_with(new_parent)
+ }
+
+ /// The internal representation of the kind of this token.
+ #[inline]
+ pub fn syntax_kind(&self) -> SyntaxKind {
+ self.green().kind()
+ }
+
+ /// The kind of this token in terms of your language.
+ #[inline]
+ pub fn kind(&self) -> L::Kind {
+ L::kind_from_raw(self.syntax_kind())
+ }
+
+ /// The range this token covers in the source text, in bytes.
+ #[inline]
+ pub fn text_range(&self) -> TextRange {
+ TextRange::at(self.offset, self.green().text_len())
+ }
+
+ /// Uses the provided resolver to return the source text of this token.
+ #[inline]
+ pub fn resolve_text<'i, I>(&self, resolver: &'i I) -> &'i str
+ where
+ I: Resolver + ?Sized,
+ {
+ self.green().text(resolver)
+ }
+
+ /// Returns the underlying green tree token of this token.
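// `resolve_text` above takes the resolver explicitly, while a token from a tree that owns
// its resolver can go through [`ResolvedToken`] and read its text directly; the change to
// `tests/it/sendsync.rs` further down swaps one form for the other. A sketch (assuming
// `token` is a `SyntaxToken` from such a tree and `interner` is the matching resolver):
//
//     let explicit = token.resolve_text(&interner); // plain SyntaxToken: pass a resolver
//     let implicit = token.resolved().text();       // ResolvedToken: resolver comes from the tree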
+ pub fn green(&self) -> &GreenToken { + self.parent + .green() + .children() + .nth(self.index as usize) + .unwrap() + .as_token() + .unwrap() + } + + /// The parent node of this token. + #[inline] + pub fn parent(&self) -> &SyntaxNode { + &self.parent + } + + /// Returns an iterator along the chain of parents of this token. + #[inline] + pub fn ancestors(&self) -> impl Iterator> { + self.parent().ancestors() + } + + /// The tree element to the right of this one, i.e. the next child of this token's parent after this token. + #[inline] + pub fn next_sibling_or_token(&self) -> Option> { + self.parent() + .next_child_or_token_after(self.index as usize, self.text_range().end()) + } + + /// The tree element to the left of this one, i.e. the previous child of this token's parent after this token. + #[inline] + pub fn prev_sibling_or_token(&self) -> Option> { + self.parent() + .prev_child_or_token_before(self.index as usize, self.text_range().start()) + } + + /// Returns an iterator over all siblings of this token in the given `direction`, i.e. all of this + /// token's parent's children from this token on to the left or the right. + /// The first item in the iterator will always be this token. + #[inline] + pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator> { + let me: SyntaxElementRef<'_, L, D> = self.into(); + iter::successors(Some(me), move |el| match direction { + Direction::Next => el.next_sibling_or_token(), + Direction::Prev => el.prev_sibling_or_token(), + }) + } + + /// Returns the next token in the tree. + /// This is not necessary a direct sibling of this token, but will always be further right in the tree. + pub fn next_token(&self) -> Option<&SyntaxToken> { + match self.next_sibling_or_token() { + Some(element) => element.first_token(), + None => self + .parent() + .ancestors() + .find_map(|it| it.next_sibling_or_token()) + .and_then(|element| element.first_token()), + } + } + + /// Returns the previous token in the tree. + /// This is not necessary a direct sibling of this token, but will always be further left in the tree. 
+ pub fn prev_token(&self) -> Option<&SyntaxToken> { + match self.prev_sibling_or_token() { + Some(element) => element.last_token(), + None => self + .parent() + .ancestors() + .find_map(|it| it.prev_sibling_or_token()) + .and_then(|element| element.last_token()), + } + } +} diff --git a/tests/basic.rs b/tests/it/basic.rs similarity index 92% rename from tests/basic.rs rename to tests/it/basic.rs index de0b63e..aae31c9 100644 --- a/tests/basic.rs +++ b/tests/it/basic.rs @@ -1,8 +1,4 @@ -mod common; - -use common::{ - build_recursive, build_tree_with_cache, Element, SyntaxElement, SyntaxElementRef, SyntaxNode, SyntaxToken, -}; +use super::*; use cstree::{GreenNodeBuilder, NodeCache, SyntaxKind, TextRange}; use lasso::{Resolver, Rodeo}; @@ -105,7 +101,7 @@ fn inline_resolver() { let mut cache = NodeCache::with_interner(&mut interner); let tree = two_level_tree(); let tree = build_tree_with_cache(&tree, &mut cache); - let tree: SyntaxNode<(), Rodeo> = SyntaxNode::new_root_with_resolver(tree, interner); + let tree: ResolvedNode = SyntaxNode::new_root_with_resolver(tree, interner); { let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap(); let leaf1_0 = leaf1_0.into_token().unwrap(); @@ -137,10 +133,10 @@ fn assert_debug_display() { use std::fmt; fn f() {} - f::>(); - f::>(); - f::>(); - f::>(); + f::(); + f::(); + f::(); + f::>(); f::>(); fn dbg() {} diff --git a/tests/common.rs b/tests/it/main.rs similarity index 67% rename from tests/common.rs rename to tests/it/main.rs index 178c2c9..7fff523 100644 --- a/tests/common.rs +++ b/tests/it/main.rs @@ -1,10 +1,20 @@ +mod basic; +mod sendsync; +#[cfg(feature = "serde1")] +mod serde; + use cstree::{GreenNode, GreenNodeBuilder, Language, NodeCache, SyntaxKind}; use lasso::Interner; -pub type SyntaxNode = cstree::SyntaxNode; -pub type SyntaxToken = cstree::SyntaxToken; -pub type SyntaxElement = cstree::SyntaxElement; -pub type SyntaxElementRef<'a, D = (), R = ()> = cstree::SyntaxElementRef<'a, TestLang, D, R>; +pub type SyntaxNode = cstree::SyntaxNode; +pub type SyntaxToken = cstree::SyntaxToken; +pub type SyntaxElement = cstree::SyntaxElement; +pub type SyntaxElementRef<'a, D = ()> = cstree::SyntaxElementRef<'a, TestLang, D>; + +pub type ResolvedNode = cstree::ResolvedNode; +pub type ResolvedToken = cstree::ResolvedToken; +pub type ResolvedElement = cstree::ResolvedElement; +pub type ResolvedElementRef<'a, D = ()> = cstree::ResolvedElementRef<'a, TestLang, D>; #[derive(Debug)] pub enum Element<'s> { diff --git a/tests/sendsync.rs b/tests/it/sendsync.rs similarity index 92% rename from tests/sendsync.rs rename to tests/it/sendsync.rs index b347ca9..53013e1 100644 --- a/tests/sendsync.rs +++ b/tests/it/sendsync.rs @@ -1,18 +1,12 @@ #![allow(clippy::redundant_clone)] -#[allow(unused)] -mod common; - use crossbeam_utils::thread::scope; use std::{thread, time::Duration}; -use common::{build_recursive, Element, SyntaxNode}; -use cstree::{ - interning::{IntoResolver, Resolver}, - GreenNodeBuilder, -}; +use super::{build_recursive, Element, ResolvedNode, SyntaxNode}; +use cstree::{interning::IntoResolver, GreenNodeBuilder}; -fn build_tree(root: &Element<'_>) -> SyntaxNode { +fn build_tree(root: &Element<'_>) -> ResolvedNode { let mut builder = GreenNodeBuilder::new(); build_recursive(root, &mut builder, 0); let (node, interner) = builder.finish(); @@ -43,7 +37,7 @@ fn send() { .next() .unwrap(); let leaf1_0 = leaf1_0.into_token().unwrap(); - leaf1_0.resolve_text(thread_tree.resolver().as_ref()).to_string() + 
leaf1_0.text().to_string() }); assert_eq!(thread.join().unwrap(), "1.0"); } diff --git a/tests/serde.rs b/tests/it/serde.rs similarity index 83% rename from tests/serde.rs rename to tests/it/serde.rs index 5537e84..5ce135e 100644 --- a/tests/serde.rs +++ b/tests/it/serde.rs @@ -1,18 +1,11 @@ -#![cfg(feature = "serde1")] +use crate::{build_recursive, build_tree_with_cache, ResolvedNode}; -#[allow(unused)] -mod common; - -use common::{Element, SyntaxNode}; -use cstree::{ - interning::{IntoResolver, Resolver}, - GreenNodeBuilder, NodeCache, NodeOrToken, -}; +use super::{Element, SyntaxNode}; +use cstree::{interning::IntoResolver, GreenNodeBuilder, NodeCache, NodeOrToken}; use serde_test::Token; use std::fmt; type Rodeo = lasso::Rodeo; -type RodeoResolver = lasso::RodeoResolver; /// Macro for generating a list of `serde_test::Token`s using a simpler DSL. macro_rules! event_tokens { @@ -142,28 +135,28 @@ struct NonSerializable; /// Serializable SyntaxNode that doesn't have a identity `PartialEq` implementation, /// but checks if both trees have equal nodes and tokens. -struct TestNode { - node: SyntaxNode, +struct TestNode { + node: ResolvedNode, with_data: bool, } -impl TestNode { - fn new(node: SyntaxNode) -> Self { +impl TestNode { + fn new(node: ResolvedNode) -> Self { Self { node, with_data: false } } - fn with_data(node: SyntaxNode) -> Self { + fn with_data(node: ResolvedNode) -> Self { Self { node, with_data: true } } } -impl fmt::Debug for TestNode { +impl fmt::Debug for TestNode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self.node, f) } } -impl serde::Serialize for TestNode { +impl serde::Serialize for TestNode { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -176,20 +169,20 @@ impl serde::Serialize for TestNode { } } -impl<'de> serde::Deserialize<'de> for TestNode { +impl<'de> serde::Deserialize<'de> for TestNode { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { Ok(Self { - node: SyntaxNode::deserialize(deserializer)?, + node: ResolvedNode::deserialize(deserializer)?, with_data: true, }) } } -impl PartialEq> for TestNode { - fn eq(&self, other: &TestNode) -> bool { +impl PartialEq for TestNode { + fn eq(&self, other: &TestNode) -> bool { self.node.kind() == other.node.kind() && self.node.get_data() == other.node.get_data() && self.node.text_range() == other.node.text_range() @@ -229,14 +222,14 @@ fn three_level_tree() -> Element<'static> { ]) } -fn build_tree(root: Element<'_>) -> SyntaxNode { +fn build_tree(root: Element<'_>) -> ResolvedNode { let mut builder = GreenNodeBuilder::new(); - common::build_recursive(&root, &mut builder, 0); + build_recursive(&root, &mut builder, 0); let (node, interner) = builder.finish(); SyntaxNode::new_root_with_resolver(node, interner.unwrap().into_resolver()) } -fn attach_data(node: &SyntaxNode) { +fn attach_data(node: &SyntaxNode) { node.descendants().enumerate().for_each(|(idx, node)| { node.set_data(format!("{}", idx + 1)); }); @@ -248,12 +241,12 @@ fn serialize_tree_with_data_with_resolver() { let mut cache = NodeCache::with_interner(&mut interner); let root = three_level_tree(); - let root = common::build_tree_with_cache(&root, &mut cache); - let tree = SyntaxNode::::new_root(root.clone()); + let root = build_tree_with_cache(&root, &mut cache); + let tree = SyntaxNode::::new_root(root.clone()); attach_data(&tree); let serialized = serde_json::to_string(&tree.as_serialize_with_data_with_resolver(&interner)).unwrap(); - let deserialized: TestNode<_> = 
serde_json::from_str(&serialized).unwrap(); + let deserialized: TestNode = serde_json::from_str(&serialized).unwrap(); let expected = SyntaxNode::new_root_with_resolver(root, interner); attach_data(&expected); @@ -266,11 +259,11 @@ fn serialize_tree_with_resolver() { let mut cache = NodeCache::with_interner(&mut interner); let root = three_level_tree(); - let root = common::build_tree_with_cache(&root, &mut cache); + let root = build_tree_with_cache(&root, &mut cache); let tree = SyntaxNode::::new_root(root.clone()); let serialized = serde_json::to_string(&tree.as_serialize_with_resolver(&interner)).unwrap(); - let deserialized: TestNode<_> = serde_json::from_str(&serialized).unwrap(); + let deserialized: TestNode = serde_json::from_str(&serialized).unwrap(); let expected = SyntaxNode::new_root_with_resolver(root, interner); assert_eq!(TestNode::new(expected), deserialized);
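Condensed into a single round trip, the flow exercised by the two tests above looks roughly as follows. This is a sketch, not part of the patch: it assumes the `serde1` feature, reuses the `ResolvedNode`, `SyntaxNode`, `build_recursive` and `three_level_tree` helpers from this test crate, and relies on `ResolvedNode` being (de)serializable by default, as the module docs for `syntax/resolved.rs` state.

    #[cfg(feature = "serde1")]
    #[test]
    fn roundtrip_resolved_tree() {
        // Build a resolved tree the same way `build_tree` does in tests/it/serde.rs.
        let mut builder = GreenNodeBuilder::new();
        build_recursive(&three_level_tree(), &mut builder, 0);
        let (green, interner) = builder.finish();
        let tree: ResolvedNode = SyntaxNode::new_root_with_resolver(green, interner.unwrap().into_resolver());

        // A ResolvedNode carries its resolver, so it serializes directly ...
        let json = serde_json::to_string(&tree).unwrap();
        // ... and can be read back into another ResolvedNode.
        let restored: ResolvedNode = serde_json::from_str(&json).unwrap();
        assert_eq!(restored.text_range(), tree.text_range());
    }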