
Split syntax into modules and make inline resolvers a dyn Trait instead of a generic parameter (#21)

Author: DQ, 2021-03-17 19:20:02 +01:00 (committed by GitHub)
Parent: 45b5e93bc0
Commit: e46ddbd765
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
13 changed files with 1494 additions and 764 deletions
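
For orientation, a minimal sketch of what the new surface looks like after this change. `TestLang` is a stand-in `Language` implementation (not the one from the test suite), and the builder calls are abbreviated; their exact signatures at this commit are assumed rather than quoted:

use cstree::{GreenNodeBuilder, Language, ResolvedNode, SyntaxKind, SyntaxNode};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
enum TestLang {}

impl Language for TestLang {
    // Raw kinds are passed through unchanged; a real language would map them to its own enum.
    type Kind = SyntaxKind;
    fn kind_from_raw(kind: SyntaxKind) -> Self::Kind {
        kind
    }
    fn kind_to_raw(kind: Self::Kind) -> SyntaxKind {
        kind
    }
}

fn build() -> ResolvedNode<TestLang> {
    const ROOT: SyntaxKind = SyntaxKind(0);
    const TOKEN: SyntaxKind = SyntaxKind(1);
    let mut builder = GreenNodeBuilder::new();
    builder.start_node(ROOT);
    builder.token(TOKEN, "example");
    builder.finish_node();
    // `finish()` hands back the green tree plus the interner the builder created.
    let (green, interner) = builder.finish();
    // The resolver is now attached behind the scenes as a `dyn` trait object;
    // `SyntaxNode<TestLang>` itself no longer carries a resolver type parameter,
    // and attaching one yields a `ResolvedNode`.
    SyntaxNode::new_root_with_resolver(green, interner.unwrap())
}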


@@ -96,7 +96,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 /// See [`std::sync::Arc`].
 #[repr(C)]
-pub struct Arc<T: ?Sized + 'static> {
+pub struct Arc<T: ?Sized> {
 p: NonNull<ArcInner<T>>,
 }
@@ -104,7 +104,7 @@ pub struct Arc<T: ?Sized + 'static> {
 ///
 /// This lets us build arcs that we can mutate before
 /// freezing, without needing to change the allocation
-pub struct UniqueArc<T: ?Sized + 'static>(Arc<T>);
+pub struct UniqueArc<T: ?Sized>(Arc<T>);
 impl<T> UniqueArc<T> {
 #[inline]
@@ -601,7 +601,7 @@ impl<H> HeaderWithLength<H> {
 }
 type HeaderSliceWithLength<H, T> = HeaderSlice<HeaderWithLength<H>, T>;
-pub struct ThinArc<H: 'static, T: 'static> {
+pub struct ThinArc<H, T> {
 ptr: *mut ArcInner<HeaderSliceWithLength<H, [T; 1]>>,
 }
@@ -620,7 +620,7 @@ fn thin_to_thick<H, T>(
 fake_slice as *mut ArcInner<HeaderSliceWithLength<H, [T]>>
 }
-impl<H: 'static, T: 'static> ThinArc<H, T> {
+impl<H, T> ThinArc<H, T> {
 /// Temporarily converts |self| into a bonafide Arc and exposes it to the
 /// provided callback. The refcount is not modified.
 #[inline]
@@ -663,21 +663,21 @@ impl<H, T> Deref for ThinArc<H, T> {
 }
 }
-impl<H: 'static, T: 'static> Clone for ThinArc<H, T> {
+impl<H, T> Clone for ThinArc<H, T> {
 #[inline]
 fn clone(&self) -> Self {
 ThinArc::with_arc(self, |a| Arc::into_thin(a.clone()))
 }
 }
-impl<H: 'static, T: 'static> Drop for ThinArc<H, T> {
+impl<H, T> Drop for ThinArc<H, T> {
 #[inline]
 fn drop(&mut self) {
 let _ = Arc::from_thin(ThinArc { ptr: self.ptr });
 }
 }
-impl<H: 'static, T: 'static> Arc<HeaderSliceWithLength<H, [T]>> {
+impl<H, T> Arc<HeaderSliceWithLength<H, [T]>> {
 /// Converts an Arc into a ThinArc. This consumes the Arc, so the refcount
 /// is not modified.
 #[inline]
@@ -708,14 +708,14 @@ impl<H: 'static, T: 'static> Arc<HeaderSliceWithLength<H, [T]>> {
 }
 }
-impl<H: PartialEq + 'static, T: PartialEq + 'static> PartialEq for ThinArc<H, T> {
+impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
 #[inline]
 fn eq(&self, other: &ThinArc<H, T>) -> bool {
 ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| *a == *b))
 }
 }
-impl<H: Eq + 'static, T: Eq + 'static> Eq for ThinArc<H, T> {}
+impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}
 /// An Arc, except it holds a pointer to the T instead of to the
 /// entire ArcInner.
@@ -734,14 +734,14 @@ impl<H: Eq + 'static, T: Eq + 'static> Eq for ThinArc<H, T> {}
 /// but we can also convert it to a "regular" Arc<T> by removing the offset
 #[derive(Eq)]
 #[repr(C)]
-pub struct RawOffsetArc<T: 'static> {
+pub struct RawOffsetArc<T> {
 ptr: NonNull<T>,
 }
-unsafe impl<T: 'static + Sync + Send> Send for RawOffsetArc<T> {}
-unsafe impl<T: 'static + Sync + Send> Sync for RawOffsetArc<T> {}
-impl<T: 'static> Deref for RawOffsetArc<T> {
+unsafe impl<T: Sync + Send> Send for RawOffsetArc<T> {}
+unsafe impl<T: Sync + Send> Sync for RawOffsetArc<T> {}
+impl<T> Deref for RawOffsetArc<T> {
 type Target = T;
 fn deref(&self) -> &Self::Target {
@@ -749,20 +749,20 @@ impl<T: 'static> Deref for RawOffsetArc<T> {
 }
 }
-impl<T: 'static> Clone for RawOffsetArc<T> {
+impl<T> Clone for RawOffsetArc<T> {
 #[inline]
 fn clone(&self) -> Self {
 Arc::into_raw_offset(self.clone_arc())
 }
 }
-impl<T: 'static> Drop for RawOffsetArc<T> {
+impl<T> Drop for RawOffsetArc<T> {
 fn drop(&mut self) {
 let _ = Arc::from_raw_offset(RawOffsetArc { ptr: self.ptr.clone() });
 }
 }
-impl<T: fmt::Debug + 'static> fmt::Debug for RawOffsetArc<T> {
+impl<T: fmt::Debug> fmt::Debug for RawOffsetArc<T> {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 fmt::Debug::fmt(&**self, f)
 }
@@ -778,7 +778,7 @@ impl<T: PartialEq> PartialEq for RawOffsetArc<T> {
 }
 }
-impl<T: 'static> RawOffsetArc<T> {
+impl<T> RawOffsetArc<T> {
 /// Temporarily converts |self| into a bonafide Arc and exposes it to the
 /// provided callback. The refcount is not modified.
 #[inline]
@@ -837,7 +837,7 @@ impl<T: 'static> RawOffsetArc<T> {
 }
 }
-impl<T: 'static> Arc<T> {
+impl<T> Arc<T> {
 /// Converts an Arc into a RawOffsetArc. This consumes the Arc, so the refcount
 /// is not modified.
 #[inline]
@@ -903,7 +903,6 @@ impl<'a, T> ArcBorrow<'a, T> {
 pub fn with_arc<F, U>(&self, f: F) -> U
 where
 F: FnOnce(&Arc<T>) -> U,
-T: 'static,
 {
 // Synthesize transient Arc, which never touches the refcount.
 let transient = unsafe { NoDrop::new(Arc::from_raw(self.0)) };


@@ -59,7 +59,6 @@ pub mod syntax;
 #[cfg(feature = "serde1")]
 mod serde_impls;
-mod syntax_text;
 #[allow(missing_docs)]
 mod utility_types;
@@ -76,8 +75,7 @@ pub use text_size::{TextLen, TextRange, TextSize};
 pub use crate::{
 arc::Arc,
 green::{Checkpoint, Children, GreenNode, GreenNodeBuilder, GreenToken, NodeCache, SyntaxKind},
-syntax::{SyntaxElement, SyntaxElementChildren, SyntaxElementRef, SyntaxNode, SyntaxNodeChildren, SyntaxToken},
-syntax_text::SyntaxText,
+syntax::*,
 utility_types::{Direction, NodeOrToken, TokenAtOffset, WalkEvent},
 };


@@ -2,7 +2,7 @@
 use crate::{
 interning::{IntoResolver, Resolver},
-GreenNodeBuilder, Language, NodeOrToken, SyntaxKind, SyntaxNode, WalkEvent,
+GreenNodeBuilder, Language, NodeOrToken, ResolvedNode, SyntaxKind, SyntaxNode, WalkEvent,
 };
 use serde::{
 de::{Error, SeqAccess, Visitor},
@@ -11,9 +11,6 @@ use serde::{
 };
 use std::{collections::VecDeque, fmt, marker::PhantomData};
-type Rodeo = lasso::Rodeo<lasso::Spur, fxhash::FxBuildHasher>;
-type RodeoResolver = lasso::RodeoResolver<lasso::Spur>;
 /// Expands to the first expression, if there's
 /// no expression following, otherwise return the second expression.
 ///
@@ -86,21 +83,21 @@ enum Event<'text> {
 }
 /// Make a `SyntaxNode` serializable but without serializing the data.
-pub(crate) struct SerializeWithResolver<'node, 'resolver, L: Language, D: 'static, RN: 'static, R> {
-pub(crate) node: &'node SyntaxNode<L, D, RN>,
+pub(crate) struct SerializeWithResolver<'node, 'resolver, L: Language, D: 'static, R: ?Sized> {
+pub(crate) node: &'node SyntaxNode<L, D>,
 pub(crate) resolver: &'resolver R,
 }
 /// Make a `SyntaxNode` serializable which will include the data for serialization.
-pub(crate) struct SerializeWithData<'node, 'resolver, L: Language, D: 'static, RN: 'static, R> {
-pub(crate) node: &'node SyntaxNode<L, D, RN>,
+pub(crate) struct SerializeWithData<'node, 'resolver, L: Language, D: 'static, R: ?Sized> {
+pub(crate) node: &'node SyntaxNode<L, D>,
 pub(crate) resolver: &'resolver R,
 }
-impl<L, D, RN, R> Serialize for SerializeWithData<'_, '_, L, D, RN, R>
+impl<L, D, R> Serialize for SerializeWithData<'_, '_, L, D, R>
 where
 L: Language,
-R: Resolver,
+R: Resolver + ?Sized,
 D: Serialize,
 {
 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@@ -112,10 +109,10 @@ where
 }
 }
-impl<L, D, RN, R> Serialize for SerializeWithResolver<'_, '_, L, D, RN, R>
+impl<L, D, R> Serialize for SerializeWithResolver<'_, '_, L, D, R>
 where
 L: Language,
-R: Resolver,
+R: Resolver + ?Sized,
 {
 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
 where
@@ -125,11 +122,10 @@ where
 }
 }
-impl<L, D, R> Serialize for SyntaxNode<L, D, R>
+impl<L, D> Serialize for ResolvedNode<L, D>
 where
 L: Language,
 D: Serialize,
-R: Resolver,
 {
 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
 where
@@ -143,7 +139,7 @@ where
 }
 }
-impl<'de, L, D> Deserialize<'de> for SyntaxNode<L, D, RodeoResolver>
+impl<'de, L, D> Deserialize<'de> for ResolvedNode<L, D>
 where
 L: Language,
 D: Deserialize<'de>,
@@ -163,7 +159,7 @@ where
 DE: serde::Deserializer<'de>,
 {
 struct EventVisitor<L: Language, D: 'static> {
-_marker: PhantomData<SyntaxNode<L, D, Rodeo>>,
+_marker: PhantomData<fn() -> ResolvedNode<L, D>>,
 }
 impl<'de, L, D> Visitor<'de> for EventVisitor<L, D>
@@ -171,7 +167,7 @@ where
 L: Language,
 D: Deserialize<'de>,
 {
-type Value = (SyntaxNode<L, D, RodeoResolver>, VecDeque<bool>);
+type Value = (ResolvedNode<L, D>, VecDeque<bool>);
 fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
 formatter.write_str("a list of tree events")
@@ -196,12 +192,12 @@ where
 }
 let (tree, resolver) = builder.finish();
-let tree = SyntaxNode::new_root_with_resolver(tree, resolver.unwrap().into_resolver());
+let tree = ResolvedNode::new_root_with_resolver(tree, resolver.unwrap().into_resolver());
 Ok((tree, data_indices))
 }
 }
-struct ProcessedEvents<L: Language, D: 'static>(SyntaxNode<L, D, RodeoResolver>, VecDeque<bool>);
+struct ProcessedEvents<L: Language, D: 'static>(ResolvedNode<L, D>, VecDeque<bool>);
 impl<'de, L, D> Deserialize<'de> for ProcessedEvents<L, D>
 where
 L: Language,
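
With the resolver type parameter gone, the serde impls target `ResolvedNode<L, D>` directly, so a resolved tree can be round-tripped without naming a concrete resolver type. A hedged usage sketch, assuming the hypothetical `TestLang`/`build()` from the sketch above, the `serde1` feature, and `serde_json` as the serializer (none of which are part of this commit):

fn roundtrip(tree: &ResolvedNode<TestLang>) -> ResolvedNode<TestLang> {
    // `ResolvedNode: Serialize` replaces the old `SyntaxNode<L, D, R: Resolver>` impl;
    // deserializing rebuilds the tree together with a fresh resolver.
    let json = serde_json::to_string(tree).expect("serialization should not fail");
    serde_json::from_str(&json).expect("deserialization should not fail")
}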

src/syntax/element.rs (new file)

@@ -0,0 +1,254 @@
use std::sync::atomic::AtomicU32;
use lasso::Resolver;
use text_size::{TextRange, TextSize};
use super::*;
use crate::{green::GreenElementRef, Language, NodeOrToken, SyntaxKind, TokenAtOffset};
/// An element of the tree, can be either a node or a token.
pub type SyntaxElement<L, D = ()> = NodeOrToken<SyntaxNode<L, D>, SyntaxToken<L, D>>;
impl<L: Language, D> From<SyntaxNode<L, D>> for SyntaxElement<L, D> {
fn from(node: SyntaxNode<L, D>) -> SyntaxElement<L, D> {
NodeOrToken::Node(node)
}
}
impl<L: Language, D> From<SyntaxToken<L, D>> for SyntaxElement<L, D> {
fn from(token: SyntaxToken<L, D>) -> SyntaxElement<L, D> {
NodeOrToken::Token(token)
}
}
impl<L: Language, D> SyntaxElement<L, D> {
#[allow(missing_docs)]
pub fn display(&self, resolver: &impl Resolver) -> String {
match self {
NodeOrToken::Node(it) => it.display(resolver),
NodeOrToken::Token(it) => it.display(resolver),
}
}
}
/// A reference to an element of the tree, can be either a reference to a node or one to a token.
pub type SyntaxElementRef<'a, L, D = ()> = NodeOrToken<&'a SyntaxNode<L, D>, &'a SyntaxToken<L, D>>;
impl<'a, L: Language, D> From<&'a SyntaxNode<L, D>> for SyntaxElementRef<'a, L, D> {
fn from(node: &'a SyntaxNode<L, D>) -> Self {
NodeOrToken::Node(node)
}
}
impl<'a, L: Language, D> From<&'a SyntaxToken<L, D>> for SyntaxElementRef<'a, L, D> {
fn from(token: &'a SyntaxToken<L, D>) -> Self {
NodeOrToken::Token(token)
}
}
impl<'a, L: Language, D> From<&'a SyntaxElement<L, D>> for SyntaxElementRef<'a, L, D> {
fn from(element: &'a SyntaxElement<L, D>) -> Self {
match element {
NodeOrToken::Node(it) => Self::Node(it),
NodeOrToken::Token(it) => Self::Token(it),
}
}
}
impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
#[allow(missing_docs)]
pub fn display(&self, resolver: &impl Resolver) -> String {
match self {
NodeOrToken::Node(it) => it.display(resolver),
NodeOrToken::Token(it) => it.display(resolver),
}
}
}
impl<L: Language, D> SyntaxElement<L, D> {
pub(super) fn new(
element: GreenElementRef<'_>,
parent: &SyntaxNode<L, D>,
index: u32,
offset: TextSize,
ref_count: *mut AtomicU32,
) -> SyntaxElement<L, D> {
match element {
NodeOrToken::Node(node) => SyntaxNode::new_child(node, parent, index as u32, offset, ref_count).into(),
NodeOrToken::Token(_) => SyntaxToken::new(parent, index as u32, offset).into(),
}
}
/// The range this element covers in the source text, in bytes.
#[inline]
pub fn text_range(&self) -> TextRange {
match self {
NodeOrToken::Node(it) => it.text_range(),
NodeOrToken::Token(it) => it.text_range(),
}
}
/// The internal representation of the kind of this element.
#[inline]
pub fn syntax_kind(&self) -> SyntaxKind {
match self {
NodeOrToken::Node(it) => it.syntax_kind(),
NodeOrToken::Token(it) => it.syntax_kind(),
}
}
/// The kind of this element in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
match self {
NodeOrToken::Node(it) => it.kind(),
NodeOrToken::Token(it) => it.kind(),
}
}
/// The parent node of this element, except if this element is the root.
#[inline]
pub fn parent(&self) -> Option<&SyntaxNode<L, D>> {
match self {
NodeOrToken::Node(it) => it.parent(),
NodeOrToken::Token(it) => Some(it.parent()),
}
}
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
match self {
NodeOrToken::Node(it) => it.ancestors(),
NodeOrToken::Token(it) => it.parent().ancestors(),
}
}
/// Return the leftmost token in the subtree of this element.
#[inline]
pub fn first_token(&self) -> Option<&SyntaxToken<L, D>> {
match self {
NodeOrToken::Node(it) => it.first_token(),
NodeOrToken::Token(it) => Some(it),
}
}
/// Return the rightmost token in the subtree of this element.
#[inline]
pub fn last_token(&self) -> Option<&SyntaxToken<L, D>> {
match self {
NodeOrToken::Node(it) => it.last_token(),
NodeOrToken::Token(it) => Some(it),
}
}
/// The tree element to the right of this one, i.e. the next child of this element's parent after this element.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
match self {
NodeOrToken::Node(it) => it.next_sibling_or_token(),
NodeOrToken::Token(it) => it.next_sibling_or_token(),
}
}
/// The tree element to the left of this one, i.e. the previous child of this element's parent before this element.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
match self {
NodeOrToken::Node(it) => it.prev_sibling_or_token(),
NodeOrToken::Token(it) => it.prev_sibling_or_token(),
}
}
}
impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
/// The range this element covers in the source text, in bytes.
#[inline]
pub fn text_range(&self) -> TextRange {
match self {
NodeOrToken::Node(it) => it.text_range(),
NodeOrToken::Token(it) => it.text_range(),
}
}
/// The internal representation of the kind of this element.
#[inline]
pub fn syntax_kind(&self) -> SyntaxKind {
match self {
NodeOrToken::Node(it) => it.syntax_kind(),
NodeOrToken::Token(it) => it.syntax_kind(),
}
}
/// The kind of this element in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
match self {
NodeOrToken::Node(it) => it.kind(),
NodeOrToken::Token(it) => it.kind(),
}
}
/// The parent node of this element, except if this element is the root.
#[inline]
pub fn parent(&self) -> Option<&'a SyntaxNode<L, D>> {
match self {
NodeOrToken::Node(it) => it.parent(),
NodeOrToken::Token(it) => Some(it.parent()),
}
}
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &'a SyntaxNode<L, D>> {
match self {
NodeOrToken::Node(it) => it.ancestors(),
NodeOrToken::Token(it) => it.parent().ancestors(),
}
}
/// Return the leftmost token in the subtree of this element.
#[inline]
pub fn first_token(&self) -> Option<&'a SyntaxToken<L, D>> {
match self {
NodeOrToken::Node(it) => it.first_token(),
NodeOrToken::Token(it) => Some(it),
}
}
/// Return the rightmost token in the subtree of this element.
#[inline]
pub fn last_token(&self) -> Option<&'a SyntaxToken<L, D>> {
match self {
NodeOrToken::Node(it) => it.last_token(),
NodeOrToken::Token(it) => Some(it),
}
}
/// The tree element to the right of this one, i.e. the next child of this element's parent after this element.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, L, D>> {
match self {
NodeOrToken::Node(it) => it.next_sibling_or_token(),
NodeOrToken::Token(it) => it.next_sibling_or_token(),
}
}
/// The tree element to the left of this one, i.e. the previous child of this element's parent before this element.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, L, D>> {
match self {
NodeOrToken::Node(it) => it.prev_sibling_or_token(),
NodeOrToken::Token(it) => it.prev_sibling_or_token(),
}
}
#[inline]
pub(super) fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<L, D>> {
assert!(self.text_range().start() <= offset && offset <= self.text_range().end());
match self {
NodeOrToken::Token(token) => TokenAtOffset::Single((*token).clone()),
NodeOrToken::Node(node) => node.token_at_offset(offset),
}
}
}

src/syntax/mod.rs (new file)

@@ -0,0 +1,33 @@
//! Implementation of the outer, "red" tree.
//!
//! Inner [`SyntaxNode`]s represent only structural information, but can hold additional, user-defined data.
//! Leaf [`SyntaxToken`]s represent individual pieces of source text.
//! Use [`SyntaxNode::new_root`] and [`SyntaxNode::new_root_with_resolver`] to construct a syntax
//! tree on top of a green tree.
mod element;
pub use element::{SyntaxElement, SyntaxElementRef};
mod node;
pub use node::{SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren};
mod token;
pub use token::SyntaxToken;
mod resolved;
pub use resolved::{ResolvedElement, ResolvedElementRef, ResolvedNode, ResolvedToken};
mod text;
pub use text::SyntaxText;
// A note on `#[inline]` usage in this module:
// In `rowan`, there are two layers of `SyntaxXY`s: the `cursor` layer and the `api` layer.
// The `cursor` layer handles all of the actual methods on the tree, while the `api` layer is
// generic over the `Language` of the tree and otherwise forwards its implementation to the `cursor`
// layer.
// Here, we have unified the `cursor` and the `api` layer into the `syntax` layer.
// This means that all of our types here are generic over a `Language`, including the
// implementations which, in `rowan`, are part of the `cursor` layer.
// Very apparently, this makes the compiler less willing to inline. Almost every "regular use"
// method in this file has some kind of `#[inline]` annotation to counteract that. This is _NOT_
// just for fun, not inlining decreases tree traversal speed by approx. 50% at the time of writing
// this.
//
// - DQ 01/2021

File diff suppressed because it is too large.

src/syntax/resolved.rs (new file)

@@ -0,0 +1,735 @@
//! Nodes, tokens, elements and their references which are guaranteed to belong to trees with
//! associated [`Resolver`](lasso::Resolver)s.
//!
//! This means they can implement `Debug` and `Display` and be (de-)serializable by default.
use std::{
fmt,
ops::{Deref, DerefMut},
sync::Arc as StdArc,
};
use lasso::Resolver;
use text_size::{TextRange, TextSize};
use crate::{
Direction, GreenNode, Language, NodeOrToken, SyntaxElementRef, SyntaxKind, SyntaxNode, SyntaxText, SyntaxToken,
TokenAtOffset, WalkEvent,
};
/// Syntax tree node that is guaranteed to belong to a tree that contains an associated
/// [`Resolver`](lasso::Resolver).
/// # See also
/// [`SyntaxNode`]
/// [`SyntaxNode::new_root_with_resolver`]
#[derive(Clone)]
#[repr(transparent)]
pub struct ResolvedNode<L: Language, D: 'static = ()> {
pub(super) syntax: SyntaxNode<L, D>,
}
impl<L: Language, D> ResolvedNode<L, D> {
/// # Safety:
/// `syntax` must belong to a tree that contains an associated inline resolver.
pub(super) unsafe fn coerce_ref(syntax: &SyntaxNode<L, D>) -> &Self {
&*(syntax as *const _ as *const Self)
}
/// Returns this node as a [`SyntaxNode`].
pub fn syntax(&self) -> &SyntaxNode<L, D> {
&self.syntax
}
}
impl<L: Language, D> Deref for ResolvedNode<L, D> {
type Target = SyntaxNode<L, D>;
fn deref(&self) -> &Self::Target {
&self.syntax
}
}
impl<L: Language, D> DerefMut for ResolvedNode<L, D> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.syntax
}
}
/// Syntax tree token that is guaranteed to belong to a tree that contains an associated
/// [`Resolver`](lasso::Resolver).
/// # See also
/// [`SyntaxToken`]
#[repr(transparent)]
pub struct ResolvedToken<L: Language, D: 'static = ()> {
syntax: SyntaxToken<L, D>,
}
impl<L: Language, D> ResolvedToken<L, D> {
/// # Safety:
/// `syntax` must belong to a tree that contains an associated inline resolver.
pub(super) unsafe fn coerce_ref(syntax: &SyntaxToken<L, D>) -> &Self {
&*(syntax as *const _ as *const Self)
}
/// Returns this token as a [`SyntaxToken`].
pub fn syntax(&self) -> &SyntaxToken<L, D> {
&self.syntax
}
}
impl<L: Language, D> Deref for ResolvedToken<L, D> {
type Target = SyntaxToken<L, D>;
fn deref(&self) -> &Self::Target {
&self.syntax
}
}
impl<L: Language, D> DerefMut for ResolvedToken<L, D> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.syntax
}
}
/// An element of the tree that is guaranteed to belong to a tree that contains an associated
/// [`Resolver`](lasso::Resolver), can be either a node or a token.
/// # See also
/// [`SyntaxElement`](crate::SyntaxElement)
pub type ResolvedElement<L, D = ()> = NodeOrToken<ResolvedNode<L, D>, ResolvedToken<L, D>>;
impl<L: Language, D> From<ResolvedNode<L, D>> for ResolvedElement<L, D> {
fn from(node: ResolvedNode<L, D>) -> ResolvedElement<L, D> {
NodeOrToken::Node(node)
}
}
impl<L: Language, D> From<ResolvedToken<L, D>> for ResolvedElement<L, D> {
fn from(token: ResolvedToken<L, D>) -> ResolvedElement<L, D> {
NodeOrToken::Token(token)
}
}
impl<L: Language, D> ResolvedElement<L, D> {
#[allow(missing_docs)]
pub fn display(&self, resolver: &impl Resolver) -> String {
match self {
NodeOrToken::Node(it) => it.display(resolver),
NodeOrToken::Token(it) => it.display(resolver),
}
}
}
/// A reference to an element of the tree that is guaranteed to belong to a tree that contains an
/// associated [`Resolver`](lasso::Resolver), can be either a reference to a node or one to a token.
/// # See also
/// [`SyntaxElementRef`]
pub type ResolvedElementRef<'a, L, D = ()> = NodeOrToken<&'a ResolvedNode<L, D>, &'a ResolvedToken<L, D>>;
impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
/// # Safety:
/// `syntax` must belong to a tree that contains an associated inline resolver.
pub(super) unsafe fn coerce_ref(syntax: SyntaxElementRef<'a, L, D>) -> Self {
match syntax {
NodeOrToken::Node(node) => Self::Node(ResolvedNode::coerce_ref(node)),
NodeOrToken::Token(token) => Self::Token(ResolvedToken::coerce_ref(token)),
}
}
}
impl<'a, L: Language, D> From<&'a ResolvedNode<L, D>> for ResolvedElementRef<'a, L, D> {
fn from(node: &'a ResolvedNode<L, D>) -> Self {
NodeOrToken::Node(node)
}
}
impl<'a, L: Language, D> From<&'a ResolvedToken<L, D>> for ResolvedElementRef<'a, L, D> {
fn from(token: &'a ResolvedToken<L, D>) -> Self {
NodeOrToken::Token(token)
}
}
impl<'a, L: Language, D> From<&'a ResolvedElement<L, D>> for ResolvedElementRef<'a, L, D> {
fn from(element: &'a ResolvedElement<L, D>) -> Self {
match element {
NodeOrToken::Node(it) => Self::Node(it),
NodeOrToken::Token(it) => Self::Token(it),
}
}
}
impl<L: Language, D> ResolvedNode<L, D> {
/// Uses the resolver associated with this tree to return an efficient representation of all
/// source text covered by this node, i.e. the combined text of all token leafs of the subtree
/// originating in this node.
#[inline]
pub fn text(&self) -> SyntaxText<'_, '_, dyn Resolver, L, D> {
SyntaxText::new(self, &**self.resolver())
}
}
impl<L: Language, D> fmt::Debug for ResolvedNode<L, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.debug(&**self.resolver(), f.alternate()))
}
}
impl<L: Language, D> fmt::Display for ResolvedNode<L, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.display(&**self.resolver()))
}
}
impl<L: Language, D> ResolvedToken<L, D> {
/// Uses the resolver associated with this tree to return the source text of this token.
#[inline]
pub fn text(&self) -> &str {
self.green().text(&**self.parent().resolver())
}
}
impl<L: Language, D> fmt::Debug for ResolvedToken<L, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.debug(&**self.parent().resolver()))
}
}
impl<L: Language, D> fmt::Display for ResolvedToken<L, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.display(&**self.parent().resolver()))
}
}
#[cfg(feature = "serde1")]
impl<L, D> ResolvedNode<L, D>
where
L: Language,
{
/// Return an anonymous object that can be used to serialize this node,
/// including the data for each node.
pub fn as_serialize_with_data(&self) -> impl serde::Serialize + '_
where
D: serde::Serialize,
{
crate::serde_impls::SerializeWithData {
node: self,
resolver: self.resolver().as_ref(),
}
}
}
/* It follows: wrapping all _traversal_ methods so they return `ResolvedXY`s */
macro_rules! forward {
// safety: if we're starting from a `ResolvedXY`, then the tree must have a resolver
($e:expr) => {
($e).map(|e| unsafe { Self::coerce_ref(e) })
};
}
macro_rules! forward_as_elem {
// safety: if we're starting from a `ResolvedXY`, then the tree must have a resolver
($e:expr) => {
($e).map(|e| unsafe { ResolvedElementRef::coerce_ref(e) })
};
}
macro_rules! forward_token {
// safety: if we're starting from a `ResolvedXY`, then the tree must have a resolver
($e:expr) => {
($e).map(|e| unsafe { ResolvedToken::coerce_ref(e) })
};
}
macro_rules! forward_node {
// safety: if we're starting from a `ResolvedXY`, then the tree must have a resolver
($e:expr) => {
($e).map(|e| unsafe { ResolvedNode::coerce_ref(e) })
};
}
impl<L: Language, D> ResolvedNode<L, D> {
/// If there is a resolver associated with this tree, returns it.
pub fn resolver(&self) -> &StdArc<dyn Resolver> {
self.syntax.resolver().unwrap()
}
/// See [`SyntaxNode::new_root_with_resolver`].
#[inline]
pub fn new_root_with_resolver(green: GreenNode, resolver: impl Resolver + 'static) -> Self {
SyntaxNode::new_root_with_resolver(green, resolver)
}
/// Always returns `Some(self)`.
///
/// This method mostly exists to allow the convenience of being agnostic over [`SyntaxNode`] vs [`ResolvedNode`].
#[inline]
pub fn try_resolved(&self) -> Option<&ResolvedNode<L, D>> {
Some(self)
}
/// Always returns `self`.
///
/// This method mostly exists to allow the convenience of being agnostic over [`SyntaxNode`] vs [`ResolvedNode`].
#[inline]
pub fn resolved(&self) -> &ResolvedNode<L, D> {
self
}
/// The root of the tree this node belongs to.
///
/// If this node is the root, returns `self`.
#[inline]
pub fn root(&self) -> &SyntaxNode<L, D> {
unsafe { Self::coerce_ref(self.syntax.root()) }
}
/// The parent node of this node, except if this node is the root.
#[inline]
pub fn parent(&self) -> Option<&Self> {
forward!(self.syntax.parent())
}
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &Self> {
forward!(self.syntax.ancestors())
}
/// Returns an iterator over all nodes that are children of this node.
///
/// If you want to also consider leafs, see [`children_with_tokens`](ResolvedNode::children_with_tokens).
#[inline]
pub fn children(&self) -> impl Iterator<Item = &Self> {
forward!(self.syntax.children())
}
/// Returns an iterator over child elements of this node, including tokens.
#[inline]
pub fn children_with_tokens(&self) -> impl Iterator<Item = ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.children_with_tokens())
}
/// The first child node of this node, if any.
///
/// If you want to also consider leafs, see [`first_child_or_token`](ResolvedNode::first_child_or_token).
#[inline]
pub fn first_child(&self) -> Option<&ResolvedNode<L, D>> {
forward!(self.syntax.first_child())
}
/// The first child element of this node, if any, including tokens.
#[inline]
pub fn first_child_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.first_child_or_token())
}
/// The last child node of this node, if any.
///
/// If you want to also consider leafs, see [`last_child_or_token`](ResolvedNode::last_child_or_token).
#[inline]
pub fn last_child(&self) -> Option<&ResolvedNode<L, D>> {
forward!(self.syntax.last_child())
}
/// The last child element of this node, if any, including tokens.
#[inline]
pub fn last_child_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.last_child_or_token())
}
/// The first child node of this node starting at the (n + 1)-st, if any.
/// Note that even if this method returns `Some`, the contained node may not actually be the (n +
/// 1)-st child, but the next child from there that is a node.
///
/// If you want to also consider leafs, see [`next_child_or_token_after`](ResolvedNode::next_child_or_token_after).
#[inline]
pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&ResolvedNode<L, D>> {
forward!(self.syntax.next_child_after(n, offset))
}
/// The first child element of this node starting at the (n + 1)-st, if any.
/// If this method returns `Some`, the contained node is the (n + 1)-st child of this node.
#[inline]
pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option<ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.next_child_or_token_after(n, offset))
}
/// The last child node of this node up to the nth, if any.
/// Note that even if this method returns `Some`, the contained node may not actually be the (n -
/// 1)-st child, but the previous child from there that is a node.
///
/// If you want to also consider leafs, see
/// [`prev_child_or_token_before`](ResolvedNode::prev_child_or_token_before).
#[inline]
pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&ResolvedNode<L, D>> {
forward!(self.syntax.prev_child_before(n, offset))
}
/// The last child node of this node up to the nth, if any.
/// If this method returns `Some`, the contained node is the (n - 1)-st child.
#[inline]
pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option<ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.prev_child_or_token_before(n, offset))
}
/// The node to the right of this one, i.e. the next child node (!) of this node's parent after this node.
///
/// If you want to also consider leafs, see [`next_sibling_or_token`](ResolvedNode::next_sibling_or_token).
#[inline]
pub fn next_sibling(&self) -> Option<&ResolvedNode<L, D>> {
forward!(self.syntax.next_sibling())
}
/// The tree element to the right of this one, i.e. the next child of this node's parent after this node.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.next_sibling_or_token())
}
/// The node to the left of this one, i.e. the previous child node (!) of this node's parent before this node.
///
/// If you want to also consider leafs, see [`prev_sibling_or_token`](ResolvedNode::prev_sibling_or_token).
#[inline]
pub fn prev_sibling(&self) -> Option<&ResolvedNode<L, D>> {
forward!(self.syntax.prev_sibling())
}
/// The tree element to the left of this one, i.e. the previous child of this node's parent before this node.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.prev_sibling_or_token())
}
/// Return the leftmost token in the subtree of this node
#[inline]
pub fn first_token(&self) -> Option<&ResolvedToken<L, D>> {
forward_token!(self.syntax.first_token())
}
/// Return the rightmost token in the subtree of this node
#[inline]
pub fn last_token(&self) -> Option<&ResolvedToken<L, D>> {
forward_token!(self.syntax.last_token())
}
/// Returns an iterator over all sibling nodes of this node in the given `direction`, i.e. all of
/// this node's parent's child nodes (!) from this node on to the left or the right. The first
/// item in the iterator will always be this node.
///
/// If you want to also consider leafs, see [`siblings_with_tokens`](ResolvedNode::siblings_with_tokens).
#[inline]
pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &ResolvedNode<L, D>> {
forward!(self.syntax.siblings(direction))
}
/// Returns an iterator over all siblings of this node in the given `direction`, i.e. all of this
/// node's parent's children from this node on to the left or the right.
/// The first item in the iterator will always be this node.
#[inline]
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.siblings_with_tokens(direction))
}
/// Returns an iterator over all nodes (!) in the subtree starting at this node, including this node.
///
/// If you want to also consider leafs, see [`descendants_with_tokens`](ResolvedNode::descendants_with_tokens).
#[inline]
pub fn descendants(&self) -> impl Iterator<Item = &ResolvedNode<L, D>> {
forward!(self.syntax.descendants())
}
/// Returns an iterator over all elements in the subtree starting at this node, including this node.
#[inline]
pub fn descendants_with_tokens(&self) -> impl Iterator<Item = ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.descendants_with_tokens())
}
/// Traverse the subtree rooted at the current node (including the current
/// node) in preorder, excluding tokens.
#[inline(always)]
pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&ResolvedNode<L, D>>> {
self.syntax
.preorder()
.map(|event| event.map(|node| unsafe { Self::coerce_ref(node) }))
}
/// Traverse the subtree rooted at the current node (including the current
/// node) in preorder, including tokens.
#[inline(always)]
pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<ResolvedElementRef<'_, L, D>>> {
self.syntax
.preorder_with_tokens()
.map(|event| event.map(|elem| unsafe { ResolvedElementRef::coerce_ref(elem) }))
}
/// Find a token in the subtree corresponding to this node, which covers the offset.
/// Precondition: offset must be within the node's range.
pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<ResolvedToken<L, D>> {
self.syntax
.token_at_offset(offset)
.map(|token| ResolvedToken { syntax: token })
}
/// Return the deepest node or token in the current subtree that fully
/// contains the range. If the range is empty and is contained in two leaf
/// nodes, either one can be returned. Precondition: range must be contained
/// within the current node.
pub fn covering_element(&self, range: TextRange) -> ResolvedElementRef<'_, L, D> {
unsafe { ResolvedElementRef::coerce_ref(self.syntax.covering_element(range)) }
}
}
impl<L: Language, D> ResolvedToken<L, D> {
/// Always returns `Some(self)`.
///
/// This method mostly exists to allow the convenience of being agnostic over [`SyntaxToken`] vs [`ResolvedToken`].
#[inline]
pub fn try_resolved(&self) -> Option<&ResolvedToken<L, D>> {
Some(self)
}
/// Always returns `self`.
///
/// This method mostly exists to allow the convenience of being agnostic over [`SyntaxToken`] vs [`ResolvedToken`].
#[inline]
pub fn resolved(&self) -> &ResolvedToken<L, D> {
self
}
/// The parent node of this token.
#[inline]
pub fn parent(&self) -> &ResolvedNode<L, D> {
unsafe { ResolvedNode::coerce_ref(self.syntax.parent()) }
}
/// Returns an iterator along the chain of parents of this token.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &ResolvedNode<L, D>> {
forward_node!(self.syntax.ancestors())
}
/// The tree element to the right of this one, i.e. the next child of this token's parent after this token.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.next_sibling_or_token())
}
/// The tree element to the left of this one, i.e. the previous child of this token's parent before this token.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.prev_sibling_or_token())
}
/// Returns an iterator over all siblings of this token in the given `direction`, i.e. all of this
/// token's parent's children from this token on to the left or the right.
/// The first item in the iterator will always be this token.
#[inline]
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = ResolvedElementRef<'_, L, D>> {
forward_as_elem!(self.syntax.siblings_with_tokens(direction))
}
/// Returns the next token in the tree.
/// This is not necessarily a direct sibling of this token, but will always be further right in the tree.
pub fn next_token(&self) -> Option<&ResolvedToken<L, D>> {
forward!(self.syntax.next_token())
}
/// Returns the previous token in the tree.
/// This is not necessarily a direct sibling of this token, but will always be further left in the tree.
pub fn prev_token(&self) -> Option<&ResolvedToken<L, D>> {
forward!(self.syntax.prev_token())
}
}
impl<L: Language, D> ResolvedElement<L, D> {
/// The range this element covers in the source text, in bytes.
#[inline]
pub fn text_range(&self) -> TextRange {
match self {
NodeOrToken::Node(it) => it.text_range(),
NodeOrToken::Token(it) => it.text_range(),
}
}
/// The internal representation of the kind of this element.
#[inline]
pub fn syntax_kind(&self) -> SyntaxKind {
match self {
NodeOrToken::Node(it) => it.syntax_kind(),
NodeOrToken::Token(it) => it.syntax_kind(),
}
}
/// The kind of this element in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
match self {
NodeOrToken::Node(it) => it.kind(),
NodeOrToken::Token(it) => it.kind(),
}
}
/// The parent node of this element, except if this element is the root.
#[inline]
pub fn parent(&self) -> Option<&ResolvedNode<L, D>> {
match self {
NodeOrToken::Node(it) => it.parent(),
NodeOrToken::Token(it) => Some(it.parent()),
}
}
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &ResolvedNode<L, D>> {
match self {
NodeOrToken::Node(it) => it.ancestors(),
NodeOrToken::Token(it) => it.parent().ancestors(),
}
}
/// Return the leftmost token in the subtree of this element.
#[inline]
pub fn first_token(&self) -> Option<&ResolvedToken<L, D>> {
match self {
NodeOrToken::Node(it) => it.first_token(),
NodeOrToken::Token(it) => Some(it),
}
}
/// Return the rightmost token in the subtree of this element.
#[inline]
pub fn last_token(&self) -> Option<&ResolvedToken<L, D>> {
match self {
NodeOrToken::Node(it) => it.last_token(),
NodeOrToken::Token(it) => Some(it),
}
}
/// The tree element to the right of this one, i.e. the next child of this element's parent after this element.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
match self {
NodeOrToken::Node(it) => it.next_sibling_or_token(),
NodeOrToken::Token(it) => it.next_sibling_or_token(),
}
}
/// The tree element to the left of this one, i.e. the previous child of this element's parent before this element.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
match self {
NodeOrToken::Node(it) => it.prev_sibling_or_token(),
NodeOrToken::Token(it) => it.prev_sibling_or_token(),
}
}
}
impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
/// The range this element covers in the source text, in bytes.
#[inline]
pub fn text_range(&self) -> TextRange {
match self {
NodeOrToken::Node(it) => it.text_range(),
NodeOrToken::Token(it) => it.text_range(),
}
}
/// The internal representation of the kind of this element.
#[inline]
pub fn syntax_kind(&self) -> SyntaxKind {
match self {
NodeOrToken::Node(it) => it.syntax_kind(),
NodeOrToken::Token(it) => it.syntax_kind(),
}
}
/// The kind of this element in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
match self {
NodeOrToken::Node(it) => it.kind(),
NodeOrToken::Token(it) => it.kind(),
}
}
/// The parent node of this element, except if this element is the root.
#[inline]
pub fn parent(&self) -> Option<&'a ResolvedNode<L, D>> {
match self {
NodeOrToken::Node(it) => it.parent(),
NodeOrToken::Token(it) => Some(it.parent()),
}
}
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &'a ResolvedNode<L, D>> {
match self {
NodeOrToken::Node(it) => it.ancestors(),
NodeOrToken::Token(it) => it.parent().ancestors(),
}
}
/// Return the leftmost token in the subtree of this element.
#[inline]
pub fn first_token(&self) -> Option<&'a ResolvedToken<L, D>> {
match self {
NodeOrToken::Node(it) => it.first_token(),
NodeOrToken::Token(it) => Some(it),
}
}
/// Return the rightmost token in the subtree of this element.
#[inline]
pub fn last_token(&self) -> Option<&'a ResolvedToken<L, D>> {
match self {
NodeOrToken::Node(it) => it.last_token(),
NodeOrToken::Token(it) => Some(it),
}
}
/// The tree element to the right of this one, i.e. the next child of this element's parent after this element.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'a, L, D>> {
match self {
NodeOrToken::Node(it) => it.next_sibling_or_token(),
NodeOrToken::Token(it) => it.next_sibling_or_token(),
}
}
/// The tree element to the left of this one, i.e. the previous child of this element's parent before this element.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'a, L, D>> {
match self {
NodeOrToken::Node(it) => it.prev_sibling_or_token(),
NodeOrToken::Token(it) => it.prev_sibling_or_token(),
}
}
}
#[test]
fn assert_send_sync() {
use crate::SyntaxKind;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
enum L {}
#[derive(Debug)]
enum Kind {
Var,
}
impl Language for L {
type Kind = Kind;
fn kind_from_raw(_: SyntaxKind) -> Self::Kind {
Kind::Var
}
fn kind_to_raw(_: Self::Kind) -> SyntaxKind {
SyntaxKind(0)
}
}
fn f<T: Send + Sync>() {}
f::<ResolvedNode<L>>();
f::<ResolvedToken<L>>();
f::<ResolvedElement<L>>();
f::<ResolvedElementRef<'static, L>>();
}
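
The `Resolved*` wrappers above are zero-cost views: `coerce_ref` is only sound because the wrappers are `#[repr(transparent)]` over their `Syntax*` counterparts, so a reference to the inner type may be reinterpreted as a reference to the wrapper. A self-contained illustration of that pattern with a toy type (not part of this commit):

#[repr(transparent)]
struct Wrapper {
    inner: String,
}

impl Wrapper {
    // Sound because `#[repr(transparent)]` guarantees `Wrapper` has exactly the
    // same layout as `String`; this mirrors `ResolvedNode::coerce_ref` above.
    fn coerce_ref(inner: &String) -> &Wrapper {
        unsafe { &*(inner as *const String as *const Wrapper) }
    }
}

fn main() {
    let s = String::from("tree");
    let w = Wrapper::coerce_ref(&s);
    assert_eq!(w.inner, "tree");
}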


@@ -35,9 +35,10 @@ use crate::{interning::Resolver, Language, SyntaxNode, SyntaxToken, TextRange, T
 /// # cstree::SyntaxKind(kind as u16)
 /// # }
 /// # }
-/// # type SyntaxNode = cstree::SyntaxNode<Lang, (), lasso::RodeoResolver<lasso::Spur>>;
+/// # type SyntaxNode = cstree::SyntaxNode<Lang, ()>;
+/// # type ResolvedNode = cstree::ResolvedNode<Lang, ()>;
 /// #
-/// # fn parse_float_literal(s: &str) -> SyntaxNode {
+/// # fn parse_float_literal(s: &str) -> ResolvedNode {
 /// # const LITERAL: cstree::SyntaxKind = cstree::SyntaxKind(0);
 /// # let mut builder = GreenNodeBuilder::new();
 /// # builder.start_node(LITERAL);
@@ -57,14 +58,14 @@ use crate::{interning::Resolver, Language, SyntaxNode, SyntaxToken, TextRange, T
 /// assert_eq!(sub, "748");
 /// ```
 #[derive(Clone)]
-pub struct SyntaxText<'n, 'i, I: ?Sized, L: Language, D: 'static = (), R: 'static = ()> {
-node: &'n SyntaxNode<L, D, R>,
+pub struct SyntaxText<'n, 'i, I: ?Sized, L: Language, D: 'static = ()> {
+node: &'n SyntaxNode<L, D>,
 range: TextRange,
 resolver: &'i I,
 }
-impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D, R> {
-pub(crate) fn new(node: &'n SyntaxNode<L, D, R>, resolver: &'i I) -> Self {
+impl<'n, 'i, I: Resolver + ?Sized, L: Language, D> SyntaxText<'n, 'i, I, L, D> {
+pub(crate) fn new(node: &'n SyntaxNode<L, D>, resolver: &'i I) -> Self {
 let range = node.text_range();
 SyntaxText { node, range, resolver }
 }
@@ -190,7 +191,7 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D
 }
 }
-fn tokens_with_ranges(&self) -> impl Iterator<Item = (&SyntaxToken<L, D, R>, TextRange)> {
+fn tokens_with_ranges(&self) -> impl Iterator<Item = (&SyntaxToken<L, D>, TextRange)> {
 let text_range = self.range;
 self.node
 .descendants_with_tokens()
@@ -210,25 +211,25 @@ fn found<T>(res: Result<(), T>) -> Option<T> {
 }
 }
-impl<I: Resolver + ?Sized, L: Language, D, R> fmt::Debug for SyntaxText<'_, '_, I, L, D, R> {
+impl<I: Resolver + ?Sized, L: Language, D> fmt::Debug for SyntaxText<'_, '_, I, L, D> {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 fmt::Debug::fmt(&self.to_string(), f)
 }
 }
-impl<I: Resolver + ?Sized, L: Language, D, R> fmt::Display for SyntaxText<'_, '_, I, L, D, R> {
+impl<I: Resolver + ?Sized, L: Language, D> fmt::Display for SyntaxText<'_, '_, I, L, D> {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f))
 }
 }
-impl<I: Resolver + ?Sized, L: Language, D, R> From<SyntaxText<'_, '_, I, L, D, R>> for String {
-fn from(text: SyntaxText<'_, '_, I, L, D, R>) -> String {
+impl<I: Resolver + ?Sized, L: Language, D> From<SyntaxText<'_, '_, I, L, D>> for String {
+fn from(text: SyntaxText<'_, '_, I, L, D>) -> String {
 text.to_string()
 }
 }
-impl<I: Resolver + ?Sized, L: Language, D, R> PartialEq<str> for SyntaxText<'_, '_, I, L, D, R> {
+impl<I: Resolver + ?Sized, L: Language, D> PartialEq<str> for SyntaxText<'_, '_, I, L, D> {
 fn eq(&self, mut rhs: &str) -> bool {
 self.try_for_each_chunk(|chunk| {
 if !rhs.starts_with(chunk) {
@@ -242,33 +243,33 @@ impl<I: Resolver + ?Sized, L: Language, D, R> PartialEq<str> for SyntaxText<'_,
 }
 }
-impl<I: Resolver + ?Sized, L: Language, D, R> PartialEq<SyntaxText<'_, '_, I, L, D, R>> for str {
-fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D, R>) -> bool {
+impl<I: Resolver + ?Sized, L: Language, D> PartialEq<SyntaxText<'_, '_, I, L, D>> for str {
+fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D>) -> bool {
 rhs == self
 }
 }
-impl<I: Resolver + ?Sized, L: Language, D, R> PartialEq<&'_ str> for SyntaxText<'_, '_, I, L, D, R> {
+impl<I: Resolver + ?Sized, L: Language, D> PartialEq<&'_ str> for SyntaxText<'_, '_, I, L, D> {
 fn eq(&self, rhs: &&str) -> bool {
 self == *rhs
 }
 }
-impl<I: Resolver + ?Sized, L: Language, D, R> PartialEq<SyntaxText<'_, '_, I, L, D, R>> for &'_ str {
-fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D, R>) -> bool {
+impl<I: Resolver + ?Sized, L: Language, D> PartialEq<SyntaxText<'_, '_, I, L, D>> for &'_ str {
+fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D>) -> bool {
 rhs == self
 }
 }
-impl<'n1, 'i1, 'n2, 'i2, I1, I2, L1, L2, D1, D2, R1, R2> PartialEq<SyntaxText<'n2, 'i2, I2, L2, D2, R2>>
-for SyntaxText<'n1, 'i1, I1, L1, D1, R1>
+impl<'n1, 'i1, 'n2, 'i2, I1, I2, L1, L2, D1, D2> PartialEq<SyntaxText<'n2, 'i2, I2, L2, D2>>
+for SyntaxText<'n1, 'i1, I1, L1, D1>
 where
 L1: Language,
 L2: Language,
 I1: Resolver + ?Sized,
 I2: Resolver + ?Sized,
 {
-fn eq(&self, other: &SyntaxText<'_, '_, I2, L2, D2, R2>) -> bool {
+fn eq(&self, other: &SyntaxText<'_, '_, I2, L2, D2>) -> bool {
 if self.range.len() != other.range.len() {
 return false;
 }
@@ -280,21 +281,19 @@ where
 }
 }
-fn zip_texts<'it1, 'it2, It1, It2, I1, I2, L1, L2, D1, D2, R1, R2>(
+fn zip_texts<'it1, 'it2, It1, It2, I1, I2, L1, L2, D1, D2>(
 xs: &mut It1,
 ys: &mut It2,
 resolver_x: &I1,
 resolver_y: &I2,
 ) -> Option<()>
 where
-It1: Iterator<Item = (&'it1 SyntaxToken<L1, D1, R1>, TextRange)>,
-It2: Iterator<Item = (&'it2 SyntaxToken<L2, D2, R2>, TextRange)>,
+It1: Iterator<Item = (&'it1 SyntaxToken<L1, D1>, TextRange)>,
+It2: Iterator<Item = (&'it2 SyntaxToken<L2, D2>, TextRange)>,
 I1: Resolver + ?Sized,
 I2: Resolver + ?Sized,
 D1: 'static,
 D2: 'static,
-R1: 'static,
-R2: 'static,
 L1: Language + 'it1,
 L2: Language + 'it2,
 {

src/syntax/token.rs (new file)

@@ -0,0 +1,225 @@
use std::{
fmt::Write,
hash::{Hash, Hasher},
iter,
};
use lasso::Resolver;
use text_size::{TextRange, TextSize};
use super::*;
use crate::{Direction, GreenNode, GreenToken, Language, SyntaxKind};
/// Syntax tree token.
pub struct SyntaxToken<L: Language, D: 'static = ()> {
parent: SyntaxNode<L, D>,
index: u32,
offset: TextSize,
}
impl<L: Language, D> Clone for SyntaxToken<L, D> {
fn clone(&self) -> Self {
Self {
parent: self.parent.clone(),
index: self.index,
offset: self.offset,
}
}
}
impl<L: Language, D> Hash for SyntaxToken<L, D> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.parent.hash(state);
self.index.hash(state);
self.offset.hash(state);
}
}
impl<L: Language, D> PartialEq for SyntaxToken<L, D> {
fn eq(&self, other: &SyntaxToken<L, D>) -> bool {
self.parent == other.parent && self.index == other.index && self.offset == other.offset
}
}
impl<L: Language, D> Eq for SyntaxToken<L, D> {}
impl<L: Language, D> SyntaxToken<L, D> {
#[allow(missing_docs)]
pub fn debug<R>(&self, resolver: &R) -> String
where
R: Resolver + ?Sized,
{
let mut res = String::new();
write!(res, "{:?}@{:?}", self.kind(), self.text_range()).unwrap();
if self.resolve_text(resolver).len() < 25 {
write!(res, " {:?}", self.resolve_text(resolver)).unwrap();
return res;
}
let text = self.resolve_text(resolver);
for idx in 21..25 {
if text.is_char_boundary(idx) {
let text = format!("{} ...", &text[..idx]);
write!(res, " {:?}", text).unwrap();
return res;
}
}
unreachable!()
}
#[allow(missing_docs)]
pub fn display<R>(&self, resolver: &R) -> String
where
R: Resolver + ?Sized,
{
self.resolve_text(resolver).to_string()
}
/// Turns this token into a [`ResolvedToken`], but only if there is a resolver associated with this tree.
#[inline]
pub fn try_resolved(&self) -> Option<&ResolvedToken<L, D>> {
// safety: we only coerce if `resolver` exists
self.parent()
.resolver()
.map(|_| unsafe { ResolvedToken::coerce_ref(self) })
}
/// Turns this token into a [`ResolvedToken`].
/// # Panics
/// If there is no resolver associated with this tree.
#[inline]
pub fn resolved(&self) -> &ResolvedToken<L, D> {
self.try_resolved().expect("tried to resolve a node without resolver")
}
}
impl<L: Language, D> SyntaxToken<L, D> {
pub(super) fn new(parent: &SyntaxNode<L, D>, index: u32, offset: TextSize) -> SyntaxToken<L, D> {
Self {
parent: parent.clone_uncounted(),
index,
offset,
}
}
/// Returns a green tree, equal to the green tree this token
/// belongs two, except with this token substitute. The complexity
/// of operation is proportional to the depth of the tree
pub fn replace_with(&self, replacement: GreenToken) -> GreenNode {
assert_eq!(self.syntax_kind(), replacement.kind());
let mut replacement = Some(replacement);
let parent = self.parent();
let me = self.index;
let children = parent.green().children().enumerate().map(|(i, child)| {
if i as u32 == me {
replacement.take().unwrap().into()
} else {
child.cloned()
}
});
let new_parent = GreenNode::new(parent.syntax_kind(), children);
parent.replace_with(new_parent)
}
/// The internal representation of the kind of this token.
#[inline]
pub fn syntax_kind(&self) -> SyntaxKind {
self.green().kind()
}
/// The kind of this token in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
L::kind_from_raw(self.syntax_kind())
}
/// The range this token covers in the source text, in bytes.
#[inline]
pub fn text_range(&self) -> TextRange {
TextRange::at(self.offset, self.green().text_len())
}
/// Uses the provided resolver to return the source text of this token.
#[inline]
pub fn resolve_text<'i, I>(&self, resolver: &'i I) -> &'i str
where
I: Resolver + ?Sized,
{
self.green().text(resolver)
}
/// Returns the underlying green tree token of this token.
pub fn green(&self) -> &GreenToken {
self.parent
.green()
.children()
.nth(self.index as usize)
.unwrap()
.as_token()
.unwrap()
}
/// The parent node of this token.
#[inline]
pub fn parent(&self) -> &SyntaxNode<L, D> {
&self.parent
}
/// Returns an iterator along the chain of parents of this token.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
self.parent().ancestors()
}
/// The tree element to the right of this one, i.e. the next child of this token's parent after this token.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
self.parent()
.next_child_or_token_after(self.index as usize, self.text_range().end())
}
/// The tree element to the left of this one, i.e. the previous child of this token's parent before this token.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
self.parent()
.prev_child_or_token_before(self.index as usize, self.text_range().start())
}
/// Returns an iterator over all siblings of this token in the given `direction`, i.e. all of this
/// token's parent's children from this token on to the left or the right.
/// The first item in the iterator will always be this token.
#[inline]
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = SyntaxElementRef<'_, L, D>> {
let me: SyntaxElementRef<'_, L, D> = self.into();
iter::successors(Some(me), move |el| match direction {
Direction::Next => el.next_sibling_or_token(),
Direction::Prev => el.prev_sibling_or_token(),
})
}
/// Returns the next token in the tree.
/// This is not necessarily a direct sibling of this token, but will always be further right in the tree.
pub fn next_token(&self) -> Option<&SyntaxToken<L, D>> {
match self.next_sibling_or_token() {
Some(element) => element.first_token(),
None => self
.parent()
.ancestors()
.find_map(|it| it.next_sibling_or_token())
.and_then(|element| element.first_token()),
}
}
/// Returns the previous token in the tree.
/// This is not necessarily a direct sibling of this token, but will always be further left in the tree.
pub fn prev_token(&self) -> Option<&SyntaxToken<L, D>> {
match self.prev_sibling_or_token() {
Some(element) => element.last_token(),
None => self
.parent()
.ancestors()
.find_map(|it| it.prev_sibling_or_token())
.and_then(|element| element.last_token()),
}
}
}
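// Hedged usage sketch (illustrative helper, not part of this commit): walking to the
// right from a starting token by repeatedly calling `next_token`, collecting the
// covered ranges:
//
//     fn ranges_from<L: Language, D>(start: &SyntaxToken<L, D>) -> Vec<TextRange> {
//         let mut ranges = vec![start.text_range()];
//         let mut current = start;
//         while let Some(next) = current.next_token() {
//             ranges.push(next.text_range());
//             current = next;
//         }
//         ranges
//     }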

View file

@@ -1,8 +1,4 @@
mod common; use super::*;
use common::{
build_recursive, build_tree_with_cache, Element, SyntaxElement, SyntaxElementRef, SyntaxNode, SyntaxToken,
};
use cstree::{GreenNodeBuilder, NodeCache, SyntaxKind, TextRange}; use cstree::{GreenNodeBuilder, NodeCache, SyntaxKind, TextRange};
use lasso::{Resolver, Rodeo}; use lasso::{Resolver, Rodeo};
@@ -105,7 +101,7 @@ fn inline_resolver() {
let mut cache = NodeCache::with_interner(&mut interner); let mut cache = NodeCache::with_interner(&mut interner);
let tree = two_level_tree(); let tree = two_level_tree();
let tree = build_tree_with_cache(&tree, &mut cache); let tree = build_tree_with_cache(&tree, &mut cache);
let tree: SyntaxNode<(), Rodeo> = SyntaxNode::new_root_with_resolver(tree, interner); let tree: ResolvedNode = SyntaxNode::new_root_with_resolver(tree, interner);
{ {
let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap(); let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap();
let leaf1_0 = leaf1_0.into_token().unwrap(); let leaf1_0 = leaf1_0.into_token().unwrap();
@@ -137,10 +133,10 @@ fn assert_debug_display() {
use std::fmt; use std::fmt;
fn f<T: fmt::Debug + fmt::Display>() {} fn f<T: fmt::Debug + fmt::Display>() {}
f::<SyntaxNode<(), lasso::Rodeo>>(); f::<ResolvedNode>();
f::<SyntaxToken<(), lasso::Rodeo>>(); f::<ResolvedToken>();
f::<SyntaxElement<(), lasso::Rodeo>>(); f::<ResolvedElement>();
f::<SyntaxElementRef<'static, (), lasso::Rodeo>>(); f::<ResolvedElementRef<'static>>();
f::<cstree::NodeOrToken<String, u128>>(); f::<cstree::NodeOrToken<String, u128>>();
fn dbg<T: fmt::Debug>() {} fn dbg<T: fmt::Debug>() {}

View file

@@ -1,10 +1,20 @@
mod basic;
mod sendsync;
#[cfg(feature = "serde1")]
mod serde;
use cstree::{GreenNode, GreenNodeBuilder, Language, NodeCache, SyntaxKind}; use cstree::{GreenNode, GreenNodeBuilder, Language, NodeCache, SyntaxKind};
use lasso::Interner; use lasso::Interner;
pub type SyntaxNode<D = (), R = ()> = cstree::SyntaxNode<TestLang, D, R>; pub type SyntaxNode<D = ()> = cstree::SyntaxNode<TestLang, D>;
pub type SyntaxToken<D = (), R = ()> = cstree::SyntaxToken<TestLang, D, R>; pub type SyntaxToken<D = ()> = cstree::SyntaxToken<TestLang, D>;
pub type SyntaxElement<D = (), R = ()> = cstree::SyntaxElement<TestLang, D, R>; pub type SyntaxElement<D = ()> = cstree::SyntaxElement<TestLang, D>;
pub type SyntaxElementRef<'a, D = (), R = ()> = cstree::SyntaxElementRef<'a, TestLang, D, R>; pub type SyntaxElementRef<'a, D = ()> = cstree::SyntaxElementRef<'a, TestLang, D>;
pub type ResolvedNode<D = ()> = cstree::ResolvedNode<TestLang, D>;
pub type ResolvedToken<D = ()> = cstree::ResolvedToken<TestLang, D>;
pub type ResolvedElement<D = ()> = cstree::ResolvedElement<TestLang, D>;
pub type ResolvedElementRef<'a, D = ()> = cstree::ResolvedElementRef<'a, TestLang, D>;
#[derive(Debug)] #[derive(Debug)]
pub enum Element<'s> { pub enum Element<'s> {

View file

@@ -1,18 +1,12 @@
#![allow(clippy::redundant_clone)] #![allow(clippy::redundant_clone)]
#[allow(unused)]
mod common;
use crossbeam_utils::thread::scope; use crossbeam_utils::thread::scope;
use std::{thread, time::Duration}; use std::{thread, time::Duration};
use common::{build_recursive, Element, SyntaxNode}; use super::{build_recursive, Element, ResolvedNode, SyntaxNode};
use cstree::{ use cstree::{interning::IntoResolver, GreenNodeBuilder};
interning::{IntoResolver, Resolver},
GreenNodeBuilder,
};
fn build_tree<D>(root: &Element<'_>) -> SyntaxNode<D, impl Resolver> { fn build_tree<D>(root: &Element<'_>) -> ResolvedNode<D> {
let mut builder = GreenNodeBuilder::new(); let mut builder = GreenNodeBuilder::new();
build_recursive(root, &mut builder, 0); build_recursive(root, &mut builder, 0);
let (node, interner) = builder.finish(); let (node, interner) = builder.finish();
@@ -43,7 +37,7 @@ fn send() {
.next() .next()
.unwrap(); .unwrap();
let leaf1_0 = leaf1_0.into_token().unwrap(); let leaf1_0 = leaf1_0.into_token().unwrap();
leaf1_0.resolve_text(thread_tree.resolver().as_ref()).to_string() leaf1_0.text().to_string()
}); });
assert_eq!(thread.join().unwrap(), "1.0"); assert_eq!(thread.join().unwrap(), "1.0");
} }

View file

@ -1,18 +1,11 @@
#![cfg(feature = "serde1")] use crate::{build_recursive, build_tree_with_cache, ResolvedNode};
#[allow(unused)] use super::{Element, SyntaxNode};
mod common; use cstree::{interning::IntoResolver, GreenNodeBuilder, NodeCache, NodeOrToken};
use common::{Element, SyntaxNode};
use cstree::{
interning::{IntoResolver, Resolver},
GreenNodeBuilder, NodeCache, NodeOrToken,
};
use serde_test::Token; use serde_test::Token;
use std::fmt; use std::fmt;
type Rodeo = lasso::Rodeo<lasso::Spur, fxhash::FxBuildHasher>; type Rodeo = lasso::Rodeo<lasso::Spur, fxhash::FxBuildHasher>;
type RodeoResolver = lasso::RodeoResolver<lasso::Spur>;
/// Macro for generating a list of `serde_test::Token`s using a simpler DSL. /// Macro for generating a list of `serde_test::Token`s using a simpler DSL.
macro_rules! event_tokens { macro_rules! event_tokens {
@@ -142,28 +135,28 @@ struct NonSerializable;
/// Serializable SyntaxNode that doesn't have a identity `PartialEq` implementation, /// Serializable SyntaxNode that doesn't have a identity `PartialEq` implementation,
/// but checks if both trees have equal nodes and tokens. /// but checks if both trees have equal nodes and tokens.
struct TestNode<R: 'static> { struct TestNode {
node: SyntaxNode<String, R>, node: ResolvedNode<String>,
with_data: bool, with_data: bool,
} }
impl<R> TestNode<R> { impl TestNode {
fn new(node: SyntaxNode<String, R>) -> Self { fn new(node: ResolvedNode<String>) -> Self {
Self { node, with_data: false } Self { node, with_data: false }
} }
fn with_data(node: SyntaxNode<String, R>) -> Self { fn with_data(node: ResolvedNode<String>) -> Self {
Self { node, with_data: true } Self { node, with_data: true }
} }
} }
impl<R: Resolver> fmt::Debug for TestNode<R> { impl fmt::Debug for TestNode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.node, f) fmt::Debug::fmt(&self.node, f)
} }
} }
impl<R: Resolver> serde::Serialize for TestNode<R> { impl serde::Serialize for TestNode {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: serde::Serializer, S: serde::Serializer,
@@ -176,20 +169,20 @@ impl<R: Resolver> serde::Serialize for TestNode<R> {
} }
} }
impl<'de> serde::Deserialize<'de> for TestNode<RodeoResolver> { impl<'de> serde::Deserialize<'de> for TestNode {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
{ {
Ok(Self { Ok(Self {
node: SyntaxNode::deserialize(deserializer)?, node: ResolvedNode::deserialize(deserializer)?,
with_data: true, with_data: true,
}) })
} }
} }
impl<R1: Resolver, R2: Resolver> PartialEq<TestNode<R2>> for TestNode<R1> { impl PartialEq for TestNode {
fn eq(&self, other: &TestNode<R2>) -> bool { fn eq(&self, other: &TestNode) -> bool {
self.node.kind() == other.node.kind() self.node.kind() == other.node.kind()
&& self.node.get_data() == other.node.get_data() && self.node.get_data() == other.node.get_data()
&& self.node.text_range() == other.node.text_range() && self.node.text_range() == other.node.text_range()
@@ -229,14 +222,14 @@ fn three_level_tree() -> Element<'static> {
]) ])
} }
fn build_tree(root: Element<'_>) -> SyntaxNode<String, RodeoResolver> { fn build_tree(root: Element<'_>) -> ResolvedNode<String> {
let mut builder = GreenNodeBuilder::new(); let mut builder = GreenNodeBuilder::new();
common::build_recursive(&root, &mut builder, 0); build_recursive(&root, &mut builder, 0);
let (node, interner) = builder.finish(); let (node, interner) = builder.finish();
SyntaxNode::new_root_with_resolver(node, interner.unwrap().into_resolver()) SyntaxNode::new_root_with_resolver(node, interner.unwrap().into_resolver())
} }
fn attach_data<R>(node: &SyntaxNode<String, R>) { fn attach_data(node: &SyntaxNode<String>) {
node.descendants().enumerate().for_each(|(idx, node)| { node.descendants().enumerate().for_each(|(idx, node)| {
node.set_data(format!("{}", idx + 1)); node.set_data(format!("{}", idx + 1));
}); });
@@ -248,12 +241,12 @@ fn serialize_tree_with_data_with_resolver() {
let mut cache = NodeCache::with_interner(&mut interner); let mut cache = NodeCache::with_interner(&mut interner);
let root = three_level_tree(); let root = three_level_tree();
let root = common::build_tree_with_cache(&root, &mut cache); let root = build_tree_with_cache(&root, &mut cache);
let tree = SyntaxNode::<String, ()>::new_root(root.clone()); let tree = SyntaxNode::<String>::new_root(root.clone());
attach_data(&tree); attach_data(&tree);
let serialized = serde_json::to_string(&tree.as_serialize_with_data_with_resolver(&interner)).unwrap(); let serialized = serde_json::to_string(&tree.as_serialize_with_data_with_resolver(&interner)).unwrap();
let deserialized: TestNode<_> = serde_json::from_str(&serialized).unwrap(); let deserialized: TestNode = serde_json::from_str(&serialized).unwrap();
let expected = SyntaxNode::new_root_with_resolver(root, interner); let expected = SyntaxNode::new_root_with_resolver(root, interner);
attach_data(&expected); attach_data(&expected);
@@ -266,11 +259,11 @@ fn serialize_tree_with_resolver() {
let mut cache = NodeCache::with_interner(&mut interner); let mut cache = NodeCache::with_interner(&mut interner);
let root = three_level_tree(); let root = three_level_tree();
let root = common::build_tree_with_cache(&root, &mut cache); let root = build_tree_with_cache(&root, &mut cache);
let tree = SyntaxNode::<NonSerializable>::new_root(root.clone()); let tree = SyntaxNode::<NonSerializable>::new_root(root.clone());
let serialized = serde_json::to_string(&tree.as_serialize_with_resolver(&interner)).unwrap(); let serialized = serde_json::to_string(&tree.as_serialize_with_resolver(&interner)).unwrap();
let deserialized: TestNode<_> = serde_json::from_str(&serialized).unwrap(); let deserialized: TestNode = serde_json::from_str(&serialized).unwrap();
let expected = SyntaxNode::new_root_with_resolver(root, interner); let expected = SyntaxNode::new_root_with_resolver(root, interner);
assert_eq!(TestNode::new(expected), deserialized); assert_eq!(TestNode::new(expected), deserialized);