Merge pull request #9 from domenicquirl/inline-resolver

Commit: 91b68c9a23
7 changed files with 367 additions and 208 deletions
@@ -13,10 +13,7 @@
 //! - "+" Token(Add)
 //! - "4" Token(Number)

-use cstree::{
-    interning::{Reader, Resolver},
-    GreenNodeBuilder, NodeOrToken,
-};
+use cstree::{interning::Resolver, GreenNodeBuilder, NodeOrToken};
 use std::iter::Peekable;

 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]

@@ -66,7 +63,7 @@ type SyntaxElement = cstree::NodeOrToken<SyntaxNode, SyntaxToken>;
 type SyntaxElementRef<'a> = cstree::NodeOrToken<&'a SyntaxNode, &'a SyntaxToken>;

 struct Parser<'input, I: Iterator<Item = (SyntaxKind, &'input str)>> {
-    builder: GreenNodeBuilder<'static>,
+    builder: GreenNodeBuilder<'static, 'static>,
     iter: Peekable<I>,
 }
 impl<'input, I: Iterator<Item = (SyntaxKind, &'input str)>> Parser<'input, I> {

@@ -134,7 +131,7 @@ fn print(indent: usize, element: SyntaxElementRef<'_>, resolver: &impl Resolver)
             }
         }

-        NodeOrToken::Token(token) => println!("- {:?} {:?}", token.text(resolver), kind),
+        NodeOrToken::Token(token) => println!("- {:?} {:?}", token.resolve_text(resolver), kind),
     }
 }

@@ -62,10 +62,7 @@ impl cstree::Language for Lang {

 /// GreenNode is an immutable tree, which is cheap to change,
 /// but doesn't contain offsets and parent pointers.
-use cstree::{
-    interning::{Reader, Resolver},
-    GreenNode,
-};
+use cstree::{interning::Resolver, GreenNode};

 /// You can construct GreenNodes by hand, but a builder
 /// is helpful for top-down parsers: it maintains a stack

@@ -91,7 +88,7 @@ fn parse(text: &str) -> Parse<impl Resolver> {
     /// in *reverse* order.
     tokens: Vec<(SyntaxKind, &'input str)>,
     /// the in-progress tree.
-    builder: GreenNodeBuilder<'static>,
+    builder: GreenNodeBuilder<'static, 'static>,
     /// the list of syntax errors we've accumulated
     /// so far.
     errors: Vec<String>,
@@ -19,29 +19,42 @@ use super::{node::GreenNodeHead, token::GreenTokenData};
 const CHILDREN_CACHE_THRESHOLD: usize = 3;

 #[derive(Debug)]
-pub struct NodeCache {
+pub struct NodeCache<'i, I = Rodeo<Spur, FxBuildHasher>> {
     nodes: FxHashMap<GreenNodeHead, GreenNode>,
     tokens: FxHashMap<GreenTokenData, GreenToken>,
-    interner: Rodeo<Spur, FxBuildHasher>,
+    interner: MaybeOwned<'i, I>,
 }

-impl NodeCache {
+impl NodeCache<'static, Rodeo<Spur, FxBuildHasher>> {
     pub fn new() -> Self {
         Self {
             nodes: FxHashMap::default(),
             tokens: FxHashMap::default(),
-            interner: Rodeo::with_capacity_and_hasher(
+            interner: MaybeOwned::Owned(Rodeo::with_capacity_and_hasher(
                 // capacity values suggested by author of `lasso`
                 Capacity::new(512, unsafe { NonZeroUsize::new_unchecked(4096) }),
                 FxBuildHasher::default(),
-            ),
+            )),
         }
     }
+}
+
+impl<'i, I> NodeCache<'i, I>
+where
+    I: Interner,
+{
+    pub fn with_interner(interner: &'i mut I) -> Self {
+        Self {
+            nodes: FxHashMap::default(),
+            tokens: FxHashMap::default(),
+            interner: MaybeOwned::Borrowed(interner),
+        }
+    }

-    fn node<I>(&mut self, kind: SyntaxKind, children: I) -> GreenNode
+    fn node<It>(&mut self, kind: SyntaxKind, children: It) -> GreenNode
     where
-        I: IntoIterator<Item = GreenElement>,
-        I::IntoIter: ExactSizeIterator,
+        It: IntoIterator<Item = GreenElement>,
+        It::IntoIter: ExactSizeIterator,
     {
         let children = children.into_iter();

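For orientation, a minimal usage sketch of the two `NodeCache` constructors after this change (not part of the diff). It assumes `lasso::Rodeo` as the externally owned interner, either as a direct dependency or through cstree's `interning` re-exports; variable names are illustrative:

    use cstree::{GreenNodeBuilder, NodeCache};
    use lasso::Rodeo;

    fn caches_with_and_without_an_external_interner() {
        // Fully owned: `new` creates both the cache and its interner internally.
        let mut owned_cache = NodeCache::new();
        let _builder = GreenNodeBuilder::with_cache(&mut owned_cache);

        // Borrowed: the caller keeps the interner and can keep using it after
        // the cache (and any builders on top of it) are gone.
        let mut interner = Rodeo::new();
        let mut shared_cache = NodeCache::with_interner(&mut interner);
        let _builder: GreenNodeBuilder<'_, '_, Rodeo> = GreenNodeBuilder::with_cache(&mut shared_cache);
    }
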
@@ -61,10 +74,10 @@ impl NodeCache {

     /// Creates a [`GreenNode`] by looking inside the cache or inserting
     /// a new node into the cache if it's a cache miss.
-    fn get_cached_node<I>(&mut self, kind: SyntaxKind, children: I) -> GreenNode
+    fn get_cached_node<It>(&mut self, kind: SyntaxKind, children: It) -> GreenNode
     where
-        I: IntoIterator<Item = GreenElement>,
-        I::IntoIter: ExactSizeIterator,
+        It: IntoIterator<Item = GreenElement>,
+        It::IntoIter: ExactSizeIterator,
     {
         #[derive(Clone)]
         struct ChildrenIter {
@@ -132,6 +145,15 @@ enum MaybeOwned<'a, T> {
     Borrowed(&'a mut T),
 }

+impl<T> MaybeOwned<'_, T> {
+    fn as_owned(self) -> Option<T> {
+        match self {
+            MaybeOwned::Owned(owned) => Some(owned),
+            MaybeOwned::Borrowed(_) => None,
+        }
+    }
+}
+
 impl<T> std::ops::Deref for MaybeOwned<'_, T> {
     type Target = T;

@@ -164,26 +186,31 @@ pub struct Checkpoint(usize);

 /// A builder for a green tree.
 #[derive(Debug)]
-pub struct GreenNodeBuilder<'cache> {
-    cache: MaybeOwned<'cache, NodeCache>,
+pub struct GreenNodeBuilder<'cache, 'interner, I = Rodeo<Spur, FxBuildHasher>> {
+    cache: MaybeOwned<'cache, NodeCache<'interner, I>>,
     parents: Vec<(SyntaxKind, usize)>,
     children: Vec<GreenElement>,
 }

-impl GreenNodeBuilder<'_> {
+impl GreenNodeBuilder<'static, 'static, Rodeo<Spur, FxBuildHasher>> {
     /// Creates new builder.
-    pub fn new() -> GreenNodeBuilder<'static> {
-        GreenNodeBuilder {
+    pub fn new() -> Self {
+        Self {
             cache: MaybeOwned::Owned(NodeCache::new()),
             parents: Vec::with_capacity(8),
             children: Vec::with_capacity(8),
         }
     }
+}

+impl<'cache, 'interner, I> GreenNodeBuilder<'cache, 'interner, I>
+where
+    I: Interner,
+{
     /// Reusing `NodeCache` between different `GreenNodeBuilder`s saves memory.
     /// It allows to structurally share underlying trees.
-    pub fn with_cache(cache: &mut NodeCache) -> GreenNodeBuilder<'_> {
-        GreenNodeBuilder {
+    pub fn with_cache(cache: &'cache mut NodeCache<'interner, I>) -> Self {
+        Self {
             cache: MaybeOwned::Borrowed(cache),
             parents: Vec::with_capacity(8),
             children: Vec::with_capacity(8),
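As the doc comment above says, a single `NodeCache` can back several builders in turn; a small sketch of that reuse (variable and function names are illustrative, not from the diff):

    use cstree::{GreenNodeBuilder, NodeCache};

    fn reuse_cache_across_builders() {
        let mut cache = NodeCache::new();

        // Both builders deduplicate green nodes and intern token text through
        // the same cache, so trees built this way share underlying storage.
        let first = GreenNodeBuilder::with_cache(&mut cache);
        drop(first);

        let second = GreenNodeBuilder::with_cache(&mut cache);
        drop(second);
    }
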
@@ -268,15 +295,12 @@ impl GreenNodeBuilder<'_> {
     /// `start_node_at` and `finish_node` calls
     /// are paired!
     #[inline]
-    pub fn finish(mut self) -> (GreenNode, Option<impl Interner<Spur>>) {
+    pub fn finish(mut self) -> (GreenNode, Option<I>) {
         assert_eq!(self.children.len(), 1);
-        let resolver = match self.cache {
-            MaybeOwned::Owned(cache) => Some(cache.interner),
-            MaybeOwned::Borrowed(_) => None,
-        };
+        let resolver = self.cache.as_owned().and_then(|cache| cache.interner.as_owned());
         match self.children.pop().unwrap() {
             NodeOrToken::Node(node) => (node, resolver),
-            NodeOrToken::Token(_) => panic!(),
+            NodeOrToken::Token(_) => panic!("called `finish` on a `GreenNodeBuilder` which only contained a token"),
         }
     }
 }
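A sketch of the new `finish` signature in use. The `start_node`/`token`/`finish_node` calls are assumed to be the builder's usual API (they are not touched by this diff), and the kind values are made up; only the shape of the return value comes from the hunk above:

    use cstree::{GreenNodeBuilder, SyntaxKind};

    fn finish_hands_back_the_interner() {
        let mut builder = GreenNodeBuilder::new();
        builder.start_node(SyntaxKind(0));
        builder.token(SyntaxKind(1), "hello");
        builder.finish_node();

        // With an owned cache (from `new`), the interner is returned so token
        // text can be resolved later; with `with_cache`/`with_interner`, the
        // second component is `None` and the caller already holds the interner.
        let (_green, interner) = builder.finish();
        assert!(interner.is_some());
    }
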
@@ -53,7 +53,7 @@ use std::fmt;
 pub use text_size::{TextLen, TextRange, TextSize};

 pub use crate::{
-    green::{Checkpoint, Children, GreenNode, GreenNodeBuilder, GreenToken, SyntaxKind},
+    green::{Checkpoint, Children, GreenNode, GreenNodeBuilder, GreenToken, NodeCache, SyntaxKind},
     syntax::{SyntaxElement, SyntaxElementChildren, SyntaxElementRef, SyntaxNode, SyntaxNodeChildren, SyntaxToken},
     syntax_text::SyntaxText,
     utility_types::{Direction, NodeOrToken, TokenAtOffset, WalkEvent},

src/syntax.rs (348 changed lines)
@@ -1,6 +1,6 @@
 use std::{
     cell::UnsafeCell,
-    fmt::Write,
+    fmt::{self, Write},
     hash::{Hash, Hasher},
     iter, ptr,
     sync::atomic::{AtomicU32, Ordering},

@@ -32,11 +32,11 @@ use crate::{
 // - DQ 01/2021

 #[repr(transparent)]
-pub struct SyntaxNode<L: Language, D: 'static = ()> {
-    data: *mut NodeData<L, D>,
+pub struct SyntaxNode<L: Language, D: 'static = (), R: 'static = ()> {
+    data: *mut NodeData<L, D, R>,
 }

-impl<L: Language, D> SyntaxNode<L, D> {
+impl<L: Language, D, R> SyntaxNode<L, D, R> {
     pub fn debug(&self, resolver: &impl Resolver, recursive: bool) -> String {
         // NOTE: `fmt::Write` methods on `String` never fail
         let mut res = String::new();

@@ -82,7 +82,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }
 }

-impl<L: Language, D> Clone for SyntaxNode<L, D> {
+impl<L: Language, D, R> Clone for SyntaxNode<L, D, R> {
     fn clone(&self) -> Self {
         // safety:: the ref count is only dropped when there are no more external references (see below)
         // since we are currently cloning such a reference, there is still at least one

@@ -92,7 +92,7 @@ impl<L: Language, D> Clone for SyntaxNode<L, D> {
     }
 }

-impl<L: Language, D> Drop for SyntaxNode<L, D> {
+impl<L: Language, D, R> Drop for SyntaxNode<L, D, R> {
     fn drop(&mut self) {
         // safety:: the ref count is only dropped when there are no more external references (see below)
         // and all nodes but the root have been dropped.

@@ -117,9 +117,9 @@ impl<L: Language, D> Drop for SyntaxNode<L, D> {
     }
 }

-impl<L: Language, D> SyntaxNode<L, D> {
+impl<L: Language, D, R> SyntaxNode<L, D, R> {
     #[inline]
-    fn data(&self) -> &NodeData<L, D> {
+    fn data(&self) -> &NodeData<L, D, R> {
         unsafe { &*self.data }
     }

@@ -127,7 +127,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     /// Caller must ensure that the access to the underlying data is unique (no active _mutable or immutable_
     /// references).
     #[inline]
-    unsafe fn data_mut(&self) -> &mut NodeData<L, D> {
+    unsafe fn data_mut(&self) -> &mut NodeData<L, D, R> {
         &mut *self.data
     }

@@ -136,7 +136,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
         Self { data: self.data }
     }

-    fn root(&self) -> &SyntaxNode<L, D> {
+    fn root(&self) -> &SyntaxNode<L, D, R> {
         let mut current = self;
         while let Some(parent) = current.parent() {
             current = parent;

@@ -177,28 +177,28 @@ impl<L: Language, D> SyntaxNode<L, D> {
 }

 // Identity semantics for hash & eq
-impl<L: Language, D> PartialEq for SyntaxNode<L, D> {
-    fn eq(&self, other: &SyntaxNode<L, D>) -> bool {
+impl<L: Language, D, R> PartialEq for SyntaxNode<L, D, R> {
+    fn eq(&self, other: &SyntaxNode<L, D, R>) -> bool {
         self.green().ptr() == other.green().ptr() && self.text_range().start() == other.text_range().start()
     }
 }

-impl<L: Language, D> Eq for SyntaxNode<L, D> {}
+impl<L: Language, D, R> Eq for SyntaxNode<L, D, R> {}

-impl<L: Language, D> Hash for SyntaxNode<L, D> {
+impl<L: Language, D, R> Hash for SyntaxNode<L, D, R> {
     fn hash<H: Hasher>(&self, state: &mut H) {
         ptr::hash(self.green().ptr(), state);
         self.text_range().start().hash(state);
     }
 }

-pub struct SyntaxToken<L: Language, D: 'static = ()> {
-    parent: SyntaxNode<L, D>,
+pub struct SyntaxToken<L: Language, D: 'static = (), R: 'static = ()> {
+    parent: SyntaxNode<L, D, R>,
     index: u32,
     offset: TextSize,
 }

-impl<L: Language, D> Clone for SyntaxToken<L, D> {
+impl<L: Language, D, R> Clone for SyntaxToken<L, D, R> {
     fn clone(&self) -> Self {
         Self {
             parent: self.parent.clone(),

@@ -208,7 +208,7 @@ impl<L: Language, D> Clone for SyntaxToken<L, D> {
     }
 }

-impl<L: Language, D> Hash for SyntaxToken<L, D> {
+impl<L: Language, D, R> Hash for SyntaxToken<L, D, R> {
     fn hash<H: Hasher>(&self, state: &mut H) {
         self.parent.hash(state);
         self.index.hash(state);

@@ -216,23 +216,23 @@ impl<L: Language, D> Hash for SyntaxToken<L, D> {
     }
 }

-impl<L: Language, D> PartialEq for SyntaxToken<L, D> {
-    fn eq(&self, other: &SyntaxToken<L, D>) -> bool {
+impl<L: Language, D, R> PartialEq for SyntaxToken<L, D, R> {
+    fn eq(&self, other: &SyntaxToken<L, D, R>) -> bool {
         self.parent == other.parent && self.index == other.index && self.offset == other.offset
     }
 }

-impl<L: Language, D> Eq for SyntaxToken<L, D> {}
+impl<L: Language, D, R> Eq for SyntaxToken<L, D, R> {}

-impl<L: Language, D> SyntaxToken<L, D> {
+impl<L: Language, D, R> SyntaxToken<L, D, R> {
     pub fn debug(&self, resolver: &impl Resolver) -> String {
         let mut res = String::new();
         write!(res, "{:?}@{:?}", self.kind(), self.text_range()).unwrap();
-        if self.text(resolver).len() < 25 {
-            write!(res, " {:?}", self.text(resolver)).unwrap();
+        if self.resolve_text(resolver).len() < 25 {
+            write!(res, " {:?}", self.resolve_text(resolver)).unwrap();
             return res;
         }
-        let text = self.text(resolver);
+        let text = self.resolve_text(resolver);
         for idx in 21..25 {
             if text.is_char_boundary(idx) {
                 let text = format!("{} ...", &text[..idx]);

@@ -244,25 +244,25 @@ impl<L: Language, D> SyntaxToken<L, D> {
     }

     pub fn display(&self, resolver: &impl Resolver) -> String {
-        self.text(resolver).to_string()
+        self.resolve_text(resolver).to_string()
     }
 }

-pub type SyntaxElement<L, D = ()> = NodeOrToken<SyntaxNode<L, D>, SyntaxToken<L, D>>;
+pub type SyntaxElement<L, D = (), R = ()> = NodeOrToken<SyntaxNode<L, D, R>, SyntaxToken<L, D, R>>;

-impl<L: Language, D> From<SyntaxNode<L, D>> for SyntaxElement<L, D> {
-    fn from(node: SyntaxNode<L, D>) -> SyntaxElement<L, D> {
+impl<L: Language, D, R> From<SyntaxNode<L, D, R>> for SyntaxElement<L, D, R> {
+    fn from(node: SyntaxNode<L, D, R>) -> SyntaxElement<L, D, R> {
         NodeOrToken::Node(node)
     }
 }

-impl<L: Language, D> From<SyntaxToken<L, D>> for SyntaxElement<L, D> {
-    fn from(token: SyntaxToken<L, D>) -> SyntaxElement<L, D> {
+impl<L: Language, D, R> From<SyntaxToken<L, D, R>> for SyntaxElement<L, D, R> {
+    fn from(token: SyntaxToken<L, D, R>) -> SyntaxElement<L, D, R> {
         NodeOrToken::Token(token)
     }
 }

-impl<L: Language, D> SyntaxElement<L, D> {
+impl<L: Language, D, R> SyntaxElement<L, D, R> {
     pub fn display(&self, resolver: &impl Resolver) -> String {
         match self {
             NodeOrToken::Node(it) => it.display(resolver),

@@ -271,22 +271,22 @@ impl<L: Language, D> SyntaxElement<L, D> {
     }
 }

-pub type SyntaxElementRef<'a, L, D = ()> = NodeOrToken<&'a SyntaxNode<L, D>, &'a SyntaxToken<L, D>>;
+pub type SyntaxElementRef<'a, L, D = (), R = ()> = NodeOrToken<&'a SyntaxNode<L, D, R>, &'a SyntaxToken<L, D, R>>;

-impl<'a, L: Language, D> From<&'a SyntaxNode<L, D>> for SyntaxElementRef<'a, L, D> {
-    fn from(node: &'a SyntaxNode<L, D>) -> Self {
+impl<'a, L: Language, D, R> From<&'a SyntaxNode<L, D, R>> for SyntaxElementRef<'a, L, D, R> {
+    fn from(node: &'a SyntaxNode<L, D, R>) -> Self {
         NodeOrToken::Node(node)
     }
 }

-impl<'a, L: Language, D> From<&'a SyntaxToken<L, D>> for SyntaxElementRef<'a, L, D> {
-    fn from(token: &'a SyntaxToken<L, D>) -> Self {
+impl<'a, L: Language, D, R> From<&'a SyntaxToken<L, D, R>> for SyntaxElementRef<'a, L, D, R> {
+    fn from(token: &'a SyntaxToken<L, D, R>) -> Self {
         NodeOrToken::Token(token)
     }
 }

-impl<'a, L: Language, D> From<&'a SyntaxElement<L, D>> for SyntaxElementRef<'a, L, D> {
-    fn from(element: &'a SyntaxElement<L, D>) -> Self {
+impl<'a, L: Language, D, R> From<&'a SyntaxElement<L, D, R>> for SyntaxElementRef<'a, L, D, R> {
+    fn from(element: &'a SyntaxElement<L, D, R>) -> Self {
         match element {
             NodeOrToken::Node(it) => Self::Node(it),
             NodeOrToken::Token(it) => Self::Token(it),

@@ -294,7 +294,7 @@ impl<'a, L: Language, D> From<&'a SyntaxElement<L, D>> for SyntaxElementRef<'a,
     }
 }

-impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
+impl<'a, L: Language, D, R> SyntaxElementRef<'a, L, D, R> {
     pub fn display(&self, resolver: &impl Resolver) -> String {
         match self {
             NodeOrToken::Node(it) => it.display(resolver),

@@ -303,17 +303,17 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
     }
 }

-enum Kind<L: Language, D: 'static> {
-    Root(GreenNode),
+enum Kind<L: Language, D: 'static, R: 'static> {
+    Root(GreenNode, Arc<R>),
     Child {
-        parent: SyntaxNode<L, D>,
+        parent: SyntaxNode<L, D, R>,
         index: u32,
         offset: TextSize,
     },
 }

-impl<L: Language, D> Kind<L, D> {
-    fn as_child(&self) -> Option<(&SyntaxNode<L, D>, u32, TextSize)> {
+impl<L: Language, D, R> Kind<L, D, R> {
+    fn as_child(&self) -> Option<(&SyntaxNode<L, D, R>, u32, TextSize)> {
         match self {
             Kind::Child { parent, index, offset } => Some((parent, *index, *offset)),
             _ => None,

@@ -321,18 +321,18 @@ impl<L: Language, D> Kind<L, D> {
     }
 }

-struct NodeData<L: Language, D: 'static> {
-    kind: Kind<L, D>,
+struct NodeData<L: Language, D: 'static, R: 'static> {
+    kind: Kind<L, D, R>,
     green: ptr::NonNull<GreenNode>,
     ref_count: *mut AtomicU32,
     data: RwLock<Option<Arc<D>>>,
-    children: Vec<UnsafeCell<Option<SyntaxElement<L, D>>>>,
+    children: Vec<UnsafeCell<Option<SyntaxElement<L, D, R>>>>,
     child_locks: Vec<RwLock<()>>,
 }

-impl<L: Language, D> NodeData<L, D> {
+impl<L: Language, D, R> NodeData<L, D, R> {
     fn new(
-        kind: Kind<L, D>,
+        kind: Kind<L, D, R>,
         green: ptr::NonNull<GreenNode>,
         ref_count: *mut AtomicU32,
         n_children: usize,

@@ -352,23 +352,29 @@ impl<L: Language, D> NodeData<L, D> {
     }
 }

-impl<L: Language, D> SyntaxNode<L, D> {
-    fn new(data: *mut NodeData<L, D>) -> SyntaxNode<L, D> {
+impl<L: Language, D> SyntaxNode<L, D, ()> {
+    pub fn new_root(green: GreenNode) -> Self {
+        Self::make_new_root(green, ())
+    }
+}
+
+impl<L: Language, D, R> SyntaxNode<L, D, R> {
+    fn new(data: *mut NodeData<L, D, R>) -> Self {
         Self { data }
     }

-    pub fn new_root(green: GreenNode) -> SyntaxNode<L, D> {
+    fn make_new_root(green: GreenNode, resolver: R) -> Self {
         let ref_count = Box::new(AtomicU32::new(1));
         let n_children = green.children().count();
         let data = NodeData::new(
-            Kind::Root(green),
+            Kind::Root(green, Arc::new(resolver)),
             ptr::NonNull::dangling(),
             Box::into_raw(ref_count),
             n_children,
         );
         let ret = Self::new(data);
         let green: ptr::NonNull<GreenNode> = match &ret.data().kind {
-            Kind::Root(green) => green.into(),
+            Kind::Root(green, _resolver) => green.into(),
             _ => unreachable!(),
         };
         // safety: we have just created `ret` and have not shared it

@@ -376,15 +382,16 @@ impl<L: Language, D> SyntaxNode<L, D> {
         ret
     }

+    pub fn new_root_with_resolver(green: GreenNode, resolver: R) -> Self
+    where
+        R: Resolver,
+    {
+        Self::make_new_root(green, resolver)
+    }
+
     // Technically, unsafe, but private so that's OK.
     // Safety: `green` must be a descendent of `parent.green`
-    fn new_child(
-        green: &GreenNode,
-        parent: &SyntaxNode<L, D>,
-        index: u32,
-        offset: TextSize,
-        ref_count: *mut AtomicU32,
-    ) -> SyntaxNode<L, D> {
+    fn new_child(green: &GreenNode, parent: &Self, index: u32, offset: TextSize, ref_count: *mut AtomicU32) -> Self {
         let n_children = green.children().count();
         let data = NodeData::new(
             Kind::Child {
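Putting the two root constructors next to each other, a sketch of how a tree built on this branch can carry its interner as the resolver. The `Lang` type parameter stands for any `cstree::Language` implementation and is assumed here; the function name is illustrative:

    use cstree::{GreenNodeBuilder, Language, SyntaxNode};

    fn root_with_inlined_resolver<Lang: Language>(builder: GreenNodeBuilder<'static, 'static>) {
        // `finish` returns the green tree plus the interner (owned-cache case).
        let (green, interner) = builder.finish();

        // Storing the interner in the root means `text()`, `Debug` and `Display`
        // work on this tree without passing a resolver around; `new_root` still
        // exists for trees without one (then `R = ()`).
        let root: SyntaxNode<Lang, (), _> =
            SyntaxNode::new_root_with_resolver(green, interner.unwrap());
        let _ = root;
    }
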
@@ -426,8 +433,15 @@ impl<L: Language, D> SyntaxNode<L, D> {
             *ptr = None;
         }

+    pub fn resolver(&self) -> &Arc<R> {
+        match &self.root().data().kind {
+            Kind::Root(_, resolver) => resolver,
+            _ => unreachable!(),
+        }
+    }
+
     #[inline]
-    fn read(&self, index: usize) -> Option<SyntaxElementRef<'_, L, D>> {
+    fn read(&self, index: usize) -> Option<SyntaxElementRef<'_, L, D, R>> {
         // safety: children are pre-allocated and indices are determined internally
         let _read = unsafe { self.data().child_locks.get_unchecked(index).read() };
         // safety: mutable accesses to the slot only occur below and have to take the lock

@@ -435,7 +449,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
         slot.as_ref().map(|elem| elem.into())
     }

-    fn try_write(&self, index: usize, elem: SyntaxElement<L, D>) {
+    fn try_write(&self, index: usize, elem: SyntaxElement<L, D, R>) {
         // safety: children are pre-allocated and indices are determined internally
         let _write = unsafe { self.data().child_locks.get_unchecked(index).write() };
         // safety: we are the only writer and there are no readers as evidenced by the write lock

@@ -480,7 +494,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline(always)]
-    fn get_or_add_node(&self, node: &GreenNode, index: usize, offset: TextSize) -> SyntaxElementRef<'_, L, D> {
+    fn get_or_add_node(&self, node: &GreenNode, index: usize, offset: TextSize) -> SyntaxElementRef<'_, L, D, R> {
         if let Some(elem) = self.read(index) {
             debug_assert_eq!(elem.text_range().start(), offset);
             return elem;

@@ -498,7 +512,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
         element: GreenElementRef<'_>,
         index: usize,
         offset: TextSize,
-    ) -> SyntaxElementRef<'_, L, D> {
+    ) -> SyntaxElementRef<'_, L, D, R> {
         if let Some(elem) = self.read(index) {
             debug_assert_eq!(elem.text_range().start(), offset);
             return elem;

@@ -552,7 +566,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn text<'n, 'i, I>(&'n self, resolver: &'i I) -> SyntaxText<'n, 'i, I, L, D>
+    pub fn resolve_text<'n, 'i, I>(&'n self, resolver: &'i I) -> SyntaxText<'n, 'i, I, L, D, R>
     where
         I: Resolver + ?Sized,
     {

@@ -565,42 +579,42 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn parent(&self) -> Option<&SyntaxNode<L, D>> {
+    pub fn parent(&self) -> Option<&SyntaxNode<L, D, R>> {
         match &self.data().kind {
-            Kind::Root(_) => None,
+            Kind::Root(_, _) => None,
             Kind::Child { parent, .. } => Some(parent),
         }
     }

     #[inline]
-    pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
+    pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D, R>> {
         iter::successors(Some(self), |&node| node.parent())
     }

     #[inline]
-    pub fn children(&self) -> SyntaxNodeChildren<'_, L, D> {
+    pub fn children(&self) -> SyntaxNodeChildren<'_, L, D, R> {
         SyntaxNodeChildren::new(self)
     }

     #[inline]
-    pub fn children_with_tokens(&self) -> SyntaxElementChildren<'_, L, D> {
+    pub fn children_with_tokens(&self) -> SyntaxElementChildren<'_, L, D, R> {
         SyntaxElementChildren::new(self)
     }

     #[inline]
-    pub fn first_child(&self) -> Option<&SyntaxNode<L, D>> {
+    pub fn first_child(&self) -> Option<&SyntaxNode<L, D, R>> {
         let (node, (index, offset)) = filter_nodes(self.green().children_from(0, self.text_range().start())).next()?;
         self.get_or_add_node(node, index, offset).as_node().map(|node| *node)
     }

     #[inline]
-    pub fn first_child_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn first_child_or_token(&self) -> Option<SyntaxElementRef<'_, L, D, R>> {
         let (element, (index, offset)) = self.green().children_from(0, self.text_range().start()).next()?;
         Some(self.get_or_add_element(element, index, offset))
     }

     #[inline]
-    pub fn last_child(&self) -> Option<&SyntaxNode<L, D>> {
+    pub fn last_child(&self) -> Option<&SyntaxNode<L, D, R>> {
         let (node, (index, offset)) = filter_nodes(
             self.green()
                 .children_to(self.green().children().len(), self.text_range().end()),

@@ -610,7 +624,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn last_child_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn last_child_or_token(&self) -> Option<SyntaxElementRef<'_, L, D, R>> {
         let (element, (index, offset)) = self
             .green()
             .children_to(self.green().children().len(), self.text_range().end())

@@ -619,31 +633,31 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<L, D>> {
+    pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<L, D, R>> {
         let (node, (index, offset)) = filter_nodes(self.green().children_from(n + 1, offset)).next()?;
         self.get_or_add_node(node, index, offset).as_node().map(|node| *node)
     }

     #[inline]
-    pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option<SyntaxElementRef<'_, L, D, R>> {
         let (element, (index, offset)) = self.green().children_from(n + 1, offset).next()?;
         Some(self.get_or_add_element(element, index, offset))
     }

     #[inline]
-    pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<L, D>> {
+    pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<L, D, R>> {
         let (node, (index, offset)) = filter_nodes(self.green().children_to(n, offset)).next()?;
         self.get_or_add_node(node, index, offset).as_node().map(|node| *node)
     }

     #[inline]
-    pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option<SyntaxElementRef<'_, L, D, R>> {
         let (element, (index, offset)) = self.green().children_to(n, offset).next()?;
         Some(self.get_or_add_element(element, index, offset))
     }

     #[inline]
-    pub fn next_sibling(&self) -> Option<&SyntaxNode<L, D>> {
+    pub fn next_sibling(&self) -> Option<&SyntaxNode<L, D, R>> {
         let (parent, index, _) = self.data().kind.as_child()?;

         let (node, (index, offset)) = filter_nodes(

@@ -656,7 +670,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D, R>> {
         let (parent, index, _) = self.data().kind.as_child()?;

         let (element, (index, offset)) = parent

@@ -667,7 +681,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn prev_sibling(&self) -> Option<&SyntaxNode<L, D>> {
+    pub fn prev_sibling(&self) -> Option<&SyntaxNode<L, D, R>> {
         let (parent, index, _) = self.data().kind.as_child()?;

         let (node, (index, offset)) =

@@ -676,7 +690,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D, R>> {
         let (parent, index, _) = self.data().kind.as_child()?;

         let (element, (index, offset)) = parent

@@ -688,18 +702,18 @@ impl<L: Language, D> SyntaxNode<L, D> {

     /// Return the leftmost token in the subtree of this node
     #[inline]
-    pub fn first_token(&self) -> Option<&SyntaxToken<L, D>> {
+    pub fn first_token(&self) -> Option<&SyntaxToken<L, D, R>> {
         self.first_child_or_token()?.first_token()
     }

     /// Return the rightmost token in the subtree of this node
     #[inline]
-    pub fn last_token(&self) -> Option<&SyntaxToken<L, D>> {
+    pub fn last_token(&self) -> Option<&SyntaxToken<L, D, R>> {
         self.last_child_or_token()?.last_token()
     }

     #[inline]
-    pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode<L, D>> {
+    pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode<L, D, R>> {
         iter::successors(Some(self), move |node| match direction {
             Direction::Next => node.next_sibling(),
             Direction::Prev => node.prev_sibling(),

@@ -707,8 +721,8 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = SyntaxElementRef<'_, L, D>> {
-        let me: SyntaxElementRef<'_, L, D> = self.into();
+    pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = SyntaxElementRef<'_, L, D, R>> {
+        let me: SyntaxElementRef<'_, L, D, R> = self.into();
         iter::successors(Some(me), move |el| match direction {
             Direction::Next => el.next_sibling_or_token(),
             Direction::Prev => el.prev_sibling_or_token(),

@@ -716,7 +730,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
+    pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode<L, D, R>> {
         self.preorder().filter_map(|event| match event {
             WalkEvent::Enter(node) => Some(node),
             WalkEvent::Leave(_) => None,

@@ -724,7 +738,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }

     #[inline]
-    pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElementRef<'_, L, D>> {
+    pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElementRef<'_, L, D, R>> {
         self.preorder_with_tokens().filter_map(|event| match event {
             WalkEvent::Enter(it) => Some(it),
             WalkEvent::Leave(_) => None,

@@ -733,8 +747,8 @@ impl<L: Language, D> SyntaxNode<L, D> {

     /// Traverse the subtree rooted at the current node (including the current
     /// node) in preorder, excluding tokens.
-    #[inline]
-    pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode<L, D>>> {
+    #[inline(always)]
+    pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode<L, D, R>>> {
         iter::successors(Some(WalkEvent::Enter(self)), move |pos| {
             let next = match pos {
                 WalkEvent::Enter(node) => match node.first_child() {

@@ -757,8 +771,8 @@ impl<L: Language, D> SyntaxNode<L, D> {

     /// Traverse the subtree rooted at the current node (including the current
     /// node) in preorder, including tokens.
-    #[inline]
-    pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElementRef<'_, L, D>>> {
+    #[inline(always)]
+    pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElementRef<'_, L, D, R>>> {
         let me = self.into();
         iter::successors(Some(WalkEvent::Enter(me)), move |pos| {
             let next = match pos {

@@ -785,7 +799,7 @@ impl<L: Language, D> SyntaxNode<L, D> {

     /// Find a token in the subtree corresponding to this node, which covers the offset.
     /// Precondition: offset must be withing node's range.
-    pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<L, D>> {
+    pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<L, D, R>> {
         // TODO: this could be faster if we first drill-down to node, and only
         // then switch to token search. We should also replace explicit
         // recursion with a loop.

@@ -823,8 +837,8 @@ impl<L: Language, D> SyntaxNode<L, D> {
     /// contains the range. If the range is empty and is contained in two leaf
     /// nodes, either one can be returned. Precondition: range must be contained
     /// withing the current node
-    pub fn covering_element(&self, range: TextRange) -> SyntaxElementRef<'_, L, D> {
-        let mut res: SyntaxElementRef<'_, L, D> = self.into();
+    pub fn covering_element(&self, range: TextRange) -> SyntaxElementRef<'_, L, D, R> {
+        let mut res: SyntaxElementRef<'_, L, D, R> = self.into();
         loop {
             assert!(
                 res.text_range().contains_range(range),

@@ -848,8 +862,36 @@ impl<L: Language, D> SyntaxNode<L, D> {
     }
 }

-impl<L: Language, D> SyntaxToken<L, D> {
-    fn new(parent: &SyntaxNode<L, D>, index: u32, offset: TextSize) -> SyntaxToken<L, D> {
+impl<L: Language, D, R> SyntaxNode<L, D, R>
+where
+    R: Resolver,
+{
+    #[inline]
+    pub fn text<'n>(&'n self) -> SyntaxText<'n, 'n, R, L, D, R> {
+        SyntaxText::new(self, self.resolver().as_ref())
+    }
+}
+
+impl<L: Language, D, R> fmt::Debug for SyntaxNode<L, D, R>
+where
+    R: Resolver,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", Self::debug(self, self.resolver().as_ref(), f.alternate()))
+    }
+}
+
+impl<L: Language, D, R> fmt::Display for SyntaxNode<L, D, R>
+where
+    R: Resolver,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", Self::display(self, self.resolver().as_ref()))
+    }
+}
+
+impl<L: Language, D, R> SyntaxToken<L, D, R> {
+    fn new(parent: &SyntaxNode<L, D, R>, index: u32, offset: TextSize) -> SyntaxToken<L, D, R> {
         Self {
             parent: parent.clone_uncounted(),
             index,

@@ -893,7 +935,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
     }

     #[inline]
-    pub fn text<'i, I>(&self, resolver: &'i I) -> &'i str
+    pub fn resolve_text<'i, I>(&self, resolver: &'i I) -> &'i str
     where
         I: Resolver + ?Sized,
     {

@@ -911,30 +953,30 @@ impl<L: Language, D> SyntaxToken<L, D> {
     }

     #[inline]
-    pub fn parent(&self) -> &SyntaxNode<L, D> {
+    pub fn parent(&self) -> &SyntaxNode<L, D, R> {
         &self.parent
     }

     #[inline]
-    pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
+    pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D, R>> {
         self.parent().ancestors()
     }

     #[inline]
-    pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D, R>> {
         self.parent()
             .next_child_or_token_after(self.index as usize, self.text_range().end())
     }

     #[inline]
-    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D, R>> {
         self.parent()
             .prev_child_or_token_before(self.index as usize, self.text_range().start())
     }

     #[inline]
-    pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = SyntaxElementRef<'_, L, D>> {
-        let me: SyntaxElementRef<'_, L, D> = self.into();
+    pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = SyntaxElementRef<'_, L, D, R>> {
+        let me: SyntaxElementRef<'_, L, D, R> = self.into();
         iter::successors(Some(me), move |el| match direction {
             Direction::Next => el.next_sibling_or_token(),
             Direction::Prev => el.prev_sibling_or_token(),

@@ -942,7 +984,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
     }

     /// Next token in the tree (i.e, not necessary a sibling)
-    pub fn next_token(&self) -> Option<&SyntaxToken<L, D>> {
+    pub fn next_token(&self) -> Option<&SyntaxToken<L, D, R>> {
         match self.next_sibling_or_token() {
             Some(element) => element.first_token(),
             None => self

@@ -954,7 +996,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
     }

     /// Previous token in the tree (i.e, not necessary a sibling)
-    pub fn prev_token(&self) -> Option<&SyntaxToken<L, D>> {
+    pub fn prev_token(&self) -> Option<&SyntaxToken<L, D, R>> {
         match self.prev_sibling_or_token() {
             Some(element) => element.last_token(),
             None => self

@@ -966,14 +1008,42 @@ impl<L: Language, D> SyntaxToken<L, D> {
     }
 }

-impl<L: Language, D> SyntaxElement<L, D> {
+impl<L: Language, D, R> SyntaxToken<L, D, R>
+where
+    R: Resolver,
+{
+    #[inline]
+    pub fn text(&self) -> &str {
+        self.green().text(self.parent().resolver().as_ref())
+    }
+}
+
+impl<L: Language, D, R> fmt::Debug for SyntaxToken<L, D, R>
+where
+    R: Resolver,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", Self::debug(self, self.parent().resolver().as_ref()))
+    }
+}
+
+impl<L: Language, D, R> fmt::Display for SyntaxToken<L, D, R>
+where
+    R: Resolver,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", Self::display(self, self.parent().resolver().as_ref()))
+    }
+}
+
+impl<L: Language, D, R> SyntaxElement<L, D, R> {
     fn new(
         element: GreenElementRef<'_>,
-        parent: &SyntaxNode<L, D>,
+        parent: &SyntaxNode<L, D, R>,
         index: u32,
         offset: TextSize,
         ref_count: *mut AtomicU32,
-    ) -> SyntaxElement<L, D> {
+    ) -> SyntaxElement<L, D, R> {
         match element {
             NodeOrToken::Node(node) => SyntaxNode::new_child(node, parent, index as u32, offset, ref_count).into(),
             NodeOrToken::Token(_) => SyntaxToken::new(parent, index as u32, offset).into(),
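In short, `resolve_text(&resolver)` is the old explicit form, while `text()` becomes available whenever the tree's `R` parameter is a `Resolver` (for example after `new_root_with_resolver`). A generic sketch; the function name is illustrative:

    use cstree::{interning::Resolver, Language, SyntaxNode};

    fn print_first_token<L: Language, R: Resolver>(root: &SyntaxNode<L, (), R>) {
        if let Some(token) = root.first_token() {
            // No resolver argument needed: the token reads it from its root.
            println!("{}", token.text());
        }
    }
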
@@ -1005,7 +1075,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
     }

     #[inline]
-    pub fn parent(&self) -> Option<&SyntaxNode<L, D>> {
+    pub fn parent(&self) -> Option<&SyntaxNode<L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.parent(),
             NodeOrToken::Token(it) => Some(it.parent()),

@@ -1013,7 +1083,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
     }

     #[inline]
-    pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
+    pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.ancestors(),
             NodeOrToken::Token(it) => it.parent().ancestors(),

@@ -1021,7 +1091,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
     }

     #[inline]
-    pub fn first_token(&self) -> Option<&SyntaxToken<L, D>> {
+    pub fn first_token(&self) -> Option<&SyntaxToken<L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.first_token(),
             NodeOrToken::Token(it) => Some(it),

@@ -1029,7 +1099,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
     }

     #[inline]
-    pub fn last_token(&self) -> Option<&SyntaxToken<L, D>> {
+    pub fn last_token(&self) -> Option<&SyntaxToken<L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.last_token(),
             NodeOrToken::Token(it) => Some(it),

@@ -1037,7 +1107,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
     }

     #[inline]
-    pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.next_sibling_or_token(),
             NodeOrToken::Token(it) => it.next_sibling_or_token(),

@@ -1045,7 +1115,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
     }

     #[inline]
-    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.prev_sibling_or_token(),
             NodeOrToken::Token(it) => it.prev_sibling_or_token(),

@@ -1053,7 +1123,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
     }
 }

-impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
+impl<'a, L: Language, D, R> SyntaxElementRef<'a, L, D, R> {
     #[inline]
     pub fn text_range(&self) -> TextRange {
         match self {

@@ -1079,7 +1149,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
     }

     #[inline]
-    pub fn parent(&self) -> Option<&'a SyntaxNode<L, D>> {
+    pub fn parent(&self) -> Option<&'a SyntaxNode<L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.parent(),
             NodeOrToken::Token(it) => Some(it.parent()),

@@ -1087,7 +1157,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
     }

     #[inline]
-    pub fn ancestors(&self) -> impl Iterator<Item = &'a SyntaxNode<L, D>> {
+    pub fn ancestors(&self) -> impl Iterator<Item = &'a SyntaxNode<L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.ancestors(),
             NodeOrToken::Token(it) => it.parent().ancestors(),

@@ -1095,7 +1165,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
     }

     #[inline]
-    pub fn first_token(&self) -> Option<&'a SyntaxToken<L, D>> {
+    pub fn first_token(&self) -> Option<&'a SyntaxToken<L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.first_token(),
             NodeOrToken::Token(it) => Some(it),

@@ -1103,7 +1173,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
     }

     #[inline]
-    pub fn last_token(&self) -> Option<&'a SyntaxToken<L, D>> {
+    pub fn last_token(&self) -> Option<&'a SyntaxToken<L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.last_token(),
             NodeOrToken::Token(it) => Some(it),

@@ -1111,7 +1181,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
     }

     #[inline]
-    pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, L, D>> {
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.next_sibling_or_token(),
             NodeOrToken::Token(it) => it.next_sibling_or_token(),

@@ -1119,7 +1189,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
     }

     #[inline]
-    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, L, D>> {
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, L, D, R>> {
         match self {
             NodeOrToken::Node(it) => it.prev_sibling_or_token(),
             NodeOrToken::Token(it) => it.prev_sibling_or_token(),

@@ -1127,7 +1197,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
     }

     #[inline]
-    fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<L, D>> {
+    fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<L, D, R>> {
         assert!(self.text_range().start() <= offset && offset <= self.text_range().end());
         match self {
             NodeOrToken::Token(token) => TokenAtOffset::Single((*token).clone()),

@@ -1144,7 +1214,7 @@ struct Iter<'n> {
 }

 impl<'n> Iter<'n> {
-    fn new<L: Language, D>(parent: &'n SyntaxNode<L, D>) -> Self {
+    fn new<L: Language, D, R>(parent: &'n SyntaxNode<L, D, R>) -> Self {
         let offset = parent.text_range().start();
         let green: Children<'_> = parent.green().children();
|
||||||
Iter {
|
Iter {
|
||||||
|
@ -1167,14 +1237,14 @@ impl<'n> Iter<'n> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct SyntaxNodeChildren<'n, L: Language, D: 'static = ()> {
|
pub struct SyntaxNodeChildren<'n, L: Language, D: 'static = (), R: 'static = ()> {
|
||||||
inner: Iter<'n>,
|
inner: Iter<'n>,
|
||||||
parent: &'n SyntaxNode<L, D>,
|
parent: &'n SyntaxNode<L, D, R>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'n, L: Language, D> SyntaxNodeChildren<'n, L, D> {
|
impl<'n, L: Language, D, R> SyntaxNodeChildren<'n, L, D, R> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn new(parent: &'n SyntaxNode<L, D>) -> Self {
|
fn new(parent: &'n SyntaxNode<L, D, R>) -> Self {
|
||||||
Self {
|
Self {
|
||||||
inner: Iter::new(parent),
|
inner: Iter::new(parent),
|
||||||
parent,
|
parent,
|
||||||
|
@ -1182,8 +1252,8 @@ impl<'n, L: Language, D> SyntaxNodeChildren<'n, L, D> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'n, L: Language, D> Iterator for SyntaxNodeChildren<'n, L, D> {
|
impl<'n, L: Language, D, R> Iterator for SyntaxNodeChildren<'n, L, D, R> {
|
||||||
type Item = &'n SyntaxNode<L, D>;
|
type Item = &'n SyntaxNode<L, D, R>;
|
||||||
|
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
@ -1197,14 +1267,14 @@ impl<'n, L: Language, D> Iterator for SyntaxNodeChildren<'n, L, D> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct SyntaxElementChildren<'n, L: Language, D: 'static = ()> {
|
pub struct SyntaxElementChildren<'n, L: Language, D: 'static = (), R: 'static = ()> {
|
||||||
inner: Iter<'n>,
|
inner: Iter<'n>,
|
||||||
parent: &'n SyntaxNode<L, D>,
|
parent: &'n SyntaxNode<L, D, R>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'n, L: Language, D> SyntaxElementChildren<'n, L, D> {
|
impl<'n, L: Language, D, R> SyntaxElementChildren<'n, L, D, R> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn new(parent: &'n SyntaxNode<L, D>) -> Self {
|
fn new(parent: &'n SyntaxNode<L, D, R>) -> Self {
|
||||||
Self {
|
Self {
|
||||||
inner: Iter::new(parent),
|
inner: Iter::new(parent),
|
||||||
parent,
|
parent,
|
||||||
|
@ -1212,8 +1282,8 @@ impl<'n, L: Language, D> SyntaxElementChildren<'n, L, D> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'n, L: Language, D> Iterator for SyntaxElementChildren<'n, L, D> {
|
impl<'n, L: Language, D, R> Iterator for SyntaxElementChildren<'n, L, D, R> {
|
||||||
type Item = SyntaxElementRef<'n, L, D>;
|
type Item = SyntaxElementRef<'n, L, D, R>;
|
||||||
|
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
|
|
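The `R` parameter threaded through these types defaults to `()` (see the `R: 'static = ()` defaults above), so downstream code that never names a resolver type is intended to keep compiling unchanged. A minimal sketch of that compatibility; the `first_child` helper is hypothetical and not part of this change:

use cstree::{Language, SyntaxNode};

// Hypothetical downstream helper: it still spells the node type as
// `SyntaxNode<L, D>`, which now means `SyntaxNode<L, D, ()>` via the default.
fn first_child<L: Language, D: 'static>(node: &SyntaxNode<L, D>) -> Option<&SyntaxNode<L, D>> {
    node.children().next()
}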
@@ -3,14 +3,14 @@ use std::fmt;
 use crate::{interning::Resolver, Language, SyntaxNode, SyntaxToken, TextRange, TextSize};
 
 #[derive(Clone)]
-pub struct SyntaxText<'n, 'i, I: ?Sized, L: Language, D: 'static = ()> {
-    node: &'n SyntaxNode<L, D>,
+pub struct SyntaxText<'n, 'i, I: ?Sized, L: Language, D: 'static = (), R: 'static = ()> {
+    node: &'n SyntaxNode<L, D, R>,
     range: TextRange,
     resolver: &'i I,
 }
 
-impl<'n, 'i, I: Resolver + ?Sized, L: Language, D> SyntaxText<'n, 'i, I, L, D> {
-    pub(crate) fn new(node: &'n SyntaxNode<L, D>, resolver: &'i I) -> Self {
+impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D, R> {
+    pub(crate) fn new(node: &'n SyntaxNode<L, D, R>, resolver: &'i I) -> Self {
         let range = node.text_range();
         SyntaxText { node, range, resolver }
     }
@@ -56,7 +56,7 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D> SyntaxText<'n, 'i, I, L, D> {
         found(res)
     }
 
-    pub fn slice<R: private::SyntaxTextRange>(&self, range: R) -> Self {
+    pub fn slice<Ra: private::SyntaxTextRange>(&self, range: Ra) -> Self {
         let start = range.start().unwrap_or_default();
         let end = range.end().unwrap_or(self.len());
         assert!(start <= end);
@@ -88,7 +88,7 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D> SyntaxText<'n, 'i, I, L, D> {
         F: FnMut(T, &str) -> Result<T, E>,
     {
         self.tokens_with_ranges().try_fold(init, move |acc, (token, range)| {
-            f(acc, &token.text(self.resolver)[range])
+            f(acc, &token.resolve_text(self.resolver)[range])
         })
     }
 
@@ -104,7 +104,7 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D> SyntaxText<'n, 'i, I, L, D> {
         }
     }
 
-    fn tokens_with_ranges(&self) -> impl Iterator<Item = (&SyntaxToken<L, D>, TextRange)> {
+    fn tokens_with_ranges(&self) -> impl Iterator<Item = (&SyntaxToken<L, D, R>, TextRange)> {
         let text_range = self.range;
         self.node
             .descendants_with_tokens()
@@ -124,25 +124,25 @@ fn found<T>(res: Result<(), T>) -> Option<T> {
     }
 }
 
-impl<I: Resolver + ?Sized, L: Language, D> fmt::Debug for SyntaxText<'_, '_, I, L, D> {
+impl<I: Resolver + ?Sized, L: Language, D, R> fmt::Debug for SyntaxText<'_, '_, I, L, D, R> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Debug::fmt(&self.to_string(), f)
     }
 }
 
-impl<I: Resolver + ?Sized, L: Language, D> fmt::Display for SyntaxText<'_, '_, I, L, D> {
+impl<I: Resolver + ?Sized, L: Language, D, R> fmt::Display for SyntaxText<'_, '_, I, L, D, R> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f))
     }
 }
 
-impl<I: Resolver + ?Sized, L: Language, D> From<SyntaxText<'_, '_, I, L, D>> for String {
-    fn from(text: SyntaxText<'_, '_, I, L, D>) -> String {
+impl<I: Resolver + ?Sized, L: Language, D, R> From<SyntaxText<'_, '_, I, L, D, R>> for String {
+    fn from(text: SyntaxText<'_, '_, I, L, D, R>) -> String {
         text.to_string()
     }
 }
 
-impl<I: Resolver + ?Sized, L: Language, D> PartialEq<str> for SyntaxText<'_, '_, I, L, D> {
+impl<I: Resolver + ?Sized, L: Language, D, R> PartialEq<str> for SyntaxText<'_, '_, I, L, D, R> {
     fn eq(&self, mut rhs: &str) -> bool {
         self.try_for_each_chunk(|chunk| {
             if !rhs.starts_with(chunk) {
@@ -156,33 +156,33 @@ impl<I: Resolver + ?Sized, L: Language, D> PartialEq<str> for SyntaxText<'_, '_,
     }
 }
 
-impl<I: Resolver + ?Sized, L: Language, D> PartialEq<SyntaxText<'_, '_, I, L, D>> for str {
-    fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D>) -> bool {
+impl<I: Resolver + ?Sized, L: Language, D, R> PartialEq<SyntaxText<'_, '_, I, L, D, R>> for str {
+    fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D, R>) -> bool {
         rhs == self
     }
 }
 
-impl<I: Resolver + ?Sized, L: Language, D> PartialEq<&'_ str> for SyntaxText<'_, '_, I, L, D> {
+impl<I: Resolver + ?Sized, L: Language, D, R> PartialEq<&'_ str> for SyntaxText<'_, '_, I, L, D, R> {
     fn eq(&self, rhs: &&str) -> bool {
         self == *rhs
     }
 }
 
-impl<I: Resolver + ?Sized, L: Language, D> PartialEq<SyntaxText<'_, '_, I, L, D>> for &'_ str {
-    fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D>) -> bool {
+impl<I: Resolver + ?Sized, L: Language, D, R> PartialEq<SyntaxText<'_, '_, I, L, D, R>> for &'_ str {
+    fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D, R>) -> bool {
         rhs == self
     }
 }
 
-impl<'n1, 'i1, 'n2, 'i2, I1, I2, D1, D2, L1, L2> PartialEq<SyntaxText<'n2, 'i2, I2, L2, D2>>
-    for SyntaxText<'n1, 'i1, I1, L1, D1>
+impl<'n1, 'i1, 'n2, 'i2, I1, I2, L1, L2, D1, D2, R1, R2> PartialEq<SyntaxText<'n2, 'i2, I2, L2, D2, R2>>
+    for SyntaxText<'n1, 'i1, I1, L1, D1, R1>
 where
     L1: Language,
     L2: Language,
     I1: Resolver + ?Sized,
     I2: Resolver + ?Sized,
 {
-    fn eq(&self, other: &SyntaxText<'_, '_, I2, L2, D2>) -> bool {
+    fn eq(&self, other: &SyntaxText<'_, '_, I2, L2, D2, R2>) -> bool {
         if self.range.len() != other.range.len() {
             return false;
         }
@@ -194,19 +194,21 @@ where
     }
 }
 
-fn zip_texts<'it1, 'it2, It1, It2, I1, I2, L1, L2, D1, D2>(
+fn zip_texts<'it1, 'it2, It1, It2, I1, I2, L1, L2, D1, D2, R1, R2>(
     xs: &mut It1,
     ys: &mut It2,
     resolver_x: &I1,
     resolver_y: &I2,
 ) -> Option<()>
 where
-    It1: Iterator<Item = (&'it1 SyntaxToken<L1, D1>, TextRange)>,
-    It2: Iterator<Item = (&'it2 SyntaxToken<L2, D2>, TextRange)>,
+    It1: Iterator<Item = (&'it1 SyntaxToken<L1, D1, R1>, TextRange)>,
+    It2: Iterator<Item = (&'it2 SyntaxToken<L2, D2, R2>, TextRange)>,
     I1: Resolver + ?Sized,
     I2: Resolver + ?Sized,
     D1: 'static,
     D2: 'static,
+    R1: 'static,
+    R2: 'static,
     L1: Language + 'it1,
     L2: Language + 'it2,
 {
@@ -219,8 +221,8 @@ where
         while y.1.is_empty() {
             y = ys.next()?;
         }
-        let x_text = &x.0.text(resolver_x)[x.1];
-        let y_text = &y.0.text(resolver_y)[y.1];
+        let x_text = &x.0.resolve_text(resolver_x)[x.1];
+        let y_text = &y.0.resolve_text(resolver_y)[y.1];
         if !(x_text.starts_with(y_text) || y_text.starts_with(x_text)) {
             return Some(());
         }
@@ -328,9 +330,9 @@ mod tests {
     fn test_text_equality() {
        fn do_check(t1: &[&str], t2: &[&str]) {
            let (t1, resolver) = build_tree(t1);
-            let t1 = t1.text(&resolver);
+            let t1 = t1.resolve_text(&resolver);
            let (t2, resolver) = build_tree(t2);
-            let t2 = t2.text(&resolver);
+            let t2 = t2.resolve_text(&resolver);
            let expected = t1.to_string() == t2.to_string();
            let actual = t1 == t2;
            assert_eq!(expected, actual, "`{}` (SyntaxText) `{}` (SyntaxText)", t1, t2);
@@ -1,8 +1,10 @@
 mod common;
 
 use common::TestLang;
-use cstree::{GreenNodeBuilder, SyntaxKind, SyntaxNode, TextRange};
-use lasso::Resolver;
+use cstree::{GreenNode, GreenNodeBuilder, NodeCache, SyntaxKind, TextRange};
+use lasso::{Interner, Resolver, Rodeo};
 
+type SyntaxNode<D = (), R = ()> = cstree::SyntaxNode<TestLang, D, R>;
+
 #[derive(Debug)]
 enum Element<'s> {
@@ -19,14 +21,28 @@ fn two_level_tree() -> Element<'static> {
     ])
 }
 
-fn build_tree<D>(root: &Element<'_>) -> (SyntaxNode<TestLang, D>, impl Resolver) {
+fn build_tree<D>(root: &Element<'_>) -> (SyntaxNode<D>, impl Resolver) {
     let mut builder = GreenNodeBuilder::new();
     build_recursive(root, &mut builder, 0);
     let (node, interner) = builder.finish();
     (SyntaxNode::new_root(node), interner.unwrap())
 }
 
-fn build_recursive(root: &Element<'_>, builder: &mut GreenNodeBuilder, mut from: u16) -> u16 {
+fn build_tree_with_cache<'c, 'i, I>(root: &Element<'_>, cache: &'c mut NodeCache<'i, I>) -> GreenNode
+where
+    I: Interner,
+{
+    let mut builder = GreenNodeBuilder::with_cache(cache);
+    build_recursive(root, &mut builder, 0);
+    let (node, interner) = builder.finish();
+    assert!(interner.is_none());
+    node
+}
+
+fn build_recursive<'c, 'i, I>(root: &Element<'_>, builder: &mut GreenNodeBuilder<'c, 'i, I>, mut from: u16) -> u16
+where
+    I: Interner,
+{
     match root {
         Element::Node(children) => {
             builder.start_node(SyntaxKind(from));
@@ -53,7 +69,7 @@ fn create() {
         let leaf1_0 = leaf1_0.into_token().unwrap();
         assert_eq!(leaf1_0.syntax_kind(), SyntaxKind(5));
         assert_eq!(leaf1_0.kind(), SyntaxKind(5));
-        assert_eq!(leaf1_0.text(&resolver), "1.0");
+        assert_eq!(leaf1_0.resolve_text(&resolver), "1.0");
         assert_eq!(leaf1_0.text_range(), TextRange::at(6.into(), 3.into()));
     }
     {
@@ -61,7 +77,7 @@ fn create() {
         assert_eq!(node2.syntax_kind(), SyntaxKind(6));
         assert_eq!(node2.kind(), SyntaxKind(6));
         assert_eq!(node2.children_with_tokens().count(), 3);
-        assert_eq!(node2.text(&resolver), "2.02.12.2");
+        assert_eq!(node2.resolve_text(&resolver), "2.02.12.2");
     }
 }
 
@@ -98,3 +114,56 @@ fn data() {
         assert_eq!(node2.get_data(), None);
     }
 }
+
+#[test]
+fn with_interner() {
+    let mut interner = Rodeo::new();
+    let mut cache = NodeCache::with_interner(&mut interner);
+    let tree = two_level_tree();
+    let tree = build_tree_with_cache(&tree, &mut cache);
+    let tree: SyntaxNode = SyntaxNode::new_root(tree);
+    let resolver = interner;
+    {
+        let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().nth(0).unwrap();
+        let leaf1_0 = leaf1_0.into_token().unwrap();
+        assert_eq!(leaf1_0.resolve_text(&resolver), "1.0");
+        assert_eq!(leaf1_0.text_range(), TextRange::at(6.into(), 3.into()));
+    }
+    {
+        let node2 = tree.children().nth(2).unwrap();
+        assert_eq!(node2.resolve_text(&resolver), "2.02.12.2");
+    }
+}
+
+#[test]
+fn inline_resolver() {
+    let mut interner = Rodeo::new();
+    let mut cache = NodeCache::with_interner(&mut interner);
+    let tree = two_level_tree();
+    let tree = build_tree_with_cache(&tree, &mut cache);
+    let tree: SyntaxNode<(), Rodeo> = SyntaxNode::new_root_with_resolver(tree, interner);
+    {
+        let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().nth(0).unwrap();
+        let leaf1_0 = leaf1_0.into_token().unwrap();
+        assert_eq!(leaf1_0.text(), "1.0");
+        assert_eq!(leaf1_0.text_range(), TextRange::at(6.into(), 3.into()));
+        assert_eq!(format!("{}", leaf1_0), leaf1_0.text());
+        assert_eq!(format!("{:?}", leaf1_0), "SyntaxKind(5)@6..9 \"1.0\"");
+    }
+    {
+        let node2 = tree.children().nth(2).unwrap();
+        assert_eq!(node2.text(), "2.02.12.2");
+        let resolver = node2.resolver();
+        assert_eq!(node2.resolve_text(resolver.as_ref()), node2.text());
+        assert_eq!(format!("{}", node2).as_str(), node2.text());
+        assert_eq!(format!("{:?}", node2), "SyntaxKind(6)@9..18");
+        assert_eq!(
+            format!("{:#?}", node2),
+            r#"SyntaxKind(6)@9..18
+  SyntaxKind(7)@9..12 "2.0"
+  SyntaxKind(8)@12..15 "2.1"
+  SyntaxKind(9)@15..18 "2.2"
+"#
+        );
+    }
+}
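Taken together, the new tests sketch the workflow this change enables: a single `Rodeo` backs a `NodeCache` that can be shared across several builders, and the finished interner is either passed explicitly to `resolve_text` or moved into the red tree with `new_root_with_resolver`. A condensed sketch, assuming the test helpers above (`SyntaxNode` alias, `two_level_tree`, `build_tree_with_cache`) are in scope; the function itself is illustrative and not part of the diff:

use cstree::NodeCache;
use lasso::Rodeo;

fn cache_and_inline_resolver_demo() {
    let mut interner = Rodeo::new();
    // Both trees are built through the same cache, so they share green
    // nodes and interned strings.
    let mut cache = NodeCache::with_interner(&mut interner);
    let first = build_tree_with_cache(&two_level_tree(), &mut cache);
    let second = build_tree_with_cache(&two_level_tree(), &mut cache);

    // Explicit resolver: text access goes through `resolve_text`.
    let first: SyntaxNode = SyntaxNode::new_root(first);
    let first_text = first.resolve_text(&interner).to_string();

    // Inline resolver: the tree owns the interner, so `text()` needs no
    // resolver argument.
    let second: SyntaxNode<(), Rodeo> = SyntaxNode::new_root_with_resolver(second, interner);
    assert_eq!(second.text().to_string(), first_text);
}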