1
Fork 0
mirror of https://github.com/RGBCube/cstree synced 2025-07-27 00:57:44 +00:00

Add derive macro for Syntax (used to be Language) (#51)

This commit is contained in:
DQ 2023-04-18 20:10:35 +02:00 committed by GitHub
parent 2aa543036f
commit c5279bae7d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
70 changed files with 1459 additions and 899 deletions

View file

@ -3,10 +3,13 @@
## `v0.12.0`
* Documentation has been improved in most areas, together with a switch to a more principled module structure that allows explicitly documenting submodules.
* The `Language` trait has been deprecated in favour of a new `Syntax` trait. `Syntax` provides the same methods that `Language` did before, but is implemented directly on the syntax kind enum instead of an additional type representing the language.
* The supertrait requirements on `PartialOrd`, `Ord`, and `Hash` have been dropped.
* Optionally, `Syntax` can now be derived instead of implemented by hand; enable the `derive` feature flag to use the derive macro.
* The `interning` module has been rewritten. It now provides functions for obtaining a default interner (`new_interner` and `new_threaded_interner`) and provides a small, dependency-free interner implementation.
* Compatibility with other interners can be enable via feature flags.
* **Note** that compatibility with `lasso` is not enabled by default. Use the `lasso_compat` feature to match the previous default.
* Introduced `Language::static_text` to optimize tokens that always appear with the same text (estimated 10-15% faster tree building when used, depending on the ratio of static to dynamic tokens).
* Introduced `Syntax::static_text` to optimize tokens that always appear with the same text (estimated 10-15% faster tree building when used, depending on the ratio of static to dynamic tokens).
* Since `cstree`s are lossless, `GreenNodeBuilder::token` must still be passed the source text even for static tokens.
* Internal performance improvements for up to 10% faster tree building by avoiding unnecessary duplication of elements.
* Use `NonNull` for the internal representation of `SyntaxNode`, meaning it now benefits from niche optimizations (`Option<SyntaxNode>` is now the same size as `SyntaxNode` itself: the size of a pointer).
@ -14,7 +17,7 @@
* `RawSyntaxKind` has been changed to use a 32-bit index internally, which means existing `Language` implementations and syntax kind `enum`s need to be adjusted to `#[repr(u32)]` and the corresponding conversions.
* The crate's export module structure has been reorganized to give different groups of definitions their own submodules. A `cstree::prelude` module is available, containing the most commonly needed types that were previously accessible via `use cstree::*`. Otherwise, the module structure is now as follows:
* `cstree`
* `Language`
* `Syntax`
* `RawSyntaxKind`
* `build`
* `GreenNodeBuilder`

View file

@ -1,6 +1,12 @@
[package]
[workspace]
members = [
"cstree",
"cstree-derive",
"test_suite",
]
[workspace.package]
edition = "2021"
name = "cstree"
version = "0.12.0-rc.0" # when updating, also update `#![doc(html_root_url)]`
authors = [
"Domenic Quirl <DomenicQuirl@pm.me>",
@ -10,70 +16,7 @@ description = "Library for generic lossless syntax trees"
license = "MIT OR Apache-2.0"
repository = "https://github.com/domenicquirl/cstree"
readme = "README.md"
rust-version = "1.68"
[profile.release]
debug = true
[dependencies]
text-size = "1.1.0"
fxhash = "0.2.1"
parking_lot = "0.12.1"
# Arc
triomphe = "0.1.7"
sptr = "0.3.2"
# Default Interner
indexmap = "1.9"
[dependencies.lasso]
version = "0.6"
features = ["inline-more"]
optional = true
[dependencies.salsa]
git = "https://github.com/salsa-rs/salsa/"
version = "0.1"
optional = true
package = "salsa-2022"
[dependencies.serde]
version = "1.0"
optional = true
default-features = false
features = ["derive", "std"]
[dev-dependencies]
m_lexer = "0.0.4"
serde_json = "1.0"
serde_test = "1.0"
crossbeam-utils = "0.8"
criterion = "0.3"
[[bench]]
name = "main"
harness = false
[features]
default = []
# Implementations of `serde::{De,}Serialize` for CSTrees.
serialize = ["serde", "lasso?/serialize"]
# Interoperability with the `lasso` interning crate.
# When enabled, `cstree`'s default interners will use `lasso` internally, too.
lasso_compat = ["lasso"]
# Additionally provide threadsafe interner types.
# Where applicable (and if the corresponding features are selected), provide compatibility
# implementations for multi-thread interners from other crates.
multi_threaded_interning = ["lasso_compat", "lasso/multi-threaded"]
# Interoperability with the `salsa` framework for incremental computation.
# Use this feature for "Salsa 2022".
# WARNING: This feature is considered unstable!
salsa_2022_compat = ["salsa"]
[[example]]
name = "salsa"
required-features = ["salsa_2022_compat"]
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "doc_cfg"]

View file

@ -38,7 +38,7 @@ concrete syntax trees as its output. We'll talk more about parsing below -- firs
to happen to go from input text to a `cstree` syntax tree:
1. Define an enumeration of the types of tokens (like keywords) and nodes (like "an expression")
that you want to have in your syntax and implement `Language`
that you want to have in your syntax and implement `Syntax`
2. Create a `GreenNodeBuilder` and call `start_node`, `token` and `finish_node` from your parser
@ -52,12 +52,12 @@ compound expression. They will, however, be allowed to write nested expressions
### Defining the language
First, we need to list the different part of our language's grammar.
We can do that using an `enum` with a unit variant for any terminal and non-terminal.
The `enum` needs to be convertible to a `u16`, so we use the `repr` attribute to ensure it uses the correct
The `enum` needs to be convertible to a `u32`, so we use the `repr` attribute to ensure it uses the correct
representation.
```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u16)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u32)]
enum SyntaxKind {
/* Tokens */
Int, // 42
@ -71,6 +71,15 @@ enum SyntaxKind {
}
```
For convenience when we're working with generic `cstree` types like `SyntaxNode`, we'll also give a
name to our syntax as a whole and add a type alias for it.
That way, we can match against `SyntaxKind`s using the original name, but use the more informative
`Node<Calculator>` to instantiate `cstree`'s types.
```rust
type Calculator = SyntaxKind;
```
Most of these are tokens to lex the input string into, like numbers (`Int`) and operators (`Plus`, `Minus`).
We only really need one type of node; expressions.
Our syntax tree's root node will have the special kind `Root`, all other nodes will be
@ -78,22 +87,16 @@ expressions containing a sequence of arithmetic operations potentially involving
expression nodes.
To use our `SyntaxKind`s with `cstree`, we need to tell it how to convert it back to just a number (the
`#[repr(u16)]` that we added) by implementing the `Language` trait. We can also tell `cstree` about tokens that
`#[repr(u32)]` that we added) by implementing the `Syntax` trait. We can also tell `cstree` about tokens that
always have the same text through the `static_text` method on the trait. This is useful for the operators and
parentheses, but not possible for numbers, since an integer token may be produced from the input `3`, but also from
other numbers like `7` or `12`. We implement `Language` on an empty type, just so we can give it a name.
other numbers like `7` or `12`. We implement `Syntax` on an empty type, just so we can give it a name.
```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Calculator;
impl Language for Calculator {
// The tokens and nodes we just defined
type Kind = SyntaxKind;
fn kind_from_raw(raw: RawSyntaxKind) -> Self::Kind {
// This just needs to be the inverse of `kind_to_raw`, but could also
// be an `impl TryFrom<u16> for SyntaxKind` or any other conversion.
impl Syntax for Calculator {
fn from_raw(raw: RawSyntaxKind) -> Self {
// This just needs to be the inverse of `into_raw`, but could also
// be an `impl TryFrom<u32> for SyntaxKind` or any other conversion.
match raw.0 {
0 => SyntaxKind::Int,
1 => SyntaxKind::Plus,
@ -106,12 +109,12 @@ impl Language for Calculator {
}
}
fn kind_to_raw(kind: Self::Kind) -> RawSyntaxKind {
RawSyntaxKind(kind as u16)
fn into_raw(self) -> RawSyntaxKind {
RawSyntaxKind(self as u32)
}
fn static_text(kind: Self::Kind) -> Option<&'static str> {
match kind {
fn static_text(self) -> Option<&'static str> {
match self {
SyntaxKind::Plus => Some("+"),
SyntaxKind::Minus => Some("-"),
SyntaxKind::LParen => Some("("),
@ -122,7 +125,14 @@ impl Language for Calculator {
}
```
#### Deriving `Syntax`
To save yourself the hassle of defining this conversion (and, perhaps more importantly, continually updating it
while your language's syntax is in flux), `cstree` includes a derive macro for `Syntax` when built with the `derive`
feature. With the macro, the `Syntax` trait implementation above can be replaced by simply adding
`#[derive(Syntax)]` to `SyntaxKind`.
### Parsing into a green tree
With that out of the way, we can start writing the parser for our expressions.
For the purposes of this introduction to `cstree`, I'll assume that there is a lexer that yields the following
tokens:

18
cstree-derive/Cargo.toml Normal file
View file

@ -0,0 +1,18 @@
[package]
name = "cstree_derive"
# Shared metadata is inherited from the workspace root `[workspace.package]` table.
edition.workspace = true
version.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
readme.workspace = true
rust-version.workspace = true
[lib]
name = "cstree_derive"
# This crate exports the `#[derive(Syntax)]` procedural macro.
proc-macro = true
[dependencies]
proc-macro2 = "1.0.56"
quote = "1.0.26"
syn = { version = "2.0.14" }

View file

@ -0,0 +1 @@
../LICENSE-APACHE

1
cstree-derive/LICENSE-MIT Symbolic link
View file

@ -0,0 +1 @@
../LICENSE-MIT

1
cstree-derive/README.md Symbolic link
View file

@ -0,0 +1 @@
../README.md

View file

@ -0,0 +1,56 @@
use std::{cell::RefCell, fmt, thread};
use quote::ToTokens;
/// Context to collect multiple errors and output them all after parsing in order to not abort
/// immediately on the first error.
///
/// Ensures that the errors are handled using [`check`](ErrorContext::check) by otherwise panicking
/// on `Drop`.
#[derive(Debug, Default)]
pub(crate) struct ErrorContext {
    // `Some` while errors may still be collected; `take`n (set to `None`) by `check`,
    // which is how `Drop` knows the errors were handled.
    errors: RefCell<Option<Vec<syn::Error>>>,
}
impl ErrorContext {
/// Create a new context.
///
/// This context contains no errors, but will still trigger a panic if it is not `check`ed.
pub fn new() -> Self {
ErrorContext {
errors: RefCell::new(Some(Vec::new())),
}
}
/// Add an error to the context that points to `source`.
pub fn error_at<S: ToTokens, T: fmt::Display>(&self, source: S, msg: T) {
self.errors
.borrow_mut()
.as_mut()
.unwrap()
// Transform `ToTokens` here so we don't monomorphize `new_spanned` so much.
.push(syn::Error::new_spanned(source.into_token_stream(), msg));
}
/// Add a `syn` parse error directly.
pub fn syn_error(&self, err: syn::Error) {
self.errors.borrow_mut().as_mut().unwrap().push(err);
}
/// Consume the context, producing a formatted error string if there are errors.
pub fn check(self) -> Result<(), Vec<syn::Error>> {
let errors = self.errors.borrow_mut().take().unwrap();
match errors.len() {
0 => Ok(()),
_ => Err(errors),
}
}
}
impl Drop for ErrorContext {
    fn drop(&mut self) {
        // Panic if the context is dropped without `check` having been called
        // (`errors` is still `Some`). Skipped while already panicking, so a
        // context dropped during unwinding does not cause a double panic/abort.
        if !thread::panicking() && self.errors.borrow().is_some() {
            panic!("forgot to check for errors");
        }
    }
}

74
cstree-derive/src/lib.rs Normal file
View file

@ -0,0 +1,74 @@
use errors::ErrorContext;
use parsing::SyntaxKindEnum;
use proc_macro2::TokenStream;
use quote::{quote, quote_spanned};
use syn::{parse_macro_input, spanned::Spanned, DeriveInput};
mod errors;
mod parsing;
mod symbols;
use symbols::*;
/// Derive macro for `cstree::Syntax`, including support for per-variant
/// `#[static_text("...")]` attributes.
// NOTE(review): the function name `language` is a holdover from when the trait was
// called `Language`; the user-facing derive name comes from the attribute below.
#[proc_macro_derive(Syntax, attributes(static_text))]
pub fn language(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let ast = parse_macro_input!(input as DeriveInput);
    // On any validation error, emit `compile_error!` invocations instead of an impl.
    expand_syntax(ast).unwrap_or_else(to_compile_errors).into()
}
/// Generates the `Syntax` trait implementation for the annotated enum.
///
/// Validation problems are collected in an [`ErrorContext`] and returned together,
/// so the user sees all errors at once instead of only the first.
fn expand_syntax(ast: DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
    let error_handler = ErrorContext::new();
    let Ok(syntax_kind_enum) = SyntaxKindEnum::parse_from_ast(&error_handler, &ast) else {
        return Err(error_handler.check().unwrap_err());
    };

    // The generated `from_raw` transmutes from `u32`, so the enum must be `#[repr(u32)]`.
    let repr_is_u32 = matches!(&syntax_kind_enum.repr, Some(repr) if repr == U32);
    if !repr_is_u32 {
        error_handler.error_at(
            syntax_kind_enum.source,
            "syntax kind definitions must be `#[repr(u32)]` to derive `Syntax`",
        );
    }
    error_handler.check()?;

    let name = &syntax_kind_enum.name;
    let variant_count = syntax_kind_enum.variants.len() as u32;

    // One match arm per variant, yielding its static text (or `None`).
    let static_texts = syntax_kind_enum.variants.iter().map(|variant| {
        let variant_name = &variant.name;
        let static_text = match variant.static_text.as_deref() {
            Some(text) => quote!(::core::option::Option::Some(#text)),
            None => quote!(::core::option::Option::None),
        };
        quote_spanned!(variant.source.span()=>
            #name :: #variant_name => #static_text,
        )
    });

    let trait_impl = quote_spanned! { syntax_kind_enum.source.span()=>
        #[automatically_derived]
        impl ::cstree::Syntax for #name {
            fn from_raw(raw: ::cstree::RawSyntaxKind) -> Self {
                assert!(raw.0 < #variant_count, "Invalid raw syntax kind: {}", raw.0);
                // Safety: discriminant is valid by the assert above
                unsafe { ::std::mem::transmute::<u32, #name>(raw.0) }
            }

            fn into_raw(self) -> ::cstree::RawSyntaxKind {
                ::cstree::RawSyntaxKind(self as u32)
            }

            fn static_text(self) -> ::core::option::Option<&'static str> {
                match self {
                    #( #static_texts )*
                }
            }
        }
    };
    Ok(trait_impl)
}
fn to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {
let compile_errors = errors.iter().map(syn::Error::to_compile_error);
quote!(#(#compile_errors)*)
}

View file

@ -0,0 +1,131 @@
mod attributes;
use syn::{punctuated::Punctuated, Token};
use crate::{errors::ErrorContext, symbols::*};
use self::attributes::Attr;
/// Convenience for recording errors inside `ErrorContext` instead of the `Err` variant of the `Result`.
pub(crate) type Result<T, E = ()> = std::result::Result<T, E>;
/// Parsed form of the `enum` on which `Syntax` is being derived.
pub(crate) struct SyntaxKindEnum<'i> {
    /// The enum's name.
    pub(crate) name: syn::Ident,
    /// The representation identifier from `#[repr(...)]`, if present (e.g. `u32`).
    pub(crate) repr: Option<syn::Ident>,
    /// The parsed variants, in declaration order.
    pub(crate) variants: Vec<SyntaxKindVariant<'i>>,
    /// The original derive input, used for error and codegen spans.
    pub(crate) source: &'i syn::DeriveInput,
}
impl<'i> SyntaxKindEnum<'i> {
    /// Parses the derive input, recording problems in `error_handler`.
    ///
    /// Returns `Err(())` only when the input is not an `enum` at all; all other
    /// issues are collected in the error context for later reporting.
    pub(crate) fn parse_from_ast(error_handler: &ErrorContext, item: &'i syn::DeriveInput) -> Result<Self> {
        let syn::Data::Enum(data) = &item.data else {
            error_handler.error_at(item, "`Syntax` can only be derived on enums");
            return Err(());
        };

        // Scan `#[repr(...)]` attributes for the representation identifier (e.g. `u32`).
        let mut repr = Attr::none(error_handler, REPR);
        for repr_attr in item.attrs.iter() {
            if !repr_attr.path().is_ident(&REPR) {
                continue;
            }
            let syn::Meta::List(nested) = &repr_attr.meta else {
                continue;
            };
            let Ok(args) = nested.parse_args_with(Punctuated::<syn::Meta, Token![,]>::parse_terminated) else {
                continue;
            };
            for meta in args {
                if let syn::Meta::Path(path) = meta {
                    if let Some(ident) = path.get_ident() {
                        repr.set(repr_attr, ident.clone());
                    }
                }
            }
        }

        let variants: Vec<_> = data
            .variants
            .iter()
            .map(|variant| SyntaxKindVariant::parse_from_ast(error_handler, variant))
            .collect();

        Ok(Self {
            name: item.ident.clone(),
            repr: repr.get(),
            variants,
            source: item,
        })
    }
}
/// Parsed form of a single syntax kind, i.e. one variant of the derive target enum.
pub(crate) struct SyntaxKindVariant<'i> {
    /// The variant's name.
    pub(crate) name: syn::Ident,
    /// The token's fixed source text from `#[static_text("...")]`, if given.
    pub(crate) static_text: Option<String>,
    /// The original variant, used for error and codegen spans.
    pub(crate) source: &'i syn::Variant,
}
impl<'i> SyntaxKindVariant<'i> {
    /// Parses a single enum variant, recording validation errors in `error_handler`.
    pub(crate) fn parse_from_ast(error_handler: &ErrorContext, variant: &'i syn::Variant) -> Self {
        // Only unit variants are allowed: the generated `from_raw` transmutes from a
        // plain `u32`, which cannot carry field data.
        if !matches!(&variant.fields, syn::Fields::Unit) {
            error_handler.error_at(variant, "syntax kinds with fields are not supported");
        }

        // Custom discriminants would break the dense `0..n` numbering that the
        // generated `from_raw` bounds check relies on.
        if variant.discriminant.is_some() {
            error_handler.error_at(
                variant,
                "syntax kinds are not allowed to have custom discriminant values",
            );
        }

        // Collect the (at most one) `#[static_text("...")]` attribute; duplicates
        // are reported by `Attr::set`.
        let mut static_text = Attr::none(error_handler, STATIC_TEXT);
        let texts = variant
            .attrs
            .iter()
            .flat_map(|attr| get_static_text(error_handler, attr));
        for text in texts {
            static_text.set(&text, text.value());
        }

        Self {
            name: variant.ident.clone(),
            static_text: static_text.get(),
            source: variant,
        }
    }
}
/// Extracts the string literal from a `#[static_text("...")]` attribute.
///
/// Returns `None` for attributes other than `static_text`, and also for malformed
/// uses of `static_text` — in the latter case an error is recorded first.
fn get_static_text(error_handler: &ErrorContext, attr: &syn::Attribute) -> Option<syn::LitStr> {
    if attr.path() != STATIC_TEXT {
        return None;
    }
    match &attr.meta {
        // Well-formed shape: `#[static_text("...")]` with a single string literal.
        syn::Meta::List(list) => match list.parse_args() {
            Ok(lit) => Some(lit),
            Err(e) => {
                error_handler.error_at(
                    list,
                    "argument to `static_text` must be a string literal: `#[static_text(\"...\")]`",
                );
                error_handler.syn_error(e);
                None
            }
        },
        // Bare `#[static_text]` without any text.
        syn::Meta::Path(_) => {
            error_handler.error_at(attr, "missing text for `static_text`: try `#[static_text(\"...\")]`");
            None
        }
        // The `#[static_text = "..."]` form is not supported.
        syn::Meta::NameValue(_) => {
            error_handler.error_at(
                attr,
                "`static_text` takes the text as a function argument: `#[static_text(\"...\")]`",
            );
            None
        }
    }
}

View file

@ -0,0 +1,59 @@
#![allow(unused)]
use super::*;
use proc_macro2::TokenStream;
use quote::ToTokens;
/// A slot for a single optional attribute value, remembering where the value was
/// set so that duplicate uses can be reported with a useful span.
#[derive(Debug)]
pub(crate) struct Attr<'i, T> {
    // Where duplicate-attribute errors are reported.
    error_handler: &'i ErrorContext,
    // The attribute's name, used in error messages.
    name: Symbol,
    // Tokens of the source that set `value`; empty until a value is set.
    tokens: TokenStream,
    // The parsed value, once one has been set.
    value: Option<T>,
}
impl<'i, T> Attr<'i, T> {
    /// Creates an empty slot for an attribute called `name`.
    pub(super) fn none(error_handler: &'i ErrorContext, name: Symbol) -> Self {
        Attr {
            error_handler,
            name,
            tokens: TokenStream::new(),
            value: None,
        }
    }

    /// Stores `value`, or reports a duplicate-attribute error at `source` if a
    /// value was already present.
    pub(super) fn set<S: ToTokens>(&mut self, source: S, value: T) {
        let tokens = source.into_token_stream();
        if self.value.is_none() {
            self.tokens = tokens;
            self.value = Some(value);
        } else {
            self.error_handler
                .error_at(tokens, format!("duplicate attribute: `{}`", self.name));
        }
    }

    /// Like [`set`](Attr::set), but a `None` value is a no-op.
    pub(super) fn set_opt<S: ToTokens>(&mut self, source: S, value: Option<T>) {
        if let Some(value) = value {
            self.set(source, value);
        }
    }

    /// Stores `value` only if no value is present yet; never reports an error.
    pub(super) fn set_if_none(&mut self, value: T) {
        if self.value.is_none() {
            self.value = Some(value);
        }
    }

    /// Consumes the slot, returning the value if one was set.
    pub(super) fn get(self) -> Option<T> {
        self.value
    }

    /// Consumes the slot, returning the value together with the tokens that set it.
    pub(super) fn get_with_tokens(self) -> Option<(TokenStream, T)> {
        let Attr { tokens, value, .. } = self;
        value.map(|v| (tokens, v))
    }
}

View file

@ -0,0 +1,50 @@
use std::fmt::{self};
use syn::{Ident, Path};
/// A statically-known identifier name that can be compared against `syn`
/// identifiers and paths (see the `PartialEq` impls below).
#[derive(Copy, Clone)]
pub struct Symbol(&'static str);

/// The `static_text` attribute name.
pub const STATIC_TEXT: Symbol = Symbol("static_text");
/// The `repr` attribute name.
pub const REPR: Symbol = Symbol("repr");
/// The `u32` representation identifier.
pub const U32: Symbol = Symbol("u32");

impl Symbol {
    /// Creates a symbol from a static string.
    pub const fn new(text: &'static str) -> Self {
        Self(text)
    }
}
// Allow comparing `syn::Ident`s (and references to them) directly against a `Symbol`.
impl PartialEq<Symbol> for Ident {
    fn eq(&self, word: &Symbol) -> bool {
        self == word.0
    }
}

impl<'a> PartialEq<Symbol> for &'a Ident {
    fn eq(&self, word: &Symbol) -> bool {
        *self == word.0
    }
}

// Allow comparing `syn::Path`s against a `Symbol`; only a path consisting of the
// single matching identifier compares equal.
impl PartialEq<Symbol> for Path {
    fn eq(&self, word: &Symbol) -> bool {
        self.is_ident(word.0)
    }
}

impl<'a> PartialEq<Symbol> for &'a Path {
    fn eq(&self, word: &Symbol) -> bool {
        self.is_ident(word.0)
    }
}
impl fmt::Display for Symbol {
    /// Writes the symbol's text verbatim.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "{}", self.0)
    }
}

impl fmt::Debug for Symbol {
    /// Formats as `Symbol("...")`.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.debug_tuple("Symbol").field(&self.0).finish()
    }
}

79
cstree/Cargo.toml Normal file
View file

@ -0,0 +1,79 @@
[package]
name = "cstree"
# Shared metadata is inherited from the workspace root `[workspace.package]` table.
edition.workspace = true
version.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
readme.workspace = true
rust-version.workspace = true
[dependencies]
text-size = "1.1.0"
fxhash = "0.2.1"
parking_lot = "0.12.1"
# Arc
triomphe = "0.1.7"
sptr = "0.3.2"
# Default Interner
indexmap = "1.9"
# Proc-macro crate providing `#[derive(Syntax)]`; pulled in via the `derive` feature.
[dependencies.cstree_derive]
path = "../cstree-derive"
optional = true
[dependencies.lasso]
version = "0.6"
features = ["inline-more"]
optional = true
[dependencies.salsa]
git = "https://github.com/salsa-rs/salsa/"
version = "0.1"
optional = true
package = "salsa-2022"
[dependencies.serde]
version = "1.0"
optional = true
default-features = false
features = ["derive", "std"]
[dev-dependencies]
m_lexer = "0.0.4"
serde_json = "1.0"
serde_test = "1.0"
crossbeam-utils = "0.8"
criterion = "0.3"
[[bench]]
name = "main"
harness = false
[features]
default = []
# Derive macro for `Syntax`
derive = ["dep:cstree_derive"]
# Implementations of `serde::{De,}Serialize` for CSTrees.
serialize = ["serde", "lasso?/serialize"]
# Interoperability with the `lasso` interning crate.
# When enabled, `cstree`'s default interners will use `lasso` internally, too.
lasso_compat = ["lasso"]
# Additionally provide threadsafe interner types.
# Where applicable (and if the corresponding features are selected), provide compatibility
# implementations for multi-thread interners from other crates.
multi_threaded_interning = ["lasso_compat", "lasso/multi-threaded"]
# Interoperability with the `salsa` framework for incremental computation.
# Use this feature for "Salsa 2022".
# WARNING: This feature is considered unstable!
salsa_2022_compat = ["salsa"]
[[example]]
name = "salsa"
required-features = ["salsa_2022_compat"]
# docs.rs builds with all features and `--cfg doc_cfg` for feature annotations.
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "doc_cfg"]

1
cstree/LICENSE-APACHE Symbolic link
View file

@ -0,0 +1 @@
../LICENSE-APACHE

1
cstree/LICENSE-MIT Symbolic link
View file

@ -0,0 +1 @@
../LICENSE-MIT

1
cstree/README.md Symbolic link
View file

@ -0,0 +1 @@
../README.md

View file

@ -3,9 +3,8 @@ use cstree::{
build::*,
green::GreenNode,
interning::{new_interner, Interner},
Language, RawSyntaxKind,
RawSyntaxKind, Syntax,
};
use std::{fmt, hash::Hash};
#[derive(Debug)]
pub enum Element<'s> {
@ -14,37 +13,14 @@ pub enum Element<'s> {
Plus,
}
#[derive(Debug, Clone, Copy)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TestKind {
Element { n: u32 },
Plus,
}
pub trait Bool: Hash + Ord + fmt::Debug + Copy {
const VALUE: bool;
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct TestLang<T: Bool> {
_marker: std::marker::PhantomData<T>,
}
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct NoStaticText;
impl Bool for NoStaticText {
const VALUE: bool = false;
}
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct UseStaticText;
impl Bool for UseStaticText {
const VALUE: bool = true;
}
impl<T: Bool> Language for TestLang<T> {
type Kind = TestKind;
fn kind_from_raw(raw: RawSyntaxKind) -> Self::Kind {
impl Syntax for TestKind {
fn from_raw(raw: RawSyntaxKind) -> Self {
if raw.0 == u32::MAX - 1 {
TestKind::Plus
} else {
@ -52,40 +28,37 @@ impl<T: Bool> Language for TestLang<T> {
}
}
fn kind_to_raw(kind: Self::Kind) -> RawSyntaxKind {
match kind {
fn into_raw(self) -> RawSyntaxKind {
match self {
TestKind::Element { n } => RawSyntaxKind(n),
TestKind::Plus => RawSyntaxKind(u32::MAX - 1),
}
}
fn static_text(kind: Self::Kind) -> Option<&'static str> {
if !<T as Bool>::VALUE {
return None;
}
match kind {
fn static_text(self) -> Option<&'static str> {
match self {
TestKind::Plus => Some("+"),
TestKind::Element { .. } => None,
}
}
}
pub fn build_tree_with_cache<T: Bool, I>(root: &Element<'_>, cache: &mut NodeCache<'_, I>) -> GreenNode
pub fn build_tree_with_cache<I>(root: &Element<'_>, cache: &mut NodeCache<'_, I>, use_static_text: bool) -> GreenNode
where
I: Interner,
{
let mut builder: GreenNodeBuilder<TestLang<T>, I> = GreenNodeBuilder::with_cache(cache);
build_recursive(root, &mut builder, 0);
let mut builder: GreenNodeBuilder<TestKind, I> = GreenNodeBuilder::with_cache(cache);
build_recursive(root, &mut builder, 0, use_static_text);
let (node, cache) = builder.finish();
assert!(cache.is_none());
node
}
pub fn build_recursive<T: Bool, I>(
pub fn build_recursive<I>(
root: &Element<'_>,
builder: &mut GreenNodeBuilder<'_, '_, TestLang<T>, I>,
builder: &mut GreenNodeBuilder<'_, '_, TestKind, I>,
mut from: u32,
use_static_text: bool,
) -> u32
where
I: Interner,
@ -94,13 +67,16 @@ where
Element::Node(children) => {
builder.start_node(TestKind::Element { n: from });
for child in children {
from = build_recursive(child, builder, from + 1);
from = build_recursive(child, builder, from + 1, use_static_text);
}
builder.finish_node();
}
Element::Token(text) => {
builder.token(TestKind::Element { n: from }, text);
}
Element::Plus if use_static_text => {
builder.static_token(TestKind::Plus);
}
Element::Plus => {
builder.token(TestKind::Plus, "+");
}
@ -132,14 +108,14 @@ pub fn create(c: &mut Criterion) {
group.bench_function("with static text", |b| {
b.iter(|| {
let tree = build_tree_with_cache::<UseStaticText, _>(&tree, &mut cache);
let tree = build_tree_with_cache(&tree, &mut cache, true);
black_box(tree);
})
});
group.bench_function("without static text", |b| {
b.iter(|| {
let tree = build_tree_with_cache::<NoStaticText, _>(&tree, &mut cache);
let tree = build_tree_with_cache(&tree, &mut cache, false);
black_box(tree);
})
});

View file

@ -13,17 +13,21 @@
//! - "+" Token(Add)
//! - "4" Token(Number)
use cstree::{build::GreenNodeBuilder, interning::Resolver, util::NodeOrToken};
use cstree::{build::GreenNodeBuilder, interning::Resolver, util::NodeOrToken, Syntax};
use std::iter::Peekable;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
#[repr(u32)]
enum SyntaxKind {
Whitespace = 0,
Whitespace,
#[static_text("+")]
Add,
#[static_text("-")]
Sub,
#[static_text("*")]
Mul,
#[static_text("/")]
Div,
Number,
@ -31,6 +35,7 @@ enum SyntaxKind {
Operation,
Root,
}
type MySyntax = SyntaxKind;
use SyntaxKind::*;
impl From<SyntaxKind> for cstree::RawSyntaxKind {
@ -39,40 +44,15 @@ impl From<SyntaxKind> for cstree::RawSyntaxKind {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum Lang {}
impl cstree::Language for Lang {
type Kind = SyntaxKind;
fn kind_from_raw(raw: cstree::RawSyntaxKind) -> Self::Kind {
assert!(raw.0 <= Root as u32);
unsafe { std::mem::transmute::<u32, SyntaxKind>(raw.0) }
}
fn kind_to_raw(kind: Self::Kind) -> cstree::RawSyntaxKind {
kind.into()
}
fn static_text(kind: Self::Kind) -> Option<&'static str> {
match kind {
Add => Some("+"),
Sub => Some("-"),
Mul => Some("*"),
Div => Some("/"),
_ => None,
}
}
}
type SyntaxNode = cstree::syntax::SyntaxNode<Lang>;
type SyntaxNode = cstree::syntax::SyntaxNode<MySyntax>;
#[allow(unused)]
type SyntaxToken = cstree::syntax::SyntaxToken<Lang>;
type SyntaxToken = cstree::syntax::SyntaxToken<MySyntax>;
#[allow(unused)]
type SyntaxElement = cstree::util::NodeOrToken<SyntaxNode, SyntaxToken>;
type SyntaxElementRef<'a> = cstree::util::NodeOrToken<&'a SyntaxNode, &'a SyntaxToken>;
struct Parser<'input, I: Iterator<Item = (SyntaxKind, &'input str)>> {
builder: GreenNodeBuilder<'static, 'static, Lang>,
builder: GreenNodeBuilder<'static, 'static, MySyntax>,
iter: Peekable<I>,
}
impl<'input, I: Iterator<Item = (SyntaxKind, &'input str)>> Parser<'input, I> {

View file

@ -6,7 +6,7 @@ use cstree::{
syntax::{ResolvedElementRef, ResolvedNode},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u32)]
pub enum SyntaxKind {
/* Tokens */
@ -19,16 +19,12 @@ pub enum SyntaxKind {
Expr,
Root,
}
type Calculator = SyntaxKind;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Calculator;
impl Language for Calculator {
// The tokens and nodes we just defined
type Kind = SyntaxKind;
fn kind_from_raw(raw: RawSyntaxKind) -> Self::Kind {
// This just needs to be the inverse of `kind_to_raw`, but could also
// be an `impl TryFrom<u16> for SyntaxKind` or any other conversion.
impl Syntax for Calculator {
fn from_raw(raw: RawSyntaxKind) -> Self {
// This just needs to be the inverse of `into_raw`, but could also
// be an `impl TryFrom<u32> for SyntaxKind` or any other conversion.
match raw.0 {
0 => SyntaxKind::Int,
1 => SyntaxKind::Plus,
@ -41,12 +37,12 @@ impl Language for Calculator {
}
}
fn kind_to_raw(kind: Self::Kind) -> RawSyntaxKind {
RawSyntaxKind(kind as u32)
fn into_raw(self) -> RawSyntaxKind {
RawSyntaxKind(self as u32)
}
fn static_text(kind: Self::Kind) -> Option<&'static str> {
match kind {
fn static_text(self) -> Option<&'static str> {
match self {
SyntaxKind::Plus => Some("+"),
SyntaxKind::Minus => Some("-"),
SyntaxKind::LParen => Some("("),

View file

@ -7,12 +7,23 @@
//! You may want to follow the conceptual overview of the design alongside this tutorial:
//! https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/dev/syntax.md
/// Let's start with defining all kinds of tokens and composite nodes.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
use std::collections::VecDeque;
/// Let's start with defining all kinds of syntactical elements that we want to have in our
/// language's grammar. These correspond to tokens and composite nodes in the syntax tree that we
/// want to parse the S-expressions into.
use cstree::Syntax;
/// Implementing the `Syntax` trait teaches `cstree` to convert our `SyntaxKind`s to its internal
/// types, allowing for a nicer `SyntaxNode` API where "kinds" are values from our `enum` instead of
/// plain `u32` values.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
#[repr(u32)]
pub enum SyntaxKind {
LParen = 0, // '('
RParen, // ')'
#[static_text("(")]
LParen, // '('
#[static_text(")")]
RParen, // ')'
Word, // '+', '15'
Whitespace, // whitespaces is explicit
Error, // as well as errors
@ -22,53 +33,20 @@ pub enum SyntaxKind {
Atom, // `+`, `15`, wraps a WORD token
Root, // top-level node: a list of s-expressions
}
use std::collections::VecDeque;
/// When matching against the kind of the node, `SyntaxKind::Kind` is a fine name to use.
/// For specifying generic arguments like `Node<MySyntax>`, we'll use this alias to refer to the
/// syntax as a whole.
type SExprSyntax = SyntaxKind;
use SyntaxKind::*;
/// Some boilerplate is needed, as cstree represents kinds as `struct SyntaxKind(u16)` internally,
/// in order to not need the user's `enum SyntaxKind` as a type parameter.
///
/// First, to easily pass the enum variants into cstree via `.into()`:
impl From<SyntaxKind> for cstree::RawSyntaxKind {
fn from(kind: SyntaxKind) -> Self {
Self(kind as u32)
}
}
/// Second, implementing the `Language` trait teaches cstree to convert between these two SyntaxKind
/// types, allowing for a nicer SyntaxNode API where "kinds" are values from our `enum SyntaxKind`,
/// instead of plain u16 values.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Lang {}
impl cstree::Language for Lang {
type Kind = SyntaxKind;
fn kind_from_raw(raw: cstree::RawSyntaxKind) -> Self::Kind {
assert!(raw.0 <= Root as u32);
unsafe { std::mem::transmute::<u32, SyntaxKind>(raw.0) }
}
fn kind_to_raw(kind: Self::Kind) -> cstree::RawSyntaxKind {
kind.into()
}
fn static_text(kind: Self::Kind) -> Option<&'static str> {
match kind {
LParen => Some("("),
RParen => Some(")"),
_ => None,
}
}
}
/// GreenNode is an immutable tree, which caches identical nodes and tokens, but doesn't contain
/// `GreenNode` is an immutable tree, which caches identical nodes and tokens, but doesn't contain
/// offsets and parent pointers.
/// cstree also deduplicates the actual source string in addition to the tree nodes, so we will need
/// the Resolver to get the real text back from the interned representation.
use cstree::{green::GreenNode, interning::Resolver, Language};
/// `cstree` also deduplicates the actual source string in addition to the tree nodes, so we will need
/// the `Resolver` to get the real text back from the interned representation.
use cstree::{green::GreenNode, interning::Resolver};
/// You can construct GreenNodes by hand, but a builder is helpful for top-down parsers: it maintains
/// You can construct `GreenNode`s by hand, but a builder is helpful for top-down parsers: it maintains
/// a stack of currently in-progress nodes.
use cstree::build::GreenNodeBuilder;
@ -89,13 +67,13 @@ fn parse(text: &str) -> Parse<impl Resolver> {
/// input tokens, including whitespace.
tokens: VecDeque<(SyntaxKind, &'input str)>,
/// the in-progress green tree.
builder: GreenNodeBuilder<'static, 'static, Lang>,
builder: GreenNodeBuilder<'static, 'static, SExprSyntax>,
/// the list of syntax errors we've accumulated so far.
errors: Vec<String>,
}
/// The outcome of parsing a single S-expression
enum SexpRes {
enum SExprResult {
/// An S-expression (i.e. an atom, or a list) was successfully parsed
Ok,
/// Nothing was parsed, as no significant tokens remained
@ -111,14 +89,14 @@ fn parse(text: &str) -> Parse<impl Resolver> {
// Parse zero or more S-expressions
loop {
match self.sexp() {
SexpRes::Eof => break,
SexpRes::RParen => {
SExprResult::Eof => break,
SExprResult::RParen => {
self.builder.start_node(Error);
self.errors.push("unmatched `)`".to_string());
self.bump(); // be sure to advance even in case of an error, so as to not get stuck
self.builder.finish_node();
}
SexpRes::Ok => {}
SExprResult::Ok => {}
}
}
// Don't forget to eat *trailing* whitespace
@ -144,28 +122,28 @@ fn parse(text: &str) -> Parse<impl Resolver> {
self.bump(); // '('
loop {
match self.sexp() {
SexpRes::Eof => {
SExprResult::Eof => {
self.errors.push("expected `)`".to_string());
break;
}
SexpRes::RParen => {
SExprResult::RParen => {
self.bump();
break;
}
SexpRes::Ok => {}
SExprResult::Ok => {}
}
}
// close the list node
self.builder.finish_node();
}
fn sexp(&mut self) -> SexpRes {
fn sexp(&mut self) -> SExprResult {
// Eat leading whitespace
self.skip_ws();
// Either a list, an atom, a closing paren, or an eof.
let t = match self.current() {
None => return SexpRes::Eof,
Some(RParen) => return SexpRes::RParen,
None => return SExprResult::Eof,
Some(RParen) => return SExprResult::RParen,
Some(t) => t,
};
match t {
@ -178,7 +156,7 @@ fn parse(text: &str) -> Parse<impl Resolver> {
Error => self.bump(),
_ => unreachable!(),
}
SexpRes::Ok
SExprResult::Ok
}
/// Advance one token, adding it to the current branch of the tree builder.
@ -208,13 +186,13 @@ fn parse(text: &str) -> Parse<impl Resolver> {
}
/// To work with the parse results we need a view into the green tree - the syntax tree.
/// It is also immutable, like a GreenNode, but it contains parent pointers, offsets, and has
/// It is also immutable, like a `GreenNode`, but it contains parent pointers, offsets, and has
/// identity semantics.
type SyntaxNode = cstree::syntax::SyntaxNode<Lang>;
type SyntaxNode = cstree::syntax::SyntaxNode<SExprSyntax>;
#[allow(unused)]
type SyntaxToken = cstree::syntax::SyntaxToken<Lang>;
type SyntaxToken = cstree::syntax::SyntaxToken<SExprSyntax>;
#[allow(unused)]
type SyntaxElement = cstree::syntax::SyntaxElement<Lang>;
type SyntaxElement = cstree::syntax::SyntaxElement<SExprSyntax>;
impl<I> Parse<I> {
fn syntax(&self) -> SyntaxNode {
@ -292,21 +270,21 @@ mod ast {
ast_node!(List, List);
}
// Sexp is slightly different because it can be both an atom and a list, so let's do it by hand.
// `SExpr` is slightly different because it can be both an atom and a list, so let's do it by hand.
#[derive(PartialEq, Eq, Hash)]
#[repr(transparent)]
struct Sexp(SyntaxNode);
struct SExpr(SyntaxNode);
enum SexpKind {
Atom(ast::Atom),
List(ast::List),
}
impl Sexp {
impl SExpr {
fn cast(node: SyntaxNode) -> Option<Self> {
use ast::*;
if Atom::cast(node.clone()).is_some() || List::cast(node.clone()).is_some() {
Some(Sexp(node))
Some(SExpr(node))
} else {
None
}
@ -323,8 +301,8 @@ impl Sexp {
// Let's enhance AST nodes with ancillary functions and eval.
impl ast::Root {
fn sexps(&self) -> impl Iterator<Item = Sexp> + '_ {
self.0.children().cloned().filter_map(Sexp::cast)
fn sexps(&self) -> impl Iterator<Item = SExpr> + '_ {
self.0.children().cloned().filter_map(SExpr::cast)
}
}
@ -355,7 +333,7 @@ impl ast::Atom {
use cstree::util::NodeOrToken;
match self.0.green().children().next() {
Some(NodeOrToken::Token(token)) => Lang::static_text(Lang::kind_from_raw(token.kind()))
Some(NodeOrToken::Token(token)) => SExprSyntax::static_text(SExprSyntax::from_raw(token.kind()))
.or_else(|| token.text(resolver))
.unwrap(),
_ => unreachable!(),
@ -364,8 +342,8 @@ impl ast::Atom {
}
impl ast::List {
fn sexps(&self) -> impl Iterator<Item = Sexp> + '_ {
self.0.children().cloned().filter_map(Sexp::cast)
fn sexps(&self) -> impl Iterator<Item = SExpr> + '_ {
self.0.children().cloned().filter_map(SExpr::cast)
}
fn eval(&self, resolver: &impl Resolver) -> Option<i64> {
@ -386,7 +364,7 @@ impl ast::List {
}
}
impl Sexp {
impl SExpr {
fn eval(&self, resolver: &impl Resolver) -> Option<i64> {
match self.kind() {
SexpKind::Atom(atom) => atom.eval(resolver),
@ -418,10 +396,10 @@ nan
assert_eq!(res, vec![Some(92), Some(92), None, None, Some(92),])
}
/// Split the input string into a flat list of tokens (such as L_PAREN, WORD, and WHITESPACE)
/// Split the input string into a flat list of tokens (such as `LParen`, `Word`, and `Whitespace`)
fn lex(text: &str) -> VecDeque<(SyntaxKind, &str)> {
fn tok(t: SyntaxKind) -> m_lexer::TokenKind {
m_lexer::TokenKind(cstree::RawSyntaxKind::from(t).0 as u16)
m_lexer::TokenKind(t.into_raw().0 as u16)
}
fn kind(t: m_lexer::TokenKind) -> SyntaxKind {
match t.0 {

View file

@ -34,7 +34,7 @@ fn main() {
let interner = db.as_interner();
let mut shared_interner = &interner;
let mut builder: GreenNodeBuilder<TestLang, _> = GreenNodeBuilder::with_interner(&mut shared_interner);
let mut builder: GreenNodeBuilder<MySyntax, _> = GreenNodeBuilder::with_interner(&mut shared_interner);
let (tree, _no_interner_because_it_was_borrowed) = {
builder.start_node(TestSyntaxKind::Plus);
builder.token(TestSyntaxKind::Float, "2.05");
@ -45,6 +45,6 @@ fn main() {
builder.finish_node();
builder.finish()
};
let tree: SyntaxNode<TestLang> = SyntaxNode::new_root(tree);
let tree: SyntaxNode<MySyntax> = SyntaxNode::new_root(tree);
assert_eq!(tree.resolve_text(shared_interner), "2.05 + 7.32");
}

View file

@ -8,7 +8,7 @@ use crate::{
interning::{new_interner, Interner, TokenInterner, TokenKey},
util::NodeOrToken,
utility_types::MaybeOwned,
Language, RawSyntaxKind,
RawSyntaxKind, Syntax,
};
use super::{node::GreenNodeHead, token::GreenTokenData};
@ -35,12 +35,12 @@ impl NodeCache<'static> {
/// tokens. To re-use an existing interner, see [`with_interner`](NodeCache::with_interner).
/// # Examples
/// ```
/// # use cstree::testing::{*, Language as _};
/// # use cstree::testing::*;
/// use cstree::build::NodeCache;
///
/// // Build a tree
/// let mut cache = NodeCache::new();
/// let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::with_cache(&mut cache);
/// let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::with_cache(&mut cache);
/// # builder.start_node(Root);
/// # builder.token(Int, "42");
/// # builder.finish_node();
@ -48,9 +48,9 @@ impl NodeCache<'static> {
/// let (tree, _) = builder.finish();
///
/// // Check it out!
/// assert_eq!(tree.kind(), MyLanguage::kind_to_raw(Root));
/// assert_eq!(tree.kind(), MySyntax::into_raw(Root));
/// let int = tree.children().next().unwrap();
/// assert_eq!(int.kind(), MyLanguage::kind_to_raw(Int));
/// assert_eq!(int.kind(), MySyntax::into_raw(Int));
/// ```
pub fn new() -> Self {
Self {
@ -75,14 +75,14 @@ where
/// (strings) across tokens.
/// # Examples
/// ```
/// # use cstree::testing::{*, Language as _};
/// # use cstree::testing::*;
/// # use cstree::interning::*;
/// use cstree::build::NodeCache;
///
/// // Create the builder from a custom interner
/// let mut interner = new_interner();
/// let mut cache = NodeCache::with_interner(&mut interner);
/// let mut builder: GreenNodeBuilder<MyLanguage, TokenInterner> =
/// let mut builder: GreenNodeBuilder<MySyntax, TokenInterner> =
/// GreenNodeBuilder::with_cache(&mut cache);
///
/// // Construct the tree
@ -93,9 +93,9 @@ where
/// let (tree, _) = builder.finish();
///
/// // Use the tree
/// assert_eq!(tree.kind(), MyLanguage::kind_to_raw(Root));
/// assert_eq!(tree.kind(), MySyntax::into_raw(Root));
/// let int = tree.children().next().unwrap();
/// assert_eq!(int.kind(), MyLanguage::kind_to_raw(Int));
/// assert_eq!(int.kind(), MySyntax::into_raw(Int));
/// assert_eq!(int.as_token().unwrap().text(&interner), Some("42"));
/// ```
#[inline]
@ -111,14 +111,14 @@ where
/// (strings) across tokens.
/// # Examples
/// ```
/// # use cstree::testing::{*, Language as _};
/// # use cstree::testing::*;
/// # use cstree::interning::*;
/// use cstree::build::NodeCache;
///
/// // Create the builder from a custom interner
/// let mut interner = new_interner();
/// let cache = NodeCache::from_interner(interner);
/// let mut builder: GreenNodeBuilder<MyLanguage, TokenInterner> =
/// let mut builder: GreenNodeBuilder<MySyntax, TokenInterner> =
/// GreenNodeBuilder::from_cache(cache);
///
/// // Construct the tree
@ -130,9 +130,9 @@ where
///
/// // Use the tree
/// let interner = cache.unwrap().into_interner().unwrap();
/// assert_eq!(tree.kind(), MyLanguage::kind_to_raw(Root));
/// assert_eq!(tree.kind(), MySyntax::into_raw(Root));
/// let int = tree.children().next().unwrap();
/// assert_eq!(int.kind(), MyLanguage::kind_to_raw(Int));
/// assert_eq!(int.kind(), MySyntax::into_raw(Int));
/// assert_eq!(int.as_token().unwrap().text(&interner), Some("42"));
/// ```
#[inline]
@ -177,9 +177,9 @@ where
self.interner.into_owned()
}
fn node<L: Language>(&mut self, kind: L::Kind, all_children: &mut Vec<GreenElement>, offset: usize) -> GreenNode {
fn node<S: Syntax>(&mut self, kind: S, all_children: &mut Vec<GreenElement>, offset: usize) -> GreenNode {
// NOTE: this fn must remove all children starting at `first_child` from `all_children` before returning
let kind = L::kind_to_raw(kind);
let kind = S::into_raw(kind);
let mut hasher = FxHasher32::default();
let mut text_len: TextSize = 0.into();
for child in &all_children[offset..] {
@ -229,9 +229,9 @@ where
.clone()
}
fn token<L: Language>(&mut self, kind: L::Kind, text: Option<TokenKey>, len: u32) -> GreenToken {
fn token<S: Syntax>(&mut self, kind: S, text: Option<TokenKey>, len: u32) -> GreenToken {
let text_len = TextSize::from(len);
let kind = L::kind_to_raw(kind);
let kind = S::into_raw(kind);
let data = GreenTokenData { kind, text, text_len };
self.tokens
.entry(data)
@ -253,29 +253,29 @@ pub struct Checkpoint(usize);
///
/// # Examples
/// ```
/// # use cstree::testing::{*, Language as _};
/// # use cstree::testing::*;
/// // Build a tree
/// let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::new();
/// let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::new();
/// builder.start_node(Root);
/// builder.token(Int, "42");
/// builder.finish_node();
/// let (tree, cache) = builder.finish();
///
/// // Check it out!
/// assert_eq!(tree.kind(), MyLanguage::kind_to_raw(Root));
/// assert_eq!(tree.kind(), MySyntax::into_raw(Root));
/// let int = tree.children().next().unwrap();
/// assert_eq!(int.kind(), MyLanguage::kind_to_raw(Int));
/// assert_eq!(int.kind(), MySyntax::into_raw(Int));
/// let resolver = cache.unwrap().into_interner().unwrap();
/// assert_eq!(int.as_token().unwrap().text(&resolver), Some("42"));
/// ```
#[derive(Debug)]
pub struct GreenNodeBuilder<'cache, 'interner, L: Language, I = TokenInterner> {
pub struct GreenNodeBuilder<'cache, 'interner, S: Syntax, I = TokenInterner> {
cache: MaybeOwned<'cache, NodeCache<'interner, I>>,
parents: Vec<(L::Kind, usize)>,
parents: Vec<(S, usize)>,
children: Vec<GreenElement>,
}
impl<L: Language> GreenNodeBuilder<'static, 'static, L> {
impl<S: Syntax> GreenNodeBuilder<'static, 'static, S> {
/// Creates new builder with an empty [`NodeCache`].
pub fn new() -> Self {
Self {
@ -286,15 +286,15 @@ impl<L: Language> GreenNodeBuilder<'static, 'static, L> {
}
}
impl<L: Language> Default for GreenNodeBuilder<'static, 'static, L> {
impl<S: Syntax> Default for GreenNodeBuilder<'static, 'static, S> {
fn default() -> Self {
Self::new()
}
}
impl<'cache, 'interner, L, I> GreenNodeBuilder<'cache, 'interner, L, I>
impl<'cache, 'interner, S, I> GreenNodeBuilder<'cache, 'interner, S, I>
where
L: Language,
S: Syntax,
I: Interner<TokenKey>,
{
/// Reusing a [`NodeCache`] between multiple builders saves memory, as it allows to structurally
@ -312,11 +312,11 @@ where
/// The `cache` given will be returned on [`finish`](GreenNodeBuilder::finish).
/// # Examples
/// ```
/// # use cstree::testing::{*, Language as _};
/// # use cstree::testing::*;
/// # use cstree::build::*;
/// // Construct a builder from our own cache
/// let cache = NodeCache::new();
/// let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::from_cache(cache);
/// let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::from_cache(cache);
///
/// // Build a tree
/// # builder.start_node(Root);
@ -327,9 +327,9 @@ where
///
/// // Use the tree
/// let interner = cache.unwrap().into_interner().unwrap();
/// assert_eq!(tree.kind(), MyLanguage::kind_to_raw(Root));
/// assert_eq!(tree.kind(), MySyntax::into_raw(Root));
/// let int = tree.children().next().unwrap();
/// assert_eq!(int.kind(), MyLanguage::kind_to_raw(Int));
/// assert_eq!(int.kind(), MySyntax::into_raw(Int));
/// assert_eq!(int.as_token().unwrap().text(&interner), Some("42"));
/// ```
pub fn from_cache(cache: NodeCache<'interner, I>) -> Self {
@ -377,7 +377,7 @@ where
/// # use cstree::testing::*;
/// # use cstree::build::*;
/// # use cstree::interning::*;
/// let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::new();
/// let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::new();
/// let interner = builder.interner_mut();
/// let key = interner.get_or_intern("foo");
/// assert_eq!(interner.resolve(key), "foo");
@ -392,19 +392,19 @@ where
/// ## Panics
/// In debug mode, if `kind` has static text, this function will verify that `text` matches that text.
#[inline]
pub fn token(&mut self, kind: L::Kind, text: &str) {
let token = match L::static_text(kind) {
pub fn token(&mut self, kind: S, text: &str) {
let token = match S::static_text(kind) {
Some(static_text) => {
debug_assert_eq!(
static_text, text,
r#"Received `{kind:?}` token which should have text "{static_text}", but "{text}" was given."#
);
self.cache.token::<L>(kind, None, static_text.len() as u32)
self.cache.token::<S>(kind, None, static_text.len() as u32)
}
None => {
let len = text.len() as u32;
let text = self.cache.intern(text);
self.cache.token::<L>(kind, Some(text), len)
self.cache.token::<S>(kind, Some(text), len)
}
};
self.children.push(token.into());
@ -420,15 +420,15 @@ where
/// ## Panics
/// If `kind` does not have static text, i.e., `S::static_text(kind)` returns `None`.
#[inline]
pub fn static_token(&mut self, kind: L::Kind) {
let static_text = L::static_text(kind).unwrap_or_else(|| panic!("Missing static text for '{kind:?}'"));
let token = self.cache.token::<L>(kind, None, static_text.len() as u32);
pub fn static_token(&mut self, kind: S) {
let static_text = S::static_text(kind).unwrap_or_else(|| panic!("Missing static text for '{kind:?}'"));
let token = self.cache.token::<S>(kind, None, static_text.len() as u32);
self.children.push(token.into());
}
/// Start new node of the given `kind` and make it current.
#[inline]
pub fn start_node(&mut self, kind: L::Kind) {
pub fn start_node(&mut self, kind: S) {
let len = self.children.len();
self.parents.push((kind, len));
}
@ -438,7 +438,7 @@ where
pub fn finish_node(&mut self) {
let (kind, first_child) = self.parents.pop().unwrap();
// NOTE: we rely on the node cache to remove all children starting at `first_child` from `self.children`
let node = self.cache.node::<L>(kind, &mut self.children, first_child);
let node = self.cache.node::<S>(kind, &mut self.children, first_child);
self.children.push(node.into());
}
@ -450,13 +450,13 @@ where
/// # Examples
/// ```
/// # use cstree::testing::*;
/// # use cstree::{build::GreenNodeBuilder, Language};
/// # use cstree::build::GreenNodeBuilder;
/// # struct Parser;
/// # impl Parser {
/// # fn peek(&self) -> Option<TestSyntaxKind> { None }
/// # fn parse_expr(&mut self) {}
/// # }
/// # let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::new();
/// # let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::new();
/// # let mut parser = Parser;
/// let checkpoint = builder.checkpoint();
/// parser.parse_expr();
@ -475,7 +475,7 @@ where
/// Wrap the previous branch marked by [`checkpoint`](GreenNodeBuilder::checkpoint) in a new
/// branch and make it current.
#[inline]
pub fn start_node_at(&mut self, checkpoint: Checkpoint, kind: L::Kind) {
pub fn start_node_at(&mut self, checkpoint: Checkpoint, kind: S) {
let Checkpoint(checkpoint) = checkpoint;
assert!(
checkpoint <= self.children.len(),

View file

@ -5,7 +5,7 @@
//! [`GreenNodeBuilder::token`], which takes the kind of token and a reference to the text of the token in the source.
//!
//! Of course, there are tokens whose text will always be the same, such as punctuation (like a semicolon), keywords
//! (like `fn`), or operators (like `<=`). Use [`Language::static_text`] when implementing `Language` to make `cstree`
//! (like `fn`), or operators (like `<=`). Use [`Syntax::static_text`] when implementing `Syntax` to make `cstree`
//! aware of such tokens.
//!
//! There is, however, another category of tokens whose text will appear repeatedly, but for which we cannot know the
@ -59,44 +59,45 @@
//! capabilities with `cstree` as well. Support for this is experimental, and you have to opt in via the
//! `salsa_2022_compat` feature. For instructions on how to do this, and whether you actually want to, please refer to
//! [the `salsa_compat` module documentation].
//!
//! ## Multi-threaded interners
//! If you want to use your interner on more than one thread, the interner needs to support interning new text through
//! shared access. With the `multi_threaded_interning` feature, you can get such an interner by calling
//! [`new_threaded_interner`]. The feature also enables support for `ThreadedRodeo`, the multi-threaded interner from
//! `lasso`.
//!
//! **You can pass a reference to that interner to anything that expects an [`Interner`]!**
//! While the interning methods on [`Interner`] require a `&mut self` to also work for single-threaded interners, both
//! [`Resolver`] and [`Interner`] will be implemented for `&interner` if `interner` is multi-threaded:
//!
//! ```
//! # use cstree::testing::{*, Language as _};
//! # use cstree::interning::*;
//!
//! let interner = new_threaded_interner();
//! let mut builder: GreenNodeBuilder<MyLanguage, &MultiThreadedTokenInterner> =
//! GreenNodeBuilder::from_interner(&interner);
//!
//! # builder.start_node(Root);
//! # builder.token(Int, "42");
//! # builder.finish_node();
//! parse(&mut builder, "42");
//! let (tree, cache) = builder.finish();
//!
//! // Note that we get a cache and interner back, because we passed an "owned" reference to `from_interner`
//! let used_interner = cache.unwrap().into_interner().unwrap();
//! assert_eq!(used_interner as *const _, &interner as *const _);
//!
//! let int = tree.children().next().unwrap();
//! assert_eq!(int.as_token().unwrap().text(&interner), Some("42"));
//! ```
//!
//! Here, we use `from_interner`, but pass it only a shared reference to "own". Take care to denote the type signature
//! of the `GreenNodeBuilder` appropriately.
#![cfg_attr(
feature = "multi_threaded_interning",
doc = r###"
## Multi-threaded interners
If you want to use your interner on more than one thread, the interner needs to support interning new text through
shared access. With the `multi_threaded_interning` feature, you can get such an interner by calling
[`new_threaded_interner`]. The feature also enables support for `ThreadedRodeo`, the multi-threaded interner from
`lasso`.
**You can pass a reference to that interner to anything that expects an [`Interner`]!**
While the interning methods on [`Interner`] require a `&mut self` to also work for single-threaded interners, both
[`Resolver`] and [`Interner`] will be implemented for `&interner` if `interner` is multi-threaded:
```
# use cstree::testing::*;
# use cstree::interning::*;
let interner = new_threaded_interner();
let mut builder: GreenNodeBuilder<MySyntax, &MultiThreadedTokenInterner> =
GreenNodeBuilder::from_interner(&interner);
# builder.start_node(Root);
# builder.token(Int, "42");
# builder.finish_node();
parse(&mut builder, "42");
let (tree, cache) = builder.finish();
// Note that we get a cache and interner back, because we passed an "owned" reference to `from_interner`
let used_interner = cache.unwrap().into_interner().unwrap();
assert_eq!(used_interner as *const _, &interner as *const _);
let int = tree.children().next().unwrap();
assert_eq!(int.as_token().unwrap().text(&interner), Some("42"));
```
Here, we use `from_interner`, but pass it only a shared reference to "own". Take care to denote the type signature
of the `GreenNodeBuilder` appropriately.
"###
)]
//!
//! [crate documentation]: crate
//! [`Language::static_text`]: crate::Language::static_text
//! [`Syntax::static_text`]: crate::Syntax::static_text
//! [`GreenNodeBuilder::token`]: crate::build::GreenNodeBuilder::token
//! [`GreenNodeBuilder::new`]: crate::build::GreenNodeBuilder::new
//! [`finish`]: crate::build::GreenNodeBuilder::finish

View file

@ -55,7 +55,7 @@
//! let db = Database::default();
//! let interner = db.as_interner(); // <-- conversion happens here
//! let mut shared_interner = &interner;
//! let mut builder: GreenNodeBuilder<TestLang, _> = GreenNodeBuilder::with_interner(&mut shared_interner);
//! let mut builder: GreenNodeBuilder<MySyntax, _> = GreenNodeBuilder::with_interner(&mut shared_interner);
//! let (tree, _no_interner_because_it_was_borrowed) = {
//! builder.start_node(TestSyntaxKind::Plus);
//! builder.token(TestSyntaxKind::Float, "2.05");
@ -66,7 +66,7 @@
//! builder.finish_node();
//! builder.finish()
//! };
//! let tree: SyntaxNode<TestLang> = SyntaxNode::new_root(tree);
//! let tree: SyntaxNode<MySyntax> = SyntaxNode::new_root(tree);
//! assert_eq!(tree.resolve_text(shared_interner), "2.05 + 7.32");
//! ```
//!

View file

@ -40,7 +40,7 @@
//! to happen to go from input text to a `cstree` syntax tree:
//!
//! 1. Define an enumeration of the types of tokens (like keywords) and nodes (like "an expression") that you want to
//! have in your syntax and implement [`Language`]
//! have in your syntax and implement [`Syntax`]
//!
//! 2. Create a [`GreenNodeBuilder`](build::GreenNodeBuilder) and call
//! [`start_node`](build::GreenNodeBuilder::start_node), [`token`](build::GreenNodeBuilder::token) and
@ -61,8 +61,8 @@
//! The `enum` needs to be convertible to a `u32`, so we use the `repr` attribute to ensure it uses the correct
//! representation.
//!
//! ```rust,ignore
//! #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
//! ```rust,no_run
//! #[derive(Debug, Clone, Copy, PartialEq, Eq)]
//! #[repr(u32)]
//! enum SyntaxKind {
//! /* Tokens */
@ -77,6 +77,15 @@
//! }
//! ```
//!
//! For convenience when we're working with generic `cstree` types like `SyntaxNode`, we'll also give a name to our
//! syntax as a whole and add a type alias for it. That way, we can match against `SyntaxKind`s using the original name,
//! but use the more informative `Node<Calculator>` to instantiate `cstree`'s types.
//!
//! ```rust,no_run
//! # enum SyntaxKind {}
//! type Calculator = SyntaxKind;
//! ```
//!
//! Most of these are tokens to lex the input string into, like numbers (`Int`) and operators (`Plus`, `Minus`).
//! We only really need one type of node; expressions.
//! Our syntax tree's root node will have the special kind `Root`, all other nodes will be
@ -84,21 +93,22 @@
//! expression nodes.
//!
//! To use our `SyntaxKind`s with `cstree`, we need to tell it how to convert it back to just a number (the
//! `#[repr(u16)]` that we added) by implementing the [`Language`] trait. We can also tell `cstree` about tokens that
//! `#[repr(u32)]` that we added) by implementing the [`Syntax`] trait. We can also tell `cstree` about tokens that
//! always have the same text through the `static_text` method on the trait. This is useful for the operators and
//! parentheses, but not possible for numbers, since an integer token may be produced from the input `3`, but also from
//! other numbers like `7` or `12`. We implement `Language` on an empty type, just so we can give it a name.
//! other numbers like `7` or `12`.
//!
//! ```rust,ignore
//! #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
//! pub struct Calculator;
//! impl Language for Calculator {
//! // The tokens and nodes we just defined
//! type Kind = SyntaxKind;
//! ```rust,no_run
//! # #[derive(Debug, Clone, Copy, PartialEq, Eq)]
//! # #[repr(u32)]
//! # enum SyntaxKind { Int, Plus, Minus, LParen, RParen, Expr, Root }
//! # type Calculator = SyntaxKind;
//! # use cstree::{Syntax, RawSyntaxKind};
//!
//! fn kind_from_raw(raw: RawSyntaxKind) -> Self::Kind {
//! // This just needs to be the inverse of `kind_to_raw`, but could also
//! // be an `impl TryFrom<u16> for SyntaxKind` or any other conversion.
//! impl Syntax for Calculator {
//! fn from_raw(raw: RawSyntaxKind) -> Self {
//! // This just needs to be the inverse of `into_raw`, but could also
//! // be an `impl TryFrom<u32> for SyntaxKind` or any other conversion.
//! match raw.0 {
//! 0 => SyntaxKind::Int,
//! 1 => SyntaxKind::Plus,
@ -111,12 +121,12 @@
//! }
//! }
//!
//! fn kind_to_raw(kind: Self::Kind) -> RawSyntaxKind {
//! RawSyntaxKind(kind as u32)
//! fn into_raw(self) -> RawSyntaxKind {
//! RawSyntaxKind(self as u32)
//! }
//!
//! fn static_text(kind: Self::Kind) -> Option<&'static str> {
//! match kind {
//! fn static_text(self) -> Option<&'static str> {
//! match self {
//! SyntaxKind::Plus => Some("+"),
//! SyntaxKind::Minus => Some("-"),
//! SyntaxKind::LParen => Some("("),
@ -127,12 +137,21 @@
//! }
//! ```
//!
//! #### Deriving `Syntax`
//!
//! To save yourself the hassle of defining this conversion (and, perhaps more importantly,
//! continually updating it while your language's syntax is in flux), `cstree` includes a derive
//! macro for [`Syntax`](macro@crate::Syntax) when built with the `derive` feature. With the macro,
//! the `Syntax` trait implementation above can be replaced by simply adding `#[derive(Syntax)]` to
//! `SyntaxKind`.
//!
//! ### Parsing into a green tree
//!
//! With that out of the way, we can start writing the parser for our expressions.
//! For the purposes of this introduction to `cstree`, I'll assume that there is a lexer that yields the following
//! tokens:
//!
//! ```rust,ignore
//! ```rust,no_run
//! #[derive(Debug, PartialEq, Eq, Clone, Copy)]
//! pub enum Token<'input> {
//! // Note that number strings are not yet parsed into actual numbers,
@ -150,7 +169,19 @@
//! A simple lexer that yields such tokens is part of the full `readme` example, but we'll be busy enough with the
//! combination of `cstree` and the actual parser, which we define like this:
//!
//! ```rust,ignore
//! ```rust,no_run
//! # use std::iter::Peekable;
//! # use cstree::build::GreenNodeBuilder;
//! # struct Lexer<'a> { input: &'a str }
//! # impl<'a> Lexer<'a> { fn new(input: &'a str) -> Self { Self { input } } }
//! # struct Token<'a> { input: &'a str }
//! # impl<'a> Iterator for Lexer<'a> {
//! # type Item = Token<'a>;
//! # fn next(&mut self) -> Option<Self::Item> { None }
//! # }
//! # #[derive(Debug, Clone, Copy, PartialEq, Eq, cstree::Syntax)]
//! # #[repr(u32)] enum Calculator { A }
//!
//! pub struct Parser<'input> {
//! // `Peekable` is a standard library iterator adapter that allows
//! // looking ahead at the next item without removing it from the iterator yet
@ -391,7 +422,7 @@ pub mod prelude {
build::GreenNodeBuilder,
green::{GreenNode, GreenToken},
syntax::{SyntaxElement, SyntaxNode, SyntaxToken},
Language, RawSyntaxKind,
RawSyntaxKind, Syntax,
};
}
@ -415,62 +446,39 @@ pub mod sync {
pub use triomphe::Arc;
}
/// The `Language` trait is the bridge between the internal `cstree` representation and your
/// A type that represents what items in your language can be.
/// Typically, this is an `enum` with variants such as `Identifier`, `Literal`, ...
///
/// The `Syntax` trait is the bridge between the internal `cstree` representation and your
/// language's types.
/// This is essential for providing a [`SyntaxNode`] API that can be used with your types, as in the
/// `s_expressions` example:
///
/// ```
/// #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
/// #[derive(Debug, Clone, Copy, PartialEq, Eq, cstree::Syntax)]
/// # #[allow(non_camel_case_types)]
/// #[repr(u32)]
/// enum SyntaxKind {
/// Plus, // `+`
/// Minus, // `-`
/// #[static_text("+")]
/// Plus, // `+`
/// #[static_text("-")]
/// Minus, // `-`
/// Integer, // like `15`
/// Expression, // combined expression, like `5 + 4 - 3`
/// Whitespace, // whitespaces is explicit
/// #[doc(hidden)]
/// __LAST,
/// }
/// use SyntaxKind::*;
///
/// #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
/// enum Lang {}
///
/// impl cstree::Language for Lang {
/// type Kind = SyntaxKind;
///
/// fn kind_from_raw(raw: cstree::RawSyntaxKind) -> Self::Kind {
/// assert!(raw.0 <= __LAST as u32);
/// unsafe { std::mem::transmute::<u32, SyntaxKind>(raw.0) }
/// }
///
/// fn kind_to_raw(kind: Self::Kind) -> cstree::RawSyntaxKind {
/// cstree::RawSyntaxKind(kind as u32)
/// }
///
/// fn static_text(kind: Self::Kind) -> Option<&'static str> {
/// match kind {
/// Plus => Some("+"),
/// Minus => Some("-"),
/// _ => None,
/// }
/// }
/// Whitespace, // whitespace is explicit
/// }
/// ```
///
/// `cstree` provides a procedural macro called `cstree_derive` to automatically generate `Syntax` implementations for
/// syntax kind enums if its `derive` feature is enabled.
///
/// [`SyntaxNode`]: crate::syntax::SyntaxNode
pub trait Language: Sized + Clone + Copy + fmt::Debug + Eq + Ord + std::hash::Hash {
/// A type that represents what items in your Language can be.
/// Typically, this is an `enum` with variants such as `Identifier`, `Literal`, ...
type Kind: Sized + Clone + Copy + fmt::Debug;
pub trait Syntax: Sized + Copy + fmt::Debug + Eq {
/// Construct a semantic item kind from the compact representation.
fn kind_from_raw(raw: RawSyntaxKind) -> Self::Kind;
fn from_raw(raw: RawSyntaxKind) -> Self;
/// Convert a semantic item kind into a more compact representation.
fn kind_to_raw(kind: Self::Kind) -> RawSyntaxKind;
fn into_raw(self) -> RawSyntaxKind;
/// Fixed text for a particular syntax kind.
/// Implement for kinds that will only ever represent the same text, such as punctuation (like a
@ -479,16 +487,25 @@ pub trait Language: Sized + Clone + Copy + fmt::Debug + Eq + Ord + std::hash::Ha
/// Indicating tokens that have a `static_text` this way allows `cstree` to store them more efficiently, which makes
/// it faster to add them to a syntax tree and to look up their text. Since there can often be many occurrences
/// of these tokens inside a file, doing so will improve the performance of using `cstree`.
fn static_text(kind: Self::Kind) -> Option<&'static str>;
fn static_text(self) -> Option<&'static str>;
}
#[cfg(feature = "derive")]
#[allow(unused_imports)]
#[macro_use]
extern crate cstree_derive;
#[cfg(feature = "derive")]
/// Derive macro available if `cstree` is built with `features = ["derive"]`.
pub use cstree_derive::Syntax;
#[doc(hidden)]
#[allow(unsafe_code, unused)]
pub mod testing {
pub use crate::prelude::*;
pub fn parse<L: Language, I>(_b: &mut GreenNodeBuilder<L, I>, _s: &str) {}
pub fn parse<S: Syntax, I>(_b: &mut GreenNodeBuilder<S, I>, _s: &str) {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u32)]
#[allow(non_camel_case_types)]
pub enum TestSyntaxKind {
@ -501,25 +518,21 @@ pub mod testing {
Whitespace,
__LAST,
}
pub type MySyntax = TestSyntaxKind;
pub use TestSyntaxKind::*;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum TestLang {}
pub type MyLanguage = TestLang;
impl Language for TestLang {
type Kind = TestSyntaxKind;
fn kind_from_raw(raw: RawSyntaxKind) -> Self::Kind {
impl Syntax for TestSyntaxKind {
fn from_raw(raw: RawSyntaxKind) -> Self {
assert!(raw.0 <= TestSyntaxKind::__LAST as u32);
unsafe { std::mem::transmute::<u32, TestSyntaxKind>(raw.0) }
unsafe { std::mem::transmute::<u32, Self>(raw.0) }
}
fn kind_to_raw(kind: Self::Kind) -> RawSyntaxKind {
RawSyntaxKind(kind as u32)
fn into_raw(self) -> RawSyntaxKind {
RawSyntaxKind(self as u32)
}
fn static_text(kind: Self::Kind) -> Option<&'static str> {
match kind {
fn static_text(self) -> Option<&'static str> {
match self {
TestSyntaxKind::Plus => Some("+"),
_ => None,
}

View file

@ -6,7 +6,7 @@ use crate::{
syntax::{ResolvedNode, SyntaxNode},
traversal::WalkEvent,
util::NodeOrToken,
Language, RawSyntaxKind,
RawSyntaxKind, Syntax,
};
use serde::{
de::{Error, SeqAccess, Visitor},
@ -40,7 +40,7 @@ macro_rules! data_list {
/// contains a boolean which indicates if this node has a data. If it has one,
/// the deserializer should pop the first element from the data list and continue.
///
/// Takes the `Language` (`$l`), `SyntaxNode` (`$node`), `Resolver` (`$resolver`),
/// Takes the `Syntax` (`$l`), `SyntaxNode` (`$node`), `Resolver` (`$resolver`),
/// `Serializer` (`$serializer`), and an optional `data_list` which must be a `mut Vec<D>`.
macro_rules! gen_serialize {
($l:ident, $node:expr, $resolver:expr, $ser:ident, $($data_list:ident)?) => {{
@ -56,9 +56,9 @@ macro_rules! gen_serialize {
})
.unwrap_or(false);)?
Some(Event::EnterNode($l::kind_to_raw(node.kind()), has_data))
Some(Event::EnterNode($l::into_raw(node.kind()), has_data))
}
WalkEvent::Enter(NodeOrToken::Token(tok)) => Some(Event::Token($l::kind_to_raw(tok.kind()), tok.resolve_text($resolver))),
WalkEvent::Enter(NodeOrToken::Token(tok)) => Some(Event::Token($l::into_raw(tok.kind()), tok.resolve_text($resolver))),
WalkEvent::Leave(NodeOrToken::Node(_)) => Some(Event::LeaveNode),
WalkEvent::Leave(NodeOrToken::Token(_)) => None,
@ -87,53 +87,53 @@ enum Event<'text> {
}
/// Make a `SyntaxNode` serializable but without serializing the data.
pub(crate) struct SerializeWithResolver<'node, 'resolver, L: Language, D: 'static, R: ?Sized> {
pub(crate) node: &'node SyntaxNode<L, D>,
pub(crate) struct SerializeWithResolver<'node, 'resolver, S: Syntax, D: 'static, R: ?Sized> {
pub(crate) node: &'node SyntaxNode<S, D>,
pub(crate) resolver: &'resolver R,
}
/// Make a `SyntaxNode` serializable which will include the data for serialization.
pub(crate) struct SerializeWithData<'node, 'resolver, L: Language, D: 'static, R: ?Sized> {
pub(crate) node: &'node SyntaxNode<L, D>,
pub(crate) struct SerializeWithData<'node, 'resolver, S: Syntax, D: 'static, R: ?Sized> {
pub(crate) node: &'node SyntaxNode<S, D>,
pub(crate) resolver: &'resolver R,
}
impl<L, D, R> Serialize for SerializeWithData<'_, '_, L, D, R>
impl<S, D, R> Serialize for SerializeWithData<'_, '_, S, D, R>
where
L: Language,
S: Syntax,
R: Resolver<TokenKey> + ?Sized,
D: Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
where
S: serde::Serializer,
Ser: serde::Serializer,
{
let mut data_list = Vec::new();
gen_serialize!(L, self.node, self.resolver, serializer, data_list)
gen_serialize!(S, self.node, self.resolver, serializer, data_list)
}
}
impl<L, D, R> Serialize for SerializeWithResolver<'_, '_, L, D, R>
impl<S, D, R> Serialize for SerializeWithResolver<'_, '_, S, D, R>
where
L: Language,
S: Syntax,
R: Resolver<TokenKey> + ?Sized,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
where
S: serde::Serializer,
Ser: serde::Serializer,
{
gen_serialize!(L, self.node, self.resolver, serializer,)
gen_serialize!(S, self.node, self.resolver, serializer,)
}
}
impl<L, D> Serialize for ResolvedNode<L, D>
impl<S, D> Serialize for ResolvedNode<S, D>
where
L: Language,
S: Syntax,
D: Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
where
S: serde::Serializer,
Ser: serde::Serializer,
{
let node = SerializeWithResolver {
node: self,
@ -143,9 +143,9 @@ where
}
}
impl<'de, L, D> Deserialize<'de> for ResolvedNode<L, D>
impl<'de, S, D> Deserialize<'de> for ResolvedNode<S, D>
where
L: Language,
S: Syntax,
D: Deserialize<'de>,
{
// Deserialization is done by walking down the deserialized event stream,
@ -158,20 +158,20 @@ where
// we walk down the nodes, check if the bool at `data_list[idx]` is true,
// and if so, pop the first element of the data list and attach the data
// to the current node.
fn deserialize<DE>(deserializer: DE) -> Result<Self, DE::Error>
fn deserialize<De>(deserializer: De) -> Result<Self, De::Error>
where
DE: serde::Deserializer<'de>,
De: serde::Deserializer<'de>,
{
struct EventVisitor<L: Language, D: 'static> {
_marker: PhantomData<fn() -> ResolvedNode<L, D>>,
struct EventVisitor<S: Syntax, D: 'static> {
_marker: PhantomData<fn() -> ResolvedNode<S, D>>,
}
impl<'de, L, D> Visitor<'de> for EventVisitor<L, D>
impl<'de, S, D> Visitor<'de> for EventVisitor<S, D>
where
L: Language,
S: Syntax,
D: Deserialize<'de>,
{
type Value = (ResolvedNode<L, D>, VecDeque<bool>);
type Value = (ResolvedNode<S, D>, VecDeque<bool>);
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a list of tree events")
@ -181,16 +181,16 @@ where
where
A: SeqAccess<'de>,
{
let mut builder: GreenNodeBuilder<L> = GreenNodeBuilder::new();
let mut builder: GreenNodeBuilder<S> = GreenNodeBuilder::new();
let mut data_indices = VecDeque::new();
while let Some(next) = seq.next_element::<Event<'_>>()? {
match next {
Event::EnterNode(kind, has_data) => {
builder.start_node(L::kind_from_raw(kind));
builder.start_node(S::from_raw(kind));
data_indices.push_back(has_data);
}
Event::Token(kind, text) => builder.token(L::kind_from_raw(kind), text),
Event::Token(kind, text) => builder.token(S::from_raw(kind), text),
Event::LeaveNode => builder.finish_node(),
}
}
@ -201,10 +201,10 @@ where
}
}
struct ProcessedEvents<L: Language, D: 'static>(ResolvedNode<L, D>, VecDeque<bool>);
impl<'de, L, D> Deserialize<'de> for ProcessedEvents<L, D>
struct ProcessedEvents<S: Syntax, D: 'static>(ResolvedNode<S, D>, VecDeque<bool>);
impl<'de, S, D> Deserialize<'de> for ProcessedEvents<S, D>
where
L: Language,
S: Syntax,
D: Deserialize<'de>,
{
fn deserialize<DE>(deserializer: DE) -> Result<Self, DE::Error>
@ -217,20 +217,20 @@ where
}
let (ProcessedEvents(tree, data_indices), mut data) =
<(ProcessedEvents<L, D>, VecDeque<D>)>::deserialize(deserializer)?;
<(ProcessedEvents<S, D>, VecDeque<D>)>::deserialize(deserializer)?;
tree.descendants().zip(data_indices).try_for_each(|(node, has_data)| {
if has_data {
let data = data
.pop_front()
.ok_or_else(|| DE::Error::custom("invalid serialized tree"))?;
.ok_or_else(|| De::Error::custom("invalid serialized tree"))?;
node.set_data(data);
}
<Result<(), DE::Error>>::Ok(())
<Result<(), De::Error>>::Ok(())
})?;
if !data.is_empty() {
Err(DE::Error::custom(
Err(De::Error::custom(
"serialized SyntaxNode contained too many data elements",
))
} else {
@ -240,18 +240,18 @@ where
}
impl Serialize for RawSyntaxKind {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
where
S: serde::Serializer,
Ser: serde::Serializer,
{
serializer.serialize_u32(self.0)
}
}
impl<'de> Deserialize<'de> for RawSyntaxKind {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
fn deserialize<De>(deserializer: De) -> Result<Self, De::Error>
where
D: serde::Deserializer<'de>,
De: serde::Deserializer<'de>,
{
Ok(Self(u32::deserialize(deserializer)?))
}

View file

@ -7,25 +7,25 @@ use crate::{
green::GreenElementRef,
interning::{Resolver, TokenKey},
util::{NodeOrToken, TokenAtOffset},
Language, RawSyntaxKind,
RawSyntaxKind, Syntax,
};
/// An element of the tree, can be either a node or a token.
pub type SyntaxElement<L, D = ()> = NodeOrToken<SyntaxNode<L, D>, SyntaxToken<L, D>>;
pub type SyntaxElement<S, D = ()> = NodeOrToken<SyntaxNode<S, D>, SyntaxToken<S, D>>;
impl<L: Language, D> From<SyntaxNode<L, D>> for SyntaxElement<L, D> {
fn from(node: SyntaxNode<L, D>) -> SyntaxElement<L, D> {
impl<S: Syntax, D> From<SyntaxNode<S, D>> for SyntaxElement<S, D> {
fn from(node: SyntaxNode<S, D>) -> SyntaxElement<S, D> {
NodeOrToken::Node(node)
}
}
impl<L: Language, D> From<SyntaxToken<L, D>> for SyntaxElement<L, D> {
fn from(token: SyntaxToken<L, D>) -> SyntaxElement<L, D> {
impl<S: Syntax, D> From<SyntaxToken<S, D>> for SyntaxElement<S, D> {
fn from(token: SyntaxToken<S, D>) -> SyntaxElement<S, D> {
NodeOrToken::Token(token)
}
}
impl<L: Language, D> SyntaxElement<L, D> {
impl<S: Syntax, D> SyntaxElement<S, D> {
/// Returns this element's [`Display`](fmt::Display) representation as a string.
///
/// To avoid allocating for every element, see [`write_display`](type.SyntaxElement.html#method.write_display).
@ -80,22 +80,22 @@ impl<L: Language, D> SyntaxElement<L, D> {
}
/// A reference to an element of the tree, can be either a reference to a node or one to a token.
pub type SyntaxElementRef<'a, L, D = ()> = NodeOrToken<&'a SyntaxNode<L, D>, &'a SyntaxToken<L, D>>;
pub type SyntaxElementRef<'a, S, D = ()> = NodeOrToken<&'a SyntaxNode<S, D>, &'a SyntaxToken<S, D>>;
impl<'a, L: Language, D> From<&'a SyntaxNode<L, D>> for SyntaxElementRef<'a, L, D> {
fn from(node: &'a SyntaxNode<L, D>) -> Self {
impl<'a, S: Syntax, D> From<&'a SyntaxNode<S, D>> for SyntaxElementRef<'a, S, D> {
fn from(node: &'a SyntaxNode<S, D>) -> Self {
NodeOrToken::Node(node)
}
}
impl<'a, L: Language, D> From<&'a SyntaxToken<L, D>> for SyntaxElementRef<'a, L, D> {
fn from(token: &'a SyntaxToken<L, D>) -> Self {
impl<'a, S: Syntax, D> From<&'a SyntaxToken<S, D>> for SyntaxElementRef<'a, S, D> {
fn from(token: &'a SyntaxToken<S, D>) -> Self {
NodeOrToken::Token(token)
}
}
impl<'a, L: Language, D> From<&'a SyntaxElement<L, D>> for SyntaxElementRef<'a, L, D> {
fn from(element: &'a SyntaxElement<L, D>) -> Self {
impl<'a, S: Syntax, D> From<&'a SyntaxElement<S, D>> for SyntaxElementRef<'a, S, D> {
fn from(element: &'a SyntaxElement<S, D>) -> Self {
match element {
NodeOrToken::Node(it) => Self::Node(it),
NodeOrToken::Token(it) => Self::Token(it),
@ -103,7 +103,7 @@ impl<'a, L: Language, D> From<&'a SyntaxElement<L, D>> for SyntaxElementRef<'a,
}
}
impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
impl<'a, S: Syntax, D> SyntaxElementRef<'a, S, D> {
/// Returns this element's [`Display`](fmt::Display) representation as a string.
///
/// To avoid allocating for every element, see [`write_display`](type.SyntaxElementRef.html#method.write_display).
@ -157,14 +157,14 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
}
}
impl<L: Language, D> SyntaxElement<L, D> {
impl<S: Syntax, D> SyntaxElement<S, D> {
pub(super) fn new(
element: GreenElementRef<'_>,
parent: &SyntaxNode<L, D>,
parent: &SyntaxNode<S, D>,
index: u32,
offset: TextSize,
ref_count: *mut AtomicU32,
) -> SyntaxElement<L, D> {
) -> SyntaxElement<S, D> {
match element {
NodeOrToken::Node(node) => SyntaxNode::new_child(node, parent, index, offset, ref_count).into(),
NodeOrToken::Token(_) => SyntaxToken::new(parent, index, offset).into(),
@ -191,7 +191,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
/// The kind of this element in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
pub fn kind(&self) -> S {
match self {
NodeOrToken::Node(it) => it.kind(),
NodeOrToken::Token(it) => it.kind(),
@ -200,7 +200,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
/// The parent node of this element, except if this element is the root.
#[inline]
pub fn parent(&self) -> Option<&SyntaxNode<L, D>> {
pub fn parent(&self) -> Option<&SyntaxNode<S, D>> {
match self {
NodeOrToken::Node(it) => it.parent(),
NodeOrToken::Token(it) => Some(it.parent()),
@ -209,7 +209,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<S, D>> {
match self {
NodeOrToken::Node(it) => it.ancestors(),
NodeOrToken::Token(it) => it.parent().ancestors(),
@ -218,7 +218,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
/// Return the leftmost token in the subtree of this element.
#[inline]
pub fn first_token(&self) -> Option<&SyntaxToken<L, D>> {
pub fn first_token(&self) -> Option<&SyntaxToken<S, D>> {
match self {
NodeOrToken::Node(it) => it.first_token(),
NodeOrToken::Token(it) => Some(it),
@ -227,7 +227,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
/// Return the rightmost token in the subtree of this element.
#[inline]
pub fn last_token(&self) -> Option<&SyntaxToken<L, D>> {
pub fn last_token(&self) -> Option<&SyntaxToken<S, D>> {
match self {
NodeOrToken::Node(it) => it.last_token(),
NodeOrToken::Token(it) => Some(it),
@ -236,7 +236,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
/// The tree element to the right of this one, i.e. the next child of this element's parent after this element.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, S, D>> {
match self {
NodeOrToken::Node(it) => it.next_sibling_or_token(),
NodeOrToken::Token(it) => it.next_sibling_or_token(),
@ -245,7 +245,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
/// The tree element to the left of this one, i.e. the previous child of this element's parent after this element.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, S, D>> {
match self {
NodeOrToken::Node(it) => it.prev_sibling_or_token(),
NodeOrToken::Token(it) => it.prev_sibling_or_token(),
@ -253,7 +253,7 @@ impl<L: Language, D> SyntaxElement<L, D> {
}
}
impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
impl<'a, S: Syntax, D> SyntaxElementRef<'a, S, D> {
/// The range this element covers in the source text, in bytes.
#[inline]
pub fn text_range(&self) -> TextRange {
@ -274,7 +274,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
/// The kind of this element in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
pub fn kind(&self) -> S {
match self {
NodeOrToken::Node(it) => it.kind(),
NodeOrToken::Token(it) => it.kind(),
@ -283,7 +283,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
/// The parent node of this element, except if this element is the root.
#[inline]
pub fn parent(&self) -> Option<&'a SyntaxNode<L, D>> {
pub fn parent(&self) -> Option<&'a SyntaxNode<S, D>> {
match self {
NodeOrToken::Node(it) => it.parent(),
NodeOrToken::Token(it) => Some(it.parent()),
@ -292,7 +292,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &'a SyntaxNode<L, D>> {
pub fn ancestors(&self) -> impl Iterator<Item = &'a SyntaxNode<S, D>> {
match self {
NodeOrToken::Node(it) => it.ancestors(),
NodeOrToken::Token(it) => it.parent().ancestors(),
@ -301,7 +301,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
/// Return the leftmost token in the subtree of this element.
#[inline]
pub fn first_token(&self) -> Option<&'a SyntaxToken<L, D>> {
pub fn first_token(&self) -> Option<&'a SyntaxToken<S, D>> {
match self {
NodeOrToken::Node(it) => it.first_token(),
NodeOrToken::Token(it) => Some(it),
@ -310,7 +310,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
/// Return the rightmost token in the subtree of this element.
#[inline]
pub fn last_token(&self) -> Option<&'a SyntaxToken<L, D>> {
pub fn last_token(&self) -> Option<&'a SyntaxToken<S, D>> {
match self {
NodeOrToken::Node(it) => it.last_token(),
NodeOrToken::Token(it) => Some(it),
@ -319,7 +319,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
/// The tree element to the right of this one, i.e. the next child of this element's parent after this element.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, L, D>> {
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, S, D>> {
match self {
NodeOrToken::Node(it) => it.next_sibling_or_token(),
NodeOrToken::Token(it) => it.next_sibling_or_token(),
@ -328,7 +328,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
/// The tree element to the left of this one, i.e. the previous child of this element's parent after this element.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, L, D>> {
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'a, S, D>> {
match self {
NodeOrToken::Node(it) => it.prev_sibling_or_token(),
NodeOrToken::Token(it) => it.prev_sibling_or_token(),
@ -336,7 +336,7 @@ impl<'a, L: Language, D> SyntaxElementRef<'a, L, D> {
}
#[inline]
pub(super) fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<L, D>> {
pub(super) fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<S, D>> {
assert!(self.text_range().start() <= offset && offset <= self.text_range().end());
match self {
NodeOrToken::Token(token) => TokenAtOffset::Single((*token).clone()),

View file

@ -7,7 +7,7 @@ use text_size::TextSize;
use crate::{
green::{GreenElementRef, GreenNodeChildren},
syntax::{SyntaxElementRef, SyntaxNode},
Language,
Syntax,
};
#[derive(Clone, Debug)]
@ -18,7 +18,7 @@ struct Iter<'n> {
}
impl<'n> Iter<'n> {
fn new<L: Language, D>(parent: &'n SyntaxNode<L, D>) -> Self {
fn new<S: Syntax, D>(parent: &'n SyntaxNode<S, D>) -> Self {
let offset = parent.text_range().start();
let green: GreenNodeChildren<'_> = parent.green().children();
Iter {
@ -67,14 +67,14 @@ impl<'n> FusedIterator for Iter<'n> {}
/// An iterator over the child nodes of a [`SyntaxNode`].
#[derive(Clone, Debug)]
pub struct SyntaxNodeChildren<'n, L: Language, D: 'static = ()> {
pub struct SyntaxNodeChildren<'n, S: Syntax, D: 'static = ()> {
inner: Iter<'n>,
parent: &'n SyntaxNode<L, D>,
parent: &'n SyntaxNode<S, D>,
}
impl<'n, L: Language, D> SyntaxNodeChildren<'n, L, D> {
impl<'n, S: Syntax, D> SyntaxNodeChildren<'n, S, D> {
#[inline]
pub(super) fn new(parent: &'n SyntaxNode<L, D>) -> Self {
pub(super) fn new(parent: &'n SyntaxNode<S, D>) -> Self {
Self {
inner: Iter::new(parent),
parent,
@ -82,8 +82,8 @@ impl<'n, L: Language, D> SyntaxNodeChildren<'n, L, D> {
}
}
impl<'n, L: Language, D> Iterator for SyntaxNodeChildren<'n, L, D> {
type Item = &'n SyntaxNode<L, D>;
impl<'n, S: Syntax, D> Iterator for SyntaxNodeChildren<'n, S, D> {
type Item = &'n SyntaxNode<S, D>;
#[inline(always)]
fn next(&mut self) -> Option<Self::Item> {
@ -109,24 +109,24 @@ impl<'n, L: Language, D> Iterator for SyntaxNodeChildren<'n, L, D> {
}
}
impl<'n, L: Language, D> ExactSizeIterator for SyntaxNodeChildren<'n, L, D> {
impl<'n, S: Syntax, D> ExactSizeIterator for SyntaxNodeChildren<'n, S, D> {
#[inline(always)]
fn len(&self) -> usize {
self.inner.len()
}
}
impl<'n, L: Language, D> FusedIterator for SyntaxNodeChildren<'n, L, D> {}
impl<'n, S: Syntax, D> FusedIterator for SyntaxNodeChildren<'n, S, D> {}
/// An iterator over the children of a [`SyntaxNode`].
#[derive(Clone, Debug)]
pub struct SyntaxElementChildren<'n, L: Language, D: 'static = ()> {
pub struct SyntaxElementChildren<'n, S: Syntax, D: 'static = ()> {
inner: Iter<'n>,
parent: &'n SyntaxNode<L, D>,
parent: &'n SyntaxNode<S, D>,
}
impl<'n, L: Language, D> SyntaxElementChildren<'n, L, D> {
impl<'n, S: Syntax, D> SyntaxElementChildren<'n, S, D> {
#[inline]
pub(super) fn new(parent: &'n SyntaxNode<L, D>) -> Self {
pub(super) fn new(parent: &'n SyntaxNode<S, D>) -> Self {
Self {
inner: Iter::new(parent),
parent,
@ -134,8 +134,8 @@ impl<'n, L: Language, D> SyntaxElementChildren<'n, L, D> {
}
}
impl<'n, L: Language, D> Iterator for SyntaxElementChildren<'n, L, D> {
type Item = SyntaxElementRef<'n, L, D>;
impl<'n, S: Syntax, D> Iterator for SyntaxElementChildren<'n, S, D> {
type Item = SyntaxElementRef<'n, S, D>;
#[inline(always)]
fn next(&mut self) -> Option<Self::Item> {
@ -159,10 +159,10 @@ impl<'n, L: Language, D> Iterator for SyntaxElementChildren<'n, L, D> {
}
}
impl<'n, L: Language, D> ExactSizeIterator for SyntaxElementChildren<'n, L, D> {
impl<'n, S: Syntax, D> ExactSizeIterator for SyntaxElementChildren<'n, S, D> {
#[inline(always)]
fn len(&self) -> usize {
self.inner.len()
}
}
impl<'n, L: Language, D> FusedIterator for SyntaxElementChildren<'n, L, D> {}
impl<'n, S: Syntax, D> FusedIterator for SyntaxElementChildren<'n, S, D> {}

View file

@ -22,10 +22,10 @@ pub use text::SyntaxText;
// A note on `#[inline]` usage in this module:
// In `rowan`, there are two layers of `SyntaxXY`s: the `cursor` layer and the `api` layer.
// The `cursor` layer handles all of the actual methods on the tree, while the `api` layer is
// generic over the `Language` of the tree and otherwise forwards its implementation to the `cursor`
// generic over the `Syntax` of the tree and otherwise forwards its implementation to the `cursor`
// layer.
// Here, we have unified the `cursor` and the `api` layer into the `syntax` layer.
// This means that all of our types here are generic over a `Language`, including the
// This means that all of our types here are generic over a `Syntax`, including the
// implementations which, in `rowan`, are part of the `cursor` layer.
// Very apparently, this makes the compiler less willing to inline. Almost every "regular use"
// method in this file has some kind of `#[inline]` annotation to counteract that. This is _NOT_
@ -43,15 +43,15 @@ mod tests {
#[cfg_attr(miri, ignore)]
fn assert_send_sync() {
fn f<T: Send + Sync>() {}
f::<SyntaxNode<TestLang>>();
f::<SyntaxToken<TestLang>>();
f::<SyntaxElement<TestLang>>();
f::<SyntaxElementRef<'static, TestLang>>();
f::<SyntaxNode<TestSyntaxKind>>();
f::<SyntaxToken<TestSyntaxKind>>();
f::<SyntaxElement<TestSyntaxKind>>();
f::<SyntaxElementRef<'static, TestSyntaxKind>>();
f::<ResolvedNode<TestLang>>();
f::<ResolvedToken<TestLang>>();
f::<ResolvedElement<TestLang>>();
f::<ResolvedElementRef<'static, TestLang>>();
f::<ResolvedNode<TestSyntaxKind>>();
f::<ResolvedToken<TestSyntaxKind>>();
f::<ResolvedElement<TestSyntaxKind>>();
f::<ResolvedElementRef<'static, TestSyntaxKind>>();
}
#[test]
@ -60,10 +60,10 @@ mod tests {
fn assert_syntax_sizes() {
use std::mem::size_of;
assert_eq!(size_of::<SyntaxNode<TestLang>>(), size_of::<*const u8>());
assert_eq!(size_of::<SyntaxNode<TestSyntaxKind>>(), size_of::<*const u8>());
// verify niche opt of `NonNull`
assert_eq!(size_of::<Option<SyntaxNode<TestLang>>>(), size_of::<*const u8>());
assert_eq!(size_of::<Option<SyntaxNode<TestSyntaxKind>>>(), size_of::<*const u8>());
// parent + child index + text len
assert_eq!(size_of::<SyntaxToken<TestLang>>(), size_of::<SyntaxNode<TestLang>>() + size_of::<u32>() * 2);
assert_eq!(size_of::<SyntaxToken<TestSyntaxKind>>(), size_of::<SyntaxNode<TestSyntaxKind>>() + size_of::<u32>() * 2);
}
}

View file

@ -7,7 +7,7 @@ use crate::{
text::*,
traversal::*,
util::*,
Language, RawSyntaxKind,
RawSyntaxKind, Syntax,
};
use parking_lot::RwLock;
use std::{
@ -29,14 +29,14 @@ use triomphe::Arc;
/// individual nodes is relatively cheap.
#[derive(Debug)]
#[repr(transparent)]
pub struct SyntaxNode<L: Language, D: 'static = ()> {
data: NonNull<NodeData<L, D>>,
pub struct SyntaxNode<S: Syntax, D: 'static = ()> {
data: NonNull<NodeData<S, D>>,
}
unsafe impl<L: Language, D: 'static> Send for SyntaxNode<L, D> {}
unsafe impl<L: Language, D: 'static> Sync for SyntaxNode<L, D> {}
unsafe impl<S: Syntax, D: 'static> Send for SyntaxNode<S, D> {}
unsafe impl<S: Syntax, D: 'static> Sync for SyntaxNode<S, D> {}
impl<L: Language, D> SyntaxNode<L, D> {
impl<S: Syntax, D> SyntaxNode<S, D> {
/// Writes this node's [`Debug`](fmt::Debug) representation into the given `target`.
/// If `recursive` is `true`, prints the entire subtree rooted in this node.
/// Otherwise, only this node's kind and range are written.
@ -120,7 +120,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// Turns this node into a [`ResolvedNode`](crate::syntax::ResolvedNode), but only if there is a resolver associated
/// with this tree.
#[inline]
pub fn try_resolved(&self) -> Option<&ResolvedNode<L, D>> {
pub fn try_resolved(&self) -> Option<&ResolvedNode<S, D>> {
// safety: we only coerce if `resolver` exists
self.resolver().map(|_| unsafe { ResolvedNode::coerce_ref(self) })
}
@ -129,12 +129,12 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// # Panics
/// If there is no resolver associated with this tree.
#[inline]
pub fn resolved(&self) -> &ResolvedNode<L, D> {
pub fn resolved(&self) -> &ResolvedNode<S, D> {
self.try_resolved().expect("tried to resolve a node without resolver")
}
}
impl<L: Language, D> Clone for SyntaxNode<L, D> {
impl<S: Syntax, D> Clone for SyntaxNode<S, D> {
fn clone(&self) -> Self {
// safety:: the ref count is only dropped when there are no more external references (see below)
// since we are currently cloning such a reference, there is still at least one
@ -144,7 +144,7 @@ impl<L: Language, D> Clone for SyntaxNode<L, D> {
}
}
impl<L: Language, D> Drop for SyntaxNode<L, D> {
impl<S: Syntax, D> Drop for SyntaxNode<S, D> {
fn drop(&mut self) {
// safety:: the ref count is only dropped when there are no more external references (see below)
// and all nodes but the root have been dropped.
@ -169,9 +169,9 @@ impl<L: Language, D> Drop for SyntaxNode<L, D> {
}
}
impl<L: Language, D> SyntaxNode<L, D> {
impl<S: Syntax, D> SyntaxNode<S, D> {
#[inline]
fn data(&self) -> &NodeData<L, D> {
fn data(&self) -> &NodeData<S, D> {
unsafe { self.data.as_ref() }
}
@ -183,7 +183,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// The root of the tree this node belongs to.
///
/// If this node is the root, returns `self`.
pub fn root(&self) -> &SyntaxNode<L, D> {
pub fn root(&self) -> &SyntaxNode<S, D> {
let mut current = self;
while let Some(parent) = current.parent() {
current = parent;
@ -222,31 +222,31 @@ impl<L: Language, D> SyntaxNode<L, D> {
}
// Identity semantics for hash & eq
impl<L: Language, D> PartialEq for SyntaxNode<L, D> {
fn eq(&self, other: &SyntaxNode<L, D>) -> bool {
impl<S: Syntax, D> PartialEq for SyntaxNode<S, D> {
fn eq(&self, other: &SyntaxNode<S, D>) -> bool {
self.data == other.data
}
}
impl<L: Language, D> Eq for SyntaxNode<L, D> {}
impl<S: Syntax, D> Eq for SyntaxNode<S, D> {}
impl<L: Language, D> Hash for SyntaxNode<L, D> {
impl<S: Syntax, D> Hash for SyntaxNode<S, D> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.data.hash(state);
}
}
enum Kind<L: Language, D: 'static> {
enum Kind<S: Syntax, D: 'static> {
Root(GreenNode, Option<StdArc<dyn Resolver<TokenKey>>>),
Child {
parent: SyntaxNode<L, D>,
parent: SyntaxNode<S, D>,
index: u32,
offset: TextSize,
},
}
impl<L: Language, D> Kind<L, D> {
fn as_child(&self) -> Option<(&SyntaxNode<L, D>, u32, TextSize)> {
impl<S: Syntax, D> Kind<S, D> {
fn as_child(&self) -> Option<(&SyntaxNode<S, D>, u32, TextSize)> {
match self {
Kind::Child { parent, index, offset } => Some((parent, *index, *offset)),
_ => None,
@ -254,17 +254,17 @@ impl<L: Language, D> Kind<L, D> {
}
}
pub(super) struct NodeData<L: Language, D: 'static> {
kind: Kind<L, D>,
pub(super) struct NodeData<S: Syntax, D: 'static> {
kind: Kind<S, D>,
green: NonNull<GreenNode>,
ref_count: *mut AtomicU32,
data: RwLock<Option<Arc<D>>>,
children: Vec<UnsafeCell<Option<SyntaxElement<L, D>>>>,
children: Vec<UnsafeCell<Option<SyntaxElement<S, D>>>>,
child_locks: Vec<RwLock<()>>,
}
impl<L: Language, D> NodeData<L, D> {
fn new(kind: Kind<L, D>, green: NonNull<GreenNode>, ref_count: *mut AtomicU32, n_children: usize) -> NonNull<Self> {
impl<S: Syntax, D> NodeData<S, D> {
fn new(kind: Kind<S, D>, green: NonNull<GreenNode>, ref_count: *mut AtomicU32, n_children: usize) -> NonNull<Self> {
let mut children = Vec::with_capacity(n_children);
let mut child_locks = Vec::with_capacity(n_children);
children.extend((0..n_children).map(|_| Default::default()));
@ -282,17 +282,17 @@ impl<L: Language, D> NodeData<L, D> {
}
}
impl<L: Language, D> SyntaxNode<L, D> {
impl<S: Syntax, D> SyntaxNode<S, D> {
/// Build a new syntax tree on top of a green tree.
///
/// # Example
/// ```
/// # use cstree::testing::*;
/// # let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::new();
/// # let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::new();
/// # builder.start_node(Root);
/// # builder.finish_node();
/// # let (green_root, _) = builder.finish();
/// let root: SyntaxNode<MyLanguage> = SyntaxNode::new_root(green_root);
/// let root: SyntaxNode<MySyntax> = SyntaxNode::new_root(green_root);
/// assert_eq!(root.kind(), Root);
/// ```
#[inline]
@ -300,7 +300,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
Self::make_new_root(green, None)
}
fn new(data: NonNull<NodeData<L, D>>) -> Self {
fn new(data: NonNull<NodeData<S, D>>) -> Self {
Self { data }
}
@ -334,7 +334,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// # use cstree::testing::*;
/// use cstree::syntax::ResolvedNode;
///
/// let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::new();
/// let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::new();
/// builder.start_node(Root);
/// builder.token(Identifier, "content");
/// builder.finish_node();
@ -344,11 +344,11 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// // This created a new interner and cache for us owned by the builder,
/// // and `finish` always returns these.
/// let interner = cache.unwrap().into_interner().unwrap();
/// let root: ResolvedNode<MyLanguage> = SyntaxNode::new_root_with_resolver(green, interner);
/// let root: ResolvedNode<MySyntax> = SyntaxNode::new_root_with_resolver(green, interner);
/// assert_eq!(root.text(), "content");
/// ```
#[inline]
pub fn new_root_with_resolver(green: GreenNode, resolver: impl Resolver<TokenKey> + 'static) -> ResolvedNode<L, D> {
pub fn new_root_with_resolver(green: GreenNode, resolver: impl Resolver<TokenKey> + 'static) -> ResolvedNode<S, D> {
let ptr: StdArc<dyn Resolver<TokenKey>> = StdArc::new(resolver);
ResolvedNode {
syntax: SyntaxNode::make_new_root(green, Some(ptr)),
@ -412,7 +412,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
}
#[inline]
fn read(&self, index: usize) -> Option<SyntaxElementRef<'_, L, D>> {
fn read(&self, index: usize) -> Option<SyntaxElementRef<'_, S, D>> {
// safety: children are pre-allocated and indices are determined internally
let _read = unsafe { self.data().child_locks.get_unchecked(index).read() };
// safety: mutable accesses to the slot only occur below and have to take the lock
@ -420,7 +420,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
slot.as_ref().map(|elem| elem.into())
}
fn try_write(&self, index: usize, elem: SyntaxElement<L, D>) {
fn try_write(&self, index: usize, elem: SyntaxElement<S, D>) {
// safety: children are pre-allocated and indices are determined internally
let _write = unsafe { self.data().child_locks.get_unchecked(index).write() };
// safety: we are the only writer and there are no readers as evidenced by the write lock
@ -469,7 +469,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
node: &GreenNode,
index: usize,
offset: TextSize,
) -> SyntaxElementRef<'_, L, D> {
) -> SyntaxElementRef<'_, S, D> {
if let Some(elem) = self.read(index) {
debug_assert_eq!(elem.text_range().start(), offset);
return elem;
@ -487,7 +487,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
element: GreenElementRef<'_>,
index: usize,
offset: TextSize,
) -> SyntaxElementRef<'_, L, D> {
) -> SyntaxElementRef<'_, S, D> {
if let Some(elem) = self.read(index) {
debug_assert_eq!(elem.text_range().start(), offset);
return elem;
@ -529,8 +529,8 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// The kind of this node in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
L::kind_from_raw(self.syntax_kind())
pub fn kind(&self) -> S {
S::from_raw(self.syntax_kind())
}
/// The range this node covers in the source text, in bytes.
@ -547,7 +547,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// by this node, i.e. the combined text of all token leafs of the subtree originating in this
/// node.
#[inline]
pub fn resolve_text<'n, 'i, I>(&'n self, resolver: &'i I) -> SyntaxText<'n, 'i, I, L, D>
pub fn resolve_text<'n, 'i, I>(&'n self, resolver: &'i I) -> SyntaxText<'n, 'i, I, S, D>
where
I: Resolver<TokenKey> + ?Sized,
{
@ -562,7 +562,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// The parent node of this node, except if this node is the root.
#[inline]
pub fn parent(&self) -> Option<&SyntaxNode<L, D>> {
pub fn parent(&self) -> Option<&SyntaxNode<S, D>> {
match &self.data().kind {
Kind::Root(_, _) => None,
Kind::Child { parent, .. } => Some(parent),
@ -585,7 +585,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<S, D>> {
iter::successors(Some(self), |&node| node.parent())
}
@ -593,13 +593,13 @@ impl<L: Language, D> SyntaxNode<L, D> {
///
/// If you want to also consider leafs, see [`children_with_tokens`](SyntaxNode::children_with_tokens).
#[inline]
pub fn children(&self) -> SyntaxNodeChildren<'_, L, D> {
pub fn children(&self) -> SyntaxNodeChildren<'_, S, D> {
SyntaxNodeChildren::new(self)
}
/// Returns an iterator over child elements of this node, including tokens.
#[inline]
pub fn children_with_tokens(&self) -> SyntaxElementChildren<'_, L, D> {
pub fn children_with_tokens(&self) -> SyntaxElementChildren<'_, S, D> {
SyntaxElementChildren::new(self)
}
@ -608,14 +608,14 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// If you want to also consider leafs, see [`first_child_or_token`](SyntaxNode::first_child_or_token).
#[inline]
#[allow(clippy::map_clone)]
pub fn first_child(&self) -> Option<&SyntaxNode<L, D>> {
pub fn first_child(&self) -> Option<&SyntaxNode<S, D>> {
let (node, (index, offset)) = filter_nodes(self.green().children_from(0, self.text_range().start())).next()?;
self.get_or_add_node(node, index, offset).as_node().map(|node| *node)
}
/// The first child element of this node, if any, including tokens.
#[inline]
pub fn first_child_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn first_child_or_token(&self) -> Option<SyntaxElementRef<'_, S, D>> {
let (element, (index, offset)) = self.green().children_from(0, self.text_range().start()).next()?;
Some(self.get_or_add_element(element, index, offset))
}
@ -625,7 +625,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// If you want to also consider leafs, see [`last_child_or_token`](SyntaxNode::last_child_or_token).
#[inline]
#[allow(clippy::map_clone)]
pub fn last_child(&self) -> Option<&SyntaxNode<L, D>> {
pub fn last_child(&self) -> Option<&SyntaxNode<S, D>> {
let (node, (index, offset)) = filter_nodes(
self.green()
.children_to(self.green().children().len(), self.text_range().end()),
@ -636,7 +636,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// The last child element of this node, if any, including tokens.
#[inline]
pub fn last_child_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn last_child_or_token(&self) -> Option<SyntaxElementRef<'_, S, D>> {
let (element, (index, offset)) = self
.green()
.children_to(self.green().children().len(), self.text_range().end())
@ -650,7 +650,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
///
/// If you want to also consider leafs, see [`next_child_or_token_after`](SyntaxNode::next_child_or_token_after).
#[inline]
pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<L, D>> {
pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<S, D>> {
let (node, (index, offset)) = filter_nodes(self.green().children_from(n + 1, offset)).next()?;
self.get_or_add_node(node, index, offset).as_node().copied()
}
@ -658,7 +658,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// The first child element of this node starting at the (n + 1)-st, if any.
/// If this method returns `Some`, the contained node is the (n + 1)-st child of this node.
#[inline]
pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option<SyntaxElementRef<'_, S, D>> {
let (element, (index, offset)) = self.green().children_from(n + 1, offset).next()?;
Some(self.get_or_add_element(element, index, offset))
}
@ -669,7 +669,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
///
/// If you want to also consider leafs, see [`prev_child_or_token_before`](SyntaxNode::prev_child_or_token_before).
#[inline]
pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<L, D>> {
pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<S, D>> {
let (node, (index, offset)) = filter_nodes(self.green().children_to(n, offset)).next()?;
self.get_or_add_node(node, index, offset).as_node().copied()
}
@ -677,7 +677,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// The last child node of this node up to the nth, if any.
/// If this method returns `Some`, the contained node is the (n - 1)-st child.
#[inline]
pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option<SyntaxElementRef<'_, S, D>> {
let (element, (index, offset)) = self.green().children_to(n, offset).next()?;
Some(self.get_or_add_element(element, index, offset))
}
@ -686,7 +686,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
///
/// If you want to also consider leafs, see [`next_sibling_or_token`](SyntaxNode::next_sibling_or_token).
#[inline]
pub fn next_sibling(&self) -> Option<&SyntaxNode<L, D>> {
pub fn next_sibling(&self) -> Option<&SyntaxNode<S, D>> {
let (parent, index, _) = self.data().kind.as_child()?;
let (node, (index, offset)) = filter_nodes(
@ -700,7 +700,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// The tree element to the right of this one, i.e. the next child of this node's parent after this node.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, S, D>> {
let (parent, index, _) = self.data().kind.as_child()?;
let (element, (index, offset)) = parent
@ -714,7 +714,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
///
/// If you want to also consider leafs, see [`prev_sibling_or_token`](SyntaxNode::prev_sibling_or_token).
#[inline]
pub fn prev_sibling(&self) -> Option<&SyntaxNode<L, D>> {
pub fn prev_sibling(&self) -> Option<&SyntaxNode<S, D>> {
let (parent, index, _) = self.data().kind.as_child()?;
let (node, (index, offset)) =
@ -724,7 +724,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// The tree element to the left of this one, i.e. the previous child of this node's parent before this node.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, S, D>> {
let (parent, index, _) = self.data().kind.as_child()?;
let (element, (index, offset)) = parent
@ -736,13 +736,13 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// Return the leftmost token in the subtree of this node
#[inline]
pub fn first_token(&self) -> Option<&SyntaxToken<L, D>> {
pub fn first_token(&self) -> Option<&SyntaxToken<S, D>> {
self.first_child_or_token()?.first_token()
}
/// Return the rightmost token in the subtree of this node
#[inline]
pub fn last_token(&self) -> Option<&SyntaxToken<L, D>> {
pub fn last_token(&self) -> Option<&SyntaxToken<S, D>> {
self.last_child_or_token()?.last_token()
}
@ -752,7 +752,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
///
/// If you want to also consider leafs, see [`siblings_with_tokens`](SyntaxNode::siblings_with_tokens).
#[inline]
pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode<L, D>> {
pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode<S, D>> {
iter::successors(Some(self), move |node| match direction {
Direction::Next => node.next_sibling(),
Direction::Prev => node.prev_sibling(),
@ -763,8 +763,8 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// node's parent's children from this node on to the left or the right.
/// The first item in the iterator will always be this node.
#[inline]
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = SyntaxElementRef<'_, L, D>> {
let me: SyntaxElementRef<'_, L, D> = self.into();
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = SyntaxElementRef<'_, S, D>> {
let me: SyntaxElementRef<'_, S, D> = self.into();
iter::successors(Some(me), move |el| match direction {
Direction::Next => el.next_sibling_or_token(),
Direction::Prev => el.prev_sibling_or_token(),
@ -775,7 +775,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
///
/// If you want to also consider leafs, see [`descendants_with_tokens`](SyntaxNode::descendants_with_tokens).
#[inline]
pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode<S, D>> {
self.preorder().filter_map(|event| match event {
WalkEvent::Enter(node) => Some(node),
WalkEvent::Leave(_) => None,
@ -784,7 +784,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// Returns an iterator over all elements in the subtree starting at this node, including this node.
#[inline]
pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElementRef<'_, L, D>> {
pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElementRef<'_, S, D>> {
self.preorder_with_tokens().filter_map(|event| match event {
WalkEvent::Enter(it) => Some(it),
WalkEvent::Leave(_) => None,
@ -794,7 +794,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// Traverse the subtree rooted at the current node (including the current
/// node) in preorder, excluding tokens.
#[inline(always)]
pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode<L, D>>> {
pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode<S, D>>> {
iter::successors(Some(WalkEvent::Enter(self)), move |pos| {
let next = match pos {
WalkEvent::Enter(node) => match node.first_child() {
@ -818,7 +818,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// Traverse the subtree rooted at the current node (including the current
/// node) in preorder, including tokens.
#[inline(always)]
pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElementRef<'_, L, D>>> {
pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElementRef<'_, S, D>>> {
let me = self.into();
iter::successors(Some(WalkEvent::Enter(me)), move |pos| {
let next = match pos {
@ -845,7 +845,7 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// Find a token in the subtree corresponding to this node, which covers the offset.
/// Precondition: offset must be withing node's range.
pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<L, D>> {
pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<S, D>> {
// TODO: this could be faster if we first drill-down to node, and only
// then switch to token search. We should also replace explicit
// recursion with a loop.
@ -883,8 +883,8 @@ impl<L: Language, D> SyntaxNode<L, D> {
/// contains the range. If the range is empty and is contained in two leaf
/// nodes, either one can be returned. Precondition: range must be contained
/// withing the current node
pub fn covering_element(&self, range: TextRange) -> SyntaxElementRef<'_, L, D> {
let mut res: SyntaxElementRef<'_, L, D> = self.into();
pub fn covering_element(&self, range: TextRange) -> SyntaxElementRef<'_, S, D> {
let mut res: SyntaxElementRef<'_, S, D> = self.into();
loop {
assert!(
res.text_range().contains_range(range),
@ -909,9 +909,9 @@ impl<L: Language, D> SyntaxNode<L, D> {
}
#[cfg(feature = "serialize")]
impl<L, D> SyntaxNode<L, D>
impl<S, D> SyntaxNode<S, D>
where
L: Language,
S: Syntax,
{
/// Return an anonymous object that can be used to serialize this node,
/// including the data and by using an external resolver.

View file

@ -17,7 +17,7 @@ use crate::{
syntax::*,
traversal::*,
util::*,
Language, RawSyntaxKind,
RawSyntaxKind, Syntax,
};
/// Syntax tree node that is guaranteed to belong to a tree that contains an associated
@ -26,24 +26,24 @@ use crate::{
/// [`SyntaxNode`]
/// [`SyntaxNode::new_root_with_resolver`]
#[repr(transparent)]
pub struct ResolvedNode<L: Language, D: 'static = ()> {
pub(super) syntax: SyntaxNode<L, D>,
pub struct ResolvedNode<S: Syntax, D: 'static = ()> {
pub(super) syntax: SyntaxNode<S, D>,
}
impl<L: Language, D> ResolvedNode<L, D> {
impl<S: Syntax, D> ResolvedNode<S, D> {
/// # Safety:
/// `syntax` must belong to a tree that contains an associated inline resolver.
pub(super) unsafe fn coerce_ref(syntax: &SyntaxNode<L, D>) -> &Self {
pub(super) unsafe fn coerce_ref(syntax: &SyntaxNode<S, D>) -> &Self {
&*(syntax as *const _ as *const Self)
}
/// Returns this node as a [`SyntaxNode`].
pub fn syntax(&self) -> &SyntaxNode<L, D> {
pub fn syntax(&self) -> &SyntaxNode<S, D> {
&self.syntax
}
}
impl<L: Language, D> Clone for ResolvedNode<L, D> {
impl<S: Syntax, D> Clone for ResolvedNode<S, D> {
fn clone(&self) -> Self {
Self {
syntax: self.syntax.clone(),
@ -51,15 +51,15 @@ impl<L: Language, D> Clone for ResolvedNode<L, D> {
}
}
impl<L: Language, D> Deref for ResolvedNode<L, D> {
type Target = SyntaxNode<L, D>;
impl<S: Syntax, D> Deref for ResolvedNode<S, D> {
type Target = SyntaxNode<S, D>;
fn deref(&self) -> &Self::Target {
&self.syntax
}
}
impl<L: Language, D> DerefMut for ResolvedNode<L, D> {
impl<S: Syntax, D> DerefMut for ResolvedNode<S, D> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.syntax
}
@ -70,24 +70,24 @@ impl<L: Language, D> DerefMut for ResolvedNode<L, D> {
/// # See also
/// [`SyntaxToken`]
#[repr(transparent)]
pub struct ResolvedToken<L: Language, D: 'static = ()> {
syntax: SyntaxToken<L, D>,
pub struct ResolvedToken<S: Syntax, D: 'static = ()> {
syntax: SyntaxToken<S, D>,
}
impl<L: Language, D> ResolvedToken<L, D> {
impl<S: Syntax, D> ResolvedToken<S, D> {
/// # Safety:
/// `syntax` must belong to a tree that contains an associated inline resolver.
pub(super) unsafe fn coerce_ref(syntax: &SyntaxToken<L, D>) -> &Self {
pub(super) unsafe fn coerce_ref(syntax: &SyntaxToken<S, D>) -> &Self {
&*(syntax as *const _ as *const Self)
}
/// Returns this token as a [`SyntaxToken`].
pub fn syntax(&self) -> &SyntaxToken<L, D> {
pub fn syntax(&self) -> &SyntaxToken<S, D> {
&self.syntax
}
}
impl<L: Language, D> Clone for ResolvedToken<L, D> {
impl<S: Syntax, D> Clone for ResolvedToken<S, D> {
fn clone(&self) -> Self {
Self {
syntax: self.syntax.clone(),
@ -95,15 +95,15 @@ impl<L: Language, D> Clone for ResolvedToken<L, D> {
}
}
impl<L: Language, D> Deref for ResolvedToken<L, D> {
type Target = SyntaxToken<L, D>;
impl<S: Syntax, D> Deref for ResolvedToken<S, D> {
type Target = SyntaxToken<S, D>;
fn deref(&self) -> &Self::Target {
&self.syntax
}
}
impl<L: Language, D> DerefMut for ResolvedToken<L, D> {
impl<S: Syntax, D> DerefMut for ResolvedToken<S, D> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.syntax
}
@ -113,21 +113,21 @@ impl<L: Language, D> DerefMut for ResolvedToken<L, D> {
/// [`Resolver`](lasso::Resolver), can be either a node or a token.
/// # See also
/// [`SyntaxElement`](crate::syntax::SyntaxElement)
pub type ResolvedElement<L, D = ()> = NodeOrToken<ResolvedNode<L, D>, ResolvedToken<L, D>>;
pub type ResolvedElement<S, D = ()> = NodeOrToken<ResolvedNode<S, D>, ResolvedToken<S, D>>;
impl<L: Language, D> From<ResolvedNode<L, D>> for ResolvedElement<L, D> {
fn from(node: ResolvedNode<L, D>) -> ResolvedElement<L, D> {
impl<S: Syntax, D> From<ResolvedNode<S, D>> for ResolvedElement<S, D> {
fn from(node: ResolvedNode<S, D>) -> ResolvedElement<S, D> {
NodeOrToken::Node(node)
}
}
impl<L: Language, D> From<ResolvedToken<L, D>> for ResolvedElement<L, D> {
fn from(token: ResolvedToken<L, D>) -> ResolvedElement<L, D> {
impl<S: Syntax, D> From<ResolvedToken<S, D>> for ResolvedElement<S, D> {
fn from(token: ResolvedToken<S, D>) -> ResolvedElement<S, D> {
NodeOrToken::Token(token)
}
}
impl<L: Language, D> ResolvedElement<L, D> {
impl<S: Syntax, D> ResolvedElement<S, D> {
#[allow(missing_docs)]
pub fn display(&self, resolver: &impl Resolver<TokenKey>) -> String {
match self {
@ -141,12 +141,12 @@ impl<L: Language, D> ResolvedElement<L, D> {
/// associated [`Resolver`](lasso::Resolver), can be either a reference to a node or one to a token.
/// # See also
/// [`SyntaxElementRef`]
pub type ResolvedElementRef<'a, L, D = ()> = NodeOrToken<&'a ResolvedNode<L, D>, &'a ResolvedToken<L, D>>;
pub type ResolvedElementRef<'a, S, D = ()> = NodeOrToken<&'a ResolvedNode<S, D>, &'a ResolvedToken<S, D>>;
impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
impl<'a, S: Syntax, D> ResolvedElementRef<'a, S, D> {
/// # Safety:
/// `syntax` must belong to a tree that contains an associated inline resolver.
pub(super) unsafe fn coerce_ref(syntax: SyntaxElementRef<'a, L, D>) -> Self {
pub(super) unsafe fn coerce_ref(syntax: SyntaxElementRef<'a, S, D>) -> Self {
match syntax {
NodeOrToken::Node(node) => Self::Node(ResolvedNode::coerce_ref(node)),
NodeOrToken::Token(token) => Self::Token(ResolvedToken::coerce_ref(token)),
@ -154,20 +154,20 @@ impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
}
}
impl<'a, L: Language, D> From<&'a ResolvedNode<L, D>> for ResolvedElementRef<'a, L, D> {
fn from(node: &'a ResolvedNode<L, D>) -> Self {
impl<'a, S: Syntax, D> From<&'a ResolvedNode<S, D>> for ResolvedElementRef<'a, S, D> {
fn from(node: &'a ResolvedNode<S, D>) -> Self {
NodeOrToken::Node(node)
}
}
impl<'a, L: Language, D> From<&'a ResolvedToken<L, D>> for ResolvedElementRef<'a, L, D> {
fn from(token: &'a ResolvedToken<L, D>) -> Self {
impl<'a, S: Syntax, D> From<&'a ResolvedToken<S, D>> for ResolvedElementRef<'a, S, D> {
fn from(token: &'a ResolvedToken<S, D>) -> Self {
NodeOrToken::Token(token)
}
}
impl<'a, L: Language, D> From<&'a ResolvedElement<L, D>> for ResolvedElementRef<'a, L, D> {
fn from(element: &'a ResolvedElement<L, D>) -> Self {
impl<'a, S: Syntax, D> From<&'a ResolvedElement<S, D>> for ResolvedElementRef<'a, S, D> {
fn from(element: &'a ResolvedElement<S, D>) -> Self {
match element {
NodeOrToken::Node(it) => Self::Node(it),
NodeOrToken::Token(it) => Self::Token(it),
@ -175,29 +175,29 @@ impl<'a, L: Language, D> From<&'a ResolvedElement<L, D>> for ResolvedElementRef<
}
}
impl<L: Language, D> ResolvedNode<L, D> {
impl<S: Syntax, D> ResolvedNode<S, D> {
/// Uses the resolver associated with this tree to return an efficient representation of all
/// source text covered by this node, i.e. the combined text of all token leafs of the subtree
/// originating in this node.
#[inline]
pub fn text(&self) -> SyntaxText<'_, '_, dyn Resolver<TokenKey>, L, D> {
pub fn text(&self) -> SyntaxText<'_, '_, dyn Resolver<TokenKey>, S, D> {
SyntaxText::new(self, &**self.resolver())
}
}
impl<L: Language, D> fmt::Debug for ResolvedNode<L, D> {
impl<S: Syntax, D> fmt::Debug for ResolvedNode<S, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.write_debug(&**self.resolver(), f, f.alternate())
}
}
impl<L: Language, D> fmt::Display for ResolvedNode<L, D> {
impl<S: Syntax, D> fmt::Display for ResolvedNode<S, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.write_display(&**self.resolver(), f)
}
}
impl<L: Language, D> ResolvedToken<L, D> {
impl<S: Syntax, D> ResolvedToken<S, D> {
/// Uses the resolver associated with this tree to return the source text of this token.
#[inline]
pub fn text(&self) -> &str {
@ -208,22 +208,22 @@ impl<L: Language, D> ResolvedToken<L, D> {
}
}
impl<L: Language, D> fmt::Debug for ResolvedToken<L, D> {
impl<S: Syntax, D> fmt::Debug for ResolvedToken<S, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.write_debug(&**self.resolver(), f)
}
}
impl<L: Language, D> fmt::Display for ResolvedToken<L, D> {
impl<S: Syntax, D> fmt::Display for ResolvedToken<S, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.write_display(&**self.resolver(), f)
}
}
#[cfg(feature = "serialize")]
impl<L, D> ResolvedNode<L, D>
impl<S, D> ResolvedNode<S, D>
where
L: Language,
S: Syntax,
{
/// Return an anonymous object that can be used to serialize this node,
/// including the data for each node.
@ -267,7 +267,7 @@ macro_rules! forward_node {
};
}
impl<L: Language, D> ResolvedNode<L, D> {
impl<S: Syntax, D> ResolvedNode<S, D> {
/// Returns the [`Resolver`] associated with this tree.
pub fn resolver(&self) -> &StdArc<dyn Resolver<TokenKey>> {
self.syntax.resolver().unwrap()
@ -283,7 +283,7 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// This method mostly exists to allow the convenience of being agnostic over [`SyntaxNode`] vs [`ResolvedNode`].
#[inline]
pub fn try_resolved(&self) -> Option<&ResolvedNode<L, D>> {
pub fn try_resolved(&self) -> Option<&ResolvedNode<S, D>> {
Some(self)
}
@ -291,7 +291,7 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// This method mostly exists to allow the convenience of being agnostic over [`SyntaxNode`] vs [`ResolvedNode`].
#[inline]
pub fn resolved(&self) -> &ResolvedNode<L, D> {
pub fn resolved(&self) -> &ResolvedNode<S, D> {
self
}
@ -299,7 +299,7 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// If this node is the root, returns `self`.
#[inline]
pub fn root(&self) -> &SyntaxNode<L, D> {
pub fn root(&self) -> &SyntaxNode<S, D> {
unsafe { Self::coerce_ref(self.syntax.root()) }
}
@ -325,7 +325,7 @@ impl<L: Language, D> ResolvedNode<L, D> {
/// Returns an iterator over child elements of this node, including tokens.
#[inline]
pub fn children_with_tokens(&self) -> impl Iterator<Item = ResolvedElementRef<'_, L, D>> {
pub fn children_with_tokens(&self) -> impl Iterator<Item = ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.children_with_tokens())
}
@ -333,13 +333,13 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// If you want to also consider leafs, see [`first_child_or_token`](ResolvedNode::first_child_or_token).
#[inline]
pub fn first_child(&self) -> Option<&ResolvedNode<L, D>> {
pub fn first_child(&self) -> Option<&ResolvedNode<S, D>> {
forward!(self.syntax.first_child())
}
/// The first child element of this node, if any, including tokens.
#[inline]
pub fn first_child_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn first_child_or_token(&self) -> Option<ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.first_child_or_token())
}
@ -347,13 +347,13 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// If you want to also consider leafs, see [`last_child_or_token`](ResolvedNode::last_child_or_token).
#[inline]
pub fn last_child(&self) -> Option<&ResolvedNode<L, D>> {
pub fn last_child(&self) -> Option<&ResolvedNode<S, D>> {
forward!(self.syntax.last_child())
}
/// The last child element of this node, if any, including tokens.
#[inline]
pub fn last_child_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn last_child_or_token(&self) -> Option<ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.last_child_or_token())
}
@ -363,14 +363,14 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// If you want to also consider leafs, see [`next_child_or_token_after`](ResolvedNode::next_child_or_token_after).
#[inline]
pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&ResolvedNode<L, D>> {
pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&ResolvedNode<S, D>> {
forward!(self.syntax.next_child_after(n, offset))
}
/// The first child element of this node starting at the (n + 1)-st, if any.
/// If this method returns `Some`, the contained node is the (n + 1)-st child of this node.
#[inline]
pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn next_child_or_token_after(&self, n: usize, offset: TextSize) -> Option<ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.next_child_or_token_after(n, offset))
}
@ -381,14 +381,14 @@ impl<L: Language, D> ResolvedNode<L, D> {
/// If you want to also consider leafs, see
/// [`prev_child_or_token_before`](ResolvedNode::prev_child_or_token_before).
#[inline]
pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&ResolvedNode<L, D>> {
pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&ResolvedNode<S, D>> {
forward!(self.syntax.prev_child_before(n, offset))
}
/// The last child node of this node up to the nth, if any.
/// If this method returns `Some`, the contained node is the (n - 1)-st child.
#[inline]
pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn prev_child_or_token_before(&self, n: usize, offset: TextSize) -> Option<ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.prev_child_or_token_before(n, offset))
}
@ -396,13 +396,13 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// If you want to also consider leafs, see [`next_sibling_or_token`](ResolvedNode::next_sibling_or_token).
#[inline]
pub fn next_sibling(&self) -> Option<&ResolvedNode<L, D>> {
pub fn next_sibling(&self) -> Option<&ResolvedNode<S, D>> {
forward!(self.syntax.next_sibling())
}
/// The tree element to the right of this one, i.e. the next child of this node's parent after this node.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.next_sibling_or_token())
}
@ -410,25 +410,25 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// If you want to also consider leafs, see [`prev_sibling_or_token`](ResolvedNode::prev_sibling_or_token).
#[inline]
pub fn prev_sibling(&self) -> Option<&ResolvedNode<L, D>> {
pub fn prev_sibling(&self) -> Option<&ResolvedNode<S, D>> {
forward!(self.syntax.prev_sibling())
}
/// The tree element to the left of this one, i.e. the previous child of this node's parent before this node.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.prev_sibling_or_token())
}
/// Return the leftmost token in the subtree of this node
#[inline]
pub fn first_token(&self) -> Option<&ResolvedToken<L, D>> {
pub fn first_token(&self) -> Option<&ResolvedToken<S, D>> {
forward_token!(self.syntax.first_token())
}
/// Return the rightmost token in the subtree of this node
#[inline]
pub fn last_token(&self) -> Option<&ResolvedToken<L, D>> {
pub fn last_token(&self) -> Option<&ResolvedToken<S, D>> {
forward_token!(self.syntax.last_token())
}
@ -438,7 +438,7 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// If you want to also consider leafs, see [`siblings_with_tokens`](ResolvedNode::siblings_with_tokens).
#[inline]
pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &ResolvedNode<L, D>> {
pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &ResolvedNode<S, D>> {
forward!(self.syntax.siblings(direction))
}
@ -446,7 +446,7 @@ impl<L: Language, D> ResolvedNode<L, D> {
/// node's parent's children from this node on to the left or the right.
/// The first item in the iterator will always be this node.
#[inline]
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = ResolvedElementRef<'_, L, D>> {
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.siblings_with_tokens(direction))
}
@ -454,20 +454,20 @@ impl<L: Language, D> ResolvedNode<L, D> {
///
/// If you want to also consider leafs, see [`descendants_with_tokens`](ResolvedNode::descendants_with_tokens).
#[inline]
pub fn descendants(&self) -> impl Iterator<Item = &ResolvedNode<L, D>> {
pub fn descendants(&self) -> impl Iterator<Item = &ResolvedNode<S, D>> {
forward!(self.syntax.descendants())
}
/// Returns an iterator over all elements in the subtree starting at this node, including this node.
#[inline]
pub fn descendants_with_tokens(&self) -> impl Iterator<Item = ResolvedElementRef<'_, L, D>> {
pub fn descendants_with_tokens(&self) -> impl Iterator<Item = ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.descendants_with_tokens())
}
/// Traverse the subtree rooted at the current node (including the current
/// node) in preorder, excluding tokens.
#[inline(always)]
pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&ResolvedNode<L, D>>> {
pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&ResolvedNode<S, D>>> {
self.syntax
.preorder()
.map(|event| event.map(|node| unsafe { Self::coerce_ref(node) }))
@ -476,7 +476,7 @@ impl<L: Language, D> ResolvedNode<L, D> {
/// Traverse the subtree rooted at the current node (including the current
/// node) in preorder, including tokens.
#[inline(always)]
pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<ResolvedElementRef<'_, L, D>>> {
pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<ResolvedElementRef<'_, S, D>>> {
self.syntax
.preorder_with_tokens()
.map(|event| event.map(|elem| unsafe { ResolvedElementRef::coerce_ref(elem) }))
@ -484,7 +484,7 @@ impl<L: Language, D> ResolvedNode<L, D> {
/// Find a token in the subtree corresponding to this node, which covers the offset.
/// Precondition: offset must be withing node's range.
pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<ResolvedToken<L, D>> {
pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<ResolvedToken<S, D>> {
self.syntax
.token_at_offset(offset)
.map(|token| ResolvedToken { syntax: token })
@ -494,12 +494,12 @@ impl<L: Language, D> ResolvedNode<L, D> {
/// contains the range. If the range is empty and is contained in two leaf
/// nodes, either one can be returned. Precondition: range must be contained
/// withing the current node
pub fn covering_element(&self, range: TextRange) -> ResolvedElementRef<'_, L, D> {
pub fn covering_element(&self, range: TextRange) -> ResolvedElementRef<'_, S, D> {
unsafe { ResolvedElementRef::coerce_ref(self.syntax.covering_element(range)) }
}
}
impl<L: Language, D> ResolvedToken<L, D> {
impl<S: Syntax, D> ResolvedToken<S, D> {
/// Returns the [`Resolver`] associated with this tree.
pub fn resolver(&self) -> &StdArc<dyn Resolver<TokenKey>> {
self.syntax.resolver().unwrap()
@ -509,7 +509,7 @@ impl<L: Language, D> ResolvedToken<L, D> {
///
/// This method mostly exists to allow the convenience of being agnostic over [`SyntaxToken`] vs [`ResolvedToken`].
#[inline]
pub fn try_resolved(&self) -> Option<&ResolvedToken<L, D>> {
pub fn try_resolved(&self) -> Option<&ResolvedToken<S, D>> {
Some(self)
}
@ -517,31 +517,31 @@ impl<L: Language, D> ResolvedToken<L, D> {
///
/// This method mostly exists to allow the convenience of being agnostic over [`SyntaxToken`] vs [`ResolvedToken`].
#[inline]
pub fn resolved(&self) -> &ResolvedToken<L, D> {
pub fn resolved(&self) -> &ResolvedToken<S, D> {
self
}
/// The parent node of this token.
#[inline]
pub fn parent(&self) -> &ResolvedNode<L, D> {
pub fn parent(&self) -> &ResolvedNode<S, D> {
unsafe { ResolvedNode::coerce_ref(self.syntax.parent()) }
}
/// Returns an iterator along the chain of parents of this token.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &ResolvedNode<L, D>> {
pub fn ancestors(&self) -> impl Iterator<Item = &ResolvedNode<S, D>> {
forward_node!(self.syntax.ancestors())
}
/// The tree element to the right of this one, i.e. the next child of this token's parent after this token.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.next_sibling_or_token())
}
/// The tree element to the left of this one, i.e. the previous child of this token's parent before this token.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.prev_sibling_or_token())
}
@ -549,24 +549,24 @@ impl<L: Language, D> ResolvedToken<L, D> {
/// token's parent's children from this token on to the left or the right.
/// The first item in the iterator will always be this token.
#[inline]
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = ResolvedElementRef<'_, L, D>> {
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = ResolvedElementRef<'_, S, D>> {
forward_as_elem!(self.syntax.siblings_with_tokens(direction))
}
/// Returns the next token in the tree.
/// This is not necessarily a direct sibling of this token, but will always be further right in the tree.
pub fn next_token(&self) -> Option<&ResolvedToken<L, D>> {
pub fn next_token(&self) -> Option<&ResolvedToken<S, D>> {
forward!(self.syntax.next_token())
}
/// Returns the previous token in the tree.
/// This is not necessarily a direct sibling of this token, but will always be further left in the tree.
pub fn prev_token(&self) -> Option<&ResolvedToken<L, D>> {
pub fn prev_token(&self) -> Option<&ResolvedToken<S, D>> {
forward!(self.syntax.prev_token())
}
}
impl<L: Language, D> ResolvedElement<L, D> {
impl<S: Syntax, D> ResolvedElement<S, D> {
/// The range this element covers in the source text, in bytes.
#[inline]
pub fn text_range(&self) -> TextRange {
@ -587,7 +587,7 @@ impl<L: Language, D> ResolvedElement<L, D> {
/// The kind of this element in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
pub fn kind(&self) -> S {
match self {
NodeOrToken::Node(it) => it.kind(),
NodeOrToken::Token(it) => it.kind(),
@ -596,7 +596,7 @@ impl<L: Language, D> ResolvedElement<L, D> {
/// The parent node of this element, except if this element is the root.
#[inline]
pub fn parent(&self) -> Option<&ResolvedNode<L, D>> {
pub fn parent(&self) -> Option<&ResolvedNode<S, D>> {
match self {
NodeOrToken::Node(it) => it.parent(),
NodeOrToken::Token(it) => Some(it.parent()),
@ -605,7 +605,7 @@ impl<L: Language, D> ResolvedElement<L, D> {
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &ResolvedNode<L, D>> {
pub fn ancestors(&self) -> impl Iterator<Item = &ResolvedNode<S, D>> {
match self {
NodeOrToken::Node(it) => it.ancestors(),
NodeOrToken::Token(it) => it.parent().ancestors(),
@ -614,7 +614,7 @@ impl<L: Language, D> ResolvedElement<L, D> {
/// Return the leftmost token in the subtree of this element.
#[inline]
pub fn first_token(&self) -> Option<&ResolvedToken<L, D>> {
pub fn first_token(&self) -> Option<&ResolvedToken<S, D>> {
match self {
NodeOrToken::Node(it) => it.first_token(),
NodeOrToken::Token(it) => Some(it),
@ -623,7 +623,7 @@ impl<L: Language, D> ResolvedElement<L, D> {
/// Return the rightmost token in the subtree of this element.
#[inline]
pub fn last_token(&self) -> Option<&ResolvedToken<L, D>> {
pub fn last_token(&self) -> Option<&ResolvedToken<S, D>> {
match self {
NodeOrToken::Node(it) => it.last_token(),
NodeOrToken::Token(it) => Some(it),
@ -632,7 +632,7 @@ impl<L: Language, D> ResolvedElement<L, D> {
/// The tree element to the right of this one, i.e. the next child of this element's parent after this element.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, S, D>> {
match self {
NodeOrToken::Node(it) => it.next_sibling_or_token(),
NodeOrToken::Token(it) => it.next_sibling_or_token(),
@ -641,7 +641,7 @@ impl<L: Language, D> ResolvedElement<L, D> {
/// The tree element to the left of this one, i.e. the previous child of this element's parent before this element.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, L, D>> {
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'_, S, D>> {
match self {
NodeOrToken::Node(it) => it.prev_sibling_or_token(),
NodeOrToken::Token(it) => it.prev_sibling_or_token(),
@ -649,7 +649,7 @@ impl<L: Language, D> ResolvedElement<L, D> {
}
}
impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
impl<'a, S: Syntax, D> ResolvedElementRef<'a, S, D> {
/// The range this element covers in the source text, in bytes.
#[inline]
pub fn text_range(&self) -> TextRange {
@ -670,7 +670,7 @@ impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
/// The kind of this element in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
pub fn kind(&self) -> S {
match self {
NodeOrToken::Node(it) => it.kind(),
NodeOrToken::Token(it) => it.kind(),
@ -679,7 +679,7 @@ impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
/// The parent node of this element, except if this element is the root.
#[inline]
pub fn parent(&self) -> Option<&'a ResolvedNode<L, D>> {
pub fn parent(&self) -> Option<&'a ResolvedNode<S, D>> {
match self {
NodeOrToken::Node(it) => it.parent(),
NodeOrToken::Token(it) => Some(it.parent()),
@ -688,7 +688,7 @@ impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
/// Returns an iterator along the chain of parents of this node.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &'a ResolvedNode<L, D>> {
pub fn ancestors(&self) -> impl Iterator<Item = &'a ResolvedNode<S, D>> {
match self {
NodeOrToken::Node(it) => it.ancestors(),
NodeOrToken::Token(it) => it.parent().ancestors(),
@ -697,7 +697,7 @@ impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
/// Return the leftmost token in the subtree of this element.
#[inline]
pub fn first_token(&self) -> Option<&'a ResolvedToken<L, D>> {
pub fn first_token(&self) -> Option<&'a ResolvedToken<S, D>> {
match self {
NodeOrToken::Node(it) => it.first_token(),
NodeOrToken::Token(it) => Some(it),
@ -706,7 +706,7 @@ impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
/// Return the rightmost token in the subtree of this element.
#[inline]
pub fn last_token(&self) -> Option<&'a ResolvedToken<L, D>> {
pub fn last_token(&self) -> Option<&'a ResolvedToken<S, D>> {
match self {
NodeOrToken::Node(it) => it.last_token(),
NodeOrToken::Token(it) => Some(it),
@ -715,7 +715,7 @@ impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
/// The tree element to the right of this one, i.e. the next child of this element's parent after this element.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'a, L, D>> {
pub fn next_sibling_or_token(&self) -> Option<ResolvedElementRef<'a, S, D>> {
match self {
NodeOrToken::Node(it) => it.next_sibling_or_token(),
NodeOrToken::Token(it) => it.next_sibling_or_token(),
@ -724,7 +724,7 @@ impl<'a, L: Language, D> ResolvedElementRef<'a, L, D> {
/// The tree element to the left of this one, i.e. the previous child of this element's parent before this element.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'a, L, D>> {
pub fn prev_sibling_or_token(&self) -> Option<ResolvedElementRef<'a, S, D>> {
match self {
NodeOrToken::Node(it) => it.prev_sibling_or_token(),
NodeOrToken::Token(it) => it.prev_sibling_or_token(),

View file

@ -6,7 +6,7 @@ use crate::{
interning::{Resolver, TokenKey},
syntax::{SyntaxNode, SyntaxToken},
text::{TextRange, TextSize},
Language,
Syntax,
};
/// An efficient representation of the text that is covered by a [`SyntaxNode`], i.e. the combined
@ -21,9 +21,9 @@ use crate::{
/// # use cstree::testing::*;
/// # use cstree::syntax::ResolvedNode;
/// #
/// fn parse_float_literal(s: &str) -> ResolvedNode<MyLanguage> {
/// fn parse_float_literal(s: &str) -> ResolvedNode<MySyntax> {
/// // parsing...
/// # let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::new();
/// # let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::new();
/// # builder.start_node(Float);
/// # builder.token(Float, s);
/// # builder.finish_node();
@ -41,14 +41,14 @@ use crate::{
/// assert_eq!(sub, "748");
/// ```
#[derive(Clone)]
pub struct SyntaxText<'n, 'i, I: ?Sized, L: Language, D: 'static = ()> {
node: &'n SyntaxNode<L, D>,
pub struct SyntaxText<'n, 'i, I: ?Sized, S: Syntax, D: 'static = ()> {
node: &'n SyntaxNode<S, D>,
range: TextRange,
resolver: &'i I,
}
impl<'n, 'i, I: Resolver<TokenKey> + ?Sized, L: Language, D> SyntaxText<'n, 'i, I, L, D> {
pub(crate) fn new(node: &'n SyntaxNode<L, D>, resolver: &'i I) -> Self {
impl<'n, 'i, I: Resolver<TokenKey> + ?Sized, S: Syntax, D> SyntaxText<'n, 'i, I, S, D> {
pub(crate) fn new(node: &'n SyntaxNode<S, D>, resolver: &'i I) -> Self {
let range = node.text_range();
SyntaxText { node, range, resolver }
}
@ -188,7 +188,7 @@ impl<'n, 'i, I: Resolver<TokenKey> + ?Sized, L: Language, D> SyntaxText<'n, 'i,
self.fold_chunks((), |(), chunk| f(chunk))
}
fn tokens_with_ranges(&self) -> impl Iterator<Item = (&SyntaxToken<L, D>, TextRange)> {
fn tokens_with_ranges(&self) -> impl Iterator<Item = (&SyntaxToken<S, D>, TextRange)> {
let text_range = self.range;
self.node
.descendants_with_tokens()
@ -208,25 +208,25 @@ fn found<T>(res: Result<(), T>) -> Option<T> {
}
}
impl<I: Resolver<TokenKey> + ?Sized, L: Language, D> fmt::Debug for SyntaxText<'_, '_, I, L, D> {
impl<I: Resolver<TokenKey> + ?Sized, S: Syntax, D> fmt::Debug for SyntaxText<'_, '_, I, S, D> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.to_string(), f)
}
}
impl<I: Resolver<TokenKey> + ?Sized, L: Language, D> fmt::Display for SyntaxText<'_, '_, I, L, D> {
impl<I: Resolver<TokenKey> + ?Sized, S: Syntax, D> fmt::Display for SyntaxText<'_, '_, I, S, D> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f))
}
}
impl<I: Resolver<TokenKey> + ?Sized, L: Language, D> From<SyntaxText<'_, '_, I, L, D>> for String {
fn from(text: SyntaxText<'_, '_, I, L, D>) -> String {
impl<I: Resolver<TokenKey> + ?Sized, S: Syntax, D> From<SyntaxText<'_, '_, I, S, D>> for String {
fn from(text: SyntaxText<'_, '_, I, S, D>) -> String {
text.to_string()
}
}
impl<I: Resolver<TokenKey> + ?Sized, L: Language, D> PartialEq<str> for SyntaxText<'_, '_, I, L, D> {
impl<I: Resolver<TokenKey> + ?Sized, S: Syntax, D> PartialEq<str> for SyntaxText<'_, '_, I, S, D> {
fn eq(&self, mut rhs: &str) -> bool {
self.try_for_each_chunk(|chunk| {
if !rhs.starts_with(chunk) {
@ -240,33 +240,33 @@ impl<I: Resolver<TokenKey> + ?Sized, L: Language, D> PartialEq<str> for SyntaxTe
}
}
impl<I: Resolver<TokenKey> + ?Sized, L: Language, D> PartialEq<SyntaxText<'_, '_, I, L, D>> for str {
fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D>) -> bool {
impl<I: Resolver<TokenKey> + ?Sized, S: Syntax, D> PartialEq<SyntaxText<'_, '_, I, S, D>> for str {
fn eq(&self, rhs: &SyntaxText<'_, '_, I, S, D>) -> bool {
rhs == self
}
}
impl<I: Resolver<TokenKey> + ?Sized, L: Language, D> PartialEq<&'_ str> for SyntaxText<'_, '_, I, L, D> {
impl<I: Resolver<TokenKey> + ?Sized, S: Syntax, D> PartialEq<&'_ str> for SyntaxText<'_, '_, I, S, D> {
fn eq(&self, rhs: &&str) -> bool {
self == *rhs
}
}
impl<I: Resolver<TokenKey> + ?Sized, L: Language, D> PartialEq<SyntaxText<'_, '_, I, L, D>> for &'_ str {
fn eq(&self, rhs: &SyntaxText<'_, '_, I, L, D>) -> bool {
impl<I: Resolver<TokenKey> + ?Sized, S: Syntax, D> PartialEq<SyntaxText<'_, '_, I, S, D>> for &'_ str {
fn eq(&self, rhs: &SyntaxText<'_, '_, I, S, D>) -> bool {
rhs == self
}
}
impl<'n1, 'i1, 'n2, 'i2, I1, I2, L1, L2, D1, D2> PartialEq<SyntaxText<'n2, 'i2, I2, L2, D2>>
for SyntaxText<'n1, 'i1, I1, L1, D1>
impl<'n1, 'i1, 'n2, 'i2, I1, I2, S1, S2, D1, D2> PartialEq<SyntaxText<'n2, 'i2, I2, S2, D2>>
for SyntaxText<'n1, 'i1, I1, S1, D1>
where
L1: Language,
L2: Language,
S1: Syntax,
S2: Syntax,
I1: Resolver<TokenKey> + ?Sized,
I2: Resolver<TokenKey> + ?Sized,
{
fn eq(&self, other: &SyntaxText<'_, '_, I2, L2, D2>) -> bool {
fn eq(&self, other: &SyntaxText<'_, '_, I2, S2, D2>) -> bool {
if self.range.len() != other.range.len() {
return false;
}
@ -278,21 +278,21 @@ where
}
}
fn zip_texts<'it1, 'it2, It1, It2, I1, I2, L1, L2, D1, D2>(
fn zip_texts<'it1, 'it2, It1, It2, I1, I2, S1, S2, D1, D2>(
xs: &mut It1,
ys: &mut It2,
resolver_x: &I1,
resolver_y: &I2,
) -> Option<()>
where
It1: Iterator<Item = (&'it1 SyntaxToken<L1, D1>, TextRange)>,
It2: Iterator<Item = (&'it2 SyntaxToken<L2, D2>, TextRange)>,
It1: Iterator<Item = (&'it1 SyntaxToken<S1, D1>, TextRange)>,
It2: Iterator<Item = (&'it2 SyntaxToken<S2, D2>, TextRange)>,
I1: Resolver<TokenKey> + ?Sized,
I2: Resolver<TokenKey> + ?Sized,
D1: 'static,
D2: 'static,
L1: Language + 'it1,
L2: Language + 'it2,
S1: Syntax + 'it1,
S2: Syntax + 'it2,
{
let mut x = xs.next()?;
let mut y = ys.next()?;
@ -314,7 +314,7 @@ where
}
}
impl<I: Resolver<TokenKey> + ?Sized, L: Language, D> Eq for SyntaxText<'_, '_, I, L, D> {}
impl<I: Resolver<TokenKey> + ?Sized, S: Syntax, D> Eq for SyntaxText<'_, '_, I, S, D> {}
mod private {
use std::ops;
@ -383,40 +383,38 @@ mod tests {
use super::*;
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum TestLang {}
impl Language for TestLang {
type Kind = RawSyntaxKind;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(transparent)]
pub struct SyntaxKind(u32);
fn kind_from_raw(raw: RawSyntaxKind) -> Self::Kind {
raw
impl Syntax for SyntaxKind {
fn from_raw(raw: RawSyntaxKind) -> Self {
Self(raw.0)
}
fn kind_to_raw(kind: Self::Kind) -> RawSyntaxKind {
kind
fn into_raw(self) -> RawSyntaxKind {
RawSyntaxKind(self.0)
}
fn static_text(kind: Self::Kind) -> Option<&'static str> {
if kind == RawSyntaxKind(1) {
Some("{")
} else if kind == RawSyntaxKind(2) {
Some("}")
} else {
None
fn static_text(self) -> Option<&'static str> {
match self.0 {
1 => Some("{"),
2 => Some("}"),
_ => None,
}
}
}
fn build_tree(chunks: &[&str]) -> (SyntaxNode<TestLang, ()>, impl Resolver<TokenKey>) {
let mut builder: GreenNodeBuilder<TestLang> = GreenNodeBuilder::new();
builder.start_node(RawSyntaxKind(62));
fn build_tree(chunks: &[&str]) -> (SyntaxNode<SyntaxKind, ()>, impl Resolver<TokenKey>) {
let mut builder: GreenNodeBuilder<SyntaxKind> = GreenNodeBuilder::new();
builder.start_node(SyntaxKind(62));
for &chunk in chunks.iter() {
let kind = match chunk {
"{" => 1,
"}" => 2,
_ => 3,
};
builder.token(RawSyntaxKind(kind), chunk);
builder.token(SyntaxKind(kind), chunk);
}
builder.finish_node();
let (node, cache) = builder.finish();

View file

@ -12,18 +12,18 @@ use crate::{
green::{GreenNode, GreenToken},
interning::{Resolver, TokenKey},
traversal::Direction,
Language, RawSyntaxKind,
RawSyntaxKind, Syntax,
};
/// Syntax tree token.
#[derive(Debug)]
pub struct SyntaxToken<L: Language, D: 'static = ()> {
parent: SyntaxNode<L, D>,
pub struct SyntaxToken<S: Syntax, D: 'static = ()> {
parent: SyntaxNode<S, D>,
index: u32,
offset: TextSize,
}
impl<L: Language, D> Clone for SyntaxToken<L, D> {
impl<S: Syntax, D> Clone for SyntaxToken<S, D> {
fn clone(&self) -> Self {
Self {
parent: self.parent.clone(),
@ -33,7 +33,7 @@ impl<L: Language, D> Clone for SyntaxToken<L, D> {
}
}
impl<L: Language, D> Hash for SyntaxToken<L, D> {
impl<S: Syntax, D> Hash for SyntaxToken<S, D> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.parent.hash(state);
self.index.hash(state);
@ -41,15 +41,15 @@ impl<L: Language, D> Hash for SyntaxToken<L, D> {
}
}
impl<L: Language, D> PartialEq for SyntaxToken<L, D> {
fn eq(&self, other: &SyntaxToken<L, D>) -> bool {
impl<S: Syntax, D> PartialEq for SyntaxToken<S, D> {
fn eq(&self, other: &SyntaxToken<S, D>) -> bool {
self.parent == other.parent && self.index == other.index && self.offset == other.offset
}
}
impl<L: Language, D> Eq for SyntaxToken<L, D> {}
impl<S: Syntax, D> Eq for SyntaxToken<S, D> {}
impl<L: Language, D> SyntaxToken<L, D> {
impl<S: Syntax, D> SyntaxToken<S, D> {
/// Writes this token's [`Debug`](fmt::Debug) representation into the given `target`.
pub fn write_debug<R>(&self, resolver: &R, target: &mut impl fmt::Write) -> fmt::Result
where
@ -113,7 +113,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// Turns this token into a [`ResolvedToken`](crate::syntax::ResolvedToken), but only if there is a resolver
/// associated with this tree.
#[inline]
pub fn try_resolved(&self) -> Option<&ResolvedToken<L, D>> {
pub fn try_resolved(&self) -> Option<&ResolvedToken<S, D>> {
// safety: we only coerce if `resolver` exists
self.resolver().map(|_| unsafe { ResolvedToken::coerce_ref(self) })
}
@ -122,13 +122,13 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// # Panics
/// If there is no resolver associated with this tree.
#[inline]
pub fn resolved(&self) -> &ResolvedToken<L, D> {
pub fn resolved(&self) -> &ResolvedToken<S, D> {
self.try_resolved().expect("tried to resolve a node without resolver")
}
}
impl<L: Language, D> SyntaxToken<L, D> {
pub(super) fn new(parent: &SyntaxNode<L, D>, index: u32, offset: TextSize) -> SyntaxToken<L, D> {
impl<S: Syntax, D> SyntaxToken<S, D> {
pub(super) fn new(parent: &SyntaxNode<S, D>, index: u32, offset: TextSize) -> SyntaxToken<S, D> {
Self {
parent: parent.clone_uncounted(),
index,
@ -164,8 +164,8 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// The kind of this token in terms of your language.
#[inline]
pub fn kind(&self) -> L::Kind {
L::kind_from_raw(self.syntax_kind())
pub fn kind(&self) -> S {
S::from_raw(self.syntax_kind())
}
/// The range this token covers in the source text, in bytes.
@ -187,17 +187,17 @@ impl<L: Language, D> SyntaxToken<L, D> {
self.static_text().or_else(|| self.green().text(resolver)).unwrap()
}
/// If the [syntax kind](Language::Kind) of this token always represents the same text, returns
/// If the [syntax kind](Syntax) of this token always represents the same text, returns
/// that text.
///
/// # Examples
/// If there is a syntax kind `Plus` that represents just the `+` operator and we implement
/// [`Language::static_text`] for it, we can retrieve this text in the resulting syntax tree.
/// [`Syntax::static_text`] for it, we can retrieve this text in the resulting syntax tree.
///
/// ```
/// # use cstree::testing::*;
/// # use cstree::build::*;
/// let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::new();
/// let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::new();
/// # builder.start_node(Root);
/// # builder.token(Identifier, "x");
/// # builder.token(Whitespace, " ");
@ -206,7 +206,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// # builder.token(Int, "3");
/// # builder.finish_node();
/// let tree = parse(&mut builder, "x + 3");
/// # let tree: SyntaxNode<MyLanguage> = SyntaxNode::new_root(builder.finish().0);
/// # let tree: SyntaxNode<MySyntax> = SyntaxNode::new_root(builder.finish().0);
/// let plus = tree
/// .children_with_tokens()
/// .nth(2) // `x`, then a space, then `+`
@ -217,7 +217,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// ```
#[inline(always)]
pub fn static_text(&self) -> Option<&'static str> {
L::static_text(self.kind())
S::static_text(self.kind())
}
/// Returns `true` if `self` and `other` represent equal source text.
@ -235,7 +235,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// # Examples
/// ```
/// # use cstree::testing::*;
/// let mut builder: GreenNodeBuilder<MyLanguage> = GreenNodeBuilder::new();
/// let mut builder: GreenNodeBuilder<MySyntax> = GreenNodeBuilder::new();
/// # builder.start_node(Root);
/// # builder.token(Identifier, "x");
/// # builder.token(Whitespace, " ");
@ -247,7 +247,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// # builder.token(Int, "3");
/// # builder.finish_node();
/// let tree = parse(&mut builder, "x + x + 3");
/// # let tree: SyntaxNode<MyLanguage> = SyntaxNode::new_root(builder.finish().0);
/// # let tree: SyntaxNode<MySyntax> = SyntaxNode::new_root(builder.finish().0);
/// let mut tokens = tree.children_with_tokens();
/// let tokens = tokens.by_ref();
/// let first_x = tokens.next().unwrap().into_token().unwrap();
@ -303,14 +303,14 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// interner,
/// type_table: TypeTable{ /* stuff */},
/// };
/// let mut builder: GreenNodeBuilder<MyLanguage, TokenInterner> =
/// let mut builder: GreenNodeBuilder<MySyntax, TokenInterner> =
/// GreenNodeBuilder::with_interner(&mut state.interner);
/// # let input = "";
/// # builder.start_node(Root);
/// # builder.token(Identifier, "x");
/// # builder.finish_node();
/// let tree = parse(&mut builder, "x");
/// # let tree: SyntaxNode<MyLanguage> = SyntaxNode::new_root(builder.finish().0);
/// # let tree: SyntaxNode<MySyntax> = SyntaxNode::new_root(builder.finish().0);
/// let type_table = &state.type_table;
/// let ident = tree
/// .children_with_tokens()
@ -339,26 +339,26 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// The parent node of this token.
#[inline]
pub fn parent(&self) -> &SyntaxNode<L, D> {
pub fn parent(&self) -> &SyntaxNode<S, D> {
&self.parent
}
/// Returns an iterator along the chain of parents of this token.
#[inline]
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<L, D>> {
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode<S, D>> {
self.parent().ancestors()
}
/// The tree element to the right of this one, i.e. the next child of this token's parent after this token.
#[inline]
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn next_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, S, D>> {
self.parent()
.next_child_or_token_after(self.index as usize, self.text_range().end())
}
/// The tree element to the left of this one, i.e. the previous child of this token's parent before this token.
#[inline]
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, L, D>> {
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElementRef<'_, S, D>> {
self.parent()
.prev_child_or_token_before(self.index as usize, self.text_range().start())
}
@ -367,8 +367,8 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// token's parent's children from this token on to the left or the right.
/// The first item in the iterator will always be this token.
#[inline]
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = SyntaxElementRef<'_, L, D>> {
let me: SyntaxElementRef<'_, L, D> = self.into();
pub fn siblings_with_tokens(&self, direction: Direction) -> impl Iterator<Item = SyntaxElementRef<'_, S, D>> {
let me: SyntaxElementRef<'_, S, D> = self.into();
iter::successors(Some(me), move |el| match direction {
Direction::Next => el.next_sibling_or_token(),
Direction::Prev => el.prev_sibling_or_token(),
@ -378,7 +378,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// Returns the next token in the tree.
/// This is not necessarily a direct sibling of this token, but will always be further right in the tree.
#[inline]
pub fn next_token(&self) -> Option<&SyntaxToken<L, D>> {
pub fn next_token(&self) -> Option<&SyntaxToken<S, D>> {
match self.next_sibling_or_token() {
Some(element) => element.first_token(),
None => self
@ -392,7 +392,7 @@ impl<L: Language, D> SyntaxToken<L, D> {
/// Returns the previous token in the tree.
/// This is not necessarily a direct sibling of this token, but will always be further left in the tree.
#[inline]
pub fn prev_token(&self) -> Option<&SyntaxToken<L, D>> {
pub fn prev_token(&self) -> Option<&SyntaxToken<S, D>> {
match self.prev_sibling_or_token() {
Some(element) => element.last_token(),
None => self

View file

@ -7,7 +7,7 @@ use cstree::{
};
fn build_tree<D>(root: &Element<'_>) -> (SyntaxNode<D>, impl Resolver) {
let mut builder: GreenNodeBuilder<TestLang> = GreenNodeBuilder::new();
let mut builder: GreenNodeBuilder<SyntaxKind> = GreenNodeBuilder::new();
build_recursive(root, &mut builder, 0);
let (node, cache) = builder.finish();
(SyntaxNode::new_root(node), cache.unwrap().into_interner().unwrap())
@ -36,19 +36,19 @@ fn create() {
let tree = two_level_tree();
let (tree, resolver) = build_tree::<()>(&tree);
assert_eq!(tree.syntax_kind(), RawSyntaxKind(0));
assert_eq!(tree.kind(), RawSyntaxKind(0));
assert_eq!(tree.kind(), SyntaxKind(0));
{
let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap();
let leaf1_0 = leaf1_0.into_token().unwrap();
assert_eq!(leaf1_0.syntax_kind(), RawSyntaxKind(5));
assert_eq!(leaf1_0.kind(), RawSyntaxKind(5));
assert_eq!(leaf1_0.kind(), SyntaxKind(5));
assert_eq!(leaf1_0.resolve_text(&resolver), "1.0");
assert_eq!(leaf1_0.text_range(), TextRange::at(6.into(), 3.into()));
}
{
let node2 = tree.children().nth(2).unwrap();
assert_eq!(node2.syntax_kind(), RawSyntaxKind(6));
assert_eq!(node2.kind(), RawSyntaxKind(6));
assert_eq!(node2.kind(), SyntaxKind(6));
assert_eq!(node2.children_with_tokens().count(), 3);
assert_eq!(node2.resolve_text(&resolver), "2.02.12.2");
}
@ -58,7 +58,7 @@ fn create() {
fn token_text_eq() {
let tree = tree_with_eq_tokens();
let (tree, _) = build_tree::<()>(&tree);
assert_eq!(tree.kind(), RawSyntaxKind(0));
assert_eq!(tree.kind(), SyntaxKind(0));
let leaf0_0 = tree.children().next().unwrap().children_with_tokens().next().unwrap();
let leaf0_0 = leaf0_0.into_token().unwrap();
@ -150,7 +150,7 @@ fn inline_resolver() {
assert_eq!(leaf1_0.text(), "1.0");
assert_eq!(leaf1_0.text_range(), TextRange::at(6.into(), 3.into()));
assert_eq!(format!("{}", leaf1_0), leaf1_0.text());
assert_eq!(format!("{:?}", leaf1_0), "RawSyntaxKind(5)@6..9 \"1.0\"");
assert_eq!(format!("{:?}", leaf1_0), "SyntaxKind(5)@6..9 \"1.0\"");
}
{
let node2 = tree.children().nth(2).unwrap();
@ -158,13 +158,13 @@ fn inline_resolver() {
let resolver = node2.resolver();
assert_eq!(node2.resolve_text(resolver.as_ref()), node2.text());
assert_eq!(format!("{}", node2).as_str(), node2.text());
assert_eq!(format!("{:?}", node2), "RawSyntaxKind(6)@9..18");
assert_eq!(format!("{:?}", node2), "SyntaxKind(6)@9..18");
assert_eq!(
format!("{:#?}", node2),
r#"RawSyntaxKind(6)@9..18
RawSyntaxKind(7)@9..12 "2.0"
RawSyntaxKind(8)@12..15 "2.1"
RawSyntaxKind(9)@15..18 "2.2"
r#"SyntaxKind(6)@9..18
SyntaxKind(7)@9..12 "2.0"
SyntaxKind(8)@12..15 "2.1"
SyntaxKind(9)@15..18 "2.2"
"#
);
}
@ -182,5 +182,5 @@ fn assert_debug_display() {
f::<cstree::util::NodeOrToken<String, u128>>();
fn dbg<T: fmt::Debug>() {}
dbg::<GreenNodeBuilder<'static, 'static, TestLang>>();
dbg::<GreenNodeBuilder<'static, 'static, SyntaxKind>>();
}

80
cstree/tests/it/main.rs Normal file
View file

@ -0,0 +1,80 @@
mod basic;
mod regressions;
mod sendsync;
#[cfg(feature = "serialize")]
mod serde;
use cstree::{
build::{GreenNodeBuilder, NodeCache},
green::GreenNode,
interning::Interner,
RawSyntaxKind, Syntax,
};
// Convenience aliases used throughout the integration tests: they fix the
// syntax kind parameter to this suite's `SyntaxKind`, leaving only the
// user-data parameter `D` (default `()`) open.
pub type SyntaxNode<D = ()> = cstree::syntax::SyntaxNode<SyntaxKind, D>;
pub type SyntaxToken<D = ()> = cstree::syntax::SyntaxToken<SyntaxKind, D>;
pub type SyntaxElement<D = ()> = cstree::syntax::SyntaxElement<SyntaxKind, D>;
pub type SyntaxElementRef<'a, D = ()> = cstree::syntax::SyntaxElementRef<'a, SyntaxKind, D>;
pub type ResolvedNode<D = ()> = cstree::syntax::ResolvedNode<SyntaxKind, D>;
pub type ResolvedToken<D = ()> = cstree::syntax::ResolvedToken<SyntaxKind, D>;
pub type ResolvedElement<D = ()> = cstree::syntax::ResolvedElement<SyntaxKind, D>;
pub type ResolvedElementRef<'a, D = ()> = cstree::syntax::ResolvedElementRef<'a, SyntaxKind, D>;
/// Borrowed description of a test tree: interior nodes own their children,
/// leaves carry the token's source text.
#[derive(Debug)]
pub enum Element<'s> {
    Node(Vec<Element<'s>>),
    Token(&'s str),
}
/// Test syntax kind: a transparent newtype over the raw `u32` stored in the
/// green tree. No kind of this test language has static text.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(transparent)]
pub struct SyntaxKind(u32);

impl Syntax for SyntaxKind {
    fn from_raw(raw: RawSyntaxKind) -> Self {
        SyntaxKind(raw.0)
    }

    fn into_raw(self) -> RawSyntaxKind {
        let SyntaxKind(code) = self;
        RawSyntaxKind(code)
    }

    fn static_text(self) -> Option<&'static str> {
        // Every token in these tests carries dynamic text.
        None
    }
}
/// Builds a green tree for `root`, deduplicating nodes and tokens through the
/// caller-provided `cache`. Returns the root green node.
pub fn build_tree_with_cache<I>(root: &Element<'_>, cache: &mut NodeCache<'_, I>) -> GreenNode
where
    I: Interner,
{
    let mut builder = GreenNodeBuilder::<SyntaxKind, I>::with_cache(cache);
    build_recursive(root, &mut builder, 0);
    let (green, returned_cache) = builder.finish();
    // The builder only borrowed our cache, so it must not hand back an owned one.
    assert!(returned_cache.is_none());
    green
}
/// Emits `root` into `builder`, assigning consecutive syntax kinds in
/// depth-first pre-order starting at `from`. Returns the last kind value used,
/// so a caller (or recursive call) can continue numbering after this subtree.
pub fn build_recursive<I>(
    root: &Element<'_>,
    builder: &mut GreenNodeBuilder<'_, '_, SyntaxKind, I>,
    mut from: u32,
) -> u32
where
    I: Interner,
{
    match root {
        Element::Token(text) => builder.token(SyntaxKind(from), text),
        Element::Node(children) => {
            builder.start_node(SyntaxKind(from));
            // Each child claims the next kind value after everything emitted so far.
            for child in children.iter() {
                from = build_recursive(child, builder, from + 1);
            }
            builder.finish_node();
        }
    }
    from
}

View file

@ -0,0 +1,21 @@
use cstree::Syntax;
#[test]
fn empty_tree_arc() {
    // Regression test: MIRI reported out-of-bounds `ThinArc` pointers for a
    // root `GreenNode` with no children. The value of this test is running it
    // under MIRI; the assertion itself is incidental.
    use cstree::{build::GreenNodeBuilder, syntax::SyntaxNode};

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
    #[repr(u32)]
    enum SyntaxKind {
        Root,
    }

    let mut node_builder: GreenNodeBuilder<SyntaxKind> = GreenNodeBuilder::new();
    node_builder.start_node(SyntaxKind::Root);
    node_builder.finish_node();
    let (green_root, _cache) = node_builder.finish();

    let tree: SyntaxNode<SyntaxKind> = SyntaxNode::new_root(green_root);
    assert_eq!(tree.kind(), SyntaxKind::Root);
}

View file

@ -3,7 +3,7 @@
use crossbeam_utils::thread::scope;
use std::{thread, time::Duration};
use super::{build_recursive, Element, ResolvedNode, SyntaxNode, TestLang};
use super::{build_recursive, Element, ResolvedNode, SyntaxKind, SyntaxNode};
use cstree::build::GreenNodeBuilder;
// Exercise the multi-threaded interner when the corresponding feature is enabled.
@ -12,12 +12,12 @@ use cstree::build::GreenNodeBuilder;
use cstree::interning::{new_threaded_interner, MultiThreadedTokenInterner};
#[cfg(not(feature = "multi_threaded_interning"))]
fn get_builder() -> GreenNodeBuilder<'static, 'static, TestLang> {
fn get_builder() -> GreenNodeBuilder<'static, 'static, SyntaxKind> {
GreenNodeBuilder::new()
}
#[cfg(feature = "multi_threaded_interning")]
fn get_builder() -> GreenNodeBuilder<'static, 'static, TestLang, MultiThreadedTokenInterner> {
fn get_builder() -> GreenNodeBuilder<'static, 'static, SyntaxKind, MultiThreadedTokenInterner> {
let interner = new_threaded_interner();
GreenNodeBuilder::from_interner(interner)
}

View file

@ -1,6 +1,6 @@
use crate::{build_recursive, build_tree_with_cache, ResolvedNode};
use super::{Element, SyntaxNode, TestLang};
use super::{Element, SyntaxKind, SyntaxNode};
use cstree::{
build::{GreenNodeBuilder, NodeCache},
interning::new_interner,
@ -225,7 +225,7 @@ fn three_level_tree() -> Element<'static> {
}
fn build_tree(root: Element<'_>) -> ResolvedNode<String> {
let mut builder: GreenNodeBuilder<TestLang> = GreenNodeBuilder::new();
let mut builder: GreenNodeBuilder<SyntaxKind> = GreenNodeBuilder::new();
build_recursive(&root, &mut builder, 0);
let (node, cache) = builder.finish();
SyntaxNode::new_root_with_resolver(node, cache.unwrap().into_interner().unwrap())

16
test_suite/Cargo.toml Normal file
View file

@ -0,0 +1,16 @@
[package]
name = "cstree_test_suite"
publish = false
version = "0.0.0"
edition.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
readme.workspace = true
rust-version.workspace = true
[dependencies]
cstree = { path = "../cstree", features = ["derive"] }
[dev-dependencies]
trybuild = { version = "1.0.80", features = ["diff"] }

View file

@ -0,0 +1,22 @@
use cstree::{RawSyntaxKind, Syntax};
#[test]
fn basic() {
    /// Two-kind test language: `A` has dynamic text, `B` is always `"b"`.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
    #[repr(u32)]
    pub enum SyntaxKind {
        A,
        #[static_text("b")]
        B,
    }
    pub type MySyntax = SyntaxKind;

    // Raw round-trip and static-text behavior, grouped per variant.
    assert_eq!(SyntaxKind::A.into_raw(), RawSyntaxKind(0));
    assert_eq!(MySyntax::from_raw(RawSyntaxKind(0)), SyntaxKind::A);
    assert!(SyntaxKind::A.static_text().is_none());

    assert_eq!(SyntaxKind::B.into_raw(), RawSyntaxKind(1));
    assert_eq!(MySyntax::from_raw(RawSyntaxKind(1)), SyntaxKind::B);
    assert_eq!(SyntaxKind::B.static_text(), Some("b"));
}

6
test_suite/tests/ui.rs Normal file
View file

@ -0,0 +1,6 @@
#[test]
#[cfg_attr(miri, ignore)] // trybuild invokes rustc, which Miri cannot do
fn ui() {
    // `TestCases` runs its checks on `Drop`, so the chained temporary behaves
    // exactly like a named binding dropped at the end of the function.
    trybuild::TestCases::new().compile_fail("tests/ui/**/*.rs");
}

View file

@ -0,0 +1,10 @@
use cstree::Syntax; // UI fixture: the enum below lacks `#[repr(u32)]`, so the derive must error
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
pub enum SyntaxKind {
    A,
    #[static_text("b")]
    B,
}
fn main() {} // required: trybuild builds each compile-fail fixture as a binary

View file

@ -0,0 +1,9 @@
error: syntax kind definitions must be `#[repr(u32)]` to derive `Syntax`
--> tests/ui/repr/missing_repr.rs:4:1
|
4 | / pub enum SyntaxKind {
5 | | A,
6 | | #[static_text("b")]
7 | | B,
8 | | }
| |_^

View file

@ -0,0 +1,11 @@
use cstree::Syntax; // UI fixture: `#[repr(C)]` is not `#[repr(u32)]`, so the derive must error
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
#[repr(C)]
pub enum SyntaxKind {
    A,
    #[static_text("b")]
    B,
}
fn main() {} // required: trybuild builds each compile-fail fixture as a binary

View file

@ -0,0 +1,10 @@
error: syntax kind definitions must be `#[repr(u32)]` to derive `Syntax`
--> tests/ui/repr/wrong_repr_c.rs:4:1
|
4 | / #[repr(C)]
5 | | pub enum SyntaxKind {
6 | | A,
7 | | #[static_text("b")]
8 | | B,
9 | | }
| |_^

View file

@ -0,0 +1,11 @@
use cstree::Syntax; // UI fixture: `#[repr(u16)]` is not `#[repr(u32)]`, so the derive must error
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
#[repr(u16)]
pub enum SyntaxKind {
    A,
    #[static_text("b")]
    B,
}
fn main() {} // required: trybuild builds each compile-fail fixture as a binary

View file

@ -0,0 +1,10 @@
error: syntax kind definitions must be `#[repr(u32)]` to derive `Syntax`
--> tests/ui/repr/wrong_repr_u16.rs:4:1
|
4 | / #[repr(u16)]
5 | | pub enum SyntaxKind {
6 | | A,
7 | | #[static_text("b")]
8 | | B,
9 | | }
| |_^

View file

@ -0,0 +1,11 @@
use cstree::Syntax; // UI fixture: `static_text` with an empty argument list must be rejected
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
#[repr(u32)]
pub enum SyntaxKind {
    A,
    #[static_text()]
    B,
}
fn main() {} // required: trybuild builds each compile-fail fixture as a binary

View file

@ -0,0 +1,11 @@
error: argument to `static_text` must be a string literal: `#[static_text("...")]`
--> tests/ui/static_text/empty_expr.rs:7:7
|
7 | #[static_text()]
| ^^^^^^^^^^^^^
error: unexpected end of input, expected string literal
--> tests/ui/static_text/empty_expr.rs:7:19
|
7 | #[static_text()]
| ^

View file

@ -0,0 +1,11 @@
use cstree::Syntax; // UI fixture: bare `#[static_text]` (no text argument) must be rejected
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
#[repr(u32)]
pub enum SyntaxKind {
    A,
    #[static_text]
    B,
}
fn main() {} // required: trybuild builds each compile-fail fixture as a binary

View file

@ -0,0 +1,5 @@
error: missing text for `static_text`: try `#[static_text("...")]`
--> tests/ui/static_text/missing_text.rs:7:5
|
7 | #[static_text]
| ^^^^^^^^^^^^^^

View file

@ -0,0 +1,11 @@
use cstree::Syntax; // UI fixture: a path argument instead of a string literal must be rejected
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
#[repr(u32)]
pub enum SyntaxKind {
    A,
    #[static_text(SyntaxKind)]
    B,
}
fn main() {} // required: trybuild builds each compile-fail fixture as a binary

View file

@ -0,0 +1,11 @@
error: argument to `static_text` must be a string literal: `#[static_text("...")]`
--> tests/ui/static_text/non_expr.rs:7:7
|
7 | #[static_text(SyntaxKind)]
| ^^^^^^^^^^^^^^^^^^^^^^^
error: expected string literal
--> tests/ui/static_text/non_expr.rs:7:19
|
7 | #[static_text(SyntaxKind)]
| ^^^^^^^^^^

View file

@ -0,0 +1,11 @@
use cstree::Syntax; // UI fixture: a non-string expression argument must be rejected
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
#[repr(u32)]
pub enum SyntaxKind {
    A,
    #[static_text(foo + 3)]
    B,
}
fn main() {} // required: trybuild builds each compile-fail fixture as a binary

View file

@ -0,0 +1,11 @@
error: argument to `static_text` must be a string literal: `#[static_text("...")]`
--> tests/ui/static_text/non_string_expr.rs:7:7
|
7 | #[static_text(foo + 3)]
| ^^^^^^^^^^^^^^^^^^^^
error: expected string literal
--> tests/ui/static_text/non_string_expr.rs:7:19
|
7 | #[static_text(foo + 3)]
| ^^^

View file

@ -0,0 +1,11 @@
use cstree::Syntax; // UI fixture: `#[static_text = "..."]` (name-value form) must be rejected
#[derive(Debug, Clone, Copy, PartialEq, Eq, Syntax)]
#[repr(u32)]
pub enum SyntaxKind {
    A,
    #[static_text = "b"]
    B,
}
fn main() {} // required: trybuild builds each compile-fail fixture as a binary

View file

@ -0,0 +1,5 @@
error: `static_text` takes the text as a function argument: `#[static_text("...")]`
--> tests/ui/static_text/text_assigned.rs:7:5
|
7 | #[static_text = "b"]
| ^^^^^^^^^^^^^^^^^^^^

View file

@ -1,77 +0,0 @@
mod basic;
mod regressions;
mod sendsync;
#[cfg(feature = "serialize")]
mod serde;
use cstree::{
build::{GreenNodeBuilder, NodeCache},
green::GreenNode,
interning::Interner,
Language, RawSyntaxKind,
};
/// Convenience aliases instantiating `cstree`'s syntax and resolved types
/// with [`TestLang`]; `D` is the user data parameter, defaulting to `()`.
pub type SyntaxNode<D = ()> = cstree::syntax::SyntaxNode<TestLang, D>;
pub type SyntaxToken<D = ()> = cstree::syntax::SyntaxToken<TestLang, D>;
pub type SyntaxElement<D = ()> = cstree::syntax::SyntaxElement<TestLang, D>;
pub type SyntaxElementRef<'a, D = ()> = cstree::syntax::SyntaxElementRef<'a, TestLang, D>;
pub type ResolvedNode<D = ()> = cstree::syntax::ResolvedNode<TestLang, D>;
pub type ResolvedToken<D = ()> = cstree::syntax::ResolvedToken<TestLang, D>;
pub type ResolvedElement<D = ()> = cstree::syntax::ResolvedElement<TestLang, D>;
pub type ResolvedElementRef<'a, D = ()> = cstree::syntax::ResolvedElementRef<'a, TestLang, D>;
/// Lightweight description of a tree, used by the tests to drive building.
#[derive(Debug)]
pub enum Element<'s> {
    /// An interior node holding its child elements.
    Node(Vec<Element<'s>>),
    /// A leaf token borrowing its source text.
    Token(&'s str),
}
/// Trivial test language whose kind type is `RawSyntaxKind` itself,
/// so conversions in both directions are the identity.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum TestLang {}
impl Language for TestLang {
    type Kind = RawSyntaxKind;
    // Identity: kinds already are raw kinds.
    fn kind_from_raw(raw: RawSyntaxKind) -> Self::Kind {
        raw
    }
    fn kind_to_raw(kind: Self::Kind) -> RawSyntaxKind {
        kind
    }
    // No token in the test language has static text.
    fn static_text(_kind: Self::Kind) -> Option<&'static str> {
        None
    }
}
/// Builds a green tree for `root` using the caller-supplied node `cache`.
///
/// Because the builder only borrows the cache, `finish()` must not hand a
/// cache back — the `assert!` below checks that invariant.
pub fn build_tree_with_cache<I>(root: &Element<'_>, cache: &mut NodeCache<'_, I>) -> GreenNode
where
    I: Interner,
{
    let mut builder: GreenNodeBuilder<TestLang, I> = GreenNodeBuilder::with_cache(cache);
    build_recursive(root, &mut builder, 0);
    let (node, cache) = builder.finish();
    // Cache was borrowed, not owned, so none should be returned.
    assert!(cache.is_none());
    node
}
/// Recursively writes `root` into `builder`, assigning consecutive raw kind
/// numbers in pre-order starting at `from`; returns the last number used so
/// the caller (or parent recursion) can continue from there.
pub fn build_recursive<L, I>(root: &Element<'_>, builder: &mut GreenNodeBuilder<'_, '_, L, I>, mut from: u32) -> u32
where
    L: Language<Kind = RawSyntaxKind>,
    I: Interner,
{
    match root {
        Element::Node(children) => {
            builder.start_node(RawSyntaxKind(from));
            for child in children {
                // Each child consumes the next kind number in sequence.
                from = build_recursive(child, builder, from + 1);
            }
            builder.finish_node();
        }
        Element::Token(text) => {
            builder.token(RawSyntaxKind(from), text);
        }
    }
    from
}

View file

@ -1,38 +0,0 @@
#[test]
fn empty_tree_arc() {
    // this test is not here for the test itself, but to run it through MIRI, who complained about out-of-bound
    // `ThinArc` pointers for a root `GreenNode` with no children
    use cstree::{build::GreenNodeBuilder, syntax::SyntaxNode};
    #[allow(non_camel_case_types)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
    #[repr(u32)]
    enum SyntaxKind {
        Root,
    }
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
    enum Lang {}
    impl cstree::Language for Lang {
        // ...
        type Kind = SyntaxKind;
        fn kind_from_raw(raw: cstree::RawSyntaxKind) -> Self::Kind {
            assert!(raw.0 <= SyntaxKind::Root as u32);
            // SAFETY: the assert above guarantees `raw.0` is a valid
            // `SyntaxKind` discriminant, and the enum is `#[repr(u32)]`.
            unsafe { std::mem::transmute::<u32, SyntaxKind>(raw.0) }
        }
        fn kind_to_raw(kind: Self::Kind) -> cstree::RawSyntaxKind {
            cstree::RawSyntaxKind(kind as u32)
        }
        // No static text in this minimal language.
        fn static_text(_kind: Self::Kind) -> Option<&'static str> {
            None
        }
    }
    // Build a root node with zero children and check it round-trips.
    let mut builder: GreenNodeBuilder<Lang> = GreenNodeBuilder::new();
    builder.start_node(SyntaxKind::Root);
    builder.finish_node();
    let (green, _) = builder.finish();
    let root: SyntaxNode<Lang> = SyntaxNode::new_root(green);
    assert_eq!(root.kind(), SyntaxKind::Root);
}