
Add CI and address soundness issues brought up by miri (#10)

DQ 2021-01-30 20:21:30 +01:00 committed by GitHub
parent ac68150dbb
commit 621718e9b8
9 changed files with 303 additions and 189 deletions

=== .github/workflows/main.yml (new file, +130 lines) ===

@@ -0,0 +1,130 @@
name: CI
on:
  push:
    branches:
      - master
  pull_request:
  # Daily
  schedule:
    - cron: "0 4 * * *"
  # Allows to run this workflow manually from the Actions tab
  workflow_dispatch:

env:
  RUST_LOG: info
  RUST_BACKTRACE: 1

jobs:
  test:
    name: Test ${{ matrix.rust }} on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        rust: [stable, nightly]
    steps:
      - uses: actions/checkout@v2
      - uses: hecrj/setup-rust-action@v1
        with:
          rust-version: ${{ matrix.rust }}
      - run: cargo test --verbose --all-features
      - run: cargo test --release --verbose --all-features

  check:
    name: Check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly
          override: true
      - uses: actions-rs/cargo@v1
        with:
          command: check

  clippy:
    name: Clippy
    runs-on: ubuntu-latest
    env:
      RUSTFLAGS: -Dwarnings
    steps:
      - uses: actions/checkout@v2
      - uses: hecrj/setup-rust-action@v1
        with:
          components: clippy
      - run: cargo clippy --all-targets --all-features --verbose -- -D warnings

  rustfmt:
    name: Rustfmt
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: hecrj/setup-rust-action@v1
        with:
          rust-version: nightly
          components: rustfmt
      - run: cargo fmt -p cstree -- --check

  rustdoc:
    name: Check doc links
    runs-on: ubuntu-latest
    env:
      RUSTDOCFLAGS: -Dwarnings
    steps:
      - uses: actions/checkout@v2
      - uses: hecrj/setup-rust-action@v1
      - run: cargo doc --all-features --document-private-items --no-deps

  miri-test:
    name: Miri ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
    steps:
      - uses: actions/checkout@v2
      - uses: hecrj/setup-rust-action@v1
        with:
          rust-version: nightly
          components: miri
      - run: cargo miri test --verbose --all-features -- -Zmiri-disable-isolation

  sanitizers:
    name: ${{ matrix.sanitizer }} sanitizer
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        sanitizer: [address, memory, thread, leak]
    steps:
      - uses: actions/checkout@v2
      - uses: hecrj/setup-rust-action@v1
        with:
          rust-version: nightly
          components: rust-src
      - name: Test with sanitizer
        env:
          RUSTFLAGS: -Zsanitizer=${{ matrix.sanitizer }}
          RUSTDOCFLAGS: -Zsanitizer=${{ matrix.sanitizer }}
          # only needed by asan
          ASAN_OPTIONS: detect_stack_use_after_return=1
          # Asan's leak detection occasionally complains
          # about some small leaks if backtraces are captured,
          # so ensure they're not
          RUST_BACKTRACE: 0
        run: |
          cargo test -Zbuild-std --verbose --target=x86_64-unknown-linux-gnu --all-features

=== next changed file ===

@@ -121,7 +121,7 @@ impl<'input, I: Iterator<Item = (SyntaxKind, &'input str)>> Parser<'input, I> {
 }
 
 fn print(indent: usize, element: SyntaxElementRef<'_>, resolver: &impl Resolver) {
-    let kind: SyntaxKind = element.kind().into();
+    let kind = element.kind();
     print!("{:indent$}", "", indent = indent);
     match element {
         NodeOrToken::Node(node) => {
@@ -140,19 +140,19 @@ fn main() {
         builder: GreenNodeBuilder::new(),
         iter: vec![
             // 1 + 2 * 3 + 4
-            (NUMBER, "1".into()),
-            (WHITESPACE, " ".into()),
-            (ADD, "+".into()),
-            (WHITESPACE, " ".into()),
-            (NUMBER, "2".into()),
-            (WHITESPACE, " ".into()),
-            (MUL, "*".into()),
-            (WHITESPACE, " ".into()),
-            (NUMBER, "3".into()),
-            (WHITESPACE, " ".into()),
-            (ADD, "+".into()),
-            (WHITESPACE, " ".into()),
-            (NUMBER, "4".into()),
+            (NUMBER, "1"),
+            (WHITESPACE, " "),
+            (ADD, "+"),
+            (WHITESPACE, " "),
+            (NUMBER, "2"),
+            (WHITESPACE, " "),
+            (MUL, "*"),
+            (WHITESPACE, " "),
+            (NUMBER, "3"),
+            (WHITESPACE, " "),
+            (ADD, "+"),
+            (WHITESPACE, " "),
+            (NUMBER, "4"),
         ]
         .into_iter()
         .peekable(),
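With this change, the example passes token text as plain `&str`: the builder interns the text itself, so the call sites no longer need an `.into()` conversion. As a rough sketch of the resulting call pattern (the kind values and crate-root imports are illustrative assumptions, not taken from this diff):

    use cstree::{GreenNodeBuilder, SyntaxKind};

    fn build() {
        // Arbitrary kind values, just for this sketch.
        const ROOT: SyntaxKind = SyntaxKind(0);
        const NUMBER: SyntaxKind = SyntaxKind(1);

        let mut builder = GreenNodeBuilder::new();
        builder.start_node(ROOT);
        builder.token(NUMBER, "1"); // token text is now a plain `&str`
        builder.finish_node();
        let (_root, _interner) = builder.finish();
    }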

=== next changed file ===

@@ -364,7 +364,7 @@ impl List {
     }
 
     fn eval(&self, resolver: &impl Resolver) -> Option<i64> {
-        let op = match self.sexps().nth(0)?.kind() {
+        let op = match self.sexps().next()?.kind() {
             SexpKind::Atom(atom) => atom.as_op(resolver)?,
             _ => return None,
         };

=== next changed file ===

@@ -39,6 +39,12 @@ impl NodeCache<'static, Rodeo<Spur, FxBuildHasher>> {
     }
 }
 
+impl Default for NodeCache<'static> {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 impl<'i, I> NodeCache<'i, I>
 where
     I: Interner,
@@ -132,10 +138,7 @@ where
         let text_len = TextSize::try_from(text.len()).unwrap();
         let text = self.interner.get_or_intern(text);
         let data = GreenTokenData { kind, text, text_len };
-        self.tokens
-            .entry(data.clone())
-            .or_insert_with(|| GreenToken::new(data))
-            .clone()
+        self.tokens.entry(data).or_insert_with(|| GreenToken::new(data)).clone()
     }
 }
@@ -146,7 +149,7 @@ enum MaybeOwned<'a, T> {
 }
 
 impl<T> MaybeOwned<'_, T> {
-    fn as_owned(self) -> Option<T> {
+    fn into_owned(self) -> Option<T> {
         match self {
             MaybeOwned::Owned(owned) => Some(owned),
             MaybeOwned::Borrowed(_) => None,
@@ -203,6 +206,12 @@ impl GreenNodeBuilder<'static, 'static, Rodeo<Spur, FxBuildHasher>> {
     }
 }
 
+impl Default for GreenNodeBuilder<'static, 'static> {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 impl<'cache, 'interner, I> GreenNodeBuilder<'cache, 'interner, I>
 where
     I: Interner,
@@ -297,7 +306,7 @@ where
     #[inline]
     pub fn finish(mut self) -> (GreenNode, Option<I>) {
         assert_eq!(self.children.len(), 1);
-        let resolver = self.cache.as_owned().and_then(|cache| cache.interner.as_owned());
+        let resolver = self.cache.into_owned().and_then(|cache| cache.interner.into_owned());
         match self.children.pop().unwrap() {
             NodeOrToken::Node(node) => (node, resolver),
             NodeOrToken::Token(_) => panic!("called `finish` on a `GreenNodeBuilder` which only contained a token"),
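The two new `Default` impls expose the no-argument constructors behind a `Default` bound, and the `as_owned` to `into_owned` rename follows the Rust convention that `into_*` methods consume `self`. A hypothetical usage sketch (assuming `NodeCache` and `GreenNodeBuilder` are exported at the crate root):

    use cstree::{GreenNodeBuilder, NodeCache};

    fn main() {
        // Both calls are equivalent to `new()`, but the types can now
        // also be used where a `Default` bound is required, e.g. in a
        // struct that wants `#[derive(Default)]`.
        let _cache = NodeCache::default();
        let _builder = GreenNodeBuilder::default();
    }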

=== next changed file ===

@@ -42,7 +42,7 @@ impl GreenNodeHead {
 /// Internal node in the immutable tree.
 /// It has other nodes and tokens as children.
-#[derive(Clone, PartialEq, Eq)]
+#[derive(Clone)]
 pub struct GreenNode {
     pub(super) data: ThinArc<GreenNodeHead, PackedGreenElement>,
 }
@@ -136,6 +136,14 @@ impl Hash for GreenNode {
     }
 }
 
+impl PartialEq for GreenNode {
+    fn eq(&self, other: &Self) -> bool {
+        self.data == other.data
+    }
+}
+
+impl Eq for GreenNode {}
+
 #[derive(Debug, Clone)]
 pub struct Children<'a> {
     inner: slice::Iter<'a, PackedGreenElement>,
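Replacing the derived `PartialEq`/`Eq` with manual impls makes the delegation to the `ThinArc` payload explicit instead of relying on derive-generated bounds. The shape of the pattern on stand-in types (not cstree's actual definitions):

    use std::sync::Arc;

    #[derive(PartialEq)]
    struct Head {
        kind: u16,
    }

    struct Node {
        data: Arc<Head>,
    }

    impl PartialEq for Node {
        fn eq(&self, other: &Self) -> bool {
            // Delegate to the payload's comparison, as `GreenNode`
            // delegates to `ThinArc`'s `PartialEq` above.
            self.data == other.data
        }
    }

    // `Eq` is a marker trait, so the manual impl is just an assertion
    // that the equality relation is total.
    impl Eq for Node {}

    fn main() {
        let a = Node { data: Arc::new(Head { kind: 1 }) };
        let b = Node { data: Arc::new(Head { kind: 1 }) };
        assert!(a == b);
    }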
@@ -184,12 +192,12 @@ impl<'a> Iterator for Children<'a> {
     }
 
     #[inline]
-    fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
+    fn fold<Acc, Fold>(self, init: Acc, mut f: Fold) -> Acc
     where
         Fold: FnMut(Acc, Self::Item) -> Acc,
     {
         let mut accum = init;
-        while let Some(x) = self.next() {
+        for x in self {
             accum = f(accum, x);
         }
         accum
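The `for x in self` form suggested by clippy desugars to the same `while let Some(x) = self.next()` loop, and since `fold` takes `self` by value the `mut` binding becomes unnecessary. A self-contained sketch of overriding `fold` on a toy iterator:

    struct Countdown {
        n: u32,
    }

    impl Iterator for Countdown {
        type Item = u32;

        fn next(&mut self) -> Option<u32> {
            if self.n == 0 {
                None
            } else {
                self.n -= 1;
                Some(self.n)
            }
        }

        // Same pattern as `Children::fold` above: consume `self` and
        // loop with `for`, which takes ownership via `IntoIterator`.
        fn fold<Acc, F>(self, init: Acc, mut f: F) -> Acc
        where
            F: FnMut(Acc, Self::Item) -> Acc,
        {
            let mut accum = init;
            for x in self {
                accum = f(accum, x);
            }
            accum
        }
    }

    fn main() {
        // Yields 3, 2, 1, 0.
        assert_eq!(Countdown { n: 4 }.fold(0, |acc, x| acc + x), 6);
    }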

=== next changed file ===

@@ -50,9 +50,9 @@ impl<L: Language, D, R> SyntaxNode<L, D, R> {
         for _ in 0..level {
             write!(res, " ").unwrap();
         }
-        write!(
+        writeln!(
             res,
-            "{}\n",
+            "{}",
             match element {
                 NodeOrToken::Node(node) => node.debug(resolver, false),
                 NodeOrToken::Token(token) => token.debug(resolver),
@@ -109,12 +109,12 @@ impl<L: Language, D, R> Drop for SyntaxNode<L, D, R> {
             // `ref_count`
             let root = self.root();
             let mut root = root.clone_uncounted();
-            let ref_count = unsafe { Box::from_raw(root.data().ref_count) };
+            let ref_count = root.data().ref_count;
             root.drop_recursive();
             let root_data = root.data;
             drop(root);
             unsafe { drop(Box::from_raw(root_data)) };
-            drop(ref_count);
+            unsafe { drop(Box::from_raw(ref_count)) };
         }
     }
 }
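This hunk is the heart of the soundness fix: the old code rebuilt the owning `Box` for the reference count up front, so the unique ownership it asserts was violated when `drop_recursive` later touched the count through raw pointers, exactly the kind of aliasing Miri's Stacked Borrows checks reject. Keeping only the raw pointer alive and deferring `Box::from_raw` to the very last use avoids that. A minimal stand-alone illustration of the pattern (not cstree's actual types):

    fn bump(ptr: *mut usize) {
        // Touches the allocation through the raw pointer, standing in
        // for `drop_recursive` reading the count.
        unsafe { *ptr += 1 };
    }

    fn main() {
        let count: *mut usize = Box::into_raw(Box::new(0usize));

        // Pre-fix shape (unsound): materializing the owning Box here
        // asserts unique access, so the later raw-pointer use in `bump`
        // would invalidate it under Stacked Borrows:
        //     let owned = unsafe { Box::from_raw(count) };
        //     bump(count);
        //     drop(owned);

        // Post-fix shape: use the raw pointer for as long as needed...
        bump(count);
        // ...and reconstruct the Box only for the final deallocation.
        unsafe { drop(Box::from_raw(count)) };
    }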
@@ -129,6 +129,7 @@ impl<L: Language, D, R> SyntaxNode<L, D, R> {
     /// Caller must ensure that the access to the underlying data is unique (no active _mutable or immutable_
     /// references).
     #[inline]
+    #[allow(clippy::mut_from_ref)]
     unsafe fn data_mut(&self) -> &mut NodeData<L, D, R> {
         &mut *self.data
     }
@@ -604,6 +605,7 @@ impl<L: Language, D, R> SyntaxNode<L, D, R> {
     }
 
     #[inline]
+    #[allow(clippy::map_clone)]
     pub fn first_child(&self) -> Option<&SyntaxNode<L, D, R>> {
         let (node, (index, offset)) = filter_nodes(self.green().children_from(0, self.text_range().start())).next()?;
         self.get_or_add_node(node, index, offset).as_node().map(|node| *node)
@@ -616,6 +618,7 @@ impl<L: Language, D, R> SyntaxNode<L, D, R> {
     }
 
     #[inline]
+    #[allow(clippy::map_clone)]
     pub fn last_child(&self) -> Option<&SyntaxNode<L, D, R>> {
         let (node, (index, offset)) = filter_nodes(
             self.green()
@@ -637,7 +640,7 @@ impl<L: Language, D, R> SyntaxNode<L, D, R> {
     #[inline]
     pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<L, D, R>> {
         let (node, (index, offset)) = filter_nodes(self.green().children_from(n + 1, offset)).next()?;
-        self.get_or_add_node(node, index, offset).as_node().map(|node| *node)
+        self.get_or_add_node(node, index, offset).as_node().copied()
     }
 
     #[inline]
@@ -649,7 +652,7 @@ impl<L: Language, D, R> SyntaxNode<L, D, R> {
     #[inline]
     pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode<L, D, R>> {
         let (node, (index, offset)) = filter_nodes(self.green().children_to(n, offset)).next()?;
-        self.get_or_add_node(node, index, offset).as_node().map(|node| *node)
+        self.get_or_add_node(node, index, offset).as_node().copied()
     }
 
     #[inline]
@@ -668,7 +671,7 @@ impl<L: Language, D, R> SyntaxNode<L, D, R> {
                 .children_from((index + 1) as usize, self.text_range().end()),
         )
         .next()?;
-        parent.get_or_add_node(node, index, offset).as_node().map(|node| *node)
+        parent.get_or_add_node(node, index, offset).as_node().copied()
     }
 
     #[inline]
@@ -688,7 +691,7 @@ impl<L: Language, D, R> SyntaxNode<L, D, R> {
         let (node, (index, offset)) =
             filter_nodes(parent.green().children_to(index as usize, self.text_range().start())).next()?;
-        parent.get_or_add_node(node, index, offset).as_node().map(|node| *node)
+        parent.get_or_add_node(node, index, offset).as_node().copied()
     }
 
     #[inline]
@@ -869,7 +872,7 @@ where
     R: Resolver,
 {
     #[inline]
-    pub fn text<'n>(&'n self) -> SyntaxText<'n, 'n, R, L, D, R> {
+    pub fn text(&self) -> SyntaxText<'_, '_, R, L, D, R> {
         SyntaxText::new(self, self.resolver().as_ref())
     }
 }
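The new `text` signature relies on lifetime elision: both elided lifetimes in the return type are tied to `&self`, so `'_` expresses the same thing the named `'n` did. A stand-alone sketch of the equivalence (the types here are illustrative):

    struct Text<'n>(&'n str);

    struct Node {
        content: String,
    }

    impl Node {
        // Explicit form, as in the old signature:
        fn text_explicit<'n>(&'n self) -> Text<'n> {
            Text(&self.content)
        }

        // Elided form, as in the new signature -- identical meaning.
        fn text(&self) -> Text<'_> {
            Text(&self.content)
        }
    }

    fn main() {
        let node = Node { content: "1 + 2".to_string() };
        assert_eq!(node.text().0, node.text_explicit().0);
    }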

=== next changed file ===

@@ -42,7 +42,6 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D, R>
     }
 
     pub fn char_at(&self, offset: TextSize) -> Option<char> {
-        let offset = offset.into();
         let mut start: TextSize = 0.into();
         let res = self.try_for_each_chunk(|chunk| {
             let end = start + TextSize::of(chunk);
@@ -58,7 +57,7 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D, R>
     pub fn slice<Ra: private::SyntaxTextRange>(&self, range: Ra) -> Self {
         let start = range.start().unwrap_or_default();
-        let end = range.end().unwrap_or(self.len());
+        let end = range.end().unwrap_or_else(|| self.len());
         assert!(start <= end);
         let len = end - start;
         let start = self.range.start() + start;
@@ -98,7 +97,10 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D, R>
     pub fn for_each_chunk<F: FnMut(&str)>(&self, mut f: F) {
         enum Void {}
-        match self.try_for_each_chunk(|chunk| Ok::<(), Void>(f(chunk))) {
+        match self.try_for_each_chunk(|chunk| {
+            f(chunk);
+            Ok::<(), Void>(())
+        }) {
             Ok(()) => (),
             Err(void) => match void {},
         }
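The `Void` trick lets the infallible `for_each_chunk` reuse the fallible `try_for_each_chunk`: an empty enum has no values, so the error case is statically impossible and `match void {}` discharges it without a panic path. A self-contained sketch (the `try_for_each`-style function is a stand-in for cstree's API):

    // An empty enum: no value of this type can ever be constructed.
    enum Void {}

    // Stand-in for a fallible traversal like `try_for_each_chunk`.
    fn try_for_each<E>(chunks: &[&str], mut f: impl FnMut(&str) -> Result<(), E>) -> Result<(), E> {
        for chunk in chunks {
            f(chunk)?;
        }
        Ok(())
    }

    // Infallible wrapper: instantiate the error type with `Void` so the
    // `Err` arm can be dispatched with an empty match.
    fn for_each(chunks: &[&str], mut f: impl FnMut(&str)) {
        match try_for_each(chunks, |chunk| {
            f(chunk);
            Ok::<(), Void>(())
        }) {
            Ok(()) => (),
            Err(void) => match void {}, // statically unreachable
        }
    }

    fn main() {
        for_each(&["ab", "cd"], |chunk| print!("{}", chunk));
    }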
@@ -319,7 +321,7 @@ mod tests {
         let mut builder = GreenNodeBuilder::new();
         builder.start_node(SyntaxKind(62));
         for &chunk in chunks.iter() {
-            builder.token(SyntaxKind(92), chunk.into())
+            builder.token(SyntaxKind(92), chunk);
         }
         builder.finish_node();
         let (node, interner) = builder.finish();
@@ -336,7 +338,7 @@ mod tests {
         let expected = t1.to_string() == t2.to_string();
         let actual = t1 == t2;
         assert_eq!(expected, actual, "`{}` (SyntaxText) `{}` (SyntaxText)", t1, t2);
-        let actual = t1 == &*t2.to_string();
+        let actual = t1 == t2.to_string().as_str();
         assert_eq!(expected, actual, "`{}` (SyntaxText) `{}` (&str)", t1, t2);
     }
 
     fn check(t1: &[&str], t2: &[&str]) {

=== next changed file ===

@@ -27,7 +27,7 @@ fn create() {
     assert_eq!(tree.syntax_kind(), SyntaxKind(0));
     assert_eq!(tree.kind(), SyntaxKind(0));
     {
-        let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().nth(0).unwrap();
+        let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap();
         let leaf1_0 = leaf1_0.into_token().unwrap();
         assert_eq!(leaf1_0.syntax_kind(), SyntaxKind(5));
         assert_eq!(leaf1_0.kind(), SyntaxKind(5));
@@ -86,7 +86,7 @@ fn with_interner() {
     let tree: SyntaxNode = SyntaxNode::new_root(tree);
     let resolver = interner;
     {
-        let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().nth(0).unwrap();
+        let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap();
         let leaf1_0 = leaf1_0.into_token().unwrap();
         assert_eq!(leaf1_0.resolve_text(&resolver), "1.0");
         assert_eq!(leaf1_0.text_range(), TextRange::at(6.into(), 3.into()));
@@ -105,7 +105,7 @@ fn inline_resolver() {
     let tree = build_tree_with_cache(&tree, &mut cache);
     let tree: SyntaxNode<(), Rodeo> = SyntaxNode::new_root_with_resolver(tree, interner);
     {
-        let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().nth(0).unwrap();
+        let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap();
         let leaf1_0 = leaf1_0.into_token().unwrap();
         assert_eq!(leaf1_0.text(), "1.0");
         assert_eq!(leaf1_0.text_range(), TextRange::at(6.into(), 3.into()));

=== next changed file ===

@@ -22,30 +22,39 @@
 // The semantics of Arc are alread documented in the Rust docs, so we don't
 // duplicate those here.
 #![allow(missing_docs)]
+#![allow(clippy::all)]
 
 extern crate nodrop;
-#[cfg(feature = "servo")] extern crate serde;
+#[cfg(feature = "servo")]
+extern crate serde;
 extern crate stable_deref_trait;
 
 use nodrop::NoDrop;
 #[cfg(feature = "servo")]
 use serde::{Deserialize, Serialize};
 use stable_deref_trait::{CloneStableDeref, StableDeref};
-use std::{alloc::Layout, isize, mem::align_of_val, usize};
-use std::borrow;
-use std::cmp::Ordering;
-use std::convert::From;
-use std::fmt;
-use std::hash::{Hash, Hasher};
-use std::iter::{ExactSizeIterator, Iterator};
-use std::mem;
-use std::ops::{Deref, DerefMut};
-use std::os::raw::c_void;
-use std::process;
-use std::ptr;
-use std::slice;
-use std::sync::atomic;
-use std::sync::atomic::Ordering::{Acquire, Relaxed, Release};
+use std::{
+    alloc::Layout,
+    borrow,
+    cmp::Ordering,
+    convert::From,
+    fmt,
+    hash::{Hash, Hasher},
+    isize,
+    iter::{ExactSizeIterator, Iterator},
+    mem,
+    mem::align_of_val,
+    ops::{Deref, DerefMut},
+    os::raw::c_void,
+    process,
+    ptr::{self, NonNull},
+    slice,
+    sync::{
+        atomic,
+        atomic::Ordering::{Acquire, Relaxed, Release},
+    },
+    usize,
+};
@@ -55,8 +64,8 @@ use std::sync::atomic::Ordering::{Acquire, Relaxed, Release};
 /// This has the same safety requirements as `align_of_val_raw`. In effect:
 ///
 /// - This function is safe for any argument if `T` is sized, and
-/// - if `T` is unsized, the pointer must have appropriate pointer metadata
-///   acquired from the real instance that you are getting this offset for.
+/// - if `T` is unsized, the pointer must have appropriate pointer metadata acquired from the real instance that you are
+///   getting this offset for.
 unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
     // Align the unsized value to the end of the `ArcInner`.
     // Because it is `?Sized`, it will always be the last field in memory.
@@ -84,65 +93,9 @@ fn padding_needed_for(layout: &Layout, align: usize) -> usize {
 /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
 const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 
-/// Wrapper type for pointers to get the non-zero optimization. When
-/// NonZero/Shared/Unique are stabilized, we should just use Shared
-/// here to get the same effect. Gankro is working on this in [1].
-///
-/// It's unfortunate that this needs to infect all the caller types
-/// with 'static. It would be nice to just use a &() and a PhantomData<T>
-/// instead, but then the compiler can't determine whether the &() should
-/// be thin or fat (which depends on whether or not T is sized). Given
-/// that this is all a temporary hack, this restriction is fine for now.
-///
-/// [1]: https://github.com/rust-lang/rust/issues/27730
-// FIXME: remove this and use std::ptr::NonNull when Firefox requires Rust 1.25+
-pub struct NonZeroPtrMut<T: ?Sized + 'static>(&'static mut T);
-
-impl<T: ?Sized> NonZeroPtrMut<T> {
-    pub fn new(ptr: *mut T) -> Self {
-        assert!(!(ptr as *mut u8).is_null());
-        NonZeroPtrMut(unsafe { mem::transmute(ptr) })
-    }
-
-    pub fn ptr(&self) -> *mut T {
-        self.0 as *const T as *mut T
-    }
-}
-
-impl<T: ?Sized + 'static> Clone for NonZeroPtrMut<T> {
-    fn clone(&self) -> Self {
-        NonZeroPtrMut::new(self.ptr())
-    }
-}
-
-impl<T: ?Sized + 'static> fmt::Pointer for NonZeroPtrMut<T> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&self.ptr(), f)
-    }
-}
-
-impl<T: ?Sized + 'static> fmt::Debug for NonZeroPtrMut<T> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        <Self as fmt::Pointer>::fmt(self, f)
-    }
-}
-
-impl<T: ?Sized + 'static> PartialEq for NonZeroPtrMut<T> {
-    fn eq(&self, other: &Self) -> bool {
-        self.ptr() == other.ptr()
-    }
-}
-
-impl<T: ?Sized + 'static> Eq for NonZeroPtrMut<T> {}
-
-impl<T: Sized + 'static> Hash for NonZeroPtrMut<T> {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        self.ptr().hash(state)
-    }
-}
-
 #[repr(C)]
 pub struct Arc<T: ?Sized + 'static> {
-    p: NonZeroPtrMut<ArcInner<T>>,
+    p: NonNull<ArcInner<T>>,
 }
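`NonZeroPtrMut` smuggled a raw pointer through a `&'static mut T`, fabricating a mutable reference that Miri also objects to; `std::ptr::NonNull` provides the same non-null niche without claiming any reference semantics. A minimal sketch of the same shape on stand-in types:

    use std::ptr::NonNull;

    struct Inner {
        value: u32,
    }

    struct Handle {
        p: NonNull<Inner>,
    }

    impl Handle {
        fn new(value: u32) -> Self {
            let raw = Box::into_raw(Box::new(Inner { value }));
            // safety: `Box::into_raw` never returns null.
            Handle { p: unsafe { NonNull::new_unchecked(raw) } }
        }

        fn value(&self) -> u32 {
            // safety: `p` points at the live allocation made in `new`.
            unsafe { self.p.as_ref().value }
        }
    }

    impl Drop for Handle {
        fn drop(&mut self) {
            // safety: `Handle` exclusively owns the allocation.
            unsafe { drop(Box::from_raw(self.p.as_ptr())) };
        }
    }

    fn main() {
        // The non-null niche keeps `Option<Handle>` pointer-sized,
        // which is what `NonZeroPtrMut` existed to guarantee.
        assert_eq!(
            std::mem::size_of::<Option<Handle>>(),
            std::mem::size_of::<Handle>()
        );
        assert_eq!(Handle::new(7).value(), 7);
    }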
/// An Arc that is known to be uniquely owned /// An Arc that is known to be uniquely owned
@@ -167,6 +120,7 @@ impl<T> UniqueArc<T> {
 impl<T> Deref for UniqueArc<T> {
     type Target = T;
+
     fn deref(&self) -> &T {
         &*self.0
     }
@@ -185,7 +139,7 @@ unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
 #[repr(C)]
 struct ArcInner<T: ?Sized> {
     count: atomic::AtomicUsize,
     data: T,
 }
 
 unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
@@ -196,9 +150,12 @@ impl<T> Arc<T> {
     pub fn new(data: T) -> Self {
         let x = Box::new(ArcInner {
             count: atomic::AtomicUsize::new(1),
-            data: data,
+            data,
         });
-        Arc { p: NonZeroPtrMut::new(Box::into_raw(x)) }
+        Arc {
+            // safety: we created `x`
+            p: unsafe { NonNull::new_unchecked(Box::into_raw(x)) },
+        }
     }
 
     #[inline]
@@ -209,13 +166,13 @@ impl<T> Arc<T> {
     }
 
     #[inline]
     pub unsafe fn from_raw(ptr: *const T) -> Self {
         // To find the corresponding pointer to the `ArcInner` we need
         // to subtract the offset of the `data` field from the pointer.
         let offset = data_offset(ptr);
         let ptr = (ptr as *const u8).offset(-offset);
         Arc {
-            p: NonZeroPtrMut::new(ptr as *mut ArcInner<T>),
+            p: NonNull::new_unchecked(ptr as *mut ArcInner<T>),
         }
     }
@@ -230,7 +187,8 @@ impl<T> Arc<T> {
     /// provided callback. The refcount is not modified.
     #[inline(always)]
     pub fn with_raw_offset_arc<F, U>(&self, f: F) -> U
-    where F: FnOnce(&RawOffsetArc<T>) -> U
+    where
+        F: FnOnce(&RawOffsetArc<T>) -> U,
     {
         // Synthesize transient Arc, which never touches the refcount of the ArcInner.
         let transient = unsafe { NoDrop::new(Arc::into_raw_offset(ptr::read(self))) };
@@ -248,7 +206,7 @@ impl<T> Arc<T> {
     /// Returns the address on the heap of the Arc itself -- not the T within it -- for memory
     /// reporting.
     pub fn heap_ptr(&self) -> *const c_void {
-        self.p.ptr() as *const ArcInner<T> as *const c_void
+        self.p.as_ptr() as *const ArcInner<T> as *const c_void
     }
 }
@@ -269,14 +227,13 @@ impl<T: ?Sized> Arc<T> {
         let _ = Box::from_raw(self.ptr());
     }
 
     #[inline]
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
         this.ptr() == other.ptr()
     }
 
     fn ptr(&self) -> *mut ArcInner<T> {
-        self.p.ptr()
+        self.p.as_ptr()
     }
 }
@@ -309,7 +266,12 @@ impl<T: ?Sized> Clone for Arc<T> {
             process::abort();
         }
 
-        Arc { p: NonZeroPtrMut::new(self.ptr()) }
+        Arc {
+            // safety: as described above, as long as the original reference is alive, the
+            // allocation is valid. Since the allocation existed previously, the pointer to it is
+            // not null.
+            p: unsafe { NonNull::new_unchecked(self.ptr()) },
+        }
     }
 }
@@ -497,8 +459,7 @@ unsafe impl<T: ?Sized> StableDeref for Arc<T> {}
 unsafe impl<T: ?Sized> CloneStableDeref for Arc<T> {}
 
 #[cfg(feature = "servo")]
-impl<'de, T: Deserialize<'de>> Deserialize<'de> for Arc<T>
-{
+impl<'de, T: Deserialize<'de>> Deserialize<'de> for Arc<T> {
     fn deserialize<D>(deserializer: D) -> Result<Arc<T>, D::Error>
     where
         D: ::serde::de::Deserializer<'de>,
@@ -508,8 +469,7 @@ impl<'de, T: Deserialize<'de>> Deserialize<'de> for Arc<T>
 }
 
 #[cfg(feature = "servo")]
-impl<T: Serialize> Serialize for Arc<T>
-{
+impl<T: Serialize> Serialize for Arc<T> {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: ::serde::ser::Serializer,
@@ -539,35 +499,22 @@ impl<H, T> Arc<HeaderSlice<H, [T]>> {
     /// iterator to generate the slice. The resulting Arc will be fat.
     #[inline]
     pub fn from_header_and_iter<I>(header: H, mut items: I) -> Self
-    where I: Iterator<Item=T> + ExactSizeIterator
+    where
+        I: Iterator<Item = T> + ExactSizeIterator,
     {
-        use ::std::mem::size_of;
+        use std::mem::size_of;
         assert_ne!(size_of::<T>(), 0, "Need to think about ZST");
 
         // Compute the required size for the allocation.
         let num_items = items.len();
         let size = {
-            // First, determine the alignment of a hypothetical pointer to a
-            // HeaderSlice.
-            let fake_slice_ptr_align: usize = mem::align_of::<ArcInner<HeaderSlice<H, [T; 1]>>>();
-
-            // Next, synthesize a totally garbage (but properly aligned) pointer
-            // to a sequence of T.
-            let fake_slice_ptr = fake_slice_ptr_align as *const T;
-
-            // Convert that sequence to a fat pointer. The address component of
-            // the fat pointer will be garbage, but the length will be correct.
-            let fake_slice = unsafe { slice::from_raw_parts(fake_slice_ptr, num_items) };
-
-            // Pretend the garbage address points to our allocation target (with
-            // a trailing sequence of T), rather than just a sequence of T.
-            let fake_ptr = fake_slice as *const [T] as *const ArcInner<HeaderSlice<H, [T]>>;
-            let fake_ref: &ArcInner<HeaderSlice<H, [T]>> = unsafe { &*fake_ptr };
-
-            // Use size_of_val, which will combine static information about the
-            // type with the length from the fat pointer. The garbage address
-            // will not be used.
-            mem::size_of_val(fake_ref)
+            let inner_layout = Layout::new::<ArcInner<HeaderSlice<H, [T; 0]>>>();
+            let slice_layout =
+                Layout::array::<T>(num_items).expect("arithmetic overflow when trying to create array layout");
+            let slice_align = mem::align_of_val::<[T]>(&[]);
+            assert_eq!(slice_layout.align(), slice_align);
+            let padding = padding_needed_for(&inner_layout, slice_align);
+            inner_layout.size() + padding + slice_layout.size()
         };
 
         let ptr: *mut ArcInner<HeaderSlice<H, [T]>>;
@@ -618,7 +565,10 @@ impl<H, T> Arc<HeaderSlice<H, [T]>> {
         // Return the fat Arc.
         assert_eq!(size_of::<Self>(), size_of::<usize>() * 2, "The Arc will be fat");
 
-        Arc { p: NonZeroPtrMut::new(ptr) }
+        Arc {
+            // safety: we have just created the underlying allocation
+            p: unsafe { NonNull::new_unchecked(ptr) },
+        }
     }
 
     #[inline]
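The rewritten size computation replaces the old "synthesize a garbage fat pointer and call `size_of_val` on it" trick, another construct Miri rejects, with plain arithmetic on `std::alloc::Layout`. A stand-alone sketch of the same calculation for a header-plus-slice allocation (type and helper names are stand-ins mirroring the ones above):

    use std::alloc::Layout;

    #[repr(C)]
    struct Inner<H, T: ?Sized> {
        count: usize,
        header: H,
        slice: T,
    }

    // Mirrors `padding_needed_for`: bytes to add after `layout` so the
    // next field starts aligned to `align` (a power of two).
    fn padding_needed_for(layout: &Layout, align: usize) -> usize {
        let len = layout.size();
        let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
        len_rounded_up.wrapping_sub(len)
    }

    // Size of an `Inner<H, [T]>` holding `n` trailing items, computed
    // from layouts alone -- no fake references required.
    fn alloc_size<H, T>(n: usize) -> usize {
        // `[T; 0]` captures everything before the slice data begins.
        let inner_layout = Layout::new::<Inner<H, [T; 0]>>();
        let slice_layout = Layout::array::<T>(n).expect("arithmetic overflow");
        let padding = padding_needed_for(&inner_layout, slice_layout.align());
        inner_layout.size() + padding + slice_layout.size()
    }

    fn main() {
        // u8 header, 3 u64 items: 8 (count) + 1 + 7 (padding) + 24 = 40.
        assert_eq!(alloc_size::<u8, u64>(3), 40);
    }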
#[inline] #[inline]
@@ -644,10 +594,7 @@ pub struct HeaderWithLength<H> {
 impl<H> HeaderWithLength<H> {
     /// Creates a new HeaderWithLength.
     pub fn new(header: H, length: usize) -> Self {
-        HeaderWithLength {
-            header: header,
-            length: length,
-        }
+        HeaderWithLength { header, length }
     }
 }
@@ -662,13 +609,11 @@ unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}
 // Synthesize a fat pointer from a thin pointer.
 //
 // See the comment around the analogous operation in from_header_and_iter.
-fn thin_to_thick<H, T>(thin: *mut ArcInner<HeaderSliceWithLength<H, [T; 1]>>)
-    -> *mut ArcInner<HeaderSliceWithLength<H, [T]>>
-{
+fn thin_to_thick<H, T>(
+    thin: *mut ArcInner<HeaderSliceWithLength<H, [T; 1]>>,
+) -> *mut ArcInner<HeaderSliceWithLength<H, [T]>> {
     let len = unsafe { (*thin).data.header.length };
-    let fake_slice: *mut [T] = unsafe {
-        slice::from_raw_parts_mut(thin as *mut T, len)
-    };
+    let fake_slice: *mut [T] = unsafe { slice::from_raw_parts_mut(thin as *mut T, len) };
 
     fake_slice as *mut ArcInner<HeaderSliceWithLength<H, [T]>>
 }
@@ -678,11 +623,13 @@ impl<H: 'static, T: 'static> ThinArc<H, T> {
     /// provided callback. The refcount is not modified.
     #[inline]
     pub fn with_arc<F, U>(&self, f: F) -> U
-    where F: FnOnce(&Arc<HeaderSliceWithLength<H, [T]>>) -> U
+    where
+        F: FnOnce(&Arc<HeaderSliceWithLength<H, [T]>>) -> U,
     {
         // Synthesize transient Arc, which never touches the refcount of the ArcInner.
         let transient = NoDrop::new(Arc {
-            p: NonZeroPtrMut::new(thin_to_thick(self.ptr))
+            // safety: the original thin arc guarantees the object was and is still alive
+            p: unsafe { NonNull::new_unchecked(thin_to_thick(self.ptr)) },
         });
 
         // Expose the transient Arc to the callback, which may clone it if it wants.
@@ -733,13 +680,16 @@ impl<H: 'static, T: 'static> Arc<HeaderSliceWithLength<H, [T]>> {
     /// is not modified.
     #[inline]
     pub fn into_thin(a: Self) -> ThinArc<H, T> {
-        assert_eq!(a.header.length, a.slice.len(),
-            "Length needs to be correct for ThinArc to work");
+        assert_eq!(
+            a.header.length,
+            a.slice.len(),
+            "Length needs to be correct for ThinArc to work"
+        );
         let fat_ptr: *mut ArcInner<HeaderSliceWithLength<H, [T]>> = a.ptr();
         mem::forget(a);
         let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
         ThinArc {
-            ptr: thin_ptr as *mut ArcInner<HeaderSliceWithLength<H, [T; 1]>>
+            ptr: thin_ptr as *mut ArcInner<HeaderSliceWithLength<H, [T; 1]>>,
         }
     }
@@ -750,7 +700,8 @@ impl<H: 'static, T: 'static> Arc<HeaderSliceWithLength<H, [T]>> {
         let ptr = thin_to_thick(a.ptr);
         mem::forget(a);
         Arc {
-            p: NonZeroPtrMut::new(ptr)
+            // safety: as above
+            p: unsafe { NonNull::new_unchecked(ptr) },
        }
     }
 }
@@ -758,11 +709,7 @@ impl<H: 'static, T: 'static> Arc<HeaderSliceWithLength<H, [T]>> {
 impl<H: PartialEq + 'static, T: PartialEq + 'static> PartialEq for ThinArc<H, T> {
     #[inline]
     fn eq(&self, other: &ThinArc<H, T>) -> bool {
-        ThinArc::with_arc(self, |a| {
-            ThinArc::with_arc(other, |b| {
-                *a == *b
-            })
-        })
+        ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| *a == *b))
     }
 }
@@ -786,7 +733,7 @@ impl<H: Eq + 'static, T: Eq + 'static> Eq for ThinArc<H, T> {}
 #[derive(Eq)]
 #[repr(C)]
 pub struct RawOffsetArc<T: 'static> {
-    ptr: NonZeroPtrMut<T>,
+    ptr: NonNull<T>,
 }
 
 unsafe impl<T: 'static + Sync + Send> Send for RawOffsetArc<T> {}
@@ -794,8 +741,9 @@ unsafe impl<T: 'static + Sync + Send> Sync for RawOffsetArc<T> {}
 impl<T: 'static> Deref for RawOffsetArc<T> {
     type Target = T;
+
     fn deref(&self) -> &Self::Target {
-        unsafe { &*self.ptr.ptr() }
+        unsafe { &*self.ptr.as_ptr() }
     }
 }
@@ -812,7 +760,6 @@ impl<T: 'static> Drop for RawOffsetArc<T> {
     }
 }
 
-
 impl<T: fmt::Debug + 'static> fmt::Debug for RawOffsetArc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Debug::fmt(&**self, f)
@@ -834,10 +781,11 @@ impl<T: 'static> RawOffsetArc<T> {
     /// provided callback. The refcount is not modified.
     #[inline]
     pub fn with_arc<F, U>(&self, f: F) -> U
-    where F: FnOnce(&Arc<T>) -> U
+    where
+        F: FnOnce(&Arc<T>) -> U,
     {
         // Synthesize transient Arc, which never touches the refcount of the ArcInner.
-        let transient = unsafe { NoDrop::new(Arc::from_raw(self.ptr.ptr())) };
+        let transient = unsafe { NoDrop::new(Arc::from_raw(self.ptr.as_ptr())) };
 
         // Expose the transient Arc to the callback, which may clone it if it wants.
         let result = f(&transient);
@@ -854,7 +802,10 @@ impl<T: 'static> RawOffsetArc<T> {
     /// If uniquely owned, provide a mutable reference
     /// Else create a copy, and mutate that
     #[inline]
-    pub fn make_mut(&mut self) -> &mut T where T: Clone {
+    pub fn make_mut(&mut self) -> &mut T
+    where
+        T: Clone,
+    {
         unsafe {
             // extract the RawOffsetArc as an owned variable
             let this = ptr::read(self);
@@ -890,7 +841,8 @@ impl<T: 'static> Arc<T> {
     #[inline]
     pub fn into_raw_offset(a: Self) -> RawOffsetArc<T> {
         RawOffsetArc {
-            ptr: NonZeroPtrMut::new(Arc::into_raw(a) as *mut T),
+            // safety: as above
+            ptr: unsafe { NonNull::new_unchecked(Arc::into_raw(a) as *mut T) },
         }
     }
@@ -898,7 +850,7 @@ impl<T: 'static> Arc<T> {
     /// is not modified.
     #[inline]
     pub fn from_raw_offset(a: RawOffsetArc<T>) -> Self {
-        let ptr = a.ptr.ptr();
+        let ptr = a.ptr.as_ptr();
         mem::forget(a);
         unsafe { Arc::from_raw(ptr) }
     }
@@ -946,7 +898,11 @@ impl<'a, T> ArcBorrow<'a, T> {
     }
 
     #[inline]
-    pub fn with_arc<F, U>(&self, f: F) -> U where F: FnOnce(&Arc<T>) -> U, T: 'static {
+    pub fn with_arc<F, U>(&self, f: F) -> U
+    where
+        F: FnOnce(&Arc<T>) -> U,
+        T: 'static,
+    {
         // Synthesize transient Arc, which never touches the refcount.
         let transient = unsafe { NoDrop::new(Arc::from_raw(self.0)) };
@@ -974,18 +930,24 @@ impl<'a, T> Deref for ArcBorrow<'a, T> {
 #[cfg(test)]
 mod tests {
-    use std::clone::Clone;
-    use std::ops::Drop;
-    use std::sync::atomic;
-    use std::sync::atomic::Ordering::{Acquire, SeqCst};
     use super::{Arc, HeaderWithLength, ThinArc};
+    use std::{
+        clone::Clone,
+        ops::Drop,
+        sync::{
+            atomic,
+            atomic::Ordering::{Acquire, SeqCst},
+        },
+    };
 
     #[derive(PartialEq)]
     struct Canary(*mut atomic::AtomicUsize);
 
     impl Drop for Canary {
         fn drop(&mut self) {
-            unsafe { (*self.0).fetch_add(1, SeqCst); }
+            unsafe {
+                (*self.0).fetch_add(1, SeqCst);
+            }
         }
     }
} }