diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..9d67e81 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,130 @@ +name: CI +on: + push: + branches: + - master + pull_request: + # Daily + schedule: + - cron: "0 4 * * *" + + # Allows to run this workflow manually from the Actions tab + workflow_dispatch: + +env: + RUST_LOG: info + RUST_BACKTRACE: 1 + +jobs: + test: + name: Test ${{ matrix.rust }} on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + rust: [stable, nightly] + + steps: + - uses: actions/checkout@v2 + - uses: hecrj/setup-rust-action@v1 + with: + rust-version: ${{ matrix.rust }} + - run: cargo test --verbose --all-features + - run: cargo test --release --verbose --all-features + + check: + name: Check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: nightly + override: true + + - uses: actions-rs/cargo@v1 + with: + command: check + + clippy: + name: Clippy + runs-on: ubuntu-latest + + env: + RUSTFLAGS: -Dwarnings + + steps: + - uses: actions/checkout@v2 + - uses: hecrj/setup-rust-action@v1 + with: + components: clippy + - run: cargo clippy --all-targets --all-features --verbose -- -D warnings + + rustfmt: + name: Rustfmt + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: hecrj/setup-rust-action@v1 + with: + rust-version: nightly + components: rustfmt + - run: cargo fmt -p cstree -- --check + + rustdoc: + name: Check doc links + runs-on: ubuntu-latest + env: + RUSTDOCFLAGS: -Dwarnings + + steps: + - uses: actions/checkout@v2 + - uses: hecrj/setup-rust-action@v1 + - run: cargo doc --all-features --document-private-items --no-deps + + miri-test: + name: Miri ${{ matrix.os }} + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + + steps: + - uses: actions/checkout@v2 + - uses: hecrj/setup-rust-action@v1 + with: + rust-version: nightly + components: miri + - run: cargo miri test --verbose --all-features -- -Zmiri-disable-isolation + + sanitizers: + name: ${{ matrix.sanitizer }} sanitizer + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + sanitizer: [address, memory, thread, leak] + steps: + - uses: actions/checkout@v2 + - uses: hecrj/setup-rust-action@v1 + with: + rust-version: nightly + components: rust-src + - name: Test with sanitizer + env: + RUSTFLAGS: -Zsanitizer=${{ matrix.sanitizer }} + RUSTDOCFLAGS: -Zsanitizer=${{ matrix.sanitizer }} + # only needed by asan + ASAN_OPTIONS: detect_stack_use_after_return=1 + # Asan's leak detection occasionally complains + # about some small leaks if backtraces are captured, + # so ensure they're not + RUST_BACKTRACE: 0 + run: | + cargo test -Zbuild-std --verbose --target=x86_64-unknown-linux-gnu --all-features diff --git a/examples/math.rs b/examples/math.rs index 46e7b15..47414a2 100644 --- a/examples/math.rs +++ b/examples/math.rs @@ -121,7 +121,7 @@ impl<'input, I: Iterator> Parser<'input, I> { } fn print(indent: usize, element: SyntaxElementRef<'_>, resolver: &impl Resolver) { - let kind: SyntaxKind = element.kind().into(); + let kind = element.kind(); print!("{:indent$}", "", indent = indent); match element { NodeOrToken::Node(node) => { @@ -140,19 +140,19 @@ fn main() { builder: GreenNodeBuilder::new(), iter: vec![ // 1 + 2 * 3 + 4 - (NUMBER, "1".into()), - (WHITESPACE, " ".into()), - 
(ADD, "+".into()), - (WHITESPACE, " ".into()), - (NUMBER, "2".into()), - (WHITESPACE, " ".into()), - (MUL, "*".into()), - (WHITESPACE, " ".into()), - (NUMBER, "3".into()), - (WHITESPACE, " ".into()), - (ADD, "+".into()), - (WHITESPACE, " ".into()), - (NUMBER, "4".into()), + (NUMBER, "1"), + (WHITESPACE, " "), + (ADD, "+"), + (WHITESPACE, " "), + (NUMBER, "2"), + (WHITESPACE, " "), + (MUL, "*"), + (WHITESPACE, " "), + (NUMBER, "3"), + (WHITESPACE, " "), + (ADD, "+"), + (WHITESPACE, " "), + (NUMBER, "4"), ] .into_iter() .peekable(), diff --git a/examples/s_expressions.rs b/examples/s_expressions.rs index 57fdc3d..290d00a 100644 --- a/examples/s_expressions.rs +++ b/examples/s_expressions.rs @@ -364,7 +364,7 @@ impl List { } fn eval(&self, resolver: &impl Resolver) -> Option { - let op = match self.sexps().nth(0)?.kind() { + let op = match self.sexps().next()?.kind() { SexpKind::Atom(atom) => atom.as_op(resolver)?, _ => return None, }; diff --git a/src/green/builder.rs b/src/green/builder.rs index 15875ab..ba5f3b0 100644 --- a/src/green/builder.rs +++ b/src/green/builder.rs @@ -39,6 +39,12 @@ impl NodeCache<'static, Rodeo> { } } +impl Default for NodeCache<'static> { + fn default() -> Self { + Self::new() + } +} + impl<'i, I> NodeCache<'i, I> where I: Interner, @@ -132,10 +138,7 @@ where let text_len = TextSize::try_from(text.len()).unwrap(); let text = self.interner.get_or_intern(text); let data = GreenTokenData { kind, text, text_len }; - self.tokens - .entry(data.clone()) - .or_insert_with(|| GreenToken::new(data)) - .clone() + self.tokens.entry(data).or_insert_with(|| GreenToken::new(data)).clone() } } @@ -146,7 +149,7 @@ enum MaybeOwned<'a, T> { } impl MaybeOwned<'_, T> { - fn as_owned(self) -> Option { + fn into_owned(self) -> Option { match self { MaybeOwned::Owned(owned) => Some(owned), MaybeOwned::Borrowed(_) => None, @@ -203,6 +206,12 @@ impl GreenNodeBuilder<'static, 'static, Rodeo> { } } +impl Default for GreenNodeBuilder<'static, 'static> { + fn default() -> Self { + Self::new() + } +} + impl<'cache, 'interner, I> GreenNodeBuilder<'cache, 'interner, I> where I: Interner, @@ -297,7 +306,7 @@ where #[inline] pub fn finish(mut self) -> (GreenNode, Option) { assert_eq!(self.children.len(), 1); - let resolver = self.cache.as_owned().and_then(|cache| cache.interner.as_owned()); + let resolver = self.cache.into_owned().and_then(|cache| cache.interner.into_owned()); match self.children.pop().unwrap() { NodeOrToken::Node(node) => (node, resolver), NodeOrToken::Token(_) => panic!("called `finish` on a `GreenNodeBuilder` which only contained a token"), diff --git a/src/green/node.rs b/src/green/node.rs index d090cd5..5f6f540 100644 --- a/src/green/node.rs +++ b/src/green/node.rs @@ -42,7 +42,7 @@ impl GreenNodeHead { /// Internal node in the immutable tree. /// It has other nodes and tokens as children. 
-#[derive(Clone, PartialEq, Eq)] +#[derive(Clone)] pub struct GreenNode { pub(super) data: ThinArc, } @@ -136,6 +136,14 @@ impl Hash for GreenNode { } } +impl PartialEq for GreenNode { + fn eq(&self, other: &Self) -> bool { + self.data == other.data + } +} + +impl Eq for GreenNode {} + #[derive(Debug, Clone)] pub struct Children<'a> { inner: slice::Iter<'a, PackedGreenElement>, @@ -184,12 +192,12 @@ impl<'a> Iterator for Children<'a> { } #[inline] - fn fold(mut self, init: Acc, mut f: Fold) -> Acc + fn fold(self, init: Acc, mut f: Fold) -> Acc where Fold: FnMut(Acc, Self::Item) -> Acc, { let mut accum = init; - while let Some(x) = self.next() { + for x in self { accum = f(accum, x); } accum diff --git a/src/syntax.rs b/src/syntax.rs index 56b4f92..1739a4a 100644 --- a/src/syntax.rs +++ b/src/syntax.rs @@ -50,9 +50,9 @@ impl SyntaxNode { for _ in 0..level { write!(res, " ").unwrap(); } - write!( + writeln!( res, - "{}\n", + "{}", match element { NodeOrToken::Node(node) => node.debug(resolver, false), NodeOrToken::Token(token) => token.debug(resolver), @@ -109,12 +109,12 @@ impl Drop for SyntaxNode { // `ref_count` let root = self.root(); let mut root = root.clone_uncounted(); - let ref_count = unsafe { Box::from_raw(root.data().ref_count) }; + let ref_count = root.data().ref_count; root.drop_recursive(); let root_data = root.data; drop(root); unsafe { drop(Box::from_raw(root_data)) }; - drop(ref_count); + unsafe { drop(Box::from_raw(ref_count)) }; } } } @@ -129,6 +129,7 @@ impl SyntaxNode { /// Caller must ensure that the access to the underlying data is unique (no active _mutable or immutable_ /// references). #[inline] + #[allow(clippy::mut_from_ref)] unsafe fn data_mut(&self) -> &mut NodeData { &mut *self.data } @@ -604,6 +605,7 @@ impl SyntaxNode { } #[inline] + #[allow(clippy::map_clone)] pub fn first_child(&self) -> Option<&SyntaxNode> { let (node, (index, offset)) = filter_nodes(self.green().children_from(0, self.text_range().start())).next()?; self.get_or_add_node(node, index, offset).as_node().map(|node| *node) @@ -616,6 +618,7 @@ impl SyntaxNode { } #[inline] + #[allow(clippy::map_clone)] pub fn last_child(&self) -> Option<&SyntaxNode> { let (node, (index, offset)) = filter_nodes( self.green() @@ -637,7 +640,7 @@ impl SyntaxNode { #[inline] pub fn next_child_after(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode> { let (node, (index, offset)) = filter_nodes(self.green().children_from(n + 1, offset)).next()?; - self.get_or_add_node(node, index, offset).as_node().map(|node| *node) + self.get_or_add_node(node, index, offset).as_node().copied() } #[inline] @@ -649,7 +652,7 @@ impl SyntaxNode { #[inline] pub fn prev_child_before(&self, n: usize, offset: TextSize) -> Option<&SyntaxNode> { let (node, (index, offset)) = filter_nodes(self.green().children_to(n, offset)).next()?; - self.get_or_add_node(node, index, offset).as_node().map(|node| *node) + self.get_or_add_node(node, index, offset).as_node().copied() } #[inline] @@ -668,7 +671,7 @@ impl SyntaxNode { .children_from((index + 1) as usize, self.text_range().end()), ) .next()?; - parent.get_or_add_node(node, index, offset).as_node().map(|node| *node) + parent.get_or_add_node(node, index, offset).as_node().copied() } #[inline] @@ -688,7 +691,7 @@ impl SyntaxNode { let (node, (index, offset)) = filter_nodes(parent.green().children_to(index as usize, self.text_range().start())).next()?; - parent.get_or_add_node(node, index, offset).as_node().map(|node| *node) + parent.get_or_add_node(node, index, offset).as_node().copied() } 
#[inline] @@ -869,7 +872,7 @@ where R: Resolver, { #[inline] - pub fn text<'n>(&'n self) -> SyntaxText<'n, 'n, R, L, D, R> { + pub fn text(&self) -> SyntaxText<'_, '_, R, L, D, R> { SyntaxText::new(self, self.resolver().as_ref()) } } diff --git a/src/syntax_text.rs b/src/syntax_text.rs index b3cc668..d1503ed 100644 --- a/src/syntax_text.rs +++ b/src/syntax_text.rs @@ -42,7 +42,6 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D } pub fn char_at(&self, offset: TextSize) -> Option { - let offset = offset.into(); let mut start: TextSize = 0.into(); let res = self.try_for_each_chunk(|chunk| { let end = start + TextSize::of(chunk); @@ -58,7 +57,7 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D pub fn slice(&self, range: Ra) -> Self { let start = range.start().unwrap_or_default(); - let end = range.end().unwrap_or(self.len()); + let end = range.end().unwrap_or_else(|| self.len()); assert!(start <= end); let len = end - start; let start = self.range.start() + start; @@ -98,7 +97,10 @@ impl<'n, 'i, I: Resolver + ?Sized, L: Language, D, R> SyntaxText<'n, 'i, I, L, D pub fn for_each_chunk(&self, mut f: F) { enum Void {} - match self.try_for_each_chunk(|chunk| Ok::<(), Void>(f(chunk))) { + match self.try_for_each_chunk(|chunk| { + f(chunk); + Ok::<(), Void>(()) + }) { Ok(()) => (), Err(void) => match void {}, } @@ -319,7 +321,7 @@ mod tests { let mut builder = GreenNodeBuilder::new(); builder.start_node(SyntaxKind(62)); for &chunk in chunks.iter() { - builder.token(SyntaxKind(92), chunk.into()) + builder.token(SyntaxKind(92), chunk); } builder.finish_node(); let (node, interner) = builder.finish(); @@ -336,7 +338,7 @@ mod tests { let expected = t1.to_string() == t2.to_string(); let actual = t1 == t2; assert_eq!(expected, actual, "`{}` (SyntaxText) `{}` (SyntaxText)", t1, t2); - let actual = t1 == &*t2.to_string(); + let actual = t1 == t2.to_string().as_str(); assert_eq!(expected, actual, "`{}` (SyntaxText) `{}` (&str)", t1, t2); } fn check(t1: &[&str], t2: &[&str]) { diff --git a/tests/basic.rs b/tests/basic.rs index b404b05..f9c2fdd 100644 --- a/tests/basic.rs +++ b/tests/basic.rs @@ -27,7 +27,7 @@ fn create() { assert_eq!(tree.syntax_kind(), SyntaxKind(0)); assert_eq!(tree.kind(), SyntaxKind(0)); { - let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().nth(0).unwrap(); + let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap(); let leaf1_0 = leaf1_0.into_token().unwrap(); assert_eq!(leaf1_0.syntax_kind(), SyntaxKind(5)); assert_eq!(leaf1_0.kind(), SyntaxKind(5)); @@ -86,7 +86,7 @@ fn with_interner() { let tree: SyntaxNode = SyntaxNode::new_root(tree); let resolver = interner; { - let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().nth(0).unwrap(); + let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap(); let leaf1_0 = leaf1_0.into_token().unwrap(); assert_eq!(leaf1_0.resolve_text(&resolver), "1.0"); assert_eq!(leaf1_0.text_range(), TextRange::at(6.into(), 3.into())); @@ -105,7 +105,7 @@ fn inline_resolver() { let tree = build_tree_with_cache(&tree, &mut cache); let tree: SyntaxNode<(), Rodeo> = SyntaxNode::new_root_with_resolver(tree, interner); { - let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().nth(0).unwrap(); + let leaf1_0 = tree.children().nth(1).unwrap().children_with_tokens().next().unwrap(); let leaf1_0 = leaf1_0.into_token().unwrap(); assert_eq!(leaf1_0.text(), "1.0"); assert_eq!(leaf1_0.text_range(), 
TextRange::at(6.into(), 3.into())); diff --git a/vendor/servo_arc/lib.rs b/vendor/servo_arc/lib.rs index 48942d3..743f9ee 100644 --- a/vendor/servo_arc/lib.rs +++ b/vendor/servo_arc/lib.rs @@ -22,30 +22,39 @@ // The semantics of Arc are alread documented in the Rust docs, so we don't // duplicate those here. #![allow(missing_docs)] +#![allow(clippy::all)] extern crate nodrop; -#[cfg(feature = "servo")] extern crate serde; +#[cfg(feature = "servo")] +extern crate serde; extern crate stable_deref_trait; use nodrop::NoDrop; #[cfg(feature = "servo")] use serde::{Deserialize, Serialize}; use stable_deref_trait::{CloneStableDeref, StableDeref}; -use std::{alloc::Layout, isize, mem::align_of_val, usize}; -use std::borrow; -use std::cmp::Ordering; -use std::convert::From; -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::iter::{ExactSizeIterator, Iterator}; -use std::mem; -use std::ops::{Deref, DerefMut}; -use std::os::raw::c_void; -use std::process; -use std::ptr; -use std::slice; -use std::sync::atomic; -use std::sync::atomic::Ordering::{Acquire, Relaxed, Release}; +use std::{ + alloc::Layout, + borrow, + cmp::Ordering, + convert::From, + fmt, + hash::{Hash, Hasher}, + isize, + iter::{ExactSizeIterator, Iterator}, + mem, + mem::align_of_val, + ops::{Deref, DerefMut}, + os::raw::c_void, + process, + ptr::{self, NonNull}, + slice, + sync::{ + atomic, + atomic::Ordering::{Acquire, Relaxed, Release}, + }, + usize, +}; /// Get the offset within an `ArcInner` for /// a payload of type described by a pointer. @@ -55,8 +64,8 @@ use std::sync::atomic::Ordering::{Acquire, Relaxed, Release}; /// This has the same safety requirements as `align_of_val_raw`. In effect: /// /// - This function is safe for any argument if `T` is sized, and -/// - if `T` is unsized, the pointer must have appropriate pointer metadata -/// acquired from the real instance that you are getting this offset for. +/// - if `T` is unsized, the pointer must have appropriate pointer metadata acquired from the real instance that you are +/// getting this offset for. unsafe fn data_offset(ptr: *const T) -> isize { // Align the unsized value to the end of the `ArcInner`. // Because it is `?Sized`, it will always be the last field in memory. @@ -84,65 +93,9 @@ fn padding_needed_for(layout: &Layout, align: usize) -> usize { /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references. const MAX_REFCOUNT: usize = (isize::MAX) as usize; -/// Wrapper type for pointers to get the non-zero optimization. When -/// NonZero/Shared/Unique are stabilized, we should just use Shared -/// here to get the same effect. Gankro is working on this in [1]. -/// -/// It's unfortunate that this needs to infect all the caller types -/// with 'static. It would be nice to just use a &() and a PhantomData -/// instead, but then the compiler can't determine whether the &() should -/// be thin or fat (which depends on whether or not T is sized). Given -/// that this is all a temporary hack, this restriction is fine for now. 
-/// -/// [1]: https://github.com/rust-lang/rust/issues/27730 -// FIXME: remove this and use std::ptr::NonNull when Firefox requires Rust 1.25+ -pub struct NonZeroPtrMut(&'static mut T); -impl NonZeroPtrMut { - pub fn new(ptr: *mut T) -> Self { - assert!(!(ptr as *mut u8).is_null()); - NonZeroPtrMut(unsafe { mem::transmute(ptr) }) - } - - pub fn ptr(&self) -> *mut T { - self.0 as *const T as *mut T - } -} - -impl Clone for NonZeroPtrMut { - fn clone(&self) -> Self { - NonZeroPtrMut::new(self.ptr()) - } -} - -impl fmt::Pointer for NonZeroPtrMut { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Pointer::fmt(&self.ptr(), f) - } -} - -impl fmt::Debug for NonZeroPtrMut { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - ::fmt(self, f) - } -} - -impl PartialEq for NonZeroPtrMut { - fn eq(&self, other: &Self) -> bool { - self.ptr() == other.ptr() - } -} - -impl Eq for NonZeroPtrMut {} - -impl Hash for NonZeroPtrMut { - fn hash(&self, state: &mut H) { - self.ptr().hash(state) - } -} - #[repr(C)] pub struct Arc { - p: NonZeroPtrMut>, + p: NonNull>, } /// An Arc that is known to be uniquely owned @@ -167,6 +120,7 @@ impl UniqueArc { impl Deref for UniqueArc { type Target = T; + fn deref(&self) -> &T { &*self.0 } @@ -185,7 +139,7 @@ unsafe impl Sync for Arc {} #[repr(C)] struct ArcInner { count: atomic::AtomicUsize, - data: T, + data: T, } unsafe impl Send for ArcInner {} @@ -196,9 +150,12 @@ impl Arc { pub fn new(data: T) -> Self { let x = Box::new(ArcInner { count: atomic::AtomicUsize::new(1), - data: data, + data, }); - Arc { p: NonZeroPtrMut::new(Box::into_raw(x)) } + Arc { + // safety: we created `x` + p: unsafe { NonNull::new_unchecked(Box::into_raw(x)) }, + } } #[inline] @@ -209,13 +166,13 @@ impl Arc { } #[inline] - pub unsafe fn from_raw(ptr: *const T) -> Self { + pub unsafe fn from_raw(ptr: *const T) -> Self { // To find the corresponding pointer to the `ArcInner` we need // to subtract the offset of the `data` field from the pointer. let offset = data_offset(ptr); let ptr = (ptr as *const u8).offset(-offset); Arc { - p: NonZeroPtrMut::new(ptr as *mut ArcInner), + p: NonNull::new_unchecked(ptr as *mut ArcInner), } } @@ -230,7 +187,8 @@ impl Arc { /// provided callback. The refcount is not modified. #[inline(always)] pub fn with_raw_offset_arc(&self, f: F) -> U - where F: FnOnce(&RawOffsetArc) -> U + where + F: FnOnce(&RawOffsetArc) -> U, { // Synthesize transient Arc, which never touches the refcount of the ArcInner. let transient = unsafe { NoDrop::new(Arc::into_raw_offset(ptr::read(self))) }; @@ -248,7 +206,7 @@ impl Arc { /// Returns the address on the heap of the Arc itself -- not the T within it -- for memory /// reporting. pub fn heap_ptr(&self) -> *const c_void { - self.p.ptr() as *const ArcInner as *const c_void + self.p.as_ptr() as *const ArcInner as *const c_void } } @@ -269,14 +227,13 @@ impl Arc { let _ = Box::from_raw(self.ptr()); } - #[inline] pub fn ptr_eq(this: &Self, other: &Self) -> bool { this.ptr() == other.ptr() } fn ptr(&self) -> *mut ArcInner { - self.p.ptr() + self.p.as_ptr() } } @@ -309,7 +266,12 @@ impl Clone for Arc { process::abort(); } - Arc { p: NonZeroPtrMut::new(self.ptr()) } + Arc { + // safety: as described above, as long as the original reference is alive, the + // allocation is valid. Since the allocation existed previously, the pointer to it is + // not null. 
+ p: unsafe { NonNull::new_unchecked(self.ptr()) }, + } } } @@ -497,8 +459,7 @@ unsafe impl StableDeref for Arc {} unsafe impl CloneStableDeref for Arc {} #[cfg(feature = "servo")] -impl<'de, T: Deserialize<'de>> Deserialize<'de> for Arc -{ +impl<'de, T: Deserialize<'de>> Deserialize<'de> for Arc { fn deserialize(deserializer: D) -> Result, D::Error> where D: ::serde::de::Deserializer<'de>, @@ -508,8 +469,7 @@ impl<'de, T: Deserialize<'de>> Deserialize<'de> for Arc } #[cfg(feature = "servo")] -impl Serialize for Arc -{ +impl Serialize for Arc { fn serialize(&self, serializer: S) -> Result where S: ::serde::ser::Serializer, @@ -539,35 +499,22 @@ impl Arc> { /// iterator to generate the slice. The resulting Arc will be fat. #[inline] pub fn from_header_and_iter(header: H, mut items: I) -> Self - where I: Iterator + ExactSizeIterator + where + I: Iterator + ExactSizeIterator, { - use ::std::mem::size_of; + use std::mem::size_of; assert_ne!(size_of::(), 0, "Need to think about ZST"); // Compute the required size for the allocation. let num_items = items.len(); let size = { - // First, determine the alignment of a hypothetical pointer to a - // HeaderSlice. - let fake_slice_ptr_align: usize = mem::align_of::>>(); - - // Next, synthesize a totally garbage (but properly aligned) pointer - // to a sequence of T. - let fake_slice_ptr = fake_slice_ptr_align as *const T; - - // Convert that sequence to a fat pointer. The address component of - // the fat pointer will be garbage, but the length will be correct. - let fake_slice = unsafe { slice::from_raw_parts(fake_slice_ptr, num_items) }; - - // Pretend the garbage address points to our allocation target (with - // a trailing sequence of T), rather than just a sequence of T. - let fake_ptr = fake_slice as *const [T] as *const ArcInner>; - let fake_ref: &ArcInner> = unsafe { &*fake_ptr }; - - // Use size_of_val, which will combine static information about the - // type with the length from the fat pointer. The garbage address - // will not be used. - mem::size_of_val(fake_ref) + let inner_layout = Layout::new::>>(); + let slice_layout = + Layout::array::(num_items).expect("arithmetic overflow when trying to create array layout"); + let slice_align = mem::align_of_val::<[T]>(&[]); + assert_eq!(slice_layout.align(), slice_align); + let padding = padding_needed_for(&inner_layout, slice_align); + inner_layout.size() + padding + slice_layout.size() }; let ptr: *mut ArcInner>; @@ -618,7 +565,10 @@ impl Arc> { // Return the fat Arc. assert_eq!(size_of::(), size_of::() * 2, "The Arc will be fat"); - Arc { p: NonZeroPtrMut::new(ptr) } + Arc { + // safety: we have just created the underlying allocation + p: unsafe { NonNull::new_unchecked(ptr) }, + } } #[inline] @@ -644,10 +594,7 @@ pub struct HeaderWithLength { impl HeaderWithLength { /// Creates a new HeaderWithLength. pub fn new(header: H, length: usize) -> Self { - HeaderWithLength { - header: header, - length: length, - } + HeaderWithLength { header, length } } } @@ -662,13 +609,11 @@ unsafe impl Sync for ThinArc {} // Synthesize a fat pointer from a thin pointer. // // See the comment around the analogous operation in from_header_and_iter. 
-fn thin_to_thick(thin: *mut ArcInner>) - -> *mut ArcInner> -{ +fn thin_to_thick( + thin: *mut ArcInner>, +) -> *mut ArcInner> { let len = unsafe { (*thin).data.header.length }; - let fake_slice: *mut [T] = unsafe { - slice::from_raw_parts_mut(thin as *mut T, len) - }; + let fake_slice: *mut [T] = unsafe { slice::from_raw_parts_mut(thin as *mut T, len) }; fake_slice as *mut ArcInner> } @@ -678,11 +623,13 @@ impl ThinArc { /// provided callback. The refcount is not modified. #[inline] pub fn with_arc(&self, f: F) -> U - where F: FnOnce(&Arc>) -> U + where + F: FnOnce(&Arc>) -> U, { // Synthesize transient Arc, which never touches the refcount of the ArcInner. let transient = NoDrop::new(Arc { - p: NonZeroPtrMut::new(thin_to_thick(self.ptr)) + // safety: the original thin arc guarantees the object was and is still alive + p: unsafe { NonNull::new_unchecked(thin_to_thick(self.ptr)) }, }); // Expose the transient Arc to the callback, which may clone it if it wants. @@ -733,13 +680,16 @@ impl Arc> { /// is not modified. #[inline] pub fn into_thin(a: Self) -> ThinArc { - assert_eq!(a.header.length, a.slice.len(), - "Length needs to be correct for ThinArc to work"); + assert_eq!( + a.header.length, + a.slice.len(), + "Length needs to be correct for ThinArc to work" + ); let fat_ptr: *mut ArcInner> = a.ptr(); mem::forget(a); let thin_ptr = fat_ptr as *mut [usize] as *mut usize; ThinArc { - ptr: thin_ptr as *mut ArcInner> + ptr: thin_ptr as *mut ArcInner>, } } @@ -750,7 +700,8 @@ impl Arc> { let ptr = thin_to_thick(a.ptr); mem::forget(a); Arc { - p: NonZeroPtrMut::new(ptr) + // safety: as above + p: unsafe { NonNull::new_unchecked(ptr) }, } } } @@ -758,11 +709,7 @@ impl Arc> { impl PartialEq for ThinArc { #[inline] fn eq(&self, other: &ThinArc) -> bool { - ThinArc::with_arc(self, |a| { - ThinArc::with_arc(other, |b| { - *a == *b - }) - }) + ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| *a == *b)) } } @@ -786,7 +733,7 @@ impl Eq for ThinArc {} #[derive(Eq)] #[repr(C)] pub struct RawOffsetArc { - ptr: NonZeroPtrMut, + ptr: NonNull, } unsafe impl Send for RawOffsetArc {} @@ -794,8 +741,9 @@ unsafe impl Sync for RawOffsetArc {} impl Deref for RawOffsetArc { type Target = T; + fn deref(&self) -> &Self::Target { - unsafe { &*self.ptr.ptr() } + unsafe { &*self.ptr.as_ptr() } } } @@ -812,7 +760,6 @@ impl Drop for RawOffsetArc { } } - impl fmt::Debug for RawOffsetArc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&**self, f) @@ -834,10 +781,11 @@ impl RawOffsetArc { /// provided callback. The refcount is not modified. #[inline] pub fn with_arc(&self, f: F) -> U - where F: FnOnce(&Arc) -> U + where + F: FnOnce(&Arc) -> U, { // Synthesize transient Arc, which never touches the refcount of the ArcInner. - let transient = unsafe { NoDrop::new(Arc::from_raw(self.ptr.ptr())) }; + let transient = unsafe { NoDrop::new(Arc::from_raw(self.ptr.as_ptr())) }; // Expose the transient Arc to the callback, which may clone it if it wants. 
let result = f(&transient); @@ -854,7 +802,10 @@ impl RawOffsetArc { /// If uniquely owned, provide a mutable reference /// Else create a copy, and mutate that #[inline] - pub fn make_mut(&mut self) -> &mut T where T: Clone { + pub fn make_mut(&mut self) -> &mut T + where + T: Clone, + { unsafe { // extract the RawOffsetArc as an owned variable let this = ptr::read(self); @@ -890,7 +841,8 @@ impl Arc { #[inline] pub fn into_raw_offset(a: Self) -> RawOffsetArc { RawOffsetArc { - ptr: NonZeroPtrMut::new(Arc::into_raw(a) as *mut T), + // safety: as above + ptr: unsafe { NonNull::new_unchecked(Arc::into_raw(a) as *mut T) }, } } @@ -898,7 +850,7 @@ impl Arc { /// is not modified. #[inline] pub fn from_raw_offset(a: RawOffsetArc) -> Self { - let ptr = a.ptr.ptr(); + let ptr = a.ptr.as_ptr(); mem::forget(a); unsafe { Arc::from_raw(ptr) } } @@ -946,7 +898,11 @@ impl<'a, T> ArcBorrow<'a, T> { } #[inline] - pub fn with_arc(&self, f: F) -> U where F: FnOnce(&Arc) -> U, T: 'static { + pub fn with_arc(&self, f: F) -> U + where + F: FnOnce(&Arc) -> U, + T: 'static, + { // Synthesize transient Arc, which never touches the refcount. let transient = unsafe { NoDrop::new(Arc::from_raw(self.0)) }; @@ -974,18 +930,24 @@ impl<'a, T> Deref for ArcBorrow<'a, T> { #[cfg(test)] mod tests { - use std::clone::Clone; - use std::ops::Drop; - use std::sync::atomic; - use std::sync::atomic::Ordering::{Acquire, SeqCst}; use super::{Arc, HeaderWithLength, ThinArc}; + use std::{ + clone::Clone, + ops::Drop, + sync::{ + atomic, + atomic::Ordering::{Acquire, SeqCst}, + }, + }; #[derive(PartialEq)] struct Canary(*mut atomic::AtomicUsize); impl Drop for Canary { fn drop(&mut self) { - unsafe { (*self.0).fetch_add(1, SeqCst); } + unsafe { + (*self.0).fetch_add(1, SeqCst); + } } }
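
The most intricate change in `vendor/servo_arc/lib.rs` is the allocation-size computation in `from_header_and_iter`: instead of synthesizing a garbage fat pointer and calling `size_of_val` on it, the patch derives the size from `Layout::new`, `Layout::array`, and the existing `padding_needed_for` helper. The sketch below shows the same idea on a simplified header-plus-slice shape; `Header`, `Item`, and `allocation_size` are illustrative names rather than the crate's `ArcInner`/`HeaderSlice` types, and `Layout::extend` stands in for the manual padding arithmetic (it yields the same value: header size, plus padding up to the slice's alignment, plus the slice size).

```rust
use std::alloc::Layout;

// Stand-in for the fixed-size prefix (refcount + length header) that
// precedes the trailing slice in the real ArcInner<HeaderSlice<_, [T]>>.
#[allow(dead_code)]
#[repr(C)]
struct Header {
    refcount: usize,
    len: usize,
}

// Size of an allocation holding a `Header` followed directly by
// `num_items` values of type `Item`.
fn allocation_size<Item>(num_items: usize) -> usize {
    let header = Layout::new::<Header>();
    let slice = Layout::array::<Item>(num_items).expect("layout overflow");
    // `extend` appends the slice after the header, inserting exactly the
    // padding the patch computes by hand with `padding_needed_for`.
    let (combined, _slice_offset) = header.extend(slice).expect("layout overflow");
    combined.size()
}

fn main() {
    // For u32 items the slice starts right after the header, so the total
    // is just the header size plus the items.
    assert_eq!(
        allocation_size::<u32>(3),
        std::mem::size_of::<Header>() + 3 * std::mem::size_of::<u32>()
    );
    println!("ok");
}
```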
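The other recurring change in the vendored `servo_arc` is replacing the hand-rolled `NonZeroPtrMut<T>` wrapper with `std::ptr::NonNull<ArcInner<T>>`, constructed via `unsafe { NonNull::new_unchecked(...) }` only where the pointer is known to come from a live allocation (a fresh `Box::into_raw`, or a pointer reachable through an existing `Arc`). A minimal sketch of the pattern, using a hypothetical `Inner`/`MyArc` pair rather than the crate's real types: `NonNull` preserves the null-pointer niche, so `Option<MyArc<T>>` stays pointer-sized, which is the property the old wrapper existed to provide, while also giving covariance and `as_ptr`/`as_ref` accessors for free.

```rust
use std::{mem::size_of, ptr::NonNull};

// Hypothetical stand-ins for servo_arc's ArcInner<T> / Arc<T>.
struct Inner<T> {
    #[allow(dead_code)]
    refcount: usize,
    data: T,
}

struct MyArc<T> {
    // NonNull replaces the old NonZeroPtrMut<Inner<T>> wrapper.
    ptr: NonNull<Inner<T>>,
}

impl<T> MyArc<T> {
    fn new(data: T) -> Self {
        let boxed = Box::new(Inner { refcount: 1, data });
        MyArc {
            // Safety: Box::into_raw never returns null.
            ptr: unsafe { NonNull::new_unchecked(Box::into_raw(boxed)) },
        }
    }

    fn get(&self) -> &T {
        // Safety: the allocation lives at least as long as `self`.
        unsafe { &self.ptr.as_ref().data }
    }
}

impl<T> Drop for MyArc<T> {
    fn drop(&mut self) {
        // Single-owner toy version; the real Arc decrements the refcount first.
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())) };
    }
}

fn main() {
    let a = MyArc::new(42u32);
    assert_eq!(*a.get(), 42);
    // The null niche from NonNull lets Option<MyArc<T>> stay pointer-sized.
    assert_eq!(size_of::<Option<MyArc<u32>>>(), size_of::<MyArc<u32>>());
}
```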