diff --git a/src/boxed.rs b/src/boxed.rs
new file mode 100644
index 00000000..2eec7c38
--- /dev/null
+++ b/src/boxed.rs
@@ -0,0 +1,208 @@
+//! A pointer type for heap allocation.
+
+use crate::alloc;
+use std::{borrow, fmt, hash, mem, ptr};
+
+/// A loom version of `std::boxed::Box` based on the leak tracking in `loom::alloc`.
+///
+/// # Known problems
+///
+/// This `Box` version can't be dereferenced in order to take the value
+/// from the heap and bring it back to the stack. This is because `std::boxed::Box`
+/// is tightly integrated with the compiler and uses magic that normal crates can't.
+/// This version instead provides [`Box::into_value`] which does the same thing.
+pub struct Box<T: ?Sized> {
+    ptr: *mut T,
+}
+
+impl<T> Box<T> {
+    /// Allocates memory on the heap and then places `x` into it.
+    pub fn new(x: T) -> Self {
+        let layout = alloc::Layout::new::<T>();
+        let ptr = unsafe { alloc::alloc(layout) } as *mut T;
+        unsafe { ptr::write(ptr, x) };
+        Self { ptr }
+    }
+
+    /// Consumes the box and returns the value in it.
+    /// This is a workaround. The standard library `Box` does not have this. Instead
+    /// a standard box can be dereferenced like `*std_box` to get the `T`. This can't be
+    /// implemented outside of the standard library due to magic, so we need this workaround.
+    ///
+    /// In order to transparently switch between using loom and the standard library, consider
+    /// introducing a function like this in your code and use it instead of directly
+    /// dereferencing `Box`es:
+    /// ```rust
+    /// fn take<T>(b: Box<T>) -> T {
+    ///     #[cfg(not(loom))]
+    ///     {
+    ///         *b
+    ///     }
+    ///     #[cfg(loom)]
+    ///     {
+    ///         b.into_value()
+    ///     }
+    /// }
+    /// ```
+    pub fn into_value(self) -> T {
+        let value = unsafe { ptr::read(self.ptr) };
+        let layout = alloc::Layout::new::<T>();
+        unsafe { alloc::dealloc(self.ptr as *mut u8, layout) };
+        mem::forget(self);
+        value
+    }
+}
+
+impl<T: ?Sized> Box<T> {
+    /// Constructs a box from a raw pointer.
+    ///
+    /// After calling this function, the raw pointer is owned by the resulting Box. Specifically,
+    /// the Box destructor will call the destructor of T and free the allocated memory.
+    /// For this to be safe, the memory must have been allocated in accordance with the memory
+    /// layout used by Box.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe because improper use may lead to memory problems. For example,
+    /// a double-free may occur if the function is called twice on the same raw pointer.
+    #[inline]
+    pub const unsafe fn from_raw(ptr: *mut T) -> Box<T> {
+        Self { ptr }
+    }
+
+    /// Consumes the Box, returning a wrapped raw pointer.
+    ///
+    /// The pointer will be properly aligned and non-null.
+    ///
+    /// After calling this function, the caller is responsible for the memory previously
+    /// managed by the Box.
+    #[inline]
+    pub fn into_raw(b: Box<T>) -> *mut T {
+        let ptr = b.ptr;
+        mem::forget(b);
+        ptr
+    }
+}
+
+impl<T: ?Sized> Drop for Box<T> {
+    fn drop(&mut self) {
+        unsafe {
+            let size = mem::size_of_val(&*self.ptr);
+            let align = mem::align_of_val(&*self.ptr);
+            let layout = alloc::Layout::from_size_align(size, align).unwrap();
+            ptr::drop_in_place(self.ptr);
+            alloc::dealloc(self.ptr as *mut u8, layout);
+        }
+    }
+}
+
+unsafe impl<T: Send + ?Sized> Send for Box<T> {}
+unsafe impl<T: Sync + ?Sized> Sync for Box<T> {}
+
+impl<T: ?Sized> std::ops::Deref for Box<T> {
+    type Target = T;
+
+    fn deref(&self) -> &T {
+        unsafe { &*self.ptr }
+    }
+}
+
+impl<T: ?Sized> std::ops::DerefMut for Box<T> {
+    fn deref_mut(&mut self) -> &mut T {
+        unsafe { &mut *self.ptr }
+    }
+}
+
+impl<T: ?Sized> borrow::Borrow<T> for Box<T> {
+    fn borrow(&self) -> &T {
+        &**self
+    }
+}
+
+impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> {
+    fn borrow_mut(&mut self) -> &mut T {
+        &mut **self
+    }
+}
+
+impl<T: ?Sized> AsRef<T> for Box<T> {
+    fn as_ref(&self) -> &T {
+        &**self
+    }
+}
+
+impl<T: ?Sized> AsMut<T> for Box<T> {
+    fn as_mut(&mut self) -> &mut T {
+        &mut **self
+    }
+}
+
+impl<T: fmt::Display + ?Sized> fmt::Display for Box<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&**self, f)
+    }
+}
+
+impl<T: fmt::Debug + ?Sized> fmt::Debug for Box<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+impl<T: Clone> Clone for Box<T> {
+    #[inline]
+    fn clone(&self) -> Box<T> {
+        Self::new(self.as_ref().clone())
+    }
+}
+
+impl<T: PartialEq + ?Sized> PartialEq for Box<T> {
+    #[inline]
+    fn eq(&self, other: &Box<T>) -> bool {
+        PartialEq::eq(&**self, &**other)
+    }
+
+    #[allow(clippy::partialeq_ne_impl)]
+    #[inline]
+    fn ne(&self, other: &Box<T>) -> bool {
+        PartialEq::ne(&**self, &**other)
+    }
+}
+
+impl<T: Eq + ?Sized> Eq for Box<T> {}
+
+impl<T: PartialOrd + ?Sized> PartialOrd for Box<T> {
+    #[inline]
+    fn partial_cmp(&self, other: &Box<T>) -> Option<core::cmp::Ordering> {
+        PartialOrd::partial_cmp(&**self, &**other)
+    }
+    #[inline]
+    fn lt(&self, other: &Box<T>) -> bool {
+        PartialOrd::lt(&**self, &**other)
+    }
+    #[inline]
+    fn le(&self, other: &Box<T>) -> bool {
+        PartialOrd::le(&**self, &**other)
+    }
+    #[inline]
+    fn ge(&self, other: &Box<T>) -> bool {
+        PartialOrd::ge(&**self, &**other)
+    }
+    #[inline]
+    fn gt(&self, other: &Box<T>) -> bool {
+        PartialOrd::gt(&**self, &**other)
+    }
+}
+
+impl<T: Ord + ?Sized> Ord for Box<T> {
+    #[inline]
+    fn cmp(&self, other: &Box<T>) -> core::cmp::Ordering {
+        Ord::cmp(&**self, &**other)
+    }
+}
+
+impl<T: hash::Hash + ?Sized> hash::Hash for Box<T> {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) {
+        (**self).hash(state);
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
index 6e9fbb0c..68515175 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -149,6 +149,7 @@ macro_rules! dbg {
 mod rt;
 
 pub mod alloc;
+pub mod boxed;
 pub mod cell;
 pub mod lazy_static;
 pub mod model;
diff --git a/tests/boxed.rs b/tests/boxed.rs
new file mode 100644
index 00000000..ec8bf03f
--- /dev/null
+++ b/tests/boxed.rs
@@ -0,0 +1,85 @@
+#![deny(warnings, rust_2018_idioms)]
+
+use loom::boxed::Box;
+use loom::sync::atomic::{AtomicU8, Ordering};
+use loom::sync::Arc;
+use std::mem;
+
+#[derive(Clone)]
+struct DetectDrop(Arc<AtomicU8>);
+
+impl DetectDrop {
+    pub fn new() -> (Self, Arc<AtomicU8>) {
+        let drop_count = Arc::new(AtomicU8::new(0));
+        (Self(drop_count.clone()), drop_count)
+    }
+}
+
+impl Drop for DetectDrop {
+    fn drop(&mut self) {
+        self.0.fetch_add(1, Ordering::SeqCst);
+    }
+}
+
+#[test]
+fn allocate_and_drop() {
+    loom::model(|| {
+        let (detect_drop, drop_count) = DetectDrop::new();
+        let detect_drop = Box::new(detect_drop);
+        assert_eq!(drop_count.load(Ordering::SeqCst), 0);
+        mem::drop(detect_drop);
+        assert_eq!(drop_count.load(Ordering::SeqCst), 1);
+    });
+}
+
+#[test]
+fn allocate_via_raw_and_drop() {
+    loom::model(|| {
+        let (detect_drop, drop_count) = DetectDrop::new();
+        let detect_drop = Box::new(detect_drop);
+        assert_eq!(drop_count.load(Ordering::SeqCst), 0);
+        let detect_drop_ptr: *mut DetectDrop = Box::into_raw(detect_drop);
+        assert_eq!(drop_count.load(Ordering::SeqCst), 0);
+        let detect_drop = unsafe { Box::from_raw(detect_drop_ptr) };
+        assert_eq!(drop_count.load(Ordering::SeqCst), 0);
+        mem::drop(detect_drop);
+        assert_eq!(drop_count.load(Ordering::SeqCst), 1);
+    });
+}
+
+#[test]
+fn into_value() {
+    loom::model(|| {
+        let (detect_drop, drop_count) = DetectDrop::new();
+        let detect_drop = Box::new(detect_drop);
+        let value: DetectDrop = detect_drop.into_value();
+        assert_eq!(drop_count.load(Ordering::SeqCst), 0);
+        mem::drop(value);
+        assert_eq!(drop_count.load(Ordering::SeqCst), 1);
+    });
+}
+
+#[test]
+fn clone() {
+    loom::model(|| {
+        let (detect_drop, drop_count) = DetectDrop::new();
+        let detect_drop = Box::new(detect_drop);
+        let detect_drop2: Box<DetectDrop> = detect_drop.clone();
+        assert_eq!(drop_count.load(Ordering::SeqCst), 0);
+        mem::drop(detect_drop);
+        assert_eq!(drop_count.load(Ordering::SeqCst), 1);
+        mem::drop(detect_drop2);
+        assert_eq!(drop_count.load(Ordering::SeqCst), 2);
+    });
+}
+
+#[test]
+#[should_panic]
+#[ignore]
+fn allocate_and_leak() {
+    loom::model(|| {
+        let (detect_drop, _detect_drop) = DetectDrop::new();
+        let detect_drop = Box::new(detect_drop);
+        Box::into_raw(detect_drop);
+    });
+}