diff --git a/src/array_buffer.rs b/src/array_buffer.rs
index b6be735156..60ba693814 100644
--- a/src/array_buffer.rs
+++ b/src/array_buffer.rs
@@ -253,26 +253,66 @@ pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
 );
 
 pub(crate) mod sealed {
-  pub trait Rawable<T: ?Sized> {
+  pub trait Rawable {
+    fn byte_len(&mut self) -> usize;
     fn into_raw(self) -> (*const (), *const u8);
     unsafe fn drop_raw(ptr: *const (), size: usize);
   }
 }
 
-impl sealed::Rawable<[u8]> for Vec<u8> {
-  unsafe fn drop_raw(ptr: *const (), size: usize) {
-    <Box<[u8]> as sealed::Rawable<[u8]>>::drop_raw(ptr, size);
-  }
+macro_rules! rawable {
+  ($ty:ty) => {
+    impl sealed::Rawable for Box<[$ty]> {
+      fn byte_len(&mut self) -> usize {
+        self.as_mut().len() * std::mem::size_of::<$ty>()
+      }
+
+      fn into_raw(mut self) -> (*const (), *const u8) {
+        // Thin the fat pointer
+        let ptr = self.as_mut_ptr();
+        std::mem::forget(self);
+        (ptr as _, ptr as _)
+      }
+
+      unsafe fn drop_raw(ptr: *const (), len: usize) {
+        // Fatten the thin pointer
+        _ = Self::from_raw(std::ptr::slice_from_raw_parts_mut(ptr as _, len));
+      }
+    }
 
-  fn into_raw(self) -> (*const (), *const u8) {
-    self.into_boxed_slice().into_raw()
-  }
+    impl sealed::Rawable for Vec<$ty> {
+      fn byte_len(&mut self) -> usize {
+        Vec::<$ty>::len(self) * std::mem::size_of::<$ty>()
+      }
+
+      unsafe fn drop_raw(ptr: *const (), size: usize) {
+        <Box<[$ty]> as sealed::Rawable>::drop_raw(ptr, size);
+      }
+
+      fn into_raw(self) -> (*const (), *const u8) {
+        self.into_boxed_slice().into_raw()
+      }
+    }
+  };
 }
 
-impl<T> sealed::Rawable<T> for Box<T>
+rawable!(u8);
+rawable!(u16);
+rawable!(u32);
+rawable!(u64);
+rawable!(i8);
+rawable!(i16);
+rawable!(i32);
+rawable!(i64);
+
+impl<T> sealed::Rawable for Box<T>
 where
   T: AsMut<[u8]>,
 {
+  fn byte_len(&mut self) -> usize {
+    self.as_mut().as_mut().len()
+  }
+
   fn into_raw(mut self) -> (*const (), *const u8) {
     let data = self.as_mut().as_mut().as_mut_ptr();
     let ptr = Self::into_raw(self);
@@ -284,20 +324,6 @@ where
   }
 }
 
-impl sealed::Rawable<[u8]> for Box<[u8]> {
-  fn into_raw(mut self) -> (*const (), *const u8) {
-    // Thin the fat pointer
-    let ptr = self.as_mut_ptr();
-    std::mem::forget(self);
-    (ptr as _, ptr)
-  }
-
-  unsafe fn drop_raw(ptr: *const (), len: usize) {
-    // Fatten the thin pointer
-    _ = Self::from_raw(std::ptr::slice_from_raw_parts_mut(ptr as _, len));
-  }
-}
-
 /// A wrapper around the backing store (i.e. the raw memory) of an array buffer.
 /// See a document linked in http://crbug.com/v8/9908 for more information.
 ///
@@ -565,16 +591,13 @@ impl ArrayBuffer {
   /// let backing_store = v8::ArrayBuffer::new_backing_store_from_bytes(Box::new(bytes::BytesMut::new()));
   /// ```
   #[inline(always)]
-  pub fn new_backing_store_from_bytes<T, U>(
+  pub fn new_backing_store_from_bytes<T>(
     mut bytes: T,
   ) -> UniqueRef<BackingStore>
   where
-    U: ?Sized,
-    U: AsMut<[u8]>,
-    T: AsMut<U>,
-    T: sealed::Rawable<U>,
+    T: sealed::Rawable,
   {
-    let len = bytes.as_mut().as_mut().len();
+    let len = bytes.byte_len();
     if len == 0 {
       return unsafe {
         UniqueRef::from_raw(v8__BackingStore__EmptyBackingStore(false))
@@ -583,13 +606,13 @@ impl ArrayBuffer {
 
     let (ptr, slice) = T::into_raw(bytes);
 
-    extern "C" fn drop_rawable<T: sealed::Rawable<U>, U: ?Sized>(
+    extern "C" fn drop_rawable<T: sealed::Rawable>(
       _ptr: *mut c_void,
       len: usize,
       data: *mut c_void,
     ) {
       // SAFETY: We know that data is a raw T from above
-      unsafe { <T as sealed::Rawable<U>>::drop_raw(data as _, len) }
+      unsafe { T::drop_raw(data as _, len) }
     }
 
     // SAFETY: We are extending the lifetime of a slice, but we're locking away the box that we
@@ -598,7 +621,7 @@ impl ArrayBuffer {
       Self::new_backing_store_from_ptr(
         slice as _,
         len,
-        drop_rawable::<T, U>,
+        drop_rawable::<T>,
         ptr as _,
       )
     }
diff --git a/src/shared_array_buffer.rs b/src/shared_array_buffer.rs
index 17f26dbfc4..529ddaf9e0 100644
--- a/src/shared_array_buffer.rs
+++ b/src/shared_array_buffer.rs
@@ -166,16 +166,13 @@ impl SharedArrayBuffer {
   /// let backing_store = v8::ArrayBuffer::new_backing_store_from_bytes(Box::new(bytes::BytesMut::new()));
   /// ```
   #[inline(always)]
-  pub fn new_backing_store_from_bytes<T, U>(
+  pub fn new_backing_store_from_bytes<T>(
     mut bytes: T,
   ) -> UniqueRef<BackingStore>
   where
-    U: ?Sized,
-    U: AsMut<[u8]>,
-    T: AsMut<U>,
-    T: crate::array_buffer::sealed::Rawable<U>,
+    T: crate::array_buffer::sealed::Rawable,
   {
-    let len = bytes.as_mut().as_mut().len();
+    let len = bytes.byte_len();
     if len == 0 {
       return unsafe {
         UniqueRef::from_raw(v8__BackingStore__EmptyBackingStore(false))
@@ -184,17 +181,14 @@ impl SharedArrayBuffer {
 
     let (ptr, slice) = T::into_raw(bytes);
 
-    extern "C" fn drop_rawable<
-      T: crate::array_buffer::sealed::Rawable<U>,
-      U: ?Sized,
-    >(
+    extern "C" fn drop_rawable<T: crate::array_buffer::sealed::Rawable>(
       _ptr: *mut c_void,
       len: usize,
       data: *mut c_void,
     ) {
       // SAFETY: We know that data is a raw T from above
       unsafe {
-        <T as crate::array_buffer::sealed::Rawable<U>>::drop_raw(data as _, len)
+        <T as crate::array_buffer::sealed::Rawable>::drop_raw(data as _, len)
       }
     }
 
@@ -204,7 +198,7 @@ impl SharedArrayBuffer {
       Self::new_backing_store_from_ptr(
         slice as _,
         len,
-        drop_rawable::<T, U>,
+        drop_rawable::<T>,
         ptr as _,
       )
     }
diff --git a/tests/test_api.rs b/tests/test_api.rs
index 20f6c22236..d8c47caadf 100644
--- a/tests/test_api.rs
+++ b/tests/test_api.rs
@@ -862,13 +862,14 @@ fn array_buffer() {
   // Empty but from vec
   let ab = v8::ArrayBuffer::with_backing_store(
     scope,
-    &v8::ArrayBuffer::new_backing_store_from_bytes(vec![]).make_shared(),
+    &v8::ArrayBuffer::new_backing_store_from_bytes(Vec::<u8>::new())
+      .make_shared(),
   );
   assert_eq!(0, ab.byte_length());
   assert!(!ab.get_backing_store().is_shared());
 
   // Empty but from vec with a huge capacity
-  let mut v = Vec::with_capacity(10_000_000);
+  let mut v: Vec<u8> = Vec::with_capacity(10_000_000);
   v.extend_from_slice(&[1, 2, 3, 4]);
   let ab = v8::ArrayBuffer::with_backing_store(
     scope,
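With `byte_len` on the sealed `Rawable` trait and the `rawable!` impls above, `new_backing_store_from_bytes` accepts owned buffers of any of the listed element types, and the resulting backing store is sized in bytes rather than elements. Below is a minimal sketch of calling the widened API; the one-time V8 setup mirrors the crate's test harness, and the element types and assertions are illustrative, not taken from this diff.

```rust
fn main() {
  // One-time V8 initialization, as in the crate's tests.
  let platform = v8::new_default_platform(0, false).make_shared();
  v8::V8::initialize_platform(platform);
  v8::V8::initialize();

  // Vec<u16> now satisfies `Rawable` directly; the backing store reports its
  // length in bytes, i.e. element count * size_of::<u16>().
  let store = v8::ArrayBuffer::new_backing_store_from_bytes(vec![1u16, 2, 3]);
  assert_eq!(store.byte_length(), 3 * std::mem::size_of::<u16>());

  // Boxed slices of the other rawable!() element types behave the same way.
  let boxed: Box<[i64]> = vec![0i64; 4].into_boxed_slice();
  let store = v8::ArrayBuffer::new_backing_store_from_bytes(boxed);
  assert_eq!(store.byte_length(), 4 * std::mem::size_of::<i64>());

  // The pre-existing Box<T: AsMut<[u8]>> path is unchanged.
  let store = v8::ArrayBuffer::new_backing_store_from_bytes(Box::new([0u8; 8]));
  assert_eq!(store.byte_length(), 8);
}
```

Note that the test change above from `vec![]` to `Vec::<u8>::new()` follows from the same widening: with `Rawable` now implemented for several `Vec<T>` element types, an unannotated empty `vec![]` no longer gives inference a unique type to pick.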