Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

implement Rawable for all TypedArray types #1679

Merged
merged 1 commit into from
Jan 6, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 55 additions & 32 deletions src/array_buffer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -253,26 +253,66 @@ pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
);

pub(crate) mod sealed {
pub trait Rawable<T: ?Sized> {
pub trait Rawable {
fn byte_len(&mut self) -> usize;
fn into_raw(self) -> (*const (), *const u8);
unsafe fn drop_raw(ptr: *const (), size: usize);
}
}

impl sealed::Rawable<[u8]> for Vec<u8> {
unsafe fn drop_raw(ptr: *const (), size: usize) {
<Box<[u8]> as sealed::Rawable<[u8]>>::drop_raw(ptr, size);
}
macro_rules! rawable {
($ty:ty) => {
// Sealed `Rawable` impl for `Box<[$ty]>`: reports the slice's size in
// bytes, decomposes the box into a thin raw pointer for handoff to the
// V8 backing store, and reconstitutes it for dropping later.
impl sealed::Rawable for Box<[$ty]> {
  // Total size in bytes: element count times element size.
  fn byte_len(&mut self) -> usize {
    self.as_mut().len() * std::mem::size_of::<$ty>()
  }

  fn into_raw(mut self) -> (*const (), *const u8) {
    // Thin the fat pointer; `forget` leaks the box so the allocation
    // stays alive until `drop_raw` reclaims it.
    let ptr = self.as_mut_ptr();
    std::mem::forget(self);
    (ptr as _, ptr as _)
  }

  unsafe fn drop_raw(ptr: *const (), len: usize) {
    // `len` is the BYTE length originally reported by `byte_len` (the
    // deleter callback in `new_backing_store_from_bytes` forwards it
    // unchanged). Convert back to an element count before fattening the
    // thin pointer: passing the byte count straight through would
    // reconstruct an oversized slice for multi-byte element types and
    // deallocate with the wrong layout (undefined behavior).
    _ = Self::from_raw(std::ptr::slice_from_raw_parts_mut(
      ptr as *mut $ty,
      len / std::mem::size_of::<$ty>(),
    ));
  }
}

fn into_raw(self) -> (*const (), *const u8) {
self.into_boxed_slice().into_raw()
}
// Sealed `Rawable` impl for `Vec<$ty>`: delegates to the `Box<[$ty]>`
// implementation by shedding spare capacity first.
impl sealed::Rawable for Vec<$ty> {
// Length in bytes of the initialized elements. Spare capacity is not
// counted; `into_raw` discards it via `into_boxed_slice`.
fn byte_len(&mut self) -> usize {
Vec::<$ty>::len(self) * std::mem::size_of::<$ty>()
}

// SAFETY contract: `ptr`/`size` must originate from `into_raw` below.
// NOTE(review): `size` is forwarded unchanged to the boxed-slice impl,
// which hands it to `slice_from_raw_parts_mut` as an element count,
// while callers supply `byte_len()` bytes — confirm these agree for
// element types wider than one byte.
unsafe fn drop_raw(ptr: *const (), size: usize) {
<Box<[$ty]> as sealed::Rawable>::drop_raw(ptr, size);
}

// Convert to a boxed slice (dropping excess capacity) and reuse its
// pointer-thinning logic.
fn into_raw(self) -> (*const (), *const u8) {
self.into_boxed_slice().into_raw()
}
}
};
}

impl<T: Sized> sealed::Rawable<T> for Box<T>
// Instantiate `Rawable` for every JavaScript TypedArray element type:
// the unsigned/signed integers of 8..64 bits plus the IEEE-754 floats
// (Float32Array / Float64Array). The float types were missing even
// though this change is meant to cover all TypedArray types.
rawable!(u8);
rawable!(u16);
rawable!(u32);
rawable!(u64);
rawable!(i8);
rawable!(i16);
rawable!(i32);
rawable!(i64);
rawable!(f32);
rawable!(f64);

impl<T: Sized> sealed::Rawable for Box<T>
where
T: AsMut<[u8]>,
{
fn byte_len(&mut self) -> usize {
self.as_mut().as_mut().len()
}

fn into_raw(mut self) -> (*const (), *const u8) {
let data = self.as_mut().as_mut().as_mut_ptr();
let ptr = Self::into_raw(self);
Expand All @@ -284,20 +324,6 @@ where
}
}

// Legacy sealed `Rawable<[u8]>` impl specialized to `Box<[u8]>`.
// NOTE(review): this block appears in the diff as removed code,
// superseded by the generic `rawable!` macro above.
impl sealed::Rawable<[u8]> for Box<[u8]> {
// Decompose into a thin pointer, leaking the box; `drop_raw` reclaims
// the allocation later.
fn into_raw(mut self) -> (*const (), *const u8) {
// Thin the fat pointer
let ptr = self.as_mut_ptr();
std::mem::forget(self);
(ptr as _, ptr)
}

// SAFETY contract: `ptr`/`len` must originate from `into_raw` above.
// `len` is the byte count, which for `u8` equals the element count.
unsafe fn drop_raw(ptr: *const (), len: usize) {
// Fatten the thin pointer
_ = Self::from_raw(std::ptr::slice_from_raw_parts_mut(ptr as _, len));
}
}

/// A wrapper around the backing store (i.e. the raw memory) of an array buffer.
/// See a document linked in http://crbug.com/v8/9908 for more information.
///
Expand Down Expand Up @@ -565,16 +591,13 @@ impl ArrayBuffer {
/// let backing_store = v8::ArrayBuffer::new_backing_store_from_bytes(Box::new(bytes::BytesMut::new()));
/// ```
#[inline(always)]
pub fn new_backing_store_from_bytes<T, U>(
pub fn new_backing_store_from_bytes<T>(
mut bytes: T,
) -> UniqueRef<BackingStore>
where
U: ?Sized,
U: AsMut<[u8]>,
T: AsMut<U>,
T: sealed::Rawable<U>,
T: sealed::Rawable,
{
let len = bytes.as_mut().as_mut().len();
let len = bytes.byte_len();
if len == 0 {
return unsafe {
UniqueRef::from_raw(v8__BackingStore__EmptyBackingStore(false))
Expand All @@ -583,13 +606,13 @@ impl ArrayBuffer {

let (ptr, slice) = T::into_raw(bytes);

extern "C" fn drop_rawable<T: sealed::Rawable<U>, U: ?Sized>(
extern "C" fn drop_rawable<T: sealed::Rawable>(
_ptr: *mut c_void,
len: usize,
data: *mut c_void,
) {
// SAFETY: We know that data is a raw T from above
unsafe { <T as sealed::Rawable<U>>::drop_raw(data as _, len) }
unsafe { T::drop_raw(data as _, len) }
}

// SAFETY: We are extending the lifetime of a slice, but we're locking away the box that we
Expand All @@ -598,7 +621,7 @@ impl ArrayBuffer {
Self::new_backing_store_from_ptr(
slice as _,
len,
drop_rawable::<T, U>,
drop_rawable::<T>,
ptr as _,
)
}
Expand Down
18 changes: 6 additions & 12 deletions src/shared_array_buffer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -166,16 +166,13 @@ impl SharedArrayBuffer {
/// let backing_store = v8::ArrayBuffer::new_backing_store_from_bytes(Box::new(bytes::BytesMut::new()));
/// ```
#[inline(always)]
pub fn new_backing_store_from_bytes<T, U>(
pub fn new_backing_store_from_bytes<T>(
mut bytes: T,
) -> UniqueRef<BackingStore>
where
U: ?Sized,
U: AsMut<[u8]>,
T: AsMut<U>,
T: crate::array_buffer::sealed::Rawable<U>,
T: crate::array_buffer::sealed::Rawable,
{
let len = bytes.as_mut().as_mut().len();
let len = bytes.byte_len();
if len == 0 {
return unsafe {
UniqueRef::from_raw(v8__BackingStore__EmptyBackingStore(false))
Expand All @@ -184,17 +181,14 @@ impl SharedArrayBuffer {

let (ptr, slice) = T::into_raw(bytes);

extern "C" fn drop_rawable<
T: crate::array_buffer::sealed::Rawable<U>,
U: ?Sized,
>(
extern "C" fn drop_rawable<T: crate::array_buffer::sealed::Rawable>(
_ptr: *mut c_void,
len: usize,
data: *mut c_void,
) {
// SAFETY: We know that data is a raw T from above
unsafe {
<T as crate::array_buffer::sealed::Rawable<U>>::drop_raw(data as _, len)
<T as crate::array_buffer::sealed::Rawable>::drop_raw(data as _, len)
}
}

Expand All @@ -204,7 +198,7 @@ impl SharedArrayBuffer {
Self::new_backing_store_from_ptr(
slice as _,
len,
drop_rawable::<T, U>,
drop_rawable::<T>,
ptr as _,
)
}
Expand Down
5 changes: 3 additions & 2 deletions tests/test_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -862,13 +862,14 @@ fn array_buffer() {
// Empty but from vec
let ab = v8::ArrayBuffer::with_backing_store(
scope,
&v8::ArrayBuffer::new_backing_store_from_bytes(vec![]).make_shared(),
&v8::ArrayBuffer::new_backing_store_from_bytes(Vec::<u8>::new())
.make_shared(),
);
assert_eq!(0, ab.byte_length());
assert!(!ab.get_backing_store().is_shared());

// Empty but from vec with a huge capacity
let mut v = Vec::with_capacity(10_000_000);
let mut v: Vec<u8> = Vec::with_capacity(10_000_000);
v.extend_from_slice(&[1, 2, 3, 4]);
let ab = v8::ArrayBuffer::with_backing_store(
scope,
Expand Down
Loading