Fully stabilize the following API, including const where applicable:
impl<T> NonNull<T> {
    pub const unsafe fn offset(self, count: isize) -> Self;
    pub const unsafe fn add(self, count: usize) -> Self;
    pub const unsafe fn sub(self, count: usize) -> Self;
    pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize;
    pub const unsafe fn read(self) -> T;
    pub unsafe fn read_volatile(self) -> T;
    pub const unsafe fn read_unaligned(self) -> T;
    pub unsafe fn write_volatile(self, val: T);
    pub unsafe fn replace(self, src: T) -> T;
}
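As a rough usage sketch of the element-wise methods above (the array contents and variable names are made up for the example), the following compiles on stable once this group lands:

```rust
use std::ptr::NonNull;

fn main() {
    let mut data = [10u32, 20, 30, 40];
    // A pointer to a local array is never null, so `expect` cannot fail here.
    let base = NonNull::new(data.as_mut_ptr()).expect("array pointer is non-null");

    unsafe {
        // `add` counts in elements, not bytes, and must stay inside the allocation.
        let third = base.add(2);
        assert_eq!(third.read(), 30);

        // `offset_from` returns the signed distance in elements.
        assert_eq!(third.offset_from(base), 2);

        // `replace` writes the new value and hands back the old one.
        let old = third.replace(99);
        assert_eq!(old, 30);
    }
    assert_eq!(data[2], 99);
}
```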
impl<T: ?Sized> NonNull<T> {
    pub const unsafe fn byte_offset(self, count: isize) -> Self;
    pub const unsafe fn byte_add(self, count: usize) -> Self;
    pub const unsafe fn byte_sub(self, count: usize) -> Self;
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize;
    pub unsafe fn drop_in_place(self);
}
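A sketch of the byte-wise methods and `drop_in_place` (the `Header` type, its field layout, and the values below are assumptions made up for this example):

```rust
use std::mem::{offset_of, ManuallyDrop};
use std::ptr::NonNull;

#[repr(C)]
struct Header {
    tag: u8,
    value: u32,
}

fn main() {
    let mut h = Header { tag: 1, value: 7 };
    let p = NonNull::from(&mut h);

    unsafe {
        // `cast` at offset 0 reads the first field.
        assert_eq!(p.cast::<u8>().read(), 1);

        // `byte_add` moves by raw bytes; `cast` recovers a typed pointer
        // once we are at the field's offset.
        let value_ptr: NonNull<u32> = p.byte_add(offset_of!(Header, value)).cast();
        assert_eq!(value_ptr.read(), 7);

        // `byte_offset_from` reports the distance in bytes, and the two
        // pointers may point to differently typed (even unsized) data.
        assert_eq!(value_ptr.byte_offset_from(p), offset_of!(Header, value) as isize);
    }

    // `drop_in_place` runs the destructor without freeing the backing storage.
    let mut s = ManuallyDrop::new(String::from("hello"));
    let sp = NonNull::from(&mut *s);
    unsafe { sp.drop_in_place() };
    // `s` must not be touched again after this point.
}
```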
Stabilize the following without const (their const-ness remains behind the feature gates noted in the comments):
impl<T> NonNull<T> {
    // const under `const_intrinsic_copy`
    pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize);
    pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize);
    pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize);
    pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize);
    // const under `const_ptr_write`
    pub const unsafe fn write(self, val: T);
    pub const unsafe fn write_bytes(self, val: u8, count: usize);
    pub const unsafe fn write_unaligned(self, val: T);
    // const under `const_swap`
    pub const unsafe fn swap(self, with: NonNull<T>);
    // const under `const_align_offset`
    pub const fn align_offset(self, align: usize) -> usize;
    // const under `const_pointer_is_aligned`
    pub const fn is_aligned(self) -> bool;
}
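A runtime sketch of this second group (buffer sizes, contents, and variable names are arbitrary choices for the example):

```rust
use std::ptr::NonNull;

fn main() {
    let mut src = [1u8, 2, 3, 4];
    let mut dst = [0u8; 4];
    let s = NonNull::new(src.as_mut_ptr()).expect("non-null");
    let d = NonNull::new(dst.as_mut_ptr()).expect("non-null");

    unsafe {
        // The two arrays are distinct locals, so the nonoverlapping variant applies.
        s.copy_to_nonoverlapping(d, 4);
        assert_eq!(dst, [1, 2, 3, 4]);

        // `write_bytes` fills `count` elements with the given byte.
        d.write_bytes(0, 2);
        assert_eq!(dst, [0, 0, 3, 4]);

        // `swap` exchanges the two pointees (a single element each here).
        s.swap(d);
        assert_eq!(src[0], 0);
    }

    // `is_aligned` and `align_offset` need no unsafe: they only inspect the address.
    assert!(d.is_aligned());
    let off = d.align_offset(2);
    assert!(off == 0 || off == 1);
}
```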
Leave the following unstable:
impl<T> NonNull<T> {
    // moved gate to `ptr_sub_ptr`
    pub const unsafe fn sub_ptr(self, subtracted: NonNull<T>) -> usize;
}
impl<T: ?Sized> NonNull<T> {
    // moved gate to `pointer_is_aligned_to`
    pub const fn is_aligned_to(self, align: usize) -> bool;
}
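Assuming a nightly toolchain with the two gates named above enabled, a usage sketch for the still-unstable pair could look like this (the buffer and names are made up for illustration):

```rust
// Nightly-only sketch: both methods stay behind their (renamed) gates.
#![feature(ptr_sub_ptr, pointer_is_aligned_to)]

use std::ptr::NonNull;

fn main() {
    let mut data = [0u64; 8];
    let base = NonNull::new(data.as_mut_ptr()).expect("non-null");

    unsafe {
        // Pointing one past the end of the array is allowed.
        let end = base.add(8);
        // `sub_ptr` is the unsigned counterpart of `offset_from`; the caller
        // must guarantee that `end` is not before `base`.
        assert_eq!(end.sub_ptr(base), 8);
    }

    // `is_aligned_to` takes an explicit power-of-two alignment in bytes.
    assert!(base.is_aligned_to(std::mem::align_of::<u64>()));
}
```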
Fixes: rust-lang#117691