From 7b1173158a2b22633de336aa1bba0bbbe9531bd2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Charlotte=20=F0=9F=A6=9D=20Delenk?= Date: Fri, 8 Jul 2022 15:03:58 +0100 Subject: [PATCH] add MutPtr --- lib/tinyptr/src/ptr/const_ptr.rs | 52 +++-- lib/tinyptr/src/ptr/mod.rs | 2 + lib/tinyptr/src/ptr/mut_ptr.rs | 376 +++++++++++++++++++++++++++++++ 3 files changed, 412 insertions(+), 18 deletions(-) create mode 100644 lib/tinyptr/src/ptr/mut_ptr.rs diff --git a/lib/tinyptr/src/ptr/const_ptr.rs b/lib/tinyptr/src/ptr/const_ptr.rs index e7f4789..e58b1b6 100644 --- a/lib/tinyptr/src/ptr/const_ptr.rs +++ b/lib/tinyptr/src/ptr/const_ptr.rs @@ -1,9 +1,17 @@ //! Constant pointer -use core::{marker::{PhantomData, Unsize}, ops::CoerceUnsized, cmp::Ordering, fmt, hash::{Hash, Hasher}}; +use core::{ + cmp::Ordering, + fmt, + hash::{Hash, Hasher}, + marker::{PhantomData, Unsize}, + ops::CoerceUnsized, +}; use crate::{base_ptr, Pointable, PointerConversionError}; +use super::MutPtr; + /// A tiny constant pointer pub struct ConstPtr { pub(crate) ptr: u16, @@ -77,7 +85,10 @@ impl ConstPtr { ) -> ConstPtr { ConstPtr::from_raw_parts(self.ptr, val.meta) } - // TODO: as_mut + /// Converts the pointer to mutable + pub const fn as_mut(self) -> MutPtr { + MutPtr::from_raw_parts(self.ptr, self.meta) + } /// Gets the address portion of the pointer pub const fn addr(self) -> u16 where @@ -203,8 +214,18 @@ impl ConstPtr { { self.wide().read_unaligned() } - // TODO: copy_to - // TODO: copy_to_nonoverlapping + pub unsafe fn copy_to(self, dest: MutPtr, count: u16) + where + T: Sized, + { + dest.copy_from(self, count) + } + pub unsafe fn copy_to_nonoverlapping(self, dest: MutPtr, count: u16) + where + T: Sized, + { + dest.copy_from_nonoverlapping(self, count) + } pub const fn align_offset(self, align: u16) -> u16 where T: Sized, @@ -228,8 +249,7 @@ impl, const BASE: usize> ConstPtr<[T], BASE> // TODO: as_uninit_slice } -impl PartialEq for ConstPtr -{ +impl PartialEq for ConstPtr { fn eq(&self, other: &Self) 
-> bool { (self.ptr == other.ptr) && (self.meta == other.meta) } @@ -237,22 +257,22 @@ impl PartialEq for ConstPtr impl Eq for ConstPtr {} -impl Ord for ConstPtr -{ +impl Ord for ConstPtr { fn cmp(&self, other: &Self) -> Ordering { self.ptr.cmp(&other.ptr) } } -impl PartialOrd for ConstPtr -{ +impl PartialOrd for ConstPtr { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } -impl, U: Pointable, const BASE: usize> CoerceUnsized> for ConstPtr -where ::PointerMetaTiny: CoerceUnsized<::PointerMetaTiny> +impl, U: Pointable, const BASE: usize> + CoerceUnsized> for ConstPtr +where + ::PointerMetaTiny: CoerceUnsized<::PointerMetaTiny>, { } @@ -263,8 +283,7 @@ impl Clone for ConstPtr { } impl Copy for ConstPtr {} -impl fmt::Debug for ConstPtr -{ +impl fmt::Debug for ConstPtr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(self, f) } @@ -278,11 +297,8 @@ impl Hash for ConstPtr { } } -impl fmt::Pointer for ConstPtr -{ +impl fmt::Pointer for ConstPtr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&self.wide(), f) } } - - diff --git a/lib/tinyptr/src/ptr/mod.rs b/lib/tinyptr/src/ptr/mod.rs index 34c02aa..55e8033 100644 --- a/lib/tinyptr/src/ptr/mod.rs +++ b/lib/tinyptr/src/ptr/mod.rs @@ -3,3 +3,5 @@ mod const_ptr; #[doc(inline)] pub use const_ptr::*; +mod mut_ptr; +pub use mut_ptr::*; diff --git a/lib/tinyptr/src/ptr/mut_ptr.rs b/lib/tinyptr/src/ptr/mut_ptr.rs new file mode 100644 index 0000000..96beb3e --- /dev/null +++ b/lib/tinyptr/src/ptr/mut_ptr.rs @@ -0,0 +1,376 @@ +//! 
Mutable pointer + +use core::{ + cmp::Ordering, + fmt, + hash::{Hash, Hasher}, + marker::{PhantomData, Unsize}, + ops::CoerceUnsized, +}; + +use crate::{base_ptr_mut, Pointable, PointerConversionError}; + +use super::ConstPtr; + +/// A tiny mutable pointer +pub struct MutPtr { + pub(crate) ptr: u16, + pub(crate) meta: ::PointerMetaTiny, + pub(crate) _marker: PhantomData<*const T>, +} + +impl MutPtr { + /// Create a new mutable pointer from raw parts + pub const fn from_raw_parts(ptr: u16, meta: ::PointerMetaTiny) -> Self { + Self { + ptr, + meta, + _marker: PhantomData, + } + } + /// Creates a tiny pointer unchecked + /// + /// # Safety + /// This is unsafe because the address of the pointer may change. + pub unsafe fn new_unchecked(ptr: *mut T) -> Self { + let (addr, meta) = T::extract_parts(ptr); + let addr = if ptr.is_null() { + 0 + } else { + addr.wrapping_sub(BASE) + }; + Self::from_raw_parts(addr as u16, T::tiny_unchecked(meta)) + } + /// Tries to create a tiny pointer from a pointer + /// + /// # Errors + /// Returns an error if the pointer does not fit in the address space + pub fn new(ptr: *mut T) -> Result> { + let (addr, meta) = T::extract_parts(ptr); + let addr = if ptr.is_null() { + 0 + } else { + addr.wrapping_sub(BASE) + }; + let addr = addr + .try_into() + .map_err(PointerConversionError::NotInAddressSpace)?; + let meta = T::try_tiny(meta).map_err(PointerConversionError::CannotReduceMeta)?; + Ok(Self::from_raw_parts(addr, meta)) + } + /// Widens the pointer + pub fn wide(self) -> *mut T { + let addr = if self.ptr == 0 { + 0 + } else { + usize::from(self.ptr).wrapping_add(BASE) + }; + T::create_ptr_mut(base_ptr_mut::(), addr, T::huge(self.meta)) + } + /// Returns `true` if the pointer is null + pub const fn is_null(self) -> bool { + self.ptr == 0 + } + /// Casts to a pointer of another type + pub const fn cast>(self) -> MutPtr + where + T: Pointable, + { + MutPtr::from_raw_parts(self.ptr, self.meta) + } + /// Use the pointer value in a new pointer 
of another type + pub const fn with_metadata_of( + self, + val: MutPtr, + ) -> MutPtr { + MutPtr::from_raw_parts(self.ptr, val.meta) + } + pub const fn as_const(self) -> ConstPtr { + ConstPtr::from_raw_parts(self.ptr, self.meta) + } + /// Gets the address portion of the pointer + pub const fn addr(self) -> u16 + where + T: Sized, + { + self.ptr + } + /// Gets the address portion of the pointer and exposes the provenance part + pub const fn expose_addr(self) -> u16 + where + T: Sized, + { + self.ptr + } + /// Creates a new pointer with the given address + pub const fn with_addr(self, addr: u16) -> Self + where + T: Sized, + { + Self::from_raw_parts(addr, self.meta) + } + /// Creates a new pointer by mapping self’s address to a new one + pub fn map_addr(self, f: impl FnOnce(u16) -> u16) -> Self + where + T: Sized, + { + self.with_addr(f(self.addr())) + } + /// Decompose a pointer into its address and metadata + pub fn to_raw_parts(self) -> (ConstPtr<(), BASE>, ::PointerMetaTiny) { + (ConstPtr::from_raw_parts(self.ptr, ()), self.meta) + } + // TODO: as_ref + // TODO: as_ref_unchecked + // TODO: as_uninit_ref + /// Calculates the offset from a pointer + pub const unsafe fn offset(self, count: i16) -> Self + where + T: Sized, + { + self.wrapping_offset(count) + } + /// Calculates the offset from a pointer using wrapping arithmetic + pub const fn wrapping_offset(mut self, count: i16) -> Self + where + T: Sized, + { + self.ptr = self + .ptr + .wrapping_add_signed(count.wrapping_mul(core::mem::size_of::() as i16)); + self + } + // TODO: as_mut + // TODO: as_mut_unchecked + // TODO: as_uninit_mut + /// Calculates the distance between two pointers + pub const unsafe fn offset_from(self, origin: Self) -> i16 + where + T: Sized, + { + self.wrapping_offset_from(origin) + } + /// Calculates the distance between two pointers using wrapping arithmetic + pub const fn wrapping_offset_from(self, origin: Self) -> i16 + where + T: Sized, + { + (origin.ptr as i16) + 
.wrapping_sub(self.ptr as i16) + .wrapping_div(core::mem::size_of::() as i16) + } + /// calculates the distance between two pointers where it is known that self is equal or + /// greater than origin + pub unsafe fn sub_ptr(self, origin: Self) -> u16 + where + T: Sized, + { + u16::try_from(self.wrapping_offset_from(origin)).unwrap_unchecked() + } + /// Calculates the offset from a pointer + pub const unsafe fn add(self, count: u16) -> Self + where + T: Sized, + { + self.offset(count as i16) + } + /// Calculates the offset from a pointer + pub const unsafe fn sub(self, count: u16) -> Self + where + T: Sized, + { + self.offset((count as i16).wrapping_neg()) + } + /// Calculates the offset from a pointer using wrapping arithmetic + pub const fn wrapping_add(self, count: u16) -> Self + where + T: Sized, + { + self.wrapping_offset(count as i16) + } + /// Calculates the offset from a pointer using wrapping arithmetic + pub const fn wrapping_sub(self, count: u16) -> Self + where + T: Sized, + { + self.wrapping_offset((count as i16).wrapping_neg()) + } + /// Reads the value from self without moving it. this leaves the memory in self unchanged. + pub unsafe fn read(self) -> T + where + T: Sized, + { + self.wide().read() + } + /// Performs a volatile read of the value from self without moving it. this leaves the memory in self unchanged. + pub unsafe fn read_volatile(self) -> T + where + T: Sized, + { + self.wide().read_volatile() + } + /// Reads the value from self without moving it. this leaves the memory in self unchanged. + pub unsafe fn read_unaligned(self) -> T + where + T: Sized, + { + self.wide().read_unaligned() + } + /// Copies count * size_of bytes from self to dest. the source and destination may overlap + pub unsafe fn copy_to(self, dest: MutPtr, count: u16) + where + T: Sized, + { + self.wide().copy_to(dest.wide(), count as usize) + } + /// Copies count * size_of bytes from self to dest. The source and destination may *not* + /// overlap. 
+ pub unsafe fn copy_to_nonoverlapping(self, dest: MutPtr, count: u16) + where + T: Sized, + { + self.wide() + .copy_to_nonoverlapping(dest.wide(), count as usize) + } + /// Copies count * size_of bytes from src to self. the source and destination may overlap + pub unsafe fn copy_from(self, src: ConstPtr, count: u16) + where + T: Sized, + { + self.wide().copy_from(src.wide(), count as usize) + } + /// Copies count * size_of bytes from src to self. the source and destination may *not* + /// overlap + pub unsafe fn copy_from_nonoverlapping(self, src: ConstPtr, count: u16) + where + T: Sized, + { + self.wide() + .copy_from_nonoverlapping(src.wide(), count as usize) + } + /// Executes any destructor of the pointed-to value + pub unsafe fn drop_in_place(self) { + self.wide().drop_in_place() + } + /// Overwrites a memory location with the given value without reading or dropping the old value + pub unsafe fn write(self, val: T) + where + T: Sized, + { + self.wide().write(val) + } + /// Invokes a memset on the specified pointer, setting count * size_of::() bytes of memory + /// starting at self to val + pub unsafe fn write_bytes(self, val: u8, count: u16) + where + T: Sized, + { + self.wide().write_bytes(val, count as usize) + } + /// Performs a volatile write of a memory location + pub unsafe fn write_volatile(self, val: T) + where + T: Sized, + { + self.wide().write_volatile(val) + } + /// Performs an unaligned write of a memory location + pub unsafe fn write_unaligned(self, val: T) + where + T: Sized, + { + self.wide().write_unaligned(val) + } + /// Replace the value of self with source, returning the old value + pub unsafe fn replace(self, src: T) -> T + where + T: Sized, + { + self.wide().replace(src) + } + + /// Swaps the values at two mutable locations + pub unsafe fn swap(self, with: MutPtr) + where + T: Sized, + { + self.wide().swap(with.wide()) + } + + pub const fn align_offset(self, align: u16) -> u16 + where + T: Sized, + { + if !align.is_power_of_two() { + 
panic!("align must be a power of two"); + } + (self.ptr.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1)) + .wrapping_sub(self.ptr) + .wrapping_div(core::mem::size_of::() as u16) + } +} + +impl, const BASE: usize> MutPtr<[T], BASE> { + pub const fn len(self) -> u16 { + self.meta + } + pub const fn as_mut_ptr(self) -> MutPtr { + MutPtr::from_raw_parts(self.ptr, ()) + } + // TODO: as_uninit_slice + // TODO: as_uninit_slice_mut +} + +impl PartialEq for MutPtr { + fn eq(&self, other: &Self) -> bool { + (self.ptr == other.ptr) && (self.meta == other.meta) + } +} + +impl Eq for MutPtr {} + +impl Ord for MutPtr { + fn cmp(&self, other: &Self) -> Ordering { + self.ptr.cmp(&other.ptr) + } +} + +impl PartialOrd for MutPtr { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl, U: Pointable, const BASE: usize> + CoerceUnsized> for MutPtr +where + ::PointerMetaTiny: CoerceUnsized<::PointerMetaTiny>, +{ +} + +impl Clone for MutPtr { + fn clone(&self) -> Self { + *self + } +} +impl Copy for MutPtr {} + +impl fmt::Debug for MutPtr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Pointer::fmt(self, f) + } +} + +impl Hash for MutPtr { + fn hash(&self, state: &mut H) { + state.write_usize(BASE); + state.write_u16(self.ptr); + self.meta.hash(state); + } +} + +impl fmt::Pointer for MutPtr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Pointer::fmt(&self.wide(), f) + } +}