Skip to content

Commit

Permalink
Apply clippy suggestions
Browse files Browse the repository at this point in the history
Add clippy workflow

Signed-off-by: Tin Svagelj <tin.svagelj@live.com>
  • Loading branch information
Caellian committed Sep 16, 2023
1 parent bcc533e commit 68b7534
Show file tree
Hide file tree
Showing 8 changed files with 110 additions and 85 deletions.
18 changes: 18 additions & 0 deletions .github/workflows/clippy.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# CI workflow: run Clippy lints on every push and pull request.
name: Rust Clippy

on:
  push:
  pull_request:

env:
  # Force colored cargo output in the CI log.
  CARGO_TERM_COLOR: always

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      # The crate targets nightly features, so lint with the nightly toolchain.
      - run: rustup update nightly && rustup default nightly
      - name: Run clippy
        run: cargo clippy
2 changes: 1 addition & 1 deletion .github/workflows/semver.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Rust
name: Check SemVer

on:
push:
Expand Down
31 changes: 15 additions & 16 deletions src/details.rs
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ impl StorageDetails for ImplConcurrent {
}

fn deref_state(state: &Self::StorageState) -> &ContiguousMemoryState<Self> {
&state
state
}

fn get_base(base: &Self::Base) -> Self::LockResult<*mut u8> {
Expand Down Expand Up @@ -274,7 +274,7 @@ impl StorageDetails for ImplDefault {
}

fn deref_state(state: &Self::StorageState) -> &ContiguousMemoryState<Self> {
&state
state
}

fn get_base(base: &Self::Base) -> Self::LockResult<*mut u8> {
Expand Down Expand Up @@ -370,7 +370,7 @@ impl StorageDetails for ImplUnsafe {
}

fn deref_state(state: &Self::StorageState) -> &ContiguousMemoryState<Self> {
&state
state
}

fn get_base(base: &Self::Base) -> Self::LockResult<*mut u8> {
Expand Down Expand Up @@ -488,7 +488,7 @@ impl ReferenceDetails for ImplConcurrent {
SyncContiguousEntryRef {
inner: Arc::new(ReferenceState {
state: state.clone(),
range: range.clone(),
range,
borrow_kind: RwLock::new(()),
#[cfg(feature = "ptr_metadata")]
drop_metadata: static_metadata::<T, dyn HandleDrop>(),
Expand Down Expand Up @@ -527,7 +527,7 @@ impl ReferenceDetails for ImplDefault {
ContiguousEntryRef {
inner: Rc::new(ReferenceState {
state: state.clone(),
range: range.clone(),
range,
borrow_kind: Cell::new(BorrowState::Read(0)),
#[cfg(feature = "ptr_metadata")]
drop_metadata: static_metadata::<T, dyn HandleDrop>(),
Expand Down Expand Up @@ -706,19 +706,18 @@ impl StoreDataDetails for ImplUnsafe {
data: *const T,
layout: Layout,
) -> Result<*mut T, ContiguousMemoryError> {
let (addr, range) = loop {
match ImplUnsafe::track_next(state, layout) {
Ok(taken) => {
let found = (taken.0
+ ImplUnsafe::get_base(&ImplUnsafe::deref_state(state).base) as usize)
as *mut u8;
unsafe {
core::ptr::copy_nonoverlapping(data as *mut u8, found, layout.size());
}
break (found, taken);
let (addr, range) = match ImplUnsafe::track_next(state, layout) {
Ok(taken) => {
let found = (taken.0
+ ImplUnsafe::get_base(&ImplUnsafe::deref_state(state).base) as usize)
as *mut u8;
unsafe {
core::ptr::copy_nonoverlapping(data as *mut u8, found, layout.size());
}
Err(other) => return Err(other),

(found, taken)
}
Err(other) => return Err(other),
};

Ok(ImplUnsafe::build_ref(state, addr as *mut T, range))
Expand Down
2 changes: 1 addition & 1 deletion src/error.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
//! Errors produced by the crate.

#[cfg(all(feature = "error_in_core"))]
#[cfg(feature = "error_in_core")]
use core::error::Error;
#[cfg(all(not(feature = "error_in_core"), not(feature = "no_std")))]
use std::error::Error;
Expand Down
17 changes: 13 additions & 4 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -225,8 +225,8 @@ impl<Impl: ImplDetails> ContiguousMemoryStorage<Impl> {
let mut data = ManuallyDrop::new(value);
let layout = Layout::for_value(&data);
let pos = &mut *data as *mut T;
let result = unsafe { self.push_raw(pos, layout) };
result

unsafe { self.push_raw(pos, layout) }
}

/// Works same as [`store`](ContiguousMemory::push) but takes a pointer and
Expand All @@ -249,6 +249,15 @@ impl<Impl: ImplDetails> ContiguousMemoryStorage<Impl> {
/// mem::transmute(storage.push_raw(erased, layout))
/// };
/// ```
///
/// # Safety
///
/// This function is unsafe because it clones memory from provided pointer
/// which means it could cause a segmentation fault if the pointer is
/// invalid.
///
/// Further, it also allows escaping type drop glue because it takes type
/// [`Layout`] as a separate argument.
pub unsafe fn push_raw<T: StoreRequirements>(
&mut self,
data: *const T,
Expand Down Expand Up @@ -331,7 +340,7 @@ impl ContiguousMemoryStorage<ImplConcurrent> {
pub fn shrink_to_fit(&mut self) -> Result<usize, LockingError> {
if let Some(shrunk) = ImplConcurrent::shrink_tracker(&mut self.inner)? {
self.resize(shrunk).expect("unable to shrink container");
return Ok(shrunk);
Ok(shrunk)
} else {
Ok(self.get_capacity())
}
Expand Down Expand Up @@ -504,7 +513,7 @@ pub(crate) mod sealed {
impl<Impl: StorageDetails> Drop for ContiguousMemoryState<Impl> {
fn drop(&mut self) {
let layout = self.layout();
Impl::deallocate(&mut self.base.0, layout)
Impl::deallocate(&self.base.0, layout)
}
}
}
Expand Down
5 changes: 5 additions & 0 deletions src/range.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,11 @@ impl ByteRange {
self.1 - self.0
}

/// Returns `true` when this byte range spans zero bytes, i.e. its start
/// and end positions coincide.
pub fn is_empty(&self) -> bool {
    self.1 == self.0
}

/// Returns `true` if this byte range contains another byte range `other`.
pub fn contains(&self, other: Self) -> bool {
self.0 <= other.0 && other.1 <= self.1
Expand Down
24 changes: 10 additions & 14 deletions src/refs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ impl<T: ?Sized> SyncContiguousEntryRef<T> {
///
/// If the data is mutably accessed, this method will block the current
/// thread until it becomes available.
pub fn get<'a>(&'a self) -> Result<MemoryReadGuard<'a, T, ImplConcurrent>, LockingError>
pub fn get(&self) -> Result<MemoryReadGuard<'_, T, ImplConcurrent>, LockingError>
where
T: RefSizeReq,
{
Expand Down Expand Up @@ -68,7 +68,7 @@ impl<T: ?Sized> SyncContiguousEntryRef<T> {
/// If the data is mutably accessed, this method returns a
/// [`LockingError::WouldBlock`](crate::error::LockingError::WouldBlock)
/// error.
pub fn try_get<'a>(&'a self) -> Result<MemoryReadGuard<'a, T, ImplConcurrent>, LockingError>
pub fn try_get(&self) -> Result<MemoryReadGuard<'_, T, ImplConcurrent>, LockingError>
where
T: RefSizeReq,
{
Expand Down Expand Up @@ -96,9 +96,7 @@ impl<T: ?Sized> SyncContiguousEntryRef<T> {
/// [`LockingError::Poisoned`] error if the Mutex holding the base address
/// pointer or the Mutex holding concurrent mutable access flag has been
/// poisoned.
pub fn get_mut<'a>(
&'a mut self,
) -> Result<MemoryWriteGuard<'a, T, ImplConcurrent>, LockingError>
pub fn get_mut(&mut self) -> Result<MemoryWriteGuard<'_, T, ImplConcurrent>, LockingError>
where
T: RefSizeReq,
{
Expand Down Expand Up @@ -130,9 +128,7 @@ impl<T: ?Sized> SyncContiguousEntryRef<T> {
///
/// - [`LockingError::WouldBlock`] error if accessing referenced data chunk
/// would be blocking.
pub fn try_get_mut<'a>(
&'a mut self,
) -> Result<MemoryWriteGuard<'a, T, ImplConcurrent>, LockingError>
pub fn try_get_mut(&mut self) -> Result<MemoryWriteGuard<'_, T, ImplConcurrent>, LockingError>
where
T: RefSizeReq,
{
Expand Down Expand Up @@ -310,7 +306,7 @@ impl<T: ?Sized> ContiguousEntryRef<T> {

/// Returns a reference to data at its current location and panics if the
/// represented memory region is mutably borrowed.
pub fn get<'a>(&'a self) -> MemoryReadGuard<'a, T, ImplDefault>
pub fn get(&self) -> MemoryReadGuard<'_, T, ImplDefault>
where
T: RefSizeReq,
{
Expand All @@ -320,7 +316,7 @@ impl<T: ?Sized> ContiguousEntryRef<T> {
/// Returns a reference to data at its current location or a
/// [`RegionBorrowedError`] error if the represented memory region is
/// mutably borrowed.
pub fn try_get<'a>(&'a self) -> Result<MemoryReadGuard<'a, T, ImplDefault>, RegionBorrowedError>
pub fn try_get(&self) -> Result<MemoryReadGuard<'_, T, ImplDefault>, RegionBorrowedError>
where
T: RefSizeReq,
{
Expand Down Expand Up @@ -350,7 +346,7 @@ impl<T: ?Sized> ContiguousEntryRef<T> {

/// Returns a mutable reference to data at its current location and panics
/// if the reference has already been borrowed.
pub fn get_mut<'a>(&'a mut self) -> MemoryWriteGuard<'a, T, ImplDefault>
pub fn get_mut(&mut self) -> MemoryWriteGuard<'_, T, ImplDefault>
where
T: RefSizeReq,
{
Expand All @@ -360,9 +356,9 @@ impl<T: ?Sized> ContiguousEntryRef<T> {
/// Returns a mutable reference to data at its current location or a
/// [`RegionBorrowedError`] error if the represented memory region is
/// already borrowed.
pub fn try_get_mut<'a>(
&'a mut self,
) -> Result<MemoryWriteGuard<'a, T, ImplDefault>, RegionBorrowedError>
pub fn try_get_mut(
&mut self,
) -> Result<MemoryWriteGuard<'_, T, ImplDefault>, RegionBorrowedError>
where
T: RefSizeReq,
{
Expand Down
96 changes: 47 additions & 49 deletions src/tracker.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
#![doc(hidden)]

use core::alloc::Layout;
use core::{alloc::Layout, cmp::Ordering};

#[cfg(any(feature = "no_std"))]
#[cfg(feature = "no_std")]
use crate::types::Vec;
use crate::{error::ContiguousMemoryError, range::ByteRange};

Expand All @@ -17,11 +17,9 @@ pub struct AllocationTracker {
impl AllocationTracker {
/// Constructs a new `AllocationTracker` of the provided `size`.
pub fn new(size: usize) -> Self {
let mut initial = Vec::new();
initial.push(ByteRange(0, size));
AllocationTracker {
size,
unused: initial,
unused: vec![ByteRange(0, size)],
}
}

Expand All @@ -45,44 +43,46 @@ impl AllocationTracker {
/// error if the represented memory range cannot be shrunk enough to fit
/// the desired size.
pub fn resize(&mut self, new_size: usize) -> Result<(), ContiguousMemoryError> {
if new_size == self.size {
return Ok(());
} else if new_size < self.size {
let last = self
.unused
.last_mut()
.ok_or(ContiguousMemoryError::Unshrinkable {
required_size: self.size,
})?;

let reduction = self.size - new_size;
if last.len() < reduction {
return Err(ContiguousMemoryError::Unshrinkable {
required_size: self.size - last.len(),
});
match new_size.cmp(&self.size) {
Ordering::Equal => {}
Ordering::Less => {
let last = self
.unused
.last_mut()
.ok_or(ContiguousMemoryError::Unshrinkable {
required_size: self.size,
})?;

let reduction = self.size - new_size;
if last.len() < reduction {
return Err(ContiguousMemoryError::Unshrinkable {
required_size: self.size - last.len(),
});
}
last.1 -= reduction;
self.size = new_size;
}
last.1 -= reduction;
self.size = new_size;
} else {
match self.unused.last() {
Some(it) => {
// check whether the last free region ends at the end of
// tracked region
if it.1 == self.size {
let last = self
.unused
.last_mut()
.expect("free byte ranges isn't empty");
last.1 = new_size;
} else {
Ordering::Greater => {
match self.unused.last() {
Some(it) => {
// check whether the last free region ends at the end of
// tracked region
if it.1 == self.size {
let last = self
.unused
.last_mut()
.expect("free byte ranges isn't empty");
last.1 = new_size;
} else {
self.unused.push(ByteRange(self.size, new_size));
}
}
None => {
self.unused.push(ByteRange(self.size, new_size));
}
}
None => {
self.unused.push(ByteRange(self.size, new_size));
}
self.size = new_size;
}
self.size = new_size;
}
Ok(())
}
Expand Down Expand Up @@ -142,12 +142,12 @@ impl AllocationTracker {

let (left, right) = found.difference_unchecked(region);

if left.len() > 0 {
if !left.is_empty() {
self.unused[i] = left;
if right.len() > 0 {
if !right.is_empty() {
self.unused.insert(i + 1, right);
}
} else if right.len() > 0 {
} else if !right.is_empty() {
self.unused[i] = right;
} else {
self.unused.remove(i);
Expand Down Expand Up @@ -193,12 +193,12 @@ impl AllocationTracker {

let (left, right) = available.difference_unchecked(taken);

if left.len() > 0 {
if !left.is_empty() {
self.unused[i] = left;
if right.len() > 0 {
if !right.is_empty() {
self.unused.insert(i + 1, right);
}
} else if right.len() > 0 {
} else if !right.is_empty() {
self.unused[i] = right;
} else {
self.unused.remove(i);
Expand All @@ -224,12 +224,10 @@ impl AllocationTracker {
return Err(ContiguousMemoryError::DoubleFree);
}
found.merge_in_unchecked(region);
} else if let Some((i, _)) = self.unused.iter().enumerate().find(|it| it.0 > region.0) {
self.unused.insert(i, region);
} else {
if let Some((i, _)) = self.unused.iter().enumerate().find(|it| it.0 > region.0) {
self.unused.insert(i, region);
} else {
self.unused.push(region);
}
self.unused.push(region);
}

Ok(())
Expand Down

0 comments on commit 68b7534

Please sign in to comment.