refactor: run cargo clippy and fix some style errors
0xCCF4 committed Jul 2, 2024
1 parent b148665 commit 1ef03d4
Showing 21 changed files with 221 additions and 318 deletions.
14 changes: 7 additions & 7 deletions src/data/archive.rs
@@ -30,7 +30,7 @@ impl ArchiveType {
/// # Errors
/// If the archive could not be opened.
pub fn open<R: Read>(&self, stream: R) -> Result<GeneralArchive<R>> {
-GeneralArchive::new(self.clone(), stream)
+GeneralArchive::new(*self, stream)
}

/// Get the archive type from the file extension.
@@ -89,7 +89,7 @@ impl ArchiveType {
while stream.limit() > 0 {
let num_read = stream.read(&mut buffer[num_read_sum..])?;
num_read_sum += num_read;
-if num_read <= 0 {
+if num_read == 0 {
break;
}
}
@@ -109,7 +109,7 @@ impl ArchiveType {
#[cfg(feature = "archive-tar")]
{
if num_read_sum >= 257 + 8
-&& buffer[257 + 0] == 0x75
+&& buffer[257] == 0x75
&& buffer[257 + 1] == 0x73
&& buffer[257 + 2] == 0x74
&& buffer[257 + 3] == 0x61
@@ -122,7 +122,7 @@ impl ArchiveType {
}

if num_read_sum >= 257 + 8
-&& buffer[257 + 0] == 0x75
+&& buffer[257] == 0x75
&& buffer[257 + 1] == 0x73
&& buffer[257 + 2] == 0x74
&& buffer[257 + 3] == 0x61
@@ -148,7 +148,7 @@ impl ArchiveType {
}
}

-return Ok(None);
+Ok(None)
}
}

@@ -169,7 +169,7 @@ impl<R: Read> GeneralArchive<R> {
#[cfg(feature = "archive-tar")]
ArchiveType::Tar => Self::Tar(tar::TarArchive::new(stream)?),
#[cfg(feature = "archive-zip")]
-ArchiveType::Zip => Self::Zip(zip::ZipArchive::new(stream.into())?),
+ArchiveType::Zip => Self::Zip(zip::ZipArchive::new(stream)?),
})
}

@@ -246,7 +246,7 @@ impl<'a, R: Read> ArchiveEntry<'a, R> {

/// Get the modified time of the archive entry.
/// The time is in seconds since the Unix epoch.
-///
+///
/// # Returns
/// The modified time of the archive entry.
/// Might be 0 if the time could not be determined.
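The archive.rs hunks are typical clippy cleanups: dereferencing a Copy value instead of cloning it, dropping the "+ 0" identity term from an index, returning the trailing expression instead of an explicit return, and removing a redundant .into() conversion on the zip stream. A minimal, self-contained sketch of these patterns follows; the Marker enum and the classify/open functions are illustrative stand-ins rather than code from this repository, and the lint names in the comments (clone_on_copy, identity_op, needless_return) are the clippy lints that usually flag these shapes, not output quoted from this run.

// Hypothetical stand-in for ArchiveType; not part of the crate.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Marker {
    Tar,
    Zip,
}

fn classify(buffer: &[u8]) -> Option<Marker> {
    // buffer[257 + 0] is flagged as an identity operation (identity_op);
    // indexing with the plain literal is equivalent.
    if buffer.len() >= 258 && buffer[257] == 0x75 {
        return Some(Marker::Tar);
    }
    // A trailing expression replaces an explicit `return None;` (needless_return).
    None
}

fn open(marker: &Marker) -> Marker {
    // For a Copy type, dereferencing is preferred over .clone() (clone_on_copy).
    *marker
}

fn main() {
    let mut header = vec![0u8; 512];
    header[257] = 0x75;
    assert_eq!(classify(&header), Some(Marker::Tar));
    assert_eq!(open(&Marker::Zip), Marker::Zip);
}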
14 changes: 7 additions & 7 deletions src/data/archive/tar.rs
@@ -9,13 +9,13 @@ pub struct TarArchive<R: Read> {

impl<'a, R: Read> TarArchive<R> {
/// Create a new Tar archive from a reader.
-///
+///
/// # Arguments
/// * `input` - The input reader.
-///
+///
/// # Returns
/// The Tar archive.
-///
+///
/// # Errors
/// If the archive is invalid or cannot be read.
pub fn new(input: R) -> Result<TarArchive<R>> {
@@ -25,10 +25,10 @@ impl<'a, R: Read> TarArchive<R> {
}

/// Get the entries of the Tar archive.
-///
+///
/// # Returns
/// An iterator over the entries of the Tar archive.
-///
+///
/// # Errors
/// If the entries cannot be read.
pub fn entries(&'a mut self) -> Result<TarArchiveIterator<'a, R>> {
@@ -43,10 +43,10 @@ pub struct TarArchiveIterator<'a, R: 'a + Read> {

impl<'a, R: Read> TarArchiveIterator<'a, R> {
/// Create a new Tar archive iterator from a reader.
-///
+///
/// # Arguments
/// * `entries` - The entries.
-///
+///
/// # Returns
/// The Tar archive iterator.
pub fn new(entries: tar::Entries<'a, R>) -> TarArchiveIterator<'a, R> {
14 changes: 7 additions & 7 deletions src/data/archive/zip.rs
@@ -12,13 +12,13 @@ pub struct ZipArchive<R: Read> {
impl<R: Read> ZipArchive<R> {
/// Create a new Zip archive from a reader.
/// Does not perform any checking on the input. The input is assumed to be a valid Zip archive.
-///
+///
/// # Arguments
/// * `input` - The input reader.
-///
+///
/// # Returns
/// The Zip archive.
-///
+///
/// # Errors
/// Never.
pub fn new(input: R) -> Result<ZipArchive<R>> {
@@ -28,10 +28,10 @@ impl<R: Read> ZipArchive<R> {
}

/// Get the entries of the Zip archive.
-///
+///
/// # Returns
/// The entries of the Zip archive.
-///
+///
/// # Errors
/// Never
pub fn entries(&mut self) -> Result<ZipArchiveIterator<R>> {
@@ -46,10 +46,10 @@ pub struct ZipArchiveIterator<'a, R: Read> {

impl<'a, R: Read> ZipArchiveIterator<'a, R> {
/// Create a new Zip archive iterator from a reader.
-///
+///
/// # Arguments
/// * `reader` - The reader.
-///
+///
/// # Returns
/// The Zip archive iterator.
pub fn new(reader: &'a mut Pin<Box<R>>) -> ZipArchiveIterator<'a, R> {
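The tar.rs and zip.rs hunks pair visually identical /// lines; these look like whitespace-only trims of blank doc-comment separator lines (trailing spaces removed), the kind of change a clippy/rustfmt pass routinely produces. A small sketch of the resulting doc-comment layout, using a hypothetical wrap helper rather than the crate's own API:

/// Open a buffered reader over the given input.
///
/// # Arguments
/// * `input` - The input reader.
///
/// # Returns
/// The wrapped reader.
///
/// # Errors
/// Never; this hypothetical helper always succeeds.
pub fn wrap<R: std::io::Read>(input: R) -> std::io::Result<std::io::BufReader<R>> {
    // The blank /// separator lines above carry no trailing spaces.
    Ok(std::io::BufReader::new(input))
}

fn main() {
    let data: &[u8] = b"example";
    let _reader = wrap(data).expect("wrapping a byte slice does not fail");
}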
2 changes: 1 addition & 1 deletion src/data/compression.rs
@@ -125,7 +125,7 @@ impl CompressionType {
while stream.limit() > 0 {
let num_read = stream.read(&mut buffer[num_read_sum..])?;
num_read_sum += num_read;
-if num_read <= 0 {
+if num_read == 0 {
break;
}
}
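This hunk, like the matching one in archive.rs, touches a detection read loop. Read::read reports how many bytes it wrote, and a return value of 0 means end of stream, so the unsigned count is compared with == 0 rather than <= 0. A sketch of that loop, assuming a plain fixed-size buffer in place of the crate's length-limited stream; fill is an invented name:

use std::io::Read;

// Fill as much of `buffer` as possible, stopping early at end of stream.
fn fill(mut stream: impl Read, buffer: &mut [u8]) -> std::io::Result<usize> {
    let mut num_read_sum = 0;
    while num_read_sum < buffer.len() {
        let num_read = stream.read(&mut buffer[num_read_sum..])?;
        num_read_sum += num_read;
        // `num_read` is a usize, so equality with 0 is the meaningful check.
        if num_read == 0 {
            break; // end of stream reached before the buffer was full
        }
    }
    Ok(num_read_sum)
}

fn main() -> std::io::Result<()> {
    let data: &[u8] = b"hello";
    let mut buffer = [0u8; 16];
    let n = fill(data, &mut buffer)?;
    assert_eq!(&buffer[..n], b"hello");
    Ok(())
}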
20 changes: 10 additions & 10 deletions src/data/copy_stream.rs
@@ -155,8 +155,8 @@ impl<R: Read> BufferCopyStreamReader<R> {
let read_result = reader.read(read_buffer)?;

buffer.reserve(read_result);
-for i in 0..read_result {
-buffer.push(read_buffer[i]);
+for value in read_buffer.iter().take(read_result) {
+buffer.push(*value);
}

Ok(read_result)
@@ -174,14 +174,14 @@ impl<R: Read> BufferCopyStreamReader<R> {
/// Buffer the given amount of bytes from the underlying reader in chunks.
/// This method will allocate a buffer with the given size once. Then reuse this buffer
/// for reading the data from the underlying reader.
-///
+///
/// # Arguments
/// * `length` - The amount of bytes to buffer.
/// * `chunk_size` - The amount of bytes to request from the underlying reader in each iteration.
-///
+///
/// # Returns
/// The amount of bytes read.
-///
+///
/// # Errors
/// If the underlying reader could not be read.
pub fn buffer_bytes_chunked(&self, length: usize, chunk_size: usize) -> std::io::Result<usize> {
@@ -207,13 +207,13 @@ impl<R: Read> BufferCopyStreamReader<R> {

/// Buffer the given amount of bytes from the underlying reader in chunks.
/// Uses a default chunk size of 4096 bytes.
-///
+///
/// # Arguments
/// * `length` - The amount of bytes to buffer.
-///
+///
/// # Returns
/// The amount of bytes read.
-///
+///
/// # Errors
/// If the underlying reader could not be read.
pub fn buffer_bytes_chunked_default(&self, length: usize) -> std::io::Result<usize> {
@@ -291,7 +291,7 @@ impl<R: Read> Seek for BufferCopyStreamReader<R> {
));
}

-if (-requested_position as u64) > (buffer_length as u64) {
+if (-requested_position as u64) > buffer_length {
return Err(std::io::Error::new(
ErrorKind::Other,
"can not seek beyond zero",
@@ -391,7 +391,7 @@ impl<R: Read> BufferFirstContinueReader<R> {
/// # Returns
/// True if the buffer is empty.
pub fn buffer_empty(&self) -> bool {
-self.buffer.len() <= 0 || self.index >= self.buffer.len()
+self.buffer.is_empty() || self.index >= self.buffer.len()
}

/// Try to get the original reader back from the [BufferFirstContinueReader].
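The copy_stream.rs hunks swap an index-based for i in 0..read_result loop for direct slice iteration, drop a cast of buffer_length that appears redundant, and replace len() <= 0 with is_empty(). A minimal sketch of the loop and emptiness idioms; copy_prefix and buffer_empty are free functions invented for illustration, not the crate's methods:

// Copy the first `read_result` bytes of a scratch buffer into a growable one.
fn copy_prefix(read_buffer: &[u8], read_result: usize, buffer: &mut Vec<u8>) {
    buffer.reserve(read_result);
    // Iterating the slice directly replaces indexing with 0..read_result,
    // the pattern targeted by clippy's needless_range_loop lint.
    for value in read_buffer.iter().take(read_result) {
        buffer.push(*value);
    }
}

// Report whether the buffered data has been fully consumed.
fn buffer_empty(buffer: &[u8], index: usize) -> bool {
    // is_empty() replaces len() <= 0, which for a usize only holds at exactly zero.
    buffer.is_empty() || index >= buffer.len()
}

fn main() {
    let chunk = [1u8, 2, 3, 4];
    let mut out = Vec::new();
    copy_prefix(&chunk, 3, &mut out);
    assert_eq!(out, vec![1, 2, 3]);
    assert!(!buffer_empty(&chunk, 0));
    assert!(buffer_empty(&[], 0));
}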
12 changes: 6 additions & 6 deletions src/data/hash.rs
@@ -116,22 +116,22 @@ impl GeneralHashType {
/// println!("Supported algorithms: {}", supported);
/// ```
pub const fn supported_algorithms() -> &'static str {
-const SHA2: &'static str = if cfg!(feature = "hash-sha2") {
+const SHA2: &str = if cfg!(feature = "hash-sha2") {
"SHA512, SHA256, "
} else {
""
};
-const SHA1: &'static str = if cfg!(feature = "hash-sha1") {
+const SHA1: &str = if cfg!(feature = "hash-sha1") {
"SHA1, "
} else {
""
};
-const XXH: &'static str = if cfg!(feature = "hash-xxh") {
+const XXH: &str = if cfg!(feature = "hash-xxh") {
"XXH64, XXH32, "
} else {
""
};
-const NULL: &'static str = "NULL";
+const NULL: &str = "NULL";

concatcp!(SHA2, SHA1, XXH, NULL)
}
@@ -345,14 +345,14 @@ impl FromStr for GeneralHash {
/// * If the hash data is not valid (wrong length or non-hex string).
fn from_str(hex: &str) -> Result<Self, Self::Err> {
let mut iter = hex.split(':');
-let hash_type = GeneralHashType::from_str(iter.next().ok_or_else(|| "No hash type")?)
+let hash_type = GeneralHashType::from_str(iter.next().ok_or("No hash type")?)
.map_err(|_| "Failed to parse hash type")?;

#[cfg(any(feature = "hash-sha2", feature = "hash-sha1", feature = "hash-xxh"))]
let data = match hash_type {
GeneralHashType::NULL => Vec::new(),
_ => {
-let data = iter.next().ok_or_else(|| "No hash data")?;
+let data = iter.next().ok_or("No hash data")?;
utils::decode_hex(data).map_err(|_| "Failed to decode hash data")?
}
};
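The hash.rs hunks drop 'static from const string slices, where it is already implied, and switch ok_or_else with a constant error to ok_or, since no closure is needed when building the error is free. A sketch with made-up constants and a toy parse function standing in for the real FromStr implementation:

// In a const item the 'static lifetime on &str is implied, so clippy's
// redundant_static_lifetimes lint suggests writing just &str.
const SHA2: &str = "SHA512, SHA256, ";
const NULL: &str = "NULL";

// ok_or takes the error value directly; ok_or_else(|| ...) is only useful when
// constructing the error is expensive (clippy's unnecessary_lazy_evaluations lint).
fn parse(hex: &str) -> Result<(&str, &str), &'static str> {
    let mut iter = hex.split(':');
    let hash_type = iter.next().ok_or("No hash type")?;
    let data = iter.next().ok_or("No hash data")?;
    Ok((hash_type, data))
}

fn main() {
    assert_eq!(parse("sha256:abcd"), Ok(("sha256", "abcd")));
    assert_eq!(parse("sha256"), Err("No hash data"));
    println!("Supported algorithms: {}{}", SHA2, NULL);
}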
4 changes: 2 additions & 2 deletions src/data/path.rs
@@ -152,7 +152,7 @@ impl FilePath {
}
}

-return result;
+result
}

/// Gets the parent of the file path.
@@ -239,7 +239,7 @@ impl std::fmt::Display for FilePath {
result.push_str("| ");
}

-result.push_str(component.path.to_str().unwrap_or_else(|| "<invalid path>"));
+result.push_str(component.path.to_str().unwrap_or("<invalid path>"));
}

write!(f, "{}", result)
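The path.rs changes repeat two of the same idioms: a trailing expression instead of return result;, and unwrap_or with a constant default, which fits here because Path::to_str yields None only when the path is not valid UTF-8. A one-function sketch; display_component is an illustrative name:

use std::path::Path;

// Render one path component, falling back to a constant placeholder.
fn display_component(path: &Path) -> &str {
    // unwrap_or suffices for a constant default; the closure form is unnecessary.
    path.to_str().unwrap_or("<invalid path>")
}

fn main() {
    let component = Path::new("src/data/path.rs");
    assert_eq!(display_component(component), "src/data/path.rs");
}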
9 changes: 3 additions & 6 deletions src/pool.rs
@@ -165,7 +165,7 @@ impl<Job: Send + JobTrait + 'static, Result: Send + ResultTrait + 'static> Threa
mut args: Vec<Argument>,
func: WorkerEntry<Job, Result, Argument>,
) -> ThreadPool<Job, Result> {
-assert!(args.len() > 0);
+assert!(!args.is_empty());

let mut workers = Vec::with_capacity(args.len());

@@ -217,11 +217,8 @@ impl<Job: Send + JobTrait + 'static, Result: Send + ResultTrait + 'static> Threa
None => {
error!("ThreadPool is shutting down. Cannot publish job.");
}
-Some(job_publish) => match job_publish.send(job) {
-Err(e) => {
-error!("Failed to publish job on thread pool. {}", e);
-}
-Ok(_) => {}
-},
+Some(job_publish) => if let Err(e) = job_publish.send(job) {
+error!("Failed to publish job on thread pool. {}", e);
+},
},
}
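The pool.rs hunks replace assert!(args.len() > 0) with assert!(!args.is_empty()) and collapse a match whose Ok(_) arm was empty into an if let Err(e), the shape clippy generally suggests for single-armed matches. A sketch using a plain std::sync::mpsc channel in place of the pool's internal sender; publish is an illustrative free function, not the crate's method:

use std::sync::mpsc;

// A match whose Ok(_) arm is empty collapses into `if let Err(e) = ...`.
fn publish(job_publish: Option<&mpsc::Sender<u32>>, job: u32) {
    match job_publish {
        None => eprintln!("ThreadPool is shutting down. Cannot publish job."),
        Some(job_publish) => {
            if let Err(e) = job_publish.send(job) {
                eprintln!("Failed to publish job on thread pool. {}", e);
            }
        }
    }
}

fn main() {
    // assert!(!args.is_empty()) is the idiomatic form of assert!(args.len() > 0).
    let args = vec![1u32, 2, 3];
    assert!(!args.is_empty());

    let (tx, rx) = mpsc::channel();
    publish(Some(&tx), 42);
    assert_eq!(rx.recv().unwrap(), 42);
    publish(None, 7);
}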
