diff --git a/src/data/archive.rs b/src/data/archive.rs
index 2675615..7ae8c82 100644
--- a/src/data/archive.rs
+++ b/src/data/archive.rs
@@ -30,7 +30,7 @@ impl ArchiveType {
     /// # Errors
     /// If the archive could not be opened.
     pub fn open<R: Read>(&self, stream: R) -> Result<GeneralArchive<R>> {
-        GeneralArchive::new(self.clone(), stream)
+        GeneralArchive::new(*self, stream)
     }

     /// Get the archive type from the file extension.
@@ -89,7 +89,7 @@ impl ArchiveType {
         while stream.limit() > 0 {
             let num_read = stream.read(&mut buffer[num_read_sum..])?;
             num_read_sum += num_read;
-            if num_read <= 0 {
+            if num_read == 0 {
                 break;
             }
         }
@@ -109,7 +109,7 @@ impl ArchiveType {
         #[cfg(feature = "archive-tar")]
         {
             if num_read_sum >= 257 + 8
-                && buffer[257 + 0] == 0x75
+                && buffer[257] == 0x75
                 && buffer[257 + 1] == 0x73
                 && buffer[257 + 2] == 0x74
                 && buffer[257 + 3] == 0x61
@@ -122,7 +122,7 @@ impl ArchiveType {
             }

             if num_read_sum >= 257 + 8
-                && buffer[257 + 0] == 0x75
+                && buffer[257] == 0x75
                 && buffer[257 + 1] == 0x73
                 && buffer[257 + 2] == 0x74
                 && buffer[257 + 3] == 0x61
@@ -148,7 +148,7 @@ impl ArchiveType {
            }
        }

-        return Ok(None);
+        Ok(None)
    }
}
@@ -169,7 +169,7 @@ impl<R: Read> GeneralArchive<R> {
             #[cfg(feature = "archive-tar")]
             ArchiveType::Tar => Self::Tar(tar::TarArchive::new(stream)?),
             #[cfg(feature = "archive-zip")]
-            ArchiveType::Zip => Self::Zip(zip::ZipArchive::new(stream.into())?),
+            ArchiveType::Zip => Self::Zip(zip::ZipArchive::new(stream)?),
         })
     }
@@ -246,7 +246,7 @@ impl<'a, R: Read> ArchiveEntry<'a, R> {

     /// Get the modified time of the archive entry.
     /// The time is in seconds since the Unix epoch.
-    /// 
+    ///
     /// # Returns
     /// The modified time of the archive entry.
     /// Might be 0 if the time could not be determined.
diff --git a/src/data/archive/tar.rs b/src/data/archive/tar.rs
index 1eb96e9..9c8a5de 100644
--- a/src/data/archive/tar.rs
+++ b/src/data/archive/tar.rs
@@ -9,13 +9,13 @@ pub struct TarArchive<R: Read> {
 }

 impl<'a, R: Read> TarArchive<R> {
     /// Create a new Tar archive from a reader.
-    /// 
+    ///
     /// # Arguments
     /// * `input` - The input reader.
-    /// 
+    ///
     /// # Returns
     /// The Tar archive.
-    /// 
+    ///
     /// # Errors
     /// If the archive is invalid or cannot be read.
     pub fn new(input: R) -> Result<TarArchive<R>> {
@@ -25,10 +25,10 @@ impl<'a, R: Read> TarArchive<R> {
     }

     /// Get the entries of the Tar archive.
-    /// 
+    ///
     /// # Returns
     /// An iterator over the entries of the Tar archive.
-    /// 
+    ///
     /// # Errors
     /// If the entries cannot be read.
     pub fn entries(&'a mut self) -> Result<TarArchiveIterator<'a, R>> {
@@ -43,10 +43,10 @@ pub struct TarArchiveIterator<'a, R: 'a + Read> {
 impl<'a, R: Read> TarArchiveIterator<'a, R> {
     /// Create a new Tar archive iterator from a reader.
-    /// 
+    ///
     /// # Arguments
     /// * `entries` - The entries.
-    /// 
+    ///
     /// # Returns
     /// The Tar archive iterator.
     pub fn new(entries: tar::Entries<'a, R>) -> TarArchiveIterator<'a, R> {
diff --git a/src/data/archive/zip.rs b/src/data/archive/zip.rs
index 896a8a6..29e37d8 100644
--- a/src/data/archive/zip.rs
+++ b/src/data/archive/zip.rs
@@ -12,13 +12,13 @@ pub struct ZipArchive<R: Read> {
 impl<R: Read> ZipArchive<R> {
     /// Create a new Zip archive from a reader.
     /// Does not perform any checking on the input. The input is assumed to be a valid Zip archive.
-    /// 
+    ///
     /// # Arguments
     /// * `input` - The input reader.
-    /// 
+    ///
     /// # Returns
     /// The Zip archive.
-    /// 
+    ///
     /// # Errors
     /// Never.
     pub fn new(input: R) -> Result<ZipArchive<R>> {
@@ -28,10 +28,10 @@ impl<R: Read> ZipArchive<R> {
     }

     /// Get the entries of the Zip archive.
-    /// 
+    ///
     /// # Returns
     /// The entries of the Zip archive.
-    /// 
+    ///
     /// # Errors
     /// Never
     pub fn entries(&mut self) -> Result<ZipArchiveIterator<R>> {
@@ -46,10 +46,10 @@ pub struct ZipArchiveIterator<'a, R: Read> {
 impl<'a, R: Read> ZipArchiveIterator<'a, R> {
     /// Create a new Zip archive iterator from a reader.
-    /// 
+    ///
     /// # Arguments
     /// * `reader` - The reader.
-    /// 
+    ///
     /// # Returns
     /// The Zip archive iterator.
     pub fn new(reader: &'a mut Pin<Box<R>>) -> ZipArchiveIterator<'a, R> {
diff --git a/src/data/compression.rs b/src/data/compression.rs
index 2061e8b..a0fa4df 100644
--- a/src/data/compression.rs
+++ b/src/data/compression.rs
@@ -125,7 +125,7 @@ impl CompressionType {
         while stream.limit() > 0 {
             let num_read = stream.read(&mut buffer[num_read_sum..])?;
             num_read_sum += num_read;
-            if num_read <= 0 {
+            if num_read == 0 {
                 break;
             }
         }
diff --git a/src/data/copy_stream.rs b/src/data/copy_stream.rs
index 635485d..465587f 100644
--- a/src/data/copy_stream.rs
+++ b/src/data/copy_stream.rs
@@ -155,8 +155,8 @@ impl<R: Read> BufferCopyStreamReader<R> {
         let read_result = reader.read(read_buffer)?;

         buffer.reserve(read_result);
-        for i in 0..read_result {
-            buffer.push(read_buffer[i]);
+        for value in read_buffer.iter().take(read_result) {
+            buffer.push(*value);
         }

         Ok(read_result)
@@ -174,14 +174,14 @@ impl<R: Read> BufferCopyStreamReader<R> {
     /// Buffer the given amount of bytes from the underlying reader in chunks.
     /// This method will allocate a buffer with the given size once. Then reuse this buffer
     /// for reading the data from the underlying reader.
-    /// 
+    ///
     /// # Arguments
     /// * `length` - The amount of bytes to buffer.
     /// * `chunk_size` - The amount of bytes to request from the underlying reader in each iteration.
-    /// 
+    ///
     /// # Returns
     /// The amount of bytes read.
-    /// 
+    ///
     /// # Errors
     /// If the underlying reader could not be read.
     pub fn buffer_bytes_chunked(&self, length: usize, chunk_size: usize) -> std::io::Result<usize> {
@@ -207,13 +207,13 @@ impl<R: Read> BufferCopyStreamReader<R> {

     /// Buffer the given amount of bytes from the underlying reader in chunks.
     /// Uses a default chunk size of 4096 bytes.
-    /// 
+    ///
     /// # Arguments
     /// * `length` - The amount of bytes to buffer.
-    /// 
+    ///
     /// # Returns
     /// The amount of bytes read.
-    /// 
+    ///
     /// # Errors
     /// If the underlying reader could not be read.
     pub fn buffer_bytes_chunked_default(&self, length: usize) -> std::io::Result<usize> {
@@ -291,7 +291,7 @@ impl<R: Read> Seek for BufferCopyStreamReader<R> {
             ));
         }

-        if (-requested_position as u64) > (buffer_length as u64) {
+        if (-requested_position as u64) > buffer_length {
             return Err(std::io::Error::new(
                 ErrorKind::Other,
                 "can not seek beyond zero",
@@ -391,7 +391,7 @@ impl<R: Read> BufferFirstContinueReader<R> {
     /// # Returns
     /// True if the buffer is empty.
     pub fn buffer_empty(&self) -> bool {
-        self.buffer.len() <= 0 || self.index >= self.buffer.len()
+        self.buffer.is_empty() || self.index >= self.buffer.len()
     }

     /// Try to get the original reader back from the [BufferFirstContinueReader].
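[Editor's note, not part of the patch: the `num_read == 0` fixes in the archive.rs and compression.rs hunks above rely on the `std::io::Read` contract: `read` returns a `usize`, which can never be negative, and a return of `0` signals EOF. A minimal standalone sketch of that read-to-limit loop, with illustrative names that are not from the patched crate:]

    use std::io::{Read, Take};

    // Fill `buffer` from a length-limited stream. A zero-byte read means EOF,
    // which is why `num_read == 0` (not `<= 0`) is the correct guard.
    fn read_up_to<R: Read>(stream: &mut Take<R>, buffer: &mut [u8]) -> std::io::Result<usize> {
        let mut num_read_sum = 0;
        while stream.limit() > 0 && num_read_sum < buffer.len() {
            let num_read = stream.read(&mut buffer[num_read_sum..])?;
            num_read_sum += num_read;
            if num_read == 0 {
                break; // EOF before the limit was exhausted
            }
        }
        Ok(num_read_sum)
    }

    fn main() -> std::io::Result<()> {
        let data = b"hello world";
        let mut limited = (&data[..]).take(5); // allow at most 5 bytes to be read
        let mut buffer = [0u8; 8];
        let n = read_up_to(&mut limited, &mut buffer)?;
        assert_eq!(&buffer[..n], b"hello");
        Ok(())
    }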
diff --git a/src/data/hash.rs b/src/data/hash.rs
index 8856a69..68d5989 100644
--- a/src/data/hash.rs
+++ b/src/data/hash.rs
@@ -116,22 +116,22 @@ impl GeneralHashType {
     /// println!("Supported algorithms: {}", supported);
     /// ```
     pub const fn supported_algorithms() -> &'static str {
-        const SHA2: &'static str = if cfg!(feature = "hash-sha2") {
+        const SHA2: &str = if cfg!(feature = "hash-sha2") {
             "SHA512, SHA256, "
         } else {
             ""
         };
-        const SHA1: &'static str = if cfg!(feature = "hash-sha1") {
+        const SHA1: &str = if cfg!(feature = "hash-sha1") {
             "SHA1, "
         } else {
             ""
         };
-        const XXH: &'static str = if cfg!(feature = "hash-xxh") {
+        const XXH: &str = if cfg!(feature = "hash-xxh") {
             "XXH64, XXH32, "
         } else {
             ""
         };
-        const NULL: &'static str = "NULL";
+        const NULL: &str = "NULL";

         concatcp!(SHA2, SHA1, XXH, NULL)
     }
@@ -345,14 +345,14 @@ impl FromStr for GeneralHash {
     /// * If the hash data is not valid (wrong length or non-hex string).
     fn from_str(hex: &str) -> Result<Self, Self::Err> {
         let mut iter = hex.split(':');
-        let hash_type = GeneralHashType::from_str(iter.next().ok_or_else(|| "No hash type")?)
+        let hash_type = GeneralHashType::from_str(iter.next().ok_or("No hash type")?)
             .map_err(|_| "Failed to parse hash type")?;

         #[cfg(any(feature = "hash-sha2", feature = "hash-sha1", feature = "hash-xxh"))]
         let data = match hash_type {
             GeneralHashType::NULL => Vec::new(),
             _ => {
-                let data = iter.next().ok_or_else(|| "No hash data")?;
+                let data = iter.next().ok_or("No hash data")?;
                 utils::decode_hex(data).map_err(|_| "Failed to decode hash data")?
             }
         };
diff --git a/src/data/path.rs b/src/data/path.rs
index 00f7b3d..fbd23d0 100644
--- a/src/data/path.rs
+++ b/src/data/path.rs
@@ -152,7 +152,7 @@ impl FilePath {
            }
        }

-        return result;
+        result
    }

    /// Gets the parent of the file path.
@@ -239,7 +239,7 @@ impl std::fmt::Display for FilePath {
                result.push_str("| ");
            }

-            result.push_str(component.path.to_str().unwrap_or_else(|| ""));
+            result.push_str(component.path.to_str().unwrap_or(""));
        }

        write!(f, "{}", result)
diff --git a/src/pool.rs b/src/pool.rs
index affd6ed..f82ee46 100644
--- a/src/pool.rs
+++ b/src/pool.rs
@@ -165,7 +165,7 @@ impl Threa
         mut args: Vec,
         func: WorkerEntry,
     ) -> ThreadPool {
-        assert!(args.len() > 0);
+        assert!(!args.is_empty());

         let mut workers = Vec::with_capacity(args.len());

@@ -217,11 +217,8 @@ impl Threa
             None => {
                 error!("ThreadPool is shutting down. Cannot publish job.");
             }
-            Some(job_publish) => match job_publish.send(job) {
-                Err(e) => {
-                    error!("Failed to publish job on thread pool. {}", e);
-                }
-                Ok(_) => {}
+            Some(job_publish) => if let Err(e) = job_publish.send(job) {
+                error!("Failed to publish job on thread pool. {}", e);
             },
         },
     }
diff --git a/src/stages/analyze/cmd.rs b/src/stages/analyze/cmd.rs
index ad50c9b..1070563 100644
--- a/src/stages/analyze/cmd.rs
+++ b/src/stages/analyze/cmd.rs
@@ -72,7 +72,7 @@ pub fn run(analysis_settings: AnalysisSettings) -> Result<()> {
     };

     let mut input_buf_reader = std::io::BufReader::new(&input_file);
-    let mut null_out_writer = NullWriter::new();
+    let mut null_out_writer = NullWriter::default();
     let mut output_buf_writer = std::io::BufWriter::new(&output_file);

     let mut save_file = HashTreeFile::new(
@@ -118,7 +118,7 @@ pub fn run(analysis_settings: AnalysisSettings) -> Result<()> {

     // create thread pool

-    let mut args = Vec::with_capacity(analysis_settings.threads.unwrap_or_else(|| num_cpus::get()));
+    let mut args = Vec::with_capacity(analysis_settings.threads.unwrap_or_else(num_cpus::get));
     for _ in 0..args.capacity() {
         args.push(AnalysisWorkerArgument {
             file_by_path: Arc::clone(&file_by_path),
@@ -131,15 +131,8 @@ pub fn run(analysis_settings: AnalysisSettings) -> Result<()> {
         pool.publish(AnalysisJob::new(Arc::clone(entry)));
     }

-    loop {
-        match pool.receive_timeout(Duration::from_secs(10)) {
-            Ok(result) => {
-                info!("Result: {:?}", result);
-            }
-            Err(_) => {
-                break;
-            }
-        }
+    while let Ok(result) = pool.receive_timeout(Duration::from_secs(10)) {
+        info!("Result: {:?}", result);
     }

     drop(pool);
@@ -157,35 +150,20 @@ pub fn run(analysis_settings: AnalysisSettings) -> Result<()> {
             // check if parent is also conflicting
             let parent = parent.upgrade().unwrap();

-            let parent_hash;
-            match parent.deref() {
-                AnalysisFile::File(info) => {
-                    parent_hash = Some(&info.content_hash);
-                }
-                AnalysisFile::Directory(info) => {
-                    parent_hash = Some(&info.content_hash);
-                }
-                AnalysisFile::Symlink(info) => {
-                    parent_hash = Some(&info.content_hash);
-                }
-                AnalysisFile::Other(_) => {
-                    parent_hash = None;
-                }
-            }
-
-            let parent_conflicting;
-
-            match parent_hash {
-                None => {
-                    parent_conflicting = false;
-                }
-                Some(parent_hash) => {
-                    parent_conflicting = match file_by_hash.get(parent_hash) {
-                        Some(entries) => entries.len() >= 2,
-                        None => false,
-                    }
-                }
-            }
+            let parent_hash = match parent.deref() {
+                AnalysisFile::File(info) => Some(&info.content_hash),
+                AnalysisFile::Directory(info) => Some(&info.content_hash),
+                AnalysisFile::Symlink(info) => Some(&info.content_hash),
+                AnalysisFile::Other(_) => None,
+            };
+
+            let parent_conflicting = match parent_hash {
+                None => false,
+                Some(parent_hash) => match file_by_hash.get(parent_hash) {
+                    Some(entries) => entries.len() >= 2,
+                    None => false,
+                },
+            };

             if !parent_conflicting {
                 duplicated_bytes +=
@@ -244,7 +222,7 @@ fn write_result_entry(
                 ftype: &file.file_type,
                 children: &file.children,
             })
-            .or_insert(Vec::new())
+            .or_default()
             .push(file);
     }
@@ -266,20 +244,20 @@ fn write_result_entry(
         }

         let result = DupSetEntryRef {
-            ftype: &set.0.ftype,
+            ftype: set.0.ftype,
             size: set.0.size,
             hash,
             conflicting,
         };

-        output_buf_writer
+        let _ = output_buf_writer
             .write(serde_json::to_string(&result).unwrap().as_bytes())
             .expect("Unable to write to file");
-        output_buf_writer
+        let _ = output_buf_writer
             .write('\n'.to_string().as_bytes())
             .expect("Unable to write to file");

         result_size += result.size * (result.conflicting.len() as u64 - 1);
     }

-    return result_size;
+    result_size
 }
diff --git a/src/stages/analyze/worker.rs b/src/stages/analyze/worker.rs
index a2c4805..fa2181d 100644
--- a/src/stages/analyze/worker.rs
+++ b/src/stages/analyze/worker.rs
@@ -73,7 +73,7 @@ static JOB_COUNTER: Mutex<usize> = Mutex::new(0);
 fn new_job_counter_id() -> usize {
     let mut counter = JOB_COUNTER.lock().expect("Failed to lock job counter");
     *counter += 1;
-    (*counter).clone()
+    *counter
 }

 /// The result for the analysis worker.
@@ -91,18 +91,15 @@ impl ResultTrait for AnalysisResult {}
 /// # Returns
 /// The parent file and the parent path.
 /// If the parent file is not present, return None.
-fn parent_file<'a, 'b>(
-    file: &'b AnalysisIntermediaryFile,
+fn parent_file<'a>(
+    file: &AnalysisIntermediaryFile,
     arg: &'a AnalysisWorkerArgument,
 ) -> Option<(&'a Arc>>>, FilePath)> {
     match file.saved_file_entry.path.parent() {
         None => None,
         Some(parent_path) => {
             let cache = arg.file_by_path.get(&parent_path).map(|file| &file.file);
-            match cache {
-                None => None,
-                Some(cache) => Some((cache, parent_path)),
-            }
+            cache.map(|cache| (cache, parent_path))
         }
     }
 }
@@ -166,30 +163,21 @@ fn recursive_process_file(id: usize, path: &FilePath, arg: &AnalysisWorkerArgume

     if let Some((result, parent, parent_path)) = attach_parent {
         match add_to_parent_as_child(id, parent, &result) {
-            AddToParentResult::Ok => {
-                return;
-            }
+            AddToParentResult::Ok => {}
             AddToParentResult::ParentDoesNotExist => {
                 // parent does not exist
                 // create it
                 recursive_process_file(id, &parent_path, arg);
                 // try to read to parent again
                 match add_to_parent_as_child(id, parent, &result) {
-                    AddToParentResult::Ok => {
-                        return;
-                    }
+                    AddToParentResult::Ok => {}
                     AddToParentResult::ParentDoesNotExist => {
                         error!("[{}] Parent still does not exist", id);
-                        return;
-                    }
-                    AddToParentResult::Error => {
-                        return;
                     }
+                    AddToParentResult::Error => {}
                 }
             }
-            AddToParentResult::Error => {
-                return;
-            }
+            AddToParentResult::Error => {}
         }
     }
 }
diff --git a/src/stages/build/cmd/cmd.rs b/src/stages/build/cmd/cmd.rs
index 449bc26..7ba4d27 100644
--- a/src/stages/build/cmd/cmd.rs
+++ b/src/stages/build/cmd/cmd.rs
@@ -117,7 +117,7 @@ pub fn run(build_settings: BuildSettings) -> Result<()> {

     // create thread pool

-    let mut args = Vec::with_capacity(build_settings.threads.unwrap_or_else(|| num_cpus::get()));
+    let mut args = Vec::with_capacity(build_settings.threads.unwrap_or_else(num_cpus::get));
     for _ in 0..args.capacity() {
         args.push(WorkerArgument {
             archives: build_settings.into_archives,
@@ -157,5 +157,5 @@ pub fn run(build_settings: BuildSettings) -> Result<()> {
         }
     }

-    return Ok(());
+    Ok(())
 }
diff --git a/src/stages/build/cmd/job.rs b/src/stages/build/cmd/job.rs
index 34a4a0f..839ab93 100644
--- a/src/stages/build/cmd/job.rs
+++ b/src/stages/build/cmd/job.rs
@@ -12,7 +12,7 @@ static JOB_COUNTER: Mutex<usize> = Mutex::new(0);
 fn new_job_counter_id() -> usize {
     let mut counter = JOB_COUNTER.lock().expect("Failed to lock job counter");
     *counter += 1;
-    (*counter).clone()
+    *counter
 }

 /// The state of a build job. Used to track the state of a directory process job.
diff --git a/src/stages/build/cmd/worker.rs b/src/stages/build/cmd/worker.rs
index b4f6f17..8c8e7dd 100644
--- a/src/stages/build/cmd/worker.rs
+++ b/src/stages/build/cmd/worker.rs
@@ -112,17 +112,15 @@ pub fn worker_run(
         Ok(0)
     });

-    let modified;
-
-    let size = metadata.len();
-
-    match modified_result {
-        Ok(time) => modified = time,
+    let modified = match modified_result {
+        Ok(time) => time,
         Err(err) => {
             error!("Error while processing file {:?}: {}", path, err);
-            modified = 0;
+            0
         }
-    }
+    };
+
+    let size = metadata.len();

     if metadata.is_symlink() {
         worker_run_symlink(
@@ -306,9 +304,9 @@ fn worker_publish_result_or_trigger_parent(
 ///
 /// # Returns
 /// The saved data for the file if it exists.
-fn worker_fetch_savedata<'a, 'b>(
+fn worker_fetch_savedata<'a>(
     args: &'a WorkerArgument,
-    path: &'b FilePath,
+    path: &FilePath,
 ) -> Option<&'a HashTreeFileEntry> {
     args.save_file_by_path.get(path)
 }
diff --git a/src/stages/build/cmd/worker/archive.rs b/src/stages/build/cmd/worker/archive.rs
index 829ddab..5b8bdf1 100644
--- a/src/stages/build/cmd/worker/archive.rs
+++ b/src/stages/build/cmd/worker/archive.rs
@@ -51,7 +51,7 @@ struct Context {
 }

 #[allow(dead_code)]
-fn worker_run_entry<'a, R: Read>(entry: ArchiveEntry<'a, R>, context: &mut Context) -> BuildFile {
+fn worker_run_entry<R: Read>(entry: ArchiveEntry<R>, context: &mut Context) -> BuildFile {
     let path = match entry.path() {
         Ok(path) => path,
         Err(err) => {
diff --git a/src/stages/build/cmd/worker/directory.rs b/src/stages/build/cmd/worker/directory.rs
index e2af9e0..2d7a4b1 100644
--- a/src/stages/build/cmd/worker/directory.rs
+++ b/src/stages/build/cmd/worker/directory.rs
@@ -117,37 +117,32 @@ pub fn worker_run_directory(
     error = false;

     // query cache
-    match worker_fetch_savedata(arg, &job.target_path) {
-        Some(found) => {
-            if found.file_type == HashTreeFileEntryType::Directory
-                && found.modified == modified
-                && found.size == finished.len() as u64
-            {
-                if found.children.len() == finished.len()
-                    && found
-                        .children
-                        .iter()
-                        .zip(finished.iter().map(|e| e.get_content_hash()))
-                        .all(|(a, b)| a == b)
-                {
-                    trace!("Directory {:?} is already in save file", path);
-
-                    let mut children = Vec::new();
-                    children.append(finished.deref_mut());
-
-                    let file = BuildFile::Directory(BuildDirectoryInformation {
-                        path: job.target_path.clone(),
-                        modified,
-                        content_hash: found.hash.clone(),
-                        number_of_children: children.len() as u64,
-                        children,
-                    });
-
-                    cached_entry = Some(file);
-                }
-            }
+    if let Some(found) = worker_fetch_savedata(arg, &job.target_path) {
+        if found.file_type == HashTreeFileEntryType::Directory
+            && found.modified == modified
+            && found.size == finished.len() as u64
+            && found.children.len() == finished.len()
+            && found
+                .children
+                .iter()
+                .zip(finished.iter().map(|e| e.get_content_hash()))
+                .all(|(a, b)| a == b)
+        {
+            trace!("Directory {:?} is already in save file", path);
+
+            let mut children = Vec::new();
+            children.append(finished.deref_mut());
+
+            let file = BuildFile::Directory(BuildDirectoryInformation {
+                path: job.target_path.clone(),
+                modified,
+                content_hash: found.hash.clone(),
+                number_of_children: children.len() as u64,
+                children,
+            });
+
+            cached_entry = Some(file);
         }
-        None => {}
     }

     if cached_entry.is_none() {
diff --git a/src/stages/build/cmd/worker/file.rs b/src/stages/build/cmd/worker/file.rs
index 05361cc..cc9b5ad 100644
--- a/src/stages/build/cmd/worker/file.rs
+++ b/src/stages/build/cmd/worker/file.rs
@@ -44,31 +44,28 @@ pub fn worker_run_file(
 ) {
     trace!("[{}] analyzing file {} > {:?}", id, &job.target_path, path);

-    match worker_fetch_savedata(arg, &job.target_path) {
-        Some(found) => {
-            if found.file_type == HashTreeFileEntryType::File
-                && found.modified == modified
-                && found.size == size
-            {
-                trace!("File {:?} is already in save file", path);
-                worker_publish_result_or_trigger_parent(
-                    id,
-                    true,
-                    BuildFile::File(BuildFileInformation {
-                        path: job.target_path.clone(),
-                        modified,
-                        content_hash: found.hash.clone(),
-                        content_size: size,
-                    }),
-                    job,
-                    result_publish,
-                    job_publish,
-                    arg,
-                );
-                return;
-            }
+    if let Some(found) = worker_fetch_savedata(arg, &job.target_path) {
+        if found.file_type == HashTreeFileEntryType::File
+            && found.modified == modified
+            && found.size == size
+        {
+            trace!("File {:?} is already in save file", path);
+            worker_publish_result_or_trigger_parent(
+                id,
+                true,
+                BuildFile::File(BuildFileInformation {
+                    path: job.target_path.clone(),
+                    modified,
+                    content_hash: found.hash.clone(),
+                    content_size: size,
+                }),
+                job,
+                result_publish,
+                job_publish,
+                arg,
+            );
+            return;
         }
-        None => {}
     }

     let mut hasher = match File::open(&path) {
@@ -126,7 +123,7 @@ pub fn worker_run_file(
             Ok(None) => Ok(None),
             Ok(Some((compression_type, archive_type))) => {
                 let stream = compression_type.open(&mut stream);
-                worker_run_archive(stream, &path, archive_type, id, arg).map(|result| Some(result))
+                worker_run_archive(stream, &path, archive_type, id, arg).map(Some)
             }
         }
     };
@@ -148,10 +145,8 @@ pub fn worker_run_file(
         Ok(content) => content,
     };

-    let content_size;
-
     // finalize hashing
-    if arg.hash_type != GeneralHashType::NULL {
+    let content_size = if arg.hash_type != GeneralHashType::NULL {
         match std::io::copy(&mut stream, &mut std::io::sink()) {
             Ok(_) => {}
             Err(err) => {
@@ -169,12 +164,12 @@ pub fn worker_run_file(
             }
         }
         drop(stream);
-        content_size = hasher.bytes_processed();
+        hasher.bytes_processed()
     } else {
-        content_size = fs::metadata(&path)
+        fs::metadata(&path)
             .map(|metadata| metadata.len())
-            .unwrap_or(0);
-    }
+            .unwrap_or(0)
+    };

     let hash = hasher.hash();

@@ -194,7 +189,6 @@ pub fn worker_run_file(
         }),
     };
     worker_publish_result_or_trigger_parent(id, false, file, job, result_publish, job_publish, arg);
-    return;
 }

 /// Check if the file is an archive (potentially compressed).
diff --git a/src/stages/build/cmd/worker/other.rs b/src/stages/build/cmd/worker/other.rs
index 04545dd..4f50014 100644
--- a/src/stages/build/cmd/worker/other.rs
+++ b/src/stages/build/cmd/worker/other.rs
@@ -31,30 +31,27 @@ pub fn worker_run_other(
 ) {
     trace!("[{}] analyzing other {} > {:?}", id, &job.target_path, path);

-    match worker_fetch_savedata(arg, &job.target_path) {
-        Some(found) => {
-            if found.file_type == HashTreeFileEntryType::Other
-                && found.modified == modified
-                && found.size == size
-            {
-                trace!("Other {:?} is already in save file", path);
-                worker_publish_result_or_trigger_parent(
-                    id,
-                    true,
-                    BuildFile::Other(BuildOtherInformation {
-                        path: job.target_path.clone(),
-                        content_size: size,
-                        modified,
-                    }),
-                    job,
-                    result_publish,
-                    job_publish,
-                    arg,
-                );
-                return;
-            }
+    if let Some(found) = worker_fetch_savedata(arg, &job.target_path) {
+        if found.file_type == HashTreeFileEntryType::Other
+            && found.modified == modified
+            && found.size == size
+        {
+            trace!("Other {:?} is already in save file", path);
+            worker_publish_result_or_trigger_parent(
+                id,
+                true,
+                BuildFile::Other(BuildOtherInformation {
+                    path: job.target_path.clone(),
+                    content_size: size,
+                    modified,
+                }),
+                job,
+                result_publish,
+                job_publish,
+                arg,
+            );
+            return;
         }
-        None => {}
     }

     let file = BuildFile::Other(BuildOtherInformation {
diff --git a/src/stages/build/cmd/worker/symlink.rs b/src/stages/build/cmd/worker/symlink.rs
index 2ea7077..3080088 100644
--- a/src/stages/build/cmd/worker/symlink.rs
+++ b/src/stages/build/cmd/worker/symlink.rs
@@ -40,49 +40,46 @@ pub fn worker_run_symlink(
         path
     );

-    match worker_fetch_savedata(arg, &job.target_path) {
-        Some(found) => {
-            if found.file_type == HashTreeFileEntryType::Symlink
-                && found.modified == modified
-                && found.size == size
-            {
-                trace!("Symlink {:?} is already in save file", path);
-                let target_link = fs::read_link(&path);
-                let target_link = match target_link {
-                    Ok(target_link) => target_link,
-                    Err(err) => {
-                        error!("Error while reading symlink {:?}: {}", path, err);
-                        worker_publish_result_or_trigger_parent(
-                            id,
-                            false,
-                            worker_create_error(job.target_path.clone(), modified, size),
-                            job,
-                            result_publish,
-                            job_publish,
-                            arg,
-                        );
-                        return;
-                    }
-                };
-                worker_publish_result_or_trigger_parent(
-                    id,
-                    true,
-                    BuildFile::Symlink(BuildSymlinkInformation {
-                        path: job.target_path.clone(),
-                        modified,
-                        content_hash: found.hash.clone(),
-                        target: target_link,
-                        content_size: size,
-                    }),
-                    job,
-                    result_publish,
-                    job_publish,
-                    arg,
-                );
-                return;
-            }
+    if let Some(found) = worker_fetch_savedata(arg, &job.target_path) {
+        if found.file_type == HashTreeFileEntryType::Symlink
+            && found.modified == modified
+            && found.size == size
+        {
+            trace!("Symlink {:?} is already in save file", path);
+            let target_link = fs::read_link(&path);
+            let target_link = match target_link {
+                Ok(target_link) => target_link,
+                Err(err) => {
+                    error!("Error while reading symlink {:?}: {}", path, err);
+                    worker_publish_result_or_trigger_parent(
+                        id,
+                        false,
+                        worker_create_error(job.target_path.clone(), modified, size),
+                        job,
+                        result_publish,
+                        job_publish,
+                        arg,
+                    );
+                    return;
+                }
+            };
+            worker_publish_result_or_trigger_parent(
+                id,
+                true,
+                BuildFile::Symlink(BuildSymlinkInformation {
+                    path: job.target_path.clone(),
+                    modified,
+                    content_hash: found.hash.clone(),
+                    target: target_link,
+                    content_size: size,
+                }),
+                job,
+                result_publish,
+                job_publish,
+                arg,
+            );
+            return;
         }
-        None => {}
     }

     let target_link = fs::read_link(&path);
diff --git a/src/stages/build/intermediary_build_data.rs b/src/stages/build/intermediary_build_data.rs
index 0a90a6f..f9a9fb9 100644
--- a/src/stages/build/intermediary_build_data.rs
+++ b/src/stages/build/intermediary_build_data.rs
@@ -140,7 +140,7 @@ pub enum BuildFile {
     /// A file that is not a regular file, directory, or symlink, or a file for which permissions are missing.
     Other(BuildOtherInformation), // for unsupported file types like block devices, character devices, etc., or files without permission
     /// A file that is not kept in memory but already saved to disk in the hashtree file.
-    Stub(BuildStubInformation),   // for files that are already analyzed
+    Stub(BuildStubInformation), // for files that are already analyzed
 }

 // ---- IMPLEMENTATION ----
@@ -181,10 +181,7 @@ impl BuildFile {
     /// # Returns
     /// True if this is a directory, false otherwise.
     pub fn is_directory(&self) -> bool {
-        match self {
-            BuildFile::Directory(_) => true,
-            _ => false,
-        }
+        matches!(self, BuildFile::Directory(_))
     }

     /// Returns true if this is a symlink
@@ -192,10 +189,7 @@ impl BuildFile {
     /// # Returns
     /// True if this is a symlink, false otherwise.
     pub fn is_symlink(&self) -> bool {
-        match self {
-            BuildFile::Symlink(_) => true,
-            _ => false,
-        }
+        matches!(self, BuildFile::Symlink(_))
     }

     /// Returns true if this is a file
@@ -203,11 +197,7 @@ impl BuildFile {
     /// # Returns
     /// True if this is a file, false otherwise.
     pub fn is_file(&self) -> bool {
-        match self {
-            BuildFile::File(_) => true,
-            BuildFile::ArchiveFile(_) => true,
-            _ => false,
-        }
+        matches!(self, BuildFile::File(_) | BuildFile::ArchiveFile(_))
     }

     /// Returns true if this is an archive file
@@ -215,10 +205,7 @@ impl BuildFile {
     /// # Returns
     /// True if this is an archive file, false otherwise.
     pub fn is_archive(&self) -> bool {
-        match self {
-            BuildFile::ArchiveFile(_) => true,
-            _ => false,
-        }
+        matches!(self, BuildFile::ArchiveFile(_))
     }

     /// Returns true if this is an "other" file
@@ -226,10 +213,7 @@ impl BuildFile {
     /// # Returns
     /// True if this is an "other" file, false otherwise.
     pub fn is_other(&self) -> bool {
-        match self {
-            BuildFile::Other(_) => true,
-            _ => false,
-        }
+        matches!(self, BuildFile::Other(_))
     }

     /// Returns true if this is a stub file
@@ -237,9 +221,6 @@ impl BuildFile {
     /// # Returns
     /// True if this is a stub file, false otherwise.
     pub fn is_stub(&self) -> bool {
-        match self {
-            BuildFile::Stub(_) => true,
-            _ => false,
-        }
+        matches!(self, BuildFile::Stub(_))
     }
 }
diff --git a/src/stages/build/output/hashtreefile.rs b/src/stages/build/output/hashtreefile.rs
index a8fd7b8..b734126 100644
--- a/src/stages/build/output/hashtreefile.rs
+++ b/src/stages/build/output/hashtreefile.rs
@@ -8,11 +8,11 @@ use anyhow::Result;
 use log::{info, trace, warn};
 use serde::{Deserialize, Serialize};

-pub use HashTreeFileEntryTypeV1 as HashTreeFileEntryType;
-pub use HashTreeFileEntryV1 as HashTreeFileEntry;
 use crate::hash::{GeneralHash, GeneralHashType};
 use crate::path::FilePath;
 use crate::utils;
+pub use HashTreeFileEntryTypeV1 as HashTreeFileEntryType;
+pub use HashTreeFileEntryV1 as HashTreeFileEntry;

 /// The current version of the hash tree file.
 pub type HashTreeFileEntryRef<'a> = HashTreeFileEntryV1Ref<'a>;
@@ -283,7 +283,7 @@ impl<'a, W: Write, R: BufRead> HashTreeFile<'a, W, R> {
         if self.enable_file_by_hash {
             self.file_by_hash
                 .entry(shared_entry.hash.clone())
-                .or_insert_with(Vec::new)
+                .or_default()
                 .push(Arc::clone(&shared_entry));
         }

@@ -320,7 +320,7 @@ impl<'a, W: Write, R: BufRead> HashTreeFile<'a, W, R> {
     /// # Error
     /// If reading from the file errors
     pub fn load_all_entries<F: Fn(&HashTreeFileEntry) -> bool>(&mut self, filter: F) -> Result<()> {
-        while let Some(_) = self.load_entry(&filter)? {}
+        while (self.load_entry(&filter)?).is_some() {}

         Ok(())
     }
diff --git a/src/utils.rs b/src/utils.rs
index 6b96159..e36fac6 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -50,7 +50,7 @@ impl LexicalAbsolute for PathBuf {
                 std::path::Component::ParentDir => {
                     absolute.pop();
                 }
-                component @ _ => absolute.push(component.as_os_str()),
+                component => absolute.push(component.as_os_str()),
             }
         }
         Ok(absolute)
@@ -101,18 +101,9 @@ pub fn get_time() -> u64 {
 /// let mut writer = backup_deduplicator::utils::NullWriter::new();
 /// writer.write(b"Hello, world!").unwrap();
 /// ```
+#[derive(Default)]
 pub struct NullWriter {}

-impl NullWriter {
-    /// Create a new NullWriter.
-    ///
-    /// # Returns
-    /// A new NullWriter.
-    pub fn new() -> Self {
-        NullWriter {}
-    }
-}
-
 impl Write for NullWriter {
     /// Discard all data.
     ///
@@ -147,18 +138,9 @@ impl Write for NullWriter {
 /// let mut buf = [0; 10];
 /// assert_eq!(reader.read(&mut buf).unwrap(), 0);
 /// ```
+#[derive(Default)]
 pub struct NullReader {}

-impl NullReader {
-    /// Create a new NullReader.
-    ///
-    /// # Returns
-    /// A new NullReader.
-    pub fn new() -> Self {
-        NullReader {}
-    }
-}
-
 impl std::io::Read for NullReader {
     /// Does not provide any data.
     ///
@@ -186,13 +168,13 @@ impl std::io::Seek for NullReader {

 /// Container that calls a function when value is dropped.
 #[deprecated]
-pub struct DestroyContainer<T, F: FnOnce() -> ()> {
+pub struct DestroyContainer<T, F: FnOnce()> {
     inner: T,
     destroy_func: Option<F>,
 }

 #[allow(deprecated)]
-impl<T, F: FnOnce() -> ()> DestroyContainer<T, F> {
+impl<T, F: FnOnce()> DestroyContainer<T, F> {
     /// Create a new [DestroyContainer]. The destroy function is called when `this`
     /// instance is dropped.
     ///
@@ -211,14 +193,14 @@ impl<T, F: FnOnce() -> ()> DestroyContainer<T, F> {
 }

 #[allow(deprecated)]
-impl<T, F: FnOnce() -> ()> Drop for DestroyContainer<T, F> {
+impl<T, F: FnOnce()> Drop for DestroyContainer<T, F> {
     fn drop(&mut self) {
-        self.destroy_func.take().map(|f| f());
+        if let Some(f) = self.destroy_func.take() { f() }
     }
 }

 #[allow(deprecated)]
-impl<T, F: FnOnce() -> ()> Deref for DestroyContainer<T, F> {
+impl<T, F: FnOnce()> Deref for DestroyContainer<T, F> {
     type Target = T;

     fn deref(&self) -> &Self::Target {
@@ -227,7 +209,7 @@ impl<T, F: FnOnce() -> ()> Deref for DestroyContainer<T, F> {
     }
 }

 #[allow(deprecated)]
-impl<T: Read, F: FnOnce() -> ()> Read for DestroyContainer<T, F> {
+impl<T: Read, F: FnOnce()> Read for DestroyContainer<T, F> {
     fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
         self.inner.read(buf)
     }
@@ -314,13 +296,11 @@ pub mod main {

         let path = path.to_path_buf();

-        let path = match kind {
+        match kind {
             ParsePathKind::Direct => path,
             ParsePathKind::AbsoluteExisting => to_lexical_absolute(path, true),
             ParsePathKind::AbsoluteNonExisting => to_lexical_absolute(path, false),
-        };
-
-        path
+        }
     }

     /// Convert a path to a absolute path.
@@ -340,14 +320,12 @@ pub mod main {
             false => path.to_lexical_absolute(),
         };

-        let path = match path {
+        match path {
             Ok(out) => out,
             Err(e) => {
                 eprintln!("IO error, could not resolve output file: {:?}", e);
                 std::process::exit(exitcode::CONFIG);
             }
-        };
-
-        path
+        }
     }
 }