diff options
author | Jon Moroney <darakian@gmail.com> | 2020-10-09 14:30:40 -0700 |
---|---|---|
committer | Jon Moroney <darakian@gmail.com> | 2020-10-09 14:30:40 -0700 |
commit | 4c1a7a31181de654842db6071fe5c375b865b6af (patch) | |
tree | eb8ab8c4c2cca895b78cf6baaabc248890f3bbc1 | |
parent | c9abf2bca4f1944e8e7c2699537b4187ec1acdee (diff) |
Resolve issues raised by clippy (branch: clippy-cleanup)
-rw-r--r-- | src/fileinfo.rs | 22 | ||||
-rw-r--r-- | src/lib.rs | 13 | ||||
-rw-r--r-- | src/main.rs | 14 |
3 files changed, 22 insertions, 27 deletions
diff --git a/src/fileinfo.rs b/src/fileinfo.rs index 3b24823..3ca67bb 100644 --- a/src/fileinfo.rs +++ b/src/fileinfo.rs @@ -42,8 +42,8 @@ impl Fileinfo{ /// Ok(()) /// } /// ``` - pub fn new(full_hash: Option<u128>, partial_hash: Option<u128>, meta: Metadata, path: PathBuf) -> Self{ - Fileinfo{full_hash: full_hash, partial_hash: partial_hash, metadata: meta, file_paths: vec![path]} + pub fn new(full: Option<u128>, partial: Option<u128>, meta: Metadata, path: PathBuf) -> Self{ + Fileinfo{full_hash: full, partial_hash: partial, metadata: meta, file_paths: vec![path]} } /// Gets the length of the files in the current collection. /// @@ -81,7 +81,7 @@ impl Fileinfo{ pub fn get_full_hash(&self) -> Option<u128>{ self.full_hash } - pub(crate) fn set_full_hash(&mut self, hash: Option<u128>) -> (){ + pub(crate) fn set_full_hash(&mut self, hash: Option<u128>) { self.full_hash = hash } /// Gets the hash of the partially read file if available. @@ -102,7 +102,7 @@ impl Fileinfo{ pub fn get_partial_hash(&self) -> Option<u128>{ self.partial_hash } - pub(crate) fn set_partial_hash(&mut self, hash: Option<u128>) -> (){ + pub(crate) fn set_partial_hash(&mut self, hash: Option<u128>) { self.partial_hash = hash } /// Gets a candidate name. This will be the name of the first file inserted into the collection and so can vary. 
@@ -122,12 +122,11 @@ impl Fileinfo{ /// ``` pub fn get_candidate_name(&self) -> &str{ self.file_paths - .iter() - .next() + .get(0) .unwrap() .to_str() .unwrap() - .rsplit("/") + .rsplit('/') .next() .unwrap() } @@ -148,15 +147,14 @@ impl Fileinfo{ /// } /// ``` pub fn get_paths(&self) -> &Vec<PathBuf>{ - return &self.file_paths + &self.file_paths } pub fn generate_hash(&mut self, mode: HashMode) -> Option<u128>{ let mut hasher = siphasher::sip128::SipHasher::new(); match fs::File::open( self.file_paths - .iter() - .next() + .get(0) .expect("Cannot read file path from struct") ) { Ok(mut f) => { @@ -178,10 +176,10 @@ impl Fileinfo{ return Some(hasher.finish128().into()); } } - return Some(hasher.finish128().into()); + Some(hasher.finish128().into()) } Err(_e) => { - return None + None } } } @@ -52,7 +52,7 @@ pub fn deduplicate_dirs<P: AsRef<Path> + Sync>(search_dirs: Vec<P>) -> (Vec<File (complete_files, errors) } -fn traverse_and_spawn(current_path: impl AsRef<Path>, sender: Sender<ChannelPackage>) -> (){ +fn traverse_and_spawn(current_path: impl AsRef<Path>, sender: Sender<ChannelPackage>) { let current_path_metadata = match fs::symlink_metadata(¤t_path) { Err(e) =>{ sender.send( @@ -78,7 +78,7 @@ fn traverse_and_spawn(current_path: impl AsRef<Path>, sender: Sender<ChannelPack None, None, meta, - current_path.to_path_buf() + current_path )) ).expect("Error sending new ChannelPackage::Success"); }, @@ -103,7 +103,7 @@ fn traverse_and_spawn(current_path: impl AsRef<Path>, sender: Sender<ChannelPack }, Err(e) => { sender.send( - ChannelPackage::Fail(current_path.to_path_buf(), e) + ChannelPackage::Fail(current_path, e) ).expect("Error sending new ChannelPackage::Fail"); }, } @@ -113,12 +113,9 @@ fn traverse_and_spawn(current_path: impl AsRef<Path>, sender: Sender<ChannelPack } fn differentiate_and_consolidate(file_length: u64, mut files: Vec<Fileinfo>) -> Vec<Fileinfo>{ - if file_length==0{ + if file_length==0 || files.is_empty(){ return files } - if 
files.len()<=0{ - panic!("Invalid length vector"); - } match files.len(){ 1 => return files, n if n>1 => { @@ -170,6 +167,6 @@ fn dedupe(mut files: Vec<Fileinfo>) -> Vec<Fileinfo>{ } } }); - files.retain(|x| x.get_paths().len()>0); + files.retain(|x| !x.get_paths().is_empty()); files } diff --git a/src/main.rs b/src/main.rs index 3fcbfc6..4f58962 100644 --- a/src/main.rs +++ b/src/main.rs @@ -72,7 +72,7 @@ fn main() { process_full_output(&shared_files, &unique_files, &complete_files, &read_errors, &arguments); } -fn process_full_output(shared_files: &Vec<&Fileinfo>, unique_files: &Vec<&Fileinfo>, complete_files: &Vec<Fileinfo>, error_paths: &Vec<(PathBuf, std::io::Error)>, arguments: &clap::ArgMatches) ->(){ +fn process_full_output(shared_files: &[&Fileinfo], unique_files: &[&Fileinfo], complete_files: &[Fileinfo], error_paths: &[(PathBuf, std::io::Error)], arguments: &clap::ArgMatches) { let blocksize = match arguments.value_of("Blocksize").unwrap_or(""){"B" => "Bytes", "K" => "Kilobytes", "M" => "Megabytes", "G" => "Gigabytes", _ => "Megabytes"}; let display_power = match blocksize{"Bytes" => 0, "Kilobytes" => 1, "Megabytes" => 2, "Gigabytes" => 3, _ => 2}; let display_divisor = 1024u64.pow(display_power); @@ -126,10 +126,10 @@ fn process_full_output(shared_files: &Vec<&Fileinfo>, unique_files: &Vec<&Filein }) }, (PrintFmt::Json, Verbosity::Duplicates) => { - println!("{}", serde_json::to_string(shared_files).unwrap_or("".to_string())); + println!("{}", serde_json::to_string(shared_files).unwrap_or_else(|_| "".to_string())); }, (PrintFmt::Json, Verbosity::All) => { - println!("{}", serde_json::to_string(complete_files).unwrap_or("".to_string())); + println!("{}", serde_json::to_string(complete_files).unwrap_or_else(|_| "".to_string())); }, _ => {}, } @@ -169,12 +169,12 @@ fn process_full_output(shared_files: &Vec<&Fileinfo>, unique_files: &Vec<&Filein } } -fn write_results_to_file(fmt: PrintFmt, shared_files: &Vec<&Fileinfo>, unique_files: &Vec<&Fileinfo>, 
complete_files: &Vec<Fileinfo>, file: &str) { +fn write_results_to_file(fmt: PrintFmt, shared_files: &[&Fileinfo], unique_files: &[&Fileinfo], complete_files: &[Fileinfo], file: &str) { let mut output = fs::File::create(file).expect("Error opening output file for writing"); match fmt { PrintFmt::Standard => { output.write_fmt(format_args!("Duplicates:\n")).unwrap(); - for file in shared_files.into_iter(){ + for file in shared_files.iter(){ let title = file.get_candidate_name(); output.write_fmt(format_args!("{}\n", title)).unwrap(); for entry in file.get_paths().iter(){ @@ -182,7 +182,7 @@ fn write_results_to_file(fmt: PrintFmt, shared_files: &Vec<&Fileinfo>, unique_fi } } output.write_fmt(format_args!("Singletons:\n")).unwrap(); - for file in unique_files.into_iter(){ + for file in unique_files.iter(){ let title = file.get_candidate_name(); output.write_fmt(format_args!("{}\n", title)).unwrap(); for entry in file.get_paths().iter(){ @@ -191,7 +191,7 @@ fn write_results_to_file(fmt: PrintFmt, shared_files: &Vec<&Fileinfo>, unique_fi } }, PrintFmt::Json => { - output.write_fmt(format_args!("{}", serde_json::to_string(complete_files).unwrap_or("Error deserializing".to_string()))).unwrap(); + output.write_fmt(format_args!("{}", serde_json::to_string(complete_files).unwrap_or_else(|_| "Error deserializing".to_string()))).unwrap(); }, PrintFmt::Off =>{return}, } |