diff --git a/.gitignore b/.gitignore
index 2649b43..1a09611 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
 /target
 *~
 test-inpu*
+.rmdupe
diff --git a/src/arg.rs b/src/arg.rs
index c5ddd19..4a9be9b 100644
--- a/src/arg.rs
+++ b/src/arg.rs
@@ -39,6 +39,8 @@ pub fn usage() -> !
     println!(" --\t\t\tStop reading args");
     println!("Other:");
     println!(" --help -h:\t\tPrint this message");
+    #[cfg(feature="threads")]
+    println!("Compiled with threading support");
 
     std::process::exit(1)
 }
diff --git a/src/bytes.rs b/src/bytes.rs
index 63249e1..4d8335a 100644
--- a/src/bytes.rs
+++ b/src/bytes.rs
@@ -1,4 +1,3 @@
-use super::*;
 
 pub fn copy_slice<T, D, S>(mut dst: D, src: S) -> usize
 where S: AsRef<[T]>,
diff --git a/src/config.rs b/src/config.rs
index 429cc6c..8f7fca0 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -1,7 +1,4 @@
 use super::*;
-use std::{
-
-};
 
 #[derive(Debug, Clone)]
 pub enum RecursionMode
diff --git a/src/container.rs b/src/container.rs
index 7c74b85..2181a29 100644
--- a/src/container.rs
+++ b/src/container.rs
@@ -8,7 +8,7 @@ use std::{
     },
 };
 
-#[derive(Clone, PartialEq, Eq)]
+#[derive(Clone, PartialEq, Eq, Debug)]
 pub struct DupeMap(HashSet);
 
 impl DupeMap
diff --git a/src/ext.rs b/src/ext.rs
index 6278b00..06f3ad4 100644
--- a/src/ext.rs
+++ b/src/ext.rs
@@ -1,7 +1,7 @@
 use super::*;
 use std::{
     hash::Hash,
-    collections::{HashSet, HashMap},
+    collections::HashMap,
     mem,
     fmt,
 };
@@ -40,23 +40,25 @@ where T: Hash + Default + Eq
 }
 
 pub trait ErrorLogForgetExt<T, E>: Sized
 {
-    fn log_and_forget(self, mode: &log::Mode, level: log::Level) -> Option<T>;
+    fn log_and_forget(self, mode: &log::Mode, level: log::Level) -> Result<Option<T>, E>;
 }
 
 impl<T, E> ErrorLogForgetExt<T, E> for Result<T, E>
 where E: fmt::Display,
 {
-    fn log_and_forget(self, mode: &log::Mode, level: log::Level) -> Option<T>
+    fn log_and_forget(self, mode: &log::Mode, level: log::Level) -> Result<Option<T>, E>
     {
-        match self {
+        Ok(match self {
             Err(e) => {
                 log!(mode.level(level), "{}", &e);
                 if let log::Level::Fatal = level {
                     std::process::exit(1);
+                } else if log::Level::Error == level {
+                    return Err(e);
                 }
                 None
             },
             Ok(v) => Some(v),
-        }
+        })
     }
 }
 
 pub trait ErrorLogExt: Sized
diff --git a/src/log.rs b/src/log.rs
index 2b617f9..e5d8972 100644
--- a/src/log.rs
+++ b/src/log.rs
@@ -1,4 +1,3 @@
-use super::*;
 use std::{
     fmt,
 };
@@ -94,12 +93,23 @@ impl fmt::Display for Level
             Level::Warning => write!(f, "WARN"),
             Level::Error => write!(f, "ERROR"),
             Level::Fatal => write!(f, "FATAL"),
+            #[allow(unreachable_patterns)]
             _ => write!(f, "(unbound)"),
         }
     }
 }
 
 macro_rules! log {
+    ($level:tt, $mode:expr => $format:expr) => {
+        {
+            if let Some(level) = $mode.level($crate::log::Level::$level) {
+                println!("{} [{}]: {}", $crate::log::timestamp(), level, $format);
+                true
+            } else {
+                false
+            };
+        }
+    };
     ($level:tt, $mode:expr => $format:expr, $($rest:expr),*) => {
         {
             if let Some(level) = $mode.level($crate::log::Level::$level) {
diff --git a/src/main.rs b/src/main.rs
index c0694d7..b80f9eb 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -88,10 +88,13 @@ async fn main() -> Result<(), Box<dyn std::error::Error>>
     fs::{
         OpenOptions,
     },
-    sync,
+    sync::{
+        Mutex
+    },
 };
 use std::{
     path::Path,
+    sync::Arc,
 };
 let args = parse_args().into_string()?;
 let lmode = &args.mode.logging_mode;
@@ -110,9 +113,10 @@ async fn main() -> Result<(), Box<dyn std::error::Error>>
         if load.is_file() {
             if let Some(mut file) = OpenOptions::new()
                 .read(true)
-                .open(load).await.log_and_forget(lmode, log::Level::Warning)
+                .open(load).await.log_and_forget(lmode, log::Level::Warning)?
             {
-
+                log!(Info, lmode => "Hashes loading from {:?}", load);
+                args.mode.error_mode.handle(hashes.load_async(&mut file).await).log_and_forget(lmode, log::Level::Warning)?;
             }
         } else {
             log!(Warning, lmode => "Exclusing directory from load path {:?}", load);
@@ -121,25 +125,123 @@ async fn main() -> Result<(), Box<dyn std::error::Error>>
         log!(Info, lmode => "Ignoring non-existant load path {:?}", load);
     }
 }
-
+
+    log!(Debug, lmode => "Loaded hashes: {:?}", hashes);
+    log!(Info, lmode => "Starting checks (threaded)");
+    let hashes = Arc::new(Mutex::new(hashes));
     for path in args.paths.iter()
     {
         let path = Path::new(path);
         if path.is_dir() {
+            log!(Debug, lmode => "Spawning for {:?}", path);
+            let mode = args.mode.clone();
+            let path = path.to_owned();
+            let hashes = Arc::clone(&hashes);
             children.push(tokio::task::spawn(async move {
-                //proc::do_dir_async()
+                log!(Debug, mode.logging_mode => " + {:?}", path);
+                let res = mode.error_mode.handle(proc::do_dir_async(path.clone(), 0, hashes, mode.clone()).await).log_and_forget(&mode.logging_mode, log::Level::Error);
+                log!(Info, mode.logging_mode => " - {:?}", path);
+                res
             }));
         }
     }
+    log!(Info, lmode => "Waiting on children");
+    let mut done = proc::DupeCount::default();
+    for child in children.into_iter()
+    {
+        done += args.mode.error_mode.handle(child.await?)?.unwrap_or_default().unwrap_or_default().unwrap_or_default();
+    }
+    log!(Info, lmode => "Found: {:?}", done);
+
+    let hashes = hashes.lock().await;
+    log!(Debug, lmode => "New hashes: {:?}", hashes);
+
+    for save in args.save.iter()
+    {
+        let save = Path::new(save);
+        log!(Info, lmode => "Saving hashes to {:?}", save);
+        if let Some(mut file) = OpenOptions::new()
+            .create(true)
+            //.append(true)
+            .truncate(true)
+            .write(true)
+            .open(save).await.log_and_forget(lmode, log::Level::Warning)?
+        {
+            args.mode.error_mode.handle(hashes.save_async(&mut file).await).log_and_forget(lmode, log::Level::Warning)?;
+        }
+    }
 
     Ok(())
 }
 
 #[cfg(not(feature="threads"))]
-fn main() -> Result<(), error::Error>
+fn main() -> Result<(), Box<dyn std::error::Error>>
 {
-    let args = parse_args()?;
+    use std::{
+        path::Path,
+        fs::{
+            OpenOptions,
+        },
+    };
+    let args = parse_args().into_string()?;
+    let lmode = &args.mode.logging_mode;
+
+    log!(Debug, lmode => "Args parsed: {:?}", args);
+
+    let mut hashes = container::DupeMap::new();
+
+    // Load hashes
+    for load in args.load.iter()
+    {
+        let load = Path::new(load);
+        if load.exists() {
+            if load.is_file() {
+                if let Some(mut file) = OpenOptions::new()
+                    .read(true)
+                    .open(load).log_and_forget(lmode, log::Level::Warning)?
+                {
+                    log!(Info, lmode => "Hashes loading from {:?}", load);
+                    args.mode.error_mode.handle(hashes.load(&mut file)).log_and_forget(lmode, log::Level::Warning)?;
+                }
+            } else {
+                log!(Warning, lmode => "Exclusing directory from load path {:?}", load);
+            }
+        } else {
+            log!(Info, lmode => "Ignoring non-existant load path {:?}", load);
+        }
+    }
+
+    log!(Debug, lmode => "Loaded hashes: {:?}", hashes);
+    log!(Info, lmode => "Starting checks (threaded)");
+
+    let mut done = proc::DupeCount::default();
+    for path in args.paths.iter()
+    {
+        let path = Path::new(path);
+        if path.is_dir() {
+            log!(Debug, lmode => " + {:?}", path);
+            done += args.mode.error_mode.handle(proc::do_dir(path.clone(), 0, &mut hashes, &args.mode)).log_and_forget(lmode, log::Level::Error)?.unwrap_or_default().unwrap_or_default();
+            log!(Info, lmode => " - {:?}", path);
+        }
+    }
+
+    log!(Info, lmode => "Found: {:?}", done);
+    log!(Debug, lmode => "New hashes: {:?}", hashes);
+
+    for save in args.save.iter()
+    {
+        let save = Path::new(save);
+        log!(Info, lmode => "Saving hashes to {:?}", save);
+        if let Some(mut file) = OpenOptions::new()
+            .create(true)
+            //.append(true)
+            .truncate(true)
+            .write(true)
+            .open(save).log_and_forget(lmode, log::Level::Warning)?
+        {
+            args.mode.error_mode.handle(hashes.save(&mut file)).log_and_forget(lmode, log::Level::Warning)?;
+        }
+    }
 
-    log!(Fatal, log::Mode::Error => "{:?}", args);
     Ok(())
 }
diff --git a/src/proc.rs b/src/proc.rs
index 160e66f..2069d08 100644
--- a/src/proc.rs
+++ b/src/proc.rs
@@ -1,8 +1,5 @@
 use super::*;
 use std::{
-    io::{
-        self, Read,
-    },
     path::{
         Path
     },
@@ -19,20 +16,35 @@ use std::{
 };
 
 /// Handle a detected dupe
-fn handle_dupe<P>(path: P, _mode: &config::Mode) -> Result<(), error::Error>
+fn handle_dupe<P>(path: P, mode: &config::Mode) -> Result<(), error::Error>
 where P: AsRef<Path>
 {
-    println!(" -> {:?}", path.as_ref());
+    log!(Info, mode.logging_mode => " -> {:?}", path.as_ref());
+    match mode.operation_mode
+    {
+        config::OperationMode::Delete => {
+            mode.error_mode.handle(std::fs::remove_file(path.as_ref()))?;
+        },
+        _ => (),
+    }
     Ok(())
 }
 
 /// Handle a detected dupe async
 #[inline(always)]
-#[cfg(feature="threads")]
+ #[cfg(feature="threads")]
 async fn handle_dupe_async<P>(path: P, mode: &config::Mode) -> Result<(), error::Error>
 where P: AsRef<Path>
 {
-    handle_dupe(path, mode)
+    log!(Info, mode.logging_mode => " -> {:?}", path.as_ref());
+    match mode.operation_mode
+    {
+        config::OperationMode::Delete => {
+            mode.error_mode.handle(tokio::fs::remove_file(path.as_ref()).await)?;
+        },
+        _ => (),
+    }
+    Ok(())
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -103,7 +115,6 @@ pub fn process_file<P: AsRef<Path>>(file: P, set: &mut container::DupeMap) -> Re
 pub async fn process_file_async<P: AsRef<Path>>(file: P, set: &std::sync::Arc<tokio::sync::Mutex<container::DupeMap>>) -> Result<bool, error::Error>
 {
     use tokio::{
-        prelude::*,
         fs::{
             OpenOptions,
         },
@@ -141,6 +152,7 @@ pub fn do_dir<P: AsRef<Path>>(dir: P, depth: usize, set: &mut container::DupeMap
             count += mode.handle(do_dir(obj, depth+1, set, cmode))?.unwrap_or_default();
         } else {
             count += if mode.handle(process_file(&obj, set))?.unwrap_or_default() {
+                log!(Info, cmode.logging_mode => "OK {:?}", obj);
                 DupeCount{total: 1, dupes: 0}
             } else {
                 mode.handle(handle_dupe(obj, &cmode))?;
@@ -183,6 +195,7 @@ pub fn do_dir_async<P: AsRef<Path> + std::marker::Send + std::marker::Sync + 'st
             let cmode = cmode.clone();
             let mode = mode.clone();
             children.push(tokio::task::spawn(async move {
+                log!(Info, cmode.logging_mode => "OK {:?}", obj);
                 match mode.handle(do_dir_async(obj, depth+1, set, cmode).await) {
                     Ok(v) => Ok(v.unwrap_or_default()),
                     Err(v) => Err(v),
@@ -196,6 +209,7 @@ pub fn do_dir_async<P: AsRef<Path> + std::marker::Send + std::marker::Sync + 'st
                 match mode.handle(process_file_async(&obj, &set).await) {
                     Ok(v) => {
                         if v.unwrap_or_default() {
+                            log!(Info, cmode.logging_mode => "OK {:?}", obj);
                             Ok(true)
                         } else {
                             if let Err(e) = mode.handle(handle_dupe_async(obj, &cmode).await) {