Added argument parser

Fortune for enumerate-ordered's current commit: Blessing − 吉
Branch: master
Author: Avril, 1 year ago
parent e5c29eb368
commit dd5c20282b
Signed by: flanchan
GPG Key ID: 284488987C31F630

@@ -21,8 +21,9 @@ use futures::{
#[derive(Debug, Default, Clone)]
pub struct Args
{
- walker: walk::Config,
- worker: work::Config,
+ pub reverse: bool,
+ pub walker: walk::Config,
+ pub worker: work::Config,
paths: Option<Vec<PathBuf>>,
}
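To illustrate what this change enables: with `reverse` added and the sub-configs made `pub`, a caller can tweak the parsed configuration in place. A minimal, self-contained sketch (the `walk::Config`/`work::Config` stubs below are hypothetical stand-ins, not the crate's real definitions):

    // Hypothetical stand-ins for the crate's real config types.
    mod walk { #[derive(Debug, Default, Clone)] pub struct Config { pub recursion_depth: usize } }
    mod work { #[derive(Debug, Default, Clone)] pub struct Config; }

    #[derive(Debug, Default, Clone)]
    pub struct Args {
        pub reverse: bool,
        pub walker: walk::Config,
        pub worker: work::Config,
        paths: Option<Vec<std::path::PathBuf>>,
    }

    fn main() {
        // With the fields now `pub`, a caller can adjust them directly.
        let mut args = Args::default();
        args.reverse = true;
        args.walker.recursion_depth = 2;
        println!("{args:?}");
    }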
@@ -258,6 +259,15 @@ where I: Iterator<Item = String>
};
}
// Modes //
+ // --help
+ if input == Arg::Long("help") {
+ return Ok(Some(Mode::Help));
+ }
// Normal //
// -r, --recursive <limit>
if input.is_any(args![b'r', "recursive"]) {
output.walker.recursion_depth = if input.is_long() {
@@ -273,16 +283,18 @@ where I: Iterator<Item = String>
};
}
// -a, -m, -c, --{a,m,c}time
if input.is_any(args![b'a', "atime"]) {
output.worker.by = work::OrderBy::AccessTime;
- }
- if input.is_any(args![b'c', "ctime"]) {
+ } else if input.is_any(args![b'c', "ctime"]) {
output.worker.by = work::OrderBy::CreationTime;
- }
- if input.is_any(args![b'm', "mtime"]) {
+ } else if input.is_any(args![b'm', "mtime"]) {
output.worker.by = work::OrderBy::ModifiedTime;
}
+ // -n, --reverse
+ output.reverse = input.is_any(args![b'n', "reverse"]);
// -P, -p, --parallel cpus|<max>
// -1
if input.is_any(args![b'P', b'p', "parallel"]) {

@@ -59,7 +59,7 @@ where I: futures::stream::Stream<Item = P>,
fn print_help<W: ?Sized>(to: &mut W) -> std::io::Result<()>
- where W: std::io::Write,
+ where W: std::io::Write,
{
let execp = args::prog_name();
writeln!(to, "{} v{} - {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"), env!("CARGO_PKG_DESCRIPTION"))?;
@@ -74,8 +74,8 @@ fn print_help<W: ?Sized>(to: &mut W) -> std::io::Result<()>
macro_rules! write_opt {
($($name:literal),+ => $explain:literal $(, $format:expr)*) => {
{
- let names = [$($name),+].into_iter().fold(String::default(), |prev, n| format!("{prev}, {n}"));
- writeln!(to, concat!(" {}\t\t", $explain), names $(, $format)*)
+ let names = [$($name),+].into_iter().fold(String::default(), |prev, n| if prev.is_empty() { n.to_string() } else { format!("{prev}, {n}") });
+ writeln!(to, concat!(" {}\t", $explain), names $(, $format)*)
}
};
}
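The fold change here fixes the separator handling in the option list: the old accumulator prepended ", " unconditionally, so the first name rendered with a stray leading separator; the new closure only inserts the separator once the accumulator is non-empty (and the format string drops one tab to compensate for the longer names). A standalone sketch of the two fold strategies, separate from the crate's macro:

    fn main() {
        let names = ["-a", "--atime"];
        // Old fold: always prepends the separator, so the first entry gains a stray ", ".
        let old = names.into_iter()
            .fold(String::default(), |prev, n| format!("{prev}, {n}"));
        assert_eq!(old, ", -a, --atime");
        // New fold: only insert the separator once the accumulator is non-empty.
        let new = names.into_iter()
            .fold(String::default(), |prev, n| {
                if prev.is_empty() { n.to_string() } else { format!("{prev}, {n}") }
            });
        assert_eq!(new, "-a, --atime");
        println!("{old:?} vs {new:?}");
    }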
@@ -84,6 +84,7 @@ fn print_help<W: ?Sized>(to: &mut W) -> std::io::Result<()>
write_opt!("-a", "--atime" => "Sort by atime")?;
write_opt!("-c", "--ctime" => "Sort by ctime (default)")?;
write_opt!("-m", "--mtime" => "Sort by mtime")?;
+ write_opt!("-n", "--reverse" => "Print output in reverse")?;
write_opt!("-p", "--parallel cpus|<max tasks>" => "Run tasks in parallel, with a max number of tasks being equal `<max tasks>`, or, if 0, to infinity (see `-P`), if 'cpus', to the number of logical CPU cores ({}, default)", *walk::NUM_CPUS)?;
write_opt!("-P", "--parallel 0" => "Run tasks with unbounded parallelism, no limit to the number of walker tasks running at once (note: the physical thread pool will always be the same size regardless of these flags)")?;
write_opt!("-1", "--parallel 1" => "Only let one directory be processed at once")?;
@@ -123,24 +124,24 @@ async fn main() -> eyre::Result<()> {
debug!("Parsed args: {:?}", args);
let worker_cfg = {
- //TODO: Read worker config from main config
- std::sync::Arc::new(work::Config::default())
+ //Read worker config from main config
+ std::sync::Arc::new(args.worker.clone())
};
let walker_cfg = {
- //TODO: Read walker config from main config
- walk::Config::default()
+ //Read walker config from main config
+ args.walker.clone()
};
// Spin up ordering task.
let ordering = {
let cfg = (*worker_cfg).clone();
tokio::spawn(async move {
- trace!("spun up ordering backing thread");
+ trace!("spun up ordering backing thread with config: {:?}", &cfg);
work_on(cfg, rx).await //TODO: Parse config from args
})
};
- trace!("Starting recursive walk of input locations");
+ trace!("Starting recursive walk of input locations with config: {:?}", &walker_cfg);
//TODO: Trace directory trees from paths in `args` and/or `stdin` and pass results to `tx`
let walk = walk_paths(args.paths(), walker_cfg, &worker_cfg, tx);
tokio::pin!(walk);
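This hunk threads the parsed `args.worker`/`args.walker` configs into the tasks in place of `Config::default()`: the worker config is wrapped in an `Arc`, a clone of the inner value is moved into the spawned ordering task, and the `Arc` stays available for the walk. A minimal sketch of that ownership pattern, assuming the `tokio` crate (with the `macros`/`rt` features) and a hypothetical stand-in `Config` type:

    use std::sync::Arc;

    #[derive(Debug, Clone, Default)]
    struct Config { parallel: usize } // hypothetical stand-in for work::Config

    #[tokio::main]
    async fn main() {
        let worker_cfg = Arc::new(Config { parallel: 4 });
        // Clone the inner value so the spawned task owns its own copy,
        // while the Arc remains usable on the main task.
        let cfg = (*worker_cfg).clone();
        let ordering = tokio::spawn(async move {
            println!("ordering task sees config: {cfg:?}");
        });
        ordering.await.expect("ordering task panicked");
        println!("main still holds: {worker_cfg:?}");
    }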
@@ -175,15 +176,29 @@ async fn main() -> eyre::Result<()> {
let lock = std::io::stdout().lock();
std::io::BufWriter::new(lock)
};
- trace!("Writing ordered results to stdout... (buffered, sync)");
- for info in set.into_iter()
+ trace!("Writing ordered results to stdout... (buffered, sync, rev: {})", args.reverse);
+ #[inline]
+ fn operate_on<W: ?Sized, I>(stdout: &mut W, set: I) -> eyre::Result<()>
+ where W: Write,
+ I: IntoIterator<Item = work::FileInfo> + ExactSizeIterator + DoubleEndedIterator + std::iter::FusedIterator + 'static
{
- stdout.write_all(info.path().as_os_str().as_bytes())
- .and_then(|_| stdout.write_all(&[b'\n']))
- .wrap_err("Failed to write raw pathname for entry to stdout")
- .with_context(|| format!("{:?}", info.path()).header("Pathname was"))?;
+ for info in set
+ {
+ stdout.write_all(info.path().as_os_str().as_bytes())
+ .and_then(|_| stdout.write_all(&[b'\n']))
+ .wrap_err("Failed to write raw pathname for entry to stdout")
+ .with_context(|| format!("{:?}", info.path()).header("Pathname was"))?;
+ }
+ Ok(())
}
+ if args.reverse {
+ operate_on(&mut stdout, set.into_iter().rev())
+ } else {
+ operate_on(&mut stdout, set.into_iter())
+ }.wrap_err("Abandoning output write due to failure")?;
stdout.flush().wrap_err("Failed to flush buffered output to stdout")?;
Ok(())
}).await.wrap_err("Writer (blocking) task panic")?
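The new `operate_on` helper exists so the same write loop can run forward or reversed: it is generic over a double-ended iterator, and `args.reverse` picks between `set.into_iter()` and `set.into_iter().rev()`. A simplified sketch of the same dispatch pattern over plain strings (not the crate's `work::FileInfo` or eyre error wrapping):

    use std::io::Write;

    // One generic body; the caller chooses forward or reversed iteration.
    fn write_lines<W, I>(out: &mut W, items: I) -> std::io::Result<()>
    where W: Write + ?Sized,
          I: IntoIterator<Item = String>,
    {
        for item in items {
            out.write_all(item.as_bytes())?;
            out.write_all(b"\n")?;
        }
        Ok(())
    }

    fn main() -> std::io::Result<()> {
        let set = vec!["a".to_string(), "b".to_string(), "c".to_string()];
        let reverse = true; // stand-in for `args.reverse`
        let mut stdout = std::io::stdout().lock();
        if reverse {
            write_lines(&mut stdout, set.into_iter().rev())
        } else {
            write_lines(&mut stdout, set.into_iter())
        }
    }

Because both branches instantiate the same generic function with a different iterator type, the reverse check happens once per run rather than once per item.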
