Compare commits
No commits in common. 'master' and 'redo-gragh' have entirely different histories.
master...redo-gragh
@@ -0,0 +1,124 @@
//! Argument parsing and handling
use super::*;
use std::num::NonZeroUsize;

use config::Config;

/// Executable name
pub fn program_name() -> &'static str
{
    lazy_static! {
        static ref NAME: String = std::env::args().next().unwrap();
    }
    &NAME[..]
}

#[cfg(feature="splash")]
/// Print splash screen
#[inline] pub fn splash()
{
    eprintln!("dirstat version {}", env!("CARGO_PKG_VERSION"));
    eprintln!("Made by {} with <3.\n Licensed with GPL v3.0 +", env!("CARGO_PKG_AUTHORS"));
}

/// Print usage message
pub fn usage()
{
    #[cfg(feature="splash")]
    {
        splash(); println!();
    }

    println!("{} [OPTIONS] [-] <paths...>", program_name());
    println!("{} --help", program_name());
    println!(r#"
OPTIONS:
  --recursive <number>  Set max directory recursion depth limit (1 = No recursion (default), 0 = Unlimited recursion).
  -r                    Set unlimited directory recursion depth. (same as `--recursive 0`).
  --threads <number>    Limit the maximum number of tasks allowed to process concurrently (Set to 0 for unlimited.)
  -M                    Set number of parallel running tasks to unlimited. (Same as `--threads 0`).
  -m                    Limit number of parallel tasks to the number of active CPU processors. (default).
  -                     Stop parsing arguments, treat all the rest as paths.

  --help                Print this message and exit.

NOTES:
  The first time a non-option argument is encountered, the program stops parsing arguments and assumes the rest of the arguments are paths.
  If parallelism is set to unlimited, there can be a huge syscall overhead. It is recommended to use `-m` (which is default anyway).
"#);
}

/// Print usage message then exit with code 1.
pub fn help() -> !
{
    usage();

    std::process::exit(1)
}

/// Which mode to run in
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Mode
{
    Normal(Config),
    Help,
}

/// Parse command-line arguments
#[inline] pub fn parse_args() -> eyre::Result<Mode>
{
    parse(std::env::args().skip(1))
}

fn parse<I: IntoIterator<Item=String>>(args: I) -> eyre::Result<Mode>
{
    let suggestion_intended_arg = || "If this was intended as a path instead of an option, use option `-` before it.";

    let mut args = args.into_iter();
    let mut cfg = Config::default();

    let mut reading = true;
    while let Some(opt) = args.next()
    {
        if reading {
            match opt.trim()
            {
                "--help" => return Ok(Mode::Help),
                "-" => reading = false,

                "--threads" => {
                    let max = args.next().ok_or(eyre!("`--threads` expects a parameter"))
                        .with_suggestion(suggestion_intended_arg.clone())?;
                    cfg.max_tasks = NonZeroUsize::new(max.parse::<usize>()
                                                      .wrap_err(eyre!("`--threads` expects a non-negative number"))
                                                      .with_suggestion(suggestion_intended_arg.clone())
                                                      .with_section(move || max.header("Parameter given was"))?);
                },
                "-M" => cfg.max_tasks = None,
                "-m" => {
                    cfg.max_tasks = config::max_tasks_cpus(); // this is the default, but it is possible an earlier command mutated it, so doing nothing here would be a bug for that corner case
                },

                "--recursive" => {
                    let max = args.next().ok_or(eyre!("`--recursive` expects a parameter"))
                        .with_suggestion(suggestion_intended_arg.clone())?;

                    cfg.recursive = max.parse::<usize>()
                        .wrap_err(eyre!("`--recursive` expects a non-negative number"))
                        .with_suggestion(suggestion_intended_arg.clone())
                        .with_section(move || max.header("Parameter given was"))?.into();
                },
                "-r" => cfg.recursive = config::Recursion::Unlimited,

                _ => {
                    cfg.paths.push(opt.into());
                    reading = false;
                }
            }
            continue;
        } else {
            cfg.paths.push(opt.into());
        }
    }
    Ok(Mode::Normal(cfg))
}
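A minimal usage sketch of the `parse` function above (illustrative only; it assumes the `Config` fields used in the loop, such as `paths`):

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn paths_after_options() -> eyre::Result<()> {
        // "--recursive 3" is consumed as an option; "a" stops option parsing, so "a" and "b" become paths.
        let args = vec!["--recursive".to_string(), "3".into(), "a".into(), "b".into()];
        match parse(args)? {
            Mode::Normal(cfg) => assert_eq!(cfg.paths.len(), 2),
            Mode::Help => unreachable!("`--help` was not passed"),
        }
        Ok(())
    }
}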
@@ -1,137 +0,0 @@
//! Argument parsing and handling
use super::*;
use std::num::NonZeroUsize;
use std::fmt;

use config::Config;

mod parsing;

/// Executable name
pub fn program_name() -> &'static str
{
    lazy_static! {
        static ref NAME: String = std::env::args().next().unwrap();
    }
    &NAME[..]
}

#[cfg(feature="splash")]
/// Print splash screen
#[inline] pub fn splash()
{
    eprintln!("dirstat version {}", env!("CARGO_PKG_VERSION"));
    eprintln!("Made by {} with <3.\n Licensed with GPL v3.0 +", env!("CARGO_PKG_AUTHORS"));
}

const OPTIONS_NORMAL: &'static [&'static str] = &[
    "--recursive <number>  Set max directory recursion depth limit (1 = No recursion (default), 0 = Unlimited recursion).",
    "-r                    Set unlimited directory recursion depth. (same as `--recursive 0`).",
    "--threads <number>    Limit the maximum number of tasks allowed to process concurrently (Set to 0 for unlimited.)",
    "-M                    Set number of parallel running tasks to unlimited. (Same as `--threads 0`). (default).",
    "-m                    Limit number of parallel tasks to the number of active CPU processors.",
    "-q                    Quiet mode. Don't output info messages about successful `stat`ing.",
    "-Q                    Silent mode. Don't output any messages.",
    "-v                    Verbose mode. Output extra information.",
    #[cfg(feature="inspect")] "--save <file>         Dump the collected data to this file for further inspection.",
    #[cfg(feature="inspect")] "-D                    Dump the collected data to `stdout` (see `--save`.)",
    #[cfg(feature="inspect")] "--save-raw <file>     Dump the collected data to this file uncompressed. (see `--save`.)",
    #[cfg(feature="inspect")] "-R                    Dump the collected data to standard output uncompressed. (see `--save-raw`.)",
    "-                     Stop parsing arguments, treat all the rest as paths.",
];

const NOTES: &'static [&'static str] = &[
    "The first time a non-option argument is encountered, the program stops parsing arguments and assumes the rest of the arguments are paths.",
    "If parallelism is set to unlimited, there can be a huge syscall overhead. It is recommended to use `-m` in large runs.",
    "",
    "Symlinks are ignored while collecting stat data. They will fail with message 'Unknown file type'. Symlinks are generally very small in the actual data they contain themselves, so this is *usually* unimportant.",
    #[cfg(feature="inspect")] "\nThe save formats of `--save` (`-D`) and `--save-raw` (`-R`) are incompatible. The former is bzip2 compressed, the latter is uncompressed.",
];

fn get_opt_normal() -> impl fmt::Display
{
    #[derive(Debug)]
    struct Opt;

    impl fmt::Display for Opt
    {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
        {
            for line in OPTIONS_NORMAL.iter()
            {
                writeln!(f, "  {}", line)?;
            }
            Ok(())
        }
    }

    Opt
}

/// Print usage message
pub fn usage()
{
    #[cfg(feature="splash")]
    {
        splash(); println!();
    }

    println!("{} [OPTIONS] [-] <paths...>", program_name());
    println!("{} --help", program_name());
    println!(r#"
OPTIONS:
{}
  --help                Print this message and exit.

NOTES:
  The first time a non-option argument is encountered, the program stops parsing arguments and assumes the rest of the arguments are paths.
  If parallelism is set to unlimited, there can be a huge syscall overhead. It is recommended to use `-m`.

  Symlinks are ignored while collecting stat data. They will fail with message 'Unknown file type'. Symlinks are generally very small in the actual data they contain themselves, so this is *usually* unimportant.
"#, get_opt_normal());
}

/// Print usage message then exit with code 1.
pub fn help() -> !
{
    usage();

    std::process::exit(1)
}

/// Which mode to run in
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Mode
{
    Normal(Config),
    Help,
}

/// Parse command-line arguments
#[inline] pub fn parse_args() -> eyre::Result<Mode>
{
    parse(std::env::args().skip(1))
}

fn parse<I: IntoIterator<Item=String>>(args: I) -> eyre::Result<Mode>
{
    //let mut cfg = config::Config::default();

    let mut buffer = parsing::Output::new();
    let mut args = args.into_iter();

    while let Some(arg) = args.next()
    {
        match parsing::parse_next(&mut args, &mut buffer, arg)? {
            parsing::Continue::No => {
                parsing::consume(args, &mut buffer);
                break;
            },
            parsing::Continue::Abort(Some(change_to)) => return Ok(*change_to),
            parsing::Continue::Abort(_) => break,
            _ => (),
        }
    }

    parsing::into_mode(buffer)
}
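The `get_opt_normal()` helper above wraps a static string table in an ad-hoc `Display` type so it can be interpolated straight into `println!`; a standalone sketch of the same pattern (hypothetical names):

use std::fmt;

struct Lines(&'static [&'static str]);

impl fmt::Display for Lines {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for line in self.0 {
            writeln!(f, "  {}", line)?; // two-space indent, one entry per line
        }
        Ok(())
    }
}

// println!("OPTIONS:\n{}", Lines(OPTIONS_NORMAL));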
@@ -1,515 +0,0 @@
//! For parsing arguments
use super::*;
use std::collections::{HashMap, HashSet};
use std::mem::Discriminant;
use std::fmt;

#[cfg(feature="inspect")] use config::OutputSerialisationMode;

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum InspectKind
{
    Treemap(Option<(u64, u64)>),
}

impl fmt::Display for InspectKind
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
    {
        match self {
            Self::Treemap(None) => write!(f, "treemap"),
            Self::Treemap(Some((x, y))) => write!(f, "treemap:{}:{}", x, y), // Width and height.
        }
    }
}

impl std::str::FromStr for InspectKind
{
    type Err = eyre::Report;

    fn from_str(s: &str) -> Result<Self, Self::Err>
    {
        todo!()
    }
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Argument
{
    ModeChangeHelp,

    LimitConcMaxProc,
    LimitConc(NonZeroUsize),
    UnlimitConc,

    Save(String),
    SaveStdout,
    SaveRaw(String),
    SaveRawStdout,

    LimitRecurse(NonZeroUsize),
    UnlimitRecurse,

    LogVerbose,
    LogQuiet,
    LogSilent,

    StopReading,

    Inspect(InspectKind),

    Input(String),
}

/// Kinds of modes of operation for the program.
///
/// These map to `super::Mode`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, Copy)]
#[non_exhaustive]
enum ModeKind
{
    Normal,

    Help
}

impl Default for ModeKind
{
    #[inline]
    fn default() -> Self
    {
        Self::Normal
    }
}

impl Argument
{
    /// What mode does this argument change to, if any?
    fn mode_change_kind(&self) -> Option<ModeKind>
    {
        Some(match self
        {
            Self::ModeChangeHelp => ModeKind::Help,
            _ => return None,
        })
    }
    /// Insert this `Argument` into config
    pub fn insert_into_cfg(self, cfg: &mut Config)
    {
        use Argument::*;
        match self {
            Inspect(InspectKind::Treemap(None)) => cfg.inspection.treemap = Some((640, 480)),
            Inspect(InspectKind::Treemap(x)) => cfg.inspection.treemap = x,

            LimitConcMaxProc => cfg.max_tasks = config::max_tasks_cpus(),
            LimitConc(max) => cfg.max_tasks = Some(max),
            UnlimitConc => cfg.max_tasks = None,

            #[cfg(feature="inspect")] Save(output) => cfg.serialise_output = Some(OutputSerialisationMode::File(output.into())),
            #[cfg(feature="inspect")] SaveStdout => cfg.serialise_output = Some(OutputSerialisationMode::Stdout),
            #[cfg(feature="inspect")] SaveRaw(output) => {
                cfg_if! {
                    if #[cfg(feature="prealloc")] {
                        cfg.serialise_output = Some(OutputSerialisationMode::PreallocFile(output.into()));
                    } else {
                        cfg.serialise_output = Some(OutputSerialisationMode::RawFile(output.into()));
                    }
                }
            },
            #[cfg(feature="inspect")] SaveRawStdout => cfg.serialise_output = Some(OutputSerialisationMode::RawStdout),

            LimitRecurse(limit) => cfg.recursive = if limit.get() == 1 { config::Recursion::None } else { config::Recursion::Limited(limit) },
            UnlimitRecurse => cfg.recursive = config::Recursion::Unlimited,

            LogVerbose => cfg.output_level = config::OutputLevel::Verbose,
            LogQuiet => cfg.output_level = config::OutputLevel::Quiet,
            LogSilent => cfg.output_level = config::OutputLevel::Silent,

            Input(path) => cfg.paths.push(path.into()),

            _ => (), // unreachable!() // Do nothing instead of panic.
        }
    }
}

impl fmt::Display for Argument
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
    {
        use Argument::*;
        match self
        {
            Inspect(ins) => write!(f, "--inspect {}", ins),

            ModeChangeHelp => write!(f, "--help"),
            LimitConcMaxProc => write!(f, "-m"),
            LimitConc(limit) => write!(f, "--threads {}", limit),
            UnlimitConc => write!(f, "-M (--threads 0)"),

            Save(s) => write!(f, "--save {:?}", s),
            SaveStdout => write!(f, "-D"),
            SaveRaw(s) => write!(f, "--save-raw {:?}", s),
            SaveRawStdout => write!(f, "-R"),

            LimitRecurse(rec) => write!(f, "--recursive {}", rec),
            UnlimitRecurse => write!(f, "-r (--recursive 0)"),

            LogVerbose => write!(f, "-v"),
            LogQuiet => write!(f, "-q"),
            LogSilent => write!(f, "-Q"),

            StopReading => write!(f, "-"),

            Input(input) => write!(f, "<{}>", input),
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
enum MX
{
    None,
    Itself,
    All,
    Only(Discriminant<Argument>),
    Many(&'static [Discriminant<Argument>]),
}

impl Default for MX
{
    #[inline]
    fn default() -> Self
    {
        Self::Itself
    }
}

impl MX
{
    /// Is this argument discriminant mutually exclusive with this other argument?
    pub fn is_mx(&self, this: Discriminant<Argument>, other: &Argument) -> bool
    {
        use std::mem::discriminant;

        let other = discriminant(other);
        match self
        {
            Self::Itself if other == this => true,
            Self::All => true,
            Self::Only(disc) if other == *disc => true,
            Self::Many(discs) if discs.contains(&other) => true,
            _ => false,
        }
    }
}

impl Argument
{
    /// Is this `Argument` mutually exclusive with another?
    pub fn is_mx_with(&self, other: &Self) -> bool
    {
        use std::mem::discriminant;
        lazy_static! {
            static ref MX_REF: HashMap<Discriminant<Argument>, MaybeVec<MX>> = {
                let mut out = HashMap::new();
                macro_rules! mx {
                    (@) => {
                        std::iter::empty()
                    };
                    (@ self $($tt:tt)*) => {
                        iter![MX::Itself].chain(mx!(@ $($tt)*))
                    };

                    (@ [$inner:expr] $($tt:tt)*) => {
                        iter![MX::Only(discriminant(&$inner))].chain(mx!(@ $($tt)*))
                    };
                    (@ [$($inner:expr),*] $($tt:tt)*) => {
                        iter![MX::Many(vec![$(discriminant(&$inner)),*].leak())].chain(mx!(@ $($tt)*))
                    };
                    (@ $ident:ident $($tt:tt)*) => {
                        iter![MX::$ident].chain(mx!(@ $($tt)*))
                    };
                    ($disc:expr => $($tt:tt)*) => {
                        out.insert(discriminant(&$disc), mx!(@ $($tt)*).collect());
                    };
                }

                mx!(Argument::ModeChangeHelp => All);

                mx!(Argument::LimitConcMaxProc => self [Argument::UnlimitConc,
                                                        Argument::LimitConc(unsafe{NonZeroUsize::new_unchecked(1)})]);
                mx!(Argument::UnlimitConc => self [Argument::LimitConcMaxProc, Argument::LimitConc(unsafe{NonZeroUsize::new_unchecked(1)})]);
                mx!(Argument::LimitConc(unsafe{NonZeroUsize::new_unchecked(1)}) => self [Argument::LimitConcMaxProc, Argument::UnlimitConc]);

                mx!(Argument::Save(String::default()) => self [Argument::SaveStdout,
                                                               Argument::SaveRaw(Default::default()),
                                                               Argument::SaveRawStdout]);
                mx!(Argument::SaveStdout => self [Argument::Save(String::default()),
                                                  Argument::SaveRaw(Default::default()),
                                                  Argument::SaveRawStdout]);
                mx!(Argument::SaveRaw(Default::default()) => self [Argument::Save(String::default()),
                                                                   Argument::SaveStdout,
                                                                   Argument::SaveRawStdout]);
                mx!(Argument::SaveRawStdout => self [Argument::Save(String::default()),
                                                     Argument::SaveRaw(String::default()),
                                                     Argument::SaveStdout]);
                mx!(Argument::LimitRecurse(unsafe{NonZeroUsize::new_unchecked(1)}) => self [Argument::UnlimitRecurse]);
                mx!(Argument::UnlimitRecurse => self [Argument::LimitRecurse(unsafe{NonZeroUsize::new_unchecked(1)})]);
                mx!(Argument::LogVerbose => self [Argument::LogQuiet, Argument::LogSilent]);
                mx!(Argument::LogQuiet => self [Argument::LogVerbose, Argument::LogSilent]);
                mx!(Argument::LogSilent => self [Argument::LogQuiet, Argument::LogVerbose]);

                mx!(Argument::StopReading => All);

                mx!(Argument::Input(String::default()) => None);
                out
            };
        }
        let this = discriminant(self);
        match MX_REF.get(&this) {
            Some(mx) if mx.iter().filter(|mx| mx.is_mx(this, other)).next().is_some() => true,
            _ => false,
        }
    }
}

/// Should we continue parsing and/or reading arguments?
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Continue
{
    /// Keep parsing the arguments
    Yes,
    /// Stop parsing arguments, add the rest of args as `Input`s
    No,
    /// On mode change, we don't need to parse the rest of the arguments. Stop reading entirely, and optionally return the last one here, which must be a mode change argument.
    ///
    /// Returning this when the contained value is `Some` immediately terminates parsing and proceeds to mode-switch. However, if it is `None`, parsing of chained short args is allowed to continue, although `Abort(None)` will be returned at the end regardless of subsequent `Continue` results from that chain (unless one is an `Abort(Some(_))`, which immediately returns itself.)
    // Box `Argument` to reduce the size of `Continue`, as it is returned from functions often and when its value is set to `Some` it will always be the last `Argument` processed anyway and the only one to be boxed here at all.

    //TODO: Deprecate the early return of an `Argument` here. Either change it to `Mode`, or have no early return. Mode change happens at the bottom in `into_mode` now.
    Abort(Option<Box<Mode>>),
}

impl Continue
{
    /// Should we keep *parsing* args?
    #[inline] pub fn keep_reading(&self) -> bool
    {
        if let Self::Yes = self {
            true
        } else {
            false
        }
    }

    /// Is this an abort?
    #[inline] pub fn is_abort(&self) -> bool
    {
        if let Self::Abort(_) = self {
            true
        } else {
            false
        }
    }
}

impl Default for Continue
{
    #[inline]
    fn default() -> Self
    {
        Self::Yes
    }
}

impl From<bool> for Continue
{
    fn from(from: bool) -> Self
    {
        if from {
            Self::Yes
        } else {
            Self::No
        }
    }
}

pub type Output = HashSet<Argument>;
#[inline] const fn suggestion_intended_arg() -> &'static str {
    "If this was intended as a path instead of an option, use option `-` before it."
}

fn save_output(output: &mut Output, item: Argument) -> eyre::Result<()>
{
    if let Some(mx) = output.iter().filter(|arg| item.is_mx_with(arg)).next() {
        return Err(eyre!("Arguments are mutually exclusive"))
            .with_section(|| item.header("Trying to add argument"))
            .with_section(|| mx.to_string().header("Which is mutually exclusive with previously added"));
    }

    output.insert(item); //TODO: Warn when adding duplicate?

    Ok(())
}

fn parse_single<I>(_args: &mut I, output: &mut Output, this: char) -> eyre::Result<Continue>
    where I: Iterator<Item=String>
{
    let item = match this
    {
        'r' => Argument::UnlimitRecurse,

        #[cfg(feature="inspect")] 'D' => Argument::SaveStdout,
        #[cfg(feature="inspect")] 'R' => Argument::SaveRawStdout,

        'v' => Argument::LogVerbose,
        'q' => Argument::LogQuiet,
        'Q' => Argument::LogSilent,

        'm' => Argument::LimitConcMaxProc,
        'M' => Argument::UnlimitConc,

        unknown => {
            return Err(eyre!("Unknown short argument {:?}", unknown))
                .with_suggestion(suggestion_intended_arg.clone());
        },
    };

    save_output(output, item)
        .with_section(|| this.header("Short argument was"))?;

    Ok(Continue::Yes)
}

/// Consume this iterator into `Input`s
pub fn consume<I>(args: I, output: &mut Output)
    where I: IntoIterator<Item=String>
{
    output.extend(args.into_iter().map(Argument::Input));
}

pub fn parse_next<I>(args: &mut I, output: &mut Output, this: String) -> eyre::Result<Continue>
    where I: Iterator<Item=String>
{
    let mut keep_reading = Continue::Yes;
    let item = match this.trim()
    {
        "--inspect" => {
            let ins = args.next().ok_or(eyre!("`--inspect` expects a parameter"))
                .with_suggestion(suggestion_intended_arg.clone())?;
            Argument::Inspect(ins.parse().wrap_err(eyre!("Failed to parse parameter for `--inspect`"))?)
        },
        "--threads" => {
            let max = args.next().ok_or(eyre!("`--threads` expects a parameter"))
                .with_suggestion(suggestion_intended_arg.clone())?;
            match NonZeroUsize::new(max.parse::<usize>()
                                    .wrap_err(eyre!("`--threads` expects a non-negative number"))
                                    .with_suggestion(suggestion_intended_arg.clone())
                                    .with_section(move || max.header("Parameter given was"))?)
            {
                Some(max) => Argument::LimitConc(max),
                None => Argument::UnlimitConc,
            }
        },
        "--recursive" => {
            let max = args.next().ok_or(eyre!("`--recursive` expects a parameter"))
                .with_suggestion(suggestion_intended_arg.clone())?;
            match NonZeroUsize::new(max.parse::<usize>().wrap_err(eyre!("`--recursive` expects a non-negative number"))
                                    .with_suggestion(suggestion_intended_arg.clone())
                                    .with_section(move || max.header("Parameter given was"))?)
            {
                Some(x) => Argument::LimitRecurse(x),
                None => Argument::UnlimitRecurse,
            }
        },
        "--help" => {
            return Ok(Continue::Abort(Some(Box::new(Mode::Help))));
        },
        "-" => {
            return Ok(Continue::No);
        },
        #[cfg(feature="inspect")] "--save" => {
            let file = args.next().ok_or(eyre!("`--save` expects a parameter"))
                .with_suggestion(suggestion_intended_arg.clone())?;

            Argument::Save(file)
        },
        #[cfg(feature="inspect")] "--save-raw" => {
            let file = args.next().ok_or(eyre!("`--save-raw` expects a parameter"))
                .with_suggestion(suggestion_intended_arg.clone())?;

            Argument::SaveRaw(file)
        },
        single if single.starts_with("-") => {
            for ch in single.chars().skip(1) {
                match parse_single(args, output, ch)
                    .wrap_err(eyre!("Error parsing short argument"))
                    .with_section(|| this.clone().header("Full short argument chain was"))? {
                        abort @ Continue::Abort(Some(_)) => return Ok(abort),
                        x @ Continue::No |
                        x @ Continue::Abort(_) if !x.is_abort() => keep_reading = x,
                        _ => (),
                    }
            }
            return Ok(keep_reading);
        },
        _ => {
            keep_reading = Continue::No;

            Argument::Input(this)
        }
    };

    save_output(output, item)?;

    Ok(keep_reading)
}

/// Converts parsed argument lists into a respective mode.
///
/// # Notes
/// These functions assume the mode has already been correctly calculated to be the mode pertaining to that function.
mod modes {
    use super::*;
    use config::Config;

    /// Consume a parsed list of arguments in `Normal` mode into a `Normal` mode `Config` object.
    pub fn normal(args: Output) -> eyre::Result<config::Config>
    {
        let mut cfg = Config::default();

        for arg in args.into_iter()
        {
            arg.insert_into_cfg(&mut cfg);
        }

        Ok(cfg)
    }
}
/// Consume this parsed list of arguments into a `Mode` and return it
pub fn into_mode(args: Output) -> eyre::Result<Mode>
{
    let mut mode_kind = ModeKind::default(); //Normal.

    for arg in args.iter() {
        // Find any mode change Argument (with `Argument::mode_change_kind()`) in `args`, changing `mode_kind` in turn. There should be at most 1.
        if let Some(mode) = arg.mode_change_kind()
        {
            mode_kind = mode;
            break;
        }
    }
    // Pass `args` to the respective mode generation function in module `modes`, and wrap that mode around its return value.
    match mode_kind
    {
        ModeKind::Normal => modes::normal(args).map(Mode::Normal),
        ModeKind::Help => Ok(Mode::Help),
    }
}
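A sketch of the contract `save_output` relies on (hypothetical assertions, assuming the `MX` table built above):

#[cfg(test)]
mod mx_contract {
    use super::*;

    #[test]
    fn mutual_exclusion() {
        // Logging levels exclude each other, `--help` excludes everything,
        // and plain path inputs never conflict with anything:
        assert!(Argument::LogQuiet.is_mx_with(&Argument::LogVerbose));
        assert!(Argument::ModeChangeHelp.is_mx_with(&Argument::LogQuiet));
        assert!(!Argument::Input(String::from("some/path")).is_mx_with(&Argument::LogQuiet));
    }
}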
@@ -1,12 +0,0 @@
//! Dealing with bytes and stuff

/// Copy from `src` into `dst` and return the number of bytes copied.
///
/// # Notes
/// The regions *must not* overlap. This is UB if they do.
#[inline] pub unsafe fn copy_nonoverlapping_unchecked(src: &[u8], dst: &mut [u8]) -> usize
{
    let len = std::cmp::min(dst.len(), src.len());
    std::ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), len);
    len
}
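A hypothetical call site honouring the safety contract above (two distinct stack buffers, so they cannot overlap; the length is clamped to the shorter slice):

#[cfg(test)]
mod copy_contract {
    use super::*;

    #[test]
    fn clamps_to_shorter_slice() {
        let src = [1u8, 2, 3, 4];
        let mut dst = [0u8; 8];
        // SAFETY: `src` and `dst` are separate local arrays and therefore do not overlap.
        let copied = unsafe { copy_nonoverlapping_unchecked(&src[..], &mut dst[..]) };
        assert_eq!(copied, 4);
        assert_eq!(&dst[..copied], &src[..]);
    }
}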
@@ -1,40 +0,0 @@
//! Mechanism to defer dropping of large objects to background threads
use super::*;
use futures::{
    prelude::*,
    future::OptionFuture,
};

pub const DEFER_DROP_VEC_SIZE_FLOOR: usize = 1024 * 1024; // 1 MB

/// Drop a `Vec<T>` that is `Send` and `'static`.
///
/// This will move the object to a background task if it is deemed necessary.
/// # Note
/// This *must* be `await`ed to work properly. If you are not in an async context, use `drop_vec_sync`.
pub fn drop_vec<T>(vec: Vec<T>) -> impl Future<Output = ()> + 'static
    where T: Send + 'static
{
    let len_bytes = vec.len() * std::mem::size_of::<T>();
    OptionFuture::from(if len_bytes > DEFER_DROP_VEC_SIZE_FLOOR {
        cfg_eprintln!(Verbose; config::get_global(), "Size of vector ({} bytes, {} elements of {:?}) exceeds defer drop size floor {}. Moving vector to a separate thread for de-allocation", len_bytes, vec.len(), std::any::type_name::<T>(), DEFER_DROP_VEC_SIZE_FLOOR);
        Some(async move {
            tokio::task::spawn_blocking(move || drop(vec)).await.expect("Child panic while dropping vector");
        })
    } else {
        None
    }).map(|_| ())
}

/// Drop a `Vec<T>` that is `Send` and `'static`.
///
/// This will move the object to a background task if it is deemed necessary.
pub fn drop_vec_sync<T>(vec: Vec<T>)
    where T: Send + 'static
{
    let len_bytes = vec.len() * std::mem::size_of::<T>();
    if len_bytes > DEFER_DROP_VEC_SIZE_FLOOR {
        cfg_eprintln!(Verbose; config::get_global(), "Size of vector ({} bytes, {} elements of {:?}) exceeds defer drop size floor {}. Moving vector to a separate thread for de-allocation", len_bytes, vec.len(), std::any::type_name::<T>(), DEFER_DROP_VEC_SIZE_FLOOR);
        tokio::task::spawn_blocking(move || drop(vec));
    }
}
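A sketch of how the two variants above are intended to be called (illustrative only; it assumes a tokio runtime is already running, as the rest of the crate does):

async fn release_buffers(big: Vec<u64>, small: Vec<u64>) {
    // Large vector: deallocation is handed to a blocking task and awaited.
    drop_vec(big).await;
    // Synchronous or fire-and-forget contexts: the `_sync` variant.
    drop_vec_sync(small);
}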
@@ -1,124 +0,0 @@
use super::*;
use treemap::{
    Rect,
    Mappable,
    TreemapLayout
};
use data::{FsInfo, INodeInfoGraph, INodeInfoGraphEntry};

/// A treemap of all **files** in the graph.
#[derive(Debug, Clone, PartialEq)]
pub struct Treemap
{
    //layout: TreemapLayout,
    nodes: Vec<MapNode>,
}

impl Treemap
{
    /// All nodes of the map.
    #[inline] pub fn nodes(&self) -> &[MapNode]
    {
        &self.nodes[..]
    }
}

#[derive(Debug, Clone, PartialEq)]
pub struct MapNode
{
    name: String,

    vw_size: f64, // Should be halved each iteration
    vw_bounds: Rect, // should be Rect::new() before alignment
}

impl MapNode
{
    /// The calculated bounds of the node
    #[inline] pub fn bounds(&self) -> &Rect
    {
        &self.vw_bounds
    }
    /// The name of the node
    #[inline] pub fn name(&self) -> &str
    {
        &self.name[..]
    }

    #[inline] fn size(&self) -> f64 // Is this useful for consumer?
    {
        self.vw_size
    }

    #[inline] fn new(name: String) -> Self
    {
        Self {
            vw_size: 1.0,
            vw_bounds: Rect::new(),
            name,
        }
    }
}

/// Create a treemap from this graph.
pub fn treemap(_cfg: &Config, graph: &INodeInfoGraph, (w, h): (f64, f64)) -> Treemap
{
    let layout = TreemapLayout::new();
    let mut nodes = Vec::with_capacity(graph.len());
    //TODO: Recursively walk the graph, halving size with each iteration. (Maybe we need `INodeInfoGraph` here, not `HierarchicalINodeGraph`?)
    let total_size = graph.total_size();

    let size = 1.0;

    fn calc_path<'a, I: IntoIterator<Item = INodeInfoGraphEntry<'a>>>(insert: &'a mut Vec<MapNode>, from: I, total_size: u64, size: f64, scale: f64)
    {
        for top in from {
            let path = top.path();
            match top.info() {
                FsInfo::Directory(_) => {
                    //TODO: Do we add dir itself? I think not?
                    // Add children
                    let size = size * 0.5;
                    calc_path(insert, top.level().unwrap(), total_size, size, scale);
                },
                &FsInfo::File(sz, _) => {
                    let fract = (sz as f64) / (total_size as f64);
                    insert.push(MapNode {
                        name: path.to_string_lossy().into_owned(),
                        vw_size: fract * scale,
                        vw_bounds: Rect::new(),
                    })
                },
            }
        }
    }

    calc_path(&mut nodes, graph.top_level(), total_size, size, 1.0);

    layout.layout_items(&mut nodes[..], Rect {
        x: 0.0,
        y: 0.0,
        w, h,
    });

    Treemap {
        //layout,
        nodes
    }
}

impl Mappable for MapNode
{
    fn size(&self) -> f64
    {
        self.vw_size
    }
    fn bounds(&self) -> &Rect
    {
        &self.vw_bounds
    }
    fn set_bounds(&mut self, bounds: Rect)
    {
        self.vw_bounds = bounds;
    }
}
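A sketch of consuming the laid-out map (hypothetical; it assumes the `Rect` fields `x`, `y`, `w`, `h` used in `treemap()` above):

fn dump(map: &Treemap) {
    for node in map.nodes() {
        let r = node.bounds();
        // One laid-out rectangle per file in the graph.
        println!("{}: {}x{} at ({}, {})", node.name(), r.w, r.h, r.x, r.y);
    }
}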
@@ -1,21 +0,0 @@
//! Prints the information found in the graph in different ways
use super::*;

use data::HierarchicalINodeGraph;
use config::Config;

//pub mod repl;

/// Print the most basic info
pub fn print_basic_max_info(cfg: &Config, graph: &HierarchicalINodeGraph)
{
    cfg_println!(Quiet; cfg, "Max size file: {:?}", graph.path_max_size_for(data::FsKind::File));
    cfg_println!(Quiet; cfg, "Max size dir: {:?}", graph.path_max_size_for(data::FsKind::Directory));
    cfg_println!(Quiet; cfg, "Max size all: {:?}", graph.path_max_size());
}

#[cfg(feature="treemap")]
mod map;
#[cfg(feature="treemap")]
pub use map::*;
@@ -1,131 +0,0 @@
//! Graph inspection REPL
use super::*;
use std::{fmt, error};
use std::path::PathBuf;
use std::io;

use rustyline::error::ReadlineError;
use rustyline::Editor;

mod env;
mod command;
mod opcodes;

/// Default history file name
///
/// # Path lookup
/// * To make this an absolute path, start it with `/`
/// * To make this path relative to the user's home directory, start it with `~/` (Note: If we are unable to find the user's home directory, it is considered a lookup **failure** (*not* a **disable**) and `calculate_history_path()` will return `Err`.)
/// * Otherwise, the path is taken relative to the current working directory
///
/// # Notes
/// This is only used when the `save-history` feature is enabled.
const DEFAULT_HISTORY_FILE: &'static str = "~/.dirstat_history";

/// Get the path to the history file.
///
/// # Lookup
/// * If the `DIRSTAT_HISTORY` envvar is set and not empty, use this file path.
/// * If the `DIRSTAT_HISTORY` envvar is set and empty, saving history is considered **disabled**, we return `Ok(None)`.
/// * Otherwise, refer to lookup rules for `DEFAULT_HISTORY_FILE`.
pub fn calculate_history_path() -> io::Result<Option<PathBuf>>
{
    cfg_if! {
        if #[cfg(feature="save-history")] {
            todo!()
        } else {
            unreachable!("Tried to calculate repl history path when binary was compiled with history saving perma-disabled.")
        }
    }
}

/// Inspect the graph with commands
///
/// # Note
/// This function synchronously blocks the current thread.
pub fn inspect(cfg: &Config, graph: &HierarchicalINodeGraph) -> Result<(), ReplExitError>
{
    let mut repl = Editor::<()>::new(); //TODO: Change `()` to our completer, when we have a decent idea of how they'll work.

    cfg_if! {
        if #[cfg(feature="save-history")] {
            let history_path = match calculate_history_path() {
                Ok(Some(path)) => {
                    if let Err(err) = repl.load_history(&path)
                    {
                        cfg_eprintln!(cfg, "Failed to load repl history from {:?}: {}", path, err);
                    }
                    Some(path)
                },
                Ok(None) => None,
                Err(err) => {
                    cfg_eprintln!(cfg, "Failed to find repl history: {}", err);
                    None
                }
            };
        }
    }

    let res: Result<(), ReplExitError> = try {
        loop {
            let line = repl.readline("> ")?;
            repl.add_history_entry(&line);

            //TODO: How to interpret commands?
            todo!("Interpret commands from `line`.");
        }
    };

    cfg_if! {
        if #[cfg(feature="save-history")] {
            if let Some(path) = history_path {
                if let Err(err) = repl.save_history(&path)
                {
                    cfg_eprintln!(cfg, "Failed to save repl history to {:?}: {}", path, err);
                }
            }
        }
    }

    res
}

/// When the inspection repl exits abnormally.
#[derive(Debug)]
pub enum ReplExitError
{
    ReadLine(ReadlineError),
}

impl From<ReadlineError> for ReplExitError
{
    #[inline] fn from(from: ReadlineError) -> Self
    {
        Self::ReadLine(from)
    }
}

impl error::Error for ReplExitError
{
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        Some(match &self
        {
            Self::ReadLine(rl) => rl
        })
    }
}
impl fmt::Display for ReplExitError
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
    {
        match self
        {
            Self::ReadLine(ReadlineError::Eof) |
            Self::ReadLine(ReadlineError::Interrupted) => write!(f, "exit"),
            Self::ReadLine(_) => write!(f, "readline error"),
        }
    }
}
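`calculate_history_path()` above is still `todo!()`; a minimal sketch of the documented lookup rules (an assumption of one possible implementation, resolving `~/` via the `HOME` environment variable):

fn history_path_sketch() -> io::Result<Option<PathBuf>> {
    match std::env::var_os("DIRSTAT_HISTORY") {
        Some(path) if !path.is_empty() => Ok(Some(PathBuf::from(path))),
        Some(_) => Ok(None), // set but empty: saving history is disabled
        None => match DEFAULT_HISTORY_FILE.strip_prefix("~/") {
            Some(rest) => {
                // Missing home directory is a lookup *failure*, per the docs above.
                let home = std::env::var_os("HOME")
                    .ok_or_else(|| io::Error::new(io::ErrorKind::NotFound, "cannot find home directory"))?;
                Ok(Some(PathBuf::from(home).join(rest)))
            },
            None => Ok(Some(PathBuf::from(DEFAULT_HISTORY_FILE))),
        },
    }
}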
@@ -1,54 +0,0 @@
//! Repl commands
use super::*;
use std::str::FromStr;

use super::env::*;

#[derive(Debug)]
pub struct Context<'a>
{
    /// Environment containing variable name mappings.
    env: &'a mut Lexenv,
}

/// Trait for commands.
///
/// # Defining commands
/// A command object should be created once only, and then referenced and executed using `params` and through mutating `cx`.
pub trait Command: fmt::Debug
{
    fn execute(&self, cx: &mut Context<'_>, params: Vec<Value>) -> eyre::Result<()>;
}

/// Command structurally parsed.
///
/// Can be converted into `Command` with the `TryInto` trait.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct IR
{
    op: String,
    params: Vec<Value>,
}

impl FromStr for IR
{
    type Err = CommandParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        todo!()
    }
}

/// Error when parsing a command into `IR`.
#[derive(Debug)]
pub struct CommandParseError(String);

impl error::Error for CommandParseError{}

impl fmt::Display for CommandParseError
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
    {
        write!(f, "failed to parse command from {:?}", self.0)
    }
}
@@ -1,242 +0,0 @@
//! Execution environment for repl
use super::*;
use std::str::FromStr;
use std::collections::{BTreeMap, HashMap};

#[derive(Debug)]
pub struct Lexenv
{
    /// Maps symbol name to value in generations.
    kvstack: BTreeMap<usize, HashMap<String, Value>>,
    /// Current generation of the stack
    current_generation: usize,
}
impl Lexenv
{
    /// The generation number of the current level.
    pub fn depth(&self) -> usize
    {
        self.current_generation
    }
    /// Create a new empty lexenv
    pub fn new() -> Self
    {
        Self {
            kvstack: BTreeMap::new(),
            current_generation: 0,
        }
    }
    /// All valid symbols at this level.
    ///
    /// # Ordering
    /// Each symbol's level will appear in the order from level 0 to the current level, however the order of intra-level symbols is undefined.
    pub fn symbols(&self) -> impl Iterator<Item = &'_ Value> + '_
    {
        self.kvstack.range(0..=self.current_generation).flat_map(|(_, v)| v.values())
    }

    /// All valid symbols **in** this level.
    pub fn symbols_local(&self) -> impl Iterator<Item = &'_ Value> + '_
    {
        OptionIterator::from(self.kvstack.get(&self.current_generation).map(|x| x.values()))
    }

    /// Remove the current level, but leave its memory allocated for further use.
    pub fn pop(&mut self)
    {
        self.kvstack.entry(self.current_generation).or_insert_with(|| HashMap::new()).clear();
        if self.current_generation > 0 {
            self.current_generation -= 1;
        }
    }

    /// Remove a symbol from the **current** level.
    pub fn remove(&mut self, name: &str) -> Option<Value>
    {
        self.kvstack.entry(self.current_generation).or_insert_with(|| HashMap::new()).remove(name)
    }

    /// Insert a new value mapping into the current level.
    pub fn insert(&mut self, name: String, value: Value)
    {
        self.kvstack.entry(self.current_generation).or_insert_with(|| HashMap::new()).insert(name, value);
    }

    /// Look up a symbol in this or any of the above levels.
    pub fn lookup(&self, name: &str) -> Option<&Value>
    {
        for (_, lvmap) in self.kvstack.range(0..=self.current_generation).rev()
        {
            let m = lvmap.get(name);
            if m.is_some() {
                return m;
            }
        }
        None
    }

    /// Look up a symbol in this level.
    pub fn lookup_local(&self, name: &str) -> Option<&Value>
    {
        self.kvstack.get(&self.current_generation).map(|map| map.get(name)).flatten()
    }

    /// Create a new, empty level.
    pub fn push(&mut self)
    {
        self.current_generation += 1;
    }

    /// Remove the current level, deallocating any memory it was using.
    pub fn pop_clear(&mut self)
    {
        if self.current_generation > 0 {
            self.kvstack.remove(&self.current_generation);
            self.current_generation -= 1;
        } else {
            self.kvstack.entry(0).or_insert_with(|| HashMap::new()).clear();
        }
    }
}

/// The value type
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Value
{
    String(String),
    Symbol(String),
    List(Vec<Value>),
}

impl Value
{
    /// Parse from an iterator of `char`s.
    pub fn parse_chars<T>(ch: &mut T) -> Result<Self, ValueParseError>
        where T: Iterator<Item = char>
    {
        match ch.next()
        {
            Some('(') => {
                todo!("list");
            },
            Some('"') => {
                todo!("string");
            },
            Some(first_chr) => {
                todo!("symbol");
            },
            _ => Err(ValueParseError(String::default())),
        }
    }
    /// Parse a `Value` from this string and then return the rest of the string.
    #[deprecated]
    pub fn parse_running(s: &str) -> Result<(Self, &'_ str), ValueParseError>
    {
        match s.trim().as_bytes()
        {
            &[b'(', ..] => {
                todo!("list");
            },
            &[b'"', ..] => {
                todo!("string");
            },
            _ => {
                todo!("symbol");
            }
        }
    }
}

impl FromStr for Value
{
    type Err = ValueParseError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::parse_running(s).map(|(x, _)| x)
    }
}

impl Value
{
    pub fn try_as_symbol(&self) -> Result<&str, ValueTypeError>
    {
        match self {
            Self::Symbol(s) => Ok(&s[..]),
            _ => Err(ValueTypeError::Symbol),
        }
    }
    pub fn try_as_string(&self) -> Result<&str, ValueTypeError>
    {
        match self {
            Self::Symbol(s) |
            Self::String(s) => Ok(&s[..]),
            _ => Err(ValueTypeError::String),
        }
    }
    pub fn try_as_list(&self) -> Result<&[Value], ValueTypeError>
    {
        match self {
            Self::List(l) => Ok(&l[..]),
            _ => Err(ValueTypeError::List),
        }
    }

    pub fn as_symbol(&self) -> Option<&str>
    {
        match self {
            Self::Symbol(s) => Some(&s[..]),
            _ => None,
        }
    }
    pub fn as_string(&self) -> Option<&str>
    {
        match self {
            Self::Symbol(s) |
            Self::String(s) => Some(&s[..]),
            _ => None,
        }
    }
    pub fn as_list(&self) -> Option<&[Value]>
    {
        match self {
            Self::List(l) => Some(&l[..]),
            _ => None,
        }
    }
}

/// Error when using `try_as_*` functions on `Value`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy)]
pub enum ValueTypeError
{
    Symbol,
    String,
    List,
}

/// Error when parsing a `Value` from a string.
#[derive(Debug)]
pub struct ValueParseError(String);

impl error::Error for ValueParseError{}
impl fmt::Display for ValueParseError
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
    {
        write!(f, "cannot parse {:?}", self.0)
    }
}

impl error::Error for ValueTypeError{}
impl fmt::Display for ValueTypeError
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
    {
        write!(f, "type error: expected ")?;
        match self {
            Self::Symbol => write!(f, "symbol"),
            Self::String => write!(f, "string"),
            Self::List => write!(f, "list"),
        }
    }
}
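A short sketch of the intended scoping behaviour of `Lexenv` above (hypothetical usage):

#[cfg(test)]
mod lexenv_scoping {
    use super::*;

    #[test]
    fn inner_level_shadows_then_restores() {
        let mut env = Lexenv::new();
        env.insert("x".to_string(), Value::String("outer".to_string()));
        env.push();
        env.insert("x".to_string(), Value::String("inner".to_string()));
        // The inner level shadows the outer binding...
        assert_eq!(env.lookup("x"), Some(&Value::String("inner".to_string())));
        env.pop();
        // ...and popping restores it.
        assert_eq!(env.lookup("x"), Some(&Value::String("outer".to_string())));
    }
}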
@@ -1,13 +0,0 @@
//! Defined commands
use super::*;

use env::*;
use command::*;

/// Contains all operations
#[derive(Debug, Clone)]
pub struct Operations
{

}
@@ -1,288 +0,0 @@
//! For serializing
use super::*;
use tokio::prelude::*;
use std::ops::{Deref, DerefMut};
use serde::de::DeserializeOwned;

use async_compression::tokio_02::write::{
    BzEncoder,
    BzDecoder,
};

type Compressor<T> = BzEncoder<T>;
type Decompressor<T> = BzDecoder<T>;

const DEFER_DROP_SIZE_FLOOR: usize = 1024 * 1024; // 1 MB
const DESERIALISE_OBJECT_READ_LIMIT: usize = 1024 * 1024 * 1024 * 2; // 2GB

const BUFFER_SIZE: usize = 4096;

#[derive(Debug)]
enum MaybeCompressor<'a, T>
{
    Compressing(Compressor<&'a mut T>),
    Decompressing(Decompressor<&'a mut T>),
    Raw(&'a mut T),
}

/// Compress or decompress?
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy, PartialOrd, Ord)]
enum CompKind
{
    Compress,
    Decompress
}

impl Default for CompKind
{
    #[inline]
    fn default() -> Self
    {
        Self::Compress
    }
}

impl<'a, T> MaybeCompressor<'a, T>
{
    /// What kind is this compressor set to
    pub fn kind(&self) -> Option<CompKind>
    {
        Some(match self {
            Self::Raw(_) => return None,
            Self::Compressing(_) => CompKind::Compress,
            Self::Decompressing(_) => CompKind::Decompress,
        })
    }
}

impl<'a, T> MaybeCompressor<'a, T>
    where T: AsyncWrite + Send + Unpin + 'a
{
    pub fn new(raw: &'a mut T, compress: Option<CompKind>) -> Self
    {
        match compress {
            Some(CompKind::Compress) => Self::Compressing(Compressor::new(raw)),
            Some(CompKind::Decompress) => Self::Decompressing(Decompressor::new(raw)),
            None => Self::Raw(raw),
        }
    }
}

impl<'a, T> DerefMut for MaybeCompressor<'a, T>
    where T: AsyncWrite + Send + Unpin + 'a
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        match self {
            Self::Compressing(t) => t,
            Self::Decompressing(t) => t,
            Self::Raw(o) => o,
        }
    }
}
impl<'a, T> Deref for MaybeCompressor<'a, T>
    where T: AsyncWrite + Unpin + Send + 'a
{
    type Target = dyn AsyncWrite + Send + Unpin + 'a;

    fn deref(&self) -> &Self::Target {
        match self {
            Self::Compressing(t) => t,
            Self::Decompressing(t) => t,
            Self::Raw(o) => o,
        }
    }
}

async fn copy_with_limit<R, W>(mut from: R, mut to: W) -> io::Result<usize>
    where R: AsyncRead + Unpin,
          W: AsyncWrite + Unpin
{
    let mut buffer = [0u8; BUFFER_SIZE];

    let mut read;
    let mut done = 0;
    while {read = from.read(&mut buffer[..]).await?; read > 0}
    {
        to.write_all(&buffer[..read]).await?;
        done += read;

        if done > DESERIALISE_OBJECT_READ_LIMIT {
            return Err(io::Error::new(io::ErrorKind::ConnectionAborted, eyre!("Exceeded limit, aborting.")
                                      .with_section(|| DESERIALISE_OBJECT_READ_LIMIT.header("Object read size limit was"))
                                      .with_section(|| done.header("Currently read"))));
        }
    }
    Ok(done)
}

/// Deserialise an object from this stream asynchronously
///
/// # Note
/// If the stream is compressed, `compressed` must be set to true or an error will be produced.
/// An autodetect feature may be added in the future
pub async fn read_async<T: DeserializeOwned + Send + 'static, R>(mut from: R, compressed: bool) -> eyre::Result<T>
    where R: AsyncRead + Unpin + Send
{
    let sect_type_name = || std::any::type_name::<T>().header("Type trying to deserialise was");
    let sect_stream_type_name = || std::any::type_name::<R>().header("Stream type was");

    let vec = {
        let mut vec = Vec::new();
        let mut writer = MaybeCompressor::new(&mut vec, compressed.then(|| CompKind::Decompress));

        copy_with_limit(&mut from, writer.deref_mut()).await
            .wrap_err(eyre!("Failed to copy stream into in-memory buffer"))
            .with_section(sect_type_name.clone())
            .with_section(sect_stream_type_name.clone())?;

        writer.flush().await.wrap_err(eyre!("Failed to flush decompression stream"))?;
        writer.shutdown().await.wrap_err(eyre!("Failed to shutdown decompression stream"))?;
        vec
    };

    tokio::task::spawn_blocking(move || {
        (serde_cbor::from_slice(&vec[..])
         .wrap_err(eyre!("Failed to deserialise decompressed data"))
         .with_section(sect_type_name.clone())
         .with_section(sect_stream_type_name.clone()),

         {drop!(vec vec);}).0
    }).await.wrap_err(eyre!("Panic while deserialising decompressed data"))?
}

/// Serialise this object asynchronously
///
/// # Note
/// This compresses the output stream.
/// It cannot be used by `prealloc` read/write functions, as they do not compress.
pub async fn write_async<T: Serialize, W>(mut to: W, item: &T, compress: bool) -> eyre::Result<()>
    where W: AsyncWrite + Unpin + Send
{
    let sect_type_name = || std::any::type_name::<T>().header("Type trying to serialise was");
    let sect_stream_type_name = || std::any::type_name::<W>().header("Stream type was");

    let vec = tokio::task::block_in_place(|| serde_cbor::to_vec(item))
        .wrap_err(eyre!("Failed to serialise item"))
        .with_section(sect_stream_type_name.clone())
        .with_section(sect_type_name.clone())?;

    {
        let mut stream = MaybeCompressor::new(&mut to, compress.then(|| CompKind::Compress));

        cfg_eprintln!(Verbose; config::get_global(), "Writing {} bytes of type {:?} to stream of type {:?}", vec.len(), std::any::type_name::<T>(), std::any::type_name::<W>());

        stream.write_all(&vec[..])
            .await
            .wrap_err(eyre!("Failed to write serialised memory to stream"))
            .with_section(|| vec.len().to_string().header("Size of the serialised object was"))
            .with_section(sect_stream_type_name.clone())
            .with_section(sect_type_name.clone())?;

        stream.flush().await.wrap_err(eyre!("Failed to flush output compression stream"))?;
        stream.shutdown().await.wrap_err(eyre!("Failed to shutdown output compression stream"))?;
    }

    // Extremely overcomplicated concurrent flush+drop.
    use futures::FutureExt;
    let flush_fut = async {
        to.flush().await.wrap_err(eyre!("Failed to flush output backing stream"))?;
        to.shutdown().await.wrap_err(eyre!("Failed to shutdown output backing stream"))?;
        Ok::<(), eyre::Report>(())
    }.fuse();

    tokio::pin!(flush_fut);
    tokio::select!{
        res = &mut flush_fut => {
            return res;
        }
        _ = async move { drop!(async vec vec); } => {}
    }
    flush_fut.await
}

#[cfg(feature="prealloc")]
mod prealloc {
    use super::*;
    use std::os::unix::prelude::*;
    use std::fs::File;
    use memmap::{MmapMut, Mmap};

    /// Write this object as-is to this file descriptor.
    ///
    /// # Note
    /// This does not compress like `write_async()` does. It is just a 1-1 dump of the serialisation.
    /// Therefore, data written with `write_prealloc()` cannot then be read with `read_async()`.
    ///
    /// This is a completely synchronous operation. You should use it with `spawn_blocking` et al. to prevent task hangups.
    pub fn write_prealloc<T: Serialize>(file: &mut File, item: &T) -> eyre::Result<()>
    {
        let sect_type_name = || std::any::type_name::<T>().header("Type trying to serialise was");

        let vec = tokio::task::block_in_place(|| serde_cbor::to_vec(item))
            .wrap_err(eyre!("Failed to serialise item"))
            .with_section(sect_type_name.clone())?;

        let fd = file.as_raw_fd();

        cfg_eprintln!(Verbose; config::get_global(), "Writing (raw) {} bytes of type {:?} to fd {}", vec.len(), std::any::type_name::<T>(), fd);

        unsafe {
            if libc::fallocate(fd, 0, 0, vec.len().try_into()
                               .wrap_err(eyre!("Failed to cast buffer size to `off_t`"))
                               .with_section(|| vec.len().header("Buffer size was"))
                               .with_section(|| libc::off_t::MAX.to_string().header("Max value of `off_t` is"))
                               .with_warning(|| "Usually `off_t` is a signed 64 bit integer. Whereas the buffer's size is unsigned. On systems where `off_t` is 64 bits or higher, this should realistically never happen and probably indicates a bug.")?) < 0 {
                // Error
                Err(std::io::Error::last_os_error())
            } else {
                Ok(())
            }
        }.wrap_err("fallocate() failed")
            .with_section(|| vec.len().header("Bytes to allocate was"))
            .with_suggestion(|| "Make sure there is enough space for the fallocate() call")
            .with_suggestion(|| "Make sure we are able to write to the file")?;
        // fallocate() succeeded in allocating `vec.len()` bytes to map.
        let mut map = unsafe { MmapMut::map_mut(file) }
            .wrap_err(eyre!("Failed to map file for read + write"))
            .with_section(|| fd.header("fd was"))
            .with_suggestion(|| "Do we have the permissions for both reading and writing of this file and fd?")?;

        eyre_assert!(tokio::task::block_in_place(|| unsafe {
            bytes::copy_nonoverlapping_unchecked(&vec[..], &mut map[..])
        }) == vec.len(); "Length mismatch")
            .with_section(|| vec.len().header("Expected"))
            .with_section(|| map.len().header("Got"))
            .with_warning(|| "This should never happen, it indicates a bug")?;

        tokio::task::block_in_place(move || map.flush())
            .wrap_err(eyre!("Failed to flush map in place"))?; // map is dropped here

        drop!(vec vec);
        Ok(())
    }

    /// Read this object as-is from this file descriptor.
    ///
    /// # Note
    /// This does not decompress like `read_async()` does. It is just a 1-1 read of the serialisation.
    /// Therefore, `read_prealloc()` cannot be used with data written by `write_async()`.
    ///
    /// This is a completely synchronous operation. You should use it with `spawn_blocking` et al. to prevent task hangups.
    // This must be `DeserializeOwned` because the lifetime it is bound to is that of the memory map created and destroyed in the function, not of the fd `file` itself.
    pub fn read_prealloc<T: serde::de::DeserializeOwned>(file: &File) -> eyre::Result<T>
    {
        let map = unsafe { Mmap::map(file) }
            .wrap_err(eyre!("Failed to map file for read"))
            .with_section(|| file.as_raw_fd().header("fd was"))
            .with_suggestion(|| "Do we have the permissions for both reading and writing of this file and fd?")?;

        tokio::task::block_in_place(move || serde_cbor::from_slice(&map[..]))
            .wrap_err(eyre!("Failed to deserialise from map"))
            .with_note(|| "The prealloc read and write functions handle only *uncompressed* data. Make sure you're not feeding it compressed data (written with the non-prealloc read and write functions)")
    }
}
#[cfg(feature="prealloc")] pub use prealloc::{
    write_prealloc as write_sync_map,
    read_prealloc as read_sync_map,
};
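A sketch of the intended round trip through `write_async`/`read_async` (illustrative only; it assumes an in-memory `Vec<u8>`/`&[u8]` pair is acceptable as the async writer and reader here, and the `compress`/`compressed` flags must match):

async fn round_trip() -> eyre::Result<()> {
    let data: Vec<String> = vec!["a".into(), "b".into()];

    let mut buf = Vec::new();
    write_async(&mut buf, &data, true).await?;             // bzip2-compressed CBOR
    let back: Vec<String> = read_async(&buf[..], true).await?;

    assert_eq!(back, data);
    Ok(())
}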