Rework Config

Fortune for transfer's current commit: Great blessing − 大吉
master
Avril 3 years ago
parent b20878a633
commit f22bda2f9a
Signed by: flanchan
GPG Key ID: 284488987C31F630

1
.gitignore vendored

@@ -1,2 +1,3 @@
/target
*~
src-old/

24
Cargo.lock generated

@@ -57,7 +57,7 @@ dependencies = [
"futures-core",
"memchr",
"pin-project-lite 0.2.7",
"tokio 1.12.0",
"tokio 1.13.0",
]
[[package]]
@@ -532,9 +532,9 @@ dependencies = [
[[package]]
name = "instant"
version = "0.1.11"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "716d3d89f35ac6a34fd0eed635395f4c3b76fa889338a4632e5231a8684216bd"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if 1.0.0",
]
@@ -653,9 +653,9 @@ dependencies = [
[[package]]
name = "mio"
version = "0.7.13"
version = "0.7.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c2bdb6314ec10835cd3293dd268473a835c02b7b352e788be788b3c6ca6bb16"
checksum = "8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc"
dependencies = [
"libc",
"log",
@@ -1164,15 +1164,15 @@ dependencies = [
[[package]]
name = "tokio"
version = "1.12.0"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2c2416fdedca8443ae44b4527de1ea633af61d8f7169ffa6e72c5b53d24efcc"
checksum = "588b2d10a336da58d877567cd8fb8a14b463e2104910f8132cd054b4b96e29ee"
dependencies = [
"autocfg",
"bytes 1.1.0",
"libc",
"memchr",
"mio 0.7.13",
"mio 0.7.14",
"num_cpus",
"once_cell",
"parking_lot",
@@ -1184,9 +1184,9 @@ dependencies = [
[[package]]
name = "tokio-macros"
version = "1.3.0"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54473be61f4ebe4efd09cec9bd5d16fa51d70ea0192213d754d2d500457db110"
checksum = "114383b041aa6212c579467afa0075fbbdd0718de036100bc0ba7961d8cb9095"
dependencies = [
"proc-macro2",
"quote",
@@ -1203,7 +1203,7 @@ dependencies = [
"libc",
"scoped-tls",
"slab",
"tokio 1.12.0",
"tokio 1.13.0",
]
[[package]]
@@ -1229,7 +1229,7 @@ dependencies = [
"serde_json",
"smallvec",
"stackalloc",
"tokio 1.12.0",
"tokio 1.13.0",
"tokio-uring",
]

@@ -1,85 +0,0 @@
//! Arg parsing and process info
use super::*;
use std::fmt;
lazy_static!{
static ref EXEC: String = std::env::args().next().unwrap();
}
pub fn program_name() -> &'static str
{
&EXEC[..]
}
/// Program usage
#[derive(Debug)]
pub struct Usage;
impl Usage
{
pub fn print_and_exit(self, code: i32) -> !
{
if code == 0 {
print!("{}", self);
} else {
eprint!("{}", self);
}
std::process::exit(code)
}
}
fn splash(f: &mut fmt::Formatter<'_>) -> fmt::Result
{
writeln!(f, "transfer v{} - simple network file transfer", env!("CARGO_PKG_VERSION"))?;
writeln!(f, " written by {} with <3. License GPL3+", env!("CARGO_PKG_AUTHORS"))?;
writeln!(f, "")
}
impl fmt::Display for Usage
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
{
splash(f)?;
writeln!(f, "Usage: {} S <bind> --send|--recv [OPTIONS] <file...>", program_name())?;
writeln!(f, "Usage: {} C <connect> --send|--recv [OPTIONS] <output...>", program_name())?;
writeln!(f, "Usage: {} --help", program_name())?;
writeln!(f, "\nNetworking mode:")?;
writeln!(f, " S: Server mode. Bind to an address/port")?;
writeln!(f, " C: Client mode. Connect to a listening address/port")?;
writeln!(f, "\nSEND OPTIONS:")?;
writeln!(f, " -e\t\t\tEncrypt file(s)")?;
writeln!(f, " -c\t\t\tCompress files")?;
writeln!(f, " --buffer-size <bytes>\tSize of file buffer")?;
writeln!(f, " -a\t\t\tSend file names")?;
writeln!(f, " -k\t\t\tSupport continuation of failed downloads")?;
writeln!(f, "\nRECV OPTIONS:")?;
writeln!(f, " -i\t\t\tAsk before starting downloads")?;
writeln!(f, " -k\t\t\tContinue a previously started download")?;
Ok(())
}
}
/// The process parsed from command line
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Process
{
/// The parsed config (includes mode)
pub config: config::Config,
/// The listed paths
pub paths: Vec<String>,
/// Use stdin/out
pub stdio: bool,
}
/// An operation parsed from command line arguments
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Op
{
Process(Box<Process>),
Help,
}
mod parse;
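
A hypothetical dispatch sketch (not part of this commit) for the `Op` the parser returns: `--help` prints the usage text above and exits, anything else hands the parsed `Process` on. The function name is made up, and it assumes `use args::{Op, Usage};` and the crate's `eyre` re-exports are in scope.

fn dispatch(op: Op) -> eyre::Result<()>
{
    match op {
        // `print_and_exit` never returns, so this arm coerces to the return type
        Op::Help => Usage.print_and_exit(0),
        Op::Process(proc) => {
            println!("{} path(s) listed, stdio: {}", proc.paths.len(), proc.stdio);
            // ...run the transfer described by `proc.config` here...
            Ok(())
        },
    }
}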

@@ -1,96 +0,0 @@
//! Parsing args
use super::*;
use ext::*;
use std::iter;
/// Arg state
#[derive(Debug, Default)]
struct State
{
is_server: bool,
is_sending: bool,
enc: Option<bool>,
comp: Option<bool>,
bufsz: Option<usize>,
arc: Option<bool>,
contin: Option<bool>,
oneshot: Option<bool>,
inter: Option<bool>,
files: Vec<String>,
}
impl State
{
fn mode(&self) -> impl fmt::Display + 'static
{
let send = r#if!(self.is_sending, "send", "recv");
let serve = r#if!(self.is_server, "server", "client");
lazy_format!("{} ({})", send, serve)
}
}
fn parse_schain<I>(state: &mut State, single: I) -> eyre::Result<()>
where I: IntoIterator<Item=char>
{
for ch in single.into_iter().map(char::to_lowercase).flatten()
{
match ch {
'e' => state.enc = Some(true),
'c' => state.comp = Some(true),
'a' => state.arc = Some(true),
'k' => state.contin = Some(true),
'1' if state.is_server => state.oneshot = Some(true),
'i' if !state.is_sending => state.inter = Some(true),
x => return Err(eyre!("Unknown option for mode {}", state.mode()))
.with_section(move || x.header("Option was"))
.with_note(move || "Some options are only valid for certain modes"),
}
}
Ok(())
}
/// Try to parse an iterator of strings (usually the command-line arguments) into an `Op`.
pub fn parse_iter<I>(args: &mut I) -> eyre::Result<Op>
where I: Iterator<Item= String> + ?Sized
{
let state = parse_iter_raw(args)
.wrap_err(eyre!("Invalid arguments"))
.with_suggestion(|| "Try passing `--help`")?; // Attach the `--help` suggestion here; it is less useful when surfaced later from the state's own validation.
todo!("TODO: `impl TryFrom<State> for Op`, etc")
}
fn parse_iter_raw<I>(args: &mut I) -> eyre::Result<State>
where I: Iterator<Item= String> + ?Sized
{
let mut state = State::default();
//TODO: Parse modes before this.
while let Some(arg) = args.next()
{
let mut chars = arg.chars();
match (&mut chars).take(2).collect_array::<2>() {
['-', '-'] => {
// Long option
let opt = &arg[2..];
match opt {
"--" => break,
//TODO: Long options, pulling option param from `args` if needed, etc.
unknown => return Err(eyre!("Unknown option for mode {}", state.mode()))
.with_section(|| format!("--{}", unknown).header("Option was"))
.with_note(|| "Some options are only valid for certain modes"),
}
},
['-', n] => {
// Small option
parse_schain(&mut state, iter::once(n).chain(chars))?;
},
_ => {
// Not an option
state.files.push(arg);
},
}
}
Ok(state)
}
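
A hypothetical test sketch (not in this commit) exercising the short-option chain parser above with the default (client, recv) state; the option characters and their mode restrictions are taken from `parse_schain` as written.

#[cfg(test)]
mod tests
{
    use super::*;

    #[test]
    fn short_chain_sets_flags() -> eyre::Result<()>
    {
        let mut state = State::default();
        // a "-eck" style chain: encrypt, compress, continuation
        parse_schain(&mut state, "eck".chars())?;
        assert_eq!(state.enc, Some(true));
        assert_eq!(state.comp, Some(true));
        assert_eq!(state.contin, Some(true));
        // '1' (oneshot) is only accepted in server mode, so it is rejected here
        assert!(parse_schain(&mut state, "1".chars()).is_err());
        Ok(())
    }
}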

@@ -1,40 +0,0 @@
use openssl::{
symm::{
Cipher, Crypter, Mode,
},
error::ErrorStack,
};
use crate::key::{Key, IV};
/// Size of the key used for the cipher
pub const KEY_SIZE: usize = 32;
/// Size of the IV used for the cipher
pub const IV_SIZE: usize = 12;
static NEW_CIPHER: fn() -> Cipher = Cipher::chacha20_poly1305;
#[inline] pub fn decrypter(key: impl AsRef<Key>, iv: impl AsRef<IV>) -> Result<Crypter, ErrorStack>
{
Crypter::new(
NEW_CIPHER(),
Mode::Decrypt,
key.as_ref().as_ref(),
Some(iv.as_ref().as_ref())
)
}
#[inline] pub fn encrypter(key: impl AsRef<Key>, iv: impl AsRef<IV>) -> Result<Crypter, ErrorStack>
{
Crypter::new(
NEW_CIPHER(),
Mode::Encrypt,
key.as_ref().as_ref(),
Some(iv.as_ref().as_ref())
)
}
/// Generate a random key and IV for the chacha20_poly1305 cipher
#[inline(always)] pub fn keygen() -> (Key, IV)
{
(Key::new(), IV::new())
}
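
A minimal round-trip sketch (not part of this commit) showing how the `encrypter`/`decrypter` helpers above are meant to be used; the function name and message are made up. Note that openssl's `Crypter::update` requires the output buffer to be at least `input.len() + block_size` bytes long.

fn roundtrip_sketch() -> Result<(), ErrorStack>
{
    let (key, iv) = keygen();
    let plain = b"hello, transfer";

    // one spare byte keeps update()'s output-size requirement satisfied
    let mut enc_buf = vec![0u8; plain.len() + 1];
    let mut dec_buf = vec![0u8; plain.len() + 1];

    let mut enc = encrypter(&key, &iv)?;
    let n = enc.update(&plain[..], &mut enc_buf[..])?;

    let mut dec = decrypter(&key, &iv)?;
    let m = dec.update(&enc_buf[..n], &mut dec_buf[..])?;

    assert_eq!(&dec_buf[..m], &plain[..]);
    Ok(())
}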

@@ -1,93 +1,103 @@
//! Configuration
use super::*;
use std::net::SocketAddr;
use sock::SocketAddr;
pub const DEFAULT_BUFFER_SIZE: usize = 4096;
/// Configuration for sending
/// What kind of compression to use
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Copy)]
#[repr(u8)]
pub enum CompressionKind
{
Brotli,
GZip,
BZ2,
}
// -- serve / conn --
/// Server configuration
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SendConfig
pub struct ServerConfig
{
encrypt: bool,
compress: bool,
buffer_size: usize,
archive: bool,
//oneshot: bool, // Server specific
continuation: bool,
pub bind: SocketAddr,
pub oneshot: bool,
}
impl Default for SendConfig
/// Client configuration
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ClientConfig
{
#[inline]
fn default() -> Self
{
Self {
encrypt: false,
compress: false,
buffer_size: DEFAULT_BUFFER_SIZE,
archive: false,
//oneshot: false,
continuation: false,
}
}
pub connect: SocketAddr,
pub retry: usize,
}
/// Configuration for receiving
// -- send / recv ---
/// Specifying a sending name, or send the filename
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RecvConfig
pub enum SendingName
{
interactive: bool,
continuation: bool,
Filename,
Specific(String),
}
impl Default for RecvConfig
impl Default for SendingName
{
#[inline]
fn default() -> Self
{
Self {
interactive: false,
continuation: false,
}
Self::Filename
}
}
/// Instructions for binding (server) mode
/// Configuration for sending file
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Server
pub struct SendConfig
{
listen: SocketAddr, //TODO: Allow multiple?
pub encrypt: bool,
pub sign: bool,
pub compress: Option<CompressionKind>,
pub buffer_size: usize,
pub continuation: bool,
pub name: Option<SendingName>,
}
/// Instructions for connecting (client) mode
/// Configuration for receiving file
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Client
pub struct RecvConfig
{
connect: SocketAddr,
pub interactive: bool,
pub name: SendingName,
}
/// A send or recv operation
// -- modes --
/// Mode of file
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Operation
pub enum TransferMode
{
Send(SendConfig),
Recv(RecvConfig),
}
/// Whether to serve (listen) or connect directly.
/// Configuration of connection
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Mode
pub enum ConnectionMode
{
Server(Server),
Client(Client),
Server(ServerConfig),
Client(ClientConfig),
}
/// Program full configuration
// --
/// Full program configuration
///
/// You should box this, it's big.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Config
{
/// Which operation (send/recv) are we performing?
pub op: Operation,
/// How are we performing it? (Bind/connect)
pub mode: Mode,
pub file: TransferMode,
pub connection: ConnectionMode,
}
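
A hypothetical construction sketch (not in this commit) showing how the reworked types compose: a one-shot server sending a single encrypted, Brotli-compressed file under its own name. The bind address is made up, and it assumes `DEFAULT_BUFFER_SIZE` remains exported from this module.

fn example_config() -> Config
{
    Config {
        file: TransferMode::Send(SendConfig {
            encrypt: true,
            sign: false,
            compress: Some(CompressionKind::Brotli),
            buffer_size: DEFAULT_BUFFER_SIZE,
            continuation: false,
            name: Some(SendingName::Filename),
        }),
        connection: ConnectionMode::Server(ServerConfig {
            // `sock::SocketAddr` also accepts `unix:/<path>` strings
            bind: "127.0.0.1:4433".parse().expect("valid address"),
            oneshot: true,
        }),
    }
}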

@@ -1,43 +0,0 @@
//! Encodings
use super::*;
use ext::*;
use std::{fmt, error};
use bytes::{
Buf,
Bytes,
};
use std::io;
use tokio::io::{
AsyncRead, AsyncWrite,
AsyncReadExt, AsyncWriteExt,
};
use serde::{
Serialize,
de::DeserializeOwned
};
use cryptohelpers::{
sha256,
rsa,
};
/// Size of buffer to use when copying a stream.
pub const STREAMING_BUFFER_SIZE: usize = 4096;
pub mod ser;
/// Copy `from` to `to`, transforming the data with the provided key and IV.
///
/// # Stream cipher usage
/// The copy is buffered by `STREAMING_BUFFER_SIZE` bytes, and the cipher applied to each read buffer.
/// If the buffer cannot be filled (because the stream reached EOF before filling it), then only the full portion of the buffer is transformed and written.
#[inline] pub async fn cc20_copy_stream<F, T, K>(from: &mut F, to: &mut T, keys: K, decrypt: bool) -> io::Result<(usize, usize)>
where K: key::CC20Key,
F: AsyncRead + Unpin + ?Sized,
T: AsyncWrite + Unpin + ?Sized
{
if decrypt {
ser::cha_copy::<F, T, STREAMING_BUFFER_SIZE, true>(from, to, keys.key(), keys.iv()).await
} else {
ser::cha_copy::<F, T, STREAMING_BUFFER_SIZE, false>(from, to, keys.key(), keys.iv()).await
}
}
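
A small usage sketch (not from this commit) for `cc20_copy_stream`: encrypt an in-memory byte stream, then run the same copy in decrypt mode to recover the original. `(Key, IV)` implements `key::CC20Key` and both types are `Copy`, so the pair can be reused; the function name and data are made up.

async fn cc20_roundtrip_sketch() -> io::Result<()>
{
    let keys = cha::keygen();
    let plain = b"some file contents".to_vec();

    let mut encrypted = Vec::new();
    cc20_copy_stream(&mut &plain[..], &mut encrypted, keys, false).await?;

    let mut decrypted = Vec::new();
    cc20_copy_stream(&mut &encrypted[..], &mut decrypted, keys, true).await?;

    assert_eq!(decrypted, plain);
    Ok(())
}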

@@ -1,741 +0,0 @@
//! Data serialisation
use super::*;
use bytes::BufMut;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy, PartialOrd, Ord)]
pub enum CompressionKind
{
Brotli,
//TODO: Add cases (and async_compression features) for these three
Xz,
GZip,
Bz2,
}
impl Default for CompressionKind
{
#[inline]
fn default() -> Self
{
//TODO: Should Brotli be the default? Check the sizes of compressed binary-encoded output and compare modes.
Self::Brotli
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub enum EncryptionKind
{
Chacha20((key::Key, key::IV))
}
impl Default for EncryptionKind
{
#[inline]
fn default() -> Self
{
Self::Chacha20(cha::keygen())
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub enum SerialFormat
{
/// CBOR
Binary,
/// JSON
Text,
}
impl Default for SerialFormat
{
#[inline]
fn default() -> Self
{
Self::Binary
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub struct SendOpt
{
comp: Option<CompressionKind>,
encrypt: Option<EncryptionKind>,
format: SerialFormat,
hash: bool,
//pub sign: Option<???>, //TODO: RSA private + public key types
}
ref_self!(SendOpt);
impl Default for SendOpt
{
#[inline]
fn default() -> Self
{
Self::new()
}
}
impl SendOpt
{
pub const NORMAL: Self = Self::new();
pub const CHECKED: Self = Self::new_checked();
pub const COMPRESSED: Self = Self::new_compressed();
/// Add compression
pub const fn compress(self, k: CompressionKind) -> Self
{
Self {
comp: Some(k),
..self
}
}
/// Change the output format
///
/// Default: **Binary**
///
/// # Text format note
/// When using compression and/or encryption, the text format will end up unreadable anyway.
/// Likewise when using signing or hashing, a binary header is prepended to the message regardless of format.
///
/// 2 ASCII whitespace characters are prepended to the message regardless of any other options: a tab (`\t`), then either a space (a header follows) or a newline (no header). These are used to determine whether the message is valid and whether a header needs to be read from it.
/// Most external text-format parsing software should ignore these and be able to parse a non-headered message.
pub const fn format(self, format: SerialFormat) -> Self
{
Self {
format,
..self
}
}
/// Enable or disable hashing
///
/// Default: *Disabled*
pub const fn hash(self, hash: bool) -> Self
{
Self {
hash,
..self
}
}
/// Add encryption with constant parameters
pub const fn encrypt(self, k: EncryptionKind) -> Self
{
Self {
encrypt: Some(k),
..self
}
}
/// Add default encryption with a randomly generated key and IV.
pub fn encrypt_cc20_gen(self) -> Self
{
self.encrypt(EncryptionKind::Chacha20(cha::keygen()))
}
/// Normal options.
///
/// Does not enable any features.
pub const fn new() -> Self
{
Self {
comp: None,
encrypt: None,
format: SerialFormat::Binary,
hash: false,
}
}
/// Normal options with data compression.
///
/// Uses Brotli compression by default.
pub const fn new_compressed() -> Self
{
Self {
comp: Some(CompressionKind::Brotli),
..Self::new()
}
}
/// Normal options with added integrity checks.
///
/// Increases the final size of the object, but provides data integrity and source validation.
//TODO: Give sig param
pub const fn new_checked() -> Self
{
Self {
hash: true,
//sig: ???
..Self::new()
}
}
/// Should a header be generated for this data?
#[inline(always)] fn needs_header(&self) -> bool
{
self.hash || /*self.sig*/ false
}
#[inline] fn creates_header(&self) -> bool
{
self.needs_header()
}
/// Does the binary data of this format require special handling?
///
/// True if encryption and/or compression are specified.
#[inline(always)] fn is_spec(&self) -> bool
{
self.comp.is_some() || self.encrypt.is_some()
}
}
pub type RecvOpt = SendOpt;
/// Default buffer size for encryption transform stream copying.
pub const DEFAULT_BUFSIZE: usize = 4096;
pub(super) async fn cha_copy<F, T, const BUFSIZE: usize, const DECRYPT: bool>(from: &mut F, to: &mut T, key: &key::Key, iv: &key::IV) -> io::Result<(usize, usize)>
where F: AsyncRead + Unpin + ?Sized,
T: AsyncWrite + Unpin + ?Sized
{
let mut written=0;
let mut read=0;
let mut r;
let mut buffer = [0u8; BUFSIZE];
let mut cbuffer = [0u8; BUFSIZE];
let mut crypter = if DECRYPT {
cha::decrypter(key, iv)
} else {
cha::encrypter(key, iv)
}?;
while { r = from.read(&mut buffer[..]).await?; r > 0 } {
read += r;
r = crypter.update(&buffer[..r], &mut cbuffer[..])?;
to.write_all(&cbuffer[..r]).await?; // write_all: a bare write() may only perform a partial write
written += r;
}
Ok((written, read))
}
const H_SALT_SIZE: usize = 32;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)]
struct FormatHeader
{
hash: Option<(sha256::Sha256Hash, [u8; H_SALT_SIZE])>,
sig: Option<rsa::Signature>,
}
#[derive(Debug)]
pub enum HeaderValidationError
{
Malformed,
Hash,
Signature,
}
impl error::Error for HeaderValidationError{}
impl fmt::Display for HeaderValidationError
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
{
match self {
Self::Malformed => write!(f, "header was malformed"),
Self::Hash => write!(f, "invalid hash"),
Self::Signature => write!(f, "signature could not be verified"),
}
}
}
impl FormatHeader
{
pub const SIZE: usize = sha256::SIZE + H_SALT_SIZE + cryptohelpers::consts::RSA_SIG_SIZE + 2;
const fn empty_array() -> [u8; Self::SIZE]
{
[0u8; Self::SIZE]
}
fn gen_salt() -> [u8; H_SALT_SIZE]
{
let mut out = [0u8; H_SALT_SIZE];
getrandom::getrandom(&mut out[..]).expect("rng fatal");
out
}
fn generate(data: impl AsRef<[u8]>, opt: &SendOpt) -> Self
{
let hash = if opt.hash {
let salt = Self::gen_salt();
Some((sha256::compute_slices(iter![data.as_ref(), &salt[..]]), salt))
} else {
None
};
let sig = if false /*let Some(sign_with) = opt.sign*/ {
unimplemented!()
} else {
None
};
Self {
hash,
sig //TODO
}
}
fn validate(&self, data: impl AsRef<[u8]>, opt: &RecvOpt) -> Result<(), HeaderValidationError>
{
if opt.hash {
if !self.hash.as_ref().map(|(hash, salt)| &sha256::compute_slices(iter![data.as_ref(), &salt[..]]) == hash).unwrap_or(true) {
return Err(HeaderValidationError::Hash);
}
}
if /*opt.sig*/ false {
unimplemented!();
//if let Some(verify_with) = opt.sig //XXX: How will this work? We will need to store **either** a private or public key in Send/RecvOpt and dynamically dispatch over it.
}
Ok(())
}
fn to_buffer(&self, mut to: impl BufMut)
{
if let Some(hash) = &self.hash
{
to.put_u8(1);
to.put_slice(hash.0.as_ref());
to.put_slice(hash.1.as_ref());
} else {
to.put_u8(0);
to.put_bytes(0, sha256::SIZE + H_SALT_SIZE);
}
if let Some(sig) = &self.sig
{
to.put_u8(1);
to.put_slice(sig.as_ref());
} else {
to.put_u8(0);
to.put_bytes(0, cryptohelpers::consts::RSA_SIG_SIZE);
}
}
fn from_buffer(mut from: impl Buf) -> Self
{
let hash = if from.get_u8() == 1 {
let mut hash = sha256::Sha256Hash::default();
let mut salt = [0u8; H_SALT_SIZE];
from.copy_to_slice(hash.as_mut());
from.copy_to_slice(&mut salt[..]);
Some((hash,salt))
} else {
from.advance(sha256::SIZE + H_SALT_SIZE);
None
};
let sig = if from.get_u8() == 1 {
let mut sig = rsa::Signature::default();
from.copy_to_slice(sig.as_mut());
Some(sig)
} else {
from.advance(sha256::SIZE);
None
};
Self {
hash, sig
}
}
#[inline] fn to_array(&self) -> [u8; Self::SIZE]
{
let mut ar = [0u8; Self::SIZE];
self.to_buffer(&mut &mut ar[..]);
ar
}
#[inline] fn from_array(ar: [u8; Self::SIZE]) -> Self
{
Self::from_buffer(&ar[..])
}
}
const INFO_ASSERT_VALID: u8 = b'\t';
const INFO_WITH_HEADER: u8 = b' ';
const INFO_NO_HEADER: u8 = b'\n';
/// If passing an externally generated message to be deserialised here, it must be prefixed with this regardless of its format.
///
/// Operations that generate or require a message header (hash and signature validation) will not work on these messages; if they are needed, they must be handled elsewhere by the caller.
pub const BARE_MESSAGE_PREFIX: [u8; 2] = [INFO_ASSERT_VALID, INFO_NO_HEADER];
pub(super) async fn de_singleton_inner<T: DeserializeOwned, B, F>(buf: F, from: &[u8], how: &RecvOpt) -> Result<T, TransformErrorKind>
where B: AsRef<[u8]> + AsyncWrite + Unpin + Default,
F: FnOnce(&[u8]) -> B
{
// Read header
let mut header = FormatHeader::empty_array();
if from.len() < 2 || from[0] != INFO_ASSERT_VALID {
return Err(TransformErrorKind::InvalidHeader(HeaderValidationError::Malformed));
}
let (inf, mut from) = {
(&from[..2], &from[2..])
};
from = {
if inf[1] == INFO_WITH_HEADER {
if from.len() < FormatHeader::SIZE {
return Err(TransformErrorKind::InvalidHeader(HeaderValidationError::Malformed));
}
let hf = &from[..FormatHeader::SIZE];
header.copy_from_slice(hf);
&from[FormatHeader::SIZE..]
} else {
&from[..]
}
};
// Decompressor
// The output is written to this (through writer)
let mut is_spec = false; // This is set later. The value will sometimes differ from `how.is_spec()` depending on combinations of options.
// The `spec` output buffer. Used if there are transformations that need to be done to the data before deserialisation
let mut buf = if how.is_spec() {
buf(&from)
} else {
Default::default()
};
//let mut buf = Vec::with_capacity(from.len());
from = {
let mut b;
let writer: &mut (dyn AsyncWrite + Unpin) =
if let Some(comp) = &how.comp {
is_spec = true;
match comp {
CompressionKind::Brotli => {
b = async_compression::tokio::write::BrotliDecoder::new(&mut buf);
&mut b
},
_ => unimplemented!(),
}
} else {
&mut buf
};
// Decrypt into `writer`.
if let Some(dec) = &how.encrypt {
// There is decryption to be done, decrypt into `writer` (which will handle decompression if needed).
// Return its output buffer
match dec {
EncryptionKind::Chacha20((k, iv)) => {
self::cha_copy::<_, _, DEFAULT_BUFSIZE, true>(&mut &from[..], writer, k, iv).await?;
},
}
// Required for decompression to complete
writer.flush().await?;
writer.shutdown().await?;
&buf.as_ref()[..]
} else if is_spec {
// There is decompression to be done through `writer`. Return its output buffer
writer.write_all(from).await?;
// Required for decompression to complete
writer.flush().await?;
writer.shutdown().await?;
&buf.as_ref()[..]
} else {
// There is neither decompression nor decryption to be done, return the input reference itself
from
}
};
// Deserialise
FormatHeader::from_array(header).validate(from, how)?;
let v = match how.format {
SerialFormat::Text => serde_json::from_slice(&from[..])?,
SerialFormat::Binary => serde_cbor::from_slice(&from[..])?,
};
Ok(v)
}
pub(super) async fn ser_singleton_inner<T: Serialize, V: AsyncWrite + Unpin, F>(to: F, value: &T, how: impl AsRef<SendOpt>) -> Result<(V, usize), TransformErrorKind>
where F: FnOnce(&Vec<u8>) -> V,
{
let how = how.as_ref();
let ser = match how.format {
SerialFormat::Text => serde_json::to_vec(value)?,
SerialFormat::Binary => serde_cbor::to_vec(value)?,
};
let header = if how.needs_header() {
let header = FormatHeader::generate(&ser, how);
header.to_array()
} else {
FormatHeader::empty_array()
};
let mut a;
let mut b;
let reader: &mut (dyn AsyncRead + Unpin) =
if let Some(comp) = &how.comp {
match comp {
CompressionKind::Brotli => {
a = async_compression::tokio::bufread::BrotliEncoder::new(tokio::io::BufReader::new(&ser[..]));
&mut a
},
_ => unimplemented!("Xz and GZip currently unimplemented."),
}
} else {
b = &ser[..];
&mut b
};
let mut ser = to(&ser);
if how.needs_header() {
ser.write_all(&[INFO_ASSERT_VALID, INFO_WITH_HEADER]).await?;
ser.write_all(&header[..]).await?;
} else {
ser.write_all(&[INFO_ASSERT_VALID, INFO_NO_HEADER]).await?;
}
let w= if let Some(enc) = &how.encrypt {
let n = match enc {
EncryptionKind::Chacha20((k, iv)) => {
self::cha_copy::<_, _, DEFAULT_BUFSIZE, false>(reader, &mut ser, k, iv).await?.0
},
};
// Required for compression to complete
ser.flush().await?;
ser.shutdown().await?;
n
} else {
tokio::io::copy(reader, &mut ser).await? as usize
};
Ok((ser, w))
// inner(value, how).map(|res| res.map_err(|k| SendError(Box::new((k, how.clone())))))
}
#[inline(always)] pub fn de_singleton<'a, T: DeserializeOwned + 'a, B: ?Sized + AsRef<[u8]> + 'a>(from: &'a B, how: &'a RecvOpt) -> impl Future<Output = Result<T, RecvError>> + 'a
{
use futures::prelude::*;
de_singleton_inner(|from| Vec::with_capacity(from.as_ref().len()), from.as_ref(), how)
.map_err(|k| RecvError(Box::new((k, how.clone()))))
}
#[inline(always)] pub fn ser_singleton<'a, T: Serialize>(value: &'a T, how: &'a SendOpt) -> impl Future<Output = Result<Vec<u8>, SendError>> + 'a
{
use futures::prelude::*;
// hack to avoid having to enable `try{}` feature :/
ser_singleton_inner(|c| Vec::with_capacity(c.len()), value, how)
.map_ok(|(v, _)| v)
.map_err(|k| SendError(Box::new((k, how.clone()))))
}
/// Deserialise a single object from a stream with the method described by `how`.
///
/// # Returns
/// The deserialised value and the number of bytes read from the stream.
pub async fn read_singleton<T: DeserializeOwned, S: ?Sized + AsyncRead + Unpin>(from: &mut S, how: &RecvOpt) -> Result<(T, usize), RecvError>
{
let (r, v) = async move {
let mut ibuf = [0u8; std::mem::size_of::<u64>()];
from.read_exact(&mut ibuf[..]).await?;
let n = u64::from_be_bytes(ibuf);
let mut v = Vec::with_capacity(n as usize);
tokio::io::copy(&mut from.take(n), &mut v).await
.map(move |_| (v.len() + ibuf.len(), v))
}.await
.map_err(|err| RecvError(Box::new((err.into(), how.to_owned()))))?;
let v = de_singleton(&v[..], how).await?;
Ok((v, r))
}
/// Serialise a single object to a stream with the method described by `how`.
#[inline] pub async fn write_singleton<T: Serialize, S: ?Sized + AsyncWrite + Unpin>(to: &mut S, value: &T, how: &SendOpt) -> Result<usize, SendError>
{
let (cont, v) = ser_singleton_inner(|n| Vec::with_capacity(n.len()), value, &how).await
.map_err(|k| SendError(Box::new((k, how.to_owned()))))?;
let n = async move {
to.write_all(&(v as u64).to_be_bytes()[..]).await?;
to.write_all(&cont).await
.map(|_| std::mem::size_of::<u64>() + cont.len())
}
.await
.map_err(|k| SendError(Box::new((k.into(), how.to_owned()))))?;
Ok(n)
}
/// Kind of error for a send (serialise) or receive (deserialise) operation
#[derive(Debug)]
pub enum TransformErrorKind
{
/// Invalid serialised format
Format,
/// Compression
Compress,
/// Encryption
Encrypt,
/// Misc. IO
//TODO: Disambiguate when this happens into the two above cases.
IO(io::Error),
/// The object header was invalid.
InvalidHeader(HeaderValidationError),
}
/// An error when sending / serialising an object.
#[derive(Debug)]
pub struct RecvError(Box<(TransformErrorKind, RecvOpt)>);
impl RecvError
{
#[inline] pub fn kind(&self) -> &TransformErrorKind
{
&self.0.0
}
}
impl SendError
{
#[inline] pub fn kind(&self) -> &TransformErrorKind
{
&self.0.0
}
}
impl error::Error for RecvError
{
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
Some(match &self.0.0
{
TransformErrorKind::IO(io) => io,
TransformErrorKind::InvalidHeader(ih) => ih,
_ => return None,
})
}
}
impl fmt::Display for RecvError
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
{
write!(f, "error when deserialising object with params {:?}: ", self.0.1)?;
match self.0.0 {
TransformErrorKind::Format => write!(f, "failed to deserialise object to data"),
TransformErrorKind::Compress => write!(f, "failed to decompress data"),
TransformErrorKind::Encrypt => write!(f, "failed to decrypt data"),
TransformErrorKind::IO(_) => write!(f, "i/o failure"),
TransformErrorKind::InvalidHeader(_) => write!(f, "invalid header"),
}
}
}
/// An error when sending / serialising an object.
#[derive(Debug)]
pub struct SendError(Box<(TransformErrorKind, SendOpt)>);
impl error::Error for SendError
{
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
Some(match &self.0.0
{
TransformErrorKind::IO(io) => io,
TransformErrorKind::InvalidHeader(ih) => ih,
_ => return None,
})
}
}
impl fmt::Display for SendError
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
{
write!(f, "error when serialising object with params {:?}: ", self.0.1)?;
match self.0.0 {
TransformErrorKind::Format => write!(f, "failed to serialise object to data"),
TransformErrorKind::Compress => write!(f, "failed to compress data"),
TransformErrorKind::Encrypt => write!(f, "failed to encrypt data"),
TransformErrorKind::IO(_) => write!(f, "i/o failure"),
TransformErrorKind::InvalidHeader(_) => write!(f, "invalid header"),
}
}
}
impl From<io::Error> for TransformErrorKind
{
fn from(from: io::Error) -> Self
{
Self::IO(from)
}
}
impl From<HeaderValidationError> for TransformErrorKind
{
fn from(from: HeaderValidationError) -> Self
{
Self::InvalidHeader(from)
}
}
impl From<serde_cbor::Error> for TransformErrorKind
{
#[inline] fn from(_: serde_cbor::Error) -> Self
{
Self::Format
}
}
impl From<serde_json::Error> for TransformErrorKind
{
#[inline] fn from(_: serde_json::Error) -> Self
{
Self::Format
}
}
#[cfg(test)]
mod test
{
use super::*;
async fn ser_de_with(how: SendOpt) -> eyre::Result<()>
{
use ext::*;
let obj = String::from("Hello world");
let var = ser_singleton(&obj, &how).await?;
eprintln!("Ser ({} bytes): {}", var.len(), var.hex());
let des: String = de_singleton(&var, &how).await?;
eprintln!("De: {:?}", des);
assert_eq!(obj, des);
Ok(())
}
#[tokio::test]
async fn ser_de() -> eyre::Result<()>
{
ser_de_with(Default::default()).await
}
#[tokio::test]
async fn ser_de_comp() -> eyre::Result<()>
{
ser_de_with(SendOpt {
comp: Some(CompressionKind::Brotli),
..Default::default()
}).await
}
#[tokio::test]
async fn ser_de_enc() -> eyre::Result<()>
{
ser_de_with(SendOpt {
encrypt: Some(EncryptionKind::Chacha20(cha::keygen())),
//hash: true,
..Default::default()
}).await
}
#[tokio::test]
async fn ser_de_comp_enc() -> eyre::Result<()>
{
ser_de_with(SendOpt {
encrypt: Some(EncryptionKind::Chacha20(cha::keygen())),
comp: Some(CompressionKind::Brotli),
..Default::default()
}).await
}
}
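
An illustrative sketch (not part of this commit) of the framing used by `write_singleton`/`read_singleton`: a big-endian `u64` length prefix followed by the (optionally transformed) serialised object. The function name is made up; it assumes the surrounding module's imports and a tokio runtime.

async fn wire_roundtrip_sketch() -> eyre::Result<()>
{
    let how = SendOpt::NORMAL;
    let value = String::from("hello");

    // serialise into an in-memory "stream"
    let mut wire: Vec<u8> = Vec::new();
    let written = write_singleton(&mut wire, &value, &how).await?;
    assert_eq!(written, wire.len()); // 8-byte length prefix + payload

    // read it back from the same bytes
    let (decoded, read): (String, usize) = read_singleton(&mut &wire[..], &how).await?;
    assert_eq!(read, wire.len());
    assert_eq!(decoded, value);
    Ok(())
}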

@@ -1,233 +0,0 @@
use std::{
mem,
iter::{
self,
ExactSizeIterator,
FusedIterator,
},
slice,
fmt,
};
pub use std::{
marker::{
Send, Sync, Unpin,
},
borrow::{ Borrow, BorrowMut },
convert::{
Infallible,
TryFrom,
TryInto
},
};
pub use tokio::{
io::{
AsyncWriteExt,
AsyncReadExt,
},
task::JoinHandle,
};
/// Make this type act as a reference to itself.
///
/// Implements `AsRef<T>` for type `T`.
#[macro_export] macro_rules! ref_self {
($type:ty) => {
impl AsRef<$type> for $type
{
#[inline] fn as_ref(&self) -> &$type
{
self
}
}
}
}
#[derive(Debug, Clone)]
pub struct HexStringIter<I>(I, [u8; 2]);
impl<I: Iterator<Item = u8>> HexStringIter<I>
{
/// Write this hex string iterator to a formattable buffer
pub fn consume<F>(self, f: &mut F) -> fmt::Result
where F: std::fmt::Write
{
if self.1[0] != 0 {
write!(f, "{}", self.1[0] as char)?;
}
if self.1[1] != 0 {
write!(f, "{}", self.1[1] as char)?;
}
for x in self.0 {
write!(f, "{:02x}", x)?;
}
Ok(())
}
/// Consume into a string
pub fn into_string(self) -> String
{
let mut output = match self.size_hint() {
(0, None) => String::new(),
(_, Some(x)) |
(x, None) => String::with_capacity(x),
};
self.consume(&mut output).unwrap();
output
}
}
pub trait HexStringIterExt<I>: Sized
{
fn into_hex(self) -> HexStringIter<I>;
}
pub type HexStringSliceIter<'a> = HexStringIter<iter::Copied<slice::Iter<'a, u8>>>;
pub trait HexStringSliceIterExt
{
fn hex(&self) -> HexStringSliceIter<'_>;
}
impl<S> HexStringSliceIterExt for S
where S: AsRef<[u8]>
{
fn hex(&self) -> HexStringSliceIter<'_>
{
self.as_ref().iter().copied().into_hex()
}
}
impl<I: IntoIterator<Item=u8>> HexStringIterExt<I::IntoIter> for I
{
#[inline] fn into_hex(self) -> HexStringIter<I::IntoIter> {
HexStringIter(self.into_iter(), [0u8; 2])
}
}
impl<I: Iterator<Item = u8>> Iterator for HexStringIter<I>
{
type Item = char;
fn next(&mut self) -> Option<Self::Item>
{
match self.1 {
[_, 0] => {
use std::io::Write;
write!(&mut self.1[..], "{:02x}", self.0.next()?).unwrap();
Some(mem::replace(&mut self.1[0], 0) as char)
},
[0, _] => Some(mem::replace(&mut self.1[1], 0) as char),
_ => unreachable!(),
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let (l, h) = self.0.size_hint();
(l * 2, h.map(|x| x*2))
}
}
impl<I: Iterator<Item = u8> + ExactSizeIterator> ExactSizeIterator for HexStringIter<I>{}
impl<I: Iterator<Item = u8> + FusedIterator> FusedIterator for HexStringIter<I>{}
impl<I: Iterator<Item = u8>> From<HexStringIter<I>> for String
{
fn from(from: HexStringIter<I>) -> Self
{
from.into_string()
}
}
impl<I: Iterator<Item = u8> + Clone> fmt::Display for HexStringIter<I>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
{
self.clone().consume(f)
}
}
pub trait CollectArrayExt<T>: Sized
{
/// Collect an iterator into an array.
///
/// If the iterator has more elements than `N`, the rest are discarded.
///
/// # Panics
/// If the iterator has **fewer** elements than `N`.
fn collect_array<const N: usize>(self) -> [T; N];
}
impl<I> CollectArrayExt<I::Item> for I
where I: Iterator
{
fn collect_array<const N: usize>(self) -> [I::Item; N] {
use std::mem::MaybeUninit;
// SAFETY: This pattern is safe. The array elements are still maybeuninit.
let mut out = unsafe { MaybeUninit::<[MaybeUninit::<I::Item>; N]>::uninit().assume_init() };
let mut init_to = 0;
if N == 0 {
// SAFETY: When N == 0, `[I::Item; N]` is zero-sized, so an uninitialised value is valid.
return unsafe { MaybeUninit::<[I::Item; N]>::uninit().assume_init() };
}
let res = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
#[cold]
#[inline(never)]
fn _panic_bad_size(exp: usize, got: usize) -> !
{
panic!("tried to collect into array of size {}, when iterator is only {} elements", exp, got)
}
init_to = out.iter_mut().zip(self)
.map(|(o, i)| *o = MaybeUninit::new(i)).count();
match init_to
{
n if n == N => (),
got => _panic_bad_size(N, got),
}
}));
match res {
Ok(()) => {
// SAFETY: Transmuting MaybeUninit<T> to T is fine.
// All elements are initialised by this point
unsafe {
#[inline(always)] unsafe fn assume_init_array<T, const N: usize>(array: [MaybeUninit<T>; N]) -> [T; N]
{
//std::intrinsics::assert_inhabited::<[T; N]>();
(&array as *const _ as *const [T; N]).read()
}
//MaybeUninit::array_assume_init(out)
assume_init_array(out)
}
},
Err(e) => {
// Drop all initialised elements before resuming unwind.
unsafe {
std::ptr::drop_in_place(&mut out[..init_to] as *mut [MaybeUninit<I::Item>] as *mut [I::Item]);
}
std::panic::resume_unwind(e)
},
}
}
}
#[macro_export] macro_rules! prog1 {
($first:expr, $($rest:expr);+ $(;)?) => {
($first, $( $rest ),+).0
}
}
#[macro_export] macro_rules! r#if {
($if:expr, $then:expr, $else:expr) => {
if $if { $then } else { $else }
}
}
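
Two quick usage sketches (not in this commit) for the helpers above, matching their documented behaviour; the function name and inputs are made up.

fn ext_usage_sketch()
{
    // lazy hex formatting of a byte slice
    let bytes = [0xdeu8, 0xad, 0xbe, 0xef];
    assert_eq!(bytes.hex().into_string(), "deadbeef");

    // collect the first N items of an iterator into an array;
    // this panics if the iterator yields fewer than N items
    let first_two = "--help".chars().collect_array::<2>();
    assert_eq!(first_two, ['-', '-']);
}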

@@ -1,158 +0,0 @@
//! File-watching
//!
//! When serving a directory not in oneshot-mode, this can be used to update listings.
use super::*;
use std::{path::PathBuf, time::Duration};
use std::sync::Arc;
use std::ops::Deref;
use notify::{
Watcher,
RecursiveMode,
watcher,
//TODO: set up wrapper around the notify callback thread that puts events into a async tokio::mpsc (or broadcast?) sender.
};
use tokio::sync::{
broadcast,
mpsc,
};
//use tokio_uring // Don't do this here; have a separate thread use it (if we end up using it, we probably should, since we probably don't need multiple threads reading/writing files at once.)
pub trait Receiver<T>
{
type Error;
fn recv(&mut self) -> Result<T, Self::Error>;
}
pub trait Sender<T>
{
type Error;
fn send(&self, val: T) -> Result<(), Self::Error>;
}
pub trait Channel<T>: Sized
{
type Sender: Sender<T>;
type Receiver: Receiver<T>;
fn split(self) -> (Self::Sender, Self::Receiver);
}
impl<T, S,R> Channel<T> for (S, R)
where S: Sender<T>,
R: Receiver<T>
{
type Sender = S;
type Receiver = R;
#[inline(always)] fn split(self) -> (Self::Sender, Self::Receiver) {
self
}
}
impl<T> Sender<T> for broadcast::Sender<T>
{
type Error = broadcast::error::SendError<T>;
fn send(&self, val: T) -> Result<(), Self::Error> {
self.send(val)?;
Ok(())
}
}
impl<T> Receiver<T> for broadcast::Receiver<T>
where T: Clone
{
type Error = broadcast::error::TryRecvError;
fn recv(&mut self) -> Result<T, Self::Error> {
broadcast::Receiver::try_recv(self)
}
}
impl<T> Sender<T> for mpsc::Sender<T>
{
type Error = mpsc::error::TrySendError<T>;
fn send(&self, val: T) -> Result<(), Self::Error> {
self.try_send(val)
}
}
impl<T> Receiver<T> for mpsc::Receiver<T>
{
type Error = mpsc::error::TryRecvError;
fn recv(&mut self) -> Result<T, Self::Error> {
self.try_recv()
}
}
impl<T> Sender<T> for mpsc::UnboundedSender<T>
{
type Error = mpsc::error::SendError<T>;
fn send(&self, val: T) -> Result<(), Self::Error> {
self.send(val)
}
}
impl<T> Receiver<T> for mpsc::UnboundedReceiver<T>
{
type Error = mpsc::error::TryRecvError;
fn recv(&mut self) -> Result<T, Self::Error> {
self.try_recv()
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct WatchEvent(Arc<notify::DebouncedEvent>);
impl WatchEvent
{
#[inline(always)] pub fn debounced(&self) -> &notify::DebouncedEvent
{
&self.0
}
}
impl AsRef<notify::DebouncedEvent> for WatchEvent
{
#[inline] fn as_ref(&self) -> &notify::DebouncedEvent
{
self.debounced()
}
}
impl Deref for WatchEvent
{
type Target = notify::DebouncedEvent;
#[inline] fn deref(&self) -> &Self::Target {
self.debounced()
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Mode
{
pub recurse: RecursiveMode,
pub delay: Duration,
}
/// Start a new watcher thread.
///
/// # Returns
/// * A receiver that gets the events from the watcher
/// * A future that completes when the thread exits
pub fn watch<'a, C>(path: PathBuf, mode: Mode, chan: impl FnOnce() -> C + 'a) -> (C::Receiver, impl Future<Output = ()> + Send + Sync + 'static)
where C: Channel<WatchEvent>,
C::Sender: Send + 'static,
{
let (otx, orx) = chan().split();
let (stx, trx) = std::sync::mpsc::channel();
let mut watcher = watcher(stx, mode.delay).unwrap();
let passing = tokio::spawn(async move {
match trx.try_recv() {
Ok(ev) => otx.send(WatchEvent(Arc::new(ev))).map_err(|_| ()).unwrap(),
Err(_) => (),//tokio::time::sleep(mode.delay).await, FUCK, WHY can't we await here... ALL of this bullshit above with the traits is useless. just return the damn sync `Receiver`.
}
});
{
use futures::prelude::*;
(orx, passing.map(|_| ()))
}
}
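
A hypothetical usage sketch (not in this commit) for `watch`, backing it with a tokio broadcast channel; the watched path and channel capacity are made up, and it assumes a tokio runtime is running.

async fn watch_sketch()
{
    // `(Sender, Receiver)` tuples implement the `Channel` trait above
    let (mut rx, done) = watch(
        PathBuf::from("./served"),
        Mode { recurse: RecursiveMode::Recursive, delay: Duration::from_secs(2) },
        || tokio::sync::broadcast::channel::<WatchEvent>(16),
    );

    // non-blocking poll for a debounced filesystem event
    if let Ok(ev) = Receiver::recv(&mut rx) {
        println!("fs event: {:?}", ev.debounced());
    }
    done.await;
}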

@@ -1,317 +0,0 @@
//! Key and IV structures for the cipher
use getrandom::getrandom;
use std::{fmt, str};
pub use crate::cha::{
KEY_SIZE,
IV_SIZE,
};
use crate::ext::*;
/// A trait for objects that contain a key and IV.
pub trait CC20Key
{
fn key(&self) -> &Key;
fn iv(&self) -> &IV;
}
impl<'a, T: ?Sized> CC20Key for &'a T
where T: CC20Key {
#[inline] fn key(&self) -> &Key
{
T::key(self)
}
#[inline] fn iv(&self) -> &IV
{
T::iv(self)
}
}
impl<T, U> CC20Key for (T, U)
where T: AsRef<Key>, U: AsRef<IV>
{
fn key(&self) -> &Key
{
self.0.as_ref()
}
fn iv(&self) -> &IV
{
self.1.as_ref()
}
}
/// A 32 byte key for the chacha20_poly1305 cipher
///
/// # Generation
/// You can generate a random key with `Key::new()`.
/// To create a key structure from bytes, you can use `Key::from_bytes()` if the size of the buffer is exact, or you can write to an empty `Key` as it implements `Default`.
/// ```
/// # use chacha20stream::{Key, key::KEY_SIZE};
/// # let key_bytes = [0u8; 32];
/// let mut key = Key::default();
/// key.as_mut().copy_from_slice(&key_bytes[..KEY_SIZE]);
/// ```
///
/// You can also generate a random key/IV pair with `chacha20stream::keygen()`.
///
/// # Encoding
/// This type implements `std::fmt::Display`, which prints the key as a base64 string.
/// Additionally, it implements `std::str::FromStr`, which decodes a base64 string into a `Key` instance.
/// If the decoded base64 data is shorter than `KEY_SIZE`, the rest of the key is padded with 0s.
/// If it is longer, the excess is ignored.
///
/// The key can also be lazily formatted as a hex string, with the method `to_hex_string()`.
/// ```
/// # use chacha20stream::Key;
/// let key = Key::new();
/// let key_encoded = key.to_string();
///
/// println!("Key base64: {}", key_encoded);
/// println!("Key hex: {}", key.to_hex_string());
///
/// assert_eq!(key_encoded.parse::<Key>().unwrap(), key);
/// ```
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy, Default, Serialize, Deserialize)]
#[repr(transparent)]
pub struct Key([u8; KEY_SIZE]);
/// A 12 byte IV for the chacha20_poly1305 cipher
///
/// # Generation
/// You can generate a random IV with `IV::new()`.
/// To create an IV structure from bytes, you can use `IV::from_bytes()` if the size of the buffer is exact, or you can write to an empty `IV` as it implements `Default`.
/// ```
/// # use chacha20stream::{IV, key::IV_SIZE};
/// # let iv_bytes = [0u8; 12];
/// let mut iv = IV::default();
/// iv.as_mut().copy_from_slice(&iv_bytes[..IV_SIZE]);
/// ```
///
/// You can also generate a random key/IV pair with `chacha20stream::keygen()`.
///
/// # Encoding
/// This type implements `std::fmt::Display`, which prints the IV as a base64 string.
/// Additionally, it implements `std::str::FromStr`, which decodes a base64 string into a `IV` instance.
/// If the decoded base64 data is shorter than `IV_SIZE`, the rest of the IV is padded with 0s.
/// If it is longer, the excess is ignored.
///
/// The IV can also be lazily formatted as a hex string, with the method `to_hex_string()`.
/// ```
/// # use chacha20stream::IV;
/// let iv = IV::new();
/// let iv_encoded = iv.to_string();
///
/// println!("IV base64: {}", iv_encoded);
/// println!("IV hex: {}", iv.to_hex_string());
///
/// assert_eq!(iv_encoded.parse::<IV>().unwrap(), iv);
/// ```
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy, Default, Serialize, Deserialize)]
#[repr(transparent)]
pub struct IV([u8; IV_SIZE]);
impl Key
{
/// Construct a `Key` from an exact length (32 bytes) buffer.
#[inline] pub fn from_bytes(k: [u8; KEY_SIZE]) -> Self
{
Self(k)
}
/// Create a new random 32 byte chacha20_poly1305 `Key`.
pub fn new() -> Self
{
let mut output = [0u8; KEY_SIZE];
getrandom(&mut output[..]).expect("rng fatal");
Self(output)
}
/// Format this key as a hex string
///
/// Returns an opaque type that lazily formats the key into a hex string when written.
///
/// # Example
/// ```
/// # use chacha20stream::Key;
/// fn print_key_info(key: &Key) {
/// println!("Key base64: {}", key);
/// println!("Key hex: {}", key.to_hex_string());
/// }
/// ```
/// Formatting to `String`
/// ```
/// # use chacha20stream::Key;
/// # let key = Key::new();
/// let key_hex_string = key.to_hex_string().to_string();
/// ```
pub fn to_hex_string(&self) -> impl fmt::Display + '_
{
self.0.iter().copied().into_hex()
}
}
impl IV
{
/// Construct an `IV` from an exact length (12 bytes) buffer.
#[inline] pub fn from_bytes(k: [u8; IV_SIZE]) -> Self
{
Self(k)
}
/// Create a new random 12 byte chacha20_poly1305 `IV`.
pub fn new() -> Self
{
let mut output = [0u8; IV_SIZE];
getrandom(&mut output[..]).expect("rng fatal");
Self(output)
}
/// Format this IV as a hex string
///
/// Returns an opaque type that lazily formats the IV into a hex string when written.
///
/// # Example
/// ```
/// # use chacha20stream::IV;
/// fn print_iv_info(iv: &IV) {
/// println!("IV base64: {}", iv);
/// println!("IV hex: {}", iv.to_hex_string());
/// }
/// ```
/// Formatting to `String`
/// ```
/// # use chacha20stream::IV;
/// # let iv = IV::new();
/// let iv_hex_string = iv.to_hex_string().to_string();
/// ```
pub fn to_hex_string(&self) -> impl fmt::Display + '_
{
self.0.iter().copied().into_hex()
}
}
impl From<[u8; KEY_SIZE]> for Key
{
#[inline] fn from(from: [u8; KEY_SIZE]) -> Self
{
Self(from)
}
}
impl From<[u8; IV_SIZE]> for IV
{
fn from(from: [u8; IV_SIZE]) -> Self
{
Self(from)
}
}
impl AsRef<[u8]> for Key
{
fn as_ref(&self) -> &[u8]
{
&self.0[..]
}
}
impl AsRef<[u8]> for IV
{
fn as_ref(&self) -> &[u8]
{
&self.0[..]
}
}
impl AsMut<[u8]> for Key
{
fn as_mut(&mut self) -> &mut [u8]
{
&mut self.0[..]
}
}
impl AsMut<[u8]> for IV
{
fn as_mut(&mut self) -> &mut [u8]
{
&mut self.0[..]
}
}
impl AsRef<Key> for Key
{
#[inline] fn as_ref(&self) -> &Key
{
self
}
}
impl AsRef<IV> for IV
{
#[inline] fn as_ref(&self) -> &IV
{
self
}
}
impl fmt::Display for Key
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
{
write!(f, "{}", base64::encode(&self.0[..]))
}
}
impl fmt::Display for IV
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
{
write!(f, "{}", base64::encode(&self.0[..]))
}
}
impl str::FromStr for Key
{
type Err = base64::DecodeError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut buffer = Vec::with_capacity(KEY_SIZE);
base64::decode_config_buf(s.as_bytes(), base64::STANDARD, &mut buffer)?;
let mut this = Self::default();
let sz = std::cmp::min(KEY_SIZE, buffer.len());
(&mut this.0[..sz]).copy_from_slice(&buffer[..sz]);
Ok(this)
}
}
impl str::FromStr for IV
{
type Err = base64::DecodeError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut buffer = Vec::with_capacity(IV_SIZE);
base64::decode_config_buf(s.as_bytes(), base64::STANDARD, &mut buffer)?;
let mut this = Self::default();
let sz = std::cmp::min(IV_SIZE, buffer.len());
(&mut this.0[..sz]).copy_from_slice(&buffer[..sz]);
Ok(this)
}
}
#[cfg(test)]
mod tests
{
use super::{Key, IV};
#[test]
fn enc_dec()
{
let (key, iv) = crate::cha::keygen();
let key_str = key.to_string();
let iv_str = iv.to_string();
let (key2, iv2): (Key, IV) = (key_str.parse().expect("key"),
iv_str.parse().expect("iv"));
assert_eq!(key, key2);
assert_eq!(iv, iv2);
}
}

@@ -19,16 +19,8 @@ use color_eyre::{
};
use futures::Future;
mod fw;
mod ext;
mod key;
mod cha;
mod enc;
mod sock;
mod config;
mod args;
mod send;
fn setup() -> eyre::Result<()>
{
@@ -40,9 +32,9 @@ fn setup() -> eyre::Result<()>
#[tokio::main]
async fn main() -> eyre::Result<()> {
setup().wrap_err(eyre!("Failed to initialise logger"))?;
setup()?;
args::Usage.print_and_exit(0);
Ok(())
}

@@ -1,23 +0,0 @@
//! Module handles setting up streams for files
use super::*;
use std::marker::Unpin;
use tokio::io::{
AsyncRead,
};
/// A read request from the client.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Request
{
}
/*
pub async fn read_req<T>(mut from: T, key: ) -> eyre::Result<Request>
where T: AsyncRead + Unpin
{
todo!("how do we handle encryption of the request data? eh... boring")
}
*/

@@ -1,5 +0,0 @@
//! Sending
use super::*;
pub mod handshake;

@@ -0,0 +1,64 @@
//! Socket handling
use super::*;
use std::str;
use std::path::{
Path, PathBuf
};
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SocketAddrUnix
{
pub path: PathBuf,
}
impl str::FromStr for SocketAddrUnix
{
type Err = AddrParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let path = Path::new(s);
if path.exists() && !path.is_dir() {
Ok(Self{path: path.into()})
} else {
Err(AddrParseError)
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum SocketAddr
{
Unix(SocketAddrUnix),
IP(std::net::SocketAddr),
}
impl From<std::net::SocketAddr> for SocketAddr
{
fn from(from: std::net::SocketAddr) -> Self
{
Self::IP(from)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AddrParseError;
impl From<std::net::AddrParseError> for AddrParseError
{
fn from(_: std::net::AddrParseError) -> Self
{
Self
}
}
const UNIX_SOCK_PREFIX: &str = "unix:/";
impl str::FromStr for SocketAddr
{
type Err = AddrParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(if s.starts_with(UNIX_SOCK_PREFIX) {
Self::Unix(s[(UNIX_SOCK_PREFIX.len())..].parse()?)
} else {
Self::IP(s.parse()?)
})
}
}
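
A short parsing sketch (not in this commit): the `unix:/` prefix selects a unix-socket path, which must already exist and not be a directory, while anything else falls through to the standard IP address parser. The addresses used are made up.

fn parse_sketch()
{
    let ip: SocketAddr = "127.0.0.1:4433".parse().expect("ip address");
    assert!(matches!(ip, SocketAddr::IP(_)));

    // only parses successfully if /tmp/transfer.sock already exists on this machine
    let unix: Result<SocketAddr, AddrParseError> = "unix://tmp/transfer.sock".parse();
    let _ = unix;
}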