//! Data serialisation
use super::*;

use bytes::BufMut;
/// Compression algorithms that can be applied to a serialised payload.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy, PartialOrd, Ord)]
pub enum CompressionKind
{
    /// Brotli stream compression (currently the only implemented kind).
    Brotli,

    //TODO: Add cases (and async_compression features) for these three
    /// XZ/LZMA compression (not yet implemented).
    Xz,
    /// Gzip compression (not yet implemented).
    GZip,
    /// Bzip2 compression (not yet implemented).
    Bz2,
}
impl Default for CompressionKind
{
    /// Defaults to [`CompressionKind::Brotli`].
    #[inline]
    fn default() -> Self
    {
        //TODO: Should Brotli be default? Check sizes of compressed binary encoded stuffs and compare modes.
        Self::Brotli
    }
}
/// Encryption schemes that can be applied to a serialised payload.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub enum EncryptionKind
{
    /// ChaCha20 with the given key and IV.
    Chacha20((key::Key, key::IV))
}
impl Default for EncryptionKind
{
    /// ChaCha20 with a freshly generated random key/IV pair.
    ///
    /// Note: each call produces a *different* key — both sides of a transfer must
    /// share the same `EncryptionKind` value, not each call `default()`.
    #[inline]
    fn default() -> Self
    {
        Self::Chacha20(cha::keygen())
    }
}
/// On-wire serialisation format for messages.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub enum SerialFormat
{
    /// CBOR
    Binary,
    /// JSON
    Text,
}
impl Default for SerialFormat
{
    /// Defaults to [`SerialFormat::Binary`] (CBOR).
    #[inline]
    fn default() -> Self
    {
        Self::Binary
    }
}
/// Options controlling how an object is serialised ("sent").
///
/// Also used as [`RecvOpt`] to describe deserialisation; the same options must be
/// used on both ends for a round-trip to succeed.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub struct SendOpt
{
    // Optional compression applied to the serialised payload.
    comp: Option<CompressionKind>,
    // Optional encryption applied after serialisation (and compression, if any).
    encrypt: Option<EncryptionKind>,
    // On-wire serialisation format (CBOR or JSON).
    format: SerialFormat,
    // Whether to embed a salted SHA-256 of the payload in a message header.
    hash: bool,
    //pub sign: Option<???>, //TODO: RSA private + public key types
}
ref_self!(SendOpt);
impl Default for SendOpt
{
    /// Same as [`SendOpt::new`]: no compression, no encryption, binary format, no hash.
    #[inline]
    fn default() -> Self
    {
        Self::new()
    }
}
impl SendOpt
{
    /// No optional features enabled (see [`SendOpt::new`]).
    pub const NORMAL: Self = Self::new();
    /// Hash integrity checking enabled (see [`SendOpt::new_checked`]).
    pub const CHECKED: Self = Self::new_checked();
    /// Brotli compression enabled (see [`SendOpt::new_compressed`]).
    pub const COMPRESSED: Self = Self::new_compressed();

    /// Add compression
    pub const fn compress(self, k: CompressionKind) -> Self
    {
        Self {
            comp: Some(k),
            ..self
        }
    }

    /// Change the output format
    ///
    /// Default: **Binary**
    ///
    /// # Text format note
    /// When using compression and/or encryption, the text format will end up unreadable anyway.
    /// Likewise when using signing or hashing, a binary header is prepended to the message regardless of format.
    ///
    /// 2 ASCII whitespace characters are prepended to the message regardless of any other options (`\t`, then ` ` or `\n`). These are used to determine if the message is valid and if a header needs to be read from it.
    /// Most external text-format parsing software should ignore these and be able to parse a non-headered message.
    pub const fn format(self, format: SerialFormat) -> Self
    {
        Self {
            format,
            ..self
        }
    }

    /// Enable or disable hashing
    ///
    /// Default: *Disabled*
    pub const fn hash(self, hash: bool) -> Self
    {
        Self {
            hash,
            ..self
        }
    }

    /// Add encryption with constant parameters
    pub const fn encrypt(self, k: EncryptionKind) -> Self
    {
        Self {
            encrypt: Some(k),
            ..self
        }
    }

    /// Add default encryption with a randomly generated key and IV.
    pub fn encrypt_cc20_gen(self) -> Self
    {
        self.encrypt(EncryptionKind::Chacha20(cha::keygen()))
    }

    /// Normal options.
    ///
    /// Does not enable any features.
    pub const fn new() -> Self
    {
        Self {
            comp: None,
            encrypt: None,
            format: SerialFormat::Binary,
            hash: false,
        }
    }

    /// Normal options with data compression.
    ///
    /// Uses Brotli compression by default.
    pub const fn new_compressed() -> Self
    {
        Self {
            comp: Some(CompressionKind::Brotli),
            ..Self::new()
        }
    }

    /// Normal options with added integrity checks.
    ///
    /// Increases final size of object but provided data integrity and source validation.
    //TODO: Give sig param
    pub const fn new_checked() -> Self
    {
        Self {
            hash: true,
            //sig: ???
            ..Self::new()
        }
    }

    /// Should a header be generated for this data?
    ///
    /// True when hashing (and, eventually, signing) is requested.
    #[inline(always)] fn needs_header(&self) -> bool
    {
        self.hash || /*self.sig*/ false
    }

    // Currently identical to `needs_header()`; kept separate so the
    // create/require distinction can diverge once signing lands.
    #[inline] fn creates_header(&self) -> bool
    {
        self.needs_header()
    }

    /// Does the binary data of this format require special handling?
    ///
    /// True if encryption and/or compression are specified.
    #[inline(always)] fn is_spec(&self) -> bool
    {
        self.comp.is_some() || self.encrypt.is_some()
    }
}
/// Options describing how to deserialise a message.
///
/// Identical to [`SendOpt`]; the receive side must use the same options the sender used.
pub type RecvOpt = SendOpt;

/// Default buffer size for encryption transform stream copying.
pub const DEFAULT_BUFSIZE: usize = 4096;
pub(super) async fn cha_copy<F, T, const BUFSIZE: usize, const DECRYPT: bool>(from: &mut F, to: &mut T, key: &key::Key, iv: &key::IV) -> io::Result<(usize, usize)>
|
|
where F: AsyncRead + Unpin + ?Sized,
|
|
T: AsyncWrite + Unpin + ?Sized
|
|
{
|
|
let mut written=0;
|
|
let mut read=0;
|
|
let mut r;
|
|
let mut buffer = [0u8; BUFSIZE];
|
|
let mut cbuffer = [0u8; BUFSIZE];
|
|
|
|
let mut crypter = if DECRYPT {
|
|
cha::decrypter(key, iv)
|
|
} else {
|
|
cha::encrypter(key, iv)
|
|
}?;
|
|
|
|
while { r = from.read(&mut buffer[..]).await?; r > 0 } {
|
|
read += r;
|
|
r = crypter.update(&buffer[..r], &mut cbuffer[..])?;
|
|
to.write(&cbuffer[..r]).await?;
|
|
written += r;
|
|
}
|
|
|
|
Ok((written, read))
|
|
}
|
|
|
|
/// Size in bytes of the random salt mixed into the payload hash.
const H_SALT_SIZE: usize = 32;

/// Optional integrity/authenticity metadata prepended to a serialised message.
///
/// Serialised as a fixed [`FormatHeader::SIZE`]-byte block; absent fields are zero-padded.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)]
struct FormatHeader
{
    // Salted SHA-256 of the serialised payload, together with the salt used.
    hash: Option<(sha256::Sha256Hash, [u8; H_SALT_SIZE])>,
    // RSA signature — never populated yet (signing is TODO throughout this module).
    sig: Option<rsa::Signature>,
}
/// Why a message header failed validation.
#[derive(Debug)]
pub enum HeaderValidationError
{
    /// The 2-byte info prefix or the header block was structurally invalid.
    Malformed,
    /// The embedded hash did not match the payload.
    Hash,
    /// The signature could not be verified.
    Signature,
}
impl error::Error for HeaderValidationError{}
|
|
impl fmt::Display for HeaderValidationError
|
|
{
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
|
|
{
|
|
match self {
|
|
Self::Malformed => write!(f, "header was malformed"),
|
|
Self::Hash => write!(f, "invalid hash"),
|
|
Self::Signature => write!(f, "signature could not be verified"),
|
|
}
|
|
}
|
|
}
|
|
|
|
|
|
impl FormatHeader
|
|
{
|
|
pub const SIZE: usize = sha256::SIZE + H_SALT_SIZE + cryptohelpers::consts::RSA_SIG_SIZE + 2;
|
|
const fn empty_array() -> [u8; Self::SIZE]
|
|
{
|
|
[0u8; Self::SIZE]
|
|
}
|
|
fn gen_salt() -> [u8; H_SALT_SIZE]
|
|
{
|
|
let mut out = [0u8; H_SALT_SIZE];
|
|
getrandom::getrandom(&mut out[..]).expect("rng fatal");
|
|
out
|
|
}
|
|
fn generate(data: impl AsRef<[u8]>, opt: &SendOpt) -> Self
|
|
{
|
|
let hash = if opt.hash {
|
|
let salt = Self::gen_salt();
|
|
Some((sha256::compute_slices(iter![data.as_ref(), &salt[..]]), salt))
|
|
} else {
|
|
None
|
|
};
|
|
let sig = if false /*let Some(sign_with) = opt.sign*/ {
|
|
unimplemented!()
|
|
} else {
|
|
None
|
|
};
|
|
Self {
|
|
hash,
|
|
sig //TODO
|
|
}
|
|
}
|
|
fn validate(&self, data: impl AsRef<[u8]>, opt: &RecvOpt) -> Result<(), HeaderValidationError>
|
|
{
|
|
if opt.hash {
|
|
if !self.hash.as_ref().map(|(hash, salt)| &sha256::compute_slices(iter![data.as_ref(), &salt[..]]) == hash).unwrap_or(true) {
|
|
return Err(HeaderValidationError::Hash);
|
|
}
|
|
}
|
|
if /*opt.sig*/ false {
|
|
unimplemented!();
|
|
//if let Some(verify_with) = opt.sig //XXX: How will this work? We will need to store **either** a private or public key in Send/RecvOpt and dynamically dispatch over it.
|
|
}
|
|
Ok(())
|
|
}
|
|
fn to_buffer(&self, mut to: impl BufMut)
|
|
{
|
|
if let Some(hash) = &self.hash
|
|
{
|
|
to.put_u8(1);
|
|
to.put_slice(hash.0.as_ref());
|
|
to.put_slice(hash.1.as_ref());
|
|
} else {
|
|
to.put_u8(0);
|
|
to.put_bytes(0, sha256::SIZE + H_SALT_SIZE);
|
|
}
|
|
if let Some(sig) = &self.sig
|
|
{
|
|
to.put_u8(1);
|
|
to.put_slice(sig.as_ref());
|
|
} else {
|
|
to.put_u8(0);
|
|
to.put_bytes(0, cryptohelpers::consts::RSA_SIG_SIZE);
|
|
}
|
|
}
|
|
fn from_buffer(mut from: impl Buf) -> Self
|
|
{
|
|
let hash = if from.get_u8() == 1 {
|
|
let mut hash = sha256::Sha256Hash::default();
|
|
let mut salt = [0u8; H_SALT_SIZE];
|
|
from.copy_to_slice(hash.as_mut());
|
|
from.copy_to_slice(&mut salt[..]);
|
|
Some((hash,salt))
|
|
} else {
|
|
from.advance(sha256::SIZE + H_SALT_SIZE);
|
|
None
|
|
};
|
|
let sig = if from.get_u8() == 1 {
|
|
let mut sig = rsa::Signature::default();
|
|
from.copy_to_slice(sig.as_mut());
|
|
Some(sig)
|
|
} else {
|
|
from.advance(sha256::SIZE);
|
|
None
|
|
};
|
|
Self {
|
|
hash, sig
|
|
}
|
|
}
|
|
#[inline] fn to_array(&self) -> [u8; Self::SIZE]
|
|
{
|
|
let mut ar = [0u8; Self::SIZE];
|
|
self.to_buffer(&mut &mut ar[..]);
|
|
ar
|
|
}
|
|
#[inline] fn from_array(ar: [u8; Self::SIZE]) -> Self
|
|
{
|
|
Self::from_buffer(&ar[..])
|
|
}
|
|
}
|
|
|
|
// Every message starts with a 2-byte ASCII-whitespace info prefix:
// byte 0 asserts the message is one of ours; byte 1 says whether a header follows.
const INFO_ASSERT_VALID: u8 = b'\t';
const INFO_WITH_HEADER: u8 = b' ';
const INFO_NO_HEADER: u8 = b'\n';

/// If passing an externally generated message to be deserialised here, it must be prefixed with this regardless of its format.
///
/// Operations that generate/require a message header will not work on these messages and if they are needed must be handled elsewhere by the user. (Hash and signature validation)
pub const BARE_MESSAGE_PREFIX: [u8; 2] = [INFO_ASSERT_VALID, INFO_NO_HEADER];
/// Deserialise a single object from the in-memory message `from` using the options in `how`.
///
/// `buf` lazily constructs the intermediate output buffer used when the payload must be
/// decrypted and/or decompressed before deserialisation; it receives the post-prefix
/// message bytes so it can pre-size the buffer.
///
/// # Errors
/// Fails if the 2-byte info prefix or embedded header is malformed, if decryption or
/// decompression fails, if header validation fails, or if final deserialisation fails.
pub(super) async fn de_singleton_inner<T: DeserializeOwned, B, F>(buf: F, from: &[u8], how: &RecvOpt) -> Result<T, TransformErrorKind>
where B: AsRef<[u8]> + AsyncWrite + Unpin + Default,
      F: FnOnce(&[u8]) -> B
{

    // Read header
    let mut header = FormatHeader::empty_array();
    // Every valid message begins with INFO_ASSERT_VALID plus one info byte.
    if from.len() < 2 || from[0] != INFO_ASSERT_VALID {
        return Err(TransformErrorKind::InvalidHeader(HeaderValidationError::Malformed));
    }
    let (inf, mut from) = {
        (&from[..2], &from[2..])
    };
    // If the info byte announces a header, copy it into `header` and skip past it;
    // otherwise `header` stays zeroed, which deserialises to "no hash, no sig".
    from = {
        if inf[1] == INFO_WITH_HEADER {
            if from.len() < FormatHeader::SIZE {
                return Err(TransformErrorKind::InvalidHeader(HeaderValidationError::Malformed));
            }
            let hf = &from[..FormatHeader::SIZE];
            header.copy_from_slice(hf);
            &from[FormatHeader::SIZE..]
        } else {
            &from[..]
        }
    };
    // Decompressor
    // The output is written to this (through writer)
    let mut is_spec = false; // This is set later. The value will sometimes differ from `how.is_spec()` depending on combinations of options.
    // The `spec` output buffer. Used if there are transformations that need to be done to the data before deserialisation
    let mut buf = if how.is_spec() {
        buf(&from)
    } else {
        Default::default()
    };
    //let mut buf = Vec::with_capacity(from.len());
    from = {
        let mut b;
        // `writer` is either a Brotli decoder wrapping `buf`, or `buf` directly.
        let writer: &mut (dyn AsyncWrite + Unpin) =
            if let Some(comp) = &how.comp {
                is_spec = true;
                match comp {
                    CompressionKind::Brotli => {
                        b = async_compression::tokio::write::BrotliDecoder::new(&mut buf);
                        &mut b
                    },
                    _ => unimplemented!(),
                }
            } else {
                &mut buf
            };
        // Decrypt into `writer`.

        if let Some(dec) = &how.encrypt {
            // There is decryption to be done, decrypt into `writer` (which will handle decompression if needed).
            // Return its output buffer
            match dec {
                EncryptionKind::Chacha20((k, iv)) => {
                    self::cha_copy::<_, _, DEFAULT_BUFSIZE, true>(&mut &from[..], writer, k, iv).await?;
                },
            }
            // Required for decompression to complete
            writer.flush().await?;
            writer.shutdown().await?;

            &buf.as_ref()[..]
        } else if is_spec {
            // There is decompression to be done through `writer`. Return its output buffer
            writer.write_all(from).await?;

            // Required for decompression to complete
            writer.flush().await?;
            writer.shutdown().await?;

            &buf.as_ref()[..]
        } else {
            // There is neither decompression nor decryption to be done, return the input reference itself
            from
        }
    };
    // Deserialise

    // Validate the (possibly zeroed) header against the recovered payload.
    FormatHeader::from_array(header).validate(from, how)?;

    let v = match how.format {
        SerialFormat::Text => serde_json::from_slice(&from[..])?,
        SerialFormat::Binary => serde_cbor::from_slice(&from[..])?,
    };

    Ok(v)
}
/// Serialise `value` into a writer produced by `to`, applying the options in `how`.
///
/// `to` receives the raw (pre-compression/encryption) serialised bytes so it can
/// pre-size the output, and returns the writer the final message is written into.
///
/// # Returns
/// The writer and the number of payload bytes copied into it. Note this count comes
/// from the compression/encryption copy only — it *excludes* the 2-byte info prefix
/// and any header, which are written to the writer directly.
///
/// # Errors
/// Fails on serialisation, compression, encryption, or I/O errors.
pub(super) async fn ser_singleton_inner<T: Serialize, V: AsyncWrite + Unpin, F>(to: F, value: &T, how: impl AsRef<SendOpt>) -> Result<(V, usize), TransformErrorKind>
where F: FnOnce(&Vec<u8>) -> V,
{
    let how = how.as_ref();
    // Serialise to an in-memory buffer first.
    let ser = match how.format {
        SerialFormat::Text => serde_json::to_vec(value)?,
        SerialFormat::Binary => serde_cbor::to_vec(value)?,
    };
    // Header is computed over the *uncompressed, unencrypted* serialised bytes.
    let header = if how.needs_header() {
        let header = FormatHeader::generate(&ser, how);
        header.to_array()
    } else {
        FormatHeader::empty_array()
    };
    let mut a;
    let mut b;
    // `reader` yields either Brotli-compressed bytes or the raw serialised bytes.
    let reader: &mut (dyn AsyncRead + Unpin) =
        if let Some(comp) = &how.comp {
            match comp {
                CompressionKind::Brotli => {
                    a = async_compression::tokio::bufread::BrotliEncoder::new(tokio::io::BufReader::new(&ser[..]));
                    &mut a
                },
                _ => unimplemented!("Xz and GZip currently unimplemented."),
            }
        } else {
            b = &ser[..];
            &mut b
        };
    // `ser` is rebound (shadowed) to the output writer from here on.
    let mut ser = to(&ser);
    // 2-byte info prefix (+ header when one is needed) goes first.
    if how.needs_header() {
        ser.write_all(&[INFO_ASSERT_VALID, INFO_WITH_HEADER]).await?;
        ser.write_all(&header[..]).await?;
    } else {
        ser.write_all(&[INFO_ASSERT_VALID, INFO_NO_HEADER]).await?;
    }
    let w = if let Some(enc) = &how.encrypt {
        // Encrypt the (possibly compressed) payload into the writer.
        let n = match enc {
            EncryptionKind::Chacha20((k, iv)) => {
                self::cha_copy::<_, _, DEFAULT_BUFSIZE, false>(reader, &mut ser, k, iv).await?.0
            },
        };
        // Required for compression to complete
        ser.flush().await?;
        ser.shutdown().await?;
        n
    } else {
        // No encryption: plain copy (compressed or raw) into the writer.
        tokio::io::copy(reader, &mut ser).await? as usize
    };
    Ok((ser, w))
    // inner(value, how).map(|res| res.map_err(|k| SendError(Box::new((k, how.clone())))))
}
/// Deserialise a single object from an in-memory message with the method described by `how`.
///
/// Thin wrapper over [`de_singleton_inner`] that allocates a `Vec` output buffer and
/// boxes any error kind together with the options into a [`RecvError`].
#[inline(always)] pub fn de_singleton<'a, T: DeserializeOwned + 'a, B: ?Sized + AsRef<[u8]> + 'a>(from: &'a B, how: &'a RecvOpt) -> impl Future<Output = Result<T, RecvError>> + 'a
{
    use futures::prelude::*;
    de_singleton_inner(|from| Vec::with_capacity(from.as_ref().len()), from.as_ref(), how)
        .map_err(|k| RecvError(Box::new((k, how.clone()))))
}
/// Serialise a single object to an owned byte vector with the method described by `how`.
///
/// Thin wrapper over [`ser_singleton_inner`] that discards the byte count and boxes
/// any error kind together with the options into a [`SendError`].
#[inline(always)] pub fn ser_singleton<'a, T: Serialize>(value: &'a T, how: &'a SendOpt) -> impl Future<Output = Result<Vec<u8>, SendError>> + 'a
{
    use futures::prelude::*;
    // hack to avoid having to enable `try{}` feature :/
    ser_singleton_inner(|c| Vec::with_capacity(c.len()), value, how)
        .map_ok(|(v, _)| v)
        .map_err(|k| SendError(Box::new((k, how.clone()))))
}
/// Deserialise a single object from a stream with the method described by `how`.
///
/// Reads a big-endian `u64` byte-length prefix, then that many message bytes, then
/// deserialises them via [`de_singleton`].
///
/// # Returns
/// The deserialised value and the number of bytes read from the stream.
pub async fn read_singleton<T: DeserializeOwned, S: ?Sized + AsyncRead + Unpin>(from: &mut S, how: &RecvOpt) -> Result<(T, usize), RecvError>
{
    let (r, v) = async move {
        let mut ibuf = [0u8; std::mem::size_of::<u64>()];
        from.read_exact(&mut ibuf[..]).await?;
        let n = u64::from_be_bytes(ibuf);
        // NOTE(review): `n` comes straight off the wire; `with_capacity(n)` can allocate
        // an attacker-controlled amount of memory — consider clamping for untrusted peers.
        let mut v = Vec::with_capacity(n as usize);
        // `take(n)` bounds the read to the advertised length.
        tokio::io::copy(&mut from.take(n), &mut v).await
            .map(move |_| (v.len() + ibuf.len(), v))
    }.await
    .map_err(|err| RecvError(Box::new((err.into(), how.to_owned()))))?;
    let v = de_singleton(&v[..], how).await?;
    Ok((v, r))
}
/// Serialise a single object to a stream with the method described by `how`.
|
|
#[inline] pub async fn write_singleton<T: Serialize, S: ?Sized + AsyncWrite + Unpin>(to: &mut S, value: &T, how: &SendOpt) -> Result<usize, SendError>
|
|
{
|
|
let (cont, v) = ser_singleton_inner(|n| Vec::with_capacity(n.len()), value, &how).await
|
|
.map_err(|k| SendError(Box::new((k, how.to_owned()))))?;
|
|
|
|
|
|
let n = async move {
|
|
to.write_all(&(v as u64).to_be_bytes()[..]).await?;
|
|
to.write_all(&cont).await
|
|
.map(|_| std::mem::size_of::<u64>() + cont.len())
|
|
}
|
|
.await
|
|
.map_err(|k| SendError(Box::new((k.into(), how.to_owned()))))?;
|
|
|
|
|
|
Ok(n)
|
|
}
|
|
|
|
/// Kind of error for a send (serialise) or receive (deserialise) operation
#[derive(Debug)]
pub enum TransformErrorKind
{
    /// Invalid serialised format
    Format,
    /// Compression
    Compress,
    /// Encryption
    Encrypt,
    /// Misc. IO
    //TODO: Disambiguate when this happens into the two above cases.
    IO(io::Error),
    /// The object header was invalid.
    InvalidHeader(HeaderValidationError),
}
/// An error when receiving / deserialising an object.
///
/// Carries the failure kind together with the [`RecvOpt`] in effect, boxed to keep
/// `Result<T, RecvError>` small.
#[derive(Debug)]
pub struct RecvError(Box<(TransformErrorKind, RecvOpt)>);
impl RecvError
{
    /// The kind of failure that occurred.
    #[inline] pub fn kind(&self) -> &TransformErrorKind
    {
        &self.0.0
    }
}
impl SendError
{
    /// The kind of failure that occurred.
    #[inline] pub fn kind(&self) -> &TransformErrorKind
    {
        &self.0.0
    }
}
impl error::Error for RecvError
{
    // Expose the underlying I/O or header-validation error as the source, where one exists.
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        Some(match &self.0.0
        {
            TransformErrorKind::IO(io) => io,
            TransformErrorKind::InvalidHeader(ih) => ih,
            _ => return None,
        })
    }
}
impl fmt::Display for RecvError
|
|
{
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
|
|
{
|
|
write!(f, "error when deserialising object with params {:?}: ", self.0.1)?;
|
|
match self.0.0 {
|
|
TransformErrorKind::Format => write!(f, "failed to deserialise object to data"),
|
|
TransformErrorKind::Compress => write!(f, "failed to decompress data"),
|
|
TransformErrorKind::Encrypt => write!(f, "failed to decrypt data"),
|
|
TransformErrorKind::IO(_) => write!(f, "i/o failure"),
|
|
TransformErrorKind::InvalidHeader(_) => write!(f, "invalid header"),
|
|
}
|
|
}
|
|
}
|
|
|
|
|
|
/// An error when sending / serialising an object.
///
/// Carries the failure kind together with the [`SendOpt`] in effect, boxed to keep
/// `Result<T, SendError>` small.
#[derive(Debug)]
pub struct SendError(Box<(TransformErrorKind, SendOpt)>);
impl error::Error for SendError
{
    // Expose the underlying I/O or header-validation error as the source, where one exists.
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        Some(match &self.0.0
        {
            TransformErrorKind::IO(io) => io,
            TransformErrorKind::InvalidHeader(ih) => ih,
            _ => return None,
        })
    }
}
impl fmt::Display for SendError
|
|
{
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
|
|
{
|
|
write!(f, "error when serialising object with params {:?}: ", self.0.1)?;
|
|
match self.0.0 {
|
|
TransformErrorKind::Format => write!(f, "failed to serialise object to data"),
|
|
TransformErrorKind::Compress => write!(f, "failed to compress data"),
|
|
TransformErrorKind::Encrypt => write!(f, "failed to encrypt data"),
|
|
TransformErrorKind::IO(_) => write!(f, "i/o failure"),
|
|
TransformErrorKind::InvalidHeader(_) => write!(f, "invalid header"),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl From<io::Error> for TransformErrorKind
|
|
{
|
|
fn from(from: io::Error) -> Self
|
|
{
|
|
Self::IO(from)
|
|
}
|
|
}
|
|
|
|
impl From<HeaderValidationError> for TransformErrorKind
|
|
{
|
|
fn from(from: HeaderValidationError) -> Self
|
|
{
|
|
Self::InvalidHeader(from)
|
|
}
|
|
}
|
|
|
|
|
|
|
|
impl From<serde_cbor::Error> for TransformErrorKind
|
|
{
|
|
#[inline] fn from(_: serde_cbor::Error) -> Self
|
|
{
|
|
Self::Format
|
|
}
|
|
}
|
|
|
|
impl From<serde_json::Error> for TransformErrorKind
{
    // JSON (de)serialisation failures collapse into the generic `Format` kind;
    // the underlying error detail is discarded.
    #[inline] fn from(_: serde_json::Error) -> Self
    {
        Self::Format
    }
}
#[cfg(test)]
mod test
{
    use super::*;

    /// Round-trip a `String` through `ser_singleton`/`de_singleton` with the given options
    /// and assert the value survives unchanged.
    async fn ser_de_with(how: SendOpt) -> eyre::Result<()>
    {
        use ext::*;

        let obj = String::from("Hello world");

        let var = ser_singleton(&obj, &how).await?;
        eprintln!("Ser ({} bytes): {}", var.len(), var.hex());
        let des: String = de_singleton(&var, &how).await?;
        eprintln!("De: {:?}", des);
        assert_eq!(obj, des);
        Ok(())
    }

    // Round trip with default options (plain CBOR, no transforms).
    #[tokio::test]
    async fn ser_de() -> eyre::Result<()>
    {
        ser_de_with(Default::default()).await
    }

    // Round trip with Brotli compression only.
    #[tokio::test]
    async fn ser_de_comp() -> eyre::Result<()>
    {
        ser_de_with(SendOpt {
            comp: Some(CompressionKind::Brotli),
            ..Default::default()
        }).await
    }

    // Round trip with ChaCha20 encryption only.
    #[tokio::test]
    async fn ser_de_enc() -> eyre::Result<()>
    {
        ser_de_with(SendOpt {
            encrypt: Some(EncryptionKind::Chacha20(cha::keygen())),
            //hash: true,
            ..Default::default()
        }).await
    }

    // Round trip with both compression and encryption.
    #[tokio::test]
    async fn ser_de_comp_enc() -> eyre::Result<()>
    {
        ser_de_with(SendOpt {
            encrypt: Some(EncryptionKind::Chacha20(cha::keygen())),
            comp: Some(CompressionKind::Brotli),
            ..Default::default()
        }).await
    }
}