// shrupl/src/cli.rs — command-line interface definition (clap derive).
use std::{convert::Infallible, fmt, io, time::Duration};
use base64ct::{Base64UrlUnpadded, Encoding};
use blake2b_simd::Params as Blake2b;
use clap::{
Parser,
builder::{PossibleValuesParser, TypedValueParser},
value_parser,
};
use log::LevelFilter;
use crate::{
file::{Checked, FileTrait},
sharry::{AliasID, NewShareRequest, Uri},
};
// NOTE(review): each field's `///` doc comment doubles as the clap-generated
// `--help` text, i.e. it is a user-facing string — do not reword casually.
#[derive(Parser)]
#[command(version, about, long_about = None)]
pub struct Cli {
    /// Timeout in seconds for HTTP actions (set 0 or invalid to disable)
    // `parse_seconds` maps unparsable input to 0 seconds; `get_timeout()`
    // later reports a zero Duration as `None` (= disabled).
    #[arg(
        short, long,
        default_value = "10", value_name = "SECS",
        value_parser = parse_seconds,
    )]
    timeout: Duration,
    /// Protocol for Sharry instance
    // Restricted to "http"/"https"; combined with `url` in `get_uri()`.
    #[arg(
        short, long,
        default_value = "https", value_name = "VARIANT",
        value_parser = PossibleValuesParser::new(["http", "https"]),
    )]
    protocol: String,
    /// Number of times actions are retried
    // 0 is special-cased by `may_retry` to mean "retry indefinitely".
    #[arg(short, long, default_value_t = 5, value_name = "N")]
    retry_limit: u32,
    /// Name of the new share
    #[arg(short, long, default_value = "ShrUpl Upload", value_name = "TEXT")]
    share_name: String,
    /// Description of the new share
    #[arg(short, long, value_name = "TEXT")]
    description: Option<String>,
    /// Maximum number of views for the new share
    #[arg(short, long, default_value_t = 100, value_name = "N")]
    max_views: u32,
    /// Chunk size for uploading, in MiB
    // Parsed as u32 >= 1 (so 0 is rejected at the CLI), then widened to usize.
    #[arg(
        short, long,
        default_value_t = 4, value_name = "M",
        value_parser = value_parser!(u32).range(1..).map(|s| s as usize),
    )]
    pub chunk_size: usize,
    /// Don't hash files before uploading
    // Inverted by `should_hash()` so call sites read positively.
    #[arg(short, long)]
    no_hash: bool,
    /// Increase output verbosity
    // Counted occurrences (-v, -vv, ...); mapped to a log level
    // in `get_level_filter()`.
    #[arg(short, long, action = clap::ArgAction::Count)]
    verbose: u8,
    /// Base URL for Sharry Instance
    // Positional argument; scheme is supplied separately via `--protocol`.
    url: String,
    /// ID of a public alias to use
    pub alias: AliasID,
    /// Files to upload to the new share
    // Each path is validated eagerly at parse time via `parse_sharry_file`.
    #[arg(value_name = "FILE", required = true, value_parser = parse_sharry_file)]
    pub files: Vec<Checked>,
}
impl fmt::Debug for Cli {
    /// Renders the CLI state using the derived getters (`get_uri`,
    /// `get_timeout`, ...) rather than the raw fields, so the debug output
    /// shows the values the rest of the program actually consumes.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("Cli");
        dbg.field("uri", &self.get_uri());
        dbg.field("retry_limit", &self.retry_limit);
        dbg.field("alias", &self.alias);
        dbg.field("timeout", &self.get_timeout());
        dbg.field("chunk_size", &self.chunk_size);
        dbg.field("share_request", &self.get_share_request());
        dbg.field("files", &self.files);
        dbg.field("level_filter", &self.get_level_filter());
        dbg.field("hash", &self.get_hash());
        // `finish_non_exhaustive` prints a trailing `..` to signal that
        // not every field is shown.
        dbg.finish_non_exhaustive()
    }
}
/// clap value parser for `--timeout`: interprets `data` as whole seconds.
///
/// Any string that does not parse as `u64` is deliberately treated as `0`,
/// which the CLI documents as "timeout disabled" (see `Cli::get_timeout`).
/// Hence the parser itself can never fail (`Infallible`).
fn parse_seconds(data: &str) -> Result<Duration, Infallible> {
    let secs: u64 = data.parse().unwrap_or(0);
    Ok(Duration::from_secs(secs))
}
/// clap value parser for the positional FILE arguments.
///
/// Delegates to `Checked::new`; presumably that constructor validates the
/// path (existence/readability — definition not visible here, TODO confirm).
/// Any `io::Error` it returns surfaces as a CLI argument-parsing error.
fn parse_sharry_file(data: &str) -> io::Result<Checked> {
    Checked::new(data)
}
/// Returns references to every element of `values` in ascending order.
///
/// The input slice itself is left untouched; only the returned `Vec` of
/// references is sorted (unstable sort — fine, since equal elements are
/// indistinguishable by value).
fn sorted<T: Ord>(values: &[T]) -> Vec<&T> {
    let mut by_value: Vec<&T> = values.iter().collect();
    by_value.sort_unstable();
    by_value
}
impl Cli {
    /// Effective HTTP timeout.
    ///
    /// Returns `None` when the configured timeout is zero, which is the
    /// documented way to disable timeouts (`parse_seconds` also maps
    /// unparsable input to zero).
    #[must_use]
    pub fn get_timeout(&self) -> Option<Duration> {
        (!self.timeout.is_zero()).then_some(self.timeout)
    }

    /// Base URI of the Sharry instance, combining `--protocol` with the
    /// positional URL argument.
    #[must_use]
    pub fn get_uri(&self) -> Uri {
        Uri::new(&self.protocol, &self.url)
    }

    /// Whether another attempt may be made after `tries` failed attempts.
    ///
    /// A `retry_limit` of 0 is special-cased to mean "retry indefinitely".
    #[must_use]
    pub fn may_retry(&self, tries: u32) -> bool {
        match self.retry_limit {
            0 => true,
            limit => tries < limit,
        }
    }

    /// True unless the user passed `--no-hash`.
    #[must_use]
    pub fn should_hash(&self) -> bool {
        !self.no_hash
    }

    /// Request payload describing the share to create: name, optional
    /// description, and maximum view count.
    #[must_use]
    pub fn get_share_request(&self) -> NewShareRequest {
        NewShareRequest::new(&self.share_name, self.description.as_ref(), self.max_views)
    }

    /// Log level derived from the `-v` count: Error by default, each extra
    /// `-v` steps through Warn, Info, Debug, then Trace.
    #[must_use]
    pub fn get_level_filter(&self) -> LevelFilter {
        match self.verbose {
            0 => LevelFilter::Error,
            1 => LevelFilter::Warn,
            2 => LevelFilter::Info,
            3 => LevelFilter::Debug,
            _ => LevelFilter::Trace,
        }
    }

    /// Names of all files queued for upload, in command-line order.
    // `#[must_use]` added for consistency with every other getter here.
    #[must_use]
    pub fn file_names(&self) -> Vec<&str> {
        self.files.iter().map(FileTrait::get_name).collect()
    }

    /// 16-byte BLAKE2b digest over the URI, alias, and file list, encoded
    /// as unpadded base64url.
    ///
    /// Files are fed to the hasher in sorted order, so the digest does not
    /// depend on the order files were given on the command line —
    /// presumably used as a stable resume/dedup key (TODO confirm callers).
    #[must_use]
    pub fn get_hash(&self) -> String {
        let mut hasher = Blake2b::new().hash_length(16).to_state();
        hasher.update(self.get_uri().as_ref());
        hasher.update(self.alias.as_ref());
        for chk in sorted(&self.files) {
            hasher.update(chk.as_ref());
        }
        Base64UrlUnpadded::encode_string(hasher.finalize().as_bytes())
    }
}