use std::{convert::Infallible, fmt, io, time::Duration};

use base64::prelude::{BASE64_URL_SAFE_NO_PAD as BASE64URL, Engine};
use blake2b_simd::Params as Blake2b;
use clap::{Parser, builder::TypedValueParser, value_parser};
use log::LevelFilter;

use crate::{
    file::{Checked, FileTrait},
    sharry::{AliasID, Uri, json::NewShareRequest},
};

#[derive(Parser)]
#[command(version, about, long_about = None)]
pub struct Cli {
    /// Timeout in seconds for HTTP actions (set 0 or invalid to disable)
    #[arg(
        short,
        long,
        default_value = "10",
        value_name = "SECS",
        value_parser = parse_seconds,
    )]
    timeout: Duration,

    /// Number of times actions are retried
    #[arg(short, long, default_value_t = 5, value_name = "N")]
    retry_limit: u32,

    /// Name of the new share
    #[arg(short, long, default_value = "ShrUpl Upload", value_name = "TEXT")]
    share_name: String,

    /// Description of the new share
    #[arg(short, long, value_name = "TEXT")]
    description: Option<String>,

    /// Maximum number of views for the new share
    #[arg(short, long, default_value_t = 100, value_name = "N")]
    max_views: u32,

    /// Chunk size for uploading, in MiB
    #[arg(
        short,
        long,
        default_value_t = 4,
        value_name = "M",
        value_parser = value_parser!(u32).range(1..).map(|s| s as usize),
    )]
    pub chunk_size: usize,

    /// Don't hash files before uploading
    #[arg(short, long)]
    no_hash: bool,

    /// Increase output verbosity
    #[arg(short, long, action = clap::ArgAction::Count)]
    verbose: u8,

    /// Base URL of the Sharry instance
    url: String,

    /// ID of a public alias to use
    pub alias: AliasID,

    /// Files to upload to the new share
    #[arg(value_name = "FILE", required = true, value_parser = parse_sharry_file)]
    pub files: Vec<Checked>,
}

impl fmt::Debug for Cli {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Cli")
            .field("uri", &self.get_uri())
            .field("retry_limit", &self.retry_limit)
            .field("alias", &self.alias)
            .field("timeout", &self.get_timeout())
            .field("chunk_size", &self.chunk_size)
            .field("share_request", &self.get_share_request())
            .field("files", &self.files)
            .field("level_filter", &self.get_level_filter())
            .field("hash", &self.get_hash())
            .finish_non_exhaustive()
    }
}

/// Parses a seconds value; invalid input falls back to 0 (timeout disabled).
fn parse_seconds(data: &str) -> Result<Duration, Infallible> {
    data.parse().or(Ok(0)).map(Duration::from_secs)
}

/// Builds a [`Checked`] file from a command-line path argument.
fn parse_sharry_file(data: &str) -> io::Result<Checked> {
    Checked::new(data)
}

/// Returns references to `values` in ascending order, leaving the slice untouched.
fn sorted<T>(values: &[T]) -> Vec<&T>
where
    T: Ord,
{
    let mut refs: Vec<_> = values.iter().collect();
    refs.sort_unstable();
    refs
}

impl Cli {
    /// Returns the configured timeout, or `None` if it is zero (disabled).
    #[must_use]
    pub fn get_timeout(&self) -> Option<Duration> {
        (!self.timeout.is_zero()).then_some(self.timeout)
    }

    #[must_use]
    pub fn get_uri(&self) -> Uri {
        Uri::from(self.url.clone())
    }

    /// Reports whether another attempt is allowed after `tries` failures;
    /// a limit of 0 means unlimited retries.
    #[must_use]
    pub fn may_retry(&self, tries: u32) -> bool {
        match self.retry_limit {
            0 => true,
            limit => tries < limit,
        }
    }

    #[must_use]
    pub fn should_hash(&self) -> bool {
        !self.no_hash
    }

    #[must_use]
    pub fn get_share_request(&self) -> NewShareRequest {
        NewShareRequest::new(&self.share_name, self.max_views)
            .description(self.description.as_ref())
    }

    /// Maps the `-v` count to a log level filter.
    #[must_use]
    pub fn get_level_filter(&self) -> LevelFilter {
        match self.verbose {
            0 => LevelFilter::Error,
            1 => LevelFilter::Warn,
            2 => LevelFilter::Info,
            3 => LevelFilter::Debug,
            _ => LevelFilter::Trace,
        }
    }

    pub fn file_names(&self) -> Vec<&str> {
        self.files.iter().map(FileTrait::get_name).collect()
    }

    /// Derives a short identifier from the target URI, alias, and sorted file
    /// list using BLAKE2b with a 16-byte digest, encoded as URL-safe base64.
    #[must_use]
    pub fn get_hash(&self) -> String {
        let mut hasher = Blake2b::new().hash_length(16).to_state();

        hasher.update(self.get_uri().as_ref());
        hasher.update(self.alias.as_ref().as_bytes());
        for chk in sorted(&self.files) {
            hasher.update(chk.as_ref());
        }

        BASE64URL.encode(hasher.finalize())
    }
}
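
// Illustrative sketch only (not part of the original source): a minimal test
// module exercising the two free helper functions above. It assumes nothing
// beyond what is defined in this file; the test names are hypothetical.
#[cfg(test)]
mod tests {
    use super::{parse_seconds, sorted};
    use std::time::Duration;

    #[test]
    fn parse_seconds_falls_back_to_zero() {
        // Valid input parses to the given number of seconds ...
        assert_eq!(parse_seconds("10"), Ok(Duration::from_secs(10)));
        // ... while invalid input yields 0, which disables the timeout.
        assert_eq!(parse_seconds("not-a-number"), Ok(Duration::from_secs(0)));
    }

    #[test]
    fn sorted_returns_ordered_references() {
        let values = [3, 1, 2];
        assert_eq!(sorted(&values), vec![&1, &2, &3]);
    }
}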