various minor refactorings, mostly targeting CLI ergonomics

Jörn-Michael Miehe 2025-05-31 15:27:52 +00:00
parent 1ee56ac3da
commit b999c35965
5 changed files with 63 additions and 40 deletions

View file

@@ -2,7 +2,7 @@ use std::time::Duration;
 
 use clap::{Parser, builder::PossibleValuesParser};
 
-use super::sharry::File;
+use super::sharry::{File, Alias, Uri, NewShareRequest};
 
 #[derive(Parser, Debug, Hash)]
 #[command(version, about, long_about = None)]
@@ -21,29 +21,29 @@ pub struct Cli {
         default_value = "https", value_name = "VARIANT",
         value_parser = PossibleValuesParser::new(["http", "https"]),
     )]
-    pub protocol: String,
+    protocol: String,
 
     /// Name of the new share
     #[arg(short, long, default_value = "ShrUpl Upload", value_name = "TEXT")]
-    pub name: String,
+    name: String,
 
     /// Description of the new share
     #[arg(short, long, value_name = "TEXT")]
-    pub description: Option<String>,
+    description: Option<String>,
 
     /// Maximum number of views for the new share
    #[arg(short, long, default_value_t = 100, value_name = "N")]
-    pub max_views: u32,
+    max_views: u32,
 
     /// Chunk size for uploading, in MiB
     #[arg(short, long, default_value_t = 10, value_name = "N")]
     pub chunk_size: usize,
 
     /// Base URL for Sharry Instance
-    pub url: String,
+    url: String,
 
     /// ID of a public alias to use
-    pub alias: String,
+    alias: String,
 
     /// Files to upload to the new share
     #[arg(value_name = "FILE", required = true, value_parser = parse_sharry_file)]
@@ -62,4 +62,12 @@ impl Cli {
     pub fn get_timeout(&self) -> Option<Duration> {
         (!self.timeout.is_zero()).then_some(self.timeout)
     }
+
+    pub fn get_alias(&self) -> Alias {
+        Alias::new(Uri::with_protocol(&self.protocol, &self.url), &self.alias)
+    }
+
+    pub fn get_share_request(&self) -> NewShareRequest {
+        NewShareRequest::new(&self.name, self.description.as_ref(), self.max_views)
+    }
 }
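The struct fields lose their pub visibility here, so code outside the cli module has to go through the new accessors. A minimal sketch of that usage (not part of the commit), assuming the crate layout seen in main.rs (`use cli::Cli;`); the function name `configure` is illustrative:

    use clap::Parser;

    use cli::Cli;

    fn configure() {
        let args = Cli::parse();

        // Field access such as `args.protocol` no longer compiles from outside
        // the module; construction goes through the new helper methods instead.
        let alias = args.get_alias();
        let request = args.get_share_request();
        let timeout = args.get_timeout();

        // ... hand `alias`, `request` and `timeout` to the upload logic ...
        let _ = (alias, request, timeout);
    }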

View file

@@ -6,7 +6,7 @@ use log::{error, info};
 use ureq::Agent;
 
 use cli::Cli;
-use sharry::{Alias, NewShareRequest, Share, Uri};
+use sharry::Share;
 
 fn main() {
     env_logger::init();
@@ -20,22 +20,22 @@ fn main() {
         .build()
         .into();
 
-    let alias = Alias::new(Uri::with_protocol(args.protocol, args.url), args.alias);
-
-    let share = NewShareRequest::new(args.name, args.description, args.max_views);
-    let share = Share::create(&agent, &alias, share).unwrap();
+    let alias = args.get_alias();
+    let share = Share::create(&agent, &alias, args.get_share_request()).unwrap();
 
     info!("share: {share:?}");
 
     for file in args.files {
         let file = file.create(&agent, &alias, &share).unwrap();
         info!("file: {file:?}");
 
-        for chunk in file.chunked(args.chunk_size * 1024 * 1024) {
+        for chunk in file.chunked(args.chunk_size * 1024 * 1024).seek(0) {
             info!("chunk len: {}", chunk.bytes.len());
 
             file.upload_chunk(&agent, &alias, &chunk)
-                .inspect_err(|e| error!("error: {e}"))
-                .unwrap();
+                .unwrap_or_else(|e| {
+                    error!("error: {e}");
+                    panic!("{e}");
+                });
         }
     }
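The chunk-upload error path switches from `.inspect_err(...).unwrap()` to an explicit `unwrap_or_else` that logs before aborting. A sketch of the same pattern as a standalone helper; the function name `unwrap_logged` and the generic `Display` bound are illustrative, not part of the commit:

    use std::fmt::Display;

    use log::error;

    // Log through the `log` facade first, then abort with the same message,
    // mirroring the closure passed to `unwrap_or_else` above.
    fn unwrap_logged<T, E: Display>(result: Result<T, E>) -> T {
        result.unwrap_or_else(|e| {
            error!("error: {e}");
            panic!("{e}");
        })
    }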

View file

@@ -5,7 +5,7 @@ use ureq::RequestBuilder;
 
 use super::api::Uri;
 
-#[derive(Debug)]
+#[derive(Debug, Hash)]
 pub struct Alias {
     pub(super) api_uri: String,
     pub(super) id: String,

View file

@@ -1,23 +1,31 @@
 use std::{
     fs::File,
     io::{Read, Seek, SeekFrom},
-    path::PathBuf,
+    path::Path,
 };
 
 use log::error;
 
 pub struct FileChunks<'t> {
-    path: &'t PathBuf,
-    cnum: u64,
-    csize: usize,
+    file_path: &'t Path,
+    chunk_index: u64,
+    chunk_size: usize,
 }
 
 impl<'t> FileChunks<'t> {
-    pub(super) fn new(path: &'t PathBuf, chunk_size: usize) -> Self {
+    pub(super) fn new(path: &'t Path, chunk_size: usize) -> Self {
         Self {
-            path,
-            cnum: 0,
-            csize: chunk_size,
+            file_path: path,
+            chunk_index: 0,
+            chunk_size,
         }
     }
+
+    pub fn seek(self, chunk_index: u64) -> Self {
+        Self {
+            file_path: self.file_path,
+            chunk_index,
+            chunk_size: self.chunk_size,
+        }
+    }
 }
@@ -26,23 +34,23 @@ impl Iterator for FileChunks<'_> {
     type Item = Chunk;
 
     fn next(&mut self) -> Option<Self::Item> {
-        let offset = {
-            let csize: u64 = self.csize.try_into().unwrap();
-            self.cnum * csize
-        };
+        let offset = self.chunk_index
+            * u64::try_from(self.chunk_size)
+                .inspect_err(|e| error!("Error converting to u64: {e}"))
+                .ok()?;
 
-        let mut f = File::open(self.path)
+        let mut f = File::open(self.file_path)
             .inspect_err(|e| error!("Error opening file: {e}"))
             .ok()?;
         f.seek(SeekFrom::Start(offset)).ok()?;
 
-        let mut bytes = vec![0; self.csize];
+        let mut bytes = vec![0; self.chunk_size];
         let read_len = (f.read(&mut bytes))
             .inspect_err(|e| error!("Error reading file: {e}"))
             .ok()?;
         bytes.truncate(read_len);
 
-        self.cnum += 1;
+        self.chunk_index += 1;
         Some(Self::Item { offset, bytes }).filter(|c| !c.bytes.is_empty())
     }
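The new consuming `seek` builder lets iteration start at an arbitrary chunk index (main.rs currently passes 0). A sketch of resuming a partially uploaded file, assuming `File` is reachable as `sharry::File` as in cli.rs; the function name `resume_chunks` is illustrative:

    use sharry::File;

    // Iterate chunks starting at `start_index`: each `next()` call reopens the
    // file, seeks to chunk_index * chunk_size and reads at most `chunk_size`
    // bytes, stopping once a read returns no data.
    fn resume_chunks(file: &File, chunk_size: usize, start_index: u64) {
        for chunk in file.chunked(chunk_size).seek(start_index) {
            println!("chunk of {} bytes", chunk.bytes.len());
        }
    }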

View file

@@ -3,8 +3,9 @@ mod chunks;
 use std::{
     ffi::OsStr,
     fs::metadata,
+    hash::{Hash, Hasher},
     io::{self, ErrorKind},
-    path::{Path, PathBuf},
+    path::{Path, PathBuf, absolute},
 };
 
 use log::{debug, error};
@@ -16,29 +17,35 @@ use super::{
 };
 
 pub use chunks::{Chunk, FileChunks};
 
-#[derive(Debug, Clone, Hash)]
+#[derive(Debug, Clone)]
 pub struct File {
-    path: PathBuf,
+    abs_path: PathBuf,
     name: String,
     size: u64,
     patch_uri: Option<String>,
 }
 
+impl Hash for File {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.abs_path.hash(state);
+    }
+}
+
 impl File {
     pub fn new(path: impl AsRef<Path>) -> io::Result<Self> {
-        let path = path.as_ref().to_owned();
-        let m = metadata(&path)?;
+        let abs_path = absolute(path)?;
+        let m = metadata(&abs_path)?;
         if !m.is_file() {
             return Err(io::Error::new(ErrorKind::NotFound, "not a file"));
         }
 
-        let name = (path.file_name().and_then(OsStr::to_str))
+        let name = (abs_path.file_name().and_then(OsStr::to_str))
             .ok_or_else(|| io::Error::new(ErrorKind::NotFound, "bad file name"))?
             .to_string();
 
         Ok(Self {
-            path,
+            abs_path,
             name,
             size: m.len(),
             patch_uri: None,
@@ -76,7 +83,7 @@ impl File {
         debug!("received uri: {location}");
 
         Ok(Self {
-            path: self.path,
+            abs_path: self.abs_path,
             name: self.name,
             size: self.size,
             patch_uri: Some(location),
@@ -84,7 +91,7 @@
     }
 
     pub fn chunked(&self, chunk_size: usize) -> FileChunks {
-        FileChunks::new(&self.path, chunk_size)
+        FileChunks::new(&self.abs_path, chunk_size)
     }
 
     pub fn upload_chunk(
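With the derive dropped, `Hash` for `File` is now keyed on `abs_path` alone, so the same file always hashes to the same value no matter what `name`, `size` or `patch_uri` hold. A sketch, again assuming `File` is exposed as `sharry::File`; the function name `file_key` is illustrative:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    use sharry::File;

    // Only the absolute path feeds the hasher (see the manual Hash impl above),
    // so the same file yields the same key regardless of `name`, `size` or
    // `patch_uri`.
    fn file_key(file: &File) -> u64 {
        let mut hasher = DefaultHasher::new();
        file.hash(&mut hasher);
        hasher.finish()
    }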