Merge branch 'feature/sharry_client' into develop

commit 4b650fd82f
Author: Jörn-Michael Miehe
Date: 2025-06-12 23:07:25 +00:00

15 changed files with 670 additions and 619 deletions


@@ -1,138 +1,84 @@
 use std::{
-    collections::VecDeque,
-    fs,
-    io::{self, Write},
-    path::{Path, PathBuf},
+    cell::{Ref, RefCell},
+    fmt, io,
     time::Duration,
 };

 use console::style;
 use indicatif::{ProgressBar, ProgressStyle};
-use log::{debug, trace};
-use serde::{Deserialize, Serialize};
+use log::debug;

 use super::{
+    cachefile::CacheFile,
     cli::Cli,
-    sharry::{Alias, ChunkState, FileChecked, FileUploading, Share, SharryFile, UploadError},
+    file::{self, FileTrait},
+    sharry::{self, Client, ClientError},
 };

-#[derive(Serialize, Deserialize, Debug)]
 pub struct AppState {
-    #[serde(skip)]
-    file_name: PathBuf,
-    #[serde(skip)]
-    progress: Option<ProgressBar>,
-    alias: Alias,
-    share: Share,
-    files: VecDeque<FileState>,
+    current_bar: RefCell<Option<ProgressBar>>,
+    buffer: Vec<u8>,
+    http: ureq::Agent,
+    inner: CacheFile,
 }

-#[derive(Serialize, Deserialize, Debug)]
-enum FileState {
-    C(FileChecked),
-    U(FileUploading),
+impl fmt::Debug for AppState {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AppState")
+            .field("inner", &self.inner)
+            .finish_non_exhaustive()
+    }
 }

-impl FileState {
-    fn file_name(&self) -> &str {
-        match self {
-            FileState::C(checked) => checked.get_name(),
-            FileState::U(uploading) => uploading.get_name(),
-        }
-    }
-
-    fn start_upload(
-        self,
-        http: &ureq::Agent,
-        alias: &Alias,
-        share: &Share,
-    ) -> io::Result<FileUploading> {
-        match self {
-            FileState::C(checked) => checked.start_upload(http, alias, share),
-            FileState::U(uploading) => Ok(uploading),
-        }
-    }
+fn new_http(timeout: Option<Duration>) -> ureq::Agent {
+    ureq::Agent::config_builder()
+        .timeout_global(timeout)
+        .build()
+        .into()
 }

 impl AppState {
-    fn cache_dir() -> PathBuf {
-        let dir_name = dirs_next::cache_dir()
-            .expect("could not determine cache directory")
-            .join("shrupl");
-
-        trace!("cachedir: {:?}", dir_name.display());
-        dir_name
-    }
-
-    fn cache_file(args: &Cli) -> PathBuf {
-        let file_name = Self::cache_dir().join(format!("{}.json", args.get_hash()));
-
-        trace!("cachefile: {:?}", file_name.display());
-        file_name
-    }
-
-    fn load(file_name: impl AsRef<Path>) -> io::Result<Self> {
-        let content = fs::read_to_string(file_name)?;
-        serde_json::from_str(&content).map_err(io::Error::other)
+    fn new(chunk_size: usize, http: ureq::Agent, inner: CacheFile) -> Self {
+        Self {
+            current_bar: None.into(),
+            buffer: vec![0; chunk_size * 1024 * 1024],
+            http,
+            inner,
+        }
     }

     pub fn try_resume(args: &Cli) -> Option<Self> {
-        let file_name = Self::cache_file(args);
-
-        Self::load(&file_name)
-            .inspect_err(|e| debug!("could not resume from {:?}: {e}", file_name.display()))
-            .map(|state| {
-                debug!("successfully loaded AppState");
-
-                Self {
-                    file_name,
-                    progress: None,
-                    alias: state.alias,
-                    share: state.share,
-                    files: state.files,
-                }
-            })
-            .ok()
+        let inner = CacheFile::try_resume(args)
+            .inspect_err(|e| debug!("could not resume from hash {:?}: {e}", args.get_hash()))
+            .ok()?;
+
+        Some(Self::new(
+            args.chunk_size,
+            new_http(args.get_timeout()),
+            inner,
+        ))
     }

-    pub fn from_args(args: &Cli, http: &ureq::Agent) -> Result<Self, ureq::Error> {
-        let file_name = Self::cache_file(args);
-
-        let alias = args.get_alias();
-        let share = Share::create(http, &alias, args.get_share_request())?;
-
-        let files: VecDeque<_> = args.files.clone().into_iter().map(FileState::C).collect();
-
-        Ok(Self {
-            file_name,
-            progress: None,
-            alias,
-            share,
-            files,
-        })
+    pub fn from_args(args: &Cli) -> sharry::Result<Self> {
+        let http = new_http(args.get_timeout());
+
+        let share_id = http.share_create(
+            &args.get_uri().endpoint("alias/upload/new"),
+            &args.alias,
+            args.get_share_request(),
+        )?;
+
+        Ok(Self::new(
+            args.chunk_size,
+            http,
+            CacheFile::from_args(args, share_id),
+        ))
     }

-    pub fn file_names(&self) -> Vec<&str> {
-        self.files.iter().map(FileState::file_name).collect()
-    }
-
-    pub fn upload_chunk(
-        &mut self,
-        http: &ureq::Agent,
-        chunk_size: usize,
-    ) -> Result<Option<()>, UploadError> {
-        let uploading = if let Some(state) = self.files.pop_front() {
-            state.start_upload(http, &self.alias, &self.share).unwrap() // HACK unwrap
-        } else {
-            return Ok(None);
-        };
-
-        debug!("{uploading} chunk {chunk_size}");
-
-        // Initialize or fetch the existing ProgressBar in one call:
-        let bar = &*self.progress.get_or_insert_with(|| {
-            // Create a new bar with style
+    fn get_or_create_progressbar(&self, uploading: &file::Uploading) -> Ref<'_, ProgressBar> {
+        let mut slot = self.current_bar.borrow_mut();
+
+        if slot.is_none() {
             let bar = ProgressBar::new(uploading.get_size())
                 .with_style(
                     ProgressStyle::with_template(&format!(
@@ -143,51 +89,80 @@ impl AppState {
                     ),
                     style("/").magenta(),
                 ))
-                .unwrap(),
+                .unwrap(), // safe as long as the style template is valid
                 )
-                .with_message(uploading.get_name().to_owned())
-                .with_position(uploading.get_offset());
+                .with_position(uploading.get_offset())
+                .with_message(uploading.get_name().to_owned());

             bar.enable_steady_tick(Duration::from_millis(100));
-            bar
-        });
+            *slot = Some(bar);
+        }
+        drop(slot);

-        match uploading.upload_chunk(http, &self.alias, chunk_size) {
-            ChunkState::Ok(upl) => {
-                bar.set_position(upl.get_offset());
-                self.files.push_front(FileState::U(upl));
+        // unwrap is safe: We just made sure it's `Some`.
+        Ref::map(self.current_bar.borrow(), |opt| opt.as_ref().unwrap())
+    }
+
+    fn finish_bar(&self) {
+        let mut slot = self.current_bar.borrow_mut();
+
+        if let Some(bar) = &*slot {
+            bar.finish();
+            *slot = None;
+        }
+    }
+
+    pub fn upload_chunk(&mut self) -> sharry::Result<Option<()>> {
+        let Some(mut uploading) = self.inner.pop_file(&self.http) else {
+            self.inner.share_notify(&self.http).unwrap(); // HACK unwrap
+            return Ok(None);
+        };
+
+        self.get_or_create_progressbar(&uploading);
+        debug!("{uploading} chunk {}", self.buffer.len());
+
+        let chunk = uploading
+            .read(&mut self.buffer)
+            .map_err(ClientError::from)?;
+
+        self.http.file_patch(
+            chunk.get_patch_uri(),
+            self.inner.alias_id(),
+            chunk.get_offset(),
+            chunk.get_data(),
+        )?;
+
+        match uploading.check_eof() {
+            Ok(uploading) => {
+                let bar = self.get_or_create_progressbar(&uploading);
+                bar.set_position(uploading.get_offset());
+                // BUG in `indicatif` crate?
+                // `set_position` does not force immediate redraw, so we also call `inc_length` here
+                bar.inc_length(0);
+                drop(bar);
+
+                self.inner.push_file(uploading);

                 Ok(Some(()))
             }
-            ChunkState::Err(upl, e) => {
-                self.files.push_front(FileState::U(upl));
-                Err(e)
-            }
-            ChunkState::Finished(path) => {
+            Err(path) => {
                 debug!("Finished {:?}!", path.display());
-                bar.finish();
-                self.progress = None;
-
-                self.share.notify(http, &self.alias).unwrap(); // HACK unwrap
+                self.finish_bar();

-                Ok(self.files.front().map(drop))
+                Ok(self.inner.has_file().then_some(()))
             }
         }
     }

+    pub fn file_names(&self) -> Vec<&str> {
+        self.inner.file_names()
+    }
+
     pub fn save(&self) -> io::Result<()> {
-        fs::create_dir_all(Self::cache_dir())?;
-
-        let json = serde_json::to_string_pretty(self).map_err(io::Error::other)?;
-
-        let mut file = fs::File::create(&self.file_name)?;
-        file.write_all(json.as_bytes())?;
-
-        trace!("updated {:?}", self.file_name.display());
-        Ok(())
+        self.inner.save()
     }

     pub fn clear(self) -> io::Result<()> {
-        fs::remove_file(&self.file_name)?;
-        trace!("removed {:?}", self.file_name.display());
-        Ok(())
+        self.inner.clear()
     }
 }

src/cachefile.rs (new file, 149 lines)

@@ -0,0 +1,149 @@
use std::{
collections::VecDeque,
fs,
io::{self, Write},
path::PathBuf,
};
use log::trace;
use serde::{Deserialize, Serialize};
use super::{
cli::Cli,
file::{self, FileTrait},
sharry::{self, Client, Uri},
};
#[derive(Serialize, Deserialize, Debug)]
enum FileState {
C(file::Checked),
U(file::Uploading),
}
impl FileState {
fn file_name(&self) -> &str {
match self {
FileState::C(c) => c.get_name(),
FileState::U(u) => u.get_name(),
}
}
fn start_upload(
self,
http: &impl Client,
endpoint: &str,
alias_id: &str,
) -> sharry::Result<file::Uploading> {
match self {
FileState::C(checked) => checked.start_upload(http, endpoint, alias_id),
FileState::U(uploading) => Ok(uploading),
}
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct CacheFile {
#[serde(skip)]
file_name: PathBuf,
uri: Uri,
alias_id: String,
share_id: String,
files: VecDeque<FileState>,
}
impl CacheFile {
fn cache_dir() -> PathBuf {
let dir_name = dirs_next::cache_dir()
.expect("could not determine cache directory")
.join("shrupl");
trace!("cachedir: {:?}", dir_name.display());
dir_name
}
fn cache_file(args: &Cli) -> PathBuf {
let file_name = Self::cache_dir().join(format!("{}.json", args.get_hash()));
trace!("cachefile: {:?}", file_name.display());
file_name
}
pub fn try_resume(args: &Cli) -> io::Result<Self> {
let file_name = Self::cache_file(args);
let state: Self = {
let file = fs::File::open(&file_name)?;
let reader = io::BufReader::new(file);
serde_json::from_reader(reader).map_err(io::Error::other)?
};
Ok(Self { file_name, ..state })
}
pub fn from_args(args: &Cli, share_id: String) -> Self {
Self {
file_name: Self::cache_file(args),
uri: args.get_uri(),
alias_id: args.alias.clone(),
share_id,
files: args.files.clone().into_iter().map(FileState::C).collect(),
}
}
pub fn alias_id(&self) -> &str {
&self.alias_id
}
pub fn file_names(&self) -> Vec<&str> {
self.files.iter().map(FileState::file_name).collect()
}
pub fn has_file(&self) -> bool {
!self.files.is_empty()
}
pub fn pop_file(&mut self, http: &impl Client) -> Option<file::Uploading> {
if let Some(state) = self.files.pop_front() {
let endpoint = self
.uri
.endpoint(format!("alias/upload/{}/files/tus", self.share_id));
Some(state.start_upload(http, &endpoint, &self.alias_id).unwrap()) // HACK unwrap
} else {
None
}
}
pub fn push_file(&mut self, file: file::Uploading) {
self.files.push_front(FileState::U(file));
}
pub fn share_notify(&self, http: &impl Client) -> sharry::Result<()> {
let endpoint = self
.uri
.endpoint(format!("alias/mail/notify/{}", self.share_id));
http.share_notify(&endpoint, &self.alias_id)
}
pub fn save(&self) -> io::Result<()> {
let cache_dir = self.file_name.parent().ok_or_else(|| {
io::Error::other(format!("orphan file {:?}", self.file_name.display()))
})?;
fs::create_dir_all(cache_dir)?;
let json = serde_json::to_string_pretty(self).map_err(io::Error::other)?;
let mut file = fs::File::create(&self.file_name)?;
file.write_all(json.as_bytes())?;
trace!("updated {:?}", self.file_name.display());
Ok(())
}
pub fn clear(self) -> io::Result<()> {
fs::remove_file(&self.file_name)?;
trace!("removed {:?}", self.file_name.display());
Ok(())
}
}


@@ -1,13 +1,17 @@
 use std::{
+    fmt,
     hash::{DefaultHasher, Hash, Hasher},
     time::Duration,
 };

 use clap::{Parser, builder::PossibleValuesParser};

-use super::sharry::{Alias, FileChecked, NewShareRequest, Uri};
+use super::{
+    file::Checked,
+    sharry::{NewShareRequest, Uri},
+};

-#[derive(Parser, Debug, Hash)]
+#[derive(Parser, Hash)]
 #[command(version, about, long_about = None)]
 pub struct Cli {
     /// Timeout in seconds for HTTP actions (set 0 or invalid to disable)
@@ -46,19 +50,33 @@ pub struct Cli {
     url: String,

     /// ID of a public alias to use
-    alias: String,
+    pub alias: String,

     /// Files to upload to the new share
     #[arg(value_name = "FILE", required = true, value_parser = parse_sharry_file)]
-    pub files: Vec<FileChecked>,
+    pub files: Vec<Checked>,
 }

+impl fmt::Debug for Cli {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Cli")
+            .field("uri", &self.get_uri())
+            .field("alias", &self.alias)
+            .field("timeout", &self.get_timeout())
+            .field("chunk_size", &self.chunk_size)
+            .field("share_request", &self.get_share_request())
+            .field("files", &self.files)
+            .field("hash", &self.get_hash())
+            .finish_non_exhaustive()
+    }
+}
+
 fn parse_seconds(data: &str) -> Result<Duration, String> {
     data.parse().or(Ok(0)).map(Duration::from_secs)
 }

-fn parse_sharry_file(data: &str) -> Result<FileChecked, String> {
-    FileChecked::new(data).map_err(|e| e.to_string())
+fn parse_sharry_file(data: &str) -> Result<Checked, String> {
+    Checked::new(data).map_err(|e| e.to_string())
 }

 impl Cli {
@@ -66,8 +84,8 @@ impl Cli {
         (!self.timeout.is_zero()).then_some(self.timeout)
     }

-    pub fn get_alias(&self) -> Alias {
-        Alias::new(Uri::with_protocol(&self.protocol, &self.url), &self.alias)
+    pub fn get_uri(&self) -> Uri {
+        Uri::with_protocol(&self.protocol, &self.url)
     }

     pub fn get_share_request(&self) -> NewShareRequest {
@@ -83,7 +101,7 @@ impl Cli {
         };

         let mut hasher = DefaultHasher::new();
-        (self.get_alias(), file_refs).hash(&mut hasher);
+        (self.get_uri(), &self.alias, file_refs).hash(&mut hasher);

         format!("{:x}", hasher.finish())
     }

src/file/checked.rs (new file, 57 lines)

@@ -0,0 +1,57 @@
use std::{
fs, io,
path::{Path, PathBuf},
};
use serde::{Deserialize, Serialize};
use crate::sharry;
use super::{FileTrait, Uploading};
#[derive(Debug, Clone, Hash, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct Checked {
path: PathBuf,
size: u64,
}
impl Checked {
pub fn new(value: impl AsRef<Path>) -> io::Result<Self> {
let meta = fs::metadata(&value)?;
if meta.is_file() {
Ok(Self {
path: fs::canonicalize(&value)?,
size: meta.len(),
})
} else {
Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Not a regular file",
))
}
}
pub fn start_upload(
self,
client: &impl sharry::Client,
endpoint: &str,
alias_id: &str,
) -> sharry::Result<Uploading> {
let patch_uri = client.file_create(endpoint, alias_id, self.get_name(), self.size)?;
Ok(Uploading::new(self.path, self.size, patch_uri))
}
}
impl<'t> FileTrait<'t> for Checked {
/// get a reference to the file's name
///
/// Uses `FileTrait::extract_file_name`, which may **panic**!
fn get_name(&'t self) -> &'t str {
<Self as FileTrait>::extract_file_name(&self.path)
}
fn get_size(&self) -> u64 {
self.size
}
}

src/file/chunk.rs (new file, 36 lines)

@@ -0,0 +1,36 @@
pub struct Chunk<'t> {
data: &'t [u8],
patch_uri: &'t str,
offset: u64,
}
impl<'t> Chunk<'t> {
pub fn new(data: &'t [u8], patch_uri: &'t str, offset: u64) -> Self {
Self {
data,
patch_uri,
offset,
}
}
pub fn get_data(&self) -> &[u8] {
self.data
}
pub fn get_length(&self) -> u64 {
let len = self.data.len();
// BOOKMARK this might **panic** on platforms where `usize` has more than 64 bit.
// Also, you've allocated more than 2 EiB ... in ONE chunk.
// Whoa! Maybe just chill?
u64::try_from(len).unwrap_or_else(|e| panic!("usize={len} did not fit into u64: {e}"))
}
pub fn get_patch_uri(&self) -> &str {
self.patch_uri
}
pub fn get_offset(&self) -> u64 {
self.offset
}
}


@ -1,17 +1,14 @@
mod checked; mod checked;
mod chunk;
mod uploading; mod uploading;
use std::{ use std::{ffi::OsStr, path::Path};
ffi::OsStr,
path::{Path, PathBuf},
};
pub use checked::FileChecked; pub use checked::Checked;
pub use uploading::{ChunkState, FileUploading, UploadError}; pub use chunk::Chunk;
pub use uploading::Uploading;
use super::{Alias, Share, alias::SharryAlias}; pub trait FileTrait<'t> {
pub trait SharryFile<'t> {
/// extract the filename part of a `Path` reference /// extract the filename part of a `Path` reference
/// ///
/// # Panics /// # Panics
@ -23,8 +20,6 @@ pub trait SharryFile<'t> {
.expect("bad file name") .expect("bad file name")
} }
fn into_path(self) -> PathBuf;
fn get_name(&'t self) -> &'t str; fn get_name(&'t self) -> &'t str;
fn get_size(&self) -> u64; fn get_size(&self) -> u64;

src/file/uploading.rs (new file, 84 lines)

@@ -0,0 +1,84 @@
use std::{
fmt, fs,
io::{self, Read, Seek, SeekFrom},
path::PathBuf,
};
use serde::{Deserialize, Serialize};
use super::{Chunk, FileTrait};
#[derive(Serialize, Deserialize, Debug)]
pub struct Uploading {
path: PathBuf,
size: u64,
patch_uri: String,
offset: u64,
}
impl fmt::Display for Uploading {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"Uploading {:?} ({}/{})",
self.path.display(),
self.offset,
self.size
)
}
}
impl Uploading {
pub(super) fn new(path: PathBuf, size: u64, patch_uri: String) -> Self {
Self {
path,
size,
patch_uri,
offset: 0,
}
}
pub fn get_offset(&self) -> u64 {
self.offset
}
pub fn read<'t>(&'t mut self, buf: &'t mut [u8]) -> io::Result<Chunk<'t>> {
let mut f = fs::File::open(&self.path)?;
f.seek(SeekFrom::Start(self.offset))?;
let read_len = f.read(buf)?;
if read_len == 0 {
return Err(io::Error::new(
io::ErrorKind::UnexpectedEof,
format!("could not read from file {:?}", self.path.display()),
));
}
let chunk = Chunk::new(&buf[..read_len], &self.patch_uri, self.offset);
self.offset += chunk.get_length();
Ok(chunk)
}
pub fn check_eof(self) -> Result<Self, PathBuf> {
if self.offset < self.size {
Ok(self)
} else {
Err(self.path)
}
}
}
impl<'t> FileTrait<'t> for Uploading {
/// get a reference to the file's name
///
/// Uses `FileTrait::extract_file_name`, which may **panic**!
fn get_name(&'t self) -> &'t str {
<Self as FileTrait>::extract_file_name(&self.path)
}
fn get_size(&self) -> u64 {
self.size
}
}
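
The split between read, Chunk, and check_eof dictates the calling pattern: read a chunk into a caller-owned buffer, transmit it, then let check_eof either hand the value back for the next round or return the finished path. A minimal sketch of that loop (a hypothetical helper, HTTP call elided; AppState::upload_chunk above does the same dance one chunk at a time):

// hypothetical helper inside the crate, not part of this commit
fn drain_file(mut uploading: Uploading, buf: &mut [u8]) -> std::io::Result<()> {
    loop {
        // reads at the current offset and advances it by the chunk length
        let chunk = uploading.read(buf)?;
        // ... PATCH chunk.get_data() to chunk.get_patch_uri() at chunk.get_offset() ...
        drop(chunk);

        match uploading.check_eof() {
            // not at EOF yet: the value comes back for another round
            Ok(rest) => uploading = rest,
            // at EOF: only the path survives
            Err(done) => {
                println!("finished {:?}", done.display());
                return Ok(());
            }
        }
    }
}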


@@ -1,9 +1,11 @@
 mod appstate;
+mod cachefile;
 mod cli;
+mod file;
 mod sharry;

 use std::{
-    process::exit,
+    process,
     sync::{
         Arc,
         atomic::{AtomicBool, Ordering},
@@ -14,37 +16,65 @@ use clap::Parser;
 use console::style;
 use dialoguer::{Confirm, theme::ColorfulTheme};
 use log::{error, info};
-use ureq::Agent;

 use appstate::AppState;
 use cli::Cli;
+use sharry::ClientError;
+
+fn print_error(e: &ClientError) {
+    if let Some(cause) = match e {
+        // known errors
+        ClientError::ResponseStatus {
+            actual: 403,
+            expected: _,
+        } => Some("Alias ID"),
+        ClientError::StdIo(_) => Some("URL"),
+
+        // unknown error
+        _ => None,
+    } {
+        // handle known error
+        info!("known error: {e:?}");
+        println!(
+            "{} probably wrong: {}",
+            style("Error!").red().bold(),
+            style(cause).cyan(),
+        );
+        println!("{}", style(e.to_string()).yellow().italic());
+    } else {
+        // handle unknown error
+        error!("unknown error: {e} ({e:?})");
+        println!("{}", style("Unknown Error!").red().bold());
+    }
+}

 fn main() {
+    env_logger::init();
+
     println!(
         "{} to {}!",
         style("Welcome").magenta().bold(),
         style("ShrUpl").yellow().bold(),
     );

-    let stop = Arc::new(AtomicBool::new(false));
+    let check_ctrlc = {
+        let stop = Arc::new(AtomicBool::new(false));
+        let stop_ctrlc = stop.clone();

-    let stop_ctrlc = stop.clone();
-    ctrlc::set_handler(move || {
-        stop_ctrlc.store(true, Ordering::SeqCst);
-        info!("stopping as soon as possible ...");
-    })
-    .expect("Error setting Ctrl-C handler");
+        ctrlc::set_handler(move || {
+            stop_ctrlc.store(true, Ordering::SeqCst);
+            info!("stopping as soon as possible ...");
+        })
+        .expect("Error setting Ctrl-C handler");

-    env_logger::init();
+        move || {
+            if stop.load(Ordering::SeqCst) {
+                process::exit(255);
+            }
+        }
+    };

     let args = Cli::parse();
     info!("args: {args:?}");
-    info!("timeout: {:?}", args.get_timeout());
-
-    let agent: Agent = Agent::config_builder()
-        .timeout_global(args.get_timeout())
-        .build()
-        .into();

     let mut state = AppState::try_resume(&args)
         .and_then(|state| {
@@ -55,46 +85,30 @@ fn main() {
                 .map_or(None, |b| b.then_some(state))
         })
         .unwrap_or_else(|| {
-            stop.load(Ordering::SeqCst).then(|| exit(0));
+            check_ctrlc();

-            match AppState::from_args(&args, &agent) {
+            match AppState::from_args(&args) {
                 Ok(state) => {
                     state.save().unwrap(); // HACK unwrap
                     state
                 }
                 Err(e) => {
-                    if let Some(cause) = match e {
-                        ureq::Error::StatusCode(403) => Some("Alias ID"),
-                        ureq::Error::Io(_) => Some("URL"),
-                        _ => None,
-                    } {
-                        info!("handling error: {e:?}");
-                        println!(
-                            "{} probably wrong: {} {:?}",
-                            style("Error!").red().bold(),
-                            style(cause).cyan().italic(),
-                            style(e.to_string()).yellow()
-                        );
-                    } else {
-                        error!("unknown error: {e} {e:?}");
-                        println!("{}", style("Unknown Error!").red().bold());
-                    }
-
-                    exit(1);
+                    print_error(&e);
+                    process::exit(1);
                 }
             }
         });

-    info!("continuing with state: {state:?}");
-
     println!(
         "{} uploading: {}",
         style("ShrUpl").yellow().bold(),
         style(state.file_names().join(", ")).magenta(),
     );
+    info!("continuing with state: {state:?}");

     loop {
-        match state.upload_chunk(&agent, args.chunk_size * 1024 * 1024) {
+        match state.upload_chunk() {
             Err(e) => error!("error: {e:?}"),
             Ok(None) => {
                 info!("all uploads done");
@@ -105,6 +119,6 @@ fn main() {
         }

         state.save().unwrap(); // HACK unwrap
-        stop.load(Ordering::SeqCst).then(|| exit(0));
+        check_ctrlc();
     }
 }


@@ -1,36 +0,0 @@
use std::fmt::{Debug, Display};
use log::debug;
use serde::{Deserialize, Serialize};
use ureq::RequestBuilder;
use super::api::Uri;
#[derive(Serialize, Deserialize, Debug, Hash)]
pub struct Alias {
pub(super) uri: Uri,
pub(super) id: String,
}
pub(super) trait SharryAlias {
fn sharry_header(self, alias: &Alias) -> Self;
}
impl<B> SharryAlias for RequestBuilder<B> {
fn sharry_header(self, alias: &Alias) -> Self {
self.header("Sharry-Alias", &alias.id)
}
}
impl Alias {
pub fn new(uri: Uri, id: impl Into<String>) -> Self {
Self { uri, id: id.into() }
}
pub(super) fn get_endpoint(&self, endpoint: impl Display + Debug) -> String {
let uri = format!("{}/{}", self.uri, endpoint);
debug!("endpoint uri: {uri:?}");
uri
}
}


@@ -10,19 +10,19 @@ pub struct Uri {
 }

 impl Uri {
-    pub(super) fn get_endpoint(&self, endpoint: impl fmt::Display + fmt::Debug) -> String {
-        let uri = format!("{}/{}", self, endpoint);
-        debug!("endpoint uri: {uri:?}");
-        uri
-    }
-
     pub fn with_protocol(protocol: impl Into<String>, base_url: impl Into<String>) -> Self {
         Self {
             protocol: protocol.into(),
             base_url: base_url.into(),
         }
     }
+
+    pub fn endpoint(&self, endpoint: impl fmt::Display) -> String {
+        let uri = format!("{self}/{endpoint}");
+        debug!("endpoint: {uri:?}");
+        uri
+    }
 }

 impl fmt::Display for Uri {
@@ -31,7 +31,7 @@ impl fmt::Display for Uri {
     }
 }

-#[derive(Serialize)]
+#[derive(Serialize, Debug)]
 #[allow(non_snake_case)]
 pub struct NewShareRequest {
     name: String,
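
A quick sketch of how the relocated endpoint helper is meant to be used. The Display impl for Uri is not shown in this hunk, so the rendered URL below is an assumption (protocol://base_url), and the host name is invented:

let uri = Uri::with_protocol("https", "sharry.example.com/api/v2");

// assuming Display renders "https://sharry.example.com/api/v2",
// this yields "https://sharry.example.com/api/v2/alias/upload/new"
let create_endpoint = uri.endpoint("alias/upload/new");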


@@ -1,79 +1,106 @@
-use std::{error::Error, io};
+use std::fmt;

-use log::debug;
+use log::{debug, trace};
+use thiserror::Error;

-use super::{
-    api::{NewShareRequest, NewShareResponse, NotifyShareResponse, Uri},
-    file::{FileChecked, FileUploading, SharryFile},
-};
+use super::api::{NewShareRequest, NewShareResponse, NotifyShareResponse};
+
+pub type Result<T> = std::result::Result<T, ClientError>;

 pub trait Client {
-    fn sharry_share_create(
-        &self,
-        uri: &Uri,
-        alias_id: &str,
-        data: NewShareRequest,
-    ) -> Result<String, ClientError>;
+    fn share_create(&self, endpoint: &str, alias_id: &str, data: NewShareRequest)
+        -> Result<String>;

-    fn sharry_share_notify(
-        &self,
-        uri: &Uri,
-        alias_id: &str,
-        share_id: &str,
-    ) -> Result<(), ClientError>;
+    fn share_notify(&self, endpoint: &str, alias_id: &str) -> Result<()>;

-    fn sharry_file_create(
+    fn file_create(
         &self,
-        uri: &Uri,
+        endpoint: &str,
         alias_id: &str,
-        share_id: &str,
-        file: FileChecked,
-    ) -> Result<FileUploading, ClientError>;
+        file_name: &str,
+        file_size: u64,
+    ) -> Result<String>;

-    // fn sharry_file_patch(&self);
+    fn file_patch(&self, endpoint: &str, alias_id: &str, offset: u64, chunk: &[u8]) -> Result<()>;
 }

-#[derive(Debug, thiserror::Error)]
+#[derive(Debug, Error)]
 pub enum ClientError {
-    #[error("file I/O error: {0}")]
-    FileIO(#[from] io::Error),
+    #[error(transparent)]
+    StdIo(#[from] std::io::Error),

     #[error("network request failed: {0}")]
     Request(String),

-    #[error("response parsing failed: {0}")]
-    ResponseParsing(String),
-
     #[error("unexpected response status: {actual} (expected {expected})")]
     ResponseStatus { actual: u16, expected: u16 },

+    #[error("response parsing failed: {0}")]
+    ResponseParsing(String),
+
     #[error("unexpected response content: {0}")]
     ResponseContent(String),
-    //
-    // #[error("could not parse offset header")]
-    // ResponseOffset,
 }

+impl ClientError {
+    pub fn req_err(msg: impl fmt::Display) -> Self {
+        Self::Request(msg.to_string())
+    }
+
+    pub fn res_parse_err(msg: impl fmt::Display) -> Self {
+        Self::ResponseParsing(msg.to_string())
+    }
+
+    pub fn res_status_check<T>(actual: T, expected: T) -> Result<()>
+    where
+        T: PartialEq + Into<u16> + Copy,
+    {
+        if actual == expected {
+            Ok(())
+        } else {
+            Err(Self::ResponseStatus {
+                actual: actual.into(),
+                expected: expected.into(),
+            })
+        }
+    }
+}
+
+impl From<ureq::Error> for ClientError {
+    fn from(value: ureq::Error) -> Self {
+        match value {
+            ureq::Error::StatusCode(status) => Self::ResponseStatus {
+                actual: status,
+                expected: 200,
+            },
+            ureq::Error::Io(e) => e.into(),
+            error => Self::req_err(error),
+        }
+    }
+}
+
 impl Client for ureq::Agent {
-    fn sharry_share_create(
+    fn share_create(
         &self,
-        uri: &Uri,
+        endpoint: &str,
         alias_id: &str,
         data: NewShareRequest,
-    ) -> Result<String, ClientError> {
-        let res = {
-            let endpoint = uri.get_endpoint("alias/upload/new");
-
-            self.post(endpoint)
-                .header("Sharry-Alias", alias_id)
-                .send_json(data)
-                .map_err(|e| ClientError::Request(e.to_string()))?
-                .body_mut()
-                .read_json::<NewShareResponse>()
-                .map_err(|e| ClientError::ResponseParsing(e.to_string()))?
-        };
-
-        debug!("response: {res:?}");
+    ) -> Result<String> {
+        let mut res = self
+            .post(endpoint)
+            .header("Sharry-Alias", alias_id)
+            .send_json(data)
+            .map_err(ClientError::from)?;
+
+        trace!("{endpoint:?} response: {res:?}");
+        ClientError::res_status_check(res.status(), ureq::http::StatusCode::OK)?;
+
+        let res = res
+            .body_mut()
+            .read_json::<NewShareResponse>()
+            .map_err(ClientError::res_parse_err)?;
+        debug!("{res:?}");

         if res.success && (res.message == "Share created.") {
             Ok(res.id)
@@ -82,64 +109,84 @@ impl Client for ureq::Agent {
         }
     }

-    fn sharry_share_notify(
-        &self,
-        uri: &Uri,
-        alias_id: &str,
-        share_id: &str,
-    ) -> Result<(), ClientError> {
-        let res = {
-            let endpoint = uri.get_endpoint(format!("alias/mail/notify/{}", share_id));
-
-            self.post(endpoint)
-                .header("Sharry-Alias", alias_id)
-                .send_empty()
-                .map_err(|e| ClientError::Request(e.to_string()))?
-                .body_mut()
-                .read_json::<NotifyShareResponse>()
-                .map_err(|e| ClientError::ResponseParsing(e.to_string()))?
-        };
-
-        debug!("response: {res:?}");
+    fn share_notify(&self, endpoint: &str, alias_id: &str) -> Result<()> {
+        let mut res = self
+            .post(endpoint)
+            .header("Sharry-Alias", alias_id)
+            .send_empty()
+            .map_err(ClientError::from)?;
+
+        trace!("{endpoint:?} response: {res:?}");
+        ClientError::res_status_check(res.status(), ureq::http::StatusCode::OK)?;
+
+        let res = res
+            .body_mut()
+            .read_json::<NotifyShareResponse>()
+            .map_err(ClientError::res_parse_err)?;
+        debug!("{res:?}");

         Ok(())
     }

-    fn sharry_file_create(
+    fn file_create(
         &self,
-        uri: &Uri,
+        endpoint: &str,
         alias_id: &str,
-        share_id: &str,
-        file: FileChecked,
-    ) -> Result<FileUploading, ClientError> {
-        let size = file.get_size();
-
-        let res = {
-            let endpoint = uri.get_endpoint(format!("alias/upload/{}/files/tus", share_id));
-
-            self.post(endpoint)
-                .header("Sharry-Alias", alias_id)
-                .header("Sharry-File-Name", file.get_name())
-                .header("Upload-Length", size)
-                .send_empty()
-                .map_err(|e| ClientError::Request(e.to_string()))?
-        };
-
-        if res.status() != ureq::http::StatusCode::CREATED {
-            return Err(ClientError::ResponseStatus {
-                actual: res.status().as_u16(),
-                expected: ureq::http::StatusCode::CREATED.as_u16(),
-            });
-        }
+        file_name: &str,
+        file_size: u64,
+    ) -> Result<String> {
+        let res = self
+            .post(endpoint)
+            .header("Sharry-Alias", alias_id)
+            .header("Sharry-File-Name", file_name)
+            .header("Upload-Length", file_size)
+            .send_empty()
+            .map_err(ClientError::from)?;
+
+        trace!("{endpoint:?} response: {res:?}");
+        ClientError::res_status_check(res.status(), ureq::http::StatusCode::CREATED)?;

         let location = (res.headers().get("Location"))
-            .ok_or_else(|| ClientError::ResponseParsing("Location header not found".to_owned()))?
+            .ok_or_else(|| ClientError::res_parse_err("Location header not found"))?
             .to_str()
-            .map_err(|_| ClientError::ResponseParsing("Location header invalid".to_owned()))?
+            .map_err(ClientError::res_parse_err)?
             .to_string();

-        debug!("patch uri: {location}");
-        Ok(FileUploading::new(file.into_path(), size, location))
+        debug!("{location:?}");
+        Ok(location)
+    }
+
+    fn file_patch(&self, endpoint: &str, alias_id: &str, offset: u64, chunk: &[u8]) -> Result<()> {
+        let res = self
+            .patch(endpoint)
+            .header("Sharry-Alias", alias_id)
+            .header("Upload-Offset", offset)
+            .send(chunk)
+            .map_err(ClientError::from)?;
+
+        trace!("{endpoint:?} response: {res:?}");
+        ClientError::res_status_check(res.status(), ureq::http::StatusCode::NO_CONTENT)?;
+
+        let res_offset = (res.headers().get("Upload-Offset"))
+            .ok_or_else(|| ClientError::res_parse_err("Upload-Offset header not found"))?
+            .to_str()
+            .map_err(ClientError::res_parse_err)?
+            .parse::<u64>()
+            .map_err(ClientError::res_parse_err)?;
+
+        // get chunk length as `u64` (we have checked while reading the chunk!)
+        let chunk_len = u64::try_from(chunk.len()).expect("something's VERY wrong");
+
+        if offset + chunk_len == res_offset {
+            Ok(())
+        } else {
+            Err(ClientError::ResponseContent(format!(
+                "Unexpected Upload-Offset: {} (expected {})",
+                res_offset,
+                offset + chunk_len
+            )))
+        }
     }
 }
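
Because the trait now works on plain endpoint and alias strings instead of pre-built ureq requests, it can also be satisfied by a test double. A minimal sketch, not part of this commit (the type name and canned return values are hypothetical):

#[cfg(test)]
struct DummyClient;

#[cfg(test)]
impl Client for DummyClient {
    fn share_create(&self, _endpoint: &str, _alias_id: &str, _data: NewShareRequest) -> Result<String> {
        Ok("dummy-share-id".to_owned())
    }

    fn share_notify(&self, _endpoint: &str, _alias_id: &str) -> Result<()> {
        Ok(())
    }

    fn file_create(
        &self,
        _endpoint: &str,
        _alias_id: &str,
        _file_name: &str,
        _file_size: u64,
    ) -> Result<String> {
        Ok("https://sharry.example.com/patch/0".to_owned())
    }

    fn file_patch(&self, _endpoint: &str, _alias_id: &str, _offset: u64, _chunk: &[u8]) -> Result<()> {
        // pretend the server accepted the whole chunk
        Ok(())
    }
}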


@@ -1,83 +0,0 @@
use std::{
ffi::OsStr,
fs, io,
path::{Path, PathBuf},
};
use log::debug;
use serde::{Deserialize, Serialize};
use ureq::http::{HeaderValue, StatusCode};
use super::{Alias, FileUploading, Share, SharryAlias, SharryFile};
#[derive(Debug, Clone, Hash, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct FileChecked {
path: PathBuf,
}
impl FileChecked {
pub fn new(value: impl AsRef<Path>) -> io::Result<Self> {
let meta = fs::metadata(&value)?;
if meta.is_file() {
Ok(Self {
path: fs::canonicalize(&value)?,
})
} else {
Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Not a regular file",
))
}
}
pub fn start_upload(
self,
http: &ureq::Agent,
alias: &Alias,
share: &Share,
) -> io::Result<FileUploading> {
let size = self.get_size();
let res = {
let endpoint = alias.get_endpoint(format!("alias/upload/{}/files/tus", share.id));
(http.post(endpoint))
.sharry_header(alias)
.header("Sharry-File-Name", self.get_name())
.header("Upload-Length", size)
.send_empty()
.map_err(ureq::Error::into_io)?
};
if res.status() != StatusCode::CREATED {
return Err(io::Error::other("unexpected response status"));
}
let location = (res.headers().get("Location"))
.ok_or_else(|| io::Error::other("Location header not found"))?
.to_str()
.map_err(|_| io::Error::other("Location header invalid"))?
.to_string();
debug!("patch uri: {location}");
Ok(FileUploading::new(self.path, size, location))
}
}
impl<'t> SharryFile<'t> for FileChecked {
fn into_path(self) -> PathBuf {
self.path
}
/// get a reference to the file's name
///
/// Uses `SharryFile::extract_file_name`, which may **panic**!
fn get_name(&'t self) -> &'t str {
<Self as SharryFile>::extract_file_name(&self.path)
}
fn get_size(&self) -> u64 {
fs::metadata(&self.path).unwrap().len()
}
}


@@ -1,149 +0,0 @@
use std::{
ffi::OsStr,
fmt, fs,
io::{self, Read, Seek, SeekFrom},
path::PathBuf,
};
use log::debug;
use serde::{Deserialize, Serialize};
use ureq::http::{HeaderValue, StatusCode};
use super::{Alias, SharryAlias, SharryFile};
#[derive(Serialize, Deserialize, Debug)]
pub struct FileUploading {
path: PathBuf,
size: u64,
uri: String,
offset: u64,
}
#[derive(Debug, thiserror::Error)]
pub enum UploadError {
#[error("file I/O error: {0}")]
FileIO(#[from] io::Error),
#[error("network request failed")]
Request,
#[error("unexpected response status")]
ResponseStatus,
#[error("could not parse offset header")]
ResponseOffset,
// #[error("chunk length conversion failed: {0}")]
// InvalidChunkLength(String),
// #[error("offset mismatch")]
// ResponseOffsetMismatch,
}
pub enum ChunkState {
Ok(FileUploading),
Err(FileUploading, UploadError),
Finished(PathBuf),
}
impl fmt::Display for FileUploading {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"Uploading {:?} ({}/{})",
self.path.display(),
self.offset,
self.size
)
}
}
impl FileUploading {
pub fn new(path: PathBuf, size: u64, uri: String) -> Self {
Self {
path,
size,
uri,
offset: 0,
}
}
pub fn get_offset(&self) -> u64 {
self.offset
}
fn read_chunk(&self, chunk_size: usize) -> io::Result<Vec<u8>> {
let mut f = fs::File::open(&self.path)?;
f.seek(SeekFrom::Start(self.offset))?;
let mut bytes = vec![0; chunk_size];
let read_len = f.read(&mut bytes)?;
bytes.truncate(read_len);
Ok(bytes)
}
pub fn upload_chunk(
mut self,
http: &ureq::Agent,
alias: &Alias,
chunk_size: usize,
) -> ChunkState {
let chunk = match self.read_chunk(chunk_size) {
Err(e) => return ChunkState::Err(self, UploadError::FileIO(e)),
Ok(value) => value,
};
let Ok(res) = (http.patch(&self.uri))
.sharry_header(alias)
.header("Upload-Offset", self.offset)
.send(&chunk)
else {
return ChunkState::Err(self, UploadError::Request);
};
if res.status() != StatusCode::NO_CONTENT {
return ChunkState::Err(self, UploadError::ResponseStatus);
}
let Some(Ok(Ok(res_offset))) = (res.headers().get("Upload-Offset"))
.map(HeaderValue::to_str)
.map(|v| v.map(str::parse::<u64>))
else {
return ChunkState::Err(self, UploadError::ResponseOffset);
};
// convert chunk.len() into an `u64`
//
// BOOKMARK this might panic on platforms where `usize` > 64 bit.
// Also whoa, you've sent more than 2 EiB ... in ONE chunk.
// Maybe just chill?
let chunk_len = u64::try_from(chunk.len())
.unwrap_or_else(|e| panic!("usize={} did not fit into u64: {}", chunk.len(), e));
if self.offset + chunk_len != res_offset {
return ChunkState::Err(self, UploadError::ResponseOffset);
}
self.offset = res_offset;
if self.offset == self.size {
return ChunkState::Finished(self.path);
}
ChunkState::Ok(self)
}
}
impl<'t> SharryFile<'t> for FileUploading {
fn into_path(self) -> PathBuf {
self.path
}
fn get_name(&'t self) -> &'t str {
<Self as SharryFile>::extract_file_name(&self.path)
}
fn get_size(&self) -> u64 {
self.size
}
}


@@ -1,13 +1,5 @@
-#![allow(unused_imports)]
-
-mod alias;
 mod api;
 mod client;
-mod file;
-mod share;

-pub use alias::Alias;
 pub use api::{NewShareRequest, Uri};
-// pub use client::SharryClient;
-pub use file::{ChunkState, FileChecked, FileUploading, SharryFile, UploadError};
-pub use share::Share;
+pub use client::{Client, ClientError, Result};


@@ -1,48 +0,0 @@
use log::debug;
use serde::{Deserialize, Serialize};
use super::{
alias::{Alias, SharryAlias},
api::{NewShareRequest, NewShareResponse, NotifyShareResponse},
};
#[derive(Serialize, Deserialize, Debug)]
pub struct Share {
pub(super) id: String,
}
impl Share {
pub fn create(
http: &ureq::Agent,
alias: &Alias,
data: NewShareRequest,
) -> Result<Self, ureq::Error> {
let res = (http.post(alias.get_endpoint("alias/upload/new")))
.sharry_header(alias)
.send_json(data)?
.body_mut()
.read_json::<NewShareResponse>()?;
debug!("response: {res:?}");
if !(res.success && (res.message == "Share created.")) {
return Err(ureq::Error::Other("unexpected json response".into()));
}
Ok(Self { id: res.id })
}
pub fn notify(&self, http: &ureq::Agent, alias: &Alias) -> Result<(), ureq::Error> {
let endpoint = alias.get_endpoint(format!("alias/mail/notify/{}", self.id));
let res = (http.post(endpoint))
.sharry_header(alias)
.send_empty()?
.body_mut()
.read_json::<NotifyShareResponse>()?;
debug!("response: {res:?}");
Ok(())
}
}