Merge branch 'feature/sharry_client' into develop

Jörn-Michael Miehe 2025-06-12 23:07:25 +00:00
commit 4b650fd82f
15 changed files with 670 additions and 619 deletions

@@ -1,138 +1,84 @@
use std::{
collections::VecDeque,
fs,
io::{self, Write},
path::{Path, PathBuf},
cell::{Ref, RefCell},
fmt, io,
time::Duration,
};
use console::style;
use indicatif::{ProgressBar, ProgressStyle};
use log::{debug, trace};
use serde::{Deserialize, Serialize};
use log::debug;
use super::{
cachefile::CacheFile,
cli::Cli,
sharry::{Alias, ChunkState, FileChecked, FileUploading, Share, SharryFile, UploadError},
file::{self, FileTrait},
sharry::{self, Client, ClientError},
};
#[derive(Serialize, Deserialize, Debug)]
pub struct AppState {
#[serde(skip)]
file_name: PathBuf,
#[serde(skip)]
progress: Option<ProgressBar>,
current_bar: RefCell<Option<ProgressBar>>,
buffer: Vec<u8>,
alias: Alias,
share: Share,
files: VecDeque<FileState>,
http: ureq::Agent,
inner: CacheFile,
}
#[derive(Serialize, Deserialize, Debug)]
enum FileState {
C(FileChecked),
U(FileUploading),
impl fmt::Debug for AppState {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("AppState")
.field("inner", &self.inner)
.finish_non_exhaustive()
}
}
impl FileState {
fn file_name(&self) -> &str {
match self {
FileState::C(checked) => checked.get_name(),
FileState::U(uploading) => uploading.get_name(),
}
}
fn start_upload(
self,
http: &ureq::Agent,
alias: &Alias,
share: &Share,
) -> io::Result<FileUploading> {
match self {
FileState::C(checked) => checked.start_upload(http, alias, share),
FileState::U(uploading) => Ok(uploading),
}
}
fn new_http(timeout: Option<Duration>) -> ureq::Agent {
ureq::Agent::config_builder()
.timeout_global(timeout)
.build()
.into()
}
impl AppState {
fn cache_dir() -> PathBuf {
let dir_name = dirs_next::cache_dir()
.expect("could not determine cache directory")
.join("shrupl");
trace!("cachedir: {:?}", dir_name.display());
dir_name
fn new(chunk_size: usize, http: ureq::Agent, inner: CacheFile) -> Self {
Self {
current_bar: None.into(),
buffer: vec![0; chunk_size * 1024 * 1024],
http,
inner,
}
fn cache_file(args: &Cli) -> PathBuf {
let file_name = Self::cache_dir().join(format!("{}.json", args.get_hash()));
trace!("cachefile: {:?}", file_name.display());
file_name
}
fn load(file_name: impl AsRef<Path>) -> io::Result<Self> {
let content = fs::read_to_string(file_name)?;
serde_json::from_str(&content).map_err(io::Error::other)
}
pub fn try_resume(args: &Cli) -> Option<Self> {
let file_name = Self::cache_file(args);
let inner = CacheFile::try_resume(args)
.inspect_err(|e| debug!("could not resume from hash {:?}: {e}", args.get_hash()))
.ok()?;
Self::load(&file_name)
.inspect_err(|e| debug!("could not resume from {:?}: {e}", file_name.display()))
.map(|state| {
debug!("successfully loaded AppState");
Self {
file_name,
progress: None,
alias: state.alias,
share: state.share,
files: state.files,
}
})
.ok()
Some(Self::new(
args.chunk_size,
new_http(args.get_timeout()),
inner,
))
}
pub fn from_args(args: &Cli, http: &ureq::Agent) -> Result<Self, ureq::Error> {
let file_name = Self::cache_file(args);
let alias = args.get_alias();
pub fn from_args(args: &Cli) -> sharry::Result<Self> {
let http = new_http(args.get_timeout());
let share = Share::create(http, &alias, args.get_share_request())?;
let share_id = http.share_create(
&args.get_uri().endpoint("alias/upload/new"),
&args.alias,
args.get_share_request(),
)?;
let files: VecDeque<_> = args.files.clone().into_iter().map(FileState::C).collect();
Ok(Self {
file_name,
progress: None,
alias,
share,
files,
})
Ok(Self::new(
args.chunk_size,
http,
CacheFile::from_args(args, share_id),
))
}
pub fn file_names(&self) -> Vec<&str> {
self.files.iter().map(FileState::file_name).collect()
}
pub fn upload_chunk(
&mut self,
http: &ureq::Agent,
chunk_size: usize,
) -> Result<Option<()>, UploadError> {
let uploading = if let Some(state) = self.files.pop_front() {
state.start_upload(http, &self.alias, &self.share).unwrap() // HACK unwrap
} else {
return Ok(None);
};
debug!("{uploading} chunk {chunk_size}");
// Initialize or fetch the existing ProgressBar in one call:
let bar = &*self.progress.get_or_insert_with(|| {
// Create a new bar with style
fn get_or_create_progressbar(&self, uploading: &file::Uploading) -> Ref<'_, ProgressBar> {
let mut slot = self.current_bar.borrow_mut();
if slot.is_none() {
let bar = ProgressBar::new(uploading.get_size())
.with_style(
ProgressStyle::with_template(&format!(
@@ -143,51 +89,80 @@ impl AppState {
),
style("/").magenta(),
))
.unwrap(),
.unwrap(), // safe as long as the style template is valid
)
.with_message(uploading.get_name().to_owned())
.with_position(uploading.get_offset());
.with_position(uploading.get_offset())
.with_message(uploading.get_name().to_owned());
bar.enable_steady_tick(Duration::from_millis(100));
bar
});
*slot = Some(bar);
}
drop(slot);
match uploading.upload_chunk(http, &self.alias, chunk_size) {
ChunkState::Ok(upl) => {
bar.set_position(upl.get_offset());
self.files.push_front(FileState::U(upl));
// unwrap is safe: We just made sure it's `Some`.
Ref::map(self.current_bar.borrow(), |opt| opt.as_ref().unwrap())
}
fn finish_bar(&self) {
let mut slot = self.current_bar.borrow_mut();
if let Some(bar) = &*slot {
bar.finish();
*slot = None;
}
}
pub fn upload_chunk(&mut self) -> sharry::Result<Option<()>> {
let Some(mut uploading) = self.inner.pop_file(&self.http) else {
self.inner.share_notify(&self.http).unwrap(); // HACK unwrap
return Ok(None);
};
self.get_or_create_progressbar(&uploading);
debug!("{uploading} chunk {}", self.buffer.len());
let chunk = uploading
.read(&mut self.buffer)
.map_err(ClientError::from)?;
self.http.file_patch(
chunk.get_patch_uri(),
self.inner.alias_id(),
chunk.get_offset(),
chunk.get_data(),
)?;
match uploading.check_eof() {
Ok(uploading) => {
let bar = self.get_or_create_progressbar(&uploading);
bar.set_position(uploading.get_offset());
// BUG in `indicatif` crate?
// `set_position` does not force immediate redraw, so we also call `inc_length` here
bar.inc_length(0);
drop(bar);
self.inner.push_file(uploading);
Ok(Some(()))
}
ChunkState::Err(upl, e) => {
self.files.push_front(FileState::U(upl));
Err(e)
}
ChunkState::Finished(path) => {
Err(path) => {
debug!("Finished {:?}!", path.display());
bar.finish();
self.progress = None;
self.share.notify(http, &self.alias).unwrap(); // HACK unwrap
self.finish_bar();
Ok(self.files.front().map(drop))
Ok(self.inner.has_file().then_some(()))
}
}
}
pub fn file_names(&self) -> Vec<&str> {
self.inner.file_names()
}
pub fn save(&self) -> io::Result<()> {
fs::create_dir_all(Self::cache_dir())?;
let json = serde_json::to_string_pretty(self).map_err(io::Error::other)?;
let mut file = fs::File::create(&self.file_name)?;
file.write_all(json.as_bytes())?;
trace!("updated {:?}", self.file_name.display());
Ok(())
self.inner.save()
}
pub fn clear(self) -> io::Result<()> {
fs::remove_file(&self.file_name)?;
trace!("removed {:?}", self.file_name.display());
Ok(())
self.inner.clear()
}
}
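
The progress-bar handling above keeps the bar in a `RefCell<Option<ProgressBar>>` so that `&self` methods can lazily create it and hand out a `Ref`. A minimal standalone sketch of that interior-mutability pattern, using a plain `String` payload and a hypothetical `LazySlot` type instead of `indicatif`:

use std::cell::{Ref, RefCell};

struct LazySlot {
    slot: RefCell<Option<String>>,
}

impl LazySlot {
    fn get_or_create(&self, make: impl FnOnce() -> String) -> Ref<'_, String> {
        {
            // keep the mutable borrow confined to this block
            let mut slot = self.slot.borrow_mut();
            if slot.is_none() {
                *slot = Some(make());
            }
        }
        // unwrap is safe: the slot was filled just above
        Ref::map(self.slot.borrow(), |opt| opt.as_ref().unwrap())
    }

    fn finish(&self) {
        // dropping the value mirrors `finish_bar` resetting the slot
        self.slot.borrow_mut().take();
    }
}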

src/cachefile.rs (new file, 149 lines)
@@ -0,0 +1,149 @@
use std::{
collections::VecDeque,
fs,
io::{self, Write},
path::PathBuf,
};
use log::trace;
use serde::{Deserialize, Serialize};
use super::{
cli::Cli,
file::{self, FileTrait},
sharry::{self, Client, Uri},
};
#[derive(Serialize, Deserialize, Debug)]
enum FileState {
C(file::Checked),
U(file::Uploading),
}
impl FileState {
fn file_name(&self) -> &str {
match self {
FileState::C(c) => c.get_name(),
FileState::U(u) => u.get_name(),
}
}
fn start_upload(
self,
http: &impl Client,
endpoint: &str,
alias_id: &str,
) -> sharry::Result<file::Uploading> {
match self {
FileState::C(checked) => checked.start_upload(http, endpoint, alias_id),
FileState::U(uploading) => Ok(uploading),
}
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct CacheFile {
#[serde(skip)]
file_name: PathBuf,
uri: Uri,
alias_id: String,
share_id: String,
files: VecDeque<FileState>,
}
impl CacheFile {
fn cache_dir() -> PathBuf {
let dir_name = dirs_next::cache_dir()
.expect("could not determine cache directory")
.join("shrupl");
trace!("cachedir: {:?}", dir_name.display());
dir_name
}
fn cache_file(args: &Cli) -> PathBuf {
let file_name = Self::cache_dir().join(format!("{}.json", args.get_hash()));
trace!("cachefile: {:?}", file_name.display());
file_name
}
pub fn try_resume(args: &Cli) -> io::Result<Self> {
let file_name = Self::cache_file(args);
let state: Self = {
let file = fs::File::open(&file_name)?;
let reader = io::BufReader::new(file);
serde_json::from_reader(reader).map_err(io::Error::other)?
};
Ok(Self { file_name, ..state })
}
pub fn from_args(args: &Cli, share_id: String) -> Self {
Self {
file_name: Self::cache_file(args),
uri: args.get_uri(),
alias_id: args.alias.clone(),
share_id,
files: args.files.clone().into_iter().map(FileState::C).collect(),
}
}
pub fn alias_id(&self) -> &str {
&self.alias_id
}
pub fn file_names(&self) -> Vec<&str> {
self.files.iter().map(FileState::file_name).collect()
}
pub fn has_file(&self) -> bool {
!self.files.is_empty()
}
pub fn pop_file(&mut self, http: &impl Client) -> Option<file::Uploading> {
if let Some(state) = self.files.pop_front() {
let endpoint = self
.uri
.endpoint(format!("alias/upload/{}/files/tus", self.share_id));
Some(state.start_upload(http, &endpoint, &self.alias_id).unwrap()) // HACK unwrap
} else {
None
}
}
pub fn push_file(&mut self, file: file::Uploading) {
self.files.push_front(FileState::U(file));
}
pub fn share_notify(&self, http: &impl Client) -> sharry::Result<()> {
let endpoint = self
.uri
.endpoint(format!("alias/mail/notify/{}", self.share_id));
http.share_notify(&endpoint, &self.alias_id)
}
pub fn save(&self) -> io::Result<()> {
let cache_dir = self.file_name.parent().ok_or_else(|| {
io::Error::other(format!("orphan file {:?}", self.file_name.display()))
})?;
fs::create_dir_all(cache_dir)?;
let json = serde_json::to_string_pretty(self).map_err(io::Error::other)?;
let mut file = fs::File::create(&self.file_name)?;
file.write_all(json.as_bytes())?;
trace!("updated {:?}", self.file_name.display());
Ok(())
}
pub fn clear(self) -> io::Result<()> {
fs::remove_file(&self.file_name)?;
trace!("removed {:?}", self.file_name.display());
Ok(())
}
}
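
`CacheFile` marks `file_name` with `#[serde(skip)]` and re-injects it via struct-update syntax after deserializing, so only the resumable upload state ends up in the JSON cache file. A self-contained sketch of that pattern, assuming `serde`/`serde_json` as in the commit and a hypothetical `Demo` struct:

use std::path::PathBuf;

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Demo {
    #[serde(skip)] // never written to disk; defaults on load
    file_name: PathBuf,
    share_id: String,
}

fn load(file_name: PathBuf, json: &str) -> serde_json::Result<Demo> {
    let state: Demo = serde_json::from_str(json)?;
    // restore the runtime-only field that was skipped during (de)serialization
    Ok(Demo { file_name, ..state })
}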

@@ -1,13 +1,17 @@
use std::{
fmt,
hash::{DefaultHasher, Hash, Hasher},
time::Duration,
};
use clap::{Parser, builder::PossibleValuesParser};
use super::sharry::{Alias, FileChecked, NewShareRequest, Uri};
use super::{
file::Checked,
sharry::{NewShareRequest, Uri},
};
#[derive(Parser, Debug, Hash)]
#[derive(Parser, Hash)]
#[command(version, about, long_about = None)]
pub struct Cli {
/// Timeout in seconds for HTTP actions (set 0 or invalid to disable)
@@ -46,19 +50,33 @@ pub struct Cli {
url: String,
/// ID of a public alias to use
alias: String,
pub alias: String,
/// Files to upload to the new share
#[arg(value_name = "FILE", required = true, value_parser = parse_sharry_file)]
pub files: Vec<FileChecked>,
pub files: Vec<Checked>,
}
impl fmt::Debug for Cli {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Cli")
.field("uri", &self.get_uri())
.field("alias", &self.alias)
.field("timeout", &self.get_timeout())
.field("chunk_size", &self.chunk_size)
.field("share_request", &self.get_share_request())
.field("files", &self.files)
.field("hash", &self.get_hash())
.finish_non_exhaustive()
}
}
fn parse_seconds(data: &str) -> Result<Duration, String> {
data.parse().or(Ok(0)).map(Duration::from_secs)
}
fn parse_sharry_file(data: &str) -> Result<FileChecked, String> {
FileChecked::new(data).map_err(|e| e.to_string())
fn parse_sharry_file(data: &str) -> Result<Checked, String> {
Checked::new(data).map_err(|e| e.to_string())
}
impl Cli {
@@ -66,8 +84,8 @@ impl Cli {
(!self.timeout.is_zero()).then_some(self.timeout)
}
pub fn get_alias(&self) -> Alias {
Alias::new(Uri::with_protocol(&self.protocol, &self.url), &self.alias)
pub fn get_uri(&self) -> Uri {
Uri::with_protocol(&self.protocol, &self.url)
}
pub fn get_share_request(&self) -> NewShareRequest {
@@ -83,7 +101,7 @@ impl Cli {
};
let mut hasher = DefaultHasher::new();
(self.get_alias(), file_refs).hash(&mut hasher);
(self.get_uri(), &self.alias, file_refs).hash(&mut hasher);
format!("{:x}", hasher.finish())
}
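
`get_hash` feeds the URI, alias and file list through a `DefaultHasher` and renders the result as hex; that string becomes the `<hash>.json` cache-file name under the shrupl cache directory. A condensed sketch of the same idea as a free function (the `&str` parameters are a simplification of the real `Uri`/`Checked` types):

use std::hash::{DefaultHasher, Hash, Hasher};

fn cache_key(uri: &str, alias: &str, files: &[&str]) -> String {
    let mut hasher = DefaultHasher::new();
    // same tuple shape as `Cli::get_hash`: (uri, alias, file references)
    (uri, alias, files).hash(&mut hasher);
    format!("{:x}", hasher.finish())
}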

src/file/checked.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
use std::{
fs, io,
path::{Path, PathBuf},
};
use serde::{Deserialize, Serialize};
use crate::sharry;
use super::{FileTrait, Uploading};
#[derive(Debug, Clone, Hash, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct Checked {
path: PathBuf,
size: u64,
}
impl Checked {
pub fn new(value: impl AsRef<Path>) -> io::Result<Self> {
let meta = fs::metadata(&value)?;
if meta.is_file() {
Ok(Self {
path: fs::canonicalize(&value)?,
size: meta.len(),
})
} else {
Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Not a regular file",
))
}
}
pub fn start_upload(
self,
client: &impl sharry::Client,
endpoint: &str,
alias_id: &str,
) -> sharry::Result<Uploading> {
let patch_uri = client.file_create(endpoint, alias_id, self.get_name(), self.size)?;
Ok(Uploading::new(self.path, self.size, patch_uri))
}
}
impl<'t> FileTrait<'t> for Checked {
/// get a reference to the file's name
///
/// Uses `SharryFile::extract_file_name`, which may **panic**!
fn get_name(&'t self) -> &'t str {
<Self as FileTrait>::extract_file_name(&self.path)
}
fn get_size(&self) -> u64 {
self.size
}
}

src/file/chunk.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
pub struct Chunk<'t> {
data: &'t [u8],
patch_uri: &'t str,
offset: u64,
}
impl<'t> Chunk<'t> {
pub fn new(data: &'t [u8], patch_uri: &'t str, offset: u64) -> Self {
Self {
data,
patch_uri,
offset,
}
}
pub fn get_data(&self) -> &[u8] {
self.data
}
pub fn get_length(&self) -> u64 {
let len = self.data.len();
// BOOKMARK this might **panic** on platforms where `usize` has more than 64 bit.
// Also, you've allocated more than 2 EiB ... in ONE chunk.
// Whoa! Maybe just chill?
u64::try_from(len).unwrap_or_else(|e| panic!("usize={len} did not fit into u64: {e}"))
}
pub fn get_patch_uri(&self) -> &str {
self.patch_uri
}
pub fn get_offset(&self) -> u64 {
self.offset
}
}

@@ -1,17 +1,14 @@
mod checked;
mod chunk;
mod uploading;
use std::{
ffi::OsStr,
path::{Path, PathBuf},
};
use std::{ffi::OsStr, path::Path};
pub use checked::FileChecked;
pub use uploading::{ChunkState, FileUploading, UploadError};
pub use checked::Checked;
pub use chunk::Chunk;
pub use uploading::Uploading;
use super::{Alias, Share, alias::SharryAlias};
pub trait SharryFile<'t> {
pub trait FileTrait<'t> {
/// extract the filename part of a `Path` reference
///
/// # Panics
@@ -23,8 +20,6 @@ pub trait SharryFile<'t> {
.expect("bad file name")
}
fn into_path(self) -> PathBuf;
fn get_name(&'t self) -> &'t str;
fn get_size(&self) -> u64;

src/file/uploading.rs (new file, 84 lines)
@@ -0,0 +1,84 @@
use std::{
fmt, fs,
io::{self, Read, Seek, SeekFrom},
path::PathBuf,
};
use serde::{Deserialize, Serialize};
use super::{Chunk, FileTrait};
#[derive(Serialize, Deserialize, Debug)]
pub struct Uploading {
path: PathBuf,
size: u64,
patch_uri: String,
offset: u64,
}
impl fmt::Display for Uploading {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"Uploading {:?} ({}/{})",
self.path.display(),
self.offset,
self.size
)
}
}
impl Uploading {
pub(super) fn new(path: PathBuf, size: u64, patch_uri: String) -> Self {
Self {
path,
size,
patch_uri,
offset: 0,
}
}
pub fn get_offset(&self) -> u64 {
self.offset
}
pub fn read<'t>(&'t mut self, buf: &'t mut [u8]) -> io::Result<Chunk<'t>> {
let mut f = fs::File::open(&self.path)?;
f.seek(SeekFrom::Start(self.offset))?;
let read_len = f.read(buf)?;
if read_len == 0 {
return Err(io::Error::new(
io::ErrorKind::UnexpectedEof,
format!("could not read from file {:?}", self.path.display()),
));
}
let chunk = Chunk::new(&buf[..read_len], &self.patch_uri, self.offset);
self.offset += chunk.get_length();
Ok(chunk)
}
pub fn check_eof(self) -> Result<Self, PathBuf> {
if self.offset < self.size {
Ok(self)
} else {
Err(self.path)
}
}
}
impl<'t> FileTrait<'t> for Uploading {
/// get a reference to the file's name
///
/// Uses `SharryFile::extract_file_name`, which may **panic**!
fn get_name(&'t self) -> &'t str {
<Self as FileTrait>::extract_file_name(&self.path)
}
fn get_size(&self) -> u64 {
self.size
}
}
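
`read` and `check_eof` are meant to be driven in a loop: read a chunk into a caller-owned buffer, send it, then either keep the `Uploading` value or learn the file is finished. A hedged sketch of that loop, assuming `Uploading` from this module is in scope; `send_chunk` is a placeholder standing in for the real `Client::file_patch` call, not an actual API:

fn send_chunk(_patch_uri: &str, _offset: u64, _data: &[u8]) {
    // placeholder for the HTTP PATCH performed by `Client::file_patch`
}

fn drain(mut uploading: Uploading, buf: &mut [u8]) -> std::io::Result<()> {
    loop {
        let chunk = uploading.read(buf)?; // borrows `buf`, advances the offset
        send_chunk(chunk.get_patch_uri(), chunk.get_offset(), chunk.get_data());
        match uploading.check_eof() {
            Ok(more) => uploading = more, // bytes left, keep going
            Err(path) => {
                // whole file sent; only the path survives
                println!("finished {:?}", path.display());
                return Ok(());
            }
        }
    }
}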

@@ -1,9 +1,11 @@
mod appstate;
mod cachefile;
mod cli;
mod file;
mod sharry;
use std::{
process::exit,
process,
sync::{
Arc,
atomic::{AtomicBool, Ordering},
@@ -14,37 +16,65 @@ use clap::Parser;
use console::style;
use dialoguer::{Confirm, theme::ColorfulTheme};
use log::{error, info};
use ureq::Agent;
use appstate::AppState;
use cli::Cli;
use sharry::ClientError;
fn print_error(e: &ClientError) {
if let Some(cause) = match e {
// known errors
ClientError::ResponseStatus {
actual: 403,
expected: _,
} => Some("Alias ID"),
ClientError::StdIo(_) => Some("URL"),
// unknown error
_ => None,
} {
// handle known error
info!("known error: {e:?}");
println!(
"{} probably wrong: {}",
style("Error!").red().bold(),
style(cause).cyan(),
);
println!("{}", style(e.to_string()).yellow().italic());
} else {
// handle unknown error
error!("unknown error: {e} ({e:?})");
println!("{}", style("Unknown Error!").red().bold());
}
}
fn main() {
env_logger::init();
println!(
"{} to {}!",
style("Welcome").magenta().bold(),
style("ShrUpl").yellow().bold(),
);
let check_ctrlc = {
let stop = Arc::new(AtomicBool::new(false));
let stop_ctrlc = stop.clone();
ctrlc::set_handler(move || {
stop_ctrlc.store(true, Ordering::SeqCst);
info!("stopping as soon as possible ...");
})
.expect("Error setting Ctrl-C handler");
env_logger::init();
move || {
if stop.load(Ordering::SeqCst) {
process::exit(255);
}
}
};
let args = Cli::parse();
info!("args: {args:?}");
info!("timeout: {:?}", args.get_timeout());
let agent: Agent = Agent::config_builder()
.timeout_global(args.get_timeout())
.build()
.into();
let mut state = AppState::try_resume(&args)
.and_then(|state| {
@@ -55,46 +85,30 @@ fn main() {
.map_or(None, |b| b.then_some(state))
})
.unwrap_or_else(|| {
stop.load(Ordering::SeqCst).then(|| exit(0));
check_ctrlc();
match AppState::from_args(&args, &agent) {
match AppState::from_args(&args) {
Ok(state) => {
state.save().unwrap(); // HACK unwrap
state
}
Err(e) => {
if let Some(cause) = match e {
ureq::Error::StatusCode(403) => Some("Alias ID"),
ureq::Error::Io(_) => Some("URL"),
_ => None,
} {
info!("handling error: {e:?}");
println!(
"{} probably wrong: {} {:?}",
style("Error!").red().bold(),
style(cause).cyan().italic(),
style(e.to_string()).yellow()
);
} else {
error!("unknown error: {e} {e:?}");
println!("{}", style("Unknown Error!").red().bold());
}
exit(1);
print_error(&e);
process::exit(1);
}
}
});
info!("continuing with state: {state:?}");
println!(
"{} uploading: {}",
style("ShrUpl").yellow().bold(),
style(state.file_names().join(", ")).magenta(),
);
info!("continuing with state: {state:?}");
loop {
match state.upload_chunk(&agent, args.chunk_size * 1024 * 1024) {
match state.upload_chunk() {
Err(e) => error!("error: {e:?}"),
Ok(None) => {
info!("all uploads done");
@@ -105,6 +119,6 @@ fn main() {
}
state.save().unwrap(); // HACK unwrap
stop.load(Ordering::SeqCst).then(|| exit(0));
check_ctrlc();
}
}

@@ -1,36 +0,0 @@
use std::fmt::{Debug, Display};
use log::debug;
use serde::{Deserialize, Serialize};
use ureq::RequestBuilder;
use super::api::Uri;
#[derive(Serialize, Deserialize, Debug, Hash)]
pub struct Alias {
pub(super) uri: Uri,
pub(super) id: String,
}
pub(super) trait SharryAlias {
fn sharry_header(self, alias: &Alias) -> Self;
}
impl<B> SharryAlias for RequestBuilder<B> {
fn sharry_header(self, alias: &Alias) -> Self {
self.header("Sharry-Alias", &alias.id)
}
}
impl Alias {
pub fn new(uri: Uri, id: impl Into<String>) -> Self {
Self { uri, id: id.into() }
}
pub(super) fn get_endpoint(&self, endpoint: impl Display + Debug) -> String {
let uri = format!("{}/{}", self.uri, endpoint);
debug!("endpoint uri: {uri:?}");
uri
}
}

@@ -10,19 +10,19 @@ pub struct Uri {
}
impl Uri {
pub(super) fn get_endpoint(&self, endpoint: impl fmt::Display + fmt::Debug) -> String {
let uri = format!("{}/{}", self, endpoint);
debug!("endpoint uri: {uri:?}");
uri
}
pub fn with_protocol(protocol: impl Into<String>, base_url: impl Into<String>) -> Self {
Self {
protocol: protocol.into(),
base_url: base_url.into(),
}
}
pub fn endpoint(&self, endpoint: impl fmt::Display) -> String {
let uri = format!("{self}/{endpoint}");
debug!("endpoint: {uri:?}");
uri
}
}
impl fmt::Display for Uri {
@@ -31,7 +31,7 @@ impl fmt::Display for Uri {
}
}
#[derive(Serialize)]
#[derive(Serialize, Debug)]
#[allow(non_snake_case)]
pub struct NewShareRequest {
name: String,

@@ -1,79 +1,106 @@
use std::{error::Error, io};
use std::fmt;
use log::debug;
use log::{debug, trace};
use thiserror::Error;
use super::{
api::{NewShareRequest, NewShareResponse, NotifyShareResponse, Uri},
file::{FileChecked, FileUploading, SharryFile},
};
use super::api::{NewShareRequest, NewShareResponse, NotifyShareResponse};
pub type Result<T> = std::result::Result<T, ClientError>;
pub trait Client {
fn sharry_share_create(
&self,
uri: &Uri,
alias_id: &str,
data: NewShareRequest,
) -> Result<String, ClientError>;
fn share_create(&self, endpoint: &str, alias_id: &str, data: NewShareRequest)
-> Result<String>;
fn sharry_share_notify(
&self,
uri: &Uri,
alias_id: &str,
share_id: &str,
) -> Result<(), ClientError>;
fn share_notify(&self, endpoint: &str, alias_id: &str) -> Result<()>;
fn sharry_file_create(
fn file_create(
&self,
uri: &Uri,
endpoint: &str,
alias_id: &str,
share_id: &str,
file: FileChecked,
) -> Result<FileUploading, ClientError>;
file_name: &str,
file_size: u64,
) -> Result<String>;
// fn sharry_file_patch(&self);
fn file_patch(&self, endpoint: &str, alias_id: &str, offset: u64, chunk: &[u8]) -> Result<()>;
}
#[derive(Debug, thiserror::Error)]
#[derive(Debug, Error)]
pub enum ClientError {
#[error("file I/O error: {0}")]
FileIO(#[from] io::Error),
#[error(transparent)]
StdIo(#[from] std::io::Error),
#[error("network request failed: {0}")]
Request(String),
#[error("response parsing failed: {0}")]
ResponseParsing(String),
#[error("unexpected response status: {actual} (expected {expected})")]
ResponseStatus { actual: u16, expected: u16 },
#[error("response parsing failed: {0}")]
ResponseParsing(String),
#[error("unexpected response content: {0}")]
ResponseContent(String),
//
// #[error("could not parse offset header")]
// ResponseOffset,
}
impl ClientError {
pub fn req_err(msg: impl fmt::Display) -> Self {
Self::Request(msg.to_string())
}
pub fn res_parse_err(msg: impl fmt::Display) -> Self {
Self::ResponseParsing(msg.to_string())
}
pub fn res_status_check<T>(actual: T, expected: T) -> Result<()>
where
T: PartialEq + Into<u16> + Copy,
{
if actual == expected {
Ok(())
} else {
Err(Self::ResponseStatus {
actual: actual.into(),
expected: expected.into(),
})
}
}
}
impl From<ureq::Error> for ClientError {
fn from(value: ureq::Error) -> Self {
match value {
ureq::Error::StatusCode(status) => Self::ResponseStatus {
actual: status,
expected: 200,
},
ureq::Error::Io(e) => e.into(),
error => Self::req_err(error),
}
}
}
impl Client for ureq::Agent {
fn sharry_share_create(
fn share_create(
&self,
uri: &Uri,
endpoint: &str,
alias_id: &str,
data: NewShareRequest,
) -> Result<String, ClientError> {
let res = {
let endpoint = uri.get_endpoint("alias/upload/new");
self.post(endpoint)
) -> Result<String> {
let mut res = self
.post(endpoint)
.header("Sharry-Alias", alias_id)
.send_json(data)
.map_err(|e| ClientError::Request(e.to_string()))?
.map_err(ClientError::from)?;
trace!("{endpoint:?} response: {res:?}");
ClientError::res_status_check(res.status(), ureq::http::StatusCode::OK)?;
let res = res
.body_mut()
.read_json::<NewShareResponse>()
.map_err(|e| ClientError::ResponseParsing(e.to_string()))?
};
.map_err(ClientError::res_parse_err)?;
debug!("response: {res:?}");
debug!("{res:?}");
if res.success && (res.message == "Share created.") {
Ok(res.id)
@@ -82,64 +109,84 @@ impl Client for ureq::Agent {
}
}
fn sharry_share_notify(
&self,
uri: &Uri,
alias_id: &str,
share_id: &str,
) -> Result<(), ClientError> {
let res = {
let endpoint = uri.get_endpoint(format!("alias/mail/notify/{}", share_id));
self.post(endpoint)
fn share_notify(&self, endpoint: &str, alias_id: &str) -> Result<()> {
let mut res = self
.post(endpoint)
.header("Sharry-Alias", alias_id)
.send_empty()
.map_err(|e| ClientError::Request(e.to_string()))?
.map_err(ClientError::from)?;
trace!("{endpoint:?} response: {res:?}");
ClientError::res_status_check(res.status(), ureq::http::StatusCode::OK)?;
let res = res
.body_mut()
.read_json::<NotifyShareResponse>()
.map_err(|e| ClientError::ResponseParsing(e.to_string()))?
};
.map_err(ClientError::res_parse_err)?;
debug!("response: {res:?}");
debug!("{res:?}");
Ok(())
}
fn sharry_file_create(
fn file_create(
&self,
uri: &Uri,
endpoint: &str,
alias_id: &str,
share_id: &str,
file: FileChecked,
) -> Result<FileUploading, ClientError> {
let size = file.get_size();
let res = {
let endpoint = uri.get_endpoint(format!("alias/upload/{}/files/tus", share_id));
self.post(endpoint)
file_name: &str,
file_size: u64,
) -> Result<String> {
let res = self
.post(endpoint)
.header("Sharry-Alias", alias_id)
.header("Sharry-File-Name", file.get_name())
.header("Upload-Length", size)
.header("Sharry-File-Name", file_name)
.header("Upload-Length", file_size)
.send_empty()
.map_err(|e| ClientError::Request(e.to_string()))?
};
.map_err(ClientError::from)?;
if res.status() != ureq::http::StatusCode::CREATED {
return Err(ClientError::ResponseStatus {
actual: res.status().as_u16(),
expected: ureq::http::StatusCode::CREATED.as_u16(),
});
}
trace!("{endpoint:?} response: {res:?}");
ClientError::res_status_check(res.status(), ureq::http::StatusCode::CREATED)?;
let location = (res.headers().get("Location"))
.ok_or_else(|| ClientError::ResponseParsing("Location header not found".to_owned()))?
.ok_or_else(|| ClientError::res_parse_err("Location header not found"))?
.to_str()
.map_err(|_| ClientError::ResponseParsing("Location header invalid".to_owned()))?
.map_err(ClientError::res_parse_err)?
.to_string();
debug!("patch uri: {location}");
debug!("{location:?}");
Ok(FileUploading::new(file.into_path(), size, location))
Ok(location)
}
fn file_patch(&self, endpoint: &str, alias_id: &str, offset: u64, chunk: &[u8]) -> Result<()> {
let res = self
.patch(endpoint)
.header("Sharry-Alias", alias_id)
.header("Upload-Offset", offset)
.send(chunk)
.map_err(ClientError::from)?;
trace!("{endpoint:?} response: {res:?}");
ClientError::res_status_check(res.status(), ureq::http::StatusCode::NO_CONTENT)?;
let res_offset = (res.headers().get("Upload-Offset"))
.ok_or_else(|| ClientError::res_parse_err("Upload-Offset header not found"))?
.to_str()
.map_err(ClientError::res_parse_err)?
.parse::<u64>()
.map_err(ClientError::res_parse_err)?;
// get chunk length as `u64` (we have checked while reading the chunk!)
let chunk_len = u64::try_from(chunk.len()).expect("something's VERY wrong");
if offset + chunk_len == res_offset {
Ok(())
} else {
Err(ClientError::ResponseContent(format!(
"Unexpected Upload-Offset: {} (expected {})",
res_offset,
offset + chunk_len
)))
}
}
}
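
Since callers like `CacheFile::pop_file` only require `impl Client`, the ureq-backed implementation can be replaced, e.g. by a test double. A minimal no-op implementation against the trait as defined above (the `MockClient` type and its return values are invented for illustration; `Client`, `NewShareRequest` and the module's `Result` alias are assumed in scope):

struct MockClient;

impl Client for MockClient {
    fn share_create(
        &self,
        _endpoint: &str,
        _alias_id: &str,
        _data: NewShareRequest,
    ) -> Result<String> {
        Ok("fake-share-id".to_owned())
    }

    fn share_notify(&self, _endpoint: &str, _alias_id: &str) -> Result<()> {
        Ok(())
    }

    fn file_create(
        &self,
        _endpoint: &str,
        _alias_id: &str,
        _file_name: &str,
        _file_size: u64,
    ) -> Result<String> {
        Ok("https://example.invalid/patch-uri".to_owned())
    }

    fn file_patch(&self, _endpoint: &str, _alias_id: &str, _offset: u64, _chunk: &[u8]) -> Result<()> {
        Ok(())
    }
}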

@@ -1,83 +0,0 @@
use std::{
ffi::OsStr,
fs, io,
path::{Path, PathBuf},
};
use log::debug;
use serde::{Deserialize, Serialize};
use ureq::http::{HeaderValue, StatusCode};
use super::{Alias, FileUploading, Share, SharryAlias, SharryFile};
#[derive(Debug, Clone, Hash, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct FileChecked {
path: PathBuf,
}
impl FileChecked {
pub fn new(value: impl AsRef<Path>) -> io::Result<Self> {
let meta = fs::metadata(&value)?;
if meta.is_file() {
Ok(Self {
path: fs::canonicalize(&value)?,
})
} else {
Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Not a regular file",
))
}
}
pub fn start_upload(
self,
http: &ureq::Agent,
alias: &Alias,
share: &Share,
) -> io::Result<FileUploading> {
let size = self.get_size();
let res = {
let endpoint = alias.get_endpoint(format!("alias/upload/{}/files/tus", share.id));
(http.post(endpoint))
.sharry_header(alias)
.header("Sharry-File-Name", self.get_name())
.header("Upload-Length", size)
.send_empty()
.map_err(ureq::Error::into_io)?
};
if res.status() != StatusCode::CREATED {
return Err(io::Error::other("unexpected response status"));
}
let location = (res.headers().get("Location"))
.ok_or_else(|| io::Error::other("Location header not found"))?
.to_str()
.map_err(|_| io::Error::other("Location header invalid"))?
.to_string();
debug!("patch uri: {location}");
Ok(FileUploading::new(self.path, size, location))
}
}
impl<'t> SharryFile<'t> for FileChecked {
fn into_path(self) -> PathBuf {
self.path
}
/// get a reference to the file's name
///
/// Uses `SharryFile::extract_file_name`, which may **panic**!
fn get_name(&'t self) -> &'t str {
<Self as SharryFile>::extract_file_name(&self.path)
}
fn get_size(&self) -> u64 {
fs::metadata(&self.path).unwrap().len()
}
}

@@ -1,149 +0,0 @@
use std::{
ffi::OsStr,
fmt, fs,
io::{self, Read, Seek, SeekFrom},
path::PathBuf,
};
use log::debug;
use serde::{Deserialize, Serialize};
use ureq::http::{HeaderValue, StatusCode};
use super::{Alias, SharryAlias, SharryFile};
#[derive(Serialize, Deserialize, Debug)]
pub struct FileUploading {
path: PathBuf,
size: u64,
uri: String,
offset: u64,
}
#[derive(Debug, thiserror::Error)]
pub enum UploadError {
#[error("file I/O error: {0}")]
FileIO(#[from] io::Error),
#[error("network request failed")]
Request,
#[error("unexpected response status")]
ResponseStatus,
#[error("could not parse offset header")]
ResponseOffset,
// #[error("chunk length conversion failed: {0}")]
// InvalidChunkLength(String),
// #[error("offset mismatch")]
// ResponseOffsetMismatch,
}
pub enum ChunkState {
Ok(FileUploading),
Err(FileUploading, UploadError),
Finished(PathBuf),
}
impl fmt::Display for FileUploading {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"Uploading {:?} ({}/{})",
self.path.display(),
self.offset,
self.size
)
}
}
impl FileUploading {
pub fn new(path: PathBuf, size: u64, uri: String) -> Self {
Self {
path,
size,
uri,
offset: 0,
}
}
pub fn get_offset(&self) -> u64 {
self.offset
}
fn read_chunk(&self, chunk_size: usize) -> io::Result<Vec<u8>> {
let mut f = fs::File::open(&self.path)?;
f.seek(SeekFrom::Start(self.offset))?;
let mut bytes = vec![0; chunk_size];
let read_len = f.read(&mut bytes)?;
bytes.truncate(read_len);
Ok(bytes)
}
pub fn upload_chunk(
mut self,
http: &ureq::Agent,
alias: &Alias,
chunk_size: usize,
) -> ChunkState {
let chunk = match self.read_chunk(chunk_size) {
Err(e) => return ChunkState::Err(self, UploadError::FileIO(e)),
Ok(value) => value,
};
let Ok(res) = (http.patch(&self.uri))
.sharry_header(alias)
.header("Upload-Offset", self.offset)
.send(&chunk)
else {
return ChunkState::Err(self, UploadError::Request);
};
if res.status() != StatusCode::NO_CONTENT {
return ChunkState::Err(self, UploadError::ResponseStatus);
}
let Some(Ok(Ok(res_offset))) = (res.headers().get("Upload-Offset"))
.map(HeaderValue::to_str)
.map(|v| v.map(str::parse::<u64>))
else {
return ChunkState::Err(self, UploadError::ResponseOffset);
};
// convert chunk.len() into an `u64`
//
// BOOKMARK this might panic on platforms where `usize` > 64 bit.
// Also whoa, you've sent more than 2 EiB ... in ONE chunk.
// Maybe just chill?
let chunk_len = u64::try_from(chunk.len())
.unwrap_or_else(|e| panic!("usize={} did not fit into u64: {}", chunk.len(), e));
if self.offset + chunk_len != res_offset {
return ChunkState::Err(self, UploadError::ResponseOffset);
}
self.offset = res_offset;
if self.offset == self.size {
return ChunkState::Finished(self.path);
}
ChunkState::Ok(self)
}
}
impl<'t> SharryFile<'t> for FileUploading {
fn into_path(self) -> PathBuf {
self.path
}
fn get_name(&'t self) -> &'t str {
<Self as SharryFile>::extract_file_name(&self.path)
}
fn get_size(&self) -> u64 {
self.size
}
}

@@ -1,13 +1,5 @@
#![allow(unused_imports)]
mod alias;
mod api;
mod client;
mod file;
mod share;
pub use alias::Alias;
pub use api::{NewShareRequest, Uri};
// pub use client::SharryClient;
pub use file::{ChunkState, FileChecked, FileUploading, SharryFile, UploadError};
pub use share::Share;
pub use client::{Client, ClientError, Result};

@@ -1,48 +0,0 @@
use log::debug;
use serde::{Deserialize, Serialize};
use super::{
alias::{Alias, SharryAlias},
api::{NewShareRequest, NewShareResponse, NotifyShareResponse},
};
#[derive(Serialize, Deserialize, Debug)]
pub struct Share {
pub(super) id: String,
}
impl Share {
pub fn create(
http: &ureq::Agent,
alias: &Alias,
data: NewShareRequest,
) -> Result<Self, ureq::Error> {
let res = (http.post(alias.get_endpoint("alias/upload/new")))
.sharry_header(alias)
.send_json(data)?
.body_mut()
.read_json::<NewShareResponse>()?;
debug!("response: {res:?}");
if !(res.success && (res.message == "Share created.")) {
return Err(ureq::Error::Other("unexpected json response".into()));
}
Ok(Self { id: res.id })
}
pub fn notify(&self, http: &ureq::Agent, alias: &Alias) -> Result<(), ureq::Error> {
let endpoint = alias.get_endpoint(format!("alias/mail/notify/{}", self.id));
let res = (http.post(endpoint))
.sharry_header(alias)
.send_empty()?
.body_mut()
.read_json::<NotifyShareResponse>()?;
debug!("response: {res:?}");
Ok(())
}
}