[wip] documentation and minor refactoring

This commit is contained in:
Jörn-Michael Miehe 2025-07-03 22:46:19 +00:00
parent 6ba17d57df
commit a34aa968b4
7 changed files with 76 additions and 44 deletions

.vscode/tasks.json vendored
View file

@@ -70,7 +70,7 @@
"command": "echo All Tests successful!",
"dependsOn": [
"Run Unit Tests",
"Run Integration Tests"
// "Run Integration Tests"
],
"dependsOrder": "sequence",
"group": "test"

View file

@@ -181,7 +181,7 @@ impl CacheFile {
.take()
.expect("abort_upload called while not uploading");
self.files.push_front(upl.abort());
self.files.push_front(upl.into());
}
pub fn share_notify(&self, client: &impl Client) -> crate::Result<()> {
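For context: the `upl.into()` call above relies on a `From<Uploading> for Checked` impl that this commit adds further down, replacing the removed `Uploading::abort` helper. A minimal sketch of the pattern with simplified stand-in structs (the real fields are private and go through `new_direct`):

// Simplified stand-ins for the real types, for illustration only.
struct Checked { path: std::path::PathBuf, size: u64, hash: Option<String> }
struct Uploading { path: std::path::PathBuf, size: u64, hash: Option<String>, offset: u64 }

// Aborting an upload becomes a plain type conversion: the Sharry file ID and
// offsets are dropped, and the remaining fields flow back into a `Checked`.
impl From<Uploading> for Checked {
    fn from(value: Uploading) -> Self {
        Self { path: value.path, size: value.size, hash: value.hash }
    }
}

fn abort(upl: Uploading) -> Checked {
    upl.into() // what `push_front(upl.into())` does at the call site above
}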

View file

@@ -18,11 +18,11 @@ use super::{FileTrait, Uploading};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct Checked {
/// canonical path to a regular file
pub(super) path: PathBuf,
path: PathBuf,
/// size of that file
pub(super) size: u64,
size: u64,
/// hash of that file
pub(super) hash: Option<String>,
hash: Option<String>,
}
impl AsRef<[u8]> for Checked {
@@ -32,6 +32,10 @@ impl AsRef<[u8]> for Checked {
}
impl Checked {
pub(super) fn new_direct(path: PathBuf, size: u64, hash: Option<String>) -> Self {
Self { path, size, hash }
}
/// create a new checked file from some path reference
///
/// # Errors
@@ -60,6 +64,8 @@
///
/// - from `file::compute_hash`
/// - Mismatch if file already hashed
///
/// TODO this could use an error variant like `IllegalInvocation`
pub fn hash(&mut self, f: impl Fn(u64)) -> crate::Result<()> {
if self.hash.is_some() {
return Err(crate::Error::mismatch("unhashed file", self.path.display()));
@@ -70,10 +76,10 @@
Ok(())
}
/// starts uploading this file
/// start uploading this file
///
/// - tries to create a new file using the client
/// - consumes `self` into a `file::Uploading` struct
/// - try to create a new file using the client
/// - consume `self` into a `file::Uploading` struct
///
/// # Errors
///
@@ -87,7 +93,9 @@
) -> crate::Result<Uploading> {
let file_id = client.file_create(uri, alias_id, share_id, &self)?;
Ok(Uploading::new(self.path, self.size, self.hash, file_id))
Ok(Uploading::new_direct(
self.path, self.size, self.hash, file_id,
))
}
}
@@ -101,11 +109,6 @@ impl FileTrait for Checked {
}
fn check_hash(&self, on_progress: impl FnMut(u64)) -> crate::Result<()> {
super::check_hash(
&self.path,
self.size,
self.hash.as_deref(),
on_progress,
)
super::check_hash(&self.path, self.size, self.hash.as_deref(), on_progress)
}
}
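Taken together, the `Checked` hunks above describe one stage of the pipeline: a checked file can be hashed once and is then consumed into an `Uploading` by `upload`, which asks the client to create the remote file. A hedged sketch of a caller driving this; the parameter list of `upload` is cut off in the hunk above, so the signature used here is an assumption:

// Hedged sketch, not the crate's actual API surface: the exact `upload`
// parameter list is not visible in this diff.
fn hash_and_upload(
    mut file: Checked,
    client: &impl Client,
    uri: &Uri,
    alias_id: &AliasID,
    share_id: &ShareID,
) -> crate::Result<Uploading> {
    // `hash` returns a Mismatch error if the file already carries a hash,
    // so it is only called once per file.
    file.hash(|progress| eprintln!("hashing: {progress}"))?;

    // `upload` calls `Client::file_create` and consumes the `Checked` into
    // an `Uploading` that carries the returned `FileID`.
    file.upload(client, uri, alias_id, share_id)
}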

View file

@@ -19,7 +19,7 @@ impl fmt::Debug for Chunk<'_> {
}
impl<'t> Chunk<'t> {
pub fn new(file_id: sharry::FileID, offset: u64, data: &'t [u8]) -> Self {
pub(super) fn new_direct(file_id: sharry::FileID, offset: u64, data: &'t [u8]) -> Self {
Self {
file_id,
offset,
@@ -39,7 +39,7 @@
self.data
}
pub fn get_length(&self) -> u64 {
pub(super) fn get_length(&self) -> u64 {
let len = self.data.len();
// BOOKMARK this might **panic** on platforms where `usize` has more than 64 bit.

View file

@@ -26,7 +26,7 @@ const HASH_CHUNK_SIZE: usize = 4 * 1024 * 1024;
///
/// # Errors
///
/// - from `fs::File::open` and `fs::File::read`
/// - from `fs::File::{open, read}`
/// - Mismatch if given `size` does not match the file's size
fn compute_hash(path: &Path, size: u64, mut on_progress: impl FnMut(u64)) -> crate::Result<String> {
let mut file = fs::File::open(path)?;
@@ -63,10 +63,8 @@ fn compute_hash(path: &Path, size: u64, mut on_progress: impl FnMut(u64)) -> cra
///
/// # Params
///
/// - `path` to the file to hash
/// - `size` of that file
/// - optional known `hash`
/// - `on_progress` will be called for each processed chunk (max. `HASH_CHUNK_SIZE`)
/// - everything from `compute_hash`
/// - optionally, known `hash`
///
/// # Errors
///
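The reworked doc comments above describe the shared hashing helpers: the file is read in chunks of at most `HASH_CHUNK_SIZE`, the progress callback fires once per chunk, and `check_hash` additionally compares against an optional known hash. An illustrative use of the `FileTrait::check_hash` front end shown elsewhere in this commit; whether the callback receives per-chunk or cumulative byte counts is not spelled out in these hunks, so the accumulation below is an assumption:

// Illustrative only: drive `FileTrait::check_hash` with a progress callback.
fn verify_with_progress(file: &impl FileTrait) -> crate::Result<()> {
    let mut done: u64 = 0;
    file.check_hash(|chunk_len| {
        // assumed: `chunk_len` is the number of bytes processed in this chunk
        done += chunk_len;
        eprintln!("hashed {done} bytes so far");
    })
}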

View file

@@ -11,6 +11,10 @@ use crate::sharry;
use super::{Checked, Chunk, FileTrait};
/// Description of a `file::Checked` that is actively being uploaded
///
/// - impl `serde` for cachefile handling
/// - impl `Into<Checked>` to abort an upload
#[derive(Serialize, Deserialize, Debug)]
pub struct Uploading {
/// canonical path to a regular file
@@ -19,14 +23,23 @@ pub struct Uploading {
size: u64,
/// hash of that file
hash: Option<String>,
/// file ID in a Sharry share
file_id: sharry::FileID,
/// previous offset, if applicable
#[serde(skip)]
last_offset: Option<u64>,
previous_offset: Option<u64>,
/// current reading offset
offset: u64,
}
impl From<Uploading> for Checked {
fn from(value: Uploading) -> Self {
Self::new_direct(value.path, value.size, value.hash)
}
}
impl Uploading {
pub(super) fn new(
pub(super) fn new_direct(
path: PathBuf,
size: u64,
hash: Option<String>,
@@ -37,19 +50,26 @@ impl Uploading {
size,
hash,
file_id,
last_offset: None,
previous_offset: None,
offset: 0,
}
}
/// get the current reading offset
pub fn get_offset(&self) -> u64 {
self.offset
}
/// rewind to the previously read chunk
///
/// - consume self, returning Some(self) on success
///
/// TODO this should take &mut self and return `crate::Result<()>`
/// TODO this could use an error variant like `IllegalInvocation`
pub fn rewind(mut self) -> Option<Self> {
if let Some(last_offset) = self.last_offset {
self.last_offset = None;
self.offset = last_offset;
if let Some(offset) = self.previous_offset.take() {
self.offset = offset;
Some(self)
} else {
warn!("attempted to rewind twice");
@@ -57,6 +77,15 @@
}
}
/// read the next chunk
///
/// # Errors
///
/// - from `fs::File::{open, seek, read}`
/// - `UnexpectedEof` if `read` returned no bytes
///
/// TODO this should return `crate::Result<Chunk<'t>>`
/// TODO this could use an error variant like `IllegalInvocation`
pub fn read<'t>(&mut self, buf: &'t mut [u8]) -> io::Result<Chunk<'t>> {
let mut f = fs::File::open(&self.path)?;
@@ -70,13 +99,18 @@
));
}
let chunk = Chunk::new(self.file_id.clone(), self.offset, &buf[..read_len]);
self.last_offset = Some(self.offset);
let chunk = Chunk::new_direct(self.file_id.clone(), self.offset, &buf[..read_len]);
self.previous_offset = Some(self.offset);
self.offset += chunk.get_length();
Ok(chunk)
}
/// check if this file has been completely read
///
/// - consume self, returning Ok(self) if EOF not reached, Err(PathBuf) otherwise
///
/// TODO factor this into `read` and something more explicit like `finish(self) -> PathBuf`
pub fn check_eof(self) -> Result<Self, PathBuf> {
if self.offset < self.size {
Ok(self)
@@ -84,14 +118,6 @@
Err(self.path)
}
}
pub fn abort(self) -> Checked {
Checked {
path: self.path,
size: self.size,
hash: self.hash,
}
}
}
impl FileTrait for Uploading {
@@ -104,11 +130,6 @@ impl FileTrait for Uploading {
}
fn check_hash(&self, on_progress: impl FnMut(u64)) -> crate::Result<()> {
super::check_hash(
&self.path,
self.size,
self.hash.as_deref(),
on_progress,
)
super::check_hash(&self.path, self.size, self.hash.as_deref(), on_progress)
}
}
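The `Uploading` hunks above amount to a small state machine: `read` yields the next `Chunk` and remembers the previous offset, `rewind` steps back exactly one chunk (but refuses to do so twice in a row), `check_eof` consumes the value and hands back the path once the whole file has been read, and `Into<Checked>` aborts. A hedged driver loop; `drive_upload`, `send_chunk`, and the buffer size are illustrative stand-ins, not part of this diff:

use std::io;
use std::path::PathBuf;

// Hedged sketch of a retrying upload loop around the API above. `send_chunk`
// stands in for whatever client call actually pushes a `Chunk` to Sharry.
fn drive_upload(
    mut upl: Uploading,
    mut send_chunk: impl FnMut(&Chunk<'_>) -> bool,
) -> io::Result<PathBuf> {
    // one reusable read buffer; 4 MiB mirrors HASH_CHUNK_SIZE but is arbitrary here
    let mut buf = vec![0u8; 4 * 1024 * 1024];
    loop {
        // `read` opens the file, seeks to the current offset, fills `buf`,
        // remembers `previous_offset`, and advances `offset` by the chunk length.
        let chunk = upl.read(&mut buf)?;
        if send_chunk(&chunk) {
            // `check_eof` consumes `self`: Ok(self) means more data remains,
            // Err(path) means the whole file has been read.
            match upl.check_eof() {
                Ok(more) => upl = more,
                Err(path) => return Ok(path),
            }
        } else {
            // transfer failed: step back exactly one chunk and retry;
            // `rewind` returns None when asked to rewind twice in a row.
            upl = upl.rewind().ok_or_else(|| {
                io::Error::new(io::ErrorKind::Other, "attempted to rewind twice")
            })?;
        }
    }
}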

View file

@@ -17,6 +17,16 @@ pub trait Client {
fn share_notify(&self, uri: &Uri, alias_id: &AliasID, share_id: &ShareID) -> crate::Result<()>;
/// create a new file in a Sharry share
///
/// - try to use endpoint from `Uri::file_create`
/// - try to extract `FileID` from the response
/// - return the new `FileID`
///
/// # Errors from
///
/// - request to endpoint
/// - parsing the response
fn file_create(
&self,
uri: &Uri,