shrupl/src/cachefile.rs

use std::{
    collections::VecDeque,
    fs,
    io::{self, Write},
    path::{Path, PathBuf},
    time::Duration,
};
use indicatif::ProgressBar;
use log::{info, trace};
use serde::{Deserialize, Serialize};
use crate::{
    cli::Cli,
    file::{self, Chunk, FileTrait},
    output::new_progressbar,
    sharry::{AliasID, Client, ShareID, Uri},
};

/// Persistent upload state, written as JSON to the user's cache directory so
/// that an interrupted upload can be resumed later.
#[derive(Serialize, Deserialize, Debug)]
pub struct CacheFile {
    /// On-disk location of this cache file, derived from the CLI arguments;
    /// never serialized.
    #[serde(skip)]
    file_name: PathBuf,
    uri: Uri,
    alias_id: AliasID,
    share_id: ShareID,
    /// File currently being uploaded, if any.
    uploading: Option<file::Uploading>,
    /// Files still waiting to be uploaded.
    files: VecDeque<file::Checked>,
}
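
// Hedged sketch of the JSON produced by the derives above (field names follow
// the struct; `file_name` is `#[serde(skip)]` and never written; the concrete
// shapes of `Uri`, `AliasID`, `ShareID`, `file::Uploading` and `file::Checked`
// are defined in other modules, so the values below are placeholders only):
//
//   {
//     "uri": "...",
//     "alias_id": "...",
//     "share_id": "...",
//     "uploading": null,
//     "files": []
//   }
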
impl CacheFile {
    /// Per-user cache directory used by shrupl.
    fn cache_dir() -> PathBuf {
        let dir_name = dirs_next::cache_dir()
            .expect("could not determine cache directory")
            .join("shrupl");
        trace!("cachedir: {:?}", dir_name.display());
        dir_name
    }

    /// Cache file path for this invocation, keyed by a hash of the CLI arguments.
    fn cache_file(args: &Cli) -> PathBuf {
        let file_name = Self::cache_dir().join(format!("{}.json", args.get_hash()));
        trace!("cachefile: {:?}", file_name.display());
        file_name
    }

    /// Load cached upload state matching the given CLI arguments and, if
    /// requested, re-check the hashes of all files it references.
    pub fn try_resume(args: &Cli) -> crate::Result<Self> {
        let file_name = Self::cache_file(args);
        let state: Self = {
            let file = fs::File::open(&file_name)?;
            let reader = io::BufReader::new(file);
            serde_json::from_reader(reader).map_err(io::Error::other)?
        };

        if args.should_hash() {
            fn check_hash(file: &impl FileTrait, bar: &ProgressBar) -> crate::Result<()> {
                bar.set_message(format!("checking {:?}", file.get_name()));
                file.check_hash(|bytes| bar.inc(bytes))
            }

            info!("checking files in {state:?}");

            // BOOKMARK assumption: total file size < 2 EiB
            let total_size = {
                let upl_size = if let Some(upl) = state.peek_uploading() {
                    upl.get_size()
                } else {
                    0
                };
                upl_size + state.queue().iter().map(|&f| f.get_size()).sum::<u64>()
            };

            let bar = new_progressbar();
            bar.set_length(total_size);
            bar.enable_steady_tick(Duration::from_millis(50));

            if let Some(upl) = state.peek_uploading() {
                check_hash(upl, &bar)?;
            }
            for chk in state.queue() {
                check_hash(chk, &bar)?;
            }
            bar.finish_with_message("finished checking files");
        }

        Ok(Self { file_name, ..state })
    }

    /// Build fresh upload state from the CLI arguments, hashing the files if
    /// requested and creating a new share via the supplied closure.
    pub fn from_args(
        args: &Cli,
        new_share: impl FnOnce(&Cli) -> crate::Result<ShareID>,
    ) -> crate::Result<Self> {
        let mut files = args.files.clone();

        if args.should_hash() {
            info!("hashing files {files:?}");
            let bar = new_progressbar();
            // BOOKMARK assumption: total file size < 2 EiB
            bar.set_length(files.iter().map(FileTrait::get_size).sum());
            bar.enable_steady_tick(Duration::from_millis(50));

            for chk in &mut files {
                bar.set_message(format!("hashing {:?}", chk.get_name()));
                chk.hash(|bytes| bar.inc(bytes))?;
            }
            bar.finish_with_message("finished hashing files");
        }

        Ok(Self {
            file_name: Self::cache_file(args),
            uri: args.get_uri(),
            alias_id: args.alias.clone(),
            share_id: new_share(args)?,
            uploading: None,
            files: files.into(),
        })
    }

    /// Files still waiting to be uploaded, in queue order.
    pub fn queue(&self) -> Vec<&file::Checked> {
        self.files.iter().collect()
    }

    /// The file currently being uploaded, starting the next queued file if no
    /// upload is in progress; `None` once the queue is exhausted.
    pub fn get_uploading(
        &mut self,
        client: &impl Client,
    ) -> crate::Result<Option<&mut file::Uploading>> {
        if self.uploading.is_some() {
            Ok(self.uploading.as_mut())
        } else if let Some(chk) = self.files.pop_front() {
            let upl = chk.start_upload(client, &self.uri, &self.alias_id, &self.share_id)?;
            self.uploading.replace(upl);
            Ok(self.uploading.as_mut())
        } else {
            Ok(None)
        }
    }

    /// Mutable access to the file currently being uploaded.
    /// Panics if no upload is in progress.
    pub fn expect_uploading(&mut self) -> &mut file::Uploading {
        self.uploading
            .as_mut()
            .expect("expect_uploading called while not uploading")
    }

    /// The file currently being uploaded, if any.
    pub fn peek_uploading(&self) -> Option<&file::Uploading> {
        self.uploading.as_ref()
    }

    /// Run the EOF check on the file currently being uploaded. On failure the
    /// upload is dropped and the offending path is returned.
    pub fn check_eof(&mut self) -> Option<PathBuf> {
        if let Some(upl) = self.uploading.take() {
            match upl.check_eof() {
                Ok(upl) => self.uploading = Some(upl),
                Err(p) => return Some(p),
            }
        }
        None
    }

    /// Rewind the current upload by one chunk; returns `None` if rewinding is
    /// not possible. Panics if no upload is in progress.
    pub fn rewind_chunk(mut self) -> Option<Self> {
        let upl = self
            .uploading
            .take()
            .expect("rewind_chunk called while not uploading");
        self.uploading = Some(upl.rewind()?);
        Some(self)
    }

    /// Abort the current upload and push its file back onto the front of the
    /// queue. Panics if no upload is in progress.
    pub fn abort_upload(&mut self) {
        let upl = self
            .uploading
            .take()
            .expect("abort_upload called while not uploading");
        self.files.push_front(upl.into());
    }
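
    /// Hedged sketch, not part of the original API: one way a caller might
    /// combine the two methods above after a failed chunk transfer. The name
    /// `handle_chunk_failure` and the `retryable` flag are illustrative
    /// assumptions only.
    #[allow(dead_code)]
    fn handle_chunk_failure(mut self, retryable: bool) -> Option<Self> {
        if retryable {
            // Roll the current upload back by one chunk and keep the state.
            self.rewind_chunk()
        } else {
            // Give up on this attempt and re-queue the file for later.
            self.abort_upload();
            Some(self)
        }
    }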

    /// Notify the server about this share through the client.
    pub fn share_notify(&self, client: &impl Client) -> crate::Result<()> {
        client.share_notify(&self.uri, &self.alias_id, &self.share_id)
    }

    /// Upload (patch) a single chunk of the current file through the client.
    pub fn file_patch(&self, client: &impl Client, chunk: &Chunk) -> crate::Result<()> {
        client.file_patch(&self.uri, &self.alias_id, &self.share_id, chunk)
    }

    /// Write the current state as pretty-printed JSON to the cache file,
    /// creating the cache directory if necessary.
    pub fn save(&self) -> io::Result<()> {
        let cache_dir = self.file_name.parent().ok_or_else(|| {
            io::Error::other(format!("orphan file {:?}", self.file_name.display()))
        })?;
        fs::create_dir_all(cache_dir)?;

        let json = serde_json::to_string_pretty(self).map_err(io::Error::other)?;
        let mut file = fs::File::create(&self.file_name)?;
        file.write_all(json.as_bytes())?;

        trace!("updated {:?}", self.file_name.display());
        Ok(())
    }

    /// Delete a cache file from disk.
    fn remove(path: &Path) -> io::Result<()> {
        fs::remove_file(path)?;
        trace!("removed {:?}", path.display());
        Ok(())
    }

    /// Remove any cache file matching the given CLI arguments, ignoring errors.
    pub fn clear_any(args: &Cli) {
        let _ = Self::remove(&Self::cache_file(args));
    }

    /// Consume this state and delete its cache file.
    pub fn discard(self) -> io::Result<()> {
        Self::remove(&self.file_name)
    }
}
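
// Hedged sketch of how this module might be driven end to end. The function
// name, the error handling, and the chunk loop body are assumptions made for
// illustration; only the `CacheFile` methods defined above come from this
// module, and the real caller lives elsewhere in the crate.
#[allow(dead_code)]
fn upload_sketch(args: &Cli, client: &impl Client) -> crate::Result<()> {
    // Resume a previous run if a matching cache file exists; otherwise build a
    // fresh state, letting the caller-supplied closure create the share.
    let mut state = CacheFile::try_resume(args)
        .or_else(|_| CacheFile::from_args(args, |_args| todo!("create share via client")))?;
    state.save()?;

    // Pull files until nothing is left. How a finished upload leaves the
    // `uploading` slot is not visible in this module, so the loop body stays a
    // placeholder.
    while let Some(_uploading) = state.get_uploading(client)? {
        // Read the next chunk from `_uploading`, send it, persist progress:
        //     state.file_patch(client, &chunk)?;
        //     state.save()?;
        todo!("chunk upload loop")
    }

    // Everything was uploaded: notify the server and drop the cache file.
    state.share_notify(client)?;
    state.discard()?;
    Ok(())
}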