Compare commits
8 commits
e391821fe5 ... 4c88ea7291
| Author | SHA1 | Date |
|---|---|---|
| | 4c88ea7291 | |
| | 465c857126 | |
| | 1f9c247439 | |
| | 0b8bebad6e | |
| | 11a5106473 | |
| | ea5ef1fa10 | |
| | 2bcbc0d71c | |
| | 6e553cc185 | |
13 changed files with 377 additions and 132 deletions
@@ -1,2 +1,9 @@
[build]
target = "x86_64-unknown-linux-musl"

# rustflags = [
#     # emit instructions tuned to the current CPU
#     "-C", "target-cpu=native",
#     # assume CPU features
#     "-C", "target-feature=+avx2,+sse4.1,+ssse3,+aes",
# ]
Cargo.lock (generated): 37 changes
@@ -67,18 +67,47 @@ dependencies = [
 "windows-sys 0.59.0",
]

[[package]]
name = "arrayref"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"

[[package]]
name = "arrayvec"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"

[[package]]
name = "base64"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"

[[package]]
name = "base64ct"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"

[[package]]
name = "bitflags"
version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"

[[package]]
name = "blake2b_simd"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06e903a20b159e944f91ec8499fe1e55651480c541ea0a584f5d967c49ad9d99"
dependencies = [
 "arrayref",
 "arrayvec",
 "constant_time_eq",
]

[[package]]
name = "bumpalo"
version = "3.17.0"
@@ -171,6 +200,12 @@ dependencies = [
 "windows-sys 0.59.0",
]

[[package]]
name = "constant_time_eq"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"

[[package]]
name = "cookie"
version = "0.18.1"
@@ -846,6 +881,8 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
name = "shrupl"
version = "0.1.0-alpha"
dependencies = [
 "base64ct",
 "blake2b_simd",
 "clap",
 "console",
 "ctrlc",
@@ -5,6 +5,8 @@ edition = "2024"
description = "ShrUpl is a tool to upload files to a Sharry Instance through a public Alias, leveraging the tus protocol"

[dependencies]
base64ct = { version = "1.8.0", default-features = false, features = ["alloc"] }
blake2b_simd = "1.0.3"
clap = { version = "4.5.38", features = ["derive"] }
console = { version = "0.15.11", default-features = false }
ctrlc = { version = "3.4.7", features = ["termination"] }
@@ -20,5 +22,11 @@ thiserror = "2.0.12"
ureq = { version = "3.0.11", features = ["json"] }

[profile.release]
# optimize aggressively for binary size
opt-level = "z"
# better inlining
codegen-units = 1
# link-time optimization
lto = true
debug = false
panic = "abort"
notes.md: 5 changes
@@ -49,8 +49,3 @@
- "continue" and "new" flags to avoid user interaction
- "quiet" flag to disable output entirely
- some switch to change log to "pretty-print"

- hashing
  - store file hashes with all `file::*` variants
  - check hashes on "continue"
  - CLI switch to skip hashing
@@ -1,7 +1,6 @@
use std::{fmt, io, time::Duration};

use console::style;
use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};
use indicatif::{ProgressBar, ProgressDrawTarget};
use log::{debug, warn};

use crate::{
@@ -9,6 +8,7 @@ use crate::{
    cli::Cli,
    error,
    file::{Chunk, FileTrait},
    output::new_progressbar,
    sharry::Client,
};
@@ -26,13 +26,17 @@ impl fmt::Debug for AppState {
    }
}

fn new_http(timeout: Option<Duration>) -> ureq::Agent {
fn new_http(args: &Cli) -> ureq::Agent {
    ureq::Agent::config_builder()
        .timeout_global(timeout)
        .timeout_global(args.get_timeout())
        .build()
        .into()
}

fn new_share(args: &Cli) -> error::Result<String> {
    new_http(args).share_create(&args.get_uri(), &args.alias, args.get_share_request())
}

impl AppState {
    fn new(http: ureq::Agent, inner: CacheFile) -> Self {
        Self {
@@ -42,37 +46,19 @@ impl AppState {
        }
    }

    #[must_use]
    pub fn try_resume(args: &Cli) -> Option<Self> {
        let inner = CacheFile::try_resume(args)
            .inspect_err(|e| debug!("could not resume from hash {:?}: {e}", args.get_hash()))
            .ok()?;

        Some(Self::new(new_http(args.get_timeout()), inner))
    pub fn try_resume(args: &Cli) -> error::Result<Self> {
        Ok(Self::new(new_http(args), CacheFile::try_resume(args)?))
    }

    pub fn from_args(args: &Cli) -> error::Result<Self> {
        let http = new_http(args.get_timeout());

        let share_id = http.share_create(&args.get_uri(), &args.alias, args.get_share_request())?;

        Ok(Self::new(http, CacheFile::from_args(args, share_id)))
        Ok(Self::new(
            new_http(args),
            CacheFile::from_args(args, new_share)?,
        ))
    }

    fn with_progressbar(&mut self, f: impl FnOnce(&ProgressBar), drop_bar: bool) {
        let bar = &*self.progress.get_or_insert_with(|| {
            ProgressBar::hidden().with_style(
                ProgressStyle::with_template(&format!(
                    concat!(
                        "{{bar:50.cyan/blue}} {{msg:.magenta}}: ",
                        "{{binary_bytes:.yellow}}{}{{binary_total_bytes:.yellow}} ",
                        "({{eta}})",
                    ),
                    style("/").magenta(),
                ))
                .expect("style template is not valid"),
            )
        });
        let bar = &*self.progress.get_or_insert_with(new_progressbar);

        if let Some(upl) = self.inner.peek_uploading() {
            if bar.length().is_none() {
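A note on the refactor above: `CacheFile::from_args` now receives share creation as a closure (`new_share`), so the HTTP call only happens after the cheaper local work has succeeded. A minimal, self-contained sketch of that injection pattern, with placeholder names (`prepare`, `build_state`) that are not part of the crate:

```rust
// Sketch only: the closure performing the network call is injected, so it runs
// after local preparation (e.g. hashing) and can be swapped out in tests.
fn build_state<E>(
    prepare: impl FnOnce() -> Result<(), E>,       // stands in for hashing the files
    new_share: impl FnOnce() -> Result<String, E>, // stands in for the Sharry API call
) -> Result<String, E> {
    prepare()?; // fail early, before any network traffic
    let share_id = new_share()?; // only now create the remote share
    Ok(share_id)
}

fn main() {
    let id = build_state(|| Ok::<(), String>(()), || Ok("share-123".into())).unwrap();
    println!("created {id}");
}
```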
@@ -137,7 +123,7 @@ impl AppState {
            self.drop_progressbar(ProgressBar::finish);
        }

        Ok(self.inner.peek_uploading().is_none() && self.inner.queue_empty())
        Ok(self.inner.peek_uploading().is_none() && self.inner.queue().is_empty())
    }

    #[must_use]
@@ -153,18 +139,18 @@ impl AppState {
    }

    pub fn rebuild_share(self, args: &Cli) -> error::Result<Self> {
        let share_id =
            self.http
                .share_create(&args.get_uri(), &args.alias, args.get_share_request())?;

        Ok(Self::new(self.http, CacheFile::from_args(args, share_id)))
        Ok(Self::new(self.http, CacheFile::from_args(args, new_share)?))
    }

    pub fn save(&self) -> io::Result<()> {
        self.inner.save()
    }

    pub fn clear(self) -> io::Result<()> {
        self.inner.clear()
    pub fn discard(self) -> io::Result<()> {
        self.inner.discard()
    }

    pub fn clear_any(args: &Cli) {
        CacheFile::clear_any(args);
    }
}
@@ -16,23 +16,6 @@ use shrupl::{
};

fn main() {
    let check_ctrlc = {
        let stop = Arc::new(AtomicBool::new(false));
        let stop_ctrlc = stop.clone();

        ctrlc::set_handler(move || {
            stop_ctrlc.store(true, Ordering::SeqCst);
            info!("stopping as soon as possible ...");
        })
        .expect("Error setting Ctrl-C handler");

        move || {
            if stop.load(Ordering::SeqCst) {
                process::exit(255);
            }
        }
    };

    let args = Cli::parse();

    env_logger::Builder::new()
@@ -44,12 +27,34 @@ fn main() {

    println!("{} to {}!", style("Welcome").magenta().bold(), *SHRUPL);

    let mut state = AppState::try_resume(&args)
        .and_then(|state| output::prompt_continue().then_some(state))
        .unwrap_or_else(|| {
            check_ctrlc();
    let resumed = AppState::try_resume(&args);

            match AppState::from_args(&args) {
    let check_ctrlc = {
        let stop = Arc::new(AtomicBool::new(false));
        let stop_ctrlc = stop.clone();

        ctrlc::set_handler(move || {
            stop_ctrlc.store(true, Ordering::SeqCst);
            eprintln!("{} stopping as soon as possible!", *SHRUPL);
        })
        .expect("Error setting Ctrl-C handler");

        move || {
            if stop.load(Ordering::SeqCst) {
                process::exit(255);
            }
        }
    };

    let mut state = resumed
        .inspect_err(|e| {
            AppState::clear_any(&args);
            Log::handle(e);
            info!("could not resume from hash {:?}: {e}", args.get_hash());
        })
        .ok()
        .and_then(|state| output::prompt_continue().then_some(state))
        .unwrap_or_else(|| match AppState::from_args(&args) {
            Ok(state) => {
                state.save().unwrap_or_else(|e| {
                    Log::warning(format_args!("Failed to save state: {e}"));
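The resume logic above chains `Result` and `Option` adapters: a failed resume is logged and cleared, a successful one is only kept if the user confirms, and everything else falls back to fresh state. A toy version of that chain, with stand-in functions (`resume`, `prompt_continue`, `from_scratch`) that are not the crate's real API:

```rust
// Sketch of the resume-or-create chain from main(): Result -> Option -> fallback.
fn resume() -> Result<String, &'static str> {
    Err("no cache file") // pretend there is nothing to resume
}

fn prompt_continue() -> bool {
    false // pretend the user declined
}

fn from_scratch() -> String {
    "fresh state".into()
}

fn main() {
    let state = resume()
        .inspect_err(|e| eprintln!("could not resume: {e}")) // log, like the diff does
        .ok()
        .and_then(|s| prompt_continue().then_some(s)) // keep it only if confirmed
        .unwrap_or_else(from_scratch);

    println!("continuing with {state}");
}
```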
@@ -60,13 +65,13 @@ fn main() {
                Log::handle(&e);
                Log::error(format_args!("Failed to create state: {e}"));
            }
        }
    });

    check_ctrlc();

    info!("continuing with state: {state:#?}");

    let fns_magenta = output::style_all(&args.file_names(), StyledObject::magenta).join(", ");

    println!("{} is uploading: {fns_magenta}", *SHRUPL);

    let mut buffer = vec![0; args.chunk_size * 1024 * 1024];
@@ -129,7 +134,7 @@ fn main() {
        check_ctrlc();
    }

    state.clear().unwrap_or_else(|e| {
    state.discard().unwrap_or_else(|e| {
        Log::warning(format_args!("Failed to remove state: {e}"));
    });
src/cachefile.rs: 106 changes
@@ -2,16 +2,19 @@ use std::{
    collections::VecDeque,
    fs,
    io::{self, Write},
    path::PathBuf,
    path::{Path, PathBuf},
    time::Duration,
};

use log::trace;
use indicatif::{ProgressBar, ProgressDrawTarget};
use log::{info, trace};
use serde::{Deserialize, Serialize};

use crate::{
    cli::Cli,
    error,
    file::{self, Chunk},
    file::{self, Chunk, FileTrait},
    output::new_progressbar,
    sharry::{Client, Uri},
};
@@ -45,7 +48,7 @@ impl CacheFile {
        file_name
    }

    pub fn try_resume(args: &Cli) -> io::Result<Self> {
    pub fn try_resume(args: &Cli) -> error::Result<Self> {
        let file_name = Self::cache_file(args);

        let state: Self = {
@@ -54,22 +57,89 @@ impl CacheFile {
            serde_json::from_reader(reader).map_err(io::Error::other)?
        };

        if args.should_hash() {
            fn check_hash<'a>(
                file: &'a impl FileTrait<'a>,
                bar: &ProgressBar,
            ) -> error::Result<()> {
                bar.set_message(format!("checking {:?}", file.get_name()));

                match file.check_hash(|bytes| bar.inc(bytes)) {
                    Ok(true) => Ok(()),
                    Ok(false) => Err(error::Error::unknown(format!(
                        "Hash mismatch for file {:?}!",
                        file.get_name()
                    ))),
                    Err(e) => Err(e.into()),
                }
            }

            info!("checking files in {state:?}");

            // BOOKMARK assumption: total file size < 2 EiB
            let total_size = {
                let upl_size = if let Some(upl) = state.peek_uploading() {
                    upl.get_size()
                } else {
                    0
                };
                upl_size + state.queue().iter().map(|&f| f.get_size()).sum::<u64>()
            };

            let bar = new_progressbar();
            bar.set_draw_target(ProgressDrawTarget::stderr());
            bar.set_length(total_size);
            bar.enable_steady_tick(Duration::from_millis(50));

            if let Some(upl) = state.peek_uploading() {
                check_hash(upl, &bar)?;
            }

            for chk in state.queue() {
                check_hash(chk, &bar)?;
            }

            bar.finish_with_message("finished checking files");
        }

        Ok(Self { file_name, ..state })
    }

    pub fn from_args(args: &Cli, share_id: String) -> Self {
        Self {
    pub fn from_args(
        args: &Cli,
        new_share: impl FnOnce(&Cli) -> error::Result<String>,
    ) -> error::Result<Self> {
        let mut files = args.files.clone();

        if args.should_hash() {
            info!("hashing files {files:?}");

            let bar = new_progressbar();
            bar.set_draw_target(ProgressDrawTarget::stderr());
            // BOOKMARK assumption: total file size < 2 EiB
            bar.set_length(files.iter().map(FileTrait::get_size).sum());
            bar.enable_steady_tick(Duration::from_millis(50));

            for chk in &mut files {
                bar.set_message(format!("hashing {:?}", chk.get_name()));
                chk.hash(|bytes| bar.inc(bytes))?;
            }

            bar.finish_with_message("finished hashing files");
        }

        Ok(Self {
            file_name: Self::cache_file(args),
            uri: args.get_uri(),
            alias_id: args.alias.clone(),
            share_id,
            share_id: new_share(args)?,
            uploading: None,
            files: args.files.clone().into(),
        }
            files: files.into(),
        })
    }

    pub fn queue_empty(&self) -> bool {
        self.files.is_empty()
    pub fn queue(&self) -> Vec<&file::Checked> {
        self.files.iter().collect()
    }

    pub fn get_uploading(
@@ -151,10 +221,18 @@ impl CacheFile {
        Ok(())
    }

    pub fn clear(self) -> io::Result<()> {
        fs::remove_file(&self.file_name)?;
    fn remove(path: &Path) -> io::Result<()> {
        fs::remove_file(path)?;

        trace!("removed {:?}", self.file_name.display());
        trace!("removed {:?}", path.display());
        Ok(())
    }

    pub fn clear_any(args: &Cli) {
        let _ = Self::remove(&Self::cache_file(args));
    }

    pub fn discard(self) -> io::Result<()> {
        Self::remove(&self.file_name)
    }
}
src/cli.rs: 68 changes
@@ -1,11 +1,7 @@
use std::{
    convert::Infallible,
    fmt,
    hash::{DefaultHasher, Hash, Hasher},
    io,
    time::Duration,
};
use std::{convert::Infallible, fmt, io, time::Duration};

use base64ct::{Base64UrlUnpadded, Encoding};
use blake2b_simd::Params as Blake2b;
use clap::{
    Parser,
    builder::{PossibleValuesParser, TypedValueParser},
@@ -18,7 +14,7 @@ use crate::{
    sharry::{NewShareRequest, Uri},
};

#[derive(Parser, Hash)]
#[derive(Parser)]
#[command(version, about, long_about = None)]
pub struct Cli {
    /// Timeout in seconds for HTTP actions (set 0 or invalid to disable)
@@ -43,7 +39,7 @@ pub struct Cli {

    /// Name of the new share
    #[arg(short, long, default_value = "ShrUpl Upload", value_name = "TEXT")]
    name: String,
    share_name: String,

    /// Description of the new share
    #[arg(short, long, value_name = "TEXT")]
@@ -61,6 +57,10 @@ pub struct Cli {
    )]
    pub chunk_size: usize,

    /// Don't hash files before uploading
    #[arg(short, long)]
    no_hash: bool,

    /// Increase output verbosity
    #[arg(short, long, action = clap::ArgAction::Count)]
    verbose: u8,
@@ -100,27 +100,47 @@ fn parse_sharry_file(data: &str) -> io::Result<Checked> {
    Checked::new(data)
}

fn sorted<T>(values: &[T]) -> Vec<&T>
where
    T: Ord,
{
    let mut refs: Vec<_> = values.iter().collect();
    refs.sort_unstable();

    refs
}

impl Cli {
    #[must_use] pub fn get_timeout(&self) -> Option<Duration> {
    #[must_use]
    pub fn get_timeout(&self) -> Option<Duration> {
        (!self.timeout.is_zero()).then_some(self.timeout)
    }

    #[must_use] pub fn get_uri(&self) -> Uri {
    #[must_use]
    pub fn get_uri(&self) -> Uri {
        Uri::new(&self.protocol, &self.url)
    }

    #[must_use] pub fn may_retry(&self, tries: u32) -> bool {
    #[must_use]
    pub fn may_retry(&self, tries: u32) -> bool {
        match self.retry_limit {
            0 => true,
            limit => tries < limit,
        }
    }

    #[must_use] pub fn get_share_request(&self) -> NewShareRequest {
        NewShareRequest::new(&self.name, self.description.as_ref(), self.max_views)
    #[must_use]
    pub fn should_hash(&self) -> bool {
        !self.no_hash
    }

    #[must_use] pub fn get_level_filter(&self) -> LevelFilter {
    #[must_use]
    pub fn get_share_request(&self) -> NewShareRequest {
        NewShareRequest::new(&self.share_name, self.description.as_ref(), self.max_views)
    }

    #[must_use]
    pub fn get_level_filter(&self) -> LevelFilter {
        match self.verbose {
            0 => LevelFilter::Error,
            1 => LevelFilter::Warn,
@@ -134,17 +154,17 @@ impl Cli {
        self.files.iter().map(FileTrait::get_name).collect()
    }

    #[must_use] pub fn get_hash(&self) -> String {
        let file_refs = {
            let mut refs: Vec<_> = self.files.iter().collect();
            refs.sort_unstable();
    #[must_use]
    pub fn get_hash(&self) -> String {
        let mut hasher = Blake2b::new().hash_length(16).to_state();

            refs
        };
        hasher.update(self.get_uri().as_ref());
        hasher.update(self.alias.as_bytes());

        let mut hasher = DefaultHasher::new();
        (self.get_uri(), &self.alias, file_refs).hash(&mut hasher);
        for chk in sorted(&self.files) {
            hasher.update(chk.as_ref());
        }

        format!("{:x}", hasher.finish())
        Base64UrlUnpadded::encode_string(hasher.finalize().as_bytes())
    }
}
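The new `get_hash` replaces `std::hash::DefaultHasher` (whose algorithm is unspecified and may change between Rust releases) with a 16-byte BLAKE2b digest encoded as unpadded URL-safe Base64, so the derived cache-file name stays stable. A self-contained sketch of the same idea, using the `blake2b_simd` and `base64ct` crates from this diff; the URI, alias and file names are placeholders:

```rust
use base64ct::{Base64UrlUnpadded, Encoding};
use blake2b_simd::Params as Blake2b;

/// Derive a short, filesystem-safe key from the upload target and file set.
/// Sorting the paths makes the key independent of argument order.
fn cache_key(uri: &str, alias: &str, files: &[&str]) -> String {
    let mut hasher = Blake2b::new().hash_length(16).to_state();

    hasher.update(uri.as_bytes());
    hasher.update(alias.as_bytes());

    let mut sorted = files.to_vec();
    sorted.sort_unstable();
    for f in sorted {
        hasher.update(f.as_bytes());
    }

    Base64UrlUnpadded::encode_string(hasher.finalize().as_bytes())
}

fn main() {
    let a = cache_key("https://example.com", "alias123", &["b.bin", "a.bin"]);
    let b = cache_key("https://example.com", "alias123", &["a.bin", "b.bin"]);
    assert_eq!(a, b); // file order does not change the key
    println!("{a}");
}
```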
@@ -11,15 +11,23 @@ use super::{FileTrait, Uploading};

/// Description of an existing, regular file
///
/// - impl Debug, Clone, Hash for `clap` compatibility
/// - impl Clone for `clap` compatibility
/// - impl serde for appstate caching
/// - impl Ord to handle multiple files given
#[derive(Debug, Clone, Hash, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
/// - impl PartialEq..Ord to handle multiple files given
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct Checked {
    /// canonical path to a regular file
    pub(super) path: PathBuf,
    /// size of that file
    pub(super) size: u64,
    /// hash of that file
    pub(super) hash: Option<String>,
}

impl AsRef<[u8]> for Checked {
    fn as_ref(&self) -> &[u8] {
        self.path.as_os_str().as_encoded_bytes()
    }
}

impl Checked {
@@ -35,6 +43,7 @@ impl Checked {
            Ok(Self {
                path: fs::canonicalize(&value)?,
                size: meta.len(),
                hash: None,
            })
        } else {
            Err(io::Error::new(
@@ -44,6 +53,19 @@ impl Checked {
        }
    }

    pub fn hash(&mut self, f: impl Fn(u64)) -> io::Result<()> {
        if self.hash.is_some() {
            return Err(io::Error::other(format!(
                "file {:?} is already hashed!",
                self.path.display()
            )));
        }

        self.hash = Some(super::compute_file_hash(&self.path, self.size, f)?);

        Ok(())
    }

    /// start uploading this file
    ///
    /// - tries to create a new entry in a share
@@ -62,7 +84,7 @@ impl Checked {
    ) -> error::Result<Uploading> {
        let file_id = client.file_create(uri, alias_id, share_id, &self)?;

        Ok(Uploading::new(self.path, self.size, file_id))
        Ok(Uploading::new(self.path, self.size, self.hash, file_id))
    }
}
@@ -78,4 +100,8 @@ impl<'t> FileTrait<'t> for Checked {
    fn get_size(&self) -> u64 {
        self.size
    }

    fn check_hash(&self, on_progress: impl Fn(u64)) -> io::Result<bool> {
        super::check_file_hash(&self.path, self.size, self.hash.as_ref(), on_progress)
    }
}
@@ -2,12 +2,66 @@ mod checked;
mod chunk;
mod uploading;

use std::{ffi::OsStr, path::Path};
use std::{
    ffi::OsStr,
    fs,
    io::{self, Read},
    path::Path,
};

use base64ct::{Base64, Encoding};
use blake2b_simd::Params as Blake2b;

pub use checked::Checked;
pub use chunk::Chunk;
use log::debug;
pub use uploading::Uploading;

fn compute_file_hash(path: &Path, size: u64, on_progress: impl Fn(u64)) -> io::Result<String> {
    let mut file = fs::File::open(path)?;
    let mut hasher = Blake2b::new().hash_length(64).to_state();

    let mut buf = vec![0u8; 4 * 1024 * 1024];
    let mut bytes_read = 0;

    loop {
        let n = file.read(&mut buf)?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);

        bytes_read += n as u64;
        on_progress(n as u64);
    }

    if bytes_read != size {
        return Err(io::Error::other(format!(
            "Hashed {bytes_read:?} bytes, known file size {size:?}!"
        )));
    }

    let result = Base64::encode_string(hasher.finalize().as_bytes());
    debug!("hashed {:?}: {result:?}", path.display());
    Ok(result)
}

fn check_file_hash(
    path: &Path,
    size: u64,
    hash: Option<&String>,
    on_progress: impl Fn(u64),
) -> io::Result<bool> {
    let Some(hash) = hash else {
        debug!("no hash to check for {:?}!", path.display());
        return Ok(false);
    };

    let result = *hash == compute_file_hash(path, size, on_progress)?;
    debug!("matches {:?}: {result:?}", *hash);
    Ok(result)
}

pub trait FileTrait<'t> {
    /// extract the filename part of a `Path` reference
    ///
@@ -25,4 +79,6 @@ pub trait FileTrait<'t> {

    /// get the file's size
    fn get_size(&self) -> u64;

    fn check_hash(&self, on_progress: impl Fn(u64)) -> io::Result<bool>;
}
@@ -11,8 +11,12 @@ use super::{Checked, Chunk, FileTrait};

#[derive(Serialize, Deserialize, Debug)]
pub struct Uploading {
    /// canonical path to a regular file
    path: PathBuf,
    /// size of that file
    size: u64,
    /// hash of that file
    hash: Option<String>,
    file_id: String,
    #[serde(skip)]
    last_offset: Option<u64>,
@@ -20,10 +24,11 @@ pub struct Uploading {
}

impl Uploading {
    pub(super) fn new(path: PathBuf, size: u64, file_id: String) -> Self {
    pub(super) fn new(path: PathBuf, size: u64, hash: Option<String>, file_id: String) -> Self {
        Self {
            path,
            size,
            hash,
            file_id,
            last_offset: None,
            offset: 0,
@@ -79,6 +84,7 @@ impl Uploading {
        Checked {
            path: self.path,
            size: self.size,
            hash: self.hash,
        }
    }
}
@@ -94,4 +100,8 @@ impl<'t> FileTrait<'t> for Uploading {
    fn get_size(&self) -> u64 {
        self.size
    }

    fn check_hash(&self, on_progress: impl Fn(u64)) -> io::Result<bool> {
        super::check_file_hash(&self.path, self.size, self.hash.as_ref(), on_progress)
    }
}
@@ -2,6 +2,7 @@ use std::{fmt, process, sync::LazyLock};

use console::{StyledObject, style};
use dialoguer::{Select, theme::ColorfulTheme};
use indicatif::{ProgressBar, ProgressStyle};
use log::{error, info};

type StaticStyled<'t> = LazyLock<StyledObject<&'t str>>;
@@ -42,6 +43,22 @@
    strs.iter().map(|&s| f(style(s)).to_string()).collect()
}

#[must_use]
#[allow(clippy::missing_panics_doc)]
pub fn new_progressbar() -> ProgressBar {
    ProgressBar::hidden().with_style(
        ProgressStyle::with_template(&format!(
            concat!(
                "{{bar:50.cyan/blue}} {{msg:.magenta}}: ",
                "{{binary_bytes:.yellow}}{}{{binary_total_bytes:.yellow}} ",
                "({{eta}})",
            ),
            style("/").magenta(),
        ))
        .expect("invalid style template"),
    )
}

pub enum Log {}

impl Log {
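For context, this is roughly how the shared `new_progressbar()` helper is driven elsewhere in this diff (see `src/cachefile.rs`): it starts hidden, is pointed at stderr, given a total length, and ticked as bytes are processed. A hedged usage sketch; the byte counts and message are placeholders:

```rust
use std::time::Duration;

use indicatif::ProgressDrawTarget;

use crate::output::new_progressbar;

fn demo() {
    let bar = new_progressbar(); // helper added in this diff, starts hidden
    bar.set_draw_target(ProgressDrawTarget::stderr()); // make it visible on stderr
    bar.set_length(8 * 1024 * 1024); // placeholder total size in bytes
    bar.enable_steady_tick(Duration::from_millis(50));

    bar.set_message("hashing \"example.bin\"");
    for _ in 0..8 {
        bar.inc(1024 * 1024); // advance as chunks are read
    }
    bar.finish_with_message("finished hashing files");
}
```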
@@ -3,28 +3,28 @@ use std::fmt;
use log::trace;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, Hash)]
pub struct Uri {
    protocol: String,
    base_url: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Uri(String);

impl fmt::Display for Uri {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}://{}", self.protocol, self.base_url)
        f.write_str(&self.0)
    }
}

impl AsRef<[u8]> for Uri {
    fn as_ref(&self) -> &[u8] {
        self.0.as_bytes()
    }
}

impl Uri {
    pub fn new(protocol: impl Into<String>, base_url: impl Into<String>) -> Self {
        Self {
            protocol: protocol.into(),
            base_url: base_url.into(),
        }
    pub fn new(protocol: impl fmt::Display, base_url: impl fmt::Display) -> Self {
        Self(format!("{protocol}://{base_url}"))
    }

    fn endpoint(&self, path: fmt::Arguments) -> String {
        let uri = format!("{}://{}/api/v2/{path}", self.protocol, self.base_url);
        let uri = format!("{}/api/v2/{path}", self.0);
        trace!("endpoint: {uri:?}");
        uri
    }
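The `Uri` rewrite above collapses the old two-field struct into a newtype around one pre-formatted string. A self-contained toy mirroring that shape; the host name is a placeholder:

```rust
use std::fmt;

struct Uri(String);

impl Uri {
    fn new(protocol: impl fmt::Display, base_url: impl fmt::Display) -> Self {
        // scheme and host are joined once, up front
        Self(format!("{protocol}://{base_url}"))
    }

    fn endpoint(&self, path: fmt::Arguments) -> String {
        // every API route hangs off the same /api/v2 prefix
        format!("{}/api/v2/{path}", self.0)
    }
}

fn main() {
    let uri = Uri::new("https", "sharry.example.com");
    assert_eq!(
        uri.endpoint(format_args!("alias/upload")),
        "https://sharry.example.com/api/v2/alias/upload"
    );
    println!("{}", uri.0);
}
```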