Compare commits
106 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 117faee583 | |||
| b77ab83ae0 | |||
| f03bcb46b6 | |||
| 3f5fa86f3a | |||
| 7e4bd398c2 | |||
| 0efde0e134 | |||
| b9e553f112 | |||
| fada53044d | |||
| 46913e93b9 | |||
| 470ebc4305 | |||
| 2cc13f24e7 | |||
| d55684096f | |||
| 0d68d139d5 | |||
| bc4c15d17c | |||
| c10d86ff65 | |||
| 3f00e08422 | |||
| 357f455ec0 | |||
| e3fc06b019 | |||
| 2315c9cd2e | |||
| c7b24b1250 | |||
| d8c48b74ca | |||
| d37797d2ec | |||
| e0c5b5517f | |||
| 087cef5d6f | |||
| b17f239801 | |||
| 9de8f948dc | |||
| 5adbf8cb38 | |||
| 3258b8fb74 | |||
| c9c21aa128 | |||
| f1c6eb5d75 | |||
| f2b063ba85 | |||
| 4c88ea7291 | |||
| 465c857126 | |||
| 1f9c247439 | |||
| e391821fe5 | |||
| 9000c8a46b | |||
| d1d7b55585 | |||
| 78fb04403c | |||
| 0b8bebad6e | |||
| 11a5106473 | |||
| ea5ef1fa10 | |||
| 2bcbc0d71c | |||
| 97e9e41117 | |||
| 6e553cc185 | |||
| ece742a1e3 | |||
| 1e05155cc1 | |||
| 005c5f7cfa | |||
| 14e1bed708 | |||
| 72e9a5d40f | |||
| a8ff354a51 | |||
| 686e0c3e5c | |||
| 5b403ea129 | |||
| 540953e4a9 | |||
| c734355ebb | |||
| 393feec125 | |||
| df055fc4e9 | |||
| 865566ad0c | |||
| 3b46d228f4 | |||
| 79bc8e67a7 | |||
| 4eb0627a5f | |||
| a4bef827d1 | |||
| a633f4e228 | |||
| 01bcf92d9c | |||
| 783346c888 | |||
| de07d556a2 | |||
| 6ca3e6c9dd | |||
| 4bf18631d4 | |||
| 56223b3ae8 | |||
| 205af655e5 | |||
| 10bb4feef5 | |||
| e257d1cf8c | |||
| 5b018cf84b | |||
| 876869b073 | |||
| e2151b592c | |||
| 30855ed8ff | |||
| 0a8e5cf3f0 | |||
| f05e112040 | |||
| 5556a658f5 | |||
| 09d22a0ad9 | |||
| f77acc1afd | |||
| 4b650fd82f | |||
| 9f1e0cfc6c | |||
| b9a0e1eeb0 | |||
| 2edc690331 | |||
| ed10f269c8 | |||
| fb06725f05 | |||
| 67da081ef9 | |||
| 9b1f7f872c | |||
| 592e7bf76e | |||
| 0993679641 | |||
| dc2a330d58 | |||
| d607380659 | |||
| 69bef4e994 | |||
| c9528a9ac1 | |||
| 09af480379 | |||
| 90cecd015e | |||
| 51ecab41bb | |||
| 5b6fa3eaf7 | |||
| e208df9711 | |||
| e908c09beb | |||
| 3dc39edcab | |||
| e98001f4a4 | |||
| 39560eeeed | |||
| faea74241d | |||
| 8537684656 | |||
| e746256fdc |
31 changed files with 1811 additions and 759 deletions
|
|
@ -1,2 +1,9 @@
|
||||||
[build]
|
[build]
|
||||||
target = "x86_64-unknown-linux-musl"
|
target = "x86_64-unknown-linux-musl"
|
||||||
|
|
||||||
|
# rustflags = [
|
||||||
|
# # emit instructions tuned to the current CPU
|
||||||
|
# "-C", "target-cpu=native",
|
||||||
|
# # assume CPU features
|
||||||
|
# "-C", "target-feature=+avx2,+sse4.1,+ssse3,+aes",
|
||||||
|
# ]
|
||||||
|
|
@ -15,7 +15,7 @@
|
||||||
"ghcr.io/devcontainers/features/rust:1": {
|
"ghcr.io/devcontainers/features/rust:1": {
|
||||||
"targets": "x86_64-unknown-linux-musl"
|
"targets": "x86_64-unknown-linux-musl"
|
||||||
},
|
},
|
||||||
"ghcr.io/devcontainers-contrib/features/apt-get-packages:1": {
|
"ghcr.io/devcontainers-extra/features/apt-get-packages:1": {
|
||||||
"packages": "git-flow, musl-tools"
|
"packages": "git-flow, musl-tools"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
@ -42,7 +42,8 @@
|
||||||
"terminal.integrated.defaultProfile.linux": "zsh"
|
"terminal.integrated.defaultProfile.linux": "zsh"
|
||||||
},
|
},
|
||||||
"extensions": [
|
"extensions": [
|
||||||
"mhutchie.git-graph"
|
"mhutchie.git-graph",
|
||||||
|
"Gruntfuggly.todo-tree"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
|
||||||
1
.vscode/launch.json
vendored
1
.vscode/launch.json
vendored
|
|
@ -22,6 +22,7 @@
|
||||||
"args": [
|
"args": [
|
||||||
"sharry.yavook.de",
|
"sharry.yavook.de",
|
||||||
"G7RYoWME1W7-pcgipemJcr8-39FcMd92gBu-RgufeHc51z6",
|
"G7RYoWME1W7-pcgipemJcr8-39FcMd92gBu-RgufeHc51z6",
|
||||||
|
"/lib/x86_64-linux-gnu/libLLVM-14.so",
|
||||||
"/lib/x86_64-linux-gnu/liblldb-14.so.1",
|
"/lib/x86_64-linux-gnu/liblldb-14.so.1",
|
||||||
],
|
],
|
||||||
"env": {
|
"env": {
|
||||||
|
|
|
||||||
31
.vscode/settings.json
vendored
31
.vscode/settings.json
vendored
|
|
@ -12,6 +12,37 @@
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"rust-analyzer.imports.prefix": "plain",
|
"rust-analyzer.imports.prefix": "plain",
|
||||||
|
"todo-tree.general.tags": [
|
||||||
|
"BUG",
|
||||||
|
"HACK",
|
||||||
|
"FIXME",
|
||||||
|
"TODO",
|
||||||
|
"XXX",
|
||||||
|
"[ ]",
|
||||||
|
"[x]",
|
||||||
|
"BOOKMARK"
|
||||||
|
],
|
||||||
|
"todo-tree.highlights.backgroundColourScheme": [
|
||||||
|
"red",
|
||||||
|
"orange",
|
||||||
|
"yellow",
|
||||||
|
"green",
|
||||||
|
"blue",
|
||||||
|
"indigo",
|
||||||
|
"violet",
|
||||||
|
"yellow"
|
||||||
|
],
|
||||||
|
"todo-tree.highlights.foregroundColourScheme": [
|
||||||
|
"white",
|
||||||
|
"black",
|
||||||
|
"black",
|
||||||
|
"white",
|
||||||
|
"white",
|
||||||
|
"white",
|
||||||
|
"black",
|
||||||
|
"black"
|
||||||
|
],
|
||||||
|
"todo-tree.highlights.useColourScheme": true,
|
||||||
// // override the default setting (`cargo check --all-targets`) which produces the following error
|
// // override the default setting (`cargo check --all-targets`) which produces the following error
|
||||||
// // "can't find crate for `test`" when the default compilation target is a no_std target
|
// // "can't find crate for `test`" when the default compilation target is a no_std target
|
||||||
// "rust-analyzer.checkOnSave.allTargets": false,
|
// "rust-analyzer.checkOnSave.allTargets": false,
|
||||||
|
|
|
||||||
51
.vscode/tasks.json
vendored
51
.vscode/tasks.json
vendored
|
|
@ -5,9 +5,9 @@
|
||||||
"label": "Build Project",
|
"label": "Build Project",
|
||||||
"type": "cargo",
|
"type": "cargo",
|
||||||
"command": "build",
|
"command": "build",
|
||||||
"presentation": {
|
// "presentation": {
|
||||||
"reveal": "silent"
|
// "reveal": "silent"
|
||||||
},
|
// },
|
||||||
"problemMatcher": "$rustc",
|
"problemMatcher": "$rustc",
|
||||||
"group": "build"
|
"group": "build"
|
||||||
},
|
},
|
||||||
|
|
@ -32,6 +32,9 @@
|
||||||
"command": "clippy",
|
"command": "clippy",
|
||||||
"args": [
|
"args": [
|
||||||
"--fix",
|
"--fix",
|
||||||
|
"--lib",
|
||||||
|
"--bin",
|
||||||
|
"shrupl",
|
||||||
"--allow-dirty",
|
"--allow-dirty",
|
||||||
"--allow-staged",
|
"--allow-staged",
|
||||||
"--",
|
"--",
|
||||||
|
|
@ -40,16 +43,16 @@
|
||||||
"problemMatcher": "$rustc",
|
"problemMatcher": "$rustc",
|
||||||
"group": "build"
|
"group": "build"
|
||||||
},
|
},
|
||||||
// {
|
{
|
||||||
// "label": "Run Unit Tests",
|
"label": "Run Unit Tests",
|
||||||
// "type": "cargo",
|
"type": "cargo",
|
||||||
// "command": "test",
|
"command": "test",
|
||||||
// "args": [
|
"args": [
|
||||||
// "--lib"
|
"--lib"
|
||||||
// ],
|
],
|
||||||
// "problemMatcher": "$rustc",
|
"problemMatcher": "$rustc",
|
||||||
// "group": "test"
|
"group": "test"
|
||||||
// },
|
},
|
||||||
// {
|
// {
|
||||||
// "label": "Run Integration Tests",
|
// "label": "Run Integration Tests",
|
||||||
// "type": "cargo",
|
// "type": "cargo",
|
||||||
|
|
@ -61,16 +64,16 @@
|
||||||
// "problemMatcher": "$rustc",
|
// "problemMatcher": "$rustc",
|
||||||
// "group": "test"
|
// "group": "test"
|
||||||
// },
|
// },
|
||||||
// {
|
{
|
||||||
// "label": "Run All Tests",
|
"label": "Run All Tests",
|
||||||
// "type": "shell",
|
"type": "shell",
|
||||||
// "command": "echo All Tests successful!",
|
"command": "echo All Tests successful!",
|
||||||
// "dependsOn": [
|
"dependsOn": [
|
||||||
// "Run Unit Tests",
|
"Run Unit Tests",
|
||||||
// "Run Integration Tests"
|
"Run Integration Tests"
|
||||||
// ],
|
],
|
||||||
// "dependsOrder": "sequence",
|
"dependsOrder": "sequence",
|
||||||
// "group": "test"
|
"group": "test"
|
||||||
// }
|
}
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
59
Cargo.lock
generated
59
Cargo.lock
generated
|
|
@ -67,6 +67,18 @@ dependencies = [
|
||||||
"windows-sys 0.59.0",
|
"windows-sys 0.59.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "arrayref"
|
||||||
|
version = "0.3.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "arrayvec"
|
||||||
|
version = "0.7.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "base64"
|
name = "base64"
|
||||||
version = "0.22.1"
|
version = "0.22.1"
|
||||||
|
|
@ -79,6 +91,17 @@ version = "2.9.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
|
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "blake2b_simd"
|
||||||
|
version = "1.0.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "06e903a20b159e944f91ec8499fe1e55651480c541ea0a584f5d967c49ad9d99"
|
||||||
|
dependencies = [
|
||||||
|
"arrayref",
|
||||||
|
"arrayvec",
|
||||||
|
"constant_time_eq",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bumpalo"
|
name = "bumpalo"
|
||||||
version = "3.17.0"
|
version = "3.17.0"
|
||||||
|
|
@ -171,6 +194,12 @@ dependencies = [
|
||||||
"windows-sys 0.59.0",
|
"windows-sys 0.59.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "constant_time_eq"
|
||||||
|
version = "0.3.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cookie"
|
name = "cookie"
|
||||||
version = "0.18.1"
|
version = "0.18.1"
|
||||||
|
|
@ -236,7 +265,7 @@ checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"console",
|
"console",
|
||||||
"shell-words",
|
"shell-words",
|
||||||
"thiserror",
|
"thiserror 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -702,7 +731,7 @@ checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"getrandom",
|
"getrandom",
|
||||||
"libredox",
|
"libredox",
|
||||||
"thiserror",
|
"thiserror 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -846,6 +875,8 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||||
name = "shrupl"
|
name = "shrupl"
|
||||||
version = "0.1.0-alpha"
|
version = "0.1.0-alpha"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"base64",
|
||||||
|
"blake2b_simd",
|
||||||
"clap",
|
"clap",
|
||||||
"console",
|
"console",
|
||||||
"ctrlc",
|
"ctrlc",
|
||||||
|
|
@ -854,8 +885,10 @@ dependencies = [
|
||||||
"env_logger",
|
"env_logger",
|
||||||
"indicatif",
|
"indicatif",
|
||||||
"log",
|
"log",
|
||||||
|
"regex",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
|
"thiserror 2.0.12",
|
||||||
"ureq",
|
"ureq",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
@ -911,7 +944,16 @@ version = "1.0.69"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
|
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"thiserror-impl",
|
"thiserror-impl 1.0.69",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "thiserror"
|
||||||
|
version = "2.0.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
|
||||||
|
dependencies = [
|
||||||
|
"thiserror-impl 2.0.12",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -925,6 +967,17 @@ dependencies = [
|
||||||
"syn",
|
"syn",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "thiserror-impl"
|
||||||
|
version = "2.0.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "time"
|
name = "time"
|
||||||
version = "0.3.41"
|
version = "0.3.41"
|
||||||
|
|
|
||||||
10
Cargo.toml
10
Cargo.toml
|
|
@ -5,6 +5,8 @@ edition = "2024"
|
||||||
description = "ShrUpl is a tool to upload files to a Sharry Instance through a public Alias, leveraging the tus protocol"
|
description = "ShrUpl is a tool to upload files to a Sharry Instance through a public Alias, leveraging the tus protocol"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
base64 = { version = "0.22.1", default-features = false }
|
||||||
|
blake2b_simd = "1.0.3"
|
||||||
clap = { version = "4.5.38", features = ["derive"] }
|
clap = { version = "4.5.38", features = ["derive"] }
|
||||||
console = { version = "0.15.11", default-features = false }
|
console = { version = "0.15.11", default-features = false }
|
||||||
ctrlc = { version = "3.4.7", features = ["termination"] }
|
ctrlc = { version = "3.4.7", features = ["termination"] }
|
||||||
|
|
@ -13,10 +15,18 @@ dirs-next = "2.0.0"
|
||||||
env_logger = "0.11.8"
|
env_logger = "0.11.8"
|
||||||
indicatif = { version = "0.17.11", default-features = false }
|
indicatif = { version = "0.17.11", default-features = false }
|
||||||
log = "0.4.27"
|
log = "0.4.27"
|
||||||
|
regex = "1.11.1"
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
serde_json = "1.0.140"
|
serde_json = "1.0.140"
|
||||||
|
thiserror = "2.0.12"
|
||||||
ureq = { version = "3.0.11", features = ["json"] }
|
ureq = { version = "3.0.11", features = ["json"] }
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
|
# Optimize for speed even more aggressively
|
||||||
|
opt-level = "z"
|
||||||
|
# better inlining
|
||||||
|
codegen-units = 1
|
||||||
|
# linker‐time optimization
|
||||||
lto = true
|
lto = true
|
||||||
|
debug = false
|
||||||
panic = "abort"
|
panic = "abort"
|
||||||
|
|
|
||||||
|
|
@ -1,87 +0,0 @@
|
||||||
Diese Konversation wurde mit Duck.ai (https://duck.ai) unter Verwendung des o3-mini-Modells von OpenAI generiert. KI-Chats zeigen möglicherweise falsche oder anstößige Informationen an (weitere Informationen siehe https://duckduckgo.com/duckai/privacy-terms).
|
|
||||||
|
|
||||||
====================
|
|
||||||
|
|
||||||
Benutzereingabe 1 von 2 - 5.6.2025, 02:27:36:
|
|
||||||
in Rust, what can I use to display multiple progress bars in the terminal?
|
|
||||||
|
|
||||||
o3-mini:
|
|
||||||
You can use the indicatif crate, which provides a MultiProgress type that lets you manage and display multiple progress bars concurrently in a terminal.
|
|
||||||
|
|
||||||
Below is a simple example of how to use indicatif’s MultiProgress to display multiple progress bars:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
|
|
||||||
use std::{thread, time::Duration};
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
// Create a MultiProgress container.
|
|
||||||
let m = MultiProgress::new();
|
|
||||||
|
|
||||||
// Create several progress bars.
|
|
||||||
let pb1 = m.add(ProgressBar::new(100));
|
|
||||||
pb1.set_style(
|
|
||||||
ProgressStyle::default_bar()
|
|
||||||
.template("{msg} {bar:40.cyan/blue} {pos:>3}/{len:3}")
|
|
||||||
.expect("Failed to set progress bar style"),
|
|
||||||
);
|
|
||||||
pb1.set_message("Downloading");
|
|
||||||
|
|
||||||
let pb2 = m.add(ProgressBar::new(50));
|
|
||||||
pb2.set_style(
|
|
||||||
ProgressStyle::default_bar()
|
|
||||||
.template("{msg} {bar:40.green/black} {pos:>3}/{len:3}")
|
|
||||||
.expect("Failed to set progress bar style"),
|
|
||||||
);
|
|
||||||
pb2.set_message("Processing");
|
|
||||||
|
|
||||||
// Optionally, spawn threads to update the progress bars concurrently.
|
|
||||||
let h1 = std::thread::spawn(move || {
|
|
||||||
for i in 0..=100 {
|
|
||||||
pb1.set_position(i);
|
|
||||||
thread::sleep(Duration::from_millis(50));
|
|
||||||
}
|
|
||||||
pb1.finish_with_message("Download complete");
|
|
||||||
});
|
|
||||||
|
|
||||||
let h2 = std::thread::spawn(move || {
|
|
||||||
for i in 0..=50 {
|
|
||||||
pb2.set_position(i);
|
|
||||||
thread::sleep(Duration::from_millis(100));
|
|
||||||
}
|
|
||||||
pb2.finish_with_message("Processing complete");
|
|
||||||
});
|
|
||||||
|
|
||||||
// Wait for threads to complete. MultiProgress takes care of rendering.
|
|
||||||
h1.join().expect("Thread 1 panicked");
|
|
||||||
h2.join().expect("Thread 2 panicked");
|
|
||||||
|
|
||||||
// You may also iterate on the multi-progress which blocks until all bars finish:
|
|
||||||
// m.join().unwrap();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Steps Explained
|
|
||||||
|
|
||||||
1. **MultiProgress Container**:
|
|
||||||
Create a `MultiProgress` instance which is used to manage multiple progress bars.
|
|
||||||
|
|
||||||
2. **Progress Bars**:
|
|
||||||
Create individual progress bars using `MultiProgress::add()`. You can customize each progress bar with a message, style, and length.
|
|
||||||
|
|
||||||
3. **Concurrent Updates**:
|
|
||||||
The example spawns two threads – one for each progress bar. Each thread updates its progress bar independently using `set_position()` and finishes the progress bar with `finish_with_message()`.
|
|
||||||
|
|
||||||
4. **Thread Joining**:
|
|
||||||
Ensure the main thread waits for the worker threads to complete to prevent the program from exiting prematurely.
|
|
||||||
|
|
||||||
### Installation
|
|
||||||
|
|
||||||
Make sure to add indicatif to your Cargo.toml:
|
|
||||||
|
|
||||||
```toml
|
|
||||||
[dependencies]
|
|
||||||
indicatif = "0.17" # or the latest version available on crates.io
|
|
||||||
```
|
|
||||||
|
|
||||||
This crate provides many options for customizing and using progress bars in a concurrent and effective manner. The documentation is available at [indicatif on docs.rs](https://docs.rs/indicatif) for further details and advanced usage.
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
outline of sharry uploading
|
# Outline of sharry upload API
|
||||||
|
|
||||||
1. POST to "new" route
|
1. POST to "new" route
|
||||||
- uri: https://sharry.yavook.de/api/v2/alias/upload/new
|
- uri: https://sharry.yavook.de/api/v2/alias/upload/new
|
||||||
|
|
@ -32,6 +32,16 @@ outline of sharry uploading
|
||||||
- res.status == 200
|
- res.status == 200
|
||||||
- res_json.success, res_json.message
|
- res_json.success, res_json.message
|
||||||
|
|
||||||
hints
|
|
||||||
|
|
||||||
- https://stackoverflow.com/questions/59586787/rust-how-to-do-http-put-of-large-files
|
# Links
|
||||||
|
|
||||||
|
- yvk repo: https://code.yavook.de/jmm/shrupl
|
||||||
|
- sharry issue: https://github.com/eikek/sharry/issues/1659
|
||||||
|
|
||||||
|
|
||||||
|
# Ideas
|
||||||
|
|
||||||
|
- cli functions
|
||||||
|
- "continue" and "new" flags to avoid user interaction
|
||||||
|
- "quiet" flag to disable output entirely
|
||||||
|
- some switch to change log to "pretty-print"
|
||||||
260
src/appstate.rs
260
src/appstate.rs
|
|
@ -1,185 +1,153 @@
|
||||||
use std::{
|
use std::{fmt, io, time::Duration};
|
||||||
collections::VecDeque,
|
|
||||||
fs,
|
|
||||||
io::{self, Write},
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
};
|
|
||||||
|
|
||||||
use console::style;
|
use indicatif::ProgressBar;
|
||||||
use indicatif::{ProgressBar, ProgressStyle};
|
use log::{debug, warn};
|
||||||
use log::{debug, trace};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use super::{
|
use crate::{
|
||||||
|
cachefile::CacheFile,
|
||||||
cli::Cli,
|
cli::Cli,
|
||||||
sharry::{Alias, ChunkState, FileChecked, FileUploading, Share, UploadError},
|
file::{Chunk, FileTrait},
|
||||||
|
output::new_progressbar,
|
||||||
|
sharry::{Client, ShareID},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
|
||||||
pub struct AppState {
|
pub struct AppState {
|
||||||
#[serde(skip)]
|
|
||||||
file_name: PathBuf,
|
|
||||||
#[serde(skip)]
|
|
||||||
progress: Option<ProgressBar>,
|
progress: Option<ProgressBar>,
|
||||||
|
http: ureq::Agent,
|
||||||
alias: Alias,
|
inner: CacheFile,
|
||||||
share: Share,
|
|
||||||
files: VecDeque<FileState>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
impl fmt::Debug for AppState {
|
||||||
enum FileState {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
C(FileChecked),
|
f.debug_struct("AppState")
|
||||||
U(FileUploading),
|
.field("inner", &self.inner)
|
||||||
|
.finish_non_exhaustive()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FileState {
|
fn new_http(args: &Cli) -> ureq::Agent {
|
||||||
fn file_name(&self) -> &str {
|
ureq::Agent::config_builder()
|
||||||
match self {
|
.timeout_global(args.get_timeout())
|
||||||
FileState::C(checked) => checked.file_name(),
|
.build()
|
||||||
FileState::U(uploading) => uploading.file_name(),
|
.into()
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn new_share(args: &Cli) -> crate::Result<ShareID> {
|
||||||
|
new_http(args).share_create(&args.get_uri(), &args.alias, args.get_share_request())
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AppState {
|
impl AppState {
|
||||||
fn cache_dir() -> PathBuf {
|
fn new(http: ureq::Agent, inner: CacheFile) -> Self {
|
||||||
let dir_name = dirs_next::cache_dir()
|
|
||||||
.expect("could not determine cache directory")
|
|
||||||
.join("shrupl");
|
|
||||||
|
|
||||||
trace!("cachedir: {:?}", dir_name.display());
|
|
||||||
dir_name
|
|
||||||
}
|
|
||||||
|
|
||||||
fn cache_file(args: &Cli) -> PathBuf {
|
|
||||||
let file_name = Self::cache_dir().join(format!("{}.json", args.get_hash()));
|
|
||||||
|
|
||||||
trace!("cachefile: {:?}", file_name.display());
|
|
||||||
file_name
|
|
||||||
}
|
|
||||||
|
|
||||||
fn load(file_name: impl AsRef<Path>) -> io::Result<Self> {
|
|
||||||
let content = fs::read_to_string(file_name)?;
|
|
||||||
serde_json::from_str(&content).map_err(io::Error::other)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn try_resume(args: &Cli) -> Option<Self> {
|
|
||||||
let file_name = Self::cache_file(args);
|
|
||||||
|
|
||||||
Self::load(&file_name)
|
|
||||||
.inspect_err(|e| debug!("could not resume from {:?}: {e}", file_name.display()))
|
|
||||||
.map(|state| {
|
|
||||||
debug!("successfully loaded AppState");
|
|
||||||
|
|
||||||
Self {
|
Self {
|
||||||
file_name,
|
|
||||||
progress: None,
|
progress: None,
|
||||||
alias: state.alias,
|
http,
|
||||||
share: state.share,
|
inner,
|
||||||
files: state.files,
|
|
||||||
}
|
}
|
||||||
})
|
|
||||||
.ok()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn from_args(args: &Cli, http: &ureq::Agent) -> Result<Self, ureq::Error> {
|
pub fn try_resume(args: &Cli) -> crate::Result<Self> {
|
||||||
let file_name = Self::cache_file(args);
|
Ok(Self::new(new_http(args), CacheFile::try_resume(args)?))
|
||||||
let alias = args.get_alias();
|
|
||||||
|
|
||||||
let share = Share::create(http, &alias, args.get_share_request())?;
|
|
||||||
|
|
||||||
let files: VecDeque<_> = args.files.clone().into_iter().map(FileState::C).collect();
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
file_name,
|
|
||||||
progress: None,
|
|
||||||
alias,
|
|
||||||
share,
|
|
||||||
files,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn file_names(&self) -> Vec<&str> {
|
pub fn from_args(args: &Cli) -> crate::Result<Self> {
|
||||||
self.files.iter().map(FileState::file_name).collect()
|
Ok(Self::new(
|
||||||
}
|
new_http(args),
|
||||||
|
CacheFile::from_args(args, new_share)?,
|
||||||
pub fn upload_chunk(
|
|
||||||
&mut self,
|
|
||||||
http: &ureq::Agent,
|
|
||||||
chunk_size: usize,
|
|
||||||
) -> Result<Option<()>, UploadError> {
|
|
||||||
let uploading = match self.files.pop_front() {
|
|
||||||
Some(FileState::C(checked)) => checked
|
|
||||||
.start_upload(http, &self.alias, &self.share)
|
|
||||||
.unwrap(),
|
|
||||||
Some(FileState::U(uploading)) => uploading,
|
|
||||||
None => return Ok(None),
|
|
||||||
};
|
|
||||||
|
|
||||||
debug!("{uploading} chunk {chunk_size}");
|
|
||||||
|
|
||||||
let pb = match self.progress {
|
|
||||||
Some(ref pb) => pb,
|
|
||||||
None => {
|
|
||||||
self.progress = Some({
|
|
||||||
let pb = {
|
|
||||||
let ps = ProgressStyle::with_template(&format!(
|
|
||||||
"{{msg:.yellow}}: {{bar:50.cyan/blue}} {{binary_bytes:.magenta}}{}{{binary_total_bytes:.magenta}} ({{elapsed}})",
|
|
||||||
style("/").magenta(),
|
|
||||||
))
|
))
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
ProgressBar::new(uploading.size())
|
|
||||||
.with_style(ps)
|
|
||||||
.with_message(uploading.file_name().to_owned())
|
|
||||||
.with_position(uploading.offset())
|
|
||||||
};
|
|
||||||
pb.tick();
|
|
||||||
|
|
||||||
pb
|
|
||||||
});
|
|
||||||
self.progress.as_ref().unwrap()
|
|
||||||
}
|
}
|
||||||
};
|
|
||||||
pb.tick();
|
|
||||||
|
|
||||||
match uploading.upload_chunk(http, &self.alias, chunk_size) {
|
fn with_progressbar(&mut self, f: impl FnOnce(&ProgressBar), drop_bar: bool) {
|
||||||
ChunkState::Ok(upl) => {
|
let bar = &*self.progress.get_or_insert_with(new_progressbar);
|
||||||
pb.set_position(upl.offset());
|
|
||||||
self.files.push_front(FileState::U(upl));
|
if let Some(upl) = self.inner.peek_uploading() {
|
||||||
Ok(Some(()))
|
if bar.length().is_none() {
|
||||||
|
bar.set_length(upl.get_size());
|
||||||
|
bar.set_message(upl.get_name().to_string());
|
||||||
|
bar.enable_steady_tick(Duration::from_millis(100));
|
||||||
}
|
}
|
||||||
ChunkState::Err(upl, e) => {
|
|
||||||
self.files.push_front(FileState::U(upl));
|
bar.set_position(upl.get_offset());
|
||||||
Err(e)
|
// BUG in `indicatif` crate?
|
||||||
|
// `set_position` does not force an immediate redraw like e.g. `inc_length`
|
||||||
}
|
}
|
||||||
ChunkState::Finished(path) => {
|
|
||||||
debug!("Finished {:?}!", path.display());
|
f(bar);
|
||||||
pb.finish();
|
|
||||||
|
if drop_bar {
|
||||||
self.progress = None;
|
self.progress = None;
|
||||||
self.share.notify(http, &self.alias).unwrap();
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(self.files.front().map(drop))
|
fn touch_progressbar(&mut self) {
|
||||||
|
self.with_progressbar(|_| (), false);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn drop_progressbar(&mut self, f: impl FnOnce(&ProgressBar)) {
|
||||||
|
self.with_progressbar(f, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn next_chunk<'t>(&mut self, buffer: &'t mut [u8]) -> crate::Result<Option<Chunk<'t>>> {
|
||||||
|
if self.inner.get_uploading(&self.http)?.is_none() {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
self.touch_progressbar();
|
||||||
|
|
||||||
|
let uploading = self.inner.expect_uploading();
|
||||||
|
debug!("{uploading:?}");
|
||||||
|
|
||||||
|
let chunk = uploading.read(buffer)?;
|
||||||
|
debug!("{chunk:?}");
|
||||||
|
|
||||||
|
Ok(Some(chunk))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn upload_chunk(&mut self, buffer: &mut [u8]) -> crate::Result<bool> {
|
||||||
|
let Some(chunk) = self.next_chunk(buffer)? else {
|
||||||
|
self.inner
|
||||||
|
.share_notify(&self.http)
|
||||||
|
.unwrap_or_else(|e| warn!("Failed to notify the share: {e}"));
|
||||||
|
|
||||||
|
return Ok(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
self.inner.file_patch(&self.http, &chunk)?;
|
||||||
|
|
||||||
|
self.touch_progressbar();
|
||||||
|
|
||||||
|
if let Some(path) = self.inner.check_eof() {
|
||||||
|
debug!("Finished {:?}!", path.display());
|
||||||
|
self.drop_progressbar(ProgressBar::finish);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(self.inner.peek_uploading().is_none() && self.inner.queue().is_empty())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn rewind_chunk(mut self) -> Option<Self> {
|
||||||
|
self.inner = self.inner.rewind_chunk()?;
|
||||||
|
|
||||||
|
Some(self)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn abort_upload(&mut self) {
|
||||||
|
self.inner.abort_upload();
|
||||||
|
self.drop_progressbar(ProgressBar::abandon);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn rebuild_share(self, args: &Cli) -> crate::Result<Self> {
|
||||||
|
Ok(Self::new(self.http, CacheFile::from_args(args, new_share)?))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn save(&self) -> io::Result<()> {
|
pub fn save(&self) -> io::Result<()> {
|
||||||
fs::create_dir_all(Self::cache_dir())?;
|
self.inner.save()
|
||||||
|
|
||||||
let json = serde_json::to_string_pretty(self).map_err(io::Error::other)?;
|
|
||||||
let mut file = fs::File::create(&self.file_name)?;
|
|
||||||
file.write_all(json.as_bytes())?;
|
|
||||||
|
|
||||||
trace!("updated {:?}", self.file_name.display());
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn clear(self) -> io::Result<()> {
|
pub fn discard(self) -> io::Result<()> {
|
||||||
fs::remove_file(&self.file_name)?;
|
self.inner.discard()
|
||||||
|
}
|
||||||
|
|
||||||
trace!("removed {:?}", self.file_name.display());
|
pub fn clear_any(args: &Cli) {
|
||||||
Ok(())
|
CacheFile::clear_any(args);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
142
src/bin/shrupl.rs
Normal file
142
src/bin/shrupl.rs
Normal file
|
|
@ -0,0 +1,142 @@
|
||||||
|
use std::{
|
||||||
|
process,
|
||||||
|
sync::{
|
||||||
|
Arc,
|
||||||
|
atomic::{AtomicBool, Ordering},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
use clap::Parser;
|
||||||
|
use console::{StyledObject, style};
|
||||||
|
use log::{info, trace};
|
||||||
|
|
||||||
|
use shrupl::{
|
||||||
|
AppState, Cli,
|
||||||
|
output::{self, Log, SHRUPL},
|
||||||
|
};
|
||||||
|
|
||||||
|
fn main() {
    let args = Cli::parse();

    // Log level follows the -v count; RUST_LOG can still override via parse_default_env.
    env_logger::Builder::new()
        .filter_module("shrupl", args.get_level_filter())
        .parse_default_env()
        .init();

    info!("args: {args:#?}");

    println!("{} to {}!", style("Welcome").magenta().bold(), *SHRUPL);

    // Try to pick up a previously cached upload state matching these args.
    let resumed = AppState::try_resume(&args);

    // Ctrl-C only sets a flag; `check_ctrlc` is polled at safe points so the
    // process never aborts in the middle of a chunk.
    let check_ctrlc = {
        let stop = Arc::new(AtomicBool::new(false));
        let stop_ctrlc = stop.clone();

        ctrlc::set_handler(move || {
            stop_ctrlc.store(true, Ordering::SeqCst);
            eprintln!("{} stopping as soon as possible!", *SHRUPL);
        })
        .expect("Error setting Ctrl-C handler");

        move || {
            if stop.load(Ordering::SeqCst) {
                process::exit(255);
            }
        }
    };

    // Resume if possible and confirmed by the user; otherwise clear any stale
    // cache and build fresh state from the CLI args.
    let mut state = resumed
        .inspect_err(|e| {
            AppState::clear_any(&args);
            Log::handle(e);
            info!("could not resume from hash {:?}: {e}", args.get_hash());
        })
        .ok()
        .and_then(|state| output::prompt_continue().then_some(state))
        .unwrap_or_else(|| match AppState::from_args(&args) {
            Ok(state) => {
                // Persist immediately so a later run can resume.
                state.save().unwrap_or_else(|e| {
                    Log::warning(format_args!("Failed to save state: {e}"));
                });
                state
            }
            Err(e) => {
                Log::handle(&e);
                // NOTE(review): Log::error apparently diverges (never returns),
                // since this arm must not produce a state — confirm in `output`.
                Log::error(format_args!("Failed to create state: {e}"));
            }
        });

    check_ctrlc();

    info!("continuing with state: {state:#?}");

    let fns_magenta = output::style_all(&args.file_names(), StyledObject::magenta).join(", ");
    println!("{} is uploading: {fns_magenta}", *SHRUPL);

    // One reusable chunk buffer; chunk_size is given in MiB.
    let mut buffer = vec![0; args.chunk_size * 1024 * 1024];
    let mut tries = 0;

    loop {
        if !args.may_retry(tries) {
            Log::error("Retry limit reached!");
        }

        match state.upload_chunk(&mut buffer) {
            Err(e) => {
                Log::handle(&e);

                if let Some(p) = e.get_invalid_param() {
                    match p {
                        // Error 404 (File not found)
                        shrupl::Parameter::FileID(fid) => {
                            info!("retrying file {fid:?}");

                            // re-queue the current file from scratch
                            state.abort_upload();
                        }
                        // Error 404 (Share not found)
                        shrupl::Parameter::ShareID(sid) => {
                            output::prompt_rebuild_share();
                            info!("rebuilding share {sid:?}");

                            // rebuild share
                            let Ok(s) = state.rebuild_share(&args) else {
                                Log::error("Failed to rebuild share!");
                            };
                            state = s;
                        }
                        p => Log::error(format_args!("Unexpected {p}!")),
                    }
                } else {
                    // retry chunk
                    let Some(s) = state.rewind_chunk() else {
                        Log::error("Failed to retry chunk!");
                    };
                    tries += 1;

                    info!("State rewound, retrying last chunk (tries: {tries})");
                    state = s;
                }
            }
            Ok(false) => {
                // chunk accepted: reset the retry counter
                trace!("chunk uploaded");
                tries = 0;
            }
            Ok(true) => {
                info!("all uploads done");
                break;
            }
        }

        // Persist progress after every chunk so an interruption loses little work.
        state.save().unwrap_or_else(|e| {
            Log::warning(format_args!("Failed to save state: {e}"));
        });
        check_ctrlc();
    }

    // All files uploaded: the cached state is no longer needed.
    state.discard().unwrap_or_else(|e| {
        Log::warning(format_args!("Failed to remove state: {e}"));
    });

    println!("{} finished {}", *SHRUPL, style("successfully!").green());
}
|
||||||
223
src/cachefile.rs
Normal file
223
src/cachefile.rs
Normal file
|
|
@ -0,0 +1,223 @@
|
||||||
|
use std::{
|
||||||
|
collections::VecDeque,
|
||||||
|
fs,
|
||||||
|
io::{self, Write},
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
time::Duration,
|
||||||
|
};
|
||||||
|
|
||||||
|
use indicatif::ProgressBar;
|
||||||
|
use log::{info, trace};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
cli::Cli,
|
||||||
|
file::{self, Chunk, FileTrait},
|
||||||
|
output::new_progressbar,
|
||||||
|
sharry::{AliasID, Client, ShareID, Uri},
|
||||||
|
};
|
||||||
|
|
||||||
|
/// On-disk snapshot of an upload session, serialized as JSON in the cache dir.
#[derive(Serialize, Deserialize, Debug)]
pub struct CacheFile {
    /// not serialized: re-derived from the CLI args on load (see `try_resume`)
    #[serde(skip)]
    file_name: PathBuf,

    uri: Uri,
    alias_id: AliasID,
    share_id: ShareID,

    /// the file currently being uploaded, if any
    uploading: Option<file::Uploading>,
    /// remaining files, uploaded front-to-back
    files: VecDeque<file::Checked>,
}
|
||||||
|
|
||||||
|
impl CacheFile {
    /// Per-user cache directory for shrupl state files.
    ///
    /// Panics when the platform cache directory cannot be determined.
    fn cache_dir() -> PathBuf {
        let dir_name = dirs_next::cache_dir()
            .expect("could not determine cache directory")
            .join("shrupl");

        trace!("cachedir: {:?}", dir_name.display());
        dir_name
    }

    /// Path of the cache file for this invocation, keyed by the args hash.
    fn cache_file(args: &Cli) -> PathBuf {
        let file_name = Self::cache_dir().join(format!("{}.json", args.get_hash()));

        trace!("cachefile: {:?}", file_name.display());
        file_name
    }

    /// Try to load a previously saved state matching `args`.
    ///
    /// Unless hashing is disabled, the in-flight file and every queued file
    /// are re-hashed against their stored digests before the state is accepted.
    pub fn try_resume(args: &Cli) -> crate::Result<Self> {
        let file_name = Self::cache_file(args);

        let state: Self = {
            let file = fs::File::open(&file_name)?;
            let reader = io::BufReader::new(file);
            serde_json::from_reader(reader).map_err(io::Error::other)?
        };

        if args.should_hash() {
            // re-hash one file, reporting progress on the shared bar
            fn check_hash(file: &impl FileTrait, bar: &ProgressBar) -> crate::Result<()> {
                bar.set_message(format!("checking {:?}", file.get_name()));
                file.check_hash(|bytes| bar.inc(bytes))
            }

            info!("checking files in {state:?}");

            // BOOKMARK assumption: total file size < 2 EiB
            let total_size = {
                let upl_size = if let Some(upl) = state.peek_uploading() {
                    upl.get_size()
                } else {
                    0
                };
                upl_size + state.queue().iter().map(|&f| f.get_size()).sum::<u64>()
            };

            let bar = new_progressbar();
            bar.set_length(total_size);
            bar.enable_steady_tick(Duration::from_millis(50));

            if let Some(upl) = state.peek_uploading() {
                check_hash(upl, &bar)?;
            }

            for chk in state.queue() {
                check_hash(chk, &bar)?;
            }

            bar.finish_with_message("finished checking files");
        }

        // `file_name` is #[serde(skip)], so restore it from the args
        Ok(Self { file_name, ..state })
    }

    /// Build fresh state from the CLI args, creating a new share via `new_share`.
    ///
    /// Unless disabled, all files are hashed up front (with a progress bar).
    pub fn from_args(
        args: &Cli,
        new_share: impl FnOnce(&Cli) -> crate::Result<ShareID>,
    ) -> crate::Result<Self> {
        let mut files = args.files.clone();

        if args.should_hash() {
            info!("hashing files {files:?}");

            let bar = new_progressbar();
            // BOOKMARK assumption: total file size < 2 EiB
            bar.set_length(files.iter().map(FileTrait::get_size).sum());
            bar.enable_steady_tick(Duration::from_millis(50));

            for chk in &mut files {
                bar.set_message(format!("hashing {:?}", chk.get_name()));
                chk.hash(|bytes| bar.inc(bytes))?;
            }

            bar.finish_with_message("finished hashing files");
        }

        Ok(Self {
            file_name: Self::cache_file(args),
            uri: args.get_uri(),
            alias_id: args.alias.clone(),
            share_id: new_share(args)?,
            uploading: None,
            files: files.into(),
        })
    }

    /// References to the files still waiting to be uploaded, in queue order.
    pub fn queue(&self) -> Vec<&file::Checked> {
        self.files.iter().collect()
    }

    /// Current in-flight upload, starting the next queued file if necessary.
    ///
    /// Returns `Ok(None)` once the queue is exhausted and nothing is in flight.
    pub fn get_uploading(
        &mut self,
        client: &impl Client,
    ) -> crate::Result<Option<&mut file::Uploading>> {
        if self.uploading.is_some() {
            Ok(self.uploading.as_mut())
        } else if let Some(chk) = self.files.pop_front() {
            // register the next queued file with the server, then track it
            let upl = chk.start_upload(client, &self.uri, &self.alias_id, &self.share_id)?;
            self.uploading.replace(upl);

            Ok(self.uploading.as_mut())
        } else {
            Ok(None)
        }
    }

    /// The in-flight upload; panics when nothing is being uploaded.
    pub fn expect_uploading(&mut self) -> &mut file::Uploading {
        self.uploading
            .as_mut()
            .expect("expect_uploading called while not uploading")
    }

    /// The in-flight upload, without starting a new one.
    pub fn peek_uploading(&self) -> Option<&file::Uploading> {
        self.uploading.as_ref()
    }

    /// Drop the in-flight upload if it has reached end-of-file.
    ///
    /// Returns the finished file's path, or `None` when the upload is still
    /// in progress (or nothing was in flight).
    pub fn check_eof(&mut self) -> Option<PathBuf> {
        if let Some(upl) = self.uploading.take() {
            match upl.check_eof() {
                Ok(upl) => self.uploading = Some(upl),
                Err(p) => return Some(p),
            }
        }

        None
    }

    /// Step the in-flight upload back to the previous chunk for a retry.
    ///
    /// Returns `None` when rewinding is not possible (e.g. a second rewind);
    /// panics when nothing is being uploaded.
    pub fn rewind_chunk(mut self) -> Option<Self> {
        let upl = self
            .uploading
            .take()
            .expect("rewind_chunk called while not uploading");

        self.uploading = Some(upl.rewind()?);
        Some(self)
    }

    /// Abort the in-flight upload and put the file back at the front of the
    /// queue; panics when nothing is being uploaded.
    pub fn abort_upload(&mut self) {
        let upl = self
            .uploading
            .take()
            .expect("abort_upload called while not uploading");

        self.files.push_front(upl.abort());
    }

    /// Notify the server about this share (delegates to the client).
    pub fn share_notify(&self, client: &impl Client) -> crate::Result<()> {
        client.share_notify(&self.uri, &self.alias_id, &self.share_id)
    }

    /// Send one chunk of the current file to the server (delegates to the client).
    pub fn file_patch(&self, client: &impl Client, chunk: &Chunk) -> crate::Result<()> {
        client.file_patch(&self.uri, &self.alias_id, &self.share_id, chunk)
    }

    /// Persist this state as pretty-printed JSON to its cache file.
    pub fn save(&self) -> io::Result<()> {
        let cache_dir = self.file_name.parent().ok_or_else(|| {
            io::Error::other(format!("orphan file {:?}", self.file_name.display()))
        })?;
        fs::create_dir_all(cache_dir)?;

        let json = serde_json::to_string_pretty(self).map_err(io::Error::other)?;
        let mut file = fs::File::create(&self.file_name)?;
        file.write_all(json.as_bytes())?;

        trace!("updated {:?}", self.file_name.display());
        Ok(())
    }

    /// Delete the state file at `path`.
    fn remove(path: &Path) -> io::Result<()> {
        fs::remove_file(path)?;

        trace!("removed {:?}", path.display());
        Ok(())
    }

    /// Best-effort removal of any cached state matching `args`.
    pub fn clear_any(args: &Cli) {
        let _ = Self::remove(&Self::cache_file(args));
    }

    /// Consume this state and delete its cache file.
    pub fn discard(self) -> io::Result<()> {
        Self::remove(&self.file_name)
    }
}
|
||||||
134
src/cli.rs
134
src/cli.rs
|
|
@ -1,13 +1,16 @@
|
||||||
use std::{
|
use std::{convert::Infallible, fmt, io, time::Duration};
|
||||||
hash::{DefaultHasher, Hash, Hasher},
|
|
||||||
time::Duration,
|
use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD as BASE64URL};
|
||||||
|
use blake2b_simd::Params as Blake2b;
|
||||||
|
use clap::{Parser, builder::TypedValueParser, value_parser};
|
||||||
|
use log::LevelFilter;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
file::{Checked, FileTrait},
|
||||||
|
sharry::{AliasID, Uri, json::NewShareRequest},
|
||||||
};
|
};
|
||||||
|
|
||||||
use clap::{Parser, builder::PossibleValuesParser};
|
#[derive(Parser)]
|
||||||
|
|
||||||
use super::sharry::{Alias, FileChecked, NewShareRequest, Uri};
|
|
||||||
|
|
||||||
#[derive(Parser, Debug, Hash)]
|
|
||||||
#[command(version, about, long_about = None)]
|
#[command(version, about, long_about = None)]
|
||||||
pub struct Cli {
|
pub struct Cli {
|
||||||
/// Timeout in seconds for HTTP actions (set 0 or invalid to disable)
|
/// Timeout in seconds for HTTP actions (set 0 or invalid to disable)
|
||||||
|
|
@ -18,17 +21,13 @@ pub struct Cli {
|
||||||
)]
|
)]
|
||||||
timeout: Duration,
|
timeout: Duration,
|
||||||
|
|
||||||
/// Protocol for Sharry instance
|
/// Number of times actions are retried
|
||||||
#[arg(
|
#[arg(short, long, default_value_t = 5, value_name = "N")]
|
||||||
short, long,
|
retry_limit: u32,
|
||||||
default_value = "https", value_name = "VARIANT",
|
|
||||||
value_parser = PossibleValuesParser::new(["http", "https"]),
|
|
||||||
)]
|
|
||||||
protocol: String,
|
|
||||||
|
|
||||||
/// Name of the new share
|
/// Name of the new share
|
||||||
#[arg(short, long, default_value = "ShrUpl Upload", value_name = "TEXT")]
|
#[arg(short, long, default_value = "ShrUpl Upload", value_name = "TEXT")]
|
||||||
name: String,
|
share_name: String,
|
||||||
|
|
||||||
/// Description of the new share
|
/// Description of the new share
|
||||||
#[arg(short, long, value_name = "TEXT")]
|
#[arg(short, long, value_name = "TEXT")]
|
||||||
|
|
@ -39,52 +38,121 @@ pub struct Cli {
|
||||||
max_views: u32,
|
max_views: u32,
|
||||||
|
|
||||||
/// Chunk size for uploading, in MiB
|
/// Chunk size for uploading, in MiB
|
||||||
#[arg(short, long, default_value_t = 10, value_name = "N")]
|
#[arg(
|
||||||
|
short, long,
|
||||||
|
default_value_t = 4, value_name = "M",
|
||||||
|
value_parser = value_parser!(u32).range(1..).map(|s| s as usize),
|
||||||
|
)]
|
||||||
pub chunk_size: usize,
|
pub chunk_size: usize,
|
||||||
|
|
||||||
|
/// Don't hash files before uploading
|
||||||
|
#[arg(short, long)]
|
||||||
|
no_hash: bool,
|
||||||
|
|
||||||
|
/// Increase output verbosity
|
||||||
|
#[arg(short, long, action = clap::ArgAction::Count)]
|
||||||
|
verbose: u8,
|
||||||
|
|
||||||
/// Base URL for Sharry Instance
|
/// Base URL for Sharry Instance
|
||||||
url: String,
|
url: String,
|
||||||
|
|
||||||
/// ID of a public alias to use
|
/// ID of a public alias to use
|
||||||
alias: String,
|
pub alias: AliasID,
|
||||||
|
|
||||||
/// Files to upload to the new share
|
/// Files to upload to the new share
|
||||||
#[arg(value_name = "FILE", required = true, value_parser = parse_sharry_file)]
|
#[arg(value_name = "FILE", required = true, value_parser = parse_sharry_file)]
|
||||||
pub files: Vec<FileChecked>,
|
pub files: Vec<Checked>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_seconds(data: &str) -> Result<Duration, String> {
|
impl fmt::Debug for Cli {
    /// Manual `Debug`: report derived values (uri, timeout, share request,
    /// level filter, hash) alongside the raw fields for better diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Cli")
            .field("uri", &self.get_uri())
            .field("retry_limit", &self.retry_limit)
            .field("alias", &self.alias)
            .field("timeout", &self.get_timeout())
            .field("chunk_size", &self.chunk_size)
            .field("share_request", &self.get_share_request())
            .field("files", &self.files)
            .field("level_filter", &self.get_level_filter())
            .field("hash", &self.get_hash())
            .finish_non_exhaustive()
    }
}
|
||||||
|
|
||||||
|
fn parse_seconds(data: &str) -> Result<Duration, Infallible> {
|
||||||
data.parse().or(Ok(0)).map(Duration::from_secs)
|
data.parse().or(Ok(0)).map(Duration::from_secs)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_sharry_file(data: &str) -> Result<FileChecked, String> {
|
fn parse_sharry_file(data: &str) -> io::Result<Checked> {
|
||||||
FileChecked::new(data).map_err(|e| e.to_string())
|
Checked::new(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Collect references to `values` in ascending order, leaving `values` untouched.
fn sorted<T>(values: &[T]) -> Vec<&T>
where
    T: Ord,
{
    let mut ordered = Vec::with_capacity(values.len());
    ordered.extend(values.iter());
    ordered.sort_unstable();
    ordered
}
|
||||||
|
|
||||||
impl Cli {
|
impl Cli {
|
||||||
|
#[must_use]
|
||||||
pub fn get_timeout(&self) -> Option<Duration> {
|
pub fn get_timeout(&self) -> Option<Duration> {
|
||||||
(!self.timeout.is_zero()).then_some(self.timeout)
|
(!self.timeout.is_zero()).then_some(self.timeout)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_alias(&self) -> Alias {
|
#[must_use]
|
||||||
Alias::new(Uri::with_protocol(&self.protocol, &self.url), &self.alias)
|
pub fn get_uri(&self) -> Uri {
|
||||||
|
Uri::from(self.url.clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn may_retry(&self, tries: u32) -> bool {
|
||||||
|
match self.retry_limit {
|
||||||
|
0 => true,
|
||||||
|
limit => tries < limit,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn should_hash(&self) -> bool {
|
||||||
|
!self.no_hash
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
pub fn get_share_request(&self) -> NewShareRequest {
|
pub fn get_share_request(&self) -> NewShareRequest {
|
||||||
NewShareRequest::new(&self.name, self.description.as_ref(), self.max_views)
|
NewShareRequest::new(&self.share_name, self.description.as_ref(), self.max_views)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn get_level_filter(&self) -> LevelFilter {
|
||||||
|
match self.verbose {
|
||||||
|
0 => LevelFilter::Error,
|
||||||
|
1 => LevelFilter::Warn,
|
||||||
|
2 => LevelFilter::Info,
|
||||||
|
3 => LevelFilter::Debug,
|
||||||
|
_ => LevelFilter::Trace,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn file_names(&self) -> Vec<&str> {
|
||||||
|
self.files.iter().map(FileTrait::get_name).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
pub fn get_hash(&self) -> String {
|
pub fn get_hash(&self) -> String {
|
||||||
let file_refs = {
|
let mut hasher = Blake2b::new().hash_length(16).to_state();
|
||||||
let mut refs: Vec<_> = self.files.iter().collect();
|
|
||||||
refs.sort_unstable();
|
|
||||||
|
|
||||||
refs
|
hasher.update(self.get_uri().as_ref());
|
||||||
};
|
hasher.update(self.alias.as_ref());
|
||||||
|
|
||||||
let mut hasher = DefaultHasher::new();
|
for chk in sorted(&self.files) {
|
||||||
(self.get_alias(), file_refs).hash(&mut hasher);
|
hasher.update(chk.as_ref());
|
||||||
|
}
|
||||||
|
|
||||||
format!("{:x}", hasher.finish())
|
BASE64URL.encode(hasher.finalize())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
143
src/error.rs
Normal file
143
src/error.rs
Normal file
|
|
@ -0,0 +1,143 @@
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
|
use crate::sharry;
|
||||||
|
|
||||||
|
/// A request parameter the server rejected.
#[derive(Debug, thiserror::Error)]
pub enum Parameter {
    #[error("given URI {0:?}")]
    Uri(sharry::Uri),

    #[error("given Alias ID {0:?}")]
    AliasID(sharry::AliasID),

    #[error("stored Share ID {0:?}")]
    ShareID(sharry::ShareID),

    #[error("stored {0:?}")]
    FileID(sharry::FileID),
}
|
||||||
|
|
||||||
|
// a helper to generate all the `From<T> for Parameter` impls
macro_rules! impl_param_from {
    // $typ: the source type; $var: the enum‐variant name
    ( $( $typ:path => $var:ident ),* $(,)? ) => {
        $(
            impl From<$typ> for Parameter {
                fn from(value: $typ) -> Self {
                    Self::$var(value)
                }
            }
        )*
    };
}

// one `From` impl per sharry ID type, matching the enum variants above
impl_param_from! {
    sharry::Uri => Uri,
    sharry::AliasID => AliasID,
    sharry::ShareID => ShareID,
    sharry::FileID => FileID,
}
|
||||||
|
|
||||||
|
impl Parameter {
|
||||||
|
fn is_fatal(&self) -> bool {
|
||||||
|
matches!(self, Self::Uri(_) | Self::AliasID(_))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Top-level error type of this crate.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error(transparent)]
    StdIo(#[from] std::io::Error),

    #[error("Response error: {0}")]
    Response(String),

    #[error("Invalid {0}")]
    InvalidParameter(Parameter),

    #[error("Mismatch, expected {expected:?} but got {actual:?}")]
    Mismatch { expected: String, actual: String },

    #[error("Unknown error: {0}")]
    Unknown(String),
}
|
||||||
|
|
||||||
|
// a helper to generate all the `From<T> for Error` impls
macro_rules! impl_error_from {
    // $typ: the source type
    ( $( $typ:path ),* $(,)? ) => {
        $(
            // // implement for values
            // impl From<$typ> for Error {
            //     fn from(value: $typ) -> Self {
            //         Self::InvalidParameter(value.into())
            //     }
            // }

            // implement for references
            impl From<&$typ> for Error {
                fn from(value: &$typ) -> Self {
                    Self::InvalidParameter(value.clone().into())
                }
            }
        )*
    };
}

// `&T -> Error` for every sharry ID type, via `Parameter`
impl_error_from! {
    sharry::Uri,
    sharry::AliasID,
    sharry::ShareID,
    sharry::FileID,
}
|
||||||
|
|
||||||
|
/// Convert any printable value into an owned `String` (keeps call sites terse).
#[allow(clippy::needless_pass_by_value)]
fn into_string<T: ToString>(val: T) -> String {
    val.to_string()
}
|
||||||
|
|
||||||
|
impl Error {
    /// `Ok(())` when `actual == expected`, otherwise a `Response` error
    /// naming both status values.
    pub fn res_status_check<T>(actual: T, expected: T) -> Result<()>
    where
        T: PartialEq + fmt::Display + Copy,
    {
        if actual == expected {
            Ok(())
        } else {
            Err(Self::Response(format!(
                "unexpected status: {actual} (expected {expected})"
            )))
        }
    }

    /// Build a `Response` error from anything printable.
    pub fn response(e: impl ToString) -> Self {
        Self::Response(into_string(e))
    }

    /// Build a `Mismatch` error from printable expected/actual values.
    pub fn mismatch(expected: impl ToString, actual: impl ToString) -> Self {
        Self::Mismatch {
            expected: into_string(expected),
            actual: into_string(actual),
        }
    }

    /// The rejected parameter, when this is an `InvalidParameter` error.
    #[must_use]
    pub fn get_invalid_param(&self) -> Option<&Parameter> {
        if let Self::InvalidParameter(p) = self {
            Some(p)
        } else {
            None
        }
    }

    /// Whether retrying cannot help: fatal invalid parameters, mismatches,
    /// and unknown errors.
    #[must_use]
    pub fn is_fatal(&self) -> bool {
        match self {
            Self::InvalidParameter(p) => p.is_fatal(),
            Self::Mismatch { .. } | Self::Unknown(_) => true,
            _ => false,
        }
    }
}
|
||||||
|
|
||||||
|
pub type Result<T> = std::result::Result<T, Error>;
|
||||||
101
src/file/checked.rs
Normal file
101
src/file/checked.rs
Normal file
|
|
@ -0,0 +1,101 @@
|
||||||
|
use std::{
|
||||||
|
fs, io,
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
};
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use crate::sharry;
|
||||||
|
|
||||||
|
use super::{FileTrait, Uploading};
|
||||||
|
|
||||||
|
/// Description of an existing, regular file
///
/// - impl Clone for `clap` compatibility
/// - impl serde for appstate caching
/// - impl PartialEq..Ord to handle multiple files given
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct Checked {
    /// canonical path to a regular file
    pub(super) path: PathBuf,
    /// size of that file
    pub(super) size: u64,
    /// hash of that file (`None` until `hash()` has been called)
    pub(super) hash: Option<String>,
}
|
||||||
|
|
||||||
|
impl AsRef<[u8]> for Checked {
|
||||||
|
fn as_ref(&self) -> &[u8] {
|
||||||
|
self.path.as_os_str().as_encoded_bytes()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Checked {
    /// create a new checked file from some path reference
    ///
    /// # Errors
    ///
    /// - from `fs::metadata(path)` or `fs::canonicalize`
    /// - given path does not correspond to a regular file
    pub fn new(value: impl AsRef<Path>) -> io::Result<Self> {
        let meta = fs::metadata(&value)?;
        if meta.is_file() {
            Ok(Self {
                path: fs::canonicalize(&value)?,
                size: meta.len(),
                hash: None,
            })
        } else {
            Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Not a regular file",
            ))
        }
    }

    /// compute and store this file's hash, reporting progress via `f`
    ///
    /// # Errors
    ///
    /// - when a hash is already stored (refuses to hash twice)
    /// - from `compute_file_hash` (I/O, size mismatch)
    pub fn hash(&mut self, f: impl Fn(u64)) -> crate::Result<()> {
        if self.hash.is_some() {
            return Err(crate::Error::mismatch("unhashed file", self.path.display()));
        }

        self.hash = Some(super::compute_file_hash(&self.path, self.size, f)?);

        Ok(())
    }

    /// start uploading this file
    ///
    /// - tries to create a new entry in a share
    /// - expects endpoint like `{base_uri}/alias/upload/{share_id}/files/tus`
    /// - consumes `self` into a `file::Uploading` struct
    ///
    /// # Errors
    ///
    /// TODO documentation after `ClientError` rework
    pub fn start_upload(
        self,
        client: &impl sharry::Client,
        uri: &sharry::Uri,
        alias_id: &sharry::AliasID,
        share_id: &sharry::ShareID,
    ) -> crate::Result<Uploading> {
        let file_id = client.file_create(uri, alias_id, share_id, &self)?;

        Ok(Uploading::new(self.path, self.size, self.hash, file_id))
    }
}
|
||||||
|
|
||||||
|
impl FileTrait for Checked {
    fn get_name(&self) -> &str {
        <Self as FileTrait>::extract_file_name(&self.path)
    }

    /// get the file's size
    fn get_size(&self) -> u64 {
        self.size
    }

    // delegates to the shared helper; fails when no hash is stored
    fn check_hash(&self, on_progress: impl Fn(u64)) -> crate::Result<()> {
        super::check_file_hash(&self.path, self.size, self.hash.as_ref(), on_progress)
    }
}
|
||||||
54
src/file/chunk.rs
Normal file
54
src/file/chunk.rs
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
|
use crate::sharry;
|
||||||
|
|
||||||
|
/// One borrowed slice of a file, addressed by target file ID and byte offset.
pub struct Chunk<'t> {
    file_id: sharry::FileID,
    /// byte offset of `data` within the source file
    offset: u64,
    /// borrowed payload (a window into the caller's read buffer)
    data: &'t [u8],
}
|
||||||
|
|
||||||
|
impl fmt::Debug for Chunk<'_> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.debug_struct("Chunk")
|
||||||
|
.field("file_id", &self.file_id)
|
||||||
|
.field("offset", &self.offset)
|
||||||
|
.field("data.len()", &self.data.len())
|
||||||
|
.finish_non_exhaustive()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'t> Chunk<'t> {
    pub fn new(file_id: sharry::FileID, offset: u64, data: &'t [u8]) -> Self {
        Self {
            file_id,
            offset,
            data,
        }
    }

    /// ID of the server-side file this chunk belongs to
    pub fn get_file_id(&self) -> &sharry::FileID {
        &self.file_id
    }

    /// byte offset of this chunk within the source file
    pub fn get_offset(&self) -> u64 {
        self.offset
    }

    /// the borrowed chunk payload
    pub fn get_data(&self) -> &[u8] {
        self.data
    }

    /// payload length in bytes, as `u64`
    pub fn get_length(&self) -> u64 {
        let len = self.data.len();

        // BOOKMARK this might **panic** on platforms where `usize` has more than 64 bit.
        // Also, you've allocated more than 2 EiB ... in ONE chunk.
        // Whoa! Maybe just chill?
        u64::try_from(len).unwrap_or_else(|e| panic!("usize={len} did not fit into u64: {e}"))
    }

    /// offset of the first byte after this chunk (`offset + length`)
    pub fn get_behind(&self) -> u64 {
        self.offset + self.get_length()
    }
}
|
||||||
84
src/file/mod.rs
Normal file
84
src/file/mod.rs
Normal file
|
|
@ -0,0 +1,84 @@
|
||||||
|
mod checked;
|
||||||
|
mod chunk;
|
||||||
|
mod uploading;
|
||||||
|
|
||||||
|
use std::{ffi::OsStr, fs, io::Read, path::Path};
|
||||||
|
|
||||||
|
use base64::{Engine, prelude::BASE64_STANDARD_NO_PAD as BASE64};
|
||||||
|
use blake2b_simd::Params as Blake2b;
|
||||||
|
|
||||||
|
pub use checked::Checked;
|
||||||
|
pub use chunk::Chunk;
|
||||||
|
use log::{debug, warn};
|
||||||
|
pub use uploading::Uploading;
|
||||||
|
|
||||||
|
/// Hash the file at `path` with 64-byte BLAKE2b, reporting progress via `on_progress`.
///
/// Fails when the number of bytes read differs from the expected `size`
/// (i.e. the file changed since it was checked). Returns the digest as
/// unpadded standard base64.
fn compute_file_hash(path: &Path, size: u64, on_progress: impl Fn(u64)) -> crate::Result<String> {
    let mut file = fs::File::open(path)?;
    let mut hasher = Blake2b::new().hash_length(64).to_state();

    // stream the file in 4 MiB slices
    let mut buf = vec![0u8; 4 * 1024 * 1024];
    let mut bytes_read = 0;

    loop {
        let n = file.read(&mut buf)?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);

        bytes_read += n as u64;
        on_progress(n as u64);
    }

    if bytes_read != size {
        return Err(crate::Error::mismatch(size, bytes_read));
    }

    let result = BASE64.encode(hasher.finalize());
    debug!("hashed {:?}: {result:?}", path.display());
    Ok(result)
}
|
||||||
|
|
||||||
|
fn check_file_hash(
|
||||||
|
path: &Path,
|
||||||
|
size: u64,
|
||||||
|
hash: Option<&String>,
|
||||||
|
on_progress: impl Fn(u64),
|
||||||
|
) -> crate::Result<()> {
|
||||||
|
let Some(expected) = hash else {
|
||||||
|
return Err(crate::Error::mismatch("hash", path.display()));
|
||||||
|
};
|
||||||
|
|
||||||
|
let actual = &compute_file_hash(path, size, on_progress)?;
|
||||||
|
|
||||||
|
if expected == actual {
|
||||||
|
debug!("hash matches {expected:?}");
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
warn!("hash mismatch for file {:?}", path.display());
|
||||||
|
Err(crate::Error::mismatch(expected, actual))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Common behavior shared by files in any upload stage (`Checked`, `Uploading`).
pub trait FileTrait {
    /// extract the filename part of a `Path` reference
    ///
    /// # Panics
    ///
    /// Expects `path::Path::file_name` and `ffi::OsStr::to_str` to succeed on the given path
    fn extract_file_name(p: &Path) -> &str {
        p.file_name()
            .and_then(OsStr::to_str)
            .expect("bad file name")
    }

    /// get a reference to the file's name
    ///
    /// Uses `file::FileTrait::extract_file_name`, which may **panic**!
    fn get_name(&self) -> &str;

    /// get the file's size
    fn get_size(&self) -> u64;

    /// verify the file against its stored hash, reporting progress
    fn check_hash(&self, on_progress: impl Fn(u64)) -> crate::Result<()>;
}
|
||||||
109
src/file/uploading.rs
Normal file
109
src/file/uploading.rs
Normal file
|
|
@ -0,0 +1,109 @@
|
||||||
|
use std::{
|
||||||
|
fs,
|
||||||
|
io::{self, Read, Seek, SeekFrom},
|
||||||
|
path::PathBuf,
|
||||||
|
};
|
||||||
|
|
||||||
|
use log::warn;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use crate::sharry;
|
||||||
|
|
||||||
|
use super::{Checked, Chunk, FileTrait};
|
||||||
|
|
||||||
|
/// A file currently being uploaded chunk-by-chunk.
#[derive(Serialize, Deserialize, Debug)]
pub struct Uploading {
    /// canonical path to a regular file
    path: PathBuf,
    /// size of that file
    size: u64,
    /// hash of that file
    hash: Option<String>,
    /// server-side ID of this file's entry in the share
    file_id: sharry::FileID,
    /// start offset of the last chunk read, allowing a one-step rewind
    /// (not persisted: a resumed state starts with no rewind available)
    #[serde(skip)]
    last_offset: Option<u64>,
    /// byte offset of the next chunk to read
    offset: u64,
}
|
||||||
|
|
||||||
|
impl Uploading {
    pub(super) fn new(
        path: PathBuf,
        size: u64,
        hash: Option<String>,
        file_id: sharry::FileID,
    ) -> Self {
        Self {
            path,
            size,
            hash,
            file_id,
            last_offset: None,
            offset: 0,
        }
    }

    /// byte offset of the next chunk to be read
    pub fn get_offset(&self) -> u64 {
        self.offset
    }

    /// Step back to the previous chunk's start offset for a retry.
    ///
    /// Only one rewind is possible per successful `read`; a second attempt
    /// returns `None`.
    pub fn rewind(mut self) -> Option<Self> {
        if let Some(last_offset) = self.last_offset {
            self.last_offset = None;
            self.offset = last_offset;
            Some(self)
        } else {
            warn!("attempted to rewind twice");
            None
        }
    }

    /// Read the next chunk of the file into `buf`, advancing the offset.
    ///
    /// The file is re-opened on every call, so it must stay readable for the
    /// whole upload. Errors with `UnexpectedEof` when nothing could be read.
    pub fn read<'t>(&mut self, buf: &'t mut [u8]) -> io::Result<Chunk<'t>> {
        let mut f = fs::File::open(&self.path)?;

        f.seek(SeekFrom::Start(self.offset))?;
        let read_len = f.read(buf)?;

        if read_len == 0 {
            return Err(io::Error::new(
                io::ErrorKind::UnexpectedEof,
                format!("could not read from file {:?}", self.path.display()),
            ));
        }

        let chunk = Chunk::new(self.file_id.clone(), self.offset, &buf[..read_len]);
        // remember where this chunk started so it can be rewound once
        self.last_offset = Some(self.offset);
        self.offset += chunk.get_length();

        Ok(chunk)
    }

    /// Consume `self`: `Ok(self)` while data remains, `Err(path)` once the
    /// whole file has been read.
    pub fn check_eof(self) -> Result<Self, PathBuf> {
        if self.offset < self.size {
            Ok(self)
        } else {
            Err(self.path)
        }
    }

    /// Abandon this upload, converting back into a `Checked` file that can
    /// be re-queued (upload progress is discarded).
    pub fn abort(self) -> Checked {
        Checked {
            path: self.path,
            size: self.size,
            hash: self.hash,
        }
    }
}
|
||||||
|
|
||||||
|
impl FileTrait for Uploading {
    fn get_name(&self) -> &str {
        <Self as FileTrait>::extract_file_name(&self.path)
    }

    fn get_size(&self) -> u64 {
        self.size
    }

    // delegates to the shared helper; fails when no hash is stored
    fn check_hash(&self, on_progress: impl Fn(u64)) -> crate::Result<()> {
        super::check_file_hash(&self.path, self.size, self.hash.as_ref(), on_progress)
    }
}
|
||||||
16
src/lib.rs
Normal file
16
src/lib.rs
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
// TODO fix with documentation
|
||||||
|
#![allow(clippy::missing_errors_doc)]
|
||||||
|
#![allow(clippy::missing_panics_doc)]
|
||||||
|
|
||||||
|
mod appstate;
|
||||||
|
mod cachefile;
|
||||||
|
mod cli;
|
||||||
|
mod error;
|
||||||
|
mod file;
|
||||||
|
pub mod output;
|
||||||
|
mod sharry;
|
||||||
|
mod ureq_client;
|
||||||
|
|
||||||
|
pub use appstate::AppState;
|
||||||
|
pub use cli::Cli;
|
||||||
|
pub use error::{Error, Parameter, Result};
|
||||||
110
src/main.rs
110
src/main.rs
|
|
@ -1,110 +0,0 @@
|
||||||
mod appstate;
|
|
||||||
mod cli;
|
|
||||||
mod sharry;
|
|
||||||
|
|
||||||
use std::{
|
|
||||||
process::exit,
|
|
||||||
sync::{
|
|
||||||
Arc,
|
|
||||||
atomic::{AtomicBool, Ordering},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
use clap::Parser;
|
|
||||||
use console::style;
|
|
||||||
use dialoguer::{Confirm, theme::ColorfulTheme};
|
|
||||||
use log::{error, info};
|
|
||||||
use ureq::Agent;
|
|
||||||
|
|
||||||
use appstate::AppState;
|
|
||||||
use cli::Cli;
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
println!(
|
|
||||||
"{} to {}!",
|
|
||||||
style("Welcome").magenta().bold(),
|
|
||||||
style("ShrUpl").yellow().bold(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let stop = Arc::new(AtomicBool::new(false));
|
|
||||||
|
|
||||||
let stop_ctrlc = stop.clone();
|
|
||||||
ctrlc::set_handler(move || {
|
|
||||||
stop_ctrlc.store(true, Ordering::SeqCst);
|
|
||||||
info!("stopping as soon as possible ...");
|
|
||||||
})
|
|
||||||
.expect("Error setting Ctrl-C handler");
|
|
||||||
|
|
||||||
env_logger::init();
|
|
||||||
|
|
||||||
let args = Cli::parse();
|
|
||||||
info!("args: {args:?}");
|
|
||||||
info!("timeout: {:?}", args.get_timeout());
|
|
||||||
|
|
||||||
let agent: Agent = Agent::config_builder()
|
|
||||||
.timeout_global(args.get_timeout())
|
|
||||||
.build()
|
|
||||||
.into();
|
|
||||||
|
|
||||||
let mut state = AppState::try_resume(&args)
|
|
||||||
.and_then(|state| {
|
|
||||||
Confirm::with_theme(&ColorfulTheme::default())
|
|
||||||
.with_prompt("Continue previously stopped operation?")
|
|
||||||
.default(true)
|
|
||||||
.interact()
|
|
||||||
.map_or(None, |b| b.then_some(state))
|
|
||||||
})
|
|
||||||
.unwrap_or_else(|| {
|
|
||||||
stop.load(Ordering::SeqCst).then(|| exit(0));
|
|
||||||
|
|
||||||
match AppState::from_args(&args, &agent) {
|
|
||||||
Ok(state) => {
|
|
||||||
state.save().unwrap();
|
|
||||||
state
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
if let Some(cause) = match e {
|
|
||||||
ureq::Error::StatusCode(403) => Some("Alias ID"),
|
|
||||||
ureq::Error::Io(_) => Some("URL"),
|
|
||||||
_ => None,
|
|
||||||
} {
|
|
||||||
info!("handling error: {e:?}");
|
|
||||||
println!(
|
|
||||||
"{} probably wrong: {} – {:?}",
|
|
||||||
style("Error!").red().bold(),
|
|
||||||
style(cause).cyan().italic(),
|
|
||||||
style(e.to_string()).yellow()
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
error!("unknown error: {e} – {e:?}");
|
|
||||||
println!("{}", style("Unknown Error!").red().bold());
|
|
||||||
}
|
|
||||||
|
|
||||||
exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
println!(
|
|
||||||
"{} uploading: {}",
|
|
||||||
style("ShrUpl").yellow().bold(),
|
|
||||||
style(state.file_names().join(", ")).magenta(),
|
|
||||||
);
|
|
||||||
|
|
||||||
info!("continuing with state: {state:?}");
|
|
||||||
|
|
||||||
loop {
|
|
||||||
match state.upload_chunk(&agent, args.chunk_size * 1024 * 1024) {
|
|
||||||
Err(e) => error!("error: {e:?}"),
|
|
||||||
Ok(None) => {
|
|
||||||
info!("all uploads done");
|
|
||||||
state.clear().unwrap();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
_ => (),
|
|
||||||
}
|
|
||||||
|
|
||||||
state.save().unwrap();
|
|
||||||
stop.load(Ordering::SeqCst).then(|| exit(0));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
104
src/output.rs
Normal file
104
src/output.rs
Normal file
|
|
@ -0,0 +1,104 @@
|
||||||
|
use std::{fmt, process, sync::LazyLock};
|
||||||
|
|
||||||
|
use console::{StyledObject, style};
|
||||||
|
use dialoguer::{Confirm, Select, theme::ColorfulTheme};
|
||||||
|
use indicatif::{ProgressBar, ProgressStyle};
|
||||||
|
use log::{info, warn};
|
||||||
|
|
||||||
|
type StaticStyled<'t> = LazyLock<StyledObject<&'t str>>;
|
||||||
|
|
||||||
|
pub const SHRUPL: StaticStyled = LazyLock::new(|| style("ShrUpl").yellow().bold());
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn prompt_continue() -> bool {
|
||||||
|
let prompt = format!(
|
||||||
|
"This operation has previously been stopped. {}",
|
||||||
|
style("How to proceed?").cyan()
|
||||||
|
);
|
||||||
|
|
||||||
|
let choices = [
|
||||||
|
format!("Load and {}", style("continue operation").green().bold()),
|
||||||
|
format!("Start a {}", style("new operation").cyan().bold()),
|
||||||
|
format!("Quit {}", *SHRUPL),
|
||||||
|
];
|
||||||
|
|
||||||
|
let selection = Select::with_theme(&ColorfulTheme::default())
|
||||||
|
.with_prompt(prompt)
|
||||||
|
.default(0)
|
||||||
|
.items(&choices)
|
||||||
|
.interact()
|
||||||
|
.unwrap_or(2);
|
||||||
|
|
||||||
|
if selection == 2 {
|
||||||
|
process::exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
selection == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn prompt_rebuild_share() {
|
||||||
|
let prompt = format!(
|
||||||
|
"Target Share cannot be accessed. {}",
|
||||||
|
style("Completely restart upload?").cyan()
|
||||||
|
);
|
||||||
|
|
||||||
|
let selection = Confirm::with_theme(&ColorfulTheme::default())
|
||||||
|
.with_prompt(prompt)
|
||||||
|
.default(true)
|
||||||
|
.interact()
|
||||||
|
.unwrap_or(false);
|
||||||
|
|
||||||
|
if !selection {
|
||||||
|
process::exit(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn style_all<'t, F>(strs: &[&'t str], f: F) -> Vec<String>
|
||||||
|
where
|
||||||
|
F: Fn(StyledObject<&'t str>) -> StyledObject<&'t str>,
|
||||||
|
{
|
||||||
|
strs.iter().map(|&s| f(style(s)).to_string()).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn new_progressbar() -> ProgressBar {
|
||||||
|
ProgressBar::no_length().with_style(
|
||||||
|
ProgressStyle::with_template(&format!(
|
||||||
|
concat!(
|
||||||
|
"{{bar:50.cyan/blue}} {{msg:.magenta}}: ",
|
||||||
|
"{{binary_bytes:.yellow}}{}{{binary_total_bytes:.yellow}} ",
|
||||||
|
"({{eta}})",
|
||||||
|
),
|
||||||
|
style("/").magenta(),
|
||||||
|
))
|
||||||
|
.expect("invalid style template"),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub enum Log {}
|
||||||
|
|
||||||
|
impl Log {
|
||||||
|
fn eprintln(kind: impl fmt::Display, msg: impl fmt::Display) {
|
||||||
|
eprintln!("{} {}: {}", *SHRUPL, kind, style(msg).cyan().italic(),);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn warning(msg: impl fmt::Display) {
|
||||||
|
Self::eprintln(style("Warning").magenta().bold(), msg);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn error(msg: impl fmt::Display) -> ! {
|
||||||
|
Self::eprintln(style("Error").red().bold(), msg);
|
||||||
|
process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn handle(e: &crate::Error) {
|
||||||
|
if e.is_fatal() {
|
||||||
|
// react to fatal error
|
||||||
|
warn!("fatal error: {e:?}");
|
||||||
|
Self::error(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
// handle recoverable error
|
||||||
|
info!("recoverable error: {e:?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,36 +0,0 @@
|
||||||
use std::fmt::{Debug, Display};
|
|
||||||
|
|
||||||
use log::debug;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use ureq::RequestBuilder;
|
|
||||||
|
|
||||||
use super::api::Uri;
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug, Hash)]
|
|
||||||
pub struct Alias {
|
|
||||||
pub(super) uri: Uri,
|
|
||||||
pub(super) id: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(super) trait SharryAlias {
|
|
||||||
fn sharry_header(self, alias: &Alias) -> Self;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<B> SharryAlias for RequestBuilder<B> {
|
|
||||||
fn sharry_header(self, alias: &Alias) -> Self {
|
|
||||||
self.header("Sharry-Alias", &alias.id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Alias {
|
|
||||||
pub fn new(uri: Uri, id: impl Into<String>) -> Self {
|
|
||||||
Self { uri, id: id.into() }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(super) fn get_endpoint(&self, endpoint: impl Display + Debug) -> String {
|
|
||||||
let uri = format!("{}/{}", self.uri, endpoint);
|
|
||||||
debug!("endpoint uri: {uri:?}");
|
|
||||||
|
|
||||||
uri
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,75 +0,0 @@
|
||||||
use std::{
|
|
||||||
ffi::OsStr,
|
|
||||||
fs,
|
|
||||||
io::{self, ErrorKind},
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
};
|
|
||||||
|
|
||||||
use log::debug;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use ureq::http::StatusCode;
|
|
||||||
|
|
||||||
use super::{Alias, FileUploading, Share, SharryAlias};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Hash, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
|
|
||||||
pub struct FileChecked {
|
|
||||||
path: PathBuf,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileChecked {
|
|
||||||
pub fn new(value: impl AsRef<Path>) -> io::Result<Self> {
|
|
||||||
let meta = fs::metadata(&value)?;
|
|
||||||
if meta.is_file() {
|
|
||||||
Ok(Self {
|
|
||||||
path: fs::canonicalize(&value)?,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
Err(io::Error::new(
|
|
||||||
ErrorKind::InvalidInput,
|
|
||||||
"Not a regular file",
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn file_name(&self) -> &str {
|
|
||||||
self.path.file_name().unwrap().to_str().unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn start_upload(
|
|
||||||
self,
|
|
||||||
http: &ureq::Agent,
|
|
||||||
alias: &Alias,
|
|
||||||
share: &Share,
|
|
||||||
) -> io::Result<FileUploading> {
|
|
||||||
let size = usize::try_from(fs::metadata(&self.path)?.len()).map_err(io::Error::other)?;
|
|
||||||
|
|
||||||
let res = {
|
|
||||||
let endpoint = alias.get_endpoint(format!("alias/upload/{}/files/tus", share.id));
|
|
||||||
|
|
||||||
let name = (self.path.file_name().and_then(OsStr::to_str))
|
|
||||||
.ok_or_else(|| io::Error::new(ErrorKind::NotFound, "bad file name"))?
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
(http.post(endpoint))
|
|
||||||
.sharry_header(alias)
|
|
||||||
.header("Sharry-File-Name", &name)
|
|
||||||
.header("Upload-Length", size)
|
|
||||||
.send_empty()
|
|
||||||
.map_err(io::Error::other)?
|
|
||||||
};
|
|
||||||
|
|
||||||
if res.status() != StatusCode::CREATED {
|
|
||||||
return Err(io::Error::other("unexpected response status"));
|
|
||||||
}
|
|
||||||
|
|
||||||
let location = (res.headers().get("Location"))
|
|
||||||
.ok_or_else(|| io::Error::other("Location header not found"))?
|
|
||||||
.to_str()
|
|
||||||
.map_err(|_| io::Error::other("Location header invalid"))?
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
debug!("patch uri: {location}");
|
|
||||||
|
|
||||||
Ok(FileUploading::new(self.path, size, location))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,7 +0,0 @@
|
||||||
mod checked;
|
|
||||||
mod uploading;
|
|
||||||
|
|
||||||
pub use checked::FileChecked;
|
|
||||||
pub use uploading::{ChunkState, FileUploading, UploadError};
|
|
||||||
|
|
||||||
use super::{Alias, Share, alias::SharryAlias};
|
|
||||||
|
|
@ -1,133 +0,0 @@
|
||||||
use std::{
|
|
||||||
fmt::{Debug, Display},
|
|
||||||
fs::File,
|
|
||||||
io::{self, Read, Seek, SeekFrom},
|
|
||||||
path::PathBuf,
|
|
||||||
};
|
|
||||||
|
|
||||||
use log::debug;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use ureq::http::{HeaderValue, StatusCode};
|
|
||||||
|
|
||||||
use super::{Alias, SharryAlias};
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
|
||||||
pub struct FileUploading {
|
|
||||||
path: PathBuf,
|
|
||||||
size: usize,
|
|
||||||
uri: String,
|
|
||||||
offset: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum UploadError {
|
|
||||||
FileIO(io::Error),
|
|
||||||
Request,
|
|
||||||
ResponseStatus,
|
|
||||||
ResponseOffset,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub enum ChunkState {
|
|
||||||
Ok(FileUploading),
|
|
||||||
Err(FileUploading, UploadError),
|
|
||||||
Finished(PathBuf),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Display for FileUploading {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"Uploading ({:?}, {}, {})",
|
|
||||||
self.path.display(),
|
|
||||||
self.size,
|
|
||||||
self.offset
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileUploading {
|
|
||||||
pub(super) fn new(path: PathBuf, size: usize, uri: String) -> Self {
|
|
||||||
Self {
|
|
||||||
path,
|
|
||||||
size,
|
|
||||||
uri,
|
|
||||||
offset: 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_chunk(&self, chunk_size: usize) -> io::Result<Vec<u8>> {
|
|
||||||
let offset = u64::try_from(self.offset).map_err(io::Error::other)?;
|
|
||||||
|
|
||||||
let mut f = File::open(&self.path)?;
|
|
||||||
f.seek(SeekFrom::Start(offset))?;
|
|
||||||
|
|
||||||
let mut bytes = vec![0; chunk_size];
|
|
||||||
let read_len = f.read(&mut bytes)?;
|
|
||||||
bytes.truncate(read_len);
|
|
||||||
|
|
||||||
Ok(bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn file_name(&self) -> &str {
|
|
||||||
self.path.file_name().unwrap().to_str().unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn offset<T>(&self) -> T
|
|
||||||
where
|
|
||||||
T: TryFrom<usize>,
|
|
||||||
<T as TryFrom<usize>>::Error: Debug,
|
|
||||||
{
|
|
||||||
self.offset.try_into().unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn size<T>(&self) -> T
|
|
||||||
where
|
|
||||||
T: TryFrom<usize>,
|
|
||||||
<T as TryFrom<usize>>::Error: Debug,
|
|
||||||
{
|
|
||||||
self.size.try_into().unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn upload_chunk(
|
|
||||||
mut self,
|
|
||||||
http: &ureq::Agent,
|
|
||||||
alias: &Alias,
|
|
||||||
chunk_size: usize,
|
|
||||||
) -> ChunkState {
|
|
||||||
let chunk = match self.read_chunk(chunk_size) {
|
|
||||||
Err(e) => return ChunkState::Err(self, UploadError::FileIO(e)),
|
|
||||||
Ok(value) => value,
|
|
||||||
};
|
|
||||||
|
|
||||||
let Ok(res) = (http.patch(&self.uri))
|
|
||||||
.sharry_header(alias)
|
|
||||||
.header("Upload-Offset", self.offset)
|
|
||||||
.send(&chunk)
|
|
||||||
else {
|
|
||||||
return ChunkState::Err(self, UploadError::Request);
|
|
||||||
};
|
|
||||||
|
|
||||||
if res.status() != StatusCode::NO_CONTENT {
|
|
||||||
return ChunkState::Err(self, UploadError::ResponseStatus);
|
|
||||||
}
|
|
||||||
|
|
||||||
let Some(Ok(Ok(res_offset))) = (res.headers().get("Upload-Offset"))
|
|
||||||
.map(HeaderValue::to_str)
|
|
||||||
.map(|v| v.map(str::parse::<usize>))
|
|
||||||
else {
|
|
||||||
return ChunkState::Err(self, UploadError::ResponseOffset);
|
|
||||||
};
|
|
||||||
|
|
||||||
if self.offset + chunk.len() != res_offset {
|
|
||||||
return ChunkState::Err(self, UploadError::ResponseOffset);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.offset = res_offset;
|
|
||||||
|
|
||||||
if self.offset == self.size {
|
|
||||||
return ChunkState::Finished(self.path);
|
|
||||||
}
|
|
||||||
|
|
||||||
ChunkState::Ok(self)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
153
src/sharry/ids.rs
Normal file
153
src/sharry/ids.rs
Normal file
|
|
@ -0,0 +1,153 @@
|
||||||
|
use std::{fmt, sync::LazyLock};
|
||||||
|
|
||||||
|
use log::{debug, trace};
|
||||||
|
use regex::Regex;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||||
|
pub struct AliasID(String);
|
||||||
|
|
||||||
|
impl fmt::Display for AliasID {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.write_str(&self.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AsRef<[u8]> for AliasID {
|
||||||
|
fn as_ref(&self) -> &[u8] {
|
||||||
|
self.0.as_bytes()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for AliasID {
|
||||||
|
fn from(value: String) -> Self {
|
||||||
|
Self(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||||
|
pub struct ShareID(String);
|
||||||
|
|
||||||
|
impl fmt::Display for ShareID {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.write_str(&self.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for ShareID {
|
||||||
|
fn from(value: String) -> Self {
|
||||||
|
Self(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||||
|
pub struct FileID(String);
|
||||||
|
|
||||||
|
impl fmt::Display for FileID {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.write_str(&self.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Pattern breakdown:
|
||||||
|
/// - `^([^:/?#]+)://` - scheme (anything but `:/?#`) + `"://"`
|
||||||
|
/// - `([^/?#]+)` - authority/host (anything but `/?#`)
|
||||||
|
/// - `/api/v2/alias/upload/` - literal path segment
|
||||||
|
/// - `([^/]+)` - capture SID (one or more non-slash chars)
|
||||||
|
/// - `/files/tus/` - literal path segment
|
||||||
|
/// - `(?P<fid>[^/]+)` - capture FID (one or more non-slash chars)
|
||||||
|
/// - `$` - end of string
|
||||||
|
const UPLOAD_URL_RE: LazyLock<Regex> = LazyLock::new(|| {
|
||||||
|
trace!("compiling UPLOAD_URL_RE");
|
||||||
|
|
||||||
|
Regex::new(r"^([^:/?#]+)://([^/?#]+)/api/v2/alias/upload/[^/]+/files/tus/(?P<fid>[^/]+)$")
|
||||||
|
.expect("Regex compilation failed")
|
||||||
|
});
|
||||||
|
|
||||||
|
impl TryFrom<String> for FileID {
|
||||||
|
type Error = crate::Error;
|
||||||
|
|
||||||
|
fn try_from(value: String) -> crate::Result<Self> {
|
||||||
|
trace!("TryFrom {value:?}");
|
||||||
|
|
||||||
|
if let Some(fid) = UPLOAD_URL_RE
|
||||||
|
.captures(&value)
|
||||||
|
.and_then(|caps| caps.name("fid").map(|m| m.as_str()))
|
||||||
|
{
|
||||||
|
let result = Self(fid.to_string());
|
||||||
|
debug!("{result:?}");
|
||||||
|
|
||||||
|
Ok(result)
|
||||||
|
} else {
|
||||||
|
Err(crate::Error::mismatch(
|
||||||
|
"<proto>://<host>/api/v2/alias/upload/<share>/files/tus/<file>",
|
||||||
|
value,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn valid_urls_produce_expected_file_id() {
|
||||||
|
// a handful of valid‐looking URLs
|
||||||
|
let cases = vec![
|
||||||
|
(
|
||||||
|
"http://example.com/api/v2/alias/upload/SID123/files/tus/FID456",
|
||||||
|
"FID456",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"https://my-host:8080/api/v2/alias/upload/another-SID/files/tus/some-file-id",
|
||||||
|
"some-file-id",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"custom+scheme://host/api/v2/alias/upload/x/files/tus/y",
|
||||||
|
"y",
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
for (good, expected_fid) in cases {
|
||||||
|
let file_id =
|
||||||
|
FileID::try_from(good.to_string()).expect("URL should parse successfully");
|
||||||
|
assert_eq!(
|
||||||
|
file_id.0, expected_fid,
|
||||||
|
"Expected `{}` → FileID({}), got {:?}",
|
||||||
|
good, expected_fid, file_id
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn invalid_urls_return_error() {
|
||||||
|
let bad_inputs = vec![
|
||||||
|
// missing /api/v2/alias/upload
|
||||||
|
"http://example.com/files/tus/FID",
|
||||||
|
// missing /files/tus
|
||||||
|
"http://example.com/api/v2/alias/upload/SID123/FID456",
|
||||||
|
// trailing slash (doesn't match `$`)
|
||||||
|
"http://example.com/api/v2/alias/upload/SID/files/tus/FID/",
|
||||||
|
// empty fid
|
||||||
|
"http://example.com/api/v2/alias/upload/SID/files/tus/",
|
||||||
|
// random string
|
||||||
|
"just-a-random-string",
|
||||||
|
];
|
||||||
|
|
||||||
|
for bad in bad_inputs {
|
||||||
|
let err = FileID::try_from(bad.to_string()).expect_err("URL should not parse");
|
||||||
|
// make sure it's the Mismatch variant, and that it contains the original input
|
||||||
|
match err {
|
||||||
|
crate::Error::Mismatch { expected, actual } => {
|
||||||
|
assert_eq!(
|
||||||
|
expected, "<proto>://<host>/api/v2/alias/upload/<share>/files/tus/<file>",
|
||||||
|
"Error should output expected format"
|
||||||
|
);
|
||||||
|
assert_eq!(actual, bad.to_string(), "Error should echo back the input");
|
||||||
|
}
|
||||||
|
_ => panic!("Expected Error::Mismatch for input `{bad}` but got {err:?}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,29 +1,6 @@
|
||||||
use std::fmt::Display;
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug, Hash)]
|
#[derive(Serialize, Debug)]
|
||||||
pub struct Uri {
|
|
||||||
protocol: String,
|
|
||||||
base_url: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Uri {
|
|
||||||
pub fn with_protocol(protocol: impl Into<String>, base_url: impl Into<String>) -> Self {
|
|
||||||
Self {
|
|
||||||
protocol: protocol.into(),
|
|
||||||
base_url: base_url.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Display for Uri {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
write!(f, "{}://{}/api/v2", self.protocol, self.base_url)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize)]
|
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub struct NewShareRequest {
|
pub struct NewShareRequest {
|
||||||
name: String,
|
name: String,
|
||||||
|
|
@ -50,7 +27,7 @@ impl NewShareRequest {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
pub(super) struct NewShareResponse {
|
pub struct NewShareResponse {
|
||||||
pub success: bool,
|
pub success: bool,
|
||||||
pub message: String,
|
pub message: String,
|
||||||
pub id: String,
|
pub id: String,
|
||||||
|
|
@ -58,7 +35,7 @@ pub(super) struct NewShareResponse {
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
pub(super) struct NotifyShareResponse {
|
pub struct NotifyShareResponse {
|
||||||
pub success: bool,
|
pub success: bool,
|
||||||
pub message: String,
|
pub message: String,
|
||||||
}
|
}
|
||||||
|
|
@ -1,11 +1,35 @@
|
||||||
#![allow(unused_imports)]
|
mod ids;
|
||||||
|
pub mod json;
|
||||||
|
mod uri;
|
||||||
|
|
||||||
mod alias;
|
pub use ids::{AliasID, FileID, ShareID};
|
||||||
mod api;
|
pub use uri::Uri;
|
||||||
mod file;
|
|
||||||
mod share;
|
|
||||||
|
|
||||||
pub use alias::Alias;
|
use crate::file;
|
||||||
pub use api::{NewShareRequest, Uri};
|
|
||||||
pub use file::{ChunkState, FileChecked, FileUploading, UploadError};
|
pub trait Client {
|
||||||
pub use share::Share;
|
fn share_create(
|
||||||
|
&self,
|
||||||
|
uri: &Uri,
|
||||||
|
alias_id: &AliasID,
|
||||||
|
data: json::NewShareRequest,
|
||||||
|
) -> crate::Result<ShareID>;
|
||||||
|
|
||||||
|
fn share_notify(&self, uri: &Uri, alias_id: &AliasID, share_id: &ShareID) -> crate::Result<()>;
|
||||||
|
|
||||||
|
fn file_create(
|
||||||
|
&self,
|
||||||
|
uri: &Uri,
|
||||||
|
alias_id: &AliasID,
|
||||||
|
share_id: &ShareID,
|
||||||
|
file: &file::Checked,
|
||||||
|
) -> crate::Result<FileID>;
|
||||||
|
|
||||||
|
fn file_patch(
|
||||||
|
&self,
|
||||||
|
uri: &Uri,
|
||||||
|
alias_id: &AliasID,
|
||||||
|
share_id: &ShareID,
|
||||||
|
chunk: &file::Chunk,
|
||||||
|
) -> crate::Result<()>;
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,48 +0,0 @@
|
||||||
use log::debug;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use super::{
|
|
||||||
alias::{Alias, SharryAlias},
|
|
||||||
api::{NewShareRequest, NewShareResponse, NotifyShareResponse},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
|
||||||
pub struct Share {
|
|
||||||
pub(super) id: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Share {
|
|
||||||
pub fn create(
|
|
||||||
http: &ureq::Agent,
|
|
||||||
alias: &Alias,
|
|
||||||
data: NewShareRequest,
|
|
||||||
) -> Result<Self, ureq::Error> {
|
|
||||||
let res = (http.post(alias.get_endpoint("alias/upload/new")))
|
|
||||||
.sharry_header(alias)
|
|
||||||
.send_json(data)?
|
|
||||||
.body_mut()
|
|
||||||
.read_json::<NewShareResponse>()?;
|
|
||||||
|
|
||||||
debug!("response: {res:?}");
|
|
||||||
|
|
||||||
if !(res.success && (res.message == "Share created.")) {
|
|
||||||
return Err(ureq::Error::Other("unexpected json response".into()));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Self { id: res.id })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn notify(&self, http: &ureq::Agent, alias: &Alias) -> Result<(), ureq::Error> {
|
|
||||||
let endpoint = alias.get_endpoint(format!("alias/mail/notify/{}", self.id));
|
|
||||||
|
|
||||||
let res = (http.post(endpoint))
|
|
||||||
.sharry_header(alias)
|
|
||||||
.send_empty()?
|
|
||||||
.body_mut()
|
|
||||||
.read_json::<NotifyShareResponse>()?;
|
|
||||||
|
|
||||||
debug!("response: {res:?}");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
84
src/sharry/uri.rs
Normal file
84
src/sharry/uri.rs
Normal file
|
|
@ -0,0 +1,84 @@
|
||||||
|
use std::{fmt, sync::LazyLock};
|
||||||
|
|
||||||
|
use log::{debug, trace};
|
||||||
|
use regex::{Captures, Regex};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct Uri(String);
|
||||||
|
|
||||||
|
impl fmt::Display for Uri {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.write_str(&self.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AsRef<[u8]> for Uri {
|
||||||
|
fn as_ref(&self) -> &[u8] {
|
||||||
|
self.0.as_bytes()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn captured(caps: &Captures, name: &str) -> String {
|
||||||
|
caps.name(name)
|
||||||
|
.unwrap_or_else(|| panic!("{name} not captured"))
|
||||||
|
.as_str()
|
||||||
|
.to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Pattern breakdown:
|
||||||
|
/// - `^(?P<scheme>[^:/?#]+)://` - capture scheme (anything but `:/?#`) + `"://"`
|
||||||
|
/// - `(?P<host>[^/?#]+)` - capture authority/host (anything but `/?#`)
|
||||||
|
/// - `(/.*)?` - maybe trailing slash and some path
|
||||||
|
/// - `$` - end of string
|
||||||
|
const SHARRY_URI_RE: LazyLock<Regex> = LazyLock::new(|| {
|
||||||
|
trace!("compiling SHARRY_URI_RE");
|
||||||
|
|
||||||
|
Regex::new(r"^(?P<scheme>[^:/?#]+)://(?P<host>[^/?#]+)(/.*)?$")
|
||||||
|
.expect("Regex compilation failed")
|
||||||
|
});
|
||||||
|
|
||||||
|
fn parse_url(value: &str) -> Option<(String, String)> {
|
||||||
|
SHARRY_URI_RE
|
||||||
|
.captures(value)
|
||||||
|
.map(|caps| (captured(&caps, "scheme"), captured(&caps, "host")))
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for Uri {
|
||||||
|
fn from(value: String) -> Self {
|
||||||
|
trace!("TryFrom {value:?}");
|
||||||
|
|
||||||
|
if let Some((scheme, host)) = parse_url(&value) {
|
||||||
|
let result = Self(format!("{scheme}://{host}"));
|
||||||
|
debug!("{result:?}");
|
||||||
|
|
||||||
|
result
|
||||||
|
} else {
|
||||||
|
Self(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Uri {
|
||||||
|
fn endpoint(&self, path: fmt::Arguments) -> String {
|
||||||
|
let uri = format!("{}/api/v2/{path}", self.0);
|
||||||
|
trace!("endpoint: {uri:?}");
|
||||||
|
uri
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn share_create(&self) -> String {
|
||||||
|
self.endpoint(format_args!("alias/upload/new"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn share_notify(&self, share_id: &super::ShareID) -> String {
|
||||||
|
self.endpoint(format_args!("alias/mail/notify/{share_id}"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn file_create(&self, share_id: &super::ShareID) -> String {
|
||||||
|
self.endpoint(format_args!("alias/upload/{share_id}/files/tus"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn file_patch(&self, share_id: &super::ShareID, file_id: &super::FileID) -> String {
|
||||||
|
self.endpoint(format_args!("alias/upload/{share_id}/files/tus/{file_id}"))
|
||||||
|
}
|
||||||
|
}
|
||||||
182
src/ureq_client.rs
Normal file
182
src/ureq_client.rs
Normal file
|
|
@ -0,0 +1,182 @@
|
||||||
|
use log::{debug, trace};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
file::{self, FileTrait},
|
||||||
|
sharry::{self, AliasID, FileID, ShareID, Uri},
|
||||||
|
};
|
||||||
|
|
||||||
|
fn find_cause(
|
||||||
|
uri: &Uri,
|
||||||
|
alias_id: &AliasID,
|
||||||
|
share_id: Option<&ShareID>,
|
||||||
|
file_id: Option<&FileID>,
|
||||||
|
) -> impl FnOnce(ureq::Error) -> crate::Error {
|
||||||
|
move |error| match error {
|
||||||
|
ureq::Error::StatusCode(403) => {
|
||||||
|
trace!("HTTP Error 403: Alias not found!");
|
||||||
|
|
||||||
|
alias_id.into()
|
||||||
|
}
|
||||||
|
ureq::Error::StatusCode(404) => {
|
||||||
|
trace!("HTTP Error 404: Share and/or file may have been deleted!");
|
||||||
|
|
||||||
|
if let Some(file_id) = file_id {
|
||||||
|
file_id.into()
|
||||||
|
} else if let Some(share_id) = share_id {
|
||||||
|
share_id.into()
|
||||||
|
} else {
|
||||||
|
uri.into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ureq::Error::Io(error) => {
|
||||||
|
trace!("std::io::Error {error:?}");
|
||||||
|
|
||||||
|
if let Some(msg) = error.get_ref().map(ToString::to_string) {
|
||||||
|
if msg.starts_with("failed to lookup address information") {
|
||||||
|
uri.into()
|
||||||
|
} else {
|
||||||
|
error.into()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
error.into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
error => crate::Error::Unknown(error.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl sharry::Client for ureq::Agent {
|
||||||
|
fn share_create(
|
||||||
|
&self,
|
||||||
|
uri: &Uri,
|
||||||
|
alias_id: &AliasID,
|
||||||
|
data: sharry::json::NewShareRequest,
|
||||||
|
) -> crate::Result<ShareID> {
|
||||||
|
let res = {
|
||||||
|
let endpoint = uri.share_create();
|
||||||
|
|
||||||
|
let mut res = self
|
||||||
|
.post(&endpoint)
|
||||||
|
.header("Sharry-Alias", alias_id.as_ref())
|
||||||
|
.send_json(data)
|
||||||
|
.map_err(find_cause(uri, alias_id, None, None))?;
|
||||||
|
|
||||||
|
trace!("{endpoint:?} response: {res:?}");
|
||||||
|
crate::Error::res_status_check(res.status(), ureq::http::StatusCode::OK)?;
|
||||||
|
|
||||||
|
res.body_mut()
|
||||||
|
.read_json::<sharry::json::NewShareResponse>()
|
||||||
|
.map_err(crate::Error::response)?
|
||||||
|
};
|
||||||
|
|
||||||
|
debug!("{res:?}");
|
||||||
|
|
||||||
|
if res.success && (res.message == "Share created.") {
|
||||||
|
trace!("new share id: {:?}", res.id);
|
||||||
|
|
||||||
|
Ok(res.id.into())
|
||||||
|
} else {
|
||||||
|
Err(crate::Error::response(format!("{res:?}")))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn share_notify(&self, uri: &Uri, alias_id: &AliasID, share_id: &ShareID) -> crate::Result<()> {
|
||||||
|
let res = {
|
||||||
|
let endpoint = uri.share_notify(share_id);
|
||||||
|
|
||||||
|
let mut res = self
|
||||||
|
.post(&endpoint)
|
||||||
|
.header("Sharry-Alias", alias_id.as_ref())
|
||||||
|
.send_empty()
|
||||||
|
.map_err(find_cause(uri, alias_id, Some(share_id), None))?;
|
||||||
|
|
||||||
|
trace!("{endpoint:?} response: {res:?}");
|
||||||
|
crate::Error::res_status_check(res.status(), ureq::http::StatusCode::OK)?;
|
||||||
|
|
||||||
|
res.body_mut()
|
||||||
|
.read_json::<sharry::json::NotifyShareResponse>()
|
||||||
|
.map_err(crate::Error::response)?
|
||||||
|
};
|
||||||
|
|
||||||
|
debug!("{res:?}");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn file_create(
|
||||||
|
&self,
|
||||||
|
uri: &Uri,
|
||||||
|
alias_id: &AliasID,
|
||||||
|
share_id: &ShareID,
|
||||||
|
file: &file::Checked,
|
||||||
|
) -> crate::Result<FileID> {
|
||||||
|
let res = {
|
||||||
|
let endpoint = uri.file_create(share_id);
|
||||||
|
|
||||||
|
let res = self
|
||||||
|
.post(&endpoint)
|
||||||
|
.header("Sharry-Alias", alias_id.as_ref())
|
||||||
|
.header("Sharry-File-Name", file.get_name())
|
||||||
|
.header("Upload-Length", file.get_size())
|
||||||
|
.send_empty()
|
||||||
|
.map_err(find_cause(uri, alias_id, Some(share_id), None))?;
|
||||||
|
|
||||||
|
trace!("{endpoint:?} response: {res:?}");
|
||||||
|
crate::Error::res_status_check(res.status(), ureq::http::StatusCode::CREATED)?;
|
||||||
|
res
|
||||||
|
};
|
||||||
|
|
||||||
|
let location = (res.headers().get("Location"))
|
||||||
|
.ok_or_else(|| crate::Error::response("Location header not found"))?
|
||||||
|
.to_str()
|
||||||
|
.map_err(crate::Error::response)?
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
FileID::try_from(location)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn file_patch(
|
||||||
|
&self,
|
||||||
|
uri: &Uri,
|
||||||
|
alias_id: &AliasID,
|
||||||
|
share_id: &ShareID,
|
||||||
|
chunk: &file::Chunk,
|
||||||
|
) -> crate::Result<()> {
|
||||||
|
let res = {
|
||||||
|
let endpoint = uri.file_patch(share_id, chunk.get_file_id());
|
||||||
|
|
||||||
|
let res = self
|
||||||
|
.patch(&endpoint)
|
||||||
|
.header("Sharry-Alias", alias_id.as_ref())
|
||||||
|
.header("Upload-Offset", chunk.get_offset())
|
||||||
|
.send(chunk.get_data())
|
||||||
|
.map_err(find_cause(
|
||||||
|
uri,
|
||||||
|
alias_id,
|
||||||
|
Some(share_id),
|
||||||
|
Some(chunk.get_file_id()),
|
||||||
|
))?;
|
||||||
|
|
||||||
|
trace!("{endpoint:?} response: {res:?}");
|
||||||
|
crate::Error::res_status_check(res.status(), ureq::http::StatusCode::NO_CONTENT)?;
|
||||||
|
res
|
||||||
|
};
|
||||||
|
|
||||||
|
let res_offset = (res.headers().get("Upload-Offset"))
|
||||||
|
.ok_or_else(|| crate::Error::response("Upload-Offset header not found"))?
|
||||||
|
.to_str()
|
||||||
|
.map_err(crate::Error::response)?
|
||||||
|
.parse::<u64>()
|
||||||
|
.map_err(crate::Error::response)?;
|
||||||
|
|
||||||
|
if chunk.get_behind() == res_offset {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(crate::Error::response(format!(
|
||||||
|
"Unexpected Upload-Offset: {} (expected {})",
|
||||||
|
res_offset,
|
||||||
|
chunk.get_behind()
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
Loading…
Reference in a new issue