Compare commits
170 commits
main...feature/un
| SHA1 | Author | Date | |
|---|---|---|---|
| 6e0baa138a | |||
| a5ecf19f70 | |||
| aa76c7efda | |||
| 147c78377a | |||
| 9e56779378 | |||
| 5358d8e68f | |||
| 66f59b4b3b | |||
| 22eeada52a | |||
| 1ac4db28ed | |||
| 037e59df4f | |||
| 98e4e96073 | |||
| 50098edc81 | |||
| 910d95230f | |||
| 117faee583 | |||
| 2900715a74 | |||
| 6167ebc98a | |||
| 47ad51f860 | |||
| b77ab83ae0 | |||
| 53fdb6cc97 | |||
| bd9aa77d6c | |||
| ab76563b83 | |||
| 2248d8fdd1 | |||
| 7bbb2bbc19 | |||
| b2c032d846 | |||
| cb5873b732 | |||
| 96ea0ddab9 | |||
| 4d47530326 | |||
| b3bccbbf65 | |||
| d4cc102a0f | |||
| 3257a97351 | |||
| 908e0031e2 | |||
| f03bcb46b6 | |||
| 655db21ef4 | |||
| 11515a4158 | |||
| 51cf212bb6 | |||
| 65f1e5d083 | |||
| c609f51c6a | |||
| f573b61ad1 | |||
| 4ad9e28bc7 | |||
| 4ff1f34b08 | |||
| 3f5fa86f3a | |||
| f988c80854 | |||
| 389e33e512 | |||
| 1ac110ddfb | |||
| 95dcc25e0d | |||
| a34aa968b4 | |||
| 6ba17d57df | |||
| 52d455e219 | |||
| 7e4bd398c2 | |||
| 5a34a8d791 | |||
| 61d62d731e | |||
| 6814f74484 | |||
| 6c385ffeea | |||
| cab6d13d28 | |||
| cca35e1ae8 | |||
| aa16cc9ede | |||
| 0efde0e134 | |||
| b9e553f112 | |||
| ea453da433 | |||
| b01eb69e95 | |||
| fada53044d | |||
| 67b048f9ac | |||
| a7cddf3205 | |||
| 46913e93b9 | |||
| 470ebc4305 | |||
| f4e0bc5be4 | |||
| abf76c9df7 | |||
| 2cc13f24e7 | |||
| ad1854dfb8 | |||
| d891e6d1c4 | |||
| 91085a9eec | |||
| d55684096f | |||
| de208a20d5 | |||
| 3bfdf87277 | |||
| 0d68d139d5 | |||
| 2eb651f919 | |||
| 903fbc3480 | |||
| bc4c15d17c | |||
| c10d86ff65 | |||
| 3f00e08422 | |||
| 357f455ec0 | |||
| e3fc06b019 | |||
| 2315c9cd2e | |||
| c7b24b1250 | |||
| d8c48b74ca | |||
| d37797d2ec | |||
| e0c5b5517f | |||
| 087cef5d6f | |||
| b17f239801 | |||
| 9de8f948dc | |||
| 5adbf8cb38 | |||
| 3258b8fb74 | |||
| c9c21aa128 | |||
| f1c6eb5d75 | |||
| f2b063ba85 | |||
| 4c88ea7291 | |||
| 465c857126 | |||
| 1f9c247439 | |||
| e391821fe5 | |||
| 9000c8a46b | |||
| d1d7b55585 | |||
| 78fb04403c | |||
| 0b8bebad6e | |||
| 11a5106473 | |||
| ea5ef1fa10 | |||
| 2bcbc0d71c | |||
| 97e9e41117 | |||
| 6e553cc185 | |||
| ece742a1e3 | |||
| 1e05155cc1 | |||
| 005c5f7cfa | |||
| 14e1bed708 | |||
| 72e9a5d40f | |||
| a8ff354a51 | |||
| 686e0c3e5c | |||
| 5b403ea129 | |||
| 540953e4a9 | |||
| c734355ebb | |||
| 393feec125 | |||
| df055fc4e9 | |||
| 865566ad0c | |||
| 3b46d228f4 | |||
| 79bc8e67a7 | |||
| 4eb0627a5f | |||
| a4bef827d1 | |||
| a633f4e228 | |||
| 01bcf92d9c | |||
| 783346c888 | |||
| de07d556a2 | |||
| 6ca3e6c9dd | |||
| 4bf18631d4 | |||
| 56223b3ae8 | |||
| 205af655e5 | |||
| 10bb4feef5 | |||
| e257d1cf8c | |||
| 5b018cf84b | |||
| 876869b073 | |||
| e2151b592c | |||
| 30855ed8ff | |||
| 0a8e5cf3f0 | |||
| f05e112040 | |||
| 5556a658f5 | |||
| 09d22a0ad9 | |||
| f77acc1afd | |||
| 4b650fd82f | |||
| 9f1e0cfc6c | |||
| b9a0e1eeb0 | |||
| 2edc690331 | |||
| ed10f269c8 | |||
| fb06725f05 | |||
| 67da081ef9 | |||
| 9b1f7f872c | |||
| 592e7bf76e | |||
| 0993679641 | |||
| dc2a330d58 | |||
| d607380659 | |||
| 69bef4e994 | |||
| c9528a9ac1 | |||
| 09af480379 | |||
| 90cecd015e | |||
| 51ecab41bb | |||
| 5b6fa3eaf7 | |||
| e208df9711 | |||
| e908c09beb | |||
| 3dc39edcab | |||
| e98001f4a4 | |||
| 39560eeeed | |||
| faea74241d | |||
| 8537684656 | |||
| e746256fdc |
37 changed files with 3821 additions and 890 deletions
@@ -1,2 +1,9 @@
[build]
target = "x86_64-unknown-linux-musl"

# rustflags = [
# # emit instructions tuned to the current CPU
# "-C", "target-cpu=native",
# # assume CPU features
# "-C", "target-feature=+avx2,+sse4.1,+ssse3,+aes",
# ]
@@ -13,13 +13,21 @@
"configureZshAsDefaultShell": "true"
},
"ghcr.io/devcontainers/features/rust:1": {
"targets": "x86_64-unknown-linux-musl"
"targets": "x86_64-unknown-linux-gnu,x86_64-unknown-linux-musl"
},
"ghcr.io/devcontainers-contrib/features/apt-get-packages:1": {
"packages": "git-flow, musl-tools"
"ghcr.io/lee-orr/rusty-dev-containers/cargo-binstall:0": {
"packages": "cargo-llvm-cov,cargo-edit"
},
"ghcr.io/devcontainers-extra/features/apt-get-packages:1": {
"packages": "git-flow,musl-tools"
}
},

"overrideFeatureInstallOrder": [
"ghcr.io/devcontainers/features/rust",
"ghcr.io/lee-orr/rusty-dev-containers/cargo-binstall"
],

// cargo cache persistence
"mounts": [
{

@@ -33,7 +41,7 @@
// "forwardPorts": [],

// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "rustup target install x86_64-unknown-linux-musl | :",
"postCreateCommand": "sudo chown -R \"${USER}:rustlang\" \"${RUSTUP_HOME}\" \"${CARGO_HOME}\"",

// Configure tool-specific properties.
"customizations": {

@@ -42,7 +50,9 @@
"terminal.integrated.defaultProfile.linux": "zsh"
},
"extensions": [
"mhutchie.git-graph"
"mhutchie.git-graph",
"Gruntfuggly.todo-tree",
"ryanluker.vscode-coverage-gutters"
]
}
},
4 .gitignore vendored
@@ -1,3 +1,7 @@
# code coverage reports
coverage/


# https://github.com/github/gitignore/raw/refs/heads/main/Rust.gitignore

# Generated by Cargo
1 .vscode/launch.json vendored
@@ -22,6 +22,7 @@
"args": [
"sharry.yavook.de",
"G7RYoWME1W7-pcgipemJcr8-39FcMd92gBu-RgufeHc51z6",
"/lib/x86_64-linux-gnu/libLLVM-14.so",
"/lib/x86_64-linux-gnu/liblldb-14.so.1",
],
"env": {
31 .vscode/settings.json vendored
@@ -12,6 +12,37 @@
},
},
"rust-analyzer.imports.prefix": "plain",
"todo-tree.general.tags": [
"BUG",
"HACK",
"FIXME",
"TODO",
"XXX",
"[ ]",
"[x]",
"BOOKMARK"
],
"todo-tree.highlights.backgroundColourScheme": [
"red",
"orange",
"yellow",
"green",
"blue",
"indigo",
"violet",
"yellow"
],
"todo-tree.highlights.foregroundColourScheme": [
"white",
"black",
"black",
"white",
"white",
"white",
"black",
"black"
],
"todo-tree.highlights.useColourScheme": true,
// // override the default setting (`cargo check --all-targets`) which produces the following error
// // "can't find crate for `test`" when the default compilation target is a no_std target
// "rust-analyzer.checkOnSave.allTargets": false,
122 .vscode/tasks.json vendored
@@ -5,9 +5,6 @@
"label": "Build Project",
"type": "cargo",
"command": "build",
"presentation": {
"reveal": "silent"
},
"problemMatcher": "$rustc",
"group": "build"
},

@@ -27,12 +24,24 @@
"group": "none"
},
{
"label": "Clippy Fix Project",
"label": "Clippy All Targets",
"type": "cargo",
"command": "clippy",
"args": [
"--all-targets",
"--",
"-Wclippy::pedantic"
],
"problemMatcher": "$rustc",
"group": "build"
},
{
"label": "Clippy Fix All Targets",
"type": "cargo",
"command": "clippy",
"args": [
"--fix",
"--allow-dirty",
"--all-targets",
"--allow-staged",
"--",
"-Wclippy::pedantic"

@@ -40,16 +49,16 @@
"problemMatcher": "$rustc",
"group": "build"
},
// {
// "label": "Run Unit Tests",
// "type": "cargo",
// "command": "test",
// "args": [
// "--lib"
// ],
// "problemMatcher": "$rustc",
// "group": "test"
// },
{
"label": "Run Unit Tests",
"type": "cargo",
"command": "test",
"args": [
"--lib"
],
"problemMatcher": "$rustc",
"group": "test"
},
// {
// "label": "Run Integration Tests",
// "type": "cargo",

@@ -61,16 +70,77 @@
// "problemMatcher": "$rustc",
// "group": "test"
// },
// {
// "label": "Run All Tests",
// "type": "shell",
// "command": "echo All Tests successful!",
// "dependsOn": [
// "Run Unit Tests",
// "Run Integration Tests"
// ],
// "dependsOrder": "sequence",
// "group": "test"
// }
{
"label": "Test Coverage",
"hide": true,
"type": "cargo",
"command": "llvm-cov",
"args": [
"test",
],
"problemMatcher": "$rustc",
"group": "test",
},
{
"label": "Report Coverage (html)",
"hide": true,
"type": "cargo",
"command": "llvm-cov",
"args": [
"report",
"--html",
"--output-dir" ,
"coverage",
],
"problemMatcher": "$rustc",
"group": "test"
},
{
"label": "Report Coverage (lcov)",
"hide": true,
"type": "cargo",
"command": "llvm-cov",
"args": [
"report",
"--lcov",
"--output-path" ,
"coverage/lcov.info",
],
"problemMatcher": "$rustc",
"group": "test"
},
{
"label": "Run Coverage",
"type": "shell",
"dependsOn": [
"Test Coverage",
"Report Coverage (html)",
"Report Coverage (lcov)",
],
"dependsOrder": "sequence",
"group": "test"
},
{
"label": "Run All Tests",
"type": "shell",
"command": "echo All Tests successful!",
"dependsOn": [
"Run Unit Tests",
// "Run Integration Tests",
"Run Coverage",
],
"dependsOrder": "parallel",
"group": "test"
},
{
"label": "Upgrade dependencies",
"type": "cargo",
"command": "upgrade",
"args": [
"--incompatible",
"-vv"
],
"group": "none"
}
],
}
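The three hidden coverage tasks above chain `cargo llvm-cov` invocations, and "Run Coverage" runs them in sequence. Purely to spell out what that chain executes, here is a tiny stand-alone Rust driver; the argument lists are taken verbatim from the task definitions, everything else (the helper name, the assertion messages) is illustrative rather than part of the repository.

```rust
use std::process::Command;

// Run one `cargo <args...>` invocation and fail loudly if it does not succeed.
fn run(args: &[&str]) {
    let status = Command::new("cargo")
        .args(args)
        .status()
        .expect("failed to spawn cargo");
    assert!(status.success(), "`cargo {}` failed", args.join(" "));
}

fn main() {
    // Same order as the "Run Coverage" task: test with coverage, then emit
    // an HTML report and an lcov file under coverage/.
    run(&["llvm-cov", "test"]);
    run(&["llvm-cov", "report", "--html", "--output-dir", "coverage"]);
    run(&["llvm-cov", "report", "--lcov", "--output-path", "coverage/lcov.info"]);
}
```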
432 Cargo.lock generated
|
|
@ -4,9 +4,9 @@ version = 4
|
|||
|
||||
[[package]]
|
||||
name = "adler2"
|
||||
version = "2.0.0"
|
||||
version = "2.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
|
||||
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
|
|
@ -19,9 +19,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "anstream"
|
||||
version = "0.6.18"
|
||||
version = "0.6.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
|
||||
checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"anstyle-parse",
|
||||
|
|
@ -34,24 +34,24 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.10"
|
||||
version = "1.0.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9"
|
||||
checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
version = "0.2.6"
|
||||
version = "0.2.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9"
|
||||
checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
|
||||
dependencies = [
|
||||
"utf8parse",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-query"
|
||||
version = "1.1.2"
|
||||
version = "1.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
|
||||
checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9"
|
||||
dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
|
@ -67,6 +67,27 @@ dependencies = [
|
|||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arrayref"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"
|
||||
|
||||
[[package]]
|
||||
name = "arrayvec"
|
||||
version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
|
||||
|
||||
[[package]]
|
||||
name = "atomic"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a89cbf775b137e9b968e67227ef7f775587cde3fd31b0d8599dbd0f598a48340"
|
||||
dependencies = [
|
||||
"bytemuck",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.22.1"
|
||||
|
|
@ -79,12 +100,29 @@ version = "2.9.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
|
||||
|
||||
[[package]]
|
||||
name = "blake2b_simd"
|
||||
version = "1.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06e903a20b159e944f91ec8499fe1e55651480c541ea0a584f5d967c49ad9d99"
|
||||
dependencies = [
|
||||
"arrayref",
|
||||
"arrayvec",
|
||||
"constant_time_eq",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bumpalo"
|
||||
version = "3.17.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
|
||||
|
||||
[[package]]
|
||||
name = "bytemuck"
|
||||
version = "1.23.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c76a5792e44e4abe34d3abf15636779261d45a7450612059293d1d2cfc63422"
|
||||
|
||||
[[package]]
|
||||
name = "bytes"
|
||||
version = "1.10.1"
|
||||
|
|
@ -93,18 +131,18 @@ checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
|
|||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.24"
|
||||
version = "1.2.29"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "16595d3be041c03b09d08d0858631facccee9221e579704070e6e9e4915d3bc7"
|
||||
checksum = "5c1599538de2394445747c8cf7935946e3cc27e9625f889d979bfb2aaf569362"
|
||||
dependencies = [
|
||||
"shlex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
|
||||
|
||||
[[package]]
|
||||
name = "cfg_aliases"
|
||||
|
|
@ -114,9 +152,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
|
|||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.38"
|
||||
version = "4.5.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000"
|
||||
checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
|
|
@ -124,9 +162,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.38"
|
||||
version = "4.5.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120"
|
||||
checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
|
|
@ -136,9 +174,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.32"
|
||||
version = "4.5.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7"
|
||||
checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
|
|
@ -148,15 +186,15 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.7.4"
|
||||
version = "0.7.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
|
||||
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
|
||||
|
||||
[[package]]
|
||||
name = "colorchoice"
|
||||
version = "1.0.3"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
|
||||
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
|
||||
|
||||
[[package]]
|
||||
name = "console"
|
||||
|
|
@ -171,6 +209,24 @@ dependencies = [
|
|||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "console"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e09ced7ebbccb63b4c65413d821f2e00ce54c5ca4514ddc6b3c892fdbcbc69d"
|
||||
dependencies = [
|
||||
"encode_unicode",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "constant_time_eq"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
|
||||
|
||||
[[package]]
|
||||
name = "cookie"
|
||||
version = "0.18.1"
|
||||
|
|
@ -202,9 +258,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "crc32fast"
|
||||
version = "1.4.2"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3"
|
||||
checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
|
@ -234,9 +290,9 @@ version = "0.11.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de"
|
||||
dependencies = [
|
||||
"console",
|
||||
"console 0.15.11",
|
||||
"shell-words",
|
||||
"thiserror",
|
||||
"thiserror 1.0.69",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -316,10 +372,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
|
||||
|
||||
[[package]]
|
||||
name = "flate2"
|
||||
version = "1.1.1"
|
||||
name = "errno"
|
||||
version = "0.3.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece"
|
||||
checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastrand"
|
||||
version = "2.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
|
||||
|
||||
[[package]]
|
||||
name = "flate2"
|
||||
version = "1.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"miniz_oxide",
|
||||
|
|
@ -348,14 +420,26 @@ checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
|
|||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"wasi",
|
||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"r-efi",
|
||||
"wasi 0.14.2+wasi-0.2.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.15.3"
|
||||
version = "0.15.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3"
|
||||
checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
|
|
@ -489,9 +573,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.9.0"
|
||||
version = "2.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
|
||||
checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown",
|
||||
|
|
@ -499,13 +583,13 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "indicatif"
|
||||
version = "0.17.11"
|
||||
version = "0.18.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235"
|
||||
checksum = "70a646d946d06bedbbc4cac4c218acf4bbf2d87757a784857025f4d447e4e1cd"
|
||||
dependencies = [
|
||||
"console",
|
||||
"number_prefix",
|
||||
"console 0.16.0",
|
||||
"portable-atomic",
|
||||
"unit-prefix",
|
||||
"web-time",
|
||||
]
|
||||
|
||||
|
|
@ -557,9 +641,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.172"
|
||||
version = "0.2.174"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
|
||||
checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
|
||||
|
||||
[[package]]
|
||||
name = "libredox"
|
||||
|
|
@ -571,6 +655,12 @@ dependencies = [
|
|||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.9.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
|
||||
|
||||
[[package]]
|
||||
name = "litemap"
|
||||
version = "0.8.0"
|
||||
|
|
@ -591,15 +681,15 @@ checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
|
|||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.4"
|
||||
version = "2.7.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
||||
checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
|
||||
|
||||
[[package]]
|
||||
name = "miniz_oxide"
|
||||
version = "0.8.8"
|
||||
version = "0.8.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a"
|
||||
checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
|
||||
dependencies = [
|
||||
"adler2",
|
||||
]
|
||||
|
|
@ -622,12 +712,6 @@ version = "0.1.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
|
||||
|
||||
[[package]]
|
||||
name = "number_prefix"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.21.3"
|
||||
|
|
@ -648,9 +732,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
|
|||
|
||||
[[package]]
|
||||
name = "portable-atomic"
|
||||
version = "1.11.0"
|
||||
version = "1.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e"
|
||||
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
|
||||
|
||||
[[package]]
|
||||
name = "portable-atomic-util"
|
||||
|
|
@ -694,15 +778,21 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "r-efi"
|
||||
version = "5.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
"getrandom 0.2.16",
|
||||
"libredox",
|
||||
"thiserror",
|
||||
"thiserror 1.0.69",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -742,17 +832,30 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
|
|||
dependencies = [
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"getrandom",
|
||||
"getrandom 0.2.16",
|
||||
"libc",
|
||||
"untrusted",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustls"
|
||||
version = "0.23.27"
|
||||
name = "rustix"
|
||||
version = "1.0.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "730944ca083c1c233a75c09f199e973ca499344a2b7ba9e755c457e86fb4a321"
|
||||
checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustls"
|
||||
version = "0.23.29"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2491382039b29b9b11ff08b76ff6c97cf287671dbb74f0be44bda389fffe9bd1"
|
||||
dependencies = [
|
||||
"log",
|
||||
"once_cell",
|
||||
|
|
@ -783,15 +886,21 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "rustls-webpki"
|
||||
version = "0.103.3"
|
||||
version = "0.103.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e4a72fe2bcf7a6ac6fd7d0b9e5cb68aeb7d4c0a0271730218b3e92d43b4eb435"
|
||||
checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc"
|
||||
dependencies = [
|
||||
"ring",
|
||||
"rustls-pki-types",
|
||||
"untrusted",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustversion"
|
||||
version = "1.0.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d"
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.20"
|
||||
|
|
@ -846,24 +955,30 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
|||
name = "shrupl"
|
||||
version = "0.1.0-alpha"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"blake2b_simd",
|
||||
"clap",
|
||||
"console",
|
||||
"console 0.16.0",
|
||||
"ctrlc",
|
||||
"dialoguer",
|
||||
"dirs-next",
|
||||
"env_logger",
|
||||
"indicatif",
|
||||
"log",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"thiserror 2.0.12",
|
||||
"ureq",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.15.0"
|
||||
version = "1.15.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
|
||||
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
|
||||
|
||||
[[package]]
|
||||
name = "stable_deref_trait"
|
||||
|
|
@ -885,9 +1000,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
|
|||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.101"
|
||||
version = "2.0.104"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
|
||||
checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
|
@ -905,13 +1020,35 @@ dependencies = [
|
|||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.20.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
|
||||
dependencies = [
|
||||
"fastrand",
|
||||
"getrandom 0.3.3",
|
||||
"once_cell",
|
||||
"rustix",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.69"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
"thiserror-impl 1.0.69",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "2.0.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
|
||||
dependencies = [
|
||||
"thiserror-impl 2.0.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -925,6 +1062,17 @@ dependencies = [
|
|||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "2.0.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "time"
|
||||
version = "0.3.41"
|
||||
|
|
@ -974,9 +1122,15 @@ checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
|
|||
|
||||
[[package]]
|
||||
name = "unicode-width"
|
||||
version = "0.2.0"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd"
|
||||
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
|
||||
|
||||
[[package]]
|
||||
name = "unit-prefix"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "323402cff2dd658f39ca17c789b502021b3f18707c91cdf22e3838e1b4023817"
|
||||
|
||||
[[package]]
|
||||
name = "untrusted"
|
||||
|
|
@ -986,9 +1140,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
|
|||
|
||||
[[package]]
|
||||
name = "ureq"
|
||||
version = "3.0.11"
|
||||
version = "3.0.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7a3e9af6113ecd57b8c63d3cd76a385b2e3881365f1f489e54f49801d0c83ea"
|
||||
checksum = "9f0fde9bc91026e381155f8c67cb354bcd35260b2f4a29bcc84639f762760c39"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"cookie_store",
|
||||
|
|
@ -1007,9 +1161,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ureq-proto"
|
||||
version = "0.4.1"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fadf18427d33828c311234884b7ba2afb57143e6e7e69fda7ee883b624661e36"
|
||||
checksum = "59db78ad1923f2b1be62b6da81fe80b173605ca0d57f85da2e005382adf693f7"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"http",
|
||||
|
|
@ -1046,6 +1200,18 @@ version = "0.2.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "1.17.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
|
||||
dependencies = [
|
||||
"atomic",
|
||||
"getrandom 0.3.3",
|
||||
"js-sys",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.9.5"
|
||||
|
|
@ -1058,6 +1224,15 @@ version = "0.11.0+wasi-snapshot-preview1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.14.2+wasi-0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3"
|
||||
dependencies = [
|
||||
"wit-bindgen-rt",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.100"
|
||||
|
|
@ -1066,6 +1241,7 @@ checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
|
|||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
"rustversion",
|
||||
"wasm-bindgen-macro",
|
||||
]
|
||||
|
||||
|
|
@ -1131,14 +1307,14 @@ version = "0.26.11"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9"
|
||||
dependencies = [
|
||||
"webpki-roots 1.0.0",
|
||||
"webpki-roots 1.0.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpki-roots"
|
||||
version = "1.0.0"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2853738d1cc4f2da3a225c18ec6c3721abb31961096e9dbf5ab35fa88b19cfdb"
|
||||
checksum = "8782dd5a41a24eed3a4f40b606249b3e236ca61adf1f25ea4d45c73de122b502"
|
||||
dependencies = [
|
||||
"rustls-pki-types",
|
||||
]
|
||||
|
|
@ -1171,7 +1347,7 @@ version = "0.52.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||
dependencies = [
|
||||
"windows-targets",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1180,7 +1356,16 @@ version = "0.59.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
|
||||
dependencies = [
|
||||
"windows-targets",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.60.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
|
||||
dependencies = [
|
||||
"windows-targets 0.53.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1189,14 +1374,30 @@ version = "0.52.6"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_gnullvm",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
"windows_aarch64_gnullvm 0.52.6",
|
||||
"windows_aarch64_msvc 0.52.6",
|
||||
"windows_i686_gnu 0.52.6",
|
||||
"windows_i686_gnullvm 0.52.6",
|
||||
"windows_i686_msvc 0.52.6",
|
||||
"windows_x86_64_gnu 0.52.6",
|
||||
"windows_x86_64_gnullvm 0.52.6",
|
||||
"windows_x86_64_msvc 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.53.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm 0.53.0",
|
||||
"windows_aarch64_msvc 0.53.0",
|
||||
"windows_i686_gnu 0.53.0",
|
||||
"windows_i686_gnullvm 0.53.0",
|
||||
"windows_i686_msvc 0.53.0",
|
||||
"windows_x86_64_gnu 0.53.0",
|
||||
"windows_x86_64_gnullvm 0.53.0",
|
||||
"windows_x86_64_msvc 0.53.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1205,48 +1406,105 @@ version = "0.52.6"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.53.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
|
||||
|
||||
[[package]]
|
||||
name = "wit-bindgen-rt"
|
||||
version = "0.39.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "writeable"
|
||||
version = "0.6.1"
|
||||
|
|
|
|||
22 Cargo.toml
@@ -5,18 +5,32 @@ edition = "2024"
description = "ShrUpl is a tool to upload files to a Sharry Instance through a public Alias, leveraging the tus protocol"

[dependencies]
clap = { version = "4.5.38", features = ["derive"] }
console = { version = "0.15.11", default-features = false }
base64 = { version = "0.22.1", default-features = false }
blake2b_simd = "1.0.3"
clap = { version = "4.5.41", features = ["derive"] }
console = { version = "0.16.0", default-features = false }
ctrlc = { version = "3.4.7", features = ["termination"] }
dialoguer = { version = "0.11.0", default-features = false }
dirs-next = "2.0.0"
env_logger = "0.11.8"
indicatif = { version = "0.17.11", default-features = false }
indicatif = { version = "0.18.0", default-features = false }
log = "0.4.27"
regex = "1.11.1"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
ureq = { version = "3.0.11", features = ["json"] }
thiserror = "2.0.12"
ureq = { version = "3.0.12", features = ["json"] }

[dev-dependencies]
tempfile = "3.20.0"
uuid = { version = "1.17.0", features = ["rng", "std", "v1"] }

[profile.release]
# Optimize for speed even more aggressively
opt-level = "z"
# better inlining
codegen-units = 1
# linker‐time optimization
lto = true
debug = false
panic = "abort"
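The crate description above says shrupl uploads files "leveraging the tus protocol", i.e. resumable, chunked uploads that track a byte offset so a failed chunk can be retried; the real client and retry logic live in src/appstate.rs, src/cachefile.rs and src/bin/shrupl.rs further down. As a minimal sketch of just that chunk/offset bookkeeping (standard library only; the function names, the temp-file demo and the stubbed network call are illustrative, not the crate's actual code):

```rust
use std::fs::File;
use std::io::{self, Read, Seek, SeekFrom};
use std::path::Path;

/// Read a file in fixed-size chunks, tracking the confirmed offset so a
/// failed chunk can be re-read and retried. A real implementation would
/// bound the retries, as shrupl's `may_retry` check does.
fn upload_in_chunks(path: &Path, chunk_size: usize) -> io::Result<()> {
    let mut file = File::open(path)?;
    let mut buffer = vec![0u8; chunk_size];
    let mut offset: u64 = 0;

    loop {
        let read = file.read(&mut buffer)?;
        if read == 0 {
            break; // EOF: the whole file has been consumed
        }
        match send_chunk(&buffer[..read], offset) {
            // Chunk confirmed: advance the offset past it.
            Ok(()) => offset += read as u64,
            // Chunk failed: seek back to the confirmed offset and retry.
            Err(_) => file.seek(SeekFrom::Start(offset)).map(|_| ())?,
        }
    }
    Ok(())
}

// Placeholder for the actual HTTP PATCH of one chunk at `offset`.
fn send_chunk(_chunk: &[u8], _offset: u64) -> Result<(), ()> {
    Ok(())
}

fn main() -> io::Result<()> {
    // Demo input only: write a small file to a temp path and "upload" it.
    let path = std::env::temp_dir().join("shrupl-demo.bin");
    std::fs::write(&path, vec![0u8; 10_000])?;
    upload_in_chunks(&path, 4096)
}
```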
@ -1,87 +0,0 @@
|
|||
This conversation was generated with Duck.ai (https://duck.ai) using OpenAI's o3-mini model. AI chats may display incorrect or offensive information (for more information, see https://duckduckgo.com/duckai/privacy-terms).
|
||||
|
||||
====================
|
||||
|
||||
User input 1 of 2 - 5 Jun 2025, 02:27:36:
|
||||
in Rust, what can I use to display multiple progress bars in the terminal?
|
||||
|
||||
o3-mini:
|
||||
You can use the indicatif crate, which provides a MultiProgress type that lets you manage and display multiple progress bars concurrently in a terminal.
|
||||
|
||||
Below is a simple example of how to use indicatif’s MultiProgress to display multiple progress bars:
|
||||
|
||||
```rust
|
||||
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
|
||||
use std::{thread, time::Duration};
|
||||
|
||||
fn main() {
|
||||
// Create a MultiProgress container.
|
||||
let m = MultiProgress::new();
|
||||
|
||||
// Create several progress bars.
|
||||
let pb1 = m.add(ProgressBar::new(100));
|
||||
pb1.set_style(
|
||||
ProgressStyle::default_bar()
|
||||
.template("{msg} {bar:40.cyan/blue} {pos:>3}/{len:3}")
|
||||
.expect("Failed to set progress bar style"),
|
||||
);
|
||||
pb1.set_message("Downloading");
|
||||
|
||||
let pb2 = m.add(ProgressBar::new(50));
|
||||
pb2.set_style(
|
||||
ProgressStyle::default_bar()
|
||||
.template("{msg} {bar:40.green/black} {pos:>3}/{len:3}")
|
||||
.expect("Failed to set progress bar style"),
|
||||
);
|
||||
pb2.set_message("Processing");
|
||||
|
||||
// Optionally, spawn threads to update the progress bars concurrently.
|
||||
let h1 = std::thread::spawn(move || {
|
||||
for i in 0..=100 {
|
||||
pb1.set_position(i);
|
||||
thread::sleep(Duration::from_millis(50));
|
||||
}
|
||||
pb1.finish_with_message("Download complete");
|
||||
});
|
||||
|
||||
let h2 = std::thread::spawn(move || {
|
||||
for i in 0..=50 {
|
||||
pb2.set_position(i);
|
||||
thread::sleep(Duration::from_millis(100));
|
||||
}
|
||||
pb2.finish_with_message("Processing complete");
|
||||
});
|
||||
|
||||
// Wait for threads to complete. MultiProgress takes care of rendering.
|
||||
h1.join().expect("Thread 1 panicked");
|
||||
h2.join().expect("Thread 2 panicked");
|
||||
|
||||
// You may also iterate on the multi-progress which blocks until all bars finish:
|
||||
// m.join().unwrap();
|
||||
}
|
||||
```
|
||||
|
||||
### Steps Explained
|
||||
|
||||
1. **MultiProgress Container**:
|
||||
Create a `MultiProgress` instance which is used to manage multiple progress bars.
|
||||
|
||||
2. **Progress Bars**:
|
||||
Create individual progress bars using `MultiProgress::add()`. You can customize each progress bar with a message, style, and length.
|
||||
|
||||
3. **Concurrent Updates**:
|
||||
The example spawns two threads – one for each progress bar. Each thread updates its progress bar independently using `set_position()` and finishes the progress bar with `finish_with_message()`.
|
||||
|
||||
4. **Thread Joining**:
|
||||
Ensure the main thread waits for the worker threads to complete to prevent the program from exiting prematurely.
|
||||
|
||||
### Installation
|
||||
|
||||
Make sure to add indicatif to your Cargo.toml:
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
indicatif = "0.17" # or the latest version available on crates.io
|
||||
```
|
||||
|
||||
This crate provides many options for customizing and using progress bars in a concurrent and effective manner. The documentation is available at [indicatif on docs.rs](https://docs.rs/indicatif) for further details and advanced usage.
|
||||
|
|
@@ -1,4 +1,4 @@
outline of sharry uploading
# Outline of sharry upload API

1. POST to "new" route
- uri: https://sharry.yavook.de/api/v2/alias/upload/new

@@ -32,6 +32,16 @@ outline of sharry uploading
- res.status == 200
- res_json.success, res_json.message

hints

- https://stackoverflow.com/questions/59586787/rust-how-to-do-http-put-of-large-files
# Links

- yvk repo: https://code.yavook.de/jmm/shrupl
- sharry issue: https://github.com/eikek/sharry/issues/1659


# Ideas

- cli functions
- "continue" and "new" flags to avoid user interaction
- "quiet" flag to disable output entirely
- some switch to change log to "pretty-print"
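The outline only pins down a 200 status plus `success` and `message` fields in the JSON responses; everything else about the payload is left open. A rough sketch of that response check using `serde_json` (already a dependency); the function name and the example payload are made up for illustration and this is not the project's actual client code:

```rust
use serde_json::Value;

/// Check a Sharry-style response: expect HTTP 200 and a JSON body with
/// `success` (bool) and `message` (string) fields, per the outline above.
fn check_sharry_response(status: u16, body: &str) -> Result<String, String> {
    if status != 200 {
        return Err(format!("unexpected status: {status}"));
    }
    let json: Value = serde_json::from_str(body).map_err(|e| e.to_string())?;
    match json["success"].as_bool() {
        Some(true) => Ok(json["message"].as_str().unwrap_or_default().to_owned()),
        _ => Err(json["message"].as_str().unwrap_or("unknown error").to_owned()),
    }
}

fn main() {
    // Hypothetical payload shaped like the fields the outline mentions.
    let body = r#"{ "success": true, "message": "Share created." }"#;
    println!("{:?}", check_sharry_response(200, body));
}
```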
294 src/appstate.rs
|
|
@ -1,185 +1,153 @@
|
|||
use std::{
|
||||
collections::VecDeque,
|
||||
fs,
|
||||
io::{self, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use std::{fmt, io, time::Duration};
|
||||
|
||||
use console::style;
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use log::{debug, trace};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use indicatif::ProgressBar;
|
||||
use log::{debug, warn};
|
||||
|
||||
use super::{
|
||||
use crate::{
|
||||
cachefile::CacheFile,
|
||||
cli::Cli,
|
||||
sharry::{Alias, ChunkState, FileChecked, FileUploading, Share, UploadError},
|
||||
file::{Chunk, FileTrait},
|
||||
output::new_progressbar,
|
||||
sharry::{Client, ShareID},
|
||||
};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct AppState {
|
||||
#[serde(skip)]
|
||||
file_name: PathBuf,
|
||||
#[serde(skip)]
|
||||
progress: Option<ProgressBar>,
|
||||
|
||||
alias: Alias,
|
||||
share: Share,
|
||||
files: VecDeque<FileState>,
|
||||
http: ureq::Agent,
|
||||
inner: CacheFile,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
enum FileState {
|
||||
C(FileChecked),
|
||||
U(FileUploading),
|
||||
}
|
||||
|
||||
impl FileState {
|
||||
fn file_name(&self) -> &str {
|
||||
match self {
|
||||
FileState::C(checked) => checked.file_name(),
|
||||
FileState::U(uploading) => uploading.file_name(),
|
||||
}
|
||||
impl fmt::Debug for AppState {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("AppState")
|
||||
.field("inner", &self.inner)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
fn new_http(args: &Cli) -> ureq::Agent {
|
||||
ureq::Agent::config_builder()
|
||||
.timeout_global(args.get_timeout())
|
||||
.build()
|
||||
.into()
|
||||
}
|
||||
|
||||
fn new_share(args: &Cli) -> crate::Result<ShareID> {
|
||||
new_http(args).share_create(&args.get_uri(), &args.alias, args.get_share_request())
|
||||
}
|
||||
|
||||
impl AppState {
|
||||
fn cache_dir() -> PathBuf {
|
||||
let dir_name = dirs_next::cache_dir()
|
||||
.expect("could not determine cache directory")
|
||||
.join("shrupl");
|
||||
|
||||
trace!("cachedir: {:?}", dir_name.display());
|
||||
dir_name
|
||||
}
|
||||
|
||||
fn cache_file(args: &Cli) -> PathBuf {
|
||||
let file_name = Self::cache_dir().join(format!("{}.json", args.get_hash()));
|
||||
|
||||
trace!("cachefile: {:?}", file_name.display());
|
||||
file_name
|
||||
}
|
||||
|
||||
fn load(file_name: impl AsRef<Path>) -> io::Result<Self> {
|
||||
let content = fs::read_to_string(file_name)?;
|
||||
serde_json::from_str(&content).map_err(io::Error::other)
|
||||
}
|
||||
|
||||
pub fn try_resume(args: &Cli) -> Option<Self> {
|
||||
let file_name = Self::cache_file(args);
|
||||
|
||||
Self::load(&file_name)
|
||||
.inspect_err(|e| debug!("could not resume from {:?}: {e}", file_name.display()))
|
||||
.map(|state| {
|
||||
debug!("successfully loaded AppState");
|
||||
|
||||
Self {
|
||||
file_name,
|
||||
progress: None,
|
||||
alias: state.alias,
|
||||
share: state.share,
|
||||
files: state.files,
|
||||
}
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
pub fn from_args(args: &Cli, http: &ureq::Agent) -> Result<Self, ureq::Error> {
|
||||
let file_name = Self::cache_file(args);
|
||||
let alias = args.get_alias();
|
||||
|
||||
let share = Share::create(http, &alias, args.get_share_request())?;
|
||||
|
||||
let files: VecDeque<_> = args.files.clone().into_iter().map(FileState::C).collect();
|
||||
|
||||
Ok(Self {
|
||||
file_name,
|
||||
fn new(http: ureq::Agent, inner: CacheFile) -> Self {
|
||||
Self {
|
||||
progress: None,
|
||||
alias,
|
||||
share,
|
||||
files,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn file_names(&self) -> Vec<&str> {
|
||||
self.files.iter().map(FileState::file_name).collect()
|
||||
}
|
||||
|
||||
pub fn upload_chunk(
|
||||
&mut self,
|
||||
http: &ureq::Agent,
|
||||
chunk_size: usize,
|
||||
) -> Result<Option<()>, UploadError> {
|
||||
let uploading = match self.files.pop_front() {
|
||||
Some(FileState::C(checked)) => checked
|
||||
.start_upload(http, &self.alias, &self.share)
|
||||
.unwrap(),
|
||||
Some(FileState::U(uploading)) => uploading,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
debug!("{uploading} chunk {chunk_size}");
|
||||
|
||||
let pb = match self.progress {
|
||||
Some(ref pb) => pb,
|
||||
None => {
|
||||
self.progress = Some({
|
||||
let pb = {
|
||||
let ps = ProgressStyle::with_template(&format!(
|
||||
"{{msg:.yellow}}: {{bar:50.cyan/blue}} {{binary_bytes:.magenta}}{}{{binary_total_bytes:.magenta}} ({{elapsed}})",
|
||||
style("/").magenta(),
|
||||
))
|
||||
.unwrap();
|
||||
|
||||
ProgressBar::new(uploading.size())
|
||||
.with_style(ps)
|
||||
.with_message(uploading.file_name().to_owned())
|
||||
.with_position(uploading.offset())
|
||||
};
|
||||
pb.tick();
|
||||
|
||||
pb
|
||||
});
|
||||
self.progress.as_ref().unwrap()
|
||||
}
|
||||
};
|
||||
pb.tick();
|
||||
|
||||
match uploading.upload_chunk(http, &self.alias, chunk_size) {
|
||||
ChunkState::Ok(upl) => {
|
||||
pb.set_position(upl.offset());
|
||||
self.files.push_front(FileState::U(upl));
|
||||
Ok(Some(()))
|
||||
}
|
||||
ChunkState::Err(upl, e) => {
|
||||
self.files.push_front(FileState::U(upl));
|
||||
Err(e)
|
||||
}
|
||||
ChunkState::Finished(path) => {
|
||||
debug!("Finished {:?}!", path.display());
|
||||
pb.finish();
|
||||
self.progress = None;
|
||||
self.share.notify(http, &self.alias).unwrap();
|
||||
|
||||
Ok(self.files.front().map(drop))
|
||||
}
|
||||
http,
|
||||
inner,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save(&self) -> io::Result<()> {
|
||||
fs::create_dir_all(Self::cache_dir())?;
|
||||
|
||||
let json = serde_json::to_string_pretty(self).map_err(io::Error::other)?;
|
||||
let mut file = fs::File::create(&self.file_name)?;
|
||||
file.write_all(json.as_bytes())?;
|
||||
|
||||
trace!("updated {:?}", self.file_name.display());
|
||||
Ok(())
|
||||
pub fn try_resume(args: &Cli) -> crate::Result<Self> {
|
||||
Ok(Self::new(new_http(args), CacheFile::try_resume(args)?))
|
||||
}
|
||||
|
||||
pub fn clear(self) -> io::Result<()> {
|
||||
fs::remove_file(&self.file_name)?;
|
||||
pub fn from_args(args: &Cli) -> crate::Result<Self> {
|
||||
Ok(Self::new(
|
||||
new_http(args),
|
||||
CacheFile::from_args(args, new_share)?,
|
||||
))
|
||||
}
|
||||
|
||||
trace!("removed {:?}", self.file_name.display());
|
||||
Ok(())
|
||||
fn with_progressbar(&mut self, f: impl FnOnce(&ProgressBar), drop_bar: bool) {
|
||||
let bar = &*self.progress.get_or_insert_with(new_progressbar);
|
||||
|
||||
if let Some(upl) = self.inner.peek_uploading() {
|
||||
if bar.length().is_none() {
|
||||
bar.set_length(upl.get_size());
|
||||
bar.set_message(upl.get_name().to_string());
|
||||
bar.enable_steady_tick(Duration::from_millis(100));
|
||||
}
|
||||
|
||||
bar.set_position(upl.get_offset());
|
||||
// BUG in `indicatif` crate?
|
||||
// `set_position` does not force an immediate redraw like e.g. `inc_length`
|
||||
}
|
||||
|
||||
f(bar);
|
||||
|
||||
if drop_bar {
|
||||
self.progress = None;
|
||||
}
|
||||
}
|
||||
|
||||
fn touch_progressbar(&mut self) {
|
||||
self.with_progressbar(|_| (), false);
|
||||
}
|
||||
|
||||
fn drop_progressbar(&mut self, f: impl FnOnce(&ProgressBar)) {
|
||||
self.with_progressbar(f, true);
|
||||
}
|
||||
|
||||
fn next_chunk<'t>(&mut self, buffer: &'t mut [u8]) -> crate::Result<Option<Chunk<'t>>> {
|
||||
if self.inner.get_uploading(&self.http)?.is_none() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
self.touch_progressbar();
|
||||
|
||||
let uploading = self.inner.expect_uploading();
|
||||
debug!("{uploading:?}");
|
||||
|
||||
let chunk = uploading.read(buffer)?;
|
||||
debug!("{chunk:?}");
|
||||
|
||||
Ok(Some(chunk))
|
||||
}
|
||||
|
||||
pub fn upload_chunk(&mut self, buffer: &mut [u8]) -> crate::Result<bool> {
|
||||
let Some(chunk) = self.next_chunk(buffer)? else {
|
||||
self.inner
|
||||
.share_notify(&self.http)
|
||||
.unwrap_or_else(|e| warn!("Failed to notify the share: {e}"));
|
||||
|
||||
return Ok(true);
|
||||
};
|
||||
|
||||
self.inner.file_patch(&self.http, &chunk)?;
|
||||
|
||||
self.touch_progressbar();
|
||||
|
||||
if let Some(path) = self.inner.check_eof() {
|
||||
debug!("Finished {:?}!", path.display());
|
||||
self.drop_progressbar(ProgressBar::finish);
|
||||
}
|
||||
|
||||
Ok(self.inner.peek_uploading().is_none() && self.inner.queue().is_empty())
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn rewind_chunk(mut self) -> Option<Self> {
|
||||
self.inner = self.inner.rewind_chunk()?;
|
||||
|
||||
Some(self)
|
||||
}
|
||||
|
||||
pub fn abort_upload(&mut self) {
|
||||
self.inner.abort_upload();
|
||||
self.drop_progressbar(ProgressBar::abandon);
|
||||
}
|
||||
|
||||
pub fn rebuild_share(self, args: &Cli) -> crate::Result<Self> {
|
||||
Ok(Self::new(self.http, CacheFile::from_args(args, new_share)?))
|
||||
}
|
||||
|
||||
pub fn save(&self) -> io::Result<()> {
|
||||
self.inner.save()
|
||||
}
|
||||
|
||||
pub fn discard(self) -> io::Result<()> {
|
||||
self.inner.discard()
|
||||
}
|
||||
|
||||
pub fn clear_any(args: &Cli) {
|
||||
CacheFile::clear_any(args);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
142 src/bin/shrupl.rs Normal file
|
|
@ -0,0 +1,142 @@
|
|||
use std::{
    process,
    sync::{
        Arc,
        atomic::{AtomicBool, Ordering},
    },
};

use clap::Parser;
use console::{StyledObject, style};
use log::{info, trace};

use shrupl::{
    AppState, Cli,
    output::{self, Log, SHRUPL},
};

fn main() {
    let args = Cli::parse();

    env_logger::Builder::new()
        .filter_module("shrupl", args.get_level_filter())
        .parse_default_env()
        .init();

    info!("args: {args:#?}");

    println!("{} to {}!", style("Welcome").magenta().bold(), *SHRUPL);

    let resumed = AppState::try_resume(&args);

    let check_ctrlc = {
        let stop = Arc::new(AtomicBool::new(false));
        let stop_ctrlc = stop.clone();

        ctrlc::set_handler(move || {
            stop_ctrlc.store(true, Ordering::SeqCst);
            eprintln!("{} stopping as soon as possible!", *SHRUPL);
        })
        .expect("Error setting Ctrl-C handler");

        move || {
            if stop.load(Ordering::SeqCst) {
                process::exit(255);
            }
        }
    };

    let mut state = resumed
        .inspect_err(|e| {
            AppState::clear_any(&args);
            Log::handle(e);
            info!("could not resume from hash {:?}: {e}", args.get_hash());
        })
        .ok()
        .and_then(|state| output::prompt_continue().then_some(state))
        .unwrap_or_else(|| match AppState::from_args(&args) {
            Ok(state) => {
                state.save().unwrap_or_else(|e| {
                    Log::warning(format_args!("Failed to save state: {e}"));
                });
                state
            }
            Err(e) => {
                Log::handle(&e);
                Log::error(format_args!("Failed to create state: {e}"));
            }
        });

    check_ctrlc();

    info!("continuing with state: {state:#?}");

    let fns_magenta = output::style_all(&args.file_names(), StyledObject::magenta).join(", ");
    println!("{} is uploading: {fns_magenta}", *SHRUPL);

    let mut buffer = vec![0; args.chunk_size * 1024 * 1024];
    let mut tries = 0;

    loop {
        if !args.may_retry(tries) {
            Log::error("Retry limit reached!");
        }

        match state.upload_chunk(&mut buffer) {
            Err(e) => {
                Log::handle(&e);

                if let Some(p) = e.get_invalid_param() {
                    match p {
                        // Error 404 (File not found)
                        shrupl::Parameter::FileID(fid) => {
                            info!("retrying file {fid:?}");

                            state.abort_upload();
                        }
                        // Error 404 (Share not found)
                        shrupl::Parameter::ShareID(sid) => {
                            output::prompt_rebuild_share();
                            info!("rebuilding share {sid:?}");

                            // rebuild share
                            let Ok(s) = state.rebuild_share(&args) else {
                                Log::error("Failed to rebuild share!");
                            };
                            state = s;
                        }
                        p => Log::error(format_args!("Unexpected {p}!")),
                    }
                } else {
                    // retry chunk
                    let Some(s) = state.rewind_chunk() else {
                        Log::error("Failed to retry chunk!");
                    };
                    tries += 1;

                    info!("State rewound, retrying last chunk (tries: {tries})");
                    state = s;
                }
            }
            Ok(false) => {
                trace!("chunk uploaded");
                tries = 0;
            }
            Ok(true) => {
                info!("all uploads done");
                break;
            }
        }

        state.save().unwrap_or_else(|e| {
            Log::warning(format_args!("Failed to save state: {e}"));
        });
        check_ctrlc();
    }

    state.discard().unwrap_or_else(|e| {
        Log::warning(format_args!("Failed to remove state: {e}"));
    });

    println!("{} finished {}", *SHRUPL, style("successfully!").green());
}

223 src/cachefile.rs (Normal file)
@@ -0,0 +1,223 @@
use std::{
|
||||
collections::VecDeque,
|
||||
fs,
|
||||
io::{self, Write},
|
||||
path::{Path, PathBuf},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use indicatif::ProgressBar;
|
||||
use log::{info, trace};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
cli::Cli,
|
||||
file::{self, Chunk, FileTrait},
|
||||
output::new_progressbar,
|
||||
sharry::{AliasID, Client, ShareID, Uri},
|
||||
};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct CacheFile {
|
||||
#[serde(skip)]
|
||||
file_name: PathBuf,
|
||||
|
||||
uri: Uri,
|
||||
alias_id: AliasID,
|
||||
share_id: ShareID,
|
||||
|
||||
uploading: Option<file::Uploading>,
|
||||
files: VecDeque<file::Checked>,
|
||||
}
|
||||
|
||||
impl CacheFile {
|
||||
fn cache_dir() -> PathBuf {
|
||||
let dir_name = dirs_next::cache_dir()
|
||||
.expect("could not determine cache directory")
|
||||
.join("shrupl");
|
||||
|
||||
trace!("cachedir: {:?}", dir_name.display());
|
||||
dir_name
|
||||
}
|
||||
|
||||
fn cache_file(args: &Cli) -> PathBuf {
|
||||
let file_name = Self::cache_dir().join(format!("{}.json", args.get_hash()));
|
||||
|
||||
trace!("cachefile: {:?}", file_name.display());
|
||||
file_name
|
||||
}
|
||||
|
||||
pub fn try_resume(args: &Cli) -> crate::Result<Self> {
|
||||
let file_name = Self::cache_file(args);
|
||||
|
||||
let state: Self = {
|
||||
let file = fs::File::open(&file_name)?;
|
||||
let reader = io::BufReader::new(file);
|
||||
serde_json::from_reader(reader).map_err(io::Error::other)?
|
||||
};
|
||||
|
||||
if args.should_hash() {
|
||||
fn check_hash(file: &impl FileTrait, bar: &ProgressBar) -> crate::Result<()> {
|
||||
bar.set_message(format!("checking {:?}", file.get_name()));
|
||||
file.check_hash(|bytes| bar.inc(bytes))
|
||||
}
|
||||
|
||||
info!("checking files in {state:?}");
|
||||
|
||||
// BOOKMARK assumption: total file size < 2 EiB
|
||||
let total_size = {
|
||||
let upl_size = if let Some(upl) = state.peek_uploading() {
|
||||
upl.get_size()
|
||||
} else {
|
||||
0
|
||||
};
|
||||
upl_size + state.queue().iter().map(|&f| f.get_size()).sum::<u64>()
|
||||
};
|
||||
|
||||
let bar = new_progressbar();
|
||||
bar.set_length(total_size);
|
||||
bar.enable_steady_tick(Duration::from_millis(50));
|
||||
|
||||
if let Some(upl) = state.peek_uploading() {
|
||||
check_hash(upl, &bar)?;
|
||||
}
|
||||
|
||||
for chk in state.queue() {
|
||||
check_hash(chk, &bar)?;
|
||||
}
|
||||
|
||||
bar.finish_with_message("finished checking files");
|
||||
}
|
||||
|
||||
Ok(Self { file_name, ..state })
|
||||
}
|
||||
|
||||
pub fn from_args(
|
||||
args: &Cli,
|
||||
new_share: impl FnOnce(&Cli) -> crate::Result<ShareID>,
|
||||
) -> crate::Result<Self> {
|
||||
let mut files = args.files.clone();
|
||||
|
||||
if args.should_hash() {
|
||||
info!("hashing files {files:?}");
|
||||
|
||||
let bar = new_progressbar();
|
||||
// BOOKMARK assumption: total file size < 2 EiB
|
||||
bar.set_length(files.iter().map(FileTrait::get_size).sum());
|
||||
bar.enable_steady_tick(Duration::from_millis(50));
|
||||
|
||||
for chk in &mut files {
|
||||
bar.set_message(format!("hashing {:?}", chk.get_name()));
|
||||
chk.hash(|bytes| bar.inc(bytes))?;
|
||||
}
|
||||
|
||||
bar.finish_with_message("finished hashing files");
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
file_name: Self::cache_file(args),
|
||||
uri: args.get_uri(),
|
||||
alias_id: args.alias.clone(),
|
||||
share_id: new_share(args)?,
|
||||
uploading: None,
|
||||
files: files.into(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn queue(&self) -> Vec<&file::Checked> {
|
||||
self.files.iter().collect()
|
||||
}
|
||||
|
||||
pub fn get_uploading(
|
||||
&mut self,
|
||||
client: &impl Client,
|
||||
) -> crate::Result<Option<&mut file::Uploading>> {
|
||||
if self.uploading.is_some() {
|
||||
Ok(self.uploading.as_mut())
|
||||
} else if let Some(chk) = self.files.pop_front() {
|
||||
let upl = chk.start_upload(client, &self.uri, &self.alias_id, &self.share_id)?;
|
||||
self.uploading.replace(upl);
|
||||
|
||||
Ok(self.uploading.as_mut())
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_uploading(&mut self) -> &mut file::Uploading {
|
||||
self.uploading
|
||||
.as_mut()
|
||||
.expect("expect_uploading called while not uploading")
|
||||
}
|
||||
|
||||
pub fn peek_uploading(&self) -> Option<&file::Uploading> {
|
||||
self.uploading.as_ref()
|
||||
}
|
||||
|
||||
pub fn check_eof(&mut self) -> Option<PathBuf> {
|
||||
if let Some(upl) = self.uploading.take() {
|
||||
match upl.check_eof() {
|
||||
Ok(upl) => self.uploading = Some(upl),
|
||||
Err(p) => return Some(p),
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn rewind_chunk(mut self) -> Option<Self> {
|
||||
let upl = self
|
||||
.uploading
|
||||
.take()
|
||||
.expect("rewind_chunk called while not uploading");
|
||||
|
||||
self.uploading = Some(upl.rewind()?);
|
||||
Some(self)
|
||||
}
|
||||
|
||||
pub fn abort_upload(&mut self) {
|
||||
let upl = self
|
||||
.uploading
|
||||
.take()
|
||||
.expect("abort_upload called while not uploading");
|
||||
|
||||
self.files.push_front(upl.stop());
|
||||
}
|
||||
|
||||
pub fn share_notify(&self, client: &impl Client) -> crate::Result<()> {
|
||||
client.share_notify(&self.uri, &self.alias_id, &self.share_id)
|
||||
}
|
||||
|
||||
pub fn file_patch(&self, client: &impl Client, chunk: &Chunk) -> crate::Result<()> {
|
||||
client.file_patch(&self.uri, &self.alias_id, &self.share_id, chunk)
|
||||
}
|
||||
|
||||
pub fn save(&self) -> io::Result<()> {
|
||||
let cache_dir = self.file_name.parent().ok_or_else(|| {
|
||||
io::Error::other(format!("orphan file {:?}", self.file_name.display()))
|
||||
})?;
|
||||
fs::create_dir_all(cache_dir)?;
|
||||
|
||||
let json = serde_json::to_string_pretty(self).map_err(io::Error::other)?;
|
||||
let mut file = fs::File::create(&self.file_name)?;
|
||||
file.write_all(json.as_bytes())?;
|
||||
|
||||
trace!("updated {:?}", self.file_name.display());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove(path: &Path) -> io::Result<()> {
|
||||
fs::remove_file(path)?;
|
||||
|
||||
trace!("removed {:?}", path.display());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn clear_any(args: &Cli) {
|
||||
let _ = Self::remove(&Self::cache_file(args));
|
||||
}
|
||||
|
||||
pub fn discard(self) -> io::Result<()> {
|
||||
Self::remove(&self.file_name)
|
||||
}
|
||||
}
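// Recap of where `CacheFile` keeps its state (see `cache_dir`, `cache_file` and `save`
// above): a pretty-printed JSON file named after `Cli::get_hash()` inside a `shrupl`
// folder in the per-user cache directory. A minimal sketch of resolving that path from
// outside the type; `resolve_state_path` is an illustrative helper, not part of the crate.
fn resolve_state_path(args: &crate::Cli) -> std::path::PathBuf {
    dirs_next::cache_dir()
        .expect("could not determine cache directory")
        .join("shrupl")
        .join(format!("{}.json", args.get_hash()))
}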

135 src/cli.rs
@@ -1,13 +1,16 @@
use std::{
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
time::Duration,
|
||||
use std::{convert::Infallible, fmt, io, time::Duration};
|
||||
|
||||
use base64::prelude::{BASE64_URL_SAFE_NO_PAD as BASE64URL, Engine};
|
||||
use blake2b_simd::Params as Blake2b;
|
||||
use clap::{Parser, builder::TypedValueParser, value_parser};
|
||||
use log::LevelFilter;
|
||||
|
||||
use crate::{
|
||||
file::{Checked, FileTrait},
|
||||
sharry::{AliasID, Uri, json::NewShareRequest},
|
||||
};
|
||||
|
||||
use clap::{Parser, builder::PossibleValuesParser};
|
||||
|
||||
use super::sharry::{Alias, FileChecked, NewShareRequest, Uri};
|
||||
|
||||
#[derive(Parser, Debug, Hash)]
|
||||
#[derive(Parser)]
|
||||
#[command(version, about, long_about = None)]
|
||||
pub struct Cli {
|
||||
/// Timeout in seconds for HTTP actions (set 0 or invalid to disable)
|
||||
|
|
@ -18,17 +21,13 @@ pub struct Cli {
|
|||
)]
|
||||
timeout: Duration,
|
||||
|
||||
/// Protocol for Sharry instance
|
||||
#[arg(
|
||||
short, long,
|
||||
default_value = "https", value_name = "VARIANT",
|
||||
value_parser = PossibleValuesParser::new(["http", "https"]),
|
||||
)]
|
||||
protocol: String,
|
||||
/// Number of times actions are retried
|
||||
#[arg(short, long, default_value_t = 5, value_name = "N")]
|
||||
retry_limit: u32,
|
||||
|
||||
/// Name of the new share
|
||||
#[arg(short, long, default_value = "ShrUpl Upload", value_name = "TEXT")]
|
||||
name: String,
|
||||
share_name: String,
|
||||
|
||||
/// Description of the new share
|
||||
#[arg(short, long, value_name = "TEXT")]
|
||||
|
|
@ -39,52 +38,122 @@ pub struct Cli {
|
|||
max_views: u32,
|
||||
|
||||
/// Chunk size for uploading, in MiB
|
||||
#[arg(short, long, default_value_t = 10, value_name = "N")]
|
||||
#[arg(
|
||||
short, long,
|
||||
default_value_t = 4, value_name = "M",
|
||||
value_parser = value_parser!(u32).range(1..).map(|s| s as usize),
|
||||
)]
|
||||
pub chunk_size: usize,
|
||||
|
||||
/// Don't hash files before uploading
|
||||
#[arg(short, long)]
|
||||
no_hash: bool,
|
||||
|
||||
/// Increase output verbosity
|
||||
#[arg(short, long, action = clap::ArgAction::Count)]
|
||||
verbose: u8,
|
||||
|
||||
/// Base URL for Sharry Instance
|
||||
url: String,
|
||||
|
||||
/// ID of a public alias to use
|
||||
alias: String,
|
||||
pub alias: AliasID,
|
||||
|
||||
/// Files to upload to the new share
|
||||
#[arg(value_name = "FILE", required = true, value_parser = parse_sharry_file)]
|
||||
pub files: Vec<FileChecked>,
|
||||
pub files: Vec<Checked>,
|
||||
}
|
||||
|
||||
fn parse_seconds(data: &str) -> Result<Duration, String> {
|
||||
impl fmt::Debug for Cli {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("Cli")
|
||||
.field("uri", &self.get_uri())
|
||||
.field("retry_limit", &self.retry_limit)
|
||||
.field("alias", &self.alias)
|
||||
.field("timeout", &self.get_timeout())
|
||||
.field("chunk_size", &self.chunk_size)
|
||||
.field("share_request", &self.get_share_request())
|
||||
.field("files", &self.files)
|
||||
.field("level_filter", &self.get_level_filter())
|
||||
.field("hash", &self.get_hash())
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_seconds(data: &str) -> Result<Duration, Infallible> {
|
||||
data.parse().or(Ok(0)).map(Duration::from_secs)
|
||||
}
|
||||
|
||||
fn parse_sharry_file(data: &str) -> Result<FileChecked, String> {
|
||||
FileChecked::new(data).map_err(|e| e.to_string())
|
||||
fn parse_sharry_file(data: &str) -> io::Result<Checked> {
|
||||
Checked::new(data)
|
||||
}
|
||||
|
||||
fn sorted<T>(values: &[T]) -> Vec<&T>
|
||||
where
|
||||
T: Ord,
|
||||
{
|
||||
let mut refs: Vec<_> = values.iter().collect();
|
||||
refs.sort_unstable();
|
||||
|
||||
refs
|
||||
}
|
||||
|
||||
impl Cli {
|
||||
#[must_use]
|
||||
pub fn get_timeout(&self) -> Option<Duration> {
|
||||
(!self.timeout.is_zero()).then_some(self.timeout)
|
||||
}
|
||||
|
||||
pub fn get_alias(&self) -> Alias {
|
||||
Alias::new(Uri::with_protocol(&self.protocol, &self.url), &self.alias)
|
||||
#[must_use]
|
||||
pub fn get_uri(&self) -> Uri {
|
||||
self.url.clone().into()
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn may_retry(&self, tries: u32) -> bool {
|
||||
match self.retry_limit {
|
||||
0 => true,
|
||||
limit => tries < limit,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn should_hash(&self) -> bool {
|
||||
!self.no_hash
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn get_share_request(&self) -> NewShareRequest {
|
||||
NewShareRequest::new(&self.name, self.description.as_ref(), self.max_views)
|
||||
NewShareRequest::new(&self.share_name, self.max_views)
|
||||
.description(self.description.as_ref())
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn get_level_filter(&self) -> LevelFilter {
|
||||
match self.verbose {
|
||||
0 => LevelFilter::Error,
|
||||
1 => LevelFilter::Warn,
|
||||
2 => LevelFilter::Info,
|
||||
3 => LevelFilter::Debug,
|
||||
_ => LevelFilter::Trace,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn file_names(&self) -> Vec<&str> {
|
||||
self.files.iter().map(FileTrait::get_name).collect()
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn get_hash(&self) -> String {
|
||||
let file_refs = {
|
||||
let mut refs: Vec<_> = self.files.iter().collect();
|
||||
refs.sort_unstable();
|
||||
let mut hasher = Blake2b::new().hash_length(16).to_state();
|
||||
|
||||
refs
|
||||
};
|
||||
hasher.update(self.get_uri().as_ref());
|
||||
hasher.update(self.alias.as_ref().as_bytes());
|
||||
|
||||
let mut hasher = DefaultHasher::new();
|
||||
(self.get_alias(), file_refs).hash(&mut hasher);
|
||||
for chk in sorted(&self.files) {
|
||||
hasher.update(chk.as_ref());
|
||||
}
|
||||
|
||||
format!("{:x}", hasher.finish())
|
||||
BASE64URL.encode(hasher.finalize())
|
||||
}
|
||||
}
|
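// The timeout plumbing above is easy to misread: `parse_seconds` never fails (anything
// unparsable collapses to 0 seconds), and `get_timeout` then maps a zero duration to
// `None`, i.e. "no timeout". A small sketch of that contract, written as if it were a
// unit test living next to the helpers in this module:
#[cfg(test)]
mod timeout_sketch {
    use super::*;
    use std::time::Duration;

    #[test]
    fn zero_or_garbage_disables_the_timeout() {
        assert_eq!(parse_seconds("30"), Ok(Duration::from_secs(30)));
        // "0" and unparsable input both end up as a zero duration ...
        assert_eq!(parse_seconds("0"), Ok(Duration::from_secs(0)));
        assert_eq!(parse_seconds("not a number"), Ok(Duration::from_secs(0)));
        // ... which `Cli::get_timeout` later reports as `None` (no timeout at all).
    }
}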

203 src/error.rs (Normal file)
@@ -0,0 +1,203 @@
use std::fmt;
|
||||
|
||||
use crate::sharry;
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Parameter {
|
||||
#[error("given URI {0:?}")]
|
||||
Uri(sharry::Uri),
|
||||
|
||||
#[error("given Alias ID {0:?}")]
|
||||
AliasID(sharry::AliasID),
|
||||
|
||||
#[error("stored Share ID {0:?}")]
|
||||
ShareID(sharry::ShareID),
|
||||
|
||||
#[error("stored {0:?}")]
|
||||
FileID(sharry::FileID),
|
||||
}
|
||||
|
||||
// a helper to generate all the `From<T> for Parameter` impls
|
||||
macro_rules! impl_param_from {
|
||||
// $typ: the source type; $var: the enum‐variant name
|
||||
( $( $typ:path => $var:ident ),* $(,)? ) => {
|
||||
$(
|
||||
impl From<$typ> for Parameter {
|
||||
fn from(value: $typ) -> Self {
|
||||
Self::$var(value)
|
||||
}
|
||||
}
|
||||
)*
|
||||
};
|
||||
}
|
||||
|
||||
impl_param_from! {
|
||||
sharry::Uri => Uri,
|
||||
sharry::AliasID => AliasID,
|
||||
sharry::ShareID => ShareID,
|
||||
sharry::FileID => FileID,
|
||||
}
|
||||
|
||||
impl Parameter {
|
||||
fn is_fatal(&self) -> bool {
|
||||
self.is_uri() || self.is_alias_id()
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn is_uri(&self) -> bool {
|
||||
matches!(self, Self::Uri(_))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn is_alias_id(&self) -> bool {
|
||||
matches!(self, Self::AliasID(_))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn is_share_id(&self) -> bool {
|
||||
matches!(self, Self::ShareID(_))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn is_file_id(&self) -> bool {
|
||||
matches!(self, Self::FileID(_))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Error {
|
||||
#[error(transparent)]
|
||||
StdIo(#[from] std::io::Error),
|
||||
|
||||
#[error("Response error: {0}")]
|
||||
Response(String),
|
||||
|
||||
#[error("Invalid {0}")]
|
||||
InvalidParameter(Parameter),
|
||||
|
||||
#[error("Mismatch, expected {expected:?} but got {actual:?}")]
|
||||
Mismatch { expected: String, actual: String },
|
||||
|
||||
#[error("Unknown error: {0}")]
|
||||
Unknown(String),
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
// a helper to generate all the `From<T> for Error` impls
|
||||
macro_rules! impl_error_from {
|
||||
// $typ: the source type
|
||||
( $( $typ:path ),* $(,)? ) => {
|
||||
$(
|
||||
// // implement for values
|
||||
// impl From<$typ> for Error {
|
||||
// fn from(value: $typ) -> Self {
|
||||
// Self::InvalidParameter(value.into())
|
||||
// }
|
||||
// }
|
||||
|
||||
// implement for references
|
||||
impl From<&$typ> for Error {
|
||||
fn from(value: &$typ) -> Self {
|
||||
Self::InvalidParameter(value.clone().into())
|
||||
}
|
||||
}
|
||||
)*
|
||||
};
|
||||
}
|
||||
|
||||
impl_error_from! {
|
||||
sharry::Uri,
|
||||
sharry::AliasID,
|
||||
sharry::ShareID,
|
||||
sharry::FileID,
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_pass_by_value)]
|
||||
fn into_string(val: impl ToString) -> String {
|
||||
val.to_string()
|
||||
}
|
||||
|
||||
impl Error {
|
||||
pub fn res_status_check<T>(actual: T, expected: T) -> Result<()>
|
||||
where
|
||||
T: PartialEq + fmt::Display + Copy,
|
||||
{
|
||||
if actual == expected {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(Self::Response(format!(
|
||||
"unexpected status: {actual} (expected {expected})"
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn response(e: impl ToString) -> Self {
|
||||
Self::Response(into_string(e))
|
||||
}
|
||||
|
||||
pub fn mismatch<T>(expected: impl ToString, actual: impl ToString) -> Result<T> {
|
||||
Err(Self::Mismatch {
|
||||
expected: into_string(expected),
|
||||
actual: into_string(actual),
|
||||
})
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn is_stdio_kind(&self, kind: std::io::ErrorKind) -> bool {
|
||||
if let Self::StdIo(e) = self {
|
||||
e.kind() == kind
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_mismatch<E, A>(&self, want_expected: E, want_actual: A) -> bool
|
||||
where
|
||||
E: AsRef<str>,
|
||||
A: AsRef<str>,
|
||||
{
|
||||
if let Self::Mismatch { expected, actual } = self {
|
||||
expected == want_expected.as_ref() && actual == want_actual.as_ref()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn response_contains(&self, pat: &str) -> bool {
|
||||
if let Self::Response(r) = self {
|
||||
r.contains(pat)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn get_invalid_param(&self) -> Option<&Parameter> {
|
||||
if let Self::InvalidParameter(p) = self {
|
||||
Some(p)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn is_fatal(&self) -> bool {
|
||||
match self {
|
||||
Self::InvalidParameter(p) => p.is_fatal(),
|
||||
Self::Mismatch { .. } | Self::Unknown(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! error_response {
|
||||
// Match a format string plus optional arguments
|
||||
($fmt:expr $(, $arg:expr )* $(,)?) => {
|
||||
// Expand to constructing the Error::Response variant,
|
||||
// wrapping a `format!(...)` call
|
||||
Error::Response(format!($fmt $(, $arg )*))
|
||||
};
|
||||
}
|
||||
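// Usage sketch for the `error_response!` macro defined above: it wraps a `format!` call
// in `Error::Response`, so call sites read exactly like `format!`. Note that `Error`
// must be in scope at the call site, because the expansion names it unqualified.
fn unexpected_status(status: u16) -> Error {
    error_response!("unexpected status: {status} (expected 200)")
}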

240 src/file/checked.rs (Normal file)
@@ -0,0 +1,240 @@
use std::{
|
||||
fs, io,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::sharry;
|
||||
|
||||
use super::{FileTrait, Uploading};
|
||||
|
||||
/// Description of an existing, regular file
|
||||
///
|
||||
/// - impl `Clone` for `clap` compatibility
|
||||
/// - impl `serde` for cachefile handling
|
||||
/// - impl `PartialEq..Ord` to handle multiple files given
|
||||
/// - impl `AsRef<[u8]>` for hashing with `blake2b_simd`
|
||||
#[allow(clippy::unsafe_derive_deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct Checked {
|
||||
/// canonical path to a regular file
|
||||
path: PathBuf,
|
||||
/// size of that file
|
||||
size: u64,
|
||||
/// hash of that file
|
||||
hash: Option<String>,
|
||||
}
|
||||
|
||||
impl AsRef<[u8]> for Checked {
|
||||
fn as_ref(&self) -> &[u8] {
|
||||
self.path.as_os_str().as_encoded_bytes()
|
||||
}
|
||||
}
|
||||
|
||||
impl Checked {
|
||||
/// create this directly, without any checks
|
||||
pub(super) unsafe fn new_unchecked(path: PathBuf, size: u64, hash: Option<String>) -> Self {
|
||||
Self { path, size, hash }
|
||||
}
|
||||
|
||||
/// create a new checked file from some path reference
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// - from `fs::metadata(path)` or `fs::canonicalize`
|
||||
/// - given path does not correspond to a regular file
|
||||
pub fn new(value: impl AsRef<Path>) -> io::Result<Self> {
|
||||
let meta = fs::metadata(&value)?;
|
||||
if meta.is_file() {
|
||||
Ok(Self {
|
||||
path: fs::canonicalize(&value)?,
|
||||
size: meta.len(),
|
||||
hash: None,
|
||||
})
|
||||
} else {
|
||||
Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Not a regular file",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// calculate and store hash for this file
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// - from `file::compute_hash`
|
||||
/// - Mismatch if file already hashed
|
||||
///
|
||||
/// TODO this could use an error variant like `IllegalInvocation`
|
||||
pub fn hash(&mut self, on_progress: impl FnMut(u64)) -> crate::Result<()> {
|
||||
if self.hash.is_some() {
|
||||
return crate::Error::mismatch("unhashed file", self.path.display());
|
||||
}
|
||||
|
||||
self.hash = Some(super::compute_hash(&self.path, self.size, on_progress)?);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// start uploading this file
|
||||
///
|
||||
/// - try to create a new file using the client
|
||||
/// - consume `self` into a `file::Uploading` struct
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// - from `sharry::Client::file_create`
|
||||
pub fn start_upload(
|
||||
self,
|
||||
client: &impl sharry::Client,
|
||||
uri: &sharry::Uri,
|
||||
alias_id: &sharry::AliasID,
|
||||
share_id: &sharry::ShareID,
|
||||
) -> crate::Result<Uploading> {
|
||||
let file_id = client.file_create(uri, alias_id, share_id, &self)?;
|
||||
|
||||
Ok(unsafe { Uploading::new_unchecked(self.path, self.size, self.hash, file_id) })
|
||||
}
|
||||
}
|
||||
|
||||
impl FileTrait for Checked {
|
||||
fn get_name(&self) -> &str {
|
||||
<Self as FileTrait>::extract_file_name(&self.path)
|
||||
}
|
||||
|
||||
fn get_size(&self) -> u64 {
|
||||
self.size
|
||||
}
|
||||
|
||||
fn check_hash(&self, on_progress: impl FnMut(u64)) -> crate::Result<()> {
|
||||
super::check_hash(&self.path, self.size, self.hash.as_deref(), on_progress)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use tempfile::{NamedTempFile, TempDir};
|
||||
|
||||
use crate::{
|
||||
sharry::{Client, json::NewShareRequest},
|
||||
test_util::{
|
||||
MockClient, check_trait, create_file,
|
||||
data::{HASHES_STD_GOOD, cases, data},
|
||||
},
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
fn create_checked(content: &[u8]) -> (Checked, NamedTempFile) {
|
||||
let file = create_file(content);
|
||||
let chk = Checked::new(file.path()).unwrap();
|
||||
|
||||
// return both, so the `NamedTempFile` is not auto-deleted here
|
||||
(chk, file)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new_on_existing_file_works() {
|
||||
for (content, size) in cases() {
|
||||
let (chk, file) = create_checked(content);
|
||||
let path = file.path().canonicalize().unwrap();
|
||||
|
||||
assert_eq!(chk.path, path);
|
||||
assert_eq!(chk.size, size);
|
||||
assert!(chk.hash.is_none());
|
||||
|
||||
// `FileTrait`
|
||||
assert_eq!(chk.get_name(), file.path().file_name().unwrap());
|
||||
assert_eq!(chk.get_size(), size);
|
||||
|
||||
check_trait(
|
||||
chk.as_ref(),
|
||||
path.as_os_str().as_encoded_bytes(),
|
||||
"AsRef<u8>",
|
||||
"Checked",
|
||||
);
|
||||
|
||||
// new_unchecked
|
||||
let chk = unsafe { Checked::new_unchecked(chk.path, chk.size, chk.hash) };
|
||||
assert_eq!(chk.path, path);
|
||||
assert_eq!(chk.size, size);
|
||||
assert!(chk.hash.is_none());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new_on_dir_errors() {
|
||||
let tempdir = TempDir::new().unwrap();
|
||||
let fs_root = PathBuf::from("/");
|
||||
|
||||
let dirs = [tempdir.path(), fs_root.as_path()];
|
||||
|
||||
for p in dirs {
|
||||
let err = Checked::new(p).unwrap_err();
|
||||
assert_eq!(err.kind(), io::ErrorKind::InvalidInput);
|
||||
#[cfg(target_os = "linux")]
|
||||
assert_eq!(err.to_string(), "Not a regular file");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new_on_nex_errors() {
|
||||
let tempdir = TempDir::new().unwrap();
|
||||
let nex_paths = [0, 1, 2, 3, 4].map(|i| tempdir.path().join(format!("nex_{i}.ext")));
|
||||
|
||||
for p in nex_paths {
|
||||
let err = Checked::new(p).unwrap_err();
|
||||
assert_eq!(err.kind(), io::ErrorKind::NotFound);
|
||||
#[cfg(target_os = "linux")]
|
||||
assert_eq!(err.to_string(), "No such file or directory (os error 2)");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hashing_works() {
|
||||
for (content, hash) in data().zip(HASHES_STD_GOOD) {
|
||||
let (mut chk, _file) = create_checked(content);
|
||||
|
||||
chk.hash(drop).unwrap();
|
||||
// `FileTrait`
|
||||
chk.check_hash(drop).unwrap();
|
||||
|
||||
assert_eq!(chk.hash, Some(hash.to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hashing_again_errors() {
|
||||
for content in data() {
|
||||
let (mut chk, _file) = create_checked(content);
|
||||
|
||||
// fake hash
|
||||
chk.hash = Some(String::default());
|
||||
let err = chk.hash(drop).unwrap_err();
|
||||
|
||||
assert!(err.is_mismatch("unhashed file", chk.path.display().to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn start_upload_works() {
|
||||
let client = MockClient::default();
|
||||
let uri = true.into();
|
||||
let alias_id = true.into();
|
||||
|
||||
let share_id = client
|
||||
.share_create(&uri, &alias_id, NewShareRequest::new("share", 0))
|
||||
.unwrap();
|
||||
|
||||
for content in data() {
|
||||
let (chk, _file) = create_checked(content);
|
||||
|
||||
assert!(
|
||||
chk.start_upload(&client, &uri, &alias_id, &share_id)
|
||||
.is_ok()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
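// Lifecycle sketch for `Checked`, mirroring the tests above: validate the path, optionally
// pre-hash, then hand the file over to the upload state machine. The path is a placeholder
// and the client/uri/alias/share values are expected to come from a mock or a real Sharry
// instance; this is illustrative only, not part of the crate.
fn checked_lifecycle(
    client: &impl crate::sharry::Client,
    uri: &crate::sharry::Uri,
    alias_id: &crate::sharry::AliasID,
    share_id: &crate::sharry::ShareID,
) -> crate::Result<Uploading> {
    let mut chk = Checked::new("./example.bin")?; // hypothetical path
    chk.hash(|_bytes| {})?; // optional up-front BLAKE2b hash
    chk.start_upload(client, uri, alias_id, share_id)
}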

126 src/file/chunk.rs (Normal file)
@@ -0,0 +1,126 @@
use std::{any, fmt};

use crate::sharry;

/// Chunk of binary data belonging to a currently uploading file
pub struct Chunk<'t> {
    /// id of the associated file
    file_id: sharry::FileID,
    /// offset of this chunk in bytes
    offset: u64,
    /// data inside this chunk
    data: &'t [u8],
}

impl fmt::Debug for Chunk<'_> {
    // chunks are 1 MiB or more, we shouldn't print that
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Chunk")
            .field("file_id", &self.file_id)
            .field("offset", &self.offset)
            .field("data.len()", &self.data.len())
            .finish_non_exhaustive()
    }
}

/// convert usize into other type
///
/// # Panics
///
/// - if the given value does not fit into the target type
fn from_usize_or_panic<I>(value: usize) -> I
where
    I: TryFrom<usize>,
    I::Error: std::error::Error,
{
    value.try_into().unwrap_or_else(|e| {
        let target_type = any::type_name::<I>();
        panic!("usize={value:?} did not fit into {target_type:?}: {e}")
    })
}

impl<'t> Chunk<'t> {
    /// create this directly, without any checks
    pub(super) unsafe fn new_unchecked(
        file_id: sharry::FileID,
        offset: u64,
        data: &'t [u8],
    ) -> Self {
        Self {
            file_id,
            offset,
            data,
        }
    }

    /// get a reference to the associated `file_id`
    pub fn get_file_id(&self) -> &sharry::FileID {
        &self.file_id
    }

    /// get the chunk's offset
    pub fn get_offset(&self) -> u64 {
        self.offset
    }

    /// get a reference to the associated data
    pub fn get_data(&self) -> &[u8] {
        self.data
    }

    /// get the chunk's length
    pub fn get_length(&self) -> u64 {
        // BOOKMARK this might **panic** on (hypothetical) platforms where `usize` has more than 64 bit.
        // Also, you've allocated more than 2 EiB ... in ONE chunk.
        // Whoa! Maybe just chill?
        from_usize_or_panic(self.data.len())
    }
}

#[cfg(test)]
mod tests {
    use crate::test_util::{
        check_trait,
        data::{DATA_LENGTHS_BAD, cases_with},
    };

    use super::*;

    #[test]
    fn basic_tests() {
        // items from `DATA_LENGTHS_BAD` used as mock offsets
        for (data, len, mock_offset) in cases_with(DATA_LENGTHS_BAD) {
            let fid = sharry::FileID::default();
            let chunk = unsafe { Chunk::new_unchecked(fid, mock_offset, data) };

            let repr_expect = format!(
                "Chunk {{ file_id: {:?}, offset: {:?}, data.len(): {:?}, .. }}",
                chunk.file_id,
                chunk.offset,
                chunk.data.len()
            );
            check_trait(format!("{chunk:?}"), repr_expect, "Debug", "Chunk");

            assert_eq!(
                chunk.get_file_id().to_string(),
                sharry::FileID::default().to_string()
            );
            assert_eq!(chunk.get_offset(), mock_offset);
            assert_eq!(chunk.get_data(), data);
            assert_eq!(chunk.get_length(), len);
        }
    }

    #[test]
    #[should_panic = "did not fit into \"u32\""]
    fn test_usize_overflow_panics() {
        // works
        assert_eq!(from_usize_or_panic::<u64>(usize::MAX), u64::MAX);
        assert_eq!(from_usize_or_panic::<u32>(u32::MAX as usize), u32::MAX);
        assert_eq!(from_usize_or_panic::<u16>(u16::MAX as usize), u16::MAX);
        assert_eq!(from_usize_or_panic::<u8>(u8::MAX as usize), u8::MAX);

        // panics
        from_usize_or_panic::<u32>(usize::MAX);
    }
}

214 src/file/mod.rs (Normal file)
@@ -0,0 +1,214 @@
mod checked;
|
||||
mod chunk;
|
||||
mod uploading;
|
||||
|
||||
use std::{ffi::OsStr, fs, io::Read, path::Path};
|
||||
|
||||
use base64::prelude::{BASE64_STANDARD_NO_PAD as BASE64, Engine};
|
||||
use blake2b_simd::Params as Blake2b;
|
||||
|
||||
pub use checked::Checked;
|
||||
pub use chunk::Chunk;
|
||||
use log::{debug, warn};
|
||||
pub use uploading::Uploading;
|
||||
|
||||
/// how many bytes to hash at once (default: 4 MiB)
|
||||
///
|
||||
/// size must never exceed 2 EiB
|
||||
const HASH_CHUNK_SIZE: usize = 4 * 1024 * 1024;
|
||||
|
||||
/// compute hash for a file given its path.
|
||||
/// Hash function: `BLAKE2b`, 512 bit
|
||||
///
|
||||
/// # Params
|
||||
///
|
||||
/// - `path` to the file to hash
|
||||
/// - `size` of that file
|
||||
/// - `on_progress` will be called for each processed chunk (max. `HASH_CHUNK_SIZE`)
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// - from `fs::File::{open, read}`
|
||||
/// - Mismatch if given `size` does not match the file's size
|
||||
fn compute_hash(path: &Path, size: u64, mut on_progress: impl FnMut(u64)) -> crate::Result<String> {
|
||||
let mut file = fs::File::open(path)?;
|
||||
|
||||
// Blake2b-512 hasher (64 * 8 bit)
|
||||
let mut hasher = Blake2b::new().hash_length(64).to_state();
|
||||
|
||||
// buffer
|
||||
let mut buffer = vec![0; HASH_CHUNK_SIZE];
|
||||
let mut bytes_read = 0;
|
||||
|
||||
loop {
|
||||
let n = file.read(&mut buffer)?;
|
||||
if n == 0 {
|
||||
break;
|
||||
}
|
||||
hasher.update(&buffer[..n]);
|
||||
|
||||
bytes_read += n as u64;
|
||||
on_progress(n as u64);
|
||||
}
|
||||
|
||||
if bytes_read != size {
|
||||
return crate::Error::mismatch(size, bytes_read);
|
||||
}
|
||||
|
||||
let result = BASE64.encode(hasher.finalize());
|
||||
debug!("hashed {:?}: {result:?}", path.display());
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// check hash for a file given its path, return Ok(()) on success
|
||||
///
|
||||
/// # Params
|
||||
///
|
||||
/// - everything from `compute_hash`
|
||||
/// - optionally, known `hash`
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// - from `file::compute_hash`
|
||||
/// - Mismatch if `hash` is `None`
|
||||
/// - Mismatch if given `hash` does not match the computed hash
|
||||
fn check_hash(
|
||||
path: &Path,
|
||||
size: u64,
|
||||
hash: Option<&str>,
|
||||
on_progress: impl FnMut(u64),
|
||||
) -> crate::Result<()> {
|
||||
// check if hash is None
|
||||
let Some(expected) = hash else {
|
||||
return crate::Error::mismatch("hash", path.display());
|
||||
};
|
||||
|
||||
// compute and check new hash
|
||||
let actual = &compute_hash(path, size, on_progress)?;
|
||||
|
||||
if expected == actual {
|
||||
debug!("hash matches {expected:?}");
|
||||
Ok(())
|
||||
} else {
|
||||
warn!("hash mismatch for file {:?}", path.display());
|
||||
crate::Error::mismatch(expected, actual)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait FileTrait {
|
||||
/// extract the filename part of a `Path` reference
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Expects `path::Path::file_name` and `ffi::OsStr::to_str` to succeed on the given path
|
||||
fn extract_file_name(p: &Path) -> &str {
|
||||
p.file_name()
|
||||
.and_then(OsStr::to_str)
|
||||
.expect("bad file name")
|
||||
}
|
||||
|
||||
/// get a reference to the file's name
|
||||
///
|
||||
/// Uses `file::FileTrait::extract_file_name`, which may **panic**!
|
||||
fn get_name(&self) -> &str;
|
||||
|
||||
/// get the file's size
|
||||
fn get_size(&self) -> u64;
|
||||
|
||||
/// check this file's hash, return Ok(()) on success
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// - from `file::check_hash`
|
||||
fn check_hash(&self, on_progress: impl FnMut(u64)) -> crate::Result<()>;
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use tempfile::TempDir;
|
||||
|
||||
use crate::test_util::{
|
||||
create_file,
|
||||
data::{DATA_LENGTHS_BAD, HASHES_STD_BAD, HASHES_STD_GOOD, cases, cases_with},
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn compute_hash_as_expected() {
|
||||
for (content, size, expected_hash) in cases_with(HASHES_STD_GOOD) {
|
||||
let file = create_file(content);
|
||||
|
||||
// to capture progress updates from `compute_hash`
|
||||
let mut read_total = 0;
|
||||
let callback = |n| read_total += n;
|
||||
let hash = compute_hash(file.path(), size, callback).unwrap();
|
||||
|
||||
assert_eq!(hash, expected_hash);
|
||||
assert_eq!(read_total, size);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compute_hash_nonexistent_file() {
|
||||
let nex_path = {
|
||||
// this is deleted at the end of this block, so will stop to exist
|
||||
let nex_file = create_file(&[]);
|
||||
nex_file.path().to_owned()
|
||||
};
|
||||
|
||||
let err = compute_hash(&nex_path, 0, drop).unwrap_err();
|
||||
assert!(err.is_stdio_kind(std::io::ErrorKind::NotFound));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compute_hash_directory() {
|
||||
let dir = TempDir::new().unwrap();
|
||||
|
||||
let err = compute_hash(dir.path(), 0, drop).unwrap_err();
|
||||
assert!(err.is_stdio_kind(std::io::ErrorKind::IsADirectory));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hash_size_mismatch() {
|
||||
for (content, good_size, bad_size) in cases_with(DATA_LENGTHS_BAD) {
|
||||
let file = create_file(content);
|
||||
|
||||
{
|
||||
// `compute_hash` with bad size
|
||||
let err = compute_hash(file.path(), bad_size, drop).unwrap_err();
|
||||
assert!(err.is_mismatch(bad_size.to_string(), good_size.to_string()));
|
||||
}
|
||||
|
||||
{
|
||||
// `check_hash` with bad size
|
||||
let err = check_hash(file.path(), bad_size, Some("foobar"), drop).unwrap_err();
|
||||
assert!(err.is_mismatch(bad_size.to_string(), good_size.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hash_value_none() {
|
||||
for (content, size) in cases() {
|
||||
let file = create_file(content);
|
||||
|
||||
// `check_hash` with no hash
|
||||
let err = check_hash(file.path(), size, None, drop).unwrap_err();
|
||||
assert!(err.is_mismatch("hash", file.path().display().to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hash_value_mismatch() {
|
||||
for ((content, size, good_hash), bad_hash) in
|
||||
cases_with(HASHES_STD_GOOD).zip(HASHES_STD_BAD)
|
||||
{
|
||||
let file = create_file(content);
|
||||
|
||||
// `check_hash` with bad hash
|
||||
let err = check_hash(file.path(), size, Some(bad_hash), drop).unwrap_err();
|
||||
assert!(err.is_mismatch(bad_hash, good_hash));
|
||||
}
|
||||
}
|
||||
}

340 src/file/uploading.rs (Normal file)
@@ -0,0 +1,340 @@
use std::{
|
||||
fs,
|
||||
io::{self, Read, Seek, SeekFrom},
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use log::warn;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::sharry;
|
||||
|
||||
use super::{Checked, Chunk, FileTrait};
|
||||
|
||||
/// Description of a `file::Checked` that is actively being uploaded
|
||||
///
|
||||
/// - impl `serde` for cachefile handling
|
||||
#[allow(clippy::unsafe_derive_deserialize)]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct Uploading {
|
||||
/// canonical path to a regular file
|
||||
path: PathBuf,
|
||||
/// size of that file
|
||||
size: u64,
|
||||
/// hash of that file
|
||||
hash: Option<String>,
|
||||
/// file ID in a Sharry share
|
||||
file_id: sharry::FileID,
|
||||
/// previous offset, if applicable
|
||||
#[serde(skip)]
|
||||
previous_offset: Option<u64>,
|
||||
/// current reading offset
|
||||
offset: u64,
|
||||
}
|
||||
|
||||
impl Uploading {
|
||||
/// create this directly, without any checks
|
||||
pub(super) unsafe fn new_unchecked(
|
||||
path: PathBuf,
|
||||
size: u64,
|
||||
hash: Option<String>,
|
||||
file_id: sharry::FileID,
|
||||
) -> Self {
|
||||
Self {
|
||||
path,
|
||||
size,
|
||||
hash,
|
||||
file_id,
|
||||
previous_offset: None,
|
||||
offset: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// get the current reading offset
|
||||
pub fn get_offset(&self) -> u64 {
|
||||
self.offset
|
||||
}
|
||||
|
||||
/// rewind to the previously read chunk
|
||||
///
|
||||
/// - consume self, returning Some(self) on success
|
||||
///
|
||||
/// TODO this should take &mut self and return `crate::Result<()>`
|
||||
/// TODO this could use an error variant like `IllegalInvocation`
|
||||
pub fn rewind(mut self) -> Option<Self> {
|
||||
if let Some(offset) = self.previous_offset.take() {
|
||||
self.offset = offset;
|
||||
|
||||
Some(self)
|
||||
} else {
|
||||
warn!("attempted to rewind with no `previous_offset`");
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// read the next chunk
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// - from `fs::File::{open, seek, read}`
|
||||
/// - `UnexpectedEof` if `read` returned no bytes
|
||||
///
|
||||
/// TODO this should return `crate::Result<Chunk<'t>>`
|
||||
/// TODO this could use an error variant like `IllegalInvocation`
|
||||
pub fn read<'t>(&mut self, buf: &'t mut [u8]) -> io::Result<Chunk<'t>> {
|
||||
let mut f = fs::File::open(&self.path)?;
|
||||
|
||||
f.seek(SeekFrom::Start(self.offset))?;
|
||||
let read_len = f.read(buf)?;
|
||||
|
||||
if read_len == 0 {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::UnexpectedEof,
|
||||
format!("could not read from file {:?}", self.path.display()),
|
||||
));
|
||||
}
|
||||
|
||||
let chunk =
|
||||
unsafe { Chunk::new_unchecked(self.file_id.clone(), self.offset, &buf[..read_len]) };
|
||||
self.previous_offset = Some(self.offset);
|
||||
self.offset += chunk.get_length();
|
||||
|
||||
Ok(chunk)
|
||||
}
|
||||
|
||||
/// check if this file has been completely read
|
||||
///
|
||||
/// - consume self, returning Ok(self) if EOF not reached, Err(PathBuf) otherwise
|
||||
///
|
||||
/// TODO factor this into `read` and something more explicit like `finish(self) -> PathBuf`
|
||||
pub fn check_eof(self) -> Result<Self, PathBuf> {
|
||||
if self.offset < self.size {
|
||||
Ok(self)
|
||||
} else {
|
||||
Err(self.path)
|
||||
}
|
||||
}
|
||||
|
||||
/// stop uploading this file
|
||||
///
|
||||
/// - consume self, returning as a `file::Checked`
|
||||
pub fn stop(self) -> Checked {
|
||||
unsafe { Checked::new_unchecked(self.path, self.size, self.hash) }
|
||||
}
|
||||
}
|
||||
|
||||
impl FileTrait for Uploading {
|
||||
fn get_name(&self) -> &str {
|
||||
<Self as FileTrait>::extract_file_name(&self.path)
|
||||
}
|
||||
|
||||
fn get_size(&self) -> u64 {
|
||||
self.size
|
||||
}
|
||||
|
||||
fn check_hash(&self, on_progress: impl FnMut(u64)) -> crate::Result<()> {
|
||||
super::check_hash(&self.path, self.size, self.hash.as_deref(), on_progress)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use tempfile::NamedTempFile;
|
||||
|
||||
use crate::{
|
||||
sharry::{Client, json::NewShareRequest},
|
||||
test_util::{
|
||||
MockClient, create_file,
|
||||
data::{DATA_LENGTHS_BAD, cases, cases_with, data},
|
||||
},
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
fn create_uploading(content: &[u8]) -> (Uploading, sharry::ShareID, NamedTempFile) {
|
||||
let client = MockClient::default();
|
||||
let uri = true.into();
|
||||
let alias_id = true.into();
|
||||
|
||||
let share_id = client
|
||||
.share_create(&uri, &alias_id, NewShareRequest::new("share", 0))
|
||||
.expect("");
|
||||
|
||||
let file = create_file(content);
|
||||
let upl = Checked::new(file.path())
|
||||
.unwrap()
|
||||
.start_upload(&client, &uri, &alias_id, &share_id)
|
||||
.unwrap();
|
||||
|
||||
// return all, so the `NamedTempFile` is not auto-deleted here
|
||||
(upl, share_id, file)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_tests() {
|
||||
fn check_members(upl: &Uploading, path: &PathBuf, size: u64) {
|
||||
assert_eq!(upl.path, *path);
|
||||
assert_eq!(upl.size, size);
|
||||
assert_eq!(upl.offset, 0);
|
||||
assert!(upl.previous_offset.is_none());
|
||||
assert!(upl.hash.is_none());
|
||||
}
|
||||
|
||||
for (content, size) in cases() {
|
||||
let (upl, _share_id, file) = create_uploading(content);
|
||||
let path = file.path().canonicalize().unwrap();
|
||||
check_members(&upl, &path, size);
|
||||
|
||||
// `get_offset`
|
||||
assert_eq!(upl.get_offset(), upl.offset);
|
||||
|
||||
// `FileTrait`
|
||||
assert_eq!(upl.get_name(), file.path().file_name().unwrap());
|
||||
assert_eq!(upl.get_size(), size);
|
||||
assert!(upl.check_hash(drop).is_err());
|
||||
|
||||
// `new_unchecked`
|
||||
let upl =
|
||||
unsafe { Uploading::new_unchecked(upl.path, upl.size, upl.hash, upl.file_id) };
|
||||
check_members(&upl, &path, size);
|
||||
|
||||
// `stop`
|
||||
let chk = upl.stop();
|
||||
assert_eq!(chk.get_name(), path.file_name().unwrap().to_str().unwrap());
|
||||
assert_eq!(chk.get_size(), size);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_eof_works() {
|
||||
for (content, size) in cases() {
|
||||
let (upl, _share_id, file) = create_uploading(content);
|
||||
|
||||
let path = file.path().canonicalize().unwrap();
|
||||
let eof = upl.check_eof();
|
||||
|
||||
if size > 0 {
|
||||
assert!(eof.is_ok());
|
||||
} else {
|
||||
assert!(eof.is_err());
|
||||
assert_eq!(eof.unwrap_err(), path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_read_works() {
|
||||
for (content, size, extra) in cases_with(DATA_LENGTHS_BAD) {
|
||||
let (mut upl, _share_id, _file) = create_uploading(content);
|
||||
|
||||
// use oversized buffer
|
||||
let mut buf = vec![0; usize::try_from(size + extra).unwrap()];
|
||||
let chunk_res = upl.read(&mut buf);
|
||||
|
||||
if size > 0 {
|
||||
assert_eq!(upl.previous_offset, Some(0));
|
||||
assert_eq!(upl.offset, size);
|
||||
|
||||
let chunk = chunk_res.unwrap();
|
||||
assert_eq!(chunk.get_offset(), 0);
|
||||
assert_eq!(chunk.get_length(), size);
|
||||
assert_eq!(chunk.get_file_id().to_string(), upl.file_id.to_string());
|
||||
assert_eq!(chunk.get_data(), content);
|
||||
} else {
|
||||
assert!(chunk_res.is_err());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_rewind_works() {
|
||||
let data = cases()
|
||||
// ignore "empty" testcase
|
||||
.filter(|&(_, size)| size > 0)
|
||||
// remove "size"
|
||||
.map(|(content, _)| content);
|
||||
|
||||
for content in data {
|
||||
let (mut upl, _share_id, _file) = create_uploading(content);
|
||||
|
||||
// read 1 byte and rewind
|
||||
upl.read(&mut [0]).unwrap();
|
||||
upl = upl.rewind().unwrap();
|
||||
|
||||
// check: read has been rewound
|
||||
assert_eq!(upl.offset, 0);
|
||||
assert_eq!(upl.previous_offset, None);
|
||||
|
||||
// check: attempting to rewind again will destroy
|
||||
assert!(upl.rewind().is_none());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn upload_works() {
|
||||
let client = MockClient::default();
|
||||
let uri = true.into();
|
||||
let alias_id = true.into();
|
||||
|
||||
let share_id = client
|
||||
.share_create(&uri, &alias_id, NewShareRequest::new("share", 0))
|
||||
.unwrap();
|
||||
|
||||
let mut buf = vec![0; 1024 * 1024];
|
||||
|
||||
for content in data() {
|
||||
let file = create_file(content);
|
||||
let mut upl = Checked::new(file.path())
|
||||
.unwrap()
|
||||
.start_upload(&client, &uri, &alias_id, &share_id)
|
||||
.unwrap();
|
||||
|
||||
while let Ok(chunk) = upl.read(&mut buf) {
|
||||
assert!(
|
||||
client
|
||||
.file_patch(&uri, &alias_id, &share_id, &chunk)
|
||||
.is_ok()
|
||||
);
|
||||
}
|
||||
|
||||
assert_eq!(upl.offset, upl.size);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_upload_bad_align() {
|
||||
let data = cases()
|
||||
// ignore "empty" testcase
|
||||
.filter(|&(_, size)| size >= 2)
|
||||
// remove "size"
|
||||
.map(|(content, _)| content);
|
||||
|
||||
let client = MockClient::default();
|
||||
let uri = true.into();
|
||||
let alias_id = true.into();
|
||||
|
||||
let share_id = client
|
||||
.share_create(&uri, &alias_id, NewShareRequest::new("share", 0))
|
||||
.unwrap();
|
||||
|
||||
// single-byte buffer to force bad chunk alignment
|
||||
let mut buf = [0];
|
||||
for content in data {
|
||||
let file = create_file(content);
|
||||
let mut upl = Checked::new(file.path())
|
||||
.unwrap()
|
||||
.start_upload(&client, &uri, &alias_id, &share_id)
|
||||
.unwrap();
|
||||
|
||||
// first chunk works
|
||||
client
|
||||
.file_patch(&uri, &alias_id, &share_id, &upl.read(&mut buf).unwrap())
|
||||
.unwrap();
|
||||
|
||||
// second chunk misaligned
|
||||
let res = client.file_patch(&uri, &alias_id, &share_id, &upl.read(&mut buf).unwrap());
|
||||
assert!(res.is_err());
|
||||
assert!(res.unwrap_err().response_contains("not aligned to a MiB"));
|
||||
}
|
||||
}
|
||||
}
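// Sketch of the read/rewind contract documented above: every successful `read` remembers
// the previous offset, so exactly one failed transfer can be retried via `rewind` before
// another `read` is required. Illustrative only; `send` stands in for the real
// `Client::file_patch` call and `upload_with_single_retry` is not part of the crate.
fn upload_with_single_retry(
    mut upl: Uploading,
    buf: &mut [u8],
    mut send: impl FnMut(&Chunk) -> crate::Result<()>,
) -> crate::Result<Uploading> {
    let chunk = upl.read(buf)?;
    if send(&chunk).is_err() {
        // one rewind is available; rewinding twice without reading again would fail
        upl = upl.rewind().expect("previous offset available");
        let chunk = upl.read(buf)?;
        send(&chunk)?;
    }
    Ok(upl)
}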

17 src/lib.rs (Normal file)
@@ -0,0 +1,17 @@
// TODO fix with documentation
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]

mod appstate;
mod cachefile;
mod cli;
mod error;
mod file;
pub mod output;
mod sharry;
mod test_util;
mod ureq_client;

pub use appstate::AppState;
pub use cli::Cli;
pub use error::{Error, Parameter, Result};

110 src/main.rs (deleted)
@@ -1,110 +0,0 @@
mod appstate;
|
||||
mod cli;
|
||||
mod sharry;
|
||||
|
||||
use std::{
|
||||
process::exit,
|
||||
sync::{
|
||||
Arc,
|
||||
atomic::{AtomicBool, Ordering},
|
||||
},
|
||||
};
|
||||
|
||||
use clap::Parser;
|
||||
use console::style;
|
||||
use dialoguer::{Confirm, theme::ColorfulTheme};
|
||||
use log::{error, info};
|
||||
use ureq::Agent;
|
||||
|
||||
use appstate::AppState;
|
||||
use cli::Cli;
|
||||
|
||||
fn main() {
|
||||
println!(
|
||||
"{} to {}!",
|
||||
style("Welcome").magenta().bold(),
|
||||
style("ShrUpl").yellow().bold(),
|
||||
);
|
||||
|
||||
let stop = Arc::new(AtomicBool::new(false));
|
||||
|
||||
let stop_ctrlc = stop.clone();
|
||||
ctrlc::set_handler(move || {
|
||||
stop_ctrlc.store(true, Ordering::SeqCst);
|
||||
info!("stopping as soon as possible ...");
|
||||
})
|
||||
.expect("Error setting Ctrl-C handler");
|
||||
|
||||
env_logger::init();
|
||||
|
||||
let args = Cli::parse();
|
||||
info!("args: {args:?}");
|
||||
info!("timeout: {:?}", args.get_timeout());
|
||||
|
||||
let agent: Agent = Agent::config_builder()
|
||||
.timeout_global(args.get_timeout())
|
||||
.build()
|
||||
.into();
|
||||
|
||||
let mut state = AppState::try_resume(&args)
|
||||
.and_then(|state| {
|
||||
Confirm::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt("Continue previously stopped operation?")
|
||||
.default(true)
|
||||
.interact()
|
||||
.map_or(None, |b| b.then_some(state))
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
stop.load(Ordering::SeqCst).then(|| exit(0));
|
||||
|
||||
match AppState::from_args(&args, &agent) {
|
||||
Ok(state) => {
|
||||
state.save().unwrap();
|
||||
state
|
||||
}
|
||||
Err(e) => {
|
||||
if let Some(cause) = match e {
|
||||
ureq::Error::StatusCode(403) => Some("Alias ID"),
|
||||
ureq::Error::Io(_) => Some("URL"),
|
||||
_ => None,
|
||||
} {
|
||||
info!("handling error: {e:?}");
|
||||
println!(
|
||||
"{} probably wrong: {} – {:?}",
|
||||
style("Error!").red().bold(),
|
||||
style(cause).cyan().italic(),
|
||||
style(e.to_string()).yellow()
|
||||
);
|
||||
} else {
|
||||
error!("unknown error: {e} – {e:?}");
|
||||
println!("{}", style("Unknown Error!").red().bold());
|
||||
}
|
||||
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
println!(
|
||||
"{} uploading: {}",
|
||||
style("ShrUpl").yellow().bold(),
|
||||
style(state.file_names().join(", ")).magenta(),
|
||||
);
|
||||
|
||||
info!("continuing with state: {state:?}");
|
||||
|
||||
loop {
|
||||
match state.upload_chunk(&agent, args.chunk_size * 1024 * 1024) {
|
||||
Err(e) => error!("error: {e:?}"),
|
||||
Ok(None) => {
|
||||
info!("all uploads done");
|
||||
state.clear().unwrap();
|
||||
break;
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
state.save().unwrap();
|
||||
stop.load(Ordering::SeqCst).then(|| exit(0));
|
||||
}
|
||||
}

104 src/output.rs (Normal file)
@@ -0,0 +1,104 @@
use std::{fmt, process, sync::LazyLock};

use console::{StyledObject, style};
use dialoguer::{Confirm, Select, theme::ColorfulTheme};
use indicatif::{ProgressBar, ProgressStyle};
use log::{info, warn};

type StaticStyled<'t> = LazyLock<StyledObject<&'t str>>;

pub static SHRUPL: StaticStyled = LazyLock::new(|| style("ShrUpl").yellow().bold());

#[must_use]
pub fn prompt_continue() -> bool {
    let prompt = format!(
        "This operation has previously been stopped. {}",
        style("How to proceed?").cyan()
    );

    let choices = [
        format!("Load and {}", style("continue operation").green().bold()),
        format!("Start a {}", style("new operation").cyan().bold()),
        format!("Quit {}", *SHRUPL),
    ];

    let selection = Select::with_theme(&ColorfulTheme::default())
        .with_prompt(prompt)
        .default(0)
        .items(&choices)
        .interact()
        .unwrap_or(2);

    if selection == 2 {
        process::exit(0);
    }

    selection == 0
}

pub fn prompt_rebuild_share() {
    let prompt = format!(
        "Target Share cannot be accessed. {}",
        style("Completely restart upload?").cyan()
    );

    let selection = Confirm::with_theme(&ColorfulTheme::default())
        .with_prompt(prompt)
        .default(true)
        .interact()
        .unwrap_or(false);

    if !selection {
        process::exit(0);
    }
}

pub fn style_all<'t, F>(strs: &[&'t str], f: F) -> Vec<String>
where
    F: Fn(StyledObject<&'t str>) -> StyledObject<&'t str>,
{
    strs.iter().map(|&s| f(style(s)).to_string()).collect()
}

#[must_use]
pub fn new_progressbar() -> ProgressBar {
    ProgressBar::no_length().with_style(
        ProgressStyle::with_template(&format!(
            concat!(
                "{{bar:50.cyan/blue}} {{msg:.magenta}}: ",
                "{{binary_bytes:.yellow}}{}{{binary_total_bytes:.yellow}} ",
                "({{eta}})",
            ),
            style("/").magenta(),
        ))
        .expect("invalid style template"),
    )
}

pub enum Log {}

impl Log {
    fn eprintln(kind: impl fmt::Display, msg: impl fmt::Display) {
        eprintln!("{} {}: {}", *SHRUPL, kind, style(msg).cyan().italic(),);
    }

    pub fn warning(msg: impl fmt::Display) {
        Self::eprintln(style("Warning").magenta().bold(), msg);
    }

    pub fn error(msg: impl fmt::Display) -> ! {
        Self::eprintln(style("Error").red().bold(), msg);
        process::exit(1);
    }

    pub fn handle(e: &crate::Error) {
        if e.is_fatal() {
            // react to fatal error
            warn!("fatal error: {e:?}");
            Self::error(e);
        }

        // handle recoverable error
        info!("recoverable error: {e:?}");
    }
}

@@ -1,36 +0,0 @@
use std::fmt::{Debug, Display};
|
||||
|
||||
use log::debug;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ureq::RequestBuilder;
|
||||
|
||||
use super::api::Uri;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Hash)]
|
||||
pub struct Alias {
|
||||
pub(super) uri: Uri,
|
||||
pub(super) id: String,
|
||||
}
|
||||
|
||||
pub(super) trait SharryAlias {
|
||||
fn sharry_header(self, alias: &Alias) -> Self;
|
||||
}
|
||||
|
||||
impl<B> SharryAlias for RequestBuilder<B> {
|
||||
fn sharry_header(self, alias: &Alias) -> Self {
|
||||
self.header("Sharry-Alias", &alias.id)
|
||||
}
|
||||
}
|
||||
|
||||
impl Alias {
|
||||
pub fn new(uri: Uri, id: impl Into<String>) -> Self {
|
||||
Self { uri, id: id.into() }
|
||||
}
|
||||
|
||||
pub(super) fn get_endpoint(&self, endpoint: impl Display + Debug) -> String {
|
||||
let uri = format!("{}/{}", self.uri, endpoint);
|
||||
debug!("endpoint uri: {uri:?}");
|
||||
|
||||
uri
|
||||
}
|
||||
}

@@ -1,64 +0,0 @@
use std::fmt::Display;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Hash)]
|
||||
pub struct Uri {
|
||||
protocol: String,
|
||||
base_url: String,
|
||||
}
|
||||
|
||||
impl Uri {
|
||||
pub fn with_protocol(protocol: impl Into<String>, base_url: impl Into<String>) -> Self {
|
||||
Self {
|
||||
protocol: protocol.into(),
|
||||
base_url: base_url.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Uri {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}://{}/api/v2", self.protocol, self.base_url)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[allow(non_snake_case)]
|
||||
pub struct NewShareRequest {
|
||||
name: String,
|
||||
validity: u32,
|
||||
description: Option<String>,
|
||||
maxViews: u32,
|
||||
password: Option<String>,
|
||||
}
|
||||
|
||||
impl NewShareRequest {
|
||||
pub fn new(
|
||||
name: impl Into<String>,
|
||||
description: Option<impl Into<String>>,
|
||||
max_views: u32,
|
||||
) -> Self {
|
||||
Self {
|
||||
name: name.into(),
|
||||
validity: 0,
|
||||
description: description.map(Into::into),
|
||||
maxViews: max_views,
|
||||
password: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub(super) struct NewShareResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
pub id: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[allow(dead_code)]
|
||||
pub(super) struct NotifyShareResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
}
|
||||
|
|
@@ -1,75 +0,0 @@
|
|||
use std::{
|
||||
ffi::OsStr,
|
||||
fs,
|
||||
io::{self, ErrorKind},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use log::debug;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ureq::http::StatusCode;
|
||||
|
||||
use super::{Alias, FileUploading, Share, SharryAlias};
|
||||
|
||||
#[derive(Debug, Clone, Hash, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct FileChecked {
|
||||
path: PathBuf,
|
||||
}
|
||||
|
||||
impl FileChecked {
|
||||
pub fn new(value: impl AsRef<Path>) -> io::Result<Self> {
|
||||
let meta = fs::metadata(&value)?;
|
||||
if meta.is_file() {
|
||||
Ok(Self {
|
||||
path: fs::canonicalize(&value)?,
|
||||
})
|
||||
} else {
|
||||
Err(io::Error::new(
|
||||
ErrorKind::InvalidInput,
|
||||
"Not a regular file",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn file_name(&self) -> &str {
|
||||
self.path.file_name().unwrap().to_str().unwrap()
|
||||
}
|
||||
|
||||
pub fn start_upload(
|
||||
self,
|
||||
http: &ureq::Agent,
|
||||
alias: &Alias,
|
||||
share: &Share,
|
||||
) -> io::Result<FileUploading> {
|
||||
let size = usize::try_from(fs::metadata(&self.path)?.len()).map_err(io::Error::other)?;
|
||||
|
||||
let res = {
|
||||
let endpoint = alias.get_endpoint(format!("alias/upload/{}/files/tus", share.id));
|
||||
|
||||
let name = (self.path.file_name().and_then(OsStr::to_str))
|
||||
.ok_or_else(|| io::Error::new(ErrorKind::NotFound, "bad file name"))?
|
||||
.to_string();
|
||||
|
||||
(http.post(endpoint))
|
||||
.sharry_header(alias)
|
||||
.header("Sharry-File-Name", &name)
|
||||
.header("Upload-Length", size)
|
||||
.send_empty()
|
||||
.map_err(io::Error::other)?
|
||||
};
|
||||
|
||||
if res.status() != StatusCode::CREATED {
|
||||
return Err(io::Error::other("unexpected response status"));
|
||||
}
|
||||
|
||||
let location = (res.headers().get("Location"))
|
||||
.ok_or_else(|| io::Error::other("Location header not found"))?
|
||||
.to_str()
|
||||
.map_err(|_| io::Error::other("Location header invalid"))?
|
||||
.to_string();
|
||||
|
||||
debug!("patch uri: {location}");
|
||||
|
||||
Ok(FileUploading::new(self.path, size, location))
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,7 +0,0 @@
|
|||
mod checked;
|
||||
mod uploading;
|
||||
|
||||
pub use checked::FileChecked;
|
||||
pub use uploading::{ChunkState, FileUploading, UploadError};
|
||||
|
||||
use super::{Alias, Share, alias::SharryAlias};
|
||||
|
|
@@ -1,133 +0,0 @@
|
|||
use std::{
|
||||
fmt::{Debug, Display},
|
||||
fs::File,
|
||||
io::{self, Read, Seek, SeekFrom},
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use log::debug;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ureq::http::{HeaderValue, StatusCode};
|
||||
|
||||
use super::{Alias, SharryAlias};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct FileUploading {
|
||||
path: PathBuf,
|
||||
size: usize,
|
||||
uri: String,
|
||||
offset: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum UploadError {
|
||||
FileIO(io::Error),
|
||||
Request,
|
||||
ResponseStatus,
|
||||
ResponseOffset,
|
||||
}
|
||||
|
||||
pub enum ChunkState {
|
||||
Ok(FileUploading),
|
||||
Err(FileUploading, UploadError),
|
||||
Finished(PathBuf),
|
||||
}
|
||||
|
||||
impl Display for FileUploading {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"Uploading ({:?}, {}, {})",
|
||||
self.path.display(),
|
||||
self.size,
|
||||
self.offset
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl FileUploading {
|
||||
pub(super) fn new(path: PathBuf, size: usize, uri: String) -> Self {
|
||||
Self {
|
||||
path,
|
||||
size,
|
||||
uri,
|
||||
offset: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn read_chunk(&self, chunk_size: usize) -> io::Result<Vec<u8>> {
|
||||
let offset = u64::try_from(self.offset).map_err(io::Error::other)?;
|
||||
|
||||
let mut f = File::open(&self.path)?;
|
||||
f.seek(SeekFrom::Start(offset))?;
|
||||
|
||||
let mut bytes = vec![0; chunk_size];
|
||||
let read_len = f.read(&mut bytes)?;
|
||||
bytes.truncate(read_len);
|
||||
|
||||
Ok(bytes)
|
||||
}
|
||||
|
||||
pub fn file_name(&self) -> &str {
|
||||
self.path.file_name().unwrap().to_str().unwrap()
|
||||
}
|
||||
|
||||
pub fn offset<T>(&self) -> T
|
||||
where
|
||||
T: TryFrom<usize>,
|
||||
<T as TryFrom<usize>>::Error: Debug,
|
||||
{
|
||||
self.offset.try_into().unwrap()
|
||||
}
|
||||
|
||||
pub fn size<T>(&self) -> T
|
||||
where
|
||||
T: TryFrom<usize>,
|
||||
<T as TryFrom<usize>>::Error: Debug,
|
||||
{
|
||||
self.size.try_into().unwrap()
|
||||
}
|
||||
|
||||
pub fn upload_chunk(
|
||||
mut self,
|
||||
http: &ureq::Agent,
|
||||
alias: &Alias,
|
||||
chunk_size: usize,
|
||||
) -> ChunkState {
|
||||
let chunk = match self.read_chunk(chunk_size) {
|
||||
Err(e) => return ChunkState::Err(self, UploadError::FileIO(e)),
|
||||
Ok(value) => value,
|
||||
};
|
||||
|
||||
let Ok(res) = (http.patch(&self.uri))
|
||||
.sharry_header(alias)
|
||||
.header("Upload-Offset", self.offset)
|
||||
.send(&chunk)
|
||||
else {
|
||||
return ChunkState::Err(self, UploadError::Request);
|
||||
};
|
||||
|
||||
if res.status() != StatusCode::NO_CONTENT {
|
||||
return ChunkState::Err(self, UploadError::ResponseStatus);
|
||||
}
|
||||
|
||||
let Some(Ok(Ok(res_offset))) = (res.headers().get("Upload-Offset"))
|
||||
.map(HeaderValue::to_str)
|
||||
.map(|v| v.map(str::parse::<usize>))
|
||||
else {
|
||||
return ChunkState::Err(self, UploadError::ResponseOffset);
|
||||
};
|
||||
|
||||
if self.offset + chunk.len() != res_offset {
|
||||
return ChunkState::Err(self, UploadError::ResponseOffset);
|
||||
}
|
||||
|
||||
self.offset = res_offset;
|
||||
|
||||
if self.offset == self.size {
|
||||
return ChunkState::Finished(self.path);
|
||||
}
|
||||
|
||||
ChunkState::Ok(self)
|
||||
}
|
||||
}
|
||||
203
src/sharry/ids.rs
Normal file
|
|
@@ -0,0 +1,203 @@
|
|||
use std::{fmt, sync::LazyLock};
|
||||
|
||||
use log::{debug, trace};
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// ID of a public Sharry alias
|
||||
///
|
||||
/// - impl `From<String>` and `Clone` as this is just a String
|
||||
/// - impl `serde` for cachefile handling
|
||||
/// - impl `Default` while testing for value generation
|
||||
/// - impl `AsRef<str>` for using in a `ureq` header and hashing support
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[cfg_attr(test, derive(Default))]
|
||||
pub struct AliasID(String);
|
||||
|
||||
impl AsRef<str> for AliasID {
|
||||
fn as_ref(&self) -> &str {
|
||||
self.0.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for AliasID {
|
||||
fn from(value: String) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
/// ID of a Sharry share
|
||||
///
|
||||
/// - impl `From<String>` and `Clone` as this is just a String
|
||||
/// - impl `serde` for cachefile handling
|
||||
/// - impl `Default` while testing for value generation
|
||||
/// - impl `Display` for formatting compatibility
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[cfg_attr(test, derive(Default))]
|
||||
pub struct ShareID(String);
|
||||
|
||||
impl fmt::Display for ShareID {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for ShareID {
|
||||
fn from(value: String) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
/// ID of a file in a Sharry share
|
||||
///
|
||||
/// - impl `Clone` as this is just a String
|
||||
/// - impl `serde` for cachefile handling
|
||||
/// - impl `Default` while testing for value generation
|
||||
/// - impl `Display` for formatting compatibility
|
||||
/// - impl `TryFrom<String>` for extracting the ID by matching a "PATCH" URI
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[cfg_attr(test, derive(Default))]
|
||||
pub struct FileID(String);
|
||||
|
||||
impl FileID {
|
||||
#[cfg(test)]
|
||||
/// create a new `FileID` for testing purposes
|
||||
pub fn new_test(value: impl Into<String>) -> Self {
|
||||
Self(value.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for FileID {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
/// Pattern breakdown:
|
||||
/// - `^([^:/?#]+)://` - scheme (anything but `:/?#`) + `"://"`
|
||||
/// - `([^/?#]+)` - authority/host (anything but `/?#`)
|
||||
/// - `/api/v2/alias/upload/` - literal path segment
|
||||
/// - `([^/]+)` - capture SID (one or more non-slash chars)
|
||||
/// - `/files/tus/` - literal path segment
|
||||
/// - `(?P<fid>[^/]+)` - capture FID (one or more non-slash chars)
|
||||
/// - `$` - end of string
|
||||
static UPLOAD_URL_RE: LazyLock<Regex> = LazyLock::new(|| {
|
||||
trace!("compiling UPLOAD_URL_RE");
|
||||
|
||||
Regex::new(r"^([^:/?#]+)://([^/?#]+)/api/v2/alias/upload/[^/]+/files/tus/(?P<fid>[^/]+)$")
|
||||
.expect("Regex compilation failed")
|
||||
});
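// Worked example for the pattern above (the URL is hypothetical): the named group
// `fid` captures the final path segment, which is exactly what `TryFrom` below uses.
#[test]
fn upload_url_re_example() {
    let caps = UPLOAD_URL_RE
        .captures("https://host.example/api/v2/alias/upload/SID/files/tus/FID")
        .expect("URL should match UPLOAD_URL_RE");
    assert_eq!(&caps["fid"], "FID");
}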
|
||||
|
||||
impl TryFrom<String> for FileID {
|
||||
type Error = crate::Error;
|
||||
|
||||
fn try_from(value: String) -> crate::Result<Self> {
|
||||
trace!("TryFrom {value:?}");
|
||||
|
||||
if let Some(fid) = UPLOAD_URL_RE
|
||||
.captures(&value)
|
||||
.and_then(|caps| caps.name("fid").map(|m| m.as_str()))
|
||||
{
|
||||
let result = Self(fid.to_string());
|
||||
debug!("{result:?}");
|
||||
|
||||
Ok(result)
|
||||
} else {
|
||||
crate::Error::mismatch(
|
||||
"<proto>://<host>/api/v2/alias/upload/<share>/files/tus/<file>",
|
||||
value,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::test_util::check_trait;
|
||||
|
||||
#[test]
|
||||
fn basic_traits_working() {
|
||||
let inputs = [
|
||||
"",
|
||||
"abcd",
|
||||
"12345",
|
||||
"8woeurx09wp",
|
||||
"6NHNiSVFhZF-dSGbX8iD8ib-Pdb7TbzpsvC-uBBSCyExxb",
|
||||
];
|
||||
|
||||
for input in inputs {
|
||||
{
|
||||
// check AliasID
|
||||
let aid = AliasID::from(input.to_string());
|
||||
check_trait(&aid.0, input, "From<String>", "AliasID");
|
||||
check_trait(aid.as_ref(), input, "AsRef<str>", "AliasID");
|
||||
}
|
||||
|
||||
{
|
||||
// check ShareID
|
||||
let sid = ShareID::from(input.to_string());
|
||||
check_trait(&sid.0, input, "From<String>", "ShareID");
|
||||
check_trait(sid.to_string(), input, "Display", "ShareID");
|
||||
}
|
||||
|
||||
{
|
||||
// check FileID
|
||||
let fid = FileID(input.to_string());
|
||||
check_trait(fid.to_string(), input, "Display", "FileID");
|
||||
|
||||
let fid_test = FileID::new_test(input);
|
||||
assert_eq!(fid_test.0, fid.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid_urls_produce_expected_file_id() {
|
||||
// a handful of valid‐looking URLs
|
||||
let cases = [
|
||||
(
|
||||
"http://example.com/api/v2/alias/upload/SID123/files/tus/FID456",
|
||||
"FID456",
|
||||
),
|
||||
(
|
||||
"https://my-host:8080/api/v2/alias/upload/another-SID/files/tus/some-file-id",
|
||||
"some-file-id",
|
||||
),
|
||||
(
|
||||
"custom+scheme://host/api/v2/alias/upload/x/files/tus/y",
|
||||
"y",
|
||||
),
|
||||
];
|
||||
|
||||
for (good, expected_fid) in cases {
|
||||
let file_id = FileID::try_from(good.to_string()).unwrap();
|
||||
assert_eq!(file_id.0, expected_fid);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_urls_return_error() {
|
||||
let bad_inputs = [
|
||||
// missing /api/v2/alias/upload
|
||||
"http://example.com/files/tus/FID",
|
||||
// missing /files/tus
|
||||
"http://example.com/api/v2/alias/upload/SID123/FID456",
|
||||
// trailing slash (doesn't match `$`)
|
||||
"http://example.com/api/v2/alias/upload/SID/files/tus/FID/",
|
||||
// empty fid
|
||||
"http://example.com/api/v2/alias/upload/SID/files/tus/",
|
||||
// random string
|
||||
"just-a-random-string",
|
||||
];
|
||||
|
||||
for bad in bad_inputs {
|
||||
let err = FileID::try_from(bad.to_string()).unwrap_err();
|
||||
// make sure it's the Mismatch variant, and that it contains the original input
|
||||
assert!(err.is_mismatch(
|
||||
"<proto>://<host>/api/v2/alias/upload/<share>/files/tus/<file>",
|
||||
bad
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
119
src/sharry/json.rs
Normal file
|
|
@@ -0,0 +1,119 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Request Body sent to the Sharry API for creating a new share
|
||||
///
|
||||
/// - impl `Serialize` to send in an HTTP request
|
||||
#[derive(Serialize, Debug)]
|
||||
pub struct NewShareRequest {
|
||||
name: String,
|
||||
validity: u32,
|
||||
description: Option<String>,
|
||||
#[serde(rename = "maxViews")]
|
||||
max_views: u32,
|
||||
password: Option<String>,
|
||||
}
|
||||
|
||||
impl NewShareRequest {
|
||||
pub fn new(name: impl Into<String>, max_views: u32) -> Self {
|
||||
Self {
|
||||
name: name.into(),
|
||||
// apparently, the "validity" param doesn't make any difference
|
||||
validity: 0,
|
||||
description: None,
|
||||
max_views,
|
||||
// new shares are private by default, setting a password doesn't really make sense
|
||||
password: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// set a human‐readable description
|
||||
pub fn description(mut self, desc: Option<impl Into<String>>) -> Self {
|
||||
self.description = desc.map(Into::into);
|
||||
self
|
||||
}
|
||||
}
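// Builder sketch (name, view count and description are illustrative): `new` fixes
// `validity` to 0 and leaves `password` unset, so only these three values vary.
#[allow(dead_code)]
fn example_request() -> NewShareRequest {
    NewShareRequest::new("holiday-photos", 25).description(Some("pictures from last week"))
}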
|
||||
|
||||
/// Response body received from the Sharry API when creating a new share
|
||||
///
|
||||
/// - impl `Deserialize` to parse from an HTTP response
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct NewShareResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
pub id: String,
|
||||
}
|
||||
|
||||
/// Response body received from the Sharry API when pinging a share's notification hook
|
||||
///
|
||||
/// - impl `Deserialize` to parse from an HTTP response
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[allow(dead_code)]
|
||||
pub struct NotifyShareResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn nsreq_new_sets_fields_correctly() {
|
||||
let cases = [
|
||||
// simple ASCII name, small view count
|
||||
("alice", 1),
|
||||
// underscores, mid-range views
|
||||
("bob_smith", 10),
|
||||
// unicode in the name
|
||||
("漢字ユーザー", 5),
|
||||
// empty name edge case
|
||||
("", 3),
|
||||
// zero views
|
||||
("charlie", 0),
|
||||
// very large view count
|
||||
("delta", u32::MAX),
|
||||
// name with spaces and punctuation
|
||||
("user name!", 42),
|
||||
// name with mixed case
|
||||
("FooBar", 7),
|
||||
];
|
||||
|
||||
for (name, views) in cases {
|
||||
let req = NewShareRequest::new(name, views);
|
||||
|
||||
assert_eq!(req.name, name);
|
||||
assert_eq!(req.max_views, views);
|
||||
|
||||
// unset fields
|
||||
assert_eq!(req.validity, 0);
|
||||
assert!(req.description.is_none());
|
||||
assert!(req.password.is_none());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nsreq_new_allows_setting_description() {
|
||||
let longstr = "y".repeat(256);
|
||||
|
||||
let cases = [
|
||||
// simple alphanumeric
|
||||
"A simple test user",
|
||||
// whitespace & punctuation
|
||||
"Bob’s description: loves Rust!",
|
||||
// unicode
|
||||
"日本語の説明",
|
||||
// long string
|
||||
longstr.as_str(),
|
||||
// empty
|
||||
"",
|
||||
// only whitespace
|
||||
" ",
|
||||
];
|
||||
|
||||
for desc in cases {
|
||||
let req = NewShareRequest::new("whatever", 0).description(Some(desc));
|
||||
assert!(req.description.is_some());
|
||||
assert_eq!(req.description.unwrap(), desc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,11 +1,45 @@
|
|||
#![allow(unused_imports)]
|
||||
mod ids;
|
||||
pub mod json;
|
||||
mod uri;
|
||||
|
||||
mod alias;
|
||||
mod api;
|
||||
mod file;
|
||||
mod share;
|
||||
pub use ids::{AliasID, FileID, ShareID};
|
||||
pub use uri::Uri;
|
||||
|
||||
pub use alias::Alias;
|
||||
pub use api::{NewShareRequest, Uri};
|
||||
pub use file::{ChunkState, FileChecked, FileUploading, UploadError};
|
||||
pub use share::Share;
|
||||
use crate::file;
|
||||
|
||||
pub trait Client {
|
||||
fn share_create(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
data: json::NewShareRequest,
|
||||
) -> crate::Result<ShareID>;
|
||||
|
||||
fn share_notify(&self, uri: &Uri, alias_id: &AliasID, share_id: &ShareID) -> crate::Result<()>;
|
||||
|
||||
/// create a new file in a Sharry share
|
||||
///
|
||||
/// - try to use endpoint from `Uri::file_create`
|
||||
/// - try to extract `FileID` from the response
|
||||
/// - return the new `FileID`
|
||||
///
|
||||
/// # Errors from
|
||||
///
|
||||
/// - request to endpoint
|
||||
/// - parsing the response
|
||||
fn file_create(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
share_id: &ShareID,
|
||||
file: &file::Checked,
|
||||
) -> crate::Result<FileID>;
|
||||
|
||||
fn file_patch(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
share_id: &ShareID,
|
||||
chunk: &file::Chunk,
|
||||
) -> crate::Result<()>;
|
||||
}
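// Rough sketch of how an upload is expected to flow through any `Client` impl; the
// share name, view count and the chunk loop are assumptions, since producing
// `file::Chunk` values is outside this trait.
#[allow(dead_code)]
fn example_upload<C: Client>(
    client: &C,
    uri: &Uri,
    alias_id: &AliasID,
    file: &file::Checked,
) -> crate::Result<ShareID> {
    let share_id = client.share_create(uri, alias_id, json::NewShareRequest::new("example", 10))?;
    let _file_id = client.file_create(uri, alias_id, &share_id, file)?;
    // ...repeated `client.file_patch(uri, alias_id, &share_id, &chunk)?` calls go here...
    client.share_notify(uri, alias_id, &share_id)?;
    Ok(share_id)
}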
|
||||
|
|
|
|||
|
|
@@ -1,48 +0,0 @@
|
|||
use log::debug;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::{
|
||||
alias::{Alias, SharryAlias},
|
||||
api::{NewShareRequest, NewShareResponse, NotifyShareResponse},
|
||||
};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct Share {
|
||||
pub(super) id: String,
|
||||
}
|
||||
|
||||
impl Share {
|
||||
pub fn create(
|
||||
http: &ureq::Agent,
|
||||
alias: &Alias,
|
||||
data: NewShareRequest,
|
||||
) -> Result<Self, ureq::Error> {
|
||||
let res = (http.post(alias.get_endpoint("alias/upload/new")))
|
||||
.sharry_header(alias)
|
||||
.send_json(data)?
|
||||
.body_mut()
|
||||
.read_json::<NewShareResponse>()?;
|
||||
|
||||
debug!("response: {res:?}");
|
||||
|
||||
if !(res.success && (res.message == "Share created.")) {
|
||||
return Err(ureq::Error::Other("unexpected json response".into()));
|
||||
}
|
||||
|
||||
Ok(Self { id: res.id })
|
||||
}
|
||||
|
||||
pub fn notify(&self, http: &ureq::Agent, alias: &Alias) -> Result<(), ureq::Error> {
|
||||
let endpoint = alias.get_endpoint(format!("alias/mail/notify/{}", self.id));
|
||||
|
||||
let res = (http.post(endpoint))
|
||||
.sharry_header(alias)
|
||||
.send_empty()?
|
||||
.body_mut()
|
||||
.read_json::<NotifyShareResponse>()?;
|
||||
|
||||
debug!("response: {res:?}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
239
src/sharry/uri.rs
Normal file
|
|
@@ -0,0 +1,239 @@
|
|||
use std::{fmt, sync::LazyLock};
|
||||
|
||||
use log::{debug, trace};
|
||||
use regex::{Captures, Regex};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// ID of a file in a Sharry share
|
||||
///
|
||||
/// - impl `Clone` as this is just a String
|
||||
/// - impl `serde` for cachefile handling
|
||||
/// - impl `Display` for formatting compatibility
|
||||
/// - impl `AsRef<[u8]>` for hashing with `blake2b_simd`
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[cfg_attr(test, derive(Default))]
|
||||
pub struct Uri(String);
|
||||
|
||||
impl fmt::Display for Uri {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<[u8]> for Uri {
|
||||
fn as_ref(&self) -> &[u8] {
|
||||
self.0.as_bytes()
|
||||
}
|
||||
}
|
||||
|
||||
fn captured(caps: &Captures, name: &str) -> String {
|
||||
caps.name(name)
|
||||
.unwrap_or_else(|| panic!("{name:?} not captured"))
|
||||
.as_str()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
/// Pattern breakdown:
|
||||
/// - `^(?P<scheme>[^:/?#]+)://` - capture scheme (anything but `:/?#`) + `"://"`
|
||||
/// - `(?P<host>[^/?#]+)` - capture authority/host (anything but `/?#`)
|
||||
/// - `(/.*)?` - maybe trailing slash and some path
|
||||
/// - `$` - end of string
|
||||
static SHARRY_URI_RE: LazyLock<Regex> = LazyLock::new(|| {
|
||||
trace!("compiling SHARRY_URI_RE");
|
||||
|
||||
Regex::new(r"^(?P<scheme>[^:/?#]+)://(?P<host>[^/?#]+)(/.*)?$")
|
||||
.expect("Regex compilation failed")
|
||||
});
|
||||
|
||||
fn parse_url(value: &str) -> Option<(String, String)> {
|
||||
SHARRY_URI_RE
|
||||
.captures(value)
|
||||
.map(|caps| (captured(&caps, "scheme"), captured(&caps, "host")))
|
||||
}
|
||||
|
||||
impl From<String> for Uri {
|
||||
fn from(value: String) -> Self {
|
||||
trace!("TryFrom {value:?}");
|
||||
|
||||
if let Some((scheme, host)) = parse_url(&value) {
|
||||
let result = Self(format!("{scheme}://{host}"));
|
||||
debug!("{result:?}");
|
||||
|
||||
result
|
||||
} else {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Uri {
|
||||
/// arbitrary endpoint in the Sharry API v2
|
||||
fn endpoint(&self, path: fmt::Arguments) -> String {
|
||||
let uri = format!("{}/api/v2/{path}", self.0);
|
||||
trace!("endpoint: {uri:?}");
|
||||
uri
|
||||
}
|
||||
|
||||
/// Sharry API endpoint to create a new share
|
||||
pub fn share_create(&self) -> String {
|
||||
self.endpoint(format_args!("alias/upload/new"))
|
||||
}
|
||||
|
||||
/// Sharry API endpoint to ping a share's notification hook
|
||||
pub fn share_notify(&self, share_id: &super::ShareID) -> String {
|
||||
self.endpoint(format_args!("alias/mail/notify/{share_id}"))
|
||||
}
|
||||
|
||||
/// Sharry API endpoint to create a new file inside a share
|
||||
pub fn file_create(&self, share_id: &super::ShareID) -> String {
|
||||
self.endpoint(format_args!("alias/upload/{share_id}/files/tus"))
|
||||
}
|
||||
|
||||
/// Sharry API endpoint to push data into a file inside a share
|
||||
pub fn file_patch(&self, share_id: &super::ShareID, file_id: &super::FileID) -> String {
|
||||
self.endpoint(format_args!("alias/upload/{share_id}/files/tus/{file_id}"))
|
||||
}
|
||||
}
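// Worked example (host is hypothetical): shows which endpoint each helper yields,
// mirroring the `test_pub_endpoints` test below.
#[allow(dead_code)]
fn example_endpoints(share_id: &super::ShareID, file_id: &super::FileID) -> [String; 4] {
    let uri = Uri::from("https://files.example.com".to_string());
    [
        uri.share_create(),                // .../api/v2/alias/upload/new
        uri.share_notify(share_id),        // .../api/v2/alias/mail/notify/<share_id>
        uri.file_create(share_id),         // .../api/v2/alias/upload/<share_id>/files/tus
        uri.file_patch(share_id, file_id), // .../api/v2/alias/upload/<share_id>/files/tus/<file_id>
    ]
}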
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
sharry::{FileID, ShareID},
|
||||
test_util::check_trait,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn check_captured_works() {
|
||||
let cases = [
|
||||
// simple http host
|
||||
("http://example.com", "http", "example.com"),
|
||||
// https host with port
|
||||
("https://my-host:8080", "https", "my-host:8080"),
|
||||
// trailing slash
|
||||
("scheme://host/", "scheme", "host"),
|
||||
// with path
|
||||
("scheme://host/path/to/whatever", "scheme", "host"),
|
||||
// custom scheme
|
||||
("custom+scheme://host", "custom+scheme", "host"),
|
||||
];
|
||||
|
||||
for (good, scheme, host) in cases {
|
||||
let caps = SHARRY_URI_RE.captures(good).unwrap();
|
||||
|
||||
assert_eq!(captured(&caps, "scheme"), scheme);
|
||||
assert_eq!(captured(&caps, "host"), host);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic = "\"foo\" not captured"]
|
||||
fn make_captured_panic() {
|
||||
let caps = SHARRY_URI_RE.captures("http://example.com").unwrap();
|
||||
captured(&caps, "foo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_traits_working() {
|
||||
let cases = [
|
||||
// simple http host
|
||||
"http://example.com",
|
||||
// https host with port
|
||||
"https://my-host:8080",
|
||||
// custom scheme
|
||||
"custom+scheme://host",
|
||||
];
|
||||
|
||||
for uri_data in cases {
|
||||
let uri = Uri(uri_data.to_string());
|
||||
check_trait(uri.to_string(), uri_data, "Display", "Uri");
|
||||
check_trait(uri.as_ref(), uri_data.as_bytes(), "AsRef<[u8]>", "Uri");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid_urls_produce_expected_uri() {
|
||||
let cases = [
|
||||
// simple http host
|
||||
("http://example.com", "http://example.com"),
|
||||
// https host with port
|
||||
("https://my-host:8080", "https://my-host:8080"),
|
||||
// trailing slash
|
||||
("scheme://host/", "scheme://host"),
|
||||
// with path
|
||||
("scheme://host/path/to/whatever", "scheme://host"),
|
||||
// custom scheme
|
||||
("custom+scheme://host", "custom+scheme://host"),
|
||||
];
|
||||
|
||||
for (good, expected) in cases {
|
||||
let uri = Uri::from(good.to_string());
|
||||
check_trait(uri.0, expected, "From<String>", "Uri");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_urls_passed_through() {
|
||||
let cases = [
|
||||
// missing “://”
|
||||
"http:/example.com",
|
||||
// missing scheme
|
||||
"://example.com",
|
||||
// missing host
|
||||
"http://",
|
||||
"ftp://?query",
|
||||
// totally malformed
|
||||
"just-a-string",
|
||||
"",
|
||||
"///",
|
||||
];
|
||||
|
||||
for bad in cases {
|
||||
let uri = Uri::from(bad.to_string());
|
||||
check_trait(uri.0, bad, "From<String>", "Uri");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_endpoint() {
|
||||
let cases = [
|
||||
// simple path
|
||||
("path/to/something", "/api/v2/path/to/something"),
|
||||
// underscores, hyphens, dots
|
||||
("bob_smith-son.eve", "/api/v2/bob_smith-son.eve"),
|
||||
// unicode
|
||||
("漢字ユーザー", "/api/v2/漢字ユーザー"),
|
||||
// empty path
|
||||
("", "/api/v2/"),
|
||||
// leading/trailing spaces
|
||||
(" frank ", "/api/v2/ frank "),
|
||||
// uppercase
|
||||
("GUEST", "/api/v2/GUEST"),
|
||||
// numeric
|
||||
("12345", "/api/v2/12345"),
|
||||
];
|
||||
|
||||
let uri = Uri::default();
|
||||
for (path, expected) in cases {
|
||||
assert_eq!(&expected, &uri.endpoint(format_args!("{path}")));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pub_endpoints() {
|
||||
let uri = Uri::default();
|
||||
let share_id = ShareID::from("sid".to_string());
|
||||
let file_id = FileID::new_test("fid".to_string());
|
||||
|
||||
assert_eq!("/api/v2/alias/upload/new", uri.share_create());
|
||||
assert_eq!("/api/v2/alias/mail/notify/sid", uri.share_notify(&share_id));
|
||||
assert_eq!(
|
||||
"/api/v2/alias/upload/sid/files/tus",
|
||||
uri.file_create(&share_id)
|
||||
);
|
||||
assert_eq!(
|
||||
"/api/v2/alias/upload/sid/files/tus/fid",
|
||||
uri.file_patch(&share_id, &file_id)
|
||||
);
|
||||
}
|
||||
}
|
||||
118
src/test_util/data.rs
Normal file
|
|
@@ -0,0 +1,118 @@
|
|||
/// test dataset
|
||||
const DATA: [&[u8]; 8] = [
|
||||
// empty slice
|
||||
b"",
|
||||
// single-byte
|
||||
b"x",
|
||||
// common ascii pangram
|
||||
b"The quick brown fox jumps over the lazy dog",
|
||||
// ascii with punctuation and digits
|
||||
b"Rust v1.65.0 - Memory Safety, Speed, Concurrency!",
|
||||
// simple unicode (utf-8) greeting
|
||||
"こんにちは世界".as_bytes(),
|
||||
// pseudo-random bytes with embedded nuls
|
||||
&[
|
||||
0x3C, 0xA7, 0x5D, 0xE1, 0x4F, 0x99, 0x00, 0x20, 0x7F, 0xB3, 0xCD, 0x8A, 0x10, 0x55, 0xAA,
|
||||
0xFF, 0x5E, 0xA3, 0x1F, 0xC8, 0x72, 0x4D, 0x99, 0x00, 0xB7, 0x3C, 0x8E, 0xAD, 0x26, 0xF1,
|
||||
],
|
||||
// long run of identical bytes (1 KiB of ascii 'A')
|
||||
&[b'A'; 1024],
|
||||
// very large slice (10 MiB of zeroes)
|
||||
&[0u8; 10 * 1024 * 1024],
|
||||
];
|
||||
|
||||
/// lengths of the test dataset
|
||||
const DATA_LENGTHS: [u64; 8] = [
|
||||
DATA[0].len() as u64,
|
||||
DATA[1].len() as u64,
|
||||
DATA[2].len() as u64,
|
||||
DATA[3].len() as u64,
|
||||
DATA[4].len() as u64,
|
||||
DATA[5].len() as u64,
|
||||
DATA[6].len() as u64,
|
||||
DATA[7].len() as u64,
|
||||
];
|
||||
|
||||
/// anything but the lengths of the test dataset
|
||||
pub const DATA_LENGTHS_BAD: [u64; 8] = [36, 12, 1, 0, 9, 24, 13, 10];
|
||||
|
||||
/// known good hashes of the test dataset
|
||||
///
|
||||
/// using `BLAKE2b`, 512 bit, with unpadded Base64 (standard variant)
|
||||
pub const HASHES_STD_GOOD: [&str; 8] = [
|
||||
// empty slice
|
||||
"eGoC90IBWQPGxv2FJVLScpEvR0DhWEdhiobiF/cfVBnSXhAxr+5YUxOJZESTTrBLkDpoWxRIt1XVb3Aa/pvizg",
|
||||
// single-byte
|
||||
"CQk3etNREMr7KQnhhWcrfyco0fUJT4rWjW+sYnS/H0mUhagOo2TATtAG0pRZ6jy3xgAoDi+D4DJSmQb4iuMNCg",
|
||||
// common ascii pangram
|
||||
"qK3Uvd39k+SHfSdG5igXsRY2Sh+nvBSNlQkLxzM7NnP4JAHPeqLkyx7NkCluPxTLVBP47Xe+cwRbE5FM3NapGA",
|
||||
// ascii with punctuation and digits
|
||||
"NOtceHp9LrSYpXvSP3ayPbgMUyX4hynBYt4KtHuwJDsv1ELco5QeUj9aJTYTqbw4KzRKY+RjsbR26N3smUeCmA",
|
||||
// simple unicode (utf-8) greeting
|
||||
"h3xQg25wr/XqaXgqXWJivbVgN89XQoZUN/JcSZB0jxOtkbVStY7hnO+pm3PnLv6yZ4ZDLrxzYpoBk05BR7Wo1A",
|
||||
// pseudo-random bytes with embedded nuls
|
||||
"kiUYjOegDM9n1ryWtZhukpTuZ8oZbhi2onpXNl6pg16R+JZj5ty4uJZs44YbCu0A9m35Xs3bi/mxfbSulbo5Rg",
|
||||
// long run of identical bytes (1 kib of ascii 'a')
|
||||
"xwGOG01h2kco4CgjJlD9T2v5bM8XVuCrYzKTM4D0s7rCnOH+HR1H2S2Tmg43M+ym1A+AEPTE4J7iGljgntTdZA",
|
||||
// very large slice (10 mib of zeroes)
|
||||
"xsHH9h63e1+254TSCQoWCl6L5eGOo0Zg+ubtQC8Inwj7dwW7oxg0kYCrnkuTRj+7bVYNjlRSDOa8OIdInp73wA",
|
||||
];
|
||||
|
||||
/// known bad version of `HASHES_STD_GOOD`
|
||||
pub const HASHES_STD_BAD: [&str; 8] = [
|
||||
// off by one character (last “z” -> “y”)
|
||||
"eGoC90IBWQPGxv2FJVLScpEvR0DhWEdhiobiF/cfVBnSXhAxr+5YUxOJZESTTrBLkDpoWxRIt1XVb3Aa/pviyg",
|
||||
// truncated by dropping the final 4 chars
|
||||
"CQk3etNREMr7KQnhhWcrfyco0fUJT4rWjW+sYnS/H0mUhagOo2TATtAG0pRZ6jy3xgAoDi+D4DJSmQb4iu",
|
||||
// contains a non‐Base64 character (“#”)
|
||||
"qK3Uvd39k+SHfSdG5igXsRY2Sh+nvBSNlQkLxzM7NnP4JAHPeqLkyx7NkCluPxTLVBP47Xe+cwRbE5FM3NapG#",
|
||||
// too long, extra “AA” at end
|
||||
"NOtceHp9LrSYpXvSP3ayPbgMUyX4hynBYt4KtHuwJDsv1ELco5QeUj9aJTYTqbw4KzRKY+RjsbR26N3smUeCmAAA",
|
||||
// one byte altered at the front (“h” -> “H”)
|
||||
"H3xQg25wr/XqaXgqXWJivbVgN89XQoZUN/JcSZB0jxOtkbVStY7hnO+pm3PnLv6yZ4ZDLrxzYpoBk05BR7Wo1A",
|
||||
// garbled mid‐section
|
||||
"kiUYjOegDM9n1ryWtZhukpTuZ8oZbhi2onpXYZ6pg16R+JZj5ty4uJZs44YbCu0A9m35Xs3bi/mxfbSulbo5Rg",
|
||||
// entirely different length (too short)
|
||||
"xwGOG01h2kco4CgjJlD9T2v5bM8XVuCrYzKTM4D0s7rCnO",
|
||||
// correct length, but all “A”s (obviously wrong)
|
||||
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
];
|
||||
|
||||
// /// known good hashes of the test dataset
|
||||
// ///
|
||||
// /// using BLAKE2b, 128 bit, with unpadded Base64 (url safe variant)
|
||||
// const HASHES_URL_GOOD: [&str; 8] = [
|
||||
// // empty slice
|
||||
// "########################################################",
|
||||
// // single-byte
|
||||
// "########################################################",
|
||||
// // common ascii pangram
|
||||
// "",
|
||||
// // ascii with punctuation and digits
|
||||
// "",
|
||||
// // simple unicode (utf-8) greeting
|
||||
// "",
|
||||
// // pseudo-random bytes with embedded nuls
|
||||
// "",
|
||||
// // long run of identical bytes (1 kib of ascii 'a')
|
||||
// "",
|
||||
// // very large slice (10 mib of zeroes)
|
||||
// "",
|
||||
// ];
|
||||
|
||||
pub fn data() -> impl Iterator<Item = &'static [u8]> {
|
||||
DATA.iter().copied()
|
||||
}
|
||||
|
||||
pub fn cases() -> impl Iterator<Item = (&'static [u8], u64)> {
|
||||
data().zip(DATA_LENGTHS)
|
||||
}
|
||||
|
||||
pub fn cases_with<T>(addons: T) -> impl Iterator<Item = (&'static [u8], u64, T::Item)>
|
||||
where
|
||||
T: IntoIterator,
|
||||
{
|
||||
cases()
|
||||
.zip(addons)
|
||||
.map(|((data, len), addon)| (data, len, addon))
|
||||
}
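// Sketch of a typical consumer: every dataset entry is paired with its length and a
// known-good hash. The digest check itself is omitted here because the hashing helper
// lives elsewhere; only the length pairing is asserted.
#[test]
fn example_cases_have_matching_lengths() {
    for (bytes, len, _expected) in cases_with(HASHES_STD_GOOD) {
        assert_eq!(bytes.len() as u64, len);
    }
}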
|
||||
330
src/test_util/mock_client.rs
Normal file
|
|
@@ -0,0 +1,330 @@
|
|||
use std::{
|
||||
cell::{RefCell, RefMut},
|
||||
collections::{HashMap, hash_map::Entry},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
Error, Result, error_response,
|
||||
file::{self, FileTrait},
|
||||
sharry::{AliasID, Client, FileID, ShareID, Uri, json},
|
||||
};
|
||||
|
||||
use super::mock_ids::CheckID;
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct MockClient {
|
||||
shares: RefCell<HashMap<String, MockShare>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct MockShare {
|
||||
files: HashMap<String, MockFile>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct MockFile {
|
||||
size: u64,
|
||||
offset: u64,
|
||||
}
|
||||
|
||||
impl From<&file::Checked> for MockFile {
|
||||
fn from(value: &file::Checked) -> Self {
|
||||
Self {
|
||||
size: value.get_size(),
|
||||
offset: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MockClient {
|
||||
fn insert_share(&self, share_id: &ShareID, share: MockShare) -> Result<()> {
|
||||
let mut shares = self.shares.borrow_mut();
|
||||
|
||||
let Entry::Vacant(entry) = shares.entry(share_id.to_string()) else {
|
||||
return Err(error_response!("can't insert share {share_id:?}!"));
|
||||
};
|
||||
|
||||
entry.insert(share);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn insert_file(&self, share_id: &ShareID, file_id: &FileID, file: MockFile) -> Result<()> {
|
||||
let mut share = self.get_share(share_id)?;
|
||||
|
||||
let Entry::Vacant(entry) = share.files.entry(file_id.to_string()) else {
|
||||
return Err(error_response!("can't insert file {file_id:?}!"));
|
||||
};
|
||||
|
||||
entry.insert(file);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_share<'t>(&'t self, share_id: &ShareID) -> Result<RefMut<'t, MockShare>> {
|
||||
let share_id = &share_id.to_string();
|
||||
let shares = self.shares.borrow_mut();
|
||||
|
||||
// check share exists
|
||||
shares
|
||||
.get(share_id)
|
||||
.ok_or_else(|| error_response!("can't find share {share_id:?}!"))?;
|
||||
|
||||
Ok(RefMut::map(shares, |shares| {
|
||||
shares.get_mut(share_id).unwrap()
|
||||
}))
|
||||
}
|
||||
|
||||
fn get_file<'t>(
|
||||
&'t self,
|
||||
share_id: &ShareID,
|
||||
file_id: &FileID,
|
||||
) -> Result<RefMut<'t, MockFile>> {
|
||||
let file_id = &file_id.to_string();
|
||||
let share = self.get_share(share_id)?;
|
||||
|
||||
// check file exists
|
||||
share
|
||||
.files
|
||||
.get(file_id)
|
||||
.ok_or_else(|| error_response!("can't find file {file_id:?}!"))?;
|
||||
|
||||
Ok(RefMut::map(share, move |share| {
|
||||
share.files.get_mut(file_id).unwrap()
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
impl Client for MockClient {
|
||||
fn share_create(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
_: json::NewShareRequest,
|
||||
) -> Result<ShareID> {
|
||||
(uri, alias_id).check()?;
|
||||
|
||||
let share_id = true.into();
|
||||
self.insert_share(&share_id, MockShare::default())?;
|
||||
|
||||
Ok(share_id)
|
||||
}
|
||||
|
||||
fn share_notify(&self, uri: &Uri, alias_id: &AliasID, share_id: &ShareID) -> crate::Result<()> {
|
||||
(uri, alias_id).check()?;
|
||||
share_id.check()?;
|
||||
|
||||
let _share = self.get_share(share_id)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn file_create(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
share_id: &ShareID,
|
||||
file: &file::Checked,
|
||||
) -> Result<FileID> {
|
||||
(uri, alias_id).check()?;
|
||||
share_id.check()?;
|
||||
|
||||
let file_id = true.into();
|
||||
self.insert_file(share_id, &file_id, file.into())?;
|
||||
|
||||
Ok(file_id)
|
||||
}
|
||||
|
||||
fn file_patch(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
share_id: &ShareID,
|
||||
chunk: &file::Chunk,
|
||||
) -> Result<()> {
|
||||
(uri, alias_id).check()?;
|
||||
(share_id, chunk.get_file_id()).check()?;
|
||||
|
||||
let file = self.get_file(share_id, chunk.get_file_id())?;
|
||||
|
||||
if chunk.get_length() == 0 {
|
||||
return Err(error_response!("chunk {chunk:?} empty!"));
|
||||
} else if chunk.get_offset() % (1024 * 1024) != 0 {
|
||||
return Err(error_response!("chunk {chunk:?} not aligned to a MiB!"));
|
||||
} else if chunk.get_offset() != file.offset {
|
||||
return Error::mismatch(file.offset, chunk.get_offset());
|
||||
} else if file.offset + chunk.get_length() > file.size {
|
||||
return Err(error_response!("chunk {chunk:?} too long!"));
|
||||
}
|
||||
|
||||
let mut file = file;
|
||||
file.offset += chunk.get_length();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
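// Sketch of driving the mock end to end; `Uri::from(true)` and `AliasID::from(true)`
// come from `mock_ids` and stand for "known-valid" values. Building a `file::Checked`
// is omitted, so only the share-level calls are shown.
#[test]
fn example_mock_share_roundtrip() {
    let client = MockClient::default();
    let (uri, alias_id) = (Uri::from(true), AliasID::from(true));

    let share_id = client
        .share_create(&uri, &alias_id, json::NewShareRequest::new("example", 1))
        .unwrap();
    client.share_notify(&uri, &alias_id, &share_id).unwrap();
}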
|
||||
|
||||
// technically redundant, but kept for refactoring purposes
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::HashSet;
|
||||
|
||||
use super::*;
|
||||
|
||||
fn check_tostr_eq<L, R>(left: L, right: R)
|
||||
where
|
||||
L: ExactSizeIterator,
|
||||
L::Item: ToString,
|
||||
R: ExactSizeIterator,
|
||||
R::Item: ToString,
|
||||
{
|
||||
assert_eq!(left.len(), right.len());
|
||||
|
||||
let l_strings: HashSet<_> = left.map(|s| s.to_string()).collect();
|
||||
let r_strings: HashSet<_> = right.map(|s| s.to_string()).collect();
|
||||
|
||||
assert_eq!(l_strings, r_strings);
|
||||
}
|
||||
|
||||
fn add_share(client: &MockClient) -> ShareID {
|
||||
let share_id = true.into();
|
||||
client
|
||||
.insert_share(&share_id, MockShare::default())
|
||||
.unwrap();
|
||||
|
||||
share_id
|
||||
}
|
||||
|
||||
fn add_file(client: &MockClient, share_id: &ShareID) -> FileID {
|
||||
let file_id = true.into();
|
||||
client
|
||||
.insert_file(share_id, &file_id, MockFile::default())
|
||||
.unwrap();
|
||||
|
||||
file_id
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_share_works() {
|
||||
let client = MockClient::default();
|
||||
let share_ids: [_; 10] = std::array::from_fn(|_| true.into());
|
||||
|
||||
for share_id in share_ids.as_ref() {
|
||||
assert!(client.insert_share(share_id, MockShare::default()).is_ok());
|
||||
}
|
||||
|
||||
check_tostr_eq(client.shares.borrow().keys(), share_ids.iter());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_share_double_errors() {
|
||||
let client = MockClient::default();
|
||||
let share_ids: [_; 10] = std::array::from_fn(|_| add_share(&client));
|
||||
|
||||
for share_id in share_ids.as_ref() {
|
||||
let res = client.insert_share(share_id, MockShare::default());
|
||||
assert!(res.is_err());
|
||||
assert!(res.unwrap_err().response_contains("can't insert share"));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_share_works() {
|
||||
let client = MockClient::default();
|
||||
let share_ids: [_; 10] = std::array::from_fn(|_| add_share(&client));
|
||||
|
||||
for share_id in share_ids.as_ref() {
|
||||
assert!(client.get_share(share_id).is_ok());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_share_nex_errors() {
|
||||
let client = MockClient::default();
|
||||
add_share(&client);
|
||||
let share_ids_nex: [_; 10] = std::array::from_fn(|_| true.into());
|
||||
|
||||
for share_id_nex in share_ids_nex.as_ref() {
|
||||
let res = client.get_share(share_id_nex);
|
||||
|
||||
assert!(res.is_err());
|
||||
assert!(res.unwrap_err().response_contains("can't find share"));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_file_works() {
|
||||
let client = MockClient::default();
|
||||
let share_id = add_share(&client);
|
||||
let file_ids: [_; 10] = std::array::from_fn(|_| true.into());
|
||||
|
||||
for file_id in file_ids.as_ref() {
|
||||
assert!(
|
||||
client
|
||||
.insert_file(&share_id, file_id, MockFile::default())
|
||||
.is_ok()
|
||||
);
|
||||
}
|
||||
|
||||
let shares = client.shares.borrow();
|
||||
let share = shares.get(&share_id.to_string()).unwrap();
|
||||
|
||||
check_tostr_eq(share.files.keys(), file_ids.iter());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_file_nex_share_errors() {
|
||||
let client = MockClient::default();
|
||||
add_share(&client);
|
||||
|
||||
let share_id_nex = ShareID::default();
|
||||
let res = client.insert_file(&share_id_nex, &true.into(), MockFile::default());
|
||||
assert!(res.is_err());
|
||||
assert!(res.unwrap_err().response_contains("can't find share"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_file_double_errors() {
|
||||
let client = MockClient::default();
|
||||
let share_id = add_share(&client);
|
||||
let file_ids: [_; 10] = std::array::from_fn(|_| add_file(&client, &share_id));
|
||||
|
||||
for file_id in file_ids.as_ref() {
|
||||
let res = client.insert_file(&share_id, file_id, MockFile::default());
|
||||
assert!(res.is_err());
|
||||
assert!(res.unwrap_err().response_contains("can't insert file"));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_file_works() {
|
||||
let client = MockClient::default();
|
||||
let share_id = add_share(&client);
|
||||
let file_ids: [_; 10] = std::array::from_fn(|_| add_file(&client, &share_id));
|
||||
|
||||
for file_id in file_ids.as_ref() {
|
||||
assert!(client.get_file(&share_id, file_id).is_ok());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_file_nex_errors() {
|
||||
let client = MockClient::default();
|
||||
let share_id = add_share(&client);
|
||||
add_file(&client, &share_id);
|
||||
let file_ids_nex: [_; 10] = std::array::from_fn(|_| true.into());
|
||||
|
||||
for file_id_nex in file_ids_nex.as_ref() {
|
||||
let share_id_nex = true.into();
|
||||
let res = client.get_file(&share_id_nex, file_id_nex);
|
||||
|
||||
assert!(res.is_err());
|
||||
assert!(res.unwrap_err().response_contains("can't find share"));
|
||||
|
||||
let res = client.get_file(&share_id, file_id_nex);
|
||||
|
||||
assert!(res.is_err());
|
||||
assert!(res.unwrap_err().response_contains("can't find file"));
|
||||
}
|
||||
}
|
||||
}
|
||||
176
src/test_util/mock_ids.rs
Normal file
|
|
@@ -0,0 +1,176 @@
|
|||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
Result,
|
||||
sharry::{AliasID, FileID, ShareID, Uri},
|
||||
};
|
||||
|
||||
const VALID_URI: &str = "scheme://valid.uri";
|
||||
const VALID_ALIAS: &str = "valid-alias";
|
||||
const VALID_SHARE: &str = "valid-share";
|
||||
const VALID_FILE: &str = "valid-file";
|
||||
|
||||
fn make_invalid(valid: &str) -> String {
|
||||
let invalid = valid.replace("valid", "invalid");
|
||||
|
||||
assert_ne!(invalid, valid);
|
||||
|
||||
invalid
|
||||
}
|
||||
|
||||
fn make_valid(valid: &str) -> String {
|
||||
let invalid = make_invalid(valid);
|
||||
|
||||
let valid = {
|
||||
let id = Uuid::now_v1(&[4, 8, 15, 16, 23, 42]);
|
||||
valid.replace("valid", &id.to_string())
|
||||
};
|
||||
|
||||
assert_ne!(valid, invalid);
|
||||
|
||||
valid
|
||||
}
|
||||
|
||||
pub trait CheckID {
|
||||
fn check(self) -> Result<()>;
|
||||
}
|
||||
|
||||
impl CheckID for (&Uri, &AliasID) {
|
||||
fn check(self) -> Result<()> {
|
||||
if self.0.to_string() == make_invalid(VALID_URI) {
|
||||
Err(self.0.into())
|
||||
} else if self.1.as_ref() == make_invalid(VALID_ALIAS) {
|
||||
Err(self.1.into())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CheckID for &ShareID {
|
||||
fn check(self) -> Result<()> {
|
||||
if self.to_string() == make_invalid(VALID_SHARE) {
|
||||
Err(self.into())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CheckID for (&ShareID, &FileID) {
|
||||
fn check(self) -> Result<()> {
|
||||
self.0.check()?;
|
||||
|
||||
if self.1.to_string() == make_invalid(VALID_FILE) {
|
||||
Err(self.1.into())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<bool> for Uri {
|
||||
fn from(value: bool) -> Self {
|
||||
if value {
|
||||
make_valid(VALID_URI).into()
|
||||
} else {
|
||||
make_invalid(VALID_URI).into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<bool> for AliasID {
|
||||
fn from(value: bool) -> Self {
|
||||
if value {
|
||||
make_valid(VALID_ALIAS).into()
|
||||
} else {
|
||||
make_invalid(VALID_ALIAS).into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<bool> for ShareID {
|
||||
fn from(value: bool) -> Self {
|
||||
if value {
|
||||
make_valid(VALID_SHARE).into()
|
||||
} else {
|
||||
make_invalid(VALID_SHARE).into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<bool> for FileID {
|
||||
fn from(value: bool) -> Self {
|
||||
if value {
|
||||
Self::new_test(make_valid(VALID_FILE))
|
||||
} else {
|
||||
Self::new_test(make_invalid(VALID_FILE))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// technically redundant, but kept for refactoring purposes
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn true_makes_valids() {
|
||||
let uri = Uri::from(true);
|
||||
let alias_id = AliasID::from(true);
|
||||
let share_id = ShareID::from(true);
|
||||
let file_id = FileID::from(true);
|
||||
|
||||
assert!((&uri, &alias_id).check().is_ok());
|
||||
assert!(share_id.check().is_ok());
|
||||
assert!((&share_id, &file_id).check().is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn default_is_valid() {
|
||||
let uri = Uri::default();
|
||||
let alias_id = AliasID::default();
|
||||
let share_id = ShareID::default();
|
||||
let file_id = FileID::default();
|
||||
|
||||
assert!((&uri, &alias_id).check().is_ok());
|
||||
assert!(share_id.check().is_ok());
|
||||
assert!((&share_id, &file_id).check().is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn false_makes_invalids() {
|
||||
use crate::Parameter;
|
||||
|
||||
fn test_check(value: impl CheckID, callback: impl FnOnce(&Parameter) -> bool) {
|
||||
let check = value.check().unwrap_err();
|
||||
let p = check.get_invalid_param().unwrap();
|
||||
assert!(callback(p));
|
||||
}
|
||||
|
||||
// valid ids
|
||||
let uri = Uri::from(true);
|
||||
let alias_id = AliasID::from(true);
|
||||
let share_id = ShareID::from(true);
|
||||
let file_id = FileID::from(true);
|
||||
|
||||
// invalid ids
|
||||
let uri_i = Uri::from(false);
|
||||
let alias_id_i = AliasID::from(false);
|
||||
let share_id_i = ShareID::from(false);
|
||||
let file_id_i = FileID::from(false);
|
||||
|
||||
// uri + alias
|
||||
test_check((&uri_i, &alias_id_i), Parameter::is_uri);
|
||||
test_check((&uri_i, &alias_id), Parameter::is_uri);
|
||||
test_check((&uri, &alias_id_i), Parameter::is_alias_id);
|
||||
|
||||
// share
|
||||
test_check(&share_id_i, Parameter::is_share_id);
|
||||
|
||||
// share + file
|
||||
test_check((&share_id_i, &file_id_i), Parameter::is_share_id);
|
||||
test_check((&share_id_i, &file_id), Parameter::is_share_id);
|
||||
test_check((&share_id, &file_id_i), Parameter::is_file_id);
|
||||
}
|
||||
}
|
||||
42
src/test_util/mod.rs
Normal file
|
|
@@ -0,0 +1,42 @@
|
|||
#![cfg(test)]
|
||||
|
||||
pub mod data;
|
||||
mod mock_client;
|
||||
mod mock_ids;
|
||||
|
||||
pub use mock_client::MockClient;
|
||||
|
||||
use std::{fmt, io::Write};
|
||||
|
||||
use tempfile::NamedTempFile;
|
||||
|
||||
#[allow(clippy::needless_pass_by_value)]
|
||||
pub fn check_trait<A, E>(actual: A, expected: E, tr: &str, ty: &str)
|
||||
where
|
||||
A: fmt::Debug + PartialEq<E>,
|
||||
E: fmt::Debug,
|
||||
{
|
||||
assert_eq!(
|
||||
actual, expected,
|
||||
"`impl {tr} for {ty}` expected: {expected:?}, actual: {actual:?}",
|
||||
);
|
||||
}
|
||||
|
||||
/// Helper to create a temp file from `data`
|
||||
pub fn create_file(data: &[u8]) -> NamedTempFile {
|
||||
let mut tmp = NamedTempFile::new().unwrap();
|
||||
tmp.write_all(data).unwrap();
|
||||
tmp
|
||||
}
|
||||
|
||||
// technically redundant, but kept for refactoring purposes
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
#[should_panic = "`impl foo for bar` expected: 1, actual: 0"]
|
||||
fn make_check_trait_panic() {
|
||||
check_trait(0, 1, "foo", "bar");
|
||||
}
|
||||
}
|
||||
179
src/ureq_client.rs
Normal file
|
|
@@ -0,0 +1,179 @@
|
|||
use log::{debug, trace};
|
||||
|
||||
use crate::{
|
||||
file::{self, FileTrait},
|
||||
sharry::{self, AliasID, FileID, ShareID, Uri},
|
||||
};
|
||||
|
||||
fn find_cause(
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
share_id: Option<&ShareID>,
|
||||
file_id: Option<&FileID>,
|
||||
) -> impl FnOnce(ureq::Error) -> crate::Error {
|
||||
move |error| match error {
|
||||
ureq::Error::StatusCode(403) => {
|
||||
trace!("HTTP Error 403: Alias not found!");
|
||||
|
||||
alias_id.into()
|
||||
}
|
||||
ureq::Error::StatusCode(404) => {
|
||||
trace!("HTTP Error 404: Share and/or file may have been deleted!");
|
||||
|
||||
if let Some(file_id) = file_id {
|
||||
file_id.into()
|
||||
} else if let Some(share_id) = share_id {
|
||||
share_id.into()
|
||||
} else {
|
||||
uri.into()
|
||||
}
|
||||
}
|
||||
ureq::Error::Io(error) => {
|
||||
trace!("std::io::Error {error:?}");
|
||||
|
||||
if let Some(msg) = error.get_ref().map(ToString::to_string) {
|
||||
if msg.starts_with("failed to lookup address information") {
|
||||
uri.into()
|
||||
} else {
|
||||
error.into()
|
||||
}
|
||||
} else {
|
||||
error.into()
|
||||
}
|
||||
}
|
||||
error => crate::Error::Unknown(error.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
impl sharry::Client for ureq::Agent {
|
||||
fn share_create(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
data: sharry::json::NewShareRequest,
|
||||
) -> crate::Result<ShareID> {
|
||||
let res = {
|
||||
let endpoint = uri.share_create();
|
||||
|
||||
let mut res = self
|
||||
.post(&endpoint)
|
||||
.header("Sharry-Alias", alias_id.as_ref())
|
||||
.send_json(data)
|
||||
.map_err(find_cause(uri, alias_id, None, None))?;
|
||||
|
||||
trace!("{endpoint:?} response: {res:?}");
|
||||
crate::Error::res_status_check(res.status(), ureq::http::StatusCode::OK)?;
|
||||
|
||||
res.body_mut()
|
||||
.read_json::<sharry::json::NewShareResponse>()
|
||||
.map_err(crate::Error::response)?
|
||||
};
|
||||
|
||||
debug!("{res:?}");
|
||||
|
||||
if res.success && (res.message == "Share created.") {
|
||||
trace!("new share id: {:?}", res.id);
|
||||
|
||||
Ok(res.id.into())
|
||||
} else {
|
||||
crate::Error::mismatch("success/\"Share created.\"", format_args!("{res:?}"))
|
||||
}
|
||||
}
|
||||
|
||||
fn share_notify(&self, uri: &Uri, alias_id: &AliasID, share_id: &ShareID) -> crate::Result<()> {
|
||||
let res = {
|
||||
let endpoint = uri.share_notify(share_id);
|
||||
|
||||
let mut res = self
|
||||
.post(&endpoint)
|
||||
.header("Sharry-Alias", alias_id.as_ref())
|
||||
.send_empty()
|
||||
.map_err(find_cause(uri, alias_id, Some(share_id), None))?;
|
||||
|
||||
trace!("{endpoint:?} response: {res:?}");
|
||||
crate::Error::res_status_check(res.status(), ureq::http::StatusCode::OK)?;
|
||||
|
||||
res.body_mut()
|
||||
.read_json::<sharry::json::NotifyShareResponse>()
|
||||
.map_err(crate::Error::response)?
|
||||
};
|
||||
|
||||
debug!("{res:?}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn file_create(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
share_id: &ShareID,
|
||||
file: &file::Checked,
|
||||
) -> crate::Result<FileID> {
|
||||
let res = {
|
||||
let endpoint = uri.file_create(share_id);
|
||||
|
||||
let res = self
|
||||
.post(&endpoint)
|
||||
.header("Sharry-Alias", alias_id.as_ref())
|
||||
.header("Sharry-File-Name", file.get_name())
|
||||
.header("Upload-Length", file.get_size())
|
||||
.send_empty()
|
||||
.map_err(find_cause(uri, alias_id, Some(share_id), None))?;
|
||||
|
||||
trace!("{endpoint:?} response: {res:?}");
|
||||
crate::Error::res_status_check(res.status(), ureq::http::StatusCode::CREATED)?;
|
||||
res
|
||||
};
|
||||
|
||||
let location = (res.headers().get("Location"))
|
||||
.ok_or_else(|| crate::Error::response("Location header not found"))?
|
||||
.to_str()
|
||||
.map_err(crate::Error::response)?
|
||||
.to_string();
|
||||
|
||||
FileID::try_from(location)
|
||||
}
|
||||
|
||||
fn file_patch(
|
||||
&self,
|
||||
uri: &Uri,
|
||||
alias_id: &AliasID,
|
||||
share_id: &ShareID,
|
||||
chunk: &file::Chunk,
|
||||
) -> crate::Result<()> {
|
||||
let res = {
|
||||
let endpoint = uri.file_patch(share_id, chunk.get_file_id());
|
||||
|
||||
let res = self
|
||||
.patch(&endpoint)
|
||||
.header("Sharry-Alias", alias_id.as_ref())
|
||||
.header("Upload-Offset", chunk.get_offset())
|
||||
.send(chunk.get_data())
|
||||
.map_err(find_cause(
|
||||
uri,
|
||||
alias_id,
|
||||
Some(share_id),
|
||||
Some(chunk.get_file_id()),
|
||||
))?;
|
||||
|
||||
trace!("{endpoint:?} response: {res:?}");
|
||||
crate::Error::res_status_check(res.status(), ureq::http::StatusCode::NO_CONTENT)?;
|
||||
res
|
||||
};
|
||||
|
||||
let res_offset = (res.headers().get("Upload-Offset"))
|
||||
.ok_or_else(|| crate::Error::response("Upload-Offset header not found"))?
|
||||
.to_str()
|
||||
.map_err(crate::Error::response)?
|
||||
.parse::<u64>()
|
||||
.map_err(crate::Error::response)?;
|
||||
let next_offset = chunk.get_offset() + chunk.get_length();
|
||||
|
||||
if res_offset == next_offset {
|
||||
Ok(())
|
||||
} else {
|
||||
crate::Error::mismatch(format_args!("Upload-Offset {next_offset:?}"), res_offset)
|
||||
}
|
||||
}
|
||||
}
|
||||