diff --git a/Cargo.lock b/Cargo.lock index eff9ffba..3d18f3db 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -291,7 +291,7 @@ checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -521,7 +521,7 @@ dependencies = [ "cached_proc_macro_types", "hashbrown 0.14.5", "once_cell", - "thiserror", + "thiserror 1.0.64", "web-time", ] @@ -534,7 +534,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -568,7 +568,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03fa8484a7f2eef80e6dd2e2be90b322b9c29aeb1bbc206013d6eb2104db7241" dependencies = [ "serde", - "thiserror", + "thiserror 1.0.64", "toml", ] @@ -592,7 +592,7 @@ dependencies = [ "semver", "serde", "serde_json", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -693,7 +693,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -756,7 +756,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -951,7 +951,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -962,7 +962,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1047,7 +1047,7 @@ checksum = "64b697ac90ff296f0fc031ee5a61c7ac31fb9fff50e3fb32873b09223613fc0c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1067,7 +1067,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", "unicode-xid", ] @@ -1080,7 +1080,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1130,7 +1130,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1160,6 +1160,12 @@ version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" +[[package]] +name = "ecow" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e42fc0a93992b20c58b99e59d61eaf1635a25bfbe49e4275c34ba0aee98119ba" + [[package]] name = "either" version = "1.13.0" @@ -1189,7 +1195,7 @@ checksum = "f282cfdfe92516eb26c2af8589c274c7c17681f5ecc03c18255fe741c6aa64eb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1211,7 +1217,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1420,7 +1426,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -1973,7 +1979,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2133,7 +2139,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2155,7 +2161,7 @@ dependencies = [ "rustc_version", "smallvec", "tagptr", - "thiserror", + "thiserror 1.0.64", "triomphe", "uuid", ] @@ -2349,7 +2355,7 @@ dependencies = [ "once_cell", "shell-escape", "tempfile", - "thiserror", + "thiserror 1.0.64", 
"tokio", ] @@ -2401,7 +2407,7 @@ dependencies = [ "openssh", "openssh-sftp-protocol-error", "ssh_format_error", - "thiserror", + "thiserror 1.0.64", "tokio", ] @@ -2427,7 +2433,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0719269eb3f037866ae07ec89cb44ed2c1d63b72b2390cef8e1aa3016a956ff8" dependencies = [ "serde", - "thiserror", + "thiserror 1.0.64", "vec-strings", ] @@ -2499,6 +2505,17 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "password-hash" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" +dependencies = [ + "base64ct", + "rand_core", + "subtle", +] + [[package]] name = "path-dedot" version = "3.1.1" @@ -2550,7 +2567,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdbef9d1d47087a895abd220ed25eb4ad973a5e26f6a4367b038c25e28dfc2d9" dependencies = [ "memchr", - "thiserror", + "thiserror 1.0.64", "ucd-trie", ] @@ -2574,7 +2591,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2605,7 +2622,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2786,7 +2803,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -2889,7 +2906,7 @@ dependencies = [ "rustc-hash", "rustls 0.23.14", "socket2", - "thiserror", + "thiserror 1.0.64", "tokio", "tracing", ] @@ -2906,7 +2923,7 @@ dependencies = [ "rustc-hash", "rustls 0.23.14", "slab", - "thiserror", + "thiserror 1.0.64", "tinyvec", "tracing", ] @@ -3009,7 +3026,7 @@ checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom", "libredox", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -3206,7 +3223,7 @@ dependencies = [ "regex", "relative-path", "rustc_version", - "syn 2.0.79", + "syn 2.0.87", "unicode-ident", ] @@ -3260,7 +3277,7 @@ dependencies = [ "cargo-manifest", "cargo_metadata", "serde", - "thiserror", + "thiserror 1.0.64", "toml", "tracing", ] @@ -3292,7 +3309,7 @@ dependencies = [ "serde", "strum", "strum_macros", - "thiserror", + "thiserror 2.0.0", "tokio", "toml", "typed-path", @@ -3326,6 +3343,7 @@ dependencies = [ "dirs", "displaydoc", "dunce", + "ecow", "enum-map", "enum-map-derive", "enumset", @@ -3370,7 +3388,7 @@ dependencies = [ "strum", "tar", "tempfile", - "thiserror", + "thiserror 2.0.0", "toml", "walkdir", "xattr", @@ -3492,7 +3510,7 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70d2278eb028d54ca3765d7c3f9ae100e119c07910f5731ade3564ea32a4ea20" dependencies = [ - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -3546,6 +3564,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0516a385866c09368f0b5bcd1caff3366aace790fcd46e2bb032697bb172fd1f" dependencies = [ + "password-hash", "pbkdf2", "salsa20", "sha2", @@ -3621,7 +3640,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -3684,7 +3703,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -3770,7 +3789,7 @@ checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" dependencies = [ "num-bigint", 
"num-traits", - "thiserror", + "thiserror 1.0.64", "time", ] @@ -3886,7 +3905,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -3910,7 +3929,7 @@ dependencies = [ "log", "pin-project", "rustls 0.21.12", - "thiserror", + "thiserror 1.0.64", ] [[package]] @@ -3926,9 +3945,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.79" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" dependencies = [ "proc-macro2", "quote", @@ -4020,7 +4039,16 @@ version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.64", +] + +[[package]] +name = "thiserror" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15291287e9bff1bc6f9ff3409ed9af665bec7a5fc8ac079ea96be07bca0e2668" +dependencies = [ + "thiserror-impl 2.0.0", ] [[package]] @@ -4031,7 +4059,18 @@ checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22efd00f33f93fa62848a7cab956c3d38c8d43095efda1decfc2b3a5dc0b8972" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", ] [[package]] @@ -4127,7 +4166,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -4214,7 +4253,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] @@ -4415,7 +4454,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", "wasm-bindgen-shared", ] @@ -4449,7 +4488,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4798,7 +4837,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.87", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index e54d61d6..2c9e838d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,14 +12,19 @@ resolver = "2" rust-version = "1.76.0" [workspace.dependencies] +# Internal Dependencies +rustic_backend = { path = "crates/backend", version = "0" } +rustic_core = { path = "crates/core", version = "0" } +rustic_testing = { path = "crates/testing", version = "0" } + aho-corasick = "1.1.3" anyhow = "1.0.89" bytes = "1.7.2" +displaydoc = "0.2.5" enum-map = "2.7.3" -rustic_backend = { path = "crates/backend" } -rustic_core = { path = "crates/core", version = "0" } -rustic_testing = { path = "crates/testing", version = "0" } +log = "0.4.22" simplelog = "0.12.2" +thiserror = "2.0.0" # dev-dependencies rstest = "0.23.0" diff --git a/crates/backend/Cargo.toml b/crates/backend/Cargo.toml index 5c579d6f..abc8c9e7 100644 --- a/crates/backend/Cargo.toml +++ b/crates/backend/Cargo.toml @@ -50,15 +50,14 @@ rclone = ["rest", "dep:rand", "dep:semver"] rustic_core = { 
workspace = true } # errors -anyhow = "1.0.89" -displaydoc = "0.2.5" -thiserror = "1.0.64" +displaydoc = { workspace = true } +thiserror = { workspace = true } # logging -log = "0.4.22" +log = { workspace = true } # other dependencies -bytes = "1.7.2" +bytes = { workspace = true } derive_setters = "0.1.6" humantime = "2.1.0" itertools = "0.13.0" @@ -101,6 +100,7 @@ opendal = { version = "0.50.0", features = ["services-b2", "services-sftp", "ser opendal = { version = "0.50.0", features = ["services-b2", "services-swift", "services-azblob", "services-azdls", "services-cos", "services-fs", "services-ftp", "services-dropbox", "services-gdrive", "services-gcs", "services-ghac", "services-http", "services-ipmfs", "services-memory", "services-obs", "services-onedrive", "services-oss", "services-s3", "services-webdav", "services-webhdfs", "services-azfile", "layers-blocking", "layers-throttle"], optional = true } [dev-dependencies] +anyhow = { workspace = true } rstest = { workspace = true } toml = "0.8.19" diff --git a/crates/backend/src/choose.rs b/crates/backend/src/choose.rs index ab5007ca..9dca3831 100644 --- a/crates/backend/src/choose.rs +++ b/crates/backend/src/choose.rs @@ -1,14 +1,11 @@ //! This module contains [`BackendOptions`] and helpers to choose a backend from a given url. -use anyhow::{anyhow, Result}; use derive_setters::Setters; use std::{collections::HashMap, sync::Arc}; use strum_macros::{Display, EnumString}; -#[allow(unused_imports)] -use rustic_core::{RepositoryBackends, WriteBackend}; +use rustic_core::{ErrorKind, RepositoryBackends, RusticError, RusticResult, WriteBackend}; use crate::{ - error::BackendAccessErrorKind, local::LocalBackend, util::{location_to_type_and_path, BackendLocation}, }; @@ -75,12 +72,17 @@ impl BackendOptions { /// # Returns /// /// The backends for the repository. - pub fn to_backends(&self) -> Result { + pub fn to_backends(&self) -> RusticResult { let mut options = self.options.clone(); options.extend(self.options_cold.clone()); let be = self .get_backend(self.repository.as_ref(), options)? - .ok_or_else(|| anyhow!("No repository given."))?; + .ok_or_else(|| { + RusticError::new( + ErrorKind::Backend, + "No repository given. Please make sure, that you have set the repository.", + ) + })?; let mut options = self.options.clone(); options.extend(self.options_hot.clone()); let be_hot = self.get_backend(self.repo_hot.as_ref(), options)?; @@ -97,7 +99,7 @@ impl BackendOptions { /// /// # Errors /// - /// If the backend cannot be loaded, an error is returned. + /// * If the backend cannot be loaded, an error is returned. /// /// # Returns /// @@ -108,13 +110,18 @@ impl BackendOptions { &self, repo_string: Option<&String>, options: HashMap, - ) -> Result>> { + ) -> RusticResult>> { repo_string .map(|string| { let (be_type, location) = location_to_type_and_path(string)?; - be_type.to_backend(location, options.into()).map_err(|err| { - BackendAccessErrorKind::BackendLoadError(be_type.to_string(), err).into() - }) + be_type + .to_backend(location.clone(), options.into()) + .map_err(|err| { + err + .prepend_guidance_line("Could not load the backend `{name}` at `{location}`. Please check the given backend and try again.") + .attach_context("name", be_type.to_string()) + .attach_context("location", location.to_string()) + }) }) .transpose() } @@ -131,14 +138,12 @@ pub trait BackendChoice { /// /// # Errors /// - /// * [`BackendAccessErrorKind::BackendNotSupported`] - If the backend is not supported. 
- /// - /// [`BackendAccessErrorKind::BackendNotSupported`]: crate::error::BackendAccessErrorKind::BackendNotSupported + /// * If the backend is not supported. fn to_backend( &self, location: BackendLocation, options: Option>, - ) -> Result>; + ) -> RusticResult>; } /// The supported backend types. @@ -176,7 +181,7 @@ impl BackendChoice for SupportedBackend { &self, location: BackendLocation, options: Option>, - ) -> Result> { + ) -> RusticResult> { let options = options.unwrap_or_default(); Ok(match self { diff --git a/crates/backend/src/error.rs b/crates/backend/src/error.rs deleted file mode 100644 index f2aa3477..00000000 --- a/crates/backend/src/error.rs +++ /dev/null @@ -1,134 +0,0 @@ -#![allow(clippy::doc_markdown)] -use std::{num::TryFromIntError, process::ExitStatus, str::Utf8Error}; - -use displaydoc::Display; -use thiserror::Error; - -/// [`BackendAccessErrorKind`] describes the errors that can be returned by the various Backends -#[derive(Error, Debug, Display)] -#[non_exhaustive] -pub enum BackendAccessErrorKind { - /// backend {0:?} is not supported! - BackendNotSupported(String), - /// backend {0} cannot be loaded: {1:?} - BackendLoadError(String, anyhow::Error), - /// no suitable id found for {0} - NoSuitableIdFound(String), - /// id {0} is not unique - IdNotUnique(String), - /// {0:?} - #[error(transparent)] - FromIoError(#[from] std::io::Error), - /// {0:?} - #[error(transparent)] - FromTryIntError(#[from] TryFromIntError), - #[cfg(feature = "rest")] - /// backoff failed: {0:?} - BackoffError(#[from] backoff::Error), - /// parsing failed for url: `{0:?}` - UrlParsingFailed(#[from] url::ParseError), - /// creating data in backend failed - CreatingDataOnBackendFailed, - /// writing bytes to backend failed - WritingBytesToBackendFailed, - /// removing data from backend failed - RemovingDataFromBackendFailed, - /// failed to list files on Backend - ListingFilesOnBackendFailed, -} - -/// [`RcloneErrorKind`] describes the errors that can be returned by a backend provider -#[derive(Error, Debug, Display)] -#[non_exhaustive] -pub enum RcloneErrorKind { - /// 'rclone version' doesn't give any output - NoOutputForRcloneVersion, - /// cannot get stdout of rclone - NoStdOutForRclone, - /// rclone exited with `{0:?}` - RCloneExitWithBadStatus(ExitStatus), - /// url must start with http:\/\/! url: {0:?} - UrlNotStartingWithHttp(String), - /// StdIo Error: `{0:?}` - #[error(transparent)] - FromIoError(#[from] std::io::Error), - /// utf8 error: `{0:?}` - #[error(transparent)] - FromUtf8Error(#[from] Utf8Error), - /// error parsing verision number from `{0:?}` - FromParseVersion(String), - /// Using rclone without authentication! Upgrade to rclone >= 1.52.2 (current version: `{0}`)! - RCloneWithoutAuthentication(String), -} - -/// [`RestErrorKind`] describes the errors that can be returned while dealing with the REST API -#[derive(Error, Debug, Display)] -#[non_exhaustive] -pub enum RestErrorKind { - /// value `{0:?}` not supported for option retry! 
- NotSupportedForRetry(String), - /// parsing failed for url: `{0:?}` - UrlParsingFailed(#[from] url::ParseError), - #[cfg(feature = "rest")] - /// requesting resource failed: `{0:?}` - RequestingResourceFailed(#[from] reqwest::Error), - /// couldn't parse duration in humantime library: `{0:?}` - CouldNotParseDuration(#[from] humantime::DurationError), - #[cfg(feature = "rest")] - /// backoff failed: {0:?} - BackoffError(#[from] backoff::Error), - #[cfg(feature = "rest")] - /// Failed to build HTTP client: `{0:?}` - BuildingClientFailed(reqwest::Error), - /// joining URL failed on: {0:?} - JoiningUrlFailed(url::ParseError), -} - -/// [`LocalBackendErrorKind`] describes the errors that can be returned by an action on the filesystem in Backends -#[derive(Error, Debug, Display)] -#[non_exhaustive] -pub enum LocalBackendErrorKind { - /// directory creation failed: `{0:?}` - DirectoryCreationFailed(#[from] std::io::Error), - /// querying metadata failed: `{0:?}` - QueryingMetadataFailed(std::io::Error), - /// querying WalkDir metadata failed: `{0:?}` - QueryingWalkDirMetadataFailed(walkdir::Error), - /// executtion of command failed: `{0:?}` - CommandExecutionFailed(std::io::Error), - /// command was not successful for filename {file_name}, type {file_type}, id {id}: {status} - CommandNotSuccessful { - /// File name - file_name: String, - /// File type - file_type: String, - /// Item ID - id: String, - /// Exit status - status: ExitStatus, - }, - /// error building automaton `{0:?}` - FromAhoCorasick(#[from] aho_corasick::BuildError), - /// {0:?} - #[error(transparent)] - FromTryIntError(#[from] TryFromIntError), - /// {0:?} - #[error(transparent)] - FromWalkdirError(#[from] walkdir::Error), - /// removing file failed: `{0:?}` - FileRemovalFailed(std::io::Error), - /// opening file failed: `{0:?}` - OpeningFileFailed(std::io::Error), - /// setting file length failed: `{0:?}` - SettingFileLengthFailed(std::io::Error), - /// can't jump to position in file: `{0:?}` - CouldNotSeekToPositionInFile(std::io::Error), - /// couldn't write to buffer: `{0:?}` - CouldNotWriteToBuffer(std::io::Error), - /// reading file contents failed: `{0:?}` - ReadingContentsOfFileFailed(std::io::Error), - /// reading exact length of file contents failed: `{0:?}` - ReadingExactLengthOfFileFailed(std::io::Error), - /// failed to sync OS Metadata to disk: `{0:?}` - SyncingOfOsMetadataFailed(std::io::Error), -} diff --git a/crates/backend/src/lib.rs b/crates/backend/src/lib.rs index e24da48f..49eec804 100644 --- a/crates/backend/src/lib.rs +++ b/crates/backend/src/lib.rs @@ -53,27 +53,22 @@ This crate exposes a few features for controlling dependency usage: */ pub mod choose; -/// Error types for the backend. -pub mod error; /// Local backend for Rustic. pub mod local; +/// Utility functions for the backend. +pub mod util; + /// `OpenDAL` backend for Rustic. #[cfg(feature = "opendal")] pub mod opendal; + /// `Rclone` backend for Rustic. #[cfg(feature = "rclone")] pub mod rclone; + /// REST backend for Rustic. #[cfg(feature = "rest")] pub mod rest; -/// Utility functions for the backend. 
-pub mod util; - -// rustic_backend Public API -pub use crate::{ - choose::{BackendOptions, SupportedBackend}, - local::LocalBackend, -}; #[cfg(feature = "opendal")] pub use crate::opendal::OpenDALBackend; @@ -83,3 +78,12 @@ pub use crate::rclone::RcloneBackend; #[cfg(feature = "rest")] pub use crate::rest::RestBackend; + +// rustic_backend Public API +pub use crate::{ + choose::{BackendOptions, SupportedBackend}, + local::LocalBackend, +}; + +// re-export for error handling +pub use rustic_core::{ErrorKind, RusticError, RusticResult, Severity, Status}; diff --git a/crates/backend/src/local.rs b/crates/backend/src/local.rs index df52ade5..02cf6c5d 100644 --- a/crates/backend/src/local.rs +++ b/crates/backend/src/local.rs @@ -6,14 +6,14 @@ use std::{ }; use aho_corasick::AhoCorasick; -use anyhow::Result; use bytes::Bytes; -use log::{debug, trace, warn}; +use log::{debug, error, trace, warn}; use walkdir::WalkDir; -use rustic_core::{CommandInput, FileType, Id, ReadBackend, WriteBackend, ALL_FILE_TYPES}; - -use crate::error::LocalBackendErrorKind; +use rustic_core::{ + CommandInput, ErrorKind, FileType, Id, ReadBackend, RusticError, RusticResult, WriteBackend, + ALL_FILE_TYPES, +}; /// A local backend. #[derive(Clone, Debug)] @@ -32,16 +32,20 @@ impl LocalBackend { /// # Arguments /// /// * `path` - The base path of the backend + /// * `options` - Additional options for the backend /// /// # Errors /// - /// * [`LocalBackendErrorKind::DirectoryCreationFailed`] - If the directory could not be created. + /// * If the directory could not be created. + /// + /// # Options /// - /// [`LocalBackendErrorKind::DirectoryCreationFailed`]: LocalBackendErrorKind::DirectoryCreationFailed + /// * `post-create-command` - The command to call after a file was created. + /// * `post-delete-command` - The command to call after a file was deleted. pub fn new( path: impl AsRef, options: impl IntoIterator, - ) -> Result { + ) -> RusticResult { let path = path.as_ref().into(); let mut post_create_command = None; let mut post_delete_command = None; @@ -58,6 +62,7 @@ impl LocalBackend { } } } + Ok(Self { path, post_create_command, @@ -97,10 +102,10 @@ impl LocalBackend { /// /// # Errors /// - /// * [`LocalBackendErrorKind::FromAhoCorasick`] - If the patterns could not be compiled. - /// * [`LocalBackendErrorKind::FromSplitError`] - If the command could not be parsed. - /// * [`LocalBackendErrorKind::CommandExecutionFailed`] - If the command could not be executed. - /// * [`LocalBackendErrorKind::CommandNotSuccessful`] - If the command was not successful. + /// * If the patterns could not be compiled. + /// * If the command could not be parsed. + /// * If the command could not be executed. + /// * If the command was not successful. /// /// # Notes /// @@ -108,31 +113,59 @@ impl LocalBackend { /// * `%file` - The path to the file. /// * `%type` - The type of the file. /// * `%id` - The id of the file. 
- /// - /// [`LocalBackendErrorKind::FromAhoCorasick`]: LocalBackendErrorKind::FromAhoCorasick - /// [`LocalBackendErrorKind::FromSplitError`]: LocalBackendErrorKind::FromSplitError - /// [`LocalBackendErrorKind::CommandExecutionFailed`]: LocalBackendErrorKind::CommandExecutionFailed - /// [`LocalBackendErrorKind::CommandNotSuccessful`]: LocalBackendErrorKind::CommandNotSuccessful - fn call_command(tpe: FileType, id: &Id, filename: &Path, command: &str) -> Result<()> { + fn call_command(tpe: FileType, id: &Id, filename: &Path, command: &str) -> RusticResult<()> { let id = id.to_hex(); + let patterns = &["%file", "%type", "%id"]; - let ac = AhoCorasick::new(patterns).map_err(LocalBackendErrorKind::FromAhoCorasick)?; + + let ac = AhoCorasick::new(patterns).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Experienced an error building AhoCorasick automaton for command replacement.", + err, + ) + .ask_report() + })?; + let replace_with = &[filename.to_str().unwrap(), tpe.dirname(), id.as_str()]; + let actual_command = ac.replace_all(command, replace_with); + debug!("calling {actual_command}..."); - let command: CommandInput = actual_command.parse()?; + + let command: CommandInput = actual_command.parse().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to parse command input: `{command}` is not a valid command.", + err, + ) + .attach_context("command", actual_command) + .attach_context("replacement", replace_with.join(", ")) + .ask_report() + })?; + let status = Command::new(command.command()) .args(command.args()) .status() - .map_err(LocalBackendErrorKind::CommandExecutionFailed)?; + .map_err(|err| { + RusticError::with_source( + ErrorKind::ExternalCommand, + "Failed to execute `{command}`. Please check the command and try again.", + err, + ) + .attach_context("command", command.to_string()) + })?; + if !status.success() { - return Err(LocalBackendErrorKind::CommandNotSuccessful { - file_name: replace_with[0].to_owned(), - file_type: replace_with[1].to_owned(), - id: replace_with[2].to_owned(), - status, - } - .into()); + return Err(RusticError::new( + ErrorKind::ExternalCommand, + "Command was not successful: `{command}` failed with status `{status}`.", + ) + .attach_context("command", command.to_string()) + .attach_context("file_name", replace_with[0]) + .attach_context("file_type", replace_with[1]) + .attach_context("id", replace_with[2]) + .attach_context("status", status.to_string())); } Ok(()) } @@ -157,7 +190,7 @@ impl ReadBackend for LocalBackend { /// # Notes /// /// If the file type is `FileType::Config`, this will return a list with a single default id. - fn list(&self, tpe: FileType) -> Result> { + fn list(&self, tpe: FileType) -> RusticResult> { trace!("listing tpe: {tpe:?}"); if tpe == FileType::Config { return Ok(if self.path.join("config").exists() { @@ -169,6 +202,12 @@ impl ReadBackend for LocalBackend { let walker = WalkDir::new(self.path.join(tpe.dirname())) .into_iter() + // TODO: What to do with errors? + .inspect(|r| { + if let Err(err) = r { + error!("Error while listing files: {err:?}"); + } + }) .filter_map(walkdir::Result::ok) .filter(|e| e.file_type().is_file()) .filter_map(|e| e.file_name().to_string_lossy().parse::().ok()); @@ -183,27 +222,35 @@ impl ReadBackend for LocalBackend { /// /// # Errors /// - /// * [`LocalBackendErrorKind::QueryingMetadataFailed`] - If the metadata of the file could not be queried. 
- /// * [`LocalBackendErrorKind::FromTryIntError`] - If the length of the file could not be converted to u32. - /// * [`LocalBackendErrorKind::QueryingWalkDirMetadataFailed`] - If the metadata of the file could not be queried. - /// - /// [`LocalBackendErrorKind::QueryingMetadataFailed`]: LocalBackendErrorKind::QueryingMetadataFailed - /// [`LocalBackendErrorKind::FromTryIntError`]: LocalBackendErrorKind::FromTryIntError - /// [`LocalBackendErrorKind::QueryingWalkDirMetadataFailed`]: LocalBackendErrorKind::QueryingWalkDirMetadataFailed - fn list_with_size(&self, tpe: FileType) -> Result> { + /// * If the metadata of the file could not be queried. + /// * If the length of the file could not be converted to u32. + /// * If the metadata of the file could not be queried. + fn list_with_size(&self, tpe: FileType) -> RusticResult> { trace!("listing tpe: {tpe:?}"); let path = self.path.join(tpe.dirname()); if tpe == FileType::Config { return Ok(if path.exists() { - vec![( - Id::default(), - path.metadata() - .map_err(LocalBackendErrorKind::QueryingMetadataFailed)? - .len() - .try_into() - .map_err(LocalBackendErrorKind::FromTryIntError)?, - )] + vec![(Id::default(), { + let metadata = path.metadata().map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Failed to query metadata of the file `{path}`. Please check the file and try again.", + err + ) + .attach_context("path", path.to_string_lossy()) + )?; + + metadata.len().try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Failed to convert file length `{length}` to u32.", + err, + ) + .attach_context("length", metadata.len().to_string()) + .ask_report() + })? + })] } else { Vec::new() }); @@ -211,19 +258,49 @@ impl ReadBackend for LocalBackend { let walker = WalkDir::new(path) .into_iter() + .inspect(|r| { + if let Err(err) = r { + error!("Error while listing files: {err:?}"); + } + }) .filter_map(walkdir::Result::ok) .filter(|e| e.file_type().is_file()) - .map(|e| -> Result<_> { + .map(|e| -> RusticResult<_> { Ok(( e.file_name().to_string_lossy().parse()?, - e.metadata() - .map_err(LocalBackendErrorKind::QueryingWalkDirMetadataFailed)? + { + let metadata = e.metadata() + .map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Failed to query metadata of the file `{path}`. Please check the file and try again.", + err + ) + .attach_context("path", e.path().to_string_lossy()) + ) + ?; + + metadata .len() .try_into() - .map_err(LocalBackendErrorKind::FromTryIntError)?, + .map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Failed to convert file length `{length}` to u32.", + err + ) + .attach_context("length", metadata.len().to_string()) + .ask_report() + )? + }, )) }) - .filter_map(Result::ok); + .inspect(|r| { + if let Err(err) = r { + error!("Error while listing files: {err:?}"); + } + }) + .filter_map(RusticResult::ok); Ok(walker.collect()) } @@ -237,13 +314,19 @@ impl ReadBackend for LocalBackend { /// /// # Errors /// - /// * [`LocalBackendErrorKind::ReadingContentsOfFileFailed`] - If the file could not be read. - /// - /// [`LocalBackendErrorKind::ReadingContentsOfFileFailed`]: LocalBackendErrorKind::ReadingContentsOfFileFailed - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + /// * If the file could not be read. + /// * If the file could not be found. + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { trace!("reading tpe: {tpe:?}, id: {id}"); Ok(fs::read(self.path(tpe, id)) - .map_err(LocalBackendErrorKind::ReadingContentsOfFileFailed)? 
+ .map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Failed to read the contents of the file. Please check the file and try again.", + err, + ) + .attach_context("path", self.path(tpe, id).to_string_lossy()) + })? .into()) } @@ -259,15 +342,10 @@ impl ReadBackend for LocalBackend { /// /// # Errors /// - /// * [`LocalBackendErrorKind::OpeningFileFailed`] - If the file could not be opened. - /// * [`LocalBackendErrorKind::CouldNotSeekToPositionInFile`] - If the file could not be seeked to the given position. - /// * [`LocalBackendErrorKind::FromTryIntError`] - If the length of the file could not be converted to u32. - /// * [`LocalBackendErrorKind::ReadingExactLengthOfFileFailed`] - If the length of the file could not be read. - /// - /// [`LocalBackendErrorKind::OpeningFileFailed`]: LocalBackendErrorKind::OpeningFileFailed - /// [`LocalBackendErrorKind::CouldNotSeekToPositionInFile`]: LocalBackendErrorKind::CouldNotSeekToPositionInFile - /// [`LocalBackendErrorKind::FromTryIntError`]: LocalBackendErrorKind::FromTryIntError - /// [`LocalBackendErrorKind::ReadingExactLengthOfFileFailed`]: LocalBackendErrorKind::ReadingExactLengthOfFileFailed + /// * If the file could not be opened. + /// * If the file could not be sought to the given position. + /// * If the length of the file could not be converted to u32. + /// * If the exact length of the file could not be read. fn read_partial( &self, tpe: FileType, @@ -275,21 +353,49 @@ impl ReadBackend for LocalBackend { _cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { trace!("reading tpe: {tpe:?}, id: {id}, offset: {offset}, length: {length}"); - let mut file = - File::open(self.path(tpe, id)).map_err(LocalBackendErrorKind::OpeningFileFailed)?; - _ = file - .seek(SeekFrom::Start(offset.into())) - .map_err(LocalBackendErrorKind::CouldNotSeekToPositionInFile)?; + let mut file = File::open(self.path(tpe, id)).map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Failed to open the file `{path}`. Please check the file and try again.", + err, + ) + .attach_context("path", self.path(tpe, id).to_string_lossy()) + })?; + _ = file.seek(SeekFrom::Start(offset.into())).map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Failed to seek to the position `{offset}` in the file `{path}`. Please check the file and try again.", + err, + ) + .attach_context("path", self.path(tpe, id).to_string_lossy()) + .attach_context("offset", offset.to_string()) + })?; + let mut vec = vec![ 0; - length - .try_into() - .map_err(LocalBackendErrorKind::FromTryIntError)? + length.try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Failed to convert length `{length}` to u64.", + err, + ) + .attach_context("length", length.to_string()) + .ask_report() + })? ]; - file.read_exact(&mut vec) - .map_err(LocalBackendErrorKind::ReadingExactLengthOfFileFailed)?; + + file.read_exact(&mut vec).map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Failed to read the exact length `{length}` of the file `{path}`. Please check the file and try again.", + err, + ) + .attach_context("path", self.path(tpe, id).to_string_lossy()) + .attach_context("length", length.to_string()) + })?; + Ok(vec.into()) } } @@ -299,20 +405,40 @@ impl WriteBackend for LocalBackend { /// /// # Errors /// - /// * [`LocalBackendErrorKind::DirectoryCreationFailed`] - If the directory could not be created. 
- /// - /// [`LocalBackendErrorKind::DirectoryCreationFailed`]: LocalBackendErrorKind::DirectoryCreationFailed - fn create(&self) -> Result<()> { + /// * If the directory could not be created. + fn create(&self) -> RusticResult<()> { trace!("creating repo at {:?}", self.path); - fs::create_dir_all(&self.path).map_err(LocalBackendErrorKind::DirectoryCreationFailed)?; + fs::create_dir_all(&self.path).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to create the directory `{path}`. Please check the path and try again.", + err, + ) + .attach_context("path", self.path.display().to_string()) + })?; for tpe in ALL_FILE_TYPES { - fs::create_dir_all(self.path.join(tpe.dirname())) - .map_err(LocalBackendErrorKind::DirectoryCreationFailed)?; + let path = self.path.join(tpe.dirname()); + fs::create_dir_all(path.clone()).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to create the directory `{path}`. Please check the path and try again.", + err, + ) + .attach_context("path", path.display().to_string()) + })?; } + for i in 0u8..=255 { - fs::create_dir_all(self.path.join("data").join(hex::encode([i]))) - .map_err(LocalBackendErrorKind::DirectoryCreationFailed)?; + let path = self.path.join("data").join(hex::encode([i])); + fs::create_dir_all(path.clone()).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to create the directory `{path}`. Please check the path and try again.", + err, + ) + .attach_context("path", path.display().to_string()) + })?; } Ok(()) } @@ -328,36 +454,71 @@ impl WriteBackend for LocalBackend { /// /// # Errors /// - /// * [`LocalBackendErrorKind::OpeningFileFailed`] - If the file could not be opened. - /// * [`LocalBackendErrorKind::FromTryIntError`] - If the length of the bytes could not be converted to u64. - /// * [`LocalBackendErrorKind::SettingFileLengthFailed`] - If the length of the file could not be set. - /// * [`LocalBackendErrorKind::CouldNotWriteToBuffer`] - If the bytes could not be written to the file. - /// * [`LocalBackendErrorKind::SyncingOfOsMetadataFailed`] - If the metadata of the file could not be synced. - /// - /// [`LocalBackendErrorKind::OpeningFileFailed`]: LocalBackendErrorKind::OpeningFileFailed - /// [`LocalBackendErrorKind::FromTryIntError`]: LocalBackendErrorKind::FromTryIntError - /// [`LocalBackendErrorKind::SettingFileLengthFailed`]: LocalBackendErrorKind::SettingFileLengthFailed - /// [`LocalBackendErrorKind::CouldNotWriteToBuffer`]: LocalBackendErrorKind::CouldNotWriteToBuffer - /// [`LocalBackendErrorKind::SyncingOfOsMetadataFailed`]: LocalBackendErrorKind::SyncingOfOsMetadataFailed - fn write_bytes(&self, tpe: FileType, id: &Id, _cacheable: bool, buf: Bytes) -> Result<()> { + /// * If the file could not be opened. + /// * If the length of the bytes could not be converted to u64. + /// * If the length of the file could not be set. + /// * If the bytes could not be written to the file. + /// * If the OS Metadata could not be synced to disk. 
+ fn write_bytes( + &self, + tpe: FileType, + id: &Id, + _cacheable: bool, + buf: Bytes, + ) -> RusticResult<()> { trace!("writing tpe: {:?}, id: {}", &tpe, &id); let filename = self.path(tpe, id); + let mut file = fs::OpenOptions::new() .create(true) .truncate(true) .write(true) .open(&filename) - .map_err(LocalBackendErrorKind::OpeningFileFailed)?; - file.set_len( - buf.len() - .try_into() - .map_err(LocalBackendErrorKind::FromTryIntError)?, - ) - .map_err(LocalBackendErrorKind::SettingFileLengthFailed)?; - file.write_all(&buf) - .map_err(LocalBackendErrorKind::CouldNotWriteToBuffer)?; - file.sync_all() - .map_err(LocalBackendErrorKind::SyncingOfOsMetadataFailed)?; + .map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to open the file `{path}`. Please check the file and try again.", + err, + ) + .attach_context("path", filename.to_string_lossy()) + })?; + + file.set_len(buf.len().try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert length `{length}` to u64.", + err, + ) + .attach_context("length", buf.len().to_string()) + .ask_report() + })?) + .map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to set the length of the file `{path}`. Please check the file and try again.", + err, + ) + .attach_context("path", filename.to_string_lossy()) + })?; + + file.write_all(&buf).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to write to the buffer: `{path}`. Please check the file and try again.", + err, + ) + .attach_context("path", filename.to_string_lossy()) + })?; + + file.sync_all().map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to sync OS Metadata to disk: `{path}`. Please check the file and try again.", + err, + ) + .attach_context("path", filename.to_string_lossy()) + })?; + if let Some(command) = &self.post_create_command { if let Err(err) = Self::call_command(tpe, id, &filename, command) { warn!("post-create: {err}"); @@ -376,13 +537,18 @@ impl WriteBackend for LocalBackend { /// /// # Errors /// - /// * [`LocalBackendErrorKind::FileRemovalFailed`] - If the file could not be removed. - /// - /// [`LocalBackendErrorKind::FileRemovalFailed`]: LocalBackendErrorKind::FileRemovalFailed - fn remove(&self, tpe: FileType, id: &Id, _cacheable: bool) -> Result<()> { + /// * If the file could not be removed. + fn remove(&self, tpe: FileType, id: &Id, _cacheable: bool) -> RusticResult<()> { trace!("removing tpe: {:?}, id: {}", &tpe, &id); let filename = self.path(tpe, id); - fs::remove_file(&filename).map_err(LocalBackendErrorKind::FileRemovalFailed)?; + fs::remove_file(&filename).map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Failed to remove the file `{path}`. Was the file already removed or is it in use? Please check the file and remove it manually.", + err + ) + .attach_context("path", filename.to_string_lossy()) + )?; if let Some(command) = &self.post_delete_command { if let Err(err) = Self::call_command(tpe, id, &filename, command) { warn!("post-delete: {err}"); diff --git a/crates/backend/src/opendal.rs b/crates/backend/src/opendal.rs index 275b9429..e1fb71fa 100644 --- a/crates/backend/src/opendal.rs +++ b/crates/backend/src/opendal.rs @@ -1,19 +1,20 @@ /// `OpenDAL` backend for rustic. 
use std::{collections::HashMap, str::FromStr, sync::OnceLock}; -use anyhow::{anyhow, Error, Result}; use bytes::Bytes; use bytesize::ByteSize; -use log::trace; +use log::{error, trace}; use opendal::{ layers::{BlockingLayer, ConcurrentLimitLayer, LoggingLayer, RetryLayer, ThrottleLayer}, - BlockingOperator, ErrorKind, Metakey, Operator, Scheme, + BlockingOperator, Metakey, Operator, Scheme, }; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; use tokio::runtime::Runtime; use typed_path::UnixPathBuf; -use rustic_core::{FileType, Id, ReadBackend, WriteBackend, ALL_FILE_TYPES}; +use rustic_core::{ + ErrorKind, FileType, Id, ReadBackend, RusticError, RusticResult, WriteBackend, ALL_FILE_TYPES, +}; mod constants { /// Default number of retries @@ -46,16 +47,42 @@ pub struct Throttle { } impl FromStr for Throttle { - type Err = Error; - fn from_str(s: &str) -> Result { + type Err = Box; + fn from_str(s: &str) -> Result { let mut values = s .split(',') - .map(|s| ByteSize::from_str(s.trim()).map_err(|err| anyhow!("Error: {err}"))) - .map(|b| -> Result { Ok(b?.as_u64().try_into()?) }); + .map(|s| { + ByteSize::from_str(s.trim()).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Parsing ByteSize from throttle string `{string}` failed", + err, + ) + .attach_context("string", s) + }) + }) + .map(|b| -> RusticResult { + let bytesize = b?.as_u64(); + bytesize.try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Converting ByteSize `{bytesize}` to u32 failed", + err, + ) + .attach_context("bytesize", bytesize.to_string()) + }) + }); + let bandwidth = values .next() - .ok_or_else(|| anyhow!("no bandwidth given"))??; - let burst = values.next().ok_or_else(|| anyhow!("no burst given"))??; + .transpose()? + .ok_or_else(|| RusticError::new(ErrorKind::MissingInput, "No bandwidth given."))?; + + let burst = values + .next() + .transpose()? + .ok_or_else(|| RusticError::new(ErrorKind::MissingInput, "No burst given."))?; + Ok(Self { bandwidth, burst }) } } @@ -70,20 +97,36 @@ impl OpenDALBackend { /// /// # Errors /// - /// If the path is not a valid `OpenDAL` path, an error is returned. + /// * If the path is not a valid `OpenDAL` path. /// /// # Returns /// /// A new `OpenDAL` backend. - pub fn new(path: impl AsRef, options: HashMap) -> Result { + pub fn new(path: impl AsRef, options: HashMap) -> RusticResult { let max_retries = match options.get("retry").map(String::as_str) { Some("false" | "off") => 0, None | Some("default") => constants::DEFAULT_RETRY, - Some(value) => usize::from_str(value)?, + Some(value) => usize::from_str(value).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Parsing retry value `{value}` failed, the value must be a valid integer.", + err, + ) + .attach_context("value", value.to_string()) + })?, }; let connections = options .get("connections") - .map(|c| usize::from_str(c)) + .map(|c| { + usize::from_str(c).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Parsing connections value `{value}` failed, the value must be a valid integer.", + err, + ) + .attach_context("value", c.to_string()) + }) + }) .transpose()?; let throttle = options @@ -91,8 +134,24 @@ impl OpenDALBackend { .map(|t| Throttle::from_str(t)) .transpose()?; - let schema = Scheme::from_str(path.as_ref())?; - let mut operator = Operator::via_iter(schema, options)? 
+ let schema = Scheme::from_str(path.as_ref()).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Parsing scheme from path `{path}` failed, the path must contain a valid scheme.", + err, + ) + .attach_context("path", path.as_ref().to_string()) + })?; + let mut operator = Operator::via_iter(schema, options) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Creating Operator from path `{path}` failed. Please check the given schema and options.", + err, + ) + .attach_context("path", path.as_ref().to_string()) + .attach_context("schema", schema.to_string()) + })? .layer(RetryLayer::new().with_max_times(max_retries).with_jitter()); if let Some(Throttle { bandwidth, burst }) = throttle { @@ -106,7 +165,10 @@ impl OpenDALBackend { let _guard = runtime().enter(); let operator = operator .layer(LoggingLayer::default()) - .layer(BlockingLayer::create()?) + .layer(BlockingLayer::create().map_err(|err| { + RusticError::with_source(ErrorKind::Backend, "Creating BlockingLayer failed.", err) + .ask_report() + })?) .blocking(); Ok(Self { operator }) @@ -156,21 +218,41 @@ impl ReadBackend for OpenDALBackend { /// # Notes /// /// If the file type is `FileType::Config`, this will return a list with a single default id. - fn list(&self, tpe: FileType) -> Result> { + fn list(&self, tpe: FileType) -> RusticResult> { trace!("listing tpe: {tpe:?}"); if tpe == FileType::Config { - return Ok(if self.operator.is_exist("config")? { - vec![Id::default()] - } else { - Vec::new() - }); + return Ok( + if self.operator.is_exist("config").map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Path `config` does not exist.", + err, + ) + .ask_report() + })? { + vec![Id::default()] + } else { + Vec::new() + }, + ); } + let path = tpe.dirname().to_string() + "/"; + Ok(self .operator - .list_with(&(tpe.dirname().to_string() + "/")) + .list_with(&path) .recursive(true) - .call()? + .call() + .map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Listing all files of `{path}` failed in the backend. Please check if the given path is correct.", + err, + ) + .attach_context("path", path) + .attach_context("type", tpe.to_string()) + })? .into_iter() .filter(|e| e.metadata().is_file()) .filter_map(|e| e.name().parse().ok()) @@ -183,35 +265,94 @@ impl ReadBackend for OpenDALBackend { /// /// * `tpe` - The type of the files to list. /// - fn list_with_size(&self, tpe: FileType) -> Result> { + fn list_with_size(&self, tpe: FileType) -> RusticResult> { trace!("listing tpe: {tpe:?}"); if tpe == FileType::Config { return match self.operator.stat("config") { - Ok(entry) => Ok(vec![(Id::default(), entry.content_length().try_into()?)]), - Err(err) if err.kind() == ErrorKind::NotFound => Ok(Vec::new()), - Err(err) => Err(err.into()), + Ok(entry) => Ok(vec![( + Id::default(), + entry.content_length().try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Parsing content length `{length}` failed", + err, + ) + .attach_context("length", entry.content_length().to_string()) + })?, + )]), + Err(err) if err.kind() == opendal::ErrorKind::NotFound => Ok(Vec::new()), + Err(err) => Err(err).map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Getting Metadata of type `{type}` failed in the backend. 
Please check if `{type}` exists.", + err, + ) + .attach_context("type", tpe.to_string()) + ), }; } + let path = tpe.dirname().to_string() + "/"; Ok(self .operator - .list_with(&(tpe.dirname().to_string() + "/")) + .list_with(&path) .recursive(true) .metakey(Metakey::ContentLength) - .call()? + .call() + .map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Listing all files of `{type}` in directory `{path}` and their sizes failed in the backend. Please check if the given path is correct.", + err, + ) + .attach_context("path", path) + .attach_context("type", tpe.to_string()) + )? .into_iter() .filter(|e| e.metadata().is_file()) - .map(|e| -> Result<(Id, u32)> { - Ok((e.name().parse()?, e.metadata().content_length().try_into()?)) + .map(|e| -> RusticResult<(Id, u32)> { + Ok(( + e.name().parse()?, + e.metadata() + .content_length() + .try_into() + .map_err(|err| + RusticError::with_source( + ErrorKind::Internal, + "Parsing content length `{length}` failed", + err, + ) + .attach_context("length", e.metadata().content_length().to_string()) + )?, + )) + }) + .inspect(|r| { + if let Err(err) = r { + error!("Error while listing files: {err:?}"); + } }) - .filter_map(Result::ok) + .filter_map(RusticResult::ok) .collect()) } - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { trace!("reading tpe: {tpe:?}, id: {id}"); - Ok(self.operator.read(&self.path(tpe, id))?.to_bytes()) + let path = self.path(tpe, id); + Ok(self + .operator + .read(&path) + .map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Reading file `{path}` failed in the backend. Please check if the given path is correct.", + err, + ) + .attach_context("path", path) + .attach_context("type", tpe.to_string()) + .attach_context("id", id.to_string()) + )? + .to_bytes()) } fn read_partial( @@ -221,37 +362,72 @@ impl ReadBackend for OpenDALBackend { _cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { trace!("reading tpe: {tpe:?}, id: {id}, offset: {offset}, length: {length}"); let range = u64::from(offset)..u64::from(offset + length); + let path = self.path(tpe, id); + Ok(self .operator - .read_with(&self.path(tpe, id)) + .read_with(&path) .range(range) - .call()? + .call() + .map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Partially reading file `{path}` failed in the backend. Please check if the given path is correct.", + err, + ) + .attach_context("path", path) + .attach_context("type", tpe.to_string()) + .attach_context("id", id.to_string()) + .attach_context("offset", offset.to_string()) + .attach_context("length", length.to_string()) + )? .to_bytes()) } } impl WriteBackend for OpenDALBackend { /// Create a repository on the backend. - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { trace!("creating repo at {:?}", self.location()); for tpe in ALL_FILE_TYPES { + let path = tpe.dirname().to_string() + "/"; self.operator - .create_dir(&(tpe.dirname().to_string() + "/"))?; + .create_dir(&path) + .map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Creating directory `{path}` failed in the backend `{location}`. Please check if the given path is correct.", + err, + ) + .attach_context("path", path) + .attach_context("location", self.location()) + .attach_context("type", tpe.to_string()) + )?; } // creating 256 dirs can be slow on remote backends, hence we parallelize it. 
- (0u8..=255).into_par_iter().try_for_each(|i| { - self.operator.create_dir( - &(UnixPathBuf::from("data") - .join(hex::encode([i])) - .to_string_lossy() - .to_string() - + "/"), - ) - })?; + (0u8..=255) + .into_par_iter() + .try_for_each(|i| { + let path = UnixPathBuf::from("data") + .join(hex::encode([i])) + .to_string_lossy() + .to_string() + + "/"; + + self.operator.create_dir(&path).map_err(|err| + RusticError::with_source( + ErrorKind::Backend, + "Creating directory `{path}` failed in the backend `{location}`. Please check if the given path is correct.", + err, + ) + .attach_context("path", path) + .attach_context("location", self.location()) + ) + })?; Ok(()) } @@ -264,10 +440,26 @@ impl WriteBackend for OpenDALBackend { /// * `id` - The id of the file. /// * `cacheable` - Whether the file is cacheable. /// * `buf` - The bytes to write. - fn write_bytes(&self, tpe: FileType, id: &Id, _cacheable: bool, buf: Bytes) -> Result<()> { + fn write_bytes( + &self, + tpe: FileType, + id: &Id, + _cacheable: bool, + buf: Bytes, + ) -> RusticResult<()> { trace!("writing tpe: {:?}, id: {}", &tpe, &id); let filename = self.path(tpe, id); - self.operator.write(&filename, buf)?; + self.operator.write(&filename, buf).map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Writing file `{path}` failed in the backend. Please check if the given path is correct.", + err, + ) + .attach_context("path", filename) + .attach_context("type", tpe.to_string()) + .attach_context("id", id.to_string()) + })?; + Ok(()) } @@ -278,10 +470,19 @@ impl WriteBackend for OpenDALBackend { /// * `tpe` - The type of the file. /// * `id` - The id of the file. /// * `cacheable` - Whether the file is cacheable. - fn remove(&self, tpe: FileType, id: &Id, _cacheable: bool) -> Result<()> { + fn remove(&self, tpe: FileType, id: &Id, _cacheable: bool) -> RusticResult<()> { trace!("removing tpe: {:?}, id: {}", &tpe, &id); let filename = self.path(tpe, id); - self.operator.delete(&filename)?; + self.operator.delete(&filename).map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Deleting file `{path}` failed in the backend. Please check if the given path is correct.", + err, + ) + .attach_context("path", filename) + .attach_context("type", tpe.to_string()) + .attach_context("id", id.to_string()) + })?; Ok(()) } } @@ -289,6 +490,7 @@ impl WriteBackend for OpenDALBackend { #[cfg(test)] mod tests { use super::*; + use anyhow::Result; use rstest::rstest; use serde::Deserialize; use std::{fs, path::PathBuf}; diff --git a/crates/backend/src/rclone.rs b/crates/backend/src/rclone.rs index 84e72a86..24836cf4 100644 --- a/crates/backend/src/rclone.rs +++ b/crates/backend/src/rclone.rs @@ -5,7 +5,6 @@ use std::{ thread::JoinHandle, }; -use anyhow::Result; use bytes::Bytes; use constants::DEFAULT_COMMAND; use log::{debug, info}; @@ -15,9 +14,11 @@ use rand::{ }; use semver::{BuildMetadata, Prerelease, Version, VersionReq}; -use crate::{error::RcloneErrorKind, rest::RestBackend}; +use crate::rest::RestBackend; -use rustic_core::{CommandInput, FileType, Id, ReadBackend, WriteBackend}; +use rustic_core::{ + CommandInput, ErrorKind, FileType, Id, ReadBackend, RusticError, RusticResult, WriteBackend, +}; pub(super) mod constants { /// The default command called if no other is specified @@ -57,28 +58,37 @@ impl Drop for RcloneBackend { /// /// # Errors /// -/// * [`RcloneErrorKind::FromIoError`] - If the rclone version could not be determined. 
-/// * [`RcloneErrorKind::FromUtf8Error`] - If the rclone version could not be determined. -/// * [`RcloneErrorKind::NoOutputForRcloneVersion`] - If the rclone version could not be determined. -/// * [`RcloneErrorKind::FromParseVersion`] - If the rclone version could not be determined. +/// * If the rclone version could not be determined or parsed. +/// * If the rclone version is not supported. /// /// # Returns /// -/// * `Ok(())` - If the rclone version is supported. -/// -/// [`RcloneErrorKind::FromIoError`]: RcloneErrorKind::FromIoError -/// [`RcloneErrorKind::FromUtf8Error`]: RcloneErrorKind::FromUtf8Error -/// [`RcloneErrorKind::NoOutputForRcloneVersion`]: RcloneErrorKind::NoOutputForRcloneVersion -/// [`RcloneErrorKind::FromParseVersion`]: RcloneErrorKind::FromParseVersion -fn check_clone_version(rclone_version_output: &[u8]) -> Result<()> { +/// * Ok(()), if the rclone version is supported. +fn check_clone_version(rclone_version_output: &[u8]) -> RusticResult<()> { let rclone_version = std::str::from_utf8(rclone_version_output) - .map_err(RcloneErrorKind::FromUtf8Error)? + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Expected rclone version to be valid utf8, but it was not. Please check the `rclone version` output manually.", + err + ) + })? .lines() .next() - .ok_or_else(|| RcloneErrorKind::NoOutputForRcloneVersion)? + .ok_or_else(|| { + RusticError::new( + ErrorKind::Internal, + "Expected rclone version to have at least one line, but it did not. Please check the `rclone version` output manually.", + ) + })? .trim_start_matches(|c: char| !c.is_numeric()); - let mut parsed_version = Version::parse(rclone_version)?; + let mut parsed_version = Version::parse(rclone_version).map_err(|err| { + RusticError::with_source(ErrorKind::Internal, + "Error parsing rclone version `{version}`. This should not happen. Please check the `rclone version` output manually.", + err) + .attach_context("version", rclone_version) + })?; // we need to set the pre and build fields to empty to make the comparison work // otherwise the comparison will take the pre and build fields into account @@ -91,10 +101,21 @@ fn check_clone_version(rclone_version_output: &[u8]) -> Result<()> { // we hard fail here to prevent this, as we can't guarantee the security of the data // also because 1.52.2 has been released on Jun 24, 2020, we can assume that this is a // reasonable lower bound for the version - if VersionReq::parse("<1.52.2")?.matches(&parsed_version) { - return Err( - RcloneErrorKind::RCloneWithoutAuthentication(rclone_version.to_string()).into(), - ); + if VersionReq::parse("<1.52.2") + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Error parsing version requirement. This should not happen.", + err, + ) + })? + .matches(&parsed_version) + { + return Err(RusticError::new( + ErrorKind::Unsupported, + "Unsupported rclone version `{version}`. We must not use rclone without authentication! Please upgrade to rclone >= 1.52.2!", + ) + .attach_context("version", rclone_version.to_string())); } Ok(()) @@ -109,15 +130,10 @@ impl RcloneBackend { /// /// # Errors /// - /// * [`RcloneErrorKind::FromIoError`] - If the rclone version could not be determined. - /// * [`RcloneErrorKind::NoStdOutForRclone`] - If the rclone version could not be determined. - /// * [`RcloneErrorKind::RCloneExitWithBadStatus`] - If rclone exited with a bad status. - /// * [`RcloneErrorKind::UrlNotStartingWithHttp`] - If the URL does not start with `http`. 
- /// - /// [`RcloneErrorKind::FromIoError`]: RcloneErrorKind::FromIoError - /// [`RcloneErrorKind::NoStdOutForRclone`]: RcloneErrorKind::NoStdOutForRclone - /// [`RcloneErrorKind::RCloneExitWithBadStatus`]: RcloneErrorKind::RCloneExitWithBadStatus - /// [`RcloneErrorKind::UrlNotStartingWithHttp`]: RcloneErrorKind::UrlNotStartingWithHttp + /// * If the rclone version could not be determined. + /// * If the rclone version could not be determined. + /// * If rclone exited with a bad status. + /// * If the URL does not start with `http`. /// /// # Returns /// @@ -125,13 +141,20 @@ impl RcloneBackend { /// /// # Panics /// - /// If the rclone command is not found. + /// * If the rclone command is not found. // TODO: This should be an error, not a panic. - pub fn new(url: impl AsRef, options: HashMap) -> Result { + #[allow(clippy::too_many_lines)] + pub fn new(url: impl AsRef, options: HashMap) -> RusticResult { let rclone_command = options.get("rclone-command"); let use_password = options .get("use-password") - .map(|v| v.parse()) + .map(|v| v.parse().map_err(|err| + RusticError::with_source( + ErrorKind::InvalidInput, + "Expected 'use-password' to be a boolean, but it was not. Please check the configuration file.", + err + ) + )) .transpose()? .unwrap_or(true); @@ -139,7 +162,11 @@ impl RcloneBackend { let rclone_version_output = Command::new("rclone") .arg("version") .output() - .map_err(RcloneErrorKind::FromIoError)? + .map_err(|err| RusticError::with_source( + ErrorKind::ExternalCommand, + "Experienced an error while running `rclone version` command. Please check if rclone is installed correctly and is in your PATH.", + err + ))? .stdout; // if we want to use a password and rclone_command is not explicitly set, @@ -154,13 +181,18 @@ impl RcloneBackend { let mut rclone_command = rclone_command.map_or(DEFAULT_COMMAND.to_string(), Clone::clone); rclone_command.push(' '); rclone_command.push_str(url.as_ref()); - let rclone_command: CommandInput = rclone_command.parse()?; + let rclone_command: CommandInput = rclone_command.parse().map_err( + |err| RusticError::with_source( + ErrorKind::InvalidInput, + "Expected rclone command to be valid, but it was not. Please check the configuration file.", + err + ) + )?; debug!("starting rclone via {rclone_command:?}"); let mut command = Command::new(rclone_command.command()); if use_password { - // TODO: We should handle errors here _ = command .env("RCLONE_USER", &user) .env("RCLONE_PASS", &password); @@ -170,25 +202,54 @@ impl RcloneBackend { .args(rclone_command.args()) .stderr(Stdio::piped()) .spawn() - .map_err(RcloneErrorKind::FromIoError)?; + .map_err(|err| + RusticError::with_source( + ErrorKind::ExternalCommand, + "Experienced an error while running rclone: `{rclone_command}`. Please check if rclone is installed and working correctly.", + err + ) + .attach_context("rclone_command", rclone_command.to_string()) + )?; let mut stderr = BufReader::new( child .stderr .take() - .ok_or_else(|| RcloneErrorKind::NoStdOutForRclone)?, + .ok_or_else(|| RusticError::new( + ErrorKind::ExternalCommand, + "Could not get stderr of rclone. Please check if rclone is installed and working correctly.", + ))?, ); let mut rest_url = match options.get("rest-url") { None => { loop { - if let Some(status) = child.try_wait().map_err(RcloneErrorKind::FromIoError)? 
{ - return Err(RcloneErrorKind::RCloneExitWithBadStatus(status).into()); + if let Some(status) = child.try_wait().map_err(|err| + RusticError::with_source( + ErrorKind::ExternalCommand, + "Experienced an error while running rclone. Please check if rclone is installed and working correctly.", + err + ) + )? { + return Err( + RusticError::new( + ErrorKind::ExternalCommand, + "rclone exited before it could start the REST server: `{exit_status}`. Please check the exit status for more information.", + ).attach_context("exit_status", status.to_string()) + ); } let mut line = String::new(); + _ = stderr .read_line(&mut line) - .map_err(RcloneErrorKind::FromIoError)?; + .map_err(|err| + RusticError::with_source( + ErrorKind::InputOutput, + "Experienced an error while reading rclone output. Please check if rclone is installed and working correctly.", + err + ) + )?; + match line.find(constants::SEARCHSTRING) { Some(result) => { if let Some(url) = line.get(result + constants::SEARCHSTRING.len()..) { @@ -207,7 +268,11 @@ impl RcloneBackend { if use_password { if !rest_url.starts_with("http://") { - return Err(RcloneErrorKind::UrlNotStartingWithHttp(rest_url).into()); + return Err(RusticError::new( + ErrorKind::InputOutput, + "Please make sure, the URL `{url}` starts with 'http://'!", + ) + .attach_context("url", rest_url)); } rest_url = format!("http://{user}:{password}@{}", &rest_url[7..]); } @@ -247,7 +312,7 @@ impl ReadBackend for RcloneBackend { /// * `tpe` - The type of the file. /// /// If the size could not be determined. - fn list_with_size(&self, tpe: FileType) -> Result> { + fn list_with_size(&self, tpe: FileType) -> RusticResult> { self.rest.list_with_size(tpe) } @@ -261,7 +326,7 @@ impl ReadBackend for RcloneBackend { /// # Returns /// /// The data read. - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { self.rest.read_full(tpe, id) } @@ -285,14 +350,14 @@ impl ReadBackend for RcloneBackend { cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { self.rest.read_partial(tpe, id, cacheable, offset, length) } } impl WriteBackend for RcloneBackend { /// Creates a new file. - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { self.rest.create() } @@ -304,7 +369,7 @@ impl WriteBackend for RcloneBackend { /// * `id` - The id of the file. /// * `cacheable` - Whether the data should be cached. /// * `buf` - The data to write. - fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> Result<()> { + fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> RusticResult<()> { self.rest.write_bytes(tpe, id, cacheable, buf) } @@ -315,7 +380,7 @@ impl WriteBackend for RcloneBackend { /// * `tpe` - The type of the file. /// * `id` - The id of the file. /// * `cacheable` - Whether the file is cacheable. 
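The startup loop above watches the spawned rclone process: if the child exits before the REST server is up, that is reported as an error; otherwise stderr is read line by line until the line carrying the serving URL appears. A self-contained sketch of that loop — the `marker` parameter stands in for the crate's `SEARCHSTRING` constant, whose value is not shown in this diff:

```rust
use std::io::{BufRead, BufReader};
use std::process::Child;

// Poll the child for early exit while scanning its stderr for the serving URL.
// Returns Ok(None) if the process exits or stderr closes before the URL shows up.
fn wait_for_rest_url(child: &mut Child, marker: &str) -> std::io::Result<Option<String>> {
    let stderr = child.stderr.take().expect("stderr was set to Stdio::piped()");
    let mut reader = BufReader::new(stderr);
    loop {
        if child.try_wait()?.is_some() {
            return Ok(None); // rclone exited before serving the REST API
        }
        let mut line = String::new();
        if reader.read_line(&mut line)? == 0 {
            return Ok(None); // EOF on stderr
        }
        if let Some(pos) = line.find(marker) {
            return Ok(line
                .get(pos + marker.len()..)
                .map(|url| url.trim().to_string()));
        }
    }
}
```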
- fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> Result<()> { + fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> RusticResult<()> { self.rest.remove(tpe, id, cacheable) } } diff --git a/crates/backend/src/rest.rs b/crates/backend/src/rest.rs index 3b0b5683..2293afa6 100644 --- a/crates/backend/src/rest.rs +++ b/crates/backend/src/rest.rs @@ -1,7 +1,6 @@ use std::str::FromStr; use std::time::Duration; -use anyhow::Result; use backoff::{backoff::Backoff, ExponentialBackoff, ExponentialBackoffBuilder}; use bytes::Bytes; use log::{trace, warn}; @@ -12,13 +11,21 @@ use reqwest::{ }; use serde::Deserialize; -use crate::error::RestErrorKind; +use rustic_core::{ErrorKind, FileType, Id, ReadBackend, RusticError, RusticResult, WriteBackend}; -use rustic_core::{FileType, Id, ReadBackend, WriteBackend}; +/// joining URL failed on: `{0}` +#[derive(thiserror::Error, Clone, Copy, Debug, displaydoc::Display)] +pub struct JoiningUrlFailedError(url::ParseError); + +pub(super) mod constants { + use std::time::Duration; -mod consts { /// Default number of retries pub(super) const DEFAULT_RETRY: usize = 5; + + /// Default timeout for the client + /// This is set to 10 minutes + pub(super) const DEFAULT_TIMEOUT: Duration = Duration::from_secs(600); } // trait CheckError to add user-defined method check_error on Response @@ -66,7 +73,7 @@ struct LimitRetryBackoff { impl Default for LimitRetryBackoff { fn default() -> Self { Self { - max_retries: consts::DEFAULT_RETRY, + max_retries: constants::DEFAULT_RETRY, retries: 0, exp: ExponentialBackoffBuilder::new() .with_max_elapsed_time(None) // no maximum elapsed time; we count number of retires @@ -130,53 +137,78 @@ impl RestBackend { /// /// # Errors /// - /// * [`RestErrorKind::UrlParsingFailed`] - If the url could not be parsed. - /// * [`RestErrorKind::BuildingClientFailed`] - If the client could not be built. - /// - /// [`RestErrorKind::UrlParsingFailed`]: RestErrorKind::UrlParsingFailed - /// [`RestErrorKind::BuildingClientFailed`]: RestErrorKind::BuildingClientFailed + /// * If the url could not be parsed. + /// * If the client could not be built. pub fn new( url: impl AsRef, options: impl IntoIterator, - ) -> Result { - let url = url.as_ref(); + ) -> RusticResult { + let url = url.as_ref().to_string(); + let url = if url.ends_with('/') { - Url::parse(url).map_err(RestErrorKind::UrlParsingFailed)? + url } else { // add a trailing '/' if there is none - let mut url = url.to_string(); + let mut url = url; url.push('/'); - Url::parse(&url).map_err(RestErrorKind::UrlParsingFailed)? 
+ url }; + let url = Url::parse(&url).map_err(|err| { + RusticError::with_source(ErrorKind::InvalidInput, "URL `{url}` parsing failed", err) + .attach_context("url", url) + })?; + let mut headers = HeaderMap::new(); _ = headers.insert("User-Agent", HeaderValue::from_static("rustic")); let mut client = ClientBuilder::new() .default_headers(headers) - .timeout(Duration::from_secs(600)) // set default timeout to 10 minutes (we can have *large* packfiles) + .timeout(constants::DEFAULT_TIMEOUT) // set default timeout to 10 minutes (we can have *large* packfiles) .build() - .map_err(RestErrorKind::BuildingClientFailed)?; + .map_err(|err| { + RusticError::with_source(ErrorKind::Backend, "Failed to build HTTP client", err) + })?; let mut backoff = LimitRetryBackoff::default(); + // FIXME: If we have multiple times the same option, this could lead to unexpected behavior for (option, value) in options { if option == "retry" { let max_retries = match value.as_str() { "false" | "off" => 0, - "default" => consts::DEFAULT_RETRY, - _ => usize::from_str(&value) - .map_err(|_| RestErrorKind::NotSupportedForRetry(value))?, + "default" => constants::DEFAULT_RETRY, + _ => usize::from_str(&value).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Cannot parse value `{value}`, invalid value for option `{option}`.", + err, + ) + .attach_context("value", value) + .attach_context("option", "retry") + })?, }; backoff.max_retries = max_retries; } else if option == "timeout" { - let timeout = match humantime::Duration::from_str(&value) { - Ok(val) => val, - Err(e) => return Err(RestErrorKind::CouldNotParseDuration(e).into()), - }; - client = match ClientBuilder::new().timeout(*timeout).build() { - Ok(val) => val, - Err(err) => return Err(RestErrorKind::BuildingClientFailed(err).into()), - }; + let timeout = humantime::Duration::from_str(&value).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Could not parse value `{value}` as `humantime` duration. Invalid value for option `{option}`.", + err, + ) + .attach_context("value", value) + .attach_context("option", "timeout") + })?; + + client = ClientBuilder::new() + .timeout(*timeout) + .build() + .map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Failed to build HTTP client", + err, + ) + })?; } } @@ -196,8 +228,8 @@ impl RestBackend { /// /// # Errors /// - /// If the url could not be created. - fn url(&self, tpe: FileType, id: &Id) -> Result { + /// * If the url could not be joined/created. + fn url(&self, tpe: FileType, id: &Id) -> Result { let id_path = if tpe == FileType::Config { "config".to_string() } else { @@ -207,10 +239,8 @@ impl RestBackend { path.push_str(&hex_id); path }; - Ok(self - .url - .join(&id_path) - .map_err(RestErrorKind::JoiningUrlFailed)?) + + self.url.join(&id_path).map_err(JoiningUrlFailedError) } } @@ -234,7 +264,7 @@ impl ReadBackend for RestBackend { /// /// # Errors /// - /// * [`RestErrorKind::JoiningUrlFailed`] - If the url could not be created. + /// * If the url could not be created. /// /// # Notes /// @@ -243,9 +273,7 @@ impl ReadBackend for RestBackend { /// # Returns /// /// A vector of tuples containing the id and size of the files. 
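The trailing-slash normalization above matters because `Url::join` on a base without a trailing `/` replaces the last path segment instead of appending to it. A small sketch of the normalization, assuming the `url` crate:

```rust
use url::Url;

// Ensure the base URL ends with '/', so a later `url.join("data/...")`
// appends to the path instead of replacing its last segment.
fn normalize_base_url(raw: &str) -> Result<Url, url::ParseError> {
    let mut raw = raw.to_string();
    if !raw.ends_with('/') {
        raw.push('/');
    }
    Url::parse(&raw)
}
```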
- /// - /// [`RestErrorKind::JoiningUrlFailed`]: RestErrorKind::JoiningUrlFailed - fn list_with_size(&self, tpe: FileType) -> Result> { + fn list_with_size(&self, tpe: FileType) -> RusticResult> { // format which is delivered by the REST-service #[derive(Deserialize)] struct ListEntry { @@ -254,18 +282,23 @@ impl ReadBackend for RestBackend { } trace!("listing tpe: {tpe:?}"); - let url = if tpe == FileType::Config { - self.url - .join("config") - .map_err(RestErrorKind::JoiningUrlFailed)? + + // TODO: Explain why we need special handling here + let path = if tpe == FileType::Config { + "config".to_string() } else { let mut path = tpe.dirname().to_string(); path.push('/'); - self.url - .join(&path) - .map_err(RestErrorKind::JoiningUrlFailed)? + path }; + let url = self.url.join(&path).map_err(|err| { + RusticError::with_source(ErrorKind::Internal, "Joining URL `{url}` failed", err) + .attach_context("url", self.url.as_str()) + .attach_context("tpe", tpe.to_string()) + .attach_context("tpe_dir", tpe.dirname().to_string()) + })?; + backoff::retry_notify( self.backoff.clone(), || { @@ -297,7 +330,7 @@ impl ReadBackend for RestBackend { }, notify, ) - .map_err(|e| RestErrorKind::BackoffError(e).into()) + .map_err(construct_backoff_error) } /// Returns the content of a file. @@ -309,14 +342,16 @@ impl ReadBackend for RestBackend { /// /// # Errors /// - /// * [`reqwest::Error`] - If the request failed. - /// * [`RestErrorKind::BackoffError`] - If the backoff failed. - /// - /// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + /// * If the request failed. + /// * If the backoff failed. + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { trace!("reading tpe: {tpe:?}, id: {id}"); - let url = self.url(tpe, id)?; - Ok(backoff::retry_notify( + + let url = self + .url(tpe, id) + .map_err(|err| construct_join_url_error(err, tpe, id, &self.url))?; + + backoff::retry_notify( self.backoff.clone(), || { Ok(self @@ -328,7 +363,7 @@ impl ReadBackend for RestBackend { }, notify, ) - .map_err(RestErrorKind::BackoffError)?) + .map_err(construct_backoff_error) } /// Returns a part of the content of a file. @@ -343,9 +378,7 @@ impl ReadBackend for RestBackend { /// /// # Errors /// - /// * [`RestErrorKind::BackoffError`] - If the backoff failed. - /// - /// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError + /// * If the backoff failed. fn read_partial( &self, tpe: FileType, @@ -353,12 +386,19 @@ impl ReadBackend for RestBackend { _cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { trace!("reading tpe: {tpe:?}, id: {id}, offset: {offset}, length: {length}"); let offset2 = offset + length - 1; let header_value = format!("bytes={offset}-{offset2}"); - let url = self.url(tpe, id)?; - Ok(backoff::retry_notify( + let url = self.url(tpe, id).map_err(|err| { + RusticError::with_source(ErrorKind::Internal, "Joining URL `{url}` failed", err) + .attach_context("url", self.url.as_str()) + .attach_context("tpe", tpe.to_string()) + .attach_context("tpe_dir", tpe.dirname().to_string()) + .attach_context("id", id.to_string()) + })?; + + backoff::retry_notify( self.backoff.clone(), || { Ok(self @@ -371,24 +411,49 @@ impl ReadBackend for RestBackend { }, notify, ) - .map_err(RestErrorKind::BackoffError)?) 
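All REST operations share the same retry shape: the request runs inside `backoff::retry_notify`, errors returned from the closure count as transient and are retried according to the backoff policy, and the `notify` callback logs each retry. A simplified sketch with a plain `ExponentialBackoff` instead of the crate's retry-limited policy; the blocking `reqwest` client and the `backoff` crate are assumed here:

```rust
use std::time::Duration;

use backoff::ExponentialBackoff;

// Fetch a URL, retrying transient failures with exponential backoff.
fn fetch_with_retry(
    client: &reqwest::blocking::Client,
    url: &url::Url,
) -> Result<bytes::Bytes, backoff::Error<reqwest::Error>> {
    backoff::retry_notify(
        ExponentialBackoff::default(),
        || Ok(client.get(url.clone()).send()?.error_for_status()?.bytes()?),
        |err, dur: Duration| eprintln!("retrying after {dur:?}: {err}"),
    )
}
```

The actual backend additionally routes responses through its `check_error` helper before deciding how an HTTP error status is surfaced to the retry logic.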
+ .map_err(construct_backoff_error) } } +fn construct_backoff_error(err: backoff::Error) -> Box { + RusticError::with_source( + ErrorKind::Backend, + "Backoff failed, please check the logs for more information.", + err, + ) +} + +fn construct_join_url_error( + err: JoiningUrlFailedError, + tpe: FileType, + id: &Id, + self_url: &Url, +) -> Box { + RusticError::with_source(ErrorKind::Internal, "Joining URL `{url}` failed", err) + .attach_context("url", self_url.as_str()) + .attach_context("tpe", tpe.to_string()) + .attach_context("tpe_dir", tpe.dirname().to_string()) + .attach_context("id", id.to_string()) +} + impl WriteBackend for RestBackend { /// Creates a new file. /// /// # Errors /// - /// * [`RestErrorKind::BackoffError`] - If the backoff failed. - /// - /// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError - fn create(&self) -> Result<()> { - let url = self - .url - .join("?create=true") - .map_err(RestErrorKind::JoiningUrlFailed)?; - Ok(backoff::retry_notify( + /// * If the backoff failed. + fn create(&self) -> RusticResult<()> { + let url = self.url.join("?create=true").map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Joining URL `{url}` with `{join_input}` failed", + err, + ) + .attach_context("url", self.url.as_str()) + .attach_context("join_input", "?create=true") + })?; + + backoff::retry_notify( self.backoff.clone(), || { _ = self.client.post(url.clone()).send()?.check_error()?; @@ -396,7 +461,7 @@ impl WriteBackend for RestBackend { }, notify, ) - .map_err(RestErrorKind::BackoffError)?) + .map_err(construct_backoff_error) } /// Writes bytes to the given file. @@ -410,22 +475,33 @@ impl WriteBackend for RestBackend { /// /// # Errors /// - /// * [`RestErrorKind::BackoffError`] - If the backoff failed. - /// - /// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError - fn write_bytes(&self, tpe: FileType, id: &Id, _cacheable: bool, buf: Bytes) -> Result<()> { + /// * If the backoff failed. + fn write_bytes( + &self, + tpe: FileType, + id: &Id, + _cacheable: bool, + buf: Bytes, + ) -> RusticResult<()> { trace!("writing tpe: {:?}, id: {}", &tpe, &id); - let req_builder = self.client.post(self.url(tpe, id)?).body(buf); - Ok(backoff::retry_notify( + let req_builder = self + .client + .post( + self.url(tpe, id) + .map_err(|err| construct_join_url_error(err, tpe, id, &self.url))?, + ) + .body(buf); + + backoff::retry_notify( self.backoff.clone(), || { - // Note: try_clone() always gives Some(_) as the body is Bytes which is clonable + // Note: try_clone() always gives Some(_) as the body is Bytes which is cloneable _ = req_builder.try_clone().unwrap().send()?.check_error()?; Ok(()) }, notify, ) - .map_err(RestErrorKind::BackoffError)?) + .map_err(construct_backoff_error) } /// Removes the given file. @@ -438,13 +514,14 @@ impl WriteBackend for RestBackend { /// /// # Errors /// - /// * [`RestErrorKind::BackoffError`] - If the backoff failed. - /// - /// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError - fn remove(&self, tpe: FileType, id: &Id, _cacheable: bool) -> Result<()> { + /// * If the backoff failed. 
+ fn remove(&self, tpe: FileType, id: &Id, _cacheable: bool) -> RusticResult<()> { trace!("removing tpe: {:?}, id: {}", &tpe, &id); - let url = self.url(tpe, id)?; - Ok(backoff::retry_notify( + let url = self + .url(tpe, id) + .map_err(|err| construct_join_url_error(err, tpe, id, &self.url))?; + + backoff::retry_notify( self.backoff.clone(), || { _ = self.client.delete(url.clone()).send()?.check_error()?; @@ -452,6 +529,6 @@ impl WriteBackend for RestBackend { }, notify, ) - .map_err(RestErrorKind::BackoffError)?) + .map_err(construct_backoff_error) } } diff --git a/crates/backend/src/util.rs b/crates/backend/src/util.rs index ecae470b..d1ff6928 100644 --- a/crates/backend/src/util.rs +++ b/crates/backend/src/util.rs @@ -1,8 +1,8 @@ use crate::SupportedBackend; -use anyhow::Result; +use rustic_core::{ErrorKind, RusticError, RusticResult}; /// A backend location. This is a string that represents the location of the backend. -#[derive(PartialEq, Eq, Debug)] +#[derive(PartialEq, Eq, Debug, Clone)] pub struct BackendLocation(String); impl std::ops::Deref for BackendLocation { @@ -34,7 +34,7 @@ impl std::fmt::Display for BackendLocation { /// /// # Errors /// -/// If the url is not a valid url, an error is returned. +/// * If the url is not a valid url, an error is returned. /// /// # Returns /// @@ -45,7 +45,7 @@ impl std::fmt::Display for BackendLocation { /// If the url is a windows path, the type will be "local". pub fn location_to_type_and_path( raw_location: &str, -) -> Result<(SupportedBackend, BackendLocation)> { +) -> RusticResult<(SupportedBackend, BackendLocation)> { match raw_location.split_once(':') { #[cfg(windows)] Some((drive_letter, _)) if drive_letter.len() == 1 && !raw_location.contains('/') => Ok(( @@ -58,7 +58,14 @@ pub fn location_to_type_and_path( BackendLocation(raw_location.to_string()), )), Some((scheme, path)) => Ok(( - SupportedBackend::try_from(scheme)?, + SupportedBackend::try_from(scheme).map_err(|err| { + RusticError::with_source( + ErrorKind::Unsupported, + "The backend type `{name}` is not supported. Please check the given backend and try again.", + err + ) + .attach_context("name", scheme) + })?, BackendLocation(path.to_string()), )), None => Ok(( diff --git a/crates/core/CHANGELOG.md b/crates/core/CHANGELOG.md index d27a9d92..d3e91dbb 100644 --- a/crates/core/CHANGELOG.md +++ b/crates/core/CHANGELOG.md @@ -93,9 +93,11 @@ All notable changes to this project will be documented in this file. ## [0.3.1](https://github.com/rustic-rs/rustic_core/compare/rustic_core-v0.3.0...rustic_core-v0.3.1) - 2024-09-06 ### Added + - Add autocompletion hints ([#257](https://github.com/rustic-rs/rustic_core/pull/257)) ### Fixed + - don't give invalid password error for other keyfile errors ([#247](https://github.com/rustic-rs/rustic_core/pull/247)) - adjust tests to new Rust version ([#259](https://github.com/rustic-rs/rustic_core/pull/259)) - fix FromStr for SnapshotGroupCriterion ([#254](https://github.com/rustic-rs/rustic_core/pull/254)) @@ -106,6 +108,7 @@ All notable changes to this project will be documented in this file. 
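Returning to the `location_to_type_and_path` change in `crates/backend/src/util.rs` above: a repository location is split on the first `:` into a backend name and its location, and a bare path falls back to the local backend (as the doc comment on the Windows case indicates). A reduced sketch, leaving out the drive-letter special case and the backend-name validation; the example location string is only illustrative:

```rust
// "rclone:remote:bucket" -> ("rclone", "remote:bucket"), "/srv/repo" -> ("local", "/srv/repo")
fn split_location(raw: &str) -> (&str, &str) {
    match raw.split_once(':') {
        Some((scheme, path)) => (scheme, path),
        None => ("local", raw),
    }
}
```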
## [0.3.0](https://github.com/rustic-rs/rustic_core/compare/rustic_core-v0.2.0...rustic_core-v0.3.0) - 2024-08-18 ### Added + - *(forget)* [**breaking**] Make keep-* Options and add keep-none ([#238](https://github.com/rustic-rs/rustic_core/pull/238)) - add search methods to Repository ([#212](https://github.com/rustic-rs/rustic_core/pull/212)) - [**breaking**] Allow specifying many options in config profile without array ([#211](https://github.com/rustic-rs/rustic_core/pull/211)) @@ -118,6 +121,7 @@ All notable changes to this project will be documented in this file. - Add append-only repository mode ([#164](https://github.com/rustic-rs/rustic_core/pull/164)) ### Fixed + - parse commands given by arg or env using shell_words ([#240](https://github.com/rustic-rs/rustic_core/pull/240)) - Allow non-value/null xattr ([#235](https://github.com/rustic-rs/rustic_core/pull/235)) - ensure Rust 1.76.0 compiles @@ -129,7 +133,7 @@ All notable changes to this project will be documented in this file. - clippy lints ([#220](https://github.com/rustic-rs/rustic_core/pull/220)) - *(errors)* Show filenames in error message coming from ignore source ([#215](https://github.com/rustic-rs/rustic_core/pull/215)) - *(paths)* Handle paths starting with "." correctly ([#213](https://github.com/rustic-rs/rustic_core/pull/213)) -- Add warning about unsorted files and sort where neccessary ([#205](https://github.com/rustic-rs/rustic_core/pull/205)) +- Add warning about unsorted files and sort where necessary ([#205](https://github.com/rustic-rs/rustic_core/pull/205)) - *(deps)* update rust crate thiserror to 1.0.58 ([#192](https://github.com/rustic-rs/rustic_core/pull/192)) - *(deps)* update rust crate anyhow to 1.0.81 ([#191](https://github.com/rustic-rs/rustic_core/pull/191)) - *(deps)* update rust crate serde_with to 3.7.0 ([#189](https://github.com/rustic-rs/rustic_core/pull/189)) @@ -143,6 +147,7 @@ All notable changes to this project will be documented in this file. 
- updated msrv and fix clippy lints ([#160](https://github.com/rustic-rs/rustic_core/pull/160)) ### Other + - dependency updates - Ensure that MSRV 1.76 works - *(deps)* more version updates ([#237](https://github.com/rustic-rs/rustic_core/pull/237)) diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index e83c7330..2c02d814 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -41,8 +41,8 @@ rustdoc-args = ["--document-private-items", "--generate-link-to-definition"] [dependencies] # errors -displaydoc = "0.2.5" -thiserror = "1.0.64" +displaydoc = { workspace = true } +thiserror = { workspace = true } # macros derivative = "2.2.0" @@ -50,7 +50,7 @@ derive_more = { version = "1.0.0", features = ["add", "constructor", "display", derive_setters = "0.1.6" # logging -log = "0.4.22" +log = { workspace = true } # parallelize crossbeam-channel = "0.5.13" @@ -58,9 +58,9 @@ pariter = "0.5.1" rayon = "1.10.0" # crypto -aes256ctr_poly1305aes = "0.2.0" +aes256ctr_poly1305aes = { version = "0.2.0", features = ["std"] } # we need std here for error impls rand = "0.8.5" -scrypt = { version = "0.11.0", default-features = false } +scrypt = { version = "0.11.0", default-features = false, features = ["std"] } # we need std here for error impls # serialization / packing binrw = "0.14.0" @@ -95,10 +95,10 @@ futures = { version = "0.3.30", optional = true } runtime-format = "0.1.3" # other dependencies -anyhow = { workspace = true } bytes = { workspace = true } bytesize = "1.3.0" chrono = { version = "0.4.38", default-features = false, features = ["clock", "serde"] } +ecow = "0.2.3" enum-map = { workspace = true } enum-map-derive = "0.17.0" enumset = { version = "1.1.5", features = ["serde"] } @@ -123,6 +123,7 @@ sha2 = "0.10.8" xattr = "1" [dev-dependencies] +anyhow = { workspace = true } expect-test = "1.5.0" flate2 = "1.0.34" globset = "0.4.15" diff --git a/crates/core/src/archiver.rs b/crates/core/src/archiver.rs index 9a304a7b..e9ab4ae3 100644 --- a/crates/core/src/archiver.rs +++ b/crates/core/src/archiver.rs @@ -16,11 +16,19 @@ use crate::{ }, backend::{decrypt::DecryptFullBackend, ReadSource, ReadSourceEntry}, blob::BlobType, - index::{indexer::Indexer, indexer::SharedIndexer, ReadGlobalIndex}, + error::{ErrorKind, RusticError, RusticResult}, + index::{ + indexer::{Indexer, SharedIndexer}, + ReadGlobalIndex, + }, repofile::{configfile::ConfigFile, snapshotfile::SnapshotFile}, - Progress, RusticResult, + Progress, }; +#[derive(thiserror::Error, Debug, displaydoc::Display)] +/// Tree stack empty +pub struct TreeStackEmptyError; + /// The `Archiver` is responsible for archiving files and trees. /// It will read the file, chunk it, and write the chunks to the backend. /// @@ -66,11 +74,8 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Archiver<'a, BE, I> { /// /// # Errors /// - /// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. - /// * [`PackerErrorKind::IntConversionFailed`] - If converting the data length to u64 fails - /// - /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed - /// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed + /// * If sending the message to the raw packer fails. 
+ /// * If converting the data length to u64 fails pub fn new( be: BE, index: &'a I, @@ -84,6 +89,7 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Archiver<'a, BE, I> { let file_archiver = FileArchiver::new(be.clone(), index, indexer.clone(), config)?; let tree_archiver = TreeArchiver::new(be.clone(), index, indexer.clone(), config, summary)?; + Ok(Self { file_archiver, tree_archiver, @@ -114,13 +120,9 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Archiver<'a, BE, I> { /// /// # Errors /// - /// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the index file could not be serialized. - /// * [`SnapshotFileErrorKind::OutOfRange`] - If the time is not in the range of `Local::now()` - /// - /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed - /// [`SnapshotFileErrorKind::OutOfRange`]: crate::error::SnapshotFileErrorKind::OutOfRange + /// * If sending the message to the raw packer fails. + /// * If the index file could not be serialized. + /// * If the time is not in the range of `Local::now()`. pub fn archive( mut self, src: &R, @@ -201,8 +203,12 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Archiver<'a, BE, I> { }) .try_for_each(|item| self.tree_archiver.add(item)) }) - .unwrap()?; - src_size_handle.join().unwrap(); + .expect("Scoped Archiver thread should not panic!")?; + + src_size_handle + .join() + .expect("Scoped Size Handler thread should not panic!"); + Ok(()) })?; @@ -213,7 +219,13 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Archiver<'a, BE, I> { self.indexer.write().unwrap().finalize()?; - summary.finalize(self.snap.time)?; + summary.finalize(self.snap.time).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Could not finalize summary, please check the logs for more information.", + err, + ) + })?; self.snap.summary = Some(summary); if !skip_identical_parent || Some(self.snap.tree) != self.parent.tree_id() { diff --git a/crates/core/src/archiver/file_archiver.rs b/crates/core/src/archiver/file_archiver.rs index 74920dda..07eb4b7e 100644 --- a/crates/core/src/archiver/file_archiver.rs +++ b/crates/core/src/archiver/file_archiver.rs @@ -18,7 +18,7 @@ use crate::{ cdc::rolling_hash::Rabin64, chunker::ChunkIter, crypto::hasher::hash, - error::{ArchiverErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, index::{indexer::SharedIndexer, ReadGlobalIndex}, progress::Progress, repofile::configfile::ConfigFile, @@ -55,11 +55,8 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> { /// /// # Errors /// - /// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. - /// * [`PackerErrorKind::IntConversionFailed`] - If converting the data length to u64 fails - /// - /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed - /// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed + /// * If sending the message to the raw packer fails. 
+ /// * If converting the data length to u64 fails pub(crate) fn new( be: BE, index: &'a I, @@ -75,7 +72,9 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> { config, index.total_size(BlobType::Data), )?; + let rabin = Rabin64::new_with_polynom(6, poly); + Ok(Self { index, data_packer, @@ -96,13 +95,11 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> { /// /// # Errors /// - /// [`ArchiverErrorKind::UnpackingTreeTypeOptionalFailed`] - If the item could not be unpacked. + /// * If the item could not be unpacked. /// /// # Returns /// /// The processed item. - /// - /// [`ArchiverErrorKind::UnpackingTreeTypeOptionalFailed`]: crate::error::ArchiverErrorKind::UnpackingTreeTypeOptionalFailed pub(crate) fn process( &self, item: ItemWithParent>, @@ -118,8 +115,22 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> { (node, size) } else if node.node_type == NodeType::File { let r = open - .ok_or(ArchiverErrorKind::UnpackingTreeTypeOptionalFailed)? - .open()?; + .ok_or_else( + || RusticError::new( + ErrorKind::Internal, + "Failed to unpack tree type optional at `{path}`. Option should contain a value, but contained `None`.", + ) + .attach_context("path", path.display().to_string()) + .ask_report(), + )? + .open() + .map_err(|err| { + err + .overwrite_kind(ErrorKind::InputOutput) + .prepend_guidance_line("Failed to open ReadSourceOpen at `{path}`") + .attach_context("path", path.display().to_string()) + })?; + self.backup_reader(r, node, p)? } else { (node, 0) @@ -138,12 +149,18 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> { ) -> RusticResult<(Node, u64)> { let chunks: Vec<_> = ChunkIter::new( r, - usize::try_from(node.meta.size) - .map_err(ArchiverErrorKind::ConversionFromU64ToUsizeFailed)?, + usize::try_from(node.meta.size).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert node size `{size}` to usize", + err, + ) + .attach_context("size", node.meta.size.to_string()) + })?, self.rabin.clone(), ) .map(|chunk| { - let chunk = chunk.map_err(ArchiverErrorKind::FromStdIo)?; + let chunk = chunk?; let id = hash(&chunk); let size = chunk.len() as u64; @@ -171,7 +188,7 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> { /// /// # Panics /// - /// If the channel could not be dropped + /// * If the channel could not be dropped pub(crate) fn finalize(self) -> RusticResult { self.data_packer.finalize() } diff --git a/crates/core/src/archiver/parent.rs b/crates/core/src/archiver/parent.rs index 6db8917f..f807ce9d 100644 --- a/crates/core/src/archiver/parent.rs +++ b/crates/core/src/archiver/parent.rs @@ -6,10 +6,9 @@ use std::{ use log::warn; use crate::{ - archiver::tree::TreeType, + archiver::{tree::TreeType, TreeStackEmptyError}, backend::{decrypt::DecryptReadBackend, node::Node}, blob::tree::{Tree, TreeId}, - error::{ArchiverErrorKind, RusticResult}, index::ReadGlobalIndex, }; @@ -218,14 +217,9 @@ impl Parent { /// /// # Errors /// - /// * [`ArchiverErrorKind::TreeStackEmpty`] - If the tree stack is empty. - /// - /// [`ArchiverErrorKind::TreeStackEmpty`]: crate::error::ArchiverErrorKind::TreeStackEmpty - fn finish_dir(&mut self) -> RusticResult<()> { - let (tree, node_idx) = self - .stack - .pop() - .ok_or_else(|| ArchiverErrorKind::TreeStackEmpty)?; + /// * If the tree stack is empty. 
+ fn finish_dir(&mut self) -> Result<(), TreeStackEmptyError> { + let (tree, node_idx) = self.stack.pop().ok_or(TreeStackEmptyError)?; self.tree = tree; self.node_idx = node_idx; @@ -252,15 +246,13 @@ impl Parent { /// /// # Errors /// - /// * [`ArchiverErrorKind::TreeStackEmpty`] - If the tree stack is empty. - /// - /// [`ArchiverErrorKind::TreeStackEmpty`]: crate::error::ArchiverErrorKind::TreeStackEmpty + /// * If the tree stack is empty. pub(crate) fn process( &mut self, be: &impl DecryptReadBackend, index: &impl ReadGlobalIndex, item: TreeType, - ) -> RusticResult> { + ) -> Result, TreeStackEmptyError> { let result = match item { TreeType::NewTree((path, node, tree)) => { let parent_result = self diff --git a/crates/core/src/archiver/tree_archiver.rs b/crates/core/src/archiver/tree_archiver.rs index 3e2cc319..df2ee52a 100644 --- a/crates/core/src/archiver/tree_archiver.rs +++ b/crates/core/src/archiver/tree_archiver.rs @@ -11,7 +11,7 @@ use crate::{ tree::{Tree, TreeId}, BlobType, }, - error::{ArchiverErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, index::{indexer::SharedIndexer, ReadGlobalIndex}, repofile::{configfile::ConfigFile, snapshotfile::SnapshotSummary}, }; @@ -57,11 +57,8 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> { /// /// # Errors /// - /// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. - /// * [`PackerErrorKind::IntConversionFailed`] - If converting the data length to u64 fails - /// - /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed - /// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed + /// * If sending the message to the raw packer fails. + /// * If converting the data length to u64 fails pub(crate) fn new( be: BE, index: &'a I, @@ -76,6 +73,7 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> { config, index.total_size(BlobType::Tree), )?; + Ok(Self { tree: Tree::new(), stack: Vec::new(), @@ -93,9 +91,7 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> { /// /// # Errors /// - /// * [`ArchiverErrorKind::TreeStackEmpty`] - If the tree stack is empty. - /// - /// [`ArchiverErrorKind::TreeStackEmpty`]: crate::error::ArchiverErrorKind::TreeStackEmpty + /// * If the tree stack is empty. // TODO: Add more errors! pub(crate) fn add(&mut self, item: TreeItem) -> RusticResult<()> { match item { @@ -106,10 +102,9 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> { self.stack.push((path, node, parent, tree)); } TreeType::EndTree => { - let (path, mut node, parent, tree) = self - .stack - .pop() - .ok_or_else(|| ArchiverErrorKind::TreeStackEmpty)?; + let (path, mut node, parent, tree) = self.stack.pop().ok_or_else(|| { + RusticError::new(ErrorKind::Internal, "Tree stack is empty.").ask_report() + })?; // save tree trace!("finishing {path:?}"); @@ -164,15 +159,21 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> { /// /// # Errors /// - /// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. + /// * If sending the message to the raw packer fails. /// /// # Returns /// /// The id of the tree. 
- /// - /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed fn backup_tree(&mut self, path: &Path, parent: &ParentResult) -> RusticResult { - let (chunk, id) = self.tree.serialize()?; + let (chunk, id) = self.tree.serialize().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to serialize tree at `{path}`", + err, + ) + .attach_context("path", path.to_string_lossy()) + .ask_report() + })?; let dirsize = chunk.len() as u64; let dirsize_bytes = ByteSize(dirsize).to_string_as(true); @@ -209,7 +210,7 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> { /// /// # Errors /// - /// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. + /// * If sending the message to the raw packer fails. /// /// # Returns /// @@ -217,9 +218,7 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> { /// /// # Panics /// - /// If the channel of the tree packer is not dropped. - /// - /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed + /// * If the channel of the tree packer is not dropped. pub(crate) fn finalize( mut self, parent_tree: Option, diff --git a/crates/core/src/backend.rs b/crates/core/src/backend.rs index b232ceb1..2179671b 100644 --- a/crates/core/src/backend.rs +++ b/crates/core/src/backend.rs @@ -12,7 +12,6 @@ pub(crate) mod warm_up; use std::{io::Read, ops::Deref, path::PathBuf, sync::Arc}; -use anyhow::Result; use bytes::Bytes; use enum_map::Enum; use log::trace; @@ -24,11 +23,20 @@ use serde_derive::{Deserialize, Serialize}; use crate::{ backend::node::{Metadata, Node, NodeType}, - error::{BackendAccessErrorKind, RusticErrorKind}, + error::{ErrorKind, RusticError, RusticResult}, id::Id, - RusticResult, }; +/// [`BackendErrorKind`] describes the errors that can be returned by the various Backends +#[derive(thiserror::Error, Debug, displaydoc::Display)] +#[non_exhaustive] +pub enum BackendErrorKind { + /// Path is not allowed: `{0:?}` + PathNotAllowed(PathBuf), +} + +pub(crate) type BackendResult = Result; + /// All [`FileType`]s which are located in separated directories pub const ALL_FILE_TYPES: [FileType; 4] = [ FileType::Key, @@ -38,7 +46,7 @@ pub const ALL_FILE_TYPES: [FileType; 4] = [ ]; /// Type for describing the kind of a file that can occur. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Enum)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Enum, derive_more::Display)] pub enum FileType { /// Config file #[serde(rename = "config")] @@ -94,8 +102,8 @@ pub trait ReadBackend: Send + Sync + 'static { /// /// # Errors /// - /// If the files could not be listed. - fn list_with_size(&self, tpe: FileType) -> Result>; + /// * If the files could not be listed. + fn list_with_size(&self, tpe: FileType) -> RusticResult>; /// Lists all files of the given type. /// @@ -105,8 +113,8 @@ pub trait ReadBackend: Send + Sync + 'static { /// /// # Errors /// - /// If the files could not be listed. - fn list(&self, tpe: FileType) -> Result> { + /// * If the files could not be listed. + fn list(&self, tpe: FileType) -> RusticResult> { Ok(self .list_with_size(tpe)? .into_iter() @@ -123,8 +131,8 @@ pub trait ReadBackend: Send + Sync + 'static { /// /// # Errors /// - /// If the file could not be read. - fn read_full(&self, tpe: FileType, id: &Id) -> Result; + /// * If the file could not be read. 
+ fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult; /// Reads partial data of the given file. /// @@ -138,7 +146,7 @@ pub trait ReadBackend: Send + Sync + 'static { /// /// # Errors /// - /// If the file could not be read. + /// * If the file could not be read. fn read_partial( &self, tpe: FileType, @@ -146,7 +154,7 @@ pub trait ReadBackend: Send + Sync + 'static { cacheable: bool, offset: u32, length: u32, - ) -> Result; + ) -> RusticResult; /// Specify if the backend needs a warming-up of files before accessing them. fn needs_warm_up(&self) -> bool { @@ -162,8 +170,8 @@ pub trait ReadBackend: Send + Sync + 'static { /// /// # Errors /// - /// If the file could not be read. - fn warm_up(&self, _tpe: FileType, _id: &Id) -> Result<()> { + /// * If the file could not be read. + fn warm_up(&self, _tpe: FileType, _id: &Id) -> RusticResult<()> { Ok(()) } } @@ -189,15 +197,12 @@ pub trait FindInBackend: ReadBackend { /// /// # Errors /// - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. + /// * If no id could be found. + /// * If the id is not unique. /// /// # Note /// /// This function is used to find the id of a snapshot. - /// - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique fn find_starts_with>(&self, tpe: FileType, vec: &[T]) -> RusticResult> { #[derive(Clone, Copy, PartialEq, Eq)] enum MapResult { @@ -206,7 +211,7 @@ pub trait FindInBackend: ReadBackend { NonUnique, } let mut results = vec![MapResult::None; vec.len()]; - for id in self.list(tpe).map_err(RusticErrorKind::Backend)? { + for id in self.list(tpe)? { let id_hex = id.to_hex(); for (i, v) in vec.iter().enumerate() { if id_hex.starts_with(v.as_ref()) { @@ -224,13 +229,16 @@ pub trait FindInBackend: ReadBackend { .enumerate() .map(|(i, id)| match id { MapResult::Some(id) => Ok(id), - MapResult::None => Err(BackendAccessErrorKind::NoSuitableIdFound( - (vec[i]).as_ref().to_string(), + MapResult::None => Err(RusticError::new( + ErrorKind::Backend, + "No suitable id found for `{id}`.", ) - .into()), - MapResult::NonUnique => { - Err(BackendAccessErrorKind::IdNotUnique((vec[i]).as_ref().to_string()).into()) - } + .attach_context("id", vec[i].as_ref().to_string())), + MapResult::NonUnique => Err(RusticError::new( + ErrorKind::Backend, + "Id not unique: `{id}`.", + ) + .attach_context("id", vec[i].as_ref().to_string())), }) .collect() } @@ -244,13 +252,9 @@ pub trait FindInBackend: ReadBackend { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. - /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique + /// * If the string is not a valid hexadecimal string + /// * If no id could be found. + /// * If the id is not unique. 
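`find_starts_with` above resolves shortened ids: every queried prefix must match exactly one id in the backend listing, otherwise it reports "no suitable id found" or "id not unique". The core of that resolution as a standalone sketch:

```rust
/// Outcome of resolving one id prefix against a listing.
#[derive(Debug, PartialEq, Eq)]
enum PrefixMatch {
    None,
    Unique(String),
    NonUnique,
}

// A prefix resolves only if exactly one id starts with it.
fn resolve_prefix(ids: &[String], prefix: &str) -> PrefixMatch {
    let mut result = PrefixMatch::None;
    for id in ids {
        if id.starts_with(prefix) {
            result = match result {
                PrefixMatch::None => PrefixMatch::Unique(id.clone()),
                _ => return PrefixMatch::NonUnique,
            };
        }
    }
    result
}
```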
fn find_id(&self, tpe: FileType, id: &str) -> RusticResult { Ok(self.find_ids(tpe, &[id.to_string()])?.remove(0)) } @@ -268,13 +272,9 @@ pub trait FindInBackend: ReadBackend { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. - /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique + /// * If the string is not a valid hexadecimal string + /// * If no id could be found. + /// * If the id is not unique. fn find_ids>(&self, tpe: FileType, ids: &[T]) -> RusticResult> { ids.iter() .map(|id| id.as_ref().parse()) @@ -294,12 +294,12 @@ pub trait WriteBackend: ReadBackend { /// /// # Errors /// - /// If the backend could not be created. + /// * If the backend could not be created. /// /// # Returns /// /// The result of the creation. - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { Ok(()) } @@ -314,12 +314,12 @@ pub trait WriteBackend: ReadBackend { /// /// # Errors /// - /// If the data could not be written. + /// * If the data could not be written. /// /// # Returns /// /// The result of the write. - fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> Result<()>; + fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> RusticResult<()>; /// Removes the given file. /// @@ -331,12 +331,12 @@ pub trait WriteBackend: ReadBackend { /// /// # Errors /// - /// If the file could not be removed. + /// * If the file could not be removed. /// /// # Returns /// /// The result of the removal. - fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> Result<()>; + fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> RusticResult<()>; } #[cfg(test)] @@ -345,8 +345,8 @@ mock! { impl ReadBackend for Backend{ fn location(&self) -> String; - fn list_with_size(&self, tpe: FileType) -> Result>; - fn read_full(&self, tpe: FileType, id: &Id) -> Result; + fn list_with_size(&self, tpe: FileType) -> RusticResult>; + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult; fn read_partial( &self, tpe: FileType, @@ -354,24 +354,24 @@ mock! 
{ cacheable: bool, offset: u32, length: u32, - ) -> Result; + ) -> RusticResult; } impl WriteBackend for Backend { - fn create(&self) -> Result<()>; - fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> Result<()>; - fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> Result<()>; + fn create(&self) -> RusticResult<()>; + fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> RusticResult<()>; + fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> RusticResult<()>; } } impl WriteBackend for Arc { - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { self.deref().create() } - fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> Result<()> { + fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> RusticResult<()> { self.deref().write_bytes(tpe, id, cacheable, buf) } - fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> Result<()> { + fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> RusticResult<()> { self.deref().remove(tpe, id, cacheable) } } @@ -380,13 +380,13 @@ impl ReadBackend for Arc { fn location(&self) -> String { self.deref().location() } - fn list_with_size(&self, tpe: FileType) -> Result> { + fn list_with_size(&self, tpe: FileType) -> RusticResult> { self.deref().list_with_size(tpe) } - fn list(&self, tpe: FileType) -> Result> { + fn list(&self, tpe: FileType) -> RusticResult> { self.deref().list(tpe) } - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { self.deref().read_full(tpe, id) } fn read_partial( @@ -396,7 +396,7 @@ impl ReadBackend for Arc { cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { self.deref() .read_partial(tpe, id, cacheable, offset, length) } @@ -426,10 +426,10 @@ pub struct ReadSourceEntry { } impl ReadSourceEntry { - fn from_path(path: PathBuf, open: Option) -> RusticResult { + fn from_path(path: PathBuf, open: Option) -> BackendResult { let node = Node::new_node( path.file_name() - .ok_or_else(|| BackendAccessErrorKind::PathNotAllowed(path.clone()))?, + .ok_or_else(|| BackendErrorKind::PathNotAllowed(path.clone()))?, NodeType::File, Metadata::default(), ); @@ -447,7 +447,7 @@ pub trait ReadSourceOpen { /// /// # Errors /// - /// If the source could not be opened. + /// * If the source could not be opened. /// /// # Result /// @@ -476,7 +476,7 @@ pub trait ReadSource: Sync + Send { /// /// # Errors /// - /// If the size could not be determined. + /// * If the size could not be determined. /// /// # Returns /// diff --git a/crates/core/src/backend/cache.rs b/crates/core/src/backend/cache.rs index d00d9699..8d82ecb0 100644 --- a/crates/core/src/backend/cache.rs +++ b/crates/core/src/backend/cache.rs @@ -1,12 +1,11 @@ use std::{ collections::HashMap, fs::{self, File}, - io::{ErrorKind, Read, Seek, SeekFrom, Write}, + io::{self, Read, Seek, SeekFrom, Write}, path::PathBuf, sync::Arc, }; -use anyhow::Result; use bytes::Bytes; use dirs::cache_dir; use log::{trace, warn}; @@ -14,7 +13,7 @@ use walkdir::WalkDir; use crate::{ backend::{FileType, ReadBackend, WriteBackend}, - error::{CacheBackendErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, id::Id, repofile::configfile::RepositoryId, }; @@ -60,12 +59,12 @@ impl ReadBackend for CachedBackend { /// /// # Errors /// - /// If the backend does not support listing files. + /// * If the backend does not support listing files. 
/// /// # Returns /// /// A vector of tuples containing the id and size of the files. - fn list_with_size(&self, tpe: FileType) -> Result> { + fn list_with_size(&self, tpe: FileType) -> RusticResult> { let list = self.be.list_with_size(tpe)?; if tpe.is_cacheable() { @@ -86,14 +85,12 @@ impl ReadBackend for CachedBackend { /// /// # Errors /// - /// * [`CacheBackendErrorKind::FromIoError`] - If the file could not be read. + /// * If the file could not be read. /// /// # Returns /// /// The data read. - /// - /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { if tpe.is_cacheable() { match self.cache.read_full(tpe, id) { Ok(Some(data)) => return Ok(data), @@ -124,13 +121,11 @@ impl ReadBackend for CachedBackend { /// /// # Errors /// - /// * [`CacheBackendErrorKind::FromIoError`] - If the file could not be read. + /// * If the file could not be read. /// /// # Returns /// /// The data read. - /// - /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError fn read_partial( &self, tpe: FileType, @@ -138,7 +133,7 @@ impl ReadBackend for CachedBackend { cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { if cacheable || tpe.is_cacheable() { match self.cache.read_partial(tpe, id, offset, length) { Ok(Some(data)) => return Ok(data), @@ -164,14 +159,14 @@ impl ReadBackend for CachedBackend { self.be.needs_warm_up() } - fn warm_up(&self, tpe: FileType, id: &Id) -> Result<()> { + fn warm_up(&self, tpe: FileType, id: &Id) -> RusticResult<()> { self.be.warm_up(tpe, id) } } impl WriteBackend for CachedBackend { /// Creates the backend. - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { self.be.create() } @@ -185,7 +180,7 @@ impl WriteBackend for CachedBackend { /// * `id` - The id of the file. /// * `cacheable` - Whether the file is cacheable. /// * `buf` - The data to write. - fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> Result<()> { + fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> RusticResult<()> { if cacheable || tpe.is_cacheable() { if let Err(err) = self.cache.write_bytes(tpe, id, &buf) { warn!("Error in cache backend writing {tpe:?},{id}: {err}"); @@ -202,7 +197,7 @@ impl WriteBackend for CachedBackend { /// /// * `tpe` - The type of the file. /// * `id` - The id of the file. - fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> Result<()> { + fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> RusticResult<()> { if cacheable || tpe.is_cacheable() { if let Err(err) = self.cache.remove(tpe, id) { warn!("Error in cache backend removing {tpe:?},{id}: {err}"); @@ -231,23 +226,54 @@ impl Cache { /// /// # Errors /// - /// * [`CacheBackendErrorKind::NoCacheDirectory`] - If no path is given and the default cache directory could not be determined. - /// * [`CacheBackendErrorKind::FromIoError`] - If the cache directory could not be created. - /// - /// [`CacheBackendErrorKind::NoCacheDirectory`]: crate::error::CacheBackendErrorKind::NoCacheDirectory - /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError + /// * If no path is given and the default cache directory could not be determined. + /// * If the cache directory could not be created. 
pub fn new(id: RepositoryId, path: Option) -> RusticResult { let mut path = if let Some(p) = path { p } else { - let mut dir = cache_dir().ok_or_else(|| CacheBackendErrorKind::NoCacheDirectory)?; + let mut dir = cache_dir().ok_or_else(|| + RusticError::new( + ErrorKind::Backend, + "Cache directory could not be determined, please set the environment variable XDG_CACHE_HOME or HOME!" + ) + )?; dir.push("rustic"); dir }; - fs::create_dir_all(&path).map_err(CacheBackendErrorKind::FromIoError)?; - cachedir::ensure_tag(&path).map_err(CacheBackendErrorKind::FromIoError)?; + + fs::create_dir_all(&path).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to create cache directory at `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("id", id.to_string()) + })?; + + cachedir::ensure_tag(&path).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to ensure cache directory tag at `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("id", id.to_string()) + })?; + path.push(id.to_hex()); - fs::create_dir_all(&path).map_err(CacheBackendErrorKind::FromIoError)?; + + fs::create_dir_all(&path).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to create cache directory with id `{id}` at `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("id", id.to_string()) + })?; + Ok(Self { path }) } @@ -255,7 +281,7 @@ impl Cache { /// /// # Panics /// - /// Panics if the path is not valid unicode. + /// * Panics if the path is not valid unicode. // TODO: Does this need to panic? Result? #[must_use] pub fn location(&self) -> &str { @@ -297,17 +323,27 @@ impl Cache { /// /// # Errors /// - /// * [`CacheBackendErrorKind::FromIoError`] - If the cache directory could not be read. - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// - /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError + /// * If the cache directory could not be read. + /// * If the string is not a valid hexadecimal string #[allow(clippy::unnecessary_wraps)] pub fn list_with_size(&self, tpe: FileType) -> RusticResult> { let path = self.path.join(tpe.dirname()); let walker = WalkDir::new(path) .into_iter() + .inspect(|r| { + if let Err(err) = r { + if err.depth() == 0 { + if let Some(io_err) = err.io_error() { + if io_err.kind() == io::ErrorKind::NotFound { + // ignore errors if root path doesn't exist => this should return an empty list without error + return; + } + } + } + warn!("Error while listing files: {err:?}"); + } + }) .filter_map(walkdir::Result::ok) .filter(|e| { // only use files with length of 64 which are valid hex @@ -339,9 +375,7 @@ impl Cache { /// /// # Errors /// - /// * [`CacheBackendErrorKind::FromIoError`] - If the cache directory could not be read. - /// - /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError + /// * If the cache directory could not be read. pub fn remove_not_in_list(&self, tpe: FileType, list: &Vec<(Id, u32)>) -> RusticResult<()> { let mut list_cache = self.list_with_size(tpe)?; // remove present files from the cache list @@ -369,18 +403,26 @@ impl Cache { /// /// # Errors /// - /// * [`CacheBackendErrorKind::FromIoError`] - If the file could not be read. 
- /// - /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError + /// * If the file could not be read. pub fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult> { trace!("cache reading tpe: {:?}, id: {}", &tpe, &id); - match fs::read(self.path(tpe, id)) { + + let path = self.path(tpe, id); + + match fs::read(&path) { Ok(data) => { trace!("cache hit!"); Ok(Some(data.into())) } - Err(err) if err.kind() == ErrorKind::NotFound => Ok(None), - Err(err) => Err(CacheBackendErrorKind::FromIoError(err).into()), + Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None), + Err(err) => Err(RusticError::with_source( + ErrorKind::InputOutput, + "Failed to read full data of file at `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("tpe", tpe.to_string()) + .attach_context("id", id.to_string())), } } @@ -395,9 +437,7 @@ impl Cache { /// /// # Errors /// - /// * [`CacheBackendErrorKind::FromIoError`] - If the file could not be read. - /// - /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError + /// * If the file could not be read. pub fn read_partial( &self, tpe: FileType, @@ -411,18 +451,54 @@ impl Cache { &id, &offset ); - let mut file = match File::open(self.path(tpe, id)) { + + let path = self.path(tpe, id); + + let mut file = match File::open(&path) { Ok(file) => file, - Err(err) if err.kind() == ErrorKind::NotFound => return Ok(None), - Err(err) => return Err(CacheBackendErrorKind::FromIoError(err).into()), + Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(None), + Err(err) => { + return Err(RusticError::with_source( + ErrorKind::InputOutput, + "Failed to open file at `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("tpe", tpe.to_string()) + .attach_context("id", id.to_string())) + } }; + _ = file .seek(SeekFrom::Start(u64::from(offset))) - .map_err(CacheBackendErrorKind::FromIoError)?; + .map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to seek to `{offset}` in file `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("tpe", tpe.to_string()) + .attach_context("id", id.to_string()) + .attach_context("offset", offset.to_string()) + })?; + let mut vec = vec![0; length as usize]; - file.read_exact(&mut vec) - .map_err(CacheBackendErrorKind::FromIoError)?; + + file.read_exact(&mut vec).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to read at offset `{offset}` from file at `{path}`", + err, + ) + .attach_context("tpe", tpe.to_string()) + .attach_context("id", id.to_string()) + .attach_context("offset", offset.to_string()) + .attach_context("length", length.to_string()) + })?; + trace!("cache hit!"); + Ok(Some(vec.into())) } @@ -436,21 +512,50 @@ impl Cache { /// /// # Errors /// - /// * [`CacheBackendErrorKind::FromIoError`] - If the file could not be written. - /// - /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError + /// * If the file could not be written. 
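The cache read path above treats a missing file as a cache miss rather than an error, and a partial read is a seek to the requested offset followed by an exact-length read. A minimal sketch of that pattern on top of `std::fs`:

```rust
use std::fs::File;
use std::io::{self, Read, Seek, SeekFrom};
use std::path::Path;

// Read `length` bytes at `offset`; a missing file is a cache miss (`Ok(None)`),
// any other I/O problem is a real error.
fn cached_read_partial(path: &Path, offset: u64, length: usize) -> io::Result<Option<Vec<u8>>> {
    let mut file = match File::open(path) {
        Ok(file) => file,
        Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(None),
        Err(err) => return Err(err),
    };
    let _ = file.seek(SeekFrom::Start(offset))?;
    let mut buf = vec![0; length];
    file.read_exact(&mut buf)?;
    Ok(Some(buf))
}
```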
pub fn write_bytes(&self, tpe: FileType, id: &Id, buf: &Bytes) -> RusticResult<()> { trace!("cache writing tpe: {:?}, id: {}", &tpe, &id); - fs::create_dir_all(self.dir(tpe, id)).map_err(CacheBackendErrorKind::FromIoError)?; + + let dir = self.dir(tpe, id); + + fs::create_dir_all(&dir).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to create directories at `{path}`", + err, + ) + .attach_context("path", dir.display().to_string()) + .attach_context("tpe", tpe.to_string()) + .attach_context("id", id.to_string()) + })?; + let filename = self.path(tpe, id); + let mut file = fs::OpenOptions::new() .create(true) .truncate(true) .write(true) - .open(filename) - .map_err(CacheBackendErrorKind::FromIoError)?; - file.write_all(buf) - .map_err(CacheBackendErrorKind::FromIoError)?; + .open(&filename) + .map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to open file at `{path}`", + err, + ) + .attach_context("path", filename.display().to_string()) + })?; + + file.write_all(buf).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to write to buffer at `{path}`", + err, + ) + .attach_context("path", filename.display().to_string()) + .attach_context("tpe", tpe.to_string()) + .attach_context("id", id.to_string()) + })?; + Ok(()) } @@ -463,13 +568,21 @@ impl Cache { /// /// # Errors /// - /// * [`CacheBackendErrorKind::FromIoError`] - If the file could not be removed. - /// - /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError + /// * If the file could not be removed. pub fn remove(&self, tpe: FileType, id: &Id) -> RusticResult<()> { trace!("cache writing tpe: {:?}, id: {}", &tpe, &id); let filename = self.path(tpe, id); - fs::remove_file(filename).map_err(CacheBackendErrorKind::FromIoError)?; + fs::remove_file(&filename).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to remove file at `{path}`", + err, + ) + .attach_context("path", filename.display().to_string()) + .attach_context("tpe", tpe.to_string()) + .attach_context("id", id.to_string()) + })?; + Ok(()) } } diff --git a/crates/core/src/backend/childstdout.rs b/crates/core/src/backend/childstdout.rs index 95439213..49b95dba 100644 --- a/crates/core/src/backend/childstdout.rs +++ b/crates/core/src/backend/childstdout.rs @@ -7,8 +7,8 @@ use std::{ use crate::{ backend::{ReadSource, ReadSourceEntry}, - error::{RepositoryErrorKind, RusticResult}, - CommandInput, + error::{ErrorKind, RusticError, RusticResult}, + repository::command_input::{CommandInput, CommandInputErrorKind}, }; /// The `ChildStdoutSource` is a `ReadSource` when spawning a child process and reading its stdout @@ -35,13 +35,10 @@ impl ChildStdoutSource { .args(cmd.args()) .stdout(Stdio::piped()) .spawn() - .map_err(|err| { - RepositoryErrorKind::CommandExecutionFailed( - "stdin-command".into(), - "call".into(), - err, - ) - .into() + .map_err(|err| CommandInputErrorKind::ProcessExecutionFailed { + command: cmd.clone(), + path: path.clone(), + source: err, }); let process = cmd.on_failure().display_result(process)?; @@ -73,6 +70,14 @@ impl ReadSource for ChildStdoutSource { fn entries(&self) -> Self::Iter { let open = self.process.lock().unwrap().stdout.take(); - once(ReadSourceEntry::from_path(self.path.clone(), open)) + once( + ReadSourceEntry::from_path(self.path.clone(), open).map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Failed to create ReadSourceEntry from ChildStdout", + err, + ) + }), + ) } } 
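Note on the error-handling pattern used throughout these changes: the per-module `*ErrorKind` enums are replaced by `RusticError` values that bundle an `ErrorKind`, a message template, an optional source error, and attached context values. A minimal sketch of the calling convention, assuming the `RusticError::with_source`/`attach_context` API shown in the hunks above and a hypothetical `read_config_file` helper:

    use std::path::Path;

    // Hypothetical helper: read a file and convert the io::Error into a RusticError,
    // keeping the original error as source and attaching the path as context so it
    // can fill the `{path}` placeholder of the message template.
    fn read_config_file(path: &Path) -> RusticResult<Vec<u8>> {
        std::fs::read(path).map_err(|err| {
            RusticError::with_source(
                ErrorKind::InputOutput,
                "Failed to read config file at `{path}`",
                err,
            )
            .attach_context("path", path.display().to_string())
        })
    }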
diff --git a/crates/core/src/backend/decrypt.rs b/crates/core/src/backend/decrypt.rs index e90d6f86..414084f4 100644 --- a/crates/core/src/backend/decrypt.rs +++ b/crates/core/src/backend/decrypt.rs @@ -1,6 +1,5 @@ use std::{num::NonZeroU32, sync::Arc}; -use anyhow::Result; use bytes::Bytes; use crossbeam_channel::{unbounded, Receiver}; use rayon::prelude::*; @@ -8,21 +7,21 @@ use zstd::stream::{copy_encode, decode_all, encode_all}; pub use zstd::compression_level_range; -/// The maximum compression level allowed by zstd -#[must_use] -pub fn max_compression_level() -> i32 { - *compression_level_range().end() -} - use crate::{ backend::{FileType, ReadBackend, WriteBackend}, crypto::{hasher::hash, CryptoKey}, - error::{CryptBackendErrorKind, RusticErrorKind}, + error::{ErrorKind, RusticError, RusticResult}, id::Id, repofile::{RepoFile, RepoId}, - Progress, RusticResult, + Progress, }; +/// The maximum compression level allowed by zstd +#[must_use] +pub fn max_compression_level() -> i32 { + *compression_level_range().end() +} + /// A backend that can decrypt data. /// This is a trait that is implemented by all backends that can decrypt data. /// It is implemented for all backends that implement `DecryptWriteBackend` and `DecryptReadBackend`. @@ -42,7 +41,7 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static { /// /// # Errors /// - /// If the data could not be decrypted. + /// * If the data could not be decrypted. fn decrypt(&self, data: &[u8]) -> RusticResult<Vec<u8>>; /// Reads the given file. @@ -54,7 +53,7 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static { /// /// # Errors /// - /// If the file could not be read. + /// * If the file could not be read. fn read_encrypted_full(&self, tpe: FileType, id: &Id) -> RusticResult<Bytes>; /// Reads the given file from partial data. @@ -66,11 +65,8 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static { /// /// # Errors /// - /// * [`CryptBackendErrorKind::DecodingZstdCompressedDataFailed`] - If the data could not be decoded. - /// * [`CryptBackendErrorKind::LengthOfUncompressedDataDoesNotMatch`] - If the length of the uncompressed data does not match the given length. - /// - /// [`CryptBackendErrorKind::DecodingZstdCompressedDataFailed`]: crate::error::CryptBackendErrorKind::DecodingZstdCompressedDataFailed - /// [`CryptBackendErrorKind::LengthOfUncompressedDataDoesNotMatch`]: crate::error::CryptBackendErrorKind::LengthOfUncompressedDataDoesNotMatch + /// * If the data could not be decoded. + /// * If the length of the uncompressed data does not match the given length. fn read_encrypted_from_partial( &self, data: &[u8], @@ -78,10 +74,22 @@ ) -> RusticResult<Bytes> { let mut data = self.decrypt(data)?; if let Some(length) = uncompressed_length { - data = decode_all(&*data) - .map_err(CryptBackendErrorKind::DecodingZstdCompressedDataFailed)?; + data = decode_all(&*data).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to decode zstd compressed data. 
The data may be corrupted.", + err, + ) + })?; + if data.len() != length.get() as usize { - return Err(CryptBackendErrorKind::LengthOfUncompressedDataDoesNotMatch.into()); + return Err(RusticError::new( + ErrorKind::Internal, + "Length of uncompressed data `{actual_length}` does not match the given length `{expected_length}`.", + ) + .attach_context("expected_length", length.get().to_string()) + .attach_context("actual_length", data.len().to_string()) + .ask_report()); } } Ok(data.into()) @@ -100,7 +108,7 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static { /// /// # Errors /// - /// If the file could not be read. + /// * If the file could not be read. fn read_encrypted_partial( &self, tpe: FileType, @@ -111,9 +119,7 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static { uncompressed_length: Option, ) -> RusticResult { self.read_encrypted_from_partial( - &self - .read_partial(tpe, id, cacheable, offset, length) - .map_err(RusticErrorKind::Backend)?, + &self.read_partial(tpe, id, cacheable, offset, length)?, uncompressed_length, ) } @@ -126,11 +132,18 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static { /// /// # Errors /// - /// If the file could not be read. + /// * If the file could not be read. fn get_file(&self, id: &Id) -> RusticResult { let data = self.read_encrypted_full(F::TYPE, id)?; - Ok(serde_json::from_slice(&data) - .map_err(CryptBackendErrorKind::DeserializingFromBytesOfJsonTextFailed)?) + let deserialized = serde_json::from_slice(&data).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to deserialize file from JSON.", + err, + ) + })?; + + Ok(deserialized) } /// Streams all files. @@ -145,7 +158,7 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static { /// /// If the files could not be read. fn stream_all(&self, p: &impl Progress) -> StreamResult { - let list = self.list(F::TYPE).map_err(RusticErrorKind::Backend)?; + let list = self.list(F::TYPE)?; self.stream_list(&list, p) } @@ -191,7 +204,7 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static { /// /// # Errors /// - /// If the data could not be written. + /// * If the data could not be written. /// /// # Returns /// @@ -215,7 +228,7 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static { /// /// # Errors /// - /// If the data could not be written. + /// * If the data could not be written. /// /// # Returns /// @@ -223,8 +236,7 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static { fn hash_write_full_uncompressed(&self, tpe: FileType, data: &[u8]) -> RusticResult { let data = self.key().encrypt_data(data)?; let id = hash(&data); - self.write_bytes(tpe, &id, false, data.into()) - .map_err(RusticErrorKind::Backend)?; + self.write_bytes(tpe, &id, false, data.into())?; Ok(id) } /// Saves the given file. @@ -235,16 +247,21 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static { /// /// # Errors /// - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the file could not be serialized to json. + /// * If the file could not be serialized to json. /// /// # Returns /// /// The id of the file. 
- /// - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed fn save_file(&self, file: &F) -> RusticResult { - let data = serde_json::to_vec(file) - .map_err(CryptBackendErrorKind::SerializingToJsonByteVectorFailed)?; + let data = serde_json::to_vec(file).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to serialize file to JSON.", + err, + ) + .ask_report() + })?; + self.hash_write_full(F::TYPE, &data) } @@ -256,16 +273,21 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static { /// /// # Errors /// - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the file could not be serialized to json. + /// * If the file could not be serialized to json. /// /// # Returns /// /// The id of the file. - /// - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed fn save_file_uncompressed(&self, file: &F) -> RusticResult { - let data = serde_json::to_vec(file) - .map_err(CryptBackendErrorKind::SerializingToJsonByteVectorFailed)?; + let data = serde_json::to_vec(file).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to serialize file to JSON.", + err, + ) + .ask_report() + })?; + self.hash_write_full_uncompressed(F::TYPE, &data) } @@ -278,7 +300,7 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static { /// /// # Errors /// - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the file could not be serialized to json. + /// * If the file could not be serialized to json. fn save_list<'a, F: RepoFile, I: ExactSizeIterator + Send>( &self, list: I, @@ -302,10 +324,6 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static { /// * `cacheable` - Whether the files should be cached. /// * `list` - The list of files to delete. /// * `p` - The progress bar. - /// - /// # Panics - /// - /// If the files could not be deleted. fn delete_list<'a, ID: RepoId, I: ExactSizeIterator + Send>( &self, cacheable: bool, @@ -314,8 +332,7 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static { ) -> RusticResult<()> { p.set_length(list.len() as u64); list.par_bridge().try_for_each(|id| -> RusticResult<_> { - // TODO: Don't panic on file not being able to be deleted. - self.remove(ID::TYPE, id, cacheable).unwrap(); + self.remove(ID::TYPE, id, cacheable)?; p.inc(1); Ok(()) })?; @@ -380,9 +397,19 @@ impl DecryptBackend { let decrypted = self.decrypt(data)?; Ok(match decrypted.first() { Some(b'{' | b'[') => decrypted, // not compressed - Some(2) => decode_all(&decrypted[1..]) - .map_err(CryptBackendErrorKind::DecodingZstdCompressedDataFailed)?, // 2 indicates compressed data following - _ => return Err(CryptBackendErrorKind::DecryptionNotSupportedForBackend)?, + Some(2) => decode_all(&decrypted[1..]).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to decode zstd compressed data. The data may be corrupted.", + err, + ) + })?, // 2 indicates compressed data following + _ => { + return Err(RusticError::new( + ErrorKind::Unsupported, + "Decryption not supported. The data is not in a supported format.", + ))? 
+ } }) } @@ -391,8 +418,15 @@ impl DecryptBackend { let data_encrypted = match self.zstd { Some(level) => { let mut out = vec![2_u8]; - copy_encode(data, &mut out, level) - .map_err(CryptBackendErrorKind::CopyEncodingDataFailed)?; + copy_encode(data, &mut out, level).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Compressing and appending data failed. The data may be corrupted.", + err, + ) + .attach_context("compression_level", level.to_string()) + })?; + self.key().encrypt_data(&out)? } None => self.key().encrypt_data(data)?, @@ -404,7 +438,12 @@ impl DecryptBackend { if self.extra_verify { let check_data = self.decrypt_file(data_encrypted)?; if data != check_data { - return Err(CryptBackendErrorKind::ExtraVerificationFailed.into()); + return Err( + RusticError::new( + ErrorKind::Verification, + "Verification failed: After decrypting and decompressing the data changed! The data may be corrupted.\nPlease check the backend for corruption and try again. You can also try to run `rustic check --read-data` to check for corruption. This may take a long time.", + ).attach_error_code("C003") + ); } } Ok(()) @@ -412,15 +451,29 @@ impl DecryptBackend { /// encrypt and potentially compress some data fn encrypt_data(&self, data: &[u8]) -> RusticResult<(Vec, u32, Option)> { - let data_len: u32 = data - .len() - .try_into() - .map_err(CryptBackendErrorKind::IntConversionFailed)?; + let data_len: u32 = data.len().try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert data length `{length}` to u32.", + err, + ) + .attach_context("length", data.len().to_string()) + .ask_report() + })?; + let (data_encrypted, uncompressed_length) = match self.zstd { None => (self.key.encrypt_data(data)?, None), // compress if requested Some(level) => ( - self.key.encrypt_data(&encode_all(data, level)?)?, + self.key + .encrypt_data(&encode_all(data, level).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to encode zstd compressed data. The data may be corrupted.", + err, + ) + .attach_context("compression_level", level.to_string()) + })?)?, NonZeroU32::new(data_len), ), }; @@ -436,10 +489,17 @@ impl DecryptBackend { if self.extra_verify { let data_check = self.read_encrypted_from_partial(data_encrypted, uncompressed_length)?; + if data != data_check { - return Err(CryptBackendErrorKind::ExtraVerificationFailed.into()); + return Err( + RusticError::new( + ErrorKind::Verification, + "Verification failed: After decrypting and decompressing the data changed! The data may be corrupted.\nPlease check the backend for corruption and try again. You can also try to run `rustic check --read-data` to check for corruption. This may take a long time.", + ).attach_error_code("C003") + ); } } + Ok(()) } } @@ -462,25 +522,27 @@ impl DecryptWriteBackend for DecryptBackend { /// /// # Errors /// - /// * [`CryptBackendErrorKind::CopyEncodingDataFailed`] - If the data could not be encoded. + /// * If the data could not be encoded. /// /// # Returns /// /// The id of the data. 
- /// - /// [`CryptBackendErrorKind::CopyEncodingDataFailed`]: crate::error::CryptBackendErrorKind::CopyEncodingDataFailed fn hash_write_full(&self, tpe: FileType, data: &[u8]) -> RusticResult { let data_encrypted = self.encrypt_file(data)?; + self.very_file(&data_encrypted, data)?; + let id = hash(&data_encrypted); - self.write_bytes(tpe, &id, false, data_encrypted.into()) - .map_err(RusticErrorKind::Backend)?; + + self.write_bytes(tpe, &id, false, data_encrypted.into())?; Ok(id) } fn process_data(&self, data: &[u8]) -> RusticResult<(Vec, u32, Option)> { let (data_encrypted, data_len, uncompressed_length) = self.encrypt_data(data)?; + self.very_data(&data_encrypted, uncompressed_length, data)?; + Ok((data_encrypted, data_len, uncompressed_length)) } @@ -526,14 +588,10 @@ impl DecryptReadBackend for DecryptBackend { /// /// # Errors /// - /// * [`CryptBackendErrorKind::DecryptionNotSupportedForBackend`] - If the backend does not support decryption. - /// * [`CryptBackendErrorKind::DecodingZstdCompressedDataFailed`] - If the data could not be decoded. - /// - /// [`CryptBackendErrorKind::DecryptionNotSupportedForBackend`]: crate::error::CryptBackendErrorKind::DecryptionNotSupportedForBackend - /// [`CryptBackendErrorKind::DecodingZstdCompressedDataFailed`]: crate::error::CryptBackendErrorKind::DecodingZstdCompressedDataFailed + /// * If the backend does not support decryption. + /// * If the data could not be decoded. fn read_encrypted_full(&self, tpe: FileType, id: &Id) -> RusticResult { - self.decrypt_file(&self.read_full(tpe, id).map_err(RusticErrorKind::Backend)?) - .map(Into::into) + self.decrypt_file(&self.read_full(tpe, id)?).map(Into::into) } } @@ -542,15 +600,15 @@ impl ReadBackend for DecryptBackend { self.be.location() } - fn list(&self, tpe: FileType) -> Result> { + fn list(&self, tpe: FileType) -> RusticResult> { self.be.list(tpe) } - fn list_with_size(&self, tpe: FileType) -> Result> { + fn list_with_size(&self, tpe: FileType) -> RusticResult> { self.be.list_with_size(tpe) } - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { self.be.read_full(tpe, id) } @@ -561,21 +619,21 @@ impl ReadBackend for DecryptBackend { cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { self.be.read_partial(tpe, id, cacheable, offset, length) } } impl WriteBackend for DecryptBackend { - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { self.be.create() } - fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> Result<()> { + fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> RusticResult<()> { self.be.write_bytes(tpe, id, cacheable, buf) } - fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> Result<()> { + fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> RusticResult<()> { self.be.remove(tpe, id, cacheable) } } diff --git a/crates/core/src/backend/dry_run.rs b/crates/core/src/backend/dry_run.rs index e93c9023..5bd07cb8 100644 --- a/crates/core/src/backend/dry_run.rs +++ b/crates/core/src/backend/dry_run.rs @@ -1,4 +1,3 @@ -use anyhow::Result; use bytes::Bytes; use zstd::decode_all; @@ -7,7 +6,7 @@ use crate::{ decrypt::{DecryptFullBackend, DecryptReadBackend, DecryptWriteBackend}, FileType, ReadBackend, WriteBackend, }, - error::{CryptBackendErrorKind, RusticErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, id::Id, }; @@ -50,23 +49,32 @@ impl DecryptReadBackend for DryRunBackend { 
/// /// # Errors /// - /// * [`CryptBackendErrorKind::DecryptionNotSupportedForBackend`] - If the backend does not support decryption. - /// * [`CryptBackendErrorKind::DecodingZstdCompressedDataFailed`] - If decoding the zstd compressed data failed. + /// * If the backend does not support decryption. + /// * If decoding the zstd compressed data failed. /// /// # Returns /// /// The data read. - /// - /// [`CryptBackendErrorKind::DecryptionNotSupportedForBackend`]: crate::error::CryptBackendErrorKind::DecryptionNotSupportedForBackend - /// [`CryptBackendErrorKind::DecodingZstdCompressedDataFailed`]: crate::error::CryptBackendErrorKind::DecodingZstdCompressedDataFailed fn read_encrypted_full(&self, tpe: FileType, id: &Id) -> RusticResult { - let decrypted = - self.decrypt(&self.read_full(tpe, id).map_err(RusticErrorKind::Backend)?)?; + let decrypted = self.decrypt(&self.read_full(tpe, id)?)?; Ok(match decrypted.first() { Some(b'{' | b'[') => decrypted, // not compressed Some(2) => decode_all(&decrypted[1..]) - .map_err(CryptBackendErrorKind::DecodingZstdCompressedDataFailed)?, // 2 indicates compressed data following - _ => return Err(CryptBackendErrorKind::DecryptionNotSupportedForBackend.into()), + .map_err(|err| + RusticError::with_source( + ErrorKind::Internal, + "Decoding zstd compressed data failed. This can happen if the data is corrupted. Please check the backend for corruption and try again. You can also try to run `rustic check` to check for corruption.", + err + ) + ) + ?, // 2 indicates compressed data following + _ => { + return Err( + RusticError::new( + ErrorKind::Unsupported, + "Decryption not supported. The data is not in a supported format.", + )); + } } .into()) } @@ -77,11 +85,11 @@ impl ReadBackend for DryRunBackend { self.be.location() } - fn list_with_size(&self, tpe: FileType) -> Result> { + fn list_with_size(&self, tpe: FileType) -> RusticResult> { self.be.list_with_size(tpe) } - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { self.be.read_full(tpe, id) } @@ -92,7 +100,7 @@ impl ReadBackend for DryRunBackend { cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { self.be.read_partial(tpe, id, cacheable, offset, length) } } @@ -133,7 +141,7 @@ impl DecryptWriteBackend for DryRunBackend { } impl WriteBackend for DryRunBackend { - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { if self.dry_run { Ok(()) } else { @@ -141,7 +149,7 @@ impl WriteBackend for DryRunBackend { } } - fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> Result<()> { + fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> RusticResult<()> { if self.dry_run { Ok(()) } else { @@ -149,7 +157,7 @@ impl WriteBackend for DryRunBackend { } } - fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> Result<()> { + fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> RusticResult<()> { if self.dry_run { Ok(()) } else { diff --git a/crates/core/src/backend/hotcold.rs b/crates/core/src/backend/hotcold.rs index 75a1f750..e5306e2f 100644 --- a/crates/core/src/backend/hotcold.rs +++ b/crates/core/src/backend/hotcold.rs @@ -1,10 +1,10 @@ use std::sync::Arc; -use anyhow::Result; use bytes::Bytes; use crate::{ backend::{FileType, ReadBackend, WriteBackend}, + error::RusticResult, id::Id, }; @@ -45,11 +45,11 @@ impl ReadBackend for HotColdBackend { self.be.location() } - fn list_with_size(&self, tpe: FileType) -> Result> { + fn 
list_with_size(&self, tpe: FileType) -> RusticResult> { self.be.list_with_size(tpe) } - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { self.be_hot.read_full(tpe, id) } @@ -60,7 +60,7 @@ impl ReadBackend for HotColdBackend { cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { if cacheable || tpe != FileType::Pack { self.be_hot.read_partial(tpe, id, cacheable, offset, length) } else { @@ -72,25 +72,25 @@ impl ReadBackend for HotColdBackend { self.be.needs_warm_up() } - fn warm_up(&self, tpe: FileType, id: &Id) -> Result<()> { + fn warm_up(&self, tpe: FileType, id: &Id) -> RusticResult<()> { self.be.warm_up(tpe, id) } } impl WriteBackend for HotColdBackend { - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { self.be.create()?; self.be_hot.create() } - fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> Result<()> { + fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> RusticResult<()> { if tpe != FileType::Config && (cacheable || tpe != FileType::Pack) { self.be_hot.write_bytes(tpe, id, cacheable, buf.clone())?; } self.be.write_bytes(tpe, id, cacheable, buf) } - fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> Result<()> { + fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> RusticResult<()> { // First remove cold file self.be.remove(tpe, id, cacheable)?; if cacheable || tpe != FileType::Pack { diff --git a/crates/core/src/backend/ignore.rs b/crates/core/src/backend/ignore.rs index 4270851f..a3c5cf2d 100644 --- a/crates/core/src/backend/ignore.rs +++ b/crates/core/src/backend/ignore.rs @@ -1,4 +1,6 @@ #[cfg(not(windows))] +use std::num::TryFromIntError; +#[cfg(not(windows))] use std::os::unix::fs::{FileTypeExt, MetadataExt}; use std::{ @@ -6,8 +8,6 @@ use std::{ path::{Path, PathBuf}, }; -use serde_with::{serde_as, DisplayFromStr}; - use bytesize::ByteSize; #[cfg(not(windows))] use cached::proc_macro::cached; @@ -19,6 +19,7 @@ use ignore::{overrides::OverrideBuilder, DirEntry, Walk, WalkBuilder}; use log::warn; #[cfg(not(windows))] use nix::unistd::{Gid, Group, Uid, User}; +use serde_with::{serde_as, DisplayFromStr}; #[cfg(not(windows))] use crate::backend::node::ExtendedAttribute; @@ -28,9 +29,39 @@ use crate::{ node::{Metadata, Node, NodeType}, ReadSource, ReadSourceEntry, ReadSourceOpen, }, - error::{IgnoreErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, }; +/// [`IgnoreErrorKind`] describes the errors that can be returned by a Ignore action in Backends +#[derive(thiserror::Error, Debug, displaydoc::Display)] +pub enum IgnoreErrorKind { + /// Failed to get metadata for entry: `{source:?}` + FailedToGetMetadata { source: ignore::Error }, + #[cfg(all(not(windows), not(target_os = "openbsd")))] + /// Error getting xattrs for `{path:?}`: `{source:?}` + ErrorXattr { + path: PathBuf, + source: std::io::Error, + }, + /// Error reading link target for `{path:?}`: `{source:?}` + ErrorLink { + path: PathBuf, + source: std::io::Error, + }, + #[cfg(not(windows))] + /// Error converting ctime `{ctime}` and `ctime_nsec` `{ctime_nsec}` to Utc Timestamp: `{source:?}` + CtimeConversionToTimestampFailed { + ctime: i64, + ctime_nsec: i64, + source: TryFromIntError, + }, + #[cfg(not(windows))] + /// Error acquiring metadata for `{name}`: `{source:?}` + AcquiringMetadataFailed { name: String, source: ignore::Error }, +} + +pub(crate) type IgnoreResult = Result; + /// A [`LocalSource`] is 
a source from local paths which is used to be read from (i.e. to backup it). #[derive(Debug)] pub struct LocalSource { @@ -139,11 +170,9 @@ impl LocalSource { /// /// # Errors /// - /// * [`IgnoreErrorKind::GenericError`] - If the a glob pattern could not be added to the override builder. - /// * [`IgnoreErrorKind::FromIoError`] - If a glob file could not be read. - /// - /// [`IgnoreErrorKind::GenericError`]: crate::error::IgnoreErrorKind::GenericError - /// [`IgnoreErrorKind::FromIoError`]: crate::error::IgnoreErrorKind::FromIoError + /// * If the a glob pattern could not be added to the override builder. + /// * If a glob file could not be read. + #[allow(clippy::too_many_lines)] pub fn new( save_opts: LocalSourceSaveOptions, filter_opts: &LocalSourceFilterOptions, @@ -157,46 +186,87 @@ impl LocalSource { let mut override_builder = OverrideBuilder::new(""); + // FIXME: Refactor this to a function to be reused + // This is the same of `tree::NodeStreamer::new_with_glob()` for g in &filter_opts.globs { - _ = override_builder - .add(g) - .map_err(IgnoreErrorKind::GenericError)?; + _ = override_builder.add(g).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add glob pattern `{glob}` to override builder.", + err, + ) + .attach_context("glob", g.to_string()) + .ask_report() + })?; } for file in &filter_opts.glob_files { for line in std::fs::read_to_string(file) - .map_err(|err| IgnoreErrorKind::ErrorGlob { - file: file.into(), - source: err, + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to read string from glob file at `{glob_file}`", + err, + ) + .attach_context("glob_file", file.to_string()) + .ask_report() })? .lines() { - _ = override_builder - .add(line) - .map_err(IgnoreErrorKind::GenericError)?; + _ = override_builder.add(line).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add glob pattern line `{glob_pattern_line}` to override builder.", + err, + ) + .attach_context("glob_pattern_line", line.to_string()) + .ask_report() + })?; } } - _ = override_builder - .case_insensitive(true) - .map_err(IgnoreErrorKind::GenericError)?; + _ = override_builder.case_insensitive(true).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to set case insensitivity in override builder.", + err, + ) + .ask_report() + })?; for g in &filter_opts.iglobs { - _ = override_builder - .add(g) - .map_err(IgnoreErrorKind::GenericError)?; + _ = override_builder.add(g).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add iglob pattern `{iglob}` to override builder.", + err, + ) + .attach_context("iglob", g.to_string()) + .ask_report() + })?; } for file in &filter_opts.iglob_files { for line in std::fs::read_to_string(file) - .map_err(|err| IgnoreErrorKind::ErrorGlob { - file: file.into(), - source: err, + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to read string from iglob file at `{iglob_file}`", + err, + ) + .attach_context("iglob_file", file.to_string()) + .ask_report() })? 
.lines() { - _ = override_builder - .add(line) - .map_err(IgnoreErrorKind::GenericError)?; + _ = override_builder.add(line).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add iglob pattern line `{iglob_pattern_line}` to override builder.", + err, + ) + .attach_context("iglob_pattern_line", line.to_string()) + .ask_report() + })?; } } @@ -213,11 +283,14 @@ impl LocalSource { .sort_by_file_path(Path::cmp) .same_file_system(filter_opts.one_file_system) .max_filesize(filter_opts.exclude_larger_than.map(|s| s.as_u64())) - .overrides( - override_builder - .build() - .map_err(IgnoreErrorKind::GenericError)?, - ); + .overrides(override_builder.build().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to build matcher for a set of glob overrides.", + err, + ) + .ask_report() + })?); let exclude_if_present = filter_opts.exclude_if_present.clone(); if !filter_opts.exclude_if_present.is_empty() { @@ -255,17 +328,16 @@ impl ReadSourceOpen for OpenFile { /// /// # Errors /// - /// * [`IgnoreErrorKind::UnableToOpenFile`] - If the file could not be opened. - /// - /// [`IgnoreErrorKind::UnableToOpenFile`]: crate::error::IgnoreErrorKind::UnableToOpenFile + /// * If the file could not be opened. fn open(self) -> RusticResult { let path = self.0; File::open(&path).map_err(|err| { - IgnoreErrorKind::UnableToOpenFile { - file: path, - source: err, - } - .into() + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to open file at `{path}`. Please make sure the file exists and is accessible.", + err, + ) + .attach_context("path", path.display().to_string()) }) } } @@ -282,7 +354,7 @@ impl ReadSource for LocalSource { /// /// # Errors /// - /// If the size could not be determined. + /// * If the size could not be determined. fn size(&self) -> RusticResult> { let mut size = 0; for entry in self.builder.build() { @@ -330,11 +402,25 @@ impl Iterator for LocalSourceWalker { } .map(|e| { map_entry( - e.map_err(IgnoreErrorKind::GenericError)?, + e.map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to get next entry from walk iterator.", + err, + ) + .ask_report() + })?, self.save_opts.with_atime, self.save_opts.ignore_devid, ) - .map_err(Into::into) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to map Directory entry to ReadSourceEntry.", + err, + ) + .ask_report() + }) }) } } @@ -349,20 +435,19 @@ impl Iterator for LocalSourceWalker { /// /// # Errors /// -/// * [`IgnoreErrorKind::GenericError`] - If metadata could not be read. -/// * [`IgnoreErrorKind::FromIoError`] - If path of the entry could not be read. -/// -/// [`IgnoreErrorKind::GenericError`]: crate::error::IgnoreErrorKind::GenericError -/// [`IgnoreErrorKind::FromIoError`]: crate::error::IgnoreErrorKind::FromIoError +/// * If metadata could not be read. +/// * If path of the entry could not be read. 
#[cfg(windows)] #[allow(clippy::similar_names)] fn map_entry( entry: DirEntry, with_atime: bool, _ignore_devid: bool, -) -> RusticResult> { +) -> IgnoreResult> { let name = entry.file_name(); - let m = entry.metadata().map_err(IgnoreErrorKind::GenericError)?; + let m = entry + .metadata() + .map_err(|err| IgnoreErrorKind::FailedToGetMetadata { source: err })?; // TODO: Set them to suitable values let uid = None; @@ -473,7 +558,7 @@ fn get_group_by_gid(gid: u32) -> Option { } #[cfg(all(not(windows), target_os = "openbsd"))] -fn list_extended_attributes(path: &Path) -> RusticResult> { +fn list_extended_attributes(path: &Path) -> IgnoreResult> { Ok(vec![]) } @@ -485,9 +570,9 @@ fn list_extended_attributes(path: &Path) -> RusticResult> /// /// # Errors /// -/// * [`IgnoreErrorKind::ErrorXattr`] - if Xattr couldn't be listed or couldn't be read +/// * If Xattr couldn't be listed or couldn't be read #[cfg(all(not(windows), not(target_os = "openbsd")))] -fn list_extended_attributes(path: &Path) -> RusticResult> { +fn list_extended_attributes(path: &Path) -> IgnoreResult> { xattr::list(path) .map_err(|err| IgnoreErrorKind::ErrorXattr { path: path.to_path_buf(), @@ -502,7 +587,7 @@ fn list_extended_attributes(path: &Path) -> RusticResult> })?, }) }) - .collect::>>() + .collect::>>() } /// Maps a [`DirEntry`] to a [`ReadSourceEntry`]. @@ -515,11 +600,8 @@ fn list_extended_attributes(path: &Path) -> RusticResult> /// /// # Errors /// -/// * [`IgnoreErrorKind::GenericError`] - If metadata could not be read. -/// * [`IgnoreErrorKind::FromIoError`] - If the xattr of the entry could not be read. -/// -/// [`IgnoreErrorKind::GenericError`]: crate::error::IgnoreErrorKind::GenericError -/// [`IgnoreErrorKind::FromIoError`]: crate::error::IgnoreErrorKind::FromIoError +/// * If metadata could not be read. +/// * If the xattr of the entry could not be read. 
#[cfg(not(windows))] // map_entry: turn entry into (Path, Node) #[allow(clippy::similar_names)] @@ -527,9 +609,14 @@ fn map_entry( entry: DirEntry, with_atime: bool, ignore_devid: bool, -) -> RusticResult> { +) -> IgnoreResult> { let name = entry.file_name(); - let m = entry.metadata().map_err(IgnoreErrorKind::GenericError)?; + let m = entry + .metadata() + .map_err(|err| IgnoreErrorKind::AcquiringMetadataFailed { + name: name.to_string_lossy().to_string(), + source: err, + })?; let uid = m.uid(); let gid = m.gid(); @@ -551,9 +638,13 @@ fn map_entry( let ctime = Utc .timestamp_opt( m.ctime(), - m.ctime_nsec() - .try_into() - .map_err(IgnoreErrorKind::FromTryFromIntError)?, + m.ctime_nsec().try_into().map_err(|err| { + IgnoreErrorKind::CtimeConversionToTimestampFailed { + ctime: m.ctime(), + ctime_nsec: m.ctime_nsec(), + source: err, + } + })?, ) .single() .map(|dt| dt.with_timezone(&Local)); diff --git a/crates/core/src/backend/local_destination.rs b/crates/core/src/backend/local_destination.rs index 8c2ea2d4..bef3852b 100644 --- a/crates/core/src/backend/local_destination.rs +++ b/crates/core/src/backend/local_destination.rs @@ -4,6 +4,7 @@ use std::os::unix::fs::{symlink, PermissionsExt}; use std::{ fs::{self, File, OpenOptions}, io::{Read, Seek, SeekFrom, Write}, + num::TryFromIntError, path::{Path, PathBuf}, }; @@ -14,6 +15,8 @@ use filetime::{set_symlink_file_times, FileTime}; #[cfg(not(windows))] use log::warn; #[cfg(not(windows))] +use nix::errno::Errno; +#[cfg(not(windows))] use nix::sys::stat::{mknod, Mode, SFlag}; #[cfg(not(windows))] use nix::{ @@ -27,10 +30,82 @@ use crate::backend::ignore::mapper::map_mode_from_go; use crate::backend::node::NodeType; use crate::{ backend::node::{ExtendedAttribute, Metadata, Node}, - error::LocalDestinationErrorKind, - RusticResult, + error::{ErrorKind, RusticError, RusticResult}, }; +/// [`LocalDestinationErrorKind`] describes the errors that can be returned by an action on the filesystem in Backends +#[derive(thiserror::Error, Debug, displaydoc::Display)] +pub enum LocalDestinationErrorKind { + /// directory creation failed: `{0:?}` + DirectoryCreationFailed(std::io::Error), + /// file `{0:?}` should have a parent + FileDoesNotHaveParent(PathBuf), + /// `DeviceID` could not be converted to other type `{target}` of device `{device}`: `{source}` + DeviceIdConversionFailed { + target: String, + device: u64, + source: TryFromIntError, + }, + /// Length conversion failed for `{target}` of length `{length}`: `{source}` + LengthConversionFailed { + target: String, + length: u64, + source: TryFromIntError, + }, + /// `{0}` + #[error(transparent)] + #[cfg(not(windows))] + FromErrnoError(Errno), + /// listing xattrs on `{path:?}`: `{source:?}` + #[cfg(not(any(windows, target_os = "openbsd")))] + ListingXattrsFailed { + path: PathBuf, + source: std::io::Error, + }, + /// setting xattr `{name}` on `{filename:?}` with `{source:?}` + #[cfg(not(any(windows, target_os = "openbsd")))] + SettingXattrFailed { + name: String, + filename: PathBuf, + source: std::io::Error, + }, + /// getting xattr `{name}` on `{filename:?}` with `{source:?}` + #[cfg(not(any(windows, target_os = "openbsd")))] + GettingXattrFailed { + name: String, + filename: PathBuf, + source: std::io::Error, + }, + /// removing directories failed: `{0:?}` + DirectoryRemovalFailed(std::io::Error), + /// removing file failed: `{0:?}` + FileRemovalFailed(std::io::Error), + /// setting time metadata failed: `{0:?}` + SettingTimeMetadataFailed(std::io::Error), + /// opening file failed: `{0:?}` + 
OpeningFileFailed(std::io::Error), + /// setting file length failed: `{0:?}` + SettingFileLengthFailed(std::io::Error), + /// can't jump to position in file: `{0:?}` + CouldNotSeekToPositionInFile(std::io::Error), + /// couldn't write to buffer: `{0:?}` + CouldNotWriteToBuffer(std::io::Error), + /// reading exact length of file contents failed: `{0:?}` + ReadingExactLengthOfFileFailed(std::io::Error), + /// setting file permissions failed: `{0:?}` + #[cfg(not(windows))] + SettingFilePermissionsFailed(std::io::Error), + /// failed to symlink target `{linktarget:?}` from `{filename:?}` with `{source:?}` + #[cfg(not(windows))] + SymlinkingFailed { + linktarget: PathBuf, + filename: PathBuf, + source: std::io::Error, + }, +} + +pub(crate) type LocalDestinationResult = Result; + #[derive(Clone, Debug)] /// Local destination, used when restoring. pub struct LocalDestination { @@ -65,24 +140,35 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::DirectoryCreationFailed`] - If the directory could not be created. - /// - /// [`LocalDestinationErrorKind::DirectoryCreationFailed`]: crate::error::LocalDestinationErrorKind::DirectoryCreationFailed + /// * If the directory could not be created. // TODO: We should use `impl Into` here. we even use it in the body! pub fn new(path: &str, create: bool, expect_file: bool) -> RusticResult { let is_dir = path.ends_with('/'); let path: PathBuf = path.into(); let is_file = path.is_file() || (!path.is_dir() && !is_dir && expect_file); + // FIXME: Refactor logic to avoid duplication if create { if is_file { if let Some(path) = path.parent() { - fs::create_dir_all(path) - .map_err(LocalDestinationErrorKind::DirectoryCreationFailed)?; + fs::create_dir_all(path).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "The directory `{path}` could not be created.", + err, + ) + .attach_context("path", path.display().to_string()) + })?; } } else { - fs::create_dir_all(&path) - .map_err(LocalDestinationErrorKind::DirectoryCreationFailed)?; + fs::create_dir_all(&path).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "The directory `{path}` could not be created.", + err, + ) + .attach_context("path", path.display().to_string()) + })?; } } @@ -119,16 +205,14 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::DirectoryRemovalFailed`] - If the directory could not be removed. + /// * If the directory could not be removed. /// /// # Notes /// /// This will remove the directory recursively. - /// - /// [`LocalDestinationErrorKind::DirectoryRemovalFailed`]: crate::error::LocalDestinationErrorKind::DirectoryRemovalFailed - pub fn remove_dir(&self, dirname: impl AsRef) -> RusticResult<()> { - Ok(fs::remove_dir_all(dirname) - .map_err(LocalDestinationErrorKind::DirectoryRemovalFailed)?) + #[allow(clippy::unused_self)] + pub(crate) fn remove_dir(&self, dirname: impl AsRef) -> LocalDestinationResult<()> { + fs::remove_dir_all(dirname).map_err(LocalDestinationErrorKind::DirectoryRemovalFailed) } /// Remove the given file (relative to the base path) @@ -139,7 +223,7 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::FileRemovalFailed`] - If the file could not be removed. + /// * If the file could not be removed. /// /// # Notes /// @@ -147,10 +231,9 @@ impl LocalDestination { /// /// * If the file is a symlink, the symlink will be removed, not the file it points to. /// * If the file is a directory or device, this will fail. 
- /// - /// [`LocalDestinationErrorKind::FileRemovalFailed`]: crate::error::LocalDestinationErrorKind::FileRemovalFailed - pub fn remove_file(&self, filename: impl AsRef) -> RusticResult<()> { - Ok(fs::remove_file(filename).map_err(LocalDestinationErrorKind::FileRemovalFailed)?) + #[allow(clippy::unused_self)] + pub(crate) fn remove_file(&self, filename: impl AsRef) -> LocalDestinationResult<()> { + fs::remove_file(filename).map_err(LocalDestinationErrorKind::FileRemovalFailed) } /// Create the given directory (relative to the base path) @@ -161,14 +244,12 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::DirectoryCreationFailed`] - If the directory could not be created. + /// * If the directory could not be created. /// /// # Notes /// /// This will create the directory structure recursively. - /// - /// [`LocalDestinationErrorKind::DirectoryCreationFailed`]: crate::error::LocalDestinationErrorKind::DirectoryCreationFailed - pub fn create_dir(&self, item: impl AsRef) -> RusticResult<()> { + pub(crate) fn create_dir(&self, item: impl AsRef) -> LocalDestinationResult<()> { let dirname = self.path.join(item); fs::create_dir_all(dirname).map_err(LocalDestinationErrorKind::DirectoryCreationFailed)?; Ok(()) @@ -183,10 +264,12 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::SettingTimeMetadataFailed`] - If the times could not be set - /// - /// [`LocalDestinationErrorKind::SettingTimeMetadataFailed`]: crate::error::LocalDestinationErrorKind::SettingTimeMetadataFailed - pub fn set_times(&self, item: impl AsRef, meta: &Metadata) -> RusticResult<()> { + /// * If the times could not be set + pub(crate) fn set_times( + &self, + item: impl AsRef, + meta: &Metadata, + ) -> LocalDestinationResult<()> { let filename = self.path(item); if let Some(mtime) = meta.mtime { let atime = meta.atime.unwrap_or(mtime); @@ -212,8 +295,13 @@ impl LocalDestination { /// /// # Errors /// - /// If the user/group could not be set. - pub fn set_user_group(&self, _item: impl AsRef, _meta: &Metadata) -> RusticResult<()> { + /// * If the user/group could not be set. + #[allow(clippy::unused_self, clippy::unnecessary_wraps)] + pub(crate) fn set_user_group( + &self, + _item: impl AsRef, + _meta: &Metadata, + ) -> LocalDestinationResult<()> { // https://learn.microsoft.com/en-us/windows/win32/fileio/file-security-and-access-rights // https://microsoft.github.io/windows-docs-rs/doc/windows/Win32/Security/struct.SECURITY_ATTRIBUTES.html // https://microsoft.github.io/windows-docs-rs/doc/windows/Win32/Storage/FileSystem/struct.CREATEFILE2_EXTENDED_PARAMETERS.html#structfield.lpSecurityAttributes @@ -230,11 +318,13 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::FromErrnoError`] - If the user/group could not be set. - /// - /// [`LocalDestinationErrorKind::FromErrnoError`]: crate::error::LocalDestinationErrorKind::FromErrnoError + /// * If the user/group could not be set. #[allow(clippy::similar_names)] - pub fn set_user_group(&self, item: impl AsRef, meta: &Metadata) -> RusticResult<()> { + pub(crate) fn set_user_group( + &self, + item: impl AsRef, + meta: &Metadata, + ) -> LocalDestinationResult<()> { let filename = self.path(item); let user = meta.user.clone().and_then(uid_from_name); @@ -261,8 +351,13 @@ impl LocalDestination { /// /// # Errors /// - /// If the uid/gid could not be set. - pub fn set_uid_gid(&self, _item: impl AsRef, _meta: &Metadata) -> RusticResult<()> { + /// * If the uid/gid could not be set. 
+ #[allow(clippy::unused_self, clippy::unnecessary_wraps)] + pub(crate) fn set_uid_gid( + &self, + _item: impl AsRef, + _meta: &Metadata, + ) -> LocalDestinationResult<()> { Ok(()) } @@ -276,11 +371,13 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::FromErrnoError`] - If the uid/gid could not be set. - /// - /// [`LocalDestinationErrorKind::FromErrnoError`]: crate::error::LocalDestinationErrorKind::FromErrnoError + /// * If the uid/gid could not be set. #[allow(clippy::similar_names)] - pub fn set_uid_gid(&self, item: impl AsRef, meta: &Metadata) -> RusticResult<()> { + pub(crate) fn set_uid_gid( + &self, + item: impl AsRef, + meta: &Metadata, + ) -> LocalDestinationResult<()> { let filename = self.path(item); let uid = meta.uid.map(Uid::from_raw); @@ -302,8 +399,13 @@ impl LocalDestination { /// /// # Errors /// - /// If the permissions could not be set. - pub fn set_permission(&self, _item: impl AsRef, _node: &Node) -> RusticResult<()> { + /// * If the permissions could not be set. + #[allow(clippy::unused_self, clippy::unnecessary_wraps)] + pub(crate) fn set_permission( + &self, + _item: impl AsRef, + _node: &Node, + ) -> LocalDestinationResult<()> { Ok(()) } @@ -317,11 +419,13 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::SettingFilePermissionsFailed`] - If the permissions could not be set. - /// - /// [`LocalDestinationErrorKind::SettingFilePermissionsFailed`]: crate::error::LocalDestinationErrorKind::SettingFilePermissionsFailed + /// * If the permissions could not be set. #[allow(clippy::similar_names)] - pub fn set_permission(&self, item: impl AsRef, node: &Node) -> RusticResult<()> { + pub(crate) fn set_permission( + &self, + item: impl AsRef, + node: &Node, + ) -> LocalDestinationResult<()> { if node.is_symlink() { return Ok(()); } @@ -348,12 +452,13 @@ impl LocalDestination { /// /// # Errors /// - /// If the extended attributes could not be set. - pub fn set_extended_attributes( + /// * If the extended attributes could not be set. + #[allow(clippy::unused_self, clippy::unnecessary_wraps)] + pub(crate) fn set_extended_attributes( &self, _item: impl AsRef, _extended_attributes: &[ExtendedAttribute], - ) -> RusticResult<()> { + ) -> LocalDestinationResult<()> { Ok(()) } @@ -367,13 +472,9 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::ListingXattrsFailed`] - If listing the extended attributes failed. - /// * [`LocalDestinationErrorKind::GettingXattrFailed`] - If getting an extended attribute failed. - /// * [`LocalDestinationErrorKind::SettingXattrFailed`] - If setting an extended attribute failed. - /// - /// [`LocalDestinationErrorKind::ListingXattrsFailed`]: crate::error::LocalDestinationErrorKind::ListingXattrsFailed - /// [`LocalDestinationErrorKind::GettingXattrFailed`]: crate::error::LocalDestinationErrorKind::GettingXattrFailed - /// [`LocalDestinationErrorKind::SettingXattrFailed`]: crate::error::LocalDestinationErrorKind::SettingXattrFailed + /// * If listing the extended attributes failed. + /// * If getting an extended attribute failed. + /// * If setting an extended attribute failed. /// /// # Returns /// @@ -381,12 +482,12 @@ impl LocalDestination { /// /// # Panics /// - /// If the extended attributes could not be set. - pub fn set_extended_attributes( + /// * If the extended attributes could not be set. 
+ pub(crate) fn set_extended_attributes( &self, item: impl AsRef, extended_attributes: &[ExtendedAttribute], - ) -> RusticResult<()> { + ) -> LocalDestinationResult<()> { let filename = self.path(item); let mut done = vec![false; extended_attributes.len()]; @@ -449,21 +550,20 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::FileDoesNotHaveParent`] - If the file does not have a parent. - /// * [`LocalDestinationErrorKind::DirectoryCreationFailed`] - If the directory could not be created. - /// * [`LocalDestinationErrorKind::OpeningFileFailed`] - If the file could not be opened. - /// * [`LocalDestinationErrorKind::SettingFileLengthFailed`] - If the length of the file could not be set. + /// * If the file does not have a parent. + /// * If the directory could not be created. + /// * If the file could not be opened. + /// * If the length of the file could not be set. /// /// # Notes /// /// If the file exists, truncate it to the given length. (TODO: check if this is correct) /// If it doesn't exist, create a new (empty) one with given length. - /// - /// [`LocalDestinationErrorKind::FileDoesNotHaveParent`]: crate::error::LocalDestinationErrorKind::FileDoesNotHaveParent - /// [`LocalDestinationErrorKind::DirectoryCreationFailed`]: crate::error::LocalDestinationErrorKind::DirectoryCreationFailed - /// [`LocalDestinationErrorKind::OpeningFileFailed`]: crate::error::LocalDestinationErrorKind::OpeningFileFailed - /// [`LocalDestinationErrorKind::SettingFileLengthFailed`]: crate::error::LocalDestinationErrorKind::SettingFileLengthFailed - pub fn set_length(&self, item: impl AsRef, size: u64) -> RusticResult<()> { + pub(crate) fn set_length( + &self, + item: impl AsRef, + size: u64, + ) -> LocalDestinationResult<()> { let filename = self.path(item); let dir = filename .parent() @@ -492,12 +592,17 @@ impl LocalDestination { /// /// # Errors /// - /// If the special file could not be created. + /// * If the special file could not be created. /// /// # Returns /// /// Ok if the special file was created. - pub fn create_special(&self, _item: impl AsRef, _node: &Node) -> RusticResult<()> { + #[allow(clippy::unused_self, clippy::unnecessary_wraps)] + pub(crate) fn create_special( + &self, + _item: impl AsRef, + _node: &Node, + ) -> LocalDestinationResult<()> { Ok(()) } @@ -511,14 +616,14 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::SymlinkingFailed`] - If the symlink could not be created. - /// * [`LocalDestinationErrorKind::FromTryIntError`] - If the device could not be converted to the correct type. - /// * [`LocalDestinationErrorKind::FromErrnoError`] - If the device could not be created. - /// - /// [`LocalDestinationErrorKind::SymlinkingFailed`]: crate::error::LocalDestinationErrorKind::SymlinkingFailed - /// [`LocalDestinationErrorKind::FromTryIntError`]: crate::error::LocalDestinationErrorKind::FromTryIntError - /// [`LocalDestinationErrorKind::FromErrnoError`]: crate::error::LocalDestinationErrorKind::FromErrnoError - pub fn create_special(&self, item: impl AsRef, node: &Node) -> RusticResult<()> { + /// * If the symlink could not be created. + /// * If the device could not be converted to the correct type. + /// * If the device could not be created. 
+ pub(crate) fn create_special( + &self, + item: impl AsRef, + node: &Node, + ) -> LocalDestinationResult<()> { let filename = self.path(item); match &node.node_type { @@ -540,11 +645,21 @@ impl LocalDestination { )))] let device = *device; #[cfg(any(target_os = "macos", target_os = "openbsd"))] - let device = - i32::try_from(*device).map_err(LocalDestinationErrorKind::FromTryIntError)?; + let device = i32::try_from(*device).map_err(|err| { + LocalDestinationErrorKind::DeviceIdConversionFailed { + target: "i32".to_string(), + device: *device, + source: err, + } + })?; #[cfg(target_os = "freebsd")] - let device = - u32::try_from(*device).map_err(LocalDestinationErrorKind::FromTryIntError)?; + let device = u32::try_from(*device).map_err(|err| { + LocalDestinationErrorKind::DeviceIdConversionFailed { + target: "u32".to_string(), + device: *device, + source: err, + } + })?; mknod(&filename, SFlag::S_IFBLK, Mode::empty(), device) .map_err(LocalDestinationErrorKind::FromErrnoError)?; } @@ -556,11 +671,21 @@ impl LocalDestination { )))] let device = *device; #[cfg(any(target_os = "macos", target_os = "openbsd"))] - let device = - i32::try_from(*device).map_err(LocalDestinationErrorKind::FromTryIntError)?; + let device = i32::try_from(*device).map_err(|err| { + LocalDestinationErrorKind::DeviceIdConversionFailed { + target: "i32".to_string(), + device: *device, + source: err, + } + })?; #[cfg(target_os = "freebsd")] - let device = - u32::try_from(*device).map_err(LocalDestinationErrorKind::FromTryIntError)?; + let device = u32::try_from(*device).map_err(|err| { + LocalDestinationErrorKind::DeviceIdConversionFailed { + target: "u32".to_string(), + device: *device, + source: err, + } + })?; mknod(&filename, SFlag::S_IFCHR, Mode::empty(), device) .map_err(LocalDestinationErrorKind::FromErrnoError)?; } @@ -587,16 +712,16 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::OpeningFileFailed`] - If the file could not be opened. - /// * [`LocalDestinationErrorKind::CouldNotSeekToPositionInFile`] - If the file could not be seeked to the given position. - /// * [`LocalDestinationErrorKind::FromTryIntError`] - If the length of the file could not be converted to u32. - /// * [`LocalDestinationErrorKind::ReadingExactLengthOfFileFailed`] - If the length of the file could not be read. - /// - /// [`LocalDestinationErrorKind::OpeningFileFailed`]: crate::error::LocalDestinationErrorKind::OpeningFileFailed - /// [`LocalDestinationErrorKind::CouldNotSeekToPositionInFile`]: crate::error::LocalDestinationErrorKind::CouldNotSeekToPositionInFile - /// [`LocalDestinationErrorKind::FromTryIntError`]: crate::error::LocalDestinationErrorKind::FromTryIntError - /// [`LocalDestinationErrorKind::ReadingExactLengthOfFileFailed`]: crate::error::LocalDestinationErrorKind::ReadingExactLengthOfFileFailed - pub fn read_at(&self, item: impl AsRef, offset: u64, length: u64) -> RusticResult { + /// * If the file could not be opened. + /// * If the file could not be sought to the given position. + /// * If the length of the file could not be converted to u32. + /// * If the length of the file could not be read. 
+ pub(crate) fn read_at( + &self, + item: impl AsRef, + offset: u64, + length: u64, + ) -> LocalDestinationResult { let filename = self.path(item); let mut file = File::open(filename).map_err(LocalDestinationErrorKind::OpeningFileFailed)?; @@ -605,9 +730,13 @@ impl LocalDestination { .map_err(LocalDestinationErrorKind::CouldNotSeekToPositionInFile)?; let mut vec = vec![ 0; - length - .try_into() - .map_err(LocalDestinationErrorKind::FromTryIntError)? + length.try_into().map_err(|err| { + LocalDestinationErrorKind::LengthConversionFailed { + target: "u8".to_string(), + length, + source: err, + } + })? ]; file.read_exact(&mut vec) .map_err(LocalDestinationErrorKind::ReadingExactLengthOfFileFailed)?; @@ -649,18 +778,19 @@ impl LocalDestination { /// /// # Errors /// - /// * [`LocalDestinationErrorKind::OpeningFileFailed`] - If the file could not be opened. - /// * [`LocalDestinationErrorKind::CouldNotSeekToPositionInFile`] - If the file could not be seeked to the given position. - /// * [`LocalDestinationErrorKind::CouldNotWriteToBuffer`] - If the bytes could not be written to the file. + /// * If the file could not be opened. + /// * If the file could not be sought to the given position. + /// * If the bytes could not be written to the file. /// /// # Notes /// /// This will create the file if it doesn't exist. - /// - /// [`LocalDestinationErrorKind::OpeningFileFailed`]: crate::error::LocalDestinationErrorKind::OpeningFileFailed - /// [`LocalDestinationErrorKind::CouldNotSeekToPositionInFile`]: crate::error::LocalDestinationErrorKind::CouldNotSeekToPositionInFile - /// [`LocalDestinationErrorKind::CouldNotWriteToBuffer`]: crate::error::LocalDestinationErrorKind::CouldNotWriteToBuffer - pub fn write_at(&self, item: impl AsRef, offset: u64, data: &[u8]) -> RusticResult<()> { + pub(crate) fn write_at( + &self, + item: impl AsRef, + offset: u64, + data: &[u8], + ) -> LocalDestinationResult<()> { let filename = self.path(item); let mut file = OpenOptions::new() .create(true) diff --git a/crates/core/src/backend/node.rs b/crates/core/src/backend/node.rs index 94fb7995..1c6635d5 100644 --- a/crates/core/src/backend/node.rs +++ b/crates/core/src/backend/node.rs @@ -9,10 +9,9 @@ use std::{ #[cfg(not(windows))] use std::fmt::Write; #[cfg(not(windows))] -use std::os::unix::ffi::OsStrExt; - +use std::num::ParseIntError; #[cfg(not(windows))] -use crate::RusticResult; +use std::os::unix::ffi::OsStrExt; use chrono::{DateTime, Local}; use derive_more::Constructor; @@ -24,10 +23,67 @@ use serde_with::{ serde_as, skip_serializing_none, DefaultOnNull, }; +use crate::blob::{tree::TreeId, DataId}; + #[cfg(not(windows))] -use crate::error::NodeErrorKind; +/// [`NodeErrorKind`] describes the errors that can be returned by an action utilizing a node in Backends +#[derive(thiserror::Error, Debug, displaydoc::Display)] +#[non_exhaustive] +pub enum NodeErrorKind<'a> { + /// Unexpected EOF while parsing filename: `{file_name}` + #[cfg(not(windows))] + UnexpectedEOF { + /// The filename + file_name: String, + /// The remaining chars + chars: std::str::Chars<'a>, + }, + /// Invalid unicode + #[cfg(not(windows))] + InvalidUnicode { + /// The filename + file_name: String, + /// The unicode codepoint + unicode: u32, + /// The remaining chars + chars: std::str::Chars<'a>, + }, + /// Unrecognized Escape while parsing filename: `{file_name}` + #[cfg(not(windows))] + UnrecognizedEscape { + /// The filename + file_name: String, + /// The remaining chars + chars: std::str::Chars<'a>, + }, + /// Parsing hex chars {chars:?} failed 
for `{hex}` in filename: `{file_name}` : `{source}` + #[cfg(not(windows))] + ParsingHexFailed { + /// The filename + file_name: String, + /// The hex string + hex: String, + /// The remaining chars + chars: std::str::Chars<'a>, + /// The error that occurred + source: ParseIntError, + }, + /// Parsing unicode chars {chars:?} failed for `{target}` in filename: `{file_name}` : `{source}` + #[cfg(not(windows))] + ParsingUnicodeFailed { + /// The filename + file_name: String, + /// The target type + target: String, + /// The remaining chars + chars: std::str::Chars<'a>, + /// The error that occurred + source: ParseIntError, + }, +} -use crate::blob::{tree::TreeId, DataId}; +#[cfg(not(windows))] +pub(crate) type NodeResult<'a, T> = Result>; #[derive( Default, Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Constructor, PartialOrd, Ord, @@ -38,8 +94,8 @@ pub struct Node { /// /// # Warning /// - /// This contains an escaped variant of the name in order to handle non-unicode filenames. - /// Don't access this field directly, use the [`Node::name()`] method instead! + /// * This contains an escaped variant of the name in order to handle non-unicode filenames. + /// * Don't access this field directly, use the [`Node::name()`] method instead! pub name: String, #[serde(flatten)] /// Information about node type @@ -64,22 +120,25 @@ pub struct Node { } #[serde_as] -#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, strum::Display)] #[serde(tag = "type", rename_all = "lowercase")] /// Types a [`Node`] can have with type-specific additional information pub enum NodeType { /// Node is a regular file + #[strum(to_string = "file")] File, /// Node is a directory + #[strum(to_string = "dir")] Dir, /// Node is a symlink + #[strum(to_string = "symlink:{linktarget}")] Symlink { /// The target of the symlink /// /// # Warning /// - /// This contains the target only if it is a valid unicode target. - /// Don't access this field directly, use the [`NodeType::to_link()`] method instead! + /// * This contains the target only if it is a valid unicode target. + /// * Don't access this field directly, use the [`NodeType::to_link()`] method instead! linktarget: String, #[serde_as(as = "DefaultOnNull>>")] #[serde(default, skip_serializing_if = "Option::is_none")] @@ -89,20 +148,24 @@ pub enum NodeType { linktarget_raw: Option>, }, /// Node is a block device file + #[strum(to_string = "dev:{device}")] Dev { #[serde(default)] /// Device id device: u64, }, /// Node is a char device file + #[strum(to_string = "chardev:{device}")] Chardev { #[serde(default)] /// Device id device: u64, }, /// Node is a fifo + #[strum(to_string = "fifo")] Fifo, /// Node is a socket + #[strum(to_string = "socket")] Socket, } @@ -143,7 +206,7 @@ impl NodeType { /// /// # Panics /// - /// If called on a non-symlink node + /// * If called on a non-symlink node #[cfg(not(windows))] #[must_use] pub fn to_link(&self) -> &Path { @@ -163,7 +226,7 @@ impl NodeType { /// /// # Warning /// - /// Must be only called on `NodeType::Symlink`! + /// * Must be only called on `NodeType::Symlink`! 
/// /// # Panics /// @@ -295,7 +358,7 @@ impl Node { /// /// # Panics /// - /// If the name is not valid unicode + /// * If the name is not valid unicode pub fn name(&self) -> OsString { unescape_filename(&self.name).unwrap_or_else(|_| OsString::from_str(&self.name).unwrap()) } @@ -399,7 +462,7 @@ fn escape_filename(name: &OsStr) -> String { /// /// * `s` - The escaped filename // inspired by the enquote crate -fn unescape_filename(s: &str) -> RusticResult { +fn unescape_filename(s: &str) -> NodeResult<'_, OsString> { let mut chars = s.chars(); let mut u = Vec::new(); loop { @@ -408,7 +471,12 @@ fn unescape_filename(s: &str) -> RusticResult { Some(c) => { if c == '\\' { match chars.next() { - None => return Err(NodeErrorKind::UnexpectedEOF.into()), + None => { + return Err(NodeErrorKind::UnexpectedEOF { + file_name: s.to_string(), + chars, + }) + } Some(c) => match c { '\\' => u.push(b'\\'), '"' => u.push(b'"'), @@ -424,31 +492,62 @@ fn unescape_filename(s: &str) -> RusticResult { // hex 'x' => { let hex = take(&mut chars, 2); - u.push( - u8::from_str_radix(&hex, 16) - .map_err(NodeErrorKind::FromParseIntError)?, - ); + u.push(u8::from_str_radix(&hex, 16).map_err(|err| { + NodeErrorKind::ParsingHexFailed { + file_name: s.to_string(), + hex: hex.to_string(), + chars: chars.clone(), + source: err, + } + })?); } // unicode 'u' => { - let n = u32::from_str_radix(&take(&mut chars, 4), 16) - .map_err(NodeErrorKind::FromParseIntError)?; - let c = - std::char::from_u32(n).ok_or(NodeErrorKind::InvalidUnicode)?; + let n = u32::from_str_radix(&take(&mut chars, 4), 16).map_err( + |err| NodeErrorKind::ParsingUnicodeFailed { + file_name: s.to_string(), + target: "u32".to_string(), + chars: chars.clone(), + source: err, + }, + )?; + let c = std::char::from_u32(n).ok_or_else(|| { + NodeErrorKind::InvalidUnicode { + file_name: s.to_string(), + unicode: n, + chars: chars.clone(), + } + })?; let mut bytes = vec![0u8; c.len_utf8()]; _ = c.encode_utf8(&mut bytes); u.extend_from_slice(&bytes); } 'U' => { - let n = u32::from_str_radix(&take(&mut chars, 8), 16) - .map_err(NodeErrorKind::FromParseIntError)?; - let c = - std::char::from_u32(n).ok_or(NodeErrorKind::InvalidUnicode)?; + let n = u32::from_str_radix(&take(&mut chars, 8), 16).map_err( + |err| NodeErrorKind::ParsingUnicodeFailed { + file_name: s.to_string(), + target: "u32".to_string(), + chars: chars.clone(), + source: err, + }, + )?; + let c = std::char::from_u32(n).ok_or_else(|| { + NodeErrorKind::InvalidUnicode { + file_name: s.to_string(), + unicode: n, + chars: chars.clone(), + } + })?; let mut bytes = vec![0u8; c.len_utf8()]; _ = c.encode_utf8(&mut bytes); u.extend_from_slice(&bytes); } - _ => return Err(NodeErrorKind::UnrecognizedEscape.into()), + _ => { + return Err(NodeErrorKind::UnrecognizedEscape { + file_name: s.to_string(), + chars: chars.clone(), + }) + } }, } } else { diff --git a/crates/core/src/backend/stdin.rs b/crates/core/src/backend/stdin.rs index fc747936..c79543fc 100644 --- a/crates/core/src/backend/stdin.rs +++ b/crates/core/src/backend/stdin.rs @@ -6,7 +6,7 @@ use std::{ use crate::{ backend::{ReadSource, ReadSourceEntry}, - error::RusticResult, + error::{ErrorKind, RusticError, RusticResult}, }; /// The `StdinSource` is a `ReadSource` for stdin. @@ -37,6 +37,14 @@ impl ReadSource for StdinSource { /// Returns an iterator over the source. 
fn entries(&self) -> Self::Iter { let open = Some(stdin()); - once(ReadSourceEntry::from_path(self.path.clone(), open)) + once( + ReadSourceEntry::from_path(self.path.clone(), open).map_err(|err| { + RusticError::with_source( + ErrorKind::Backend, + "Failed to create ReadSourceEntry from Stdin", + err, + ) + }), + ) } } diff --git a/crates/core/src/backend/warm_up.rs b/crates/core/src/backend/warm_up.rs index 87e9e009..3e25d49a 100644 --- a/crates/core/src/backend/warm_up.rs +++ b/crates/core/src/backend/warm_up.rs @@ -1,10 +1,10 @@ use std::sync::Arc; -use anyhow::Result; use bytes::Bytes; use crate::{ backend::{FileType, ReadBackend, WriteBackend}, + error::RusticResult, id::Id, }; @@ -31,11 +31,11 @@ impl ReadBackend for WarmUpAccessBackend { self.be.location() } - fn list_with_size(&self, tpe: FileType) -> Result> { + fn list_with_size(&self, tpe: FileType) -> RusticResult> { self.be.list_with_size(tpe) } - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { self.be.read_full(tpe, id) } @@ -46,7 +46,7 @@ impl ReadBackend for WarmUpAccessBackend { cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { self.be.read_partial(tpe, id, cacheable, offset, length) } @@ -54,7 +54,7 @@ impl ReadBackend for WarmUpAccessBackend { true } - fn warm_up(&self, tpe: FileType, id: &Id) -> Result<()> { + fn warm_up(&self, tpe: FileType, id: &Id) -> RusticResult<()> { // warm up files by accessing them - error is ignored as we expect this to error out! _ = self.be.read_partial(tpe, id, false, 0, 1); Ok(()) @@ -62,15 +62,15 @@ impl ReadBackend for WarmUpAccessBackend { } impl WriteBackend for WarmUpAccessBackend { - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { self.be.create() } - fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> Result<()> { + fn write_bytes(&self, tpe: FileType, id: &Id, cacheable: bool, buf: Bytes) -> RusticResult<()> { self.be.write_bytes(tpe, id, cacheable, buf) } - fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> Result<()> { + fn remove(&self, tpe: FileType, id: &Id, cacheable: bool) -> RusticResult<()> { // First remove cold file self.be.remove(tpe, id, cacheable) } diff --git a/crates/core/src/blob.rs b/crates/core/src/blob.rs index 1ae3de33..5b0a4c5e 100644 --- a/crates/core/src/blob.rs +++ b/crates/core/src/blob.rs @@ -11,7 +11,18 @@ use crate::define_new_id_struct; pub const ALL_BLOB_TYPES: [BlobType; 2] = [BlobType::Tree, BlobType::Data]; #[derive( - Serialize, Deserialize, Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Enum, + Serialize, + Deserialize, + Clone, + Copy, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Enum, + derive_more::Display, )] /// The type a `blob` or a `packfile` can have pub enum BlobType { diff --git a/crates/core/src/blob/packer.rs b/crates/core/src/blob/packer.rs index 62c82759..a02b57c4 100644 --- a/crates/core/src/blob/packer.rs +++ b/crates/core/src/blob/packer.rs @@ -1,6 +1,3 @@ -use integer_sqrt::IntegerSquareRoot; -use log::warn; - use std::{ num::NonZeroU32, sync::{Arc, RwLock}, @@ -10,6 +7,8 @@ use std::{ use bytes::{Bytes, BytesMut}; use chrono::Local; use crossbeam_channel::{bounded, Receiver, Sender}; +use integer_sqrt::IntegerSquareRoot; +use log::warn; use pariter::{scope, IteratorExt}; use crate::{ @@ -19,7 +18,7 @@ use crate::{ }, blob::{BlobId, BlobType}, crypto::{hasher::hash, CryptoKey}, - error::{PackerErrorKind, RusticErrorKind, RusticResult}, + 
error::{ErrorKind, RusticError, RusticResult}, index::indexer::SharedIndexer, repofile::{ configfile::ConfigFile, @@ -29,6 +28,33 @@ use crate::{ }, }; +/// [`PackerErrorKind`] describes the errors that can be returned for a Packer +#[derive(thiserror::Error, Debug, displaydoc::Display)] +#[non_exhaustive] +pub enum PackerErrorKind { + /// Conversion from `{from}` to `{to}` failed: `{source}` + Conversion { + to: &'static str, + from: &'static str, + source: std::num::TryFromIntError, + }, + /// Sending crossbeam message failed: `size_limit`: `{size_limit:?}`, `id`: `{id:?}`, `data`: `{data:?}` : `{source}` + SendingCrossbeamMessage { + size_limit: Option, + id: BlobId, + data: Bytes, + source: crossbeam_channel::SendError<(Bytes, BlobId, Option)>, + }, + /// Sending crossbeam data message failed: `data`: `{data:?}`, `index_pack`: `{index_pack:?}` : `{source}` + SendingCrossbeamDataMessage { + data: Bytes, + index_pack: IndexPack, + source: crossbeam_channel::SendError<(Bytes, IndexPack)>, + }, +} + +pub(crate) type PackerResult = Result; + pub(super) mod constants { use std::time::Duration; @@ -144,7 +170,7 @@ impl PackSizer { /// /// # Panics /// - /// If the size is too large + /// * If the size is too large fn add_size(&mut self, added: u32) { self.current_size += u64::from(added); } @@ -188,11 +214,8 @@ impl Packer { /// /// # Errors /// - /// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. - /// * [`PackerErrorKind::IntConversionFailed`] - If converting the data length to u64 fails - /// - /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed - /// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed + /// * If sending the message to the raw packer fails. + /// * If converting the data length to u64 fails #[allow(clippy::unnecessary_wraps)] pub fn new( be: BE, @@ -249,7 +272,7 @@ impl Packer { |(_, id, _, _, _)| !indexer.read().unwrap().has(id), ) }) - .try_for_each(|item: RusticResult<_>| { + .try_for_each(|item: RusticResult<_>| -> RusticResult<()> { let (data, id, data_len, ul, size_limit) = item?; raw_packer .write() @@ -274,12 +297,18 @@ impl Packer { /// /// # Errors /// - /// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. - /// - /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed + /// * If sending the message to the raw packer fails. pub fn add(&self, data: Bytes, id: BlobId) -> RusticResult<()> { // compute size limit based on total size and size bounds - self.add_with_sizelimit(data, id, None) + self.add_with_sizelimit(data, id, None).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add blob `{id}` to packfile.", + err, + ) + .attach_context("id", id.to_string()) + .ask_report() + }) } /// Adds the blob to the packfile, allows specifying a size limit for the pack file @@ -292,18 +321,21 @@ impl Packer { /// /// # Errors /// - /// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. - /// - /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed + /// * If sending the message to the raw packer fails. 
fn add_with_sizelimit( &self, data: Bytes, id: BlobId, size_limit: Option, - ) -> RusticResult<()> { + ) -> PackerResult<()> { self.sender - .send((data, id, size_limit)) - .map_err(PackerErrorKind::SendingCrossbeamMessageFailed)?; + .send((data.clone(), id, size_limit)) + .map_err(|err| PackerErrorKind::SendingCrossbeamMessage { + size_limit, + id, + data, + source: err, + })?; Ok(()) } @@ -319,8 +351,8 @@ impl Packer { /// /// # Errors /// - /// If the blob is already present in the index - /// If sending the message to the raw packer fails. + /// * If the blob is already present in the index + /// * If sending the message to the raw packer fails. fn add_raw( &self, data: &[u8], @@ -347,12 +379,14 @@ impl Packer { /// /// # Panics /// - /// If the channel could not be dropped + /// * If the channel could not be dropped pub fn finalize(self) -> RusticResult { // cancel channel drop(self.sender); // wait for items in channel to be processed - self.finish.recv().unwrap() + self.finish + .recv() + .expect("Should be able to receive from channel to finalize packer.") } } @@ -377,7 +411,7 @@ impl PackerStats { /// /// # Panics /// - /// If the blob type is invalid + /// * If the blob type is invalid pub fn apply(self, summary: &mut SnapshotSummary, tpe: BlobType) { summary.data_added += self.data; summary.data_added_packed += self.data_packed; @@ -476,10 +510,16 @@ impl RawPacker { /// /// # Errors /// - /// If the packfile could not be saved + /// * If the packfile could not be saved fn finalize(&mut self) -> RusticResult { - self.save()?; + self.save().map_err(|err| { + err.overwrite_kind(ErrorKind::Internal) + .prepend_guidance_line("Failed to save packfile. Data may be lost.") + .ask_report() + })?; + self.file_writer.take().unwrap().finalize()?; + Ok(std::mem::take(&mut self.stats)) } @@ -492,11 +532,15 @@ impl RawPacker { /// # Returns /// /// The number of bytes written. 
- fn write_data(&mut self, data: &[u8]) -> RusticResult { + fn write_data(&mut self, data: &[u8]) -> PackerResult { let len = data .len() .try_into() - .map_err(PackerErrorKind::IntConversionFailed)?; + .map_err(|err| PackerErrorKind::Conversion { + to: "u32", + from: "usize", + source: err, + })?; self.file.extend_from_slice(data); self.size += len; Ok(len) @@ -514,9 +558,7 @@ impl RawPacker { /// /// # Errors /// - /// * [`PackerErrorKind::IntConversionFailed`] - If converting the data length to u64 fails - /// - /// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed + /// * If converting the data length to u64 fails fn add_raw( &mut self, data: &[u8], @@ -529,18 +571,38 @@ impl RawPacker { return Ok(()); } self.stats.blobs += 1; + self.stats.data += data_len; - let data_len_packed: u64 = data - .len() - .try_into() - .map_err(PackerErrorKind::IntConversionFailed)?; + + let data_len_packed: u64 = data.len().try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert data length `{length}` to u64.", + err, + ) + .attach_context("length", data.len().to_string()) + })?; + self.stats.data_packed += data_len_packed; let size_limit = size_limit.unwrap_or_else(|| self.pack_sizer.pack_size()); + let offset = self.size; - let len = self.write_data(data)?; + + let len = self.write_data(data).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to write data to packfile for blob `{id}`.", + err, + ) + .attach_context("id", id.to_string()) + .attach_context("size_limit", size_limit.to_string()) + .attach_context("data_length_packed", data_len_packed.to_string()) + })?; + self.index .add(*id, self.blob_type, offset, len, uncompressed_length); + self.count += 1; // check if PackFile needs to be saved @@ -548,6 +610,7 @@ impl RawPacker { warn!("couldn't get elapsed time from system time: {err:?}"); Duration::ZERO }); + if self.count >= constants::MAX_COUNT || self.size >= size_limit || elapsed >= constants::MAX_AGE @@ -565,26 +628,64 @@ impl RawPacker { /// /// # Errors /// - /// * [`PackerErrorKind::IntConversionFailed`] - If converting the header length to u32 fails - /// * [`PackFileErrorKind::WritingBinaryRepresentationFailed`] - If the header could not be written - /// - /// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed - /// [`PackFileErrorKind::WritingBinaryRepresentationFailed`]: crate::error::PackFileErrorKind::WritingBinaryRepresentationFailed + /// * If converting the header length to u32 fails + /// * If the header could not be written fn write_header(&mut self) -> RusticResult<()> { // compute the pack header - let data = PackHeaderRef::from_index_pack(&self.index).to_binary()?; + let data = PackHeaderRef::from_index_pack(&self.index) + .to_binary() + .map_err(|err| -> Box { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert pack header `{index_pack_id}` to binary representation.", + err, + ) + .attach_context("index_pack_id", self.index.id.to_string()) + })?; // encrypt and write to pack file let data = self.be.key().encrypt_data(&data)?; - let headerlen = data - .len() - .try_into() - .map_err(PackerErrorKind::IntConversionFailed)?; - _ = self.write_data(&data)?; + let headerlen: u32 = data.len().try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert header length `{length}` to u32.", + err, + ) + .attach_context("length", data.len().to_string()) + })?; + + // write header 
to pack file + _ = self.write_data(&data).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to write header with length `{length}` to packfile.", + err, + ) + .attach_context("length", headerlen.to_string()) + })?; + + // convert header length to binary representation + let binary_repr = PackHeaderLength::from_u32(headerlen) + .to_binary() + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert header length `{length}` to binary representation.", + err, + ) + .attach_context("length", headerlen.to_string()) + })?; // finally write length of header unencrypted to pack file - _ = self.write_data(&PackHeaderLength::from_u32(headerlen).to_binary()?)?; + _ = self.write_data(&binary_repr).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to write header length `{length}` to packfile.", + err, + ) + .attach_context("length", headerlen.to_string()) + })?; Ok(()) } @@ -597,11 +698,8 @@ impl RawPacker { /// /// # Errors /// - /// * [`PackerErrorKind::IntConversionFailed`] - If converting the header length to u32 fails - /// * [`PackFileErrorKind::WritingBinaryRepresentationFailed`] - If the header could not be written - /// - /// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed - /// [`PackFileErrorKind::WritingBinaryRepresentationFailed`]: crate::error::PackFileErrorKind::WritingBinaryRepresentationFailed + /// * If converting the header length to u32 fails + /// * If the header could not be written fn save(&mut self) -> RusticResult<()> { if self.size == 0 { return Ok(()); @@ -615,7 +713,14 @@ impl RawPacker { self.file_writer .as_ref() .unwrap() - .send((file.into(), index))?; + .send((file.into(), index)) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to send packfile to file writer.", + err, + ) + })?; Ok(()) } @@ -645,8 +750,7 @@ impl FileWriterHandle { let (file, id, mut index) = load; index.id = id; self.be - .write_bytes(FileType::Pack, &id, self.cacheable, file) - .map_err(RusticErrorKind::Backend)?; + .write_bytes(FileType::Pack, &id, self.cacheable, file)?; index.time = Some(Local::now()); Ok(index) } @@ -718,10 +822,14 @@ impl Actor { /// # Errors /// /// If sending the message to the actor fails. 
- fn send(&self, load: (Bytes, IndexPack)) -> RusticResult<()> { - self.sender - .send(load) - .map_err(PackerErrorKind::SendingCrossbeamMessageFailedForIndexPack)?; + fn send(&self, load: (Bytes, IndexPack)) -> PackerResult<()> { + self.sender.send(load.clone()).map_err(|err| { + PackerErrorKind::SendingCrossbeamDataMessage { + data: load.0, + index_pack: load.1, + source: err, + } + })?; Ok(()) } @@ -729,7 +837,7 @@ impl Actor { /// /// # Panics /// - /// If the receiver is not present + /// * If the receiver is not present fn finalize(self) -> RusticResult<()> { // cancel channel drop(self.sender); @@ -773,7 +881,7 @@ impl Repacker { /// /// # Errors /// - /// If the Packer could not be created + /// * If the Packer could not be created pub fn new( be: BE, blob_type: BlobType, @@ -799,26 +907,33 @@ impl Repacker { /// /// # Errors /// - /// If the blob could not be added - /// If reading the blob from the backend fails + /// * If the blob could not be added + /// * If reading the blob from the backend fails pub fn add_fast(&self, pack_id: &PackId, blob: &IndexBlob) -> RusticResult<()> { - let data = self - .be - .read_partial( - FileType::Pack, - pack_id, - blob.tpe.is_cacheable(), - blob.offset, - blob.length, - ) - .map_err(RusticErrorKind::Backend)?; - self.packer.add_raw( - &data, - &blob.id, - 0, - blob.uncompressed_length, - Some(self.size_limit), + let data = self.be.read_partial( + FileType::Pack, + pack_id, + blob.tpe.is_cacheable(), + blob.offset, + blob.length, )?; + + self.packer + .add_raw( + &data, + &blob.id, + 0, + blob.uncompressed_length, + Some(self.size_limit), + ) + .map_err(|err| { + err.overwrite_kind(ErrorKind::Internal) + .prepend_guidance_line( + "Failed to fast-add (unchecked) blob `{blob_id}` to packfile.", + ) + .attach_context("blob_id", blob.id.to_string()) + })?; + Ok(()) } @@ -831,8 +946,8 @@ impl Repacker { /// /// # Errors /// - /// If the blob could not be added - /// If reading the blob from the backend fails + /// * If the blob could not be added + /// * If reading the blob from the backend fails pub fn add(&self, pack_id: &PackId, blob: &IndexBlob) -> RusticResult<()> { let data = self.be.read_encrypted_partial( FileType::Pack, @@ -842,8 +957,17 @@ impl Repacker { blob.length, blob.uncompressed_length, )?; + self.packer - .add_with_sizelimit(data, blob.id, Some(self.size_limit))?; + .add_with_sizelimit(data, blob.id, Some(self.size_limit)) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add blob to packfile.", + err, + ) + })?; + Ok(()) } diff --git a/crates/core/src/blob/tree.rs b/crates/core/src/blob/tree.rs index ffd03577..e28d079d 100644 --- a/crates/core/src/blob/tree.rs +++ b/crates/core/src/blob/tree.rs @@ -4,7 +4,7 @@ use std::{ ffi::{OsStr, OsString}, mem, path::{Component, Path, PathBuf, Prefix}, - str, + str::{self, Utf8Error}, }; use crossbeam_channel::{bounded, unbounded, Receiver, Sender}; @@ -12,7 +12,6 @@ use derivative::Derivative; use derive_setters::Setters; use ignore::overrides::{Override, OverrideBuilder}; use ignore::Match; - use serde::{Deserialize, Deserializer}; use serde_derive::Serialize; @@ -23,13 +22,32 @@ use crate::{ }, blob::BlobType, crypto::hasher::hash, - error::{RusticResult, TreeErrorKind}, + error::{ErrorKind, RusticError, RusticResult}, impl_blobid, index::ReadGlobalIndex, progress::Progress, repofile::snapshotfile::SnapshotSummary, }; +/// [`TreeErrorKind`] describes the errors that can come up dealing with Trees +#[derive(thiserror::Error, Debug, displaydoc::Display)] 
+#[non_exhaustive] +pub enum TreeErrorKind { + /// path should not contain current or parent dir + ContainsCurrentOrParentDirectory, + /// `serde_json` couldn't serialize the tree: `{0:?}` + SerializingTreeFailed(serde_json::Error), + /// slice is not UTF-8: `{0:?}` + PathIsNotUtf8Conform(Utf8Error), + /// Error `{kind}` in tree streamer: `{source}` + Channel { + kind: &'static str, + source: Box, + }, +} + +pub(crate) type TreeResult = Result; + pub(super) mod constants { /// The maximum number of trees that are loaded in parallel pub(super) const MAX_TREE_LOADER: usize = 4; @@ -81,10 +99,15 @@ impl Tree { /// # Returns /// /// A tuple of the serialized tree as `Vec` and the tree's ID - pub(crate) fn serialize(&self) -> RusticResult<(Vec, TreeId)> { + pub(crate) fn serialize(&self) -> TreeResult<(Vec, TreeId)> { let mut chunk = serde_json::to_vec(&self).map_err(TreeErrorKind::SerializingTreeFailed)?; - chunk.push(b'\n'); // for whatever reason, restic adds a newline, so to be compatible... + // # COMPATIBILITY + // + // We add a newline to be compatible with `restic` here + chunk.push(b'\n'); + let id = hash(&chunk).into(); + Ok((chunk, id)) } @@ -97,15 +120,12 @@ impl Tree { /// /// # Errors /// - /// * [`TreeErrorKind::BlobIdNotFound`] - If the tree ID is not found in the backend. - /// * [`TreeErrorKind::DeserializingTreeFailed`] - If deserialization fails. + /// * If the tree ID is not found in the backend. + /// * If deserialization fails. /// /// # Returns /// /// The deserialized tree. - /// - /// [`TreeErrorKind::BlobIdNotFound`]: crate::error::TreeErrorKind::BlobIdNotFound - /// [`TreeErrorKind::DeserializingTreeFailed`]: crate::error::TreeErrorKind::DeserializingTreeFailed pub(crate) fn from_backend( be: &impl DecryptReadBackend, index: &impl ReadGlobalIndex, @@ -113,10 +133,25 @@ impl Tree { ) -> RusticResult { let data = index .get_tree(&id) - .ok_or_else(|| TreeErrorKind::BlobIdNotFound(id))? + .ok_or_else(|| { + RusticError::new( + ErrorKind::Internal, + "Tree ID `{tree_id}` not found in index", + ) + .attach_context("tree_id", id.to_string()) + })? .read_data(be)?; - Ok(serde_json::from_slice(&data).map_err(TreeErrorKind::DeserializingTreeFailed)?) + let tree = serde_json::from_slice(&data).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to deserialize tree from JSON.", + err, + ) + .ask_report() + })?; + + Ok(tree) } /// Creates a new node from a path. @@ -129,13 +164,9 @@ impl Tree { /// /// # Errors /// - /// * [`TreeErrorKind::NotADirectory`] - If the path is not a directory. - /// * [`TreeErrorKind::PathNotFound`] - If the path is not found. - /// * [`TreeErrorKind::PathIsNotUtf8Conform`] - If the path is not UTF-8 conform. - /// - /// [`TreeErrorKind::NotADirectory`]: crate::error::TreeErrorKind::NotADirectory - /// [`TreeErrorKind::PathNotFound`]: crate::error::TreeErrorKind::PathNotFound - /// [`TreeErrorKind::PathIsNotUtf8Conform`]: crate::error::TreeErrorKind::PathIsNotUtf8Conform + /// * If the path is not a directory. + /// * If the path is not found. + /// * If the path is not UTF-8 conform. pub(crate) fn node_from_path( be: &impl DecryptReadBackend, index: &impl ReadGlobalIndex, @@ -146,16 +177,30 @@ impl Tree { node.subtree = Some(id); for p in path.components() { - if let Some(p) = comp_to_osstr(p)? 
{ - let id = node - .subtree - .ok_or_else(|| TreeErrorKind::NotADirectory(p.clone()))?; + if let Some(p) = comp_to_osstr(p).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert Path component `{path}` to OsString.", + err, + ) + .attach_context("path", path.display().to_string()) + .ask_report() + })? { + let id = node.subtree.ok_or_else(|| { + RusticError::new(ErrorKind::Internal, "Node `{node}` is not a directory.") + .attach_context("node", p.to_string_lossy()) + .ask_report() + })?; let tree = Self::from_backend(be, index, id)?; node = tree .nodes .into_iter() .find(|node| node.name() == p) - .ok_or_else(|| TreeErrorKind::PathNotFound(p.clone()))?; + .ok_or_else(|| { + RusticError::new(ErrorKind::Internal, "Node `{node}` not found in tree.") + .attach_context("node", p.to_string_lossy()) + .ask_report() + })?; } } @@ -193,9 +238,14 @@ impl Tree { let node_idx = nodes.entry(node).or_insert(new_idx); Some(*node_idx) } else { - let id = node - .subtree - .ok_or_else(|| TreeErrorKind::NotADirectory(path_comp[idx].clone()))?; + let id = node.subtree.ok_or_else(|| { + RusticError::new( + ErrorKind::Internal, + "Subtree ID not found for node `{node}`", + ) + .attach_context("node", path_comp[idx].to_string_lossy()) + .ask_report() + })?; find_node_from_component( be, @@ -217,7 +267,16 @@ impl Tree { let path_comp: Vec<_> = path .components() .filter_map(|p| comp_to_osstr(p).transpose()) - .collect::>()?; + .collect::>() + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert Path component `{path}` to OsString.", + err, + ) + .attach_context("path", path.display().to_string()) + .ask_report() + })?; // caching all results let mut results_cache = vec![BTreeMap::new(); path_comp.len()]; @@ -289,9 +348,15 @@ impl Tree { for node in tree.nodes { let node_path = path.join(node.name()); if node.is_dir() { - let id = node - .subtree - .ok_or_else(|| TreeErrorKind::NotADirectory(node.name()))?; + let id = node.subtree.ok_or_else(|| { + RusticError::new( + ErrorKind::Internal, + "Subtree ID not found for node `{node}`", + ) + .attach_context("node", node.name().to_string_lossy()) + .ask_report() + })?; + result.append(&mut find_matching_nodes_recursive( be, index, id, &node_path, state, matches, )?); @@ -361,12 +426,9 @@ pub struct FindMatches { /// /// # Errors /// -/// * [`TreeErrorKind::ContainsCurrentOrParentDirectory`] - If the component is a current or parent directory. -/// * [`TreeErrorKind::PathIsNotUtf8Conform`] - If the component is not UTF-8 conform. -/// -/// [`TreeErrorKind::ContainsCurrentOrParentDirectory`]: crate::error::TreeErrorKind::ContainsCurrentOrParentDirectory -/// [`TreeErrorKind::PathIsNotUtf8Conform`]: crate::error::TreeErrorKind::PathIsNotUtf8Conform -pub(crate) fn comp_to_osstr(p: Component<'_>) -> RusticResult> { +/// * If the component is a current or parent directory. +/// * If the component is not UTF-8 conform. +pub(crate) fn comp_to_osstr(p: Component<'_>) -> TreeResult> { let s = match p { Component::RootDir => None, Component::Prefix(p) => match p.kind() { @@ -378,7 +440,7 @@ pub(crate) fn comp_to_osstr(p: Component<'_>) -> RusticResult> ), }, Component::Normal(p) => Some(p.to_os_string()), - _ => return Err(TreeErrorKind::ContainsCurrentOrParentDirectory.into()), + _ => return Err(TreeErrorKind::ContainsCurrentOrParentDirectory), }; Ok(s) } @@ -467,12 +529,8 @@ where /// /// # Errors /// - /// * [`TreeErrorKind::BlobIdNotFound`] - If the tree ID is not found in the backend. 
- /// * [`TreeErrorKind::DeserializingTreeFailed`] - If deserialization fails. - /// - /// [`TreeErrorKind::BlobIdNotFound`]: crate::error::TreeErrorKind::BlobIdNotFound - /// [`TreeErrorKind::DeserializingTreeFailed`]: crate::error::TreeErrorKind::DeserializingTreeFailed - #[allow(unused)] + /// * If the tree ID is not found in the backend. + /// * If deserialization fails. pub fn new(be: BE, index: &'a I, node: &Node) -> RusticResult { Self::new_streamer(be, index, node, None, true) } @@ -488,11 +546,8 @@ where /// /// # Errors /// - /// * [`TreeErrorKind::BlobIdNotFound`] - If the tree ID is not found in the backend. - /// * [`TreeErrorKind::DeserializingTreeFailed`] - If deserialization fails. - /// - /// [`TreeErrorKind::BlobIdNotFound`]: crate::error::TreeErrorKind::BlobIdNotFound - /// [`TreeErrorKind::DeserializingTreeFailed`]: crate::error::TreeErrorKind::DeserializingTreeFailed + /// * If the tree ID is not found in the backend. + /// * If deserialization fails. fn new_streamer( be: BE, index: &'a I, @@ -517,6 +572,7 @@ where recursive, }) } + /// Creates a new `NodeStreamer` with glob patterns. /// /// # Arguments @@ -528,11 +584,8 @@ where /// /// # Errors /// - /// * [`TreeErrorKind::BuildingNodeStreamerFailed`] - If building the streamer fails. - /// * [`TreeErrorKind::ReadingFileStringFromGlobsFailed`] - If reading a glob file fails. - /// - /// [`TreeErrorKind::BuildingNodeStreamerFailed`]: crate::error::TreeErrorKind::BuildingNodeStreamerFailed - /// [`TreeErrorKind::ReadingFileStringFromGlobsFailed`]: crate::error::TreeErrorKind::ReadingFileStringFromGlobsFailed + /// * If building the streamer fails. + /// * If reading a glob file fails. pub fn new_with_glob( be: BE, index: &'a I, @@ -541,45 +594,97 @@ where ) -> RusticResult { let mut override_builder = OverrideBuilder::new(""); + // FIXME: Refactor this to a function to be reused + // This is the same of `backend::ignore::Localsource::new` for g in &opts.glob { - _ = override_builder - .add(g) - .map_err(TreeErrorKind::BuildingNodeStreamerFailed)?; + _ = override_builder.add(g).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add glob pattern `{glob}` to override builder.", + err, + ) + .attach_context("glob", g.to_string()) + .ask_report() + })?; } for file in &opts.glob_file { for line in std::fs::read_to_string(file) - .map_err(TreeErrorKind::ReadingFileStringFromGlobsFailed)? + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to read string from glob file `{glob_file}` ", + err, + ) + .attach_context("glob_file", file.to_string()) + .ask_report() + })? 
.lines() { - _ = override_builder - .add(line) - .map_err(TreeErrorKind::BuildingNodeStreamerFailed)?; + _ = override_builder.add(line).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add glob pattern line `{glob_pattern_line}` to override builder.", + err, + ) + .attach_context("glob_pattern_line", line.to_string()) + .ask_report() + })?; } } - _ = override_builder - .case_insensitive(true) - .map_err(TreeErrorKind::BuildingNodeStreamerFailed)?; + _ = override_builder.case_insensitive(true).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to set case insensitivity in override builder.", + err, + ) + .ask_report() + })?; for g in &opts.iglob { - _ = override_builder - .add(g) - .map_err(TreeErrorKind::BuildingNodeStreamerFailed)?; + _ = override_builder.add(g).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add iglob pattern `{iglob}` to override builder.", + err, + ) + .attach_context("iglob", g.to_string()) + .ask_report() + })?; } for file in &opts.iglob_file { for line in std::fs::read_to_string(file) - .map_err(TreeErrorKind::ReadingFileStringFromGlobsFailed)? + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to read string from iglob file `{iglob_file}`", + err, + ) + .attach_context("iglob_file", file.to_string()) + .ask_report() + })? .lines() { - _ = override_builder - .add(line) - .map_err(TreeErrorKind::BuildingNodeStreamerFailed)?; + _ = override_builder.add(line).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add iglob pattern line `{iglob_pattern_line}` to override builder.", + err, + ) + .attach_context("iglob_pattern_line", line.to_string()) + .ask_report() + })?; } } - let overrides = override_builder - .build() - .map_err(TreeErrorKind::BuildingNodeStreamerFailed)?; + let overrides = override_builder.build().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to build matcher for a set of glob overrides.", + err, + ) + .ask_report() + })?; Self::new_streamer(be, index, node, Some(overrides), opts.recursive) } @@ -668,9 +773,7 @@ impl TreeStreamerOnce
<P>
{
     ///
     /// # Errors
     ///
-    /// * [`TreeErrorKind::SendingCrossbeamMessageFailed`] - If sending the message fails.
-    ///
-    /// [`TreeErrorKind::SendingCrossbeamMessageFailed`]: crate::error::TreeErrorKind::SendingCrossbeamMessageFailed
+    /// * If sending the message fails.
     pub fn new(
         be: &BE,
         index: &I,
@@ -707,7 +810,19 @@ impl<P: Progress> TreeStreamerOnce<P> {
         };

         for (count, id) in ids.into_iter().enumerate() {
-            if !streamer.add_pending(PathBuf::new(), id, count)? {
+            if !streamer
+                .add_pending(PathBuf::new(), id, count)
+                .map_err(|err| {
+                    RusticError::with_source(
+                        ErrorKind::Internal,
+                        "Failed to add tree ID `{tree_id}` to unbounded pending queue (`{count}`).",
+                        err,
+                    )
+                    .attach_context("tree_id", id.to_string())
+                    .attach_context("count", count.to_string())
+                    .ask_report()
+                })?
+            {
                 streamer.p.inc(1);
                 streamer.finished_ids += 1;
             }
@@ -730,16 +845,18 @@ impl<P: Progress> TreeStreamerOnce<P> {
     ///
     /// # Errors
     ///
-    /// * [`TreeErrorKind::SendingCrossbeamMessageFailed`] - If sending the message fails.
-    ///
-    /// [`TreeErrorKind::SendingCrossbeamMessageFailed`]: crate::error::TreeErrorKind::SendingCrossbeamMessageFailed
-    fn add_pending(&mut self, path: PathBuf, id: TreeId, count: usize) -> RusticResult<bool> {
+    /// * If sending the message fails.
+    fn add_pending(&mut self, path: PathBuf, id: TreeId, count: usize) -> TreeResult<bool> {
         if self.visited.insert(id) {
             self.queue_in
                 .as_ref()
                 .unwrap()
                 .send((path, id, count))
-                .map_err(TreeErrorKind::SendingCrossbeamMessageFailed)?;
+                .map_err(|err| TreeErrorKind::Channel {
+                    kind: "sending crossbeam message",
+                    source: err.into(),
+                })?;
+
             self.counter[count] += 1;
             Ok(true)
         } else {
@@ -757,12 +874,17 @@ impl<P: Progress> Iterator for TreeStreamerOnce<P> {
             self.p.finish();
             return None;
         }
+
         let (path, tree, count) = match self.queue_out.recv() {
             Ok(Ok(res)) => res,
             Err(err) => {
-                return Some(Err(
-                    TreeErrorKind::ReceivingCrossbreamMessageFailed(err).into()
-                ))
+                return Some(Err(RusticError::with_source(
+                    ErrorKind::Internal,
+                    "Failed to receive tree from crossbeam channel.",
+                    err,
+                )
+                .attach_context("finished_ids", self.finished_ids.to_string())
+                .ask_report()));
             }
             Ok(Err(err)) => return Some(Err(err)),
         };
@@ -771,17 +893,32 @@ impl<P: Progress> Iterator for TreeStreamerOnce<P>
{ if let Some(id) = node.subtree { let mut path = path.clone(); path.push(node.name()); - match self.add_pending(path, id, count) { + match self.add_pending(path.clone(), id, count) { Ok(_) => {} - Err(err) => return Some(Err(err)), + Err(err) => { + return Some(Err(err).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to add tree ID `{tree_id}` to pending queue (`{count}`).", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("tree_id", id.to_string()) + .attach_context("count", count.to_string()) + .ask_report() + })) + } } } } + self.counter[count] -= 1; + if self.counter[count] == 0 { self.p.inc(1); self.finished_ids += 1; } + Some(Ok((path, tree))) } } diff --git a/crates/core/src/chunker.rs b/crates/core/src/chunker.rs index fb6a300b..a8260151 100644 --- a/crates/core/src/chunker.rs +++ b/crates/core/src/chunker.rs @@ -7,7 +7,7 @@ use crate::{ polynom::{Polynom, Polynom64}, rolling_hash::{Rabin64, RollingHash64}, }, - error::{PolynomialErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, }; pub(super) mod constants { @@ -87,9 +87,9 @@ impl ChunkIter { } impl Iterator for ChunkIter { - type Item = io::Result>; + type Item = RusticResult>; - fn next(&mut self) -> Option>> { + fn next(&mut self) -> Option { if self.finished { return None; } @@ -111,7 +111,13 @@ impl Iterator for ChunkIter { .read_to_end(&mut vec) { Ok(size) => size, - Err(err) => return Some(Err(err)), + Err(err) => { + return Some(Err(RusticError::with_source( + ErrorKind::InputOutput, + "Failed to read from reader in iterator", + err, + ))); + } }; // If self.min_size is not reached, we are done. @@ -149,8 +155,12 @@ impl Iterator for ChunkIter { } Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue, - Err(e) => { - return Some(Err(e)); + Err(err) => { + return Some(Err(RusticError::with_source( + ErrorKind::InputOutput, + "Failed to read from reader in iterator", + err, + ))); } } } @@ -173,9 +183,7 @@ impl Iterator for ChunkIter { /// /// # Errors /// -/// * [`PolynomialErrorKind::NoSuitablePolynomialFound`] - If no polynomial could be found in one million tries. -/// -/// [`PolynomialErrorKind::NoSuitablePolynomialFound`]: crate::error::PolynomialErrorKind::NoSuitablePolynomialFound +/// * If no polynomial could be found in one million tries. pub fn random_poly() -> RusticResult { for _ in 0..constants::RAND_POLY_MAX_TRIES { let mut poly: u64 = thread_rng().gen(); @@ -191,7 +199,12 @@ pub fn random_poly() -> RusticResult { return Ok(poly); } } - Err(PolynomialErrorKind::NoSuitablePolynomialFound.into()) + + Err(RusticError::new( + ErrorKind::Internal, + "No suitable polynomial found, this should essentially never happen. Please try again.", + ) + .ask_report()) } /// A trait for extending polynomials. diff --git a/crates/core/src/commands/backup.rs b/crates/core/src/commands/backup.rs index 1d6b60d0..0b7e6602 100644 --- a/crates/core/src/commands/backup.rs +++ b/crates/core/src/commands/backup.rs @@ -16,7 +16,7 @@ use crate::{ ignore::{LocalSource, LocalSourceFilterOptions, LocalSourceSaveOptions}, stdin::StdinSource, }, - error::RusticResult, + error::{ErrorKind, RusticError, RusticResult}, progress::ProgressBars, repofile::{ snapshotfile::{SnapshotGroup, SnapshotGroupCriterion, SnapshotId}, @@ -196,21 +196,16 @@ pub struct BackupOptions { /// /// # Errors /// -/// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. 
-/// * [`PackerErrorKind::IntConversionFailed`] - If converting the data length to u64 fails -/// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails. -/// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the index file could not be serialized. -/// * [`SnapshotFileErrorKind::OutOfRange`] - If the time is not in the range of `Local::now()` +/// * If sending the message to the raw packer fails. +/// * If converting the data length to u64 fails +/// * If sending the message to the raw packer fails. +/// * If the index file could not be serialized. +/// * If the time is not in the range of `Local::now()` /// /// # Returns /// /// The snapshot pointing to the backup'ed data. -/// -/// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed -/// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed -/// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed -/// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed -/// [`SnapshotFileErrorKind::OutOfRange`]: crate::error::SnapshotFileErrorKind::OutOfRange +#[allow(clippy::too_many_lines)] pub(crate) fn backup( repo: &Repository, opts: &BackupOptions, @@ -229,12 +224,44 @@ pub(crate) fn backup( let as_path = opts .as_path .as_ref() - .map(|p| -> RusticResult<_> { Ok(p.parse_dot()?.to_path_buf()) }) + .map(|p| -> RusticResult<_> { + Ok(p.parse_dot() + .map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Failed to parse dotted path `{path}`", + err, + ) + .attach_context("path", p.display().to_string()) + })? + .to_path_buf()) + }) .transpose()?; match &as_path { - Some(p) => snap.paths.set_paths(&[p.clone()])?, - None => snap.paths.set_paths(&backup_path)?, + Some(p) => snap.paths.set_paths(&[p.clone()]).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to set paths `{paths}` in snapshot.", + err, + ) + .attach_context("paths", p.display().to_string()) + })?, + None => snap.paths.set_paths(&backup_path).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to set paths `{paths}` in snapshot.", + err, + ) + .attach_context( + "paths", + backup_path + .iter() + .map(|p| p.display().to_string()) + .collect::>() + .join(","), + ) + })?, }; let (parent_id, parent) = opts.parent_opts.get_parent(repo, &snap, backup_stdin); diff --git a/crates/core/src/commands/cat.rs b/crates/core/src/commands/cat.rs index 9c45a077..55caa248 100644 --- a/crates/core/src/commands/cat.rs +++ b/crates/core/src/commands/cat.rs @@ -5,7 +5,7 @@ use bytes::Bytes; use crate::{ backend::{decrypt::DecryptReadBackend, FileType, FindInBackend}, blob::{tree::Tree, BlobId, BlobType}, - error::{CommandErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, index::ReadIndex, progress::ProgressBars, repofile::SnapshotFile, @@ -27,17 +27,13 @@ use crate::{ /// /// # Errors /// -/// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string -/// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. -/// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. +/// * If the string is not a valid hexadecimal string +/// * If no id could be found. +/// * If the id is not unique. /// /// # Returns /// /// The data read. 
-/// -/// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError -/// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound -/// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique pub(crate) fn cat_file( repo: &Repository, tpe: FileType, @@ -63,9 +59,7 @@ pub(crate) fn cat_file( /// /// # Errors /// -/// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string -/// -/// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError +/// * If the string is not a valid hexadecimal string pub(crate) fn cat_blob( repo: &Repository, tpe: BlobType, @@ -92,13 +86,11 @@ pub(crate) fn cat_blob( /// /// # Errors /// -/// * [`CommandErrorKind::PathIsNoDir`] - If the path is not a directory. +/// * If the path is not a directory. /// /// # Returns /// /// The data read. -/// -/// [`CommandErrorKind::PathIsNoDir`]: crate::error::CommandErrorKind::PathIsNoDir pub(crate) fn cat_tree( repo: &Repository, snap: &str, @@ -112,9 +104,13 @@ pub(crate) fn cat_tree( &repo.pb.progress_counter("getting snapshot..."), )?; let node = Tree::node_from_path(repo.dbe(), repo.index(), snap.tree, Path::new(path))?; - let id = node - .subtree - .ok_or_else(|| CommandErrorKind::PathIsNoDir(path.to_string()))?; + let id = node.subtree.ok_or_else(|| { + RusticError::new( + ErrorKind::InvalidInput, + "Path `{path}` in Node subtree is not a directory. Please provide a directory path.", + ) + .attach_context("path", path.to_string()) + })?; let data = repo .index() .blob_from_backend(repo.dbe(), BlobType::Tree, &BlobId::from(*id))?; diff --git a/crates/core/src/commands/check.rs b/crates/core/src/commands/check.rs index 443e828a..ca92a12c 100644 --- a/crates/core/src/commands/check.rs +++ b/crates/core/src/commands/check.rs @@ -2,6 +2,7 @@ use std::{ collections::{BTreeSet, HashMap}, fmt::Debug, + num::ParseIntError, str::FromStr, }; @@ -18,7 +19,7 @@ use crate::{ backend::{cache::Cache, decrypt::DecryptReadBackend, node::NodeType, FileType, ReadBackend}, blob::{tree::TreeStreamerOnce, BlobId, BlobType}, crypto::hasher::hash, - error::{CommandErrorKind, RusticErrorKind, RusticResult}, + error::{RusticError, RusticResult}, id::Id, index::{ binarysorted::{IndexCollector, IndexType}, @@ -29,7 +30,7 @@ use crate::{ packfile::PackId, IndexFile, IndexPack, PackHeader, PackHeaderLength, PackHeaderRef, }, repository::{Open, Repository}, - TreeId, + ErrorKind, TreeId, }; #[derive(Clone, Copy, Debug, Default)] @@ -97,8 +98,8 @@ impl ReadSubsetOption { } } -/// parses n/m inclding named settings depending on current date -fn parse_n_m(now: NaiveDateTime, n_in: &str, m_in: &str) -> Result<(u32, u32), CommandErrorKind> { +/// parses n/m including named settings depending on current date +fn parse_n_m(now: NaiveDateTime, n_in: &str, m_in: &str) -> Result<(u32, u32), ParseIntError> { let is_leap_year = |dt: NaiveDateTime| { let year = dt.year(); year % 4 == 0 && (year % 25 != 0 || year % 16 == 0) @@ -139,23 +140,52 @@ fn parse_n_m(now: NaiveDateTime, n_in: &str, m_in: &str) -> Result<(u32, u32), C } impl FromStr for ReadSubsetOption { - type Err = CommandErrorKind; + type Err = Box; fn from_str(s: &str) -> Result { let result = if s == "all" { Self::All } else if let Some(p) = s.strip_suffix('%') { // try to read percentage - Self::Percentage(p.parse()?) + let percentage = p.parse().map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Error parsing percentage from value `{value}` for ReadSubset option. 
Did you forget the '%'?", + err, + ) + .attach_context("value", p.to_string()) + })?; + + Self::Percentage(percentage) } else if let Some((n, m)) = s.split_once('/') { let now = Local::now().naive_local(); - Self::IdSubSet(parse_n_m(now, n, m)?) + let subset = parse_n_m(now, n, m).map_err( + |err| + RusticError::with_source( + ErrorKind::InvalidInput, + "Error parsing 'n/m' from value `{value}` for ReadSubset option. Allowed values: 'all', 'x%', 'n/m' or a size.", + err + ) + .attach_context("value", s) + .attach_context("n/m", format!("{n}/{m}")) + .attach_context("now", now.to_string()) + )?; + + Self::IdSubSet(subset) } else { - Self::Size( - ByteSize::from_str(s) - .map_err(CommandErrorKind::FromByteSizeParser)? - .as_u64(), - ) + let byte_size = ByteSize::from_str(s) + .map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Error parsing size from value `{value}` for ReadSubset option. Allowed values: 'all', 'x%', 'n/m' or a size.", + err + ) + .attach_context("value", s) + })? + .as_u64(); + + Self::Size(byte_size) }; + Ok(result) } } @@ -197,7 +227,7 @@ pub struct CheckOptions { /// /// # Errors /// -/// If the repository is corrupted +/// * If the repository is corrupted /// /// # Panics /// @@ -219,9 +249,7 @@ pub(crate) fn check_repository( // // This lists files here and later when reading index / checking snapshots // TODO: Only list the files once... - _ = be - .list_with_size(file_type) - .map_err(RusticErrorKind::Backend)?; + _ = be.list_with_size(file_type)?; let p = pb.progress_bytes(format!("checking {file_type:?} in cache...")); // TODO: Make concurrency (20) customizable @@ -286,6 +314,7 @@ pub(crate) fn check_repository( }); p.finish(); } + Ok(()) } @@ -300,7 +329,7 @@ pub(crate) fn check_repository( /// /// # Errors /// -/// If a file is missing or has a different size +/// * If a file is missing or has a different size fn check_hot_files( be: &impl ReadBackend, be_hot: &impl ReadBackend, @@ -309,14 +338,11 @@ fn check_hot_files( ) -> RusticResult<()> { let p = pb.progress_spinner(format!("checking {file_type:?} in hot repo...")); let mut files = be - .list_with_size(file_type) - .map_err(RusticErrorKind::Backend)? + .list_with_size(file_type)? .into_iter() .collect::>(); - let files_hot = be_hot - .list_with_size(file_type) - .map_err(RusticErrorKind::Backend)?; + let files_hot = be_hot.list_with_size(file_type)?; for (id, size_hot) in files_hot { match files.remove(&id) { @@ -349,7 +375,7 @@ fn check_hot_files( /// /// # Errors /// -/// If a file is missing or has a different size +/// * If a file is missing or has a different size fn check_cache_files( _concurrency: usize, cache: &Cache, @@ -406,7 +432,7 @@ fn check_cache_files( /// /// # Errors /// -/// If a pack is missing or has a different size +/// * If a pack is missing or has a different size /// /// # Returns /// @@ -486,12 +512,9 @@ fn check_packs( /// /// # Errors /// -/// If a pack is missing or has a different size +/// * If a pack is missing or has a different size fn check_packs_list(be: &impl ReadBackend, mut packs: HashMap) -> RusticResult<()> { - for (id, size) in be - .list_with_size(FileType::Pack) - .map_err(RusticErrorKind::Backend)? - { + for (id, size) in be.list_with_size(FileType::Pack)? { match packs.remove(&PackId::from(id)) { None => warn!("pack {id} not referenced in index. Can be a parallel backup job. 
To repair: 'rustic repair index'."), Some(index_size) if index_size != size => { @@ -516,16 +539,13 @@ fn check_packs_list(be: &impl ReadBackend, mut packs: HashMap) -> R /// /// # Errors /// -/// If a pack is missing or has a different size +/// * If a pack is missing or has a different size fn check_packs_list_hot( be: &impl ReadBackend, mut treepacks: HashMap, packs: &HashMap, ) -> RusticResult<()> { - for (id, size) in be - .list_with_size(FileType::Pack) - .map_err(RusticErrorKind::Backend)? - { + for (id, size) in be.list_with_size(FileType::Pack)? { match treepacks.remove(&PackId::from(id)) { None => { if packs.contains_key(&PackId::from(id)) { @@ -556,7 +576,7 @@ fn check_packs_list_hot( /// /// # Errors /// -/// If a snapshot or tree is missing or has a different size +/// * If a snapshot or tree is missing or has a different size fn check_trees( be: &impl DecryptReadBackend, index: &impl ReadGlobalIndex, @@ -638,11 +658,11 @@ fn check_trees( /// /// # Errors /// -/// If the pack is invalid +/// * If the pack is invalid /// /// # Panics /// -/// If zstd decompression fails. +/// * If zstd decompression fails. fn check_pack( be: &impl DecryptReadBackend, index_pack: IndexPack, @@ -667,7 +687,18 @@ fn check_pack( // check header length let header_len = PackHeaderRef::from_index_pack(&index_pack).size(); - let pack_header_len = PackHeaderLength::from_binary(&data.split_off(data.len() - 4))?.to_u32(); + let pack_header_len = PackHeaderLength::from_binary(&data.split_off(data.len() - 4)) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Error reading pack header length `{length}` for `{pack_id}`", + err, + ) + .attach_context("pack_id", id.to_string()) + .attach_context("length", header_len.to_string()) + .ask_report() + })? + .to_u32(); if pack_header_len != header_len { error!("pack {id}: Header length in pack file doesn't match index. In pack: {pack_header_len}, calculated: {header_len}"); return Ok(()); @@ -676,7 +707,17 @@ fn check_pack( // check header let header = be.decrypt(&data.split_off(data.len() - header_len as usize))?; - let pack_blobs = PackHeader::from_binary(&header)?.into_blobs(); + let pack_blobs = PackHeader::from_binary(&header) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Error reading pack header for id `{pack_id}`", + err, + ) + .attach_context("pack_id", id.to_string()) + .ask_report() + })? 
+ .into_blobs(); let mut blobs = index_pack.blobs; blobs.sort_unstable_by_key(|b| b.offset); if pack_blobs != blobs { diff --git a/crates/core/src/commands/config.rs b/crates/core/src/commands/config.rs index 195f8701..fa6c0065 100644 --- a/crates/core/src/commands/config.rs +++ b/crates/core/src/commands/config.rs @@ -5,7 +5,7 @@ use derive_setters::Setters; use crate::{ backend::decrypt::{DecryptBackend, DecryptWriteBackend}, crypto::CryptoKey, - error::{CommandErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, repofile::ConfigFile, repository::{Open, Repository}, }; @@ -24,34 +24,29 @@ use crate::{ /// /// # Errors /// -/// * [`CommandErrorKind::VersionNotSupported`] - If the version is not supported -/// * [`CommandErrorKind::CannotDowngrade`] - If the version is lower than the current version -/// * [`CommandErrorKind::NoCompressionV1Repo`] - If compression is set for a v1 repo -/// * [`CommandErrorKind::CompressionLevelNotSupported`] - If the compression level is not supported -/// * [`CommandErrorKind::SizeTooLarge`] - If the size is too large -/// * [`CommandErrorKind::MinPackSizeTolerateWrong`] - If the min packsize tolerance percent is wrong -/// * [`CommandErrorKind::MaxPackSizeTolerateWrong`] - If the max packsize tolerance percent is wrong -/// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the file could not be serialized to json. +/// * If the version is not supported. +/// * If the version is lower than the current version. +/// * If compression is set for a v1 repo. +/// * If the compression level is not supported. +/// * If the size is too large. +/// * If the min pack size tolerance percent is wrong. +/// * If the max pack size tolerance percent is wrong. +/// * If the file could not be serialized to json. /// /// # Returns /// /// Whether the config was changed -/// -/// [`CommandErrorKind::VersionNotSupported`]: crate::error::CommandErrorKind::VersionNotSupported -/// [`CommandErrorKind::CannotDowngrade`]: crate::error::CommandErrorKind::CannotDowngrade -/// [`CommandErrorKind::NoCompressionV1Repo`]: crate::error::CommandErrorKind::NoCompressionV1Repo -/// [`CommandErrorKind::CompressionLevelNotSupported`]: crate::error::CommandErrorKind::CompressionLevelNotSupported -/// [`CommandErrorKind::SizeTooLarge`]: crate::error::CommandErrorKind::SizeTooLarge -/// [`CommandErrorKind::MinPackSizeTolerateWrong`]: crate::error::CommandErrorKind::MinPackSizeTolerateWrong -/// [`CommandErrorKind::MaxPackSizeTolerateWrong`]: crate::error::CommandErrorKind::MaxPackSizeTolerateWrong -/// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed pub(crate) fn apply_config( repo: &Repository, opts: &ConfigOptions, ) -> RusticResult { if repo.config().append_only == Some(true) { - return Err(CommandErrorKind::NotAllowedWithAppendOnly("config change".to_string()).into()); + return Err(RusticError::new( + ErrorKind::AppendOnly, + "Changing config is not allowed in append-only repositories. Please disable append-only mode first, if you know what you are doing. Aborting.", + )); } + let mut new_config = repo.config().clone(); opts.apply(&mut new_config)?; if &new_config == repo.config() { @@ -77,9 +72,7 @@ pub(crate) fn apply_config( /// /// # Errors /// -/// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the file could not be serialized to json. 
-/// -/// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed +/// * If the file could not be serialized to json. pub(crate) fn save_config( repo: &Repository, mut new_config: ConfigFile, @@ -92,10 +85,11 @@ pub(crate) fn save_config( if let Some(hot_be) = repo.be_hot.clone() { // save config to hot repo - let dbe = DecryptBackend::new(hot_be, key); + let dbe = DecryptBackend::new(hot_be.clone(), key); new_config.is_hot = Some(true); _ = dbe.save_file_uncompressed(&new_config)?; } + Ok(()) } @@ -183,41 +177,54 @@ impl ConfigOptions { /// /// # Errors /// - /// * [`CommandErrorKind::VersionNotSupported`] - If the version is not supported - /// * [`CommandErrorKind::CannotDowngrade`] - If the version is lower than the current version - /// * [`CommandErrorKind::NoCompressionV1Repo`] - If compression is set for a v1 repo - /// * [`CommandErrorKind::CompressionLevelNotSupported`] - If the compression level is not supported - /// * [`CommandErrorKind::SizeTooLarge`] - If the size is too large - /// * [`CommandErrorKind::MinPackSizeTolerateWrong`] - If the min packsize tolerate percent is wrong - /// * [`CommandErrorKind::MaxPackSizeTolerateWrong`] - If the max packsize tolerate percent is wrong - /// - /// [`CommandErrorKind::VersionNotSupported`]: crate::error::CommandErrorKind::VersionNotSupported - /// [`CommandErrorKind::CannotDowngrade`]: crate::error::CommandErrorKind::CannotDowngrade - /// [`CommandErrorKind::NoCompressionV1Repo`]: crate::error::CommandErrorKind::NoCompressionV1Repo - /// [`CommandErrorKind::CompressionLevelNotSupported`]: crate::error::CommandErrorKind::CompressionLevelNotSupported - /// [`CommandErrorKind::SizeTooLarge`]: crate::error::CommandErrorKind::SizeTooLarge - /// [`CommandErrorKind::MinPackSizeTolerateWrong`]: crate::error::CommandErrorKind::MinPackSizeTolerateWrong - /// [`CommandErrorKind::MaxPackSizeTolerateWrong`]: crate::error::CommandErrorKind::MaxPackSizeTolerateWrong + /// * If the version is not supported + /// * If the version is lower than the current version + /// * If compression is set for a v1 repo + /// * If the compression level is not supported + /// * If the size is too large + /// * If the min packsize tolerate percent is wrong + /// * If the max packsize tolerate percent is wrong pub fn apply(&self, config: &mut ConfigFile) -> RusticResult<()> { if let Some(version) = self.set_version { + // only allow versions 1 and 2 let range = 1..=2; + if !range.contains(&version) { - return Err(CommandErrorKind::VersionNotSupported(version, range).into()); + return Err(RusticError::new( + ErrorKind::Unsupported, + "Config version unsupported. Allowed versions are `{allowed_versions}`. You provided `{current_version}`. Please use a supported version. ", + ) + .attach_context("current_version", version.to_string()) + .attach_context("allowed_versions", format!("{range:?}"))); } else if version < config.version { - return Err(CommandErrorKind::CannotDowngrade(config.version, version).into()); + return Err(RusticError::new( + ErrorKind::Unsupported, + "Downgrading config version is unsupported. You provided `{new_version}` which is smaller than `{current_version}`. 
Please use a version that is greater or equal to the current one.", + ) + .attach_context("current_version", config.version.to_string()) + .attach_context("new_version", version.to_string())); } + config.version = version; } if let Some(compression) = self.set_compression { if config.version == 1 && compression != 0 { - return Err(CommandErrorKind::NoCompressionV1Repo(compression).into()); + return Err(RusticError::new( + ErrorKind::Unsupported, + "Compression `{compression}` unsupported for v1 repos.", + ) + .attach_context("compression", compression.to_string())); } + let range = zstd::compression_level_range(); if !range.contains(&compression) { - return Err( - CommandErrorKind::CompressionLevelNotSupported(compression, range).into(), - ); + return Err(RusticError::new( + ErrorKind::Unsupported, + "Compression level `{compression}` is unsupported. Allowed levels are `{allowed_levels}`. Please use a supported level.", + ) + .attach_context("compression", compression.to_string()) + .attach_context("allowed_levels", format!("{range:?}"))); } config.compression = Some(compression); } @@ -230,7 +237,7 @@ impl ConfigOptions { config.treepack_size = Some( size.as_u64() .try_into() - .map_err(|_| CommandErrorKind::SizeTooLarge(size))?, + .map_err(|err| construct_size_too_large_error(err, size))?, ); } if let Some(factor) = self.set_treepack_growfactor { @@ -240,7 +247,7 @@ impl ConfigOptions { config.treepack_size_limit = Some( size.as_u64() .try_into() - .map_err(|_| CommandErrorKind::SizeTooLarge(size))?, + .map_err(|err| construct_size_too_large_error(err, size))?, ); } @@ -248,7 +255,7 @@ impl ConfigOptions { config.datapack_size = Some( size.as_u64() .try_into() - .map_err(|_| CommandErrorKind::SizeTooLarge(size))?, + .map_err(|err| construct_size_too_large_error(err, size))?, ); } if let Some(factor) = self.set_datapack_growfactor { @@ -258,20 +265,29 @@ impl ConfigOptions { config.datapack_size_limit = Some( size.as_u64() .try_into() - .map_err(|_| CommandErrorKind::SizeTooLarge(size))?, + .map_err(|err| construct_size_too_large_error(err, size))?, ); } if let Some(percent) = self.set_min_packsize_tolerate_percent { if percent > 100 { - return Err(CommandErrorKind::MinPackSizeTolerateWrong.into()); + return Err(RusticError::new( + ErrorKind::InvalidInput, + "`min_packsize_tolerate_percent` must be <= 100. You provided `{percent}`.", + ) + .attach_context("percent", percent.to_string())); } + config.min_packsize_tolerate_percent = Some(percent); } if let Some(percent) = self.set_max_packsize_tolerate_percent { if percent < 100 && percent > 0 { - return Err(CommandErrorKind::MaxPackSizeTolerateWrong.into()); + return Err(RusticError::new( + ErrorKind::InvalidInput, + "`max_packsize_tolerate_percent` must be >= 100 or 0. You provided `{percent}`.", + ) + .attach_context("percent", percent.to_string())); } config.max_packsize_tolerate_percent = Some(percent); } @@ -281,3 +297,15 @@ impl ConfigOptions { Ok(()) } } + +fn construct_size_too_large_error( + err: std::num::TryFromIntError, + size: ByteSize, +) -> Box { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert ByteSize `{size}` to u64. 
Size is too large.", + err, + ) + .attach_context("size", size.to_string()) +} diff --git a/crates/core/src/commands/dump.rs b/crates/core/src/commands/dump.rs index 503a998c..dac2f347 100644 --- a/crates/core/src/commands/dump.rs +++ b/crates/core/src/commands/dump.rs @@ -3,7 +3,7 @@ use std::io::Write; use crate::{ backend::node::{Node, NodeType}, blob::{BlobId, BlobType}, - error::{CommandErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, repository::{IndexedFull, Repository}, }; @@ -22,21 +22,29 @@ use crate::{ /// /// # Errors /// -/// * [`CommandErrorKind::DumpNotSupported`] - If the node is not a file. -/// -/// [`CommandErrorKind::DumpNotSupported`]: crate::error::CommandErrorKind::DumpNotSupported +/// * If the node is not a file. pub(crate) fn dump( repo: &Repository, node: &Node, w: &mut impl Write, ) -> RusticResult<()> { if node.node_type != NodeType::File { - return Err(CommandErrorKind::DumpNotSupported(node.node_type.clone()).into()); + return Err(RusticError::new( + ErrorKind::Unsupported, + "Dump is not supported for non-file node types `{node_type}`. You could try to use `cat` instead.", + ) + .attach_context("node_type", node.node_type.to_string())); } for id in node.content.as_ref().unwrap() { let data = repo.get_blob_cached(&BlobId::from(**id), BlobType::Data)?; - w.write_all(&data)?; + w.write_all(&data).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to write data to writer.", + err, + ) + })?; } Ok(()) } diff --git a/crates/core/src/commands/forget.rs b/crates/core/src/commands/forget.rs index 0bfd4c42..a8b4c6f3 100644 --- a/crates/core/src/commands/forget.rs +++ b/crates/core/src/commands/forget.rs @@ -6,7 +6,7 @@ use serde_derive::{Deserialize, Serialize}; use serde_with::{serde_as, skip_serializing_none, DisplayFromStr}; use crate::{ - error::{CommandErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, progress::ProgressBars, repofile::{ snapshotfile::{SnapshotGroup, SnapshotGroupCriterion, SnapshotId}, @@ -72,7 +72,7 @@ impl ForgetGroups { /// /// # Errors /// -/// If keep options are not valid +/// * If keep options are not valid /// /// # Returns /// @@ -511,7 +511,7 @@ impl KeepOptions { /// /// # Errors /// - /// If keep options are not valid + /// * If keep options are not valid /// /// # Returns /// @@ -523,7 +523,10 @@ impl KeepOptions { now: DateTime, ) -> RusticResult> { if !self.is_valid() { - return Err(CommandErrorKind::NoKeepOption.into()); + return Err(RusticError::new( + ErrorKind::InvalidInput, + "Invalid keep options specified, please make sure to specify at least one keep-* option.", + )); } let mut group_keep = self.clone(); diff --git a/crates/core/src/commands/init.rs b/crates/core/src/commands/init.rs index f932658c..46037348 100644 --- a/crates/core/src/commands/init.rs +++ b/crates/core/src/commands/init.rs @@ -10,7 +10,7 @@ use crate::{ key::{init_key, KeyOptions}, }, crypto::aespoly1305::Key, - error::{RusticErrorKind, RusticResult}, + error::RusticResult, id::Id, repofile::{configfile::RepositoryId, ConfigFile}, repository::Repository, @@ -32,13 +32,11 @@ use crate::{ /// /// # Errors /// -/// * [`PolynomialErrorKind::NoSuitablePolynomialFound`] - If no polynomial could be found in one million tries. +/// * If no polynomial could be found in one million tries. /// /// # Returns /// /// A tuple of the key and the config file. 
-/// -/// [`PolynomialErrorKind::NoSuitablePolynomialFound`]: crate::error::PolynomialErrorKind::NoSuitablePolynomialFound pub(crate) fn init( repo: &Repository, pass: &str, @@ -85,7 +83,7 @@ pub(crate) fn init_with_config( key_opts: &KeyOptions, config: &ConfigFile, ) -> RusticResult { - repo.be.create().map_err(RusticErrorKind::Backend)?; + repo.be.create()?; let (key, id) = init_key(repo, key_opts, pass)?; info!("key {id} successfully added."); save_config(repo, config.clone(), key)?; diff --git a/crates/core/src/commands/key.rs b/crates/core/src/commands/key.rs index af277665..76238b00 100644 --- a/crates/core/src/commands/key.rs +++ b/crates/core/src/commands/key.rs @@ -4,7 +4,7 @@ use derive_setters::Setters; use crate::{ backend::{decrypt::DecryptWriteBackend, FileType, WriteBackend}, crypto::{aespoly1305::Key, hasher::hash}, - error::{CommandErrorKind, RusticErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, repofile::{KeyFile, KeyId}, repository::{Open, Repository}, }; @@ -43,13 +43,11 @@ pub struct KeyOptions { /// /// # Errors /// -/// * [`CommandErrorKind::FromJsonError`] - If the key could not be serialized +/// * If the key could not be serialized /// /// # Returns /// /// The id of the key. -/// -/// [`CommandErrorKind::FromJsonError`]: crate::error::CommandErrorKind::FromJsonError pub(crate) fn add_current_key_to_repo( repo: &Repository, opts: &KeyOptions, @@ -96,13 +94,11 @@ pub(crate) fn init_key( /// /// # Errors /// -/// * [`CommandErrorKind::FromJsonError`] - If the key could not be serialized. +/// * If the key could not be serialized. /// /// # Returns /// /// The id of the key. -/// -/// [`CommandErrorKind::FromJsonError`]: crate::error::CommandErrorKind::FromJsonError pub(crate) fn add_key_to_repo( repo: &Repository, opts: &KeyOptions, @@ -112,10 +108,18 @@ pub(crate) fn add_key_to_repo( let ko = opts.clone(); let keyfile = KeyFile::generate(key, &pass, ko.hostname, ko.username, ko.with_created)?; - let data = serde_json::to_vec(&keyfile).map_err(CommandErrorKind::FromJsonError)?; + let data = serde_json::to_vec(&keyfile).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to serialize keyfile to JSON.", + err, + ) + })?; + let id = KeyId::from(hash(&data)); + repo.be - .write_bytes(FileType::Key, &id, false, data.into()) - .map_err(RusticErrorKind::Backend)?; + .write_bytes(FileType::Key, &id, false, data.into())?; + Ok(id) } diff --git a/crates/core/src/commands/merge.rs b/crates/core/src/commands/merge.rs index 561411fb..10d15dd1 100644 --- a/crates/core/src/commands/merge.rs +++ b/crates/core/src/commands/merge.rs @@ -11,7 +11,7 @@ use crate::{ tree::{self, Tree, TreeId}, BlobId, BlobType, }, - error::{CommandErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, index::{indexer::Indexer, ReadIndex}, progress::{Progress, ProgressBars}, repofile::{PathList, SnapshotFile, SnapshotSummary}, @@ -44,7 +44,14 @@ pub(crate) fn merge_snapshots( .collect::() .merge(); - snap.paths.set_paths(&paths.paths())?; + snap.paths.set_paths(&paths.paths()).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to set paths `{paths}` in snapshot.", + err, + ) + .attach_context("paths", paths.to_string()) + })?; // set snapshot time to time of latest snapshot to be merged snap.time = snapshots @@ -58,7 +65,9 @@ pub(crate) fn merge_snapshots( let trees: Vec = snapshots.iter().map(|sn| sn.tree).collect(); snap.tree = merge_trees(repo, &trees, cmp, &mut summary)?; - summary.finalize(now)?; + 
summary.finalize(now).map_err(|err| { + RusticError::with_source(ErrorKind::Internal, "Failed to finalize summary.", err) + })?; snap.summary = Some(summary); snap.id = repo.dbe().save_file(&snap)?.into(); @@ -81,13 +90,11 @@ pub(crate) fn merge_snapshots( /// /// # Errors /// -/// * [`CommandErrorKind::ConversionToU64Failed`] - If the size of the tree is too large +/// * If the size of the tree is too large /// /// # Returns /// /// The merged tree -/// -/// [`CommandErrorKind::ConversionToU64Failed`]: crate::error::CommandErrorKind::ConversionToU64Failed pub(crate) fn merge_trees( repo: &Repository, trees: &[TreeId], @@ -104,12 +111,25 @@ pub(crate) fn merge_trees( repo.config(), index.total_size(BlobType::Tree), )?; - let save = |tree: Tree| { - let (chunk, new_id) = tree.serialize()?; - let size = u64::try_from(chunk.len()).map_err(CommandErrorKind::ConversionFromIntFailed)?; + + let save = |tree: Tree| -> RusticResult<_> { + let (chunk, new_id) = tree.serialize().map_err(|err| { + RusticError::with_source(ErrorKind::Internal, "Failed to serialize tree.", err) + })?; + + let size = u64::try_from(chunk.len()).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert chunk length `{length}` to u64.", + err, + ) + .attach_context("length", chunk.len().to_string()) + })?; + if !index.has_tree(&new_id) { packer.add(chunk.into(), BlobId::from(*new_id))?; } + Ok((new_id, size)) }; diff --git a/crates/core/src/commands/prune.rs b/crates/core/src/commands/prune.rs index d2082f9c..d1225eb3 100644 --- a/crates/core/src/commands/prune.rs +++ b/crates/core/src/commands/prune.rs @@ -30,7 +30,7 @@ use crate::{ tree::TreeStreamerOnce, BlobId, BlobType, BlobTypeMap, Initialize, }, - error::{CommandErrorKind, RusticErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, index::{ binarysorted::{IndexCollector, IndexType}, indexer::Indexer, @@ -90,7 +90,7 @@ pub struct PruneOptions { /// /// # Warning /// - /// Only use if you are sure the repository is not accessed by parallel processes! + /// * Only use if you are sure the repository is not accessed by parallel processes! #[cfg_attr(feature = "clap", clap(long))] pub instant_delete: bool, @@ -98,7 +98,7 @@ pub struct PruneOptions { /// /// # Warning /// - /// If prune aborts, this can lead to a (partly) missing index which must be repaired! + /// * If prune aborts, this can lead to a (partly) missing index which must be repaired! #[cfg_attr(feature = "clap", clap(long))] pub early_delete_index: bool, @@ -128,10 +128,9 @@ pub struct PruneOptions { /// /// # Warning /// - /// Use this option with care! - /// - /// If you specify snapshots which are not deleted, running the resulting `PrunePlan` - /// will remove data which is used within those snapshots! + /// * Use this option with care! + /// * If you specify snapshots which are not deleted, running the resulting `PrunePlan` + /// will remove data which is used within those snapshots! 
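+    ///
+    /// A minimal sketch of the intended use (illustrative only; it assumes
+    /// `PruneOptions` provides a `Default` implementation and that the snapshot
+    /// IDs to ignore were collected elsewhere):
+    ///
+    /// ```ignore
+    /// let opts = PruneOptions {
+    ///     ignore_snaps: snapshots_to_ignore,
+    ///     ..Default::default()
+    /// };
+    /// // Data referenced only by the ignored snapshots is treated as unused by the plan!
+    /// ```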
pub ignore_snaps: Vec, } @@ -168,11 +167,8 @@ impl PruneOptions { /// /// # Errors /// - /// * [`CommandErrorKind::RepackUncompressedRepoV1`] - If `repack_uncompressed` is set and the repository is a version 1 repository - /// * [`CommandErrorKind::FromOutOfRangeError`] - If `keep_pack` or `keep_delete` is out of range - /// - /// [`CommandErrorKind::RepackUncompressedRepoV1`]: crate::error::CommandErrorKind::RepackUncompressedRepoV1 - /// [`CommandErrorKind::FromOutOfRangeError`]: crate::error::CommandErrorKind::FromOutOfRangeError + /// * If `repack_uncompressed` is set and the repository is a version 1 repository + /// * If `keep_pack` or `keep_delete` is out of range #[deprecated( since = "0.5.2", note = "Use `PrunePlan::from_prune_options()` instead" @@ -198,16 +194,34 @@ pub enum LimitOption { } impl FromStr for LimitOption { - type Err = CommandErrorKind; + type Err = Box; fn from_str(s: &str) -> Result { Ok(match s.chars().last().unwrap_or('0') { '%' => Self::Percentage({ let mut copy = s.to_string(); _ = copy.pop(); - copy.parse()? + copy.parse().map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Failed to parse percentage limit `{limit}`", + err, + ) + .attach_context("limit", s) + })? }), 'd' if s == "unlimited" => Self::Unlimited, - _ => Self::Size(ByteSize::from_str(s).map_err(CommandErrorKind::FromByteSizeParser)?), + _ => { + let byte_size = ByteSize::from_str(s).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Failed to parse size limit `{limit}`", + err, + ) + .attach_context("limit", s) + })?; + + Self::Size(byte_size) + } }) } } @@ -676,11 +690,8 @@ impl PrunePlan { /// /// # Errors /// - /// * [`CommandErrorKind::RepackUncompressedRepoV1`] - If `repack_uncompressed` is set and the repository is a version 1 repository - /// * [`CommandErrorKind::FromOutOfRangeError`] - If `keep_pack` or `keep_delete` is out of range - /// - /// [`CommandErrorKind::RepackUncompressedRepoV1`]: crate::error::CommandErrorKind::RepackUncompressedRepoV1 - /// [`CommandErrorKind::FromOutOfRangeError`]: crate::error::CommandErrorKind::FromOutOfRangeError + /// * If `repack_uncompressed` is set and the repository is a version 1 repository + /// * If `keep_pack` or `keep_delete` is out of range pub fn from_prune_options( repo: &Repository, opts: &PruneOptions, @@ -688,8 +699,14 @@ impl PrunePlan { let pb = &repo.pb; let be = repo.dbe(); - if repo.config().version < 2 && opts.repack_uncompressed { - return Err(CommandErrorKind::RepackUncompressedRepoV1.into()); + let version = repo.config().version; + + if version < 2 && opts.repack_uncompressed { + return Err(RusticError::new( + ErrorKind::Unsupported, + "Repacking uncompressed pack is unsupported in Repository version `{config_version}`.", + ) + .attach_context("config_version", version.to_string())); } let mut index_files = Vec::new(); @@ -718,8 +735,7 @@ impl PrunePlan { // list existing pack files let p = pb.progress_spinner("getting packs from repository..."); let existing_packs: BTreeMap<_, _> = be - .list_with_size(FileType::Pack) - .map_err(RusticErrorKind::Backend)? + .list_with_size(FileType::Pack)? 
.into_iter() .map(|(id, size)| (PackId::from(id), size)) .collect(); @@ -733,14 +749,30 @@ impl PrunePlan { .unwrap_or_else(|| repo.config().is_hot == Some(true)); let pack_sizer = total_size.map(|tpe, size| PackSizer::from_config(repo.config(), tpe, size)); + pruner.decide_packs( - Duration::from_std(*opts.keep_pack).map_err(CommandErrorKind::FromOutOfRangeError)?, - Duration::from_std(*opts.keep_delete).map_err(CommandErrorKind::FromOutOfRangeError)?, + Duration::from_std(*opts.keep_pack).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert keep_pack duration `{keep_pack}` to std::time::Duration.", + err, + ) + .attach_context("keep_pack", opts.keep_pack.to_string()) + })?, + Duration::from_std(*opts.keep_delete).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert keep_delete duration `{keep_delete}` to std::time::Duration.", + err, + ) + .attach_context("keep_delete", opts.keep_delete.to_string()) + })?, repack_cacheable_only, opts.repack_uncompressed, opts.repack_all, &pack_sizer, )?; + pruner.decide_repack( &opts.max_repack, &opts.max_unused, @@ -748,6 +780,7 @@ impl PrunePlan { opts.no_resize, &pack_sizer, ); + pruner.check_existing_packs()?; pruner.filter_index_files(opts.instant_delete); @@ -775,15 +808,19 @@ impl PrunePlan { /// /// # Errors /// - /// * [`CommandErrorKind::BlobsMissing`] - If a blob is missing - /// - /// [`CommandErrorKind::BlobsMissing`]: crate::error::CommandErrorKind::BlobsMissing + /// * If a blob is missing fn check(&self) -> RusticResult<()> { for (id, count) in &self.used_ids { if *count == 0 { - return Err(CommandErrorKind::BlobsMissing(*id).into()); + return Err(RusticError::new( + ErrorKind::Internal, + "Blob ID `{blob_id}` is missing in index files.", + ) + .attach_context("blob_id", id.to_string()) + .ask_report()); } } + Ok(()) } @@ -1035,13 +1072,9 @@ impl PrunePlan { /// /// # Errors /// - /// * [`CommandErrorKind::NoDecision`] - If a pack is undecided - /// * [`CommandErrorKind::PackSizeNotMatching`] - If the size of a pack does not match - /// * [`CommandErrorKind::PackNotExisting`] - If a pack does not exist - /// - /// [`CommandErrorKind::NoDecision`]: crate::error::CommandErrorKind::NoDecision - /// [`CommandErrorKind::PackSizeNotMatching`]: crate::error::CommandErrorKind::PackSizeNotMatching - /// [`CommandErrorKind::PackNotExisting`]: crate::error::CommandErrorKind::PackNotExisting + /// * If a pack is undecided + /// * If the size of a pack does not match + /// * If a pack does not exist fn check_existing_packs(&mut self) -> RusticResult<()> { for pack in self.index_files.iter().flat_map(|index| &index.packs) { let existing_size = self.existing_packs.remove(&pack.id); @@ -1050,15 +1083,29 @@ impl PrunePlan { let check_size = || { match existing_size { Some(size) if size == pack.size => Ok(()), // size is ok => continue - Some(size) => Err(CommandErrorKind::PackSizeNotMatching( - pack.id, pack.size, size, - )), - None => Err(CommandErrorKind::PackNotExisting(pack.id)), + Some(size) => Err(RusticError::new( + ErrorKind::Internal, + "Pack size `{size_in_pack_real}` of id `{pack_id}` does not match the expected size `{size_in_index_expected}` in the index file. 
", + ) + .attach_context("pack_id", pack.id.to_string()) + .attach_context("size_in_index_expected", pack.size.to_string()) + .attach_context("size_in_pack_real", size.to_string()) + .ask_report()), + None => Err(RusticError::new(ErrorKind::Internal, "Pack `{pack_id}` does not exist.") + .attach_context("pack_id", pack.id.to_string()) + .ask_report()), } }; match pack.to_do { - PackToDo::Undecided => return Err(CommandErrorKind::NoDecision(pack.id).into()), + PackToDo::Undecided => { + return Err(RusticError::new( + ErrorKind::Internal, + "Pack `{pack_id}` got no decision what to do with it!", + ) + .attach_context("pack_id", pack.id.to_string()) + .ask_report()); + } PackToDo::Keep | PackToDo::Recover => { for blob in &pack.blobs { _ = self.used_ids.remove(&blob.id); @@ -1138,8 +1185,8 @@ impl PrunePlan { /// /// # Errors /// - /// * [`CommandErrorKind::NotAllowedWithAppendOnly`] - If the repository is in append-only mode - /// * [`CommandErrorKind::NoDecision`] - If a pack has no decision + /// * If the repository is in append-only mode + /// * If a pack has no decision /// /// # Returns /// @@ -1170,8 +1217,8 @@ impl PrunePlan { /// /// # Errors /// -/// * [`CommandErrorKind::NotAllowedWithAppendOnly`] - If the repository is in append-only mode -/// * [`CommandErrorKind::NoDecision`] - If a pack has no decision +/// * If the repository is in append-only mode +/// * If a pack has no decision /// /// # Returns /// @@ -1188,7 +1235,10 @@ pub(crate) fn prune_repository( prune_plan: PrunePlan, ) -> RusticResult<()> { if repo.config().append_only == Some(true) { - return Err(CommandErrorKind::NotAllowedWithAppendOnly("prune".to_string()).into()); + return Err(RusticError::new( + ErrorKind::AppendOnly, + "Pruning is not allowed in append-only repositories. Please disable append-only mode first, if you know what you are doing. Aborting.", + )); } repo.warm_up_wait(prune_plan.repack_packs().into_iter())?; let be = repo.dbe(); @@ -1301,7 +1351,14 @@ pub(crate) fn prune_repository( .into_par_iter() .try_for_each(|pack| -> RusticResult<_> { match pack.to_do { - PackToDo::Undecided => return Err(CommandErrorKind::NoDecision(pack.id).into()), + PackToDo::Undecided => { + return Err(RusticError::new( + ErrorKind::Internal, + "Pack `{pack_id}` got no decision what to do with it!", + ) + .attach_context("pack_id", pack.id.to_string()) + .ask_report()); + } PackToDo::Keep => { // keep pack: add to new index let pack = pack.into_index_pack(); @@ -1528,8 +1585,7 @@ fn find_used_blobs( let p = pb.progress_counter("reading snapshots..."); let list: Vec<_> = be - .list(FileType::Snapshot) - .map_err(RusticErrorKind::Backend)? + .list(FileType::Snapshot)? 
.into_iter() .filter(|id| !ignore_snaps.contains(&SnapshotId::from(*id))) .collect(); diff --git a/crates/core/src/commands/repair/index.rs b/crates/core/src/commands/repair/index.rs index f2e8dc56..4ff7dc2f 100644 --- a/crates/core/src/commands/repair/index.rs +++ b/crates/core/src/commands/repair/index.rs @@ -9,7 +9,7 @@ use crate::{ decrypt::{DecryptReadBackend, DecryptWriteBackend}, FileType, ReadBackend, WriteBackend, }, - error::{CommandErrorKind, RusticErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, index::{binarysorted::IndexCollector, indexer::Indexer, GlobalIndex}, progress::{Progress, ProgressBars}, repofile::{packfile::PackId, IndexFile, IndexPack, PackHeader, PackHeaderRef}, @@ -44,7 +44,12 @@ pub(crate) fn repair_index( dry_run: bool, ) -> RusticResult<()> { if repo.config().append_only == Some(true) { - return Err(CommandErrorKind::NotAllowedWithAppendOnly("index repair".to_string()).into()); + return Err( + RusticError::new( + ErrorKind::AppendOnly, + "Repairing the index is not allowed in append-only repositories. Please disable append-only mode first, if you know what you are doing. Aborting.", + ) + ); } let be = repo.dbe(); @@ -60,8 +65,7 @@ pub(crate) fn repair_index( if !new_index.packs.is_empty() || !new_index.packs_to_delete.is_empty() { _ = be.save_file(&new_index)?; } - be.remove(FileType::Index, &index_id, true) - .map_err(RusticErrorKind::Backend)?; + be.remove(FileType::Index, &index_id, true)?; } (false, _) => {} // nothing to do } @@ -73,12 +77,15 @@ pub(crate) fn repair_index( let indexer = Indexer::new(be.clone()).into_shared(); let p = repo.pb.progress_counter("reading pack headers"); - p.set_length( - pack_read_header - .len() - .try_into() - .map_err(CommandErrorKind::ConversionFromIntFailed)?, - ); + + p.set_length(pack_read_header.len().try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert `pack_read_header` length `{length}` to u64.", + err, + ) + .attach_context("length", pack_read_header.len().to_string()) + })?); for (id, size_hint, packsize) in pack_read_header { debug!("reading pack {id}..."); match PackHeader::from_file(be, id, size_hint, packsize) { @@ -115,8 +122,7 @@ impl PackChecker { let be = repo.dbe(); let p = repo.pb.progress_spinner("listing packs..."); let packs: HashMap<_, _> = be - .list_with_size(FileType::Pack) - .map_err(RusticErrorKind::Backend)? + .list_with_size(FileType::Pack)? 
.into_iter() .map(|(id, size)| (PackId::from(id), size)) .collect(); @@ -186,12 +192,16 @@ pub(crate) fn index_checked_from_collector( repo.warm_up_wait(pack_read_header.iter().map(|(id, _, _)| *id))?; let p = repo.pb.progress_counter("reading pack headers"); - p.set_length( - pack_read_header - .len() - .try_into() - .map_err(CommandErrorKind::ConversionFromIntFailed)?, - ); + + p.set_length(pack_read_header.len().try_into().map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert `pack_read_header` length `{length}` to u64.", + err, + ) + .attach_context("length", pack_read_header.len().to_string()) + })?); + let index_packs: Vec<_> = pack_read_header .into_iter() .map(|(id, size_hint, packsize)| { diff --git a/crates/core/src/commands/repair/snapshots.rs b/crates/core/src/commands/repair/snapshots.rs index ebf6509c..ebc43c87 100644 --- a/crates/core/src/commands/repair/snapshots.rs +++ b/crates/core/src/commands/repair/snapshots.rs @@ -14,7 +14,7 @@ use crate::{ tree::{Tree, TreeId}, BlobId, BlobType, }, - error::{CommandErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, index::{indexer::Indexer, ReadGlobalIndex, ReadIndex}, progress::ProgressBars, repofile::{snapshotfile::SnapshotId, SnapshotFile, StringList}, @@ -31,7 +31,7 @@ pub struct RepairSnapshotsOptions { /// /// # Warning /// - /// This can result in data loss! + /// * This can result in data loss! #[cfg_attr(feature = "clap", clap(long))] pub delete: bool, @@ -99,7 +99,10 @@ pub(crate) fn repair_snapshots( if opts.delete && config_file.append_only == Some(true) { return Err( - CommandErrorKind::NotAllowedWithAppendOnly("snapshot removal".to_string()).into(), + RusticError::new( + ErrorKind::AppendOnly, + "Removing snapshots is not allowed in append-only repositories. Please disable append-only mode first, if you know what you are doing. Aborting.", + ) ); } @@ -280,13 +283,19 @@ pub(crate) fn repair_tree( (Some(id), Changed::None) => Ok((Changed::None, id)), (_, c) => { // the tree has been changed => save it - let (chunk, new_id) = tree.serialize()?; + let (chunk, new_id) = tree.serialize().map_err(|err| { + RusticError::with_source(ErrorKind::Internal, "Failed to serialize tree.", err) + .ask_report() + })?; + if !index.has_tree(&new_id) && !dry_run { packer.add(chunk.into(), BlobId::from(*new_id))?; } + if let Some(id) = id { _ = state.replaced.insert(id, (c, new_id)); } + Ok((c, new_id)) } } diff --git a/crates/core/src/commands/repoinfo.rs b/crates/core/src/commands/repoinfo.rs index e105e305..6e5af8cf 100644 --- a/crates/core/src/commands/repoinfo.rs +++ b/crates/core/src/commands/repoinfo.rs @@ -4,7 +4,7 @@ use serde_with::skip_serializing_none; use crate::{ backend::{decrypt::DecryptReadBackend, FileType, ReadBackend, ALL_FILE_TYPES}, blob::{BlobType, BlobTypeMap}, - error::{RusticErrorKind, RusticResult}, + error::RusticResult, index::IndexEntry, progress::{Progress, ProgressBars}, repofile::indexfile::{IndexFile, IndexPack}, @@ -187,11 +187,11 @@ pub struct RepoFileInfo { /// /// # Errors /// -/// If files could not be listed. +/// * If files could not be listed. 
pub(crate) fn collect_file_info(be: &impl ReadBackend) -> RusticResult> { let mut files = Vec::with_capacity(ALL_FILE_TYPES.len()); for tpe in ALL_FILE_TYPES { - let list = be.list_with_size(tpe).map_err(RusticErrorKind::Backend)?; + let list = be.list_with_size(tpe)?; let count = list.len() as u64; let size = list.iter().map(|f| u64::from(f.1)).sum(); files.push(RepoFileInfo { tpe, count, size }); diff --git a/crates/core/src/commands/restore.rs b/crates/core/src/commands/restore.rs index 033f9785..6d8e029f 100644 --- a/crates/core/src/commands/restore.rs +++ b/crates/core/src/commands/restore.rs @@ -23,7 +23,7 @@ use crate::{ node::{Node, NodeType}, FileType, ReadBackend, }, - error::{CommandErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, progress::{Progress, ProgressBars}, repofile::packfile::PackId, repository::{IndexedFull, IndexedTree, Open, Repository}, @@ -51,7 +51,7 @@ pub struct RestoreOptions { /// /// # Warning /// - /// Use with care, maybe first try this with --dry-run? + /// * Use with care, maybe first try this with `--dry-run`? #[cfg_attr(feature = "clap", clap(long))] pub delete: bool, @@ -111,7 +111,7 @@ pub struct RestoreStats { /// /// # Errors /// -/// If the restore failed. +/// * If the restore failed. pub(crate) fn restore_repository( file_infos: RestorePlan, repo: &Repository, @@ -145,11 +145,8 @@ pub(crate) fn restore_repository( /// /// # Errors /// -/// * [`CommandErrorKind::ErrorCreating`] - If a directory could not be created. -/// * [`CommandErrorKind::ErrorCollecting`] - If the restore information could not be collected. -/// -/// [`CommandErrorKind::ErrorCreating`]: crate::error::CommandErrorKind::ErrorCreating -/// [`CommandErrorKind::ErrorCollecting`]: crate::error::CommandErrorKind::ErrorCollecting +/// * If a directory could not be created. +/// * If the restore information could not be collected. #[allow(clippy::too_many_lines)] pub(crate) fn collect_and_prepare( repo: &Repository, @@ -222,9 +219,15 @@ pub(crate) fn collect_and_prepare( stats.dirs.restore += 1; debug!("to restore: {path:?}"); if !dry_run { - dest.create_dir(path).map_err(|err| { - CommandErrorKind::ErrorCreating(path.clone(), Box::new(err)) - })?; + dest.create_dir(path) + .map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to create the directory `{path}`. Please check the path and try again.", + err + ) + .attach_context("path", path.display().to_string()) + })?; } } } @@ -232,11 +235,7 @@ pub(crate) fn collect_and_prepare( // collect blobs needed for restoring match ( exists, - restore_infos - .add_file(dest, node, path.clone(), repo, opts.verify_existing) - .map_err(|err| { - CommandErrorKind::ErrorCollecting(path.clone(), Box::new(err)) - })?, + restore_infos.add_file(dest, node, path.clone(), repo, opts.verify_existing)?, ) { // Note that exists = false and Existing or Verified can happen if the file is changed between scanning the dir // and calling add_file. So we don't care about exists but trust add_file here. @@ -271,7 +270,13 @@ pub(crate) fn collect_and_prepare( .ignore(false) .sort_by_file_path(Path::cmp) .build() - .filter_map(Result::ok); // TODO: print out the ignored error + .inspect(|r| { + if let Err(err) = r { + error!("Error during collection of files: {err:?}"); + } + }) + .filter_map(Result::ok); + let mut next_dst = dst_iter.next(); let mut next_node = node_streamer.next().transpose()?; @@ -336,7 +341,7 @@ pub(crate) fn collect_and_prepare( /// /// # Errors /// -/// If the restore failed. 
+/// * If the restore failed. fn restore_metadata( mut node_streamer: impl Iterator>, opts: RestoreOptions, @@ -424,11 +429,8 @@ pub(crate) fn set_metadata( /// /// # Errors /// -/// * [`CommandErrorKind::ErrorSettingLength`] - If the length of a file could not be set. -/// * [`CommandErrorKind::FromRayonError`] - If the restore failed. -/// -/// [`CommandErrorKind::ErrorSettingLength`]: crate::error::CommandErrorKind::ErrorSettingLength -/// [`CommandErrorKind::FromRayonError`]: crate::error::CommandErrorKind::FromRayonError +/// * If the length of a file could not be set. +/// * If the restore failed. #[allow(clippy::too_many_lines)] fn restore_contents( repo: &Repository, @@ -449,8 +451,14 @@ fn restore_contents( for (i, size) in file_lengths.iter().enumerate() { if *size == 0 { let path = &filenames[i]; - dest.set_length(path, *size) - .map_err(|err| CommandErrorKind::ErrorSettingLength(path.clone(), Box::new(err)))?; + dest.set_length(path, *size).map_err(|err| { + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to set the length of the file `{path}`. Please check the path and try again.", + err, + ) + .attach_context("path", path.display().to_string()) + })?; } } @@ -491,10 +499,20 @@ fn restore_contents( }) .collect(); + let threads = constants::MAX_READER_THREADS_NUM; + let pool = ThreadPoolBuilder::new() - .num_threads(constants::MAX_READER_THREADS_NUM) + .num_threads(threads) .build() - .map_err(CommandErrorKind::FromRayonError)?; + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to create the thread pool with `{num_threads}` threads. Please try again.", + err, + ) + .attach_context("num_threads", threads.to_string()) + })?; + pool.in_place_scope(|s| { for (pack, offset, length, from_file, name_dests) in blobs { let p = &p; @@ -537,14 +555,7 @@ fn restore_contents( let mut sizes_guard = sizes.lock().unwrap(); let filesize = sizes_guard[file_idx]; if filesize > 0 { - dest.set_length(path, filesize) - .map_err(|err| { - CommandErrorKind::ErrorSettingLength( - path.clone(), - Box::new(err), - ) - }) - .unwrap(); + dest.set_length(path, filesize).unwrap(); sizes_guard[file_idx] = 0; } drop(sizes_guard); @@ -648,7 +659,7 @@ impl RestorePlan { /// /// # Errors /// - /// If the file could not be added. + /// * If the file could not be added. fn add_file( &mut self, dest: &LocalDestination, @@ -664,7 +675,15 @@ impl RestorePlan { if let Some(meta) = open_file .as_ref() .map(std::fs::File::metadata) - .transpose()? + .transpose() + .map_err(|err| + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to get the metadata of the file `{path}`. Please check the path and try again.", + err + ) + .attach_context("path", name.display().to_string()) + )? { if meta.len() == 0 { // Empty file exists @@ -677,9 +696,17 @@ impl RestorePlan { if let Some(meta) = open_file .as_ref() .map(std::fs::File::metadata) - .transpose()? + .transpose() + .map_err(|err| + RusticError::with_source( + ErrorKind::InputOutput, + "Failed to get the metadata of the file `{path}`. Please check the path and try again.", + err + ) + .attach_context("path", name.display().to_string()) + )? { - // TODO: This is the same logic as in backend/ignore.rs => consollidate! + // TODO: This is the same logic as in backend/ignore.rs => consolidate! 
let mtime = meta .modified() .ok() @@ -706,8 +733,14 @@ impl RestorePlan { }; let length = bl.data_length(); - let usize_length = - usize::try_from(length).map_err(CommandErrorKind::ConversionFromIntFailed)?; + let usize_length = usize::try_from(length).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to convert the length `{length}` to usize. Please try again.", + err, + ) + .attach_context("length", length.to_string()) + })?; let matches = open_file .as_mut() diff --git a/crates/core/src/crypto/aespoly1305.rs b/crates/core/src/crypto/aespoly1305.rs index c068f9b1..4955f721 100644 --- a/crates/core/src/crypto/aespoly1305.rs +++ b/crates/core/src/crypto/aespoly1305.rs @@ -4,7 +4,10 @@ use aes256ctr_poly1305aes::{ }; use rand::{thread_rng, RngCore}; -use crate::{crypto::CryptoKey, error::CryptoErrorKind, error::RusticResult}; +use crate::{ + crypto::CryptoKey, + error::{ErrorKind, RusticError, RusticResult}, +}; pub(crate) type Nonce = aead::Nonce; pub(crate) type AeadKey = aes256ctr_poly1305aes::Key; @@ -81,16 +84,27 @@ impl CryptoKey for Key { /// /// # Errors /// - /// If the MAC couldn't be checked. + /// * If the MAC couldn't be checked. fn decrypt_data(&self, data: &[u8]) -> RusticResult> { if data.len() < 16 { - return Err(CryptoErrorKind::CryptoKeyTooShort)?; + return Err(RusticError::new( + ErrorKind::Cryptography, + "Data is too short (less than 16 bytes), cannot decrypt.", + ))?; } let nonce = Nonce::from_slice(&data[0..16]); Aes256CtrPoly1305Aes::new(&self.0) .decrypt(nonce, &data[16..]) - .map_err(|err| CryptoErrorKind::DataDecryptionFailed(err).into()) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Cryptography, + "Data decryption failed, MAC check failed.", + err, + ) + .attach_context("nonce", format!("{nonce:?}")) + .attach_error_code("C001") + }) } /// Returns the encrypted+MACed data from the given data. @@ -101,7 +115,7 @@ impl CryptoKey for Key { /// /// # Errors /// - /// If the data could not be encrypted. + /// * If the data could not be encrypted. fn encrypt_data(&self, data: &[u8]) -> RusticResult> { let mut nonce = Nonce::default(); thread_rng().fill_bytes(&mut nonce); @@ -111,7 +125,10 @@ impl CryptoKey for Key { res.extend_from_slice(data); let tag = Aes256CtrPoly1305Aes::new(&self.0) .encrypt_in_place_detached(&nonce, &[], &mut res[16..]) - .map_err(CryptoErrorKind::DataEncryptionFailed)?; + .map_err(|err| { + RusticError::with_source(ErrorKind::Cryptography, "Data encryption failed.", err) + .attach_context("nonce", format!("{nonce:?}")) + })?; res.extend_from_slice(&tag); Ok(res) } diff --git a/crates/core/src/error.rs b/crates/core/src/error.rs index 025a4091..5d98035f 100644 --- a/crates/core/src/error.rs +++ b/crates/core/src/error.rs @@ -1,810 +1,521 @@ //! Error types and Result module. +//! +//! ## Error handling rules +//! +//! ### Visibility +//! +//! All `pub fn` (associated) functions need to return a `Result (==RusticResult)`, if they are fallible. +//! As they are user facing and will cross the API boundary we need to make sure they are high-quality errors containing all +//! needed information and actionable guidance. +//! +//! `pub(crate) fn` visibility should use a local error and thus a Result and error type limited in visibility, e.g. +//! `pub(crate) type ArchiverResult = Result`. +//! +//! ### Downgrading and Forwarding +//! +//! `RusticError`s should **not** be downgraded, instead we **upgrade** the function signature to contain a `RusticResult`. +//! 
For instance, if a function returns `Result` and we discover an error path that contains a `RusticError`, +//! we don't need to convert that into an `ArchiverErrorKind`, we should change the function signature, so it returns either a +//! `Result (==RusticResult)` or nested results like `RusticResult>`. +//! So even if the visibility of that function is `fn` or `pub(crate) fn` it should return a `RusticResult` containing a `RusticError`. +//! +//! If we `map_err` or `and_then` a `RusticError`, we don't want to create a new RusticError from it, but just attach some context +//! to it, e.g. `map_err(|e| e.attach_context("key", "value"))`, so we don't lose the original error. We can also change the error +//! kind with `map_err(|e| e.overwrite_kind(ErrorKind::NewKind))`. If we want to pre- or append to the guidance, we can use +//! `map_err(|e| e.append_guidance_line("new line"))` or `map_err(|e| e.prepend_guidance_line("new line"))`. +//! +//! ### Conversion and Nested Results +//! +//! Converting between different error kinds or their variants e.g. `TreeErrorKind::Channel` -> `ArchiverErrorKind::Channel` +//! should seldom happen (probably never?), as the caller is most likely not setup to handle such errors from a different layer, +//! so at this point, we should return either a `RusticError` indicating this is a hard error. Or use a nested Result, e.g. +//! `Result, RusticError>`. +//! +//! Local error types in `pub fn` (associated) functions need to be manually converted into a `RusticError` with a good error message +//! and other important information, e.g. actionable guidance for the user. +//! +//! ### Backend traits +//! +//! By using `RusticResult` in our `Backend` traits, we also make sure, we get back presentable errors for our users. +//! We had them before as type erased errors, that we just bubbled up. Now we can provide more context and guidance. +//! +//! ### Traits +//! +//! All traits and implementations of (foreign) traits should use `RusticResult` as return type or `Box` as `Self::Err`. +//! +//! ### Display and Debug +//! +//! All types that we want to attach to an error should implement `Display` and `Debug` to provide a good error message and a nice way +//! to display the error. // FIXME: Remove when 'displaydoc' has fixed/recommended further treatment upstream: https://github.com/yaahc/displaydoc/issues/48 #![allow(clippy::doc_markdown)] // use std::error::Error as StdError; // use std::fmt; +use derive_more::derive::Display; +use ecow::{EcoString, EcoVec}; use std::{ - error::Error, - ffi::OsString, - num::{ParseFloatError, ParseIntError, TryFromIntError}, - ops::RangeInclusive, - path::{PathBuf, StripPrefixError}, - process::ExitStatus, - str::Utf8Error, + backtrace::{Backtrace, BacktraceStatus}, + convert::Into, + fmt::{self, Display}, }; -#[cfg(not(windows))] -use nix::errno::Errno; +pub(crate) mod constants { + pub const DEFAULT_DOCS_URL: &str = "https://rustic.cli.rs/docs/errors/"; + pub const DEFAULT_ISSUE_URL: &str = "https://github.com/rustic-rs/rustic_core/issues/new"; +} -use aes256ctr_poly1305aes::aead; -use chrono::OutOfRangeError; -use displaydoc::Display; -use thiserror::Error; +/// Result type that is being returned from methods that can fail and thus have [`RusticError`]s. +pub type RusticResult> = Result; -use crate::{ - backend::node::NodeType, - blob::{tree::TreeId, BlobId}, - repofile::{indexfile::IndexPack, packfile::PackId}, -}; +/// Severity of an error, ranging from informational to fatal. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)] +pub enum Severity { + /// Informational + Info, -/// Result type that is being returned from methods that can fail and thus have [`RusticError`]s. -pub type RusticResult = Result; + /// Warning + Warning, -// [`Error`] is public, but opaque and easy to keep compatible. -#[derive(Error, Debug)] -#[error(transparent)] -/// Errors that can result from rustic. -pub struct RusticError(#[from] pub(crate) RusticErrorKind); + /// Error + Error, -// Accessors for anything we do want to expose publicly. -impl RusticError { - /// Expose the inner error kind. - /// - /// This is useful for matching on the error kind. - pub fn into_inner(self) -> RusticErrorKind { - self.0 - } + /// Fatal + Fatal, +} - /// Checks if the error is due to an incorrect password - pub fn is_incorrect_password(&self) -> bool { - matches!( - self.0, - RusticErrorKind::Repository(RepositoryErrorKind::IncorrectPassword) - ) - } +/// Status of an error, indicating whether it is permanent, temporary, or persistent. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)] +pub enum Status { + /// Permanent, may not be retried + Permanent, - /// Get the corresponding backend error, if error is caused by the backend. - /// - /// Returns `anyhow::Error`; you need to cast this to the real backend error type - pub fn backend_error(&self) -> Option<&anyhow::Error> { - if let RusticErrorKind::Backend(error) = &self.0 { - Some(error) - } else { - None - } - } + /// Temporary, may be retried + Temporary, + + /// Persistent, may be retried, but may not succeed + Persistent, } -/// [`RusticErrorKind`] describes the errors that can happen while executing a high-level command. +// NOTE: +// +// we use `an error related to {kind}` in the Display impl, so the variant display comments +// should be able to be used in a sentence. +// +/// [`ErrorKind`] describes the errors that can happen while executing a high-level command. /// /// This is a non-exhaustive enum, so additional variants may be added in future. It is /// recommended to match against the wildcard `_` instead of listing all possible variants, /// to avoid problems when new variants are added. 
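+///
+/// A minimal sketch of the recommended wildcard match (illustrative only; the
+/// `kind()` accessor is assumed here and is not part of this hunk):
+///
+/// ```ignore
+/// match err.kind() {
+///     ErrorKind::Backend => eprintln!("backend problem: {err}"),
+///     // keep a wildcard arm, as `ErrorKind` is non-exhaustive
+///     _ => eprintln!("error: {err}"),
+/// }
+/// ```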
#[non_exhaustive] -#[derive(Error, Debug)] -pub enum RusticErrorKind { - /// [`CommandErrorKind`] describes the errors that can happen while executing a high-level command - #[error(transparent)] - Command(#[from] CommandErrorKind), - - /// [`CryptoErrorKind`] describes the errors that can happen while dealing with Cryptographic functions - #[error(transparent)] - Crypto(#[from] CryptoErrorKind), - - /// [`PolynomialErrorKind`] describes the errors that can happen while dealing with Polynomials - #[error(transparent)] - Polynomial(#[from] PolynomialErrorKind), - - /// [`IdErrorKind`] describes the errors that can be returned by processing IDs - #[error(transparent)] - Id(#[from] IdErrorKind), - - /// [`RepositoryErrorKind`] describes the errors that can be returned by processing Repositories - #[error(transparent)] - Repository(#[from] RepositoryErrorKind), - - /// [`IndexErrorKind`] describes the errors that can be returned by processing Indizes - #[error(transparent)] - Index(#[from] IndexErrorKind), - - /// describes the errors that can be returned by the various Backends - #[error(transparent)] - Backend(#[from] anyhow::Error), - - /// [`BackendAccessErrorKind`] describes the errors that can be returned by accessing the various Backends - #[error(transparent)] - BackendAccess(#[from] BackendAccessErrorKind), - - /// [`ConfigFileErrorKind`] describes the errors that can be returned for `ConfigFile`s - #[error(transparent)] - ConfigFile(#[from] ConfigFileErrorKind), - - /// [`KeyFileErrorKind`] describes the errors that can be returned for `KeyFile`s - #[error(transparent)] - KeyFile(#[from] KeyFileErrorKind), - - /// [`PackFileErrorKind`] describes the errors that can be returned for `PackFile`s - #[error(transparent)] - PackFile(#[from] PackFileErrorKind), - - /// [`SnapshotFileErrorKind`] describes the errors that can be returned for `SnapshotFile`s - #[error(transparent)] - SnapshotFile(#[from] SnapshotFileErrorKind), - - /// [`PackerErrorKind`] describes the errors that can be returned for a Packer - #[error(transparent)] - Packer(#[from] PackerErrorKind), - - /// [`FileErrorKind`] describes the errors that can happen while dealing with files during restore/backups - #[error(transparent)] - File(#[from] FileErrorKind), - - /// [`TreeErrorKind`] describes the errors that can come up dealing with Trees - #[error(transparent)] - Tree(#[from] TreeErrorKind), - - /// [`CacheBackendErrorKind`] describes the errors that can be returned by a Caching action in Backends - #[error(transparent)] - CacheBackend(#[from] CacheBackendErrorKind), - - /// [`CryptBackendErrorKind`] describes the errors that can be returned by a Decryption action in Backends - #[error(transparent)] - CryptBackend(#[from] CryptBackendErrorKind), - - /// [`IgnoreErrorKind`] describes the errors that can be returned by a Ignore action in Backends - #[error(transparent)] - Ignore(#[from] IgnoreErrorKind), - - /// [`LocalDestinationErrorKind`] describes the errors that can be returned by an action on the local filesystem as Destination - #[error(transparent)] - LocalDestination(#[from] LocalDestinationErrorKind), - - /// [`NodeErrorKind`] describes the errors that can be returned by an action utilizing a node in Backends - #[error(transparent)] - Node(#[from] NodeErrorKind), - - /// [`StdInErrorKind`] describes the errors that can be returned while dealing IO from CLI - #[error(transparent)] - StdIn(#[from] StdInErrorKind), - - /// [`ArchiverErrorKind`] describes the errors that can be returned from the archiver - 
#[error(transparent)] - ArchiverError(#[from] ArchiverErrorKind), - - /// [`VfsErrorKind`] describes the errors that can be returned from the Virtual File System - #[error(transparent)] - VfsError(#[from] VfsErrorKind), - - /// [`std::io::Error`] - #[error(transparent)] - StdIo(#[from] std::io::Error), +#[derive(thiserror::Error, Debug, displaydoc::Display, Default, Clone, Copy, PartialEq, Eq)] +pub enum ErrorKind { + /// append-only mode + AppendOnly, + /// the backend + Backend, + /// the configuration + Configuration, + /// cryptographic operations + Cryptography, + /// running an external command + ExternalCommand, + /// internal operations + // Blob, Pack, Index, Tree Errors + // Compression, Parsing, Multithreading etc. + // These are deep errors that are not expected to be handled by the user. + Internal, + /// invalid user input + InvalidInput, + /// input/output operations + InputOutput, + /// a key + Key, + /// missing user input + MissingInput, + /// general operations + #[default] + Other, + /// password handling + Password, + /// the repository + Repository, + /// unsupported operations + Unsupported, + /// verification + Verification, + /// the virtual filesystem + Vfs, } -/// [`CommandErrorKind`] describes the errors that can happen while executing a high-level command -#[derive(Error, Debug, Display)] -pub enum CommandErrorKind { - /// path is no dir: `{0}` - PathIsNoDir(String), - /// used blobs are missing: blob `{0}` doesn't existing - BlobsMissing(BlobId), - /// used pack `{0}`: size does not match! Expected size: `{1}`, real size: `{2}` - PackSizeNotMatching(PackId, u32, u32), - /// used pack `{0}` does not exist! - PackNotExisting(PackId), - /// pack `{0}` got no decision what to do - NoDecision(PackId), - /// `{0}` - #[error(transparent)] - FromParseFloatError(#[from] ParseFloatError), - /// `{0}` - #[error(transparent)] - FromParseIntError(#[from] ParseIntError), - /// Bytesize parser failed: `{0}` - FromByteSizeParser(String), - /// --repack-uncompressed makes no sense for v1 repo! - RepackUncompressedRepoV1, - /// datetime out of range: `{0}` - FromOutOfRangeError(#[from] OutOfRangeError), - /// node type `{0:?}` not supported by dump - DumpNotSupported(NodeType), - /// `{0}` - #[error(transparent)] - FromJsonError(#[from] serde_json::Error), - /// version `{0}` is not supported. Allowed values: {1:?} - VersionNotSupported(u32, RangeInclusive), - /// cannot downgrade version from `{0}` to `{1}` - CannotDowngrade(u32, u32), - /// compression level `{0}` is not supported for repo v1 - NoCompressionV1Repo(i32), - /// compression level `{0}` is not supported. Allowed values: `{1:?}` - CompressionLevelNotSupported(i32, RangeInclusive), - /// Size is too large: `{0}` - SizeTooLarge(bytesize::ByteSize), - /// min_packsize_tolerate_percent must be <= 100 - MinPackSizeTolerateWrong, - /// max_packsize_tolerate_percent must be >= 100 or 0" - MaxPackSizeTolerateWrong, - /// error creating `{0:?}`: `{1:?}` - ErrorCreating(PathBuf, Box), - /// error collecting information for `{0:?}`: `{1:?}` - ErrorCollecting(PathBuf, Box), - /// error setting length for `{0:?}`: `{1:?}` - ErrorSettingLength(PathBuf, Box), - /// `{0}` - #[error(transparent)] - FromRayonError(#[from] rayon::ThreadPoolBuildError), - /// Conversion from integer failed: `{0:?}` - ConversionFromIntFailed(TryFromIntError), - /// Not allowed on an append-only repository: `{0}` - NotAllowedWithAppendOnly(String), - /// Specify one of the keep-* options for forget! Please use keep-none to keep no snapshot. 
- NoKeepOption, - /// `{0}` - #[error(transparent)] - FromParseError(#[from] shell_words::ParseError), -} +#[derive(thiserror::Error, Debug)] +#[non_exhaustive] +/// Errors that can result from rustic. +pub struct RusticError { + /// The kind of the error. + kind: ErrorKind, -/// [`CryptoErrorKind`] describes the errors that can happen while dealing with Cryptographic functions -#[derive(Error, Debug, Display, Copy, Clone)] -pub enum CryptoErrorKind { - /// data decryption failed: `{0:?}` - DataDecryptionFailed(aead::Error), - /// data encryption failed: `{0:?}` - DataEncryptionFailed(aead::Error), - /// crypto key too short - CryptoKeyTooShort, -} + /// The error message with guidance. + guidance: EcoString, -/// [`PolynomialErrorKind`] describes the errors that can happen while dealing with Polynomials -#[derive(Error, Debug, Display, Copy, Clone)] -pub enum PolynomialErrorKind { - /// no suitable polynomial found - NoSuitablePolynomialFound, -} + /// The URL of the documentation for the error. + docs_url: Option<EcoString>, -/// [`FileErrorKind`] describes the errors that can happen while dealing with files during restore/backups -#[derive(Error, Debug, Display)] -pub enum FileErrorKind { - /// transposing an Option of a Result into a Result of an Option failed: `{0:?}` - TransposingOptionResultFailed(std::io::Error), - /// conversion from `u64` to `usize` failed: `{0:?}` - ConversionFromU64ToUsizeFailed(TryFromIntError), -} + /// Error code. + error_code: Option<EcoString>, -/// [`IdErrorKind`] describes the errors that can be returned by processing IDs -#[derive(Error, Debug, Display, Copy, Clone)] -pub enum IdErrorKind { - /// Hex decoding error: `{0:?}` - HexError(hex::FromHexError), -} + /// Whether to ask the user to report the error. + ask_report: bool, -/// [`RepositoryErrorKind`] describes the errors that can be returned by processing Repositories -#[derive(Error, Debug, Display)] -pub enum RepositoryErrorKind { - /// No repository given. Please use the --repository option. - NoRepositoryGiven, - /// No password given. Please use one of the --password-* options. - NoPasswordGiven, - /// warm-up command must contain %id! - NoIDSpecified, - /// error opening password file `{0:?}` - OpeningPasswordFileFailed(std::io::Error), - /// No repository config file found. Is there a repo at `{0}`? - NoRepositoryConfigFound(String), - /// More than one repository config file at `{0}`. Aborting. - MoreThanOneRepositoryConfig(String), - /// keys from repo and repo-hot do not match for `{0}`. Aborting. - KeysDontMatchForRepositories(String), - /// repository is a hot repository!\nPlease use as --repo-hot in combination with the normal repo. Aborting. - HotRepositoryFlagMissing, - /// repo-hot is not a hot repository! Aborting. - IsNotHotRepository, - /// incorrect password!
- IncorrectPassword, - /// error running the password command - PasswordCommandExecutionFailed, - /// error reading password from command - ReadingPasswordFromCommandFailed, - /// running command `{0}`:`{1}` was not successful: `{2}` - CommandExecutionFailed(String, String, std::io::Error), - /// running command {0}:{1} returned status: `{2}` - CommandErrorStatus(String, String, ExitStatus), - /// error listing the repo config file - ListingRepositoryConfigFileFailed, - /// error listing the repo keys - ListingRepositoryKeysFailed, - /// error listing the hot repo keys - ListingHotRepositoryKeysFailed, - /// error accessing config file - AccessToConfigFileFailed, - /// Thread pool build error: `{0:?}` - FromThreadPoolbilderError(rayon::ThreadPoolBuildError), - /// reading Password failed: `{0:?}` - ReadingPasswordFromReaderFailed(std::io::Error), - /// reading Password from prompt failed: `{0:?}` - ReadingPasswordFromPromptFailed(std::io::Error), - /// Config file already exists. Aborting. - ConfigFileExists, - /// did not find id `{0}` in index - IdNotFound(BlobId), - /// no suitable backend type found - NoBackendTypeGiven, -} + /// The URL of an already existing issue that is related to this error. + existing_issue_urls: EcoVec<EcoString>, -/// [`IndexErrorKind`] describes the errors that can be returned by processing Indizes -#[derive(Error, Debug, Display, Clone, Copy)] -pub enum IndexErrorKind { - /// blob not found in index - BlobInIndexNotFound, - /// failed to get a blob from the backend - GettingBlobIndexEntryFromBackendFailed, - /// saving IndexFile failed - SavingIndexFileFailed, -} + /// The URL of the issue tracker for opening a new issue. + new_issue_url: Option<EcoString>, -/// [`BackendAccessErrorKind`] describes the errors that can be returned by the various Backends -#[derive(Error, Debug, Display)] -pub enum BackendAccessErrorKind { - /// backend `{0:?}` is not supported! - BackendNotSupported(String), - /// backend `{0}` cannot be loaded: {1:?} - BackendLoadError(String, anyhow::Error), - /// no suitable id found for `{0}` - NoSuitableIdFound(String), - /// id `{0}` is not unique - IdNotUnique(String), - /// `{0}` - #[error(transparent)] - FromIoError(#[from] std::io::Error), - /// `{0}` - #[error(transparent)] - FromTryIntError(#[from] TryFromIntError), - /// `{0}` - #[error(transparent)] - FromLocalError(#[from] LocalDestinationErrorKind), - /// `{0}` - #[error(transparent)] - FromIdError(#[from] IdErrorKind), - /// `{0}` - #[error(transparent)] - FromIgnoreError(#[from] IgnoreErrorKind), - /// `{0}` - #[error(transparent)] - FromBackendDecryptionError(#[from] CryptBackendErrorKind), - /// `{0}` - #[error(transparent)] - GenericError(#[from] ignore::Error), - /// creating data in backend failed - CreatingDataOnBackendFailed, - /// writing bytes to backend failed - WritingBytesToBackendFailed, - /// removing data from backend failed - RemovingDataFromBackendFailed, - /// failed to list files on Backend - ListingFilesOnBackendFailed, - /// Path is not allowed: `{0:?}` - PathNotAllowed(PathBuf), -} + /// The context of the error. + context: EcoVec<(EcoString, EcoString)>, -/// [`ConfigFileErrorKind`] describes the errors that can be returned for `ConfigFile`s -#[derive(Error, Debug, Display)] -pub enum ConfigFileErrorKind { - /// config version not supported! - ConfigVersionNotSupported, - /// Parsing Polynomial in config failed: `{0:?}` - ParsingFailedForPolynomial(#[from] ParseIntError), -} + /// Chain to the cause of the error.
+ source: Option<Box<dyn std::error::Error + Send + Sync>>, -/// [`KeyFileErrorKind`] describes the errors that can be returned for `KeyFile`s -#[derive(Error, Debug, Display)] -pub enum KeyFileErrorKind { - /// no suitable key found! - NoSuitableKeyFound, - /// listing KeyFiles failed - ListingKeyFilesFailed, - /// couldn't get KeyFile from backend - CouldNotGetKeyFileFromBackend, - /// serde_json couldn't deserialize the data: `{0:?}` - DeserializingFromSliceFailed(serde_json::Error), - /// couldn't encrypt data: `{0:?}` - CouldNotEncryptData(#[from] CryptoErrorKind), - /// serde_json couldn't serialize the data into a JSON byte vector: `{0:?}` - CouldNotSerializeAsJsonByteVector(serde_json::Error), - /// conversion from `u32` to `u8` failed: `{0:?}` - ConversionFromU32ToU8Failed(TryFromIntError), - /// output length is invalid: `{0:?}` - OutputLengthInvalid(scrypt::errors::InvalidOutputLen), - /// invalid scrypt parameters: `{0:?}` - InvalidSCryptParameters(scrypt::errors::InvalidParams), -} + /// Severity of the error. + severity: Option<Severity>, -/// [`PackFileErrorKind`] describes the errors that can be returned for `PackFile`s -#[derive(Error, Debug, Display)] -pub enum PackFileErrorKind { - /// Failed reading binary representation of the pack header: `{0:?}` - ReadingBinaryRepresentationFailed(binrw::Error), - /// Failed writing binary representation of the pack header: `{0:?}` - WritingBinaryRepresentationFailed(binrw::Error), - /// Read header length is too large! Length: `{size_real}`, file size: `{pack_size}` - HeaderLengthTooLarge { size_real: u32, pack_size: u32 }, - /// Read header length doesn't match header contents! Length: `{size_real}`, computed: `{size_computed}` - HeaderLengthDoesNotMatchHeaderContents { size_real: u32, size_computed: u32 }, - /// pack size computed from header doesn't match real pack isch! Computed: `{size_computed}`, real: `{size_real}` - HeaderPackSizeComputedDoesNotMatchRealPackFile { size_real: u32, size_computed: u32 }, - /// partially reading the pack header from packfile failed: `{0:?}` - ListingKeyFilesFailed(#[from] BackendAccessErrorKind), - /// decrypting from binary failed - BinaryDecryptionFailed, - /// Partial read of PackFile failed - PartialReadOfPackfileFailed, - /// writing Bytes failed - WritingBytesFailed, - /// `{0}` - #[error(transparent)] - PackDecryptionFailed(#[from] CryptBackendErrorKind), -} + /// The status of the error. + status: Option<Status>, -/// [`SnapshotFileErrorKind`] describes the errors that can be returned for `SnapshotFile`s -#[derive(Error, Debug, Display)] -pub enum SnapshotFileErrorKind { - /// non-unicode hostname `{0:?}` - NonUnicodeHostname(OsString), - /// non-unicode path `{0:?}` - NonUnicodePath(PathBuf), - /// no snapshots found - NoSnapshotsFound, - /// value `{0:?}` not allowed - ValueNotAllowed(String), - /// datetime out of range: `{0:?}` - OutOfRange(#[from] OutOfRangeError), - /// reading the description file failed: `{0:?}` - ReadingDescriptionFailed(#[from] std::io::Error), - /// getting the SnapshotFile from the backend failed - GettingSnapshotFileFailed, - /// getting the SnapshotFile by ID failed - GettingSnapshotFileByIdFailed, - /// unpacking SnapshotFile result failed - UnpackingSnapshotFileResultFailed, - /// collecting IDs failed: `{0:?}` - FindingIdsFailed(Vec), - /// removing dots from paths failed: `{0:?}` - RemovingDotsFromPathFailed(std::io::Error), - /// canonicalizing path failed: `{0:?}` - CanonicalizingPathFailed(std::io::Error), + /// Backtrace of the error.
+ /// + // Need to use option, otherwise thiserror will not be able to derive the Error trait. + backtrace: Option<Backtrace>, } -/// [`PackerErrorKind`] describes the errors that can be returned for a Packer -#[derive(Error, Debug, Display)] -pub enum PackerErrorKind { - /// error returned by cryptographic libraries: `{0:?}` - CryptoError(#[from] CryptoErrorKind), - /// could not compress due to unsupported config version: `{0:?}` - ConfigVersionNotSupported(#[from] ConfigFileErrorKind), - /// compressing data failed: `{0:?}` - CompressingDataFailed(#[from] std::io::Error), - /// getting total size failed - GettingTotalSizeFailed, - /// `{0}` - #[error(transparent)] - SendingCrossbeamMessageFailed( - #[from] crossbeam_channel::SendError<(bytes::Bytes, BlobId, Option)>, - ), - /// `{0}` - #[error(transparent)] - SendingCrossbeamMessageFailedForIndexPack( - #[from] crossbeam_channel::SendError<(bytes::Bytes, IndexPack)>, - ), - /// couldn't create binary representation for pack header: `{0:?}` - CouldNotCreateBinaryRepresentationForHeader(#[from] PackFileErrorKind), - /// failed to write bytes in backend: `{0:?}` - WritingBytesFailedInBackend(#[from] BackendAccessErrorKind), - /// failed to write bytes for PackFile: `{0:?}` - WritingBytesFailedForPackFile(PackFileErrorKind), - /// failed to read partially encrypted data: `{0:?}` - ReadingPartiallyEncryptedDataFailed(#[from] CryptBackendErrorKind), - /// failed to partially read data: `{0:?}` - PartiallyReadingDataFailed(PackFileErrorKind), - /// failed to add index pack: `{0:?}` - AddingIndexPackFailed(#[from] IndexErrorKind), - /// conversion for integer failed: `{0:?}` - IntConversionFailed(#[from] TryFromIntError), -} +impl Display for RusticError { + #[allow(clippy::too_many_lines)] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!( + f, + "`rustic_core` experienced an error related to `{}`.", + self.kind + )?; + + writeln!(f)?; + writeln!(f, "Message:")?; + let context = if self.context.is_empty() { + writeln!(f, "{}", self.guidance)?; + Vec::new() + } else { + // If there is context, we want to iterate over it and + // use the keys to replace the placeholders in the guidance. + let mut guidance = self.guidance.to_string(); + let context = self + .context + .iter() + // remove context which has been used in the guidance + .filter(|(key, value)| { + let pattern = "{".to_owned() + key + "}"; + if guidance.contains(&pattern) { + guidance = guidance.replace(&pattern, value); + false + } else { + true + } + }) + .collect(); + writeln!(f, "{guidance}")?; + context + }; + + if let Some(code) = &self.error_code { + let default_docs_url = EcoString::from(constants::DEFAULT_DOCS_URL); + let docs_url = self + .docs_url + .as_ref() + .unwrap_or(&default_docs_url) + .to_string(); + + // If the docs_url doesn't end with a slash, add one.
+ let docs_url = if docs_url.ends_with('/') { + docs_url + } else { + docs_url + "/" + }; + + writeln!(f)?; + writeln!(f, "For more information, see: {docs_url}{code}")?; + } -/// [`TreeErrorKind`] describes the errors that can come up dealing with Trees -#[derive(Error, Debug, Display)] -pub enum TreeErrorKind { - /// blob `{0}` not found in index - BlobIdNotFound(TreeId), - /// `{0:?}` is not a directory - NotADirectory(OsString), - /// Path `{0:?}` not found - PathNotFound(OsString), - /// path should not contain current or parent dir - ContainsCurrentOrParentDirectory, - /// serde_json couldn't serialize the tree: `{0:?}` - SerializingTreeFailed(#[from] serde_json::Error), - /// serde_json couldn't deserialize tree from bytes of JSON text: `{0:?}` - DeserializingTreeFailed(serde_json::Error), - /// reading blob data failed `{0:?}` - ReadingBlobDataFailed(#[from] IndexErrorKind), - /// slice is not UTF-8: `{0:?}` - PathIsNotUtf8Conform(#[from] Utf8Error), - /// error in building nodestreamer: `{0:?}` - BuildingNodeStreamerFailed(#[from] ignore::Error), - /// failed to read file string from glob file: `{0:?}` - ReadingFileStringFromGlobsFailed(#[from] std::io::Error), - /// `{0}` - #[error(transparent)] - SendingCrossbeamMessageFailed(#[from] crossbeam_channel::SendError<(PathBuf, TreeId, usize)>), - /// `{0}` - #[error(transparent)] - ReceivingCrossbreamMessageFailed(#[from] crossbeam_channel::RecvError), -} + if !self.existing_issue_urls.is_empty() { + writeln!(f)?; + writeln!(f, "Related issues:")?; + self.existing_issue_urls + .iter() + .try_for_each(|url| writeln!(f, "- {url}"))?; + } -/// [`CacheBackendErrorKind`] describes the errors that can be returned by a Caching action in Backends -#[derive(Error, Debug, Display)] -pub enum CacheBackendErrorKind { - /// no cache dir - NoCacheDirectory, - /// `{0}` - #[error(transparent)] - FromIoError(#[from] std::io::Error), - /// setting option on CacheBackend failed - SettingOptionOnCacheBackendFailed, - /// listing with size on CacheBackend failed - ListingWithSizeOnCacheBackendFailed, - /// fully reading from CacheBackend failed - FullyReadingFromCacheBackendFailed, - /// partially reading from CacheBackend failed - PartiallyReadingFromBackendDataFailed, - /// creating data on CacheBackend failed - CreatingDataOnCacheBackendFailed, - /// writing bytes on CacheBackend failed - WritingBytesOnCacheBackendFailed, - /// removing data on CacheBackend failed - RemovingDataOnCacheBackendFailed, -} + if self.ask_report { + let default_issue_url = EcoString::from(constants::DEFAULT_ISSUE_URL); + let new_issue_url = self.new_issue_url.as_ref().unwrap_or(&default_issue_url); + + writeln!(f)?; + + writeln!( + f, + "We believe this is a bug, please report it by opening an issue at:" + )?; + writeln!(f, "{new_issue_url}")?; + writeln!(f)?; + writeln!( + f, + "If you can, please attach an anonymized debug log to the issue." + )?; + writeln!(f)?; + writeln!(f, "Thank you for helping us improve rustic!")?; + } -/// [`CryptBackendErrorKind`] describes the errors that can be returned by a Decryption action in Backends -#[derive(Error, Debug, Display)] -pub enum CryptBackendErrorKind { - /// decryption not supported for backend - DecryptionNotSupportedForBackend, - /// length of uncompressed data does not match! 
- LengthOfUncompressedDataDoesNotMatch, - /// failed to read encrypted data during full read - DecryptionInFullReadFailed, - /// failed to read encrypted data during partial read - DecryptionInPartialReadFailed, - /// decrypting from backend failed - DecryptingFromBackendFailed, - /// deserializing from bytes of JSON Text failed: `{0:?}` - DeserializingFromBytesOfJsonTextFailed(serde_json::Error), - /// failed to write data in crypt backend - WritingDataInCryptBackendFailed, - /// failed to list Ids - ListingIdsInDecryptionBackendFailed, - /// `{0}` - #[error(transparent)] - FromKey(#[from] CryptoErrorKind), - /// `{0}` - #[error(transparent)] - FromIo(#[from] std::io::Error), - /// `{0}` - #[error(transparent)] - FromJson(#[from] serde_json::Error), - /// writing full hash failed in CryptBackend - WritingFullHashFailed, - /// decoding Zstd compressed data failed: `{0:?}` - DecodingZstdCompressedDataFailed(std::io::Error), - /// Serializing to JSON byte vector failed: `{0:?}` - SerializingToJsonByteVectorFailed(serde_json::Error), - /// encrypting data failed - EncryptingDataFailed, - /// Compressing and appending data failed: `{0:?}` - CopyEncodingDataFailed(std::io::Error), - /// conversion for integer failed: `{0:?}` - IntConversionFailed(#[from] TryFromIntError), - /// Extra verification failed: After decrypting and decompressing the data changed! - ExtraVerificationFailed, -} + writeln!(f)?; + writeln!(f)?; -/// [`IgnoreErrorKind`] describes the errors that can be returned by a Ignore action in Backends -#[derive(Error, Debug, Display)] -pub enum IgnoreErrorKind { - /// generic Ignore error: `{0:?}` - GenericError(#[from] ignore::Error), - /// Error reading glob file `{file:?}`: `{source:?}` - ErrorGlob { - file: PathBuf, - source: std::io::Error, - }, - /// Unable to open file `{file:?}`: `{source:?}` - UnableToOpenFile { - file: PathBuf, - source: std::io::Error, - }, - /// Error getting xattrs for `{path:?}`: `{source:?}` - ErrorXattr { - path: PathBuf, - source: std::io::Error, - }, - /// Error reading link target for `{path:?}`: `{source:?}` - ErrorLink { - path: PathBuf, - source: std::io::Error, - }, - /// `{0}` - #[error(transparent)] - FromTryFromIntError(#[from] TryFromIntError), -} + writeln!(f, "Some additional details ...")?; -/// [`LocalDestinationErrorKind`] describes the errors that can be returned by an action on the filesystem in Backends -#[derive(Error, Debug, Display)] -pub enum LocalDestinationErrorKind { - /// directory creation failed: `{0:?}` - DirectoryCreationFailed(#[from] std::io::Error), - /// file `{0:?}` should have a parent - FileDoesNotHaveParent(PathBuf), - /// `{0}` - #[error(transparent)] - FromTryIntError(#[from] TryFromIntError), - /// `{0}` - #[error(transparent)] - FromIdError(#[from] IdErrorKind), - /// `{0}` - #[error(transparent)] - FromWalkdirError(#[from] walkdir::Error), - /// `{0}` - #[error(transparent)] - #[cfg(not(windows))] - FromErrnoError(#[from] Errno), - /// listing xattrs on `{path:?}`: `{source:?}` - #[cfg(not(any(windows, target_os = "openbsd")))] - ListingXattrsFailed { - path: PathBuf, - source: std::io::Error, - }, - /// setting xattr `{name}` on `{filename:?}` with `{source:?}` - #[cfg(not(any(windows, target_os = "openbsd")))] - SettingXattrFailed { - name: String, - filename: PathBuf, - source: std::io::Error, - }, - /// getting xattr `{name}` on `{filename:?}` with `{source:?}` - #[cfg(not(any(windows, target_os = "openbsd")))] - GettingXattrFailed { - name: String, - filename: PathBuf, - source: std::io::Error, - }, - 
/// removing directories failed: `{0:?}` - DirectoryRemovalFailed(std::io::Error), - /// removing file failed: `{0:?}` - FileRemovalFailed(std::io::Error), - /// setting time metadata failed: `{0:?}` - SettingTimeMetadataFailed(std::io::Error), - /// opening file failed: `{0:?}` - OpeningFileFailed(std::io::Error), - /// setting file length failed: `{0:?}` - SettingFileLengthFailed(std::io::Error), - /// can't jump to position in file: `{0:?}` - CouldNotSeekToPositionInFile(std::io::Error), - /// couldn't write to buffer: `{0:?}` - CouldNotWriteToBuffer(std::io::Error), - /// reading exact length of file contents failed: `{0:?}` - ReadingExactLengthOfFileFailed(std::io::Error), - /// setting file permissions failed: `{0:?}` - #[cfg(not(windows))] - SettingFilePermissionsFailed(std::io::Error), - /// failed to symlink target `{linktarget:?}` from `{filename:?}` with `{source:?}` - #[cfg(not(windows))] - SymlinkingFailed { - linktarget: PathBuf, - filename: PathBuf, - source: std::io::Error, - }, -} + if !context.is_empty() { + writeln!(f)?; + writeln!(f, "Context:")?; + context + .iter() + .try_for_each(|(key, value)| writeln!(f, "- {key}: {value}"))?; + } -/// [`NodeErrorKind`] describes the errors that can be returned by an action utilizing a node in Backends -#[derive(Error, Debug, Display)] -pub enum NodeErrorKind { - /// Parsing integer failed: `{0:?}` - FromParseIntError(#[from] ParseIntError), - /// Unexpected EOF - #[cfg(not(windows))] - UnexpectedEOF, - /// Invalid unicode - #[cfg(not(windows))] - InvalidUnicode, - /// Unrecognized Escape - #[cfg(not(windows))] - UnrecognizedEscape, -} + if let Some(cause) = &self.source { + writeln!(f)?; + writeln!(f, "Caused by:")?; + writeln!(f, "{cause}")?; + if let Some(source) = cause.source() { + write!(f, " : (source: {source:?})")?; + } + writeln!(f)?; + } -/// [`StdInErrorKind`] describes the errors that can be returned while dealing IO from CLI -#[derive(Error, Debug, Display)] -pub enum StdInErrorKind { - /// error reading from stdin: `{0:?}` - StdInError(#[from] std::io::Error), -} + if let Some(severity) = &self.severity { + writeln!(f)?; + writeln!(f, "Severity: {severity}")?; + } + + if let Some(status) = &self.status { + writeln!(f)?; + writeln!(f, "Status: {status}")?; + } -/// [`ArchiverErrorKind`] describes the errors that can be returned from the archiver -#[derive(Error, Debug, Display)] -pub enum ArchiverErrorKind { - /// tree stack empty - TreeStackEmpty, - /// cannot open file - OpeningFileFailed, - /// option should contain a value, but contained `None` - UnpackingTreeTypeOptionalFailed, - /// couldn't get size for archive: `{0:?}` - CouldNotGetSizeForArchive(#[from] BackendAccessErrorKind), - /// couldn't determine size for item in Archiver - CouldNotDetermineSize, - /// failed to save index: `{0:?}` - IndexSavingFailed(#[from] IndexErrorKind), - /// failed to save file in backend: `{0:?}` - FailedToSaveFileInBackend(#[from] CryptBackendErrorKind), - /// finalizing SnapshotSummary failed: `{0:?}` - FinalizingSnapshotSummaryFailed(#[from] SnapshotFileErrorKind), - /// `{0}` - #[error(transparent)] - FromPacker(#[from] PackerErrorKind), - /// `{0}` - #[error(transparent)] - FromTree(#[from] TreeErrorKind), - /// `{0}` - #[error(transparent)] - FromConfigFile(#[from] ConfigFileErrorKind), - /// `{0}` - #[error(transparent)] - FromStdIo(#[from] std::io::Error), - /// `{0}` - #[error(transparent)] - FromStripPrefix(#[from] StripPrefixError), - /// conversion from `u64` to `usize` failed: `{0:?}` - 
ConversionFromU64ToUsizeFailed(TryFromIntError), + if let Some(backtrace) = &self.backtrace { + writeln!(f)?; + writeln!(f, "Backtrace:")?; + write!(f, "{backtrace}")?; + + if backtrace.status() == BacktraceStatus::Disabled { + writeln!( + f, + " (set 'RUST_BACKTRACE=\"1\"' environment variable to enable)" + )?; + } + } + + Ok(()) + } } -/// [`VfsErrorKind`] describes the errors that can be returned from the Virtual File System -#[derive(Error, Debug, Display)] -pub enum VfsErrorKind { - /// No directory entries for symlink found: `{0:?}` - NoDirectoryEntriesForSymlinkFound(OsString), - /// Directory exists as non-virtual directory - DirectoryExistsAsNonVirtual, - /// Only normal paths allowed - OnlyNormalPathsAreAllowed, - /// Name `{0:?}`` doesn't exist - NameDoesNotExist(OsString), +// Accessors for anything we do want to expose publicly. +impl RusticError { + /// Creates a new error with the given kind and guidance. + pub fn new(kind: ErrorKind, guidance: impl Into<EcoString>) -> Box<Self> { + Box::new(Self { + kind, + guidance: guidance.into(), + context: EcoVec::default(), + source: None, + error_code: None, + docs_url: None, + new_issue_url: None, + existing_issue_urls: EcoVec::default(), + severity: None, + status: None, + ask_report: false, + // `Backtrace::capture()` will check if backtrace has been enabled + // internally. It's zero cost if backtrace is disabled. + backtrace: Some(Backtrace::capture()), + }) + } + + /// Creates a new error with the given kind and guidance, attaching the given source error. + pub fn with_source( + kind: ErrorKind, + guidance: impl Into<EcoString>, + source: impl Into<Box<dyn std::error::Error + Send + Sync>>, + ) -> Box<Self> { + Self::new(kind, guidance).attach_source(source) + } + + /// Checks if the error has a specific error code. + pub fn is_code(&self, code: &str) -> bool { + self.error_code + .as_ref() + .map_or(false, |c| c.as_str() == code) + } + + /// Checks if the error is due to an incorrect password. + pub fn is_incorrect_password(&self) -> bool { + self.is_code("C002") + } + + /// Creates a new error from a given error. + pub fn from<T: std::error::Error + Send + Sync + 'static>( + kind: ErrorKind, + error: T, + ) -> Box<Self> { + Self::with_source(kind, error.to_string(), error) + } } -trait RusticErrorMarker: Error {} - -impl RusticErrorMarker for CryptoErrorKind {} -impl RusticErrorMarker for PolynomialErrorKind {} -impl RusticErrorMarker for IdErrorKind {} -impl RusticErrorMarker for RepositoryErrorKind {} -impl RusticErrorMarker for IndexErrorKind {} -impl RusticErrorMarker for BackendAccessErrorKind {} -impl RusticErrorMarker for ConfigFileErrorKind {} -impl RusticErrorMarker for KeyFileErrorKind {} -impl RusticErrorMarker for PackFileErrorKind {} -impl RusticErrorMarker for SnapshotFileErrorKind {} -impl RusticErrorMarker for PackerErrorKind {} -impl RusticErrorMarker for FileErrorKind {} -impl RusticErrorMarker for TreeErrorKind {} -impl RusticErrorMarker for CacheBackendErrorKind {} -impl RusticErrorMarker for CryptBackendErrorKind {} -impl RusticErrorMarker for IgnoreErrorKind {} -impl RusticErrorMarker for LocalDestinationErrorKind {} -impl RusticErrorMarker for NodeErrorKind {} -impl RusticErrorMarker for StdInErrorKind {} -impl RusticErrorMarker for ArchiverErrorKind {} -impl RusticErrorMarker for CommandErrorKind {} -impl RusticErrorMarker for VfsErrorKind {} -impl RusticErrorMarker for std::io::Error {} - -impl From for RusticError -where - E: RusticErrorMarker, - RusticErrorKind: From, -{ - fn from(value: E) -> Self { - Self(RusticErrorKind::from(value))
+// +// These were initially generated by `derive_setters`, +// and then manually adjusted to return `Box` instead of `Self` which +// unfortunately is not possible with the current version of the `derive_setters`. +// +// BEWARE! `attach_context` is manually implemented to allow for multiple contexts +// to be added and is not generated by `derive_setters`. +impl RusticError { + /// Attach what kind the error is. + pub fn overwrite_kind(self, value: impl Into) -> Box { + Box::new(Self { + kind: value.into(), + ..self + }) + } + + /// Ask the user to report the error. + pub fn ask_report(self) -> Box { + Box::new(Self { + ask_report: true, + ..self + }) + } + + /// Attach a chain to the cause of the error. + pub fn attach_source( + self, + value: impl Into>, + ) -> Box { + Box::new(Self { + source: Some(value.into()), + ..self + }) + } + + /// Attach the error message with guidance. + pub fn overwrite_guidance(self, value: impl Into) -> Box { + Box::new(Self { + guidance: value.into(), + ..self + }) + } + + /// Append a newline to the guidance message. + /// This is useful for adding additional information to the guidance. + pub fn append_guidance_line(self, value: impl Into) -> Box { + Box::new(Self { + guidance: format!("{}\n{}", self.guidance, value.into()).into(), + ..self + }) + } + + /// Prepend a newline to the guidance message. + /// This is useful for adding additional information to the guidance. + pub fn prepend_guidance_line(self, value: impl Into) -> Box { + Box::new(Self { + guidance: format!("{}\n{}", value.into(), self.guidance).into(), + ..self + }) + } + + // IMPORTANT: This is manually implemented to allow for multiple contexts to be added. + /// Attach context to the error. + pub fn attach_context( + mut self, + key: impl Into, + value: impl Into, + ) -> Box { + self.context.push((key.into(), value.into())); + Box::new(self) + } + + /// Overwrite context of the error. + /// + /// # Caution + /// + /// This should not be used in most cases, as it will overwrite any existing contexts. + /// Rather use `attach_context` for multiple contexts. + pub fn overwrite_context(self, value: impl Into>) -> Box { + Box::new(Self { + context: value.into(), + ..self + }) + } + + /// Attach the URL of the documentation for the error. + pub fn attach_docs_url(self, value: impl Into) -> Box { + Box::new(Self { + docs_url: Some(value.into()), + ..self + }) + } + + /// Attach an error code. + pub fn attach_error_code(self, value: impl Into) -> Box { + Box::new(Self { + error_code: Some(value.into()), + ..self + }) + } + + /// Attach the URL of the issue tracker for opening a new issue. + pub fn attach_new_issue_url(self, value: impl Into) -> Box { + Box::new(Self { + new_issue_url: Some(value.into()), + ..self + }) + } + + /// Attach the URL of an already existing issue that is related to this error. + pub fn attach_existing_issue_url(mut self, value: impl Into) -> Box { + self.existing_issue_urls.push(value.into()); + Box::new(self) + } + + /// Attach the severity of the error. + pub fn attach_severity(self, value: impl Into) -> Box { + Box::new(Self { + severity: Some(value.into()), + ..self + }) + } + + /// Attach the status of the error. + pub fn attach_status(self, value: impl Into) -> Box { + Box::new(Self { + status: Some(value.into()), + ..self + }) + } + + /// Overwrite the backtrace of the error. + /// + /// This should not be used in most cases, as the backtrace is automatically captured. 
+ pub fn overwrite_backtrace(self, value: impl Into) -> Box { + Box::new(Self { + backtrace: Some(value.into()), + ..self + }) } } diff --git a/crates/core/src/id.rs b/crates/core/src/id.rs index 6f9c59e5..856cab24 100644 --- a/crates/core/src/id.rs +++ b/crates/core/src/id.rs @@ -7,7 +7,10 @@ use derive_more::{Constructor, Display}; use rand::{thread_rng, RngCore}; use serde_derive::{Deserialize, Serialize}; -use crate::{crypto::hasher::hash, error::IdErrorKind, RusticError, RusticResult}; +use crate::{ + crypto::hasher::hash, + error::{ErrorKind, RusticError, RusticResult}, +}; pub(super) mod constants { /// The length of the hash in bytes @@ -40,6 +43,14 @@ macro_rules! define_new_id_struct { )] #[serde(transparent)] pub struct $a($crate::Id); + + impl $a { + /// impl `into_inner` + #[must_use] + pub fn into_inner(self) -> $crate::Id { + self.0 + } + } }; } @@ -71,10 +82,18 @@ pub struct Id( ); impl FromStr for Id { - type Err = RusticError; + type Err = Box; fn from_str(s: &str) -> Result { let mut id = Self::default(); - hex::decode_to_slice(s, &mut id.0).map_err(IdErrorKind::HexError)?; + hex::decode_to_slice(s, &mut id.0).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Failed to decode hex string `{value}` into Id. The value must be a valid hexadecimal string.", + err + ) + .attach_context("value", s) + })?; + Ok(id) } } @@ -88,19 +107,15 @@ impl Id { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string + /// * If the string is not a valid hexadecimal string /// /// # Examples /// /// ``` - /// use rustic_core::Id; - /// + /// # use rustic_core::Id; /// let id = Id::from_hex("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef").unwrap(); - /// - /// assert_eq!(id.to_hex().as_str(), "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"); + /// # assert_eq!(id.to_hex().as_str(), "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"); /// ``` - /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError #[deprecated(note = "use FromStr::from_str instead")] pub fn from_hex(s: &str) -> RusticResult { s.parse() @@ -134,7 +149,7 @@ impl Id { /// /// # Panics /// - /// Panics if the `hex` crate fails to encode the hash + /// * Panics if the `hex` crate fails to encode the hash // TODO! 
- remove the panic #[must_use] pub fn to_hex(self) -> HexId { @@ -202,7 +217,7 @@ impl HexId { /// /// # Panics /// - /// If the [`HexId`] is not a valid UTF-8 string + /// * If the [`HexId`] is not a valid UTF-8 string #[must_use] pub fn as_str(&self) -> &str { // This is only ever filled with hex chars, which are ascii diff --git a/crates/core/src/index.rs b/crates/core/src/index.rs index 60630a08..4251d2ff 100644 --- a/crates/core/src/index.rs +++ b/crates/core/src/index.rs @@ -6,14 +6,13 @@ use derive_more::Constructor; use crate::{ backend::{decrypt::DecryptReadBackend, FileType}, blob::{tree::TreeId, BlobId, BlobType, DataId}, - error::IndexErrorKind, + error::{ErrorKind, RusticError, RusticResult}, index::binarysorted::{Index, IndexCollector, IndexType}, progress::Progress, repofile::{ indexfile::{IndexBlob, IndexFile}, packfile::PackId, }, - RusticResult, }; pub(crate) mod binarysorted; @@ -70,6 +69,7 @@ impl IndexEntry { self.length, self.uncompressed_length, )?; + Ok(data) } @@ -175,9 +175,7 @@ pub trait ReadIndex { /// /// # Errors /// - /// * [`IndexErrorKind::BlobInIndexNotFound`] - If the blob could not be found in the index - /// - /// [`IndexErrorKind::BlobInIndexNotFound`]: crate::error::IndexErrorKind::BlobInIndexNotFound + /// * If the blob could not be found in the index fn blob_from_backend( &self, be: &impl DecryptReadBackend, @@ -185,7 +183,14 @@ pub trait ReadIndex { id: &BlobId, ) -> RusticResult { self.get_id(tpe, id).map_or_else( - || Err(IndexErrorKind::BlobInIndexNotFound.into()), + || { + Err(RusticError::new( + ErrorKind::Internal, + "Blob `{id}` with type `{type}` not found in index", + ) + .attach_context("id", id.to_string()) + .attach_context("type", tpe.to_string())) + }, |ie| ie.read_data(be), ) } @@ -267,7 +272,7 @@ impl GlobalIndex { /// /// # Errors /// - /// If the index could not be read + /// * If the index could not be read fn new_from_collector( be: &impl DecryptReadBackend, p: &impl Progress, @@ -302,7 +307,7 @@ impl GlobalIndex { /// /// # Errors /// - /// If the index could not be read + /// * If the index could not be read pub fn only_full_trees(be: &impl DecryptReadBackend, p: &impl Progress) -> RusticResult { Self::new_from_collector(be, p, IndexCollector::new(IndexType::DataIds)) } diff --git a/crates/core/src/index/indexer.rs b/crates/core/src/index/indexer.rs index 902f634b..8f8f143e 100644 --- a/crates/core/src/index/indexer.rs +++ b/crates/core/src/index/indexer.rs @@ -101,9 +101,7 @@ impl Indexer { /// /// # Errors /// - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the index file could not be serialized. - /// - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed + /// * If the index file could not be serialized. pub fn finalize(&self) -> RusticResult<()> { self.save() } @@ -112,9 +110,7 @@ impl Indexer { /// /// # Errors /// - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the index file could not be serialized. - /// - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed + /// * If the index file could not be serialized. 
pub fn save(&self) -> RusticResult<()> { if (self.file.packs.len() + self.file.packs_to_delete.len()) > 0 { _ = self.be.save_file(&self.file)?; @@ -130,9 +126,7 @@ impl Indexer { /// /// # Errors /// - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the index file could not be serialized. - /// - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed + /// * If the index file could not be serialized. pub fn add(&mut self, pack: IndexPack) -> RusticResult<()> { self.add_with(pack, false) } @@ -145,9 +139,7 @@ impl Indexer { /// /// # Errors /// - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the index file could not be serialized. - /// - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed + /// * If the index file could not be serialized. pub fn add_remove(&mut self, pack: IndexPack) -> RusticResult<()> { self.add_with(pack, true) } @@ -161,9 +153,7 @@ impl Indexer { /// /// # Errors /// - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the index file could not be serialized. - /// - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed + /// * If the index file could not be serialized. pub fn add_with(&mut self, pack: IndexPack, delete: bool) -> RusticResult<()> { self.count += pack.blobs.len(); diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index ea763bdb..161cb018 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -145,14 +145,15 @@ pub use crate::{ repoinfo::{BlobInfo, IndexInfos, PackInfo, RepoFileInfo, RepoFileInfos}, restore::{FileDirStats, RestoreOptions, RestorePlan, RestoreStats}, }, - error::{RusticError, RusticResult}, + error::{ErrorKind, RusticError, RusticResult, Severity, Status}, id::{HexId, Id}, progress::{NoProgress, NoProgressBars, Progress, ProgressBars}, repofile::snapshotfile::{ PathList, SnapshotGroup, SnapshotGroupCriterion, SnapshotOptions, StringList, }, repository::{ - CommandInput, FullIndex, IndexedFull, IndexedIds, IndexedStatus, IndexedTree, Open, - OpenStatus, Repository, RepositoryOptions, + command_input::{CommandInput, CommandInputErrorKind}, + FullIndex, IndexedFull, IndexedIds, IndexedStatus, IndexedTree, Open, OpenStatus, + Repository, RepositoryOptions, }, }; diff --git a/crates/core/src/repofile/configfile.rs b/crates/core/src/repofile/configfile.rs index 278a95bf..52e4d11b 100644 --- a/crates/core/src/repofile/configfile.rs +++ b/crates/core/src/repofile/configfile.rs @@ -2,8 +2,12 @@ use serde_derive::{Deserialize, Serialize}; use serde_with::skip_serializing_none; use crate::{ - backend::FileType, blob::BlobType, define_new_id_struct, error::ConfigFileErrorKind, - impl_repofile, repofile::RepoFile, RusticResult, + backend::FileType, + blob::BlobType, + define_new_id_struct, + error::{ErrorKind, RusticError, RusticResult}, + impl_repofile, + repofile::RepoFile, }; pub(super) mod constants { @@ -131,31 +135,38 @@ impl ConfigFile { /// /// # Errors /// - /// * [`ConfigFileErrorKind::ParsingFailedForPolynomial`] - If the polynomial could not be parsed - /// - /// [`ConfigFileErrorKind::ParsingFailedForPolynomial`]: crate::error::ConfigFileErrorKind::ParsingFailedForPolynomial + /// * If the polynomial could not be parsed pub fn poly(&self) -> RusticResult { - 
Ok(u64::from_str_radix(&self.chunker_polynomial, 16) - .map_err(ConfigFileErrorKind::ParsingFailedForPolynomial)?) + let chunker_poly = u64::from_str_radix(&self.chunker_polynomial, 16) + .map_err(|err| RusticError::with_source( + ErrorKind::InvalidInput, + "Parsing u64 from hex failed for polynomial `{polynomial}`. The value must be a valid hexadecimal string.", + err) + .attach_context("polynomial", self.chunker_polynomial.to_string())) + ?; + + Ok(chunker_poly) } /// Get the compression level /// /// # Errors /// - /// * [`ConfigFileErrorKind::ConfigVersionNotSupported`] - If the version is not supported - /// - /// [`ConfigFileErrorKind::ConfigVersionNotSupported`]: crate::error::ConfigFileErrorKind::ConfigVersionNotSupported + /// * If the version is not supported pub fn zstd(&self) -> RusticResult<Option<i32>> { match (self.version, self.compression) { (1, _) | (2, Some(0)) => Ok(None), (2, None) => Ok(Some(0)), // use default (=0) zstd compression (2, Some(c)) => Ok(Some(c)), - _ => Err(ConfigFileErrorKind::ConfigVersionNotSupported.into()), + _ => Err(RusticError::new( + ErrorKind::Unsupported, + "Config version `{version}` not supported. Please make sure that you use the correct version.", + ) + .attach_context("version", self.version.to_string())), } } - /// Get wheter an extra verification (decompressing/decrypting data before writing to the repository) should be performed. + /// Get whether an extra verification (decompressing/decrypting data before writing to the repository) should be performed. #[must_use] pub fn extra_verify(&self) -> bool { self.extra_verify.unwrap_or(true) // default is to do the extra check diff --git a/crates/core/src/repofile/keyfile.rs b/crates/core/src/repofile/keyfile.rs index cbd44c2c..ce134aa1 100644 --- a/crates/core/src/repofile/keyfile.rs +++ b/crates/core/src/repofile/keyfile.rs @@ -7,10 +7,25 @@ use serde_with::{base64::Base64, serde_as, skip_serializing_none}; use crate::{ backend::{FileType, ReadBackend}, crypto::{aespoly1305::Key, CryptoKey}, - error::{CryptoErrorKind, KeyFileErrorKind, RusticErrorKind, RusticResult}, - impl_repoid, RusticError, + error::{ErrorKind, RusticError, RusticResult}, + impl_repoid, }; +/// [`KeyFileErrorKind`] describes the errors that can be returned for `KeyFile`s +#[derive(thiserror::Error, Debug, displaydoc::Display)] +#[non_exhaustive] +pub enum KeyFileErrorKind { + /// conversion from `{from}` to `{to}` failed for `{x}`: `{source}` + ConversionFailed { + from: &'static str, + to: &'static str, + x: u32, + source: std::num::TryFromIntError, + }, +} + +pub(crate) type KeyFileResult<T> = Result<T, KeyFileErrorKind>; + pub(super) mod constants { /// Returns the number of bits of the given type.
pub(super) const fn num_bits() -> usize { @@ -69,22 +84,41 @@ impl KeyFile { /// /// # Errors /// - /// * [`KeyFileErrorKind::InvalidSCryptParameters`] - If the parameters of the key derivation function are invalid - /// * [`KeyFileErrorKind::OutputLengthInvalid`] - If the output length of the key derivation function is invalid + /// * If the parameters of the key derivation function are invalid + /// * If the output length of the key derivation function is invalid /// /// # Returns /// /// The generated key - /// - /// [`KeyFileErrorKind::InvalidSCryptParameters`]: crate::error::KeyFileErrorKind::InvalidSCryptParameters - /// [`KeyFileErrorKind::OutputLengthInvalid`]: crate::error::KeyFileErrorKind::OutputLengthInvalid pub fn kdf_key(&self, passwd: &impl AsRef<[u8]>) -> RusticResult { - let params = Params::new(log_2(self.n)?, self.r, self.p, Params::RECOMMENDED_LEN) - .map_err(KeyFileErrorKind::InvalidSCryptParameters)?; + let params = Params::new( + log_2(self.n).map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Calculating log2 failed. Please check the key file and password.", + err, + ) + })?, + self.r, + self.p, + Params::RECOMMENDED_LEN, + ) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Key, + "Invalid scrypt parameters. Please check the key file and password.", + err, + ) + })?; let mut key = [0; 64]; - scrypt::scrypt(passwd.as_ref(), &self.salt, ¶ms, &mut key) - .map_err(KeyFileErrorKind::OutputLengthInvalid)?; + scrypt::scrypt(passwd.as_ref(), &self.salt, ¶ms, &mut key).map_err(|err| { + RusticError::with_source( + ErrorKind::Key, + "Output length invalid. Please check the key file and password.", + err, + ) + })?; Ok(Key::from_slice(&key)) } @@ -98,18 +132,25 @@ impl KeyFile { /// /// # Errors /// - /// * [`KeyFileErrorKind::DeserializingFromSliceFailed`] - If the data could not be deserialized + /// * If the data could not be deserialized /// /// # Returns /// /// The extracted key - /// - /// [`KeyFileErrorKind::DeserializingFromSliceFailed`]: crate::error::KeyFileErrorKind::DeserializingFromSliceFailed pub fn key_from_data(&self, key: &Key) -> RusticResult { let dec_data = key.decrypt_data(&self.data)?; - Ok(serde_json::from_slice::(&dec_data) - .map_err(KeyFileErrorKind::DeserializingFromSliceFailed)? - .key()) + + let key = serde_json::from_slice::(&dec_data) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Key, + "Deserializing master key from slice failed. Please check the key file.", + err, + ) + })? + .key(); + + Ok(key) } /// Extract a key from the data of the [`KeyFile`] using the key @@ -121,13 +162,11 @@ impl KeyFile { /// /// # Errors /// - /// * [`KeyFileErrorKind::InvalidSCryptParameters`] - If the parameters of the key derivation function are invalid + /// * If the parameters of the key derivation function are invalid /// /// # Returns /// /// The extracted key - /// - /// [`KeyFileErrorKind::InvalidSCryptParameters`]: crate::error::KeyFileErrorKind::InvalidSCryptParameters pub fn key_from_password(&self, passwd: &impl AsRef<[u8]>) -> RusticResult { self.key_from_data(&self.kdf_key(passwd)?) 
} @@ -144,15 +183,12 @@ impl KeyFile { /// /// # Errors /// - /// * [`KeyFileErrorKind::OutputLengthInvalid`] - If the output length of the key derivation function is invalid - /// * [`KeyFileErrorKind::CouldNotSerializeAsJsonByteVector`] - If the [`KeyFile`] could not be serialized + /// * If the output length of the key derivation function is invalid + /// * If the [`KeyFile`] could not be serialized /// /// # Returns /// /// The generated [`KeyFile`] - /// - /// [`KeyFileErrorKind::OutputLengthInvalid`]: crate::error::KeyFileErrorKind::OutputLengthInvalid - /// [`KeyFileErrorKind::CouldNotSerializeAsJsonByteVector`]: crate::error::KeyFileErrorKind::CouldNotSerializeAsJsonByteVector pub fn generate( key: Key, passwd: &impl AsRef<[u8]>, @@ -166,14 +202,26 @@ impl KeyFile { thread_rng().fill_bytes(&mut salt); let mut key = [0; 64]; - scrypt::scrypt(passwd.as_ref(), &salt, ¶ms, &mut key) - .map_err(KeyFileErrorKind::OutputLengthInvalid)?; + scrypt::scrypt(passwd.as_ref(), &salt, ¶ms, &mut key).map_err(|err| { + RusticError::with_source( + ErrorKind::Key, + "Output length invalid. Please check the key file and password.", + err, + ) + })?; let key = Key::from_slice(&key); - let data = key.encrypt_data( - &serde_json::to_vec(&masterkey) - .map_err(KeyFileErrorKind::CouldNotSerializeAsJsonByteVector)?, - )?; + + let json_byte_vec = serde_json::to_vec(&masterkey).map_err(|err| { + RusticError::with_source( + ErrorKind::Key, + "Could not serialize as JSON byte vector.", + err, + ) + .ask_report() + })?; + + let data = key.encrypt_data(&json_byte_vec)?; Ok(Self { hostname, @@ -197,19 +245,22 @@ impl KeyFile { /// /// # Errors /// - // TODO!: Add errors! + /// * If the [`KeyFile`] could not be deserialized/read from the backend /// /// # Returns /// /// The [`KeyFile`] read from the backend fn from_backend(be: &B, id: &KeyId) -> RusticResult { - let data = be - .read_full(FileType::Key, id) - .map_err(RusticErrorKind::Backend)?; - Ok( - serde_json::from_slice(&data) - .map_err(KeyFileErrorKind::DeserializingFromSliceFailed)?, - ) + let data = be.read_full(FileType::Key, id)?; + + serde_json::from_slice(&data).map_err(|err| { + RusticError::with_source( + ErrorKind::Key, + "Couldn't deserialize the data for key `{key_id}`.", + err, + ) + .attach_context("key_id", id.to_string()) + }) } } @@ -221,19 +272,26 @@ impl KeyFile { /// /// # Errors /// -/// * [`KeyFileErrorKind::ConversionFromU32ToU8Failed`] - If the conversion from `u32` to `u8` failed +/// * If the conversion from `u32` to `u8` failed /// /// # Returns /// /// The logarithm to base 2 of the given number -/// -/// [`KeyFileErrorKind::ConversionFromU32ToU8Failed`]: crate::error::KeyFileErrorKind::ConversionFromU32ToU8Failed -fn log_2(x: u32) -> RusticResult { +fn log_2(x: u32) -> KeyFileResult { assert!(x > 0); - Ok(u8::try_from(constants::num_bits::()) - .map_err(KeyFileErrorKind::ConversionFromU32ToU8Failed)? - - u8::try_from(x.leading_zeros()).map_err(KeyFileErrorKind::ConversionFromU32ToU8Failed)? - - 1) + Ok(u8::try_from(constants::num_bits::()).map_err(|err| { + KeyFileErrorKind::ConversionFailed { + from: "usize", + to: "u8", + x, + source: err, + } + })? - u8::try_from(x.leading_zeros()).map_err(|err| KeyFileErrorKind::ConversionFailed { + from: "u32", + to: "u8", + x, + source: err, + })? 
- 1) } /// The mac of a [`Key`] @@ -319,13 +377,11 @@ pub(crate) fn key_from_backend( /// /// # Errors /// -/// * [`KeyFileErrorKind::NoSuitableKeyFound`] - If no suitable key was found +/// * If no suitable key was found /// /// # Returns /// /// The found key -/// -/// [`KeyFileErrorKind::NoSuitableKeyFound`]: crate::error::KeyFileErrorKind::NoSuitableKeyFound pub(crate) fn find_key_in_backend( be: &B, passwd: &impl AsRef<[u8]>, @@ -334,15 +390,17 @@ pub(crate) fn find_key_in_backend( if let Some(id) = hint { key_from_backend(be, id, passwd) } else { - for id in be.list(FileType::Key).map_err(RusticErrorKind::Backend)? { + for id in be.list(FileType::Key)? { match key_from_backend(be, &id.into(), passwd) { Ok(key) => return Ok(key), - Err(RusticError(RusticErrorKind::Crypto( - CryptoErrorKind::DataDecryptionFailed(_), - ))) => continue, + Err(err) if err.is_code("C001") => continue, err => return err, } } - Err(KeyFileErrorKind::NoSuitableKeyFound.into()) + + Err(RusticError::new( + ErrorKind::Password, + "The password that has been entered seems to be incorrect. No suitable key found for the given password. Please check your password and try again.", + ).attach_error_code("C002")) } } diff --git a/crates/core/src/repofile/packfile.rs b/crates/core/src/repofile/packfile.rs index 87a2404d..c0202ad6 100644 --- a/crates/core/src/repofile/packfile.rs +++ b/crates/core/src/repofile/packfile.rs @@ -6,13 +6,24 @@ use log::trace; use crate::{ backend::{decrypt::DecryptReadBackend, FileType}, blob::BlobType, - error::{PackFileErrorKind, RusticErrorKind}, + error::{ErrorKind, RusticError, RusticResult}, id::Id, impl_repoid, repofile::indexfile::{IndexBlob, IndexPack}, - RusticResult, }; +/// [`PackFileErrorKind`] describes the errors that can be returned for `PackFile`s +#[derive(thiserror::Error, Debug, displaydoc::Display)] +#[non_exhaustive] +pub enum PackFileErrorKind { +/// Failed reading binary representation of the pack header: `{0:?}` + ReadingBinaryRepresentationFailed(binrw::Error), + /// Failed writing binary representation of the pack header: `{0:?}` + WritingBinaryRepresentationFailed(binrw::Error), +} + +pub(crate) type PackFileResult<T> = Result<T, PackFileErrorKind>; + impl_repoid!(PackId, FileType::Pack); pub(super) mod constants { @@ -54,25 +65,18 @@ impl PackHeaderLength { /// /// # Errors /// - /// * [`PackFileErrorKind::ReadingBinaryRepresentationFailed`] - If reading the binary representation failed - /// - /// [`PackFileErrorKind::ReadingBinaryRepresentationFailed`]: crate::error::PackFileErrorKind::ReadingBinaryRepresentationFailed - pub(crate) fn from_binary(data: &[u8]) -> RusticResult { + /// * If reading the binary representation failed + pub(crate) fn from_binary(data: &[u8]) -> PackFileResult<Self> { let mut reader = Cursor::new(data); - Ok( - Self::read(&mut reader) - .map_err(PackFileErrorKind::ReadingBinaryRepresentationFailed)?, - ) + Self::read(&mut reader).map_err(PackFileErrorKind::ReadingBinaryRepresentationFailed) } /// Generate the binary representation of the pack header length /// /// # Errors /// - /// * [`PackFileErrorKind::WritingBinaryRepresentationFailed`] - If writing the binary representation failed - /// - /// [`PackFileErrorKind::WritingBinaryRepresentationFailed`]: crate::error::PackFileErrorKind::WritingBinaryRepresentationFailed - pub(crate) fn to_binary(self) -> RusticResult> { + /// * If writing the binary representation failed + pub(crate) fn to_binary(self) -> PackFileResult<Vec<u8>> { let mut writer = Cursor::new(Vec::with_capacity(4)); self.write(&mut writer)
.map_err(PackFileErrorKind::WritingBinaryRepresentationFailed)?; @@ -118,7 +122,7 @@ pub enum HeaderEntry { CompTree { /// Lengths within a packfile len: u32, - /// Raw blob length withou compression/encryption + /// Raw blob length without compression/encryption len_data: u32, /// Id of compressed tree blob id: Id, @@ -220,10 +224,8 @@ impl PackHeader { /// /// # Errors /// - /// * [`PackFileErrorKind::ReadingBinaryRepresentationFailed`] - If reading the binary representation failed - /// - /// [`PackFileErrorKind::ReadingBinaryRepresentationFailed`]: crate::error::PackFileErrorKind::ReadingBinaryRepresentationFailed - pub(crate) fn from_binary(pack: &[u8]) -> RusticResult { + /// * If reading the binary representation failed + pub(crate) fn from_binary(pack: &[u8]) -> PackFileResult { let mut reader = Cursor::new(pack); let mut offset = 0; let mut blobs = Vec::new(); @@ -231,9 +233,7 @@ impl PackHeader { let blob = match HeaderEntry::read(&mut reader) { Ok(entry) => entry.into_blob(offset), Err(err) if err.is_eof() => break, - Err(err) => { - return Err(PackFileErrorKind::ReadingBinaryRepresentationFailed(err).into()) - } + Err(err) => return Err(PackFileErrorKind::ReadingBinaryRepresentationFailed(err)), }; offset += blob.length; blobs.push(blob); @@ -252,15 +252,10 @@ impl PackHeader { /// /// # Errors /// - /// * [`PackFileErrorKind::ReadingBinaryRepresentationFailed`] - If reading the binary representation failed - /// * [`PackFileErrorKind::HeaderLengthTooLarge`] - If the header length is too large - /// * [`PackFileErrorKind::HeaderLengthDoesNotMatchHeaderContents`] - If the header length does not match the header contents - /// * [`PackFileErrorKind::HeaderPackSizeComputedDoesNotMatchRealPackFile`] - If the pack size computed from the header does not match the real pack file size - /// - /// [`PackFileErrorKind::ReadingBinaryRepresentationFailed`]: crate::error::PackFileErrorKind::ReadingBinaryRepresentationFailed - /// [`PackFileErrorKind::HeaderLengthTooLarge`]: crate::error::PackFileErrorKind::HeaderLengthTooLarge - /// [`PackFileErrorKind::HeaderLengthDoesNotMatchHeaderContents`]: crate::error::PackFileErrorKind::HeaderLengthDoesNotMatchHeaderContents - /// [`PackFileErrorKind::HeaderPackSizeComputedDoesNotMatchRealPackFile`]: crate::error::PackFileErrorKind::HeaderPackSizeComputedDoesNotMatchRealPackFile + /// * If reading the binary representation failed + /// * If the header length is too large + /// * If the header length does not match the header contents + /// * If the pack size computed from the header does not match the real pack file size pub(crate) fn from_file( be: &impl DecryptReadBackend, id: PackId, @@ -275,21 +270,28 @@ impl PackHeader { // read (guessed) header + length field let read_size = size_guess + constants::LENGTH_LEN; let offset = pack_size - read_size; - let mut data = be - .read_partial(FileType::Pack, &id, false, offset, read_size) - .map_err(RusticErrorKind::Backend)?; + let mut data = be.read_partial(FileType::Pack, &id, false, offset, read_size)?; // get header length from the file - let size_real = - PackHeaderLength::from_binary(&data.split_off(size_guess as usize))?.to_u32(); + let size_real = PackHeaderLength::from_binary(&data.split_off(size_guess as usize)) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Reading pack header length failed", + err, + ) + })? 
+ .to_u32(); trace!("header size: {size_real}"); if size_real + constants::LENGTH_LEN > pack_size { - return Err(PackFileErrorKind::HeaderLengthTooLarge { - size_real, - pack_size, - } - .into()); + return Err(RusticError::new( + ErrorKind::Internal, + "Read header length `{size_real}` + `{length}` is larger than `{pack_size}`!", + ) + .attach_context("size_real", size_real.to_string()) + .attach_context("pack_size", pack_size.to_string()) + .attach_context("length", constants::LENGTH_LEN.to_string())); } // now read the header @@ -299,28 +301,29 @@ impl PackHeader { } else { // size_guess was too small; we have to read again let offset = pack_size - size_real - constants::LENGTH_LEN; - be.read_partial(FileType::Pack, &id, false, offset, size_real) - .map_err(RusticErrorKind::Backend)? + be.read_partial(FileType::Pack, &id, false, offset, size_real)? }; - let header = Self::from_binary(&be.decrypt(&data)?)?; + let header = Self::from_binary(&be.decrypt(&data)?).map_err(|err| { + RusticError::with_source(ErrorKind::Internal, "Reading pack header failed.", err) + })?; if header.size() != size_real { - return Err(PackFileErrorKind::HeaderLengthDoesNotMatchHeaderContents { - size_real, - size_computed: header.size(), - } - .into()); + return Err(RusticError::new( + ErrorKind::Internal, + "Read header length doesn't match header contents!", + ) + .attach_context("size_real", size_real.to_string()) + .attach_context("size_computed", header.size().to_string())); } if header.pack_size() != pack_size { - return Err( - PackFileErrorKind::HeaderPackSizeComputedDoesNotMatchRealPackFile { - size_real: pack_size, - size_computed: header.pack_size(), - } - .into(), - ); + return Err(RusticError::new( + ErrorKind::Internal, + "pack size `{size_computed}` computed from header doesn't match real pack file size `{size_real}`!", + ) + .attach_context("size_real", pack_size.to_string()) + .attach_context("size_computed", header.pack_size().to_string())); } Ok(header) @@ -381,10 +384,8 @@ impl<'a> PackHeaderRef<'a> { /// /// # Errors /// - /// * [`PackFileErrorKind::WritingBinaryRepresentationFailed`] - If writing the binary representation failed - /// - /// [`PackFileErrorKind::WritingBinaryRepresentationFailed`]: crate::error::PackFileErrorKind::WritingBinaryRepresentationFailed - pub(crate) fn to_binary(&self) -> RusticResult> { + /// * If writing the binary representation failed + pub(crate) fn to_binary(&self) -> PackFileResult> { let mut writer = Cursor::new(Vec::with_capacity(self.pack_size() as usize)); // collect header entries for blob in self.0 { diff --git a/crates/core/src/repofile/snapshotfile.rs b/crates/core/src/repofile/snapshotfile.rs index a973bfce..ec7eecc6 100644 --- a/crates/core/src/repofile/snapshotfile.rs +++ b/crates/core/src/repofile/snapshotfile.rs @@ -6,7 +6,9 @@ use std::{ str::FromStr, }; -use chrono::{DateTime, Duration, Local}; +use chrono::{DateTime, Duration, Local, OutOfRangeError}; +#[cfg(feature = "clap")] +use clap::ValueHint; use derivative::Derivative; use derive_setters::Setters; use dunce::canonicalize; @@ -20,15 +22,30 @@ use serde_with::{serde_as, skip_serializing_none, DisplayFromStr}; use crate::{ backend::{decrypt::DecryptReadBackend, FileType, FindInBackend}, blob::tree::TreeId, - error::{RusticError, RusticErrorKind, RusticResult, SnapshotFileErrorKind}, + error::{ErrorKind, RusticError, RusticResult}, impl_repofile, progress::Progress, repofile::RepoFile, Id, }; -#[cfg(feature = "clap")] -use clap::ValueHint; +/// [`SnapshotFileErrorKind`] describes the errors 
that can be returned for `SnapshotFile`s +#[derive(thiserror::Error, Debug, displaydoc::Display)] +#[non_exhaustive] +pub enum SnapshotFileErrorKind { + /// non-unicode path `{0:?}` + NonUnicodePath(PathBuf), + /// value `{0:?}` not allowed + ValueNotAllowed(String), + /// datetime out of range: `{0:?}` + OutOfRange(OutOfRangeError), + /// removing dots from paths failed: `{0:?}` + RemovingDotsFromPathFailed(std::io::Error), + /// canonicalizing path failed: `{0:?}` + CanonicalizingPathFailed(std::io::Error), +} + +pub(crate) type SnapshotFileResult = Result; /// Options for creating a new [`SnapshotFile`] structure for a new backup snapshot. /// @@ -110,15 +127,20 @@ impl SnapshotOptions { /// /// # Errors /// - /// * [`SnapshotFileErrorKind::NonUnicodeTag`] - If the tag is not valid unicode + /// * If the tag is not valid unicode /// /// # Returns /// /// The modified [`SnapshotOptions`] - /// - /// [`SnapshotFileErrorKind::NonUnicodeTag`]: crate::error::SnapshotFileErrorKind::NonUnicodeTag pub fn add_tags(mut self, tag: &str) -> RusticResult { - self.tags.push(StringList::from_str(tag)?); + self.tags.push(StringList::from_str(tag).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Failed to create string list from tag `{tag}`. The value must be a valid unicode string.", + err, + ) + .attach_context("tag", tag) + })?); Ok(self) } @@ -126,13 +148,11 @@ impl SnapshotOptions { /// /// # Errors /// - /// * [`SnapshotFileErrorKind::NonUnicodeHostname`] - If the hostname is not valid unicode + /// * If the hostname is not valid unicode /// /// # Returns /// /// The new [`SnapshotFile`] - /// - /// [`SnapshotFileErrorKind::NonUnicodeHostname`]: crate::error::SnapshotFileErrorKind::NonUnicodeHostname pub fn to_snapshot(&self) -> RusticResult { SnapshotFile::from_options(self) } @@ -232,10 +252,8 @@ impl SnapshotSummary { /// /// # Errors /// - /// * [`SnapshotFileErrorKind::OutOfRange`] - If the time is not in the range of `Local::now()` - /// - /// [`SnapshotFileErrorKind::OutOfRange`]: crate::error::SnapshotFileErrorKind::OutOfRange - pub(crate) fn finalize(&mut self, snap_time: DateTime) -> RusticResult<()> { + /// * If the time is not in the range of `Local::now()` + pub(crate) fn finalize(&mut self, snap_time: DateTime) -> SnapshotFileResult<()> { let end_time = Local::now(); self.backup_duration = (end_time - self.backup_start) .to_std() @@ -355,17 +373,13 @@ impl SnapshotFile { /// /// # Errors /// - /// * [`SnapshotFileErrorKind::NonUnicodeHostname`] - If the hostname is not valid unicode - /// * [`SnapshotFileErrorKind::OutOfRange`] - If the delete time is not in the range of `Local::now()` - /// * [`SnapshotFileErrorKind::ReadingDescriptionFailed`] - If the description file could not be read + /// * If the hostname is not valid unicode + /// * If the delete time is not in the range of `Local::now()` + /// * If the description file could not be read /// /// # Note /// /// This is the preferred way to create a new [`SnapshotFile`] to be used within [`crate::Repository::backup`]. 
- /// - /// [`SnapshotFileErrorKind::NonUnicodeHostname`]: crate::error::SnapshotFileErrorKind::NonUnicodeHostname - /// [`SnapshotFileErrorKind::OutOfRange`]: crate::error::SnapshotFileErrorKind::OutOfRange - /// [`SnapshotFileErrorKind::ReadingDescriptionFailed`]: crate::error::SnapshotFileErrorKind::ReadingDescriptionFailed pub fn from_options(opts: &SnapshotOptions) -> RusticResult { let hostname = if let Some(host) = &opts.host { host.clone() @@ -373,7 +387,13 @@ impl SnapshotFile { let hostname = gethostname(); hostname .to_str() - .ok_or_else(|| SnapshotFileErrorKind::NonUnicodeHostname(hostname.clone()))? + .ok_or_else(|| { + RusticError::new( + ErrorKind::InvalidInput, + "Failed to convert hostname `{hostname}` to string. The value must be a valid unicode string.", + ) + .attach_context("hostname", hostname.to_string_lossy().to_string()) + })? .to_string() }; @@ -381,8 +401,15 @@ impl SnapshotFile { let delete = match (opts.delete_never, opts.delete_after) { (true, _) => DeleteOption::Never, - (_, Some(d)) => DeleteOption::After( - time + Duration::from_std(*d).map_err(SnapshotFileErrorKind::OutOfRange)?, + (_, Some(duration)) => DeleteOption::After( + time + Duration::from_std(*duration).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Failed to convert duration `{duration}` to std::time::Duration. Please make sure the value is a valid duration string.", + err, + ) + .attach_context("duration", duration.to_string()) + })?, ), (false, None) => DeleteOption::NotSet, }; @@ -411,11 +438,15 @@ impl SnapshotFile { }; // use description from description file if it is given - if let Some(ref file) = opts.description_from { - snap.description = Some( - std::fs::read_to_string(file) - .map_err(SnapshotFileErrorKind::ReadingDescriptionFailed)?, - ); + if let Some(ref path) = opts.description_from { + snap.description = Some(std::fs::read_to_string(path).map_err(|err| { + RusticError::with_source( + ErrorKind::InvalidInput, + "Failed to read description file `{path}`. Please make sure the file exists and is readable.", + err, + ) + .attach_context("path", path.to_string_lossy().to_string()) + })?); } _ = snap.set_tags(opts.tags.clone()); @@ -456,13 +487,9 @@ impl SnapshotFile { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. - /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique + /// * If the string is not a valid hexadecimal string + /// * If no id could be found. + /// * If the id is not unique. pub(crate) fn from_str( be: &B, string: &str, @@ -485,9 +512,7 @@ impl SnapshotFile { /// /// # Errors /// - /// * [`SnapshotFileErrorKind::NoSnapshotsFound`] - If no snapshots are found - /// - /// [`SnapshotFileErrorKind::NoSnapshotsFound`]: crate::error::SnapshotFileErrorKind::NoSnapshotsFound + /// * If no snapshots are found pub(crate) fn latest( be: &B, predicate: impl FnMut(&Self) -> bool + Send + Sync, @@ -511,8 +536,15 @@ impl SnapshotFile { } } } + p.finish(); - latest.ok_or_else(|| SnapshotFileErrorKind::NoSnapshotsFound.into()) + + latest.ok_or_else(|| { + RusticError::new( + ErrorKind::Repository, + "No snapshots found. 
Please make sure there are snapshots in the repository.", + ) + }) } /// Get a [`SnapshotFile`] from the backend by (part of the) id @@ -523,13 +555,10 @@ impl SnapshotFile { /// * `id` - The (part of the) id of the snapshot /// /// # Errors - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique + /// * If the string is not a valid hexadecimal string + /// * If no id could be found. + /// * If the id is not unique. pub(crate) fn from_id(be: &B, id: &str) -> RusticResult { info!("getting snapshot..."); let id = be.find_id(FileType::Snapshot, id)?; @@ -546,13 +575,9 @@ impl SnapshotFile { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. - /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique + /// * If the string is not a valid hexadecimal string + /// * If no id could be found. + /// * If the id is not unique. pub(crate) fn from_ids>( be: &B, ids: &[T], @@ -571,13 +596,9 @@ impl SnapshotFile { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. - /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique + /// * If the string is not a valid hexadecimal string + /// * If no id could be found. + /// * If the id is not unique. 
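Several hunks in this file trade dedicated error variants (for example `NoSnapshotsFound` in `latest()` above) for a message plus attached key/value context. A minimal self-contained sketch of that shape, using invented stand-in types rather than the crate's real `RusticError`/`ErrorKind` builder:

```rust
/// Stand-in error carrying a kind, a message, and attached context pairs.
#[derive(Debug)]
struct SketchError {
    kind: &'static str,
    message: String,
    context: Vec<(&'static str, String)>,
}

impl SketchError {
    fn new(kind: &'static str, message: &str) -> Self {
        Self { kind, message: message.to_string(), context: Vec::new() }
    }

    /// Context is attached as key/value pairs instead of being baked into enum variants.
    fn attach_context(mut self, key: &'static str, value: impl Into<String>) -> Self {
        self.context.push((key, value.into()));
        self
    }
}

/// Mirrors the shape of the `latest()` change: an empty search result becomes a
/// descriptive error instead of a dedicated `NoSnapshotsFound` variant.
fn latest(snapshot_times: &[&str]) -> Result<String, SketchError> {
    snapshot_times
        .iter()
        .max()
        .map(|t| (*t).to_string())
        .ok_or_else(|| {
            SketchError::new("Repository", "No snapshots found.")
                .attach_context("hint", "make sure there are snapshots in the repository")
        })
}

fn main() {
    assert!(latest(&["2024-01-01", "2024-02-01"]).is_ok());
    if let Err(err) = latest(&[]) {
        println!("{}: {} ({:?})", err.kind, err.message, err.context);
    }
}
```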
pub(crate) fn update_from_ids>( be: &B, current: Vec, @@ -752,9 +773,7 @@ impl SnapshotFile { B: DecryptReadBackend, F: FnMut(&Self) -> bool, { - let ids = be - .list(FileType::Snapshot) - .map_err(RusticErrorKind::Backend)?; + let ids = be.list(FileType::Snapshot)?; Self::fill_missing(be, current, &ids, filter, p) } @@ -946,8 +965,8 @@ impl Default for SnapshotGroupCriterion { } impl FromStr for SnapshotGroupCriterion { - type Err = RusticError; - fn from_str(s: &str) -> RusticResult { + type Err = SnapshotFileErrorKind; + fn from_str(s: &str) -> SnapshotFileResult { let mut crit = Self::new(); for val in s.split(',') { match val { @@ -956,7 +975,7 @@ impl FromStr for SnapshotGroupCriterion { "paths" => crit.paths = true, "tags" => crit.tags = true, "" => continue, - v => return Err(SnapshotFileErrorKind::ValueNotAllowed(v.into()).into()), + v => return Err(SnapshotFileErrorKind::ValueNotAllowed(v.into())), } } Ok(crit) @@ -1052,8 +1071,8 @@ impl SnapshotGroup { pub struct StringList(pub(crate) BTreeSet); impl FromStr for StringList { - type Err = RusticError; - fn from_str(s: &str) -> RusticResult { + type Err = SnapshotFileErrorKind; + fn from_str(s: &str) -> SnapshotFileResult { Ok(Self(s.split(',').map(ToString::to_string).collect())) } } @@ -1134,10 +1153,8 @@ impl StringList { /// /// # Errors /// - /// * [`SnapshotFileErrorKind::NonUnicodePath`] - If a path is not valid unicode - /// - /// [`SnapshotFileErrorKind::NonUnicodePath`]: crate::error::SnapshotFileErrorKind::NonUnicodePath - pub(crate) fn set_paths>(&mut self, paths: &[T]) -> RusticResult<()> { + /// * If a path is not valid unicode + pub(crate) fn set_paths>(&mut self, paths: &[T]) -> SnapshotFileResult<()> { self.0 = paths .iter() .map(|p| { @@ -1146,7 +1163,7 @@ impl StringList { .ok_or_else(|| SnapshotFileErrorKind::NonUnicodePath(p.as_ref().to_path_buf()))? 
.to_string()) }) - .collect::>>()?; + .collect::>>()?; Ok(()) } @@ -1216,7 +1233,9 @@ impl PathList { /// * `source` - The String to parse /// /// # Errors - /// no errors can occur here + /// + /// * no errors can occur here + /// * [`RusticResult`] is used for consistency and future compatibility pub fn from_string(source: &str) -> RusticResult { Ok(Self(vec![source.into()])) } @@ -1243,12 +1262,9 @@ impl PathList { /// /// # Errors /// - /// * [`SnapshotFileErrorKind::RemovingDotsFromPathFailed`] - If removing dots from path failed - /// * [`SnapshotFileErrorKind::CanonicalizingPathFailed`] - If canonicalizing path failed - /// - /// [`SnapshotFileErrorKind::RemovingDotsFromPathFailed`]: crate::error::SnapshotFileErrorKind::RemovingDotsFromPathFailed - /// [`SnapshotFileErrorKind::CanonicalizingPathFailed`]: crate::error::SnapshotFileErrorKind::CanonicalizingPathFailed - pub fn sanitize(mut self) -> RusticResult { + /// * If removing dots from path failed + /// * If canonicalizing path failed + pub fn sanitize(mut self) -> SnapshotFileResult { for path in &mut self.0 { *path = sanitize_dot(path)?; } @@ -1285,7 +1301,7 @@ impl PathList { } // helper function to sanitize paths containing dots -fn sanitize_dot(path: &Path) -> RusticResult { +fn sanitize_dot(path: &Path) -> SnapshotFileResult { if path == Path::new(".") || path == Path::new("./") { return Ok(PathBuf::from(".")); } diff --git a/crates/core/src/repository.rs b/crates/core/src/repository.rs index 607a4264..a94f108b 100644 --- a/crates/core/src/repository.rs +++ b/crates/core/src/repository.rs @@ -1,7 +1,5 @@ -mod command_input; -mod warm_up; - -pub use command_input::CommandInput; +pub(crate) mod command_input; +pub(crate) mod warm_up; use std::{ cmp::Ordering, @@ -48,7 +46,7 @@ use crate::{ restore::{collect_and_prepare, restore_repository, RestoreOptions, RestorePlan}, }, crypto::aespoly1305::Key, - error::{CommandErrorKind, KeyFileErrorKind, RepositoryErrorKind, RusticErrorKind}, + error::{ErrorKind, RusticResult}, index::{ binarysorted::{IndexCollector, IndexType}, GlobalIndex, IndexEntry, ReadGlobalIndex, ReadIndex, @@ -61,9 +59,12 @@ use crate::{ snapshotfile::{SnapshotGroup, SnapshotGroupCriterion, SnapshotId}, ConfigFile, KeyId, PathList, RepoFile, RepoId, SnapshotFile, SnapshotSummary, Tree, }, - repository::warm_up::{warm_up, warm_up_wait}, + repository::{ + command_input::CommandInput, + warm_up::{warm_up, warm_up_wait}, + }, vfs::OpenFile, - RepositoryBackends, RusticResult, + RepositoryBackends, RusticError, }; #[cfg(feature = "clap")] @@ -90,7 +91,7 @@ pub struct RepositoryOptions { /// /// # Warning /// - /// Using --password can reveal the password in the process list! + /// * Using --password can reveal the password in the process list! 
#[cfg_attr( feature = "clap", clap(long, global = true, env = "RUSTIC_PASSWORD", hide_env_values = true) @@ -168,50 +169,59 @@ impl RepositoryOptions { /// /// # Errors /// - /// * [`RepositoryErrorKind::OpeningPasswordFileFailed`] - If opening the password file failed - /// * [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`] - If reading the password failed - /// * [`RepositoryErrorKind::FromSplitError`] - If splitting the password command failed - /// * [`RepositoryErrorKind::PasswordCommandExecutionFailed`] - If executing the password command failed - /// * [`RepositoryErrorKind::ReadingPasswordFromCommandFailed`] - If reading the password from the command failed + /// * If opening the password file failed + /// * If reading the password failed + /// * If splitting the password command failed + /// * If executing the password command failed + /// * If reading the password from the command failed /// /// # Returns /// /// The password or `None` if no password is given - /// - /// [`RepositoryErrorKind::OpeningPasswordFileFailed`]: crate::error::RepositoryErrorKind::OpeningPasswordFileFailed - /// [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`]: crate::error::RepositoryErrorKind::ReadingPasswordFromReaderFailed - /// [`RepositoryErrorKind::FromSplitError`]: crate::error::RepositoryErrorKind::FromSplitError - /// [`RepositoryErrorKind::PasswordCommandExecutionFailed`]: crate::error::RepositoryErrorKind::PasswordCommandExecutionFailed - /// [`RepositoryErrorKind::ReadingPasswordFromCommandFailed`]: crate::error::RepositoryErrorKind::ReadingPasswordFromCommandFailed pub fn evaluate_password(&self) -> RusticResult> { match (&self.password, &self.password_file, &self.password_command) { (Some(pwd), _, _) => Ok(Some(pwd.clone())), (_, Some(file), _) => { - let mut file = BufReader::new( - File::open(file).map_err(RepositoryErrorKind::OpeningPasswordFileFailed)?, - ); + let mut file = BufReader::new(File::open(file).map_err(|err| { + RusticError::with_source( + ErrorKind::Password, + "Opening password file failed. 
Is the path `{path}` correct?", + err, + ) + .attach_context("path", file.display().to_string()) + })?); Ok(Some(read_password_from_reader(&mut file)?)) } (_, _, Some(command)) if command.is_set() => { debug!("commands: {command:?}"); - let command = Command::new(command.command()) + let run_command = Command::new(command.command()) .args(command.args()) .stdout(Stdio::piped()) .spawn(); - let process = match command { + let process = match run_command { Ok(process) => process, Err(err) => { error!("password-command could not be executed: {}", err); - return Err(RepositoryErrorKind::PasswordCommandExecutionFailed.into()); + return Err(RusticError::with_source( + ErrorKind::Password, + "Password command `{command}` could not be executed", + err, + ) + .attach_context("command", command.to_string())); } }; let output = match process.wait_with_output() { Ok(output) => output, Err(err) => { - error!("error reading output from password-command: {}", err); - return Err(RepositoryErrorKind::ReadingPasswordFromCommandFailed.into()); + error!("error reading output from password-command: {err}"); + return Err(RusticError::with_source( + ErrorKind::Password, + "Error reading output from password command `{command}`", + err, + ) + .attach_context("command", command.to_string())); } }; @@ -222,16 +232,16 @@ impl RepositoryOptions { None => "was terminated".into(), }; error!("password-command {s}"); - return Err(RepositoryErrorKind::PasswordCommandExecutionFailed.into()); + return Err(RusticError::new( + ErrorKind::Password, + "Password command `{command}` did not exit successfully: `{status}`", + ) + .attach_context("command", command.to_string()) + .attach_context("status", s)); } let mut pwd = BufReader::new(&*output.stdout); - Ok(Some(match read_password_from_reader(&mut pwd) { - Ok(val) => val, - Err(_) => { - return Err(RepositoryErrorKind::ReadingPasswordFromCommandFailed.into()) - } - })) + Ok(Some(read_password_from_reader(&mut pwd)?)) } (None, None, _) => Ok(None), } @@ -246,14 +256,17 @@ impl RepositoryOptions { /// /// # Errors /// -/// * [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`] - If reading the password failed -/// -/// [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`]: crate::error::RepositoryErrorKind::ReadingPasswordFromReaderFailed +/// * If reading the password failed pub fn read_password_from_reader(file: &mut impl BufRead) -> RusticResult { let mut password = String::new(); - _ = file - .read_line(&mut password) - .map_err(RepositoryErrorKind::ReadingPasswordFromReaderFailed)?; + _ = file.read_line(&mut password).map_err(|err| { + RusticError::with_source( + ErrorKind::Password, + "Reading password from reader failed. Is the file empty? Please check the file and the password.", + err + ) + .attach_context("password", password.clone()) + })?; // Remove the \n from the line if present if password.ends_with('\n') { @@ -309,11 +322,9 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::NoRepositoryGiven`] - If no repository is given - /// * [`RepositoryErrorKind::NoIDSpecified`] - If the warm-up command does not contain `%id` - /// * [`BackendAccessErrorKind::BackendLoadError`] - If the specified backend cannot be loaded, e.g. is not supported - /// - /// [`BackendAccessErrorKind::BackendLoadError`]: crate::error::BackendAccessErrorKind::BackendLoadError + /// * If no repository is given + /// * If the warm-up command does not contain `%id` + /// * If the specified backend cannot be loaded, e.g. 
is not supported pub fn new(opts: &RepositoryOptions, backends: &RepositoryBackends) -> RusticResult { Self::new_with_progress(opts, backends, NoProgressBars {}) } @@ -334,13 +345,9 @@ impl
Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::NoRepositoryGiven`] - If no repository is given - /// * [`RepositoryErrorKind::NoIDSpecified`] - If the warm-up command does not contain `%id` - /// * [`BackendAccessErrorKind::BackendLoadError`] - If the specified backend cannot be loaded, e.g. is not supported - /// - /// [`RepositoryErrorKind::NoRepositoryGiven`]: crate::error::RepositoryErrorKind::NoRepositoryGiven - /// [`RepositoryErrorKind::NoIDSpecified`]: crate::error::RepositoryErrorKind::NoIDSpecified - /// [`BackendAccessErrorKind::BackendLoadError`]: crate::error::BackendAccessErrorKind::BackendLoadError + /// * If no repository is given + /// * If the warm-up command does not contain `%id` + /// * If the specified backend cannot be loaded, e.g. is not supported pub fn new_with_progress( opts: &RepositoryOptions, backends: &RepositoryBackends, @@ -351,7 +358,11 @@ impl
Repository { if let Some(warm_up) = &opts.warm_up_command { if warm_up.args().iter().all(|c| !c.contains("%id")) { - return Err(RepositoryErrorKind::NoIDSpecified.into()); + return Err(RusticError::new( + ErrorKind::MissingInput, + "No `%id` specified in warm-up command `{command}`. Please specify `%id` in the command.", + ) + .attach_context("command", warm_up.to_string())); } info!("using warm-up command {warm_up}"); } @@ -383,21 +394,15 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::OpeningPasswordFileFailed`] - If opening the password file failed - /// * [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`] - If reading the password failed - /// * [`RepositoryErrorKind::FromSplitError`] - If splitting the password command failed - /// * [`RepositoryErrorKind::PasswordCommandExecutionFailed`] - If parsing the password command failed - /// * [`RepositoryErrorKind::ReadingPasswordFromCommandFailed`] - If reading the password from the command failed + /// * If opening the password file failed + /// * If reading the password failed + /// * If splitting the password command failed + /// * If parsing the password command failed + /// * If reading the password from the command failed /// /// # Returns /// /// The password or `None` if no password is given - /// - /// [`RepositoryErrorKind::OpeningPasswordFileFailed`]: crate::error::RepositoryErrorKind::OpeningPasswordFileFailed - /// [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`]: crate::error::RepositoryErrorKind::ReadingPasswordFromReaderFailed - /// [`RepositoryErrorKind::FromSplitError`]: crate::error::RepositoryErrorKind::FromSplitError - /// [`RepositoryErrorKind::PasswordCommandExecutionFailed`]: crate::error::RepositoryErrorKind::PasswordCommandExecutionFailed - /// [`RepositoryErrorKind::ReadingPasswordFromCommandFailed`]: crate::error::RepositoryErrorKind::ReadingPasswordFromCommandFailed pub fn password(&self) -> RusticResult> { self.opts.evaluate_password() } @@ -406,25 +411,23 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::ListingRepositoryConfigFileFailed`] - If listing the repository config file failed - /// * [`RepositoryErrorKind::MoreThanOneRepositoryConfig`] - If there is more than one repository config file + /// * If listing the repository config file failed + /// * If there is more than one repository config file /// /// # Returns /// /// The id of the config file or `None` if no config file is found - /// - /// [`RepositoryErrorKind::ListingRepositoryConfigFileFailed`]: crate::error::RepositoryErrorKind::ListingRepositoryConfigFileFailed - /// [`RepositoryErrorKind::MoreThanOneRepositoryConfig`]: crate::error::RepositoryErrorKind::MoreThanOneRepositoryConfig pub fn config_id(&self) -> RusticResult> { - let config_ids = self - .be - .list(FileType::Config) - .map_err(|_| RepositoryErrorKind::ListingRepositoryConfigFileFailed)?; + let config_ids = self.be.list(FileType::Config)?; match config_ids.len() { 1 => Ok(Some(ConfigId::from(config_ids[0]))), 0 => Ok(None), - _ => Err(RepositoryErrorKind::MoreThanOneRepositoryConfig(self.name.clone()).into()), + _ => Err(RusticError::new( + ErrorKind::Configuration, + "More than one repository found for `{name}`. 
Please check the config file.", + ) + .attach_context("name", self.name.clone())), } } @@ -434,39 +437,30 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::NoPasswordGiven`] - If no password is given - /// * [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`] - If reading the password failed - /// * [`RepositoryErrorKind::OpeningPasswordFileFailed`] - If opening the password file failed - /// * [`RepositoryErrorKind::PasswordCommandExecutionFailed`] - If parsing the password command failed - /// * [`RepositoryErrorKind::ReadingPasswordFromCommandFailed`] - If reading the password from the command failed - /// * [`RepositoryErrorKind::FromSplitError`] - If splitting the password command failed - /// * [`RepositoryErrorKind::NoRepositoryConfigFound`] - If no repository config file is found - /// * [`RepositoryErrorKind::KeysDontMatchForRepositories`] - If the keys of the hot and cold backend don't match - /// * [`RepositoryErrorKind::IncorrectPassword`] - If the password is incorrect - /// * [`KeyFileErrorKind::NoSuitableKeyFound`] - If no suitable key is found - /// * [`RepositoryErrorKind::ListingRepositoryConfigFileFailed`] - If listing the repository config file failed - /// * [`RepositoryErrorKind::MoreThanOneRepositoryConfig`] - If there is more than one repository config file + /// * If no password is given + /// * If reading the password failed + /// * If opening the password file failed + /// * If parsing the password command failed + /// * If reading the password from the command failed + /// * If splitting the password command failed + /// * If no repository config file is found + /// * If the keys of the hot and cold backend don't match + /// * If the password is incorrect + /// * If no suitable key is found + /// * If listing the repository config file failed + /// * If there is more than one repository config file /// /// # Returns /// /// The open repository - /// - /// [`RepositoryErrorKind::NoPasswordGiven`]: crate::error::RepositoryErrorKind::NoPasswordGiven - /// [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`]: crate::error::RepositoryErrorKind::ReadingPasswordFromReaderFailed - /// [`RepositoryErrorKind::OpeningPasswordFileFailed`]: crate::error::RepositoryErrorKind::OpeningPasswordFileFailed - /// [`RepositoryErrorKind::PasswordCommandExecutionFailed`]: crate::error::RepositoryErrorKind::PasswordCommandExecutionFailed - /// [`RepositoryErrorKind::ReadingPasswordFromCommandFailed`]: crate::error::RepositoryErrorKind::ReadingPasswordFromCommandFailed - /// [`RepositoryErrorKind::FromSplitError`]: crate::error::RepositoryErrorKind::FromSplitError - /// [`RepositoryErrorKind::NoRepositoryConfigFound`]: crate::error::RepositoryErrorKind::NoRepositoryConfigFound - /// [`RepositoryErrorKind::KeysDontMatchForRepositories`]: crate::error::RepositoryErrorKind::KeysDontMatchForRepositories - /// [`RepositoryErrorKind::IncorrectPassword`]: crate::error::RepositoryErrorKind::IncorrectPassword - /// [`KeyFileErrorKind::NoSuitableKeyFound`]: crate::error::KeyFileErrorKind::NoSuitableKeyFound - /// [`RepositoryErrorKind::ListingRepositoryConfigFileFailed`]: crate::error::RepositoryErrorKind::ListingRepositoryConfigFileFailed - /// [`RepositoryErrorKind::MoreThanOneRepositoryConfig`]: crate::error::RepositoryErrorKind::MoreThanOneRepositoryConfig pub fn open(self) -> RusticResult> { - let password = self - .password()? 
- .ok_or(RepositoryErrorKind::NoPasswordGiven)?; + let password = self.password()?.ok_or_else(|| { + RusticError::new( + ErrorKind::Password, + "No password given, or Password was empty. Please specify a valid password.", + ) + })?; + self.open_with_password(&password) } @@ -480,50 +474,39 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::NoRepositoryConfigFound`] - If no repository config file is found - /// * [`RepositoryErrorKind::KeysDontMatchForRepositories`] - If the keys of the hot and cold backend don't match - /// * [`RepositoryErrorKind::IncorrectPassword`] - If the password is incorrect - /// * [`KeyFileErrorKind::NoSuitableKeyFound`] - If no suitable key is found - /// * [`RepositoryErrorKind::ListingRepositoryConfigFileFailed`] - If listing the repository config file failed - /// * [`RepositoryErrorKind::MoreThanOneRepositoryConfig`] - If there is more than one repository config file - /// - /// [`RepositoryErrorKind::NoRepositoryConfigFound`]: crate::error::RepositoryErrorKind::NoRepositoryConfigFound - /// [`RepositoryErrorKind::KeysDontMatchForRepositories`]: crate::error::RepositoryErrorKind::KeysDontMatchForRepositories - /// [`RepositoryErrorKind::IncorrectPassword`]: crate::error::RepositoryErrorKind::IncorrectPassword - /// [`KeyFileErrorKind::NoSuitableKeyFound`]: crate::error::KeyFileErrorKind::NoSuitableKeyFound - /// [`RepositoryErrorKind::ListingRepositoryConfigFileFailed`]: crate::error::RepositoryErrorKind::ListingRepositoryConfigFileFailed - /// [`RepositoryErrorKind::MoreThanOneRepositoryConfig`]: crate::error::RepositoryErrorKind::MoreThanOneRepositoryConfig + /// * If no repository config file is found + /// * If the keys of the hot and cold backend don't match + /// * If the password is incorrect + /// * If no suitable key is found + /// * If listing the repository config file failed + /// * If there is more than one repository config file pub fn open_with_password(self, password: &str) -> RusticResult> { - let config_id = self - .config_id()? - .ok_or(RepositoryErrorKind::NoRepositoryConfigFound( - self.name.clone(), - ))?; + let config_id = self.config_id()?.ok_or_else(|| { + RusticError::new( + ErrorKind::Configuration, + "No repository config file found for `{name}`. Please check the repository.", + ) + .attach_context("name", self.name.clone()) + })?; if let Some(be_hot) = &self.be_hot { - let mut keys = self - .be - .list_with_size(FileType::Key) - .map_err(RusticErrorKind::Backend)?; + let mut keys = self.be.list_with_size(FileType::Key)?; keys.sort_unstable_by_key(|key| key.0); - let mut hot_keys = be_hot - .list_with_size(FileType::Key) - .map_err(RusticErrorKind::Backend)?; + let mut hot_keys = be_hot.list_with_size(FileType::Key)?; hot_keys.sort_unstable_by_key(|key| key.0); if keys != hot_keys { - return Err(RepositoryErrorKind::KeysDontMatchForRepositories(self.name).into()); + return Err(RusticError::new( + ErrorKind::Key, + "Keys of hot and cold repositories don't match for `{name}`. 
Please check the keys.", + ) + .attach_context("name", self.name.clone())); } } - let key = find_key_in_backend(&self.be, &password, None).map_err(|err| { - match err.into_inner() { - RusticErrorKind::KeyFile(KeyFileErrorKind::NoSuitableKeyFound) => { - RepositoryErrorKind::IncorrectPassword.into() - } - err => err, - } - })?; + let key = find_key_in_backend(&self.be, &password, None)?; + info!("repository {}: password is correct.", self.name); + let dbe = DecryptBackend::new(self.be.clone(), key); let config: ConfigFile = dbe.get_file(&config_id)?; self.open_raw(key, config) @@ -544,27 +527,25 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::NoPasswordGiven`] - If no password is given - /// * [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`] - If reading the password failed - /// * [`RepositoryErrorKind::OpeningPasswordFileFailed`] - If opening the password file failed - /// * [`RepositoryErrorKind::PasswordCommandExecutionFailed`] - If parsing the password command failed - /// * [`RepositoryErrorKind::ReadingPasswordFromCommandFailed`] - If reading the password from the command failed - /// * [`RepositoryErrorKind::FromSplitError`] - If splitting the password command failed - /// - /// [`RepositoryErrorKind::NoPasswordGiven`]: crate::error::RepositoryErrorKind::NoPasswordGiven - /// [`RepositoryErrorKind::ReadingPasswordFromReaderFailed`]: crate::error::RepositoryErrorKind::ReadingPasswordFromReaderFailed - /// [`RepositoryErrorKind::OpeningPasswordFileFailed`]: crate::error::RepositoryErrorKind::OpeningPasswordFileFailed - /// [`RepositoryErrorKind::PasswordCommandExecutionFailed`]: crate::error::RepositoryErrorKind::PasswordCommandExecutionFailed - /// [`RepositoryErrorKind::ReadingPasswordFromCommandFailed`]: crate::error::RepositoryErrorKind::ReadingPasswordFromCommandFailed - /// [`RepositoryErrorKind::FromSplitError`]: crate::error::RepositoryErrorKind::FromSplitError + /// * If no password is given + /// * If reading the password failed + /// * If opening the password file failed + /// * If parsing the password command failed + /// * If reading the password from the command failed + /// * If splitting the password command failed pub fn init( self, key_opts: &KeyOptions, config_opts: &ConfigOptions, ) -> RusticResult> { - let password = self - .password()? - .ok_or(RepositoryErrorKind::NoPasswordGiven)?; + let password = self.password()?.ok_or_else(|| { + RusticError::new( + ErrorKind::Password, + "No password given, or Password was empty. 
Please specify a valid password for `{name}`.", + ) + .attach_context("name", self.name.clone()) + })?; + self.init_with_password(&password, key_opts, config_opts) } @@ -584,13 +565,9 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::ConfigFileExists`] - If a config file already exists - /// * [`RepositoryErrorKind::ListingRepositoryConfigFileFailed`] - If listing the repository config file failed - /// * [`RepositoryErrorKind::MoreThanOneRepositoryConfig`] - If there is more than one repository config file - /// - /// [`RepositoryErrorKind::ConfigFileExists`]: crate::error::RepositoryErrorKind::ConfigFileExists - /// [`RepositoryErrorKind::ListingRepositoryConfigFileFailed`]: crate::error::RepositoryErrorKind::ListingRepositoryConfigFileFailed - /// [`RepositoryErrorKind::MoreThanOneRepositoryConfig`]: crate::error::RepositoryErrorKind::MoreThanOneRepositoryConfig + /// * If a config file already exists + /// * If listing the repository config file failed + /// * If there is more than one repository config file pub fn init_with_password( self, pass: &str, @@ -598,9 +575,15 @@ impl Repository { config_opts: &ConfigOptions, ) -> RusticResult> { if self.config_id()?.is_some() { - return Err(RepositoryErrorKind::ConfigFileExists.into()); + return Err(RusticError::new( + ErrorKind::Configuration, + "Config file already exists for `{name}`. Please check the repository.", + ) + .attach_context("name", self.name)); } + let (key, config) = commands::init::init(&self, pass, key_opts, config_opts)?; + self.open_raw(key, config) } @@ -645,15 +628,22 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::HotRepositoryFlagMissing`] - If the config file has `is_hot` set to `true` but the repository is not hot - /// * [`RepositoryErrorKind::IsNotHotRepository`] - If the config file has `is_hot` set to `false` but the repository is hot - /// - /// [`RepositoryErrorKind::HotRepositoryFlagMissing`]: crate::error::RepositoryErrorKind::HotRepositoryFlagMissing - /// [`RepositoryErrorKind::IsNotHotRepository`]: crate::error::RepositoryErrorKind::IsNotHotRepository + /// * If the config file has `is_hot` set to `true` but the repository is not hot + /// * If the config file has `is_hot` set to `false` but the repository is hot fn open_raw(mut self, key: Key, config: ConfigFile) -> RusticResult> { match (config.is_hot == Some(true), self.be_hot.is_some()) { - (true, false) => return Err(RepositoryErrorKind::HotRepositoryFlagMissing.into()), - (false, true) => return Err(RepositoryErrorKind::IsNotHotRepository.into()), + (true, false) => return Err( + RusticError::new( + ErrorKind::Repository, + "The given repository is a hot repository! Please use `--repo-hot` in combination with the normal repo. Aborting.", + ) + ), + (false, true) => return Err( + RusticError::new( + ErrorKind::Repository, + "The given repository is not a hot repository! Aborting.", + ) + ), _ => {} } @@ -694,12 +684,7 @@ impl Repository { /// // TODO: Document errors pub fn list(&self) -> RusticResult> { - Ok(self - .be - .list(T::TYPE) - .map_err(RusticErrorKind::Backend)? - .into_iter() - .map(Into::into)) + Ok(self.be.list(T::TYPE)?.into_iter().map(Into::into)) } } @@ -708,7 +693,7 @@ impl Repository { /// /// # Errors /// - /// If files could not be listed. + /// * If files could not be listed. 
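The `open_raw` hunk above replaces `HotRepositoryFlagMissing`/`IsNotHotRepository` with errors built inline from a tuple match. The same control flow, sketched with a plain `String` standing in for the real error type:

```rust
/// Mirrors the `open_raw` consistency check between the config's hot flag and
/// the presence of a hot backend; `String` stands in for the real error type.
fn check_hot_flags(config_is_hot: bool, has_hot_backend: bool) -> Result<(), String> {
    match (config_is_hot, has_hot_backend) {
        // Config marks the repository as hot, but only the hot part was given.
        (true, false) => {
            Err("the given repository is a hot repository; use `--repo-hot` together with the normal repository".into())
        }
        // A hot backend was supplied, but the config is not a hot repository.
        (false, true) => Err("the given repository is not a hot repository".into()),
        // Flags agree: nothing to do.
        _ => Ok(()),
    }
}

fn main() {
    assert!(check_hot_flags(true, true).is_ok());
    assert!(check_hot_flags(true, false).is_err());
}
```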
pub fn infos_files(&self) -> RusticResult { commands::repoinfo::collect_file_infos(self) } @@ -721,8 +706,8 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::FromSplitError`] - If the command could not be parsed. - /// * [`RepositoryErrorKind::FromThreadPoolbilderError`] - If the thread pool could not be created. + /// * If the command could not be parsed. + /// * If the thread pool could not be created. /// /// # Returns /// @@ -739,11 +724,8 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::FromSplitError`] - If the command could not be parsed. - /// * [`RepositoryErrorKind::FromThreadPoolbilderError`] - If the thread pool could not be created. - /// - /// [`RepositoryErrorKind::FromSplitError`]: crate::error::RepositoryErrorKind::FromSplitError - /// [`RepositoryErrorKind::FromThreadPoolbilderError`]: crate::error::RepositoryErrorKind::FromThreadPoolbilderError + /// * If the command could not be parsed. + /// * If the thread pool could not be created. pub fn warm_up_wait(&self, packs: impl ExactSizeIterator) -> RusticResult<()> { warm_up_wait(self, packs) } @@ -816,13 +798,9 @@ impl Repository { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. - /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique + /// * If the string is not a valid hexadecimal string + /// * If no id could be found. + /// * If the id is not unique. pub fn cat_file(&self, tpe: FileType, id: &str) -> RusticResult { commands::cat::cat_file(self, tpe, id) } @@ -836,9 +814,7 @@ impl Repository { /// /// # Errors /// - /// * [`CommandErrorKind::FromJsonError`] - If the key could not be serialized. - /// - /// [`CommandErrorKind::FromJsonError`]: crate::error::CommandErrorKind::FromJsonError + /// * If the key could not be serialized. pub fn add_key(&self, pass: &str, opts: &KeyOptions) -> RusticResult { add_current_key_to_repo(self, opts, pass) } @@ -851,23 +827,14 @@ impl Repository { /// /// # Errors /// - /// * [`CommandErrorKind::VersionNotSupported`] - If the version is not supported - /// * [`CommandErrorKind::CannotDowngrade`] - If the version is lower than the current version - /// * [`CommandErrorKind::NoCompressionV1Repo`] - If compression is set for a v1 repo - /// * [`CommandErrorKind::CompressionLevelNotSupported`] - If the compression level is not supported - /// * [`CommandErrorKind::SizeTooLarge`] - If the size is too large - /// * [`CommandErrorKind::MinPackSizeTolerateWrong`] - If the min packsize tolerance percent is wrong - /// * [`CommandErrorKind::MaxPackSizeTolerateWrong`] - If the max packsize tolerance percent is wrong - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the file could not be serialized to json. 
- /// - /// [`CommandErrorKind::VersionNotSupported`]: crate::error::CommandErrorKind::VersionNotSupported - /// [`CommandErrorKind::CannotDowngrade`]: crate::error::CommandErrorKind::CannotDowngrade - /// [`CommandErrorKind::NoCompressionV1Repo`]: crate::error::CommandErrorKind::NoCompressionV1Repo - /// [`CommandErrorKind::CompressionLevelNotSupported`]: crate::error::CommandErrorKind::CompressionLevelNotSupported - /// [`CommandErrorKind::SizeTooLarge`]: crate::error::CommandErrorKind::SizeTooLarge - /// [`CommandErrorKind::MinPackSizeTolerateWrong`]: crate::error::CommandErrorKind::MinPackSizeTolerateWrong - /// [`CommandErrorKind::MaxPackSizeTolerateWrong`]: crate::error::CommandErrorKind::MaxPackSizeTolerateWrong - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed + /// * If the version is not supported + /// * If the version is lower than the current version + /// * If compression is set for a v1 repo + /// * If the compression level is not supported + /// * If the size is too large + /// * If the min pack size tolerance percent is wrong + /// * If the max pack size tolerance percent is wrong + /// * If the file could not be serialized to json. pub fn apply_config(&self, opts: &ConfigOptions) -> RusticResult { commands::config::apply_config(self, opts) } @@ -918,18 +885,14 @@ impl Repository { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. + /// * If the string is not a valid hexadecimal string + /// * If no id could be found. + /// * If the id is not unique. /// /// # Returns /// /// If `id` is (part of) an `Id`, return this snapshot. /// If `id` is "latest", return the latest snapshot respecting the giving filter. - /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique pub fn get_snapshot_from_str( &self, id: &str, @@ -1018,10 +981,11 @@ impl Repository { /// /// # Errors /// + // TODO: Document errors + /// /// # Note - /// The result is not sorted and may come in random order! /// - // TODO: Document errors + /// The result is not sorted and may come in random order! pub fn get_matching_snapshots( &self, filter: impl FnMut(&SnapshotFile) -> bool, @@ -1038,10 +1002,11 @@ impl Repository { /// /// # Errors /// + // TODO: Document errors + /// /// # Note - /// The result is not sorted and may come in random order! /// - // TODO: Document errors + /// The result is not sorted and may come in random order! pub fn update_matching_snapshots( &self, current: Vec, @@ -1063,7 +1028,7 @@ impl Repository { /// /// # Errors /// - /// If keep options are not valid + /// * If keep options are not valid /// /// # Returns /// @@ -1113,13 +1078,15 @@ impl Repository { /// /// # Panics /// - /// If the files could not be deleted. + /// * If the files could not be deleted. 
pub fn delete_snapshots(&self, ids: &[SnapshotId]) -> RusticResult<()> { if self.config().append_only == Some(true) { - return Err(CommandErrorKind::NotAllowedWithAppendOnly( - "snapshots removal".to_string(), - ) - .into()); + return Err( + RusticError::new( + ErrorKind::Repository, + "Repository is in append-only mode and snapshots cannot be deleted from it. Aborting.", + ) + ); } let p = self.pb.progress_counter("removing snapshots..."); self.dbe().delete_list(true, ids.iter(), p)?; @@ -1134,9 +1101,7 @@ impl Repository { /// /// # Errors /// - /// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the file could not be serialized to json. - /// - /// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed + /// * If the file could not be serialized to json. pub fn save_snapshots(&self, mut snaps: Vec) -> RusticResult<()> { for snap in &mut snaps { snap.id = SnapshotId::default(); @@ -1155,6 +1120,10 @@ impl Repository { /// # Errors /// // TODO: Document errors + /// + /// # Panics + /// + // TODO: Document panics pub fn check(&self, opts: CheckOptions) -> RusticResult<()> { let trees = self .get_all_snapshots()? @@ -1162,7 +1131,9 @@ impl Repository { .map(|snap| snap.tree) .collect(); - check_repository(self, opts, trees) + check_repository(self, opts, trees)?; + + Ok(()) } /// Check the repository and given trees for errors or inconsistencies @@ -1174,6 +1145,9 @@ impl Repository { /// # Errors /// // TODO: Document errors + /// # Panics + /// + // TODO: Document panics pub fn check_with_trees(&self, opts: CheckOptions, trees: Vec) -> RusticResult<()> { check_repository(self, opts, trees) } @@ -1204,8 +1178,8 @@ impl Repository { /// /// # Errors /// - /// * [`CommandErrorKind::NotAllowedWithAppendOnly`] - If the repository is in append-only mode - /// * [`CommandErrorKind::NoDecision`] - If a pack has no decision + /// * If the repository is in append-only mode + /// * If a pack has no decision /// /// # Returns /// @@ -1213,6 +1187,7 @@ impl Repository { /// /// # Panics /// + // TODO: Document panics pub fn prune(&self, opts: &PruneOptions, prune_plan: PrunePlan) -> RusticResult<()> { prune_repository(self, opts, prune_plan) } @@ -1343,7 +1318,7 @@ impl Repository { /// /// # Errors /// - /// If the index could not be read. + /// * If the index could not be read. /// /// # Returns /// @@ -1360,10 +1335,11 @@ impl Repository { /// /// # Returns /// + /// An iterator over all files of the given type + /// /// # Note - /// The result is not sorted and may come in random order! /// - /// An iterator over all files of the given type + /// The result is not sorted and may come in random order! pub fn stream_files( &self, ) -> RusticResult>> { @@ -1446,7 +1422,7 @@ pub trait IndexedFull: IndexedIds { /// /// # Errors /// - /// If the blob could not be fetched from the repository. + /// * If the blob could not be fetched from the repository. 
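The `delete_snapshots` hunk just above adds an explicit append-only guard before any deletion happens. A trivial stand-alone sketch of that guard (stand-in types; the real code returns a `RusticError` with `ErrorKind::Repository`):

```rust
/// Mirrors the append-only guard added to `delete_snapshots`: destructive
/// operations bail out early when the config opts into append-only mode.
struct Config {
    append_only: Option<bool>,
}

fn delete_snapshots(config: &Config, ids: &[u32]) -> Result<(), String> {
    if config.append_only == Some(true) {
        return Err("repository is in append-only mode; snapshots cannot be deleted".into());
    }
    println!("would delete {} snapshot(s)", ids.len());
    Ok(())
}

fn main() {
    let cfg = Config { append_only: Some(true) };
    assert!(delete_snapshots(&cfg, &[1, 2, 3]).is_err());
}
```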
/// /// # Returns /// @@ -1582,15 +1558,18 @@ impl Repository { /// /// # Errors /// - /// * [`RepositoryErrorKind::IdNotFound`] - If the id is not found in the index - /// - /// [`RepositoryErrorKind::IdNotFound`]: crate::error::RepositoryErrorKind::IdNotFound + /// * If the id is not found in the index pub fn get_index_entry(&self, id: &T) -> RusticResult { let blob_id: BlobId = (*id).into(); - let ie = self - .index() - .get_id(T::TYPE, &blob_id) - .ok_or_else(|| RepositoryErrorKind::IdNotFound(blob_id))?; + let ie = self.index().get_id(T::TYPE, &blob_id).ok_or_else(|| { + RusticError::new( + ErrorKind::Internal, + "Blob ID `{id}` not found in index, but should be there.", + ) + .attach_context("id", blob_id.to_string()) + .ask_report() + })?; + Ok(ie) } @@ -1644,15 +1623,12 @@ impl Repository { /// /// # Errors /// - /// * [`TreeErrorKind::BlobIdNotFound`] - If the tree ID is not found in the backend. - /// * [`TreeErrorKind::DeserializingTreeFailed`] - If deserialization fails. + /// * If the tree ID is not found in the backend. + /// * If deserialization fails. /// /// # Returns /// /// The tree with the given `Id` - /// - /// [`TreeErrorKind::BlobIdNotFound`]: crate::error::TreeErrorKind::BlobIdNotFound - /// [`TreeErrorKind::DeserializingTreeFailed`]: crate::error::TreeErrorKind::DeserializingTreeFailed pub fn get_tree(&self, id: &TreeId) -> RusticResult { Tree::from_backend(self.dbe(), self.index(), *id) } @@ -1663,19 +1639,14 @@ impl Repository { /// /// # Arguments /// - /// * `root_tree` - The `Id` of the root tree - // TODO!: This ID should be a tree ID, we should refactor it to wrap it in a TreeId type + /// * `root_tree` - The `TreeId` of the root tree /// * `path` - The path /// /// # Errors /// - /// * [`TreeErrorKind::NotADirectory`] - If the path is not a directory. - /// * [`TreeErrorKind::PathNotFound`] - If the path is not found. - /// * [`TreeErrorKind::PathIsNotUtf8Conform`] - If the path is not UTF-8 conform. - /// - /// [`TreeErrorKind::NotADirectory`]: crate::error::TreeErrorKind::NotADirectory - /// [`TreeErrorKind::PathNotFound`]: crate::error::TreeErrorKind::PathNotFound - /// [`TreeErrorKind::PathIsNotUtf8Conform`]: crate::error::TreeErrorKind::PathIsNotUtf8Conform + /// * If the path is not a directory. + /// * If the path is not found. + /// * If the path is not UTF-8 conform. pub fn node_from_path(&self, root_tree: TreeId, path: &Path) -> RusticResult { Tree::node_from_path(self.dbe(), self.index(), root_tree, Path::new(path)) } @@ -1688,7 +1659,8 @@ impl Repository { /// * `path` - The path /// /// # Errors - /// if loading trees from the backend fails + /// + /// * If loading trees from the backend fails pub fn find_nodes_from_path( &self, ids: impl IntoIterator, @@ -1705,7 +1677,8 @@ impl Repository { /// * `matches` - The matching criterion /// /// # Errors - /// if loading trees from the backend fails + /// + /// * If loading trees from the backend fails pub fn find_matching_nodes( &self, ids: impl IntoIterator, @@ -1739,13 +1712,9 @@ impl Repository { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string - /// * [`BackendAccessErrorKind::NoSuitableIdFound`] - If no id could be found. - /// * [`BackendAccessErrorKind::IdNotUnique`] - If the id is not unique. 
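`get_index_entry` above now treats a missing blob id as an internal invariant violation and asks the user to report it. Sketched below with a `String` in place of the `RusticError::new(...).attach_context(...).ask_report()` chain:

```rust
use std::collections::BTreeMap;

/// Mirrors the `get_index_entry` change: a missing blob id is an internal invariant
/// violation, so the message asks the user to report it. The real code builds a
/// `RusticError` with `ErrorKind::Internal`, attaches the id as context and calls
/// `.ask_report()`; a plain `String` stands in here.
fn get_index_entry<'a>(index: &'a BTreeMap<String, u64>, blob_id: &str) -> Result<&'a u64, String> {
    index.get(blob_id).ok_or_else(|| {
        format!("Blob ID `{blob_id}` not found in index, but should be there. Please report this.")
    })
}

fn main() {
    let mut index = BTreeMap::new();
    let _ = index.insert("abc123".to_string(), 42_u64);
    assert!(get_index_entry(&index, "abc123").is_ok());
    assert!(get_index_entry(&index, "missing").is_err());
}
```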
- /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError - /// [`BackendAccessErrorKind::NoSuitableIdFound`]: crate::error::BackendAccessErrorKind::NoSuitableIdFound - /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique + /// * If the string is not a valid hexadecimal string + /// * If no id could be found. + /// * If the id is not unique. pub fn node_from_snapshot_path( &self, snap_path: &str, @@ -1940,13 +1909,11 @@ impl Repository { /// /// # Errors /// - /// * [`IndexErrorKind::BlobInIndexNotFound`] - If the blob is not found in the index + /// * If the blob is not found in the index /// /// # Returns /// /// The cached blob in bytes. - /// - /// [`IndexErrorKind::BlobInIndexNotFound`]: crate::error::IndexErrorKind::BlobInIndexNotFound pub fn get_blob_cached(&self, id: &BlobId, tpe: BlobType) -> RusticResult { self.get_blob_or_insert_with(id, || self.index().blob_from_backend(self.dbe(), tpe, id)) } @@ -1974,13 +1941,11 @@ impl Repository { /// /// # Errors /// - /// * [`IdErrorKind::HexError`] - If the string is not a valid hexadecimal string + /// * If the string is not a valid hexadecimal string /// /// # Returns /// /// The raw blob in bytes. - /// - /// [`IdErrorKind::HexError`]: crate::error::IdErrorKind::HexError pub fn cat_blob(&self, tpe: BlobType, id: &str) -> RusticResult { commands::cat::cat_blob(self, tpe, id) } @@ -1991,16 +1956,14 @@ impl Repository { /// /// * `node` - The node to dump /// * `w` - The writer to use - /// - /// # Note - /// - /// Currently, only regular file nodes are supported. /// /// # Errors /// - /// * [`CommandErrorKind::DumpNotSupported`] - If the node is not a file. + /// * If the node is not a file. + /// + /// # Note /// - /// [`CommandErrorKind::DumpNotSupported`]: crate::error::CommandErrorKind::DumpNotSupported + /// Currently, only regular file nodes are supported. pub fn dump(&self, node: &Node, w: &mut impl Write) -> RusticResult<()> { commands::dump::dump(self, node, w) } @@ -2020,15 +1983,12 @@ impl Repository { /// /// # Errors /// - /// * [`CommandErrorKind::ErrorCreating`] - If a directory could not be created. - /// * [`CommandErrorKind::ErrorCollecting`] - If the restore information could not be collected. + /// * If a directory could not be created. + /// * If the restore information could not be collected. /// /// # Returns /// /// The restore plan. - /// - /// [`CommandErrorKind::ErrorCreating`]: crate::error::CommandErrorKind::ErrorCreating - /// [`CommandErrorKind::ErrorCollecting`]: crate::error::CommandErrorKind::ErrorCollecting pub fn prepare_restore( &self, opts: &RestoreOptions, @@ -2081,7 +2041,7 @@ impl Repository { /// /// # Warning /// - /// If you remove the original snapshots, you may loose data! + /// * If you remove the original snapshots, you may loose data! 
/// /// # Errors /// diff --git a/crates/core/src/repository/command_input.rs b/crates/core/src/repository/command_input.rs index 6a258d4d..63e897b0 100644 --- a/crates/core/src/repository/command_input.rs +++ b/crates/core/src/repository/command_input.rs @@ -8,10 +8,56 @@ use log::{debug, error, trace, warn}; use serde::{Deserialize, Serialize, Serializer}; use serde_with::{serde_as, DisplayFromStr, PickFirst}; -use crate::{ - error::{RepositoryErrorKind, RusticErrorKind}, - RusticError, RusticResult, -}; +use crate::error::{ErrorKind, RusticError, RusticResult}; + +/// [`CommandInputErrorKind`] describes the errors that can be returned from the `CommandInput` +#[derive(thiserror::Error, Debug, displaydoc::Display)] +#[non_exhaustive] +pub enum CommandInputErrorKind { + /// Command execution failed: {context}:{what} : {source} + CommandExecutionFailed { + /// The context in which the command was called + context: String, + + /// The action that was performed + what: String, + + /// The source of the error + source: std::io::Error, + }, + /// Command error status: {context}:{what} : {status} + CommandErrorStatus { + /// The context in which the command was called + context: String, + + /// The action that was performed + what: String, + + /// The exit status of the command + status: ExitStatus, + }, + /// Splitting arguments failed: {arguments} : {source} + SplittingArgumentsFailed { + /// The arguments that were tried to be split + arguments: String, + + /// The source of the error + source: shell_words::ParseError, + }, + /// Process execution failed: {command:?} : {path:?} : {source} + ProcessExecutionFailed { + /// The command that was tried to be executed + command: CommandInput, + + /// The path in which the command was tried to be executed + path: std::path::PathBuf, + + /// The source of the error + source: std::io::Error, + }, +} + +pub(crate) type CommandInputResult = Result; /// A command to be called which can be given as CLI option as well as in config files /// `CommandInput` implements Serialize/Deserialize as well as FromStr. @@ -81,7 +127,7 @@ impl CommandInput { /// /// # Errors /// - /// `RusticError` if return status cannot be read + /// * If return status cannot be read pub fn run(&self, context: &str, what: &str) -> RusticResult<()> { if !self.is_set() { trace!("not calling command {context}:{what} - not set"); @@ -95,7 +141,7 @@ impl CommandInput { } impl FromStr for CommandInput { - type Err = RusticError; + type Err = CommandInputErrorKind; fn from_str(s: &str) -> Result { Ok(Self(_CommandInput::from_str(s)?)) } @@ -137,7 +183,7 @@ impl From> for _CommandInput { } impl FromStr for _CommandInput { - type Err = RusticError; + type Err = CommandInputErrorKind; fn from_str(s: &str) -> Result { Ok(split(s)?.into()) } @@ -164,7 +210,7 @@ pub enum OnFailure { } impl OnFailure { - fn eval(self, res: RusticResult) -> RusticResult> { + fn eval(self, res: CommandInputResult) -> RusticResult> { let res = self.display_result(res); match (res, self) { (Err(err), Self::Error) => Err(err), @@ -173,12 +219,13 @@ impl OnFailure { } } - /// Displays a result depending on the defined error handling which still yielding the same result + /// Displays a result depending on the defined error handling while still yielding the same result + /// /// # Note /// /// This can be used where an error might occur, but in that /// case we have to abort. 
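The new `CommandInputErrorKind` above switches to struct-style variants with named fields, and `display_result` becomes the single point where those low-level kinds are lifted into the crate-wide error. A self-contained sketch of both pieces (stand-in names; a `String` replaces `RusticError::with_source`):

```rust
use std::process::ExitStatus;

/// Stand-in for the named-field error variants used for external commands.
#[derive(thiserror::Error, Debug, displaydoc::Display)]
#[non_exhaustive]
pub enum CommandErrorKind {
    /// Command execution failed: {context}:{what} : {source}
    ExecutionFailed {
        /// The context in which the command was called
        context: String,
        /// The action that was performed
        what: String,
        /// The source of the error
        source: std::io::Error,
    },
    /// Command error status: {context}:{what} : {status}
    ErrorStatus {
        /// The context in which the command was called
        context: String,
        /// The action that was performed
        what: String,
        /// The exit status of the command
        status: ExitStatus,
    },
}

/// One place lifts the low-level kind into the crate-wide error (a `String` here).
fn display_result<T>(res: Result<T, CommandErrorKind>) -> Result<T, String> {
    res.map_err(|err| format!("experienced an error while calling an external command: {err}"))
}

fn main() {
    let failed: Result<(), _> = Err(CommandErrorKind::ExecutionFailed {
        context: "backup".to_string(),
        what: "run-before".to_string(),
        source: std::io::Error::new(std::io::ErrorKind::NotFound, "command not found"),
    });
    assert!(display_result(failed).is_err());
}
```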
- pub fn display_result(self, res: RusticResult) -> RusticResult { + pub fn display_result(self, res: CommandInputResult) -> RusticResult { if let Err(err) = &res { match self { Self::Error => { @@ -190,7 +237,14 @@ impl OnFailure { Self::Ignore => {} } } - res + + res.map_err(|err| { + RusticError::with_source( + ErrorKind::ExternalCommand, + "Experienced an error while calling an external command.", + err, + ) + }) } /// Handle a status of a called command depending on the defined error handling @@ -200,20 +254,22 @@ impl OnFailure { context: &str, what: &str, ) -> RusticResult<()> { - let status = status.map_err(|err| { - RepositoryErrorKind::CommandExecutionFailed(context.into(), what.into(), err).into() + let status = status.map_err(|err| CommandInputErrorKind::CommandExecutionFailed { + context: context.to_string(), + what: what.to_string(), + source: err, }); + let Some(status) = self.eval(status)? else { return Ok(()); }; if !status.success() { - let _: Option<()> = self.eval(Err(RepositoryErrorKind::CommandErrorStatus( - context.into(), - what.into(), + let _: Option<()> = self.eval(Err(CommandInputErrorKind::CommandErrorStatus { + context: context.to_string(), + what: what.to_string(), status, - ) - .into()))?; + }))?; } Ok(()) } @@ -221,6 +277,9 @@ impl OnFailure { /// helper to split arguments // TODO: Maybe use special parser (winsplit?) for windows? -fn split(s: &str) -> RusticResult> { - Ok(shell_words::split(s).map_err(|err| RusticErrorKind::Command(err.into()))?) +fn split(s: &str) -> CommandInputResult> { + shell_words::split(s).map_err(|err| CommandInputErrorKind::SplittingArgumentsFailed { + arguments: s.to_string(), + source: err, + }) } diff --git a/crates/core/src/repository/warm_up.rs b/crates/core/src/repository/warm_up.rs index 14ea76da..22131a92 100644 --- a/crates/core/src/repository/warm_up.rs +++ b/crates/core/src/repository/warm_up.rs @@ -6,7 +6,7 @@ use rayon::ThreadPoolBuilder; use crate::{ backend::{FileType, ReadBackend}, - error::{RepositoryErrorKind, RusticResult}, + error::{ErrorKind, RusticError, RusticResult}, progress::{Progress, ProgressBars}, repofile::packfile::PackId, repository::Repository, @@ -27,11 +27,8 @@ pub(super) mod constants { /// /// # Errors /// -/// * [`RepositoryErrorKind::FromSplitError`] - If the command could not be parsed. -/// * [`RepositoryErrorKind::FromThreadPoolbilderError`] - If the thread pool could not be created. -/// -/// [`RepositoryErrorKind::FromSplitError`]: crate::error::RepositoryErrorKind::FromSplitError -/// [`RepositoryErrorKind::FromThreadPoolbilderError`]: crate::error::RepositoryErrorKind::FromThreadPoolbilderError +/// * If the command could not be parsed. +/// * If the thread pool could not be created. pub(crate) fn warm_up_wait( repo: &Repository, packs: impl ExactSizeIterator, @@ -54,11 +51,8 @@ pub(crate) fn warm_up_wait( /// /// # Errors /// -/// * [`RepositoryErrorKind::FromSplitError`] - If the command could not be parsed. -/// * [`RepositoryErrorKind::FromThreadPoolbilderError`] - If the thread pool could not be created. -/// -/// [`RepositoryErrorKind::FromSplitError`]: crate::error::RepositoryErrorKind::FromSplitError -/// [`RepositoryErrorKind::FromThreadPoolbilderError`]: crate::error::RepositoryErrorKind::FromThreadPoolbilderError +/// * If the command could not be parsed. +/// * If the thread pool could not be created. 
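The reworked `split` helper above wraps `shell_words::split` and records the offending input in the error instead of funnelling it through `RusticErrorKind::Command`. A runnable sketch of the same idea, with a `String` standing in for `CommandInputErrorKind::SplittingArgumentsFailed`:

```rust
/// Mirrors the new `split` helper: `shell_words::split` does the parsing, and the
/// offending input is recorded alongside the parse error.
fn split(s: &str) -> Result<Vec<String>, String> {
    shell_words::split(s).map_err(|err| format!("splitting arguments failed for `{s}`: {err}"))
}

fn main() {
    // A typical warm-up command line, quoted shell-style.
    assert_eq!(split(r#"sh -c "echo %id""#).unwrap(), ["sh", "-c", "echo %id"]);
    // An unbalanced quote is reported together with the input that caused it.
    assert!(split(r#"echo "unterminated"#).is_err());
}
```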
pub(crate) fn warm_up( repo: &Repository, packs: impl ExactSizeIterator, @@ -81,9 +75,7 @@ pub(crate) fn warm_up( /// /// # Errors /// -/// * [`RepositoryErrorKind::FromSplitError`] - If the command could not be parsed. -/// -/// [`RepositoryErrorKind::FromSplitError`]: crate::error::RepositoryErrorKind::FromSplitError +/// * If the command could not be parsed. fn warm_up_command( packs: impl ExactSizeIterator, command: &CommandInput, @@ -97,8 +89,21 @@ fn warm_up_command( .iter() .map(|c| c.replace("%id", &pack.to_hex())) .collect(); + debug!("calling {command:?}..."); - let status = Command::new(command.command()).args(&args).status()?; + + let status = Command::new(command.command()) + .args(&args) + .status() + .map_err(|err| { + RusticError::with_source( + ErrorKind::ExternalCommand, + "Error in executing warm-up command `{command}`.", + err, + ) + .attach_context("command", command.to_string()) + })?; + if !status.success() { warn!("warm-up command was not successful for pack {pack:?}. {status}"); } @@ -116,9 +121,7 @@ fn warm_up_command( /// /// # Errors /// -/// * [`RepositoryErrorKind::FromThreadPoolbilderError`] - If the thread pool could not be created. -/// -/// [`RepositoryErrorKind::FromThreadPoolbilderError`]: crate::error::RepositoryErrorKind::FromThreadPoolbilderError +/// * If the thread pool could not be created. fn warm_up_repo( repo: &Repository, packs: impl ExactSizeIterator, @@ -129,7 +132,13 @@ fn warm_up_repo( let pool = ThreadPoolBuilder::new() .num_threads(constants::MAX_READER_THREADS_NUM) .build() - .map_err(RepositoryErrorKind::FromThreadPoolbilderError)?; + .map_err(|err| { + RusticError::with_source( + ErrorKind::Internal, + "Failed to create thread pool for warm-up. Please try again.", + err, + ) + })?; let progress_bar_ref = &progress_bar; let backend = &repo.be; pool.in_place_scope(|scope| { diff --git a/crates/core/src/vfs.rs b/crates/core/src/vfs.rs index 5ae211b9..5904ea4d 100644 --- a/crates/core/src/vfs.rs +++ b/crates/core/src/vfs.rs @@ -18,16 +18,26 @@ pub use crate::vfs::webdavfs::WebDavFS; use crate::{ blob::{tree::TreeId, BlobId, DataId}, - error::VfsErrorKind, - repofile::{BlobType, Metadata, Node, NodeType, SnapshotFile}, -}; -use crate::{ + error::{ErrorKind, RusticError, RusticResult}, index::ReadIndex, + repofile::{BlobType, Metadata, Node, NodeType, SnapshotFile}, repository::{IndexedFull, IndexedTree, Repository}, vfs::format::FormattedSnapshot, - RusticResult, }; +/// [`VfsErrorKind`] describes the errors that can be returned from the Virtual File System +#[derive(thiserror::Error, Debug, displaydoc::Display)] +pub enum VfsErrorKind { + /// Directory exists as non-virtual directory + DirectoryExistsAsNonVirtual, + /// Only normal paths allowed + OnlyNormalPathsAreAllowed, + /// Name `{0:?}` doesn't exist + NameDoesNotExist(OsString), +} + +pub(crate) type VfsResult = Result; + #[derive(Debug, Clone, Copy)] /// `IdenticalSnapshot` describes how to handle identical snapshots. 
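`warm_up_command` above substitutes `%id` into the configured arguments and wraps a failed spawn in a descriptive error, while a non-zero exit status only produces a warning. A small sketch of that call path; note that the sketch returns an error on a failed status where the real code merely warns, and the invocation in `main` assumes a Unix-like system where a `true` binary exists:

```rust
use std::process::Command;

/// Mirrors the warm-up call path: every `%id` in the configured arguments is replaced
/// with the pack id before spawning, and a spawn failure becomes a descriptive error
/// (a `String` stands in for the `RusticError` built in the real code).
fn run_warm_up(program: &str, args: &[String], pack_id: &str) -> Result<(), String> {
    let args: Vec<String> = args.iter().map(|arg| arg.replace("%id", pack_id)).collect();

    let status = Command::new(program)
        .args(&args)
        .status()
        .map_err(|err| format!("error executing warm-up command `{program}`: {err}"))?;

    if !status.success() {
        // The real code only logs a warning here and continues with the next pack.
        return Err(format!("warm-up command was not successful for pack {pack_id}: {status}"));
    }
    Ok(())
}

fn main() {
    // Hypothetical invocation; assumes a Unix-like system.
    let _ = run_warm_up("true", &["%id".to_string()], "deadbeef");
}
```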
pub enum IdenticalSnapshot { @@ -85,20 +95,17 @@ impl VfsTree { /// /// # Errors /// - /// * [`VfsErrorKind::OnlyNormalPathsAreAllowed`] if the path is not a normal path - /// * [`VfsErrorKind::DirectoryExistsAsNonVirtual`] if the path is a directory in the repository + /// * If the path is not a normal path + /// * If the path is a directory in the repository /// /// # Returns /// /// `Ok(())` if the tree was added successfully - /// - /// [`VfsErrorKind::DirectoryExistsAsNonVirtual`]: crate::error::VfsErrorKind::DirectoryExistsAsNonVirtual - /// [`VfsErrorKind::OnlyNormalPathsAreAllowed`]: crate::error::VfsErrorKind::OnlyNormalPathsAreAllowed - fn add_tree(&mut self, path: &Path, new_tree: Self) -> RusticResult<()> { + fn add_tree(&mut self, path: &Path, new_tree: Self) -> VfsResult<()> { let mut tree = self; let mut components = path.components(); let Some(Component::Normal(last)) = components.next_back() else { - return Err(VfsErrorKind::OnlyNormalPathsAreAllowed.into()); + return Err(VfsErrorKind::OnlyNormalPathsAreAllowed); }; for comp in components { @@ -110,14 +117,14 @@ impl VfsTree { .or_insert(Self::VirtualTree(BTreeMap::new())); } _ => { - return Err(VfsErrorKind::DirectoryExistsAsNonVirtual.into()); + return Err(VfsErrorKind::DirectoryExistsAsNonVirtual); } } } } let Self::VirtualTree(virtual_tree) = tree else { - return Err(VfsErrorKind::DirectoryExistsAsNonVirtual.into()); + return Err(VfsErrorKind::DirectoryExistsAsNonVirtual); }; _ = virtual_tree.insert(last.to_os_string(), new_tree); @@ -137,7 +144,7 @@ impl VfsTree { /// # Returns /// /// If the path is within a real repository tree, this returns the [`VfsTree::RusticTree`] and the remaining path - fn get_path(&self, path: &Path) -> RusticResult> { + fn get_path(&self, path: &Path) -> VfsResult> { let mut tree = self; let mut components = path.components(); loop { @@ -151,7 +158,7 @@ impl VfsTree { if let Some(new_tree) = virtual_tree.get(name) { tree = new_tree; } else { - return Err(VfsErrorKind::NameDoesNotExist(name.to_os_string()).into()); + return Err(VfsErrorKind::NameDoesNotExist(name.to_os_string())); }; } None => { @@ -193,7 +200,7 @@ impl Vfs { /// /// # Panics /// - /// If the node is not a directory + /// * If the node is not a directory #[must_use] pub fn from_dir_node(node: &Node) -> Self { let tree = VfsTree::RusticTree(node.subtree.unwrap()); @@ -212,11 +219,9 @@ impl Vfs { /// /// # Errors /// - /// * [`VfsErrorKind::OnlyNormalPathsAreAllowed`] if the path is not a normal path - /// * [`VfsErrorKind::DirectoryExistsAsNonVirtual`] if the path is a directory in the repository - /// - /// [`VfsErrorKind::DirectoryExistsAsNonVirtual`]: crate::error::VfsErrorKind::DirectoryExistsAsNonVirtual - /// [`VfsErrorKind::OnlyNormalPathsAreAllowed`]: crate::error::VfsErrorKind::OnlyNormalPathsAreAllowed + /// * If the path is not a normal path + /// * If the path is a directory in the repository + #[allow(clippy::too_many_lines)] pub fn from_snapshots( mut snapshots: Vec, path_template: &str, @@ -249,7 +254,7 @@ impl Vfs { let filename = path.file_name().map(OsStr::to_os_string); let parent_path = path.parent().map(Path::to_path_buf); - // Save pathes for latest entries, if requested + // Save paths for latest entries, if requested if matches!(latest_option, Latest::AsLink) { _ = dirs_for_link.insert(parent_path.clone(), filename.clone()); } @@ -264,10 +269,30 @@ impl Vfs { && last_tree == snap.tree { if let Some(name) = last_name { - tree.add_tree(path, VfsTree::Link(name))?; + tree.add_tree(path, 
VfsTree::Link(name.clone())) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Vfs, + "Failed to add a link `{name}` to root tree at `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("name", name.to_string_lossy()) + .ask_report() + })?; } } else { - tree.add_tree(path, VfsTree::RusticTree(snap.tree))?; + tree.add_tree(path, VfsTree::RusticTree(snap.tree)) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Vfs, + "Failed to add repository tree `{tree_id}` to root tree at `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("tree_id", snap.tree.to_string()) + .ask_report() + })?; } } last_parent = parent_path; @@ -282,7 +307,18 @@ impl Vfs { for (path, target) in dirs_for_link { if let (Some(mut path), Some(target)) = (path, target) { path.push("latest"); - tree.add_tree(&path, VfsTree::Link(target))?; + tree.add_tree(&path, VfsTree::Link(target.clone())) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Vfs, + "Failed to link latest `{target}` entry to root tree at `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("target", target.to_string_lossy()) + .attach_context("latest", "link") + .ask_report() + })?; } } } @@ -290,7 +326,18 @@ impl Vfs { for (path, subtree) in dirs_for_snap { if let Some(mut path) = path { path.push("latest"); - tree.add_tree(&path, VfsTree::RusticTree(subtree))?; + tree.add_tree(&path, VfsTree::RusticTree(subtree)) + .map_err(|err| { + RusticError::with_source( + ErrorKind::Vfs, + "Failed to add latest subtree id `{id}` to root tree at `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .attach_context("tree_id", subtree.to_string()) + .attach_context("latest", "dir") + .ask_report() + })?; } } } @@ -307,13 +354,12 @@ impl Vfs { /// /// # Errors /// - /// * [`VfsErrorKind::NameDoesNotExist`] - if the component name doesn't exist + /// * If the component name doesn't exist /// /// # Returns /// /// The [`Node`] at the specified path /// - /// [`VfsErrorKind::NameDoesNotExist`]: crate::error::VfsErrorKind::NameDoesNotExist /// [`Tree`]: crate::repofile::Tree pub fn node_from_path( &self, @@ -321,20 +367,26 @@ impl Vfs { path: &Path, ) -> RusticResult { let meta = Metadata::default(); - match self.tree.get_path(path)? { + match self.tree.get_path(path).map_err(|err| { + RusticError::with_source( + ErrorKind::Vfs, + "Failed to get tree at given path `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .ask_report() + })? { VfsPath::RusticPath(tree_id, path) => Ok(repo.node_from_path(*tree_id, &path)?), VfsPath::VirtualTree(_) => { Ok(Node::new(String::new(), NodeType::Dir, meta, None, None)) } - VfsPath::Link(target) => { - return Ok(Node::new( - String::new(), - NodeType::from_link(Path::new(target)), - meta, - None, - None, - )); - } + VfsPath::Link(target) => Ok(Node::new( + String::new(), + NodeType::from_link(Path::new(target)), + meta, + None, + None, + )), } } @@ -347,24 +399,31 @@ impl Vfs { /// /// # Errors /// - /// * [`VfsErrorKind::NameDoesNotExist`] - if the component name doesn't exist + /// * If the component name doesn't exist /// /// # Returns /// /// The list of [`Node`]s at the specified path /// - /// [`VfsErrorKind::NameDoesNotExist`]: crate::error::VfsErrorKind::NameDoesNotExist /// [`Tree`]: crate::repofile::Tree /// /// # Panics /// - /// Panics if the path is not a directory. + /// * Panics if the path is not a directory. 
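The `map_err` chains above all follow one boundary pattern: the module-local error stays lightweight, and only at the public API is it wrapped into a `RusticError` with display context attached. A hedged, self-contained sketch of that pattern, using a `std::io::Error` stand-in for the module-local error type:

use rustic_core::{ErrorKind, RusticError};
use std::path::Path;

// maps a low-level error into the public error type at the API boundary,
// filling the `{path}` placeholder via attached context
fn to_public_error(path: &Path, err: std::io::Error) -> Box<RusticError> {
    RusticError::with_source(
        ErrorKind::Vfs,
        "Failed to get tree at given path `{path}`",
        err,
    )
    .attach_context("path", path.display().to_string())
    .ask_report()
}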
pub fn dir_entries_from_path( &self, repo: &Repository, path: &Path, ) -> RusticResult> { - let result = match self.tree.get_path(path)? { + let result = match self.tree.get_path(path).map_err(|err| { + RusticError::with_source( + ErrorKind::Vfs, + "Failed to get tree at given path `{path}`", + err, + ) + .attach_context("path", path.display().to_string()) + .ask_report() + })? { VfsPath::RusticPath(tree_id, path) => { let node = repo.node_from_path(*tree_id, &path)?; if node.is_dir() { @@ -385,7 +444,11 @@ impl Vfs { }) .collect(), VfsPath::Link(str) => { - return Err(VfsErrorKind::NoDirectoryEntriesForSymlinkFound(str.clone()).into()); + return Err(RusticError::new( + ErrorKind::Vfs, + "No directory entries for symlink `{symlink}` found. Is the path valid unicode?", + ) + .attach_context("symlink", str.to_string_lossy().to_string())); } }; Ok(result) @@ -446,7 +509,7 @@ impl OpenFile { /// /// # Panics /// - /// Panics if the `Node` has no content + /// * Panics if the `Node` has no content pub fn from_node(repo: &Repository, node: &Node) -> Self { let mut start = 0; let mut content: Vec<_> = node @@ -461,7 +524,7 @@ impl OpenFile { }) .collect(); - // content is assumed to be partioned, so we add a starts_at:MAX entry + // content is assumed to be partitioned, so we add a starts_at:MAX entry content.push(BlobInfo { id: DataId::default(), starts_at: usize::MAX, @@ -496,21 +559,30 @@ impl OpenFile { // find the start of relevant blobs => find the largest index such that self.content[i].starts_at <= offset, but // self.content[i+1] > offset (note that a last dummy element has been added) let mut i = self.content.partition_point(|c| c.starts_at <= offset) - 1; + offset -= self.content[i].starts_at; + let mut result = BytesMut::with_capacity(length); while length > 0 && i < self.content.len() - 1 { let data = repo.get_blob_cached(&BlobId::from(*self.content[i].id), BlobType::Data)?; + if offset > data.len() { // we cannot read behind the blob. This only happens if offset is too large to fit in the last blob break; } + let to_copy = (data.len() - offset).min(length); + result.extend_from_slice(&data[offset..offset + to_copy]); + offset = 0; + length -= to_copy; + i += 1; } + Ok(result.into()) } } diff --git a/crates/core/src/vfs/format.rs b/crates/core/src/vfs/format.rs index b3f6dee1..91f26eb6 100644 --- a/crates/core/src/vfs/format.rs +++ b/crates/core/src/vfs/format.rs @@ -9,15 +9,15 @@ use runtime_format::{FormatKey, FormatKeyError}; /// To be formatted with [`runtime_format`]. /// /// The following keys are available: -/// - `id`: the snapshot id -/// - `long_id`: the snapshot id as a string -/// - `time`: the snapshot time -/// - `username`: the snapshot username -/// - `hostname`: the snapshot hostname -/// - `label`: the snapshot label -/// - `tags`: the snapshot tags -/// - `backup_start`: the snapshot backup start time -/// - `backup_end`: the snapshot backup end time +/// * `id`: the snapshot id +/// * `long_id`: the snapshot id as a string +/// * `time`: the snapshot time +/// * `username`: the snapshot username +/// * `hostname`: the snapshot hostname +/// * `label`: the snapshot label +/// * `tags`: the snapshot tags +/// * `backup_start`: the snapshot backup start time +/// * `backup_end`: the snapshot backup end time #[derive(Debug)] pub(crate) struct FormattedSnapshot<'a> { /// The snapshot file. 
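The offset arithmetic in `OpenFile::read_at` above hinges on one invariant: `content` is sorted by `starts_at` and terminated by a `usize::MAX` sentinel, so `partition_point` finds the blob containing any offset. A small standalone check of that lookup, using plain `usize` offsets instead of `BlobInfo`:

fn locate(starts: &[usize], offset: usize) -> (usize, usize) {
    // first index whose start is greater than `offset`, minus one,
    // is the blob containing `offset`; the remainder is the in-blob offset
    let i = starts.partition_point(|&s| s <= offset) - 1;
    (i, offset - starts[i])
}

fn main() {
    // three blobs starting at 0, 100 and 250, plus the sentinel entry
    let starts = [0, 100, 250, usize::MAX];
    assert_eq!(locate(&starts, 0), (0, 0));
    assert_eq!(locate(&starts, 120), (1, 20));
    assert_eq!(locate(&starts, 300), (2, 50));
}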
diff --git a/crates/core/src/vfs/webdavfs.rs b/crates/core/src/vfs/webdavfs.rs index 1813b612..c2401136 100644 --- a/crates/core/src/vfs/webdavfs.rs +++ b/crates/core/src/vfs/webdavfs.rs @@ -84,7 +84,7 @@ impl WebDavFS { /// /// # Errors /// - /// * [`FsError::GeneralFailure`] - If the [`Tree`] could not be found + /// * If the [`Tree`] could not be found /// /// # Returns /// @@ -106,7 +106,7 @@ impl WebDavFS { /// /// # Errors /// - /// * [`FsError::GeneralFailure`] - If the [`Tree`] could not be found + /// * If the [`Tree`] could not be found /// /// # Returns /// @@ -186,7 +186,7 @@ impl = Box::new(DavFsFile { node, open, @@ -274,7 +274,7 @@ impl DavFile for D .fs .repo .read_file_at(&self.open, self.seek, count) - .map_err(|_| FsError::GeneralFailure)?; + .map_err(|_err| FsError::GeneralFailure)?; self.seek += data.len(); Ok(data) } diff --git a/crates/core/tests/errors.rs b/crates/core/tests/errors.rs new file mode 100644 index 00000000..2171dd81 --- /dev/null +++ b/crates/core/tests/errors.rs @@ -0,0 +1,31 @@ +use rstest::{fixture, rstest}; + +use rustic_core::{ErrorKind, RusticError, Severity, Status}; + +#[fixture] +fn error() -> Box { + RusticError::with_source( + ErrorKind::InputOutput, + "A file could not be read, make sure the file at `{path}` is existing and readable by the system.", + std::io::Error::new(std::io::ErrorKind::ConnectionReset, "Networking Error"), + ) + .attach_context("path", "/path/to/file") + .attach_context("called", "used s3 backend") + .attach_status(Status::Permanent) + .attach_severity(Severity::Error) + .attach_error_code("C001") + .append_guidance_line("Appended guidance line") + .prepend_guidance_line("Prepended guidance line") + .attach_existing_issue_url("https://github.com/rustic-rs/rustic_core/issues/209") + .ask_report() +} + +#[rstest] +fn test_error_display(error: Box) { + insta::assert_snapshot!(error); +} + +#[rstest] +fn test_error_debug(error: Box) { + insta::assert_debug_snapshot!(error); +} diff --git a/crates/core/tests/integration.rs b/crates/core/tests/integration.rs index 4a210234..57d5a63e 100644 --- a/crates/core/tests/integration.rs +++ b/crates/core/tests/integration.rs @@ -253,7 +253,7 @@ fn test_backup_with_tar_gz_passes( // re-read index let repo = repo.to_indexed_ids()?; - // third backup with tags and explicitely given parent + // third backup with tags and explicitly given parent let snap = SnapshotOptions::default() .tags([StringList::from_str("a,b")?]) .to_snapshot()?; diff --git a/crates/core/tests/keys.rs b/crates/core/tests/keys.rs index 8e99ff32..45608ad5 100644 --- a/crates/core/tests/keys.rs +++ b/crates/core/tests/keys.rs @@ -10,7 +10,7 @@ use sha2::{Digest, Sha256}; #[case("test", true)] #[case("test2", true)] #[case("wrong", false)] -fn working_keys(#[case] password: &str, #[case] should_work: bool) -> Result<()> { +fn test_working_keys_passes(#[case] password: &str, #[case] should_work: bool) -> Result<()> { let be = InMemoryBackend::new(); add_to_be(&be, FileType::Config, "tests/fixtures/config")?; add_to_be(&be, FileType::Key, "tests/fixtures/key1")?; @@ -29,7 +29,7 @@ fn working_keys(#[case] password: &str, #[case] should_work: bool) -> Result<()> #[test] // using an invalid keyfile: Here the scrypt params are not valid -fn failing_key() -> Result<()> { +fn test_keys_failing_passes() -> Result<()> { let be = InMemoryBackend::new(); add_to_be(&be, FileType::Config, "tests/fixtures/config")?; add_to_be(&be, FileType::Key, "tests/fixtures/key-failing")?; diff --git 
a/crates/core/tests/snapshots/errors__error_debug.snap b/crates/core/tests/snapshots/errors__error_debug.snap new file mode 100644 index 00000000..6868ff11 --- /dev/null +++ b/crates/core/tests/snapshots/errors__error_debug.snap @@ -0,0 +1,42 @@ +--- +source: crates/core/tests/errors.rs +expression: error +--- +RusticError { + kind: InputOutput, + guidance: "Prepended guidance line\nA file could not be read, make sure the file at `{path}` is existing and readable by the system.\nAppended guidance line", + docs_url: None, + error_code: Some( + "C001", + ), + ask_report: true, + existing_issue_urls: [ + "https://github.com/rustic-rs/rustic_core/issues/209", + ], + new_issue_url: None, + context: [ + ( + "path", + "/path/to/file", + ), + ( + "called", + "used s3 backend", + ), + ], + source: Some( + Custom { + kind: ConnectionReset, + error: "Networking Error", + }, + ), + severity: Some( + Error, + ), + status: Some( + Permanent, + ), + backtrace: Some( + , + ), +} diff --git a/crates/core/tests/snapshots/errors__error_display.snap b/crates/core/tests/snapshots/errors__error_display.snap new file mode 100644 index 00000000..3acc4c8b --- /dev/null +++ b/crates/core/tests/snapshots/errors__error_display.snap @@ -0,0 +1,39 @@ +--- +source: crates/core/tests/errors.rs +expression: error +--- +`rustic_core` experienced an error related to `input/output operations`. + +Message: +Prepended guidance line +A file could not be read, make sure the file at `/path/to/file` is existing and readable by the system. +Appended guidance line + +For more information, see: https://rustic.cli.rs/docs/errors/C001 + +Related issues: +- https://github.com/rustic-rs/rustic_core/issues/209 + +We believe this is a bug, please report it by opening an issue at: +https://github.com/rustic-rs/rustic_core/issues/new + +If you can, please attach an anonymized debug log to the issue. + +Thank you for helping us improve rustic! + + +Some additional details ... 
+ +Context: +- called: used s3 backend + +Caused by: +Networking Error + + +Severity: Error + +Status: Permanent + +Backtrace: +disabled backtrace (set 'RUST_BACKTRACE="1"' environment variable to enable) diff --git a/crates/testing/src/backend.rs b/crates/testing/src/backend.rs index 1c7ca4d1..418bd320 100644 --- a/crates/testing/src/backend.rs +++ b/crates/testing/src/backend.rs @@ -2,11 +2,12 @@ pub mod in_memory_backend { use std::{collections::BTreeMap, sync::RwLock}; - use anyhow::{bail, Result}; use bytes::Bytes; use enum_map::EnumMap; - use rustic_core::{FileType, Id, ReadBackend, WriteBackend}; + use rustic_core::{ + ErrorKind, FileType, Id, ReadBackend, RusticError, RusticResult, WriteBackend, + }; #[derive(Debug)] /// In-Memory backend to be used for testing @@ -31,7 +32,7 @@ pub mod in_memory_backend { "test".to_string() } - fn list_with_size(&self, tpe: FileType) -> Result> { + fn list_with_size(&self, tpe: FileType) -> RusticResult> { Ok(self.0.read().unwrap()[tpe] .iter() .map(|(id, byte)| { @@ -43,7 +44,7 @@ pub mod in_memory_backend { .collect()) } - fn read_full(&self, tpe: FileType, id: &Id) -> Result { + fn read_full(&self, tpe: FileType, id: &Id) -> RusticResult { Ok(self.0.read().unwrap()[tpe][id].clone()) } @@ -54,26 +55,39 @@ pub mod in_memory_backend { _cacheable: bool, offset: u32, length: u32, - ) -> Result { + ) -> RusticResult { Ok(self.0.read().unwrap()[tpe][id].slice(offset as usize..(offset + length) as usize)) } } impl WriteBackend for InMemoryBackend { - fn create(&self) -> Result<()> { + fn create(&self) -> RusticResult<()> { Ok(()) } - fn write_bytes(&self, tpe: FileType, id: &Id, _cacheable: bool, buf: Bytes) -> Result<()> { + fn write_bytes( + &self, + tpe: FileType, + id: &Id, + _cacheable: bool, + buf: Bytes, + ) -> RusticResult<()> { if self.0.write().unwrap()[tpe].insert(*id, buf).is_some() { - bail!("id {id} already exists"); + return Err( + RusticError::new(ErrorKind::Backend, "ID `{id}` already exists.") + .attach_context("id", id.to_string()), + ); } + Ok(()) } - fn remove(&self, tpe: FileType, id: &Id, _cacheable: bool) -> Result<()> { + fn remove(&self, tpe: FileType, id: &Id, _cacheable: bool) -> RusticResult<()> { if self.0.write().unwrap()[tpe].remove(id).is_none() { - bail!("id {id} doesn't exists"); + return Err( + RusticError::new(ErrorKind::Backend, "ID `{id}` does not exist.") + .attach_context("id", id.to_string()), + ); } Ok(()) }
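With the `anyhow` layer removed from the testing backend, a duplicate write now surfaces as a boxed `RusticError` of kind `Backend` instead of a `bail!`-ed anyhow error. A hedged usage sketch (the `rustic_testing` import path is assumed from the module layout above and may differ):

use bytes::Bytes;
use rustic_core::{FileType, Id, WriteBackend};
use rustic_testing::backend::in_memory_backend::InMemoryBackend; // path assumed

fn main() {
    let be = InMemoryBackend::new();
    let id = Id::default(); // assuming `Id` implements `Default`
    let buf = Bytes::from_static(b"config");

    // first write succeeds; a second write with the same id hits the new error path
    assert!(be
        .write_bytes(FileType::Config, &id, false, buf.clone())
        .is_ok());
    let err = be
        .write_bytes(FileType::Config, &id, false, buf)
        .unwrap_err();
    eprintln!("{err}");
}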