Browse Source

Tar files, better return codes, etc.

pull/10/head
Dennis Schwerdel 5 years ago
parent
commit
15a01dfbac
  1. 21
      Cargo.lock
  2. 1
      Cargo.toml
  3. 1
      README.md
  4. 0
      docs/excludes.default
  5. 274
      src/cli/args.rs
  6. 272
      src/cli/mod.rs
  7. 7
      src/main.rs
  8. 2
      src/prelude.rs
  9. 9
      src/repository/backup.rs
  10. 4
      src/repository/backup_file.rs
  11. 56
      src/repository/basic_io.rs
  12. 6
      src/repository/mod.rs
  13. 220
      src/repository/tarfile.rs
  14. 4
      src/repository/vacuum.rs

21
Cargo.lock generated

@@ -25,6 +25,7 @@ dependencies = [
"serde_yaml 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"sodiumoxide 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)",
"squash-sys 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tar 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -332,6 +333,16 @@ name = "strsim"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "tar"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tempdir"
version = "0.3.5"
@@ -427,6 +438,14 @@ name = "winapi-build"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "xattr"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "yaml-rust"
version = "0.3.5"
@@ -477,6 +496,7 @@ dependencies = [
"checksum sodiumoxide 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "bc02c0bc77ffed8e8eaef004399b825cf4fd8aa02d0af6e473225affd583ff4d"
"checksum squash-sys 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "db1f9dde91d819b7746e153bc32489fa19e6a106c3d7f2b92187a4efbdc88b40"
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
"checksum tar 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "c2374f318bbe2c5ac6c83dd6240d5f1a73106f72d39b3f7d6f8d8637c7b425d8"
"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
"checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
"checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a"
@@ -491,4 +511,5 @@ dependencies = [
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
"checksum xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "5f04de8a1346489a2f9e9bd8526b73d135ec554227b17568456e86aa35b6f3fc"
"checksum yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e66366e18dc58b46801afbf2ca7661a9f59cc8c5962c29892b6039b4f86fa992"

1
Cargo.toml

@@ -26,6 +26,7 @@ regex = "0.2"
fuse = "0.3"
lazy_static = "0.2"
rand = "0.3"
tar = "0.4"
time = "*"
libc = "*"

1
README.md

@@ -99,7 +99,6 @@ Recommended: Brotli/2-7
### Core functionality
- Recompress & combine bundles
- Allow to use tar files for backup and restore (--tar, http://alexcrichton.com/tar-rs/tar/index.html)
- File attributes
- xattrs https://crates.io/crates/xattr

0
excludes.default → docs/excludes.default

274
src/cli/args.rs

@@ -1,8 +1,6 @@
use ::prelude::*;
use super::*;
use std::process::exit;
pub enum Arguments {
Init {
@@ -23,13 +21,15 @@ pub enum Arguments {
same_device: bool,
excludes: Vec<String>,
excludes_from: Option<String>,
no_default_excludes: bool
no_default_excludes: bool,
tar: bool
},
Restore {
repo_path: String,
backup_name: String,
inode: Option<String>,
dst_path: String
dst_path: String,
tar: bool
},
Remove {
repo_path: String,
@@ -126,7 +126,7 @@ pub enum Arguments {
}
pub fn parse_repo_path(repo_path: &str, backup_restr: Option<bool>, path_restr: Option<bool>) -> (&str, Option<&str>, Option<&str>) {
pub fn parse_repo_path(repo_path: &str, backup_restr: Option<bool>, path_restr: Option<bool>) -> Result<(&str, Option<&str>, Option<&str>), ErrorCode> {
let mut parts = repo_path.splitn(3, "::");
let mut repo = parts.next().unwrap_or(&DEFAULT_REPOSITORY);
if repo.is_empty() {
@@ -146,93 +146,93 @@ pub fn parse_repo_path(repo_path: &str, backup_restr: Option<bool>, path_restr:
}
if let Some(restr) = backup_restr {
if !restr && backup.is_some() {
println!("No backup may be given here");
exit(1);
error!("No backup may be given here");
return Err(ErrorCode::InvalidArgs);
}
if restr && backup.is_none() {
println!("A backup must be specified");
exit(1);
error!("A backup must be specified");
return Err(ErrorCode::InvalidArgs);
}
}
if let Some(restr) = path_restr {
if !restr && path.is_some() {
println!("No subpath may be given here");
exit(1);
error!("No subpath may be given here");
return Err(ErrorCode::InvalidArgs);
}
if restr && path.is_none() {
println!("A subpath must be specified");
exit(1);
error!("A subpath must be specified");
return Err(ErrorCode::InvalidArgs);
}
}
(repo, backup, path)
Ok((repo, backup, path))
}
fn parse_num(num: &str, name: &str) -> u64 {
fn parse_num(num: &str, name: &str) -> Result<u64, ErrorCode> {
if let Ok(num) = num.parse::<u64>() {
num
Ok(num)
} else {
error!("{} must be a number, was '{}'", name, num);
exit(1);
Err(ErrorCode::InvalidArgs)
}
}
fn parse_chunker(val: &str) -> ChunkerType {
fn parse_chunker(val: &str) -> Result<ChunkerType, ErrorCode> {
if let Ok(chunker) = ChunkerType::from_string(val) {
chunker
Ok(chunker)
} else {
error!("Invalid chunker method/size: {}", val);
exit(1);
Err(ErrorCode::InvalidArgs)
}
}
fn parse_compression(val: &str) -> Option<Compression> {
fn parse_compression(val: &str) -> Result<Option<Compression>, ErrorCode> {
if val == "none" {
return None
return Ok(None)
}
if let Ok(compression) = Compression::from_string(val) {
Some(compression)
Ok(Some(compression))
} else {
error!("Invalid compression method/level: {}", val);
exit(1);
Err(ErrorCode::InvalidArgs)
}
}
fn parse_public_key(val: &str) -> PublicKey {
fn parse_public_key(val: &str) -> Result<PublicKey, ErrorCode> {
let bytes = match parse_hex(val) {
Ok(bytes) => bytes,
Err(_) => {
error!("Invalid key: {}", val);
exit(1);
return Err(ErrorCode::InvalidArgs);
}
};
if let Some(key) = PublicKey::from_slice(&bytes) {
key
Ok(key)
} else {
error!("Invalid key: {}", val);
exit(1);
Err(ErrorCode::InvalidArgs)
}
}
fn parse_hash(val: &str) -> HashMethod {
fn parse_hash(val: &str) -> Result<HashMethod, ErrorCode> {
if let Ok(hash) = HashMethod::from(val) {
hash
Ok(hash)
} else {
error!("Invalid hash method: {}", val);
exit(1);
Err(ErrorCode::InvalidArgs)
}
}
fn parse_bundle_id(val: &str) -> BundleId {
fn parse_bundle_id(val: &str) -> Result<BundleId, ErrorCode> {
if let Ok(hash) = Hash::from_string(val) {
BundleId(hash)
Ok(BundleId(hash))
} else {
error!("Invalid bundle id: {}", val);
exit(1);
Err(ErrorCode::InvalidArgs)
}
}
#[allow(unknown_lints,cyclomatic_complexity)]
pub fn parse() -> Arguments {
pub fn parse() -> Result<Arguments, ErrorCode> {
let args = clap_app!(zvault =>
(version: crate_version!())
(author: crate_authors!(",\n"))
@@ -259,11 +259,13 @@ pub fn parse() -> Arguments {
(@arg exclude: --exclude -e ... +takes_value "exclude this path or file")
(@arg excludes_from: --excludesfrom +takes_value "read the list of exludes from this file")
(@arg no_default_excludes: --nodefaultexcludes "do not load the default excludes file")
(@arg tar: --tar "the source is a tar file")
(@arg SRC: +required "source path to backup")
(@arg BACKUP: +required "repository::backup path")
)
(@subcommand restore =>
(about: "restores a backup (or subpath)")
(@arg tar: --tar "restore in form of a tar file")
(@arg BACKUP: +required "repository::backup[::subpath] path")
(@arg DST: +required "destination path for backup")
)
@@ -365,20 +367,20 @@ pub fn parse() -> Arguments {
)
).get_matches();
if let Some(args) = args.subcommand_matches("init") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
return Arguments::Init {
bundle_size: (parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size") * 1024 * 1024) as usize,
chunker: parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER)),
compression: parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION)),
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
return Ok(Arguments::Init {
bundle_size: (try!(parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size")) * 1024 * 1024) as usize,
chunker: try!(parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER))),
compression: try!(parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION))),
encryption: args.is_present("encryption"),
hash: parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH)),
hash: try!(parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH))),
repo_path: repository.to_string(),
remote_path: args.value_of("remote").unwrap().to_string()
}
})
}
if let Some(args) = args.subcommand_matches("backup") {
let (repository, backup, _inode) = parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), Some(false));
return Arguments::Backup {
let (repository, backup, _inode) = try!(parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), Some(false)));
return Ok(Arguments::Backup {
repo_path: repository.to_string(),
backup_name: backup.unwrap().to_string(),
full: args.is_present("full"),
@@ -387,175 +389,199 @@ pub fn parse() -> Arguments {
excludes_from: args.value_of("excludes_from").map(|v| v.to_string()),
src_path: args.value_of("SRC").unwrap().to_string(),
reference: args.value_of("reference").map(|v| v.to_string()),
no_default_excludes: args.is_present("no_default_excludes")
}
no_default_excludes: args.is_present("no_default_excludes"),
tar: args.is_present("tar")
})
}
if let Some(args) = args.subcommand_matches("restore") {
let (repository, backup, inode) = parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), None);
return Arguments::Restore {
let (repository, backup, inode) = try!(parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), None));
return Ok(Arguments::Restore {
repo_path: repository.to_string(),
backup_name: backup.unwrap().to_string(),
inode: inode.map(|v| v.to_string()),
dst_path: args.value_of("DST").unwrap().to_string()
}
dst_path: args.value_of("DST").unwrap().to_string(),
tar: args.is_present("tar")
})
}
if let Some(args) = args.subcommand_matches("remove") {
let (repository, backup, inode) = parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), None);
return Arguments::Remove {
let (repository, backup, inode) = try!(parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), None));
return Ok(Arguments::Remove {
repo_path: repository.to_string(),
backup_name: backup.unwrap().to_string(),
inode: inode.map(|v| v.to_string())
}
})
}
if let Some(args) = args.subcommand_matches("prune") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
return Arguments::Prune {
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
return Ok(Arguments::Prune {
repo_path: repository.to_string(),
prefix: args.value_of("prefix").unwrap_or("").to_string(),
force: args.is_present("force"),
daily: args.value_of("daily").map(|v| parse_num(v, "daily backups") as usize),
weekly: args.value_of("weekly").map(|v| parse_num(v, "weekly backups") as usize),
monthly: args.value_of("monthly").map(|v| parse_num(v, "monthly backups") as usize),
yearly: args.value_of("yearly").map(|v| parse_num(v, "yearly backups") as usize),
}
daily: match args.value_of("daily") {
None => None,
Some(v) => Some(try!(parse_num(v, "daily backups")) as usize)
},
weekly: match args.value_of("weekly") {
None => None,
Some(v) => Some(try!(parse_num(v, "weekly backups")) as usize)
},
monthly: match args.value_of("monthly") {
None => None,
Some(v) => Some(try!(parse_num(v, "monthly backups")) as usize)
},
yearly: match args.value_of("yearly") {
None => None,
Some(v) => Some(try!(parse_num(v, "yearly backups")) as usize)
}
})
}
if let Some(args) = args.subcommand_matches("vacuum") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
return Arguments::Vacuum {
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
return Ok(Arguments::Vacuum {
repo_path: repository.to_string(),
force: args.is_present("force"),
ratio: parse_num(args.value_of("ratio").unwrap_or(&DEFAULT_VACUUM_RATIO.to_string()), "ratio") as f32 / 100.0
}
ratio: try!(parse_num(args.value_of("ratio").unwrap_or(&DEFAULT_VACUUM_RATIO.to_string()), "ratio")) as f32 / 100.0
})
}
if let Some(args) = args.subcommand_matches("check") {
let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None);
return Arguments::Check {
let (repository, backup, inode) = try!(parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None));
return Ok(Arguments::Check {
repo_path: repository.to_string(),
backup_name: backup.map(|v| v.to_string()),
inode: inode.map(|v| v.to_string()),
full: args.is_present("full")
}
})
}
if let Some(args) = args.subcommand_matches("list") {
let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None);
return Arguments::List {
let (repository, backup, inode) = try!(parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None));
return Ok(Arguments::List {
repo_path: repository.to_string(),
backup_name: backup.map(|v| v.to_string()),
inode: inode.map(|v| v.to_string())
}
})
}
if let Some(args) = args.subcommand_matches("bundlelist") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
return Arguments::BundleList {
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
return Ok(Arguments::BundleList {
repo_path: repository.to_string(),
}
})
}
if let Some(args) = args.subcommand_matches("bundleinfo") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
return Arguments::BundleInfo {
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
return Ok(Arguments::BundleInfo {
repo_path: repository.to_string(),
bundle_id: parse_bundle_id(args.value_of("BUNDLE").unwrap())
}
bundle_id: try!(parse_bundle_id(args.value_of("BUNDLE").unwrap()))
})
}
if let Some(args) = args.subcommand_matches("info") {
let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None);
return Arguments::Info {
let (repository, backup, inode) = try!(parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None));
return Ok(Arguments::Info {
repo_path: repository.to_string(),
backup_name: backup.map(|v| v.to_string()),
inode: inode.map(|v| v.to_string())
}
})
}
if let Some(args) = args.subcommand_matches("mount") {
let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None);
return Arguments::Mount {
let (repository, backup, inode) = try!(parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None));
return Ok(Arguments::Mount {
repo_path: repository.to_string(),
backup_name: backup.map(|v| v.to_string()),
inode: inode.map(|v| v.to_string()),
mount_point: args.value_of("MOUNTPOINT").unwrap().to_string()
}
})
}
if let Some(args) = args.subcommand_matches("versions") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
return Arguments::Versions {
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
return Ok(Arguments::Versions {
repo_path: repository.to_string(),
path: args.value_of("PATH").unwrap().to_string()
}
})
}
if let Some(args) = args.subcommand_matches("diff") {
let (repository_old, backup_old, inode_old) = parse_repo_path(args.value_of("OLD").unwrap(), Some(true), None);
let (repository_new, backup_new, inode_new) = parse_repo_path(args.value_of("NEW").unwrap(), Some(true), None);
return Arguments::Diff {
let (repository_old, backup_old, inode_old) = try!(parse_repo_path(args.value_of("OLD").unwrap(), Some(true), None));
let (repository_new, backup_new, inode_new) = try!(parse_repo_path(args.value_of("NEW").unwrap(), Some(true), None));
return Ok(Arguments::Diff {
repo_path_old: repository_old.to_string(),
backup_name_old: backup_old.unwrap().to_string(),
inode_old: inode_old.map(|v| v.to_string()),
repo_path_new: repository_new.to_string(),
backup_name_new: backup_new.unwrap().to_string(),
inode_new: inode_new.map(|v| v.to_string()),
}
})
}
if let Some(args) = args.subcommand_matches("analyze") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
return Arguments::Analyze {
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
return Ok(Arguments::Analyze {
repo_path: repository.to_string()
}
})
}
if let Some(args) = args.subcommand_matches("import") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
return Arguments::Import {
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
return Ok(Arguments::Import {
repo_path: repository.to_string(),
remote_path: args.value_of("REMOTE").unwrap().to_string(),
key_files: args.values_of("key").map(|v| v.map(|k| k.to_string()).collect()).unwrap_or_else(|| vec![])
}
})
}
if let Some(args) = args.subcommand_matches("config") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
return Arguments::Config {
bundle_size: args.value_of("bundle_size").map(|v| (parse_num(v, "Bundle size") * 1024 * 1024) as usize),
chunker: args.value_of("chunker").map(|v| parse_chunker(v)),
compression: args.value_of("compression").map(|v| parse_compression(v)),
encryption: args.value_of("encryption").map(|v| {
if v == "none" {
None
} else {
Some(parse_public_key(v))
}
}),
hash: args.value_of("hash").map(|v| parse_hash(v)),
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
return Ok(Arguments::Config {
bundle_size: match args.value_of("bundle_size") {
None => None,
Some(v) => Some((try!(parse_num(v, "Bundle size")) * 1024 * 1024) as usize)
},
chunker: match args.value_of("chunker") {
None => None,
Some(v) => Some(try!(parse_chunker(v)))
},
compression: match args.value_of("compression") {
None => None,
Some(v) => Some(try!(parse_compression(v)))
},
encryption: match args.value_of("encryption") {
None => None,
Some("none") => Some(None),
Some(v) => Some(Some(try!(parse_public_key(v))))
},
hash: match args.value_of("hash") {
None => None,
Some(v) => Some(try!(parse_hash(v)))
},
repo_path: repository.to_string(),
}
})
}
if let Some(args) = args.subcommand_matches("genkey") {
return Arguments::GenKey {
return Ok(Arguments::GenKey {
file: args.value_of("FILE").map(|v| v.to_string())
}
})
}
if let Some(args) = args.subcommand_matches("addkey") {
let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
let generate = args.is_present("generate");
if !generate && !args.is_present("FILE") {
println!("Without --generate, a file containing the key pair must be given");
exit(1);
return Err(ErrorCode::InvalidArgs)
}
if generate && args.is_present("FILE") {
println!("With --generate, no file may be given");
exit(1);
return Err(ErrorCode::InvalidArgs)
}
return Arguments::AddKey {
return Ok(Arguments::AddKey {
repo_path: repository.to_string(),
set_default: args.is_present("set_default"),
file: args.value_of("FILE").map(|v| v.to_string())
}
})
}
if let Some(args) = args.subcommand_matches("algotest") {
return Arguments::AlgoTest {
bundle_size: (parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size") * 1024 * 1024) as usize,
chunker: parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER)),
compression: parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION)),
return Ok(Arguments::AlgoTest {
bundle_size: (try!(parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size")) * 1024 * 1024) as usize,
chunker: try!(parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER))),
compression: try!(parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION))),
encrypt: args.is_present("encrypt"),
hash: parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH)),
hash: try!(parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH))),
file: args.value_of("FILE").unwrap().to_string(),
}
})
}
error!("No subcommand given");
exit(1);
Err(ErrorCode::InvalidArgs)
}

272
src/cli/mod.rs

@@ -7,9 +7,7 @@ use ::prelude::*;
use chrono::prelude::*;
use regex::{self, RegexSet};
use std::process::exit;
use std::collections::HashMap;
use std::fmt::Display;
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::env;
@@ -17,6 +15,56 @@ use std::env;
use self::args::Arguments;
pub enum ErrorCode {
UnsafeArgs, InvalidArgs,
InitializeLogger,
CreateRepository,
LoadRepository, SaveBackup, LoadBackup, LoadInode, LoadBundle,
AddKey, LoadKey, SaveKey,
SaveConfig,
LoadExcludes, InvalidExcludes,
BackupRun, RestoreRun, RemoveRun, PruneRun, VacuumRun, CheckRun, AnalyzeRun, DiffRun,
VersionsRun, ImportRun, FuseMount
}
impl ErrorCode {
pub fn code(&self) -> i32 {
match *self {
// Crazy stuff
ErrorCode::InitializeLogger => -1,
ErrorCode::InvalidExcludes => -1,
// Arguments
ErrorCode::InvalidArgs => 1,
ErrorCode::UnsafeArgs => 2,
// Load things
ErrorCode::LoadRepository => 3,
ErrorCode::LoadBackup => 4,
ErrorCode::LoadInode => 5,
ErrorCode::LoadBundle => 6,
ErrorCode::LoadKey => 7,
ErrorCode::LoadExcludes => 8,
// Minor operations
ErrorCode::SaveBackup => 9,
ErrorCode::AddKey => 10,
ErrorCode::SaveKey => 11,
ErrorCode::SaveConfig => 12,
// Main operation
ErrorCode::CreateRepository => 13,
ErrorCode::BackupRun => 14,
ErrorCode::RestoreRun => 15,
ErrorCode::RemoveRun => 16,
ErrorCode::PruneRun => 17,
ErrorCode::VacuumRun => 18,
ErrorCode::CheckRun => 19,
ErrorCode::AnalyzeRun => 20,
ErrorCode::DiffRun => 21,
ErrorCode::VersionsRun => 22,
ErrorCode::ImportRun => 23,
ErrorCode::FuseMount => 24,
}
}
}
pub const DEFAULT_CHUNKER: &'static str = "fastcdc/16";
pub const DEFAULT_HASH: &'static str = "blake2";
pub const DEFAULT_COMPRESSION: &'static str = "brotli/3";
@@ -28,30 +76,31 @@ lazy_static! {
};
}
fn checked<T, E: Display>(result: Result<T, E>, msg: &'static str) -> T {
match result {
Ok(val) => val,
Err(err) => {
error!("Failed to {}\n\tcaused by: {}", msg, err);
exit(3);
macro_rules! checked {
($expr:expr, $msg:expr, $code:expr) => {
match $expr {
Ok(val) => val,
Err(err) => {
error!("Failed to {}\n\tcaused by: {}", $msg, err);
return Err($code)
}
}
}
};
}
fn open_repository(path: &str) -> Repository {
checked(Repository::open(path), "load repository")
fn open_repository(path: &str) -> Result<Repository, ErrorCode> {
Ok(checked!(Repository::open(path), "load repository", ErrorCode::LoadRepository))
}
fn get_backup(repo: &Repository, backup_name: &str) -> Backup {
checked(repo.get_backup(backup_name), "load backup")
fn get_backup(repo: &Repository, backup_name: &str) -> Result<Backup, ErrorCode> {
Ok(checked!(repo.get_backup(backup_name), "load backup", ErrorCode::LoadBackup))
}
fn find_reference_backup(repo: &Repository, path: &str) -> Option<(String, Backup)> {
fn find_reference_backup(repo: &Repository, path: &str) -> Result<Option<(String, Backup)>, ErrorCode> {
let mut matching = Vec::new();
let hostname = match get_hostname() {
Ok(hostname) => hostname,
Err(_) => return None
Err(_) => return Ok(None)
};
let backup_map = match repo.get_backups() {
Ok(backup_map) => backup_map,
@@ -61,7 +110,7 @@ fn find_reference_backup(repo: &Repository, path: &str) -> Option<(String, Backu
},
Err(err) => {
error!("Failed to load backup files: {}", err);
exit(3)
return Err(ErrorCode::LoadBackup)
}
};
for (name, backup) in backup_map {
@@ -70,11 +119,13 @@ fn find_reference_backup(repo: &Repository, path: &str) -> Option<(String, Backu
}
}
matching.sort_by_key(|&(_, ref b)| b.date);
matching.pop()
Ok(matching.pop())
}
fn print_backup(backup: &Backup) {
println!("Modified: {}", backup.modified);
println!("Date: {}", Local.timestamp(backup.date, 0).to_rfc2822());
println!("Source: {}:{}", backup.host, backup.path);
println!("Duration: {}", to_duration(backup.duration));
println!("Entries: {} files, {} dirs", backup.file_count, backup.dir_count);
println!("Total backup size: {}", to_file_size(backup.total_data_size));
@@ -88,9 +139,9 @@ fn print_backup(backup: &Backup) {
pub fn format_inode_one_line(inode: &Inode) -> String {
match inode.file_type {
FileType::Directory => format!("{:25}\t{} entries", format!("{}/", inode.name), inode.children.as_ref().unwrap().len()),
FileType::Directory => format!("{:25}\t{} entries", format!("{}/", inode.name), inode.children.as_ref().map(|c| c.len()).unwrap_or(0)),
FileType::File => format!("{:25}\t{:>10}\t{}", inode.name, to_file_size(inode.size), Local.timestamp(inode.timestamp, 0).to_rfc2822()),
FileType::Symlink => format!("{:25}\t -> {}", inode.name, inode.symlink_target.as_ref().unwrap()),
FileType::Symlink => format!("{:25}\t -> {}", inode.name, inode.symlink_target.as_ref().map(|s| s as &str).unwrap_or("?")),
}
}
@@ -203,41 +254,44 @@ fn print_analysis(analysis: &HashMap<u32, BundleAnalysis>) {
#[allow(unknown_lints,cyclomatic_complexity)]
pub fn run() {
pub fn run() -> Result<(), ErrorCode> {
if let Err(err) = logger::init() {
println!("Failed to initialize the logger: {}", err);
exit(-1)
return Err(ErrorCode::InitializeLogger)
}
match args::parse() {
match try!(args::parse()) {
Arguments::Init{repo_path, bundle_size, chunker, compression, encryption, hash, remote_path} => {
let mut repo = checked(Repository::create(repo_path, Config {
let mut repo = checked!(Repository::create(repo_path, Config {
bundle_size: bundle_size,
chunker: chunker,
compression: compression,
encryption: None,
hash: hash
}, remote_path), "create repository");
}, remote_path), "create repository", ErrorCode::CreateRepository);
if encryption {
let (public, secret) = gen_keypair();
println!("public: {}", to_hex(&public[..]));
println!("secret: {}", to_hex(&secret[..]));
repo.set_encryption(Some(&public));
checked(repo.register_key(public, secret), "add key");
checked(repo.save_config(), "save config");
checked!(repo.register_key(public, secret), "add key", ErrorCode::AddKey);
checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
println!();
}
print_config(&repo.config);
},
Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device, mut excludes, excludes_from, no_default_excludes} => {
let mut repo = open_repository(&repo_path);
Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device, mut excludes, excludes_from, no_default_excludes, tar} => {
let mut repo = try!(open_repository(&repo_path));
let mut reference_backup = None;
if !full {
reference_backup = reference.map(|r| {
let b = get_backup(&repo, &r);
(r, b)
});
if !full && !tar {
reference_backup = match reference {
Some(r) => {
let b = try!(get_backup(&repo, &r));
Some((r, b))
},
None => None
};
if reference_backup.is_none() {
reference_backup = find_reference_backup(&repo, &src_path);
reference_backup = try!(find_reference_backup(&repo, &src_path));
}
if let Some(&(ref name, _)) = reference_backup.as_ref() {
info!("Using backup {} as reference", name);
@@ -246,14 +300,14 @@ pub fn run() {
}
}
let reference_backup = reference_backup.map(|(_, backup)| backup);
if !no_default_excludes {
for line in BufReader::new(checked(File::open(&repo.excludes_path), "open default excludes file")).lines() {
excludes.push(checked(line, "read default excludes file"));
if !no_default_excludes && !tar {
for line in BufReader::new(checked!(File::open(&repo.excludes_path), "open default excludes file", ErrorCode::LoadExcludes)).lines() {
excludes.push(checked!(line, "read default excludes file", ErrorCode::LoadExcludes));
}
}
if let Some(excludes_from) = excludes_from {
for line in BufReader::new(checked(File::open(excludes_from), "open excludes file")).lines() {
excludes.push(checked(line, "read excludes file"));
for line in BufReader::new(checked!(File::open(excludes_from), "open excludes file", ErrorCode::LoadExcludes)).lines() {
excludes.push(checked!(line, "read excludes file", ErrorCode::LoadExcludes));
}
}
let mut excludes_parsed = Vec::with_capacity(excludes.len());
@@ -271,13 +325,18 @@ pub fn run() {
let excludes = if excludes_parsed.is_empty() {
None
} else {
Some(checked(RegexSet::new(excludes_parsed), "parse exclude patterns"))
Some(checked!(RegexSet::new(excludes_parsed), "parse exclude patterns", ErrorCode::InvalidExcludes))
};
let options = BackupOptions {
same_device: same_device,
excludes: excludes
};
let backup = match repo.create_backup_recursively(&src_path, reference_backup.as_ref(), &options) {
let result = if tar {
repo.import_tarfile(&src_path)
} else {
repo.create_backup_recursively(&src_path, reference_backup.as_ref(), &options)
};
let backup = match result {
Ok(backup) => backup,
Err(RepositoryError::Backup(BackupError::FailedPaths(backup, _failed_paths))) => {
warn!("Some files are missing from the backup");
@@ -285,49 +344,53 @@ pub fn run() {
},
Err(err) => {
error!("Backup failed: {}", err);
exit(3)
return Err(ErrorCode::BackupRun)
}
};
checked(repo.save_backup(&backup, &backup_name), "save backup file");
checked!(repo.save_backup(&backup, &backup_name), "save backup file", ErrorCode::SaveBackup);
print_backup(&backup);
},
Arguments::Restore{repo_path, backup_name, inode, dst_path} => {
let mut repo = open_repository(&repo_path);
let backup = get_backup(&repo, &backup_name);
if let Some(inode) = inode {
let inode = checked(repo.get_backup_inode(&backup, &inode), "load subpath inode");
checked(repo.restore_inode_tree(inode, &dst_path), "restore subpath");
Arguments::Restore{repo_path, backup_name, inode, dst_path, tar} => {
let mut repo = try!(open_repository(&repo_path));
let backup = try!(get_backup(&repo, &backup_name));
let inode = if let Some(inode) = inode {
checked!(repo.get_backup_inode(&backup, &inode), "load subpath inode", ErrorCode::LoadInode)
} else {
checked!(repo.get_inode(&backup.root), "load root inode", ErrorCode::LoadInode)
};
if tar {
checked!(repo.export_tarfile(inode, &dst_path), "restore backup", ErrorCode::RestoreRun);
} else {
checked(repo.restore_backup(&backup, &dst_path), "restore backup");
checked!(repo.restore_inode_tree(inode, &dst_path), "restore backup", ErrorCode::RestoreRun);
}
},
Arguments::Remove{repo_path, backup_name, inode} => {
let mut repo = open_repository(&repo_path);
let mut repo = try!(open_repository(&repo_path));
if let Some(inode) = inode {
let mut backup = get_backup(&repo, &backup_name);
checked(repo.remove_backup_path(&mut backup, inode), "remove backup subpath");
checked(repo.save_backup(&backup, &backup_name), "save backup file");
let mut backup = try!(get_backup(&repo, &backup_name));
checked!(repo.remove_backup_path(&mut backup, inode), "remove backup subpath", ErrorCode::RemoveRun);
checked!(repo.save_backup(&backup, &backup_name), "save backup file", ErrorCode::SaveBackup);
info!("The backup subpath has been deleted, run vacuum to reclaim space");
} else {
checked(repo.delete_backup(&backup_name), "delete backup");
checked!(repo.delete_backup(&backup_name), "delete backup", ErrorCode::RemoveRun);
info!("The backup has been deleted, run vacuum to reclaim space");
}
},
Arguments::Prune{repo_path, prefix, daily, weekly, monthly, yearly, force} => {
let repo = open_repository(&repo_path);
let repo = try!(open_repository(&repo_path));
if daily.is_none() && weekly.is_none() && monthly.is_none() && yearly.is_none() {
error!("This would remove all those backups");
exit(1);
return Err(ErrorCode::UnsafeArgs)
}
checked(repo.prune_backups(&prefix, daily, weekly, monthly, yearly, force), "prune backups");
checked!(repo.prune_backups(&prefix, daily, weekly, monthly, yearly, force), "prune backups", ErrorCode::PruneRun);
if !force {
info!("Run with --force to actually execute this command");
}
},
Arguments::Vacuum{repo_path, ratio, force} => {
let mut repo = open_repository(&repo_path);
let mut repo = try!(open_repository(&repo_path));
let info_before = repo.info();
checked(repo.vacuum(ratio, force), "vacuum");
checked!(repo.vacuum(ratio, force), "vacuum", ErrorCode::VacuumRun);
if !force {
info!("Run with --force to actually execute this command");
} else {
@ -336,29 +399,29 @@ pub fn run() {
}
},
Arguments::Check{repo_path, backup_name, inode, full} => {
let mut repo = open_repository(&repo_path);
let mut repo = try!(open_repository(&repo_path));
if let Some(backup_name) = backup_name {
let backup = get_backup(&repo, &backup_name);
let backup = try!(get_backup(&repo, &backup_name));
if let Some(inode) = inode {
let inode = checked(repo.get_backup_inode(&backup, inode), "load subpath inode");
checked(repo.check_inode(&inode), "check inode")
let inode = checked!(repo.get_backup_inode(&backup, inode), "load subpath inode", ErrorCode::LoadInode);
checked!(repo.check_inode(&inode), "check inode", ErrorCode::CheckRun)
} else {
checked(repo.check_backup(&backup), "check backup")
checked!(repo.check_backup(&backup), "check backup", ErrorCode::CheckRun)
}
} else {
checked(repo.check(full), "check repository")
checked!(repo.check(full), "check repository", ErrorCode::CheckRun)
}
info!("Integrity verified")
},
Arguments::List{repo_path, backup_name, inode} => {
let mut repo = open_repository(&repo_path);
let mut repo = try!(open_repository(&repo_path));
if let Some(backup_name) = backup_name {
let backup = get_backup(&repo, &backup_name);
let inode = checked(repo.get_backup_inode(&backup, inode.as_ref().map(|v| v as &str).unwrap_or("/")), "load subpath inode");
let backup = try!(get_backup(&repo, &backup_name));
let inode = checked!(repo.get_backup_inode(&backup, inode.as_ref().map(|v| v as &str).unwrap_or("/")), "load subpath inode", ErrorCode::LoadInode);
println!("{}", format_inode_one_line(&inode));
if let Some(children) = inode.children {
for chunks in children.values() {
let inode = checked(repo.get_inode(chunks), "load child inode");
let inode = checked!(repo.get_inode(chunks), "load child inode", ErrorCode::LoadInode);
println!("- {}", format_inode_one_line(&inode));
}
}
@ -371,18 +434,18 @@ pub fn run() {
},
Err(err) => {
error!("Failed to load backup files: {}", err);
exit(3)
return Err(ErrorCode::LoadBackup)
}
};
print_backups(&backup_map);
}
},
Arguments::Info{repo_path, backup_name, inode} => {
let mut repo = open_repository(&repo_path);
let mut repo = try!(open_repository(&repo_path));
if let Some(backup_name) = backup_name {
let backup = get_backup(&repo, &backup_name);
let backup = try!(get_backup(&repo, &backup_name));
if let Some(inode) = inode {
let inode = checked(repo.get_backup_inode(&backup, inode), "load subpath inode");
let inode = checked!(repo.get_backup_inode(&backup, inode), "load subpath inode", ErrorCode::LoadInode);
print_inode(&inode);
} else {
print_backup(&backup);
@ -392,45 +455,45 @@ pub fn run() {
}
},
Arguments::Mount{repo_path, backup_name, inode, mount_point} => {
let mut repo = open_repository(&repo_path);
let mut repo = try!(open_repository(&repo_path));
let fs = if let Some(backup_name) = backup_name {
let backup = get_backup(&repo, &backup_name);
let backup = try!(get_backup(&repo, &backup_name));
if let Some(inode) = inode {
let inode = checked(repo.get_backup_inode(&backup, inode), "load subpath inode");
checked(FuseFilesystem::from_inode(&mut repo, inode), "create fuse filesystem")
let inode = checked!(repo.get_backup_inode(&backup, inode), "load subpath inode", ErrorCode::LoadInode);
checked!(FuseFilesystem::from_inode(&mut repo, inode), "create fuse filesystem", ErrorCode::FuseMount)
} else {
checked(FuseFilesystem::from_backup(&mut repo, &backup), "create fuse filesystem")
checked!(FuseFilesystem::from_backup(&mut repo, &backup), "create fuse filesystem", ErrorCode::FuseMount)
}
} else {
checked(FuseFilesystem::from_repository(&mut repo), "create fuse filesystem")
checked!(FuseFilesystem::from_repository(&mut repo), "create fuse filesystem", ErrorCode::FuseMount)
};
checked(fs.mount(&mount_point), "mount filesystem");
checked!(fs.mount(&mount_point), "mount filesystem", ErrorCode::FuseMount);
},
Arguments::Analyze{repo_path} => {
let mut repo = open_repository(&repo_path);
print_analysis(&checked(repo.analyze_usage(), "analyze repository"));
let mut repo = try!(open_repository(&repo_path));
print_analysis(&checked!(repo.analyze_usage(), "analyze repository", ErrorCode::AnalyzeRun));
},
Arguments::BundleList{repo_path} => {
let repo = open_repository(&repo_path);
let repo = try!(open_repository(&repo_path));
for bundle in repo.list_bundles() {
print_bundle_one_line(bundle);
}
},
Arguments::BundleInfo{repo_path, bundle_id} => {
let repo = open_repository(&repo_path);
let repo = try!(open_repository(&repo_path));
if let Some(bundle) = repo.get_bundle(&bundle_id) {
print_bundle(bundle);
} else {
error!("No such bundle");
exit(3);
return Err(ErrorCode::LoadBundle)
}
},
Arguments::Import{repo_path, remote_path, key_files} => {
checked(Repository::import(repo_path, remote_path, key_files), "import repository");
checked!(Repository::import(repo_path, remote_path, key_files), "import repository", ErrorCode::ImportRun);
},
Arguments::Versions{repo_path, path} => {
let mut repo = open_repository(&repo_path);
for (name, mut inode) in checked(repo.find_versions(&path), "find versions") {
let mut repo = try!(open_repository(&repo_path));
for (name, mut inode) in checked!(repo.find_versions(&path), "find versions", ErrorCode::VersionsRun) {
inode.name = format!("{}::{}", name, &path);
println!("{}", format_inode_one_line(&inode));
}
@ -438,14 +501,14 @@ pub fn run() {
Arguments::Diff{repo_path_old, backup_name_old, inode_old, repo_path_new, backup_name_new, inode_new} => {
if repo_path_old != repo_path_new {
error!("Can only run diff on same repository");
exit(2)
return Err(ErrorCode::InvalidArgs)
}
let mut repo = open_repository(&repo_path_old);
let backup_old = get_backup(&repo, &backup_name_old);
let backup_new = get_backup(&repo, &backup_name_new);
let inode1 = checked(repo.get_backup_inode(&backup_old, inode_old.unwrap_or_else(|| "/".to_string())), "load subpath inode");
let inode2 = checked(repo.get_backup_inode(&backup_new, inode_new.unwrap_or_else(|| "/".to_string())), "load subpath inode");
let diffs = checked(repo.find_differences(&inode1, &inode2), "find differences");
let mut repo = try!(open_repository(&repo_path_old));
let backup_old = try!(get_backup(&repo, &backup_name_old));
let backup_new = try!(get_backup(&repo, &backup_name_new));
let inode1 = checked!(repo.get_backup_inode(&backup_old, inode_old.unwrap_or_else(|| "/".to_string())), "load subpath inode", ErrorCode::LoadInode);
let inode2 = checked!(repo.get_backup_inode(&backup_new, inode_new.unwrap_or_else(|| "/".to_string())), "load subpath inode", ErrorCode::LoadInode);
let diffs = checked!(repo.find_differences(&inode1, &inode2), "find differences", ErrorCode::DiffRun);
for diff in diffs {
println!("{} {:?}", match diff.0 {
DiffType::Add => "add",
@ -455,7 +518,7 @@ pub fn run() {
}
},
Arguments::Config{repo_path, bundle_size, chunker, compression, encryption, hash} => {
let mut repo = open_repository(&repo_path);
let mut repo = try!(open_repository(&repo_path));
if let Some(bundle_size) = bundle_size {
repo.config.bundle_size = bundle_size
}
@ -473,7 +536,7 @@ pub fn run() {
warn!("Changing the hash makes it impossible to use existing data for deduplication");
repo.config.hash = hash
}
checked(repo.save_config(), "save config");
checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
print_config(&repo.config);
},
Arguments::GenKey{file} => {
@ -481,27 +544,28 @@ pub fn run() {
println!("public: {}", to_hex(&public[..]));
println!("secret: {}", to_hex(&secret[..]));
if let Some(file) = file {
checked(Crypto::save_keypair_to_file(&public, &secret, file), "save key pair");
checked!(Crypto::save_keypair_to_file(&public, &secret, file), "save key pair", ErrorCode::SaveKey);
}
},
Arguments::AddKey{repo_path, set_default, file} => {
let mut repo = open_repository(&repo_path);
let mut repo = try!(open_repository(&repo_path));
let (public, secret) = if let Some(file) = file {
checked(Crypto::load_keypair_from_file(file), "load key pair")
checked!(Crypto::load_keypair_from_file(file), "load key pair", ErrorCode::LoadKey)
} else {
let (public, secret) = gen_keypair();
println!("public: {}", to_hex(&public[..]));
println!("secret: {}", to_hex(&secret[..]));
(public, secret)
};
checked(repo.register_key(public, secret), "add key pair");
checked!(repo.register_key(public, secret), "add key pair", ErrorCode::AddKey);
if set_default {
repo.set_encryption(Some(&public));
checked(repo.save_config(), "save config");
checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
}