Tar files, better return codes, etc.

pull/10/head
Dennis Schwerdel 2017-04-03 15:18:06 +02:00
parent 4145160660
commit 15a01dfbac
14 changed files with 632 additions and 245 deletions

Cargo.lock (generated)
View File

@@ -25,6 +25,7 @@ dependencies = [
  "serde_yaml 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "sodiumoxide 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)",
  "squash-sys 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "time 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -332,6 +333,16 @@ name = "strsim"
 version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "tar"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "tempdir"
 version = "0.3.5"

@@ -427,6 +438,14 @@ name = "winapi-build"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "xattr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "yaml-rust"
 version = "0.3.5"

@@ -477,6 +496,7 @@ dependencies = [
 "checksum sodiumoxide 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "bc02c0bc77ffed8e8eaef004399b825cf4fd8aa02d0af6e473225affd583ff4d"
 "checksum squash-sys 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "db1f9dde91d819b7746e153bc32489fa19e6a106c3d7f2b92187a4efbdc88b40"
 "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
+"checksum tar 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "c2374f318bbe2c5ac6c83dd6240d5f1a73106f72d39b3f7d6f8d8637c7b425d8"
 "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
 "checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
 "checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a"

@@ -491,4 +511,5 @@ dependencies = [
 "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
 "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
 "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+"checksum xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "5f04de8a1346489a2f9e9bd8526b73d135ec554227b17568456e86aa35b6f3fc"
 "checksum yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e66366e18dc58b46801afbf2ca7661a9f59cc8c5962c29892b6039b4f86fa992"

View File

@@ -26,6 +26,7 @@ regex = "0.2"
 fuse = "0.3"
 lazy_static = "0.2"
 rand = "0.3"
+tar = "0.4"
 time = "*"
 libc = "*"

View File

@@ -99,7 +99,6 @@ Recommended: Brotli/2-7
 
 ### Core functionality
 - Recompress & combine bundles
-- Allow to use tar files for backup and restore (--tar, http://alexcrichton.com/tar-rs/tar/index.html)
 - File attributes
   - xattrs https://crates.io/crates/xattr

View File

@@ -1,8 +1,6 @@
 use ::prelude::*;
 use super::*;
 
-use std::process::exit;
-
 pub enum Arguments {
     Init {

@@ -23,13 +21,15 @@ pub enum Arguments {
         same_device: bool,
         excludes: Vec<String>,
         excludes_from: Option<String>,
-        no_default_excludes: bool
+        no_default_excludes: bool,
+        tar: bool
     },
     Restore {
         repo_path: String,
         backup_name: String,
         inode: Option<String>,
-        dst_path: String
+        dst_path: String,
+        tar: bool
     },
     Remove {
         repo_path: String,

@@ -126,7 +126,7 @@
 }
 
-pub fn parse_repo_path(repo_path: &str, backup_restr: Option<bool>, path_restr: Option<bool>) -> (&str, Option<&str>, Option<&str>) {
+pub fn parse_repo_path(repo_path: &str, backup_restr: Option<bool>, path_restr: Option<bool>) -> Result<(&str, Option<&str>, Option<&str>), ErrorCode> {
     let mut parts = repo_path.splitn(3, "::");
     let mut repo = parts.next().unwrap_or(&DEFAULT_REPOSITORY);
     if repo.is_empty() {

@@ -146,93 +146,93 @@ pub fn parse_repo_path(repo_path: &str, backup_restr: Option<bool>, path_restr:
     }
     if let Some(restr) = backup_restr {
         if !restr && backup.is_some() {
-            println!("No backup may be given here");
-            exit(1);
+            error!("No backup may be given here");
+            return Err(ErrorCode::InvalidArgs);
         }
         if restr && backup.is_none() {
-            println!("A backup must be specified");
-            exit(1);
+            error!("A backup must be specified");
+            return Err(ErrorCode::InvalidArgs);
         }
     }
     if let Some(restr) = path_restr {
         if !restr && path.is_some() {
-            println!("No subpath may be given here");
-            exit(1);
+            error!("No subpath may be given here");
+            return Err(ErrorCode::InvalidArgs);
         }
         if restr && path.is_none() {
-            println!("A subpath must be specified");
-            exit(1);
+            error!("A subpath must be specified");
+            return Err(ErrorCode::InvalidArgs);
         }
     }
-    (repo, backup, path)
+    Ok((repo, backup, path))
 }
 
-fn parse_num(num: &str, name: &str) -> u64 {
+fn parse_num(num: &str, name: &str) -> Result<u64, ErrorCode> {
     if let Ok(num) = num.parse::<u64>() {
-        num
+        Ok(num)
     } else {
         error!("{} must be a number, was '{}'", name, num);
-        exit(1);
+        Err(ErrorCode::InvalidArgs)
     }
 }
 
-fn parse_chunker(val: &str) -> ChunkerType {
+fn parse_chunker(val: &str) -> Result<ChunkerType, ErrorCode> {
     if let Ok(chunker) = ChunkerType::from_string(val) {
-        chunker
+        Ok(chunker)
     } else {
         error!("Invalid chunker method/size: {}", val);
-        exit(1);
+        Err(ErrorCode::InvalidArgs)
     }
 }
 
-fn parse_compression(val: &str) -> Option<Compression> {
+fn parse_compression(val: &str) -> Result<Option<Compression>, ErrorCode> {
     if val == "none" {
-        return None
+        return Ok(None)
     }
     if let Ok(compression) = Compression::from_string(val) {
-        Some(compression)
+        Ok(Some(compression))
     } else {
         error!("Invalid compression method/level: {}", val);
-        exit(1);
+        Err(ErrorCode::InvalidArgs)
     }
 }
 
-fn parse_public_key(val: &str) -> PublicKey {
+fn parse_public_key(val: &str) -> Result<PublicKey, ErrorCode> {
     let bytes = match parse_hex(val) {
         Ok(bytes) => bytes,
         Err(_) => {
             error!("Invalid key: {}", val);
-            exit(1);
+            return Err(ErrorCode::InvalidArgs);
         }
     };
     if let Some(key) = PublicKey::from_slice(&bytes) {
-        key
+        Ok(key)
     } else {
         error!("Invalid key: {}", val);
-        exit(1);
+        Err(ErrorCode::InvalidArgs)
     }
 }
 
-fn parse_hash(val: &str) -> HashMethod {
+fn parse_hash(val: &str) -> Result<HashMethod, ErrorCode> {
     if let Ok(hash) = HashMethod::from(val) {
-        hash
+        Ok(hash)
     } else {
         error!("Invalid hash method: {}", val);
-        exit(1);
+        Err(ErrorCode::InvalidArgs)
     }
 }
 
-fn parse_bundle_id(val: &str) -> BundleId {
+fn parse_bundle_id(val: &str) -> Result<BundleId, ErrorCode> {
     if let Ok(hash) = Hash::from_string(val) {
-        BundleId(hash)
+        Ok(BundleId(hash))
     } else {
         error!("Invalid bundle id: {}", val);
-        exit(1);
+        Err(ErrorCode::InvalidArgs)
     }
 }
 
 #[allow(unknown_lints,cyclomatic_complexity)]
-pub fn parse() -> Arguments {
+pub fn parse() -> Result<Arguments, ErrorCode> {
     let args = clap_app!(zvault =>
         (version: crate_version!())
         (author: crate_authors!(",\n"))

@@ -259,11 +259,13 @@ pub fn parse() -> Arguments {
             (@arg exclude: --exclude -e ... +takes_value "exclude this path or file")
             (@arg excludes_from: --excludesfrom +takes_value "read the list of exludes from this file")
             (@arg no_default_excludes: --nodefaultexcludes "do not load the default excludes file")
+            (@arg tar: --tar "the source is a tar file")
             (@arg SRC: +required "source path to backup")
             (@arg BACKUP: +required "repository::backup path")
         )
         (@subcommand restore =>
             (about: "restores a backup (or subpath)")
+            (@arg tar: --tar "restore in form of a tar file")
             (@arg BACKUP: +required "repository::backup[::subpath] path")
             (@arg DST: +required "destination path for backup")
         )

@@ -365,20 +367,20 @@ pub fn parse() -> Arguments {
         )
     ).get_matches();
     if let Some(args) = args.subcommand_matches("init") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
-        return Arguments::Init {
-            bundle_size: (parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size") * 1024 * 1024) as usize,
-            chunker: parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER)),
-            compression: parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION)),
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
+        return Ok(Arguments::Init {
+            bundle_size: (try!(parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size")) * 1024 * 1024) as usize,
+            chunker: try!(parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER))),
+            compression: try!(parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION))),
             encryption: args.is_present("encryption"),
-            hash: parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH)),
+            hash: try!(parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH))),
             repo_path: repository.to_string(),
             remote_path: args.value_of("remote").unwrap().to_string()
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("backup") {
-        let (repository, backup, _inode) = parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), Some(false));
-        return Arguments::Backup {
+        let (repository, backup, _inode) = try!(parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), Some(false)));
+        return Ok(Arguments::Backup {
             repo_path: repository.to_string(),
             backup_name: backup.unwrap().to_string(),
             full: args.is_present("full"),

@@ -387,175 +389,199 @@ pub fn parse() -> Arguments {
             excludes_from: args.value_of("excludes_from").map(|v| v.to_string()),
             src_path: args.value_of("SRC").unwrap().to_string(),
             reference: args.value_of("reference").map(|v| v.to_string()),
-            no_default_excludes: args.is_present("no_default_excludes")
-        }
+            no_default_excludes: args.is_present("no_default_excludes"),
+            tar: args.is_present("tar")
+        })
     }
     if let Some(args) = args.subcommand_matches("restore") {
-        let (repository, backup, inode) = parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), None);
-        return Arguments::Restore {
+        let (repository, backup, inode) = try!(parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), None));
+        return Ok(Arguments::Restore {
             repo_path: repository.to_string(),
             backup_name: backup.unwrap().to_string(),
             inode: inode.map(|v| v.to_string()),
-            dst_path: args.value_of("DST").unwrap().to_string()
-        }
+            dst_path: args.value_of("DST").unwrap().to_string(),
+            tar: args.is_present("tar")
+        })
     }
     if let Some(args) = args.subcommand_matches("remove") {
-        let (repository, backup, inode) = parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), None);
-        return Arguments::Remove {
+        let (repository, backup, inode) = try!(parse_repo_path(args.value_of("BACKUP").unwrap(), Some(true), None));
+        return Ok(Arguments::Remove {
             repo_path: repository.to_string(),
             backup_name: backup.unwrap().to_string(),
             inode: inode.map(|v| v.to_string())
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("prune") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
-        return Arguments::Prune {
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
+        return Ok(Arguments::Prune {
             repo_path: repository.to_string(),
             prefix: args.value_of("prefix").unwrap_or("").to_string(),
             force: args.is_present("force"),
-            daily: args.value_of("daily").map(|v| parse_num(v, "daily backups") as usize),
-            weekly: args.value_of("weekly").map(|v| parse_num(v, "weekly backups") as usize),
-            monthly: args.value_of("monthly").map(|v| parse_num(v, "monthly backups") as usize),
-            yearly: args.value_of("yearly").map(|v| parse_num(v, "yearly backups") as usize),
-        }
+            daily: match args.value_of("daily") {
+                None => None,
+                Some(v) => Some(try!(parse_num(v, "daily backups")) as usize)
+            },
+            weekly: match args.value_of("weekly") {
+                None => None,
+                Some(v) => Some(try!(parse_num(v, "weekly backups")) as usize)
+            },
+            monthly: match args.value_of("monthly") {
+                None => None,
+                Some(v) => Some(try!(parse_num(v, "monthly backups")) as usize)
+            },
+            yearly: match args.value_of("yearly") {
+                None => None,
+                Some(v) => Some(try!(parse_num(v, "yearly backups")) as usize)
+            }
+        })
     }
     if let Some(args) = args.subcommand_matches("vacuum") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
-        return Arguments::Vacuum {
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
+        return Ok(Arguments::Vacuum {
             repo_path: repository.to_string(),
             force: args.is_present("force"),
-            ratio: parse_num(args.value_of("ratio").unwrap_or(&DEFAULT_VACUUM_RATIO.to_string()), "ratio") as f32 / 100.0
-        }
+            ratio: try!(parse_num(args.value_of("ratio").unwrap_or(&DEFAULT_VACUUM_RATIO.to_string()), "ratio")) as f32 / 100.0
+        })
     }
     if let Some(args) = args.subcommand_matches("check") {
-        let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None);
-        return Arguments::Check {
+        let (repository, backup, inode) = try!(parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None));
+        return Ok(Arguments::Check {
             repo_path: repository.to_string(),
             backup_name: backup.map(|v| v.to_string()),
             inode: inode.map(|v| v.to_string()),
             full: args.is_present("full")
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("list") {
-        let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None);
-        return Arguments::List {
+        let (repository, backup, inode) = try!(parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None));
+        return Ok(Arguments::List {
            repo_path: repository.to_string(),
            backup_name: backup.map(|v| v.to_string()),
            inode: inode.map(|v| v.to_string())
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("bundlelist") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
-        return Arguments::BundleList {
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
+        return Ok(Arguments::BundleList {
             repo_path: repository.to_string(),
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("bundleinfo") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
-        return Arguments::BundleInfo {
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
+        return Ok(Arguments::BundleInfo {
             repo_path: repository.to_string(),
-            bundle_id: parse_bundle_id(args.value_of("BUNDLE").unwrap())
-        }
+            bundle_id: try!(parse_bundle_id(args.value_of("BUNDLE").unwrap()))
+        })
     }
     if let Some(args) = args.subcommand_matches("info") {
-        let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None);
-        return Arguments::Info {
+        let (repository, backup, inode) = try!(parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None));
+        return Ok(Arguments::Info {
             repo_path: repository.to_string(),
             backup_name: backup.map(|v| v.to_string()),
             inode: inode.map(|v| v.to_string())
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("mount") {
-        let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None);
-        return Arguments::Mount {
+        let (repository, backup, inode) = try!(parse_repo_path(args.value_of("PATH").unwrap_or(""), None, None));
+        return Ok(Arguments::Mount {
             repo_path: repository.to_string(),
             backup_name: backup.map(|v| v.to_string()),
             inode: inode.map(|v| v.to_string()),
             mount_point: args.value_of("MOUNTPOINT").unwrap().to_string()
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("versions") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
-        return Arguments::Versions {
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
+        return Ok(Arguments::Versions {
             repo_path: repository.to_string(),
             path: args.value_of("PATH").unwrap().to_string()
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("diff") {
-        let (repository_old, backup_old, inode_old) = parse_repo_path(args.value_of("OLD").unwrap(), Some(true), None);
-        let (repository_new, backup_new, inode_new) = parse_repo_path(args.value_of("NEW").unwrap(), Some(true), None);
-        return Arguments::Diff {
+        let (repository_old, backup_old, inode_old) = try!(parse_repo_path(args.value_of("OLD").unwrap(), Some(true), None));
+        let (repository_new, backup_new, inode_new) = try!(parse_repo_path(args.value_of("NEW").unwrap(), Some(true), None));
+        return Ok(Arguments::Diff {
             repo_path_old: repository_old.to_string(),
             backup_name_old: backup_old.unwrap().to_string(),
             inode_old: inode_old.map(|v| v.to_string()),
             repo_path_new: repository_new.to_string(),
             backup_name_new: backup_new.unwrap().to_string(),
             inode_new: inode_new.map(|v| v.to_string()),
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("analyze") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
-        return Arguments::Analyze {
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
+        return Ok(Arguments::Analyze {
             repo_path: repository.to_string()
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("import") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
-        return Arguments::Import {
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
+        return Ok(Arguments::Import {
             repo_path: repository.to_string(),
             remote_path: args.value_of("REMOTE").unwrap().to_string(),
             key_files: args.values_of("key").map(|v| v.map(|k| k.to_string()).collect()).unwrap_or_else(|| vec![])
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("config") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
-        return Arguments::Config {
-            bundle_size: args.value_of("bundle_size").map(|v| (parse_num(v, "Bundle size") * 1024 * 1024) as usize),
-            chunker: args.value_of("chunker").map(|v| parse_chunker(v)),
-            compression: args.value_of("compression").map(|v| parse_compression(v)),
-            encryption: args.value_of("encryption").map(|v| {
-                if v == "none" {
-                    None
-                } else {
-                    Some(parse_public_key(v))
-                }
-            }),
-            hash: args.value_of("hash").map(|v| parse_hash(v)),
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
+        return Ok(Arguments::Config {
+            bundle_size: match args.value_of("bundle_size") {
+                None => None,
+                Some(v) => Some((try!(parse_num(v, "Bundle size")) * 1024 * 1024) as usize)
+            },
+            chunker: match args.value_of("chunker") {
+                None => None,
+                Some(v) => Some(try!(parse_chunker(v)))
+            },
+            compression: match args.value_of("compression") {
+                None => None,
+                Some(v) => Some(try!(parse_compression(v)))
+            },
+            encryption: match args.value_of("encryption") {
+                None => None,
+                Some("none") => Some(None),
+                Some(v) => Some(Some(try!(parse_public_key(v))))
+            },
+            hash: match args.value_of("hash") {
+                None => None,
+                Some(v) => Some(try!(parse_hash(v)))
+            },
             repo_path: repository.to_string(),
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("genkey") {
-        return Arguments::GenKey {
+        return Ok(Arguments::GenKey {
             file: args.value_of("FILE").map(|v| v.to_string())
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("addkey") {
-        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false));
+        let (repository, _backup, _inode) = try!(parse_repo_path(args.value_of("REPO").unwrap_or(""), Some(false), Some(false)));
         let generate = args.is_present("generate");
         if !generate && !args.is_present("FILE") {
             println!("Without --generate, a file containing the key pair must be given");
-            exit(1);
+            return Err(ErrorCode::InvalidArgs)
         }
         if generate && args.is_present("FILE") {
             println!("With --generate, no file may be given");
-            exit(1);
+            return Err(ErrorCode::InvalidArgs)
         }
-        return Arguments::AddKey {
+        return Ok(Arguments::AddKey {
             repo_path: repository.to_string(),
             set_default: args.is_present("set_default"),
             file: args.value_of("FILE").map(|v| v.to_string())
-        }
+        })
     }
     if let Some(args) = args.subcommand_matches("algotest") {
-        return Arguments::AlgoTest {
-            bundle_size: (parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size") * 1024 * 1024) as usize,
-            chunker: parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER)),
-            compression: parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION)),
+        return Ok(Arguments::AlgoTest {
+            bundle_size: (try!(parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size")) * 1024 * 1024) as usize,
+            chunker: try!(parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER))),
+            compression: try!(parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION))),
             encrypt: args.is_present("encrypt"),
-            hash: parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH)),
+            hash: try!(parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH))),
             file: args.value_of("FILE").unwrap().to_string(),
-        }
+        })
     }
     error!("No subcommand given");
-    exit(1);
+    Err(ErrorCode::InvalidArgs)
 }

View File

@@ -7,9 +7,7 @@ use ::prelude::*;
 use chrono::prelude::*;
 use regex::{self, RegexSet};
 
-use std::process::exit;
 use std::collections::HashMap;
-use std::fmt::Display;
 use std::io::{BufReader, BufRead};
 use std::fs::File;
 use std::env;

@@ -17,6 +15,56 @@ use std::env;
 
 use self::args::Arguments;
 
+pub enum ErrorCode {
+    UnsafeArgs, InvalidArgs,
+    InitializeLogger,
+    CreateRepository,
+    LoadRepository, SaveBackup, LoadBackup, LoadInode, LoadBundle,
+    AddKey, LoadKey, SaveKey,
+    SaveConfig,
+    LoadExcludes, InvalidExcludes,
+    BackupRun, RestoreRun, RemoveRun, PruneRun, VacuumRun, CheckRun, AnalyzeRun, DiffRun,
+    VersionsRun, ImportRun, FuseMount
+}
+
+impl ErrorCode {
+    pub fn code(&self) -> i32 {
+        match *self {
+            // Crazy stuff
+            ErrorCode::InitializeLogger => -1,
+            ErrorCode::InvalidExcludes => -1,
+            // Arguments
+            ErrorCode::InvalidArgs => 1,
+            ErrorCode::UnsafeArgs => 2,
+            // Load things
+            ErrorCode::LoadRepository => 3,
+            ErrorCode::LoadBackup => 4,
+            ErrorCode::LoadInode => 5,
+            ErrorCode::LoadBundle => 6,
+            ErrorCode::LoadKey => 7,
+            ErrorCode::LoadExcludes => 8,
+            // Minor operations
+            ErrorCode::SaveBackup => 9,
+            ErrorCode::AddKey => 10,
+            ErrorCode::SaveKey => 11,
+            ErrorCode::SaveConfig => 12,
+            // Main operation
+            ErrorCode::CreateRepository => 13,
+            ErrorCode::BackupRun => 14,
+            ErrorCode::RestoreRun => 15,
+            ErrorCode::RemoveRun => 16,
+            ErrorCode::PruneRun => 17,
+            ErrorCode::VacuumRun => 18,
+            ErrorCode::CheckRun => 19,
+            ErrorCode::AnalyzeRun => 20,
+            ErrorCode::DiffRun => 21,
+            ErrorCode::VersionsRun => 22,
+            ErrorCode::ImportRun => 23,
+            ErrorCode::FuseMount => 24,
+        }
+    }
+}
+
 pub const DEFAULT_CHUNKER: &'static str = "fastcdc/16";
 pub const DEFAULT_HASH: &'static str = "blake2";
 pub const DEFAULT_COMPRESSION: &'static str = "brotli/3";

@@ -28,30 +76,31 @@
     };
 }
 
-fn checked<T, E: Display>(result: Result<T, E>, msg: &'static str) -> T {
-    match result {
-        Ok(val) => val,
-        Err(err) => {
-            error!("Failed to {}\n\tcaused by: {}", msg, err);
-            exit(3);
-        }
-    }
-}
+macro_rules! checked {
+    ($expr:expr, $msg:expr, $code:expr) => {
+        match $expr {
+            Ok(val) => val,
+            Err(err) => {
+                error!("Failed to {}\n\tcaused by: {}", $msg, err);
+                return Err($code)
+            }
+        }
+    };
+}
 
-fn open_repository(path: &str) -> Repository {
-    checked(Repository::open(path), "load repository")
+fn open_repository(path: &str) -> Result<Repository, ErrorCode> {
+    Ok(checked!(Repository::open(path), "load repository", ErrorCode::LoadRepository))
 }
 
-fn get_backup(repo: &Repository, backup_name: &str) -> Backup {
-    checked(repo.get_backup(backup_name), "load backup")
+fn get_backup(repo: &Repository, backup_name: &str) -> Result<Backup, ErrorCode> {
+    Ok(checked!(repo.get_backup(backup_name), "load backup", ErrorCode::LoadBackup))
 }
 
-fn find_reference_backup(repo: &Repository, path: &str) -> Option<(String, Backup)> {
+fn find_reference_backup(repo: &Repository, path: &str) -> Result<Option<(String, Backup)>, ErrorCode> {
     let mut matching = Vec::new();
     let hostname = match get_hostname() {
         Ok(hostname) => hostname,
-        Err(_) => return None
+        Err(_) => return Ok(None)
     };
     let backup_map = match repo.get_backups() {
         Ok(backup_map) => backup_map,

@@ -61,7 +110,7 @@ fn find_reference_backup(repo: &Repository, path: &str) -> Option<(String, Backu
         },
         Err(err) => {
             error!("Failed to load backup files: {}", err);
-            exit(3)
+            return Err(ErrorCode::LoadBackup)
         }
     };
     for (name, backup) in backup_map {

@@ -70,11 +119,13 @@ fn find_reference_backup(repo: &Repository, path: &str) -> Option<(String, Backu
         }
     }
     matching.sort_by_key(|&(_, ref b)| b.date);
-    matching.pop()
+    Ok(matching.pop())
 }
 
 fn print_backup(backup: &Backup) {
+    println!("Modified: {}", backup.modified);
     println!("Date: {}", Local.timestamp(backup.date, 0).to_rfc2822());
+    println!("Source: {}:{}", backup.host, backup.path);
     println!("Duration: {}", to_duration(backup.duration));
     println!("Entries: {} files, {} dirs", backup.file_count, backup.dir_count);
     println!("Total backup size: {}", to_file_size(backup.total_data_size));

@@ -88,9 +139,9 @@ fn print_backup(backup: &Backup) {
 
 pub fn format_inode_one_line(inode: &Inode) -> String {
     match inode.file_type {
-        FileType::Directory => format!("{:25}\t{} entries", format!("{}/", inode.name), inode.children.as_ref().unwrap().len()),
+        FileType::Directory => format!("{:25}\t{} entries", format!("{}/", inode.name), inode.children.as_ref().map(|c| c.len()).unwrap_or(0)),
         FileType::File => format!("{:25}\t{:>10}\t{}", inode.name, to_file_size(inode.size), Local.timestamp(inode.timestamp, 0).to_rfc2822()),
-        FileType::Symlink => format!("{:25}\t -> {}", inode.name, inode.symlink_target.as_ref().unwrap()),
+        FileType::Symlink => format!("{:25}\t -> {}", inode.name, inode.symlink_target.as_ref().map(|s| s as &str).unwrap_or("?")),
     }
 }

@@ -203,41 +254,44 @@ fn print_analysis(analysis: &HashMap<u32, BundleAnalysis>) {
 
 #[allow(unknown_lints,cyclomatic_complexity)]
-pub fn run() {
+pub fn run() -> Result<(), ErrorCode> {
     if let Err(err) = logger::init() {
         println!("Failed to initialize the logger: {}", err);
-        exit(-1)
+        return Err(ErrorCode::InitializeLogger)
     }
-    match args::parse() {
+    match try!(args::parse()) {
         Arguments::Init{repo_path, bundle_size, chunker, compression, encryption, hash, remote_path} => {
-            let mut repo = checked(Repository::create(repo_path, Config {
+            let mut repo = checked!(Repository::create(repo_path, Config {
                 bundle_size: bundle_size,
                 chunker: chunker,
                 compression: compression,
                 encryption: None,
                 hash: hash
-            }, remote_path), "create repository");
+            }, remote_path), "create repository", ErrorCode::CreateRepository);
             if encryption {
                 let (public, secret) = gen_keypair();
                 println!("public: {}", to_hex(&public[..]));
                 println!("secret: {}", to_hex(&secret[..]));
                 repo.set_encryption(Some(&public));
-                checked(repo.register_key(public, secret), "add key");
-                checked(repo.save_config(), "save config");
+                checked!(repo.register_key(public, secret), "add key", ErrorCode::AddKey);
+                checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
                 println!();
             }
             print_config(&repo.config);
         },
-        Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device, mut excludes, excludes_from, no_default_excludes} => {
-            let mut repo = open_repository(&repo_path);
+        Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device, mut excludes, excludes_from, no_default_excludes, tar} => {
+            let mut repo = try!(open_repository(&repo_path));
             let mut reference_backup = None;
-            if !full {
-                reference_backup = reference.map(|r| {
-                    let b = get_backup(&repo, &r);
-                    (r, b)
-                });
+            if !full && !tar {
+                reference_backup = match reference {
+                    Some(r) => {
+                        let b = try!(get_backup(&repo, &r));
+                        Some((r, b))
+                    },
+                    None => None
+                };
                 if reference_backup.is_none() {
-                    reference_backup = find_reference_backup(&repo, &src_path);
+                    reference_backup = try!(find_reference_backup(&repo, &src_path));
                 }
                 if let Some(&(ref name, _)) = reference_backup.as_ref() {
                     info!("Using backup {} as reference", name);

@@ -246,14 +300,14 @@ pub fn run() {
                 }
             }
             let reference_backup = reference_backup.map(|(_, backup)| backup);
-            if !no_default_excludes {
-                for line in BufReader::new(checked(File::open(&repo.excludes_path), "open default excludes file")).lines() {
-                    excludes.push(checked(line, "read default excludes file"));
+            if !no_default_excludes && !tar {
+                for line in BufReader::new(checked!(File::open(&repo.excludes_path), "open default excludes file", ErrorCode::LoadExcludes)).lines() {
+                    excludes.push(checked!(line, "read default excludes file", ErrorCode::LoadExcludes));
                 }
             }
             if let Some(excludes_from) = excludes_from {
-                for line in BufReader::new(checked(File::open(excludes_from), "open excludes file")).lines() {
-                    excludes.push(checked(line, "read excludes file"));
+                for line in BufReader::new(checked!(File::open(excludes_from), "open excludes file", ErrorCode::LoadExcludes)).lines() {
+                    excludes.push(checked!(line, "read excludes file", ErrorCode::LoadExcludes));
                 }
             }
             let mut excludes_parsed = Vec::with_capacity(excludes.len());

@@ -271,13 +325,18 @@ pub fn run() {
             let excludes = if excludes_parsed.is_empty() {
                 None
             } else {
-                Some(checked(RegexSet::new(excludes_parsed), "parse exclude patterns"))
+                Some(checked!(RegexSet::new(excludes_parsed), "parse exclude patterns", ErrorCode::InvalidExcludes))
             };
             let options = BackupOptions {
                 same_device: same_device,
                 excludes: excludes
             };
-            let backup = match repo.create_backup_recursively(&src_path, reference_backup.as_ref(), &options) {
+            let result = if tar {
+                repo.import_tarfile(&src_path)
+            } else {
+                repo.create_backup_recursively(&src_path, reference_backup.as_ref(), &options)
+            };
+            let backup = match result {
                 Ok(backup) => backup,
                 Err(RepositoryError::Backup(BackupError::FailedPaths(backup, _failed_paths))) => {
                     warn!("Some files are missing from the backup");

@@ -285,49 +344,53 @@ pub fn run() {
                 },
                 Err(err) => {
                     error!("Backup failed: {}", err);
-                    exit(3)
+                    return Err(ErrorCode::BackupRun)
                 }
             };
-            checked(repo.save_backup(&backup, &backup_name), "save backup file");
+            checked!(repo.save_backup(&backup, &backup_name), "save backup file", ErrorCode::SaveBackup);
             print_backup(&backup);
         },
-        Arguments::Restore{repo_path, backup_name, inode, dst_path} => {
-            let mut repo = open_repository(&repo_path);
-            let backup = get_backup(&repo, &backup_name);
-            if let Some(inode) = inode {
-                let inode = checked(repo.get_backup_inode(&backup, &inode), "load subpath inode");
-                checked(repo.restore_inode_tree(inode, &dst_path), "restore subpath");
-            } else {
-                checked(repo.restore_backup(&backup, &dst_path), "restore backup");
+        Arguments::Restore{repo_path, backup_name, inode, dst_path, tar} => {
+            let mut repo = try!(open_repository(&repo_path));
+            let backup = try!(get_backup(&repo, &backup_name));
+            let inode = if let Some(inode) = inode {
+                checked!(repo.get_backup_inode(&backup, &inode), "load subpath inode", ErrorCode::LoadInode)
+            } else {
+                checked!(repo.get_inode(&backup.root), "load root inode", ErrorCode::LoadInode)
+            };
+            if tar {
+                checked!(repo.export_tarfile(inode, &dst_path), "restore backup", ErrorCode::RestoreRun);
+            } else {
+                checked!(repo.restore_inode_tree(inode, &dst_path), "restore backup", ErrorCode::RestoreRun);
             }
         },
         Arguments::Remove{repo_path, backup_name, inode} => {
-            let mut repo = open_repository(&repo_path);
+            let mut repo = try!(open_repository(&repo_path));
             if let Some(inode) = inode {
-                let mut backup = get_backup(&repo, &backup_name);
-                checked(repo.remove_backup_path(&mut backup, inode), "remove backup subpath");
-                checked(repo.save_backup(&backup, &backup_name), "save backup file");
+                let mut backup = try!(get_backup(&repo, &backup_name));
+                checked!(repo.remove_backup_path(&mut backup, inode), "remove backup subpath", ErrorCode::RemoveRun);
+                checked!(repo.save_backup(&backup, &backup_name), "save backup file", ErrorCode::SaveBackup);
                 info!("The backup subpath has been deleted, run vacuum to reclaim space");
             } else {
-                checked(repo.delete_backup(&backup_name), "delete backup");
+                checked!(repo.delete_backup(&backup_name), "delete backup", ErrorCode::RemoveRun);
                 info!("The backup has been deleted, run vacuum to reclaim space");
             }
         },
         Arguments::Prune{repo_path, prefix, daily, weekly, monthly, yearly, force} => {
-            let repo = open_repository(&repo_path);
+            let repo = try!(open_repository(&repo_path));
             if daily.is_none() && weekly.is_none() && monthly.is_none() && yearly.is_none() {
                 error!("This would remove all those backups");
-                exit(1);
+                return Err(ErrorCode::UnsafeArgs)
             }
-            checked(repo.prune_backups(&prefix, daily, weekly, monthly, yearly, force), "prune backups");
+            checked!(repo.prune_backups(&prefix, daily, weekly, monthly, yearly, force), "prune backups", ErrorCode::PruneRun);
             if !force {
                 info!("Run with --force to actually execute this command");
             }
         },
         Arguments::Vacuum{repo_path, ratio, force} => {
-            let mut repo = open_repository(&repo_path);
+            let mut repo = try!(open_repository(&repo_path));
             let info_before = repo.info();
-            checked(repo.vacuum(ratio, force), "vacuum");
+            checked!(repo.vacuum(ratio, force), "vacuum", ErrorCode::VacuumRun);
             if !force {
                 info!("Run with --force to actually execute this command");
             } else {

@@ -336,29 +399,29 @@ pub fn run() {
             }
         },
         Arguments::Check{repo_path, backup_name, inode, full} => {
-            let mut repo = open_repository(&repo_path);
+            let mut repo = try!(open_repository(&repo_path));
             if let Some(backup_name) = backup_name {
-                let backup = get_backup(&repo, &backup_name);
+                let backup = try!(get_backup(&repo, &backup_name));
                 if let Some(inode) = inode {
-                    let inode = checked(repo.get_backup_inode(&backup, inode), "load subpath inode");
-                    checked(repo.check_inode(&inode), "check inode")
+                    let inode = checked!(repo.get_backup_inode(&backup, inode), "load subpath inode", ErrorCode::LoadInode);
+                    checked!(repo.check_inode(&inode), "check inode", ErrorCode::CheckRun)
                 } else {
-                    checked(repo.check_backup(&backup), "check backup")
+                    checked!(repo.check_backup(&backup), "check backup", ErrorCode::CheckRun)
                 }
             } else {
-                checked(repo.check(full), "check repository")
+                checked!(repo.check(full), "check repository", ErrorCode::CheckRun)
             }
             info!("Integrity verified")
         },
         Arguments::List{repo_path, backup_name, inode} => {
-            let mut repo = open_repository(&repo_path);
+            let mut repo = try!(open_repository(&repo_path));
             if let Some(backup_name) = backup_name {
-                let backup = get_backup(&repo, &backup_name);
-                let inode = checked(repo.get_backup_inode(&backup, inode.as_ref().map(|v| v as &str).unwrap_or("/")), "load subpath inode");
+                let backup = try!(get_backup(&repo, &backup_name));
+                let inode = checked!(repo.get_backup_inode(&backup, inode.as_ref().map(|v| v as &str).unwrap_or("/")), "load subpath inode", ErrorCode::LoadInode);
                 println!("{}", format_inode_one_line(&inode));
                 if let Some(children) = inode.children {
                     for chunks in children.values() {
-                        let inode = checked(repo.get_inode(chunks), "load child inode");
+                        let inode = checked!(repo.get_inode(chunks), "load child inode", ErrorCode::LoadInode);
                         println!("- {}", format_inode_one_line(&inode));
                     }
                 }

@@ -371,18 +434,18 @@ pub fn run() {
                 },
                 Err(err) => {
                     error!("Failed to load backup files: {}", err);
-                    exit(3)
+                    return Err(ErrorCode::LoadBackup)
                 }
             };
             print_backups(&backup_map);
             }
         },
         Arguments::Info{repo_path, backup_name, inode} => {
-            let mut repo = open_repository(&repo_path);
+            let mut repo = try!(open_repository(&repo_path));
             if let Some(backup_name) = backup_name {
-                let backup = get_backup(&repo, &backup_name);
+                let backup = try!(get_backup(&repo, &backup_name));
                 if let Some(inode) = inode {
-                    let inode = checked(repo.get_backup_inode(&backup, inode), "load subpath inode");
+                    let inode = checked!(repo.get_backup_inode(&backup, inode), "load subpath inode", ErrorCode::LoadInode);
                     print_inode(&inode);
                 } else {
                     print_backup(&backup);

@@ -392,45 +455,45 @@ pub fn run() {
             }
         },
         Arguments::Mount{repo_path, backup_name, inode, mount_point} => {
-            let mut repo = open_repository(&repo_path);
+            let mut repo = try!(open_repository(&repo_path));
             let fs = if let Some(backup_name) = backup_name {
-                let backup = get_backup(&repo, &backup_name);
+                let backup = try!(get_backup(&repo, &backup_name));
                 if let Some(inode) = inode {
-                    let inode = checked(repo.get_backup_inode(&backup, inode), "load subpath inode");
-                    checked(FuseFilesystem::from_inode(&mut repo, inode), "create fuse filesystem")
+                    let inode = checked!(repo.get_backup_inode(&backup, inode), "load subpath inode", ErrorCode::LoadInode);
+                    checked!(FuseFilesystem::from_inode(&mut repo, inode), "create fuse filesystem", ErrorCode::FuseMount)
                 } else {
-                    checked(FuseFilesystem::from_backup(&mut repo, &backup), "create fuse filesystem")
+                    checked!(FuseFilesystem::from_backup(&mut repo, &backup), "create fuse filesystem", ErrorCode::FuseMount)
                 }
             } else {
-                checked(FuseFilesystem::from_repository(&mut repo), "create fuse filesystem")
+                checked!(FuseFilesystem::from_repository(&mut repo), "create fuse filesystem", ErrorCode::FuseMount)
             };
-            checked(fs.mount(&mount_point), "mount filesystem");
+            checked!(fs.mount(&mount_point), "mount filesystem", ErrorCode::FuseMount);
         },
         Arguments::Analyze{repo_path} => {
-            let mut repo = open_repository(&repo_path);
-            print_analysis(&checked(repo.analyze_usage(), "analyze repository"));
+            let mut repo = try!(open_repository(&repo_path));
+            print_analysis(&checked!(repo.analyze_usage(), "analyze repository", ErrorCode::AnalyzeRun));
         },
         Arguments::BundleList{repo_path} => {
-            let repo = open_repository(&repo_path);
+            let repo = try!(open_repository(&repo_path));
             for bundle in repo.list_bundles() {
                 print_bundle_one_line(bundle);
             }
         },
         Arguments::BundleInfo{repo_path, bundle_id} => {
-            let repo = open_repository(&repo_path);
+            let repo = try!(open_repository(&repo_path));
             if let Some(bundle) = repo.get_bundle(&bundle_id) {
                 print_bundle(bundle);
             } else {
                 error!("No such bundle");
-                exit(3);
+                return Err(ErrorCode::LoadBundle)
             }
         },
         Arguments::Import{repo_path, remote_path, key_files} => {
-            checked(Repository::import(repo_path, remote_path, key_files), "import repository");
+            checked!(Repository::import(repo_path, remote_path, key_files), "import repository", ErrorCode::ImportRun);
         },
         Arguments::Versions{repo_path, path} => {
-            let mut repo = open_repository(&repo_path);
-            for (name, mut inode) in checked(repo.find_versions(&path), "find versions") {
+            let mut repo = try!(open_repository(&repo_path));
+            for (name, mut inode) in checked!(repo.find_versions(&path), "find versions", ErrorCode::VersionsRun) {
                 inode.name = format!("{}::{}", name, &path);
                 println!("{}", format_inode_one_line(&inode));
             }

@@ -438,14 +501,14 @@ pub fn run() {
         Arguments::Diff{repo_path_old, backup_name_old, inode_old, repo_path_new, backup_name_new, inode_new} => {
             if repo_path_old != repo_path_new {
                 error!("Can only run diff on same repository");
-                exit(2)
+                return Err(ErrorCode::InvalidArgs)
             }
-            let mut repo = open_repository(&repo_path_old);
-            let backup_old = get_backup(&repo, &backup_name_old);
-            let backup_new = get_backup(&repo, &backup_name_new);
-            let inode1 = checked(repo.get_backup_inode(&backup_old, inode_old.unwrap_or_else(|| "/".to_string())), "load subpath inode");
-            let inode2 = checked(repo.get_backup_inode(&backup_new, inode_new.unwrap_or_else(|| "/".to_string())), "load subpath inode");
-            let diffs = checked(repo.find_differences(&inode1, &inode2), "find differences");
+            let mut repo = try!(open_repository(&repo_path_old));
+            let backup_old = try!(get_backup(&repo, &backup_name_old));
+            let backup_new = try!(get_backup(&repo, &backup_name_new));
+            let inode1 = checked!(repo.get_backup_inode(&backup_old, inode_old.unwrap_or_else(|| "/".to_string())), "load subpath inode", ErrorCode::LoadInode);
+            let inode2 = checked!(repo.get_backup_inode(&backup_new, inode_new.unwrap_or_else(|| "/".to_string())), "load subpath inode", ErrorCode::LoadInode);
+            let diffs = checked!(repo.find_differences(&inode1, &inode2), "find differences", ErrorCode::DiffRun);
             for diff in diffs {
                 println!("{} {:?}", match diff.0 {
                     DiffType::Add => "add",

@@ -455,7 +518,7 @@ pub fn run() {
             }
         },
         Arguments::Config{repo_path, bundle_size, chunker, compression, encryption, hash} => {
-            let mut repo = open_repository(&repo_path);
+            let mut repo = try!(open_repository(&repo_path));
             if let Some(bundle_size) = bundle_size {
                 repo.config.bundle_size = bundle_size
             }

@@ -473,7 +536,7 @@ pub fn run() {
                 warn!("Changing the hash makes it impossible to use existing data for deduplication");
                 repo.config.hash = hash
             }
-            checked(repo.save_config(), "save config");
+            checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
             print_config(&repo.config);
         },
         Arguments::GenKey{file} => {

@@ -481,27 +544,28 @@ pub fn run() {
             println!("public: {}", to_hex(&public[..]));
             println!("secret: {}", to_hex(&secret[..]));
             if let Some(file) = file {
-                checked(Crypto::save_keypair_to_file(&public, &secret, file), "save key pair");
+                checked!(Crypto::save_keypair_to_file(&public, &secret, file), "save key pair", ErrorCode::SaveKey);
             }
         },
         Arguments::AddKey{repo_path, set_default, file} => {
-            let mut repo = open_repository(&repo_path);
+            let mut repo = try!(open_repository(&repo_path));
             let (public, secret) = if let Some(file) = file {
-                checked(Crypto::load_keypair_from_file(file), "load key pair")
+                checked!(Crypto::load_keypair_from_file(file), "load key pair", ErrorCode::LoadKey)
            } else {
                 let (public, secret) = gen_keypair();
                 println!("public: {}", to_hex(&public[..]));
                 println!("secret: {}", to_hex(&secret[..]));
                 (public, secret)
             };
-            checked(repo.register_key(public, secret), "add key pair");
+            checked!(repo.register_key(public, secret), "add key pair", ErrorCode::AddKey);
             if set_default {
                 repo.set_encryption(Some(&public));
-                checked(repo.save_config(), "save config");
+                checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
             }
         },
         Arguments::AlgoTest{bundle_size, chunker, compression, encrypt, hash, file} => {
             algotest::run(&file, bundle_size, chunker, compression, encrypt, hash);
         }
     }
+    Ok(())
 }
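
Taken together, checked! and ErrorCode give every failure path a distinct process exit status instead of a hard-coded exit() call. A minimal self-contained sketch of the pattern (simplified names; eprintln! stands in for the error! log macro, and the config path is hypothetical):

    use std::process::exit;

    enum ErrorCode { LoadRepository }

    impl ErrorCode {
        fn code(&self) -> i32 {
            match *self { ErrorCode::LoadRepository => 3 }
        }
    }

    // Same shape as checked! above: unwrap an Ok value, or log the
    // error and return the given code from the enclosing function.
    macro_rules! checked {
        ($expr:expr, $msg:expr, $code:expr) => {
            match $expr {
                Ok(val) => val,
                Err(err) => {
                    eprintln!("Failed to {}\n\tcaused by: {}", $msg, err);
                    return Err($code)
                }
            }
        };
    }

    fn run() -> Result<(), ErrorCode> {
        let config = checked!(std::fs::read_to_string("repo/config.yaml"), // hypothetical path
                              "load repository", ErrorCode::LoadRepository);
        println!("{} bytes of config", config.len());
        Ok(())
    }

    fn main() {
        // As in main.rs below: the error code becomes the exit status.
        match run() {
            Ok(()) => exit(0),
            Err(code) => exit(code.code()),
        }
    }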

View File

@@ -22,6 +22,7 @@ extern crate fuse;
 extern crate rand;
 extern crate time;
 extern crate libc;
+extern crate tar;
 
 pub mod util;
 mod bundledb;

@@ -32,7 +33,11 @@ mod cli;
 mod prelude;
 mod mount;
 
+use std::process::exit;
+
 fn main() {
-    cli::run();
+    match cli::run() {
+        Ok(()) => exit(0),
+        Err(code) => exit(code.code())
+    }
 }

View File

@@ -1,7 +1,7 @@
 pub use ::util::*;
 pub use ::bundledb::{BundleReader, BundleMode, BundleWriter, BundleInfo, BundleId, BundleDbError, BundleDb, BundleWriterError};
 pub use ::chunker::{ChunkerType, Chunker, ChunkerStatus, IChunker, ChunkerError};
-pub use ::repository::{Repository, Backup, Config, RepositoryError, RepositoryInfo, Inode, FileType, RepositoryIntegrityError, BackupFileError, BackupError, BackupOptions, BundleAnalysis, FileData, DiffType};
+pub use ::repository::{Repository, Backup, Config, RepositoryError, RepositoryInfo, Inode, FileType, RepositoryIntegrityError, BackupFileError, BackupError, BackupOptions, BundleAnalysis, FileData, DiffType, InodeError};
 pub use ::index::{Index, Location, IndexError};
 pub use ::mount::FuseFilesystem;


@@ -143,14 +143,6 @@ impl Repository {
        Ok(())
    }

    #[inline]
    pub fn restore_backup<P: AsRef<Path>>(&mut self, backup: &Backup, path: P) -> Result<(), RepositoryError> {
        let _lock = try!(self.lock(false));
        let inode = try!(self.get_inode(&backup.root));
        self.restore_inode_tree(inode, path)
    }

    pub fn create_backup_recurse<P: AsRef<Path>>(
        &mut self,
        path: P,
@@ -265,6 +257,7 @@ impl Repository {
            last_inode_name = inode.name;
        }
        backup.root = last_inode_chunks;
        backup.modified = true;
        Ok(())
    }


@@ -88,6 +88,7 @@ pub struct Backup {
    pub host: String,
    pub path: String,
    pub config: Config,
    pub modified: bool
}
serde_impl!(Backup(u8?) {
    root: ChunkList => 0,
@@ -104,7 +105,8 @@ serde_impl!(Backup(u8?) {
    dir_count: usize => 11,
    host: String => 12,
    path: String => 13,
    config: Config => 14,
    modified: bool => 15
});

impl Backup {


@@ -1,7 +1,56 @@
use ::prelude::*;

use std::mem;
use std::cmp::min;
use std::collections::VecDeque;
use std::io::{self, Read, Write, Cursor};
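
/// Streams the contents of a chunk list as one continuous `Read`,
/// fetching each chunk from the repository on demand.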
pub struct ChunkReader<'a> {
    chunks: VecDeque<Chunk>,
    data: Vec<u8>,
    pos: usize,
    repo: &'a mut Repository
}

impl<'a> ChunkReader<'a> {
    pub fn new(repo: &'a mut Repository, chunks: ChunkList) -> Self {
        ChunkReader {
            repo: repo,
            chunks: chunks.into_inner().into(),
            data: vec![],
            pos: 0
        }
    }
}

impl<'a> Read for ChunkReader<'a> {
    fn read(&mut self, mut buf: &mut [u8]) -> Result<usize, io::Error> {
        let mut bpos = 0;
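        // Fill `buf` until it is full or all chunks are exhausted, pulling in
        // the next chunk whenever the current one has been consumed.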
        loop {
            if buf.len() == bpos {
                break
            }
            if self.data.len() == self.pos {
                if let Some(chunk) = self.chunks.pop_front() {
                    self.data = match self.repo.get_chunk(chunk.0) {
                        Ok(Some(data)) => data,
                        Ok(None) => return Err(io::Error::new(io::ErrorKind::Other, RepositoryIntegrityError::MissingChunk(chunk.0))),
                        Err(err) => return Err(io::Error::new(io::ErrorKind::Other, err))
                    };
                    self.pos = 0;
                } else {
                    break
                }
            }
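            // Copy as much of the current chunk as fits into the remaining buffer space.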
            let l = min(self.data.len() - self.pos, buf.len() - bpos);
            buf[bpos..bpos+l].copy_from_slice(&self.data[self.pos..self.pos+l]);
            bpos += l;
            self.pos += l;
        }
        Ok(bpos)
    }
}
impl Repository {
@@ -114,6 +163,11 @@ impl Repository {
        Ok(data)
    }
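
    /// Returns a `ChunkReader` that streams the contents of the given chunk
    /// list as one continuous `Read`.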
    #[inline]
    pub fn get_reader(&mut self, chunks: ChunkList) -> ChunkReader {
        ChunkReader::new(self, chunks)
    }

    #[inline]
    pub fn get_stream<W: Write>(&mut self, chunks: &[Chunk], w: &mut W) -> Result<(), RepositoryError> {
        for &(ref hash, len) in chunks {


@@ -8,6 +8,7 @@ mod backup;
mod error;
mod vacuum;
mod backup_file;
mod tarfile;

use ::prelude::*;
@@ -21,7 +22,7 @@ use std::io::Write;
pub use self::error::RepositoryError;
pub use self::config::Config;
pub use self::metadata::{Inode, FileType, FileData, InodeError};
pub use self::backup::{BackupError, BackupOptions, DiffType};
pub use self::backup_file::{Backup, BackupFileError};
pub use self::integrity::RepositoryIntegrityError;
@@ -30,7 +31,7 @@ use self::bundle_map::BundleMap;

const REPOSITORY_README: &'static [u8] = include_bytes!("../../docs/repository_readme.md");
const DEFAULT_EXCLUDES: &'static [u8] = include_bytes!("../../docs/excludes.default");

pub struct Repository {
@@ -243,6 +244,7 @@ impl Repository {
    }
}

impl Drop for Repository {
    fn drop(&mut self) {
        self.flush().expect("Failed to write last bundles")

src/repository/tarfile.rs (new file, 220 lines)

@@ -0,0 +1,220 @@
use ::prelude::*;

use std::collections::{HashMap, HashSet, BTreeMap};
use std::path::{Path, PathBuf};
use std::io::{Read, Cursor};
use std::fs::File;

use chrono::prelude::*;
use tar;
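
/// Builds an in-memory `Inode` from a tar entry header without reading the
/// entry's data. Hard links and continuous files are treated as regular files.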
fn inode_from_entry<R: Read>(entry: &mut tar::Entry<R>) -> Result<Inode, RepositoryError> {
    let path = try!(entry.path());
    let header = entry.header();
    let file_type = match header.entry_type() {
        tar::EntryType::Regular | tar::EntryType::Link | tar::EntryType::Continuous => FileType::File,
        tar::EntryType::Symlink => FileType::Symlink,
        tar::EntryType::Directory => FileType::Directory,
        _ => return Err(InodeError::UnsupportedFiletype(path.to_path_buf()).into())
    };
    let mut inode = Inode {
        file_type: file_type,
        name: path.file_name().map(|s| s.to_string_lossy().to_string()).unwrap_or_else(|| "/".to_string()),
        symlink_target: try!(entry.link_name()).map(|s| s.to_string_lossy().to_string()),
        size: try!(header.size()),
        mode: try!(header.mode()),
        user: try!(header.uid()),
        group: try!(header.gid()),
        timestamp: try!(header.mtime()) as i64,
        ..Default::default()
    };
    if inode.file_type == FileType::Directory {
        inode.children = Some(BTreeMap::new());
    }
    Ok(inode)
}
impl Repository {
    fn import_tar_entry<R: Read>(&mut self, entry: &mut tar::Entry<R>) -> Result<Inode, RepositoryError> {
        let mut inode = try!(inode_from_entry(entry));
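        // Tiny files are stored inline in the inode itself.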
        if inode.size < 100 {
            let mut data = Vec::with_capacity(inode.size as usize);
            try!(entry.read_to_end(&mut data));
            inode.data = Some(FileData::Inline(data.into()));
        } else {
            let mut chunks = try!(self.put_stream(BundleMode::Data, entry));
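            // A short chunk list is kept directly in the inode; a long one is
            // serialized and stored as meta data that the inode references.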
            if chunks.len() < 10 {
                inode.data = Some(FileData::ChunkedDirect(chunks));
            } else {
                let mut chunk_data = Vec::with_capacity(chunks.encoded_size());
                chunks.write_to(&mut chunk_data).unwrap();
                chunks = try!(self.put_data(BundleMode::Meta, &chunk_data));
                inode.data = Some(FileData::ChunkedIndirect(chunks));
            }
        }
        Ok(inode)
    }
    fn import_tarfile_as_inode<P: AsRef<Path>>(&mut self, tarfile: P, failed_paths: &mut Vec<PathBuf>) -> Result<(Inode, ChunkList), RepositoryError> {
        let mut tarfile = tar::Archive::new(try!(File::open(tarfile)));
        // Step 1: create inodes for all entries
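        // Each entry also tracks the names of its not-yet-saved children, so
        // that directories can be written out bottom-up in step 2.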
        let mut inodes = HashMap::<PathBuf, (Inode, HashSet<String>)>::new();
        for entry in try!(tarfile.entries()) {
            let mut entry = try!(entry);
            let path = try!(entry.path()).to_path_buf();
            match self.import_tar_entry(&mut entry) {
                Ok(mut inode) => {
                    inode.cum_size = inode.size + 1000;
                    if inode.file_type == FileType::Directory {
                        inode.cum_dirs = 1;
                    } else {
                        inode.cum_files = 1;
                    }
                    if let Some(parent_path) = path.parent() {
                        if let Some(&mut (_, ref mut children)) = inodes.get_mut(parent_path) {
                            children.insert(inode.name.clone());
                        }
                    }
                    inodes.insert(path, (inode, HashSet::new()));
                },
                Err(_) => {
                    warn!("Failed to backup {:?}", path);
                    failed_paths.push(path);
                    continue
                }
            }
        }
        // Step 2: save all inodes
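        // Repeatedly save all inodes whose children are already saved, linking
        // each one into its parent and accumulating the cumulative size/file/dir
        // counters. Inodes without a parent in the archive become roots.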
        let mut roots = vec![];
        while !inodes.is_empty() {
            let mut childless = vec![];
            for (path, &(_, ref children)) in &inodes {
                if children.is_empty() {
                    childless.push(path.clone());
                }
            }
            for path in childless {
                let (inode, _) = inodes.remove(&path).unwrap();
                let chunks = try!(self.put_inode(&inode));
                if let Some(parent_path) = path.parent() {
                    if let Some(&mut (ref mut parent_inode, ref mut children)) = inodes.get_mut(parent_path) {
                        children.remove(&inode.name);
                        parent_inode.children.as_mut().unwrap().insert(inode.name.clone(), chunks);
                        parent_inode.cum_size += inode.cum_size;
                        parent_inode.cum_files += inode.cum_files;
                        parent_inode.cum_dirs += inode.cum_dirs;
                        continue
                    }
                }
                roots.push((inode, chunks));
            }
        }
        let mut root_inode;
        let chunks;
        if roots.len() == 1 {
            let r = roots.pop().unwrap();
            root_inode = r.0;
            chunks = r.1;
        } else {
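            // Multiple top-level entries: synthesize a directory inode named
            // "archive" to serve as the single backup root.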
warn!("Tar file contains multiple roots, adding dummy folder");
root_inode = Inode {
file_type: FileType::Directory,
mode: 0o755,
name: "archive".to_string(),
cum_size: 1000,
cum_files: 0,
cum_dirs: 1,
..Default::default()
};
let mut children = BTreeMap::new();
for (inode, chunks) in roots {
children.insert(inode.name, chunks);
root_inode.cum_size += inode.cum_size;
root_inode.cum_files += inode.cum_files;
root_inode.cum_dirs += inode.cum_dirs;
}
root_inode.children = Some(children);
chunks = try!(self.put_inode(&root_inode));
}
Ok((root_inode, chunks))
}
    pub fn import_tarfile<P: AsRef<Path>>(&mut self, tarfile: P) -> Result<Backup, RepositoryError> {
        let _lock = try!(self.lock(false));
        let mut backup = Backup::default();
        backup.config = self.config.clone();
        backup.host = get_hostname().unwrap_or_else(|_| "".to_string());
        backup.path = tarfile.as_ref().to_string_lossy().to_string();
        let info_before = self.info();
        let start = Local::now();
        let mut failed_paths = vec![];
        let (root_inode, chunks) = try!(self.import_tarfile_as_inode(tarfile, &mut failed_paths));
        backup.root = chunks;
        try!(self.flush());
        let elapsed = Local::now().signed_duration_since(start);
        backup.date = start.timestamp();
        backup.total_data_size = root_inode.cum_size;
        backup.file_count = root_inode.cum_files;
        backup.dir_count = root_inode.cum_dirs;
        backup.duration = elapsed.num_milliseconds() as f32 / 1_000.0;
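        // Deduplication statistics are derived from the difference in
        // repository info before and after the import.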
        let info_after = self.info();
        backup.deduplicated_data_size = info_after.raw_data_size - info_before.raw_data_size;
        backup.encoded_data_size = info_after.encoded_data_size - info_before.encoded_data_size;
        backup.bundle_count = info_after.bundle_count - info_before.bundle_count;
        backup.chunk_count = info_after.chunk_count - info_before.chunk_count;
        backup.avg_chunk_size = backup.deduplicated_data_size as f32 / backup.chunk_count as f32;
        if failed_paths.is_empty() {
            Ok(backup)
        } else {
            Err(BackupError::FailedPaths(backup, failed_paths).into())
        }
    }
    fn export_tarfile_recurse(&mut self, path: &Path, inode: Inode, tarfile: &mut tar::Builder<File>) -> Result<(), RepositoryError> {
        let mut header = tar::Header::new_gnu();
        header.set_size(inode.size);
        let path = path.join(inode.name);
        try!(header.set_path(&path));
        if let Some(target) = inode.symlink_target {
            try!(header.set_link_name(target));
        }
        header.set_mode(inode.mode);
        header.set_uid(inode.user);
        header.set_gid(inode.group);
        header.set_mtime(inode.timestamp as u64);
        header.set_entry_type(match inode.file_type {
            FileType::File => tar::EntryType::Regular,
            FileType::Symlink => tar::EntryType::Symlink,
            FileType::Directory => tar::EntryType::Directory
        });
        header.set_cksum();
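        // Append the file contents according to how they were stored: inline
        // bytes, a direct chunk list, or an indirect chunk list that has to be
        // fetched and decoded first.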
        match inode.data {
            None => try!(tarfile.append(&header, Cursor::new(&[]))),
            Some(FileData::Inline(data)) => try!(tarfile.append(&header, Cursor::new(data))),
            Some(FileData::ChunkedDirect(chunks)) => try!(tarfile.append(&header, self.get_reader(chunks))),
            Some(FileData::ChunkedIndirect(chunks)) => {
                let chunks = ChunkList::read_from(&try!(self.get_data(&chunks)));
                try!(tarfile.append(&header, self.get_reader(chunks)))
            }
        }
        if let Some(children) = inode.children {
            for chunks in children.values() {
                let inode = try!(self.get_inode(chunks));
                try!(self.export_tarfile_recurse(&path, inode, tarfile));
            }
        }
        Ok(())
    }
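
    /// Writes the given inode tree into a fresh tar archive at `tarfile`.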
    pub fn export_tarfile<P: AsRef<Path>>(&mut self, inode: Inode, tarfile: P) -> Result<(), RepositoryError> {
        let mut tarfile = tar::Builder::new(try!(File::create(tarfile)));
        try!(self.export_tarfile_recurse(Path::new(""), inode, &mut tarfile));
        try!(tarfile.finish());
        Ok(())
    }
}


@@ -54,9 +54,9 @@ impl Repository {
        }
        try!(self.flush());
        info!("Checking index");
        self.index.walk::<_, ()>(|hash, location| {
            if rewrite_bundles.contains(&location.bundle) {
                panic!("Removed bundle is still referenced in index: hash:{}, bundle:{}, chunk:{}", hash, location.bundle, location.chunk);
            }
            Ok(())
        }).ok();