mirror of https://github.com/dswd/zvault

commit f5faa89c6a
parent 0b3ab57335

    Verbosity control
TODO.md
@@ -10,7 +10,6 @@
 * Lock the local repository to avoid index corruption

 ## Usability
-* Verbosity control
 * Error: Backup already exists
 * Backup directories as a thing (list, remove)
 * Display backup name and path on backup integrity error
@@ -120,11 +120,15 @@ impl BundleDb {
             for bundle in list {
                 self.local_bundles.insert(bundle.id(), bundle);
             }
+        } else {
+            warn!("Failed to read local bundle cache, rebuilding cache");
         }
         if let Ok(list) = StoredBundle::read_list_from(&self.layout.remote_bundle_cache_path()) {
             for bundle in list {
                 self.remote_bundles.insert(bundle.id(), bundle);
             }
+        } else {
+            warn!("Failed to read remote bundle cache, rebuilding cache");
         }
         let base_path = self.layout.base_path();
         let (new, gone) = try!(load_bundles(&self.layout.local_bundles_path(), base_path, &mut self.local_bundles, self.crypto.clone()));
@@ -2,7 +2,7 @@ use ::prelude::*;
 use super::*;

 use std::path::Path;
-
+use log::LogLevel;
 use clap::{App, AppSettings, Arg, SubCommand};

 pub enum Arguments {
@@ -274,10 +274,11 @@ fn validate_existing_path(val: String) -> Result<(), String> {


 #[allow(unknown_lints,cyclomatic_complexity)]
-pub fn parse() -> Result<Arguments, ErrorCode> {
+pub fn parse() -> Result<(LogLevel, Arguments), ErrorCode> {
     let args = App::new("zvault").version(crate_version!()).author(crate_authors!(",\n")).about(crate_description!())
         .settings(&[AppSettings::VersionlessSubcommands, AppSettings::SubcommandRequiredElseHelp])
         .global_settings(&[AppSettings::AllowMissingPositional, AppSettings::UnifiedHelpMessage, AppSettings::ColoredHelp, AppSettings::ColorAuto])
+        .arg(Arg::from_usage("-v --verbose 'Print more information'").global(true).multiple(true).max_values(3).takes_value(false))
         .subcommand(SubCommand::with_name("init").about("Initialize a new repository")
             .arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE] 'Set the target bundle size in MiB'")
                 .default_value(DEFAULT_BUNDLE_SIZE_STR).validator(validate_num))
@@ -409,10 +410,16 @@ pub fn parse() -> Result<Arguments, ErrorCode> {
                 .default_value(DEFAULT_HASH).validator(validate_hash))
             .arg(Arg::from_usage("<FILE> 'File with test data'")
                 .validator(validate_existing_path))).get_matches();
-    match args.subcommand() {
+    let log_level = match args.subcommand().1.map(|m| m.occurrences_of("verbose")).unwrap_or(0) + args.occurrences_of("verbose") {
+        0 => LogLevel::Warn,
+        1 => LogLevel::Info,
+        2 => LogLevel::Debug,
+        _ => LogLevel::Trace
+    };
+    let args = match args.subcommand() {
         ("init", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), false, Some(false), Some(false)).unwrap();
-            Ok(Arguments::Init {
+            Arguments::Init {
                 bundle_size: (parse_num(args.value_of("bundle_size").unwrap()).unwrap() * 1024 * 1024) as usize,
                 chunker: parse_chunker(args.value_of("chunker").unwrap()).unwrap(),
                 compression: parse_compression(args.value_of("compression").unwrap()).unwrap(),
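Note on the hunk above: the verbose flag is declared as a global option, and the occurrence counts from the top-level matches and the subcommand matches are added before being mapped to a log level. Below is a minimal standalone sketch of just that mapping; the Verbosity enum and level_for function are illustrative stand-ins, not zvault or log-crate APIs.

    // Illustrative sketch only: mirrors the 0/1/2/3+ mapping used above,
    // without depending on clap or the log crate.
    #[derive(Debug, PartialEq)]
    enum Verbosity { Warn, Info, Debug, Trace }

    fn level_for(verbose_occurrences: u64) -> Verbosity {
        match verbose_occurrences {
            0 => Verbosity::Warn,   // default: only warnings and errors
            1 => Verbosity::Info,   // -v
            2 => Verbosity::Debug,  // -vv
            _ => Verbosity::Trace,  // -vvv or more
        }
    }

    fn main() {
        assert_eq!(level_for(0), Verbosity::Warn);
        assert_eq!(level_for(2), Verbosity::Debug);
        println!("-vv maps to {:?}", level_for(2));
    }

With this mapping, no flag keeps the default Warn level, -v selects Info, -vv Debug, and -vvv Trace.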
@@ -420,11 +427,11 @@ pub fn parse() -> Result<Arguments, ErrorCode> {
                 hash: parse_hash(args.value_of("hash").unwrap()).unwrap(),
                 repo_path: repository.to_string(),
                 remote_path: args.value_of("remote").unwrap().to_string()
-            })
+            }
         },
         ("backup", Some(args)) => {
             let (repository, backup, _inode) = parse_repo_path(args.value_of("BACKUP").unwrap(), true, Some(true), Some(false)).unwrap();
-            Ok(Arguments::Backup {
+            Arguments::Backup {
                 repo_path: repository.to_string(),
                 backup_name: backup.unwrap().to_string(),
                 full: args.is_present("full"),
@@ -435,29 +442,29 @@ pub fn parse() -> Result<Arguments, ErrorCode> {
                 reference: args.value_of("reference").map(|v| v.to_string()),
                 no_default_excludes: args.is_present("no_default_excludes"),
                 tar: args.is_present("tar")
-            })
+            }
         },
         ("restore", Some(args)) => {
             let (repository, backup, inode) = parse_repo_path(args.value_of("BACKUP").unwrap(), true, Some(true), None).unwrap();
-            Ok(Arguments::Restore {
+            Arguments::Restore {
                 repo_path: repository.to_string(),
                 backup_name: backup.unwrap().to_string(),
                 inode: inode.map(|v| v.to_string()),
                 dst_path: args.value_of("DST").unwrap().to_string(),
                 tar: args.is_present("tar")
-            })
+            }
         },
         ("remove", Some(args)) => {
             let (repository, backup, inode) = parse_repo_path(args.value_of("BACKUP").unwrap(), true, Some(true), None).unwrap();
-            Ok(Arguments::Remove {
+            Arguments::Remove {
                 repo_path: repository.to_string(),
                 backup_name: backup.unwrap().to_string(),
                 inode: inode.map(|v| v.to_string())
-            })
+            }
         },
         ("prune", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), true, Some(false), Some(false)).unwrap();
-            Ok(Arguments::Prune {
+            Arguments::Prune {
                 repo_path: repository.to_string(),
                 prefix: args.value_of("prefix").unwrap_or("").to_string(),
                 force: args.is_present("force"),
@@ -465,133 +472,134 @@ pub fn parse() -> Result<Arguments, ErrorCode> {
                 weekly: parse_num(args.value_of("weekly").unwrap()).unwrap() as usize,
                 monthly: parse_num(args.value_of("monthly").unwrap()).unwrap() as usize,
                 yearly: parse_num(args.value_of("yearly").unwrap()).unwrap() as usize
-            })
+            }
         },
         ("vacuum", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), true, Some(false), Some(false)).unwrap();
-            Ok(Arguments::Vacuum {
+            Arguments::Vacuum {
                 repo_path: repository.to_string(),
                 force: args.is_present("force"),
                 ratio: parse_num(args.value_of("ratio").unwrap()).unwrap() as f32 / 100.0
-            })
+            }
         },
         ("check", Some(args)) => {
             let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap(), true, None, None).unwrap();
-            Ok(Arguments::Check {
+            Arguments::Check {
                 repo_path: repository.to_string(),
                 backup_name: backup.map(|v| v.to_string()),
                 inode: inode.map(|v| v.to_string()),
                 full: args.is_present("full")
-            })
+            }
         },
         ("list", Some(args)) => {
             let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap(), true, None, None).unwrap();
-            Ok(Arguments::List {
+            Arguments::List {
                 repo_path: repository.to_string(),
                 backup_name: backup.map(|v| v.to_string()),
                 inode: inode.map(|v| v.to_string())
-            })
+            }
         },
         ("bundlelist", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), true, Some(false), Some(false)).unwrap();
-            Ok(Arguments::BundleList {
+            Arguments::BundleList {
                 repo_path: repository.to_string(),
-            })
+            }
         },
         ("bundleinfo", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), true, Some(false), Some(false)).unwrap();
-            Ok(Arguments::BundleInfo {
+            Arguments::BundleInfo {
                 repo_path: repository.to_string(),
                 bundle_id: try!(parse_bundle_id(args.value_of("BUNDLE").unwrap()))
-            })
+            }
         },
         ("info", Some(args)) => {
             let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap(), true, None, None).unwrap();
-            Ok(Arguments::Info {
+            Arguments::Info {
                 repo_path: repository.to_string(),
                 backup_name: backup.map(|v| v.to_string()),
                 inode: inode.map(|v| v.to_string())
-            })
+            }
         },
         ("mount", Some(args)) => {
             let (repository, backup, inode) = parse_repo_path(args.value_of("PATH").unwrap(), true, None, None).unwrap();
-            Ok(Arguments::Mount {
+            Arguments::Mount {
                 repo_path: repository.to_string(),
                 backup_name: backup.map(|v| v.to_string()),
                 inode: inode.map(|v| v.to_string()),
                 mount_point: args.value_of("MOUNTPOINT").unwrap().to_string()
-            })
+            }
         },
         ("versions", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), true, Some(false), Some(false)).unwrap();
-            Ok(Arguments::Versions {
+            Arguments::Versions {
                 repo_path: repository.to_string(),
                 path: args.value_of("PATH").unwrap().to_string()
-            })
+            }
         },
         ("diff", Some(args)) => {
             let (repository_old, backup_old, inode_old) = parse_repo_path(args.value_of("OLD").unwrap(), true, Some(true), None).unwrap();
             let (repository_new, backup_new, inode_new) = parse_repo_path(args.value_of("NEW").unwrap(), true, Some(true), None).unwrap();
-            Ok(Arguments::Diff {
+            Arguments::Diff {
                 repo_path_old: repository_old.to_string(),
                 backup_name_old: backup_old.unwrap().to_string(),
                 inode_old: inode_old.map(|v| v.to_string()),
                 repo_path_new: repository_new.to_string(),
                 backup_name_new: backup_new.unwrap().to_string(),
                 inode_new: inode_new.map(|v| v.to_string()),
-            })
+            }
         },
         ("analyze", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), true, Some(false), Some(false)).unwrap();
-            Ok(Arguments::Analyze {
+            Arguments::Analyze {
                 repo_path: repository.to_string()
-            })
+            }
         },
         ("import", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), false, Some(false), Some(false)).unwrap();
-            Ok(Arguments::Import {
+            Arguments::Import {
                 repo_path: repository.to_string(),
                 remote_path: args.value_of("REMOTE").unwrap().to_string(),
                 key_files: args.values_of("key").map(|v| v.map(|k| k.to_string()).collect()).unwrap_or_else(|| vec![])
-            })
+            }
         },
         ("config", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), true, Some(false), Some(false)).unwrap();
-            Ok(Arguments::Config {
+            Arguments::Config {
                 bundle_size: args.value_of("bundle_size").map(|v| parse_num(v).unwrap() as usize * 1024 * 1024),
                 chunker: args.value_of("chunker").map(|v| parse_chunker(v).unwrap()),
                 compression: args.value_of("compression").map(|v| parse_compression(v).unwrap()),
                 encryption: args.value_of("encryption").map(|v| parse_public_key(v).unwrap()),
                 hash: args.value_of("hash").map(|v| parse_hash(v).unwrap()),
                 repo_path: repository.to_string(),
-            })
+            }
         },
         ("genkey", Some(args)) => {
-            Ok(Arguments::GenKey {
+            Arguments::GenKey {
                 file: args.value_of("FILE").map(|v| v.to_string())
-            })
+            }
         },
         ("addkey", Some(args)) => {
             let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), true, Some(false), Some(false)).unwrap();
-            Ok(Arguments::AddKey {
+            Arguments::AddKey {
                 repo_path: repository.to_string(),
                 set_default: args.is_present("set_default"),
                 file: args.value_of("FILE").map(|v| v.to_string())
-            })
+            }
         },
         ("algotest", Some(args)) => {
-            Ok(Arguments::AlgoTest {
+            Arguments::AlgoTest {
                 bundle_size: (parse_num(args.value_of("bundle_size").unwrap()).unwrap() * 1024 * 1024) as usize,
                 chunker: parse_chunker(args.value_of("chunker").unwrap()).unwrap(),
                 compression: parse_compression(args.value_of("compression").unwrap()).unwrap(),
                 encrypt: args.is_present("encrypt"),
                 hash: parse_hash(args.value_of("hash").unwrap()).unwrap(),
                 file: args.value_of("FILE").unwrap().to_string(),
-            })
+            }
         },
         _ => {
             error!("No subcommand given");
-            Err(ErrorCode::InvalidArgs)
+            return Err(ErrorCode::InvalidArgs)
         }
-    }
+    };
+    Ok((log_level, args))
 }
@@ -1,20 +1,19 @@
-use log::{self, LogRecord, LogLevel, LogMetadata, LogLevelFilter};
+use log::{self, LogRecord, LogLevel, LogMetadata};
 pub use log::SetLoggerError;

 use ansi_term::{Color, Style};


-struct Logger;
+struct Logger(LogLevel);

 impl log::Log for Logger {
     fn enabled(&self, metadata: &LogMetadata) -> bool {
-        metadata.level() <= LogLevel::Info
+        metadata.level() <= self.0
     }

     fn log(&self, record: &LogRecord) {
         if self.enabled(record.metadata()) {
-            let lvl = record.level();
-            match lvl {
+            match record.level() {
                 LogLevel::Error => println!("{}: {}", Color::Red.bold().paint("error"), record.args()),
                 LogLevel::Warn => println!("{}: {}", Color::Yellow.bold().paint("warning"), record.args()),
                 LogLevel::Info => println!("{}: {}", Color::Green.bold().paint("info"), record.args()),
@@ -25,9 +24,9 @@ impl log::Log for Logger {
     }
 }

-pub fn init() -> Result<(), SetLoggerError> {
+pub fn init(level: LogLevel) -> Result<(), SetLoggerError> {
     log::set_logger(|max_log_level| {
-        max_log_level.set(LogLevelFilter::Info);
-        Box::new(Logger)
+        max_log_level.set(level.to_log_level_filter());
+        Box::new(Logger(level))
     })
 }
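Note on the logger change above: the logger now carries the chosen LogLevel, and enabled() compares each record's level against it, so anything above the configured verbosity is dropped before printing; the same level is also passed to max_log_level.set(...), which lets the log macros skip disabled levels cheaply. Below is a self-contained sketch of that filtering idea; Level and ConsoleLogger are hypothetical stand-ins for the log 0.3 LogLevel and Logger types.

    // Illustrative sketch only: a level-carrying logger that filters like
    // Logger(LogLevel) above. Level and ConsoleLogger are made up for this example.
    #[allow(dead_code)] // not every level is used in the demo below
    #[derive(Clone, Copy, PartialEq, PartialOrd, Debug)]
    enum Level { Error, Warn, Info, Debug, Trace } // earlier = more severe

    struct ConsoleLogger(Level);

    impl ConsoleLogger {
        fn enabled(&self, level: Level) -> bool {
            level <= self.0 // same comparison as metadata.level() <= self.0
        }

        fn log(&self, level: Level, msg: &str) {
            if self.enabled(level) {
                println!("{:?}: {}", level, msg);
            }
        }
    }

    fn main() {
        let logger = ConsoleLogger(Level::Info);          // e.g. the level chosen by a single -v
        logger.log(Level::Warn, "printed: Warn <= Info");
        logger.log(Level::Debug, "suppressed: Debug > Info");
    }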
@@ -257,11 +257,12 @@ fn print_analysis(analysis: &HashMap<u32, BundleAnalysis>) {

 #[allow(unknown_lints,cyclomatic_complexity)]
 pub fn run() -> Result<(), ErrorCode> {
-    if let Err(err) = logger::init() {
+    let (log_level, args) = try!(args::parse());
+    if let Err(err) = logger::init(log_level) {
         println!("Failed to initialize the logger: {}", err);
         return Err(ErrorCode::InitializeLogger)
     }
-    match try!(args::parse()) {
+    match args {
         Arguments::Init{repo_path, bundle_size, chunker, compression, encryption, hash, remote_path} => {
             let mut repo = checked!(Repository::create(repo_path, Config {
                 bundle_size: bundle_size,
@@ -112,7 +112,7 @@ impl Repository {
             mark_needed(&backups, &mut keep, daily, |d| (d.year(), d.month(), d.day()));
         }
         let mut remove = Vec::new();
-        info!("Removing the following backups");
+        println!("Removing the following backups");
         for (i, backup) in backups.into_iter().enumerate() {
             if !keep.get(i) {
                 println!(" - {}", backup.0);
@@ -183,7 +183,7 @@ impl Repository {
             let child_inode = match self.create_backup_recurse(&child_path, ref_child.as_ref(), options, backup, failed_paths) {
                 Ok(inode) => inode,
                 Err(RepositoryError::Inode(_)) | Err(RepositoryError::Chunker(_)) | Err(RepositoryError::Io(_)) => {
-                    warn!("Failed to backup {:?}", child_path);
+                    info!("Failed to backup {:?}", child_path);
                     failed_paths.push(child_path);
                     continue
                 },
@@ -106,9 +106,15 @@ impl Repository {
             meta_bundle: None,
             locks: locks
         };
+        if !new.is_empty() {
+            info!("Adding {} new bundles to index", new.len());
+        }
         for bundle in new {
             try!(repo.add_new_remote_bundle(bundle))
         }
+        if !gone.is_empty() {
+            info!("Removing {} old bundles from index", gone.len());
+        }
         for bundle in gone {
             try!(repo.remove_gone_remote_bundle(bundle))
         }
@@ -208,7 +214,7 @@ impl Repository {
     }

     fn add_new_remote_bundle(&mut self, bundle: BundleInfo) -> Result<(), RepositoryError> {
-        info!("Adding new bundle to index: {}", bundle.id);
+        debug!("Adding new bundle to index: {}", bundle.id);
         let bundle_id = match bundle.mode {
             BundleMode::Data => self.next_data_bundle,
             BundleMode::Meta => self.next_meta_bundle
|
||||||
|
|
||||||
fn remove_gone_remote_bundle(&mut self, bundle: BundleInfo) -> Result<(), RepositoryError> {
|
fn remove_gone_remote_bundle(&mut self, bundle: BundleInfo) -> Result<(), RepositoryError> {
|
||||||
if let Some(id) = self.bundle_map.find(&bundle.id) {
|
if let Some(id) = self.bundle_map.find(&bundle.id) {
|
||||||
info!("Removing bundle from index: {}", bundle.id);
|
debug!("Removing bundle from index: {}", bundle.id);
|
||||||
try!(self.bundles.delete_local_bundle(&bundle.id));
|
try!(self.bundles.delete_local_bundle(&bundle.id));
|
||||||
try!(self.index.filter(|_key, data| data.bundle != id));
|
try!(self.index.filter(|_key, data| data.bundle != id));
|
||||||
self.bundle_map.remove(id);
|
self.bundle_map.remove(id);
|
||||||
|
|
|
@@ -87,7 +87,7 @@ impl Repository {
                     inodes.insert(path, (inode, HashSet::new()));
                 },
                 Err(RepositoryError::Inode(_)) | Err(RepositoryError::Chunker(_)) | Err(RepositoryError::Io(_)) => {
-                    warn!("Failed to backup {:?}", path);
+                    info!("Failed to backup {:?}", path);
                     failed_paths.push(path);
                     continue
                 },