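// zvault: a deduplicating backup tool. External crates: serde/rmp_serde/serde_yaml
// for (de)serialization, squash_sys for compression, blake2_rfc and murmurhash3
// for hashing, and docopt + rustc_serialize for command-line parsing.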
extern crate serde;
extern crate rmp_serde;
#[macro_use] extern crate serde_utils;
extern crate squash_sys as squash;
extern crate mmap;
extern crate blake2_rfc as blake2;
extern crate murmurhash3;
extern crate serde_yaml;
#[macro_use] extern crate quick_error;
extern crate docopt;
extern crate rustc_serialize;
extern crate chrono;
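
// Module declarations; only util, bundle and index are part of the public API.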
pub mod util;
pub mod bundle;
pub mod index;
mod chunker;
mod repository;
mod algotest;
use docopt::Docopt;
use chrono::prelude::*;
use chunker::ChunkerType;
use repository::{Repository, Config, Inode};
use util::{ChecksumType, Compression, HashMethod, to_file_size, to_duration};
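
// The command-line interface; docopt parses argv against this usage string.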
static USAGE: &'static str = "
Usage:
    zvault init [--bundle-size SIZE] [--chunker METHOD] [--chunk-size SIZE] [--compression COMPRESSION] <repo>
    zvault backup [--full] <backup> <path>
    zvault restore <backup> <path>
    zvault check [--full] <repo>
    zvault backups <repo>
    zvault info <backup>
    zvault list <backup> <path>
    zvault stats <repo>
    zvault bundles <repo>
    zvault algotest <path>
    zvault stat <path>

Options:
    --full                      Whether to verify the repository by loading all bundles
    --bundle-size SIZE          The target size of a full bundle in MiB [default: 25]
    --chunker METHOD            The chunking algorithm to use [default: fastcdc]
    --chunk-size SIZE           The target average chunk size in KiB [default: 8]
    --compression COMPRESSION   The compression to use [default: brotli/3]
";
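
// Parsed arguments; docopt maps subcommands to cmd_*, positionals to arg_*
// and options to flag_* fields.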
#[derive(RustcDecodable, Debug)]
struct Args {
    cmd_init: bool,
    cmd_backup: bool,
    cmd_restore: bool,
    cmd_check: bool,
    cmd_backups: bool,
    cmd_info: bool,
    cmd_list: bool,
    cmd_stats: bool,
    cmd_bundles: bool,
    cmd_algotest: bool,
    cmd_stat: bool,
    arg_repo: Option<String>,
    arg_path: Option<String>,
    arg_backup: Option<String>,
    flag_full: bool,
    flag_bundle_size: usize,
    flag_chunker: String,
    flag_chunk_size: usize,
    flag_compression: String
}
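
// Entry point: parse the arguments, then dispatch on the selected subcommand.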
fn main() {
    let args: Args = Docopt::new(USAGE).and_then(|d| d.decode()).unwrap_or_else(|e| e.exit());
    //println!("{:?}", args);
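
    // The algorithm test works on a plain file and needs no repository.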
    if args.cmd_algotest {
        algotest::run(&args.arg_path.unwrap());
        return
    }
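
    // Create an empty repository with the configured chunker, bundle size and compression.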
    if args.cmd_init {
        let chunker = ChunkerType::from(&args.flag_chunker, args.flag_chunk_size*1024, 0).expect("No such chunk algorithm");
        let compression = if args.flag_compression == "none" {
            None
        } else {
            Some(Compression::from_string(&args.flag_compression).expect("Failed to parse compression"))
        };
        Repository::create(&args.arg_repo.unwrap(), Config {
            bundle_size: args.flag_bundle_size*1024*1024,
            checksum: ChecksumType::Blake2_256,
            chunker: chunker,
            compression: compression,
            hash: HashMethod::Blake2
        }).unwrap();
        return
    }
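
    // Print the filesystem metadata of a local path as the internal Inode type.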
    if args.cmd_stat {
        println!("{:?}", Inode::get_from(&args.arg_path.unwrap()).unwrap());
        return
    }
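
    // All remaining commands need an open repository; its path is either given
    // directly as <repo> or taken from the <repo>::<name> backup specifier.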
    let mut repo;
    if let Some(path) = args.arg_repo {
        repo = Repository::open(path).unwrap();
    } else if let Some(ref backup) = args.arg_backup {
        let path = backup.splitn(2, "::").next().unwrap();
        repo = Repository::open(path).unwrap();
    } else {
        panic!("Repository is needed");
    }
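
    // Verify the repository integrity; with --full, all bundles are loaded and checked.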
    if args.cmd_check {
        repo.check(args.flag_full).unwrap();
        return
    }
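
    // Print aggregate statistics about bundles, chunks and the index.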
    if args.cmd_stats {
        let info = repo.info();
        println!("Bundles: {}", info.bundle_count);
        println!("Total size: {}", to_file_size(info.encoded_data_size));
        println!("Uncompressed size: {}", to_file_size(info.raw_data_size));
        println!("Compression ratio: {:.1}%", info.compression_ratio * 100.0);
        println!("Chunk count: {}", info.chunk_count);
        println!("Average chunk size: {}", to_file_size(info.avg_chunk_size as u64));
        let index_usage = info.index_entries as f32 / info.index_capacity as f32;
        println!("Index: {}, {:.0}% full", to_file_size(info.index_size as u64), index_usage * 100.0);
        return
    }
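
    // List the names of all backups stored in the repository.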
    if args.cmd_backups {
        for backup in repo.list_backups().unwrap() {
            println!("{}", backup);
        }
        return
    }
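
    // List every bundle with its chunk count, sizes and compression ratio.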
    if args.cmd_bundles {
        for bundle in repo.list_bundles() {
            println!("Bundle {}", bundle.id);
            println!(" - Chunks: {}", bundle.chunk_count);
            println!(" - Size: {}", to_file_size(bundle.encoded_size as u64));
            println!(" - Data size: {}", to_file_size(bundle.raw_size as u64));
            let ratio = bundle.encoded_size as f32 / bundle.raw_size as f32;
            let compression = if let Some(ref c) = bundle.compression {
                c.to_string()
            } else {
                "none".to_string()
            };
            println!(" - Compression: {}, ratio: {:.1}%", compression, ratio * 100.0);
            println!();
        }
        return
    }
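
    // The remaining commands refer to a single backup given as <repo>::<name>.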
    let backup_name = args.arg_backup.unwrap().splitn(2, "::").nth(1).expect("Backup must be specified as <repo>::<name>").to_string();
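
    // Create a full backup of <path> and save it under the given name.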
    if args.cmd_backup {
        let backup = repo.create_full_backup(&args.arg_path.unwrap()).unwrap();
        repo.save_backup(&backup, &backup_name).unwrap();
        return
    }
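
    // info, restore and list all operate on the stored backup metadata.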
    let backup = repo.get_backup(&backup_name).unwrap();
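
    // Print the statistics recorded for this backup.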
    if args.cmd_info {
        println!("Date: {}", Local.timestamp(backup.date, 0).to_rfc2822());
        println!("Duration: {}", to_duration(backup.duration));
        println!("Entries: {} files, {} dirs", backup.file_count, backup.dir_count);
        println!("Total backup size: {}", to_file_size(backup.total_data_size));
        println!("Modified data size: {}", to_file_size(backup.changed_data_size));
        let dedup_ratio = backup.deduplicated_data_size as f32 / backup.changed_data_size as f32;
        println!("Deduplicated size: {}, {:.1}% saved", to_file_size(backup.deduplicated_data_size), (1.0 - dedup_ratio)*100.0);
        let compress_ratio = backup.encoded_data_size as f32 / backup.deduplicated_data_size as f32;
        println!("Compressed size: {} in {} bundles, {:.1}% saved", to_file_size(backup.encoded_data_size), backup.bundle_count, (1.0 - compress_ratio)*100.0);
        println!("Chunk count: {}, avg size: {}", backup.chunk_count, to_file_size(backup.avg_chunk_size as u64));
        return
    }
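
    // Restore the backup contents into <path>.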
    if args.cmd_restore {
        repo.restore_backup(&backup, &args.arg_path.unwrap()).unwrap();
        return
    }
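
    // Show the inode at <path> inside the backup and, for a directory, its direct children.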
    if args.cmd_list {
        let inode = repo.get_backup_inode(&backup, &args.arg_path.unwrap()).unwrap();
        println!("{}", inode.format_one_line());
        if let Some(children) = inode.children {
            for chunks in children.values() {
                let inode = repo.get_inode(chunks).unwrap();
                println!("- {}", inode.format_one_line());
            }
        }
        return
    }
}