mod args;
mod logger;
mod algotest;

use ::prelude::*;

use chrono::prelude::*;
use regex::{self, RegexSet};

use std::collections::HashMap;
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::env;

use self::args::Arguments;


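/// Exit codes returned by the command line interface; `code()` below maps
/// each variant to its numeric process exit status.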
pub enum ErrorCode {
    UnsafeArgs, InvalidArgs,
    InitializeLogger,
    CreateRepository,
    LoadRepository, SaveBackup, LoadBackup, LoadInode, LoadBundle,
    AddKey, LoadKey, SaveKey,
    SaveConfig,
    LoadExcludes, InvalidExcludes,
    BackupRun, RestoreRun, RemoveRun, PruneRun, VacuumRun, CheckRun, AnalyzeRun, DiffRun,
    VersionsRun, ImportRun, FuseMount
}

impl ErrorCode {
    pub fn code(&self) -> i32 {
        match *self {
            // Crazy stuff
            ErrorCode::InitializeLogger | ErrorCode::InvalidExcludes => -1,
            // Arguments
            ErrorCode::InvalidArgs => 1,
            ErrorCode::UnsafeArgs => 2,
            // Load things
            ErrorCode::LoadRepository => 3,
            ErrorCode::LoadBackup => 4,
            ErrorCode::LoadInode => 5,
            ErrorCode::LoadBundle => 6,
            ErrorCode::LoadKey => 7,
            ErrorCode::LoadExcludes => 8,
            // Minor operations
            ErrorCode::SaveBackup => 9,
            ErrorCode::AddKey => 10,
            ErrorCode::SaveKey => 11,
            ErrorCode::SaveConfig => 12,
            // Main operation
            ErrorCode::CreateRepository => 13,
            ErrorCode::BackupRun => 14,
            ErrorCode::RestoreRun => 15,
            ErrorCode::RemoveRun => 16,
            ErrorCode::PruneRun => 17,
            ErrorCode::VacuumRun => 18,
            ErrorCode::CheckRun => 19,
            ErrorCode::AnalyzeRun => 20,
            ErrorCode::DiffRun => 21,
            ErrorCode::VersionsRun => 22,
            ErrorCode::ImportRun => 23,
            ErrorCode::FuseMount => 24,
        }
    }
}
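

// Defaults for newly created repositories; the command line flags defined in
// args.rs are assumed to override these when given.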
pub const DEFAULT_CHUNKER: &'static str = "fastcdc/16";
pub const DEFAULT_HASH: &'static str = "blake2";
pub const DEFAULT_COMPRESSION: &'static str = "brotli/3";
pub const DEFAULT_BUNDLE_SIZE: usize = 25;
pub const DEFAULT_VACUUM_RATIO: usize = 50;

lazy_static! {
    pub static ref DEFAULT_REPOSITORY: String = {
        env::home_dir().unwrap().join(".zvault").to_string_lossy().to_string()
    };
}
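

/// Evaluates `$expr`, yielding the `Ok` value; on `Err`, logs `$msg` together
/// with the cause and makes the enclosing function return `Err($code)`.
/// A typical use:
///
///     let repo = checked!(Repository::open(path), "load repository", ErrorCode::LoadRepository);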
macro_rules! checked {
    ($expr:expr, $msg:expr, $code:expr) => {
        match $expr {
            Ok(val) => val,
            Err(err) => {
                error!("Failed to {}\n\tcaused by: {}", $msg, err);
                return Err($code)
            }
        }
    };
}
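

/// Opens the repository at `path`, mapping any failure to `ErrorCode::LoadRepository`.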
fn open_repository(path: &str) -> Result<Repository, ErrorCode> {
    Ok(checked!(Repository::open(path), "load repository", ErrorCode::LoadRepository))
}
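

/// Loads the named backup from the repository, mapping any failure to `ErrorCode::LoadBackup`.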
fn get_backup(repo: &Repository, backup_name: &str) -> Result<Backup, ErrorCode> {
    Ok(checked!(repo.get_backup(backup_name), "load backup", ErrorCode::LoadBackup))
}
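

/// Returns the newest backup of `path` made from this host, or `None` if the
/// hostname cannot be determined or no backup matches.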
fn find_reference_backup(repo: &Repository, path: &str) -> Result<Option<(String, Backup)>, ErrorCode> {
    let mut matching = Vec::new();
    let hostname = match get_hostname() {
        Ok(hostname) => hostname,
        Err(_) => return Ok(None)
    };
    let backup_map = match repo.get_backups() {
        Ok(backup_map) => backup_map,
        Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => {
            warn!("Some backups could not be read, ignoring them");
            backup_map
        },
        Err(err) => {
            error!("Failed to load backup files: {}", err);
            return Err(ErrorCode::LoadBackup)
        }
    };
    for (name, backup) in backup_map {
        if backup.host == hostname && backup.path == path {
            matching.push((name, backup));
        }
    }
    matching.sort_by_key(|&(_, ref b)| b.date);
    Ok(matching.pop())
}
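

/// Prints a human-readable summary of a backup, including deduplication and
/// compression savings.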
fn print_backup(backup: &Backup) {
    println!("Modified: {}", backup.modified);
    println!("Date: {}", Local.timestamp(backup.date, 0).to_rfc2822());
    println!("Source: {}:{}", backup.host, backup.path);
    println!("Duration: {}", to_duration(backup.duration));
    println!("Entries: {} files, {} dirs", backup.file_count, backup.dir_count);
    println!("Total backup size: {}", to_file_size(backup.total_data_size));
    println!("Modified data size: {}", to_file_size(backup.changed_data_size));
    let dedup_ratio = backup.deduplicated_data_size as f32 / backup.changed_data_size as f32;
    println!("Deduplicated size: {}, {:.1}% saved", to_file_size(backup.deduplicated_data_size), (1.0 - dedup_ratio)*100.0);
    let compress_ratio = backup.encoded_data_size as f32 / backup.deduplicated_data_size as f32;
    println!("Compressed size: {} in {} bundles, {:.1}% saved", to_file_size(backup.encoded_data_size), backup.bundle_count, (1.0 - compress_ratio)*100.0);
    println!("Chunk count: {}, avg size: {}", backup.chunk_count, to_file_size(backup.avg_chunk_size as u64));
}


pub fn format_inode_one_line(inode: &Inode) -> String {
    match inode.file_type {
        FileType::Directory => format!("{:25}\t{} entries", format!("{}/", inode.name), inode.children.as_ref().map(|c| c.len()).unwrap_or(0)),
        FileType::File => format!("{:25}\t{:>10}\t{}", inode.name, to_file_size(inode.size), Local.timestamp(inode.timestamp, 0).to_rfc2822()),
        FileType::Symlink => format!("{:25}\t -> {}", inode.name, inode.symlink_target.as_ref().map(|s| s as &str).unwrap_or("?")),
    }
}


fn print_inode(inode: &Inode) {
    println!("Name: {}", inode.name);
    println!("Type: {}", inode.file_type);
    println!("Size: {}", to_file_size(inode.size));
    println!("Permissions: {:3o}", inode.mode);
    println!("User: {}", inode.user);
    println!("Group: {}", inode.group);
    println!("Timestamp: {}", Local.timestamp(inode.timestamp, 0).to_rfc2822());
    if let Some(ref target) = inode.symlink_target {
        println!("Symlink target: {}", target);
    }
    println!("Cumulative size: {}", to_file_size(inode.cum_size));
    println!("Cumulative file count: {}", inode.cum_files);
    println!("Cumulative directory count: {}", inode.cum_dirs);
    if let Some(ref children) = inode.children {
        println!("Children:");
        for name in children.keys() {
            println!("  - {}", name);
        }
    }
}


fn print_backups(backup_map: &HashMap<String, Backup>) {
    let mut backups: Vec<_> = backup_map.into_iter().collect();
    backups.sort_by_key(|b| b.0);
    for (name, backup) in backups {
        println!("{:40} {:>32} {:7} files, {:6} dirs, {:>10}",
            name, Local.timestamp(backup.date, 0).to_rfc2822(), backup.file_count,
            backup.dir_count, to_file_size(backup.total_data_size));
    }
}


fn print_repoinfo(info: &RepositoryInfo) {
    println!("Bundles: {}", info.bundle_count);
    println!("Total size: {}", to_file_size(info.encoded_data_size));
    println!("Uncompressed size: {}", to_file_size(info.raw_data_size));
    println!("Compression ratio: {:.1}%", info.compression_ratio * 100.0);
    println!("Chunk count: {}", info.chunk_count);
    println!("Average chunk size: {}", to_file_size(info.avg_chunk_size as u64));
    let index_usage = info.index_entries as f32 / info.index_capacity as f32;
    println!("Index: {}, {:.0}% full", to_file_size(info.index_size as u64), index_usage * 100.0);
}


fn print_bundle(bundle: &BundleInfo) {
    println!("Bundle {}", bundle.id);
    println!(" - Mode: {:?}", bundle.mode);
    println!(" - Hash method: {:?}", bundle.hash_method);
    let encryption = if let Some((_, ref key)) = bundle.encryption {
        to_hex(key)
    } else {
        "none".to_string()
    };
    println!(" - Encryption: {}", encryption);
    println!(" - Chunks: {}", bundle.chunk_count);
    println!(" - Size: {}", to_file_size(bundle.encoded_size as u64));
    println!(" - Data size: {}", to_file_size(bundle.raw_size as u64));
    let ratio = bundle.encoded_size as f32 / bundle.raw_size as f32;
    let compression = if let Some(ref c) = bundle.compression {
        c.to_string()
    } else {
        "none".to_string()
    };
    println!(" - Compression: {}, ratio: {:.1}%", compression, ratio * 100.0);
}


fn print_bundle_one_line(bundle: &BundleInfo) {
    println!("{}: {:8?}, {:5} chunks, {:8}", bundle.id, bundle.mode, bundle.chunk_count, to_file_size(bundle.encoded_size as u64))
}


fn print_config(config: &Config) {
    println!("Bundle size: {}", to_file_size(config.bundle_size as u64));
    println!("Chunker: {}", config.chunker.to_string());
    if let Some(ref compression) = config.compression {
        println!("Compression: {}", compression.to_string());
    } else {
        println!("Compression: none");
    }
    if let Some(ref encryption) = config.encryption {
        println!("Encryption: {}", to_hex(&encryption.1[..]));
    } else {
        println!("Encryption: none");
    }
    println!("Hash method: {}", config.hash.name());
}
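

/// Buckets the unused space of all bundles by usage ratio (0% to 100% in 10%
/// steps) to show how much space a vacuum run would reclaim at each ratio.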
fn print_analysis(analysis: &HashMap<u32, BundleAnalysis>) {
    let mut reclaim_space = [0; 11];
    let mut data_total = 0;
    for bundle in analysis.values() {
        data_total += bundle.info.encoded_size;
        #[allow(unknown_lints,needless_range_loop)]
        for i in 0..11 {
            if bundle.get_usage_ratio() <= i as f32 * 0.1 {
                reclaim_space[i] += bundle.get_unused_size();
            }
        }
    }
    println!("Total bundle size: {}", to_file_size(data_total as u64));
    let used = data_total - reclaim_space[10];
    println!("Space used: {}, {:.1} %", to_file_size(used as u64), used as f32 / data_total as f32 * 100.0);
    println!("Reclaimable space (depending on vacuum ratio)");
    #[allow(unknown_lints,needless_range_loop)]
    for i in 0..11 {
        println!(" - ratio={:3}: {:>10}, {:4.1} %", i*10, to_file_size(reclaim_space[i] as u64), reclaim_space[i] as f32 / data_total as f32 * 100.0);
    }
}
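

/// Entry point of the command line interface: initializes the logger, parses
/// the arguments and dispatches to the selected subcommand, translating any
/// failure into an `ErrorCode`.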
#[allow(unknown_lints,cyclomatic_complexity)]
pub fn run() -> Result<(), ErrorCode> {
    if let Err(err) = logger::init() {
        println!("Failed to initialize the logger: {}", err);
        return Err(ErrorCode::InitializeLogger)
    }
    match try!(args::parse()) {
        Arguments::Init{repo_path, bundle_size, chunker, compression, encryption, hash, remote_path} => {
            let mut repo = checked!(Repository::create(repo_path, Config {
                bundle_size: bundle_size,
                chunker: chunker,
                compression: compression,
                encryption: None,
                hash: hash
            }, remote_path), "create repository", ErrorCode::CreateRepository);
            if encryption {
                let (public, secret) = gen_keypair();
                println!("public: {}", to_hex(&public[..]));
                println!("secret: {}", to_hex(&secret[..]));
                repo.set_encryption(Some(&public));
                checked!(repo.register_key(public, secret), "add key", ErrorCode::AddKey);
                checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
                println!();
            }
            print_config(&repo.config);
        },
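        // Backup: pick a reference backup unless a full scan or tar import was
        // requested, translate the exclude globs into a RegexSet (e.g. "*.log"
        // becomes "/[^/]*\.log($|/)"), run the backup and save the result.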
        Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device, mut excludes, excludes_from, no_default_excludes, tar} => {
            let mut repo = try!(open_repository(&repo_path));
            let mut reference_backup = None;
            if !full && !tar {
                reference_backup = match reference {
                    Some(r) => {
                        let b = try!(get_backup(&repo, &r));
                        Some((r, b))
                    },
                    None => None
                };
                if reference_backup.is_none() {
                    reference_backup = try!(find_reference_backup(&repo, &src_path));
                }
                if let Some(&(ref name, _)) = reference_backup.as_ref() {
                    info!("Using backup {} as reference", name);
                } else {
                    info!("No reference backup found, doing a full scan instead");
                }
            }
            let reference_backup = reference_backup.map(|(_, backup)| backup);
            if !no_default_excludes && !tar {
                for line in BufReader::new(checked!(File::open(&repo.excludes_path), "open default excludes file", ErrorCode::LoadExcludes)).lines() {
                    excludes.push(checked!(line, "read default excludes file", ErrorCode::LoadExcludes));
                }
            }
            if let Some(excludes_from) = excludes_from {
                for line in BufReader::new(checked!(File::open(excludes_from), "open excludes file", ErrorCode::LoadExcludes)).lines() {
                    excludes.push(checked!(line, "read excludes file", ErrorCode::LoadExcludes));
                }
            }
            let mut excludes_parsed = Vec::with_capacity(excludes.len());
            for mut exclude in excludes {
                if exclude.starts_with('#') || exclude.is_empty() {
                    continue
                }
                exclude = regex::escape(&exclude).replace('?', ".").replace(r"\*\*", ".*").replace(r"\*", "[^/]*");
                excludes_parsed.push(if exclude.starts_with('/') {
                    format!(r"^{}($|/)", exclude)
                } else {
                    format!(r"/{}($|/)", exclude)
                });
            }
            let excludes = if excludes_parsed.is_empty() {
                None
            } else {
                Some(checked!(RegexSet::new(excludes_parsed), "parse exclude patterns", ErrorCode::InvalidExcludes))
            };
            let options = BackupOptions {
                same_device: same_device,
                excludes: excludes
            };
            let result = if tar {
                repo.import_tarfile(&src_path)
            } else {
                repo.create_backup_recursively(&src_path, reference_backup.as_ref(), &options)
            };
            let backup = match result {
                Ok(backup) => backup,
                Err(RepositoryError::Backup(BackupError::FailedPaths(backup, _failed_paths))) => {
                    warn!("Some files are missing from the backup");
                    backup
                },
                Err(err) => {
                    error!("Backup failed: {}", err);
                    return Err(ErrorCode::BackupRun)
                }
            };
            checked!(repo.save_backup(&backup, &backup_name), "save backup file", ErrorCode::SaveBackup);
            print_backup(&backup);
        },
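        // Restore: load the requested inode (subpath or backup root) and write
        // it out either as a tarball or as a directory tree.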
        Arguments::Restore{repo_path, backup_name, inode, dst_path, tar} => {
            let mut repo = try!(open_repository(&repo_path));
            let backup = try!(get_backup(&repo, &backup_name));
            let inode = if let Some(inode) = inode {
                checked!(repo.get_backup_inode(&backup, &inode), "load subpath inode", ErrorCode::LoadInode)
            } else {
                checked!(repo.get_inode(&backup.root), "load root inode", ErrorCode::LoadInode)
            };
            if tar {
                checked!(repo.export_tarfile(inode, &dst_path), "restore backup", ErrorCode::RestoreRun);
            } else {
                checked!(repo.restore_inode_tree(inode, &dst_path), "restore backup", ErrorCode::RestoreRun);
            }
        },
        Arguments::Remove{repo_path, backup_name, inode} => {
            let mut repo = try!(open_repository(&repo_path));
            if let Some(inode) = inode {
                let mut backup = try!(get_backup(&repo, &backup_name));
                checked!(repo.remove_backup_path(&mut backup, inode), "remove backup subpath", ErrorCode::RemoveRun);
                checked!(repo.save_backup(&backup, &backup_name), "save backup file", ErrorCode::SaveBackup);
                info!("The backup subpath has been deleted, run vacuum to reclaim space");
            } else {
                checked!(repo.delete_backup(&backup_name), "delete backup", ErrorCode::RemoveRun);
                info!("The backup has been deleted, run vacuum to reclaim space");
            }
        },
        Arguments::Prune{repo_path, prefix, daily, weekly, monthly, yearly, force} => {
            let repo = try!(open_repository(&repo_path));
            if daily.is_none() && weekly.is_none() && monthly.is_none() && yearly.is_none() {
                error!("Refusing to prune without any keep rules, this would remove all backups");
                return Err(ErrorCode::UnsafeArgs)
            }
            checked!(repo.prune_backups(&prefix, daily, weekly, monthly, yearly, force), "prune backups", ErrorCode::PruneRun);
            if !force {
                info!("Run with --force to actually execute this command");
            }
        },
        Arguments::Vacuum{repo_path, ratio, force} => {
            let mut repo = try!(open_repository(&repo_path));
            let info_before = repo.info();
            checked!(repo.vacuum(ratio, force), "vacuum", ErrorCode::VacuumRun);
            if !force {
                info!("Run with --force to actually execute this command");
            } else {
                let info_after = repo.info();
                info!("Reclaimed {}", to_file_size(info_before.encoded_data_size - info_after.encoded_data_size));
            }
        },
        Arguments::Check{repo_path, backup_name, inode, full} => {
            let mut repo = try!(open_repository(&repo_path));
            if let Some(backup_name) = backup_name {
                let backup = try!(get_backup(&repo, &backup_name));
                if let Some(inode) = inode {
                    let inode = checked!(repo.get_backup_inode(&backup, inode), "load subpath inode", ErrorCode::LoadInode);
                    checked!(repo.check_inode(&inode), "check inode", ErrorCode::CheckRun)
                } else {
                    checked!(repo.check_backup(&backup), "check backup", ErrorCode::CheckRun)
                }
            } else {
                checked!(repo.check(full), "check repository", ErrorCode::CheckRun)
            }
            info!("Integrity verified")
        },
        Arguments::List{repo_path, backup_name, inode} => {
            let mut repo = try!(open_repository(&repo_path));
            if let Some(backup_name) = backup_name {
                let backup = try!(get_backup(&repo, &backup_name));
                let inode = checked!(repo.get_backup_inode(&backup, inode.as_ref().map(|v| v as &str).unwrap_or("/")), "load subpath inode", ErrorCode::LoadInode);
                println!("{}", format_inode_one_line(&inode));
                if let Some(children) = inode.children {
                    for chunks in children.values() {
                        let inode = checked!(repo.get_inode(chunks), "load child inode", ErrorCode::LoadInode);
                        println!("- {}", format_inode_one_line(&inode));
                    }
                }
            } else {
                let backup_map = match repo.get_backups() {
                    Ok(backup_map) => backup_map,
                    Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => {
                        warn!("Some backups could not be read, ignoring them");
                        backup_map
                    },
                    Err(err) => {
                        error!("Failed to load backup files: {}", err);
                        return Err(ErrorCode::LoadBackup)
                    }
                };
                print_backups(&backup_map);
            }
        },
        Arguments::Info{repo_path, backup_name, inode} => {
            let mut repo = try!(open_repository(&repo_path));
            if let Some(backup_name) = backup_name {
                let backup = try!(get_backup(&repo, &backup_name));
                if let Some(inode) = inode {
                    let inode = checked!(repo.get_backup_inode(&backup, inode), "load subpath inode", ErrorCode::LoadInode);
                    print_inode(&inode);
                } else {
                    print_backup(&backup);
                }
            } else {
                print_repoinfo(&repo.info());
            }
        },
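        // Mount: expose the whole repository, a single backup or a subtree of
        // a backup as a FUSE filesystem.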
        Arguments::Mount{repo_path, backup_name, inode, mount_point} => {
            let mut repo = try!(open_repository(&repo_path));
            let fs = if let Some(backup_name) = backup_name {
                let backup = try!(get_backup(&repo, &backup_name));
                if let Some(inode) = inode {
                    let inode = checked!(repo.get_backup_inode(&backup, inode), "load subpath inode", ErrorCode::LoadInode);
                    checked!(FuseFilesystem::from_inode(&mut repo, inode), "create fuse filesystem", ErrorCode::FuseMount)
                } else {
                    checked!(FuseFilesystem::from_backup(&mut repo, &backup), "create fuse filesystem", ErrorCode::FuseMount)
                }
            } else {
                checked!(FuseFilesystem::from_repository(&mut repo), "create fuse filesystem", ErrorCode::FuseMount)
            };
            checked!(fs.mount(&mount_point), "mount filesystem", ErrorCode::FuseMount);
        },
        Arguments::Analyze{repo_path} => {
            let mut repo = try!(open_repository(&repo_path));
            print_analysis(&checked!(repo.analyze_usage(), "analyze repository", ErrorCode::AnalyzeRun));
        },
        Arguments::BundleList{repo_path} => {
            let repo = try!(open_repository(&repo_path));
            for bundle in repo.list_bundles() {
                print_bundle_one_line(bundle);
            }
        },
        Arguments::BundleInfo{repo_path, bundle_id} => {
            let repo = try!(open_repository(&repo_path));
            if let Some(bundle) = repo.get_bundle(&bundle_id) {
                print_bundle(bundle);
            } else {
                error!("No such bundle");
                return Err(ErrorCode::LoadBundle)
            }
        },
        Arguments::Import{repo_path, remote_path, key_files} => {
            checked!(Repository::import(repo_path, remote_path, key_files), "import repository", ErrorCode::ImportRun);
        },
        Arguments::Versions{repo_path, path} => {
            let mut repo = try!(open_repository(&repo_path));
            for (name, mut inode) in checked!(repo.find_versions(&path), "find versions", ErrorCode::VersionsRun) {
                inode.name = format!("{}::{}", name, &path);
                println!("{}", format_inode_one_line(&inode));
            }
        },
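        // Diff: compare two backups (or subtrees of them) from the same
        // repository and print each difference as add/mod/del.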
        Arguments::Diff{repo_path_old, backup_name_old, inode_old, repo_path_new, backup_name_new, inode_new} => {
            if repo_path_old != repo_path_new {
                error!("Can only run diff on backups from the same repository");
                return Err(ErrorCode::InvalidArgs)
            }
            let mut repo = try!(open_repository(&repo_path_old));
            let backup_old = try!(get_backup(&repo, &backup_name_old));
            let backup_new = try!(get_backup(&repo, &backup_name_new));
            let inode1 = checked!(repo.get_backup_inode(&backup_old, inode_old.unwrap_or_else(|| "/".to_string())), "load subpath inode", ErrorCode::LoadInode);
            let inode2 = checked!(repo.get_backup_inode(&backup_new, inode_new.unwrap_or_else(|| "/".to_string())), "load subpath inode", ErrorCode::LoadInode);
            let diffs = checked!(repo.find_differences(&inode1, &inode2), "find differences", ErrorCode::DiffRun);
            for diff in diffs {
                println!("{} {:?}", match diff.0 {
                    DiffType::Add => "add",
                    DiffType::Mod => "mod",
                    DiffType::Del => "del"
                }, diff.1);
            }
        },
        Arguments::Config{repo_path, bundle_size, chunker, compression, encryption, hash} => {
            let mut repo = try!(open_repository(&repo_path));
            if let Some(bundle_size) = bundle_size {
                repo.config.bundle_size = bundle_size
            }
            if let Some(chunker) = chunker {
                warn!("Changing the chunker makes it impossible to use existing data for deduplication");
                repo.config.chunker = chunker
            }
            if let Some(compression) = compression {
                repo.config.compression = compression
            }
            if let Some(encryption) = encryption {
                repo.set_encryption(encryption.as_ref())
            }
            if let Some(hash) = hash {
                warn!("Changing the hash makes it impossible to use existing data for deduplication");
                repo.config.hash = hash
            }
            checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
            print_config(&repo.config);
        },
        Arguments::GenKey{file} => {
            let (public, secret) = gen_keypair();
            println!("public: {}", to_hex(&public[..]));
            println!("secret: {}", to_hex(&secret[..]));
            if let Some(file) = file {
                checked!(Crypto::save_keypair_to_file(&public, &secret, file), "save key pair", ErrorCode::SaveKey);
            }
        },
        Arguments::AddKey{repo_path, set_default, file} => {
            let mut repo = try!(open_repository(&repo_path));
            let (public, secret) = if let Some(file) = file {
                checked!(Crypto::load_keypair_from_file(file), "load key pair", ErrorCode::LoadKey)
            } else {
                let (public, secret) = gen_keypair();
                println!("public: {}", to_hex(&public[..]));
                println!("secret: {}", to_hex(&secret[..]));
                (public, secret)
            };
            checked!(repo.register_key(public, secret), "add key pair", ErrorCode::AddKey);
            if set_default {
                repo.set_encryption(Some(&public));
                checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
            }
        },
        Arguments::AlgoTest{bundle_size, chunker, compression, encrypt, hash, file} => {
            algotest::run(&file, bundle_size, chunker, compression, encrypt, hash);
        }
    }
    Ok(())
}