mod args;
mod logger;
mod algotest;

use prelude::*;

use chrono::prelude::*;
use regex::{self, RegexSet};
use std::collections::HashMap;
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::str;
use std::path::{Path, PathBuf};
use std::error::Error;

use self::args::Arguments;

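/// Exit conditions of the zVault command line interface.
/// The `code()` method below maps each variant to a numeric status code.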
pub enum ErrorCode {
    UnsafeArgs,
    InvalidArgs,
    InitializeLogger,
    CreateRepository,
    LoadRepository,
    SaveBackup,
    LoadBackup,
    LoadInode,
    LoadBundle,
    NoSuchBackup,
    BackupAlreadyExists,
    AddKey,
    LoadKey,
    SaveKey,
    SaveConfig,
    LoadExcludes,
    InvalidExcludes,
    BackupRun,
    RestoreRun,
    RemoveRun,
    PruneRun,
    VacuumRun,
    CheckRun,
    AnalyzeRun,
    DiffRun,
    VersionsRun,
    ImportRun,
    FuseMount,
    DuplicatesRun
}

impl ErrorCode {
    pub fn code(&self) -> i32 {
        match *self {
            // Crazy stuff
            ErrorCode::InitializeLogger |
            ErrorCode::InvalidExcludes => -1,
            // Arguments
            ErrorCode::InvalidArgs => 1,
            ErrorCode::UnsafeArgs => 2,
            // Load things
            ErrorCode::LoadRepository => 3,
            ErrorCode::LoadBackup => 4,
            ErrorCode::LoadInode => 5,
            ErrorCode::LoadBundle => 6,
            ErrorCode::LoadKey => 7,
            ErrorCode::LoadExcludes => 8,
            // Minor operations
            ErrorCode::SaveBackup => 9,
            ErrorCode::AddKey => 10,
            ErrorCode::SaveKey => 11,
            ErrorCode::SaveConfig => 12,
            // Main operation
            ErrorCode::CreateRepository => 13,
            ErrorCode::BackupRun => 14,
            ErrorCode::RestoreRun => 15,
            ErrorCode::RemoveRun => 16,
            ErrorCode::PruneRun => 17,
            ErrorCode::VacuumRun => 18,
            ErrorCode::CheckRun => 19,
            ErrorCode::AnalyzeRun => 20,
            ErrorCode::DiffRun => 21,
            ErrorCode::VersionsRun => 22,
            ErrorCode::ImportRun => 23,
            ErrorCode::FuseMount => 24,
            ErrorCode::DuplicatesRun => 27,
            //
            ErrorCode::NoSuchBackup => 25,
            ErrorCode::BackupAlreadyExists => 26,
        }
    }
}

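// Built-in defaults for the repository parameters.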
pub const DEFAULT_CHUNKER: &str = "fastcdc/16";
pub const DEFAULT_HASH: &str = "blake2";
pub const DEFAULT_COMPRESSION: &str = "brotli/3";
pub const DEFAULT_BUNDLE_SIZE_STR: &str = "25";
pub const DEFAULT_VACUUM_RATIO_STR: &str = "0";
pub const DEFAULT_DUPLICATES_MIN_SIZE_STR: &str = "1b";

lazy_static! {
    pub static ref ZVAULT_FOLDER: PathBuf = {
        dirs::home_dir().unwrap().join(".zvault")
    };
}

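// Evaluates `$expr` and unwraps the `Ok` value; on `Err`, logs the failure
// together with `$msg` and returns `$code` from the surrounding function.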
macro_rules! checked {
    ($expr:expr, $msg:expr, $code:expr) => {
        match $expr {
            Ok(val) => val,
            Err(err) => {
                tr_error!("Failed to {}\n\tcaused by: {}", tr!($msg), err);
                return Err($code)
            }
        }
    };
}

fn open_repository(path: &Path, online: bool) -> Result<BackupRepository, ErrorCode> {
    Ok(checked!(
        BackupRepository::open(path, online),
        "load repository",
        ErrorCode::LoadRepository
    ))
}

fn get_backup(repo: &BackupRepository, backup_name: &str) -> Result<BackupFile, ErrorCode> {
    if !repo.has_backup(backup_name) {
        tr_error!("A backup with that name does not exist");
        return Err(ErrorCode::NoSuchBackup);
    }
    Ok(checked!(
        repo.get_backup(backup_name),
        "load backup",
        ErrorCode::LoadBackup
    ))
}

fn get_inode(repo: &mut BackupRepository, backup: &BackupFile, inode: Option<&String>) -> Result<Inode, ErrorCode> {
    Ok(if let Some(inode) = inode {
        checked!(
            repo.get_backup_inode(backup, &inode),
            "load subpath inode",
            ErrorCode::LoadInode
        )
    } else {
        checked!(
            repo.get_root_inode(&backup),
            "load root inode",
            ErrorCode::LoadInode
        )
    })
}

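/// Looks for the most recent existing backup of the same host and source path,
/// which can then serve as the reference for an incremental backup run.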
fn find_reference_backup(
    repo: &BackupRepository,
    path: &str,
) -> Result<Option<(String, BackupFile)>, ErrorCode> {
    let mut matching = Vec::new();
    let hostname = match get_hostname() {
        Ok(hostname) => hostname,
        Err(_) => return Ok(None),
    };
    let backup_map = match repo.get_all_backups() {
        Ok(backup_map) => backup_map,
        Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map,
            _failed))) => {
            tr_warn!("Some backups could not be read, ignoring them");
            backup_map
        }
        Err(err) => {
            tr_error!("Failed to load backup files: {}", err);
            return Err(ErrorCode::LoadBackup);
        }
    };
    for (name, backup) in backup_map {
        if backup.host == hostname && backup.path == path {
            matching.push((name, backup));
        }
    }
    matching.sort_by_key(|&(_, ref b)| b.timestamp);
    Ok(matching.pop())
}

fn print_backup(backup: &BackupFile) {
    if backup.modified {
        tr_warn!("This backup has been modified");
    }
    tr_println!(
        "Date: {}",
        Local.timestamp(backup.timestamp, 0).to_rfc2822()
    );
    tr_println!("Source: {}:{}", backup.host, backup.path);
    tr_println!("Duration: {}", to_duration(backup.duration));
    tr_println!(
        "Entries: {} files, {} dirs",
        backup.file_count,
        backup.dir_count
    );
    tr_println!(
        "Total backup size: {}",
        to_file_size(backup.total_data_size)
    );
    tr_println!(
        "Modified data size: {}",
        to_file_size(backup.changed_data_size)
    );
    let dedup_ratio = backup.deduplicated_data_size as f32 / backup.changed_data_size as f32;
    tr_println!(
        "Deduplicated size: {}, {:.1}%",
        to_file_size(backup.deduplicated_data_size),
        (dedup_ratio - 1.0) * 100.0
    );
    let compress_ratio = backup.encoded_data_size as f32 / backup.deduplicated_data_size as f32;
    tr_println!(
        "Compressed size: {} in {} bundles, {:.1}%",
        to_file_size(backup.encoded_data_size),
        backup.bundle_count,
        (compress_ratio - 1.0) * 100.0
    );
    tr_println!(
        "Chunk count: {}, avg size: {}",
        backup.chunk_count,
        to_file_size(backup.avg_chunk_size as u64)
    );
}

pub fn format_inode_one_line(inode: &Inode) -> String {
    match inode.file_type {
        FileType::Directory => {
            format!(
                "{:25}\t{} entries",
                format!("{}/", inode.name),
                inode.children.as_ref().map(|c| c.len()).unwrap_or(0)
            )
        }
        FileType::File => {
            format!(
                "{:25}\t{:>10}\t{}",
                inode.name,
                to_file_size(inode.size),
                Local.timestamp(inode.timestamp, 0).to_rfc2822()
            )
        }
        FileType::Symlink => {
            format!(
                "{:25}\t -> {}",
                inode.name,
                inode.symlink_target.as_ref().map(|s| s as &str).unwrap_or(
                    "?"
                )
            )
        }
        FileType::BlockDevice | FileType::CharDevice => {
            let device = inode.device.unwrap_or((0, 0));
            format!(
                "{:25}\t{:12}\t{}:{}",
                inode.name,
                inode.file_type,
                device.0,
                device.1
            )
        }
        FileType::NamedPipe => format!("{:25}\t fifo", inode.name),
    }
}

fn print_inode(inode: &Inode) {
    tr_println!("Name: {}", inode.name);
    tr_println!("Type: {}", inode.file_type);
    tr_println!("Size: {}", to_file_size(inode.size));
    tr_println!("Permissions: {:3o}", inode.mode);
    tr_println!("User: {}", inode.user);
    tr_println!("Group: {}", inode.group);
    tr_println!(
        "Timestamp: {}",
        Local.timestamp(inode.timestamp, 0).to_rfc2822()
    );
    if let Some(ref target) = inode.symlink_target {
        tr_println!("Symlink target: {}", target);
    }
    tr_println!("Cumulative size: {}", to_file_size(inode.cum_size));
    tr_println!("Cumulative file count: {}", inode.cum_files);
    tr_println!("Cumulative directory count: {}", inode.cum_dirs);
    if let Some(ref children) = inode.children {
        tr_println!("Children:");
        for name in children.keys() {
            println!(" - {}", name);
        }
    }
    if !inode.xattrs.is_empty() {
        tr_println!("Extended attributes:");
        for (key, value) in &inode.xattrs {
            if let Ok(value) = str::from_utf8(value) {
                println!(" - {} = '{}'", key, value);
            } else {
                println!(" - {} = 0x{}", key, to_hex(value));
            }
        }
    }
}

fn print_backups(backup_map: &HashMap<String, BackupFile>) {
    let mut backups: Vec<_> = backup_map.into_iter().collect();
    backups.sort_by_key(|b| b.0);
    for (name, backup) in backups {
        println!(
            "{:40} {:>32} {:7} files, {:6} dirs, {:>10}",
            name,
            Local.timestamp(backup.timestamp, 0).to_rfc2822(),
            backup.file_count,
            backup.dir_count,
            to_file_size(backup.total_data_size)
        );
    }
}

fn print_repoinfo(info: &RepositoryInfo) {
    tr_println!("Bundles: {}", info.bundle_count);
    tr_println!("Total size: {}", to_file_size(info.encoded_data_size));
    tr_println!("Uncompressed size: {}", to_file_size(info.raw_data_size));
    tr_println!("Compression ratio: {:.1}%", (info.compression_ratio - 1.0) * 100.0);
    tr_println!("Chunk count: {}", info.chunk_count);
    tr_println!(
        "Average chunk size: {}",
        to_file_size(info.avg_chunk_size as u64)
    );
    let index_usage = info.index_entries as f32 / info.index_capacity as f32;
    tr_println!(
        "Index: {}, {:.0}% full",
        to_file_size(info.index_size as u64),
        index_usage * 100.0
    );
}

fn print_repostats(stats: &RepositoryStatistics) {
    tr_println!("Index\n=====");
    let index_usage = stats.index.count as f32 / stats.index.capacity as f32;
    tr_println!("Size: {}", to_file_size(stats.index.size as u64));
    tr_println!("Entries: {} / {}, {:.0}%", stats.index.count, stats.index.capacity, index_usage * 100.0);
    let disp = &stats.index.displacement;
    tr_println!("Displacement:\n - average: {:.1}\n - stddev: {:.1}\n - over {:.1}: {:.0}, {:.1}%\n - maximum: {:.0}",
        disp.avg, disp.stddev, disp.avg + 2.0 * disp.stddev, disp.count_xl, disp.count_xl as f32 / disp.count as f32 * 100.0, disp.max);
    println!();
    tr_println!("Bundles\n=======");
    let tsize = (stats.bundles.raw_size.count as f32 * stats.bundles.encoded_size.avg) as u64;
    tr_println!("All bundles: {} in {} bundles", to_file_size(tsize), stats.bundles.raw_size.count);
    let rsize = &stats.bundles.raw_size;
    tr_println!(" - raw size: ø = {}, maximum: {}", to_file_size(rsize.avg as u64), to_file_size(rsize.max as u64));
    let esize = &stats.bundles.encoded_size;
    tr_println!(" - encoded size: ø = {}, maximum: {}", to_file_size(esize.avg as u64), to_file_size(esize.max as u64));
    let ccount = &stats.bundles.chunk_count;
    tr_println!(" - chunk count: ø = {:.1}, maximum: {:.0}", ccount.avg, ccount.max);
    let tsize = (stats.bundles.raw_size_meta.count as f32 * stats.bundles.encoded_size_meta.avg) as u64;
    tr_println!("Meta bundles: {} in {} bundles", to_file_size(tsize), stats.bundles.raw_size_meta.count);
    let rsize = &stats.bundles.raw_size_meta;
    tr_println!(" - raw size: ø = {}, maximum: {}", to_file_size(rsize.avg as u64), to_file_size(rsize.max as u64));
    let esize = &stats.bundles.encoded_size_meta;
    tr_println!(" - encoded size: ø = {}, maximum: {}", to_file_size(esize.avg as u64), to_file_size(esize.max as u64));
    let ccount = &stats.bundles.chunk_count_meta;
    tr_println!(" - chunk count: ø = {:.1}, maximum: {:.0}", ccount.avg, ccount.max);
    let tsize = (stats.bundles.raw_size_data.count as f32 * stats.bundles.encoded_size_data.avg) as u64;
    tr_println!("Data bundles: {} in {} bundles", to_file_size(tsize), stats.bundles.raw_size_data.count);
    let rsize = &stats.bundles.raw_size_data;
    tr_println!(" - raw size: ø = {}, maximum: {}", to_file_size(rsize.avg as u64), to_file_size(rsize.max as u64));
    let esize = &stats.bundles.encoded_size_data;
    tr_println!(" - encoded size: ø = {}, maximum: {}", to_file_size(esize.avg as u64), to_file_size(esize.max as u64));
    let ccount = &stats.bundles.chunk_count_data;
    tr_println!(" - chunk count: ø = {:.1}, maximum: {:.0}", ccount.avg, ccount.max);
    println!();
    tr_println!("Bundle methods\n==============");
    tr_println!("Hash:");
    for (hash, &count) in &stats.bundles.hash_methods {
        tr_println!(" - {}: {}, {:.1}%", hash.name(), count, count as f32 / stats.bundles.raw_size.count as f32 * 100.0);
    }
    tr_println!("Compression:");
    for (compr, &count) in &stats.bundles.compressions {
        let compr_name = if let Some(ref compr) = *compr {
            compr.to_string()
        } else {
            tr!("none").to_string()
        };
        tr_println!(" - {}: {}, {:.1}%", compr_name, count, count as f32 / stats.bundles.raw_size.count as f32 * 100.0);
    }
    tr_println!("Encryption:");
    for (encr, &count) in &stats.bundles.encryptions {
        let encr_name = if let Some(ref encr) = *encr {
            to_hex(&encr.1[..])
        } else {
            tr!("none").to_string()
        };
        tr_println!(" - {}: {}, {:.1}%", encr_name, count, count as f32 / stats.bundles.raw_size.count as f32 * 100.0);
    }
}

fn print_bundle(bundle: &StoredBundle) {
    tr_println!("Bundle {}", bundle.info.id);
    tr_println!(" - Mode: {:?}", bundle.info.mode);
    tr_println!(" - Path: {:?}", bundle.path);
    tr_println!(
        " - Date: {}",
        Local.timestamp(bundle.info.timestamp, 0).to_rfc2822()
    );
    tr_println!(" - Hash method: {:?}", bundle.info.hash_method);
    let encryption = if let Some((_, ref key)) = bundle.info.encryption {
        to_hex(key)
    } else {
        "none".to_string()
    };
    tr_println!(" - Encryption: {}", encryption);
    tr_println!(" - Chunks: {}", bundle.info.chunk_count);
    tr_println!(
        " - Size: {}",
        to_file_size(bundle.info.encoded_size as u64)
    );
    tr_println!(
        " - Data size: {}",
        to_file_size(bundle.info.raw_size as u64)
    );
    let ratio = bundle.info.encoded_size as f32 / bundle.info.raw_size as f32;
    let compression = if let Some(ref c) = bundle.info.compression {
        c.to_string()
    } else {
        "none".to_string()
    };
    tr_println!(
        " - Compression: {}, ratio: {:.1}%",
        compression,
        (ratio - 1.0) * 100.0
    );
}

fn print_bundle_one_line(bundle: &BundleInfo) {
    tr_println!(
        "{}: {:8?}, {:5} chunks, {:8}",
        bundle.id,
        bundle.mode,
        bundle.chunk_count,
        to_file_size(bundle.encoded_size as u64)
    )
}

fn print_config(config: &Config) {
    tr_println!("Bundle size: {}", to_file_size(config.bundle_size as u64));
    tr_println!("Chunker: {}", config.chunker.to_string());
    if let Some(ref compression) = config.compression {
        tr_println!("Compression: {}", compression.to_string());
    } else {
        tr_println!("Compression: none");
    }
    if let Some(ref encryption) = config.encryption {
        tr_println!("Encryption: {}", to_hex(&encryption.1[..]));
    } else {
        tr_println!("Encryption: none");
    }
    tr_println!("Hash method: {}", config.hash.name());
}

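// Buckets the analysis by vacuum ratio in 10% steps: for every threshold, the
// unused space of all bundles at or below that usage ratio could be reclaimed,
// at the cost of rewriting their still-used data.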
fn print_analysis(analysis: &HashMap<u32, BundleAnalysis>) {
    let mut reclaim_space = [0; 11];
    let mut rewrite_size = [0; 11];
    let mut data_total = 0;
    for bundle in analysis.values() {
        data_total += bundle.info.encoded_size;
        #[allow(unknown_lints, needless_range_loop)]
        for i in 0..11 {
            if bundle.get_usage_ratio() <= i as f32 * 0.1 {
                reclaim_space[i] += bundle.get_unused_size();
                rewrite_size[i] += bundle.get_used_size();
            }
        }
    }
    tr_println!("Total bundle size: {}", to_file_size(data_total as u64));
    let used = data_total - reclaim_space[10];
    tr_println!(
        "Space used: {}, {:.1} %",
        to_file_size(used as u64),
        used as f32 / data_total as f32 * 100.0
    );
    tr_println!("Reclaimable space (depending on vacuum ratio)");
    #[allow(unknown_lints, needless_range_loop)]
    for i in 0..11 {
        tr_println!(
            " - ratio={:3}: {:>10}, {:4.1} %, rewriting {:>10}",
            i * 10,
            to_file_size(reclaim_space[i] as u64),
            reclaim_space[i] as f32 / data_total as f32 * 100.0,
            to_file_size(rewrite_size[i] as u64)
        );
    }
}

fn print_duplicates(dups: Vec<(Vec<PathBuf>, u64)>) {
    for (group, size) in dups {
        tr_println!("{} duplicates found, size: {}", group.len(), to_file_size(size));
        for dup in group {
            println!(" - {}", dup.to_string_lossy());
        }
        println!();
    }
}

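// Prints the errors found in one module of the integrity report and returns
// the number of errors that could not be corrected.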
fn print_integrity_report_module<T: Error>(name: &str, module: &ModuleIntegrityReport<T>) -> usize {
    let found = module.errors_fixed.len() + module.errors_unfixed.len();
    let fixed = module.errors_fixed.len();
    tr_println!("{}: {} errors found, {} corrected:", name, found, fixed);
    for e in &module.errors_fixed {
        println!("{}", e);
    }
    for e in &module.errors_unfixed {
        println!("{}", e);
    }
    found - fixed
}

fn print_integrity_report(report: &IntegrityReport) {
    let mut unfixed = 0;
    if let Some(ref module) = report.bundle_map {
        unfixed += print_integrity_report_module("Bundle map", module);
    }
    if let Some(ref module) = report.index {
        unfixed += print_integrity_report_module("Index", module);
    }
    if let Some(ref module) = report.bundles {
        unfixed += print_integrity_report_module("Bundles", module);
    }
    if let Some(ref module) = report.backups {
        unfixed += print_integrity_report_module("Backups", module);
    }
    if unfixed == 0 {
        tr_info!("Integrity verified")
    }
}

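/// Entry point of the CLI: parses the arguments, initializes the logger and
/// dispatches to the selected subcommand, returning an `ErrorCode` on failure.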
#[allow(unknown_lints, cyclomatic_complexity)]
pub fn run() -> Result<(), ErrorCode> {
    let (log_level, args) = try!(args::parse());
    if let Err(err) = logger::init(log_level) {
        tr_println!("Failed to initialize the logger: {}", err);
        return Err(ErrorCode::InitializeLogger);
    }
    match args {
        Arguments::Init {
            repo_path,
            bundle_size,
            chunker,
            compression,
            encryption,
            hash,
            remote_path
        } => {
            if !Path::new(&remote_path).is_absolute() {
                tr_error!("The remote path of a repository must be absolute.");
                return Err(ErrorCode::InvalidArgs);
            }
            let mut repo = checked!(
                BackupRepository::create(
                    repo_path,
                    &Config {
                        bundle_size,
                        chunker,
                        compression,
                        encryption: None,
                        hash
                    },
                    remote_path
                ),
                "create repository",
                ErrorCode::CreateRepository
            );
            if encryption {
                let (public, secret) = Crypto::gen_keypair();
                tr_info!("Created the following key pair");
                tr_println!("public: {}", to_hex(&public[..]));
                tr_println!("secret: {}", to_hex(&secret[..]));
                repo.set_encryption(Some(&public));
                checked!(
                    repo.register_key(public, secret),
                    "add key",
                    ErrorCode::AddKey
                );
                checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
                tr_warn!(
                    "Please store this key pair in a secure location before using the repository"
                );
                println!();
            }
            print_config(repo.get_config());
        }
        Arguments::Backup {
            repo_path,
            backup_name,
            src_path,
            full,
            reference,
            same_device,
            mut excludes,
            excludes_from,
            no_default_excludes,
            tar
        } => {
            let mut repo = try!(open_repository(&repo_path, true));
            if repo.has_backup(&backup_name) {
                tr_error!("A backup with that name already exists");
                return Err(ErrorCode::BackupAlreadyExists);
            }
            if src_path == "-" && !tar {
                tr_error!("Reading from stdin requires --tar");
                return Err(ErrorCode::InvalidArgs);
            }
            let mut reference_backup = None;
            if !full && !tar {
                reference_backup = match reference {
                    Some(r) => {
                        let b = try!(get_backup(&repo, &r));
                        Some((r, b))
                    }
                    None => None,
                };
                if reference_backup.is_none() {
                    reference_backup = try!(find_reference_backup(&repo, &src_path));
                }
                if let Some(&(ref name, _)) = reference_backup.as_ref() {
                    tr_info!("Using backup {} as reference", name);
                } else {
                    tr_info!("No reference backup found, doing a full scan instead");
                }
            }
            let reference_backup = reference_backup.map(|(_, backup)| backup);
            if !no_default_excludes && !tar {
                for line in BufReader::new(checked!(
                    File::open(&repo.get_layout().excludes_path()),
                    "open default excludes file",
                    ErrorCode::LoadExcludes
                )).lines()
                {
                    excludes.push(checked!(
                        line,
                        "read default excludes file",
                        ErrorCode::LoadExcludes
                    ));
                }
            }
            if let Some(excludes_from) = excludes_from {
                for line in BufReader::new(checked!(
                    File::open(excludes_from),
                    "open excludes file",
                    ErrorCode::LoadExcludes
                )).lines()
                {
                    excludes.push(checked!(
                        line,
                        "read excludes file",
                        ErrorCode::LoadExcludes
                    ));
                }
            }
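            // Convert the glob-style exclude patterns into regular expressions:
            // the pattern is regex-escaped first, then `?` becomes `.`, `**`
            // becomes `.*` (matching across directories), `*` becomes `[^/]*`
            // (staying within one path component), and patterns starting with
            // `/` are anchored at the backup root.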
            let mut excludes_parsed = Vec::with_capacity(excludes.len());
            for mut exclude in excludes {
                if exclude.starts_with('#') || exclude.is_empty() {
                    continue;
                }
                exclude = regex::escape(&exclude)
                    .replace('?', ".")
                    .replace(r"\*\*", ".*")
                    .replace(r"\*", "[^/]*");
                excludes_parsed.push(if exclude.starts_with('/') {
                    format!(r"^{}($|/)", exclude)
                } else {
                    format!(r"/{}($|/)", exclude)
                });
            }
            let excludes = if excludes_parsed.is_empty() {
                None
            } else {
                Some(checked!(
                    RegexSet::new(excludes_parsed),
                    "parse exclude patterns",
                    ErrorCode::InvalidExcludes
                ))
            };
            let options = BackupOptions {
                same_device,
                excludes
            };
            let result = if tar {
                repo.import_tarfile(&src_path)
            } else {
                repo.create_backup(&src_path, &backup_name, reference_backup.as_ref(), &options)
            };
            let backup = match result {
                Ok(backup) => {
                    tr_info!("Backup finished");
                    backup
                }
                Err(RepositoryError::Backup(BackupError::FailedPaths(backup, _failed_paths))) => {
                    tr_warn!("Some files are missing from the backup");
                    backup
                }
                Err(err) => {
                    tr_error!("Backup failed: {}", err);
                    return Err(ErrorCode::BackupRun);
                }
            };
            print_backup(&backup);
        }
        Arguments::Restore {
            repo_path,
            backup_name,
            inode,
            dst_path,
            tar
        } => {
            let mut repo = try!(open_repository(&repo_path, true));
            let backup = try!(get_backup(&repo, &backup_name));
            let inode = try!(get_inode(&mut repo, &backup, inode.as_ref()));
            if tar {
                checked!(
                    repo.export_tarfile(&backup, inode, &dst_path),
                    "restore backup",
                    ErrorCode::RestoreRun
                );
            } else {
                checked!(
                    repo.restore_inode_tree(&backup, inode, &dst_path),
                    "restore backup",
                    ErrorCode::RestoreRun
                );
            }
            tr_info!("Restore finished");
        }
        Arguments::Copy {
            repo_path_src,
            backup_name_src,
            repo_path_dst,
            backup_name_dst
        } => {
            if repo_path_src != repo_path_dst {
                tr_error!("Can only run copy on same repository");
                return Err(ErrorCode::InvalidArgs);
            }
            let mut repo = try!(open_repository(&repo_path_src, false));
            if repo.has_backup(&backup_name_dst) {
                tr_error!("A backup with that name already exists");
                return Err(ErrorCode::BackupAlreadyExists);
            }
            let backup = try!(get_backup(&repo, &backup_name_src));
            checked!(
                repo.save_backup(&backup, &backup_name_dst),
                "save backup",
                ErrorCode::SaveBackup
            );
        }
        Arguments::Remove {
            repo_path,
            backup_name,
            inode,
            force
        } => {
            let mut repo = try!(open_repository(&repo_path, true));
            if let Some(inode) = inode {
                let mut backup = try!(get_backup(&repo, &backup_name));
                checked!(
                    repo.remove_backup_path(&mut backup, &backup_name, inode),
                    "remove backup subpath",
                    ErrorCode::RemoveRun
                );
                tr_info!("The backup subpath has been deleted, run vacuum to reclaim space");
            } else if repo.get_layout().backups_path().join(&backup_name).is_dir() {
                let backups = checked!(
                    repo.get_backups(&backup_name),
                    "retrieve backups",
                    ErrorCode::RemoveRun
                );
                if force {
                    for name in backups.keys() {
                        checked!(
                            repo.delete_backup(&format!("{}/{}", &backup_name, name)),
                            "delete backup",
                            ErrorCode::RemoveRun
                        );
                    }
                } else {
                    tr_error!("Denying to remove multiple backups (use --force):");
                    for name in backups.keys() {
                        println!(" - {}/{}", backup_name, name);
                    }
                }
            } else {
                checked!(
                    repo.delete_backup(&backup_name),
                    "delete backup",
                    ErrorCode::RemoveRun
                );
                tr_info!("The backup has been deleted, run vacuum to reclaim space");
            }
        }
        Arguments::Prune {
            repo_path,
            prefix,
            daily,
            weekly,
            monthly,
            yearly,
            force
        } => {
            let mut repo = try!(open_repository(&repo_path, true));
            if daily + weekly + monthly + yearly == 0 {
                tr_error!("This would remove all those backups");
                return Err(ErrorCode::UnsafeArgs);
            }
            checked!(
                repo.prune_backups(&prefix, daily, weekly, monthly, yearly, force),
                "prune backups",
                ErrorCode::PruneRun
            );
            if !force {
                tr_info!("Run with --force to actually execute this command");
            }
        }
        Arguments::Vacuum {
            repo_path,
            ratio,
            force,
            combine
        } => {
            let mut repo = try!(open_repository(&repo_path, true));
            let info_before = repo.info();
            checked!(
                repo.vacuum(ratio, combine, force),
                "vacuum",
                ErrorCode::VacuumRun
            );
            if !force {
                tr_info!("Run with --force to actually execute this command");
            } else {
                let info_after = repo.info();
                tr_info!(
                    "Reclaimed {}",
                    to_file_size(info_before.encoded_data_size - info_after.encoded_data_size)
                );
            }
        }
        Arguments::Check {
            repo_path,
            backup_name,
            inode,
            bundles,
            index,
            bundle_data,
            repair
        } => {
            let mut repo = try!(open_repository(&repo_path, true));
            let mut options = CheckOptions::new();
            options.index(index).bundle_data(bundle_data).bundles(bundles).repair(repair);
            if let Some(backup_name) = backup_name {
                let backup = try!(get_backup(&repo, &backup_name));
                if let Some(inode_name) = inode {
                    let inode = checked!(
                        repo.get_backup_inode(&backup, &inode_name),
                        "load subpath inode",
                        ErrorCode::LoadInode
                    );
                    options.subpath(Path::new(&inode_name), inode);
                }
                options.single_backup(&backup_name, backup);
            } else {
                options.all_backups();
            }
            let report = checked!(
                repo.check(options),
                "check repository",
                ErrorCode::CheckRun
            );
            print_integrity_report(&report);
        }
        Arguments::List {
            repo_path,
            backup_name,
            inode
        } => {
            let mut repo = try!(open_repository(&repo_path, false));
            let backup_map = if let Some(backup_name) = backup_name {
                if repo.get_layout().backups_path().join(&backup_name).is_dir() {
                    repo.get_backups(&backup_name)
                } else {
                    let backup = try!(get_backup(&repo, &backup_name));
                    let inode = checked!(
                        repo.get_backup_inode(
                            &backup,
                            inode.as_ref().map(|v| v as &str).unwrap_or("/")
                        ),
                        "load subpath inode",
                        ErrorCode::LoadInode
                    );
                    println!("{}", format_inode_one_line(&inode));
                    for ch in checked!(repo.get_inode_children(&inode), "load inodes", ErrorCode::LoadInode) {
                        println!("- {}", format_inode_one_line(&ch));
                    }
                    return Ok(());
                }
            } else {
                repo.get_all_backups()
            };
            let backup_map = match backup_map {
                Ok(backup_map) => backup_map,
                Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => {
                    tr_warn!("Some backups could not be read, ignoring them");
                    backup_map
                }
                Err(err) => {
                    tr_error!("Failed to load backup files: {}", err);
                    return Err(ErrorCode::LoadBackup);
                }
            };
            print_backups(&backup_map);
        }
        Arguments::Info {
            repo_path,
            backup_name,
            inode
        } => {
            let mut repo = try!(open_repository(&repo_path, false));
            if let Some(backup_name) = backup_name {
                let backup = try!(get_backup(&repo, &backup_name));
                if let Some(inode) = inode {
                    let inode = checked!(
                        repo.get_backup_inode(&backup, inode),
                        "load subpath inode",
                        ErrorCode::LoadInode
                    );
                    print_inode(&inode);
                } else {
                    print_backup(&backup);
                }
            } else {
                print_repoinfo(&repo.info());
            }
        }
        Arguments::Statistics {
            repo_path
        } => {
            let mut repo = try!(open_repository(&repo_path, false));
            print_repostats(&repo.statistics());
        }
        Arguments::Duplicates {
            repo_path,
            backup_name,
            inode,
            min_size
        } => {
            let mut repo = try!(open_repository(&repo_path, true));
            let backup = try!(get_backup(&repo, &backup_name));
            let inode = try!(get_inode(&mut repo, &backup, inode.as_ref()));
            let dups = checked!(
                repo.find_duplicates(&inode, min_size),
                "find duplicates",
                ErrorCode::DuplicatesRun
            );
            print_duplicates(dups);
        }
        Arguments::Mount {
            repo_path,
            backup_name,
            inode,
            mount_point
        } => {
            let mut repo = try!(open_repository(&repo_path, true));
            tr_info!("Mounting the filesystem...");
            tr_info!(
                "Please unmount the filesystem via 'fusermount -u {}' when done.",
                mount_point
            );
            if let Some(backup_name) = backup_name {
                if repo.get_layout().backups_path().join(&backup_name).is_dir() {
                    checked!(
                        repo.mount_repository(Some(&backup_name), mount_point),
                        "create fuse filesystem",
                        ErrorCode::FuseMount
                    )
                } else {
                    let backup = try!(get_backup(&repo, &backup_name));
                    if let Some(inode) = inode {
                        let inode = checked!(
                            repo.get_backup_inode(&backup, inode),
                            "load subpath inode",
                            ErrorCode::LoadInode
                        );
                        checked!(
                            repo.mount_inode(backup, inode, mount_point),
                            "create fuse filesystem",
                            ErrorCode::FuseMount
                        )
                    } else {
                        checked!(
                            repo.mount_backup(backup, mount_point),
                            "create fuse filesystem",
                            ErrorCode::FuseMount
                        )
                    }
                }
            } else {
                checked!(
                    repo.mount_repository(None, mount_point),
                    "create fuse filesystem",
                    ErrorCode::FuseMount
                )
            }
        }
        Arguments::Analyze { repo_path } => {
            let mut repo = try!(open_repository(&repo_path, true));
            print_analysis(&checked!(
                repo.analyze_usage(),
                "analyze repository",
                ErrorCode::AnalyzeRun
            ));
        }
        Arguments::BundleList { repo_path } => {
            let repo = try!(open_repository(&repo_path, true));
            for bundle in repo.list_bundles() {
                print_bundle_one_line(bundle);
            }
        }
        Arguments::BundleInfo {
            repo_path,
            bundle_id
        } => {
            let repo = try!(open_repository(&repo_path, true));
            if let Some(bundle) = repo.get_bundle(&bundle_id) {
                print_bundle(bundle);
            } else {
                tr_error!("No such bundle");
                return Err(ErrorCode::LoadBundle);
            }
        }
        Arguments::Import {
            repo_path,
            remote_path,
            key_files
        } => {
            checked!(
                BackupRepository::import(repo_path, remote_path, key_files),
                "import repository",
                ErrorCode::ImportRun
            );
            tr_info!("Import finished");
        }
        Arguments::Versions { repo_path, path } => {
            let mut repo = try!(open_repository(&repo_path, true));
            let mut found = false;
            for (name, mut inode) in
                checked!(
                    repo.find_versions(&path),
                    "find versions",
                    ErrorCode::VersionsRun
                )
            {
                inode.name = format!("{}::{}", name, &path);
                println!("{}", format_inode_one_line(&inode));
                found = true;
            }
            if !found {
                tr_info!("No versions of that file were found.");
            }
        }
        Arguments::Diff {
            repo_path_old,
            backup_name_old,
            inode_old,
            repo_path_new,
            backup_name_new,
            inode_new
        } => {
            if repo_path_old != repo_path_new {
                tr_error!("Can only run diff on same repository");
                return Err(ErrorCode::InvalidArgs);
            }
            let mut repo = try!(open_repository(&repo_path_old, true));
            let backup_old = try!(get_backup(&repo, &backup_name_old));
            let backup_new = try!(get_backup(&repo, &backup_name_new));
            let inode1 =
                checked!(
                    repo.get_backup_inode(&backup_old, inode_old.unwrap_or_else(|| "/".to_string())),
                    "load subpath inode",
                    ErrorCode::LoadInode
                );
            let inode2 =
                checked!(
                    repo.get_backup_inode(&backup_new, inode_new.unwrap_or_else(|| "/".to_string())),
                    "load subpath inode",
                    ErrorCode::LoadInode
                );
            let diffs = checked!(
                repo.find_differences(&inode1, &inode2),
                "find differences",
                ErrorCode::DiffRun
            );
            for diff in &diffs {
                println!(
                    "{} {:?}",
                    match diff.0 {
                        DiffType::Add => "add",
                        DiffType::Mod => "mod",
                        DiffType::Del => "del",
                    },
                    diff.1
                );
            }
            if diffs.is_empty() {
                tr_info!("No differences found");
            }
        }
        Arguments::Config {
            repo_path,
            bundle_size,
            chunker,
            compression,
            encryption,
            hash
        } => {
            let mut repo = try!(open_repository(&repo_path, false));
            let mut changed = false;
            let mut config = repo.get_config().clone();
            if let Some(bundle_size) = bundle_size {
                config.bundle_size = bundle_size;
                changed = true;
            }
            if let Some(chunker) = chunker {
                tr_warn!(
                    "Changing the chunker makes it impossible to use existing data for deduplication"
                );
                config.chunker = chunker;
                changed = true;
            }
            if let Some(compression) = compression {
                config.compression = compression;
                changed = true;
            }
            if let Some(encryption) = encryption {
                repo.set_encryption(encryption.as_ref());
                changed = true;
            }
            if let Some(hash) = hash {
                tr_warn!(
                    "Changing the hash makes it impossible to use existing data for deduplication"
                );
                config.hash = hash;
                changed = true;
            }
            if changed {
                repo.set_config(config);
                checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
                tr_info!("The configuration has been updated.");
            } else {
                print_config(repo.get_config());
            }
        }
        Arguments::GenKey { file, password } => {
            let (public, secret) = match password {
                None => Crypto::gen_keypair(),
                Some(ref password) => Crypto::keypair_from_password(password),
            };
            tr_info!("Created the following key pair");
            tr_println!("public: {}", to_hex(&public[..]));
            tr_println!("secret: {}", to_hex(&secret[..]));
            if let Some(file) = file {
                checked!(
                    Crypto::save_keypair_to_file(&public, &secret, file),
                    "save key pair",
                    ErrorCode::SaveKey
                );
            }
        }
        Arguments::AddKey {
            repo_path,
            set_default,
            password,
            file
        } => {
            let mut repo = try!(open_repository(&repo_path, false));
            let (public, secret) = if let Some(file) = file {
                checked!(
                    Crypto::load_keypair_from_file(file),
                    "load key pair",
                    ErrorCode::LoadKey
                )
            } else {
                tr_info!("Created the following key pair");
                let (public, secret) = match password {
                    None => Crypto::gen_keypair(),
                    Some(ref password) => Crypto::keypair_from_password(password),
                };
                tr_println!("public: {}", to_hex(&public[..]));
                tr_println!("secret: {}", to_hex(&secret[..]));
                (public, secret)
            };
            checked!(
                repo.register_key(public, secret),
                "add key pair",
                ErrorCode::AddKey
            );
            if set_default {
                repo.set_encryption(Some(&public));
                checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
                tr_warn!(
                    "Please store this key pair in a secure location before using the repository"
                );
            }
        }
        Arguments::AlgoTest {
            bundle_size,
            chunker,
            compression,
            encrypt,
            hash,
            file
        } => {
            algotest::run(&file, bundle_size, chunker, compression, encrypt, hash);
        }
    }
    Ok(())
}