Encrypted backups

This commit is contained in:
Dennis Schwerdel 2017-03-21 10:52:48 +01:00
parent 657952d4d7
commit 142b4c0972
8 changed files with 148 additions and 26 deletions

View File

@@ -11,7 +11,7 @@ use quick_error::ResultExt;
use util::*; use util::*;
static HEADER_STRING: [u8; 7] = *b"zbundle"; static HEADER_STRING: [u8; 7] = *b"zvault\x01";
static HEADER_VERSION: u8 = 1; static HEADER_VERSION: u8 = 1;
/* /*
@@ -37,8 +37,8 @@ quick_error!{
Io(err: io::Error, path: PathBuf) { Io(err: io::Error, path: PathBuf) {
cause(err) cause(err)
context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf()) context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf())
description("Failed to read bundle") description("Failed to read/write bundle")
display("Failed to read bundle {:?}: {}", path, err) display("Failed to read/write bundle {:?}: {}", path, err)
} }
Decode(err: msgpack::DecodeError, path: PathBuf) { Decode(err: msgpack::DecodeError, path: PathBuf) {
cause(err) cause(err)
@@ -411,8 +411,7 @@ impl BundleWriter {
encoded_size: encoded_size, encoded_size: encoded_size,
chunk_info_size: chunk_data.len() chunk_info_size: chunk_data.len()
}; };
try!(msgpack::encode_to_stream(&header, &mut file) try!(msgpack::encode_to_stream(&header, &mut file).context(&path as &Path));
.map_err(|e| BundleError::Encode(e, path.clone())));
try!(file.write_all(&chunk_data).context(&path as &Path)); try!(file.write_all(&chunk_data).context(&path as &Path));
let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize; let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize;
try!(file.write_all(&self.data).context(&path as &Path)); try!(file.write_all(&self.data).context(&path as &Path));

View File

@@ -36,7 +36,7 @@ fn find_reference_backup(repo: &Repository, path: &str) -> Option<Backup> {
Ok(hostname) => hostname, Ok(hostname) => hostname,
Err(_) => return None Err(_) => return None
}; };
for (_, backup) in repo.list_backups().unwrap() { for (_name, backup) in repo.get_backups().unwrap().0 {
if backup.host == hostname && backup.path == path { if backup.host == hostname && backup.path == path {
matching.push(backup); matching.push(backup);
} }
@@ -155,8 +155,10 @@ pub fn run() {
} }
} }
} else { } else {
for (name, backup) in repo.list_backups().unwrap() { for (name, backup) in repo.get_backups().unwrap().0 {
println!("{:25} {:>32} {:5} files, {:4} dirs, {:>10}", name, Local.timestamp(backup.date, 0).to_rfc2822(), backup.file_count, backup.dir_count, to_file_size(backup.total_data_size)); println!("{:25} {:>32} {:5} files, {:4} dirs, {:>10}",
name, Local.timestamp(backup.date, 0).to_rfc2822(), backup.file_count,
backup.dir_count, to_file_size(backup.total_data_size));
} }
} }
}, },

View File

@@ -9,7 +9,7 @@ use std::os::unix::io::AsRawFd;
use mmap::{MemoryMap, MapOption, MapError}; use mmap::{MemoryMap, MapOption, MapError};
const MAGIC: [u8; 7] = *b"zcindex"; const MAGIC: [u8; 7] = *b"zvault\x02";
const VERSION: u8 = 1; const VERSION: u8 = 1;
pub const MAX_USAGE: f64 = 0.9; pub const MAX_USAGE: f64 = 0.9;
pub const MIN_USAGE: f64 = 0.25; pub const MIN_USAGE: f64 = 0.25;

View File

@@ -30,9 +30,9 @@ mod cli;
// TODO: - Load and compare remote bundles to bundle map // TODO: - Load and compare remote bundles to bundle map
// TODO: - Write backup files there as well // TODO: - Write backup files there as well
// TODO: - Avoid loading remote backups // TODO: - Avoid loading remote backups
// TODO: - Lock during vacuum
// TODO: Remove backup subtrees // TODO: Remove backup subtrees
// TODO: Recompress & combine bundles // TODO: Recompress & combine bundles
// TODO: Encrypt backup files too
// TODO: list --tree // TODO: list --tree
// TODO: Import repository from remote folder // TODO: Import repository from remote folder
// TODO: Continue on errors // TODO: Continue on errors

View File

@@ -3,13 +3,73 @@ use super::metadata::{FileType, Inode};
use ::util::*; use ::util::*;
use std::io::{self, BufReader, BufWriter, Read, Write};
use std::fs::{self, File}; use std::fs::{self, File};
use std::path::{self, Path}; use std::path::{self, Path, PathBuf};
use std::collections::{HashMap, VecDeque}; use std::collections::{HashMap, VecDeque};
use quick_error::ResultExt;
use chrono::prelude::*; use chrono::prelude::*;
static HEADER_STRING: [u8; 7] = *b"zvault\x03";
static HEADER_VERSION: u8 = 1;
quick_error!{
#[derive(Debug)]
pub enum BackupError {
Io(err: io::Error, path: PathBuf) {
cause(err)
context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf())
description("Failed to read/write backup")
display("Failed to read/write backup {:?}: {}", path, err)
}
Decode(err: msgpack::DecodeError, path: PathBuf) {
cause(err)
context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf())
description("Failed to decode backup")
display("Failed to decode backup of {:?}: {}", path, err)
}
Encode(err: msgpack::EncodeError, path: PathBuf) {
cause(err)
context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf())
description("Failed to encode backup")
display("Failed to encode backup of {:?}: {}", path, err)
}
WrongHeader(path: PathBuf) {
description("Wrong header")
display("Wrong header on backup {:?}", path)
}
WrongVersion(path: PathBuf, version: u8) {
description("Wrong version")
display("Wrong version on backup {:?}: {}", path, version)
}
Decryption(err: EncryptionError, path: PathBuf) {
cause(err)
context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf())
description("Decryption failed")
display("Decryption failed on backup {:?}: {}", path, err)
}
Encryption(err: EncryptionError) {
from()
cause(err)
description("Encryption failed")
display("Encryption failed: {}", err)
}
}
}
#[derive(Default, Debug, Clone)]
struct BackupHeader {
pub encryption: Option<Encryption>
}
serde_impl!(BackupHeader(u8) {
encryption: Option<Encryption> => 0
});
#[derive(Default, Debug, Clone)] #[derive(Default, Debug, Clone)]
pub struct Backup { pub struct Backup {
pub root: ChunkList, pub root: ChunkList,
@@ -44,13 +104,53 @@ serde_impl!(Backup(u8) {
path: String => 13 path: String => 13
}); });
impl Backup {
pub fn read_from<P: AsRef<Path>>(crypto: &Crypto, path: P) -> Result<Self, BackupError> {
let path = path.as_ref();
let mut file = BufReader::new(try!(File::open(path).context(path)));
let mut header = [0u8; 8];
try!(file.read_exact(&mut header).context(&path as &Path));
if header[..HEADER_STRING.len()] != HEADER_STRING {
return Err(BackupError::WrongHeader(path.to_path_buf()))
}
let version = header[HEADER_STRING.len()];
if version != HEADER_VERSION {
return Err(BackupError::WrongVersion(path.to_path_buf(), version))
}
let header: BackupHeader = try!(msgpack::decode_from_stream(&mut file).context(path));
let mut data = Vec::new();
try!(file.read_to_end(&mut data).context(path));
if let Some(ref encryption) = header.encryption {
data = try!(crypto.decrypt(encryption, &data));
}
Ok(try!(msgpack::decode(&data).context(path)))
}
pub fn save_to<P: AsRef<Path>>(&self, crypto: &Crypto, encryption: Option<Encryption>, path: P) -> Result<(), BackupError> {
let path = path.as_ref();
let mut data = try!(msgpack::encode(self).context(path));
if let Some(ref encryption) = encryption {
data = try!(crypto.encrypt(encryption, &data));
}
let mut file = BufWriter::new(try!(File::create(path).context(path)));
try!(file.write_all(&HEADER_STRING).context(path));
try!(file.write_all(&[HEADER_VERSION]).context(path));
let header = BackupHeader { encryption: encryption };
try!(msgpack::encode_to_stream(&header, &mut file).context(path));
try!(file.write_all(&data).context(path));
Ok(())
}
}
impl Repository { impl Repository {
pub fn list_backups(&self) -> Result<HashMap<String, Backup>, RepositoryError> { pub fn get_backups(&self) -> Result<(HashMap<String, Backup>, bool), RepositoryError> {
let mut backups = HashMap::new(); let mut backups = HashMap::new();
let mut paths = Vec::new(); let mut paths = Vec::new();
let base_path = self.path.join("backups"); let base_path = self.path.join("backups");
paths.push(base_path.clone()); paths.push(base_path.clone());
let mut some_failed = false;
while let Some(path) = paths.pop() { while let Some(path) = paths.pop() {
for entry in try!(fs::read_dir(path)) { for entry in try!(fs::read_dir(path)) {
let entry = try!(entry); let entry = try!(entry);
@@ -60,24 +160,28 @@ impl Repository {
} else { } else {
let relpath = path.strip_prefix(&base_path).unwrap(); let relpath = path.strip_prefix(&base_path).unwrap();
let name = relpath.to_string_lossy().to_string(); let name = relpath.to_string_lossy().to_string();
let backup = try!(self.get_backup(&name)); if let Ok(backup) = self.get_backup(&name) {
backups.insert(name, backup); backups.insert(name, backup);
} else {
some_failed = true;
}
} }
} }
} }
Ok(backups) if some_failed {
warn!("Some backups could not be read");
}
Ok((backups, some_failed))
} }
pub fn get_backup(&self, name: &str) -> Result<Backup, RepositoryError> { pub fn get_backup(&self, name: &str) -> Result<Backup, RepositoryError> {
let mut file = try!(File::open(self.path.join("backups").join(name))); Ok(try!(Backup::read_from(&self.crypto.lock().unwrap(), self.path.join("backups").join(name))))
Ok(try!(msgpack::decode_from_stream(&mut file)))
} }
pub fn save_backup(&mut self, backup: &Backup, name: &str) -> Result<(), RepositoryError> { pub fn save_backup(&mut self, backup: &Backup, name: &str) -> Result<(), RepositoryError> {
let path = self.path.join("backups").join(name); let path = self.path.join("backups").join(name);
try!(fs::create_dir_all(path.parent().unwrap())); try!(fs::create_dir_all(path.parent().unwrap()));
let mut file = try!(File::create(path)); Ok(try!(backup.save_to(&self.crypto.lock().unwrap(), self.config.encryption.clone(), path)))
Ok(try!(msgpack::encode_to_stream(backup, &mut file)))
} }
pub fn delete_backup(&self, name: &str) -> Result<(), RepositoryError> { pub fn delete_backup(&self, name: &str) -> Result<(), RepositoryError> {
@@ -95,7 +199,11 @@ impl Repository {
pub fn prune_backups(&self, prefix: &str, daily: Option<usize>, weekly: Option<usize>, monthly: Option<usize>, yearly: Option<usize>, force: bool) -> Result<(), RepositoryError> { pub fn prune_backups(&self, prefix: &str, daily: Option<usize>, weekly: Option<usize>, monthly: Option<usize>, yearly: Option<usize>, force: bool) -> Result<(), RepositoryError> {
let mut backups = Vec::new(); let mut backups = Vec::new();
for (name, backup) in try!(self.list_backups()) { let (backup_map, some_failed) = try!(self.get_backups());
if some_failed {
info!("Ignoring backups that can not be read");
}
for (name, backup) in backup_map {
if name.starts_with(prefix) { if name.starts_with(prefix) {
let date = Local.timestamp(backup.date, 0); let date = Local.timestamp(backup.date, 0);
backups.push((name, date, backup)); backups.push((name, date, backup));

View File

@@ -1,7 +1,7 @@
use std::io; use std::io;
use std::path::PathBuf; use std::path::PathBuf;
use super::backup::Backup; use super::backup::{Backup, BackupError};
use super::bundle_map::BundleMapError; use super::bundle_map::BundleMapError;
use super::config::ConfigError; use super::config::ConfigError;
use super::integrity::RepositoryIntegrityError; use super::integrity::RepositoryIntegrityError;
@@ -44,6 +44,12 @@ quick_error!{
description("Bundle error") description("Bundle error")
display("Bundle error: {}", err) display("Bundle error: {}", err)
} }
Backup(err: BackupError) {
from()
cause(err)
description("Backup error")
display("Backup error: {}", err)
}
Chunker(err: ChunkerError) { Chunker(err: ChunkerError) {
from() from()
cause(err) cause(err)
@@ -82,5 +88,8 @@ quick_error!{
description("No such file in backup") description("No such file in backup")
display("The backup does not contain the file {:?}", path) display("The backup does not contain the file {:?}", path)
} }
UnsafeVacuum {
description("Not all backups can be read, refusing to run vacuum")
}
} }
} }

View File

@@ -90,7 +90,7 @@ impl Repository {
fn check_backups(&mut self) -> Result<(), RepositoryError> { fn check_backups(&mut self) -> Result<(), RepositoryError> {
let mut checked = Bitmap::new(self.index.capacity()); let mut checked = Bitmap::new(self.index.capacity());
for (_name, backup) in try!(self.list_backups()) { for (_name, backup) in try!(self.get_backups()).0 {
let mut todo = VecDeque::new(); let mut todo = VecDeque::new();
todo.push_back(backup.root); todo.push_back(backup.root);
while let Some(chunks) = todo.pop_front() { while let Some(chunks) = todo.pop_front() {

View File

@@ -37,7 +37,7 @@ impl Repository {
Ok(new) Ok(new)
} }
pub fn analyze_usage(&mut self) -> Result<HashMap<u32, BundleUsage>, RepositoryError> { pub fn analyze_usage(&mut self) -> Result<(HashMap<u32, BundleUsage>, bool), RepositoryError> {
let mut usage = HashMap::new(); let mut usage = HashMap::new();
for (id, bundle) in self.bundle_map.bundles() { for (id, bundle) in self.bundle_map.bundles() {
usage.insert(id, BundleUsage { usage.insert(id, BundleUsage {
@@ -48,7 +48,8 @@ impl Repository {
used_size: 0 used_size: 0
}); });
} }
for (_name, backup) in try!(self.list_backups()) { let (backups, some_failed) = try!(self.get_backups());
for (_name, backup) in backups {
let mut todo = VecDeque::new(); let mut todo = VecDeque::new();
todo.push_back(backup.root); todo.push_back(backup.root);
while let Some(chunks) = todo.pop_front() { while let Some(chunks) = todo.pop_front() {
@@ -78,7 +79,7 @@ impl Repository {
} }
} }
} }
Ok(usage) Ok((usage, some_failed))
} }
fn delete_bundle(&mut self, id: u32) -> Result<(), RepositoryError> { fn delete_bundle(&mut self, id: u32) -> Result<(), RepositoryError> {
@@ -93,7 +94,10 @@ impl Repository {
pub fn vacuum(&mut self, ratio: f32, force: bool) -> Result<(), RepositoryError> { pub fn vacuum(&mut self, ratio: f32, force: bool) -> Result<(), RepositoryError> {
try!(self.flush()); try!(self.flush());
info!("Analyzing chunk usage"); info!("Analyzing chunk usage");
let usage = try!(self.analyze_usage()); let (usage, some_failed) = try!(self.analyze_usage());
if some_failed {
return Err(RepositoryError::UnsafeVacuum);
}
let total = usage.values().map(|b| b.total_size).sum::<usize>(); let total = usage.values().map(|b| b.total_size).sum::<usize>();
let used = usage.values().map(|b| b.used_size).sum::<usize>(); let used = usage.values().map(|b| b.used_size).sum::<usize>();
info!("Usage: {} of {}, {:.1}%", to_file_size(used as u64), to_file_size(total as u64), used as f32/total as f32*100.0); info!("Usage: {} of {}, {:.1}%", to_file_size(used as u64), to_file_size(total as u64), used as f32/total as f32*100.0);