Same device options

Dennis Schwerdel 2017-03-24 09:26:55 +01:00
parent f53b9a0923
commit fd6b9119f6
7 changed files with 208 additions and 180 deletions

View File

@@ -19,7 +19,8 @@ pub enum Arguments {
backup_name: String,
src_path: String,
full: bool,
reference: Option<String>
reference: Option<String>,
same_device: bool
},
Restore {
repo_path: String,
@@ -194,6 +195,7 @@ pub fn parse() -> Arguments {
(about: "creates a new backup")
(@arg full: --full "create a full backup")
(@arg reference: --ref +takes_value "the reference backup to use for partial backup")
(@arg same_device: --xdev -x "do not cross filesystem boundaries")
(@arg SRC: +required "source path to backup")
(@arg BACKUP: +required "repository::backup path")
)
@@ -305,6 +307,7 @@ pub fn parse() -> Arguments {
repo_path: repository.to_string(),
backup_name: backup.unwrap().to_string(),
full: args.is_present("full"),
same_device: args.is_present("same_device"),
src_path: args.value_of("SRC").unwrap().to_string(),
reference: args.value_of("reference").map(|v| v.to_string())
}
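For orientation, here is a minimal, self-contained sketch of the clap 2.x pattern used above: a boolean flag declared in the clap_app! macro and read back with is_present(). The app layout below is hypothetical and not part of this commit.
    #[macro_use]
    extern crate clap;

    fn main() {
        // Hypothetical app mirroring the --xdev/-x flag added above.
        let matches = clap_app!(example =>
            (@subcommand backup =>
                (about: "creates a new backup")
                (@arg same_device: --xdev -x "do not cross filesystem boundaries")
                (@arg SRC: +required "source path to backup")
            )
        ).get_matches();
        if let Some(args) = matches.subcommand_matches("backup") {
            // A flag is either absent or present, so is_present() maps directly to a bool field.
            let same_device = args.is_present("same_device");
            println!("same_device = {}", same_device);
        }
    }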

View File

@@ -174,7 +174,7 @@ pub fn run() {
}
print_config(&repo.config);
},
Arguments::Backup{repo_path, backup_name, src_path, full, reference} => {
Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device} => {
let mut repo = open_repository(&repo_path);
let mut reference_backup = None;
if !full {
@@ -188,7 +188,10 @@ pub fn run() {
info!("No reference backup found, doing a full scan instead");
}
}
let backup = match repo.create_backup_recursively(&src_path, reference_backup.as_ref()) {
let options = BackupOptions {
same_device: same_device
};
let backup = match repo.create_backup_recursively(&src_path, reference_backup.as_ref(), &options) {
Ok(backup) => backup,
Err(RepositoryError::Backup(BackupError::FailedPaths(backup, _failed_paths))) => {
warn!("Some files are missing from the backup");

View File

@@ -1,7 +1,7 @@
pub use ::util::*;
pub use ::bundledb::{BundleReader, BundleMode, BundleWriter, BundleInfo, BundleId, BundleDbError, BundleDb, BundleWriterError};
pub use ::chunker::{ChunkerType, Chunker, ChunkerStatus, IChunker, ChunkerError};
pub use ::repository::{Repository, Backup, Config, RepositoryError, RepositoryInfo, Inode, FileType, RepositoryIntegrityError, BackupFileError, BackupError};
pub use ::repository::{Repository, Backup, Config, RepositoryError, RepositoryInfo, Inode, FileType, RepositoryIntegrityError, BackupFileError, BackupError, BackupOptions};
pub use ::index::{Index, Location, IndexError};
pub use serde::{Serialize, Deserialize};

View File

@@ -1,182 +1,13 @@
use ::prelude::*;
use std::io::{self, BufReader, BufWriter, Read, Write};
use std::fs::{self, File};
use std::fs;
use std::path::{self, Path, PathBuf};
use std::collections::{HashMap, BTreeMap, VecDeque};
use std::os::linux::fs::MetadataExt;
use chrono::prelude::*;
static HEADER_STRING: [u8; 7] = *b"zvault\x03";
static HEADER_VERSION: u8 = 1;
quick_error!{
#[derive(Debug)]
pub enum BackupFileError {
Read(err: io::Error, path: PathBuf) {
cause(err)
description("Failed to write backup")
display("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err)
}
Write(err: io::Error, path: PathBuf) {
cause(err)
description("Failed to read/write backup")
display("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err)
}
Decode(err: msgpack::DecodeError, path: PathBuf) {
cause(err)
context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf())
description("Failed to decode backup")
display("Backup file error: failed to decode backup of {:?}\n\tcaused by: {}", path, err)
}
Encode(err: msgpack::EncodeError, path: PathBuf) {
cause(err)
context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf())
description("Failed to encode backup")
display("Backup file error: failed to encode backup of {:?}\n\tcaused by: {}", path, err)
}
WrongHeader(path: PathBuf) {
description("Wrong header")
display("Backup file error: wrong header on backup {:?}", path)
}
UnsupportedVersion(path: PathBuf, version: u8) {
description("Wrong version")
display("Backup file error: unsupported version on backup {:?}: {}", path, version)
}
Decryption(err: EncryptionError, path: PathBuf) {
cause(err)
context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf())
description("Decryption failed")
display("Backup file error: decryption failed on backup {:?}\n\tcaused by: {}", path, err)
}
Encryption(err: EncryptionError) {
from()
cause(err)
description("Encryption failed")
display("Backup file error: encryption failed\n\tcaused by: {}", err)
}
PartialBackupsList(partial: HashMap<String, Backup>, failed: Vec<PathBuf>) {
description("Some backups could not be loaded")
display("Backup file error: some backups could not be loaded: {:?}", failed)
}
}
}
#[derive(Default, Debug, Clone)]
struct BackupHeader {
pub encryption: Option<Encryption>
}
serde_impl!(BackupHeader(u8) {
encryption: Option<Encryption> => 0
});
#[derive(Default, Debug, Clone)]
pub struct Backup {
pub root: ChunkList,
pub total_data_size: u64, // Sum of all raw sizes of all entities
pub changed_data_size: u64, // Sum of all raw sizes of all entities actively stored
pub deduplicated_data_size: u64, // Sum of all raw sizes of all new bundles
pub encoded_data_size: u64, // Sum of all encoded sizes of all new bundles
pub bundle_count: usize,
pub chunk_count: usize,
pub avg_chunk_size: f32,
pub date: i64,
pub duration: f32,
pub file_count: usize,
pub dir_count: usize,
pub host: String,
pub path: String,
pub config: Config,
}
serde_impl!(Backup(u8) {
root: Vec<Chunk> => 0,
total_data_size: u64 => 1,
changed_data_size: u64 => 2,
deduplicated_data_size: u64 => 3,
encoded_data_size: u64 => 4,
bundle_count: usize => 5,
chunk_count: usize => 6,
avg_chunk_size: f32 => 7,
date: i64 => 8,
duration: f32 => 9,
file_count: usize => 10,
dir_count: usize => 11,
host: String => 12,
path: String => 13,
config: Config => 14
});
impl Backup {
pub fn read_from<P: AsRef<Path>>(crypto: &Crypto, path: P) -> Result<Self, BackupFileError> {
let path = path.as_ref();
let mut file = BufReader::new(try!(File::open(path).map_err(|err| BackupFileError::Read(err, path.to_path_buf()))));
let mut header = [0u8; 8];
try!(file.read_exact(&mut header).map_err(|err| BackupFileError::Read(err, path.to_path_buf())));
if header[..HEADER_STRING.len()] != HEADER_STRING {
return Err(BackupFileError::WrongHeader(path.to_path_buf()))
}
let version = header[HEADER_STRING.len()];
if version != HEADER_VERSION {
return Err(BackupFileError::UnsupportedVersion(path.to_path_buf(), version))
}
let header: BackupHeader = try!(msgpack::decode_from_stream(&mut file).context(path));
let mut data = Vec::new();
try!(file.read_to_end(&mut data).map_err(|err| BackupFileError::Read(err, path.to_path_buf())));
if let Some(ref encryption) = header.encryption {
data = try!(crypto.decrypt(encryption, &data));
}
Ok(try!(msgpack::decode(&data).context(path)))
}
pub fn save_to<P: AsRef<Path>>(&self, crypto: &Crypto, encryption: Option<Encryption>, path: P) -> Result<(), BackupFileError> {
let path = path.as_ref();
let mut data = try!(msgpack::encode(self).context(path));
if let Some(ref encryption) = encryption {
data = try!(crypto.encrypt(encryption, &data));
}
let mut file = BufWriter::new(try!(File::create(path).map_err(|err| BackupFileError::Write(err, path.to_path_buf()))));
try!(file.write_all(&HEADER_STRING).map_err(|err| BackupFileError::Write(err, path.to_path_buf())));
try!(file.write_all(&[HEADER_VERSION]).map_err(|err| BackupFileError::Write(err, path.to_path_buf())));
let header = BackupHeader { encryption: encryption };
try!(msgpack::encode_to_stream(&header, &mut file).context(path));
try!(file.write_all(&data).map_err(|err| BackupFileError::Write(err, path.to_path_buf())));
Ok(())
}
pub fn get_all_from<P: AsRef<Path>>(crypto: &Crypto, path: P) -> Result<HashMap<String, Backup>, BackupFileError> {
let mut backups = HashMap::new();
let base_path = path.as_ref();
let mut paths = vec![path.as_ref().to_path_buf()];
let mut failed_paths = vec![];
while let Some(path) = paths.pop() {
for entry in try!(fs::read_dir(&path).map_err(|e| BackupFileError::Read(e, path.clone()))) {
let entry = try!(entry.map_err(|e| BackupFileError::Read(e, path.clone())));
let path = entry.path();
if path.is_dir() {
paths.push(path);
} else {
let relpath = path.strip_prefix(&base_path).unwrap();
let name = relpath.to_string_lossy().to_string();
if let Ok(backup) = Backup::read_from(crypto, &path) {
backups.insert(name, backup);
} else {
failed_paths.push(path.clone());
}
}
}
}
if failed_paths.is_empty() {
Ok(backups)
} else {
Err(BackupFileError::PartialBackupsList(backups, failed_paths))
}
}
}
quick_error!{
#[derive(Debug)]
#[allow(unknown_lints,large_enum_variant)]
@@ -193,6 +24,11 @@ quick_error!{
}
pub struct BackupOptions {
pub same_device: bool
}
impl Repository {
pub fn get_backups(&self) -> Result<HashMap<String, Backup>, RepositoryError> {
Ok(try!(Backup::get_all_from(&self.crypto.lock().unwrap(), &self.backups_path)))
@@ -314,6 +150,7 @@ impl Repository {
&mut self,
path: P,
reference: Option<&Inode>,
options: &BackupOptions,
backup: &mut Backup,
failed_paths: &mut Vec<PathBuf>
) -> Result<ChunkList, RepositoryError> {
@@ -331,15 +168,22 @@ impl Repository {
if inode.file_type == FileType::Directory {
backup.dir_count += 1;
let mut children = BTreeMap::new();
let parent_dev = try!(path.metadata()).st_dev();
for ch in try!(fs::read_dir(path)) {
let child = try!(ch);
if options.same_device {
let child_dev = try!(child.metadata()).st_dev();
if child_dev != parent_dev {
continue
}
}
let name = child.file_name().to_string_lossy().to_string();
let ref_child = reference.as_ref()
.and_then(|inode| inode.children.as_ref())
.and_then(|map| map.get(&name))
.and_then(|chunks| self.get_inode(chunks).ok());
let child_path = child.path();
let chunks = match self.create_backup_recurse(&child_path, ref_child.as_ref(), backup, failed_paths) {
let chunks = match self.create_backup_recurse(&child_path, ref_child.as_ref(), options, backup, failed_paths) {
Ok(chunks) => chunks,
Err(_) => {
warn!("Failed to backup {:?}", child_path);
@@ -357,7 +201,7 @@ impl Repository {
}
#[allow(dead_code)]
pub fn create_backup_recursively<P: AsRef<Path>>(&mut self, path: P, reference: Option<&Backup>) -> Result<Backup, RepositoryError> {
pub fn create_backup_recursively<P: AsRef<Path>>(&mut self, path: P, reference: Option<&Backup>, options: &BackupOptions) -> Result<Backup, RepositoryError> {
let _lock = try!(self.lock(false));
let reference_inode = reference.and_then(|b| self.get_inode(&b.root).ok());
let mut backup = Backup::default();
@@ -367,7 +211,7 @@ impl Repository {
let info_before = self.info();
let start = Local::now();
let mut failed_paths = vec![];
backup.root = try!(self.create_backup_recurse(path, reference_inode.as_ref(), &mut backup, &mut failed_paths));
backup.root = try!(self.create_backup_recurse(path, reference_inode.as_ref(), options, &mut backup, &mut failed_paths));
try!(self.flush());
let elapsed = Local::now().signed_duration_since(start);
backup.date = start.timestamp();
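The filesystem-boundary check introduced above works by comparing Linux device ids (st_dev) between a directory and each of its entries. The following is a standalone sketch of that technique, assuming a Linux target; the helper name is hypothetical and the code is not taken from this commit.
    use std::fs;
    use std::io;
    use std::os::linux::fs::MetadataExt;
    use std::path::{Path, PathBuf};

    // Collects the direct children of `dir` that live on the same filesystem as `dir` itself.
    fn list_same_device(dir: &Path) -> io::Result<Vec<PathBuf>> {
        // Device id of the starting directory.
        let parent_dev = try!(fs::metadata(dir)).st_dev();
        let mut entries = Vec::new();
        for entry in try!(fs::read_dir(dir)) {
            let entry = try!(entry);
            // A different device id means the entry is a mount point of another filesystem; skip it.
            if try!(entry.metadata()).st_dev() != parent_dev {
                continue;
            }
            entries.push(entry.path());
        }
        Ok(entries)
    }

    fn main() {
        for path in list_same_device(Path::new("/")).unwrap() {
            println!("{:?}", path);
        }
    }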

View File

@@ -0,0 +1,175 @@
use ::prelude::*;
use std::io::{self, BufReader, BufWriter, Read, Write};
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use std::collections::HashMap;
static HEADER_STRING: [u8; 7] = *b"zvault\x03";
static HEADER_VERSION: u8 = 1;
quick_error!{
#[derive(Debug)]
pub enum BackupFileError {
Read(err: io::Error, path: PathBuf) {
cause(err)
description("Failed to write backup")
display("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err)
}
Write(err: io::Error, path: PathBuf) {
cause(err)
description("Failed to read/write backup")
display("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err)
}
Decode(err: msgpack::DecodeError, path: PathBuf) {
cause(err)
context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf())
description("Failed to decode backup")
display("Backup file error: failed to decode backup of {:?}\n\tcaused by: {}", path, err)
}
Encode(err: msgpack::EncodeError, path: PathBuf) {
cause(err)
context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf())
description("Failed to encode backup")
display("Backup file error: failed to encode backup of {:?}\n\tcaused by: {}", path, err)
}
WrongHeader(path: PathBuf) {
description("Wrong header")
display("Backup file error: wrong header on backup {:?}", path)
}
UnsupportedVersion(path: PathBuf, version: u8) {
description("Wrong version")
display("Backup file error: unsupported version on backup {:?}: {}", path, version)
}
Decryption(err: EncryptionError, path: PathBuf) {
cause(err)
context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf())
description("Decryption failed")
display("Backup file error: decryption failed on backup {:?}\n\tcaused by: {}", path, err)
}
Encryption(err: EncryptionError) {
from()
cause(err)
description("Encryption failed")
display("Backup file error: encryption failed\n\tcaused by: {}", err)
}
PartialBackupsList(partial: HashMap<String, Backup>, failed: Vec<PathBuf>) {
description("Some backups could not be loaded")
display("Backup file error: some backups could not be loaded: {:?}", failed)
}
}
}
#[derive(Default, Debug, Clone)]
struct BackupHeader {
pub encryption: Option<Encryption>
}
serde_impl!(BackupHeader(u8) {
encryption: Option<Encryption> => 0
});
#[derive(Default, Debug, Clone)]
pub struct Backup {
pub root: ChunkList,
pub total_data_size: u64, // Sum of all raw sizes of all entities
pub changed_data_size: u64, // Sum of all raw sizes of all entities actively stored
pub deduplicated_data_size: u64, // Sum of all raw sizes of all new bundles
pub encoded_data_size: u64, // Sum of all encoded sizes of all new bundles
pub bundle_count: usize,
pub chunk_count: usize,
pub avg_chunk_size: f32,
pub date: i64,
pub duration: f32,
pub file_count: usize,
pub dir_count: usize,
pub host: String,
pub path: String,
pub config: Config,
}
serde_impl!(Backup(u8) {
root: Vec<Chunk> => 0,
total_data_size: u64 => 1,
changed_data_size: u64 => 2,
deduplicated_data_size: u64 => 3,
encoded_data_size: u64 => 4,
bundle_count: usize => 5,
chunk_count: usize => 6,
avg_chunk_size: f32 => 7,
date: i64 => 8,
duration: f32 => 9,
file_count: usize => 10,
dir_count: usize => 11,
host: String => 12,
path: String => 13,
config: Config => 14
});
impl Backup {
pub fn read_from<P: AsRef<Path>>(crypto: &Crypto, path: P) -> Result<Self, BackupFileError> {
let path = path.as_ref();
let mut file = BufReader::new(try!(File::open(path).map_err(|err| BackupFileError::Read(err, path.to_path_buf()))));
let mut header = [0u8; 8];
try!(file.read_exact(&mut header).map_err(|err| BackupFileError::Read(err, path.to_path_buf())));
if header[..HEADER_STRING.len()] != HEADER_STRING {
return Err(BackupFileError::WrongHeader(path.to_path_buf()))
}
let version = header[HEADER_STRING.len()];
if version != HEADER_VERSION {
return Err(BackupFileError::UnsupportedVersion(path.to_path_buf(), version))
}
let header: BackupHeader = try!(msgpack::decode_from_stream(&mut file).context(path));
let mut data = Vec::new();
try!(file.read_to_end(&mut data).map_err(|err| BackupFileError::Read(err, path.to_path_buf())));
if let Some(ref encryption) = header.encryption {
data = try!(crypto.decrypt(encryption, &data));
}
Ok(try!(msgpack::decode(&data).context(path)))
}
pub fn save_to<P: AsRef<Path>>(&self, crypto: &Crypto, encryption: Option<Encryption>, path: P) -> Result<(), BackupFileError> {
let path = path.as_ref();
let mut data = try!(msgpack::encode(self).context(path));
if let Some(ref encryption) = encryption {
data = try!(crypto.encrypt(encryption, &data));
}
let mut file = BufWriter::new(try!(File::create(path).map_err(|err| BackupFileError::Write(err, path.to_path_buf()))));
try!(file.write_all(&HEADER_STRING).map_err(|err| BackupFileError::Write(err, path.to_path_buf())));
try!(file.write_all(&[HEADER_VERSION]).map_err(|err| BackupFileError::Write(err, path.to_path_buf())));
let header = BackupHeader { encryption: encryption };
try!(msgpack::encode_to_stream(&header, &mut file).context(path));
try!(file.write_all(&data).map_err(|err| BackupFileError::Write(err, path.to_path_buf())));
Ok(())
}
pub fn get_all_from<P: AsRef<Path>>(crypto: &Crypto, path: P) -> Result<HashMap<String, Backup>, BackupFileError> {
let mut backups = HashMap::new();
let base_path = path.as_ref();
let mut paths = vec![path.as_ref().to_path_buf()];
let mut failed_paths = vec![];
while let Some(path) = paths.pop() {
for entry in try!(fs::read_dir(&path).map_err(|e| BackupFileError::Read(e, path.clone()))) {
let entry = try!(entry.map_err(|e| BackupFileError::Read(e, path.clone())));
let path = entry.path();
if path.is_dir() {
paths.push(path);
} else {
let relpath = path.strip_prefix(&base_path).unwrap();
let name = relpath.to_string_lossy().to_string();
if let Ok(backup) = Backup::read_from(crypto, &path) {
backups.insert(name, backup);
} else {
failed_paths.push(path.clone());
}
}
}
}
if failed_paths.is_empty() {
Ok(backups)
} else {
Err(BackupFileError::PartialBackupsList(backups, failed_paths))
}
}
}

View File

@@ -3,7 +3,8 @@ use ::prelude::*;
use std::io;
use std::path::PathBuf;
use super::backup::{BackupFileError, BackupError};
use super::backup_file::BackupFileError;
use super::backup::BackupError;
use super::bundle_map::BundleMapError;
use super::config::ConfigError;
use super::metadata::InodeError;

View File

@@ -7,6 +7,7 @@ mod metadata;
mod backup;
mod error;
mod vacuum;
mod backup_file;
use ::prelude::*;
@@ -20,7 +21,8 @@ use std::os::unix::fs::symlink;
pub use self::error::RepositoryError;
pub use self::config::Config;
pub use self::metadata::{Inode, FileType};
pub use self::backup::{Backup, BackupFileError, BackupError};
pub use self::backup::{BackupError, BackupOptions};
pub use self::backup_file::{Backup, BackupFileError};
pub use self::integrity::RepositoryIntegrityError;
pub use self::info::RepositoryInfo;
use self::bundle_map::BundleMap;