Some fixes

Dennis Schwerdel 2017-03-26 20:33:32 +02:00 committed by Dennis Schwerdel
parent a94702991d
commit bf59a9ca39
7 changed files with 61 additions and 22 deletions

@@ -98,6 +98,8 @@ Recommended: Brotli/2-7
 ## TODO
 ### Core functionality
+- Subcommand 'versions': find different versions of a file in different backups
+- Subcommand 'diff': find differences between two backups (add, mod, del)
 - Default excludes in repository
 - Fix vacuum inconsistencies (either index related, or bundle syncing related)
 - Recompress & combine bundles

excludes.default (new file, 17 lines)

@@ -0,0 +1,17 @@
+# Mounted locations and pseudo filesystems
+/cdrom
+/dev
+lost+found
+/mnt
+/sys
+/proc
+/run
+/snap
+# Cache data that does not need to be backed up
+/home/*/.cache
+/var/cache
+/tmp
+# Avoid backing up zvault remote backups
+remote/bundles
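
(How these entries are interpreted is visible in the backup changes further down: lines starting with '#' and empty lines are skipped, a leading '/' anchors a pattern at the start of the path, and '*' only matches within a single path component; see the translation sketch after that diff.)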

@@ -22,7 +22,8 @@ pub enum Arguments {
 reference: Option<String>,
 same_device: bool,
 excludes: Vec<String>,
-excludes_from: Option<String>
+excludes_from: Option<String>,
+no_default_excludes: bool
 },
 Restore {
 repo_path: String,
@@ -86,7 +87,7 @@ pub enum Arguments {
 remote_path: String,
 key_files: Vec<String>
 },
-Configure {
+Config {
 repo_path: String,
 bundle_size: Option<usize>,
 chunker: Option<ChunkerType>,
@@ -237,6 +238,7 @@ pub fn parse() -> Arguments {
 (@arg same_device: --xdev -x "do not cross filesystem boundaries")
 (@arg exclude: --exclude -e ... +takes_value "exclude this path or file")
 (@arg excludes_from: --excludesfrom +takes_value "read the list of exludes from this file")
+(@arg no_default_excludes: --nodefaultexcludes "do not load the default excludes file")
 (@arg SRC: +required "source path to backup")
 (@arg BACKUP: +required "repository::backup path")
 )
@@ -302,7 +304,7 @@ pub fn parse() -> Arguments {
 (about: "analyze the used and reclaimable space of bundles")
 (@arg REPO: +required "repository path")
 )
-(@subcommand configure =>
+(@subcommand config =>
 (about: "changes the configuration")
 (@arg REPO: +required "path of the repository")
 (@arg bundle_size: --bundlesize +takes_value "maximal bundle size in MiB [default: 25]")
@@ -354,7 +356,8 @@ pub fn parse() -> Arguments {
 excludes: args.values_of("exclude").map(|v| v.map(|k| k.to_string()).collect()).unwrap_or_else(|| vec![]),
 excludes_from: args.value_of("excludes_from").map(|v| v.to_string()),
 src_path: args.value_of("SRC").unwrap().to_string(),
-reference: args.value_of("reference").map(|v| v.to_string())
+reference: args.value_of("reference").map(|v| v.to_string()),
+no_default_excludes: args.is_present("no_default_excludes")
 }
 }
 if let Some(args) = args.subcommand_matches("restore") {
@@ -455,9 +458,9 @@ pub fn parse() -> Arguments {
 key_files: args.values_of("key").map(|v| v.map(|k| k.to_string()).collect()).unwrap_or_else(|| vec![])
 }
 }
-if let Some(args) = args.subcommand_matches("configure") {
+if let Some(args) = args.subcommand_matches("config") {
 let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), Some(false), Some(false));
-return Arguments::Configure {
+return Arguments::Config {
 bundle_size: args.value_of("bundle_size").map(|v| (parse_num(v, "Bundle size") * 1024 * 1024) as usize),
 chunker: args.value_of("chunker").map(|v| parse_chunker(v)),
 compression: args.value_of("compression").map(|v| parse_compression(v)),
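
For illustration only (the binary and subcommand names and the example paths are assumptions, not part of this patch; the flag spellings follow the clap definitions above), the new flag and the renamed subcommand would be invoked roughly like:

zvault backup --nodefaultexcludes -e /var/tmp /home ./repo::home1
zvault config ./repo --bundlesize 25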

@@ -218,7 +218,7 @@ pub fn run() {
 }
 print_config(&repo.config);
 },
-Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device, mut excludes, excludes_from} => {
+Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device, mut excludes, excludes_from, no_default_excludes} => {
 let mut repo = open_repository(&repo_path);
 let mut reference_backup = None;
 if !full {
@@ -236,23 +236,32 @@ pub fn run() {
 }
 }
 let reference_backup = reference_backup.map(|(_, backup)| backup);
+if !no_default_excludes {
+for line in BufReader::new(checked(File::open(&repo.excludes_path), "open default excludes file")).lines() {
+excludes.push(checked(line, "read default excludes file"));
+}
+}
 if let Some(excludes_from) = excludes_from {
 for line in BufReader::new(checked(File::open(excludes_from), "open excludes file")).lines() {
 excludes.push(checked(line, "read excludes file"));
 }
 }
-let excludes: Vec<String> = excludes.into_iter().map(|mut exclude| {
+let mut excludes_parsed = Vec::with_capacity(excludes.len());
+for mut exclude in excludes {
+if exclude.starts_with('#') || exclude.is_empty() {
+continue
+}
 exclude = regex::escape(&exclude).replace('?', ".").replace(r"\*\*", ".*").replace(r"\*", "[^/]*");
-if exclude.starts_with('/') {
+excludes_parsed.push(if exclude.starts_with('/') {
 format!(r"^{}($|/)", exclude)
 } else {
 format!(r"/{}($|/)", exclude)
-}
-}).collect();
-let excludes = if excludes.is_empty() {
+});
+};
+let excludes = if excludes_parsed.is_empty() {
 None
 } else {
-Some(checked(RegexSet::new(excludes), "parse exclude patterns"))
+Some(checked(RegexSet::new(excludes_parsed), "parse exclude patterns"))
 };
 let options = BackupOptions {
 same_device: same_device,
@@ -409,7 +418,7 @@ pub fn run() {
 Arguments::Import{repo_path, remote_path, key_files} => {
 checked(Repository::import(repo_path, remote_path, key_files), "import repository");
 },
-Arguments::Configure{repo_path, bundle_size, chunker, compression, encryption, hash} => {
+Arguments::Config{repo_path, bundle_size, chunker, compression, encryption, hash} => {
 let mut repo = open_repository(&repo_path);
 if let Some(bundle_size) = bundle_size {
 repo.config.bundle_size = bundle_size
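
The pattern handling in this hunk can be exercised in isolation. A minimal sketch of the same translation logic (assuming the regex crate; the function name and test paths are made up for illustration, this is not code from the patch):

use regex::RegexSet;

// Translate one exclude pattern as the backup code above does: escape regex
// metacharacters, then let `**` cross path components (.*) while `*` stays
// within one component ([^/]*); patterns starting with '/' are anchored at
// the start of the path, all others may match any path component.
fn exclude_to_regex(pattern: &str) -> String {
    let escaped = regex::escape(pattern)
        .replace('?', ".")
        .replace(r"\*\*", ".*")
        .replace(r"\*", "[^/]*");
    if escaped.starts_with('/') {
        format!(r"^{}($|/)", escaped)
    } else {
        format!(r"/{}($|/)", escaped)
    }
}

fn main() {
    // "/home/*/.cache" becomes "^/home/[^/]*/\.cache($|/)",
    // "lost+found" becomes "/lost\+found($|/)".
    let patterns: Vec<String> = ["/home/*/.cache", "lost+found"]
        .iter().map(|&p| exclude_to_regex(p)).collect();
    let set = RegexSet::new(&patterns).unwrap();
    assert!(set.is_match("/home/alice/.cache/chromium"));
    assert!(!set.is_match("/home/alice/documents"));
}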

@@ -15,13 +15,13 @@ quick_error!{
 pub enum BackupFileError {
 Read(err: io::Error, path: PathBuf) {
 cause(err)
-description("Failed to write backup")
-display("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err)
+description("Failed to read backup")
+display("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err)
 }
 Write(err: io::Error, path: PathBuf) {
 cause(err)
-description("Failed to read/write backup")
-display("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err)
+description("Failed to write backup")
+display("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err)
 }
 Decode(err: msgpack::DecodeError, path: PathBuf) {
 cause(err)

@@ -155,9 +155,7 @@ serde_impl!(Inode(u8) {
 impl Inode {
 pub fn get_from<P: AsRef<Path>>(path: P) -> Result<Self, InodeError> {
 let path = path.as_ref();
-let name = try!(path.file_name()
-.ok_or_else(|| InodeError::UnsupportedFiletype(path.to_owned())))
-.to_string_lossy().to_string();
+let name = path.file_name().map(|s| s.to_string_lossy().to_string()).unwrap_or_else(|| "_".to_string());
 let meta = try!(fs::symlink_metadata(path).map_err(|e| InodeError::ReadMetadata(e, path.to_owned())));
 let mut inode = Inode::default();
 inode.name = name;
@@ -220,6 +218,7 @@ impl Inode {
 pub fn is_unchanged(&self, other: &Inode) -> bool {
 self.modify_time == other.modify_time
 && self.file_type == other.file_type
+&& self.size == other.size
 }
 #[inline]
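
Two behavior changes are worth noting in this hunk: paths without a final component (such as the filesystem root) no longer fail with UnsupportedFiletype but get the placeholder name "_", and unchanged-file detection now also compares file size, not just modification time and file type. A tiny standalone check of the name fallback (illustrative only, not code from the patch):

use std::path::Path;

// Same fallback as above: the root path has no file_name(), so "_" is used.
fn inode_name(path: &Path) -> String {
    path.file_name()
        .map(|s| s.to_string_lossy().to_string())
        .unwrap_or_else(|| "_".to_string())
}

fn main() {
    assert_eq!(inode_name(Path::new("/")), "_");
    assert_eq!(inode_name(Path::new("/home/user")), "user");
}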

@@ -14,9 +14,10 @@ use ::prelude::*;
 use std::mem;
 use std::cmp::max;
 use std::path::{PathBuf, Path};
-use std::fs;
+use std::fs::{self, File};
 use std::sync::{Arc, Mutex};
 use std::os::unix::fs::symlink;
+use std::io::Write;
 pub use self::error::RepositoryError;
 pub use self::config::Config;
@@ -28,9 +29,13 @@ pub use self::info::{RepositoryInfo, BundleAnalysis};
 use self::bundle_map::BundleMap;
+const DEFAULT_EXCLUDES: &'static [u8] = include_bytes!("../../excludes.default");
 pub struct Repository {
 path: PathBuf,
 backups_path: PathBuf,
+pub excludes_path: PathBuf,
 pub config: Config,
 index: Index,
 crypto: Arc<Mutex<Crypto>>,
@@ -49,6 +54,8 @@ impl Repository {
 pub fn create<P: AsRef<Path>, R: AsRef<Path>>(path: P, config: Config, remote: R) -> Result<Self, RepositoryError> {
 let path = path.as_ref().to_owned();
 try!(fs::create_dir(&path));
+let mut excludes = try!(File::create(path.join("excludes")));
+try!(excludes.write_all(DEFAULT_EXCLUDES));
 try!(fs::create_dir(path.join("keys")));
 let crypto = Arc::new(Mutex::new(try!(Crypto::open(path.join("keys")))));
 try!(symlink(remote, path.join("remote")));
@@ -66,6 +73,7 @@ impl Repository {
 try!(fs::create_dir_all(&path.join("remote/backups")));
 Ok(Repository {
 backups_path: path.join("remote/backups"),
+excludes_path: path.join("excludes"),
 path: path,
 chunker: config.chunker.create(),
 config: config,
@@ -95,6 +103,7 @@ impl Repository {
 let bundle_map = try!(BundleMap::load(path.join("bundles.map")));
 let mut repo = Repository {
 backups_path: path.join("remote/backups"),
+excludes_path: path.join("excludes"),
 path: path,
 chunker: config.chunker.create(),
 config: config,
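
A side note on the embedding above: include_bytes! copies excludes.default into the compiled binary at build time, so Repository::create can write a repository-local excludes file without needing the source tree at run time, and open() only has to remember its path. A minimal sketch of that pattern (the helper name and paths are illustrative, not from the patch):

use std::fs::File;
use std::io::Write;
use std::path::Path;

// Embedded at compile time; the path is relative to this source file.
const DEFAULT_EXCLUDES: &[u8] = include_bytes!("../../excludes.default");

// Hypothetical helper: write the embedded defaults into a freshly created repository.
fn write_default_excludes(repo_dir: &Path) -> std::io::Result<()> {
    let mut file = File::create(repo_dir.join("excludes"))?;
    file.write_all(DEFAULT_EXCLUDES)
}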