diff --git a/README.md b/README.md
index e61d592..3f2e8ce 100644
--- a/README.md
+++ b/README.md
@@ -98,6 +98,8 @@ Recommended: Brotli/2-7
 ## TODO
 
 ### Core functionality
+- Subcommand 'versions': find different versions of a file in different backups
+- Subcommand 'diff': find differences between two backups (add, mod, del)
 - Default excludes in repository
 - Fix vacuum inconsistencies (either index related, or bundle syncing related)
 - Recompress & combine bundles
diff --git a/excludes.default b/excludes.default
new file mode 100644
index 0000000..f32e0b4
--- /dev/null
+++ b/excludes.default
@@ -0,0 +1,17 @@
+# Mounted locations and pseudo filesystems
+/cdrom
+/dev
+lost+found
+/mnt
+/sys
+/proc
+/run
+/snap
+
+# Cache data that does not need to be backed up
+/home/*/.cache
+/var/cache
+/tmp
+
+# Avoid backing up zvault remote backups
+remote/bundles
diff --git a/src/cli/args.rs b/src/cli/args.rs
index 0a363f1..81377f4 100644
--- a/src/cli/args.rs
+++ b/src/cli/args.rs
@@ -22,7 +22,8 @@ pub enum Arguments {
         reference: Option<String>,
         same_device: bool,
         excludes: Vec<String>,
-        excludes_from: Option<String>
+        excludes_from: Option<String>,
+        no_default_excludes: bool
     },
     Restore {
         repo_path: String,
@@ -86,7 +87,7 @@ pub enum Arguments {
         remote_path: String,
         key_files: Vec<String>
     },
-    Configure {
+    Config {
         repo_path: String,
         bundle_size: Option<usize>,
         chunker: Option<ChunkerType>,
         compression: Option<Option<Compression>>,
@@ -237,6 +238,7 @@ pub fn parse() -> Arguments {
            (@arg same_device: --xdev -x "do not cross filesystem boundaries")
            (@arg exclude: --exclude -e ... +takes_value "exclude this path or file")
            (@arg excludes_from: --excludesfrom +takes_value "read the list of excludes from this file")
+            (@arg no_default_excludes: --nodefaultexcludes "do not load the default excludes file")
            (@arg SRC: +required "source path to backup")
            (@arg BACKUP: +required "repository::backup path")
        )
@@ -302,7 +304,7 @@ pub fn parse() -> Arguments {
            (about: "analyze the used and reclaimable space of bundles")
            (@arg REPO: +required "repository path")
        )
-        (@subcommand configure =>
+        (@subcommand config =>
            (about: "changes the configuration")
            (@arg REPO: +required "path of the repository")
            (@arg bundle_size: --bundlesize +takes_value "maximal bundle size in MiB [default: 25]")
@@ -354,7 +356,8 @@ pub fn parse() -> Arguments {
            excludes: args.values_of("exclude").map(|v| v.map(|k| k.to_string()).collect()).unwrap_or_else(|| vec![]),
            excludes_from: args.value_of("excludes_from").map(|v| v.to_string()),
            src_path: args.value_of("SRC").unwrap().to_string(),
-            reference: args.value_of("reference").map(|v| v.to_string())
+            reference: args.value_of("reference").map(|v| v.to_string()),
+            no_default_excludes: args.is_present("no_default_excludes")
        }
    }
    if let Some(args) = args.subcommand_matches("restore") {
@@ -455,9 +458,9 @@ pub fn parse() -> Arguments {
            key_files: args.values_of("key").map(|v| v.map(|k| k.to_string()).collect()).unwrap_or_else(|| vec![])
        }
    }
-    if let Some(args) = args.subcommand_matches("configure") {
+    if let Some(args) = args.subcommand_matches("config") {
        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), Some(false), Some(false));
-        return Arguments::Configure {
+        return Arguments::Config {
            bundle_size: args.value_of("bundle_size").map(|v| (parse_num(v, "Bundle size") * 1024 * 1024) as usize),
            chunker: args.value_of("chunker").map(|v| parse_chunker(v)),
            compression: args.value_of("compression").map(|v| parse_compression(v)),
diff --git a/src/cli/mod.rs b/src/cli/mod.rs
index 5de6229..ea007b3 100644
--- a/src/cli/mod.rs
+++ b/src/cli/mod.rs
@@ -218,7 +218,7 @@ pub fn run() {
            }
            print_config(&repo.config);
        },
-        Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device, mut excludes, excludes_from} => {
+        Arguments::Backup{repo_path, backup_name, src_path, full, reference, same_device, mut excludes, excludes_from, no_default_excludes} => {
            let mut repo = open_repository(&repo_path);
            let mut reference_backup = None;
            if !full {
@@ -236,23 +236,32 @@ pub fn run() {
                }
            }
            let reference_backup = reference_backup.map(|(_, backup)| backup);
+            if !no_default_excludes {
+                for line in BufReader::new(checked(File::open(&repo.excludes_path), "open default excludes file")).lines() {
+                    excludes.push(checked(line, "read default excludes file"));
+                }
+            }
            if let Some(excludes_from) = excludes_from {
                for line in BufReader::new(checked(File::open(excludes_from), "open excludes file")).lines() {
                    excludes.push(checked(line, "read excludes file"));
                }
            }
-            let excludes: Vec<String> = excludes.into_iter().map(|mut exclude| {
+            let mut excludes_parsed = Vec::with_capacity(excludes.len());
+            for mut exclude in excludes {
+                if exclude.starts_with('#') || exclude.is_empty() {
+                    continue
+                }
                exclude = regex::escape(&exclude).replace('?', ".").replace(r"\*\*", ".*").replace(r"\*", "[^/]*");
-                if exclude.starts_with('/') {
+                excludes_parsed.push(if exclude.starts_with('/') {
                    format!(r"^{}($|/)", exclude)
                } else {
                    format!(r"/{}($|/)", exclude)
-                }
-            }).collect();
-            let excludes = if excludes.is_empty() {
+                });
+            };
+            let excludes = if excludes_parsed.is_empty() {
                None
            } else {
-                Some(checked(RegexSet::new(excludes), "parse exclude patterns"))
+                Some(checked(RegexSet::new(excludes_parsed), "parse exclude patterns"))
            };
            let options = BackupOptions {
                same_device: same_device,
@@ -409,7 +418,7 @@ pub fn run() {
        Arguments::Import{repo_path, remote_path, key_files} => {
            checked(Repository::import(repo_path, remote_path, key_files), "import repository");
        },
-        Arguments::Configure{repo_path, bundle_size, chunker, compression, encryption, hash} => {
+        Arguments::Config{repo_path, bundle_size, chunker, compression, encryption, hash} => {
            let mut repo = open_repository(&repo_path);
            if let Some(bundle_size) = bundle_size {
                repo.config.bundle_size = bundle_size
diff --git a/src/repository/backup_file.rs b/src/repository/backup_file.rs
index 856e227..3cf2ff4 100644
--- a/src/repository/backup_file.rs
+++ b/src/repository/backup_file.rs
@@ -15,13 +15,13 @@ quick_error!{
    pub enum BackupFileError {
        Read(err: io::Error, path: PathBuf) {
            cause(err)
-            description("Failed to write backup")
-            display("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err)
+            description("Failed to read backup")
+            display("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err)
        }
        Write(err: io::Error, path: PathBuf) {
            cause(err)
-            description("Failed to read/write backup")
-            display("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err)
+            description("Failed to write backup")
+            display("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err)
        }
        Decode(err: msgpack::DecodeError, path: PathBuf) {
            cause(err)
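The exclude handling above converts each glob-style pattern from the excludes files into an anchored regular expression before building a single `RegexSet`. A minimal standalone sketch of that conversion (the helper name `glob_to_regex` is made up for illustration; the escape-and-substitute steps mirror the backup handler in `src/cli/mod.rs`):

```rust
use regex::RegexSet;

// Same steps as the backup handler: escape the raw pattern, then
// re-introduce the glob wildcards ("*" stays within one path component,
// "**" crosses components) and anchor patterns that start with "/".
fn glob_to_regex(pattern: &str) -> String {
    let escaped = regex::escape(pattern)
        .replace('?', ".")
        .replace(r"\*\*", ".*")
        .replace(r"\*", "[^/]*");
    if escaped.starts_with('/') {
        format!(r"^{}($|/)", escaped)
    } else {
        format!(r"/{}($|/)", escaped)
    }
}

fn main() {
    let patterns: Vec<String> = ["/home/*/.cache", "lost+found", "remote/bundles"]
        .iter()
        .map(|p| glob_to_regex(p))
        .collect();
    // "/home/*/.cache" becomes "^/home/[^/]*/\.cache($|/)" and so on.
    let set = RegexSet::new(&patterns).unwrap();
    assert!(set.is_match("/home/alice/.cache/thumbnails"));
    assert!(set.is_match("/mnt/backup/remote/bundles"));
    assert!(!set.is_match("/home/alice/.config"));
}
```

Patterns that begin with `/` only match from the start of the backed-up path, while bare names such as `lost+found` match at any directory boundary, which is why the default excludes file can mix both forms.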
diff --git a/src/repository/metadata.rs b/src/repository/metadata.rs
index 346938f..31615f8 100644
--- a/src/repository/metadata.rs
+++ b/src/repository/metadata.rs
@@ -155,9 +155,7 @@ serde_impl!(Inode(u8) {
 impl Inode {
     pub fn get_from<P: AsRef<Path>>(path: P) -> Result<Self, InodeError> {
         let path = path.as_ref();
-        let name = try!(path.file_name()
-            .ok_or_else(|| InodeError::UnsupportedFiletype(path.to_owned())))
-            .to_string_lossy().to_string();
+        let name = path.file_name().map(|s| s.to_string_lossy().to_string()).unwrap_or_else(|| "_".to_string());
         let meta = try!(fs::symlink_metadata(path).map_err(|e| InodeError::ReadMetadata(e, path.to_owned())));
         let mut inode = Inode::default();
         inode.name = name;
@@ -220,6 +218,7 @@ impl Inode {
     pub fn is_unchanged(&self, other: &Inode) -> bool {
         self.modify_time == other.modify_time
         && self.file_type == other.file_type
+        && self.size == other.size
     }
 
     #[inline]
diff --git a/src/repository/mod.rs b/src/repository/mod.rs
index f8a5b53..d6b55cf 100644
--- a/src/repository/mod.rs
+++ b/src/repository/mod.rs
@@ -14,9 +14,10 @@ use ::prelude::*;
 use std::mem;
 use std::cmp::max;
 use std::path::{PathBuf, Path};
-use std::fs;
+use std::fs::{self, File};
 use std::sync::{Arc, Mutex};
 use std::os::unix::fs::symlink;
+use std::io::Write;
 
 pub use self::error::RepositoryError;
 pub use self::config::Config;
@@ -28,9 +29,13 @@ pub use self::info::{RepositoryInfo, BundleAnalysis};
 
 use self::bundle_map::BundleMap;
 
+const DEFAULT_EXCLUDES: &'static [u8] = include_bytes!("../../excludes.default");
+
+
 pub struct Repository {
     path: PathBuf,
     backups_path: PathBuf,
+    pub excludes_path: PathBuf,
     pub config: Config,
     index: Index,
     crypto: Arc<Mutex<Crypto>>,
@@ -49,6 +54,8 @@ impl Repository {
     pub fn create<P: AsRef<Path>, R: AsRef<Path>>(path: P, config: Config, remote: R) -> Result<Repository, RepositoryError> {
         let path = path.as_ref().to_owned();
         try!(fs::create_dir(&path));
+        let mut excludes = try!(File::create(path.join("excludes")));
+        try!(excludes.write_all(DEFAULT_EXCLUDES));
         try!(fs::create_dir(path.join("keys")));
         let crypto = Arc::new(Mutex::new(try!(Crypto::open(path.join("keys")))));
         try!(symlink(remote, path.join("remote")));
@@ -66,6 +73,7 @@ impl Repository {
         try!(fs::create_dir_all(&path.join("remote/backups")));
         Ok(Repository {
             backups_path: path.join("remote/backups"),
+            excludes_path: path.join("excludes"),
             path: path,
             chunker: config.chunker.create(),
             config: config,
@@ -95,6 +103,7 @@ impl Repository {
         let bundle_map = try!(BundleMap::load(path.join("bundles.map")));
         let mut repo = Repository {
             backups_path: path.join("remote/backups"),
+            excludes_path: path.join("excludes"),
             path: path,
             chunker: config.chunker.create(),
             config: config,
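Two behavioral notes on the `metadata.rs` changes above: `Inode::get_from` now falls back to the name `"_"` when a path has no final component (for example a filesystem root), instead of failing with `UnsupportedFiletype`, and `is_unchanged` additionally compares sizes, so a file whose size changed is re-read even when its modification time and file type still match the reference backup. A small sketch of the name fallback, using only the standard library:

```rust
use std::path::Path;

fn main() {
    // A filesystem root has no final component, so file_name() is None.
    assert_eq!(Path::new("/").file_name(), None);

    // Same fallback as the patched Inode::get_from: use "_" as the inode
    // name instead of returning an error for the backup root.
    let name = Path::new("/")
        .file_name()
        .map(|s| s.to_string_lossy().to_string())
        .unwrap_or_else(|| "_".to_string());
    assert_eq!(name, "_");
}
```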