mirror of https://github.com/dswd/zvault

commit 7294e7edf6 (parent bf59a9ca39)

    Find versions of a file in different backups
```diff
@@ -98,9 +98,7 @@ Recommended: Brotli/2-7
 ## TODO
 
 ### Core functionality
-- Subcommand 'versions': find different versions of a file in different backups
 - Subcommand 'diff': find differences between two backups (add, mod, del)
-- Default excludes in repository
 - Fix vacuum inconsistencies (either index related, or bundle syncing related)
 - Recompress & combine bundles
 - Allow to use tar files for backup and restore (--tar, http://alexcrichton.com/tar-rs/tar/index.html)
```
```diff
@@ -72,6 +72,10 @@ pub enum Arguments {
         inode: Option<String>,
         mount_point: String
     },
+    Versions {
+        repo_path: String,
+        path: String
+    },
     Analyze {
         repo_path: String
     },
```
```diff
@@ -304,6 +308,11 @@ pub fn parse() -> Arguments {
             (about: "analyze the used and reclaimable space of bundles")
             (@arg REPO: +required "repository path")
         )
+        (@subcommand versions =>
+            (about: "display different versions of a file in all backups")
+            (@arg REPO: +required "repository path")
+            (@arg PATH: +required "the file path")
+        )
         (@subcommand config =>
             (about: "changes the configuration")
             (@arg REPO: +required "path of the repository")
```
```diff
@@ -444,6 +453,13 @@ pub fn parse() -> Arguments {
             mount_point: args.value_of("MOUNTPOINT").unwrap().to_string()
         }
     }
+    if let Some(args) = args.subcommand_matches("versions") {
+        let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), Some(false), Some(false));
+        return Arguments::Versions {
+            repo_path: repository.to_string(),
+            path: args.value_of("PATH").unwrap().to_string()
+        }
+    }
     if let Some(args) = args.subcommand_matches("analyze") {
         let (repository, _backup, _inode) = parse_repo_path(args.value_of("REPO").unwrap(), Some(false), Some(false));
         return Arguments::Analyze {
```
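With the enum variant, the clap definition, and this parser branch in place, the usage shape is `versions <REPO> <PATH>` (assuming the binary is named `zvault`). For example, a hypothetical `zvault versions /mnt/backups/repo home/user/notes.txt`, with illustrative paths, would print one line per distinct version of that file found across all backups in the repository.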
```diff
@@ -83,7 +83,7 @@ fn print_backup(backup: &Backup) {
 pub fn format_inode_one_line(inode: &Inode) -> String {
     match inode.file_type {
         FileType::Directory => format!("{:25}\t{} entries", format!("{}/", inode.name), inode.children.as_ref().unwrap().len()),
-        FileType::File => format!("{:25}\t{}", inode.name, to_file_size(inode.size)),
+        FileType::File => format!("{:25}\t{:>10}\t{}", inode.name, to_file_size(inode.size), Local.timestamp(inode.modify_time, 0).to_rfc2822()),
         FileType::Symlink => format!("{:25}\t -> {}", inode.name, inode.symlink_target.as_ref().unwrap()),
     }
 }
```
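The file branch now appends a right-aligned size column and the modification time, rendered with chrono's `Local.timestamp(secs, nsecs).to_rfc2822()`, the same call the backup listing in the next hunk already uses for backup dates. A minimal sketch of that conversion, assuming `chrono` as a dependency:

```rust
extern crate chrono;

use chrono::{Local, TimeZone};

fn main() {
    let modify_time: i64 = 1_490_000_000; // illustrative unix timestamp
    // Interpret the seconds value in the local time zone and render it as an
    // RFC 2822 string, e.g. "Mon, 20 Mar 2017 09:33:20 +0000" if the local
    // zone is UTC.
    println!("{}", Local.timestamp(modify_time, 0).to_rfc2822());
}
```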
```diff
@@ -110,7 +110,7 @@ fn print_inode(inode: &Inode) {
 
 fn print_backups(backup_map: &HashMap<String, Backup>) {
     for (name, backup) in backup_map {
-        println!("{:25} {:>32} {:5} files, {:4} dirs, {:>10}",
+        println!("{:40} {:>32} {:5} files, {:4} dirs, {:>10}",
             name, Local.timestamp(backup.date, 0).to_rfc2822(), backup.file_count,
             backup.dir_count, to_file_size(backup.total_data_size));
     }
```
```diff
@@ -418,6 +418,13 @@ pub fn run() {
         Arguments::Import{repo_path, remote_path, key_files} => {
             checked(Repository::import(repo_path, remote_path, key_files), "import repository");
         },
+        Arguments::Versions{repo_path, path} => {
+            let mut repo = open_repository(&repo_path);
+            for (name, mut inode) in checked(repo.find_versions(&path), "find versions") {
+                inode.name = format!("{}::{}", name, &path);
+                println!("{}", format_inode_one_line(&inode));
+            }
+        },
         Arguments::Config{repo_path, bundle_size, chunker, compression, encryption, hash} => {
             let mut repo = open_repository(&repo_path);
             if let Some(bundle_size) = bundle_size {
```
```diff
@@ -277,4 +277,22 @@ impl Repository {
     pub fn get_backup_inode<P: AsRef<Path>>(&mut self, backup: &Backup, path: P) -> Result<Inode, RepositoryError> {
         self.get_backup_path(backup, path).map(|mut inodes| inodes.pop().unwrap())
     }
+
+    #[inline]
+    pub fn find_versions<P: AsRef<Path>>(&mut self, path: P) -> Result<Vec<(String, Inode)>, RepositoryError> {
+        let path = path.as_ref();
+        let mut versions = HashMap::new();
+        for (name, backup) in try!(self.get_backups()) {
+            match self.get_backup_inode(&backup, path) {
+                Ok(inode) => {
+                    versions.insert((inode.file_type, inode.modify_time, inode.size), (name, inode));
+                },
+                Err(RepositoryError::NoSuchFileInBackup(..)) => continue,
+                Err(err) => return Err(err)
+            }
+        }
+        let mut versions: Vec<_> = versions.into_iter().map(|(_, v)| v).collect();
+        versions.sort_by_key(|v| v.1.modify_time);
+        Ok(versions)
+    }
 }
```
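The `HashMap` keyed on `(file_type, modify_time, size)` is what makes the listing show *versions* rather than *occurrences*: a file that is unchanged between two backups produces the same key twice, so only one entry survives. A standalone sketch of the same dedup-then-sort pattern, with the inode reduced to a plain `(modify_time, size)` tuple for brevity:

```rust
use std::collections::HashMap;

fn main() {
    // (backup name, (modify_time, size)); the second entry duplicates the
    // first, standing in for a file that was unchanged between two backups.
    let hits = vec![
        ("backup-1", (100i64, 10u64)),
        ("backup-2", (100, 10)),
        ("backup-3", (200, 12)),
    ];
    let mut versions = HashMap::new();
    for (name, meta) in hits {
        // A later hit with an identical key overwrites the earlier one, so
        // each distinct version is kept exactly once.
        versions.insert(meta, (name, meta));
    }
    // Flatten to a Vec and sort chronologically, as find_versions does.
    let mut versions: Vec<_> = versions.into_iter().map(|(_, v)| v).collect();
    versions.sort_by_key(|v| (v.1).0);
    for (name, (mtime, size)) in versions {
        println!("{}\tmtime={}\tsize={}", name, mtime, size);
    }
}
```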
```diff
@@ -68,7 +68,7 @@ quick_error!{
 }
 
 
-#[derive(Debug, Eq, PartialEq, Clone, Copy)]
+#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
 pub enum FileType {
     File,
     Directory,
```
```diff
@@ -90,7 +90,7 @@ impl fmt::Display for FileType {
 }
 
 
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub enum FileContents {
     Inline(msgpack::Bytes),
     ChunkedDirect(ChunkList),
```
```diff
@@ -103,7 +103,7 @@ serde_impl!(FileContents(u8) {
 });
 
 
-#[derive(Debug)]
+#[derive(Debug, Hash, Eq, PartialEq)]
 pub struct Inode {
     pub name: String,
     pub size: u64,
```
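These derives are what the new `find_versions` relies on: a tuple can only serve as a `HashMap` key if every component is `Hash + Eq` (hence `Hash` on `FileType`), and `#[derive(Hash)]` on `Inode` in turn requires all of its fields to implement `Hash`, which is why `FileContents` gains it as well. A minimal sketch of the key requirement, using a simplified stand-in enum:

```rust
use std::collections::HashMap;

// Mirrors the derives added to FileType above; without Hash + Eq the tuple
// below could not be used as a HashMap key.
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
enum Kind { File, Directory }

fn main() {
    let mut versions = HashMap::new();
    versions.insert((Kind::File, 100i64, 10u64), "backup-1");
    versions.insert((Kind::Directory, 100, 0), "backup-2");
    assert_eq!(versions[&(Kind::File, 100, 10)], "backup-1");
}
```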