Dennis Schwerdel 2017-03-15 21:53:05 +01:00 committed by Dennis Schwerdel
parent 7e66d806b5
commit 1a2ea29d24
10 changed files with 524 additions and 41 deletions

Comparison.txt Normal file

@@ -0,0 +1,259 @@
++ mkdir repos
++ target/release/zvault init --compression brotli/3 repos/zvault_brotli3
real 0m0.009s
user 0m0.008s
sys 0m0.000s
++ target/release/zvault init --compression brotli/6 repos/zvault_brotli6
real 0m0.010s
user 0m0.004s
sys 0m0.004s
++ target/release/zvault init --compression lzma2/2 repos/zvault_lzma2
real 0m0.009s
user 0m0.004s
sys 0m0.004s
++ attic init repos/attic
Initializing repository at "repos/attic"
Encryption NOT enabled.
Use the "--encryption=passphrase|keyfile" to enable encryption.
Initializing cache...
real 0m0.136s
user 0m0.100s
sys 0m0.016s
++ borg init -e none repos/borg
real 0m0.253s
user 0m0.204s
sys 0m0.012s
++ borg init -e none repos/borg-zlib
real 0m0.243s
user 0m0.200s
sys 0m0.008s
++ zbackup init --non-encrypted repos/zbackup
real 0m0.003s
user 0m0.000s
sys 0m0.000s
++ cat
++ target/release/zvault put repos/zvault_brotli3::silesia1 test_data/silesia.tar
real 0m3.389s
user 0m3.172s
sys 0m0.220s
++ target/release/zvault put repos/zvault_brotli3::silesia2 test_data/silesia.tar
real 0m0.741s
user 0m0.708s
sys 0m0.032s
++ target/release/zvault put repos/zvault_brotli6::silesia1 test_data/silesia.tar
real 0m10.166s
user 0m9.880s
sys 0m0.284s
++ target/release/zvault put repos/zvault_brotli6::silesia2 test_data/silesia.tar
real 0m0.707s
user 0m0.660s
sys 0m0.044s
++ target/release/zvault put repos/zvault_lzma2::silesia1 test_data/silesia.tar
real 0m26.277s
user 0m25.988s
sys 0m0.288s
++ target/release/zvault put repos/zvault_lzma2::silesia2 test_data/silesia.tar
real 0m0.710s
user 0m0.656s
sys 0m0.052s
++ attic create repos/attic::silesia1 test_data/silesia.tar
real 0m9.304s
user 0m8.440s
sys 0m0.328s
++ attic create repos/attic::silesia2 test_data/silesia.tar
real 0m1.210s
user 0m1.128s
sys 0m0.040s
++ borg create -C none repos/borg::silesia1 test_data/silesia.tar
real 0m4.805s
user 0m2.240s
sys 0m1.156s
++ borg create -C none repos/borg::silesia2 test_data/silesia.tar
real 0m1.475s
user 0m1.260s
sys 0m0.164s
++ borg create -C zlib repos/borg-zlib::silesia1 test_data/silesia.tar
real 0m10.093s
user 0m9.184s
sys 0m0.428s
++ borg create -C zlib repos/borg-zlib::silesia2 test_data/silesia.tar
real 0m1.534s
user 0m1.280s
sys 0m0.192s
++ zbackup backup --non-encrypted repos/zbackup/backups/silesia1
Loading index...
Index loaded.
Using up to 4 thread(s) for compression
real 0m24.362s
user 1m32.292s
sys 0m0.776s
++ zbackup backup --non-encrypted repos/zbackup/backups/silesia2
Loading index...
Loading index file 90cd1b771e3c068c6617b0089f4fe5fde4f654cb0473b7d7...
Index loaded.
Using up to 4 thread(s) for compression
real 0m1.281s
user 0m1.248s
sys 0m0.032s
++ du -h test_data/silesia.tar
203M test_data/silesia.tar
++ du -sh repos/zbackup/bundles repos/zvault_brotli3/bundles repos/zvault_brotli6/bundles repos/zvault_lzma2/bundles repos/attic repos/borg repos/borg-zlib repos/zbackup
51M repos/zbackup/bundles
65M repos/zvault_brotli3/bundles
58M repos/zvault_brotli6/bundles
55M repos/zvault_lzma2/bundles
68M repos/attic
203M repos/borg
66M repos/borg-zlib
164K repos/zbackup
++ rm -rf repos
++ mkdir repos
++ target/release/zvault init --compression brotli/3 repos/zvault_brotli3
real 0m0.005s
user 0m0.000s
sys 0m0.004s
++ target/release/zvault init --compression brotli/6 repos/zvault_brotli6
real 0m0.005s
user 0m0.004s
sys 0m0.000s
++ target/release/zvault init --compression lzma2/2 repos/zvault_lzma2
real 0m0.005s
user 0m0.004s
sys 0m0.000s
++ attic init repos/attic
Initializing repository at "repos/attic"
Encryption NOT enabled.
Use the "--encryption=passphrase|keyfile" to enable encryption.
Initializing cache...
real 0m0.095s
user 0m0.060s
sys 0m0.020s
++ borg init -e none repos/borg
real 0m0.235s
user 0m0.184s
sys 0m0.016s
++ borg init -e none repos/borg-zlib
real 0m0.262s
user 0m0.200s
sys 0m0.016s
++ zbackup init --non-encrypted repos/zbackup
real 0m0.004s
user 0m0.000s
sys 0m0.000s
++ cat
++ target/release/zvault put repos/zvault_brotli3::ubuntu1 test_data/ubuntu.tar
real 0m3.008s
user 0m2.748s
sys 0m0.260s
++ target/release/zvault put repos/zvault_brotli3::ubuntu2 test_data/ubuntu.tar
real 0m0.621s
user 0m0.580s
sys 0m0.040s
++ target/release/zvault put repos/zvault_brotli6::ubuntu1 test_data/ubuntu.tar
real 0m10.916s
user 0m10.680s
sys 0m0.232s
++ target/release/zvault put repos/zvault_brotli6::ubuntu2 test_data/ubuntu.tar
real 0m0.619s
user 0m0.596s
sys 0m0.020s
++ target/release/zvault put repos/zvault_lzma2::ubuntu1 test_data/ubuntu.tar
real 0m37.039s
user 0m36.792s
sys 0m0.244s
++ target/release/zvault put repos/zvault_lzma2::ubuntu2 test_data/ubuntu.tar
real 0m0.640s
user 0m0.624s
sys 0m0.012s
++ attic create repos/attic::ubuntu1 test_data/ubuntu.tar
real 0m9.309s
user 0m8.316s
sys 0m0.368s
++ attic create repos/attic::ubuntu2 test_data/ubuntu.tar
real 0m1.093s
user 0m1.008s
sys 0m0.044s
++ borg create -C none repos/borg::ubuntu1 test_data/ubuntu.tar
real 0m4.317s
user 0m1.988s
sys 0m1.032s
++ borg create -C none repos/borg::ubuntu2 test_data/ubuntu.tar
real 0m1.402s
user 0m1.160s
sys 0m0.188s
++ borg create -C zlib repos/borg-zlib::ubuntu1 test_data/ubuntu.tar
real 0m10.049s
user 0m8.788s
sys 0m0.532s
++ borg create -C zlib repos/borg-zlib::ubuntu2 test_data/ubuntu.tar
real 0m1.291s
user 0m1.088s
sys 0m0.168s
++ zbackup backup --non-encrypted repos/zbackup/backups/ubuntu1
Loading index...
Index loaded.
Using up to 4 thread(s) for compression
real 0m17.972s
user 1m7.956s
sys 0m0.644s
++ zbackup backup --non-encrypted repos/zbackup/backups/ubuntu2
Loading index...
Loading index file 3fea916708827f3f5cdfc18fca024ef9cf00aa92a0c8d676...
Index loaded.
Using up to 4 thread(s) for compression
real 0m1.158s
user 0m1.132s
sys 0m0.024s
++ du -h test_data/ubuntu.tar
176M test_data/ubuntu.tar
++ du -sh repos/zbackup/bundles repos/zvault_brotli3/bundles repos/zvault_brotli6/bundles repos/zvault_lzma2/bundles repos/attic repos/borg repos/borg-zlib repos/zbackup
64M repos/zbackup/bundles
77M repos/zvault_brotli3/bundles
68M repos/zvault_brotli6/bundles
63M repos/zvault_lzma2/bundles
84M repos/attic
176M repos/borg
83M repos/borg-zlib
148K repos/zbackup
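For quick reference, the repository sizes above correspond roughly to these fractions of the uncompressed inputs (203M silesia.tar, 176M ubuntu.tar); derived from the du figures, so approximate:
silesia: zbackup 51M (~25%), zvault lzma2/2 55M (~27%), zvault brotli/6 58M (~29%), zvault brotli/3 65M (~32%), borg-zlib 66M (~33%), attic 68M (~33%), borg none 203M (~100%)
ubuntu: zvault lzma2/2 63M (~36%), zbackup 64M (~36%), zvault brotli/6 68M (~39%), zvault brotli/3 77M (~44%), borg-zlib 83M (~47%), attic 84M (~48%), borg none 176M (~100%)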

src/index.rs

@@ -483,6 +483,11 @@ impl Index {
self.entries
}
#[inline]
pub fn size(&self) -> usize {
self.mmap.len()
}
#[inline]
pub fn is_empty(&self) -> bool {
self.entries == 0

src/main.rs

@@ -11,56 +11,70 @@ extern crate docopt;
extern crate rustc_serialize;
mod errors;
mod util;
mod bundle;
mod index;
pub mod util;
pub mod bundle;
pub mod index;
mod chunker;
mod repository;
mod algotest;
use chunker::ChunkerType;
use repository::{Repository, Config, Mode, Inode};
use util::{ChecksumType, Compression, HashMethod, to_file_size};
use std::fs::File;
use std::io::Read;
use std::time;
use docopt::Docopt;
use chunker::ChunkerType;
use repository::{Repository, Config, Mode, Inode, Backup};
use util::{ChecksumType, Compression, HashMethod, to_file_size};
static USAGE: &'static str = "
Usage:
zvault init <repo>
zvault info <repo>
zvault bundles <repo>
zvault init [--bundle-size SIZE] [--chunker METHOD] [--chunk-size SIZE] [--compression COMPRESSION] <repo>
zvault backup [--full] <backup> <path>
zvault restore <backup> <path>
zvault check [--full] <repo>
zvault list <repo>
zvault info <backup>
zvault stats <repo>
zvault bundles <repo>
zvault algotest <path>
zvault test <repo> <path>
zvault stat <path>
zvault put <repo> <path>
zvault put <backup> <path>
Options:
--full Whether to verify the repository by loading all bundles
--bundle-size SIZE The target size of a full bundle in MiB [default: 25]
--chunker METHOD The chunking algorithm to use [default: fastcdc]
--chunk-size SIZE The target average chunk size in KiB [default: 8]
--compression COMPRESSION The compression to use [default: brotli/3]
--full Whether to verify the repository by loading all bundles
--bundle-size SIZE The target size of a full bundle in MiB [default: 25]
--chunker METHOD The chunking algorithm to use [default: fastcdc]
--chunk-size SIZE The target average chunk size in KiB [default: 8]
--compression COMPRESSION The compression to use [default: brotli/3]
";
#[derive(RustcDecodable, Debug)]
struct Args {
cmd_init: bool,
cmd_backup: bool,
cmd_restore: bool,
cmd_check: bool,
cmd_list: bool,
cmd_info: bool,
cmd_stats: bool,
cmd_bundles: bool,
cmd_algotest: bool,
cmd_test: bool,
cmd_stat: bool,
cmd_check: bool,
cmd_bundles: bool,
cmd_put: bool,
arg_repo: Option<String>,
arg_path: Option<String>,
arg_backup: Option<String>,
flag_full: bool,
flag_bundle_size: usize,
flag_chunker: String,
@@ -100,14 +114,23 @@ fn main() {
return
}
let mut repo = Repository::open(&args.arg_repo.unwrap()).unwrap();
let mut repo;
if let Some(path) = args.arg_repo {
repo = Repository::open(path).unwrap();
} else if let Some(ref backup) = args.arg_backup {
let path = backup.splitn(2, "::").nth(0).unwrap();
repo = Repository::open(path).unwrap();
} else {
panic!("Repository is needed");
}
if args.cmd_check {
repo.check(args.flag_full).unwrap();
return
}
if args.cmd_info {
if args.cmd_stats {
let info = repo.info();
println!("Bundles: {}", info.bundle_count);
println!("Total size: {}", to_file_size(info.encoded_data_size));
@@ -115,6 +138,15 @@ fn main() {
println!("Compression ratio: {:.1}", info.compression_ratio * 100.0);
println!("Chunk count: {}", info.chunk_count);
println!("Average chunk size: {}", to_file_size(info.avg_chunk_size as u64));
let index_usage = info.index_entries as f32 / info.index_capacity as f32;
println!("Index: {}, {}% full", to_file_size(info.index_size as u64), index_usage * 100.0);
return
}
if args.cmd_list {
for backup in repo.list_backups().unwrap() {
println!("{}", backup);
}
return
}
@@ -136,12 +168,6 @@ fn main() {
return
}
if args.cmd_put {
let chunks = repo.put_inode(&args.arg_path.unwrap()).unwrap();
println!("done. {} chunks, total size: {}", chunks.len(), to_file_size(chunks.iter().map(|&(_,s)| s).sum::<usize>() as u64));
return
}
if args.cmd_test {
print!("Integrity check before...");
repo.check(true).unwrap();
@@ -175,5 +201,29 @@ fn main() {
let read_speed = data.len() as f64 / duration;
assert_eq!(data.len(), data2.len());
println!(" done. {:.1} MB/s", read_speed / 1_000_000.0);
return
}
let backup_name = args.arg_backup.unwrap().splitn(2, "::").nth(1).unwrap().to_string();
if args.cmd_put {
let chunks = repo.put_inode(&args.arg_path.unwrap()).unwrap();
repo.save_backup(&Backup{root: chunks, ..Default::default()}, &backup_name).unwrap();
return
}
if args.cmd_backup {
unimplemented!()
}
let backup = repo.get_backup(&backup_name).unwrap();
if args.cmd_info {
println!("{:?}", backup.root);
return
}
if args.cmd_restore {
repo.restore_backup(&backup, &args.arg_path.unwrap()).unwrap();
}
}
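A minimal sketch of the <repo>::<backup> argument convention handled above (illustrative only; the spec value is taken from the comparison runs):
    let spec = "repos/zvault_brotli3::silesia1";
    let repo_path = spec.splitn(2, "::").nth(0).unwrap();   // "repos/zvault_brotli3"
    let backup_name = spec.splitn(2, "::").nth(1).unwrap(); // "silesia1"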

src/repository/backup.rs Normal file

@@ -0,0 +1,79 @@
use super::{Repository, Chunk};
use rmp_serde;
use serde::{Deserialize, Serialize};
use std::fs::{self, File};
use std::path::Path;
#[derive(Default, Debug)]
pub struct Backup {
pub root: Vec<Chunk>,
pub total_data_size: u64,
pub changed_data_size: u64,
pub new_data_size: u64,
pub encoded_data_size: u64,
pub new_bundle_count: usize,
pub chunk_count: usize,
pub avg_chunk_size: f32,
pub date: i64,
pub duration: f32,
pub file_count: usize,
pub dir_count: usize
}
serde_impl!(Backup(u8) {
root: Vec<Chunk> => 0,
total_data_size: u64 => 1,
changed_data_size: u64 => 2,
new_data_size: u64 => 3,
encoded_data_size: u64 => 4,
new_bundle_count: usize => 5,
chunk_count: usize => 6,
avg_chunk_size: f32 => 7,
date: i64 => 8,
duration: f32 => 9,
file_count: usize => 10,
dir_count: usize => 11
});
impl Repository {
pub fn list_backups(&self) -> Result<Vec<String>, &'static str> {
let mut backups = Vec::new();
let mut paths = Vec::new();
let base_path = self.path.join("backups");
paths.push(base_path.clone());
while let Some(path) = paths.pop() {
for entry in try!(fs::read_dir(path).map_err(|_| "Failed to list files")) {
let entry = try!(entry.map_err(|_| "Failed to list files"));
let path = entry.path();
if path.is_dir() {
paths.push(path);
} else {
let relpath = try!(path.strip_prefix(&base_path).map_err(|_| "Failed to obtain relative path"));
backups.push(relpath.to_string_lossy().to_string());
}
}
}
Ok(backups)
}
pub fn get_backup(&self, name: &str) -> Result<Backup, &'static str> {
let file = try!(File::open(self.path.join("backups").join(name)).map_err(|_| "Failed to load backup"));
let mut reader = rmp_serde::Deserializer::new(file);
Backup::deserialize(&mut reader).map_err(|_| "Failed to read backup data")
}
pub fn save_backup(&mut self, backup: &Backup, name: &str) -> Result<(), &'static str> {
let mut file = try!(File::create(self.path.join("backups").join(name)).map_err(|_| "Failed to save backup"));
let mut writer = rmp_serde::Serializer::new(&mut file);
backup.serialize(&mut writer).map_err(|_| "Failed to write backup data")
}
pub fn restore_backup<P: AsRef<Path>>(&mut self, backup: &Backup, path: P) -> Result<(), &'static str> {
let inode = try!(self.get_inode(&backup.root));
try!(self.save_inode_at(&inode, path));
Ok(())
}
}
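A rough usage sketch for these new methods, mirroring the put/restore code paths in main.rs (the paths and the "restore_target" directory are just example values):
    let mut repo = Repository::open("repos/zvault_brotli3").unwrap();
    // store the inode tree of a path and register the root chunks as a named backup
    let chunks = repo.put_inode("test_data/silesia.tar").unwrap();
    repo.save_backup(&Backup{root: chunks, ..Default::default()}, "silesia1").unwrap();
    // later: load the backup by name and write its contents back to disk
    let backup = repo.get_backup("silesia1").unwrap();
    repo.restore_backup(&backup, "restore_target").unwrap();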

src/repository/info.rs

@@ -7,7 +7,10 @@ pub struct RepositoryInfo {
pub raw_data_size: u64,
pub compression_ratio: f32,
pub chunk_count: usize,
pub avg_chunk_size: f32
pub avg_chunk_size: f32,
pub index_size: usize,
pub index_capacity: usize,
pub index_entries: usize
}
@@ -28,7 +31,10 @@ impl Repository {
encoded_data_size: encoded_data_size,
raw_data_size: raw_data_size,
compression_ratio: encoded_data_size as f32 / raw_data_size as f32,
avg_chunk_size: raw_data_size as f32 / chunk_count as f32
avg_chunk_size: raw_data_size as f32 / chunk_count as f32,
index_size: self.index.size(),
index_capacity: self.index.capacity(),
index_entries: self.index.len()
}
}
}

src/repository/metadata.rs

@@ -4,11 +4,11 @@ use rmp_serde;
use std::collections::HashMap;
use std::path::Path;
use std::fs::{self, Metadata, File};
use std::fs::{self, Metadata, File, Permissions};
use std::os::linux::fs::MetadataExt;
use std::io::{Cursor, Read};
use std::os::unix::fs::{PermissionsExt, symlink};
use std::io::{Cursor, Read, Write};
use ::util::Hash;
use super::{Repository, Mode, Chunk};
@@ -118,25 +118,29 @@ impl Inode {
}
#[allow(dead_code)]
pub fn create_at<P: AsRef<Path>>(&self, path: P) -> Result<(), &'static str> {
pub fn create_at<P: AsRef<Path>>(&self, path: P) -> Result<Option<File>, &'static str> {
let full_path = path.as_ref().join(&self.name);
let mut file = None;
match self.file_type {
FileType::File => {
try!(File::create(&full_path).map_err(|_| "Failed to create file"));
file = Some(try!(File::create(&full_path).map_err(|_| "Failed to create file")));
},
FileType::Directory => {
try!(fs::create_dir(&full_path).map_err(|_| "Failed to create directory"));
},
FileType::Symlink => {
if let Some(ref src) = self.symlink_target {
try!(fs::soft_link(src, &full_path).map_err(|_| "Failed to create symlink"));
try!(symlink(src, &full_path).map_err(|_| "Failed to create symlink"));
} else {
return Err("Symlink without destination")
}
}
}
//FIXME: set times and permissions
Ok(())
try!(fs::set_permissions(&full_path, Permissions::from_mode(self.mode)).map_err(|_| "Failed to set permissions"));
//FIXME: set times and gid/uid
// https://crates.io/crates/filetime
Ok(file)
}
}
@@ -158,7 +162,7 @@ impl Repository {
let mut inode_data = Vec::new();
{
let mut writer = rmp_serde::Serializer::new(&mut inode_data);
inode.serialize(&mut writer).map_err(|_| "Failed to write inode data");
try!(inode.serialize(&mut writer).map_err(|_| "Failed to write inode data"));
}
self.put_data(Mode::Meta, &inode_data)
}
@@ -169,4 +173,21 @@ impl Repository {
let mut reader = rmp_serde::Deserializer::new(data);
Inode::deserialize(&mut reader).map_err(|_| "Failed to read inode data")
}
#[inline]
pub fn save_inode_at<P: AsRef<Path>>(&mut self, inode: &Inode, path: P) -> Result<(), &'static str> {
if let Some(mut file) = try!(inode.create_at(path.as_ref())) {
if let Some(ref contents) = inode.contents {
match *contents {
FileContents::Inline(ref data) => {
try!(file.write_all(&data).map_err(|_| "Failed to write data to file"));
},
FileContents::Chunked(ref chunks) => {
try!(self.get_stream(chunks, &mut file));
}
}
}
}
Ok(())
}
}

src/repository/mod.rs

@@ -4,6 +4,7 @@ mod integrity;
mod basic_io;
mod info;
mod metadata;
mod backup;
use std::mem;
use std::cmp::max;
@@ -17,6 +18,7 @@ use super::chunker::Chunker;
pub use self::config::Config;
pub use self::metadata::Inode;
pub use self::basic_io::Chunk;
pub use self::backup::Backup;
use self::bundle_map::BundleMap;
@@ -53,6 +55,7 @@ impl Repository {
try!(config.save(path.join("config.yaml")).map_err(|_| "Failed to save config"));
let bundle_map = BundleMap::create();
try!(bundle_map.save(path.join("bundles.map")).map_err(|_| "Failed to save bundle map"));
try!(fs::create_dir(&path.join("backups")).map_err(|_| "Failed to create backup directory"));
Ok(Repository{
path: path,
chunker: config.chunker.create(),

src/util/hash.rs

@@ -33,14 +33,14 @@ impl Hash {
impl fmt::Display for Hash {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fmt, "{:16x}{:16x}", self.high, self.low)
write!(fmt, "{:016x}{:016x}", self.high, self.low)
}
}
impl fmt::Debug for Hash {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fmt, "{:16x}{:16x}", self.high, self.low)
write!(fmt, "{:016x}{:016x}", self.high, self.low)
}
}
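The switch from {:16x} to {:016x} matters because a bare width pads with spaces while the 0 flag pads with zeros, so the two concatenated 64-bit halves always render as a fixed 32 hex digits. A standalone illustration (not part of the commit):
    fn main() {
        let half: u64 = 0xabc;
        println!("[{:16x}]", half);  // [             abc]  - space padded, ambiguous once concatenated
        println!("[{:016x}]", half); // [0000000000000abc]  - always 16 hex digits per half
    }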

src/util/lru_cache.rs

@@ -45,9 +45,9 @@ impl<K: Eq+Hash, V> LruCache<K, V> {
fn shrink(&mut self) {
let mut tags: Vec<u64> = self.items.values().map(|&(_, n)| n).collect();
tags.sort();
let bar = tags[tags.len()-self.min_size];
let min = tags[tags.len()-self.min_size];
let mut new = HashMap::with_capacity(self.min_size);
new.extend(self.items.drain().filter(|&(_,(_, n))| n>=bar));
new.extend(self.items.drain().filter(|&(_,(_, n))| n>=min));
self.items = new;
}
}
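For clarity, the renamed threshold is the min_size-th largest access tag; a tiny illustration (invented values, assuming min_size = 2):
    let mut tags = vec![5u64, 1, 9, 3];  // access counters of the cached entries
    tags.sort();                         // [1, 3, 5, 9]
    let min = tags[tags.len() - 2];      // 5, the second-highest tag
    // entries with n >= 5 survive shrink(), i.e. the two most recently used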

test.sh Executable file

@@ -0,0 +1,60 @@
set -ex
mkdir repos
time target/release/zvault init --compression brotli/3 repos/zvault_brotli3
time target/release/zvault init --compression brotli/6 repos/zvault_brotli6
time target/release/zvault init --compression lzma2/2 repos/zvault_lzma2
time attic init repos/attic
time borg init -e none repos/borg
time borg init -e none repos/borg-zlib
time zbackup init --non-encrypted repos/zbackup
cat < test_data/silesia.tar > /dev/null
time target/release/zvault put repos/zvault_brotli3::silesia1 test_data/silesia.tar
time target/release/zvault put repos/zvault_brotli3::silesia2 test_data/silesia.tar
time target/release/zvault put repos/zvault_brotli6::silesia1 test_data/silesia.tar
time target/release/zvault put repos/zvault_brotli6::silesia2 test_data/silesia.tar
time target/release/zvault put repos/zvault_lzma2::silesia1 test_data/silesia.tar
time target/release/zvault put repos/zvault_lzma2::silesia2 test_data/silesia.tar
time attic create repos/attic::silesia1 test_data/silesia.tar
time attic create repos/attic::silesia2 test_data/silesia.tar
time borg create -C none repos/borg::silesia1 test_data/silesia.tar
time borg create -C none repos/borg::silesia2 test_data/silesia.tar
time borg create -C zlib repos/borg-zlib::silesia1 test_data/silesia.tar
time borg create -C zlib repos/borg-zlib::silesia2 test_data/silesia.tar
time zbackup backup --non-encrypted repos/zbackup/backups/silesia1 < test_data/silesia.tar
time zbackup backup --non-encrypted repos/zbackup/backups/silesia2 < test_data/silesia.tar
du -h test_data/silesia.tar
du -sh repos/zvault*/bundles repos/attic repos/borg repos/borg-zlib repos/zbackup
rm -rf repos
mkdir repos
time target/release/zvault init --compression brotli/3 repos/zvault_brotli3
time target/release/zvault init --compression brotli/6 repos/zvault_brotli6
time target/release/zvault init --compression lzma2/2 repos/zvault_lzma2
time attic init repos/attic
time borg init -e none repos/borg
time borg init -e none repos/borg-zlib
time zbackup init --non-encrypted repos/zbackup
cat < test_data/ubuntu.tar > /dev/null
time target/release/zvault put repos/zvault_brotli3::ubuntu1 test_data/ubuntu.tar
time target/release/zvault put repos/zvault_brotli3::ubuntu2 test_data/ubuntu.tar
time target/release/zvault put repos/zvault_brotli6::ubuntu1 test_data/ubuntu.tar
time target/release/zvault put repos/zvault_brotli6::ubuntu2 test_data/ubuntu.tar
time target/release/zvault put repos/zvault_lzma2::ubuntu1 test_data/ubuntu.tar
time target/release/zvault put repos/zvault_lzma2::ubuntu2 test_data/ubuntu.tar
time attic create repos/attic::ubuntu1 test_data/ubuntu.tar
time attic create repos/attic::ubuntu2 test_data/ubuntu.tar
time borg create -C none repos/borg::ubuntu1 test_data/ubuntu.tar
time borg create -C none repos/borg::ubuntu2 test_data/ubuntu.tar
time borg create -C zlib repos/borg-zlib::ubuntu1 test_data/ubuntu.tar
time borg create -C zlib repos/borg-zlib::ubuntu2 test_data/ubuntu.tar
time zbackup backup --non-encrypted repos/zbackup/backups/ubuntu1 < test_data/ubuntu.tar
time zbackup backup --non-encrypted repos/zbackup/backups/ubuntu2 < test_data/ubuntu.tar
du -h test_data/ubuntu.tar
du -sh repos/zvault*/bundles repos/attic repos/borg repos/borg-zlib repos/zbackup