mirror of https://github.com/dswd/zvault

commit 8911c8af6d (parent 24e28e6bcc): Translation infrastructure
Cargo.lock

@@ -151,6 +151,11 @@ dependencies = [
  "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "lazy_static"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "lazy_static"
 version = "1.0.0"
@@ -180,6 +185,17 @@ name = "linked-hash-map"
 version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "locale_config"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "log"
 version = "0.3.9"
@@ -583,6 +599,7 @@ dependencies = [
  "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
  "libsodium-sys 0.0.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "locale_config 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "murmurhash3 0.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "pbr 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -622,11 +639,13 @@ dependencies = [
 "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
 "checksum fuse 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80e57070510966bfef93662a81cb8aa2b1c7db0964354fa9921434f04b9e8660"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
+"checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
 "checksum lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c8f31047daa365f19be14b47c29df4f7c3b581832407daabe6ae77397619237d"
 "checksum libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "e32a70cf75e5846d53a673923498228bbec6a8624708a9ea5645f075d6276122"
 "checksum libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "1e5d97d6708edaa407429faa671b942dc0f2727222fb6b6539bf1db936e4b121"
 "checksum libsodium-sys 0.0.16 (registry+https://github.com/rust-lang/crates.io-index)" = "fcbd1beeed8d44caa8a669ebaa697c313976e242c03cc9fb23d88bf1656f5542"
 "checksum linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "70fb39025bc7cdd76305867c4eccf2f2dcf6e9a57f5b21a93e1c2d86cd03ec9e"
+"checksum locale_config 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "14fbee0e39bc2dd6a2427c4fdea66e9826cc1fd09b0a0b7550359f5f6efe1dab"
 "checksum log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b"
 "checksum log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "89f010e843f2b1a31dbd316b3b8d443758bc634bed37aabade59c686d644e0a2"
 "checksum memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "796fba70e76612589ed2ce7f45282f5af869e0fdd7cc6199fa1aa1f1d591ba9d"
Cargo.toml

@@ -34,6 +34,7 @@ users = "0.6"
 time = "*"
 libc = "0.2"
 runtime-fmt = "0.3"
+locale_config = "^0.2.2"
 index = {path="index"}
 chunking = {path="chunking"}
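The only functional addition here is the locale_config dependency, which lets the translation layer ask the operating system for the user's preferred language. A minimal sketch of that lookup (hypothetical helper, not code from this commit; only locale_config's Locale type is assumed):

use locale_config::Locale;

/// Returns a language tag such as "de-DE" or "en-US", derived from the
/// environment (LANG/LC_* on Unix, the system settings on Windows).
fn user_language() -> String {
    Locale::current().to_string()
}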
@@ -14,33 +14,33 @@ quick_error!{
     pub enum BundleCacheError {
         Read(err: io::Error) {
             cause(err)
-            description("Failed to read bundle cache")
-            display("Bundle cache error: failed to read bundle cache\n\tcaused by: {}", err)
+            description(tr!("Failed to read bundle cache"))
+            display("{}", tr_format!("Bundle cache error: failed to read bundle cache\n\tcaused by: {}", err))
         }
         Write(err: io::Error) {
             cause(err)
-            description("Failed to write bundle cache")
-            display("Bundle cache error: failed to write bundle cache\n\tcaused by: {}", err)
+            description(tr!("Failed to write bundle cache"))
+            display("{}", tr_format!("Bundle cache error: failed to write bundle cache\n\tcaused by: {}", err))
         }
         WrongHeader {
-            description("Wrong header")
-            display("Bundle cache error: wrong header on bundle cache")
+            description(tr!("Wrong header"))
+            display("{}", tr_format!("Bundle cache error: wrong header on bundle cache"))
         }
         UnsupportedVersion(version: u8) {
-            description("Wrong version")
-            display("Bundle cache error: unsupported version: {}", version)
+            description(tr!("Wrong version"))
+            display("{}", tr_format!("Bundle cache error: unsupported version: {}", version))
         }
         Decode(err: msgpack::DecodeError) {
             from()
             cause(err)
-            description("Failed to decode bundle cache")
-            display("Bundle cache error: failed to decode bundle cache\n\tcaused by: {}", err)
+            description(tr!("Failed to decode bundle cache"))
+            display("{}", tr_format!("Bundle cache error: failed to decode bundle cache\n\tcaused by: {}", err))
         }
         Encode(err: msgpack::EncodeError) {
             from()
             cause(err)
-            description("Failed to encode bundle cache")
-            display("Bundle cache error: failed to encode bundle cache\n\tcaused by: {}", err)
+            description(tr!("Failed to encode bundle cache"))
+            display("{}", tr_format!("Bundle cache error: failed to encode bundle cache\n\tcaused by: {}", err))
         }
     }
 }
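The tr!, tr_format! and tr_*-logging macros themselves are not part of this diff. A minimal sketch of the shape they need for these call sites to compile (the macro bodies below are assumptions, not the actual zvault implementation):

// tr! maps the English source string to its translation; returning the
// literal as a fallback keeps &'static str call sites valid.
macro_rules! tr {
    ($text:expr) => {
        $text // placeholder: a real version consults a translation table
    };
}

// tr_format! formats a *translated* format string. Since that string is only
// known at runtime, an ordinary format!() (which needs a literal) cannot be
// used; the runtime-fmt crate added in Cargo.toml provides rt_format!.
macro_rules! tr_format {
    ($text:expr) => { tr!($text).to_string() };
    ($text:expr, $($arg:tt)*) => {
        rt_format!(tr!($text), $($arg)*).expect("invalid format string")
    };
}

This also explains the recurring display("{}", tr_format!(...)) pattern: quick_error's display() normally takes a compile-time format string, so the translated, runtime-formatted message is passed through a fixed "{}" instead.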
@@ -14,44 +14,44 @@ quick_error!{
     pub enum BundleDbError {
         ListBundles(err: io::Error) {
             cause(err)
-            description("Failed to list bundles")
-            display("Bundle db error: failed to list bundles\n\tcaused by: {}", err)
+            description(tr!("Failed to list bundles"))
+            display("{}", tr_format!("Bundle db error: failed to list bundles\n\tcaused by: {}", err))
         }
         Reader(err: BundleReaderError) {
             from()
             cause(err)
-            description("Failed to read bundle")
-            display("Bundle db error: failed to read bundle\n\tcaused by: {}", err)
+            description(tr!("Failed to read bundle"))
+            display("{}", tr_format!("Bundle db error: failed to read bundle\n\tcaused by: {}", err))
         }
         Writer(err: BundleWriterError) {
             from()
             cause(err)
-            description("Failed to write bundle")
-            display("Bundle db error: failed to write bundle\n\tcaused by: {}", err)
+            description(tr!("Failed to write bundle"))
+            display("{}", tr_format!("Bundle db error: failed to write bundle\n\tcaused by: {}", err))
         }
         Cache(err: BundleCacheError) {
             from()
             cause(err)
-            description("Failed to read/write bundle cache")
-            display("Bundle db error: failed to read/write bundle cache\n\tcaused by: {}", err)
+            description(tr!("Failed to read/write bundle cache"))
+            display("{}", tr_format!("Bundle db error: failed to read/write bundle cache\n\tcaused by: {}", err))
         }
         UploadFailed {
-            description("Uploading a bundle failed")
+            description(tr!("Uploading a bundle failed"))
         }
         Io(err: io::Error, path: PathBuf) {
             cause(err)
             context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf())
-            description("Io error")
-            display("Bundle db error: io error on {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Io error"))
+            display("{}", tr_format!("Bundle db error: io error on {:?}\n\tcaused by: {}", path, err))
         }
         NoSuchBundle(bundle: BundleId) {
-            description("No such bundle")
-            display("Bundle db error: no such bundle: {:?}", bundle)
+            description(tr!("No such bundle"))
+            display("{}", tr_format!("Bundle db error: no such bundle: {:?}", bundle))
         }
         Remove(err: io::Error, bundle: BundleId) {
             cause(err)
-            description("Failed to remove bundle")
-            display("Bundle db error: failed to remove bundle {}\n\tcaused by: {}", bundle, err)
+            description(tr!("Failed to remove bundle"))
+            display("{}", tr_format!("Bundle db error: failed to remove bundle {}\n\tcaused by: {}", bundle, err))
         }
     }
 }
@@ -146,14 +146,14 @@ impl BundleDb {
                 self.local_bundles.insert(bundle.id(), bundle);
             }
         } else {
-            warn!("Failed to read local bundle cache, rebuilding cache");
+            tr_warn!("Failed to read local bundle cache, rebuilding cache");
         }
         if let Ok(list) = StoredBundle::read_list_from(&self.layout.remote_bundle_cache_path()) {
             for bundle in list {
                 self.remote_bundles.insert(bundle.id(), bundle);
             }
         } else {
-            warn!("Failed to read remote bundle cache, rebuilding cache");
+            tr_warn!("Failed to read remote bundle cache, rebuilding cache");
         }
         let base_path = self.layout.base_path();
         let (new, gone) = try!(load_bundles(
@@ -219,7 +219,7 @@ impl BundleDb {
         for id in meta_bundles {
             if !self.local_bundles.contains_key(&id) {
                 let bundle = self.remote_bundles[&id].clone();
-                debug!("Copying new meta bundle to local cache: {}", bundle.info.id);
+                tr_debug!("Copying new meta bundle to local cache: {}", bundle.info.id);
                 try!(self.copy_remote_bundle_to_cache(&bundle));
             }
         }
@@ -407,7 +407,7 @@ impl BundleDb {
     pub fn check(&mut self, full: bool, repair: bool) -> Result<bool, BundleDbError> {
         let mut to_repair = vec![];
         for (id, stored) in ProgressIter::new(
-            "checking bundles",
+            tr!("checking bundles"),
             self.remote_bundles.len(),
             self.remote_bundles.iter()
         )
@@ -433,7 +433,7 @@ impl BundleDb {
             }
         }
         if !to_repair.is_empty() {
-            for id in ProgressIter::new("repairing bundles", to_repair.len(), to_repair.iter()) {
+            for id in ProgressIter::new(tr!("repairing bundles"), to_repair.len(), to_repair.iter()) {
                 try!(self.repair_bundle(id));
             }
             try!(self.flush());
@@ -460,7 +460,7 @@ impl BundleDb {
         let mut bundle = match self.get_bundle(&stored) {
             Ok(bundle) => bundle,
             Err(err) => {
-                warn!(
+                tr_warn!(
                     "Problem detected: failed to read bundle header: {}\n\tcaused by: {}",
                     id,
                     err
@@ -471,7 +471,7 @@ impl BundleDb {
         let chunks = match bundle.get_chunk_list() {
             Ok(chunks) => chunks.clone(),
             Err(err) => {
-                warn!(
+                tr_warn!(
                     "Problem detected: failed to read bundle chunks: {}\n\tcaused by: {}",
                     id,
                     err
@@ -482,7 +482,7 @@ impl BundleDb {
         let data = match bundle.load_contents() {
             Ok(data) => data,
             Err(err) => {
-                warn!(
+                tr_warn!(
                     "Problem detected: failed to read bundle data: {}\n\tcaused by: {}",
                     id,
                     err
@@ -490,8 +490,8 @@ impl BundleDb {
                 return self.evacuate_broken_bundle(stored);
             }
         };
-        warn!("Problem detected: bundle data was truncated: {}", id);
-        info!("Copying readable data into new bundle");
+        tr_warn!("Problem detected: bundle data was truncated: {}", id);
+        tr_info!("Copying readable data into new bundle");
         let info = stored.info.clone();
         let mut new_bundle = try!(self.create_bundle(
             info.mode,
@@ -509,7 +509,7 @@ impl BundleDb {
             pos += len as usize;
         }
         let bundle = try!(self.add_bundle(new_bundle));
-        info!("New bundle id is {}", bundle.id);
+        tr_info!("New bundle id is {}", bundle.id);
         self.evacuate_broken_bundle(stored)
     }
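The tr_warn!, tr_info!, tr_debug! and tr_trace! calls above keep the log crate's severity levels and only add a translation step. A plausible shape for these wrappers (a sketch; the real macro bodies are not in this diff):

// Translate and runtime-format the message, then defer to the log crate.
macro_rules! tr_warn {
    ($($arg:tt)*) => { warn!("{}", tr_format!($($arg)*)) };
}
// tr_info!, tr_debug! and tr_trace! follow the same pattern with
// info!, debug! and trace! respectively.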
@@ -15,42 +15,42 @@ quick_error!{
         Read(err: io::Error, path: PathBuf) {
             cause(err)
             context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf())
-            description("Failed to read data from file")
-            display("Bundle reader error: failed to read data from file {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Failed to read data from file"))
+            display("{}", tr_format!("Bundle reader error: failed to read data from file {:?}\n\tcaused by: {}", path, err))
         }
         WrongHeader(path: PathBuf) {
-            description("Wrong header")
-            display("Bundle reader error: wrong header on bundle {:?}", path)
+            description(tr!("Wrong header"))
+            display("{}", tr_format!("Bundle reader error: wrong header on bundle {:?}", path))
         }
         UnsupportedVersion(path: PathBuf, version: u8) {
-            description("Wrong version")
-            display("Bundle reader error: unsupported version on bundle {:?}: {}", path, version)
+            description(tr!("Wrong version"))
+            display("{}", tr_format!("Bundle reader error: unsupported version on bundle {:?}: {}", path, version))
         }
         NoSuchChunk(bundle: BundleId, id: usize) {
-            description("Bundle has no such chunk")
-            display("Bundle reader error: bundle {:?} has no chunk with id {}", bundle, id)
+            description(tr!("Bundle has no such chunk"))
+            display("{}", tr_format!("Bundle reader error: bundle {:?} has no chunk with id {}", bundle, id))
         }
         Decode(err: msgpack::DecodeError, path: PathBuf) {
             cause(err)
             context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf())
-            description("Failed to decode bundle header")
-            display("Bundle reader error: failed to decode bundle header of {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Failed to decode bundle header"))
+            display("{}", tr_format!("Bundle reader error: failed to decode bundle header of {:?}\n\tcaused by: {}", path, err))
         }
         Decompression(err: CompressionError, path: PathBuf) {
             cause(err)
             context(path: &'a Path, err: CompressionError) -> (err, path.to_path_buf())
-            description("Decompression failed")
-            display("Bundle reader error: decompression failed on bundle {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Decompression failed"))
+            display("{}", tr_format!("Bundle reader error: decompression failed on bundle {:?}\n\tcaused by: {}", path, err))
         }
         Decryption(err: EncryptionError, path: PathBuf) {
             cause(err)
             context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf())
-            description("Decryption failed")
-            display("Bundle reader error: decryption failed on bundle {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Decryption failed"))
+            display("{}", tr_format!("Bundle reader error: decryption failed on bundle {:?}\n\tcaused by: {}", path, err))
         }
         Integrity(bundle: BundleId, reason: &'static str) {
-            description("Bundle has an integrity error")
-            display("Bundle reader error: bundle {:?} has an integrity error: {}", bundle, reason)
+            description(tr!("Bundle has an integrity error"))
+            display("{}", tr_format!("Bundle reader error: bundle {:?} has an integrity error: {}", bundle, reason))
         }
     }
 }
@@ -151,7 +151,7 @@ impl BundleReader {
     }

     fn load_chunklist(&mut self) -> Result<(), BundleReaderError> {
-        debug!(
+        tr_debug!(
             "Load bundle chunklist {} ({:?})",
             self.info.id,
             self.info.mode
@@ -197,7 +197,7 @@ impl BundleReader {
     }

     fn load_encoded_contents(&self) -> Result<Vec<u8>, BundleReaderError> {
-        debug!("Load bundle data {} ({:?})", self.info.id, self.info.mode);
+        tr_debug!("Load bundle data {} ({:?})", self.info.id, self.info.mode);
         let mut file = BufReader::new(try!(File::open(&self.path).context(&self.path as &Path)));
         try!(
             file.seek(SeekFrom::Start(self.content_start as u64))
@@ -256,7 +256,7 @@ impl BundleReader {
         if self.info.chunk_count != self.chunks.as_ref().unwrap().len() {
             return Err(BundleReaderError::Integrity(
                 self.id(),
-                "Chunk list size does not match chunk count"
+                tr!("Chunk list size does not match chunk count")
             ));
         }
         if self.chunks
@@ -268,7 +268,7 @@ impl BundleReader {
         {
             return Err(BundleReaderError::Integrity(
                 self.id(),
-                "Individual chunk sizes do not add up to total size"
+                tr!("Individual chunk sizes do not add up to total size")
             ));
         }
         if !full {
@@ -276,7 +276,7 @@ impl BundleReader {
             if size as usize != self.info.encoded_size + self.content_start {
                 return Err(BundleReaderError::Integrity(
                     self.id(),
-                    "File size does not match size in header, truncated file"
+                    tr!("File size does not match size in header, truncated file")
                 ));
             }
             return Ok(());
@@ -285,14 +285,14 @@ impl BundleReader {
         if self.info.encoded_size != encoded_contents.len() {
             return Err(BundleReaderError::Integrity(
                 self.id(),
-                "Encoded data size does not match size in header, truncated bundle"
+                tr!("Encoded data size does not match size in header, truncated bundle")
             ));
         }
         let contents = try!(self.decode_contents(encoded_contents));
         if self.info.raw_size != contents.len() {
             return Err(BundleReaderError::Integrity(
                 self.id(),
-                "Raw data size does not match size in header, truncated bundle"
+                tr!("Raw data size does not match size in header, truncated bundle")
             ));
         }
         //TODO: verify checksum
@@ -302,15 +302,14 @@ impl BundleReader {

 impl Debug for BundleReader {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
-        write!(
-            fmt,
-            "Bundle(\n\tid: {}\n\tpath: {:?}\n\tchunks: {}\n\tsize: {}, encoded: {}\n\tcompression: {:?}\n)",
+        write!(fmt, "{}",
+            tr_format!("Bundle(\n\tid: {}\n\tpath: {:?}\n\tchunks: {}\n\tsize: {}, encoded: {}\n\tcompression: {:?}\n)",
             self.info.id.to_string(),
             self.path,
             self.info.chunk_count,
             self.info.raw_size,
             self.info.encoded_size,
             self.info.compression
-        )
+        ))
     }
 }
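Note that Integrity still declares reason: &'static str even though the reason strings are now wrapped in tr!(...). This only compiles if tr! yields 'static strings, for instance by falling back to the untranslated literal or by handing out references that live for the whole program; which of these the translation module actually does is not visible in this diff.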
@@ -51,10 +51,10 @@ impl BundleUploader {

     pub fn queue(&self, local_path: PathBuf, remote_path: PathBuf) -> Result<(), BundleDbError> {
         while self.waiting.load(Ordering::SeqCst) >= self.capacity {
-            debug!("Upload queue is full, waiting for slots");
+            tr_debug!("Upload queue is full, waiting for slots");
             let _ = self.wait.0.wait(self.wait.1.lock().unwrap()).unwrap();
         }
-        trace!("Adding to upload queue: {:?}", local_path);
+        tr_trace!("Adding to upload queue: {:?}", local_path);
         if !self.error_present.load(Ordering::SeqCst) {
             self.waiting.fetch_add(1, Ordering::SeqCst);
             self.queue.push(Some((local_path, remote_path)));
@@ -75,21 +75,21 @@ impl BundleUploader {

     fn worker_thread_inner(&self) -> Result<(), BundleDbError> {
         while let Some((src_path, dst_path)) = self.queue.pop() {
-            trace!("Uploading {:?} to {:?}", src_path, dst_path);
+            tr_trace!("Uploading {:?} to {:?}", src_path, dst_path);
             self.waiting.fetch_sub(1, Ordering::SeqCst);
             self.wait.0.notify_all();
             let folder = dst_path.parent().unwrap();
             try!(fs::create_dir_all(&folder).context(folder as &Path));
             try!(fs::copy(&src_path, &dst_path).context(&dst_path as &Path));
             try!(fs::remove_file(&src_path).context(&src_path as &Path));
-            debug!("Uploaded {:?} to {:?}", src_path, dst_path);
+            tr_debug!("Uploaded {:?} to {:?}", src_path, dst_path);
         }
         Ok(())
     }

     fn worker_thread(&self) {
         if let Err(err) = self.worker_thread_inner() {
-            debug!("Upload thread failed with error: {}", err);
+            tr_debug!("Upload thread failed with error: {}", err);
             *self.error.lock().unwrap() = Some(err);
             self.error_present.store(true, Ordering::SeqCst);
         }
@@ -14,31 +14,31 @@ quick_error!{
     pub enum BundleWriterError {
         CompressionSetup(err: CompressionError) {
             cause(err)
-            description("Failed to setup compression")
-            display("Bundle writer error: failed to setup compression\n\tcaused by: {}", err)
+            description(tr!("Failed to setup compression"))
+            display("{}", tr_format!("Bundle writer error: failed to setup compression\n\tcaused by: {}", err))
         }
         Compression(err: CompressionError) {
             cause(err)
-            description("Failed to compress data")
-            display("Bundle writer error: failed to compress data\n\tcaused by: {}", err)
+            description(tr!("Failed to compress data"))
+            display("{}", tr_format!("Bundle writer error: failed to compress data\n\tcaused by: {}", err))
         }
         Encryption(err: EncryptionError) {
             from()
             cause(err)
-            description("Encryption failed")
-            display("Bundle writer error: failed to encrypt data\n\tcaused by: {}", err)
+            description(tr!("Encryption failed"))
+            display("{}", tr_format!("Bundle writer error: failed to encrypt data\n\tcaused by: {}", err))
         }
         Encode(err: msgpack::EncodeError, path: PathBuf) {
             cause(err)
             context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf())
-            description("Failed to encode bundle header to file")
-            display("Bundle writer error: failed to encode bundle header to file {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Failed to encode bundle header to file"))
+            display("{}", tr_format!("Bundle writer error: failed to encode bundle header to file {:?}\n\tcaused by: {}", path, err))
         }
         Write(err: io::Error, path: PathBuf) {
             cause(err)
             context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf())
-            description("Failed to write data to file")
-            display("Bundle writer error: failed to write data to file {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Failed to write data to file"))
+            display("{}", tr_format!("Bundle writer error: failed to write data to file {:?}\n\tcaused by: {}", path, err))
         }
     }
 }
@@ -25,14 +25,14 @@ impl ChunkerType {
             "rabin" => Ok(ChunkerType::Rabin((avg_size, seed as u32))),
             "fastcdc" => Ok(ChunkerType::FastCdc((avg_size, seed))),
             "fixed" => Ok(ChunkerType::Fixed(avg_size)),
-            _ => Err("Unsupported chunker type"),
+            _ => Err(tr!("Unsupported chunker type")),
         }
     }

     pub fn from_string(name: &str) -> Result<Self, &'static str> {
         let (name, size) = if let Some(pos) = name.find('/') {
             let size = try!(usize::from_str(&name[pos + 1..]).map_err(
-                |_| "Chunk size must be a number"
+                |_| tr!("Chunk size must be a number")
             ));
             let name = &name[..pos];
             (name, size)
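As with Integrity above, from_string() still returns Result<Self, &'static str>, so the tr!-wrapped error strings must live for the whole program. For reference, the function parses the "<algorithm>/<size>" syntax used on the command line, so a call like the following (illustrative, values assumed) selects FastCDC with an average chunk size of 16:

// Accepted algorithm names, per the match above: "rabin", "fastcdc", "fixed".
// A non-numeric size ("fastcdc/huge") now yields the translatable error
// "Chunk size must be a number".
let chunker = ChunkerType::from_string("fastcdc/16").unwrap();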
@@ -52,7 +52,7 @@ pub fn run(
     let mut total_write_time = 0.0;
     let mut total_read_time = 0.0;

-    println!("Reading input file ...");
+    tr_println!("Reading input file ...");
     let mut file = File::open(path).unwrap();
     let total_size = file.metadata().unwrap().len();
     let mut size = total_size;
@@ -67,7 +67,7 @@ pub fn run(

     println!();

-    println!(
+    tr_println!(
         "Chunking data with {}, avg chunk size {} ...",
         chunker.name(),
         to_file_size(chunker.avg_size() as u64)
@@ -95,7 +95,7 @@ pub fn run(
         .sum::<f32>() /
         (chunks.len() as f32 - 1.0))
         .sqrt();
-    println!(
+    tr_println!(
         "- {} chunks, avg size: {} ±{}",
         chunks.len(),
         to_file_size(chunk_size_avg as u64),
@@ -104,7 +104,7 @@ pub fn run(

     println!();

-    println!("Hashing chunks with {} ...", hash.name());
+    tr_println!("Hashing chunks with {} ...", hash.name());
     let mut hashes = Vec::with_capacity(chunks.len());
     let hash_time = Duration::span(|| for &(pos, len) in &chunks {
         hashes.push(hash.hash(&data[pos..pos + len]))
@@ -128,7 +128,7 @@ pub fn run(
         let (_, len) = chunks.remove(*i);
         dup_size += len;
     }
-    println!(
+    tr_println!(
         "- {} duplicate chunks, {}, {:.1}% saved by internal deduplication",
         dups.len(),
         to_file_size(dup_size as u64),
@@ -141,7 +141,7 @@ pub fn run(
     if let Some(compression) = compression.clone() {
         println!();

-        println!("Compressing chunks with {} ...", compression.to_string());
+        tr_println!("Compressing chunks with {} ...", compression.to_string());
         let compress_time = Duration::span(|| {
             let mut bundle = Vec::with_capacity(bundle_size + 2 * chunk_size_avg as usize);
             let mut c = compression.compress_stream().unwrap();
@@ -164,7 +164,7 @@ pub fn run(
             to_speed(size, compress_time)
         );
         let compressed_size = bundles.iter().map(|b| b.len()).sum::<usize>();
-        println!(
+        tr_println!(
             "- {} bundles, {}, {:.1}% saved",
             bundles.len(),
             to_file_size(compressed_size as u64),
@@ -191,7 +191,7 @@ pub fn run(
     crypto.add_secret_key(public, secret);
     let encryption = (EncryptionMethod::Sodium, public[..].to_vec().into());

-    println!("Encrypting bundles...");
+    tr_println!("Encrypting bundles...");
     let mut encrypted_bundles = Vec::with_capacity(bundles.len());

     let encrypt_time = Duration::span(|| for bundle in bundles {
@@ -206,7 +206,7 @@ pub fn run(

     println!();

-    println!("Decrypting bundles...");
+    tr_println!("Decrypting bundles...");
     bundles = Vec::with_capacity(encrypted_bundles.len());
     let decrypt_time = Duration::span(|| for bundle in encrypted_bundles {
         bundles.push(crypto.decrypt(&encryption, &bundle).unwrap());
@@ -222,7 +222,7 @@ pub fn run(
     if let Some(compression) = compression {
         println!();

-        println!("Decompressing bundles with {} ...", compression.to_string());
+        tr_println!("Decompressing bundles with {} ...", compression.to_string());
         let mut dummy = ChunkSink {
             chunks: vec![],
             written: 0,
@@ -243,17 +243,17 @@ pub fn run(

     println!();

-    println!(
+    tr_println!(
         "Total storage size: {} / {}, ratio: {:.1}%",
         to_file_size(size as u64),
         to_file_size(total_size as u64),
         size as f32 / total_size as f32 * 100.0
     );
-    println!(
+    tr_println!(
         "Total processing speed: {}",
         to_speed(total_size, total_write_time)
     );
-    println!(
+    tr_println!(
         "Total read speed: {}",
         to_speed(total_size, total_read_time)
     );
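tr_println! presumably mirrors the logging wrappers, translating and runtime-formatting the message before printing it (sketch, not from this diff):

macro_rules! tr_println {
    ($($arg:tt)*) => { println!("{}", tr_format!($($arg)*)) };
}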
433
src/cli/args.rs
433
src/cli/args.rs
|
@ -157,10 +157,10 @@ fn parse_repo_path(
|
||||||
let mut parts = repo_path.splitn(3, "::");
|
let mut parts = repo_path.splitn(3, "::");
|
||||||
let repo = convert_repo_path(parts.next().unwrap_or(""));
|
let repo = convert_repo_path(parts.next().unwrap_or(""));
|
||||||
if existing && !repo.join("config.yaml").exists() {
|
if existing && !repo.join("config.yaml").exists() {
|
||||||
return Err("The specified repository does not exist".to_string());
|
return Err(tr!("The specified repository does not exist").to_string());
|
||||||
}
|
}
|
||||||
if !existing && repo.exists() {
|
if !existing && repo.exists() {
|
||||||
return Err("The specified repository already exists".to_string());
|
return Err(tr!("The specified repository already exists").to_string());
|
||||||
}
|
}
|
||||||
let mut backup = parts.next();
|
let mut backup = parts.next();
|
||||||
if let Some(val) = backup {
|
if let Some(val) = backup {
|
||||||
|
@ -176,18 +176,18 @@ fn parse_repo_path(
|
||||||
}
|
}
|
||||||
if let Some(restr) = backup_restr {
|
if let Some(restr) = backup_restr {
|
||||||
if !restr && backup.is_some() {
|
if !restr && backup.is_some() {
|
||||||
return Err("No backup may be given here".to_string());
|
return Err(tr!("No backup may be given here").to_string());
|
||||||
}
|
}
|
||||||
if restr && backup.is_none() {
|
if restr && backup.is_none() {
|
||||||
return Err("A backup must be specified".to_string());
|
return Err(tr!("A backup must be specified").to_string());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if let Some(restr) = path_restr {
|
if let Some(restr) = path_restr {
|
||||||
if !restr && path.is_some() {
|
if !restr && path.is_some() {
|
||||||
return Err("No subpath may be given here".to_string());
|
return Err(tr!("No subpath may be given here").to_string());
|
||||||
}
|
}
|
||||||
if restr && path.is_none() {
|
if restr && path.is_none() {
|
||||||
return Err("A subpath must be specified".to_string());
|
return Err(tr!("A subpath must be specified").to_string());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok((repo, backup, path))
|
Ok((repo, backup, path))
|
||||||
|
@ -207,7 +207,7 @@ fn parse_num(num: &str) -> Result<u64, String> {
|
||||||
if let Ok(num) = num.parse::<u64>() {
|
if let Ok(num) = num.parse::<u64>() {
|
||||||
Ok(num)
|
Ok(num)
|
||||||
} else {
|
} else {
|
||||||
Err("Must be a number".to_string())
|
Err(tr!("Must be a number").to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -220,7 +220,7 @@ fn parse_chunker(val: &str) -> Result<ChunkerType, String> {
|
||||||
if let Ok(chunker) = ChunkerType::from_string(val) {
|
if let Ok(chunker) = ChunkerType::from_string(val) {
|
||||||
Ok(chunker)
|
Ok(chunker)
|
||||||
} else {
|
} else {
|
||||||
Err("Invalid chunker method/size".to_string())
|
Err(tr!("Invalid chunker method/size").to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -236,7 +236,7 @@ fn parse_compression(val: &str) -> Result<Option<Compression>, String> {
|
||||||
if let Ok(compression) = Compression::from_string(val) {
|
if let Ok(compression) = Compression::from_string(val) {
|
||||||
Ok(Some(compression))
|
Ok(Some(compression))
|
||||||
} else {
|
} else {
|
||||||
Err("Invalid compression method/level".to_string())
|
Err(tr!("Invalid compression method/level").to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -252,13 +252,13 @@ fn parse_public_key(val: &str) -> Result<Option<PublicKey>, String> {
|
||||||
let bytes = match parse_hex(val) {
|
let bytes = match parse_hex(val) {
|
||||||
Ok(bytes) => bytes,
|
Ok(bytes) => bytes,
|
||||||
Err(_) => {
|
Err(_) => {
|
||||||
return Err("Invalid hexadecimal".to_string());
|
return Err(tr!("Invalid hexadecimal").to_string());
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
if let Some(key) = PublicKey::from_slice(&bytes) {
|
if let Some(key) = PublicKey::from_slice(&bytes) {
|
||||||
Ok(Some(key))
|
Ok(Some(key))
|
||||||
} else {
|
} else {
|
||||||
return Err("Invalid key".to_string());
|
return Err(tr!("Invalid key").to_string());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -271,7 +271,7 @@ fn parse_hash(val: &str) -> Result<HashMethod, String> {
|
||||||
if let Ok(hash) = HashMethod::from(val) {
|
if let Ok(hash) = HashMethod::from(val) {
|
||||||
Ok(hash)
|
Ok(hash)
|
||||||
} else {
|
} else {
|
||||||
Err("Invalid hash method".to_string())
|
Err(tr!("Invalid hash method").to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -284,7 +284,7 @@ fn parse_bundle_id(val: &str) -> Result<BundleId, ErrorCode> {
|
||||||
if let Ok(hash) = Hash::from_string(val) {
|
if let Ok(hash) = Hash::from_string(val) {
|
||||||
Ok(BundleId(hash))
|
Ok(BundleId(hash))
|
||||||
} else {
|
} else {
|
||||||
error!("Invalid bundle id: {}", val);
|
tr_error!("Invalid bundle id: {}", val);
|
||||||
Err(ErrorCode::InvalidArgs)
|
Err(ErrorCode::InvalidArgs)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -292,7 +292,7 @@ fn parse_bundle_id(val: &str) -> Result<BundleId, ErrorCode> {
|
||||||
#[allow(unknown_lints, needless_pass_by_value)]
|
#[allow(unknown_lints, needless_pass_by_value)]
|
||||||
fn validate_existing_path(val: String) -> Result<(), String> {
|
fn validate_existing_path(val: String) -> Result<(), String> {
|
||||||
if !Path::new(&val).exists() {
|
if !Path::new(&val).exists() {
|
||||||
Err("Path does not exist".to_string())
|
Err(tr!("Path does not exist").to_string())
|
||||||
} else {
|
} else {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -301,7 +301,7 @@ fn validate_existing_path(val: String) -> Result<(), String> {
|
||||||
#[allow(unknown_lints, needless_pass_by_value)]
|
#[allow(unknown_lints, needless_pass_by_value)]
|
||||||
fn validate_existing_path_or_stdio(val: String) -> Result<(), String> {
|
fn validate_existing_path_or_stdio(val: String) -> Result<(), String> {
|
||||||
if val != "-" && !Path::new(&val).exists() {
|
if val != "-" && !Path::new(&val).exists() {
|
||||||
Err("Path does not exist".to_string())
|
Err(tr!("Path does not exist").to_string())
|
||||||
} else {
|
} else {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -310,153 +310,274 @@ fn validate_existing_path_or_stdio(val: String) -> Result<(), String> {
|
||||||
|
|
||||||
#[allow(unknown_lints, cyclomatic_complexity)]
|
#[allow(unknown_lints, cyclomatic_complexity)]
|
||||||
pub fn parse() -> Result<(log::Level, Arguments), ErrorCode> {
|
pub fn parse() -> Result<(log::Level, Arguments), ErrorCode> {
|
||||||
let args = App::new("zvault").version(crate_version!()).author(crate_authors!(",\n")).about(crate_description!())
|
let args = App::new("zvault")
|
||||||
|
.version(crate_version!())
|
||||||
|
.author(crate_authors!(",\n"))
|
||||||
|
.about(crate_description!())
|
||||||
.settings(&[AppSettings::VersionlessSubcommands, AppSettings::SubcommandRequiredElseHelp])
|
.settings(&[AppSettings::VersionlessSubcommands, AppSettings::SubcommandRequiredElseHelp])
|
||||||
.global_settings(&[AppSettings::AllowMissingPositional, AppSettings::UnifiedHelpMessage, AppSettings::ColoredHelp, AppSettings::ColorAuto])
|
.global_settings(&[AppSettings::AllowMissingPositional, AppSettings::UnifiedHelpMessage, AppSettings::ColoredHelp, AppSettings::ColorAuto])
|
||||||
.arg(Arg::from_usage("-v --verbose 'Print more information'").global(true).multiple(true).max_values(3).takes_value(false))
|
.arg(Arg::from_usage("-v --verbose")
|
||||||
.arg(Arg::from_usage("-q --quiet 'Print less information'").global(true).conflicts_with("verbose"))
|
.help(tr!("Print more information"))
|
||||||
.subcommand(SubCommand::with_name("init").about("Initialize a new repository")
|
.global(true)
|
||||||
.arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE] 'Set the target bundle size in MiB'")
|
.multiple(true)
|
||||||
.default_value(DEFAULT_BUNDLE_SIZE_STR).validator(validate_num))
|
.max_values(3)
|
||||||
.arg(Arg::from_usage("--chunker [CHUNKER] 'Set the chunker algorithm and target chunk size'")
|
.takes_value(false))
|
||||||
.default_value(DEFAULT_CHUNKER).validator(validate_chunker))
|
.arg(Arg::from_usage("-q --quiet")
|
||||||
.arg(Arg::from_usage("-c --compression [COMPRESSION] 'Set the compression method and level'")
|
.help(tr!("Print less information"))
|
||||||
.default_value(DEFAULT_COMPRESSION).validator(validate_compression))
|
.global(true)
|
||||||
.arg(Arg::from_usage("-e --encrypt 'Generate a keypair and enable encryption'"))
|
.conflicts_with("verbose"))
|
||||||
.arg(Arg::from_usage("--hash [HASH] 'Set the hash method'")
|
.subcommand(SubCommand::with_name("init")
|
||||||
.default_value(DEFAULT_HASH).validator(validate_hash))
|
.about(tr!("Initialize a new repository"))
|
||||||
.arg(Arg::from_usage("-r --remote <REMOTE> 'Set the path to the mounted remote storage'")
|
.arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE]")
|
||||||
.validator(validate_existing_path))
|
.help(tr!("Set the target bundle size in MiB"))
|
||||||
.arg(Arg::from_usage("<REPO> 'The path for the new repository'")
|
.default_value(DEFAULT_BUNDLE_SIZE_STR)
|
||||||
.validator(|val| validate_repo_path(val, false, Some(false), Some(false)))))
|
|
||||||
.subcommand(SubCommand::with_name("backup").about("Create a new backup")
|
|
||||||
.arg(Arg::from_usage("--full 'Create a full backup without using a reference'"))
|
|
||||||
.arg(Arg::from_usage("[reference] --ref [REF] 'Base the new backup on this reference'")
|
|
||||||
.conflicts_with("full"))
|
|
||||||
.arg(Arg::from_usage("[cross_device] -x --xdev 'Allow to cross filesystem boundaries'"))
|
|
||||||
.arg(Arg::from_usage("-e --exclude [PATTERN]... 'Exclude this path or file pattern'"))
|
|
||||||
.arg(Arg::from_usage("[excludes_from] --excludes-from [FILE] 'Read the list of excludes from this file'"))
|
|
||||||
.arg(Arg::from_usage("[no_default_excludes] --no-default-excludes 'Do not load the default excludes file'"))
|
|
||||||
.arg(Arg::from_usage("--tar 'Read the source data from a tar file'")
|
|
||||||
.conflicts_with_all(&["reference", "exclude", "excludes_from"]))
|
|
||||||
.arg(Arg::from_usage("<SRC> 'Source path to backup'")
|
|
||||||
.validator(validate_existing_path_or_stdio))
|
|
||||||
.arg(Arg::from_usage("<BACKUP> 'Backup path, [repository]::backup'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(true), Some(false)))))
|
|
||||||
.subcommand(SubCommand::with_name("restore").about("Restore a backup or subtree")
|
|
||||||
.arg(Arg::from_usage("--tar 'Restore in form of a tar file'"))
|
|
||||||
.arg(Arg::from_usage("<BACKUP> 'The backup/subtree path, [repository]::backup[::subtree]'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(true), None)))
|
|
||||||
.arg(Arg::from_usage("<DST> 'Destination path for backup'")))
|
|
||||||
.subcommand(SubCommand::with_name("remove").aliases(&["rm", "delete", "del"]).about("Remove a backup or a subtree")
|
|
||||||
.arg(Arg::from_usage("-f --force 'Remove multiple backups in a backup folder'"))
|
|
||||||
.arg(Arg::from_usage("<BACKUP> 'The backup/subtree path, [repository]::backup[::subtree]'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(true), None))))
|
|
||||||
.subcommand(SubCommand::with_name("prune").about("Remove backups based on age")
|
|
||||||
.arg(Arg::from_usage("-p --prefix [PREFIX] 'Only consider backups starting with this prefix'"))
|
|
||||||
.arg(Arg::from_usage("-d --daily [NUM] 'Keep this number of daily backups'")
|
|
||||||
.default_value("0").validator(validate_num))
|
|
||||||
.arg(Arg::from_usage("-w --weekly [NUM] 'Keep this number of weekly backups'")
|
|
||||||
.default_value("0").validator(validate_num))
|
|
||||||
.arg(Arg::from_usage("-m --monthly [NUM] 'Keep this number of monthly backups'")
|
|
||||||
.default_value("0").validator(validate_num))
|
|
||||||
.arg(Arg::from_usage("-y --yearly [NUM] 'Keep this number of yearly backups'")
|
|
||||||
.default_value("0").validator(validate_num))
|
|
||||||
.arg(Arg::from_usage("-f --force 'Actually run the prune instead of simulating it'"))
|
|
||||||
.arg(Arg::from_usage("<REPO> 'Path of the repository'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
|
||||||
.subcommand(SubCommand::with_name("vacuum").about("Reclaim space by rewriting bundles")
|
|
||||||
.arg(Arg::from_usage("-r --ratio [NUM] 'Ratio in % of unused space in a bundle to rewrite that bundle'")
|
|
||||||
.default_value(DEFAULT_VACUUM_RATIO_STR).validator(validate_num))
|
|
||||||
.arg(Arg::from_usage("--combine 'Combine small bundles into larger ones'"))
|
|
||||||
.arg(Arg::from_usage("-f --force 'Actually run the vacuum instead of simulating it'"))
|
|
||||||
.arg(Arg::from_usage("<REPO> 'Path of the repository'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
|
||||||
.subcommand(SubCommand::with_name("check").about("Check the repository, a backup or a backup subtree")
|
|
||||||
.arg(Arg::from_usage("-b --bundles 'Check the bundles'"))
|
|
||||||
.arg(Arg::from_usage("[bundle_data] --bundle-data 'Check bundle contents (slow)'").requires("bundles").alias("data"))
|
|
||||||
.arg(Arg::from_usage("-i --index 'Check the chunk index'"))
|
|
||||||
.arg(Arg::from_usage("-r --repair 'Try to repair errors'"))
|
|
||||||
.arg(Arg::from_usage("<PATH> 'Path of the repository/backup/subtree, [repository][::backup[::subtree]]'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, None, None))))
|
|
||||||
.subcommand(SubCommand::with_name("list").alias("ls").about("List backups or backup contents")
|
|
||||||
.arg(Arg::from_usage("<PATH> 'Path of the repository/backup/subtree, [repository][::backup[::subtree]]'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, None, None))))
|
|
||||||
.subcommand(SubCommand::with_name("mount").about("Mount the repository, a backup or a subtree")
|
|
||||||
.arg(Arg::from_usage("<PATH> 'Path of the repository/backup/subtree, [repository][::backup[::subtree]]'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, None, None)))
|
|
||||||
.arg(Arg::from_usage("<MOUNTPOINT> 'Existing mount point'")
|
|
||||||
.validator(validate_existing_path)))
|
|
||||||
.subcommand(SubCommand::with_name("bundlelist").about("List bundles in a repository")
|
|
||||||
.arg(Arg::from_usage("<REPO> 'Path of the repository'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
|
||||||
.subcommand(SubCommand::with_name("bundleinfo").about("Display information on a bundle")
|
|
||||||
.arg(Arg::from_usage("<REPO> 'Path of the repository'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(false), Some(false))))
|
|
||||||
.arg(Arg::from_usage("<BUNDLE> 'Id of the bundle'")))
|
|
||||||
.subcommand(SubCommand::with_name("import").about("Reconstruct a repository from the remote storage")
|
|
||||||
.arg(Arg::from_usage("-k --key [FILE]... 'Key file needed to read the bundles'"))
|
|
||||||
.arg(Arg::from_usage("<REMOTE> 'Remote repository path'")
|
|
||||||
.validator(validate_existing_path))
|
|
||||||
.arg(Arg::from_usage("<REPO> 'The path for the new repository'")
|
|
||||||
.validator(|val| validate_repo_path(val, false, Some(false), Some(false)))))
|
|
||||||
.subcommand(SubCommand::with_name("info").about("Display information on a repository, a backup or a subtree")
|
|
||||||
.arg(Arg::from_usage("<PATH> 'Path of the repository/backup/subtree, [repository][::backup[::subtree]]'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, None, None))))
|
|
||||||
.subcommand(SubCommand::with_name("analyze").about("Analyze the used and reclaimable space of bundles")
|
|
||||||
.arg(Arg::from_usage("<REPO> 'Path of the repository'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
|
||||||
.subcommand(SubCommand::with_name("versions").about("Find different versions of a file in all backups")
|
|
||||||
.arg(Arg::from_usage("<REPO> 'Path of the repository'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(false), Some(false))))
|
|
||||||
.arg(Arg::from_usage("<PATH> 'Path of the file'")))
|
|
||||||
.subcommand(SubCommand::with_name("diff").about("Display differences between two backup versions")
|
|
||||||
.arg(Arg::from_usage("<OLD> 'Old version, [repository]::backup[::subpath]'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(true), None)))
|
|
||||||
.arg(Arg::from_usage("<NEW> 'New version, [repository]::backup[::subpath]'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(true), None))))
|
|
||||||
.subcommand(SubCommand::with_name("copy").alias("cp").about("Create a copy of a backup")
|
|
||||||
.arg(Arg::from_usage("<SRC> 'Existing backup, [repository]::backup'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(true), Some(false))))
|
|
||||||
.arg(Arg::from_usage("<DST> 'Destination backup, [repository]::backup'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(true), Some(false)))))
|
|
||||||
.subcommand(SubCommand::with_name("config").about("Display or change the configuration")
|
|
||||||
.arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE] 'Set the target bundle size in MiB'")
|
|
||||||
.validator(validate_num))
|
.validator(validate_num))
|
||||||
.arg(Arg::from_usage("--chunker [CHUNKER] 'Set the chunker algorithm and target chunk size'")
|
.arg(Arg::from_usage("--chunker [CHUNKER]")
|
||||||
|
.help(tr!("Set the chunker algorithm and target chunk size"))
|
||||||
|
.default_value(DEFAULT_CHUNKER)
|
||||||
|
.validator(validate_chunker))
|
||||||
|
.arg(Arg::from_usage("-c --compression [COMPRESSION]")
|
||||||
|
.help(tr!("Set the compression method and level"))
|
||||||
|
.default_value(DEFAULT_COMPRESSION)
|
||||||
|
.validator(validate_compression))
|
||||||
|
.arg(Arg::from_usage("-e --encrypt")
|
||||||
|
.help(tr!("Generate a keypair and enable encryption")))
|
||||||
|
.arg(Arg::from_usage("--hash [HASH]")
|
||||||
|
.help(tr!("Set the hash method'"))
|
||||||
|
.default_value(DEFAULT_HASH)
|
||||||
|
.validator(validate_hash))
|
||||||
|
.arg(Arg::from_usage("-r --remote <REMOTE>")
|
||||||
|
.help(tr!("Set the path to the mounted remote storage"))
|
||||||
|
.validator(validate_existing_path))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("The path for the new repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, false, Some(false), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("backup")
|
||||||
|
.about(tr!("Create a new backup"))
|
||||||
|
.arg(Arg::from_usage("--full")
|
||||||
|
.help(tr!("Create a full backup without using a reference")))
|
||||||
|
.arg(Arg::from_usage("[reference] --ref [REF]")
|
||||||
|
.help(tr!("Base the new backup on this reference"))
|
||||||
|
.conflicts_with("full"))
|
||||||
|
.arg(Arg::from_usage("[cross_device] -x --xdev")
|
||||||
|
.help(tr!("Allow to cross filesystem boundaries")))
|
||||||
|
.arg(Arg::from_usage("-e --exclude [PATTERN]...")
|
||||||
|
.help(tr!("Exclude this path or file pattern")))
|
||||||
|
.arg(Arg::from_usage("[excludes_from] --excludes-from [FILE]")
|
||||||
|
.help(tr!("Read the list of excludes from this file")))
|
||||||
|
.arg(Arg::from_usage("[no_default_excludes] --no-default-excludes")
|
||||||
|
.help(tr!("Do not load the default excludes file")))
|
||||||
|
.arg(Arg::from_usage("--tar")
|
||||||
|
.help(tr!("Read the source data from a tar file"))
|
||||||
|
.conflicts_with_all(&["reference", "exclude", "excludes_from"]))
|
||||||
|
.arg(Arg::from_usage("<SRC>")
|
||||||
|
.help(tr!("Source path to backup"))
|
||||||
|
.validator(validate_existing_path_or_stdio))
|
||||||
|
.arg(Arg::from_usage("<BACKUP>")
|
||||||
|
.help(tr!("Backup path, [repository]::backup"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(true), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("restore")
|
||||||
|
.about(tr!("Restore a backup or subtree"))
|
||||||
|
.arg(Arg::from_usage("--tar")
|
||||||
|
.help(tr!("Restore in form of a tar file")))
|
||||||
|
.arg(Arg::from_usage("<BACKUP>")
|
||||||
|
.help(tr!("The backup/subtree path, [repository]::backup[::subtree]"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(true), None)))
|
||||||
|
.arg(Arg::from_usage("<DST>")
|
||||||
|
.help(tr!("Destination path for backup"))))
|
||||||
|
.subcommand(SubCommand::with_name("remove")
|
||||||
|
.aliases(&["rm", "delete", "del"])
|
||||||
|
.about(tr!("Remove a backup or a subtree"))
|
||||||
|
.arg(Arg::from_usage("-f --force")
|
||||||
|
.help(tr!("Remove multiple backups in a backup folder")))
|
||||||
|
.arg(Arg::from_usage("<BACKUP>")
|
||||||
|
.help(tr!("The backup/subtree path, [repository]::backup[::subtree]"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(true), None))))
|
||||||
|
.subcommand(SubCommand::with_name("prune")
|
||||||
|
.about(tr!("Remove backups based on age"))
|
||||||
|
.arg(Arg::from_usage("-p --prefix [PREFIX]")
|
||||||
|
.help(tr!("Only consider backups starting with this prefix")))
|
||||||
|
.arg(Arg::from_usage("-d --daily [NUM]")
|
||||||
|
.help(tr!("Keep this number of daily backups"))
|
||||||
|
.default_value("0")
|
||||||
|
.validator(validate_num))
|
||||||
|
.arg(Arg::from_usage("-w --weekly [NUM]")
|
||||||
|
.help(tr!("Keep this number of weekly backups"))
|
||||||
|
.default_value("0")
|
||||||
|
.validator(validate_num))
|
||||||
|
.arg(Arg::from_usage("-m --monthly [NUM]")
|
||||||
|
.help(tr!("Keep this number of monthly backups"))
|
||||||
|
.default_value("0")
|
||||||
|
.validator(validate_num))
|
||||||
|
.arg(Arg::from_usage("-y --yearly [NUM]")
|
||||||
|
.help(tr!("Keep this number of yearly backups"))
|
||||||
|
.default_value("0")
|
||||||
|
.validator(validate_num))
|
||||||
|
.arg(Arg::from_usage("-f --force")
|
||||||
|
.help(tr!("Actually run the prune instead of simulating it")))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("Path of the repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("vacuum")
|
||||||
|
.about(tr!("Reclaim space by rewriting bundles"))
|
||||||
|
.arg(Arg::from_usage("-r --ratio [NUM]")
|
||||||
|
.help(tr!("Ratio in % of unused space in a bundle to rewrite that bundle"))
|
||||||
|
.default_value(DEFAULT_VACUUM_RATIO_STR).validator(validate_num))
|
||||||
|
.arg(Arg::from_usage("--combine")
|
||||||
|
.help(tr!("Combine small bundles into larger ones")))
|
||||||
|
.arg(Arg::from_usage("-f --force")
|
||||||
|
.help(tr!("Actually run the vacuum instead of simulating it")))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("Path of the repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("check")
|
||||||
|
.about(tr!("Check the repository, a backup or a backup subtree"))
|
||||||
|
.arg(Arg::from_usage("-b --bundles")
|
||||||
|
.help(tr!("Check the bundles")))
|
||||||
|
.arg(Arg::from_usage("[bundle_data] --bundle-data")
|
||||||
|
.help(tr!("Check bundle contents (slow)"))
|
||||||
|
.requires("bundles")
|
||||||
|
.alias("data"))
|
||||||
|
.arg(Arg::from_usage("-i --index")
|
||||||
|
.help(tr!("Check the chunk index")))
|
||||||
|
.arg(Arg::from_usage("-r --repair")
|
||||||
|
.help(tr!("Try to repair errors")))
|
||||||
|
.arg(Arg::from_usage("<PATH>")
|
||||||
|
.help(tr!("Path of the repository/backup/subtree, [repository][::backup[::subtree]]"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, None, None))))
|
||||||
|
.subcommand(SubCommand::with_name("list")
|
||||||
|
.alias("ls")
|
||||||
|
.about(tr!("List backups or backup contents"))
|
||||||
|
.arg(Arg::from_usage("<PATH>")
|
||||||
|
.help(tr!("Path of the repository/backup/subtree, [repository][::backup[::subtree]]"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, None, None))))
|
||||||
|
.subcommand(SubCommand::with_name("mount")
|
||||||
|
.about(tr!("Mount the repository, a backup or a subtree"))
|
||||||
|
.arg(Arg::from_usage("<PATH>")
|
||||||
|
.help(tr!("Path of the repository/backup/subtree, [repository][::backup[::subtree]]"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, None, None)))
|
||||||
|
.arg(Arg::from_usage("<MOUNTPOINT>")
|
||||||
|
.help(tr!("Existing mount point"))
|
||||||
|
.validator(validate_existing_path)))
|
||||||
|
.subcommand(SubCommand::with_name("bundlelist")
|
||||||
|
.about(tr!("List bundles in a repository"))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("Path of the repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("bundleinfo")
|
||||||
|
.about(tr!("Display information on a bundle"))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("Path of the repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(false), Some(false))))
|
||||||
|
.arg(Arg::from_usage("<BUNDLE>")
|
||||||
|
.help(tr!("Id of the bundle"))))
|
||||||
|
.subcommand(SubCommand::with_name("import")
|
||||||
|
.about(tr!("Reconstruct a repository from the remote storage"))
|
||||||
|
.arg(Arg::from_usage("-k --key [FILE]...")
|
||||||
|
.help(tr!("Key file needed to read the bundles")))
|
||||||
|
.arg(Arg::from_usage("<REMOTE>")
|
||||||
|
.help(tr!("Remote repository path"))
|
||||||
|
.validator(validate_existing_path))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("The path for the new repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, false, Some(false), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("info")
|
||||||
|
.about(tr!("Display information on a repository, a backup or a subtree"))
|
||||||
|
.arg(Arg::from_usage("<PATH>")
|
||||||
|
.help(tr!("Path of the repository/backup/subtree, [repository][::backup[::subtree]]"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, None, None))))
|
||||||
|
.subcommand(SubCommand::with_name("analyze")
|
||||||
|
.about(tr!("Analyze the used and reclaimable space of bundles"))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("Path of the repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("versions")
|
||||||
|
.about(tr!("Find different versions of a file in all backups"))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("Path of the repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(false), Some(false))))
|
||||||
|
.arg(Arg::from_usage("<PATH>")
|
||||||
|
.help(tr!("Path of the file"))))
|
||||||
|
.subcommand(SubCommand::with_name("diff")
|
||||||
|
.about(tr!("Display differences between two backup versions"))
|
||||||
|
.arg(Arg::from_usage("<OLD>")
|
||||||
|
.help(tr!("Old version, [repository]::backup[::subpath]"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(true), None)))
|
||||||
|
.arg(Arg::from_usage("<NEW>")
|
||||||
|
.help(tr!("New version, [repository]::backup[::subpath]"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(true), None))))
|
||||||
|
.subcommand(SubCommand::with_name("copy")
|
||||||
|
.alias("cp")
|
||||||
|
.about(tr!("Create a copy of a backup"))
|
||||||
|
.arg(Arg::from_usage("<SRC>")
|
||||||
|
.help(tr!("Existing backup, [repository]::backup"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(true), Some(false))))
|
||||||
|
.arg(Arg::from_usage("<DST>")
|
||||||
|
.help(tr!("Destination backup, [repository]::backup"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(true), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("config")
|
||||||
|
.about(tr!("Display or change the configuration"))
|
||||||
|
.arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE]")
|
||||||
|
.help(tr!("Set the target bundle size in MiB"))
|
||||||
|
.validator(validate_num))
|
||||||
|
.arg(Arg::from_usage("--chunker [CHUNKER]")
|
||||||
|
.help(tr!("Set the chunker algorithm and target chunk size"))
|
||||||
|
.validator(validate_chunker))
|
||||||
|
.arg(Arg::from_usage("-c --compression [COMPRESSION]")
|
||||||
|
.help(tr!("Set the compression method and level"))
|
||||||
|
.validator(validate_compression))
|
||||||
|
.arg(Arg::from_usage("-e --encryption [PUBLIC_KEY]")
|
||||||
|
.help(tr!("The public key to use for encryption"))
|
||||||
|
.validator(validate_public_key))
|
||||||
|
.arg(Arg::from_usage("--hash [HASH]")
|
||||||
|
.help(tr!("Set the hash method"))
|
||||||
|
.validator(validate_hash))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("Path of the repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("genkey")
|
||||||
|
.about(tr!("Generate a new key pair"))
|
||||||
|
.arg(Arg::from_usage("-p --password [PASSWORD]")
|
||||||
|
.help(tr!("Derive the key pair from the given password")))
|
||||||
|
.arg(Arg::from_usage("[FILE]")
|
||||||
|
.help(tr!("Destination file for the keypair"))))
|
||||||
|
.subcommand(SubCommand::with_name("addkey")
|
||||||
|
.about(tr!("Add a key pair to the repository"))
|
||||||
|
.arg(Arg::from_usage("-g --generate")
|
||||||
|
.help(tr!("Generate a new key pair"))
|
||||||
|
.conflicts_with("FILE"))
|
||||||
|
.arg(Arg::from_usage("[set_default] --default -d")
|
||||||
|
.help(tr!("Set the key pair as default")))
|
||||||
|
.arg(Arg::from_usage("-p --password [PASSWORD]")
|
||||||
|
.help(tr!("Derive the key pair from the given password"))
|
||||||
|
.requires("generate"))
|
||||||
|
.arg(Arg::from_usage("[FILE]")
|
||||||
|
.help(tr!("File containing the keypair"))
|
||||||
|
.validator(validate_existing_path))
|
||||||
|
.arg(Arg::from_usage("<REPO>")
|
||||||
|
.help(tr!("Path of the repository"))
|
||||||
|
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
||||||
|
.subcommand(SubCommand::with_name("algotest")
|
||||||
|
.about(tr!("Test a specific algorithm combination"))
|
||||||
|
.arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE]")
|
||||||
|
.help(tr!("Set the target bundle size in MiB"))
|
||||||
|
.default_value(DEFAULT_BUNDLE_SIZE_STR)
|
||||||
|
.validator(validate_num))
|
||||||
|
.arg(Arg::from_usage("--chunker [CHUNKER]")
|
||||||
|
.help(tr!("Set the chunker algorithm and target chunk size"))
|
||||||
|
.default_value(DEFAULT_CHUNKER)
|
||||||
.validator(validate_chunker))
|
.validator(validate_chunker))
|
||||||
.arg(Arg::from_usage("-c --compression [COMPRESSION] 'Set the compression method and level'")
|
.arg(Arg::from_usage("-c --compression [COMPRESSION] 'Set the compression method and level'")
|
||||||
|
.default_value(DEFAULT_COMPRESSION)
|
||||||
.validator(validate_compression))
|
.validator(validate_compression))
|
||||||
.arg(Arg::from_usage("-e --encryption [PUBLIC_KEY] 'The public key to use for encryption'")
|
.arg(Arg::from_usage("-e --encrypt")
|
||||||
.validator(validate_public_key))
|
.help(tr!("Generate a keypair and enable encryption")))
|
||||||
.arg(Arg::from_usage("--hash [HASH] 'Set the hash method'")
|
.arg(Arg::from_usage("--hash [HASH]")
|
||||||
|
.help(tr!("Set the hash method"))
|
||||||
|
.default_value(DEFAULT_HASH)
|
||||||
.validator(validate_hash))
|
.validator(validate_hash))
|
||||||
.arg(Arg::from_usage("<REPO> 'Path of the repository'")
|
.arg(Arg::from_usage("<FILE>")
|
||||||
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
.help(tr!("File with test data"))
|
||||||
.subcommand(SubCommand::with_name("genkey").about("Generate a new key pair")
|
|
||||||
.arg(Arg::from_usage("-p --password [PASSWORD] 'Derive the key pair from the given password'"))
|
|
||||||
.arg(Arg::from_usage("[FILE] 'Destination file for the keypair'")))
|
|
||||||
.subcommand(SubCommand::with_name("addkey").about("Add a key pair to the repository")
|
|
||||||
.arg(Arg::from_usage("-g --generate 'Generate a new key pair'")
|
|
||||||
.conflicts_with("FILE"))
|
|
||||||
.arg(Arg::from_usage("[set_default] --default -d 'Set the key pair as default'"))
|
|
||||||
.arg(Arg::from_usage("-p --password [PASSWORD] 'Derive the key pair from the given password'")
|
|
||||||
.requires("generate"))
|
|
||||||
.arg(Arg::from_usage("[FILE] 'File containing the keypair'")
|
|
||||||
.validator(validate_existing_path))
|
|
||||||
.arg(Arg::from_usage("<REPO> 'Path of the repository'")
|
|
||||||
.validator(|val| validate_repo_path(val, true, Some(false), Some(false)))))
|
|
||||||
.subcommand(SubCommand::with_name("algotest").about("Test a specific algorithm combination")
|
|
||||||
.arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE] 'Set the target bundle size in MiB'")
|
|
||||||
.default_value(DEFAULT_BUNDLE_SIZE_STR).validator(validate_num))
|
|
||||||
.arg(Arg::from_usage("--chunker [CHUNKER] 'Set the chunker algorithm and target chunk size'")
|
|
||||||
.default_value(DEFAULT_CHUNKER).validator(validate_chunker))
|
|
||||||
.arg(Arg::from_usage("-c --compression [COMPRESSION] 'Set the compression method and level'")
|
|
||||||
.default_value(DEFAULT_COMPRESSION).validator(validate_compression))
|
|
||||||
.arg(Arg::from_usage("-e --encrypt 'Generate a keypair and enable encryption'"))
|
|
||||||
.arg(Arg::from_usage("--hash [HASH] 'Set the hash method'")
|
|
||||||
.default_value(DEFAULT_HASH).validator(validate_hash))
|
|
||||||
.arg(Arg::from_usage("<FILE> 'File with test data'")
|
|
||||||
.validator(validate_existing_path))).get_matches();
|
.validator(validate_existing_path))).get_matches();
|
||||||
let verbose_count = args.subcommand()
|
let verbose_count = args.subcommand()
|
||||||
.1
|
.1
|
||||||
|
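For reference, the validate_* callbacks threaded through all of these arguments follow clap 2.x's validator contract, Fn(String) -> Result<(), String>. A minimal stand-alone validator of the same shape (a sketch; validate_num here is a simplified stand-in for zvault's helper of the same name, not its actual code):

    extern crate clap;
    use clap::{App, Arg};

    // Same contract as clap 2.x validators: Ok(()) accepts the value,
    // Err(message) rejects it and aborts argument parsing.
    fn validate_num(val: String) -> Result<(), String> {
        val.parse::<u64>()
            .map(|_| ())
            .map_err(|_| format!("not a number: {}", val))
    }

    fn main() {
        let args = App::new("example")
            .arg(Arg::from_usage("-d --daily [NUM]")
                .default_value("0")
                .validator(validate_num))
            .get_matches();
        // The default "0" guarantees a value is always present.
        println!("daily = {}", args.value_of("daily").unwrap());
    }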
@@ -745,7 +866,7 @@ pub fn parse() -> Result<(log::Level, Arguments), ErrorCode> {
             }
         }
         _ => {
-            error!("No subcommand given");
+            tr_error!("No subcommand given");
             return Err(ErrorCode::InvalidArgs);
         }
     };
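Taken in isolation, the conversion this commit applies to every argument looks like the sketch below. The tr! macro is reduced to an identity stand-in here; the real macro this commit defines is expected to look the string up in a message catalog, so the exact behavior shown is an assumption:

    extern crate clap;
    use clap::{App, Arg, SubCommand};

    // Identity stand-in for the translation macro.
    macro_rules! tr {
        ($s:expr) => { $s };
    }

    fn main() {
        let args = App::new("example")
            // Before: help text embedded in the usage string, invisible to translation.
            //   Arg::from_usage("<REPO> 'Path of the repository'")
            // After: the usage string only declares the argument; the help text
            // goes through tr!() so it can be collected and translated.
            .subcommand(SubCommand::with_name("info")
                .about(tr!("Display information on a repository"))
                .arg(Arg::from_usage("<REPO>")
                    .help(tr!("Path of the repository"))))
            .get_matches();
        if let ("info", Some(sub)) = args.subcommand() {
            println!("repo = {}", sub.value_of("REPO").unwrap());
        }
    }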
src/cli/mod.rs (198 changed lines)
@@ -105,7 +105,7 @@ macro_rules! checked {
         match $expr {
             Ok(val) => val,
             Err(err) => {
-                error!("Failed to {}\n\tcaused by: {}", $msg, err);
+                tr_error!("Failed to {}\n\tcaused by: {}", $msg, err);
                 return Err($code)
             }
         }
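The checked! macro changed by this hunk can be exercised stand-alone as follows (a sketch: tr_error! is replaced by a plain eprintln! stand-in, and ErrorCode is cut down to a single invented variant):

    // Stand-in for the translated error logger.
    macro_rules! tr_error {
        ($($arg:tt)*) => { eprintln!($($arg)*) };
    }

    // Evaluates a Result, logs a message on failure and returns the
    // given error code from the enclosing function.
    macro_rules! checked {
        ($expr:expr, $msg:expr, $code:expr) => {
            match $expr {
                Ok(val) => val,
                Err(err) => {
                    tr_error!("Failed to {}\n\tcaused by: {}", $msg, err);
                    return Err($code)
                }
            }
        };
    }

    #[derive(Debug)]
    enum ErrorCode { SaveConfig }

    fn save_config() -> Result<(), String> { Err("disk full".to_string()) }

    fn run() -> Result<(), ErrorCode> {
        checked!(save_config(), "save config", ErrorCode::SaveConfig);
        Ok(())
    }

    fn main() { let _ = run(); }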
@@ -122,7 +122,7 @@ fn open_repository(path: &Path) -> Result<Repository, ErrorCode> {

 fn get_backup(repo: &Repository, backup_name: &str) -> Result<Backup, ErrorCode> {
     if !repo.has_backup(backup_name) {
-        error!("A backup with that name does not exist");
+        tr_error!("A backup with that name does not exist");
         return Err(ErrorCode::NoSuchBackup);
     }
     Ok(checked!(
@@ -145,11 +145,11 @@ fn find_reference_backup(
         Ok(backup_map) => backup_map,
         Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map,
             _failed))) => {
-            warn!("Some backups could not be read, ignoring them");
+            tr_warn!("Some backups could not be read, ignoring them");
             backup_map
         }
         Err(err) => {
-            error!("Failed to load backup files: {}", err);
+            tr_error!("Failed to load backup files: {}", err);
             return Err(ErrorCode::LoadBackup);
         }
     };
@@ -164,41 +164,41 @@ fn find_reference_backup(

 fn print_backup(backup: &Backup) {
     if backup.modified {
-        warn!("This backup has been modified");
+        tr_warn!("This backup has been modified");
     }
-    println!(
+    tr_println!(
         "Date: {}",
         Local.timestamp(backup.timestamp, 0).to_rfc2822()
     );
-    println!("Source: {}:{}", backup.host, backup.path);
+    tr_println!("Source: {}:{}", backup.host, backup.path);
-    println!("Duration: {}", to_duration(backup.duration));
+    tr_println!("Duration: {}", to_duration(backup.duration));
-    println!(
+    tr_println!(
         "Entries: {} files, {} dirs",
         backup.file_count,
         backup.dir_count
     );
-    println!(
+    tr_println!(
         "Total backup size: {}",
         to_file_size(backup.total_data_size)
     );
-    println!(
+    tr_println!(
         "Modified data size: {}",
         to_file_size(backup.changed_data_size)
     );
     let dedup_ratio = backup.deduplicated_data_size as f32 / backup.changed_data_size as f32;
-    println!(
+    tr_println!(
         "Deduplicated size: {}, {:.1}% saved",
         to_file_size(backup.deduplicated_data_size),
         (1.0 - dedup_ratio) * 100.0
     );
     let compress_ratio = backup.encoded_data_size as f32 / backup.deduplicated_data_size as f32;
-    println!(
+    tr_println!(
         "Compressed size: {} in {} bundles, {:.1}% saved",
         to_file_size(backup.encoded_data_size),
         backup.bundle_count,
         (1.0 - compress_ratio) * 100.0
     );
-    println!(
+    tr_println!(
         "Chunk count: {}, avg size: {}",
         backup.chunk_count,
         to_file_size(backup.avg_chunk_size as u64)
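The two savings percentages printed above share one formula, savings = (1 - ratio) * 100. A worked example with invented sizes (hypothetical numbers, not from any real backup):

    fn main() {
        // Hypothetical sizes in bytes.
        let changed_data_size: u64 = 1_000_000;    // data that differs from the reference
        let deduplicated_data_size: u64 = 400_000; // what remains after deduplication
        let encoded_data_size: u64 = 100_000;      // what remains after compression

        let dedup_ratio = deduplicated_data_size as f32 / changed_data_size as f32;
        let compress_ratio = encoded_data_size as f32 / deduplicated_data_size as f32;

        // Matches the output format of print_backup above.
        println!("Deduplication saved {:.1}%", (1.0 - dedup_ratio) * 100.0);   // 60.0%
        println!("Compression saved {:.1}%", (1.0 - compress_ratio) * 100.0); // 75.0%
    }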
@@ -246,30 +246,30 @@ pub fn format_inode_one_line(inode: &Inode) -> String {
 }

 fn print_inode(inode: &Inode) {
-    println!("Name: {}", inode.name);
+    tr_println!("Name: {}", inode.name);
-    println!("Type: {}", inode.file_type);
+    tr_println!("Type: {}", inode.file_type);
-    println!("Size: {}", to_file_size(inode.size));
+    tr_println!("Size: {}", to_file_size(inode.size));
-    println!("Permissions: {:3o}", inode.mode);
+    tr_println!("Permissions: {:3o}", inode.mode);
-    println!("User: {}", inode.user);
+    tr_println!("User: {}", inode.user);
-    println!("Group: {}", inode.group);
+    tr_println!("Group: {}", inode.group);
-    println!(
+    tr_println!(
         "Timestamp: {}",
         Local.timestamp(inode.timestamp, 0).to_rfc2822()
     );
     if let Some(ref target) = inode.symlink_target {
-        println!("Symlink target: {}", target);
+        tr_println!("Symlink target: {}", target);
     }
-    println!("Cumulative size: {}", to_file_size(inode.cum_size));
+    tr_println!("Cumulative size: {}", to_file_size(inode.cum_size));
-    println!("Cumulative file count: {}", inode.cum_files);
+    tr_println!("Cumulative file count: {}", inode.cum_files);
-    println!("Cumulative directory count: {}", inode.cum_dirs);
+    tr_println!("Cumulative directory count: {}", inode.cum_dirs);
     if let Some(ref children) = inode.children {
-        println!("Children:");
+        tr_println!("Children:");
         for name in children.keys() {
             println!(" - {}", name);
         }
     }
     if !inode.xattrs.is_empty() {
-        println!("Extended attributes:");
+        tr_println!("Extended attributes:");
         for (key, value) in &inode.xattrs {
             if let Ok(value) = str::from_utf8(value) {
                 println!(" - {} = '{}'", key, value);
@@ -296,17 +296,17 @@ fn print_backups(backup_map: &HashMap<String, Backup>) {
 }

 fn print_repoinfo(info: &RepositoryInfo) {
-    println!("Bundles: {}", info.bundle_count);
+    tr_println!("Bundles: {}", info.bundle_count);
-    println!("Total size: {}", to_file_size(info.encoded_data_size));
+    tr_println!("Total size: {}", to_file_size(info.encoded_data_size));
-    println!("Uncompressed size: {}", to_file_size(info.raw_data_size));
+    tr_println!("Uncompressed size: {}", to_file_size(info.raw_data_size));
-    println!("Compression ratio: {:.1}%", info.compression_ratio * 100.0);
+    tr_println!("Compression ratio: {:.1}%", info.compression_ratio * 100.0);
-    println!("Chunk count: {}", info.chunk_count);
+    tr_println!("Chunk count: {}", info.chunk_count);
-    println!(
+    tr_println!(
         "Average chunk size: {}",
         to_file_size(info.avg_chunk_size as u64)
     );
     let index_usage = info.index_entries as f32 / info.index_capacity as f32;
-    println!(
+    tr_println!(
         "Index: {}, {:.0}% full",
         to_file_size(info.index_size as u64),
         index_usage * 100.0
@@ -314,26 +314,26 @@ fn print_repoinfo(info: &RepositoryInfo) {
 }

 fn print_bundle(bundle: &StoredBundle) {
-    println!("Bundle {}", bundle.info.id);
+    tr_println!("Bundle {}", bundle.info.id);
-    println!(" - Mode: {:?}", bundle.info.mode);
+    tr_println!(" - Mode: {:?}", bundle.info.mode);
-    println!(" - Path: {:?}", bundle.path);
+    tr_println!(" - Path: {:?}", bundle.path);
-    println!(
+    tr_println!(
         " - Date: {}",
         Local.timestamp(bundle.info.timestamp, 0).to_rfc2822()
     );
-    println!(" - Hash method: {:?}", bundle.info.hash_method);
+    tr_println!(" - Hash method: {:?}", bundle.info.hash_method);
     let encryption = if let Some((_, ref key)) = bundle.info.encryption {
         to_hex(key)
     } else {
         "none".to_string()
     };
-    println!(" - Encryption: {}", encryption);
+    tr_println!(" - Encryption: {}", encryption);
-    println!(" - Chunks: {}", bundle.info.chunk_count);
+    tr_println!(" - Chunks: {}", bundle.info.chunk_count);
-    println!(
+    tr_println!(
         " - Size: {}",
         to_file_size(bundle.info.encoded_size as u64)
     );
-    println!(
+    tr_println!(
         " - Data size: {}",
         to_file_size(bundle.info.raw_size as u64)
     );
@@ -343,7 +343,7 @@ fn print_bundle(bundle: &StoredBundle) {
     } else {
         "none".to_string()
     };
-    println!(
+    tr_println!(
         " - Compression: {}, ratio: {:.1}%",
         compression,
         ratio * 100.0
@@ -351,7 +351,7 @@ fn print_bundle(bundle: &StoredBundle) {
 }

 fn print_bundle_one_line(bundle: &BundleInfo) {
-    println!(
+    tr_println!(
         "{}: {:8?}, {:5} chunks, {:8}",
         bundle.id,
         bundle.mode,
@@ -361,19 +361,19 @@ fn print_bundle_one_line(bundle: &BundleInfo) {
 }

 fn print_config(config: &Config) {
-    println!("Bundle size: {}", to_file_size(config.bundle_size as u64));
+    tr_println!("Bundle size: {}", to_file_size(config.bundle_size as u64));
-    println!("Chunker: {}", config.chunker.to_string());
+    tr_println!("Chunker: {}", config.chunker.to_string());
     if let Some(ref compression) = config.compression {
-        println!("Compression: {}", compression.to_string());
+        tr_println!("Compression: {}", compression.to_string());
     } else {
-        println!("Compression: none");
+        tr_println!("Compression: none");
     }
     if let Some(ref encryption) = config.encryption {
-        println!("Encryption: {}", to_hex(&encryption.1[..]));
+        tr_println!("Encryption: {}", to_hex(&encryption.1[..]));
     } else {
-        println!("Encryption: none");
+        tr_println!("Encryption: none");
     }
-    println!("Hash method: {}", config.hash.name());
+    tr_println!("Hash method: {}", config.hash.name());
 }

 fn print_analysis(analysis: &HashMap<u32, BundleAnalysis>) {
@@ -390,17 +390,17 @@ fn print_analysis(analysis: &HashMap<u32, BundleAnalysis>) {
             }
         }
     }
-    println!("Total bundle size: {}", to_file_size(data_total as u64));
+    tr_println!("Total bundle size: {}", to_file_size(data_total as u64));
     let used = data_total - reclaim_space[10];
-    println!(
+    tr_println!(
         "Space used: {}, {:.1} %",
         to_file_size(used as u64),
         used as f32 / data_total as f32 * 100.0
     );
-    println!("Reclaimable space (depending on vacuum ratio)");
+    tr_println!("Reclaimable space (depending on vacuum ratio)");
     #[allow(unknown_lints, needless_range_loop)]
     for i in 0..11 {
-        println!(
+        tr_println!(
             " - ratio={:3}: {:>10}, {:4.1} %, rewriting {:>10}",
             i * 10,
             to_file_size(reclaim_space[i] as u64),
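The loop above prints one row per 10% vacuum-ratio step. A toy version of such a table (invented bundle data; the logic is inferred from the output format alone, not taken from zvault's actual analyze code) could look like:

    fn main() {
        // (used_size, total_size) per bundle -- invented numbers for illustration.
        let bundles = [(800u64, 1000u64), (100, 1000), (450, 500)];
        let mut reclaim_space = [0u64; 11];
        for &(used, total) in &bundles {
            let unused_ratio = (total - used) as f32 / total as f32;
            // A bundle's unused space counts as reclaimable at every
            // vacuum ratio at or below its own unused ratio.
            for i in 0..11 {
                if unused_ratio >= i as f32 / 10.0 {
                    reclaim_space[i] += total - used;
                }
            }
        }
        for i in 0..11 {
            println!(" - ratio={:3}: reclaimable {:>6}", i * 10, reclaim_space[i]);
        }
    }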
@@ -415,7 +415,7 @@ fn print_analysis(analysis: &HashMap<u32, BundleAnalysis>) {
 pub fn run() -> Result<(), ErrorCode> {
     let (log_level, args) = try!(args::parse());
     if let Err(err) = logger::init(log_level) {
-        println!("Failed to initialize the logger: {}", err);
+        tr_println!("Failed to initialize the logger: {}", err);
         return Err(ErrorCode::InitializeLogger);
     }
     match args {
@@ -429,7 +429,7 @@ pub fn run() -> Result<(), ErrorCode> {
             remote_path
         } => {
             if !Path::new(&remote_path).is_absolute() {
-                error!("The remote path of a repository must be absolute.");
+                tr_error!("The remote path of a repository must be absolute.");
                 return Err(ErrorCode::InvalidArgs);
             }
             let mut repo = checked!(
@@ -449,9 +449,9 @@ pub fn run() -> Result<(), ErrorCode> {
             );
             if encryption {
                 let (public, secret) = Crypto::gen_keypair();
-                info!("Created the following key pair");
+                tr_info!("Created the following key pair");
-                println!("public: {}", to_hex(&public[..]));
+                tr_println!("public: {}", to_hex(&public[..]));
-                println!("secret: {}", to_hex(&secret[..]));
+                tr_println!("secret: {}", to_hex(&secret[..]));
                 repo.set_encryption(Some(&public));
                 checked!(
                     repo.register_key(public, secret),
@@ -459,7 +459,7 @@ pub fn run() -> Result<(), ErrorCode> {
                     ErrorCode::AddKey
                 );
                 checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
-                warn!(
+                tr_warn!(
                     "Please store this key pair in a secure location before using the repository"
                 );
                 println!();
@@ -480,11 +480,11 @@ pub fn run() -> Result<(), ErrorCode> {
         } => {
             let mut repo = try!(open_repository(&repo_path));
             if repo.has_backup(&backup_name) {
-                error!("A backup with that name already exists");
+                tr_error!("A backup with that name already exists");
                 return Err(ErrorCode::BackupAlreadyExists);
             }
             if src_path == "-" && !tar {
-                error!("Reading from stdin requires --tar");
+                tr_error!("Reading from stdin requires --tar");
                 return Err(ErrorCode::InvalidArgs);
             }
             let mut reference_backup = None;
@@ -500,9 +500,9 @@ pub fn run() -> Result<(), ErrorCode> {
                     reference_backup = try!(find_reference_backup(&repo, &src_path));
                 }
                 if let Some(&(ref name, _)) = reference_backup.as_ref() {
-                    info!("Using backup {} as reference", name);
+                    tr_info!("Using backup {} as reference", name);
                 } else {
-                    info!("No reference backup found, doing a full scan instead");
+                    tr_info!("No reference backup found, doing a full scan instead");
                 }
             }
             let reference_backup = reference_backup.map(|(_, backup)| backup);
@@ -569,15 +569,15 @@ pub fn run() -> Result<(), ErrorCode> {
             };
             let backup = match result {
                 Ok(backup) => {
-                    info!("Backup finished");
+                    tr_info!("Backup finished");
                     backup
                 }
                 Err(RepositoryError::Backup(BackupError::FailedPaths(backup, _failed_paths))) => {
-                    warn!("Some files are missing from the backup");
+                    tr_warn!("Some files are missing from the backup");
                     backup
                 }
                 Err(err) => {
-                    error!("Backup failed: {}", err);
+                    tr_error!("Backup failed: {}", err);
                     return Err(ErrorCode::BackupRun);
                 }
             };
@@ -623,7 +623,7 @@ pub fn run() -> Result<(), ErrorCode> {
                     ErrorCode::RestoreRun
                 );
             }
-            info!("Restore finished");
+            tr_info!("Restore finished");
         }
         Arguments::Copy {
             repo_path_src,
@@ -632,12 +632,12 @@ pub fn run() -> Result<(), ErrorCode> {
             backup_name_dst
         } => {
             if repo_path_src != repo_path_dst {
-                error!("Can only run copy on same repository");
+                tr_error!("Can only run copy on same repository");
                 return Err(ErrorCode::InvalidArgs);
             }
             let mut repo = try!(open_repository(&repo_path_src));
             if repo.has_backup(&backup_name_dst) {
-                error!("A backup with that name already exists");
+                tr_error!("A backup with that name already exists");
                 return Err(ErrorCode::BackupAlreadyExists);
             }
             let backup = try!(get_backup(&repo, &backup_name_src));
@@ -666,7 +666,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 "save backup file",
                 ErrorCode::SaveBackup
             );
-            info!("The backup subpath has been deleted, run vacuum to reclaim space");
+            tr_info!("The backup subpath has been deleted, run vacuum to reclaim space");
         } else if repo.layout.backups_path().join(&backup_name).is_dir() {
             let backups = checked!(
                 repo.get_backups(&backup_name),
@@ -682,7 +682,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 );
             }
         } else {
-            error!("Denying to remove multiple backups (use --force):");
+            tr_error!("Denying to remove multiple backups (use --force):");
             for name in backups.keys() {
                 println!(" - {}/{}", backup_name, name);
             }
@@ -693,7 +693,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 "delete backup",
                 ErrorCode::RemoveRun
             );
-            info!("The backup has been deleted, run vacuum to reclaim space");
+            tr_info!("The backup has been deleted, run vacuum to reclaim space");
         }
     }
     Arguments::Prune {
@@ -707,7 +707,7 @@ pub fn run() -> Result<(), ErrorCode> {
         } => {
             let mut repo = try!(open_repository(&repo_path));
             if daily + weekly + monthly + yearly == 0 {
-                error!("This would remove all those backups");
+                tr_error!("This would remove all those backups");
                 return Err(ErrorCode::UnsafeArgs);
             }
             checked!(
@@ -716,7 +716,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 ErrorCode::PruneRun
             );
             if !force {
-                info!("Run with --force to actually execute this command");
+                tr_info!("Run with --force to actually execute this command");
             }
         }
         Arguments::Vacuum {
@@ -733,10 +733,10 @@ pub fn run() -> Result<(), ErrorCode> {
                 ErrorCode::VacuumRun
             );
             if !force {
-                info!("Run with --force to actually execute this command");
+                tr_info!("Run with --force to actually execute this command");
             } else {
                 let info_after = repo.info();
-                info!(
+                tr_info!(
                     "Reclaimed {}",
                     to_file_size(info_before.encoded_data_size - info_after.encoded_data_size)
                 );
@@ -790,7 +790,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 )
             }
             repo.set_clean();
-            info!("Integrity verified")
+            tr_info!("Integrity verified")
         }
         Arguments::List {
             repo_path,
@@ -830,11 +830,11 @@ pub fn run() -> Result<(), ErrorCode> {
             let backup_map = match backup_map {
                 Ok(backup_map) => backup_map,
                 Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => {
-                    warn!("Some backups could not be read, ignoring them");
+                    tr_warn!("Some backups could not be read, ignoring them");
                     backup_map
                 }
                 Err(err) => {
-                    error!("Failed to load backup files: {}", err);
+                    tr_error!("Failed to load backup files: {}", err);
                     return Err(ErrorCode::LoadBackup);
                 }
             };
@@ -904,8 +904,8 @@ pub fn run() -> Result<(), ErrorCode> {
                     ErrorCode::FuseMount
                 )
             };
-            info!("Mounting the filesystem...");
+            tr_info!("Mounting the filesystem...");
-            info!(
+            tr_info!(
                 "Please unmount the filesystem via 'fusermount -u {}' when done.",
                 mount_point
             );
@@ -937,7 +937,7 @@ pub fn run() -> Result<(), ErrorCode> {
             if let Some(bundle) = repo.get_bundle(&bundle_id) {
                 print_bundle(bundle);
             } else {
-                error!("No such bundle");
+                tr_error!("No such bundle");
                 return Err(ErrorCode::LoadBundle);
             }
         }
@@ -951,7 +951,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 "import repository",
                 ErrorCode::ImportRun
             );
-            info!("Import finished");
+            tr_info!("Import finished");
         }
         Arguments::Versions { repo_path, path } => {
             let mut repo = try!(open_repository(&repo_path));
@@ -968,7 +968,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 found = true;
             }
             if !found {
-                info!("No versions of that file were found.");
+                tr_info!("No versions of that file were found.");
             }
         }
         Arguments::Diff {
@@ -980,7 +980,7 @@ pub fn run() -> Result<(), ErrorCode> {
             inode_new
         } => {
            if repo_path_old != repo_path_new {
-                error!("Can only run diff on same repository");
+                tr_error!("Can only run diff on same repository");
                 return Err(ErrorCode::InvalidArgs);
             }
             let mut repo = try!(open_repository(&repo_path_old));
@@ -1015,7 +1015,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 );
             }
             if diffs.is_empty() {
-                info!("No differences found");
+                tr_info!("No differences found");
             }
         }
         Arguments::Config {
@@ -1033,7 +1033,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 changed = true;
             }
             if let Some(chunker) = chunker {
-                warn!(
+                tr_warn!(
                     "Changing the chunker makes it impossible to use existing data for deduplication"
                 );
                 repo.config.chunker = chunker;
@@ -1048,7 +1048,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 changed = true;
             }
             if let Some(hash) = hash {
-                warn!(
+                tr_warn!(
                     "Changing the hash makes it impossible to use existing data for deduplication"
                 );
                 repo.config.hash = hash;
@@ -1056,7 +1056,7 @@ pub fn run() -> Result<(), ErrorCode> {
             }
             if changed {
                 checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
-                info!("The configuration has been updated.");
+                tr_info!("The configuration has been updated.");
             } else {
                 print_config(&repo.config);
             }
@@ -1066,9 +1066,9 @@ pub fn run() -> Result<(), ErrorCode> {
                 None => Crypto::gen_keypair(),
                 Some(ref password) => Crypto::keypair_from_password(password),
             };
-            info!("Created the following key pair");
+            tr_info!("Created the following key pair");
-            println!("public: {}", to_hex(&public[..]));
+            tr_println!("public: {}", to_hex(&public[..]));
-            println!("secret: {}", to_hex(&secret[..]));
+            tr_println!("secret: {}", to_hex(&secret[..]));
             if let Some(file) = file {
                 checked!(
                     Crypto::save_keypair_to_file(&public, &secret, file),
@@ -1091,13 +1091,13 @@ pub fn run() -> Result<(), ErrorCode> {
                     ErrorCode::LoadKey
                 )
             } else {
-                info!("Created the following key pair");
+                tr_info!("Created the following key pair");
                 let (public, secret) = match password {
                     None => Crypto::gen_keypair(),
                     Some(ref password) => Crypto::keypair_from_password(password),
                 };
-                println!("public: {}", to_hex(&public[..]));
+                tr_println!("public: {}", to_hex(&public[..]));
-                println!("secret: {}", to_hex(&secret[..]));
+                tr_println!("secret: {}", to_hex(&secret[..]));
                 (public, secret)
             };
             checked!(
@@ -1108,7 +1108,7 @@ pub fn run() -> Result<(), ErrorCode> {
             if set_default {
                 repo.set_encryption(Some(&public));
                 checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
-                warn!(
+                tr_warn!(
                     "Please store this key pair in a secure location before using the repository"
                 );
             }
@@ -40,6 +40,7 @@ extern crate index;
 extern crate chunking;
 #[macro_use]
 extern crate runtime_fmt;
+extern crate locale_config;

 #[macro_use] mod translation;
 pub mod util;
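locale_config, newly pulled in here, is what would make the translation layer locale-aware. A minimal detection sketch, assuming the crate's Locale::user_default() entry point (which reads LANG/LC_* on Unix and the system settings on Windows, matching the kernel32/winapi dependencies the crate brings along):

    extern crate locale_config;
    use locale_config::Locale;

    fn main() {
        // Assumption: Locale::user_default() returns the user's default
        // locale and implements Display (e.g. "de-DE").
        let locale = Locale::user_default();
        println!("detected locale: {}", locale);
    }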
@@ -524,10 +524,10 @@ impl<'a> fuse::Filesystem for FuseFilesystem<'a> {
     /// Read data
     /// Read should send exactly the number of bytes requested except on EOF or error,
     /// otherwise the rest of the data will be substituted with zeroes. An exception to
-    /// this is when the file has been opened in 'direct_io' mode, in which case the
+    /// this is when the file has been opened in direct_io mode, in which case the
     /// return value of the read system call will reflect the return value of this
     /// operation. fh will contain the value set by the open method, or will be undefined
-    /// if the open method didn't set any value.
+    /// if the open method didnt set any value.
     fn read(
         &mut self,
         _req: &fuse::Request,
@@ -608,7 +608,7 @@ impl<'a> fuse::Filesystem for FuseFilesystem<'a> {
     /// call there will be exactly one release call. The filesystem may reply with an
     /// error, but error values are not returned to close() or munmap() which triggered
     /// the release. fh will contain the value set by the open method, or will be undefined
-    /// if the open method didn't set any value. flags will contain the same flags as for
+    /// if the open method didnt set any value. flags will contain the same flags as for
     /// open.
     fn release(
         &mut self,
@@ -7,6 +7,7 @@ pub use repository::{Repository, Backup, Config, RepositoryError, RepositoryInfo,
                      FileData, DiffType, InodeError, RepositoryLayout, Location};
 pub use index::{Index, IndexError};
 pub use mount::FuseFilesystem;
+pub use translation::CowStr;

 pub use serde::{Serialize, Deserialize};
@@ -15,12 +15,12 @@ quick_error!{
     #[allow(unknown_lints,large_enum_variant)]
     pub enum BackupError {
         FailedPaths(backup: Backup, failed: Vec<PathBuf>) {
-            description("Some paths could not be backed up")
+            description(tr!("Some paths could not be backed up"))
-            display("Backup error: some paths could not be backed up")
+            display("{}", tr_format!("Backup error: some paths could not be backed up"))
         }
         RemoveRoot {
-            description("The root of a backup can not be removed")
+            description(tr!("The root of a backup can not be removed"))
-            display("Backup error: the root of a backup can not be removed")
+            display("{}", tr_format!("Backup error: the root of a backup can not be removed"))
         }
     }
 }
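The pattern of these quick_error! hunks is easiest to see on a stripped-down error type. In the sketch below, tr! and tr_format! are identity stand-ins for the commit's translation macros; the real tr_format! presumably builds on runtime_fmt, since a translated format string is no longer a compile-time literal. Routing the message through display("{}", ...) is what lets a runtime-built string satisfy quick_error's display clause:

    #[macro_use]
    extern crate quick_error;

    // Identity stand-ins: the real macros would translate the string first.
    macro_rules! tr { ($s:expr) => { $s } }
    macro_rules! tr_format {
        ($($arg:tt)*) => { format!($($arg)*) }
    }

    quick_error! {
        #[derive(Debug)]
        pub enum BackupError {
            RemoveRoot {
                description(tr!("The root of a backup can not be removed"))
                // display() normally wants a literal format string; wrapping
                // the runtime-built message in "{}" sidesteps that.
                display("{}", tr_format!("Backup error: the root of a backup can not be removed"))
            }
        }
    }

    fn main() {
        println!("{}", BackupError::RemoveRoot);
    }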
@@ -110,7 +110,7 @@ impl Repository {
             Ok(backup_map) => backup_map,
             Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map,
                 _failed))) => {
-                warn!("Some backups could not be read, ignoring them");
+                tr_warn!("Some backups could not be read, ignoring them");
                 backup_map
             }
             Err(err) => return Err(err),
@@ -239,7 +239,7 @@ impl Repository {
                     user.name().to_string()
                 );
             } else {
-                warn!("Failed to retrieve name of user {}", inode.user);
+                tr_warn!("Failed to retrieve name of user {}", inode.user);
             }
         }
         if !backup.group_names.contains_key(&inode.group) {
@@ -249,7 +249,7 @@ impl Repository {
                     group.name().to_string()
                 );
             } else {
-                warn!("Failed to retrieve name of group {}", inode.group);
+                tr_warn!("Failed to retrieve name of group {}", inode.group);
             }
         }
         let mut meta_size = 0;
@ -15,49 +15,49 @@ quick_error!{
|
||||||
pub enum BackupFileError {
|
pub enum BackupFileError {
|
||||||
Read(err: io::Error, path: PathBuf) {
|
Read(err: io::Error, path: PathBuf) {
|
||||||
cause(err)
|
cause(err)
|
||||||
description("Failed to read backup")
|
description(tr!("Failed to read backup"))
|
||||||
display("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err)
|
display("{}", tr_format!("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err))
|
||||||
}
|
}
|
||||||
Write(err: io::Error, path: PathBuf) {
|
Write(err: io::Error, path: PathBuf) {
|
||||||
cause(err)
|
cause(err)
|
||||||
description("Failed to write backup")
|
description(tr!("Failed to write backup"))
|
||||||
display("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err)
|
display("{}", tr_format!("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err))
|
||||||
}
|
}
|
||||||
Decode(err: msgpack::DecodeError, path: PathBuf) {
|
Decode(err: msgpack::DecodeError, path: PathBuf) {
|
||||||
cause(err)
|
cause(err)
|
||||||
context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf())
|
context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf())
|
||||||
description("Failed to decode backup")
|
description(tr!("Failed to decode backup"))
|
||||||
display("Backup file error: failed to decode backup of {:?}\n\tcaused by: {}", path, err)
|
display("{}", tr_format!("Backup file error: failed to decode backup of {:?}\n\tcaused by: {}", path, err))
|
||||||
}
|
}
|
||||||
Encode(err: msgpack::EncodeError, path: PathBuf) {
|
Encode(err: msgpack::EncodeError, path: PathBuf) {
|
||||||
cause(err)
|
cause(err)
|
||||||
context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf())
|
context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf())
|
||||||
description("Failed to encode backup")
|
description(tr!("Failed to encode backup"))
|
||||||
display("Backup file error: failed to encode backup of {:?}\n\tcaused by: {}", path, err)
|
display("{}", tr_format!("Backup file error: failed to encode backup of {:?}\n\tcaused by: {}", path, err))
|
||||||
}
|
}
|
||||||
WrongHeader(path: PathBuf) {
|
WrongHeader(path: PathBuf) {
|
||||||
description("Wrong header")
|
description(tr!("Wrong header"))
|
||||||
display("Backup file error: wrong header on backup {:?}", path)
|
display("{}", tr_format!("Backup file error: wrong header on backup {:?}", path))
|
||||||
}
|
}
|
||||||
UnsupportedVersion(path: PathBuf, version: u8) {
|
UnsupportedVersion(path: PathBuf, version: u8) {
|
||||||
description("Wrong version")
|
description(tr!("Wrong version"))
|
||||||
display("Backup file error: unsupported version on backup {:?}: {}", path, version)
|
display("{}", tr_format!("Backup file error: unsupported version on backup {:?}: {}", path, version))
|
||||||
}
|
}
|
||||||
Decryption(err: EncryptionError, path: PathBuf) {
|
Decryption(err: EncryptionError, path: PathBuf) {
|
||||||
cause(err)
|
cause(err)
|
||||||
context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf())
|
context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf())
|
||||||
description("Decryption failed")
|
description(tr!("Decryption failed"))
|
||||||
display("Backup file error: decryption failed on backup {:?}\n\tcaused by: {}", path, err)
|
display("{}", tr_format!("Backup file error: decryption failed on backup {:?}\n\tcaused by: {}", path, err))
|
||||||
}
|
}
|
||||||
Encryption(err: EncryptionError) {
|
Encryption(err: EncryptionError) {
|
||||||
from()
|
from()
|
||||||
cause(err)
|
cause(err)
|
||||||
description("Encryption failed")
|
description(tr!("Encryption failed"))
|
||||||
display("Backup file error: encryption failed\n\tcaused by: {}", err)
|
display("{}", tr_format!("Backup file error: encryption failed\n\tcaused by: {}", err))
|
||||||
}
|
}
|
||||||
PartialBackupsList(partial: HashMap<String, Backup>, failed: Vec<PathBuf>) {
|
PartialBackupsList(partial: HashMap<String, Backup>, failed: Vec<PathBuf>) {
|
||||||
description("Some backups could not be loaded")
|
description(tr!("Some backups could not be loaded"))
|
||||||
display("Backup file error: some backups could not be loaded: {:?}", failed)
|
display("{}", tr_format!("Backup file error: some backups could not be loaded: {:?}", failed))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
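The pattern in these error hunks is mechanical: every literal description("…") becomes description(tr!("…")) and every display("…", args) becomes display("{}", tr_format!("…", args)), so the message is looked up in the active translation catalog when the error is rendered rather than fixed at compile time. A minimal sketch of the resulting shape (the DemoError type here is hypothetical, not part of the commit):

    quick_error!{
        #[derive(Debug)]
        pub enum DemoError {
            Missing(path: PathBuf) {
                description(tr!("File missing"))
                display("{}", tr_format!("File missing: {:?}", path))
            }
        }
    }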
@@ -180,7 +180,7 @@ impl Backup {
         let base_path = path.as_ref();
         let path = path.as_ref();
         if !path.exists() {
-            debug!("Backup root folder does not exist");
+            tr_debug!("Backup root folder does not exist");
             return Ok(backups);
         }
         let mut paths = vec![path.to_path_buf()];
@@ -16,24 +16,24 @@ quick_error!{
         Io(err: io::Error) {
             from()
             cause(err)
-            description("Failed to read/write bundle map")
+            description(tr!("Failed to read/write bundle map"))
         }
         Decode(err: msgpack::DecodeError) {
             from()
             cause(err)
-            description("Failed to decode bundle map")
+            description(tr!("Failed to decode bundle map"))
         }
         Encode(err: msgpack::EncodeError) {
             from()
             cause(err)
-            description("Failed to encode bundle map")
+            description(tr!("Failed to encode bundle map"))
         }
         WrongHeader {
-            description("Wrong header")
+            description(tr!("Wrong header"))
         }
         WrongVersion(version: u8) {
-            description("Wrong version")
-            display("Wrong version: {}", version)
+            description(tr!("Wrong version"))
+            display("{}", tr_format!("Wrong version: {}", version))
         }
     }
 }
@@ -16,14 +16,14 @@ quick_error!{
         }
         Parse(reason: &'static str) {
             from()
-            description("Failed to parse config")
-            display("Failed to parse config: {}", reason)
+            description(tr!("Failed to parse config"))
+            display("{}", tr_format!("Failed to parse config: {}", reason))
         }
         Yaml(err: serde_yaml::Error) {
             from()
             cause(err)
-            description("Yaml format error")
-            display("Yaml format error: {}", err)
+            description(tr!("Yaml format error"))
+            display("{}", tr_format!("Yaml format error: {}", err))
         }
     }
 }
@@ -79,7 +79,7 @@ impl ChunkerType {
 impl Compression {
     #[inline]
     fn from_yaml(yaml: &str) -> Result<Self, ConfigError> {
-        Compression::from_string(yaml).map_err(|_| ConfigError::Parse("Invalid codec"))
+        Compression::from_string(yaml).map_err(|_| ConfigError::Parse(tr!("Invalid codec")))
     }

     #[inline]
@@ -92,7 +92,7 @@ impl Compression {
 impl EncryptionMethod {
     #[inline]
     fn from_yaml(yaml: &str) -> Result<Self, ConfigError> {
-        EncryptionMethod::from_string(yaml).map_err(|_| ConfigError::Parse("Invalid codec"))
+        EncryptionMethod::from_string(yaml).map_err(|_| ConfigError::Parse(tr!("Invalid codec")))
     }

     #[inline]
@@ -186,7 +186,7 @@ impl Config {
         let encryption = if let Some(e) = yaml.encryption {
             let method = try!(EncryptionMethod::from_yaml(&e.method));
             let key = try!(parse_hex(&e.key).map_err(|_| {
-                ConfigError::Parse("Invalid public key")
+                ConfigError::Parse(tr!("Invalid public key"))
             }));
             Some((method, key.into()))
         } else {
@@ -15,95 +15,95 @@ quick_error!{
     #[allow(unknown_lints,large_enum_variant)]
     pub enum RepositoryError {
         NoRemote {
-            description("Remote storage not found")
-            display("Repository error: The remote storage has not been found, may be it needs to be mounted?")
+            description(tr!("Remote storage not found"))
+            display("{}", tr_format!("Repository error: The remote storage has not been found, may be it needs to be mounted?"))
         }
         Index(err: IndexError) {
             from()
             cause(err)
-            description("Index error")
-            display("Repository error: index error\n\tcaused by: {}", err)
+            description(tr!("Index error"))
+            display("{}", tr_format!("Repository error: index error\n\tcaused by: {}", err))
         }
         BundleDb(err: BundleDbError) {
             from()
             cause(err)
-            description("Bundle error")
-            display("Repository error: bundle db error\n\tcaused by: {}", err)
+            description(tr!("Bundle error"))
+            display("{}", tr_format!("Repository error: bundle db error\n\tcaused by: {}", err))
         }
         BundleWriter(err: BundleWriterError) {
             from()
             cause(err)
-            description("Bundle write error")
-            display("Repository error: failed to write to new bundle\n\tcaused by: {}", err)
+            description(tr!("Bundle write error"))
+            display("{}", tr_format!("Repository error: failed to write to new bundle\n\tcaused by: {}", err))
         }
         BackupFile(err: BackupFileError) {
             from()
             cause(err)
-            description("Backup file error")
-            display("Repository error: backup file error\n\tcaused by: {}", err)
+            description(tr!("Backup file error"))
+            display("{}", tr_format!("Repository error: backup file error\n\tcaused by: {}", err))
         }
         Chunker(err: ChunkerError) {
             from()
             cause(err)
-            description("Chunker error")
-            display("Repository error: failed to chunk data\n\tcaused by: {}", err)
+            description(tr!("Chunker error"))
+            display("{}", tr_format!("Repository error: failed to chunk data\n\tcaused by: {}", err))
         }
         Config(err: ConfigError) {
             from()
             cause(err)
-            description("Configuration error")
-            display("Repository error: configuration error\n\tcaused by: {}", err)
+            description(tr!("Configuration error"))
+            display("{}", tr_format!("Repository error: configuration error\n\tcaused by: {}", err))
         }
         Inode(err: InodeError) {
             from()
             cause(err)
-            description("Inode error")
-            display("Repository error: inode error\n\tcaused by: {}", err)
+            description(tr!("Inode error"))
+            display("{}", tr_format!("Repository error: inode error\n\tcaused by: {}", err))
         }
         LoadKeys(err: EncryptionError) {
             from()
             cause(err)
-            description("Failed to load keys")
-            display("Repository error: failed to load keys\n\tcaused by: {}", err)
+            description(tr!("Failed to load keys"))
+            display("{}", tr_format!("Repository error: failed to load keys\n\tcaused by: {}", err))
         }
         BundleMap(err: BundleMapError) {
             from()
             cause(err)
-            description("Bundle map error")
-            display("Repository error: bundle map error\n\tcaused by: {}", err)
+            description(tr!("Bundle map error"))
+            display("{}", tr_format!("Repository error: bundle map error\n\tcaused by: {}", err))
         }
         Integrity(err: IntegrityError) {
             from()
             cause(err)
-            description("Integrity error")
-            display("Repository error: integrity error\n\tcaused by: {}", err)
+            description(tr!("Integrity error"))
+            display("{}", tr_format!("Repository error: integrity error\n\tcaused by: {}", err))
         }
         Dirty {
-            description("Dirty repository")
-            display("The repository is dirty, please run a check")
+            description(tr!("Dirty repository"))
+            display("{}", tr_format!("The repository is dirty, please run a check"))
         }
         Backup(err: BackupError) {
             from()
             cause(err)
-            description("Failed to create a backup")
-            display("Repository error: failed to create backup\n\tcaused by: {}", err)
+            description(tr!("Failed to create a backup"))
+            display("{}", tr_format!("Repository error: failed to create backup\n\tcaused by: {}", err))
         }
         Lock(err: LockError) {
             from()
             cause(err)
-            description("Failed to obtain lock")
-            display("Repository error: failed to obtain lock\n\tcaused by: {}", err)
+            description(tr!("Failed to obtain lock"))
+            display("{}", tr_format!("Repository error: failed to obtain lock\n\tcaused by: {}", err))
         }

         Io(err: io::Error) {
             from()
             cause(err)
-            description("IO error")
-            display("IO error: {}", err)
+            description(tr!("IO error"))
+            display("{}", tr_format!("IO error: {}", err))
         }
         NoSuchFileInBackup(backup: Backup, path: PathBuf) {
-            description("No such file in backup")
-            display("The backup does not contain the file {:?}", path)
+            description(tr!("No such file in backup"))
+            display("{}", tr_format!("The backup does not contain the file {:?}", path))
         }
     }
 }
@@ -12,36 +12,36 @@ quick_error!{
     #[derive(Debug)]
     pub enum IntegrityError {
         MissingChunk(hash: Hash) {
-            description("Missing chunk")
-            display("Missing chunk: {}", hash)
+            description(tr!("Missing chunk"))
+            display("{}", tr_format!("Missing chunk: {}", hash))
         }
         MissingBundleId(id: u32) {
-            description("Missing bundle")
-            display("Missing bundle: {}", id)
+            description(tr!("Missing bundle"))
+            display("{}", tr_format!("Missing bundle: {}", id))
         }
         MissingBundle(id: BundleId) {
-            description("Missing bundle")
-            display("Missing bundle: {}", id)
+            description(tr!("Missing bundle"))
+            display("{}", tr_format!("Missing bundle: {}", id))
         }
         NoSuchChunk(bundle: BundleId, chunk: u32) {
-            description("No such chunk")
-            display("Bundle {} does not contain the chunk {}", bundle, chunk)
+            description(tr!("No such chunk"))
+            display("{}", tr_format!("Bundle {} does not contain the chunk {}", bundle, chunk))
         }
         RemoteBundlesNotInMap {
-            description("Remote bundles missing from map")
+            description(tr!("Remote bundles missing from map"))
         }
         MapContainsDuplicates {
-            description("Map contains duplicates")
+            description(tr!("Map contains duplicates"))
         }
         BrokenInode(path: PathBuf, err: Box<RepositoryError>) {
             cause(err)
-            description("Broken inode")
-            display("Broken inode: {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Broken inode"))
+            display("{}", tr_format!("Broken inode: {:?}\n\tcaused by: {}", path, err))
         }
         MissingInodeData(path: PathBuf, err: Box<RepositoryError>) {
             cause(err)
-            description("Missing inode data")
-            display("Missing inode data in: {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Missing inode data"))
+            display("{}", tr_format!("Missing inode data in: {:?}\n\tcaused by: {}", path, err))
         }
     }
 }
@@ -49,7 +49,7 @@ quick_error!{
 impl Repository {
     fn check_index_chunks(&self) -> Result<(), RepositoryError> {
         let mut progress = ProgressBar::new(self.index.len() as u64);
-        progress.message("checking index: ");
+        progress.message(tr!("checking index: "));
         progress.set_max_refresh_rate(Some(Duration::from_millis(100)));
         for (count, (_hash, location)) in self.index.iter().enumerate() {
             // Lookup bundle id from map
@@ -58,12 +58,12 @@ impl Repository {
             let bundle = if let Some(bundle) = self.bundles.get_bundle_info(&bundle_id) {
                 bundle
             } else {
-                progress.finish_print("checking index: done.");
+                progress.finish_print(tr!("checking index: done."));
                 return Err(IntegrityError::MissingBundle(bundle_id.clone()).into());
             };
             // Get chunk from bundle
             if bundle.info.chunk_count <= location.chunk as usize {
-                progress.finish_print("checking index: done.");
+                progress.finish_print(tr!("checking index: done."));
                 return Err(
                     IntegrityError::NoSuchChunk(bundle_id.clone(), location.chunk).into()
                 );
@@ -72,7 +72,7 @@ impl Repository {
                 progress.set(count as u64);
             }
         }
-        progress.finish_print("checking index: done.");
+        progress.finish_print(tr!("checking index: done."));
         Ok(())
     }

@@ -135,12 +135,12 @@ impl Repository {
             // Mark the content chunks as used
             if let Err(err) = self.check_inode_contents(&inode, checked) {
                 if repair {
-                    warn!(
+                    tr_warn!(
                         "Problem detected: data of {:?} is corrupt\n\tcaused by: {}",
                         path,
                         err
                     );
-                    info!("Removing inode data");
+                    tr_info!("Removing inode data");
                     inode.data = Some(FileData::Inline(vec![].into()));
                     inode.size = 0;
                     modified = true;
@@ -160,12 +160,12 @@ impl Repository {
             }
             Err(err) => {
                 if repair {
-                    warn!(
+                    tr_warn!(
                         "Problem detected: inode {:?} is corrupt\n\tcaused by: {}",
                         path.join(name),
                         err
                     );
-                    info!("Removing broken inode from backup");
+                    tr_info!("Removing broken inode from backup");
                     removed.push(name.to_string());
                     modified = true;
                 } else {
@@ -187,7 +187,7 @@ impl Repository {
     }

     fn evacuate_broken_backup(&self, name: &str) -> Result<(), RepositoryError> {
-        warn!(
+        tr_warn!(
             "The backup {} was corrupted and needed to be modified.",
             name
         );
@@ -202,7 +202,7 @@ impl Repository {
             try!(fs::copy(&src, &dst));
             try!(fs::remove_file(&src));
         }
-        info!("The original backup was renamed to {:?}", dst);
+        tr_info!("The original backup was renamed to {:?}", dst);
         Ok(())
     }

@@ -219,7 +219,7 @@ impl Repository {
         } else {
             None
         };
-        info!("Checking backup...");
+        tr_info!("Checking backup...");
         let mut checked = Bitmap::new(self.index.capacity());
         match self.check_subtree(
             Path::new("").to_path_buf(),
@@ -237,7 +237,7 @@ impl Repository {
             }
             Err(err) => {
                 if repair {
-                    warn!(
+                    tr_warn!(
                         "The root of the backup {} has been corrupted\n\tcaused by: {}",
                         name,
                         err
@@ -264,19 +264,19 @@ impl Repository {
         } else {
             None
         };
-        info!("Checking inode...");
+        tr_info!("Checking inode...");
         let mut checked = Bitmap::new(self.index.capacity());
         let mut inodes = try!(self.get_backup_path(backup, path));
         let mut inode = inodes.pop().unwrap();
         let mut modified = false;
         if let Err(err) = self.check_inode_contents(&inode, &mut checked) {
             if repair {
-                warn!(
+                tr_warn!(
                     "Problem detected: data of {:?} is corrupt\n\tcaused by: {}",
                     path,
                     err
                 );
-                info!("Removing inode data");
+                tr_info!("Removing inode data");
                 inode.data = Some(FileData::Inline(vec![].into()));
                 inode.size = 0;
                 modified = true;
@@ -297,12 +297,12 @@ impl Repository {
             }
             Err(err) => {
                 if repair {
-                    warn!(
+                    tr_warn!(
                         "Problem detected: inode {:?} is corrupt\n\tcaused by: {}",
                         path.join(name),
                         err
                     );
-                    info!("Removing broken inode from backup");
+                    tr_info!("Removing broken inode from backup");
                     removed.push(name.to_string());
                     modified = true;
                 } else {
@@ -338,19 +338,19 @@ impl Repository {
         } else {
             None
         };
-        info!("Checking backups...");
+        tr_info!("Checking backups...");
         let mut checked = Bitmap::new(self.index.capacity());
         let backup_map = match self.get_all_backups() {
             Ok(backup_map) => backup_map,
             Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map,
                 _failed))) => {
-                warn!("Some backups could not be read, ignoring them");
+                tr_warn!("Some backups could not be read, ignoring them");
                 backup_map
             }
             Err(err) => return Err(err),
         };
         for (name, mut backup) in
-            ProgressIter::new("checking backups", backup_map.len(), backup_map.into_iter())
+            ProgressIter::new(tr!("checking backups"), backup_map.len(), backup_map.into_iter())
         {
             let path = format!("{}::", name);
             match self.check_subtree(
@@ -369,7 +369,7 @@ impl Repository {
             }
             Err(err) => {
                 if repair {
-                    warn!(
+                    tr_warn!(
                         "The root of the backup {} has been corrupted\n\tcaused by: {}",
                         name,
                         err
@@ -385,12 +385,12 @@ impl Repository {
     }

     pub fn check_repository(&mut self, repair: bool) -> Result<(), RepositoryError> {
-        info!("Checking repository integrity...");
+        tr_info!("Checking repository integrity...");
         let mut rebuild = false;
         for (_id, bundle_id) in self.bundle_map.bundles() {
             if self.bundles.get_bundle_info(&bundle_id).is_none() {
                 if repair {
-                    warn!(
+                    tr_warn!(
                         "Problem detected: bundle map contains unknown bundle {}",
                         bundle_id
                     );
@@ -402,7 +402,7 @@ impl Repository {
         }
         if self.bundle_map.len() < self.bundles.len() {
             if repair {
-                warn!("Problem detected: bundle map does not contain all remote bundles");
+                tr_warn!("Problem detected: bundle map does not contain all remote bundles");
                 rebuild = true;
             } else {
                 return Err(IntegrityError::RemoteBundlesNotInMap.into());
@@ -410,7 +410,7 @@ impl Repository {
         }
         if self.bundle_map.len() > self.bundles.len() {
             if repair {
-                warn!("Problem detected: bundle map contains bundles multiple times");
+                tr_warn!("Problem detected: bundle map contains bundles multiple times");
                 rebuild = true;
             } else {
                 return Err(IntegrityError::MapContainsDuplicates.into());
@@ -424,7 +424,7 @@ impl Repository {
     }

     pub fn rebuild_bundle_map(&mut self) -> Result<(), RepositoryError> {
-        info!("Rebuilding bundle map from bundles");
+        tr_info!("Rebuilding bundle map from bundles");
         self.bundle_map = BundleMap::create();
         for bundle in self.bundles.list_bundles() {
             let bundle_id = match bundle.mode {
@@ -443,11 +443,11 @@ impl Repository {
     }

     pub fn rebuild_index(&mut self) -> Result<(), RepositoryError> {
-        info!("Rebuilding index from bundles");
+        tr_info!("Rebuilding index from bundles");
         self.index.clear();
         let mut bundles = self.bundle_map.bundles();
         bundles.sort_by_key(|&(_, ref v)| v.clone());
-        for (num, id) in ProgressIter::new("Rebuilding index from bundles", bundles.len(), bundles.into_iter()) {
+        for (num, id) in ProgressIter::new(tr!("Rebuilding index from bundles"), bundles.len(), bundles.into_iter()) {
             let chunks = try!(self.bundles.get_chunk_list(&id));
             for (i, (hash, _len)) in chunks.into_inner().into_iter().enumerate() {
                 try!(self.index.set(
@@ -467,10 +467,10 @@ impl Repository {
         if repair {
             try!(self.write_mode());
         }
-        info!("Checking index integrity...");
+        tr_info!("Checking index integrity...");
         if let Err(err) = self.index.check() {
             if repair {
-                warn!(
+                tr_warn!(
                     "Problem detected: index was corrupted\n\tcaused by: {}",
                     err
                 );
@@ -479,10 +479,10 @@ impl Repository {
                 return Err(err.into());
             }
         }
-        info!("Checking index entries...");
+        tr_info!("Checking index entries...");
         if let Err(err) = self.check_index_chunks() {
             if repair {
-                warn!(
+                tr_warn!(
                     "Problem detected: index entries were inconsistent\n\tcaused by: {}",
                     err
                 );
@@ -499,10 +499,10 @@ impl Repository {
         if repair {
             try!(self.write_mode());
         }
-        info!("Checking bundle integrity...");
+        tr_info!("Checking bundle integrity...");
         if try!(self.bundles.check(full, repair)) {
             // Some bundles got repaired
-            warn!("Some bundles have been rewritten, please remove the broken bundles manually.");
+            tr_warn!("Some bundles have been rewritten, please remove the broken bundles manually.");
             try!(self.rebuild_bundle_map());
             try!(self.rebuild_index());
         }
@@ -19,44 +19,44 @@ quick_error!{
     #[derive(Debug)]
     pub enum InodeError {
         UnsupportedFiletype(path: PathBuf) {
-            description("Unsupported file type")
-            display("Inode error: file {:?} has an unsupported type", path)
+            description(tr!("Unsupported file type"))
+            display("{}", tr_format!("Inode error: file {:?} has an unsupported type", path))
         }
         ReadMetadata(err: io::Error, path: PathBuf) {
             cause(err)
-            description("Failed to obtain metadata for file")
-            display("Inode error: failed to obtain metadata for file {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Failed to obtain metadata for file"))
+            display("{}", tr_format!("Inode error: failed to obtain metadata for file {:?}\n\tcaused by: {}", path, err))
         }
         ReadXattr(err: io::Error, path: PathBuf) {
             cause(err)
-            description("Failed to obtain xattr for file")
-            display("Inode error: failed to obtain xattr for file {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Failed to obtain xattr for file"))
+            display("{}", tr_format!("Inode error: failed to obtain xattr for file {:?}\n\tcaused by: {}", path, err))
         }
         ReadLinkTarget(err: io::Error, path: PathBuf) {
             cause(err)
-            description("Failed to obtain link target for file")
-            display("Inode error: failed to obtain link target for file {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Failed to obtain link target for file"))
+            display("{}", tr_format!("Inode error: failed to obtain link target for file {:?}\n\tcaused by: {}", path, err))
         }
         Create(err: io::Error, path: PathBuf) {
             cause(err)
-            description("Failed to create entity")
-            display("Inode error: failed to create entity {:?}\n\tcaused by: {}", path, err)
+            description(tr!("Failed to create entity"))
+            display("{}", tr_format!("Inode error: failed to create entity {:?}\n\tcaused by: {}", path, err))
         }
         Integrity(reason: &'static str) {
-            description("Integrity error")
-            display("Inode error: inode integrity error: {}", reason)
+            description(tr!("Integrity error"))
+            display("{}", tr_format!("Inode error: inode integrity error: {}", reason))
         }
         Decode(err: msgpack::DecodeError) {
             from()
             cause(err)
-            description("Failed to decode metadata")
-            display("Inode error: failed to decode metadata\n\tcaused by: {}", err)
+            description(tr!("Failed to decode metadata"))
+            display("{}", tr_format!("Inode error: failed to decode metadata\n\tcaused by: {}", err))
         }
         Encode(err: msgpack::EncodeError) {
             from()
             cause(err)
-            description("Failed to encode metadata")
-            display("Inode error: failed to encode metadata\n\tcaused by: {}", err)
+            description(tr!("Failed to encode metadata"))
+            display("{}", tr_format!("Inode error: failed to encode metadata\n\tcaused by: {}", err))
         }
     }
 }
@@ -82,12 +82,12 @@ serde_impl!(FileType(u8) {
 impl fmt::Display for FileType {
     fn fmt(&self, format: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match *self {
-            FileType::File => write!(format, "file"),
-            FileType::Directory => write!(format, "directory"),
-            FileType::Symlink => write!(format, "symlink"),
-            FileType::BlockDevice => write!(format, "block device"),
-            FileType::CharDevice => write!(format, "char device"),
-            FileType::NamedPipe => write!(format, "named pipe"),
+            FileType::File => write!(format, "{}", tr!("file")),
+            FileType::Directory => write!(format, "{}", tr!("directory")),
+            FileType::Symlink => write!(format, "{}", tr!("symlink")),
+            FileType::BlockDevice => write!(format, "{}", tr!("block device")),
+            FileType::CharDevice => write!(format, "{}", tr!("char device")),
+            FileType::NamedPipe => write!(format, "{}", tr!("named pipe")),
         }
     }
 }
@@ -249,13 +249,13 @@ impl Inode {
                     InodeError::Create(e, full_path.clone())
                 }));
             } else {
-                return Err(InodeError::Integrity("Symlink without target"));
+                return Err(InodeError::Integrity(tr!("Symlink without target")));
             }
         }
         FileType::NamedPipe => {
             let name = try!(
                 ffi::CString::new(full_path.as_os_str().as_bytes())
-                    .map_err(|_| InodeError::Integrity("Name contains nulls"))
+                    .map_err(|_| InodeError::Integrity(tr!("Name contains nulls")))
             );
             let mode = self.mode | libc::S_IFIFO;
             if unsafe { libc::mkfifo(name.as_ptr(), mode) } != 0 {
@@ -268,7 +268,7 @@ impl Inode {
         FileType::BlockDevice | FileType::CharDevice => {
             let name = try!(
                 ffi::CString::new(full_path.as_os_str().as_bytes())
-                    .map_err(|_| InodeError::Integrity("Name contains nulls"))
+                    .map_err(|_| InodeError::Integrity(tr!("Name contains nulls")))
             );
             let mode = self.mode |
                 match self.file_type {
@@ -279,7 +279,7 @@ impl Inode {
             let device = if let Some((major, minor)) = self.device {
                 unsafe { libc::makedev(major, minor) }
             } else {
-                return Err(InodeError::Integrity("Device without id"));
+                return Err(InodeError::Integrity(tr!("Device without id")));
             };
             if unsafe { libc::mknod(name.as_ptr(), mode, device) } != 0 {
                 return Err(InodeError::Create(
@@ -291,21 +291,21 @@ impl Inode {
         }
         let time = FileTime::from_seconds_since_1970(self.timestamp as u64, 0);
         if let Err(err) = filetime::set_file_times(&full_path, time, time) {
-            warn!("Failed to set file time on {:?}: {}", full_path, err);
+            tr_warn!("Failed to set file time on {:?}: {}", full_path, err);
         }
         if !self.xattrs.is_empty() {
             if xattr::SUPPORTED_PLATFORM {
                 for (name, data) in &self.xattrs {
                     if let Err(err) = xattr::set(&full_path, name, data) {
-                        warn!("Failed to set xattr {} on {:?}: {}", name, full_path, err);
+                        tr_warn!("Failed to set xattr {} on {:?}: {}", name, full_path, err);
                     }
                 }
             } else {
-                warn!("Not setting xattr on {:?}", full_path);
+                tr_warn!("Not setting xattr on {:?}", full_path);
             }
         }
         if let Err(err) = fs::set_permissions(&full_path, Permissions::from_mode(self.mode)) {
-            warn!(
+            tr_warn!(
                 "Failed to set permissions {:o} on {:?}: {}",
                 self.mode,
                 full_path,
@@ -313,7 +313,7 @@ impl Inode {
             );
         }
         if let Err(err) = chown(&full_path, self.user, self.group) {
-            warn!(
+            tr_warn!(
                 "Failed to set user {} and group {} on {:?}: {}",
                 self.user,
                 self.group,
@@ -139,7 +139,7 @@ impl Repository {
         match unsafe { Index::open(layout.index_path(), &INDEX_MAGIC, INDEX_VERSION) } {
             Ok(index) => (index, false),
             Err(err) => {
-                error!("Failed to load local index:\n\tcaused by: {}", err);
+                tr_error!("Failed to load local index:\n\tcaused by: {}", err);
                 (
                     try!(Index::create(
                         layout.index_path(),
|
@ -153,7 +153,7 @@ impl Repository {
|
||||||
let (bundle_map, rebuild_bundle_map) = match BundleMap::load(layout.bundle_map_path()) {
|
let (bundle_map, rebuild_bundle_map) = match BundleMap::load(layout.bundle_map_path()) {
|
||||||
Ok(bundle_map) => (bundle_map, false),
|
Ok(bundle_map) => (bundle_map, false),
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
error!("Failed to load local bundle map:\n\tcaused by: {}", err);
|
tr_error!("Failed to load local bundle map:\n\tcaused by: {}", err);
|
||||||
(BundleMap::create(), true)
|
(BundleMap::create(), true)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@@ -178,7 +178,7 @@ impl Repository {
         if !rebuild_bundle_map {
             let mut save_bundle_map = false;
             if !gone.is_empty() {
-                info!("Removig {} old bundles from index", gone.len());
+                tr_info!("Removig {} old bundles from index", gone.len());
                 try!(repo.write_mode());
                 for bundle in gone {
                     try!(repo.remove_gone_remote_bundle(&bundle))
@@ -186,10 +186,10 @@ impl Repository {
                 save_bundle_map = true;
             }
             if !new.is_empty() {
-                info!("Adding {} new bundles to index", new.len());
+                tr_info!("Adding {} new bundles to index", new.len());
                 try!(repo.write_mode());
                 for bundle in ProgressIter::new(
-                    "adding bundles to index",
+                    tr!("adding bundles to index"),
                     new.len(),
                     new.into_iter()
                 )
@@ -232,11 +232,11 @@ impl Repository {
         let mut backups: Vec<(String, Backup)> = try!(repo.get_all_backups()).into_iter().collect();
         backups.sort_by_key(|&(_, ref b)| b.timestamp);
         if let Some((name, backup)) = backups.pop() {
-            info!("Taking configuration from the last backup '{}'", name);
+            tr_info!("Taking configuration from the last backup '{}'", name);
             repo.config = backup.config;
             try!(repo.save_config())
         } else {
-            warn!(
+            tr_warn!(
                 "No backup found in the repository to take configuration from, please set the configuration manually."
             );
         }
@@ -268,7 +268,7 @@ impl Repository {
     pub fn set_encryption(&mut self, public: Option<&PublicKey>) {
         if let Some(key) = public {
             if !self.crypto.lock().unwrap().contains_secret_key(key) {
-                warn!("The secret key for that public key is not stored in the repository.")
+                tr_warn!("The secret key for that public key is not stored in the repository.")
             }
             let mut key_bytes = Vec::new();
             key_bytes.extend_from_slice(&key[..]);
@@ -343,7 +343,7 @@ impl Repository {
         if self.bundle_map.find(&bundle.id).is_some() {
             return Ok(());
         }
-        debug!("Adding new bundle to index: {}", bundle.id);
+        tr_debug!("Adding new bundle to index: {}", bundle.id);
         let bundle_id = match bundle.mode {
             BundleMode::Data => self.next_data_bundle,
             BundleMode::Meta => self.next_meta_bundle,
@@ -377,7 +377,7 @@ impl Repository {

     fn remove_gone_remote_bundle(&mut self, bundle: &BundleInfo) -> Result<(), RepositoryError> {
         if let Some(id) = self.bundle_map.find(&bundle.id) {
-            debug!("Removing bundle from index: {}", bundle.id);
+            tr_debug!("Removing bundle from index: {}", bundle.id);
             try!(self.bundles.delete_local_bundle(&bundle.id));
             try!(self.index.filter(|_key, data| data.bundle != id));
             self.bundle_map.remove(id);
@@ -406,7 +406,7 @@ impl Repository {
 impl Drop for Repository {
     fn drop(&mut self) {
         if let Err(err) = self.flush() {
-            error!("Failed to flush repository: {}", err);
+            tr_error!("Failed to flush repository: {}", err);
         }
     }
 }
@@ -198,7 +198,7 @@ impl Repository {
             Err(RepositoryError::Inode(_)) |
             Err(RepositoryError::Chunker(_)) |
             Err(RepositoryError::Io(_)) => {
-                info!("Failed to backup {:?}", path);
+                tr_info!("Failed to backup {:?}", path);
                 failed_paths.push(path);
                 continue;
             }
@@ -243,7 +243,7 @@ impl Repository {
         if roots.len() == 1 {
             Ok(roots.pop().unwrap())
         } else {
-            warn!("Tar file contains multiple roots, adding dummy folder");
+            tr_warn!("Tar file contains multiple roots, adding dummy folder");
             let mut root_inode = Inode {
                 file_type: FileType::Directory,
                 mode: 0o755,
@@ -20,11 +20,11 @@ impl Repository {
         force: bool,
     ) -> Result<(), RepositoryError> {
         try!(self.flush());
-        info!("Locking repository");
+        tr_info!("Locking repository");
         try!(self.write_mode());
         let _lock = try!(self.lock(true));
         // analyze_usage will set the dirty flag
-        info!("Analyzing chunk usage");
+        tr_info!("Analyzing chunk usage");
         let usage = try!(self.analyze_usage());
         let mut data_total = 0;
         let mut data_used = 0;
@@ -32,7 +32,7 @@ impl Repository {
             data_total += bundle.info.encoded_size;
             data_used += bundle.get_used_size();
         }
-        info!(
+        tr_info!(
             "Usage: {} of {}, {:.1}%",
             to_file_size(data_used as u64),
             to_file_size(data_total as u64),
@@ -70,7 +70,7 @@ impl Repository {
                 }
             }
         }
-        info!(
+        tr_info!(
             "Reclaiming about {} by rewriting {} bundles ({})",
             to_file_size(reclaim_space as u64),
             rewrite_bundles.len(),
@@ -81,7 +81,7 @@ impl Repository {
             return Ok(());
         }
         for id in ProgressIter::new(
-            "rewriting bundles",
+            tr!("rewriting bundles"),
             rewrite_bundles.len(),
             rewrite_bundles.iter()
         )
@@ -100,12 +100,12 @@ impl Repository {
             }
         }
         try!(self.flush());
-        info!("Checking index");
+        tr_info!("Checking index");
         for (hash, location) in self.index.iter() {
             let loc_bundle = location.bundle;
             let loc_chunk = location.chunk;
             if rewrite_bundles.contains(&loc_bundle) {
-                panic!(
+                tr_panic!(
                     "Removed bundle is still referenced in index: hash:{}, bundle:{}, chunk:{}",
                     hash,
                     loc_bundle,
@@ -113,7 +113,7 @@ impl Repository {
                 );
             }
         }
-        info!("Deleting {} bundles", rewrite_bundles.len());
+        tr_info!("Deleting {} bundles", rewrite_bundles.len());
        for id in rewrite_bundles {
            try!(self.delete_bundle(id));
        }
@@ -1,30 +1,179 @@
 use std::borrow::Cow;
 use std::collections::HashMap;
+use std::cmp::max;
+use std::str;
+use std::path::{Path, PathBuf};
+use std::io::Read;
+use std::fs::File;

-type TransStr = Cow<'static, str>;
+use locale_config::Locale;

-pub struct Translation(HashMap<TransStr, TransStr>);

+pub type CowStr = Cow<'static, str>;
+
+fn read_u32(b: &[u8], reorder: bool) -> u32 {
+    if reorder {
+        (u32::from(b[0]) << 24) + (u32::from(b[1]) << 16) + (u32::from(b[2]) << 8) + u32::from(b[3])
+    } else {
+        (u32::from(b[3]) << 24) + (u32::from(b[2]) << 16) + (u32::from(b[1]) << 8) + u32::from(b[0])
+    }
+}
+
+struct MoFile<'a> {
+    data: &'a [u8],
+    count: usize,
+    orig_pos: usize,
+    trans_pos: usize,
+    reorder: bool,
+    i: usize
+}
+
+impl<'a> MoFile<'a> {
+    fn new(data: &'a [u8]) -> Result<Self, ()> {
+        if data.len() < 20 {
+            return Err(());
+        }
+        // Magic header
+        let magic = read_u32(&data[0..4], false);
+        let reorder = if magic == 0x9504_12de {
+            false
+        } else if magic == 0xde12_0495 {
+            true
+        } else {
+            return Err(());
+        };
+        // Version
+        if read_u32(&data[4..8], reorder) != 0x0000_0000 {
+            return Err(());
+        }
+        // Translation count
+        let count = read_u32(&data[8..12], reorder) as usize;
+        // Original string offset
+        let orig_pos = read_u32(&data[12..16], reorder) as usize;
+        // Original string offset
+        let trans_pos = read_u32(&data[16..20], reorder) as usize;
+        if data.len() < max(orig_pos, trans_pos) + count * 8 {
+            return Err(());
+        }
+        Ok(MoFile{
+            data: data,
+            count: count,
+            orig_pos: orig_pos,
+            trans_pos: trans_pos,
+            reorder: reorder,
+            i: 0
+        })
+    }
+}
+
+impl<'a> Iterator for MoFile<'a> {
+    type Item = (&'a str, &'a str);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.i >= self.count {
+            return None;
+        }
+        let length = read_u32(&self.data[self.orig_pos+self.i*8..], self.reorder) as usize;
+        let offset = read_u32(&self.data[self.orig_pos+self.i*8+4..], self.reorder) as usize;
+        let orig = match str::from_utf8(&self.data[offset..offset+length]) {
+            Ok(s) => s,
+            Err(_) => return None
+        };
+        let length = read_u32(&self.data[self.trans_pos+self.i*8..], self.reorder) as usize;
+        let offset = read_u32(&self.data[self.trans_pos+self.i*8+4..], self.reorder) as usize;
+        let trans = match str::from_utf8(&self.data[offset..offset+length]) {
+            Ok(s) => s,
+            Err(_) => return None
+        };
+        self.i += 1;
+        Some((orig, trans))
+    }
+}
+
+
+pub struct Translation(HashMap<CowStr, CowStr>);

 impl Translation {
     pub fn new() -> Self {
         Translation(Default::default())
     }

-    pub fn set<O: Into<TransStr>, T: Into<TransStr>>(&mut self, orig: O, trans: T) {
-        self.0.insert(orig.into(), trans.into());
+    pub fn from_mo_data(data: &'static [u8]) -> Self {
+        let mut translation = Translation::new();
+        match MoFile::new(data) {
+            Ok(mo_file) => for (orig, trans) in mo_file {
+                translation.set(orig, trans);
+            }
+            Err(_) => error!("Invalid translation data")
+        }
+        translation
     }

-    pub fn get<O: Into<TransStr>>(&self, orig: O) -> TransStr {
-        let orig = orig.into();
-        self.0.get(&orig).cloned().unwrap_or(orig)
+    pub fn from_mo_file(path: &Path) -> Self {
+        let mut translation = Translation::new();
+        if let Ok(mut file) = File::open(&path) {
+            let mut data = vec![];
+            if file.read_to_end(&mut data).is_ok() {
+                match MoFile::new(&data) {
+                    Ok(mo_file) => for (orig, trans) in mo_file {
+                        translation.set(orig.to_string(), trans.to_string());
+                    }
+                    Err(_) => error!("Invalid translation data")
+                }
+            }
+        }
+        translation
+    }
+
+    pub fn set<O: Into<CowStr>, T: Into<CowStr>>(&mut self, orig: O, trans: T) {
+        let trans = trans.into();
+        if !trans.is_empty() {
+            self.0.insert(orig.into(), trans);
+        }
+    }
+
+    pub fn get<'a, 'b: 'a>(&'b self, orig: &'a str) -> &'a str {
+        self.0.get(orig).map(|s| s as &'a str).unwrap_or(orig)
     }
 }

+fn get_translation(locale: &str) -> Translation {
+    if let Some(trans) = find_translation(locale) {
+        return trans;
+    }
+    let country = locale.split('_').next().unwrap();
+    if let Some(trans) = find_translation(country) {
+        return trans;
+    }
+    Translation::new()
+}
+
+fn find_translation(name: &str) -> Option<Translation> {
+    if EMBEDDED_TRANS.contains_key(name) {
+        return Some(Translation::from_mo_data(EMBEDDED_TRANS[name]));
+    }
+    let path = PathBuf::from(format!("/usr/share/locale/{}/LC_MESSAGES/zvault.mo", name));
+    if path.exists() {
+        return Some(Translation::from_mo_file(&path));
+    }
+    let path = PathBuf::from(format!("lang/{}.mo", name));
+    if path.exists() {
+        return Some(Translation::from_mo_file(&path));
+    }
+    None
+}
+
 lazy_static! {
-    static ref TRANS: Translation = {
-        let mut trans = Translation::new();
-        trans.set("Hello", "Hallo");
-        trans
+    pub static ref EMBEDDED_TRANS: HashMap<&'static str, &'static [u8]> = {
+        HashMap::new()
+        //map.insert("de", include_bytes!("../lang/de.mo") as &'static [u8]);
+    };
+    pub static ref TRANS: Translation = {
+        let locale = Locale::current();
+        let locale_str = locale.tags_for("").next().unwrap().as_ref().to_string();
+        get_translation(&locale_str)
     };
 }
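Taken together, the rewritten module resolves the catalog once, at first use of TRANS: the current locale comes from locale_config, get_translation tries the full locale and then the bare language code, and find_translation probes the embedded catalogs, the system path /usr/share/locale/<name>/LC_MESSAGES/zvault.mo, and the local lang/<name>.mo, in that order. Because Translation::get falls back to the original string, a missing or partial catalog simply degrades to the untranslated English messages. A minimal sketch of that lookup behavior (illustrative only, not part of the commit; the German string is invented):

    // With no catalog loaded, lookups are identity:
    let t = Translation::new();
    assert_eq!(t.get("Missing chunk"), "Missing chunk");

    // With a catalog entry, the translated string is returned:
    let mut t = Translation::new();
    t.set("Missing chunk", "Fehlender Chunk");
    assert_eq!(t.get("Missing chunk"), "Fehlender Chunk");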
@@ -42,6 +191,26 @@ lazy_static! {
     ($fmt:tt, $($arg:tt)*) => (rt_println!(tr!($fmt), $($arg)*).expect("invalid format"));
 }

+#[macro_export] macro_rules! tr_trace {
+    ($($arg:tt)*) => (debug!("{}", tr_format!($($arg)*)));
+}
+
+#[macro_export] macro_rules! tr_debug {
+    ($($arg:tt)*) => (debug!("{}", tr_format!($($arg)*)));
+}
+
 #[macro_export] macro_rules! tr_info {
     ($($arg:tt)*) => (info!("{}", tr_format!($($arg)*)));
 }
+
+#[macro_export] macro_rules! tr_warn {
+    ($($arg:tt)*) => (warn!("{}", tr_format!($($arg)*)));
+}
+
+#[macro_export] macro_rules! tr_error {
+    ($($arg:tt)*) => (error!("{}", tr_format!($($arg)*)));
+}
+
+#[macro_export] macro_rules! tr_panic {
+    ($($arg:tt)*) => (panic!("{}", tr_format!($($arg)*)));
+}
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn read_from(src: &[u8]) -> Self {
|
pub fn read_from(src: &[u8]) -> Self {
|
||||||
if src.len() % 20 != 0 {
|
if src.len() % 20 != 0 {
|
||||||
warn!("Reading truncated chunk list");
|
tr_warn!("Reading truncated chunk list");
|
||||||
}
|
}
|
||||||
ChunkList::read_n_from(src.len() / 20, &mut Cursor::new(src)).unwrap()
|
ChunkList::read_n_from(src.len() / 20, &mut Cursor::new(src)).unwrap()
|
||||||
}
|
}
|
||||||
|
@@ -129,7 +129,7 @@ impl<'a> Deserialize<'a> for ChunkList {
     {
         let data: Vec<u8> = try!(ByteBuf::deserialize(deserializer)).into();
         if data.len() % 20 != 0 {
-            return Err(D::Error::custom("Invalid chunk list length"));
+            return Err(D::Error::custom(tr!("Invalid chunk list length")));
         }
         Ok(
             ChunkList::read_n_from(data.len() / 20, &mut Cursor::new(data)).unwrap()
@@ -73,7 +73,7 @@ impl<T: Iterator> Iterator for ProgressIter<T> {
     fn next(&mut self) -> Option<Self::Item> {
         match self.inner.next() {
             None => {
-                let msg = self.msg.clone() + "done.";
+                let msg = self.msg.clone() + tr!("done.");
                 self.bar.finish_print(&msg);
                 None
             }
@@ -11,26 +11,26 @@ quick_error!{
     #[derive(Debug)]
     pub enum CompressionError {
         UnsupportedCodec(name: String) {
-            description("Unsupported codec")
-            display("Unsupported codec: {}", name)
+            description(tr!("Unsupported codec"))
+            display("{}", tr_format!("Unsupported codec: {}", name))
         }
         InitializeCodec {
-            description("Failed to initialize codec")
+            description(tr!("Failed to initialize codec"))
         }
         InitializeOptions {
-            description("Failed to set codec options")
+            description(tr!("Failed to set codec options"))
         }
         InitializeStream {
-            description("Failed to create stream")
+            description(tr!("Failed to create stream"))
         }
         Operation(reason: &'static str) {
-            description("Operation failed")
-            display("Operation failed: {}", reason)
+            description(tr!("Operation failed"))
+            display("{}", tr_format!("Operation failed: {}", reason))
         }
         Output(err: io::Error) {
             from()
             cause(err)
-            description("Failed to write to output")
+            description(tr!("Failed to write to output"))
         }
     }
 }

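quick_error's `display(...)` expects a literal format string, so a computed translation cannot be passed directly; `display("{}", tr_format!(...))` satisfies the literal requirement and delegates the real formatting to the inner macro. The same pattern with plain format! standing in for the translation macro (DemoError is a hypothetical type, shown only to illustrate the shape):

    #[macro_use] extern crate quick_error;

    quick_error! {
        #[derive(Debug)]
        pub enum DemoError {
            BadInput(what: String) {
                // The outer "{}" is the literal quick_error needs;
                // the message itself is built by the inner macro call.
                display("{}", format!("Bad input: {}", what))
            }
        }
    }
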
@@ -21,7 +21,7 @@ static INIT: Once = ONCE_INIT;
 
 fn sodium_init() {
     INIT.call_once(|| if !sodiumoxide::init() {
-        panic!("Failed to initialize sodiumoxide");
+        tr_panic!("Failed to initialize sodiumoxide");
    });
 }

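ONCE_INIT is the older constant for std::sync::Once, later deprecated in favor of Once::new(). The guarded-initialization pattern in isolation, using the modern constructor:

    use std::sync::Once;

    static INIT: Once = Once::new();

    fn init_once() {
        // The closure runs at most once, even if init_once is called
        // concurrently from several threads.
        INIT.call_once(|| {
            println!("one-time setup");
        });
    }
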
@@ -29,27 +29,27 @@ quick_error!{
     #[derive(Debug)]
     pub enum EncryptionError {
         InvalidKey {
-            description("Invalid key")
+            description(tr!("Invalid key"))
         }
         MissingKey(key: PublicKey) {
-            description("Missing key")
-            display("Missing key: {}", to_hex(&key[..]))
+            description(tr!("Missing key"))
+            display("{}", tr_format!("Missing key: {}", to_hex(&key[..])))
         }
         Operation(reason: &'static str) {
-            description("Operation failed")
-            display("Operation failed: {}", reason)
+            description(tr!("Operation failed"))
+            display("{}", tr_format!("Operation failed: {}", reason))
         }
         Io(err: io::Error) {
             from()
             cause(err)
-            description("IO error")
-            display("IO error: {}", err)
+            description(tr!("IO error"))
+            display("{}", tr_format!("IO error: {}", err))
         }
         Yaml(err: serde_yaml::Error) {
             from()
             cause(err)
-            description("Yaml format error")
-            display("Yaml format error: {}", err)
+            description(tr!("Yaml format error"))
+            display("{}", tr_format!("Yaml format error: {}", err))
         }
     }
 }

@@ -68,7 +68,7 @@ impl EncryptionMethod {
     pub fn from_string(val: &str) -> Result<Self, &'static str> {
         match val {
             "sodium" => Ok(EncryptionMethod::Sodium),
-            _ => Err("Unsupported encryption method"),
+            _ => Err(tr!("Unsupported encryption method")),
         }
     }

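Note that from_string still returns Result<Self, &'static str>, so `Err(tr!(...))` only type-checks if tr! produces a &'static str. Runtime-loaded translations are not naturally 'static; one way a translation layer can bridge that (whether this commit does so is not visible in this diff) is to leak each translated message once:

    // Hypothetical: turn a runtime String into a &'static str by leaking it.
    // Tolerable for a bounded set of messages, since each leak is permanent.
    fn leak(s: String) -> &'static str {
        Box::leak(s.into_boxed_str())
    }
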
@@ -255,7 +255,7 @@ impl Crypto {
         match *method {
             EncryptionMethod::Sodium => {
                 sealedbox::open(data, &public, secret).map_err(|_| {
-                    EncryptionError::Operation("Decryption failed")
+                    EncryptionError::Operation(tr!("Decryption failed"))
                 })
             }
         }

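sealedbox::open reports failure as Err(()), which is why the closure discards the error value and substitutes a translated message. A round-trip sketch against sodiumoxide's sealed-box API (signatures as published in the 0.0.16-era crate; treat as approximate):

    extern crate sodiumoxide;
    use sodiumoxide::crypto::{box_, sealedbox};

    fn main() {
        sodiumoxide::init();
        let (pk, sk) = box_::gen_keypair();
        let ciphertext = sealedbox::seal(b"attack at dawn", &pk);
        // Any tampering or wrong key yields Err(()) with no further detail.
        let plaintext = sealedbox::open(&ciphertext, &pk, &sk)
            .expect("decryption failed");
        assert_eq!(plaintext, b"attack at dawn");
    }
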
@@ -285,7 +285,7 @@ impl Crypto {
         let mut pk = [0u8; 32];
         let mut sk = [0u8; 32];
         if unsafe { libsodium_sys::crypto_box_seed_keypair(&mut pk, &mut sk, &seed) } != 0 {
-            panic!("Libsodium failed");
+            tr_panic!("Libsodium failed");
         }
         (
             PublicKey::from_slice(&pk).unwrap(),

@@ -95,7 +95,7 @@ impl<'a> Deserialize<'a> for Hash {
     {
         let dat: Vec<u8> = try!(ByteBuf::deserialize(deserializer)).into();
         if dat.len() != 16 {
-            return Err(D::Error::custom("Invalid key length"));
+            return Err(D::Error::custom(tr!("Invalid key length")));
         }
         Ok(Hash {
             high: LittleEndian::read_u64(&dat[..8]),

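The 16-byte buffer is split into two little-endian u64 words; the `low` half presumably reads &dat[8..] just past this excerpt. The packing in isolation (struct shape inferred from the field name, not copied from the crate):

    use byteorder::{ByteOrder, LittleEndian};

    // Inferred shape: a 128-bit hash held as two u64 words.
    struct Hash { high: u64, low: u64 }

    fn hash_from_bytes(dat: &[u8; 16]) -> Hash {
        Hash {
            high: LittleEndian::read_u64(&dat[..8]),
            low: LittleEndian::read_u64(&dat[8..]),
        }
    }
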
@@ -141,7 +141,7 @@ impl HashMethod {
         match name {
             "blake2" => Ok(HashMethod::Blake2),
             "murmur3" => Ok(HashMethod::Murmur3),
-            _ => Err("Unsupported hash method"),
+            _ => Err(tr!("Unsupported hash method")),
         }
     }

@@ -15,22 +15,22 @@ quick_error!{
         Io(err: io::Error) {
             from()
             cause(err)
-            description("IO error")
-            display("Lock error: IO error\n\tcaused by: {}", err)
+            description(tr!("IO error"))
+            display("{}", tr_format!("Lock error: IO error\n\tcaused by: {}", err))
         }
         Yaml(err: serde_yaml::Error) {
             from()
             cause(err)
-            description("Yaml format error")
-            display("Lock error: yaml format error\n\tcaused by: {}", err)
+            description(tr!("Yaml format error"))
+            display("{}", tr_format!("Lock error: yaml format error\n\tcaused by: {}", err))
         }
         InvalidLockState(reason: &'static str) {
-            description("Invalid lock state")
-            display("Lock error: invalid lock state: {}", reason)
+            description(tr!("Invalid lock state"))
+            display("{}", tr_format!("Lock error: invalid lock state: {}", reason))
         }
         Locked {
-            description("Locked")
-            display("Lock error: locked")
+            description(tr!("Locked"))
+            display("{}", tr_format!("Lock error: locked"))
         }
     }
 }

@@ -122,13 +122,13 @@ impl LockFolder {
         for lock in try!(self.get_locks()) {
             if lock.exclusive {
                 if level == LockLevel::Exclusive {
-                    return Err(LockError::InvalidLockState("multiple exclusive locks"));
+                    return Err(LockError::InvalidLockState(tr!("multiple exclusive locks")));
                 } else {
                     level = LockLevel::Exclusive
                 }
             } else if level == LockLevel::Exclusive {
                 return Err(LockError::InvalidLockState(
-                    "exclusive lock and shared locks"
+                    tr!("exclusive lock and shared locks")
                 ));
             } else {
                 level = LockLevel::Shared

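This scan folds the individual lock files into one folder-level state: the first exclusive lock raises the level to Exclusive, a second exclusive lock is an inconsistent state, and a shared lock seen after an exclusive one is likewise invalid. The same fold reduced to standalone form (types simplified; hypothetical, not the crate's code):

    #[derive(PartialEq, Clone, Copy)]
    enum LockLevel { Free, Shared, Exclusive }

    // Hypothetical simplified fold over per-lock "exclusive?" flags.
    fn aggregate(locks: &[bool]) -> Result<LockLevel, &'static str> {
        let mut level = LockLevel::Free;
        for &exclusive in locks {
            if exclusive {
                if level == LockLevel::Exclusive {
                    return Err("multiple exclusive locks");
                }
                level = LockLevel::Exclusive;
            } else if level == LockLevel::Exclusive {
                return Err("exclusive lock and shared locks");
            } else {
                level = LockLevel::Shared;
            }
        }
        Ok(level)
    }
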