diff --git a/Cargo.lock b/Cargo.lock index 47e48ea..826b307 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -151,6 +151,11 @@ dependencies = [ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "lazy_static" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "lazy_static" version = "1.0.0" @@ -180,6 +185,17 @@ name = "linked-hash-map" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "locale_config" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "log" version = "0.3.9" @@ -583,6 +599,7 @@ dependencies = [ "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "libsodium-sys 0.0.16 (registry+https://github.com/rust-lang/crates.io-index)", + "locale_config 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "murmurhash3 0.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "pbr 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -622,11 +639,13 @@ dependencies = [ "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" "checksum fuse 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80e57070510966bfef93662a81cb8aa2b1c7db0964354fa9921434f04b9e8660" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +"checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" "checksum lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c8f31047daa365f19be14b47c29df4f7c3b581832407daabe6ae77397619237d" "checksum libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "e32a70cf75e5846d53a673923498228bbec6a8624708a9ea5645f075d6276122" "checksum libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "1e5d97d6708edaa407429faa671b942dc0f2727222fb6b6539bf1db936e4b121" "checksum libsodium-sys 0.0.16 (registry+https://github.com/rust-lang/crates.io-index)" = "fcbd1beeed8d44caa8a669ebaa697c313976e242c03cc9fb23d88bf1656f5542" "checksum linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "70fb39025bc7cdd76305867c4eccf2f2dcf6e9a57f5b21a93e1c2d86cd03ec9e" +"checksum locale_config 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "14fbee0e39bc2dd6a2427c4fdea66e9826cc1fd09b0a0b7550359f5f6efe1dab" "checksum log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b" "checksum log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "89f010e843f2b1a31dbd316b3b8d443758bc634bed37aabade59c686d644e0a2" "checksum memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"796fba70e76612589ed2ce7f45282f5af869e0fdd7cc6199fa1aa1f1d591ba9d" diff --git a/Cargo.toml b/Cargo.toml index adf7017..a363ae2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,6 +34,7 @@ users = "0.6" time = "*" libc = "0.2" runtime-fmt = "0.3" +locale_config = "^0.2.2" index = {path="index"} chunking = {path="chunking"} diff --git a/src/bundledb/cache.rs b/src/bundledb/cache.rs index 21cca8b..5447dfa 100644 --- a/src/bundledb/cache.rs +++ b/src/bundledb/cache.rs @@ -14,33 +14,33 @@ quick_error!{ pub enum BundleCacheError { Read(err: io::Error) { cause(err) - description("Failed to read bundle cache") - display("Bundle cache error: failed to read bundle cache\n\tcaused by: {}", err) + description(tr!("Failed to read bundle cache")) + display("{}", tr_format!("Bundle cache error: failed to read bundle cache\n\tcaused by: {}", err)) } Write(err: io::Error) { cause(err) - description("Failed to write bundle cache") - display("Bundle cache error: failed to write bundle cache\n\tcaused by: {}", err) + description(tr!("Failed to write bundle cache")) + display("{}", tr_format!("Bundle cache error: failed to write bundle cache\n\tcaused by: {}", err)) } WrongHeader { - description("Wrong header") - display("Bundle cache error: wrong header on bundle cache") + description(tr!("Wrong header")) + display("{}", tr_format!("Bundle cache error: wrong header on bundle cache")) } UnsupportedVersion(version: u8) { - description("Wrong version") - display("Bundle cache error: unsupported version: {}", version) + description(tr!("Wrong version")) + display("{}", tr_format!("Bundle cache error: unsupported version: {}", version)) } Decode(err: msgpack::DecodeError) { from() cause(err) - description("Failed to decode bundle cache") - display("Bundle cache error: failed to decode bundle cache\n\tcaused by: {}", err) + description(tr!("Failed to decode bundle cache")) + display("{}", tr_format!("Bundle cache error: failed to decode bundle cache\n\tcaused by: {}", err)) } Encode(err: msgpack::EncodeError) { from() cause(err) - description("Failed to encode bundle cache") - display("Bundle cache error: failed to encode bundle cache\n\tcaused by: {}", err) + description(tr!("Failed to encode bundle cache")) + display("{}", tr_format!("Bundle cache error: failed to encode bundle cache\n\tcaused by: {}", err)) } } } diff --git a/src/bundledb/db.rs b/src/bundledb/db.rs index d1c6108..df67d51 100644 --- a/src/bundledb/db.rs +++ b/src/bundledb/db.rs @@ -14,44 +14,44 @@ quick_error!{ pub enum BundleDbError { ListBundles(err: io::Error) { cause(err) - description("Failed to list bundles") - display("Bundle db error: failed to list bundles\n\tcaused by: {}", err) + description(tr!("Failed to list bundles")) + display("{}", tr_format!("Bundle db error: failed to list bundles\n\tcaused by: {}", err)) } Reader(err: BundleReaderError) { from() cause(err) - description("Failed to read bundle") - display("Bundle db error: failed to read bundle\n\tcaused by: {}", err) + description(tr!("Failed to read bundle")) + display("{}", tr_format!("Bundle db error: failed to read bundle\n\tcaused by: {}", err)) } Writer(err: BundleWriterError) { from() cause(err) - description("Failed to write bundle") - display("Bundle db error: failed to write bundle\n\tcaused by: {}", err) + description(tr!("Failed to write bundle")) + display("{}", tr_format!("Bundle db error: failed to write bundle\n\tcaused by: {}", err)) } Cache(err: BundleCacheError) { from() cause(err) - description("Failed to read/write bundle cache") - display("Bundle db error: 
failed to read/write bundle cache\n\tcaused by: {}", err) + description(tr!("Failed to read/write bundle cache")) + display("{}", tr_format!("Bundle db error: failed to read/write bundle cache\n\tcaused by: {}", err)) } UploadFailed { - description("Uploading a bundle failed") + description(tr!("Uploading a bundle failed")) } Io(err: io::Error, path: PathBuf) { cause(err) context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf()) - description("Io error") - display("Bundle db error: io error on {:?}\n\tcaused by: {}", path, err) + description(tr!("Io error")) + display("{}", tr_format!("Bundle db error: io error on {:?}\n\tcaused by: {}", path, err)) } NoSuchBundle(bundle: BundleId) { - description("No such bundle") - display("Bundle db error: no such bundle: {:?}", bundle) + description(tr!("No such bundle")) + display("{}", tr_format!("Bundle db error: no such bundle: {:?}", bundle)) } Remove(err: io::Error, bundle: BundleId) { cause(err) - description("Failed to remove bundle") - display("Bundle db error: failed to remove bundle {}\n\tcaused by: {}", bundle, err) + description(tr!("Failed to remove bundle")) + display("{}", tr_format!("Bundle db error: failed to remove bundle {}\n\tcaused by: {}", bundle, err)) } } } @@ -146,14 +146,14 @@ impl BundleDb { self.local_bundles.insert(bundle.id(), bundle); } } else { - warn!("Failed to read local bundle cache, rebuilding cache"); + tr_warn!("Failed to read local bundle cache, rebuilding cache"); } if let Ok(list) = StoredBundle::read_list_from(&self.layout.remote_bundle_cache_path()) { for bundle in list { self.remote_bundles.insert(bundle.id(), bundle); } } else { - warn!("Failed to read remote bundle cache, rebuilding cache"); + tr_warn!("Failed to read remote bundle cache, rebuilding cache"); } let base_path = self.layout.base_path(); let (new, gone) = try!(load_bundles( @@ -219,7 +219,7 @@ impl BundleDb { for id in meta_bundles { if !self.local_bundles.contains_key(&id) { let bundle = self.remote_bundles[&id].clone(); - debug!("Copying new meta bundle to local cache: {}", bundle.info.id); + tr_debug!("Copying new meta bundle to local cache: {}", bundle.info.id); try!(self.copy_remote_bundle_to_cache(&bundle)); } } @@ -407,7 +407,7 @@ impl BundleDb { pub fn check(&mut self, full: bool, repair: bool) -> Result<bool, BundleDbError> { let mut to_repair = vec![]; for (id, stored) in ProgressIter::new( - "checking bundles", + tr!("checking bundles"), self.remote_bundles.len(), self.remote_bundles.iter() ) @@ -433,7 +433,7 @@ impl BundleDb { } } if !to_repair.is_empty() { - for id in ProgressIter::new("repairing bundles", to_repair.len(), to_repair.iter()) { + for id in ProgressIter::new(tr!("repairing bundles"), to_repair.len(), to_repair.iter()) { try!(self.repair_bundle(id)); } try!(self.flush()); @@ -460,7 +460,7 @@ impl BundleDb { let mut bundle = match self.get_bundle(&stored) { Ok(bundle) => bundle, Err(err) => { - warn!( + tr_warn!( "Problem detected: failed to read bundle header: {}\n\tcaused by: {}", id, err @@ -471,7 +471,7 @@ impl BundleDb { let chunks = match bundle.get_chunk_list() { Ok(chunks) => chunks.clone(), Err(err) => { - warn!( + tr_warn!( "Problem detected: failed to read bundle chunks: {}\n\tcaused by: {}", id, err @@ -482,7 +482,7 @@ impl BundleDb { let data = match bundle.load_contents() { Ok(data) => data, Err(err) => { - warn!( + tr_warn!( "Problem detected: failed to read bundle data: {}\n\tcaused by: {}", id, err @@ -490,8 +490,8 @@ impl BundleDb { return self.evacuate_broken_bundle(stored); } }; - warn!("Problem detected:
bundle data was truncated: {}", id); - info!("Copying readable data into new bundle"); + tr_warn!("Problem detected: bundle data was truncated: {}", id); + tr_info!("Copying readable data into new bundle"); let info = stored.info.clone(); let mut new_bundle = try!(self.create_bundle( info.mode, @@ -509,7 +509,7 @@ impl BundleDb { pos += len as usize; } let bundle = try!(self.add_bundle(new_bundle)); - info!("New bundle id is {}", bundle.id); + tr_info!("New bundle id is {}", bundle.id); self.evacuate_broken_bundle(stored) } diff --git a/src/bundledb/reader.rs b/src/bundledb/reader.rs index 72c8a1d..fd86e55 100644 --- a/src/bundledb/reader.rs +++ b/src/bundledb/reader.rs @@ -15,42 +15,42 @@ quick_error!{ Read(err: io::Error, path: PathBuf) { cause(err) context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf()) - description("Failed to read data from file") - display("Bundle reader error: failed to read data from file {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to read data from file")) + display("{}", tr_format!("Bundle reader error: failed to read data from file {:?}\n\tcaused by: {}", path, err)) } WrongHeader(path: PathBuf) { - description("Wrong header") - display("Bundle reader error: wrong header on bundle {:?}", path) + description(tr!("Wrong header")) + display("{}", tr_format!("Bundle reader error: wrong header on bundle {:?}", path)) } UnsupportedVersion(path: PathBuf, version: u8) { - description("Wrong version") - display("Bundle reader error: unsupported version on bundle {:?}: {}", path, version) + description(tr!("Wrong version")) + display("{}", tr_format!("Bundle reader error: unsupported version on bundle {:?}: {}", path, version)) } NoSuchChunk(bundle: BundleId, id: usize) { - description("Bundle has no such chunk") - display("Bundle reader error: bundle {:?} has no chunk with id {}", bundle, id) + description(tr!("Bundle has no such chunk")) + display("{}", tr_format!("Bundle reader error: bundle {:?} has no chunk with id {}", bundle, id)) } Decode(err: msgpack::DecodeError, path: PathBuf) { cause(err) context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf()) - description("Failed to decode bundle header") - display("Bundle reader error: failed to decode bundle header of {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to decode bundle header")) + display("{}", tr_format!("Bundle reader error: failed to decode bundle header of {:?}\n\tcaused by: {}", path, err)) } Decompression(err: CompressionError, path: PathBuf) { cause(err) context(path: &'a Path, err: CompressionError) -> (err, path.to_path_buf()) - description("Decompression failed") - display("Bundle reader error: decompression failed on bundle {:?}\n\tcaused by: {}", path, err) + description(tr!("Decompression failed")) + display("{}", tr_format!("Bundle reader error: decompression failed on bundle {:?}\n\tcaused by: {}", path, err)) } Decryption(err: EncryptionError, path: PathBuf) { cause(err) context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf()) - description("Decryption failed") - display("Bundle reader error: decryption failed on bundle {:?}\n\tcaused by: {}", path, err) + description(tr!("Decryption failed")) + display("{}", tr_format!("Bundle reader error: decryption failed on bundle {:?}\n\tcaused by: {}", path, err)) } Integrity(bundle: BundleId, reason: &'static str) { - description("Bundle has an integrity error") - display("Bundle reader error: bundle {:?} has an integrity error: {}", bundle, reason) + 
description(tr!("Bundle has an integrity error")) + display("{}", tr_format!("Bundle reader error: bundle {:?} has an integrity error: {}", bundle, reason)) } } } @@ -151,7 +151,7 @@ impl BundleReader { } fn load_chunklist(&mut self) -> Result<(), BundleReaderError> { - debug!( + tr_debug!( "Load bundle chunklist {} ({:?})", self.info.id, self.info.mode @@ -197,7 +197,7 @@ impl BundleReader { } fn load_encoded_contents(&self) -> Result, BundleReaderError> { - debug!("Load bundle data {} ({:?})", self.info.id, self.info.mode); + tr_debug!("Load bundle data {} ({:?})", self.info.id, self.info.mode); let mut file = BufReader::new(try!(File::open(&self.path).context(&self.path as &Path))); try!( file.seek(SeekFrom::Start(self.content_start as u64)) @@ -256,7 +256,7 @@ impl BundleReader { if self.info.chunk_count != self.chunks.as_ref().unwrap().len() { return Err(BundleReaderError::Integrity( self.id(), - "Chunk list size does not match chunk count" + tr!("Chunk list size does not match chunk count") )); } if self.chunks @@ -268,7 +268,7 @@ impl BundleReader { { return Err(BundleReaderError::Integrity( self.id(), - "Individual chunk sizes do not add up to total size" + tr!("Individual chunk sizes do not add up to total size") )); } if !full { @@ -276,7 +276,7 @@ impl BundleReader { if size as usize != self.info.encoded_size + self.content_start { return Err(BundleReaderError::Integrity( self.id(), - "File size does not match size in header, truncated file" + tr!("File size does not match size in header, truncated file") )); } return Ok(()); @@ -285,14 +285,14 @@ impl BundleReader { if self.info.encoded_size != encoded_contents.len() { return Err(BundleReaderError::Integrity( self.id(), - "Encoded data size does not match size in header, truncated bundle" + tr!("Encoded data size does not match size in header, truncated bundle") )); } let contents = try!(self.decode_contents(encoded_contents)); if self.info.raw_size != contents.len() { return Err(BundleReaderError::Integrity( self.id(), - "Raw data size does not match size in header, truncated bundle" + tr!("Raw data size does not match size in header, truncated bundle") )); } //TODO: verify checksum @@ -302,15 +302,14 @@ impl BundleReader { impl Debug for BundleReader { fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { - write!( - fmt, - "Bundle(\n\tid: {}\n\tpath: {:?}\n\tchunks: {}\n\tsize: {}, encoded: {}\n\tcompression: {:?}\n)", + write!(fmt, "{}", + tr_format!("Bundle(\n\tid: {}\n\tpath: {:?}\n\tchunks: {}\n\tsize: {}, encoded: {}\n\tcompression: {:?}\n)", self.info.id.to_string(), self.path, self.info.chunk_count, self.info.raw_size, self.info.encoded_size, self.info.compression - ) + )) } } diff --git a/src/bundledb/uploader.rs b/src/bundledb/uploader.rs index 3428f09..1a96087 100644 --- a/src/bundledb/uploader.rs +++ b/src/bundledb/uploader.rs @@ -51,10 +51,10 @@ impl BundleUploader { pub fn queue(&self, local_path: PathBuf, remote_path: PathBuf) -> Result<(), BundleDbError> { while self.waiting.load(Ordering::SeqCst) >= self.capacity { - debug!("Upload queue is full, waiting for slots"); + tr_debug!("Upload queue is full, waiting for slots"); let _ = self.wait.0.wait(self.wait.1.lock().unwrap()).unwrap(); } - trace!("Adding to upload queue: {:?}", local_path); + tr_trace!("Adding to upload queue: {:?}", local_path); if !self.error_present.load(Ordering::SeqCst) { self.waiting.fetch_add(1, Ordering::SeqCst); self.queue.push(Some((local_path, remote_path))); @@ -75,21 +75,21 @@ impl BundleUploader { fn 
worker_thread_inner(&self) -> Result<(), BundleDbError> { while let Some((src_path, dst_path)) = self.queue.pop() { - trace!("Uploading {:?} to {:?}", src_path, dst_path); + tr_trace!("Uploading {:?} to {:?}", src_path, dst_path); self.waiting.fetch_sub(1, Ordering::SeqCst); self.wait.0.notify_all(); let folder = dst_path.parent().unwrap(); try!(fs::create_dir_all(&folder).context(folder as &Path)); try!(fs::copy(&src_path, &dst_path).context(&dst_path as &Path)); try!(fs::remove_file(&src_path).context(&src_path as &Path)); - debug!("Uploaded {:?} to {:?}", src_path, dst_path); + tr_debug!("Uploaded {:?} to {:?}", src_path, dst_path); } Ok(()) } fn worker_thread(&self) { if let Err(err) = self.worker_thread_inner() { - debug!("Upload thread failed with error: {}", err); + tr_debug!("Upload thread failed with error: {}", err); *self.error.lock().unwrap() = Some(err); self.error_present.store(true, Ordering::SeqCst); } diff --git a/src/bundledb/writer.rs b/src/bundledb/writer.rs index 0dc7b68..f7aca9c 100644 --- a/src/bundledb/writer.rs +++ b/src/bundledb/writer.rs @@ -14,31 +14,31 @@ quick_error!{ pub enum BundleWriterError { CompressionSetup(err: CompressionError) { cause(err) - description("Failed to setup compression") - display("Bundle writer error: failed to setup compression\n\tcaused by: {}", err) + description(tr!("Failed to setup compression")) + display("{}", tr_format!("Bundle writer error: failed to setup compression\n\tcaused by: {}", err)) } Compression(err: CompressionError) { cause(err) - description("Failed to compress data") - display("Bundle writer error: failed to compress data\n\tcaused by: {}", err) + description(tr!("Failed to compress data")) + display("{}", tr_format!("Bundle writer error: failed to compress data\n\tcaused by: {}", err)) } Encryption(err: EncryptionError) { from() cause(err) - description("Encryption failed") - display("Bundle writer error: failed to encrypt data\n\tcaused by: {}", err) + description(tr!("Encryption failed")) + display("{}", tr_format!("Bundle writer error: failed to encrypt data\n\tcaused by: {}", err)) } Encode(err: msgpack::EncodeError, path: PathBuf) { cause(err) context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf()) - description("Failed to encode bundle header to file") - display("Bundle writer error: failed to encode bundle header to file {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to encode bundle header to file")) + display("{}", tr_format!("Bundle writer error: failed to encode bundle header to file {:?}\n\tcaused by: {}", path, err)) } Write(err: io::Error, path: PathBuf) { cause(err) context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf()) - description("Failed to write data to file") - display("Bundle writer error: failed to write data to file {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to write data to file")) + display("{}", tr_format!("Bundle writer error: failed to write data to file {:?}\n\tcaused by: {}", path, err)) } } } diff --git a/src/chunker.rs b/src/chunker.rs index f23af52..6102aa6 100644 --- a/src/chunker.rs +++ b/src/chunker.rs @@ -25,14 +25,14 @@ impl ChunkerType { "rabin" => Ok(ChunkerType::Rabin((avg_size, seed as u32))), "fastcdc" => Ok(ChunkerType::FastCdc((avg_size, seed))), "fixed" => Ok(ChunkerType::Fixed(avg_size)), - _ => Err(tr!("Unsupported chunker type")), } } pub fn from_string(name: &str) -> Result<Self, &'static str> { let (name, size) = if let Some(pos) = name.find('/') { let size =
try!(usize::from_str(&name[pos + 1..]).map_err( - |_| "Chunk size must be a number" + |_| tr!("Chunk size must be a number") )); let name = &name[..pos]; (name, size) diff --git a/src/cli/algotest.rs b/src/cli/algotest.rs index dfeb980..be88f6b 100644 --- a/src/cli/algotest.rs +++ b/src/cli/algotest.rs @@ -52,7 +52,7 @@ pub fn run( let mut total_write_time = 0.0; let mut total_read_time = 0.0; - println!("Reading input file ..."); + tr_println!("Reading input file ..."); let mut file = File::open(path).unwrap(); let total_size = file.metadata().unwrap().len(); let mut size = total_size; @@ -67,7 +67,7 @@ pub fn run( println!(); - println!( + tr_println!( "Chunking data with {}, avg chunk size {} ...", chunker.name(), to_file_size(chunker.avg_size() as u64) @@ -95,7 +95,7 @@ pub fn run( .sum::<f32>() / (chunks.len() as f32 - 1.0)) .sqrt(); - println!( + tr_println!( "- {} chunks, avg size: {} ±{}", chunks.len(), to_file_size(chunk_size_avg as u64), @@ -104,7 +104,7 @@ pub fn run( println!(); - println!("Hashing chunks with {} ...", hash.name()); + tr_println!("Hashing chunks with {} ...", hash.name()); let mut hashes = Vec::with_capacity(chunks.len()); let hash_time = Duration::span(|| for &(pos, len) in &chunks { hashes.push(hash.hash(&data[pos..pos + len])) @@ -128,7 +128,7 @@ pub fn run( let (_, len) = chunks.remove(*i); dup_size += len; } - println!( + tr_println!( "- {} duplicate chunks, {}, {:.1}% saved by internal deduplication", dups.len(), to_file_size(dup_size as u64), @@ -141,7 +141,7 @@ pub fn run( if let Some(compression) = compression.clone() { println!(); - println!("Compressing chunks with {} ...", compression.to_string()); + tr_println!("Compressing chunks with {} ...", compression.to_string()); let compress_time = Duration::span(|| { let mut bundle = Vec::with_capacity(bundle_size + 2 * chunk_size_avg as usize); let mut c = compression.compress_stream().unwrap(); @@ -164,7 +164,7 @@ pub fn run( to_speed(size, compress_time) ); let compressed_size = bundles.iter().map(|b| b.len()).sum::<usize>(); - println!( + tr_println!( "- {} bundles, {}, {:.1}% saved", bundles.len(), to_file_size(compressed_size as u64), @@ -191,7 +191,7 @@ pub fn run( crypto.add_secret_key(public, secret); let encryption = (EncryptionMethod::Sodium, public[..].to_vec().into()); - println!("Encrypting bundles..."); + tr_println!("Encrypting bundles..."); let mut encrypted_bundles = Vec::with_capacity(bundles.len()); let encrypt_time = Duration::span(|| for bundle in bundles { @@ -206,7 +206,7 @@ pub fn run( println!(); - println!("Decrypting bundles..."); + tr_println!("Decrypting bundles..."); bundles = Vec::with_capacity(encrypted_bundles.len()); let decrypt_time = Duration::span(|| for bundle in encrypted_bundles { bundles.push(crypto.decrypt(&encryption, &bundle).unwrap()); @@ -222,7 +222,7 @@ pub fn run( if let Some(compression) = compression { println!(); - println!("Decompressing bundles with {} ...", compression.to_string()); + tr_println!("Decompressing bundles with {} ...", compression.to_string()); let mut dummy = ChunkSink { chunks: vec![], written: 0, @@ -243,17 +243,17 @@ pub fn run( println!(); - println!( + tr_println!( "Total storage size: {} / {}, ratio: {:.1}%", to_file_size(size as u64), to_file_size(total_size as u64), size as f32 / total_size as f32 * 100.0 ); - println!( + tr_println!( "Total processing speed: {}", to_speed(total_size, total_write_time) ); - println!( + tr_println!( "Total read speed: {}", to_speed(total_size, total_read_time) ); diff --git a/src/cli/args.rs b/src/cli/args.rs
index 039185e..687e6c7 100644 --- a/src/cli/args.rs +++ b/src/cli/args.rs @@ -157,10 +157,10 @@ fn parse_repo_path( let mut parts = repo_path.splitn(3, "::"); let repo = convert_repo_path(parts.next().unwrap_or("")); if existing && !repo.join("config.yaml").exists() { - return Err("The specified repository does not exist".to_string()); + return Err(tr!("The specified repository does not exist").to_string()); } if !existing && repo.exists() { - return Err("The specified repository already exists".to_string()); + return Err(tr!("The specified repository already exists").to_string()); } let mut backup = parts.next(); if let Some(val) = backup { @@ -176,18 +176,18 @@ } if let Some(restr) = backup_restr { if !restr && backup.is_some() { - return Err("No backup may be given here".to_string()); + return Err(tr!("No backup may be given here").to_string()); } if restr && backup.is_none() { - return Err("A backup must be specified".to_string()); + return Err(tr!("A backup must be specified").to_string()); } } if let Some(restr) = path_restr { if !restr && path.is_some() { - return Err("No subpath may be given here".to_string()); + return Err(tr!("No subpath may be given here").to_string()); } if restr && path.is_none() { - return Err("A subpath must be specified".to_string()); + return Err(tr!("A subpath must be specified").to_string()); } } Ok((repo, backup, path)) @@ -207,7 +207,7 @@ fn parse_num(num: &str) -> Result<u64, String> { if let Ok(num) = num.parse::<u64>() { Ok(num) } else { - Err("Must be a number".to_string()) + Err(tr!("Must be a number").to_string()) } } @@ -220,7 +220,7 @@ fn parse_chunker(val: &str) -> Result<ChunkerType, String> { if let Ok(chunker) = ChunkerType::from_string(val) { Ok(chunker) } else { - Err("Invalid chunker method/size".to_string()) + Err(tr!("Invalid chunker method/size").to_string()) } } @@ -236,7 +236,7 @@ fn parse_compression(val: &str) -> Result<Option<Compression>, String> { if let Ok(compression) = Compression::from_string(val) { Ok(Some(compression)) } else { - Err("Invalid compression method/level".to_string()) + Err(tr!("Invalid compression method/level").to_string()) } } @@ -252,13 +252,13 @@ fn parse_public_key(val: &str) -> Result<Option<PublicKey>, String> { let bytes = match parse_hex(val) { Ok(bytes) => bytes, Err(_) => { - return Err("Invalid hexadecimal".to_string()); + return Err(tr!("Invalid hexadecimal").to_string()); } }; if let Some(key) = PublicKey::from_slice(&bytes) { Ok(Some(key)) } else { - return Err("Invalid key".to_string()); + return Err(tr!("Invalid key").to_string()); } } @@ -271,7 +271,7 @@ fn parse_hash(val: &str) -> Result<HashMethod, String> { if let Ok(hash) = HashMethod::from(val) { Ok(hash) } else { - Err("Invalid hash method".to_string()) + Err(tr!("Invalid hash method").to_string()) } } @@ -284,7 +284,7 @@ fn parse_bundle_id(val: &str) -> Result<BundleId, ErrorCode> { if let Ok(hash) = Hash::from_string(val) { Ok(BundleId(hash)) } else { - error!("Invalid bundle id: {}", val); + tr_error!("Invalid bundle id: {}", val); Err(ErrorCode::InvalidArgs) } } @@ -292,7 +292,7 @@ fn parse_bundle_id(val: &str) -> Result<BundleId, ErrorCode> { #[allow(unknown_lints, needless_pass_by_value)] fn validate_existing_path(val: String) -> Result<(), String> { if !Path::new(&val).exists() { - Err("Path does not exist".to_string()) + Err(tr!("Path does not exist").to_string()) } else { Ok(()) } @@ -301,7 +301,7 @@ fn validate_existing_path(val: String) -> Result<(), String> { #[allow(unknown_lints, needless_pass_by_value)] fn validate_existing_path_or_stdio(val: String) -> Result<(), String> { if val != "-" && !Path::new(&val).exists() { - Err("Path does
not exist".to_string()) + Err(tr!("Path does not exist").to_string()) } else { Ok(()) } @@ -310,153 +310,274 @@ fn validate_existing_path_or_stdio(val: String) -> Result<(), String> { #[allow(unknown_lints, cyclomatic_complexity)] pub fn parse() -> Result<(log::Level, Arguments), ErrorCode> { - let args = App::new("zvault").version(crate_version!()).author(crate_authors!(",\n")).about(crate_description!()) + let args = App::new("zvault") + .version(crate_version!()) + .author(crate_authors!(",\n")) + .about(crate_description!()) .settings(&[AppSettings::VersionlessSubcommands, AppSettings::SubcommandRequiredElseHelp]) .global_settings(&[AppSettings::AllowMissingPositional, AppSettings::UnifiedHelpMessage, AppSettings::ColoredHelp, AppSettings::ColorAuto]) - .arg(Arg::from_usage("-v --verbose 'Print more information'").global(true).multiple(true).max_values(3).takes_value(false)) - .arg(Arg::from_usage("-q --quiet 'Print less information'").global(true).conflicts_with("verbose")) - .subcommand(SubCommand::with_name("init").about("Initialize a new repository") - .arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE] 'Set the target bundle size in MiB'") - .default_value(DEFAULT_BUNDLE_SIZE_STR).validator(validate_num)) - .arg(Arg::from_usage("--chunker [CHUNKER] 'Set the chunker algorithm and target chunk size'") - .default_value(DEFAULT_CHUNKER).validator(validate_chunker)) - .arg(Arg::from_usage("-c --compression [COMPRESSION] 'Set the compression method and level'") - .default_value(DEFAULT_COMPRESSION).validator(validate_compression)) - .arg(Arg::from_usage("-e --encrypt 'Generate a keypair and enable encryption'")) - .arg(Arg::from_usage("--hash [HASH] 'Set the hash method'") - .default_value(DEFAULT_HASH).validator(validate_hash)) - .arg(Arg::from_usage("-r --remote 'Set the path to the mounted remote storage'") - .validator(validate_existing_path)) - .arg(Arg::from_usage(" 'The path for the new repository'") - .validator(|val| validate_repo_path(val, false, Some(false), Some(false))))) - .subcommand(SubCommand::with_name("backup").about("Create a new backup") - .arg(Arg::from_usage("--full 'Create a full backup without using a reference'")) - .arg(Arg::from_usage("[reference] --ref [REF] 'Base the new backup on this reference'") - .conflicts_with("full")) - .arg(Arg::from_usage("[cross_device] -x --xdev 'Allow to cross filesystem boundaries'")) - .arg(Arg::from_usage("-e --exclude [PATTERN]... 
'Exclude this path or file pattern'")) - .arg(Arg::from_usage("[excludes_from] --excludes-from [FILE] 'Read the list of excludes from this file'")) - .arg(Arg::from_usage("[no_default_excludes] --no-default-excludes 'Do not load the default excludes file'")) - .arg(Arg::from_usage("--tar 'Read the source data from a tar file'") - .conflicts_with_all(&["reference", "exclude", "excludes_from"])) - .arg(Arg::from_usage("<SRC> 'Source path to backup'") - .validator(validate_existing_path_or_stdio)) - .arg(Arg::from_usage("<BACKUP> 'Backup path, [repository]::backup'") - .validator(|val| validate_repo_path(val, true, Some(true), Some(false))))) - .subcommand(SubCommand::with_name("restore").about("Restore a backup or subtree") - .arg(Arg::from_usage("--tar 'Restore in form of a tar file'")) - .arg(Arg::from_usage("<BACKUP> 'The backup/subtree path, [repository]::backup[::subtree]'") - .validator(|val| validate_repo_path(val, true, Some(true), None))) - .arg(Arg::from_usage("<DST> 'Destination path for backup'"))) - .subcommand(SubCommand::with_name("remove").aliases(&["rm", "delete", "del"]).about("Remove a backup or a subtree") - .arg(Arg::from_usage("-f --force 'Remove multiple backups in a backup folder'")) - .arg(Arg::from_usage("<BACKUP> 'The backup/subtree path, [repository]::backup[::subtree]'") - .validator(|val| validate_repo_path(val, true, Some(true), None)))) - .subcommand(SubCommand::with_name("prune").about("Remove backups based on age") - .arg(Arg::from_usage("-p --prefix [PREFIX] 'Only consider backups starting with this prefix'")) - .arg(Arg::from_usage("-d --daily [NUM] 'Keep this number of daily backups'") - .default_value("0").validator(validate_num)) - .arg(Arg::from_usage("-w --weekly [NUM] 'Keep this number of weekly backups'") - .default_value("0").validator(validate_num)) - .arg(Arg::from_usage("-m --monthly [NUM] 'Keep this number of monthly backups'") - .default_value("0").validator(validate_num)) - .arg(Arg::from_usage("-y --yearly [NUM] 'Keep this number of yearly backups'") - .default_value("0").validator(validate_num)) - .arg(Arg::from_usage("-f --force 'Actually run the prune instead of simulating it'")) - .arg(Arg::from_usage("<REPO> 'Path of the repository'") - .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) - .subcommand(SubCommand::with_name("vacuum").about("Reclaim space by rewriting bundles") - .arg(Arg::from_usage("-r --ratio [NUM] 'Ratio in % of unused space in a bundle to rewrite that bundle'") - .default_value(DEFAULT_VACUUM_RATIO_STR).validator(validate_num)) - .arg(Arg::from_usage("--combine 'Combine small bundles into larger ones'")) - .arg(Arg::from_usage("-f --force 'Actually run the vacuum instead of simulating it'")) - .arg(Arg::from_usage("<REPO> 'Path of the repository'") - .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) - .subcommand(SubCommand::with_name("check").about("Check the repository, a backup or a backup subtree") - .arg(Arg::from_usage("-b --bundles 'Check the bundles'")) - .arg(Arg::from_usage("[bundle_data] --bundle-data 'Check bundle contents (slow)'").requires("bundles").alias("data")) - .arg(Arg::from_usage("-i --index 'Check the chunk index'")) - .arg(Arg::from_usage("-r --repair 'Try to repair errors'")) - .arg(Arg::from_usage("<PATH> 'Path of the repository/backup/subtree, [repository][::backup[::subtree]]'") - .validator(|val| validate_repo_path(val, true, None, None)))) - .subcommand(SubCommand::with_name("list").alias("ls").about("List backups or backup contents") - .arg(Arg::from_usage("<PATH> 'Path of the
repository/backup/subtree, [repository][::backup[::subtree]]'") - .validator(|val| validate_repo_path(val, true, None, None)))) - .subcommand(SubCommand::with_name("mount").about("Mount the repository, a backup or a subtree") - .arg(Arg::from_usage("<PATH> 'Path of the repository/backup/subtree, [repository][::backup[::subtree]]'") - .validator(|val| validate_repo_path(val, true, None, None))) - .arg(Arg::from_usage("<MOUNTPOINT> 'Existing mount point'") - .validator(validate_existing_path))) - .subcommand(SubCommand::with_name("bundlelist").about("List bundles in a repository") - .arg(Arg::from_usage("<REPO> 'Path of the repository'") - .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) - .subcommand(SubCommand::with_name("bundleinfo").about("Display information on a bundle") - .arg(Arg::from_usage("<REPO> 'Path of the repository'") - .validator(|val| validate_repo_path(val, true, Some(false), Some(false)))) - .arg(Arg::from_usage("<BUNDLE> 'Id of the bundle'"))) - .subcommand(SubCommand::with_name("import").about("Reconstruct a repository from the remote storage") - .arg(Arg::from_usage("-k --key [FILE]... 'Key file needed to read the bundles'")) - .arg(Arg::from_usage("<REMOTE> 'Remote repository path'") - .validator(validate_existing_path)) - .arg(Arg::from_usage("<REPO> 'The path for the new repository'") - .validator(|val| validate_repo_path(val, false, Some(false), Some(false))))) - .subcommand(SubCommand::with_name("info").about("Display information on a repository, a backup or a subtree") - .arg(Arg::from_usage("<PATH> 'Path of the repository/backup/subtree, [repository][::backup[::subtree]]'") - .validator(|val| validate_repo_path(val, true, None, None)))) - .subcommand(SubCommand::with_name("analyze").about("Analyze the used and reclaimable space of bundles") - .arg(Arg::from_usage("<REPO> 'Path of the repository'") - .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) - .subcommand(SubCommand::with_name("versions").about("Find different versions of a file in all backups") - .arg(Arg::from_usage("<REPO> 'Path of the repository'") - .validator(|val| validate_repo_path(val, true, Some(false), Some(false)))) - .arg(Arg::from_usage("<PATH> 'Path of the file'"))) - .subcommand(SubCommand::with_name("diff").about("Display differences between two backup versions") - .arg(Arg::from_usage("<OLD> 'Old version, [repository]::backup[::subpath]'") - .validator(|val| validate_repo_path(val, true, Some(true), None))) - .arg(Arg::from_usage("<NEW> 'New version, [repository]::backup[::subpath]'") - .validator(|val| validate_repo_path(val, true, Some(true), None)))) - .subcommand(SubCommand::with_name("copy").alias("cp").about("Create a copy of a backup") - .arg(Arg::from_usage("<SRC> 'Existing backup, [repository]::backup'") - .validator(|val| validate_repo_path(val, true, Some(true), Some(false)))) - .arg(Arg::from_usage("<DST> 'Destination backup, [repository]::backup'") - .validator(|val| validate_repo_path(val, true, Some(true), Some(false))))) - .subcommand(SubCommand::with_name("config").about("Display or change the configuration") - .arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE] 'Set the target bundle size in MiB'") + .arg(Arg::from_usage("-v --verbose") + .help(tr!("Print more information")) + .global(true) + .multiple(true) + .max_values(3) + .takes_value(false)) + .arg(Arg::from_usage("-q --quiet") + .help(tr!("Print less information")) + .global(true) + .conflicts_with("verbose")) + .subcommand(SubCommand::with_name("init") + .about(tr!("Initialize a new repository")) + .arg(Arg::from_usage("[bundle_size] --bundle-size
[SIZE]") + .help(tr!("Set the target bundle size in MiB")) + .default_value(DEFAULT_BUNDLE_SIZE_STR) .validator(validate_num)) - .arg(Arg::from_usage("--chunker [CHUNKER] 'Set the chunker algorithm and target chunk size'") + .arg(Arg::from_usage("--chunker [CHUNKER]") + .help(tr!("Set the chunker algorithm and target chunk size")) + .default_value(DEFAULT_CHUNKER) + .validator(validate_chunker)) + .arg(Arg::from_usage("-c --compression [COMPRESSION]") + .help(tr!("Set the compression method and level")) + .default_value(DEFAULT_COMPRESSION) + .validator(validate_compression)) + .arg(Arg::from_usage("-e --encrypt") + .help(tr!("Generate a keypair and enable encryption"))) + .arg(Arg::from_usage("--hash [HASH]") + .help(tr!("Set the hash method'")) + .default_value(DEFAULT_HASH) + .validator(validate_hash)) + .arg(Arg::from_usage("-r --remote ") + .help(tr!("Set the path to the mounted remote storage")) + .validator(validate_existing_path)) + .arg(Arg::from_usage("") + .help(tr!("The path for the new repository")) + .validator(|val| validate_repo_path(val, false, Some(false), Some(false))))) + .subcommand(SubCommand::with_name("backup") + .about(tr!("Create a new backup")) + .arg(Arg::from_usage("--full") + .help(tr!("Create a full backup without using a reference"))) + .arg(Arg::from_usage("[reference] --ref [REF]") + .help(tr!("Base the new backup on this reference")) + .conflicts_with("full")) + .arg(Arg::from_usage("[cross_device] -x --xdev") + .help(tr!("Allow to cross filesystem boundaries"))) + .arg(Arg::from_usage("-e --exclude [PATTERN]...") + .help(tr!("Exclude this path or file pattern"))) + .arg(Arg::from_usage("[excludes_from] --excludes-from [FILE]") + .help(tr!("Read the list of excludes from this file"))) + .arg(Arg::from_usage("[no_default_excludes] --no-default-excludes") + .help(tr!("Do not load the default excludes file"))) + .arg(Arg::from_usage("--tar") + .help(tr!("Read the source data from a tar file")) + .conflicts_with_all(&["reference", "exclude", "excludes_from"])) + .arg(Arg::from_usage("") + .help(tr!("Source path to backup")) + .validator(validate_existing_path_or_stdio)) + .arg(Arg::from_usage("") + .help(tr!("Backup path, [repository]::backup")) + .validator(|val| validate_repo_path(val, true, Some(true), Some(false))))) + .subcommand(SubCommand::with_name("restore") + .about(tr!("Restore a backup or subtree")) + .arg(Arg::from_usage("--tar") + .help(tr!("Restore in form of a tar file"))) + .arg(Arg::from_usage("") + .help(tr!("The backup/subtree path, [repository]::backup[::subtree]")) + .validator(|val| validate_repo_path(val, true, Some(true), None))) + .arg(Arg::from_usage("") + .help(tr!("Destination path for backup")))) + .subcommand(SubCommand::with_name("remove") + .aliases(&["rm", "delete", "del"]) + .about(tr!("Remove a backup or a subtree")) + .arg(Arg::from_usage("-f --force") + .help(tr!("Remove multiple backups in a backup folder"))) + .arg(Arg::from_usage("") + .help(tr!("The backup/subtree path, [repository]::backup[::subtree]")) + .validator(|val| validate_repo_path(val, true, Some(true), None)))) + .subcommand(SubCommand::with_name("prune") + .about(tr!("Remove backups based on age")) + .arg(Arg::from_usage("-p --prefix [PREFIX]") + .help(tr!("Only consider backups starting with this prefix"))) + .arg(Arg::from_usage("-d --daily [NUM]") + .help(tr!("Keep this number of daily backups")) + .default_value("0") + .validator(validate_num)) + .arg(Arg::from_usage("-w --weekly [NUM]") + .help(tr!("Keep this number of weekly backups")) + 
.default_value("0") + .validator(validate_num)) + .arg(Arg::from_usage("-m --monthly [NUM]") + .help(tr!("Keep this number of monthly backups")) + .default_value("0") + .validator(validate_num)) + .arg(Arg::from_usage("-y --yearly [NUM]") + .help(tr!("Keep this number of yearly backups")) + .default_value("0") + .validator(validate_num)) + .arg(Arg::from_usage("-f --force") + .help(tr!("Actually run the prune instead of simulating it"))) + .arg(Arg::from_usage("") + .help(tr!("Path of the repository")) + .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) + .subcommand(SubCommand::with_name("vacuum") + .about(tr!("Reclaim space by rewriting bundles")) + .arg(Arg::from_usage("-r --ratio [NUM]") + .help(tr!("Ratio in % of unused space in a bundle to rewrite that bundle")) + .default_value(DEFAULT_VACUUM_RATIO_STR).validator(validate_num)) + .arg(Arg::from_usage("--combine") + .help(tr!("Combine small bundles into larger ones"))) + .arg(Arg::from_usage("-f --force") + .help(tr!("Actually run the vacuum instead of simulating it"))) + .arg(Arg::from_usage("") + .help(tr!("Path of the repository")) + .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) + .subcommand(SubCommand::with_name("check") + .about(tr!("Check the repository, a backup or a backup subtree")) + .arg(Arg::from_usage("-b --bundles") + .help(tr!("Check the bundles"))) + .arg(Arg::from_usage("[bundle_data] --bundle-data") + .help(tr!("Check bundle contents (slow)")) + .requires("bundles") + .alias("data")) + .arg(Arg::from_usage("-i --index") + .help(tr!("Check the chunk index"))) + .arg(Arg::from_usage("-r --repair") + .help(tr!("Try to repair errors"))) + .arg(Arg::from_usage("") + .help(tr!("Path of the repository/backup/subtree, [repository][::backup[::subtree]]")) + .validator(|val| validate_repo_path(val, true, None, None)))) + .subcommand(SubCommand::with_name("list") + .alias("ls") + .about(tr!("List backups or backup contents")) + .arg(Arg::from_usage("") + .help(tr!("Path of the repository/backup/subtree, [repository][::backup[::subtree]]")) + .validator(|val| validate_repo_path(val, true, None, None)))) + .subcommand(SubCommand::with_name("mount") + .about(tr!("Mount the repository, a backup or a subtree")) + .arg(Arg::from_usage("") + .help(tr!("Path of the repository/backup/subtree, [repository][::backup[::subtree]]")) + .validator(|val| validate_repo_path(val, true, None, None))) + .arg(Arg::from_usage("") + .help(tr!("Existing mount point")) + .validator(validate_existing_path))) + .subcommand(SubCommand::with_name("bundlelist") + .about(tr!("List bundles in a repository")) + .arg(Arg::from_usage("") + .help(tr!("Path of the repository")) + .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) + .subcommand(SubCommand::with_name("bundleinfo") + .about(tr!("Display information on a bundle")) + .arg(Arg::from_usage("") + .help(tr!("Path of the repository")) + .validator(|val| validate_repo_path(val, true, Some(false), Some(false)))) + .arg(Arg::from_usage("") + .help(tr!("Id of the bundle")))) + .subcommand(SubCommand::with_name("import") + .about(tr!("Reconstruct a repository from the remote storage")) + .arg(Arg::from_usage("-k --key [FILE]...") + .help(tr!("Key file needed to read the bundles"))) + .arg(Arg::from_usage("") + .help(tr!("Remote repository path")) + .validator(validate_existing_path)) + .arg(Arg::from_usage("") + .help(tr!("The path for the new repository")) + .validator(|val| validate_repo_path(val, false, Some(false), 
Some(false))))) + .subcommand(SubCommand::with_name("info") + .about(tr!("Display information on a repository, a backup or a subtree")) + .arg(Arg::from_usage("<PATH>") + .help(tr!("Path of the repository/backup/subtree, [repository][::backup[::subtree]]")) + .validator(|val| validate_repo_path(val, true, None, None)))) + .subcommand(SubCommand::with_name("analyze") + .about(tr!("Analyze the used and reclaimable space of bundles")) + .arg(Arg::from_usage("<REPO>") + .help(tr!("Path of the repository")) + .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) + .subcommand(SubCommand::with_name("versions") + .about(tr!("Find different versions of a file in all backups")) + .arg(Arg::from_usage("<REPO>") + .help(tr!("Path of the repository")) + .validator(|val| validate_repo_path(val, true, Some(false), Some(false)))) + .arg(Arg::from_usage("<PATH>") + .help(tr!("Path of the file")))) + .subcommand(SubCommand::with_name("diff") + .about(tr!("Display differences between two backup versions")) + .arg(Arg::from_usage("<OLD>") + .help(tr!("Old version, [repository]::backup[::subpath]")) + .validator(|val| validate_repo_path(val, true, Some(true), None))) + .arg(Arg::from_usage("<NEW>") + .help(tr!("New version, [repository]::backup[::subpath]")) + .validator(|val| validate_repo_path(val, true, Some(true), None)))) + .subcommand(SubCommand::with_name("copy") + .alias("cp") + .about(tr!("Create a copy of a backup")) + .arg(Arg::from_usage("<SRC>") + .help(tr!("Existing backup, [repository]::backup")) + .validator(|val| validate_repo_path(val, true, Some(true), Some(false)))) + .arg(Arg::from_usage("<DST>") + .help(tr!("Destination backup, [repository]::backup")) + .validator(|val| validate_repo_path(val, true, Some(true), Some(false))))) + .subcommand(SubCommand::with_name("config") + .about(tr!("Display or change the configuration")) + .arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE]") + .help(tr!("Set the target bundle size in MiB")) + .validator(validate_num)) + .arg(Arg::from_usage("--chunker [CHUNKER]") + .help(tr!("Set the chunker algorithm and target chunk size")) + .validator(validate_chunker)) + .arg(Arg::from_usage("-c --compression [COMPRESSION]") + .help(tr!("Set the compression method and level")) + .validator(validate_compression)) + .arg(Arg::from_usage("-e --encryption [PUBLIC_KEY]") + .help(tr!("The public key to use for encryption")) + .validator(validate_public_key)) + .arg(Arg::from_usage("--hash [HASH]") + .help(tr!("Set the hash method")) + .validator(validate_hash)) + .arg(Arg::from_usage("<REPO>") + .help(tr!("Path of the repository")) + .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) + .subcommand(SubCommand::with_name("genkey") + .about(tr!("Generate a new key pair")) + .arg(Arg::from_usage("-p --password [PASSWORD]") + .help(tr!("Derive the key pair from the given password"))) + .arg(Arg::from_usage("[FILE]") + .help(tr!("Destination file for the keypair")))) + .subcommand(SubCommand::with_name("addkey") + .about(tr!("Add a key pair to the repository")) + .arg(Arg::from_usage("-g --generate") + .help(tr!("Generate a new key pair")) + .conflicts_with("FILE")) + .arg(Arg::from_usage("[set_default] --default -d") + .help(tr!("Set the key pair as default"))) + .arg(Arg::from_usage("-p --password [PASSWORD]") + .help(tr!("Derive the key pair from the given password")) + .requires("generate")) + .arg(Arg::from_usage("[FILE]") + .help(tr!("File containing the keypair")) + .validator(validate_existing_path)) + .arg(Arg::from_usage("<REPO>") + .help(tr!("Path of the
repository")) + .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) + .subcommand(SubCommand::with_name("algotest") + .about(tr!("Test a specific algorithm combination")) + .arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE]") + .help(tr!("Set the target bundle size in MiB")) + .default_value(DEFAULT_BUNDLE_SIZE_STR) + .validator(validate_num)) + .arg(Arg::from_usage("--chunker [CHUNKER]") + .help(tr!("Set the chunker algorithm and target chunk size")) + .default_value(DEFAULT_CHUNKER) .validator(validate_chunker)) .arg(Arg::from_usage("-c --compression [COMPRESSION] 'Set the compression method and level'") + .default_value(DEFAULT_COMPRESSION) .validator(validate_compression)) - .arg(Arg::from_usage("-e --encryption [PUBLIC_KEY] 'The public key to use for encryption'") - .validator(validate_public_key)) - .arg(Arg::from_usage("--hash [HASH] 'Set the hash method'") + .arg(Arg::from_usage("-e --encrypt") + .help(tr!("Generate a keypair and enable encryption"))) + .arg(Arg::from_usage("--hash [HASH]") + .help(tr!("Set the hash method")) + .default_value(DEFAULT_HASH) .validator(validate_hash)) - .arg(Arg::from_usage(" 'Path of the repository'") - .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) - .subcommand(SubCommand::with_name("genkey").about("Generate a new key pair") - .arg(Arg::from_usage("-p --password [PASSWORD] 'Derive the key pair from the given password'")) - .arg(Arg::from_usage("[FILE] 'Destination file for the keypair'"))) - .subcommand(SubCommand::with_name("addkey").about("Add a key pair to the repository") - .arg(Arg::from_usage("-g --generate 'Generate a new key pair'") - .conflicts_with("FILE")) - .arg(Arg::from_usage("[set_default] --default -d 'Set the key pair as default'")) - .arg(Arg::from_usage("-p --password [PASSWORD] 'Derive the key pair from the given password'") - .requires("generate")) - .arg(Arg::from_usage("[FILE] 'File containing the keypair'") - .validator(validate_existing_path)) - .arg(Arg::from_usage(" 'Path of the repository'") - .validator(|val| validate_repo_path(val, true, Some(false), Some(false))))) - .subcommand(SubCommand::with_name("algotest").about("Test a specific algorithm combination") - .arg(Arg::from_usage("[bundle_size] --bundle-size [SIZE] 'Set the target bundle size in MiB'") - .default_value(DEFAULT_BUNDLE_SIZE_STR).validator(validate_num)) - .arg(Arg::from_usage("--chunker [CHUNKER] 'Set the chunker algorithm and target chunk size'") - .default_value(DEFAULT_CHUNKER).validator(validate_chunker)) - .arg(Arg::from_usage("-c --compression [COMPRESSION] 'Set the compression method and level'") - .default_value(DEFAULT_COMPRESSION).validator(validate_compression)) - .arg(Arg::from_usage("-e --encrypt 'Generate a keypair and enable encryption'")) - .arg(Arg::from_usage("--hash [HASH] 'Set the hash method'") - .default_value(DEFAULT_HASH).validator(validate_hash)) - .arg(Arg::from_usage(" 'File with test data'") + .arg(Arg::from_usage("") + .help(tr!("File with test data")) .validator(validate_existing_path))).get_matches(); let verbose_count = args.subcommand() .1 @@ -745,7 +866,7 @@ pub fn parse() -> Result<(log::Level, Arguments), ErrorCode> { } } _ => { - error!("No subcommand given"); + tr_error!("No subcommand given"); return Err(ErrorCode::InvalidArgs); } }; diff --git a/src/cli/mod.rs b/src/cli/mod.rs index 1f185f9..ba74259 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -105,7 +105,7 @@ macro_rules! 
checked { match $expr { Ok(val) => val, Err(err) => { - error!("Failed to {}\n\tcaused by: {}", $msg, err); + tr_error!("Failed to {}\n\tcaused by: {}", $msg, err); return Err($code) } } @@ -122,7 +122,7 @@ fn open_repository(path: &Path) -> Result<Repository, ErrorCode> { fn get_backup(repo: &Repository, backup_name: &str) -> Result<Backup, ErrorCode> { if !repo.has_backup(backup_name) { - error!("A backup with that name does not exist"); + tr_error!("A backup with that name does not exist"); return Err(ErrorCode::NoSuchBackup); } Ok(checked!( @@ -145,11 +145,11 @@ fn find_reference_backup( Ok(backup_map) => backup_map, Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => { - warn!("Some backups could not be read, ignoring them"); + tr_warn!("Some backups could not be read, ignoring them"); backup_map } Err(err) => { - error!("Failed to load backup files: {}", err); + tr_error!("Failed to load backup files: {}", err); return Err(ErrorCode::LoadBackup); } }; @@ -164,41 +164,41 @@ fn find_reference_backup( fn print_backup(backup: &Backup) { if backup.modified { - warn!("This backup has been modified"); + tr_warn!("This backup has been modified"); } - println!( + tr_println!( "Date: {}", Local.timestamp(backup.timestamp, 0).to_rfc2822() ); - println!("Source: {}:{}", backup.host, backup.path); - println!("Duration: {}", to_duration(backup.duration)); - println!( + tr_println!("Source: {}:{}", backup.host, backup.path); + tr_println!("Duration: {}", to_duration(backup.duration)); + tr_println!( "Entries: {} files, {} dirs", backup.file_count, backup.dir_count ); - println!( + tr_println!( "Total backup size: {}", to_file_size(backup.total_data_size) ); - println!( + tr_println!( "Modified data size: {}", to_file_size(backup.changed_data_size) ); let dedup_ratio = backup.deduplicated_data_size as f32 / backup.changed_data_size as f32; - println!( + tr_println!( "Deduplicated size: {}, {:.1}% saved", to_file_size(backup.deduplicated_data_size), (1.0 - dedup_ratio) * 100.0 ); let compress_ratio = backup.encoded_data_size as f32 / backup.deduplicated_data_size as f32; - println!( + tr_println!( "Compressed size: {} in {} bundles, {:.1}% saved", to_file_size(backup.encoded_data_size), backup.bundle_count, (1.0 - compress_ratio) * 100.0 ); - println!( + tr_println!( "Chunk count: {}, avg size: {}", backup.chunk_count, to_file_size(backup.avg_chunk_size as u64) @@ -246,30 +246,30 @@ pub fn format_inode_one_line(inode: &Inode) -> String { } fn print_inode(inode: &Inode) { - println!("Name: {}", inode.name); - println!("Type: {}", inode.file_type); - println!("Size: {}", to_file_size(inode.size)); - println!("Permissions: {:3o}", inode.mode); - println!("User: {}", inode.user); - println!("Group: {}", inode.group); - println!( + tr_println!("Name: {}", inode.name); + tr_println!("Type: {}", inode.file_type); + tr_println!("Size: {}", to_file_size(inode.size)); + tr_println!("Permissions: {:3o}", inode.mode); + tr_println!("User: {}", inode.user); + tr_println!("Group: {}", inode.group); + tr_println!( "Timestamp: {}", Local.timestamp(inode.timestamp, 0).to_rfc2822() ); if let Some(ref target) = inode.symlink_target { - println!("Symlink target: {}", target); + tr_println!("Symlink target: {}", target); } - println!("Cumulative size: {}", to_file_size(inode.cum_size)); - println!("Cumulative file count: {}", inode.cum_files); - println!("Cumulative directory count: {}", inode.cum_dirs); + tr_println!("Cumulative size: {}", to_file_size(inode.cum_size)); + tr_println!("Cumulative file count: {}",
inode.cum_files); + tr_println!("Cumulative directory count: {}", inode.cum_dirs); if let Some(ref children) = inode.children { - println!("Children:"); + tr_println!("Children:"); for name in children.keys() { println!(" - {}", name); } } if !inode.xattrs.is_empty() { - println!("Extended attributes:"); + tr_println!("Extended attributes:"); for (key, value) in &inode.xattrs { if let Ok(value) = str::from_utf8(value) { println!(" - {} = '{}'", key, value); @@ -296,17 +296,17 @@ fn print_backups(backup_map: &HashMap<String, Backup>) { } fn print_repoinfo(info: &RepositoryInfo) { - println!("Bundles: {}", info.bundle_count); - println!("Total size: {}", to_file_size(info.encoded_data_size)); - println!("Uncompressed size: {}", to_file_size(info.raw_data_size)); - println!("Compression ratio: {:.1}%", info.compression_ratio * 100.0); - println!("Chunk count: {}", info.chunk_count); - println!( + tr_println!("Bundles: {}", info.bundle_count); + tr_println!("Total size: {}", to_file_size(info.encoded_data_size)); + tr_println!("Uncompressed size: {}", to_file_size(info.raw_data_size)); + tr_println!("Compression ratio: {:.1}%", info.compression_ratio * 100.0); + tr_println!("Chunk count: {}", info.chunk_count); + tr_println!( "Average chunk size: {}", to_file_size(info.avg_chunk_size as u64) ); let index_usage = info.index_entries as f32 / info.index_capacity as f32; - println!( + tr_println!( "Index: {}, {:.0}% full", to_file_size(info.index_size as u64), index_usage * 100.0 @@ -314,26 +314,26 @@ } fn print_bundle(bundle: &StoredBundle) { - println!("Bundle {}", bundle.info.id); - println!(" - Mode: {:?}", bundle.info.mode); - println!(" - Path: {:?}", bundle.path); - println!( + tr_println!("Bundle {}", bundle.info.id); + tr_println!(" - Mode: {:?}", bundle.info.mode); + tr_println!(" - Path: {:?}", bundle.path); + tr_println!( " - Date: {}", Local.timestamp(bundle.info.timestamp, 0).to_rfc2822() ); - println!(" - Hash method: {:?}", bundle.info.hash_method); + tr_println!(" - Hash method: {:?}", bundle.info.hash_method); let encryption = if let Some((_, ref key)) = bundle.info.encryption { to_hex(key) } else { "none".to_string() }; - println!(" - Encryption: {}", encryption); - println!(" - Chunks: {}", bundle.info.chunk_count); - println!( + tr_println!(" - Encryption: {}", encryption); + tr_println!(" - Chunks: {}", bundle.info.chunk_count); + tr_println!( " - Size: {}", to_file_size(bundle.info.encoded_size as u64) ); - println!( + tr_println!( " - Data size: {}", to_file_size(bundle.info.raw_size as u64) ); @@ -343,7 +343,7 @@ } else { "none".to_string() }; - println!( + tr_println!( " - Compression: {}, ratio: {:.1}%", compression, ratio * 100.0 @@ -351,7 +351,7 @@ } fn print_bundle_one_line(bundle: &BundleInfo) { - println!( + tr_println!( "{}: {:8?}, {:5} chunks, {:8}", bundle.id, bundle.mode, @@ -361,19 +361,19 @@ } fn print_config(config: &Config) { - println!("Bundle size: {}", to_file_size(config.bundle_size as u64)); - println!("Chunker: {}", config.chunker.to_string()); + tr_println!("Bundle size: {}", to_file_size(config.bundle_size as u64)); + tr_println!("Chunker: {}", config.chunker.to_string()); if let Some(ref compression) = config.compression { - println!("Compression: {}", compression.to_string()); + tr_println!("Compression: {}", compression.to_string()); } else { -
@@ -314,26 +314,26 @@ } fn print_bundle(bundle: &StoredBundle) { - println!("Bundle {}", bundle.info.id); - println!(" - Mode: {:?}", bundle.info.mode); - println!(" - Path: {:?}", bundle.path); - println!( + tr_println!("Bundle {}", bundle.info.id); + tr_println!(" - Mode: {:?}", bundle.info.mode); + tr_println!(" - Path: {:?}", bundle.path); + tr_println!( " - Date: {}", Local.timestamp(bundle.info.timestamp, 0).to_rfc2822() ); - println!(" - Hash method: {:?}", bundle.info.hash_method); + tr_println!(" - Hash method: {:?}", bundle.info.hash_method); let encryption = if let Some((_, ref key)) = bundle.info.encryption { to_hex(key) } else { "none".to_string() }; - println!(" - Encryption: {}", encryption); - println!(" - Chunks: {}", bundle.info.chunk_count); - println!( + tr_println!(" - Encryption: {}", encryption); + tr_println!(" - Chunks: {}", bundle.info.chunk_count); + tr_println!( " - Size: {}", to_file_size(bundle.info.encoded_size as u64) ); - println!( + tr_println!( " - Data size: {}", to_file_size(bundle.info.raw_size as u64) ); @@ -343,7 +343,7 @@ } else { "none".to_string() }; - println!( + tr_println!( " - Compression: {}, ratio: {:.1}%", compression, ratio * 100.0 @@ -351,7 +351,7 @@ } fn print_bundle_one_line(bundle: &BundleInfo) { - println!( + tr_println!( "{}: {:8?}, {:5} chunks, {:8}", bundle.id, bundle.mode, @@ -361,19 +361,19 @@ } fn print_config(config: &Config) { - println!("Bundle size: {}", to_file_size(config.bundle_size as u64)); - println!("Chunker: {}", config.chunker.to_string()); + tr_println!("Bundle size: {}", to_file_size(config.bundle_size as u64)); + tr_println!("Chunker: {}", config.chunker.to_string()); if let Some(ref compression) = config.compression { - println!("Compression: {}", compression.to_string()); + tr_println!("Compression: {}", compression.to_string()); } else { - println!("Compression: none"); + tr_println!("Compression: none"); } if let Some(ref encryption) = config.encryption { - println!("Encryption: {}", to_hex(&encryption.1[..])); + tr_println!("Encryption: {}", to_hex(&encryption.1[..])); } else { - println!("Encryption: none"); + tr_println!("Encryption: none"); } - println!("Hash method: {}", config.hash.name()); + tr_println!("Hash method: {}", config.hash.name()); } fn print_analysis(analysis: &HashMap<u32, BundleAnalysis>) { @@ -390,17 +390,17 @@ } } } - println!("Total bundle size: {}", to_file_size(data_total as u64)); + tr_println!("Total bundle size: {}", to_file_size(data_total as u64)); let used = data_total - reclaim_space[10]; - println!( + tr_println!( "Space used: {}, {:.1} %", to_file_size(used as u64), used as f32 / data_total as f32 * 100.0 ); - println!("Reclaimable space (depending on vacuum ratio)"); + tr_println!("Reclaimable space (depending on vacuum ratio)"); #[allow(unknown_lints, needless_range_loop)] for i in 0..11 { - println!( + tr_println!( " - ratio={:3}: {:>10}, {:4.1} %, rewriting {:>10}", i * 10, to_file_size(reclaim_space[i] as u64), @@ -415,7 +415,7 @@ pub fn run() -> Result<(), ErrorCode> { let (log_level, args) = try!(args::parse()); if let Err(err) = logger::init(log_level) { - println!("Failed to initialize the logger: {}", err); + tr_println!("Failed to initialize the logger: {}", err); return Err(ErrorCode::InitializeLogger); } match args { @@ -429,7 +429,7 @@ remote_path } => { if !Path::new(&remote_path).is_absolute() { - error!("The remote path of a repository must be absolute."); + tr_error!("The remote path of a repository must be absolute."); return Err(ErrorCode::InvalidArgs); } let mut repo = checked!( @@ -449,9 +449,9 @@ ); if encryption { let (public, secret) = Crypto::gen_keypair(); - info!("Created the following key pair"); - println!("public: {}", to_hex(&public[..])); - println!("secret: {}", to_hex(&secret[..])); + tr_info!("Created the following key pair"); + tr_println!("public: {}", to_hex(&public[..])); + tr_println!("secret: {}", to_hex(&secret[..])); repo.set_encryption(Some(&public)); checked!( repo.register_key(public, secret), @@ -459,7 +459,7 @@ ErrorCode::AddKey ); checked!(repo.save_config(), "save config", ErrorCode::SaveConfig); - warn!( + tr_warn!( "Please store this key pair in a secure location before using the repository" ); println!(); @@ -480,11 +480,11 @@ } => { let mut repo = try!(open_repository(&repo_path)); if repo.has_backup(&backup_name) { - error!("A backup with that name already exists"); + tr_error!("A backup with that name already exists"); return Err(ErrorCode::BackupAlreadyExists); } if src_path == "-" && !tar { - error!("Reading from stdin requires --tar"); + tr_error!("Reading from stdin requires --tar"); return Err(ErrorCode::InvalidArgs); } let mut reference_backup = None; @@ -500,9 +500,9 @@ reference_backup = try!(find_reference_backup(&repo, &src_path)); } if let Some(&(ref name, _)) = reference_backup.as_ref() { - info!("Using backup {} as reference", name); + tr_info!("Using backup {} as reference", name); } else { - info!("No reference backup found, doing a full scan instead"); + tr_info!("No reference backup found, doing a full scan instead"); } } let reference_backup = reference_backup.map(|(_, backup)| backup); @@
-569,15 +569,15 @@ pub fn run() -> Result<(), ErrorCode> { }; let backup = match result { Ok(backup) => { - info!("Backup finished"); + tr_info!("Backup finished"); backup } Err(RepositoryError::Backup(BackupError::FailedPaths(backup, _failed_paths))) => { - warn!("Some files are missing from the backup"); + tr_warn!("Some files are missing from the backup"); backup } Err(err) => { - error!("Backup failed: {}", err); + tr_error!("Backup failed: {}", err); return Err(ErrorCode::BackupRun); } }; @@ -623,7 +623,7 @@ pub fn run() -> Result<(), ErrorCode> { ErrorCode::RestoreRun ); } - info!("Restore finished"); + tr_info!("Restore finished"); } Arguments::Copy { repo_path_src, @@ -632,12 +632,12 @@ pub fn run() -> Result<(), ErrorCode> { backup_name_dst } => { if repo_path_src != repo_path_dst { - error!("Can only run copy on same repository"); + tr_error!("Can only run copy on same repository"); return Err(ErrorCode::InvalidArgs); } let mut repo = try!(open_repository(&repo_path_src)); if repo.has_backup(&backup_name_dst) { - error!("A backup with that name already exists"); + tr_error!("A backup with that name already exists"); return Err(ErrorCode::BackupAlreadyExists); } let backup = try!(get_backup(&repo, &backup_name_src)); @@ -666,7 +666,7 @@ pub fn run() -> Result<(), ErrorCode> { "save backup file", ErrorCode::SaveBackup ); - info!("The backup subpath has been deleted, run vacuum to reclaim space"); + tr_info!("The backup subpath has been deleted, run vacuum to reclaim space"); } else if repo.layout.backups_path().join(&backup_name).is_dir() { let backups = checked!( repo.get_backups(&backup_name), @@ -682,7 +682,7 @@ pub fn run() -> Result<(), ErrorCode> { ); } } else { - error!("Denying to remove multiple backups (use --force):"); + tr_error!("Denying to remove multiple backups (use --force):"); for name in backups.keys() { println!(" - {}/{}", backup_name, name); } @@ -693,7 +693,7 @@ pub fn run() -> Result<(), ErrorCode> { "delete backup", ErrorCode::RemoveRun ); - info!("The backup has been deleted, run vacuum to reclaim space"); + tr_info!("The backup has been deleted, run vacuum to reclaim space"); } } Arguments::Prune { @@ -707,7 +707,7 @@ pub fn run() -> Result<(), ErrorCode> { } => { let mut repo = try!(open_repository(&repo_path)); if daily + weekly + monthly + yearly == 0 { - error!("This would remove all those backups"); + tr_error!("This would remove all those backups"); return Err(ErrorCode::UnsafeArgs); } checked!( @@ -716,7 +716,7 @@ pub fn run() -> Result<(), ErrorCode> { ErrorCode::PruneRun ); if !force { - info!("Run with --force to actually execute this command"); + tr_info!("Run with --force to actually execute this command"); } } Arguments::Vacuum { @@ -733,10 +733,10 @@ pub fn run() -> Result<(), ErrorCode> { ErrorCode::VacuumRun ); if !force { - info!("Run with --force to actually execute this command"); + tr_info!("Run with --force to actually execute this command"); } else { let info_after = repo.info(); - info!( + tr_info!( "Reclaimed {}", to_file_size(info_before.encoded_data_size - info_after.encoded_data_size) ); @@ -790,7 +790,7 @@ pub fn run() -> Result<(), ErrorCode> { ) } repo.set_clean(); - info!("Integrity verified") + tr_info!("Integrity verified") } Arguments::List { repo_path, @@ -830,11 +830,11 @@ pub fn run() -> Result<(), ErrorCode> { let backup_map = match backup_map { Ok(backup_map) => backup_map, Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => { - warn!("Some backups could not be read, ignoring 
them"); + tr_warn!("Some backups could not be read, ignoring them"); backup_map } Err(err) => { - error!("Failed to load backup files: {}", err); + tr_error!("Failed to load backup files: {}", err); return Err(ErrorCode::LoadBackup); } }; @@ -904,8 +904,8 @@ pub fn run() -> Result<(), ErrorCode> { ErrorCode::FuseMount ) }; - info!("Mounting the filesystem..."); - info!( + tr_info!("Mounting the filesystem..."); + tr_info!( "Please unmount the filesystem via 'fusermount -u {}' when done.", mount_point ); @@ -937,7 +937,7 @@ pub fn run() -> Result<(), ErrorCode> { if let Some(bundle) = repo.get_bundle(&bundle_id) { print_bundle(bundle); } else { - error!("No such bundle"); + tr_error!("No such bundle"); return Err(ErrorCode::LoadBundle); } } @@ -951,7 +951,7 @@ pub fn run() -> Result<(), ErrorCode> { "import repository", ErrorCode::ImportRun ); - info!("Import finished"); + tr_info!("Import finished"); } Arguments::Versions { repo_path, path } => { let mut repo = try!(open_repository(&repo_path)); @@ -968,7 +968,7 @@ pub fn run() -> Result<(), ErrorCode> { found = true; } if !found { - info!("No versions of that file were found."); + tr_info!("No versions of that file were found."); } } Arguments::Diff { @@ -980,7 +980,7 @@ pub fn run() -> Result<(), ErrorCode> { inode_new } => { if repo_path_old != repo_path_new { - error!("Can only run diff on same repository"); + tr_error!("Can only run diff on same repository"); return Err(ErrorCode::InvalidArgs); } let mut repo = try!(open_repository(&repo_path_old)); @@ -1015,7 +1015,7 @@ pub fn run() -> Result<(), ErrorCode> { ); } if diffs.is_empty() { - info!("No differences found"); + tr_info!("No differences found"); } } Arguments::Config { @@ -1033,7 +1033,7 @@ pub fn run() -> Result<(), ErrorCode> { changed = true; } if let Some(chunker) = chunker { - warn!( + tr_warn!( "Changing the chunker makes it impossible to use existing data for deduplication" ); repo.config.chunker = chunker; @@ -1048,7 +1048,7 @@ pub fn run() -> Result<(), ErrorCode> { changed = true; } if let Some(hash) = hash { - warn!( + tr_warn!( "Changing the hash makes it impossible to use existing data for deduplication" ); repo.config.hash = hash; @@ -1056,7 +1056,7 @@ pub fn run() -> Result<(), ErrorCode> { } if changed { checked!(repo.save_config(), "save config", ErrorCode::SaveConfig); - info!("The configuration has been updated."); + tr_info!("The configuration has been updated."); } else { print_config(&repo.config); } @@ -1066,9 +1066,9 @@ pub fn run() -> Result<(), ErrorCode> { None => Crypto::gen_keypair(), Some(ref password) => Crypto::keypair_from_password(password), }; - info!("Created the following key pair"); - println!("public: {}", to_hex(&public[..])); - println!("secret: {}", to_hex(&secret[..])); + tr_info!("Created the following key pair"); + tr_println!("public: {}", to_hex(&public[..])); + tr_println!("secret: {}", to_hex(&secret[..])); if let Some(file) = file { checked!( Crypto::save_keypair_to_file(&public, &secret, file), @@ -1091,13 +1091,13 @@ pub fn run() -> Result<(), ErrorCode> { ErrorCode::LoadKey ) } else { - info!("Created the following key pair"); + tr_info!("Created the following key pair"); let (public, secret) = match password { None => Crypto::gen_keypair(), Some(ref password) => Crypto::keypair_from_password(password), }; - println!("public: {}", to_hex(&public[..])); - println!("secret: {}", to_hex(&secret[..])); + tr_println!("public: {}", to_hex(&public[..])); + tr_println!("secret: {}", to_hex(&secret[..])); (public, secret) }; 
checked!( @@ -1108,7 +1108,7 @@ pub fn run() -> Result<(), ErrorCode> { if set_default { repo.set_encryption(Some(&public)); checked!(repo.save_config(), "save config", ErrorCode::SaveConfig); - warn!( + tr_warn!( "Please store this key pair in a secure location before using the repository" ); } diff --git a/src/main.rs b/src/main.rs index 1582f45..a5ca168 100644 --- a/src/main.rs +++ b/src/main.rs @@ -40,6 +40,7 @@ extern crate index; extern crate chunking; #[macro_use] extern crate runtime_fmt; +extern crate locale_config; #[macro_use] mod translation; pub mod util; diff --git a/src/mount.rs b/src/mount.rs index a1b5ded..10a0934 100644 --- a/src/mount.rs +++ b/src/mount.rs @@ -524,10 +524,10 @@ impl<'a> fuse::Filesystem for FuseFilesystem<'a> { /// Read data /// Read should send exactly the number of bytes requested except on EOF or error, /// otherwise the rest of the data will be substituted with zeroes. An exception to - /// this is when the file has been opened in ‘direct_io’ mode, in which case the + /// this is when the file has been opened in direct_io mode, in which case the /// return value of the read system call will reflect the return value of this /// operation. fh will contain the value set by the open method, or will be undefined - /// if the open method didn’t set any value. + /// if the open method didn't set any value. fn read( &mut self, _req: &fuse::Request, @@ -608,7 +608,7 @@ impl<'a> fuse::Filesystem for FuseFilesystem<'a> { /// call there will be exactly one release call. The filesystem may reply with an /// error, but error values are not returned to close() or munmap() which triggered /// the release. fh will contain the value set by the open method, or will be undefined - /// if the open method didn’t set any value. flags will contain the same flags as for + /// if the open method didn't set any value. flags will contain the same flags as for /// open.
fn release( &mut self, diff --git a/src/prelude.rs b/src/prelude.rs index 149faa5..5f1d6a4 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -7,6 +7,7 @@ pub use repository::{Repository, Backup, Config, RepositoryError, RepositoryInfo, FileData, DiffType, InodeError, RepositoryLayout, Location}; pub use index::{Index, IndexError}; pub use mount::FuseFilesystem; +pub use translation::CowStr; pub use serde::{Serialize, Deserialize}; diff --git a/src/repository/backup.rs b/src/repository/backup.rs index e7345f4..48ce955 100644 --- a/src/repository/backup.rs +++ b/src/repository/backup.rs @@ -15,12 +15,12 @@ quick_error!{ #[allow(unknown_lints,large_enum_variant)] pub enum BackupError { FailedPaths(backup: Backup, failed: Vec<PathBuf>) { - description("Some paths could not be backed up") - display("Backup error: some paths could not be backed up") + description(tr!("Some paths could not be backed up")) + display("{}", tr_format!("Backup error: some paths could not be backed up")) } RemoveRoot { - description("The root of a backup can not be removed") - display("Backup error: the root of a backup can not be removed") + description(tr!("The root of a backup can not be removed")) + display("{}", tr_format!("Backup error: the root of a backup can not be removed")) } } } @@ -110,7 +110,7 @@ Ok(backup_map) => backup_map, Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => { - warn!("Some backups could not be read, ignoring them"); + tr_warn!("Some backups could not be read, ignoring them"); backup_map } Err(err) => return Err(err), @@ -239,7 +239,7 @@ user.name().to_string() ); } else { - warn!("Failed to retrieve name of user {}", inode.user); + tr_warn!("Failed to retrieve name of user {}", inode.user); } } if !backup.group_names.contains_key(&inode.group) { @@ -249,7 +249,7 @@ group.name().to_string() ); } else { - warn!("Failed to retrieve name of group {}", inode.group); + tr_warn!("Failed to retrieve name of group {}", inode.group); } } let mut meta_size = 0; diff --git a/src/repository/backup_file.rs b/src/repository/backup_file.rs index fce7fdf..415a07a 100644 --- a/src/repository/backup_file.rs +++ b/src/repository/backup_file.rs @@ -15,49 +15,49 @@ quick_error!{ pub enum BackupFileError { Read(err: io::Error, path: PathBuf) { cause(err) - description("Failed to read backup") - display("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to read backup")) + display("{}", tr_format!("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err)) } Write(err: io::Error, path: PathBuf) { cause(err) - description("Failed to write backup") - display("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to write backup")) + display("{}", tr_format!("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err)) } Decode(err: msgpack::DecodeError, path: PathBuf) { cause(err) context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf()) - description("Failed to decode backup") - display("Backup file error: failed to decode backup of {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to decode backup")) + display("{}", tr_format!("Backup file error: failed to decode backup of {:?}\n\tcaused by: {}", path, err)) } Encode(err: msgpack::EncodeError, path: PathBuf) { cause(err) context(path: &'a Path, err: msgpack::EncodeError) -> (err,
path.to_path_buf()) - description("Failed to encode backup") - display("Backup file error: failed to encode backup of {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to encode backup")) + display("{}", tr_format!("Backup file error: failed to encode backup of {:?}\n\tcaused by: {}", path, err)) } WrongHeader(path: PathBuf) { - description("Wrong header") - display("Backup file error: wrong header on backup {:?}", path) + description(tr!("Wrong header")) + display("{}", tr_format!("Backup file error: wrong header on backup {:?}", path)) } UnsupportedVersion(path: PathBuf, version: u8) { - description("Wrong version") - display("Backup file error: unsupported version on backup {:?}: {}", path, version) + description(tr!("Wrong version")) + display("{}", tr_format!("Backup file error: unsupported version on backup {:?}: {}", path, version)) } Decryption(err: EncryptionError, path: PathBuf) { cause(err) context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf()) - description("Decryption failed") - display("Backup file error: decryption failed on backup {:?}\n\tcaused by: {}", path, err) + description(tr!("Decryption failed")) + display("{}", tr_format!("Backup file error: decryption failed on backup {:?}\n\tcaused by: {}", path, err)) } Encryption(err: EncryptionError) { from() cause(err) - description("Encryption failed") - display("Backup file error: encryption failed\n\tcaused by: {}", err) + description(tr!("Encryption failed")) + display("{}", tr_format!("Backup file error: encryption failed\n\tcaused by: {}", err)) } PartialBackupsList(partial: HashMap<String, Backup>, failed: Vec<PathBuf>) { - description("Some backups could not be loaded") - display("Backup file error: some backups could not be loaded: {:?}", failed) + description(tr!("Some backups could not be loaded")) + display("{}", tr_format!("Backup file error: some backups could not be loaded: {:?}", failed)) } } } @@ -180,7 +180,7 @@ impl Backup { let base_path = path.as_ref(); let path = path.as_ref(); if !path.exists() { - debug!("Backup root folder does not exist"); + tr_debug!("Backup root folder does not exist"); return Ok(backups); } let mut paths = vec![path.to_path_buf()]; diff --git a/src/repository/bundle_map.rs b/src/repository/bundle_map.rs index c6a2553..b5723f3 100644 --- a/src/repository/bundle_map.rs +++ b/src/repository/bundle_map.rs @@ -16,24 +16,24 @@ quick_error!{ Io(err: io::Error) { from() cause(err) - description("Failed to read/write bundle map") + description(tr!("Failed to read/write bundle map")) } Decode(err: msgpack::DecodeError) { from() cause(err) - description("Failed to decode bundle map") + description(tr!("Failed to decode bundle map")) } Encode(err: msgpack::EncodeError) { from() cause(err) - description("Failed to encode bundle map") + description(tr!("Failed to encode bundle map")) } WrongHeader { - description("Wrong header") + description(tr!("Wrong header")) } WrongVersion(version: u8) { - description("Wrong version") - display("Wrong version: {}", version) + description(tr!("Wrong version")) + display("{}", tr_format!("Wrong version: {}", version)) } } }
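The WrongHeader/WrongVersion variants here, like their counterparts in BundleCacheError and BackupFileError, guard the magic-plus-version preamble that each of these on-disk files carries. A hedged sketch of that check, with made-up magic and version constants (the real files use their own values):

use std::io::{self, Read};

// Hypothetical values; each zvault file type defines its own magic and version.
const MAGIC: [u8; 2] = [0xCA, 0xFE];
const VERSION: u8 = 1;

#[derive(Debug)]
enum MapError {
    Io(io::Error),
    WrongHeader,
    UnsupportedVersion(u8),
}

fn read_header<R: Read>(mut src: R) -> Result<(), MapError> {
    let mut header = [0u8; 3];
    src.read_exact(&mut header).map_err(MapError::Io)?;
    if header[..2] != MAGIC {
        return Err(MapError::WrongHeader); // not one of our files
    }
    if header[2] != VERSION {
        return Err(MapError::UnsupportedVersion(header[2]));
    }
    Ok(())
}

fn main() {
    assert!(read_header(&[0xCA, 0xFE, 1][..]).is_ok());
    assert!(read_header(&[0x00, 0x00, 1][..]).is_err()); // WrongHeader
    assert!(read_header(&[0xCA, 0xFE, 9][..]).is_err()); // UnsupportedVersion(9)
}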
diff --git a/src/repository/config.rs b/src/repository/config.rs index 8217b40..850f65c 100644 --- a/src/repository/config.rs +++ b/src/repository/config.rs @@ -16,14 +16,14 @@ quick_error!{ } Parse(reason: &'static str) { from() - description("Failed to parse config") - display("Failed to parse config: {}", reason) + description(tr!("Failed to parse config")) + display("{}", tr_format!("Failed to parse config: {}", reason)) } Yaml(err: serde_yaml::Error) { from() cause(err) - description("Yaml format error") - display("Yaml format error: {}", err) + description(tr!("Yaml format error")) + display("{}", tr_format!("Yaml format error: {}", err)) } } } @@ -79,7 +79,7 @@ impl ChunkerType { impl Compression { #[inline] fn from_yaml(yaml: &str) -> Result<Self, ConfigError> { - Compression::from_string(yaml).map_err(|_| ConfigError::Parse("Invalid codec")) + Compression::from_string(yaml).map_err(|_| ConfigError::Parse(tr!("Invalid codec"))) } #[inline] @@ -92,7 +92,7 @@ impl Compression { impl EncryptionMethod { #[inline] fn from_yaml(yaml: &str) -> Result<Self, ConfigError> { - EncryptionMethod::from_string(yaml).map_err(|_| ConfigError::Parse("Invalid codec")) + EncryptionMethod::from_string(yaml).map_err(|_| ConfigError::Parse(tr!("Invalid codec"))) } #[inline] @@ -186,7 +186,7 @@ impl Config { let encryption = if let Some(e) = yaml.encryption { let method = try!(EncryptionMethod::from_yaml(&e.method)); let key = try!(parse_hex(&e.key).map_err(|_| { - ConfigError::Parse("Invalid public key") + ConfigError::Parse(tr!("Invalid public key")) })); Some((method, key.into())) } else { diff --git a/src/repository/error.rs b/src/repository/error.rs index 9bab880..2740e2d 100644 --- a/src/repository/error.rs +++ b/src/repository/error.rs @@ -15,95 +15,95 @@ quick_error!{ #[allow(unknown_lints,large_enum_variant)] pub enum RepositoryError { NoRemote { - description("Remote storage not found") - display("Repository error: The remote storage has not been found, may be it needs to be mounted?") + description(tr!("Remote storage not found")) + display("{}", tr_format!("Repository error: The remote storage has not been found, maybe it needs to be mounted?")) } Index(err: IndexError) { from() cause(err) - description("Index error") - display("Repository error: index error\n\tcaused by: {}", err) + description(tr!("Index error")) + display("{}", tr_format!("Repository error: index error\n\tcaused by: {}", err)) } BundleDb(err: BundleDbError) { from() cause(err) - description("Bundle error") - display("Repository error: bundle db error\n\tcaused by: {}", err) + description(tr!("Bundle error")) + display("{}", tr_format!("Repository error: bundle db error\n\tcaused by: {}", err)) } BundleWriter(err: BundleWriterError) { from() cause(err) - description("Bundle write error") - display("Repository error: failed to write to new bundle\n\tcaused by: {}", err) + description(tr!("Bundle write error")) + display("{}", tr_format!("Repository error: failed to write to new bundle\n\tcaused by: {}", err)) } BackupFile(err: BackupFileError) { from() cause(err) - description("Backup file error") - display("Repository error: backup file error\n\tcaused by: {}", err) + description(tr!("Backup file error")) + display("{}", tr_format!("Repository error: backup file error\n\tcaused by: {}", err)) } Chunker(err: ChunkerError) { from() cause(err) - description("Chunker error") - display("Repository error: failed to chunk data\n\tcaused by: {}", err) + description(tr!("Chunker error")) + display("{}", tr_format!("Repository error: failed to chunk data\n\tcaused by: {}", err)) } Config(err: ConfigError) { from() cause(err) - description("Configuration error") - display("Repository error: configuration error\n\tcaused by: {}", err) + description(tr!("Configuration error")) + display("{}", tr_format!("Repository error: configuration error\n\tcaused by: {}", err)) } Inode(err: InodeError) { from() cause(err) - description("Inode error") - display("Repository error: inode error\n\tcaused by: 
{}", err) + description(tr!("Inode error")) + display("{}", tr_format!("Repository error: inode error\n\tcaused by: {}", err)) } LoadKeys(err: EncryptionError) { from() cause(err) - description("Failed to load keys") - display("Repository error: failed to load keys\n\tcaused by: {}", err) + description(tr!("Failed to load keys")) + display("{}", tr_format!("Repository error: failed to load keys\n\tcaused by: {}", err)) } BundleMap(err: BundleMapError) { from() cause(err) - description("Bundle map error") - display("Repository error: bundle map error\n\tcaused by: {}", err) + description(tr!("Bundle map error")) + display("{}", tr_format!("Repository error: bundle map error\n\tcaused by: {}", err)) } Integrity(err: IntegrityError) { from() cause(err) - description("Integrity error") - display("Repository error: integrity error\n\tcaused by: {}", err) + description(tr!("Integrity error")) + display("{}", tr_format!("Repository error: integrity error\n\tcaused by: {}", err)) } Dirty { - description("Dirty repository") - display("The repository is dirty, please run a check") + description(tr!("Dirty repository")) + display("{}", tr_format!("The repository is dirty, please run a check")) } Backup(err: BackupError) { from() cause(err) - description("Failed to create a backup") - display("Repository error: failed to create backup\n\tcaused by: {}", err) + description(tr!("Failed to create a backup")) + display("{}", tr_format!("Repository error: failed to create backup\n\tcaused by: {}", err)) } Lock(err: LockError) { from() cause(err) - description("Failed to obtain lock") - display("Repository error: failed to obtain lock\n\tcaused by: {}", err) + description(tr!("Failed to obtain lock")) + display("{}", tr_format!("Repository error: failed to obtain lock\n\tcaused by: {}", err)) } Io(err: io::Error) { from() cause(err) - description("IO error") - display("IO error: {}", err) + description(tr!("IO error")) + display("{}", tr_format!("IO error: {}", err)) } NoSuchFileInBackup(backup: Backup, path: PathBuf) { - description("No such file in backup") - display("The backup does not contain the file {:?}", path) + description(tr!("No such file in backup")) + display("{}", tr_format!("The backup does not contain the file {:?}", path)) } } } diff --git a/src/repository/integrity.rs b/src/repository/integrity.rs index 1bc957c..f95c303 100644 --- a/src/repository/integrity.rs +++ b/src/repository/integrity.rs @@ -12,36 +12,36 @@ quick_error!{ #[derive(Debug)] pub enum IntegrityError { MissingChunk(hash: Hash) { - description("Missing chunk") - display("Missing chunk: {}", hash) + description(tr!("Missing chunk")) + display("{}", tr_format!("Missing chunk: {}", hash)) } MissingBundleId(id: u32) { - description("Missing bundle") - display("Missing bundle: {}", id) + description(tr!("Missing bundle")) + display("{}", tr_format!("Missing bundle: {}", id)) } MissingBundle(id: BundleId) { - description("Missing bundle") - display("Missing bundle: {}", id) + description(tr!("Missing bundle")) + display("{}", tr_format!("Missing bundle: {}", id)) } NoSuchChunk(bundle: BundleId, chunk: u32) { - description("No such chunk") - display("Bundle {} does not contain the chunk {}", bundle, chunk) + description(tr!("No such chunk")) + display("{}", tr_format!("Bundle {} does not contain the chunk {}", bundle, chunk)) } RemoteBundlesNotInMap { - description("Remote bundles missing from map") + description(tr!("Remote bundles missing from map")) } MapContainsDuplicates { - description("Map contains duplicates") + 
description(tr!("Map contains duplicates")) } BrokenInode(path: PathBuf, err: Box<RepositoryError>) { cause(err) - description("Broken inode") - display("Broken inode: {:?}\n\tcaused by: {}", path, err) + description(tr!("Broken inode")) + display("{}", tr_format!("Broken inode: {:?}\n\tcaused by: {}", path, err)) } MissingInodeData(path: PathBuf, err: Box<RepositoryError>) { cause(err) - description("Missing inode data") - display("Missing inode data in: {:?}\n\tcaused by: {}", path, err) + description(tr!("Missing inode data")) + display("{}", tr_format!("Missing inode data in: {:?}\n\tcaused by: {}", path, err)) } } } @@ -49,7 +49,7 @@ impl Repository { fn check_index_chunks(&self) -> Result<(), RepositoryError> { let mut progress = ProgressBar::new(self.index.len() as u64); - progress.message("checking index: "); + progress.message(tr!("checking index: ")); progress.set_max_refresh_rate(Some(Duration::from_millis(100))); for (count, (_hash, location)) in self.index.iter().enumerate() { // Lookup bundle id from map @@ -58,12 +58,12 @@ let bundle = if let Some(bundle) = self.bundles.get_bundle_info(&bundle_id) { bundle } else { - progress.finish_print("checking index: done."); + progress.finish_print(tr!("checking index: done.")); return Err(IntegrityError::MissingBundle(bundle_id.clone()).into()); }; // Get chunk from bundle if bundle.info.chunk_count <= location.chunk as usize { - progress.finish_print("checking index: done."); + progress.finish_print(tr!("checking index: done.")); return Err( IntegrityError::NoSuchChunk(bundle_id.clone(), location.chunk).into() ); @@ -72,7 +72,7 @@ progress.set(count as u64); } } - progress.finish_print("checking index: done."); + progress.finish_print(tr!("checking index: done.")); Ok(()) } @@ -135,12 +135,12 @@ // Mark the content chunks as used if let Err(err) = self.check_inode_contents(&inode, checked) { if repair { - warn!( + tr_warn!( "Problem detected: data of {:?} is corrupt\n\tcaused by: {}", path, err ); - info!("Removing inode data"); + tr_info!("Removing inode data"); inode.data = Some(FileData::Inline(vec![].into())); inode.size = 0; modified = true; @@ -160,12 +160,12 @@ } Err(err) => { if repair { - warn!( + tr_warn!( "Problem detected: inode {:?} is corrupt\n\tcaused by: {}", path.join(name), err ); - info!("Removing broken inode from backup"); + tr_info!("Removing broken inode from backup"); removed.push(name.to_string()); modified = true; } else { @@ -187,7 +187,7 @@ } fn evacuate_broken_backup(&self, name: &str) -> Result<(), RepositoryError> { - warn!( + tr_warn!( "The backup {} was corrupted and needed to be modified.", name ); @@ -202,7 +202,7 @@ try!(fs::copy(&src, &dst)); try!(fs::remove_file(&src)); } - info!("The original backup was renamed to {:?}", dst); + tr_info!("The original backup was renamed to {:?}", dst); Ok(()) } @@ -219,7 +219,7 @@ } else { None }; - info!("Checking backup..."); + tr_info!("Checking backup..."); let mut checked = Bitmap::new(self.index.capacity()); match self.check_subtree( Path::new("").to_path_buf(), @@ -237,7 +237,7 @@ } Err(err) => { if repair { - warn!( + tr_warn!( "The root of the backup {} has been corrupted\n\tcaused by: {}", name, err @@ -264,19 +264,19 @@ } else { None }; - info!("Checking inode..."); + tr_info!("Checking inode..."); let mut checked = Bitmap::new(self.index.capacity()); let mut inodes = try!(self.get_backup_path(backup, path));
let mut inode = inodes.pop().unwrap(); let mut modified = false; if let Err(err) = self.check_inode_contents(&inode, &mut checked) { if repair { - warn!( + tr_warn!( "Problem detected: data of {:?} is corrupt\n\tcaused by: {}", path, err ); - info!("Removing inode data"); + tr_info!("Removing inode data"); inode.data = Some(FileData::Inline(vec![].into())); inode.size = 0; modified = true; @@ -297,12 +297,12 @@ impl Repository { } Err(err) => { if repair { - warn!( + tr_warn!( "Problem detected: inode {:?} is corrupt\n\tcaused by: {}", path.join(name), err ); - info!("Removing broken inode from backup"); + tr_info!("Removing broken inode from backup"); removed.push(name.to_string()); modified = true; } else { @@ -338,19 +338,19 @@ impl Repository { } else { None }; - info!("Checking backups..."); + tr_info!("Checking backups..."); let mut checked = Bitmap::new(self.index.capacity()); let backup_map = match self.get_all_backups() { Ok(backup_map) => backup_map, Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => { - warn!("Some backups could not be read, ignoring them"); + tr_warn!("Some backups could not be read, ignoring them"); backup_map } Err(err) => return Err(err), }; for (name, mut backup) in - ProgressIter::new("checking backups", backup_map.len(), backup_map.into_iter()) + ProgressIter::new(tr!("checking backups"), backup_map.len(), backup_map.into_iter()) { let path = format!("{}::", name); match self.check_subtree( @@ -369,7 +369,7 @@ impl Repository { } Err(err) => { if repair { - warn!( + tr_warn!( "The root of the backup {} has been corrupted\n\tcaused by: {}", name, err @@ -385,12 +385,12 @@ impl Repository { } pub fn check_repository(&mut self, repair: bool) -> Result<(), RepositoryError> { - info!("Checking repository integrity..."); + tr_info!("Checking repository integrity..."); let mut rebuild = false; for (_id, bundle_id) in self.bundle_map.bundles() { if self.bundles.get_bundle_info(&bundle_id).is_none() { if repair { - warn!( + tr_warn!( "Problem detected: bundle map contains unknown bundle {}", bundle_id ); @@ -402,7 +402,7 @@ impl Repository { } if self.bundle_map.len() < self.bundles.len() { if repair { - warn!("Problem detected: bundle map does not contain all remote bundles"); + tr_warn!("Problem detected: bundle map does not contain all remote bundles"); rebuild = true; } else { return Err(IntegrityError::RemoteBundlesNotInMap.into()); @@ -410,7 +410,7 @@ impl Repository { } if self.bundle_map.len() > self.bundles.len() { if repair { - warn!("Problem detected: bundle map contains bundles multiple times"); + tr_warn!("Problem detected: bundle map contains bundles multiple times"); rebuild = true; } else { return Err(IntegrityError::MapContainsDuplicates.into()); @@ -424,7 +424,7 @@ impl Repository { } pub fn rebuild_bundle_map(&mut self) -> Result<(), RepositoryError> { - info!("Rebuilding bundle map from bundles"); + tr_info!("Rebuilding bundle map from bundles"); self.bundle_map = BundleMap::create(); for bundle in self.bundles.list_bundles() { let bundle_id = match bundle.mode { @@ -443,11 +443,11 @@ impl Repository { } pub fn rebuild_index(&mut self) -> Result<(), RepositoryError> { - info!("Rebuilding index from bundles"); + tr_info!("Rebuilding index from bundles"); self.index.clear(); let mut bundles = self.bundle_map.bundles(); bundles.sort_by_key(|&(_, ref v)| v.clone()); - for (num, id) in ProgressIter::new("Rebuilding index from bundles", bundles.len(), bundles.into_iter()) { + for (num, id) in 
ProgressIter::new(tr!("Rebuilding index from bundles"), bundles.len(), bundles.into_iter()) { let chunks = try!(self.bundles.get_chunk_list(&id)); for (i, (hash, _len)) in chunks.into_inner().into_iter().enumerate() { try!(self.index.set( @@ -467,10 +467,10 @@ impl Repository { if repair { try!(self.write_mode()); } - info!("Checking index integrity..."); + tr_info!("Checking index integrity..."); if let Err(err) = self.index.check() { if repair { - warn!( + tr_warn!( "Problem detected: index was corrupted\n\tcaused by: {}", err ); @@ -479,10 +479,10 @@ impl Repository { return Err(err.into()); } } - info!("Checking index entries..."); + tr_info!("Checking index entries..."); if let Err(err) = self.check_index_chunks() { if repair { - warn!( + tr_warn!( "Problem detected: index entries were inconsistent\n\tcaused by: {}", err ); @@ -499,10 +499,10 @@ impl Repository { if repair { try!(self.write_mode()); } - info!("Checking bundle integrity..."); + tr_info!("Checking bundle integrity..."); if try!(self.bundles.check(full, repair)) { // Some bundles got repaired - warn!("Some bundles have been rewritten, please remove the broken bundles manually."); + tr_warn!("Some bundles have been rewritten, please remove the broken bundles manually."); try!(self.rebuild_bundle_map()); try!(self.rebuild_index()); } diff --git a/src/repository/metadata.rs b/src/repository/metadata.rs index 0b24e0b..8ea1956 100644 --- a/src/repository/metadata.rs +++ b/src/repository/metadata.rs @@ -19,44 +19,44 @@ quick_error!{ #[derive(Debug)] pub enum InodeError { UnsupportedFiletype(path: PathBuf) { - description("Unsupported file type") - display("Inode error: file {:?} has an unsupported type", path) + description(tr!("Unsupported file type")) + display("{}", tr_format!("Inode error: file {:?} has an unsupported type", path)) } ReadMetadata(err: io::Error, path: PathBuf) { cause(err) - description("Failed to obtain metadata for file") - display("Inode error: failed to obtain metadata for file {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to obtain metadata for file")) + display("{}", tr_format!("Inode error: failed to obtain metadata for file {:?}\n\tcaused by: {}", path, err)) } ReadXattr(err: io::Error, path: PathBuf) { cause(err) - description("Failed to obtain xattr for file") - display("Inode error: failed to obtain xattr for file {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to obtain xattr for file")) + display("{}", tr_format!("Inode error: failed to obtain xattr for file {:?}\n\tcaused by: {}", path, err)) } ReadLinkTarget(err: io::Error, path: PathBuf) { cause(err) - description("Failed to obtain link target for file") - display("Inode error: failed to obtain link target for file {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to obtain link target for file")) + display("{}", tr_format!("Inode error: failed to obtain link target for file {:?}\n\tcaused by: {}", path, err)) } Create(err: io::Error, path: PathBuf) { cause(err) - description("Failed to create entity") - display("Inode error: failed to create entity {:?}\n\tcaused by: {}", path, err) + description(tr!("Failed to create entity")) + display("{}", tr_format!("Inode error: failed to create entity {:?}\n\tcaused by: {}", path, err)) } Integrity(reason: &'static str) { - description("Integrity error") - display("Inode error: inode integrity error: {}", reason) + description(tr!("Integrity error")) + display("{}", tr_format!("Inode error: inode integrity error: {}", reason)) } Decode(err: 
msgpack::DecodeError) { from() cause(err) - description("Failed to decode metadata") - display("Inode error: failed to decode metadata\n\tcaused by: {}", err) + description(tr!("Failed to decode metadata")) + display("{}", tr_format!("Inode error: failed to decode metadata\n\tcaused by: {}", err)) } Encode(err: msgpack::EncodeError) { from() cause(err) - description("Failed to encode metadata") - display("Inode error: failed to encode metadata\n\tcaused by: {}", err) + description(tr!("Failed to encode metadata")) + display("{}", tr_format!("Inode error: failed to encode metadata\n\tcaused by: {}", err)) } } } @@ -82,12 +82,12 @@ serde_impl!(FileType(u8) { impl fmt::Display for FileType { fn fmt(&self, format: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { - FileType::File => write!(format, "file"), - FileType::Directory => write!(format, "directory"), - FileType::Symlink => write!(format, "symlink"), - FileType::BlockDevice => write!(format, "block device"), - FileType::CharDevice => write!(format, "char device"), - FileType::NamedPipe => write!(format, "named pipe"), + FileType::File => write!(format, "{}", tr!("file")), + FileType::Directory => write!(format, "{}", tr!("directory")), + FileType::Symlink => write!(format, "{}", tr!("symlink")), + FileType::BlockDevice => write!(format, "{}", tr!("block device")), + FileType::CharDevice => write!(format, "{}", tr!("char device")), + FileType::NamedPipe => write!(format, "{}", tr!("named pipe")), } } } @@ -249,13 +249,13 @@ impl Inode { InodeError::Create(e, full_path.clone()) })); } else { - return Err(InodeError::Integrity("Symlink without target")); + return Err(InodeError::Integrity(tr!("Symlink without target"))); } } FileType::NamedPipe => { let name = try!( ffi::CString::new(full_path.as_os_str().as_bytes()) - .map_err(|_| InodeError::Integrity("Name contains nulls")) + .map_err(|_| InodeError::Integrity(tr!("Name contains nulls"))) ); let mode = self.mode | libc::S_IFIFO; if unsafe { libc::mkfifo(name.as_ptr(), mode) } != 0 { @@ -268,7 +268,7 @@ impl Inode { FileType::BlockDevice | FileType::CharDevice => { let name = try!( ffi::CString::new(full_path.as_os_str().as_bytes()) - .map_err(|_| InodeError::Integrity("Name contains nulls")) + .map_err(|_| InodeError::Integrity(tr!("Name contains nulls"))) ); let mode = self.mode | match self.file_type { @@ -279,7 +279,7 @@ impl Inode { let device = if let Some((major, minor)) = self.device { unsafe { libc::makedev(major, minor) } } else { - return Err(InodeError::Integrity("Device without id")); + return Err(InodeError::Integrity(tr!("Device without id"))); }; if unsafe { libc::mknod(name.as_ptr(), mode, device) } != 0 { return Err(InodeError::Create( @@ -291,21 +291,21 @@ impl Inode { } let time = FileTime::from_seconds_since_1970(self.timestamp as u64, 0); if let Err(err) = filetime::set_file_times(&full_path, time, time) { - warn!("Failed to set file time on {:?}: {}", full_path, err); + tr_warn!("Failed to set file time on {:?}: {}", full_path, err); } if !self.xattrs.is_empty() { if xattr::SUPPORTED_PLATFORM { for (name, data) in &self.xattrs { if let Err(err) = xattr::set(&full_path, name, data) { - warn!("Failed to set xattr {} on {:?}: {}", name, full_path, err); + tr_warn!("Failed to set xattr {} on {:?}: {}", name, full_path, err); } } } else { - warn!("Not setting xattr on {:?}", full_path); + tr_warn!("Not setting xattr on {:?}", full_path); } } if let Err(err) = fs::set_permissions(&full_path, Permissions::from_mode(self.mode)) { - warn!( + tr_warn!( "Failed 
to set permissions {:o} on {:?}: {}", self.mode, full_path, @@ -313,7 +313,7 @@ ); } if let Err(err) = chown(&full_path, self.user, self.group) { - warn!( + tr_warn!( "Failed to set user {} and group {} on {:?}: {}", self.user, self.group, diff --git a/src/repository/mod.rs b/src/repository/mod.rs index 95fcd57..81d14ba 100644 --- a/src/repository/mod.rs +++ b/src/repository/mod.rs @@ -139,7 +139,7 @@ impl Repository { match unsafe { Index::open(layout.index_path(), &INDEX_MAGIC, INDEX_VERSION) } { Ok(index) => (index, false), Err(err) => { - error!("Failed to load local index:\n\tcaused by: {}", err); + tr_error!("Failed to load local index:\n\tcaused by: {}", err); ( try!(Index::create( layout.index_path(), @@ -153,7 +153,7 @@ let (bundle_map, rebuild_bundle_map) = match BundleMap::load(layout.bundle_map_path()) { Ok(bundle_map) => (bundle_map, false), Err(err) => { - error!("Failed to load local bundle map:\n\tcaused by: {}", err); + tr_error!("Failed to load local bundle map:\n\tcaused by: {}", err); (BundleMap::create(), true) } }; @@ -178,7 +178,7 @@ if !rebuild_bundle_map { let mut save_bundle_map = false; if !gone.is_empty() { - info!("Removig {} old bundles from index", gone.len()); + tr_info!("Removing {} old bundles from index", gone.len()); try!(repo.write_mode()); for bundle in gone { try!(repo.remove_gone_remote_bundle(&bundle)) @@ -186,10 +186,10 @@ save_bundle_map = true; } if !new.is_empty() { - info!("Adding {} new bundles to index", new.len()); + tr_info!("Adding {} new bundles to index", new.len()); try!(repo.write_mode()); for bundle in ProgressIter::new( - "adding bundles to index", + tr!("adding bundles to index"), new.len(), new.into_iter() ) @@ -232,11 +232,11 @@ let mut backups: Vec<(String, Backup)> = try!(repo.get_all_backups()).into_iter().collect(); backups.sort_by_key(|&(_, ref b)| b.timestamp); if let Some((name, backup)) = backups.pop() { - info!("Taking configuration from the last backup '{}'", name); + tr_info!("Taking configuration from the last backup '{}'", name); repo.config = backup.config; try!(repo.save_config()) } else { - warn!( + tr_warn!( "No backup found in the repository to take configuration from, please set the configuration manually."
); } @@ -268,7 +268,7 @@ impl Repository { pub fn set_encryption(&mut self, public: Option<&PublicKey>) { if let Some(key) = public { if !self.crypto.lock().unwrap().contains_secret_key(key) { - warn!("The secret key for that public key is not stored in the repository.") + tr_warn!("The secret key for that public key is not stored in the repository.") } let mut key_bytes = Vec::new(); key_bytes.extend_from_slice(&key[..]); @@ -343,7 +343,7 @@ impl Repository { if self.bundle_map.find(&bundle.id).is_some() { return Ok(()); } - debug!("Adding new bundle to index: {}", bundle.id); + tr_debug!("Adding new bundle to index: {}", bundle.id); let bundle_id = match bundle.mode { BundleMode::Data => self.next_data_bundle, BundleMode::Meta => self.next_meta_bundle, @@ -377,7 +377,7 @@ impl Repository { fn remove_gone_remote_bundle(&mut self, bundle: &BundleInfo) -> Result<(), RepositoryError> { if let Some(id) = self.bundle_map.find(&bundle.id) { - debug!("Removing bundle from index: {}", bundle.id); + tr_debug!("Removing bundle from index: {}", bundle.id); try!(self.bundles.delete_local_bundle(&bundle.id)); try!(self.index.filter(|_key, data| data.bundle != id)); self.bundle_map.remove(id); @@ -406,7 +406,7 @@ impl Repository { impl Drop for Repository { fn drop(&mut self) { if let Err(err) = self.flush() { - error!("Failed to flush repository: {}", err); + tr_error!("Failed to flush repository: {}", err); } } } diff --git a/src/repository/tarfile.rs b/src/repository/tarfile.rs index a9538a9..2414686 100644 --- a/src/repository/tarfile.rs +++ b/src/repository/tarfile.rs @@ -198,7 +198,7 @@ impl Repository { Err(RepositoryError::Inode(_)) | Err(RepositoryError::Chunker(_)) | Err(RepositoryError::Io(_)) => { - info!("Failed to backup {:?}", path); + tr_info!("Failed to backup {:?}", path); failed_paths.push(path); continue; } @@ -243,7 +243,7 @@ impl Repository { if roots.len() == 1 { Ok(roots.pop().unwrap()) } else { - warn!("Tar file contains multiple roots, adding dummy folder"); + tr_warn!("Tar file contains multiple roots, adding dummy folder"); let mut root_inode = Inode { file_type: FileType::Directory, mode: 0o755, diff --git a/src/repository/vacuum.rs b/src/repository/vacuum.rs index 55daa6b..d11b667 100644 --- a/src/repository/vacuum.rs +++ b/src/repository/vacuum.rs @@ -20,11 +20,11 @@ impl Repository { force: bool, ) -> Result<(), RepositoryError> { try!(self.flush()); - info!("Locking repository"); + tr_info!("Locking repository"); try!(self.write_mode()); let _lock = try!(self.lock(true)); // analyze_usage will set the dirty flag - info!("Analyzing chunk usage"); + tr_info!("Analyzing chunk usage"); let usage = try!(self.analyze_usage()); let mut data_total = 0; let mut data_used = 0; @@ -32,7 +32,7 @@ impl Repository { data_total += bundle.info.encoded_size; data_used += bundle.get_used_size(); } - info!( + tr_info!( "Usage: {} of {}, {:.1}%", to_file_size(data_used as u64), to_file_size(data_total as u64), @@ -70,7 +70,7 @@ impl Repository { } } } - info!( + tr_info!( "Reclaiming about {} by rewriting {} bundles ({})", to_file_size(reclaim_space as u64), rewrite_bundles.len(), @@ -81,7 +81,7 @@ impl Repository { return Ok(()); } for id in ProgressIter::new( - "rewriting bundles", + tr!("rewriting bundles"), rewrite_bundles.len(), rewrite_bundles.iter() ) @@ -100,12 +100,12 @@ impl Repository { } } try!(self.flush()); - info!("Checking index"); + tr_info!("Checking index"); for (hash, location) in self.index.iter() { let loc_bundle = location.bundle; let loc_chunk = location.chunk; if 
rewrite_bundles.contains(&loc_bundle) { - panic!( + tr_panic!( "Removed bundle is still referenced in index: hash:{}, bundle:{}, chunk:{}", hash, loc_bundle, @@ -113,7 +113,7 @@ ); } } - info!("Deleting {} bundles", rewrite_bundles.len()); + tr_info!("Deleting {} bundles", rewrite_bundles.len()); for id in rewrite_bundles { try!(self.delete_bundle(id)); } diff --git a/src/translation.rs b/src/translation.rs index 7c2d45d..90a711e 100644 --- a/src/translation.rs +++ b/src/translation.rs @@ -1,30 +1,179 @@ use std::borrow::Cow; use std::collections::HashMap; -type TransStr = Cow<'static, str>; +use std::cmp::max; +use std::str; -pub struct Translation(HashMap<TransStr, TransStr>); +use std::path::{Path, PathBuf}; +use std::io::Read; +use std::fs::File; + +use locale_config::Locale; + + +pub type CowStr = Cow<'static, str>; + +fn read_u32(b: &[u8], reorder: bool) -> u32 { + if reorder { + (u32::from(b[0]) << 24) + (u32::from(b[1]) << 16) + (u32::from(b[2]) << 8) + u32::from(b[3]) + } else { + (u32::from(b[3]) << 24) + (u32::from(b[2]) << 16) + (u32::from(b[1]) << 8) + u32::from(b[0]) + } +} + +struct MoFile<'a> { + data: &'a [u8], + count: usize, + orig_pos: usize, + trans_pos: usize, + reorder: bool, + i: usize +} + +impl<'a> MoFile<'a> { + fn new(data: &'a [u8]) -> Result<Self, ()> { + if data.len() < 20 { + return Err(()); + } + // Magic header + let magic = read_u32(&data[0..4], false); + let reorder = if magic == 0x9504_12de { + false + } else if magic == 0xde12_0495 { + true + } else { + return Err(()); + }; + // Version + if read_u32(&data[4..8], reorder) != 0x0000_0000 { + return Err(()); + } + // Translation count + let count = read_u32(&data[8..12], reorder) as usize; + // Original string offset + let orig_pos = read_u32(&data[12..16], reorder) as usize; + // Translated string offset + let trans_pos = read_u32(&data[16..20], reorder) as usize; + if data.len() < max(orig_pos, trans_pos) + count * 8 { + return Err(()); + } + Ok(MoFile{ + data: data, + count: count, + orig_pos: orig_pos, + trans_pos: trans_pos, + reorder: reorder, + i: 0 + }) + } +} + +impl<'a> Iterator for MoFile<'a> { + type Item = (&'a str, &'a str); + + fn next(&mut self) -> Option<Self::Item> { + if self.i >= self.count { + return None; + } + let length = read_u32(&self.data[self.orig_pos+self.i*8..], self.reorder) as usize; + let offset = read_u32(&self.data[self.orig_pos+self.i*8+4..], self.reorder) as usize; + let orig = match str::from_utf8(&self.data[offset..offset+length]) { + Ok(s) => s, + Err(_) => return None + }; + let length = read_u32(&self.data[self.trans_pos+self.i*8..], self.reorder) as usize; + let offset = read_u32(&self.data[self.trans_pos+self.i*8+4..], self.reorder) as usize; + let trans = match str::from_utf8(&self.data[offset..offset+length]) { + Ok(s) => s, + Err(_) => return None + }; + self.i += 1; + Some((orig, trans)) + } +}
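For reference, here is the MO layout MoFile::new expects, as a worked example encoding the single pair "Hello" -> "Hallo" (the pair the old hard-coded table used). Real catalogs of this form are produced by gettext's msgfmt. The bytes are little-endian, so the magic 0x950412de is stored as de 12 04 95; the two hash-table header fields are included only to match the conventional 28-byte MO header and are ignored by the parser above, whose read_u32 is copied here verbatim.

fn read_u32(b: &[u8], reorder: bool) -> u32 {
    if reorder {
        (u32::from(b[0]) << 24) + (u32::from(b[1]) << 16) + (u32::from(b[2]) << 8) + u32::from(b[3])
    } else {
        (u32::from(b[3]) << 24) + (u32::from(b[2]) << 16) + (u32::from(b[1]) << 8) + u32::from(b[0])
    }
}

// Little-endian encoding helper for building the example file.
fn le(v: u32) -> [u8; 4] { [v as u8, (v >> 8) as u8, (v >> 16) as u8, (v >> 24) as u8] }

fn main() {
    let mut mo = Vec::new();
    mo.extend_from_slice(&le(0x9504_12de)); // magic, stored as de 12 04 95
    mo.extend_from_slice(&le(0));           // format revision, must be 0
    mo.extend_from_slice(&le(1));           // number of string pairs
    mo.extend_from_slice(&le(28));          // offset of original-string table
    mo.extend_from_slice(&le(36));          // offset of translated-string table
    mo.extend_from_slice(&le(0));           // hash table size (ignored by MoFile)
    mo.extend_from_slice(&le(0));           // hash table offset (ignored by MoFile)
    mo.extend_from_slice(&le(5));           // originals[0]: length
    mo.extend_from_slice(&le(44));          // originals[0]: offset
    mo.extend_from_slice(&le(5));           // translations[0]: length
    mo.extend_from_slice(&le(49));          // translations[0]: offset
    mo.extend_from_slice(b"Hello");         // bytes 44..49
    mo.extend_from_slice(b"Hallo");         // bytes 49..54

    // The fields come out exactly as MoFile::new and its iterator read them:
    assert_eq!(read_u32(&mo[0..4], false), 0x9504_12de);
    assert_eq!(read_u32(&mo[8..12], false), 1); // count
    let orig_pos = read_u32(&mo[12..16], false) as usize;
    let trans_pos = read_u32(&mo[16..20], false) as usize;
    let (len, off) = (read_u32(&mo[orig_pos..], false) as usize,
                      read_u32(&mo[orig_pos + 4..], false) as usize);
    assert_eq!(&mo[off..off + len], b"Hello");
    let (len, off) = (read_u32(&mo[trans_pos..], false) as usize,
                      read_u32(&mo[trans_pos + 4..], false) as usize);
    assert_eq!(&mo[off..off + len], b"Hallo");
}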
+ + +pub struct Translation(HashMap<CowStr, CowStr>); impl Translation { pub fn new() -> Self { Translation(Default::default()) } - pub fn set<O: Into<TransStr>, T: Into<TransStr>>(&mut self, orig: O, trans: T) { - self.0.insert(orig.into(), trans.into()); + pub fn from_mo_data(data: &'static [u8]) -> Self { + let mut translation = Translation::new(); + match MoFile::new(data) { + Ok(mo_file) => for (orig, trans) in mo_file { + translation.set(orig, trans); + } + Err(_) => error!("Invalid translation data") + } + translation } - pub fn get<O: Into<TransStr>>(&self, orig: O) -> TransStr { - let orig = orig.into(); - self.0.get(&orig).cloned().unwrap_or(orig) + pub fn from_mo_file(path: &Path) -> Self { + let mut translation = Translation::new(); + if let Ok(mut file) = File::open(&path) { + let mut data = vec![]; + if file.read_to_end(&mut data).is_ok() { + match MoFile::new(&data) { + Ok(mo_file) => for (orig, trans) in mo_file { + translation.set(orig.to_string(), trans.to_string()); + } + Err(_) => error!("Invalid translation data") + } + } + } + translation + } + + pub fn set<O: Into<CowStr>, T: Into<CowStr>>(&mut self, orig: O, trans: T) { + let trans = trans.into(); + if !trans.is_empty() { + self.0.insert(orig.into(), trans); + } + } + + pub fn get<'a, 'b: 'a>(&'b self, orig: &'a str) -> &'a str { + self.0.get(orig).map(|s| s as &'a str).unwrap_or(orig) + } } +fn get_translation(locale: &str) -> Translation { + if let Some(trans) = find_translation(locale) { + return trans; + } + let lang = locale.split('_').next().unwrap(); + if let Some(trans) = find_translation(lang) { + return trans; + } + Translation::new() +} + +fn find_translation(name: &str) -> Option<Translation> { + if EMBEDDED_TRANS.contains_key(name) { + return Some(Translation::from_mo_data(EMBEDDED_TRANS[name])); + } + let path = PathBuf::from(format!("/usr/share/locale/{}/LC_MESSAGES/zvault.mo", name)); + if path.exists() { + return Some(Translation::from_mo_file(&path)); + } + let path = PathBuf::from(format!("lang/{}.mo", name)); + if path.exists() { + return Some(Translation::from_mo_file(&path)); + } + None +}
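get_translation resolves a locale in two steps: an exact probe ("de_DE"), then a probe with just the language part ("de"). A self-contained sketch of that fallback, with a hypothetical in-memory catalog in place of the EMBEDDED_TRANS and file-system probes that find_translation performs:

// Hypothetical stand-in: pretend only a plain "de" catalog exists.
fn find_translation(name: &str) -> Option<String> {
    if name == "de" { Some("de catalog".to_string()) } else { None }
}

fn get_translation(locale: &str) -> String {
    if let Some(trans) = find_translation(locale) {
        return trans; // exact match, e.g. a country-specific catalog
    }
    // "de_DE" -> "de": fall back to the language part of the locale.
    let lang = locale.split('_').next().unwrap();
    if let Some(trans) = find_translation(lang) {
        return trans;
    }
    String::new() // empty catalog: every tr!() falls through to English
}

fn main() {
    assert_eq!(get_translation("de_DE"), "de catalog"); // via the fallback
    assert_eq!(get_translation("fr_FR"), "");           // no catalog at all
}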
tr_panic { + ($($arg:tt)*) => (panic!("{}", tr_format!($($arg)*))); +} diff --git a/src/util/chunk.rs b/src/util/chunk.rs index 3aa9934..a07a343 100644 --- a/src/util/chunk.rs +++ b/src/util/chunk.rs @@ -61,7 +61,7 @@ impl ChunkList { #[inline] pub fn read_from(src: &[u8]) -> Self { if src.len() % 20 != 0 { - warn!("Reading truncated chunk list"); + tr_warn!("Reading truncated chunk list"); } ChunkList::read_n_from(src.len() / 20, &mut Cursor::new(src)).unwrap() } @@ -129,7 +129,7 @@ impl<'a> Deserialize<'a> for ChunkList { { let data: Vec = try!(ByteBuf::deserialize(deserializer)).into(); if data.len() % 20 != 0 { - return Err(D::Error::custom("Invalid chunk list length")); + return Err(D::Error::custom(tr!("Invalid chunk list length"))); } Ok( ChunkList::read_n_from(data.len() / 20, &mut Cursor::new(data)).unwrap() diff --git a/src/util/cli.rs b/src/util/cli.rs index 3a3ffc1..37e357a 100644 --- a/src/util/cli.rs +++ b/src/util/cli.rs @@ -73,7 +73,7 @@ impl Iterator for ProgressIter { fn next(&mut self) -> Option { match self.inner.next() { None => { - let msg = self.msg.clone() + "done."; + let msg = self.msg.clone() + tr!("done."); self.bar.finish_print(&msg); None } diff --git a/src/util/compression.rs b/src/util/compression.rs index befa547..0ab0e3f 100644 --- a/src/util/compression.rs +++ b/src/util/compression.rs @@ -11,26 +11,26 @@ quick_error!{ #[derive(Debug)] pub enum CompressionError { UnsupportedCodec(name: String) { - description("Unsupported codec") - display("Unsupported codec: {}", name) + description(tr!("Unsupported codec")) + display("{}", tr_format!("Unsupported codec: {}", name)) } InitializeCodec { - description("Failed to initialize codec") + description(tr!("Failed to initialize codec")) } InitializeOptions { - description("Failed to set codec options") + description(tr!("Failed to set codec options")) } InitializeStream { - description("Failed to create stream") + description(tr!("Failed to create stream")) } Operation(reason: &'static str) { - description("Operation failed") - display("Operation failed: {}", reason) + description(tr!("Operation failed")) + display("{}", tr_format!("Operation failed: {}", reason)) } Output(err: io::Error) { from() cause(err) - description("Failed to write to output") + description(tr!("Failed to write to output")) } } } diff --git a/src/util/encryption.rs b/src/util/encryption.rs index 41ee20b..ca2a5fb 100644 --- a/src/util/encryption.rs +++ b/src/util/encryption.rs @@ -21,7 +21,7 @@ static INIT: Once = ONCE_INIT; fn sodium_init() { INIT.call_once(|| if !sodiumoxide::init() { - panic!("Failed to initialize sodiumoxide"); + tr_panic!("Failed to initialize sodiumoxide"); }); } @@ -29,27 +29,27 @@ quick_error!{ #[derive(Debug)] pub enum EncryptionError { InvalidKey { - description("Invalid key") + description(tr!("Invalid key")) } MissingKey(key: PublicKey) { - description("Missing key") - display("Missing key: {}", to_hex(&key[..])) + description(tr!("Missing key")) + display("{}", tr_format!("Missing key: {}", to_hex(&key[..]))) } Operation(reason: &'static str) { - description("Operation failed") - display("Operation failed: {}", reason) + description(tr!("Operation failed")) + display("{}", tr_format!("Operation failed: {}", reason)) } Io(err: io::Error) { from() cause(err) - description("IO error") - display("IO error: {}", err) + description(tr!("IO error")) + display("{}", tr_format!("IO error: {}", err)) } Yaml(err: serde_yaml::Error) { from() cause(err) - description("Yaml format error") - display("Yaml format error: {}", 
err) + description(tr!("Yaml format error")) + display("{}", tr_format!("Yaml format error: {}", err)) } } } @@ -68,7 +68,7 @@ impl EncryptionMethod { pub fn from_string(val: &str) -> Result<Self, &'static str> { match val { "sodium" => Ok(EncryptionMethod::Sodium), - _ => Err("Unsupported encryption method"), + _ => Err(tr!("Unsupported encryption method")), } } @@ -255,7 +255,7 @@ impl Crypto { match *method { EncryptionMethod::Sodium => { sealedbox::open(data, &public, secret).map_err(|_| { - EncryptionError::Operation("Decryption failed") + EncryptionError::Operation(tr!("Decryption failed")) }) } } @@ -285,7 +285,7 @@ let mut pk = [0u8; 32]; let mut sk = [0u8; 32]; if unsafe { libsodium_sys::crypto_box_seed_keypair(&mut pk, &mut sk, &seed) } != 0 { - panic!("Libsodium failed"); + tr_panic!("Libsodium failed"); } ( PublicKey::from_slice(&pk).unwrap(), diff --git a/src/util/hash.rs b/src/util/hash.rs index e8d2e93..4ee2e54 100644 --- a/src/util/hash.rs +++ b/src/util/hash.rs @@ -95,7 +95,7 @@ impl<'a> Deserialize<'a> for Hash { { let dat: Vec<u8> = try!(ByteBuf::deserialize(deserializer)).into(); if dat.len() != 16 { - return Err(D::Error::custom("Invalid key length")); + return Err(D::Error::custom(tr!("Invalid key length"))); } Ok(Hash { high: LittleEndian::read_u64(&dat[..8]), @@ -141,7 +141,7 @@ impl HashMethod { match name { "blake2" => Ok(HashMethod::Blake2), "murmur3" => Ok(HashMethod::Murmur3), - _ => Err("Unsupported hash method"), + _ => Err(tr!("Unsupported hash method")), } } diff --git a/src/util/lock.rs b/src/util/lock.rs index 35d367e..d2f242f 100644 --- a/src/util/lock.rs +++ b/src/util/lock.rs @@ -15,22 +15,22 @@ quick_error!{ Io(err: io::Error) { from() cause(err) - description("IO error") - display("Lock error: IO error\n\tcaused by: {}", err) + description(tr!("IO error")) + display("{}", tr_format!("Lock error: IO error\n\tcaused by: {}", err)) } Yaml(err: serde_yaml::Error) { from() cause(err) - description("Yaml format error") - display("Lock error: yaml format error\n\tcaused by: {}", err) + description(tr!("Yaml format error")) + display("{}", tr_format!("Lock error: yaml format error\n\tcaused by: {}", err)) } InvalidLockState(reason: &'static str) { - description("Invalid lock state") - display("Lock error: invalid lock state: {}", reason) + description(tr!("Invalid lock state")) + display("{}", tr_format!("Lock error: invalid lock state: {}", reason)) } Locked { - description("Locked") - display("Lock error: locked") + description(tr!("Locked")) + display("{}", tr_format!("Lock error: locked")) } } } @@ -122,13 +122,13 @@ impl LockFolder { for lock in try!(self.get_locks()) { if lock.exclusive { if level == LockLevel::Exclusive { - return Err(LockError::InvalidLockState("multiple exclusive locks")); + return Err(LockError::InvalidLockState(tr!("multiple exclusive locks"))); } else { level = LockLevel::Exclusive } } else if level == LockLevel::Exclusive { return Err(LockError::InvalidLockState( - "exclusive lock and shared locks" + tr!("exclusive lock and shared locks") )); } else { level = LockLevel::Shared