diff --git a/src/bundledb/cache.rs b/src/bundledb/cache.rs new file mode 100644 index 0000000..52c27bc --- /dev/null +++ b/src/bundledb/cache.rs @@ -0,0 +1,106 @@ +use ::prelude::*; + +use std::path::{Path, PathBuf}; +use std::fs::{self, File}; +use std::io::{self, BufReader, BufWriter, Write, Read}; + + +pub static CACHE_FILE_STRING: [u8; 7] = *b"zvault\x04"; +pub static CACHE_FILE_VERSION: u8 = 1; + + +quick_error!{ + #[derive(Debug)] + pub enum BundleCacheError { + Read(err: io::Error) { + cause(err) + description("Failed to read bundle cache") + display("Bundle cache error: failed to read bundle cache\n\tcaused by: {}", err) + } + Write(err: io::Error) { + cause(err) + description("Failed to write bundle cache") + display("Bundle cache error: failed to write bundle cache\n\tcaused by: {}", err) + } + WrongHeader { + description("Wrong header") + display("Bundle cache error: wrong header on bundle cache") + } + UnsupportedVersion(version: u8) { + description("Wrong version") + display("Bundle cache error: unsupported version: {}", version) + } + Decode(err: msgpack::DecodeError) { + from() + cause(err) + description("Failed to decode bundle cache") + display("Bundle cache error: failed to decode bundle cache\n\tcaused by: {}", err) + } + Encode(err: msgpack::EncodeError) { + from() + cause(err) + description("Failed to encode bundle cache") + display("Bundle cache error: failed to encode bundle cache\n\tcaused by: {}", err) + } + } +} + + +#[derive(Clone, Default)] +pub struct StoredBundle { + pub info: BundleInfo, + pub path: PathBuf +} +serde_impl!(StoredBundle(u64) { + info: BundleInfo => 0, + path: PathBuf => 1 +}); + +impl StoredBundle { + #[inline] + pub fn id(&self) -> BundleId { + self.info.id.clone() + } + + pub fn move_to>(mut self, path: P) -> Result { + let path = path.as_ref(); + if fs::rename(&self.path, path).is_err() { + try!(fs::copy(&self.path, path).context(path)); + try!(fs::remove_file(&self.path).context(&self.path as &Path)); + } + 
self.path = path.to_path_buf(); + Ok(self) + } + + pub fn copy_to>(&self, path: P) -> Result { + let path = path.as_ref(); + try!(fs::copy(&self.path, path).context(path)); + let mut bundle = self.clone(); + bundle.path = path.to_path_buf(); + Ok(bundle) + } + + pub fn read_list_from>(path: P) -> Result, BundleCacheError> { + let path = path.as_ref(); + let mut file = BufReader::new(try!(File::open(path).map_err(BundleCacheError::Read))); + let mut header = [0u8; 8]; + try!(file.read_exact(&mut header).map_err(BundleCacheError::Read)); + if header[..CACHE_FILE_STRING.len()] != CACHE_FILE_STRING { + return Err(BundleCacheError::WrongHeader) + } + let version = header[CACHE_FILE_STRING.len()]; + if version != CACHE_FILE_VERSION { + return Err(BundleCacheError::UnsupportedVersion(version)) + } + Ok(try!(msgpack::decode_from_stream(&mut file))) + } + + pub fn save_list_to>(list: &[Self], path: P) -> Result<(), BundleCacheError> { + let path = path.as_ref(); + let mut file = BufWriter::new(try!(File::create(path).map_err(BundleCacheError::Write))); + try!(file.write_all(&CACHE_FILE_STRING).map_err(BundleCacheError::Write)); + try!(file.write_all(&[CACHE_FILE_VERSION]).map_err(BundleCacheError::Write)); + try!(msgpack::encode_to_stream(&list, &mut file)); + Ok(()) + } +} diff --git a/src/bundledb/db.rs b/src/bundledb/db.rs index b260044..d1c555b 100644 --- a/src/bundledb/db.rs +++ b/src/bundledb/db.rs @@ -3,13 +3,54 @@ use super::*; use std::path::{Path, PathBuf}; use std::collections::{HashMap, HashSet}; -use std::fs::{self, File}; +use std::fs; use std::sync::{Arc, Mutex}; -use std::io::{BufReader, BufWriter, Read, Write}; +use std::io; -pub static CACHE_FILE_STRING: [u8; 7] = *b"zvault\x04"; -pub static CACHE_FILE_VERSION: u8 = 1; +quick_error!{ + #[derive(Debug)] + pub enum BundleDbError { + ListBundles(err: io::Error) { + cause(err) + description("Failed to list bundles") + display("Bundle db error: failed to list bundles\n\tcaused by: {}", err) + } + Reader(err: 
BundleReaderError) { + from() + cause(err) + description("Failed to read bundle") + display("Bundle db error: failed to read bundle\n\tcaused by: {}", err) + } + Writer(err: BundleWriterError) { + from() + cause(err) + description("Failed to write bundle") + display("Bundle db error: failed to write bundle\n\tcaused by: {}", err) + } + Cache(err: BundleCacheError) { + from() + cause(err) + description("Failed to read/write bundle cache") + display("Bundle db error: failed to read/write bundle cache\n\tcaused by: {}", err) + } + Io(err: io::Error, path: PathBuf) { + cause(err) + context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf()) + description("Io error") + display("Bundle db error: io error on {:?}\n\tcaused by: {}", path, err) + } + NoSuchBundle(bundle: BundleId) { + description("No such bundle") + display("Bundle db error: no such bundle: {:?}", bundle) + } + Remove(err: io::Error, bundle: BundleId) { + cause(err) + description("Failed to remove bundle") + display("Bundle db error: failed to remove bundle {}\n\tcaused by: {}", bundle, err) + } + } +} pub fn bundle_path(bundle: &BundleId, mut folder: PathBuf, mut count: usize) -> (PathBuf, PathBuf) { @@ -25,12 +66,12 @@ pub fn bundle_path(bundle: &BundleId, mut folder: PathBuf, mut count: usize) -> (folder, file.into()) } -pub fn load_bundles>(path: P, bundles: &mut HashMap) -> Result<(Vec, Vec), BundleError> { +pub fn load_bundles>(path: P, bundles: &mut HashMap) -> Result<(Vec, Vec), BundleDbError> { let mut paths = vec![path.as_ref().to_path_buf()]; let mut bundle_paths = HashSet::new(); while let Some(path) = paths.pop() { - for entry in try!(fs::read_dir(path).map_err(BundleError::List)) { - let entry = try!(entry.map_err(BundleError::List)); + for entry in try!(fs::read_dir(path).map_err(BundleDbError::ListBundles)) { + let entry = try!(entry.map_err(BundleDbError::ListBundles)); let path = entry.path(); if path.is_dir() { paths.push(path); @@ -51,7 +92,7 @@ pub fn load_bundles>(path: P, 
bundles: &mut HashMap>(path: P, bundles: &mut HashMap 0, - path: PathBuf => 1 -}); - -impl StoredBundle { - #[inline] - pub fn id(&self) -> BundleId { - self.info.id.clone() - } - - pub fn move_to>(mut self, path: P) -> Result { - let path = path.as_ref(); - if fs::rename(&self.path, path).is_err() { - try!(fs::copy(&self.path, path).context(path)); - try!(fs::remove_file(&self.path).context(&self.path as &Path)); - } - self.path = path.to_path_buf(); - Ok(self) - } - - pub fn copy_to>(&self, path: P) -> Result { - let path = path.as_ref(); - try!(fs::copy(&self.path, path).context(path)); - let mut bundle = self.clone(); - bundle.path = path.to_path_buf(); - Ok(bundle) - } - - pub fn read_list_from>(path: P) -> Result, BundleError> { - let path = path.as_ref(); - let mut file = BufReader::new(try!(File::open(path).context(path))); - let mut header = [0u8; 8]; - try!(file.read_exact(&mut header).context(&path as &Path)); - if header[..CACHE_FILE_STRING.len()] != CACHE_FILE_STRING { - return Err(BundleError::WrongHeader(path.to_path_buf())) - } - let version = header[HEADER_STRING.len()]; - if version != HEADER_VERSION { - return Err(BundleError::WrongVersion(path.to_path_buf(), version)) - } - Ok(try!(msgpack::decode_from_stream(&mut file).context(path))) - } - - pub fn save_list_to>(list: &[Self], path: P) -> Result<(), BundleError> { - let path = path.as_ref(); - let mut file = BufWriter::new(try!(File::create(path).context(path))); - try!(file.write_all(&HEADER_STRING).context(path)); - try!(file.write_all(&[HEADER_VERSION]).context(path)); - try!(msgpack::encode_to_stream(&list, &mut file).context(path)); - Ok(()) - } -} - pub struct BundleDb { remote_path: PathBuf, @@ -129,7 +111,7 @@ pub struct BundleDb { crypto: Arc>, local_bundles: HashMap, remote_bundles: HashMap, - bundle_cache: LruCache)> + bundle_cache: LruCache)> } @@ -147,7 +129,7 @@ impl BundleDb { } } - fn load_bundle_list(&mut self) -> Result<(Vec, Vec), BundleError> { + fn load_bundle_list(&mut 
self) -> Result<(Vec, Vec), BundleDbError> { let bundle_info_cache = &self.remote_cache_path; if let Ok(list) = StoredBundle::read_list_from(&bundle_info_cache) { for bundle in list { @@ -168,7 +150,7 @@ impl BundleDb { } #[inline] - pub fn open, L: AsRef>(remote_path: R, local_path: L, crypto: Arc>) -> Result<(Self, Vec, Vec), BundleError> { + pub fn open, L: AsRef>(remote_path: R, local_path: L, crypto: Arc>) -> Result<(Self, Vec, Vec), BundleDbError> { let remote_path = remote_path.as_ref().to_owned(); let local_path = local_path.as_ref().to_owned(); let mut self_ = Self::new(remote_path, local_path, crypto); @@ -177,7 +159,7 @@ impl BundleDb { } #[inline] - pub fn create, L: AsRef>(remote_path: R, local_path: L, crypto: Arc>) -> Result { + pub fn create, L: AsRef>(remote_path: R, local_path: L, crypto: Arc>) -> Result { let remote_path = remote_path.as_ref().to_owned(); let local_path = local_path.as_ref().to_owned(); let self_ = Self::new(remote_path, local_path, crypto); @@ -188,29 +170,23 @@ impl BundleDb { } #[inline] - pub fn create_bundle( - &self, - mode: BundleMode, - hash_method: HashMethod, - compression: Option, - encryption: Option - ) -> Result { - BundleWriter::new(mode, hash_method, compression, encryption, self.crypto.clone()) + pub fn create_bundle(&self, mode: BundleMode, hash_method: HashMethod, compression: Option, encryption: Option) -> Result { + Ok(try!(BundleWriter::new(mode, hash_method, compression, encryption, self.crypto.clone()))) } - fn get_stored_bundle(&self, bundle_id: &BundleId) -> Result<&StoredBundle, BundleError> { + fn get_stored_bundle(&self, bundle_id: &BundleId) -> Result<&StoredBundle, BundleDbError> { if let Some(stored) = self.local_bundles.get(bundle_id).or_else(|| self.remote_bundles.get(bundle_id)) { Ok(stored) } else { - Err(BundleError::NoSuchBundle(bundle_id.clone())) + Err(BundleDbError::NoSuchBundle(bundle_id.clone())) } } - fn get_bundle(&self, stored: &StoredBundle) -> Result { - 
Bundle::load(stored.path.clone(), self.crypto.clone()) + fn get_bundle(&self, stored: &StoredBundle) -> Result { + Ok(try!(BundleReader::load(stored.path.clone(), self.crypto.clone()))) } - pub fn get_chunk(&mut self, bundle_id: &BundleId, id: usize) -> Result, BundleError> { + pub fn get_chunk(&mut self, bundle_id: &BundleId, id: usize) -> Result, BundleDbError> { if let Some(&mut (ref mut bundle, ref data)) = self.bundle_cache.get_mut(bundle_id) { let (pos, len) = try!(bundle.get_chunk_position(id)); let mut chunk = Vec::with_capacity(len); @@ -227,7 +203,7 @@ impl BundleDb { } #[inline] - pub fn add_bundle(&mut self, bundle: BundleWriter) -> Result { + pub fn add_bundle(&mut self, bundle: BundleWriter) -> Result { let bundle = try!(bundle.finish(&self)); let id = bundle.id(); if bundle.info.mode == BundleMode::Meta { @@ -254,19 +230,19 @@ impl BundleDb { } #[inline] - pub fn delete_bundle(&mut self, bundle: &BundleId) -> Result<(), BundleError> { + pub fn delete_bundle(&mut self, bundle: &BundleId) -> Result<(), BundleDbError> { if let Some(bundle) = self.local_bundles.remove(bundle) { - try!(fs::remove_file(&bundle.path).map_err(|e| BundleError::Remove(e, bundle.id()))) + try!(fs::remove_file(&bundle.path).map_err(|e| BundleDbError::Remove(e, bundle.id()))) } if let Some(bundle) = self.remote_bundles.remove(bundle) { - fs::remove_file(&bundle.path).map_err(|e| BundleError::Remove(e, bundle.id())) + fs::remove_file(&bundle.path).map_err(|e| BundleDbError::Remove(e, bundle.id())) } else { - Err(BundleError::NoSuchBundle(bundle.clone())) + Err(BundleDbError::NoSuchBundle(bundle.clone())) } } #[inline] - pub fn check(&mut self, full: bool) -> Result<(), BundleError> { + pub fn check(&mut self, full: bool) -> Result<(), BundleDbError> { for stored in self.remote_bundles.values() { let mut bundle = try!(self.get_bundle(stored)); try!(bundle.check(full)) diff --git a/src/bundledb/error.rs b/src/bundledb/error.rs index c2f7c51..8090160 100644 --- 
a/src/bundledb/error.rs +++ b/src/bundledb/error.rs @@ -4,9 +4,10 @@ use super::*; use std::path::{Path, PathBuf}; use std::io; + quick_error!{ #[derive(Debug)] - pub enum BundleError { + pub enum BundleDbError { List(err: io::Error) { cause(err) description("Failed to list bundles") @@ -79,5 +80,17 @@ quick_error!{ description("Failed to remove bundle") display("Failed to remove bundle {}", bundle) } + Writer(err: BundleWriterError) { + from() + cause(err) + description("Failed to write new bundle") + display("Bundle db error: failed to write new bundle\n\tcaused by: {}", err) + } + Reader(err: BundleReaderError) { + from() + cause(err) + description("Failed to read bundle") + display("Bundle db error: failed to read a bundle\n\tcaused by: {}", err) + } } } diff --git a/src/bundledb/mod.rs b/src/bundledb/mod.rs index ee655b7..8313137 100644 --- a/src/bundledb/mod.rs +++ b/src/bundledb/mod.rs @@ -1,12 +1,107 @@ -mod error; mod writer; -mod bundle; +mod reader; mod db; +mod cache; -pub use self::error::BundleError; -pub use self::writer::BundleWriter; -pub use self::bundle::*; +pub use self::cache::{StoredBundle, BundleCacheError}; +pub use self::writer::{BundleWriter, BundleWriterError}; +pub use self::reader::{BundleReader, BundleReaderError}; pub use self::db::*; +use ::prelude::*; + +use std::fmt; +use serde; + + pub static HEADER_STRING: [u8; 7] = *b"zvault\x01"; pub static HEADER_VERSION: u8 = 1; + + +#[derive(Hash, PartialEq, Eq, Clone, Default)] +pub struct BundleId(pub Hash); + +impl Serialize for BundleId { + fn serialize(&self, ser: S) -> Result { + self.0.serialize(ser) + } +} + +impl Deserialize for BundleId { + fn deserialize(de: D) -> Result { + let hash = try!(Hash::deserialize(de)); + Ok(BundleId(hash)) + } +} + +impl BundleId { + #[inline] + fn to_string(&self) -> String { + self.0.to_string() + } +} + +impl fmt::Display for BundleId { + #[inline] + fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { + write!(fmt, "{}", 
self.to_string()) + } +} + +impl fmt::Debug for BundleId { + #[inline] + fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { + write!(fmt, "{}", self.to_string()) + } +} + + +#[derive(Eq, Debug, PartialEq, Clone, Copy)] +pub enum BundleMode { + Content, Meta +} +serde_impl!(BundleMode(u8) { + Content => 0, + Meta => 1 +}); + + +#[derive(Clone)] +pub struct BundleInfo { + pub id: BundleId, + pub mode: BundleMode, + pub compression: Option, + pub encryption: Option, + pub hash_method: HashMethod, + pub raw_size: usize, + pub encoded_size: usize, + pub chunk_count: usize, + pub chunk_info_size: usize +} +serde_impl!(BundleInfo(u64) { + id: BundleId => 0, + mode: BundleMode => 1, + compression: Option => 2, + encryption: Option => 3, + hash_method: HashMethod => 4, + raw_size: usize => 6, + encoded_size: usize => 7, + chunk_count: usize => 8, + chunk_info_size: usize => 9 +}); + +impl Default for BundleInfo { + fn default() -> Self { + BundleInfo { + id: BundleId(Hash::empty()), + compression: None, + encryption: None, + hash_method: HashMethod::Blake2, + raw_size: 0, + encoded_size: 0, + chunk_count: 0, + mode: BundleMode::Content, + chunk_info_size: 0 + } + } +} diff --git a/src/bundledb/bundle.rs b/src/bundledb/reader.rs similarity index 63% rename from src/bundledb/bundle.rs rename to src/bundledb/reader.rs index f10e764..6631b89 100644 --- a/src/bundledb/bundle.rs +++ b/src/bundledb/reader.rs @@ -3,104 +3,60 @@ use super::*; use std::path::{Path, PathBuf}; use std::fs::{self, File}; -use std::io::{Read, Seek, SeekFrom, BufReader}; +use std::io::{self, Read, Seek, SeekFrom, BufReader}; use std::cmp::max; use std::fmt::{self, Debug}; use std::sync::{Arc, Mutex}; -use serde; - -#[derive(Hash, PartialEq, Eq, Clone, Default)] -pub struct BundleId(pub Hash); - -impl Serialize for BundleId { - fn serialize(&self, ser: S) -> Result { - self.0.serialize(ser) - } -} - -impl Deserialize for BundleId { - fn deserialize(de: D) -> Result { - let hash = 
try!(Hash::deserialize(de)); - Ok(BundleId(hash)) - } -} - -impl BundleId { - #[inline] - fn to_string(&self) -> String { - self.0.to_string() - } -} - -impl fmt::Display for BundleId { - #[inline] - fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { - write!(fmt, "{}", self.to_string()) - } -} - -impl fmt::Debug for BundleId { - #[inline] - fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { - write!(fmt, "{}", self.to_string()) - } -} - - -#[derive(Eq, Debug, PartialEq, Clone, Copy)] -pub enum BundleMode { - Content, Meta -} -serde_impl!(BundleMode(u8) { - Content => 0, - Meta => 1 -}); - - -#[derive(Clone)] -pub struct BundleInfo { - pub id: BundleId, - pub mode: BundleMode, - pub compression: Option, - pub encryption: Option, - pub hash_method: HashMethod, - pub raw_size: usize, - pub encoded_size: usize, - pub chunk_count: usize, - pub chunk_info_size: usize -} -serde_impl!(BundleInfo(u64) { - id: BundleId => 0, - mode: BundleMode => 1, - compression: Option => 2, - encryption: Option => 3, - hash_method: HashMethod => 4, - raw_size: usize => 6, - encoded_size: usize => 7, - chunk_count: usize => 8, - chunk_info_size: usize => 9 -}); - -impl Default for BundleInfo { - fn default() -> Self { - BundleInfo { - id: BundleId(Hash::empty()), - compression: None, - encryption: None, - hash_method: HashMethod::Blake2, - raw_size: 0, - encoded_size: 0, - chunk_count: 0, - mode: BundleMode::Content, - chunk_info_size: 0 +quick_error!{ + #[derive(Debug)] + pub enum BundleReaderError { + Read(err: io::Error, path: PathBuf) { + cause(err) + context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf()) + description("Failed to read data from file") + display("Bundle reader error: failed to read data from file {:?}\n\tcaused by: {}", path, err) + } + WrongHeader(path: PathBuf) { + description("Wrong header") + display("Bundle reader error: wrong header on bundle {:?}", path) + } + UnsupportedVersion(path: PathBuf, version: u8) { + 
description("Wrong version") + display("Bundle reader error: unsupported version on bundle {:?}: {}", path, version) + } + NoSuchChunk(bundle: BundleId, id: usize) { + description("Bundle has no such chunk") + display("Bundle reader error: bundle {:?} has no chunk with id {}", bundle, id) + } + Decode(err: msgpack::DecodeError, path: PathBuf) { + cause(err) + context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf()) + description("Failed to decode bundle header") + display("Bundle reader error: failed to decode bundle header of {:?}\n\tcaused by: {}", path, err) + } + Decompression(err: CompressionError, path: PathBuf) { + cause(err) + context(path: &'a Path, err: CompressionError) -> (err, path.to_path_buf()) + description("Decompression failed") + display("Bundle reader error: decompression failed on bundle {:?}\n\tcaused by: {}", path, err) + } + Decryption(err: EncryptionError, path: PathBuf) { + cause(err) + context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf()) + description("Decryption failed") + display("Bundle reader error: decryption failed on bundle {:?}\n\tcaused by: {}", path, err) + } + Integrity(bundle: BundleId, reason: &'static str) { + description("Bundle has an integrity error") + display("Bundle reader error: bundle {:?} has an integrity error: {}", bundle, reason) } } } -pub struct Bundle { +pub struct BundleReader { pub info: BundleInfo, pub version: u8, pub path: PathBuf, @@ -110,9 +66,9 @@ pub struct Bundle { pub chunk_positions: Option> } -impl Bundle { +impl BundleReader { pub fn new(path: PathBuf, version: u8, content_start: usize, crypto: Arc>, info: BundleInfo) -> Self { - Bundle { + BundleReader { info: info, chunks: None, version: version, @@ -128,17 +84,17 @@ impl Bundle { self.info.id.clone() } - fn load_header>(path: P) -> Result<(BundleInfo, u8, usize), BundleError> { + fn load_header>(path: P) -> Result<(BundleInfo, u8, usize), BundleReaderError> { let path = path.as_ref(); let mut file = 
BufReader::new(try!(File::open(path).context(path))); let mut header = [0u8; 8]; try!(file.read_exact(&mut header).context(path)); if header[..HEADER_STRING.len()] != HEADER_STRING { - return Err(BundleError::WrongHeader(path.to_path_buf())) + return Err(BundleReaderError::WrongHeader(path.to_path_buf())) } let version = header[HEADER_STRING.len()]; if version != HEADER_VERSION { - return Err(BundleError::WrongVersion(path.to_path_buf(), version)) + return Err(BundleReaderError::UnsupportedVersion(path.to_path_buf(), version)) } let header: BundleInfo = try!(msgpack::decode_from_stream(&mut file).context(path)); debug!("Load bundle {}", header.id); @@ -147,17 +103,17 @@ impl Bundle { } #[inline] - pub fn load_info>(path: P) -> Result { + pub fn load_info>(path: P) -> Result { Self::load_header(path).map(|b| b.0) } #[inline] - pub fn load(path: PathBuf, crypto: Arc>) -> Result { + pub fn load(path: PathBuf, crypto: Arc>) -> Result { let (header, version, content_start) = try!(Self::load_header(&path)); - Ok(Bundle::new(path, version, content_start, crypto, header)) + Ok(BundleReader::new(path, version, content_start, crypto, header)) } - pub fn load_chunklist(&mut self) -> Result<(), BundleError> { + pub fn load_chunklist(&mut self) -> Result<(), BundleReaderError> { debug!("Load bundle chunklist {} ({:?})", self.info.id, self.info.mode); let mut file = BufReader::new(try!(File::open(&self.path).context(&self.path as &Path))); let len = self.info.chunk_info_size; @@ -182,7 +138,7 @@ impl Bundle { } #[inline] - fn load_encoded_contents(&self) -> Result, BundleError> { + fn load_encoded_contents(&self) -> Result, BundleReaderError> { debug!("Load bundle data {} ({:?})", self.info.id, self.info.mode); let mut file = BufReader::new(try!(File::open(&self.path).context(&self.path as &Path))); try!(file.seek(SeekFrom::Start(self.content_start as u64)).context(&self.path as &Path)); @@ -192,7 +148,7 @@ impl Bundle { } #[inline] - fn decode_contents(&self, mut data: Vec) -> 
Result, BundleError> { + fn decode_contents(&self, mut data: Vec) -> Result, BundleReaderError> { if let Some(ref encryption) = self.info.encryption { data = try!(self.crypto.lock().unwrap().decrypt(&encryption, &data).context(&self.path as &Path)); } @@ -203,14 +159,14 @@ impl Bundle { } #[inline] - pub fn load_contents(&self) -> Result, BundleError> { + pub fn load_contents(&self) -> Result, BundleReaderError> { self.load_encoded_contents().and_then(|data| self.decode_contents(data)) } #[inline] - pub fn get_chunk_position(&mut self, id: usize) -> Result<(usize, usize), BundleError> { + pub fn get_chunk_position(&mut self, id: usize) -> Result<(usize, usize), BundleReaderError> { if id >= self.info.chunk_count { - return Err(BundleError::NoSuchChunk(self.id(), id)) + return Err(BundleReaderError::NoSuchChunk(self.id(), id)) } if self.chunks.is_none() || self.chunk_positions.is_none() { try!(self.load_chunklist()); @@ -220,34 +176,34 @@ impl Bundle { Ok((pos, len)) } - pub fn check(&mut self, full: bool) -> Result<(), BundleError> { + pub fn check(&mut self, full: bool) -> Result<(), BundleReaderError> { if self.chunks.is_none() || self.chunk_positions.is_none() { try!(self.load_chunklist()); } if self.info.chunk_count != self.chunks.as_ref().unwrap().len() { - return Err(BundleError::Integrity(self.id(), + return Err(BundleReaderError::Integrity(self.id(), "Chunk list size does not match chunk count")) } if self.chunks.as_ref().unwrap().iter().map(|c| c.1 as usize).sum::() != self.info.raw_size { - return Err(BundleError::Integrity(self.id(), + return Err(BundleReaderError::Integrity(self.id(), "Individual chunk sizes do not add up to total size")) } if !full { let size = try!(fs::metadata(&self.path).context(&self.path as &Path)).len(); if size as usize != self.info.encoded_size + self.content_start { - return Err(BundleError::Integrity(self.id(), + return Err(BundleReaderError::Integrity(self.id(), "File size does not match size in header, truncated file")) } 
return Ok(()) } let encoded_contents = try!(self.load_encoded_contents()); if self.info.encoded_size != encoded_contents.len() { - return Err(BundleError::Integrity(self.id(), + return Err(BundleReaderError::Integrity(self.id(), "Encoded data size does not match size in header, truncated bundle")) } let contents = try!(self.decode_contents(encoded_contents)); if self.info.raw_size != contents.len() { - return Err(BundleError::Integrity(self.id(), + return Err(BundleReaderError::Integrity(self.id(), "Raw data size does not match size in header, truncated bundle")) } //TODO: verify checksum @@ -255,7 +211,7 @@ impl Bundle { } } -impl Debug for Bundle { +impl Debug for BundleReader { fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(fmt, "Bundle(\n\tid: {}\n\tpath: {:?}\n\tchunks: {}\n\tsize: {}, encoded: {}\n\tcompression: {:?}\n)", self.info.id.to_string(), self.path, self.info.chunk_count, self.info.raw_size, diff --git a/src/bundledb/writer.rs b/src/bundledb/writer.rs index 63a0dd3..8f3b07b 100644 --- a/src/bundledb/writer.rs +++ b/src/bundledb/writer.rs @@ -1,12 +1,47 @@ use ::prelude::*; use super::*; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::fs::File; -use std::io::{Write, BufWriter}; +use std::io::{self, Write, BufWriter}; use std::sync::{Arc, Mutex}; +quick_error!{ + #[derive(Debug)] + pub enum BundleWriterError { + CompressionSetup(err: CompressionError) { + cause(err) + description("Failed to setup compression") + display("Bundle writer error: failed to setup compression\n\tcaused by: {}", err) + } + Compression(err: CompressionError) { + cause(err) + description("Failed to compress data") + display("Bundle writer error: failed to compress data\n\tcaused by: {}", err) + } + Encryption(err: EncryptionError) { + from() + cause(err) + description("Encryption failed") + display("Bundle writer error: failed to encrypt data\n\tcaused by: {}", err) + } + Encode(err: msgpack::EncodeError, path: PathBuf) { + cause(err) 
+ context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf()) + description("Failed to encode bundle header to file") + display("Bundle writer error: failed to encode bundle header to file {:?}\n\tcaused by: {}", path, err) + } + Write(err: io::Error, path: PathBuf) { + cause(err) + context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf()) + description("Failed to write data to file") + display("Bundle writer error: failed to write data to file {:?}\n\tcaused by: {}", path, err) + } + } +} + + pub struct BundleWriter { mode: BundleMode, hash_method: HashMethod, @@ -21,15 +56,9 @@ pub struct BundleWriter { } impl BundleWriter { - pub fn new( - mode: BundleMode, - hash_method: HashMethod, - compression: Option, - encryption: Option, - crypto: Arc> - ) -> Result { + pub fn new(mode: BundleMode, hash_method: HashMethod, compression: Option, encryption: Option, crypto: Arc>) -> Result { let compression_stream = match compression { - Some(ref compression) => Some(try!(compression.compress_stream())), + Some(ref compression) => Some(try!(compression.compress_stream().map_err(BundleWriterError::CompressionSetup))), None => None }; Ok(BundleWriter { @@ -46,9 +75,9 @@ impl BundleWriter { }) } - pub fn add(&mut self, chunk: &[u8], hash: Hash) -> Result { + pub fn add(&mut self, chunk: &[u8], hash: Hash) -> Result { if let Some(ref mut stream) = self.compression_stream { - try!(stream.process(chunk, &mut self.data)) + try!(stream.process(chunk, &mut self.data).map_err(BundleWriterError::Compression)) } else { self.data.extend_from_slice(chunk) } @@ -58,9 +87,9 @@ impl BundleWriter { Ok(self.chunk_count-1) } - pub fn finish(mut self, db: &BundleDb) -> Result { + pub fn finish(mut self, db: &BundleDb) -> Result { if let Some(stream) = self.compression_stream { - try!(stream.finish(&mut self.data)) + try!(stream.finish(&mut self.data).map_err(BundleWriterError::Compression)) } if let Some(ref encryption) = self.encryption { self.data = 
try!(self.crypto.lock().unwrap().encrypt(&encryption, &self.data)); diff --git a/src/chunker/mod.rs b/src/chunker/mod.rs index cbd7e4f..aadcf2d 100644 --- a/src/chunker/mod.rs +++ b/src/chunker/mod.rs @@ -21,18 +21,19 @@ quick_error!{ #[derive(Debug)] pub enum ChunkerError { Read(err: io::Error) { - from(err) cause(err) - description("Failed to read") + description("Failed to read input") + display("Chunker error: failed to read input\n\tcaused by: {}", err) } Write(err: io::Error) { - from(err) cause(err) - description("Failed to write") + description("Failed to write to output") + display("Chunker error: failed to write to output\n\tcaused by: {}", err) } - Custom { - from(&'static str) + Custom(reason: &'static str) { + from() description("Custom error") + display("Chunker error: {}", reason) } } } diff --git a/src/cli/args.rs b/src/cli/args.rs index e99232e..a51942c 100644 --- a/src/cli/args.rs +++ b/src/cli/args.rs @@ -1,4 +1,5 @@ use ::prelude::*; +use super::*; use std::process::exit; @@ -259,7 +260,7 @@ pub fn parse() -> Arguments { (about: "changes the configuration") (@arg REPO: +required "path of the repository") (@arg bundle_size: --bundlesize +takes_value "maximal bundle size in MiB [default: 25]") - (@arg chunker: --chunker +takes_value "chunker algorithm [default: fastcdc/8]") + (@arg chunker: --chunker +takes_value "chunker algorithm [default: fastcdc/16]") (@arg compression: --compression -c +takes_value "compression to use [default: brotli/3]") (@arg encryption: --encryption -e +takes_value "the public key to use for encryption") (@arg hash: --hash +takes_value "hash method to use [default: blake2]") @@ -278,7 +279,7 @@ pub fn parse() -> Arguments { (@subcommand algotest => (about: "test a specific algorithm combination") (@arg bundle_size: --bundlesize +takes_value "maximal bundle size in MiB [default: 25]") - (@arg chunker: --chunker +takes_value "chunker algorithm [default: fastcdc/8]") + (@arg chunker: --chunker +takes_value "chunker algorithm 
[default: fastcdc/16]") (@arg compression: --compression -c +takes_value "compression to use [default: brotli/3]") (@arg encrypt: --encrypt -e "enable encryption") (@arg hash: --hash +takes_value "hash method to use [default: blake2]") @@ -292,11 +293,11 @@ pub fn parse() -> Arguments { exit(1); } return Arguments::Init { - bundle_size: (parse_num(args.value_of("bundle_size").unwrap_or("25"), "Bundle size") * 1024 * 1024) as usize, - chunker: parse_chunker(args.value_of("chunker").unwrap_or("fastcdc/8")), - compression: parse_compression(args.value_of("compression").unwrap_or("brotli/3")), + bundle_size: (parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size") * 1024 * 1024) as usize, + chunker: parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER)), + compression: parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION)), encryption: args.is_present("encryption"), - hash: parse_hash(args.value_of("hash").unwrap_or("blake2")), + hash: parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH)), repo_path: repository.to_string(), } } @@ -368,7 +369,7 @@ pub fn parse() -> Arguments { return Arguments::Vacuum { repo_path: repository.to_string(), force: args.is_present("force"), - ratio: parse_float(args.value_of("ratio").unwrap_or("0.5"), "ratio") as f32 + ratio: parse_float(args.value_of("ratio").unwrap_or(&DEFAULT_VACUUM_RATIO.to_string()), "ratio") as f32 } } if let Some(args) = args.subcommand_matches("check") { @@ -469,11 +470,11 @@ pub fn parse() -> Arguments { } if let Some(args) = args.subcommand_matches("algotest") { return Arguments::AlgoTest { - bundle_size: (parse_num(args.value_of("bundle_size").unwrap_or("25"), "Bundle size") * 1024 * 1024) as usize, - chunker: parse_chunker(args.value_of("chunker").unwrap_or("fastcdc/8")), - compression: parse_compression(args.value_of("compression").unwrap_or("brotli/3")), + bundle_size: 
(parse_num(args.value_of("bundle_size").unwrap_or(&DEFAULT_BUNDLE_SIZE.to_string()), "Bundle size") * 1024 * 1024) as usize, + chunker: parse_chunker(args.value_of("chunker").unwrap_or(DEFAULT_CHUNKER)), + compression: parse_compression(args.value_of("compression").unwrap_or(DEFAULT_COMPRESSION)), encrypt: args.is_present("encrypt"), - hash: parse_hash(args.value_of("hash").unwrap_or("blake2")), + hash: parse_hash(args.value_of("hash").unwrap_or(DEFAULT_HASH)), file: args.value_of("FILE").unwrap().to_string(), } } diff --git a/src/cli/mod.rs b/src/cli/mod.rs index 44dc9ce..26efd52 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -10,6 +10,13 @@ use std::process::exit; use self::args::Arguments; +pub const DEFAULT_CHUNKER: &'static str = "fastcdc/16"; +pub const DEFAULT_HASH: &'static str = "blake2"; +pub const DEFAULT_COMPRESSION: &'static str = "brotli/3"; +pub const DEFAULT_BUNDLE_SIZE: usize = 25; +pub const DEFAULT_VACUUM_RATIO: f32 = 0.5; + + fn open_repository(path: &str) -> Repository { match Repository::open(path) { Ok(repo) => repo, @@ -36,7 +43,18 @@ fn find_reference_backup(repo: &Repository, path: &str) -> Option { Ok(hostname) => hostname, Err(_) => return None }; - for (_name, backup) in repo.get_backups().unwrap().0 { + let backup_map = match repo.get_backups() { + Ok(backup_map) => backup_map, + Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => { + warn!("Some backups could not be read, ignoring them"); + backup_map + }, + Err(err) => { + error!("Failed to load backup files: {}", err); + exit(3) + } + }; + for (_name, backup) in backup_map { if backup.host == hostname && backup.path == path { matching.push(backup); } @@ -155,7 +173,18 @@ pub fn run() { } } } else { - for (name, backup) in repo.get_backups().unwrap().0 { + let backup_map = match repo.get_backups() { + Ok(backup_map) => backup_map, + Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => { + 
warn!("Some backups could not be read, ignoring them"); + backup_map + }, + Err(err) => { + error!("Failed to load backup files: {}", err); + exit(3) + } + }; + for (name, backup) in backup_map { println!("{:25} {:>32} {:5} files, {:4} dirs, {:>10}", name, Local.timestamp(backup.date, 0).to_rfc2822(), backup.file_count, backup.dir_count, to_file_size(backup.total_data_size)); diff --git a/src/index.rs b/src/index.rs index d903ded..4a5ce9e 100644 --- a/src/index.rs +++ b/src/index.rs @@ -9,12 +9,49 @@ use std::os::unix::io::AsRawFd; use mmap::{MemoryMap, MapOption, MapError}; + const MAGIC: [u8; 7] = *b"zvault\x02"; const VERSION: u8 = 1; pub const MAX_USAGE: f64 = 0.9; pub const MIN_USAGE: f64 = 0.25; pub const INITIAL_SIZE: usize = 1024; + +quick_error!{ + #[derive(Debug)] + pub enum IndexError { + Io(err: io::Error) { + from() + cause(err) + description("Failed to open index file") + display("Index error: failed to open the index file\n\tcaused by: {}", err) + } + Mmap(err: MapError) { + from() + cause(err) + description("Failed to memory-map the index file") + display("Index error: failed to memory-map the index file\n\tcaused by: {}", err) + } + WrongMagic { + description("Wrong header") + display("Index error: file has the wrong magic header") + } + UnsupportedVersion(version: u8) { + description("Unsupported version") + display("Index error: index file has unsupported version: {}", version) + } + WrongPosition(key: Hash, should: usize, is: LocateResult) { + description("Key at wrong position") + display("Index error: key {} has wrong position, expected at: {}, but is at: {:?}", key, should, is) + } + WrongEntryCount(header: usize, actual: usize) { + description("Wrong entry count") + display("Index error: index has wrong entry count, expected {}, but is {}", header, actual) + } + } +} + + #[repr(packed)] pub struct Header { magic: [u8; 7], @@ -65,39 +102,6 @@ pub struct Index { data: &'static mut [Entry] } -quick_error!{ - #[derive(Debug)] - pub enum 
IndexError { - Io(err: io::Error) { - from() - cause(err) - description("Failed to open index file") - } - Mmap(err: MapError) { - from() - cause(err) - description("Failed to write bundle map") - } - NoHeader { - description("Index file does not contain a header") - } - WrongHeader { - description("Wrong header") - } - WrongVersion(version: u8) { - description("Wrong version") - display("Wrong version: {}", version) - } - WrongPosition(key: Hash, should: usize, is: LocateResult) { - description("Key at wrong position") - display("Key {} has wrong position, expected at: {}, but is at: {:?}", key, should, is) - } - WrongEntryCount(header: usize, actual: usize) { - description("Wrong entry count") - display("Wrong entry count, expected {}, but is {}", header, actual) - } - } -} #[derive(Debug)] pub enum LocateResult { @@ -114,7 +118,7 @@ impl Index { } let mmap = try!(Index::map_fd(&fd)); if mmap.len() < mem::size_of::
() { - return Err(IndexError::NoHeader); + return Err(IndexError::WrongMagic); } let data = Index::mmap_as_slice(&mmap, INITIAL_SIZE as usize); let mut index = Index{capacity: 0, max_entries: 0, min_entries: 0, entries: 0, fd: fd, mmap: mmap, data: data}; @@ -130,10 +134,10 @@ impl Index { header.capacity = INITIAL_SIZE as u64; } else { if header.magic != MAGIC { - return Err(IndexError::WrongHeader); + return Err(IndexError::WrongMagic); } if header.version != VERSION { - return Err(IndexError::WrongVersion(header.version)); + return Err(IndexError::UnsupportedVersion(header.version)); } } capacity = header.capacity; diff --git a/src/main.rs b/src/main.rs index f9c6d37..0b93e14 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,4 @@ +#![recursion_limit="128"] extern crate serde; extern crate rmp_serde; #[macro_use] extern crate serde_utils; diff --git a/src/prelude.rs b/src/prelude.rs index 259b1a7..bfe0fb6 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -1,7 +1,7 @@ pub use ::util::*; -pub use ::bundledb::{Bundle, BundleMode, BundleWriter, BundleInfo, BundleId, BundleError, BundleDb}; +pub use ::bundledb::{BundleReader, BundleMode, BundleWriter, BundleInfo, BundleId, BundleDbError, BundleDb, BundleWriterError}; pub use ::chunker::{ChunkerType, Chunker, ChunkerStatus, IChunker, ChunkerError}; -pub use ::repository::{Repository, Backup, Config, RepositoryError, RepositoryInfo, Inode, FileType, RepositoryIntegrityError}; +pub use ::repository::{Repository, Backup, Config, RepositoryError, RepositoryInfo, Inode, FileType, RepositoryIntegrityError, BackupFileError}; pub use ::index::{Index, Location, IndexError}; pub use serde::{Serialize, Deserialize}; diff --git a/src/repository/backup.rs b/src/repository/backup.rs index f59a875..e93ae94 100644 --- a/src/repository/backup.rs +++ b/src/repository/backup.rs @@ -14,49 +14,56 @@ static HEADER_VERSION: u8 = 1; quick_error!{ #[derive(Debug)] - pub enum BackupError { - Io(err: io::Error, path: PathBuf) { + pub enum 
BackupFileError { + Read(err: io::Error, path: PathBuf) { + cause(err) + description("Failed to read backup") + display("Backup file error: failed to read backup file {:?}\n\tcaused by: {}", path, err) + } + Write(err: io::Error, path: PathBuf) { cause(err) - context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf()) description("Failed to read/write backup") - display("Failed to read/write backup {:?}: {}", path, err) + display("Backup file error: failed to write backup file {:?}\n\tcaused by: {}", path, err) } Decode(err: msgpack::DecodeError, path: PathBuf) { cause(err) context(path: &'a Path, err: msgpack::DecodeError) -> (err, path.to_path_buf()) description("Failed to decode backup") - display("Failed to decode backup of {:?}: {}", path, err) + display("Backup file error: failed to decode backup of {:?}\n\tcaused by: {}", path, err) } Encode(err: msgpack::EncodeError, path: PathBuf) { cause(err) context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf()) description("Failed to encode backup") - display("Failed to encode backup of {:?}: {}", path, err) + display("Backup file error: failed to encode backup of {:?}\n\tcaused by: {}", path, err) } WrongHeader(path: PathBuf) { description("Wrong header") - display("Wrong header on backup {:?}", path) + display("Backup file error: wrong header on backup {:?}", path) } - WrongVersion(path: PathBuf, version: u8) { + UnsupportedVersion(path: PathBuf, version: u8) { description("Wrong version") - display("Wrong version on backup {:?}: {}", path, version) + display("Backup file error: unsupported version on backup {:?}: {}", path, version) } Decryption(err: EncryptionError, path: PathBuf) { cause(err) context(path: &'a Path, err: EncryptionError) -> (err, path.to_path_buf()) description("Decryption failed") - display("Decryption failed on backup {:?}: {}", path, err) + display("Backup file error: decryption failed on backup {:?}\n\tcaused by: {}", path, err) } Encryption(err: 
EncryptionError) { from() cause(err) description("Encryption failed") - display("Encryption failed: {}", err) + display("Backup file error: encryption failed\n\tcaused by: {}", err) + } + PartialBackupsList(partial: HashMap, failed: Vec) { + description("Some backups could not be loaded") + display("Backup file error: some backups could not be loaded: {:?}", failed) } } } - #[derive(Default, Debug, Clone)] struct BackupHeader { pub encryption: Option @@ -101,73 +108,83 @@ serde_impl!(Backup(u8) { }); impl Backup { - pub fn read_from>(crypto: &Crypto, path: P) -> Result { + pub fn read_from>(crypto: &Crypto, path: P) -> Result { let path = path.as_ref(); - let mut file = BufReader::new(try!(File::open(path).context(path))); + let mut file = BufReader::new(try!(File::open(path).map_err(|err| BackupFileError::Read(err, path.to_path_buf())))); let mut header = [0u8; 8]; - try!(file.read_exact(&mut header).context(&path as &Path)); + try!(file.read_exact(&mut header).map_err(|err| BackupFileError::Read(err, path.to_path_buf()))); if header[..HEADER_STRING.len()] != HEADER_STRING { - return Err(BackupError::WrongHeader(path.to_path_buf())) + return Err(BackupFileError::WrongHeader(path.to_path_buf())) } let version = header[HEADER_STRING.len()]; if version != HEADER_VERSION { - return Err(BackupError::WrongVersion(path.to_path_buf(), version)) + return Err(BackupFileError::UnsupportedVersion(path.to_path_buf(), version)) } let header: BackupHeader = try!(msgpack::decode_from_stream(&mut file).context(path)); let mut data = Vec::new(); - try!(file.read_to_end(&mut data).context(path)); + try!(file.read_to_end(&mut data).map_err(|err| BackupFileError::Read(err, path.to_path_buf()))); if let Some(ref encryption) = header.encryption { data = try!(crypto.decrypt(encryption, &data)); } Ok(try!(msgpack::decode(&data).context(path))) } - pub fn save_to>(&self, crypto: &Crypto, encryption: Option, path: P) -> Result<(), BackupError> { + pub fn save_to>(&self, crypto: &Crypto, 
encryption: Option, path: P) -> Result<(), BackupFileError> { let path = path.as_ref(); let mut data = try!(msgpack::encode(self).context(path)); if let Some(ref encryption) = encryption { data = try!(crypto.encrypt(encryption, &data)); } - let mut file = BufWriter::new(try!(File::create(path).context(path))); - try!(file.write_all(&HEADER_STRING).context(path)); - try!(file.write_all(&[HEADER_VERSION]).context(path)); + let mut file = BufWriter::new(try!(File::create(path).map_err(|err| BackupFileError::Write(err, path.to_path_buf())))); + try!(file.write_all(&HEADER_STRING).map_err(|err| BackupFileError::Write(err, path.to_path_buf()))); + try!(file.write_all(&[HEADER_VERSION]).map_err(|err| BackupFileError::Write(err, path.to_path_buf()))); let header = BackupHeader { encryption: encryption }; try!(msgpack::encode_to_stream(&header, &mut file).context(path)); - try!(file.write_all(&data).context(path)); + try!(file.write_all(&data).map_err(|err| BackupFileError::Write(err, path.to_path_buf()))); Ok(()) } -} - - -impl Repository { - pub fn get_backups(&self) -> Result<(HashMap, bool), RepositoryError> { + pub fn get_all_from>(crypto: &Crypto, path: P) -> Result, BackupFileError> { let mut backups = HashMap::new(); - let mut paths = Vec::new(); - let base_path = self.path.join("backups"); - paths.push(base_path.clone()); - let mut some_failed = false; + let base_path = path.as_ref(); + let mut paths = vec![path.as_ref().to_path_buf()]; + let mut failed_paths = vec![]; while let Some(path) = paths.pop() { - for entry in try!(fs::read_dir(path)) { - let entry = try!(entry); + for entry in try!(fs::read_dir(&path).map_err(|e| BackupFileError::Read(e, path.clone()))) { + let entry = try!(entry.map_err(|e| BackupFileError::Read(e, path.clone()))); let path = entry.path(); if path.is_dir() { paths.push(path); } else { let relpath = path.strip_prefix(&base_path).unwrap(); let name = relpath.to_string_lossy().to_string(); - if let Ok(backup) = self.get_backup(&name) { + 
if let Ok(backup) = Backup::read_from(crypto, &path) { backups.insert(name, backup); } else { - some_failed = true; + failed_paths.push(path.clone()); } } } } - if some_failed { - warn!("Some backups could not be read"); + if failed_paths.is_empty() { + Ok(backups) + } else { + Err(BackupFileError::PartialBackupsList(backups, failed_paths)) } - Ok((backups, some_failed)) + } +} + + +quick_error!{ + #[derive(Debug)] + pub enum BackupError { + } +} + + +impl Repository { + pub fn get_backups(&self) -> Result, RepositoryError> { + Ok(try!(Backup::get_all_from(&self.crypto.lock().unwrap(), self.path.join("backups")))) } pub fn get_backup(&self, name: &str) -> Result { @@ -195,10 +212,14 @@ impl Repository { pub fn prune_backups(&self, prefix: &str, daily: Option, weekly: Option, monthly: Option, yearly: Option, force: bool) -> Result<(), RepositoryError> { let mut backups = Vec::new(); - let (backup_map, some_failed) = try!(self.get_backups()); - if some_failed { - info!("Ignoring backups that can not be read"); - } + let backup_map = match self.get_backups() { + Ok(backup_map) => backup_map, + Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => { + warn!("Some backups could not be read, ignoring them"); + backup_map + }, + Err(err) => return Err(err) + }; for (name, backup) in backup_map { if name.starts_with(prefix) { let date = Local.timestamp(backup.date, 0); diff --git a/src/repository/error.rs b/src/repository/error.rs index 5656de2..afe0ad0 100644 --- a/src/repository/error.rs +++ b/src/repository/error.rs @@ -3,90 +3,85 @@ use ::prelude::*; use std::io; use std::path::PathBuf; -use super::backup::BackupError; +use super::backup::BackupFileError; use super::bundle_map::BundleMapError; use super::config::ConfigError; +use super::metadata::InodeError; quick_error!{ #[derive(Debug)] pub enum RepositoryError { + Index(err: IndexError) { + from() + cause(err) + description("Index error") + display("Repository error: index 
error\n\tcaused by: {}", err) + } + BundleDb(err: BundleDbError) { + from() + cause(err) + description("Bundle error") + display("Repository error: bundle db error\n\tcaused by: {}", err) + } + BundleWriter(err: BundleWriterError) { + from() + cause(err) + description("Bundle write error") + display("Repository error: failed to write to new bundle\n\tcaused by: {}", err) + } + BackupFile(err: BackupFileError) { + from() + cause(err) + description("Backup file error") + display("Repository error: backup file error\n\tcaused by: {}", err) + } + Chunker(err: ChunkerError) { + from() + cause(err) + description("Chunker error") + display("Repository error: failed to chunk data\n\tcaused by: {}", err) + } + Config(err: ConfigError) { + from() + cause(err) + description("Configuration error") + display("Repository error: configuration error\n\tcaused by: {}", err) + } + Inode(err: InodeError) { + from() + cause(err) + description("Inode error") + display("Repository error: inode error\n\tcaused by: {}", err) + } + LoadKeys(err: EncryptionError) { + from() + cause(err) + description("Failed to load keys") + display("Repository error: failed to load keys\n\tcaused by: {}", err) + } + BundleMap(err: BundleMapError) { + from() + cause(err) + description("Bundle map error") + display("Repository error: bundle map error\n\tcaused by: {}", err) + } + Integrity(err: RepositoryIntegrityError) { + from() + cause(err) + description("Integrity error") + display("Repository error: integrity error\n\tcaused by: {}", err) + } + Io(err: io::Error) { from() cause(err) description("IO error") display("IO error: {}", err) } - Config(err: ConfigError) { - from() - cause(err) - description("Configuration error") - display("Configuration error: {}", err) - } - BundleMap(err: BundleMapError) { - from() - cause(err) - description("Bundle map error") - display("Bundle map error: {}", err) - } - Index(err: IndexError) { - from() - cause(err) - description("Index error") - display("Index error: 
{}", err) - } - Bundle(err: BundleError) { - from() - cause(err) - description("Bundle error") - display("Bundle error: {}", err) - } - Backup(err: BackupError) { - from() - cause(err) - description("Backup error") - display("Backup error: {}", err) - } - Chunker(err: ChunkerError) { - from() - cause(err) - description("Chunker error") - display("Chunker error: {}", err) - } - Decode(err: msgpack::DecodeError) { - from() - cause(err) - description("Failed to decode metadata") - display("Failed to decode metadata: {}", err) - } - Encode(err: msgpack::EncodeError) { - from() - cause(err) - description("Failed to encode metadata") - display("Failed to encode metadata: {}", err) - } - Integrity(err: RepositoryIntegrityError) { - from() - cause(err) - description("Integrity error") - display("Integrity error: {}", err) - } - Encryption(err: EncryptionError) { - from() - cause(err) - description("Failed to load keys") - display("Failed to load keys: {}", err) - } - InvalidFileType(path: PathBuf) { - description("Invalid file type") - display("{:?} has an invalid file type", path) - } NoSuchFileInBackup(backup: Backup, path: PathBuf) { description("No such file in backup") display("The backup does not contain the file {:?}", path) } - UnsafeVacuum { - description("Not all backups can be read, refusing to run vacuum") - } } } diff --git a/src/repository/integrity.rs b/src/repository/integrity.rs index d96cb77..4a80e89 100644 --- a/src/repository/integrity.rs +++ b/src/repository/integrity.rs @@ -27,9 +27,6 @@ quick_error!{ InvalidNextBundleId { description("Invalid next bundle id") } - SymlinkWithoutTarget { - description("Symlink without target") - } } } @@ -88,7 +85,15 @@ impl Repository { fn check_backups(&mut self) -> Result<(), RepositoryError> { let mut checked = Bitmap::new(self.index.capacity()); - for (_name, backup) in try!(self.get_backups()).0 { + let backup_map = match self.get_backups() { + Ok(backup_map) => backup_map, + 
Err(RepositoryError::BackupFile(BackupFileError::PartialBackupsList(backup_map, _failed))) => { + warn!("Some backups could not be read, ignoring them"); + backup_map + }, + Err(err) => return Err(err) + }; + for (_name, backup) in backup_map { let mut todo = VecDeque::new(); todo.push_back(backup.root); while let Some(chunks) = todo.pop_front() { diff --git a/src/repository/metadata.rs b/src/repository/metadata.rs index 971f850..f8b5efc 100644 --- a/src/repository/metadata.rs +++ b/src/repository/metadata.rs @@ -1,11 +1,58 @@ use ::prelude::*; use std::collections::HashMap; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::fs::{self, Metadata, File, Permissions}; use std::os::linux::fs::MetadataExt; use std::os::unix::fs::{PermissionsExt, symlink}; -use std::io::{Read, Write}; +use std::io::{self, Read, Write}; + + +quick_error!{ + #[derive(Debug)] + pub enum InodeError { + UnsupportedFiletype(path: PathBuf) { + description("Unsupported file type") + display("Inode error: file {:?} has an unsupported type", path) + } + ReadMetadata(err: io::Error, path: PathBuf) { + cause(err) + description("Failed to obtain metadata for file") + display("Inode error: failed to obtain metadata for file {:?}\n\tcaused by: {}", path, err) + } + ReadLinkTarget(err: io::Error, path: PathBuf) { + cause(err) + description("Failed to obtain link target for file") + display("Inode error: failed to obtain link target for file {:?}\n\tcaused by: {}", path, err) + } + Create(err: io::Error, path: PathBuf) { + cause(err) + description("Failed to create entity") + display("Inode error: failed to create entity {:?}\n\tcaused by: {}", path, err) + } + SetPermissions(err: io::Error, path: PathBuf, mode: u32) { + cause(err) + description("Failed to set permissions") + display("Inode error: failed to set permissions to {:3o} on {:?}\n\tcaused by: {}", mode, path, err) + } + Integrity(reason: &'static str) { + description("Integrity error") + display("Inode error: inode integrity 
error: {}", reason) + } + Decode(err: msgpack::DecodeError) { + from() + cause(err) + description("Failed to decode metadata") + display("Inode error: failed to decode metadata\n\tcaused by: {}", err) + } + Encode(err: msgpack::EncodeError) { + from() + cause(err) + description("Failed to encode metadata") + display("Inode error: failed to encode metadata\n\tcaused by: {}", err) + } + } +} #[derive(Debug, Eq, PartialEq)] @@ -85,7 +132,7 @@ serde_impl!(Inode(u8) { impl Inode { #[inline] - fn get_extended_attrs_from(&mut self, meta: &Metadata) -> Result<(), RepositoryError> { + fn get_extended_attrs_from(&mut self, meta: &Metadata) -> Result<(), InodeError> { self.mode = meta.st_mode(); self.user = meta.st_uid(); self.group = meta.st_gid(); @@ -95,11 +142,12 @@ impl Inode { Ok(()) } - pub fn get_from>(path: P) -> Result { - let name = try!(path.as_ref().file_name() - .ok_or_else(|| RepositoryError::InvalidFileType(path.as_ref().to_owned()))) + pub fn get_from>(path: P) -> Result { + let path = path.as_ref(); + let name = try!(path.file_name() + .ok_or_else(|| InodeError::UnsupportedFiletype(path.to_owned()))) .to_string_lossy().to_string(); - let meta = try!(fs::symlink_metadata(path.as_ref())); + let meta = try!(fs::symlink_metadata(path).map_err(|e| InodeError::ReadMetadata(e, path.to_owned()))); let mut inode = Inode::default(); inode.name = name; inode.size = meta.len(); @@ -110,35 +158,35 @@ impl Inode { } else if meta.file_type().is_symlink() { FileType::Symlink } else { - return Err(RepositoryError::InvalidFileType(path.as_ref().to_owned())); + return Err(InodeError::UnsupportedFiletype(path.to_owned())); }; if meta.file_type().is_symlink() { - inode.symlink_target = Some(try!(fs::read_link(path)).to_string_lossy().to_string()); + inode.symlink_target = Some(try!(fs::read_link(path).map_err(|e| InodeError::ReadLinkTarget(e, path.to_owned()))).to_string_lossy().to_string()); } try!(inode.get_extended_attrs_from(&meta)); Ok(inode) } #[allow(dead_code)] - pub fn 
create_at>(&self, path: P) -> Result, RepositoryError> { + pub fn create_at>(&self, path: P) -> Result, InodeError> { let full_path = path.as_ref().join(&self.name); let mut file = None; match self.file_type { FileType::File => { - file = Some(try!(File::create(&full_path))); + file = Some(try!(File::create(&full_path).map_err(|e| InodeError::Create(e, full_path.clone())))); }, FileType::Directory => { - try!(fs::create_dir(&full_path)); + try!(fs::create_dir(&full_path).map_err(|e| InodeError::Create(e, full_path.clone()))); }, FileType::Symlink => { if let Some(ref src) = self.symlink_target { - try!(symlink(src, &full_path)); + try!(symlink(src, &full_path).map_err(|e| InodeError::Create(e, full_path.clone()))); } else { - return Err(RepositoryIntegrityError::SymlinkWithoutTarget.into()) + return Err(InodeError::Integrity("Symlink without target")) } } } - try!(fs::set_permissions(&full_path, Permissions::from_mode(self.mode))); + try!(fs::set_permissions(&full_path, Permissions::from_mode(self.mode)).map_err(|e| InodeError::SetPermissions(e, full_path.clone(), self.mode))); //FIXME: set times and gid/uid // https://crates.io/crates/filetime Ok(file) @@ -149,6 +197,16 @@ impl Inode { && self.create_time == other.create_time && self.file_type == other.file_type } + + #[inline] + pub fn encode(&self) -> Result, InodeError> { + Ok(try!(msgpack::encode(&self))) + } + + #[inline] + pub fn decode(data: &[u8]) -> Result { + Ok(try!(msgpack::decode(&data))) + } } @@ -184,12 +242,12 @@ impl Repository { #[inline] pub fn put_inode(&mut self, inode: &Inode) -> Result { - self.put_data(BundleMode::Meta, &try!(msgpack::encode(inode))) + self.put_data(BundleMode::Meta, &try!(inode.encode())) } #[inline] pub fn get_inode(&mut self, chunks: &[Chunk]) -> Result { - Ok(try!(msgpack::decode(&try!(self.get_data(chunks))))) + Ok(try!(Inode::decode(&try!(self.get_data(chunks))))) } #[inline] diff --git a/src/repository/mod.rs b/src/repository/mod.rs index 2d94a0d..e27a5ba 100644 --- 
a/src/repository/mod.rs +++ b/src/repository/mod.rs @@ -19,7 +19,7 @@ use std::sync::{Arc, Mutex}; pub use self::error::RepositoryError; pub use self::config::Config; pub use self::metadata::{Inode, FileType}; -pub use self::backup::Backup; +pub use self::backup::{Backup, BackupFileError}; pub use self::integrity::RepositoryIntegrityError; pub use self::info::RepositoryInfo; use self::bundle_map::BundleMap; diff --git a/src/repository/vacuum.rs b/src/repository/vacuum.rs index 8aa39ac..508cd22 100644 --- a/src/repository/vacuum.rs +++ b/src/repository/vacuum.rs @@ -35,7 +35,7 @@ impl Repository { Ok(new) } - pub fn analyze_usage(&mut self) -> Result<(HashMap, bool), RepositoryError> { + pub fn analyze_usage(&mut self) -> Result, RepositoryError> { let mut usage = HashMap::new(); for (id, bundle) in self.bundle_map.bundles() { usage.insert(id, BundleUsage { @@ -46,7 +46,7 @@ impl Repository { used_size: 0 }); } - let (backups, some_failed) = try!(self.get_backups()); + let backups = try!(self.get_backups()); for (_name, backup) in backups { let mut todo = VecDeque::new(); todo.push_back(backup.root); @@ -77,7 +77,7 @@ impl Repository { } } } - Ok((usage, some_failed)) + Ok(usage) } fn delete_bundle(&mut self, id: u32) -> Result<(), RepositoryError> { @@ -92,10 +92,7 @@ impl Repository { pub fn vacuum(&mut self, ratio: f32, force: bool) -> Result<(), RepositoryError> { try!(self.flush()); info!("Analyzing chunk usage"); - let (usage, some_failed) = try!(self.analyze_usage()); - if some_failed { - return Err(RepositoryError::UnsafeVacuum); - } + let usage = try!(self.analyze_usage()); let total = usage.values().map(|b| b.total_size).sum::(); let used = usage.values().map(|b| b.used_size).sum::(); info!("Usage: {} of {}, {:.1}%", to_file_size(used as u64), to_file_size(total as u64), used as f32/total as f32*100.0);