//! Bundle file handling for zvault: reading, writing and verifying chunk
//! bundles stored on disk (see the format description below).
use std::path::{Path, PathBuf};
use std::collections::HashMap;
use std::fs::{self, File};
2017-03-18 14:41:59 +00:00
use std::io::{self, Read, Write, Seek, SeekFrom, BufWriter, BufReader};
2017-03-10 11:43:32 +00:00
use std::cmp::max;
2017-03-18 14:41:59 +00:00
use std::fmt::{self, Debug};
2017-03-10 11:43:32 +00:00
use std::sync::{Arc, Mutex};
use serde::{self, Serialize, Deserialize};
use util::*;
// Magic signature identifying a bundle file.
static HEADER_STRING: [u8; 7] = *b"zbundle";
// On-disk format version; readers reject any other value.
static HEADER_VERSION: u8 = 1;
2017-03-17 11:58:22 +00:00
/*
Bundle format
- Magic header + version
- Encoded header structure (contains size of next structure)
- Encoded contents structure (with chunk sizes and hashes)
- Chunk data
*/
2017-03-10 11:43:32 +00:00
2017-03-16 08:42:30 +00:00
// Errors raised by the bundle subsystem. Most variants carry the affected
// path or bundle id so callers can report which bundle failed.
quick_error!{
    #[derive(Debug)]
    pub enum BundleError {
        // Listing the bundle directory failed.
        List(err: io::Error) {
            cause(err)
            description("Failed to list bundles")
        }
        // Reading a bundle file failed.
        Read(err: io::Error, path: PathBuf) {
            cause(err)
            description("Failed to read bundle")
        }
        // The msgpack header structure could not be decoded.
        Decode(err: msgpack::DecodeError, path: PathBuf) {
            cause(err)
            description("Failed to decode bundle header")
        }
        // Writing a bundle file failed.
        Write(err: io::Error, path: PathBuf) {
            cause(err)
            description("Failed to write bundle")
        }
        // The msgpack header structure could not be encoded.
        Encode(err: msgpack::EncodeError, path: PathBuf) {
            cause(err)
            description("Failed to encode bundle header")
        }
        // The file does not start with the expected magic signature.
        WrongHeader(path: PathBuf) {
            description("Wrong header")
            display("Wrong header on bundle {:?}", path)
        }
        // The file uses an unsupported format version.
        WrongVersion(path: PathBuf, version: u8) {
            description("Wrong version")
            display("Wrong version on bundle {:?}: {}", path, version)
        }
        // A consistency check failed (see Bundle::check).
        Integrity(bundle: BundleId, reason: &'static str) {
            description("Bundle has an integrity error")
            display("Bundle {:?} has an integrity error: {}", bundle, reason)
        }
        // The requested bundle id is not known to the database.
        NoSuchBundle(bundle: BundleId) {
            description("No such bundle")
            display("No such bundle: {:?}", bundle)
        }
        // The chunk index is out of range for the bundle.
        NoSuchChunk(bundle: BundleId, id: usize) {
            description("Bundle has no such chunk")
            display("Bundle {:?} has no chunk with that id: {}", bundle, id)
        }
        // Compression/decompression failed (auto-converted via from()).
        Compression(err: CompressionError) {
            from()
            cause(err)
        }
        // Encryption/decryption failed (auto-converted via from()).
        Encryption(err: EncryptionError) {
            from()
            cause(err)
        }
        // Deleting a bundle file failed.
        Remove(err: io::Error, bundle: BundleId) {
            cause(err)
            description("Failed to remove bundle")
            display("Failed to remove bundle {}", bundle)
        }
    }
}
2017-03-10 11:43:32 +00:00
// Identifier of a bundle: the hash of its unencrypted, encoded chunk table
// (computed in BundleWriter::finish).
#[derive(Hash, PartialEq, Eq, Clone, Default)]
pub struct BundleId(pub Hash);
2017-03-10 11:43:32 +00:00
impl Serialize for BundleId {
    // Serialize transparently as the inner hash.
    fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        self.0.serialize(ser)
    }
}
impl Deserialize for BundleId {
fn deserialize<D: serde::Deserializer>(de: D) -> Result<Self, D::Error> {
2017-03-18 14:41:59 +00:00
let hash = try!(Hash::deserialize(de));
Ok(BundleId(hash))
2017-03-10 11:43:32 +00:00
}
}
impl BundleId {
    // String form of the id, delegating to the inner hash's to_string.
    #[inline]
    fn to_string(&self) -> String {
        self.0.to_string()
    }
}
impl fmt::Display for BundleId {
    // Display the id using its string representation.
    #[inline]
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        fmt.write_str(&self.to_string())
    }
}
impl fmt::Debug for BundleId {
    // Debug output is identical to Display output.
    #[inline]
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        fmt::Display::fmt(self, fmt)
    }
}
2017-03-17 06:15:19 +00:00
// Whether a bundle stores file contents or backup metadata.
#[derive(Eq, Debug, PartialEq, Clone, Copy)]
pub enum BundleMode {
    Content, Meta
}
// Stable on-disk encoding: Content = 0, Meta = 1.
serde_impl!(BundleMode(u8) {
    Content => 0,
    Meta => 1
});
2017-03-10 11:43:32 +00:00
// Header structure describing a bundle; stored msgpack-encoded right after
// the magic header in every bundle file.
#[derive(Clone)]
pub struct BundleInfo {
    pub id: BundleId,
    pub mode: BundleMode,
    // Compression applied to the chunk data (None = uncompressed).
    pub compression: Option<Compression>,
    // Encryption applied to chunk data and chunk table (None = plaintext).
    pub encryption: Option<Encryption>,
    // Hash method recorded for this bundle (used to derive the bundle id).
    pub hash_method: HashMethod,
    // Total size of the raw (decoded) chunk data in bytes.
    pub raw_size: usize,
    // Size of the encoded (compressed/encrypted) chunk data in bytes.
    pub encoded_size: usize,
    pub chunk_count: usize,
    // Size in bytes of the encoded chunk table that follows this header.
    pub chunk_info_size: usize
}
2017-03-15 07:27:27 +00:00
// Numeric field ids for the msgpack encoding; these must stay stable across
// versions. NOTE(review): id 5 is unused — presumably removed in an earlier
// format change; the gap is kept for compatibility (confirm before reusing).
serde_impl!(BundleInfo(u64) {
    id: BundleId => 0,
    mode: BundleMode => 1,
    compression: Option<Compression> => 2,
    encryption: Option<Encryption> => 3,
    hash_method: HashMethod => 4,
    raw_size: usize => 6,
    encoded_size: usize => 7,
    chunk_count: usize => 8,
    chunk_info_size: usize => 9
});
2017-03-15 07:27:27 +00:00
impl Default for BundleInfo {
2017-03-10 11:43:32 +00:00
fn default() -> Self {
2017-03-15 07:27:27 +00:00
BundleInfo {
2017-03-18 14:41:59 +00:00
id: BundleId(Hash::empty()),
2017-03-10 11:43:32 +00:00
compression: None,
encryption: None,
2017-03-17 11:58:22 +00:00
hash_method: HashMethod::Blake2,
2017-03-10 11:43:32 +00:00
raw_size: 0,
encoded_size: 0,
chunk_count: 0,
2017-03-17 11:58:22 +00:00
mode: BundleMode::Content,
2017-03-18 14:41:59 +00:00
chunk_info_size: 0
2017-03-10 11:43:32 +00:00
}
}
}
// An open bundle file: its header info plus everything needed to locate and
// decode individual chunks inside the file.
pub struct Bundle {
    pub info: BundleInfo,
    // Per-chunk (hash, size) list decoded from the bundle's chunk table.
    pub chunks: ChunkList,
    pub version: u8,
    pub path: PathBuf,
    crypto: Arc<Mutex<Crypto>>,
    // Byte offset in the file where the encoded chunk data begins.
    pub content_start: usize,
    // Start offset of each chunk within the decoded contents.
    pub chunk_positions: Vec<usize>
}
impl Bundle {
2017-03-18 14:41:59 +00:00
fn new(path: PathBuf, version: u8, content_start: usize, crypto: Arc<Mutex<Crypto>>, info: BundleInfo, chunks: ChunkList) -> Self {
let mut chunk_positions = Vec::with_capacity(chunks.len());
2017-03-10 11:43:32 +00:00
let mut pos = 0;
2017-03-18 14:41:59 +00:00
for &(_, len) in (&chunks).iter() {
2017-03-10 11:43:32 +00:00
chunk_positions.push(pos);
2017-03-18 14:41:59 +00:00
pos += len as usize;
2017-03-10 11:43:32 +00:00
}
Bundle {
2017-03-15 07:27:27 +00:00
info: info,
2017-03-18 14:41:59 +00:00
chunks: chunks,
2017-03-10 11:43:32 +00:00
version: version,
path: path,
crypto: crypto,
content_start: content_start,
chunk_positions: chunk_positions
}
}
2017-03-15 07:27:27 +00:00
#[inline]
pub fn id(&self) -> BundleId {
self.info.id.clone()
}
2017-03-10 11:43:32 +00:00
pub fn load(path: PathBuf, crypto: Arc<Mutex<Crypto>>) -> Result<Self, BundleError> {
2017-03-16 08:42:30 +00:00
let mut file = BufReader::new(try!(File::open(&path).map_err(|e| BundleError::Read(e, path.clone()))));
2017-03-10 11:43:32 +00:00
let mut header = [0u8; 8];
2017-03-16 08:42:30 +00:00
try!(file.read_exact(&mut header).map_err(|e| BundleError::Read(e, path.clone())));
2017-03-10 11:43:32 +00:00
if header[..HEADER_STRING.len()] != HEADER_STRING {
2017-03-16 08:42:30 +00:00
return Err(BundleError::WrongHeader(path.clone()))
2017-03-10 11:43:32 +00:00
}
let version = header[HEADER_STRING.len()];
if version != HEADER_VERSION {
2017-03-16 08:42:30 +00:00
return Err(BundleError::WrongVersion(path.clone(), version))
2017-03-10 11:43:32 +00:00
}
2017-03-17 11:58:22 +00:00
let header: BundleInfo = try!(msgpack::decode_from_stream(&mut file)
.map_err(|e| BundleError::Decode(e, path.clone())));
2017-03-18 14:41:59 +00:00
let mut chunk_data = Vec::with_capacity(header.chunk_info_size);
chunk_data.resize(header.chunk_info_size, 0);
try!(file.read_exact(&mut chunk_data).map_err(|e| BundleError::Read(e, path.clone())));
2017-03-17 11:58:22 +00:00
if let Some(ref encryption) = header.encryption {
2017-03-18 14:41:59 +00:00
chunk_data = try!(crypto.lock().unwrap().decrypt(encryption.clone(), &chunk_data));
2017-03-17 11:58:22 +00:00
}
2017-03-18 14:41:59 +00:00
let chunks = ChunkList::read_from(&chunk_data);
2017-03-10 11:43:32 +00:00
let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize;
2017-03-18 14:41:59 +00:00
Ok(Bundle::new(path, version, content_start, crypto, header, chunks))
2017-03-10 11:43:32 +00:00
}
#[inline]
fn load_encoded_contents(&self) -> Result<Vec<u8>, BundleError> {
2017-03-16 08:42:30 +00:00
let mut file = BufReader::new(try!(File::open(&self.path).map_err(|e| BundleError::Read(e, self.path.clone()))));
try!(file.seek(SeekFrom::Start(self.content_start as u64)).map_err(|e| BundleError::Read(e, self.path.clone())));
2017-03-15 07:27:27 +00:00
let mut data = Vec::with_capacity(max(self.info.encoded_size, self.info.raw_size)+1024);
2017-03-16 08:42:30 +00:00
try!(file.read_to_end(&mut data).map_err(|e| BundleError::Read(e, self.path.clone())));
2017-03-10 11:43:32 +00:00
Ok(data)
}
#[inline]
fn decode_contents(&self, mut data: Vec<u8>) -> Result<Vec<u8>, BundleError> {
2017-03-15 07:27:27 +00:00
if let Some(ref encryption) = self.info.encryption {
2017-03-10 11:43:32 +00:00
data = try!(self.crypto.lock().unwrap().decrypt(encryption.clone(), &data));
}
2017-03-15 07:27:27 +00:00
if let Some(ref compression) = self.info.compression {
2017-03-10 11:43:32 +00:00
data = try!(compression.decompress(&data));
}
Ok(data)
}
#[inline]
pub fn load_contents(&self) -> Result<Vec<u8>, BundleError> {
self.load_encoded_contents().and_then(|data| self.decode_contents(data))
}
#[inline]
pub fn get_chunk_position(&self, id: usize) -> Result<(usize, usize), BundleError> {
2017-03-15 07:27:27 +00:00
if id >= self.info.chunk_count {
2017-03-16 08:42:30 +00:00
return Err(BundleError::NoSuchChunk(self.id(), id))
2017-03-10 11:43:32 +00:00
}
2017-03-18 14:41:59 +00:00
Ok((self.chunk_positions[id], self.chunks[id].1 as usize))
2017-03-10 11:43:32 +00:00
}
pub fn check(&self, full: bool) -> Result<(), BundleError> {
2017-03-17 11:58:22 +00:00
//FIXME: adapt to new format
2017-03-18 14:41:59 +00:00
if self.info.chunk_count != self.chunks.len() {
2017-03-15 07:27:27 +00:00
return Err(BundleError::Integrity(self.id(),
2017-03-10 11:43:32 +00:00
"Chunk list size does not match chunk count"))
}
2017-03-18 14:41:59 +00:00
if self.chunks.iter().map(|c| c.1 as usize).sum::<usize>() != self.info.raw_size {
2017-03-15 07:27:27 +00:00
return Err(BundleError::Integrity(self.id(),
2017-03-10 11:43:32 +00:00
"Individual chunk sizes do not add up to total size"))
}
if !full {
2017-03-16 08:42:30 +00:00
let size = try!(fs::metadata(&self.path).map_err(|e| BundleError::Read(e, self.path.clone()))
2017-03-10 11:43:32 +00:00
).len();
2017-03-15 07:27:27 +00:00
if size as usize != self.info.encoded_size + self.content_start {
return Err(BundleError::Integrity(self.id(),
2017-03-10 11:43:32 +00:00
"File size does not match size in header, truncated file"))
}
return Ok(())
}
let encoded_contents = try!(self.load_encoded_contents());
2017-03-15 07:27:27 +00:00
if self.info.encoded_size != encoded_contents.len() {
return Err(BundleError::Integrity(self.id(),
2017-03-10 11:43:32 +00:00
"Encoded data size does not match size in header, truncated bundle"))
}
let contents = try!(self.decode_contents(encoded_contents));
2017-03-15 07:27:27 +00:00
if self.info.raw_size != contents.len() {
return Err(BundleError::Integrity(self.id(),
2017-03-10 11:43:32 +00:00
"Raw data size does not match size in header, truncated bundle"))
}
//TODO: verify checksum
Ok(())
}
}
impl Debug for Bundle {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fmt, "Bundle(\n\tid: {}\n\tpath: {:?}\n\tchunks: {}\n\tsize: {}, encoded: {}\n\tcompression: {:?}\n)",
2017-03-15 07:27:27 +00:00
self.info.id.to_string(), self.path, self.info.chunk_count, self.info.raw_size,
self.info.encoded_size, self.info.compression)
2017-03-10 11:43:32 +00:00
}
}
// Incrementally builds a bundle in memory; finish() writes it to disk.
pub struct BundleWriter {
    mode: BundleMode,
    hash_method: HashMethod,
    // Encoded chunk data accumulated so far.
    data: Vec<u8>,
    compression: Option<Compression>,
    // Streaming compressor, present iff compression is set.
    compression_stream: Option<CompressionStream>,
    encryption: Option<Encryption>,
    crypto: Arc<Mutex<Crypto>>,
    // Total raw bytes added so far.
    raw_size: usize,
    chunk_count: usize,
    // (hash, size) of every chunk added so far.
    chunks: ChunkList,
}
impl BundleWriter {
2017-03-17 11:58:22 +00:00
fn new(
mode: BundleMode,
hash_method: HashMethod,
compression: Option<Compression>,
encryption: Option<Encryption>,
2017-03-18 14:41:59 +00:00
crypto: Arc<Mutex<Crypto>>
2017-03-17 11:58:22 +00:00
) -> Result<Self, BundleError> {
2017-03-10 11:43:32 +00:00
let compression_stream = match compression {
Some(ref compression) => Some(try!(compression.compress_stream())),
None => None
};
Ok(BundleWriter {
2017-03-17 06:15:19 +00:00
mode: mode,
2017-03-17 11:58:22 +00:00
hash_method: hash_method,
2017-03-10 11:43:32 +00:00
data: vec![],
compression: compression,
compression_stream: compression_stream,
encryption: encryption,
crypto: crypto,
raw_size: 0,
chunk_count: 0,
2017-03-18 14:41:59 +00:00
chunks: ChunkList::new()
2017-03-10 11:43:32 +00:00
})
}
2017-03-17 11:58:22 +00:00
pub fn add(&mut self, chunk: &[u8], hash: Hash) -> Result<usize, BundleError> {
2017-03-10 11:43:32 +00:00
if let Some(ref mut stream) = self.compression_stream {
try!(stream.process(chunk, &mut self.data))
} else {
self.data.extend_from_slice(chunk)
}
self.raw_size += chunk.len();
self.chunk_count += 1;
2017-03-18 14:41:59 +00:00
self.chunks.push((hash, chunk.len() as u32));
2017-03-10 11:43:32 +00:00
Ok(self.chunk_count-1)
}
fn finish(mut self, db: &BundleDb) -> Result<Bundle, BundleError> {
if let Some(stream) = self.compression_stream {
try!(stream.finish(&mut self.data))
}
if let Some(ref encryption) = self.encryption {
self.data = try!(self.crypto.lock().unwrap().encrypt(encryption.clone(), &self.data));
}
let encoded_size = self.data.len();
2017-03-18 14:41:59 +00:00
let mut chunk_data = Vec::with_capacity(self.chunks.encoded_size());
self.chunks.write_to(&mut chunk_data).unwrap();
let id = BundleId(self.hash_method.hash(&chunk_data));
if let Some(ref encryption) = self.encryption {
chunk_data = try!(self.crypto.lock().unwrap().encrypt(encryption.clone(), &chunk_data));
}
2017-03-10 11:43:32 +00:00
let (folder, file) = db.bundle_path(&id);
let path = folder.join(file);
2017-03-16 08:42:30 +00:00
try!(fs::create_dir_all(&folder).map_err(|e| BundleError::Write(e, path.clone())));
let mut file = BufWriter::new(try!(File::create(&path).map_err(|e| BundleError::Write(e, path.clone()))));
try!(file.write_all(&HEADER_STRING).map_err(|e| BundleError::Write(e, path.clone())));
try!(file.write_all(&[HEADER_VERSION]).map_err(|e| BundleError::Write(e, path.clone())));
2017-03-15 07:27:27 +00:00
let header = BundleInfo {
2017-03-17 06:15:19 +00:00
mode: self.mode,
2017-03-17 11:58:22 +00:00
hash_method: self.hash_method,
2017-03-10 11:43:32 +00:00
compression: self.compression,
encryption: self.encryption,
chunk_count: self.chunk_count,
id: id.clone(),
raw_size: self.raw_size,
encoded_size: encoded_size,
2017-03-18 14:41:59 +00:00
chunk_info_size: chunk_data.len()
2017-03-10 11:43:32 +00:00
};
2017-03-16 08:42:30 +00:00
try!(msgpack::encode_to_stream(&header, &mut file)
.map_err(|e| BundleError::Encode(e, path.clone())));
2017-03-18 14:41:59 +00:00
try!(file.write_all(&chunk_data).map_err(|e| BundleError::Write(e, path.clone())));
2017-03-10 11:43:32 +00:00
let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize;
2017-03-16 08:42:30 +00:00
try!(file.write_all(&self.data).map_err(|e| BundleError::Write(e, path.clone())));
2017-03-18 14:41:59 +00:00
Ok(Bundle::new(path, HEADER_VERSION, content_start, self.crypto, header, self.chunks))
2017-03-10 11:43:32 +00:00
}
#[inline]
pub fn size(&self) -> usize {
self.data.len()
}
2017-03-15 07:27:27 +00:00
#[inline]
pub fn raw_size(&self) -> usize {
self.raw_size
}
2017-03-10 11:43:32 +00:00
}
// On-disk bundle database: a directory tree of bundle files, an in-memory
// index by id, and an LRU cache of decoded bundle contents.
pub struct BundleDb {
    path: PathBuf,
    // Defaults applied to newly created bundles.
    compression: Option<Compression>,
    encryption: Option<Encryption>,
    crypto: Arc<Mutex<Crypto>>,
    bundles: HashMap<BundleId, Bundle>,
    // Caches fully decoded bundle contents for repeated chunk reads.
    bundle_cache: LruCache<BundleId, Vec<u8>>
}
impl BundleDb {
2017-03-18 14:41:59 +00:00
fn new(path: PathBuf, compression: Option<Compression>, encryption: Option<Encryption>) -> Self {
2017-03-10 11:43:32 +00:00
BundleDb {
path: path,
compression:
compression,
crypto: Arc::new(Mutex::new(Crypto::new())),
encryption: encryption,
bundles: HashMap::new(),
bundle_cache: LruCache::new(5, 10)
}
}
fn bundle_path(&self, bundle: &BundleId) -> (PathBuf, PathBuf) {
let mut folder = self.path.clone();
2017-03-15 07:27:27 +00:00
let mut file = bundle.to_string()[0..32].to_owned() + ".bundle";
2017-03-10 11:43:32 +00:00
let mut count = self.bundles.len();
2017-03-15 07:27:27 +00:00
while count >= 100 {
2017-03-10 11:43:32 +00:00
if file.len() < 10 {
break
}
2017-03-15 07:27:27 +00:00
folder = folder.join(&file[0..2]);
file = file[2..].to_string();
count /= 100;
2017-03-10 11:43:32 +00:00
}
(folder, file.into())
}
fn load_bundle_list(&mut self) -> Result<(), BundleError> {
self.bundles.clear();
let mut paths = Vec::new();
paths.push(self.path.clone());
while let Some(path) = paths.pop() {
for entry in try!(fs::read_dir(path).map_err(BundleError::List)) {
let entry = try!(entry.map_err(BundleError::List));
let path = entry.path();
if path.is_dir() {
paths.push(path);
} else {
let bundle = try!(Bundle::load(path, self.crypto.clone()));
2017-03-15 07:27:27 +00:00
self.bundles.insert(bundle.id(), bundle);
2017-03-10 11:43:32 +00:00
}
}
}
Ok(())
}
#[inline]
2017-03-18 14:41:59 +00:00
pub fn open<P: AsRef<Path>>(path: P, compression: Option<Compression>, encryption: Option<Encryption>) -> Result<Self, BundleError> {
2017-03-10 11:43:32 +00:00
let path = path.as_ref().to_owned();
2017-03-18 14:41:59 +00:00
let mut self_ = Self::new(path, compression, encryption);
2017-03-10 11:43:32 +00:00
try!(self_.load_bundle_list());
Ok(self_)
}
#[inline]
2017-03-18 14:41:59 +00:00
pub fn create<P: AsRef<Path>>(path: P, compression: Option<Compression>, encryption: Option<Encryption>) -> Result<Self, BundleError> {
2017-03-10 11:43:32 +00:00
let path = path.as_ref().to_owned();
try!(fs::create_dir_all(&path)
2017-03-16 08:42:30 +00:00
.map_err(|e| BundleError::Write(e, path.clone())));
2017-03-18 14:41:59 +00:00
Ok(Self::new(path, compression, encryption))
2017-03-10 11:43:32 +00:00
}
#[inline]
2017-03-18 14:41:59 +00:00
pub fn open_or_create<P: AsRef<Path>>(path: P, compression: Option<Compression>, encryption: Option<Encryption>) -> Result<Self, BundleError> {
2017-03-10 11:43:32 +00:00
if path.as_ref().exists() {
2017-03-18 14:41:59 +00:00
Self::open(path, compression, encryption)
2017-03-10 11:43:32 +00:00
} else {
2017-03-18 14:41:59 +00:00
Self::create(path, compression, encryption)
2017-03-10 11:43:32 +00:00
}
}
#[inline]
2017-03-17 11:58:22 +00:00
pub fn create_bundle(&self, mode: BundleMode, hash_method: HashMethod) -> Result<BundleWriter, BundleError> {
2017-03-18 14:41:59 +00:00
BundleWriter::new(mode, hash_method, self.compression.clone(), self.encryption.clone(), self.crypto.clone())
2017-03-10 11:43:32 +00:00
}
pub fn get_chunk(&mut self, bundle_id: &BundleId, id: usize) -> Result<Vec<u8>, BundleError> {
2017-03-16 08:42:30 +00:00
let bundle = try!(self.bundles.get(bundle_id).ok_or(BundleError::NoSuchBundle(bundle_id.clone())));
2017-03-10 11:43:32 +00:00
let (pos, len) = try!(bundle.get_chunk_position(id));
let mut chunk = Vec::with_capacity(len);
if let Some(data) = self.bundle_cache.get(bundle_id) {
chunk.extend_from_slice(&data[pos..pos+len]);
return Ok(chunk);
}
let data = try!(bundle.load_contents());
chunk.extend_from_slice(&data[pos..pos+len]);
self.bundle_cache.put(bundle_id.clone(), data);
Ok(chunk)
}
#[inline]
pub fn add_bundle(&mut self, bundle: BundleWriter) -> Result<&Bundle, BundleError> {
let bundle = try!(bundle.finish(&self));
2017-03-15 07:27:27 +00:00
let id = bundle.id();
2017-03-10 11:43:32 +00:00
self.bundles.insert(id.clone(), bundle);
Ok(self.get_bundle(&id).unwrap())
}
#[inline]
pub fn get_bundle(&self, bundle: &BundleId) -> Option<&Bundle> {
self.bundles.get(bundle)
}
#[inline]
pub fn list_bundles(&self) -> Vec<&Bundle> {
self.bundles.values().collect()
}
#[inline]
pub fn delete_bundle(&mut self, bundle: &BundleId) -> Result<(), BundleError> {
if let Some(bundle) = self.bundles.remove(bundle) {
2017-03-15 07:27:27 +00:00
fs::remove_file(&bundle.path).map_err(|e| BundleError::Remove(e, bundle.id()))
2017-03-10 11:43:32 +00:00
} else {
2017-03-16 08:42:30 +00:00
Err(BundleError::NoSuchBundle(bundle.clone()))
2017-03-10 11:43:32 +00:00
}
}
#[inline]
pub fn check(&self, full: bool) -> Result<(), BundleError> {
for bundle in self.bundles.values() {
try!(bundle.check(full))
}
Ok(())
}
}