pull/10/head
Dennis Schwerdel 2017-03-16 09:42:30 +01:00
parent 717fc7472d
commit 0b673d145f
17 changed files with 558 additions and 351 deletions

View File

@ -1,24 +1,77 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::collections::HashMap; use std::collections::HashMap;
use std::fs::{self, File}; use std::fs::{self, File};
use std::io::{Read, Write, Seek, SeekFrom, BufWriter, BufReader}; use std::io::{self, Read, Write, Seek, SeekFrom, BufWriter, BufReader};
use std::cmp::max; use std::cmp::max;
use std::fmt::{self, Debug, Write as FmtWrite}; use std::fmt::{self, Debug, Write as FmtWrite};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use serde::{self, Serialize, Deserialize}; use serde::{self, Serialize, Deserialize};
use serde::bytes::ByteBuf;
use rmp_serde;
use errors::BundleError;
use util::*; use util::*;
static HEADER_STRING: [u8; 7] = *b"zbundle"; static HEADER_STRING: [u8; 7] = *b"zbundle";
static HEADER_VERSION: u8 = 1; static HEADER_VERSION: u8 = 1;
// TODO: Test cases quick_error!{
// TODO: Benchmarks #[derive(Debug)]
pub enum BundleError {
List(err: io::Error) {
cause(err)
description("Failed to list bundles")
}
Read(err: io::Error, path: PathBuf) {
cause(err)
description("Failed to read bundle")
}
Decode(err: msgpack::DecodeError, path: PathBuf) {
cause(err)
description("Failed to decode bundle header")
}
Write(err: io::Error, path: PathBuf) {
cause(err)
description("Failed to write bundle")
}
Encode(err: msgpack::EncodeError, path: PathBuf) {
cause(err)
description("Failed to encode bundle header")
}
WrongHeader(path: PathBuf) {
description("Wrong header")
display("Wrong header on bundle {:?}", path)
}
WrongVersion(path: PathBuf, version: u8) {
description("Wrong version")
display("Wrong version on bundle {:?}: {}", path, version)
}
Integrity(bundle: BundleId, reason: &'static str) {
description("Bundle has an integrity error")
display("Bundle {:?} has an integrity error: {}", bundle, reason)
}
NoSuchBundle(bundle: BundleId) {
description("No such bundle")
display("No such bundle: {:?}", bundle)
}
NoSuchChunk(bundle: BundleId, id: usize) {
description("Bundle has no such chunk")
display("Bundle {:?} has no chunk with that id: {}", bundle, id)
}
Compression(err: CompressionError) {
from()
cause(err)
}
Encryption(err: EncryptionError) {
from()
cause(err)
}
Remove(err: io::Error, bundle: BundleId) {
cause(err)
description("Failed to remove bundle")
display("Failed to remove bundle {}", bundle)
}
}
}
#[derive(Hash, PartialEq, Eq, Clone, Default)] #[derive(Hash, PartialEq, Eq, Clone, Default)]
@ -32,7 +85,7 @@ impl Serialize for BundleId {
impl Deserialize for BundleId { impl Deserialize for BundleId {
fn deserialize<D: serde::Deserializer>(de: D) -> Result<Self, D::Error> { fn deserialize<D: serde::Deserializer>(de: D) -> Result<Self, D::Error> {
let bytes = try!(ByteBuf::deserialize(de)); let bytes = try!(msgpack::Bytes::deserialize(de));
Ok(BundleId(bytes.into())) Ok(BundleId(bytes.into()))
} }
} }
@ -92,7 +145,7 @@ impl Default for BundleInfo {
id: BundleId(vec![]), id: BundleId(vec![]),
compression: None, compression: None,
encryption: None, encryption: None,
checksum: (ChecksumType::Blake2_256, ByteBuf::new()), checksum: (ChecksumType::Blake2_256, msgpack::Bytes::new()),
raw_size: 0, raw_size: 0,
encoded_size: 0, encoded_size: 0,
chunk_count: 0, chunk_count: 0,
@ -135,34 +188,28 @@ impl Bundle {
} }
pub fn load(path: PathBuf, crypto: Arc<Mutex<Crypto>>) -> Result<Self, BundleError> { pub fn load(path: PathBuf, crypto: Arc<Mutex<Crypto>>) -> Result<Self, BundleError> {
let mut file = BufReader::new(try!(File::open(&path) let mut file = BufReader::new(try!(File::open(&path).map_err(|e| BundleError::Read(e, path.clone()))));
.map_err(|e| BundleError::Read(e, path.clone(), "Failed to open bundle file"))));
let mut header = [0u8; 8]; let mut header = [0u8; 8];
try!(file.read_exact(&mut header) try!(file.read_exact(&mut header).map_err(|e| BundleError::Read(e, path.clone())));
.map_err(|e| BundleError::Read(e, path.clone(), "Failed to read bundle header")));
if header[..HEADER_STRING.len()] != HEADER_STRING { if header[..HEADER_STRING.len()] != HEADER_STRING {
return Err(BundleError::Format(path.clone(), "Wrong header string")) return Err(BundleError::WrongHeader(path.clone()))
} }
let version = header[HEADER_STRING.len()]; let version = header[HEADER_STRING.len()];
if version != HEADER_VERSION { if version != HEADER_VERSION {
return Err(BundleError::Format(path.clone(), "Unsupported bundle file version")) return Err(BundleError::WrongVersion(path.clone(), version))
} }
let mut reader = rmp_serde::Deserializer::new(file); let header = try!(msgpack::decode_from_stream(&mut file)
let header = try!(BundleInfo::deserialize(&mut reader)
.map_err(|e| BundleError::Decode(e, path.clone()))); .map_err(|e| BundleError::Decode(e, path.clone())));
file = reader.into_inner();
let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize; let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize;
Ok(Bundle::new(path, version, content_start, crypto, header)) Ok(Bundle::new(path, version, content_start, crypto, header))
} }
#[inline] #[inline]
fn load_encoded_contents(&self) -> Result<Vec<u8>, BundleError> { fn load_encoded_contents(&self) -> Result<Vec<u8>, BundleError> {
let mut file = BufReader::new(try!(File::open(&self.path) let mut file = BufReader::new(try!(File::open(&self.path).map_err(|e| BundleError::Read(e, self.path.clone()))));
.map_err(|e| BundleError::Read(e, self.path.clone(), "Failed to open bundle file")))); try!(file.seek(SeekFrom::Start(self.content_start as u64)).map_err(|e| BundleError::Read(e, self.path.clone())));
try!(file.seek(SeekFrom::Start(self.content_start as u64))
.map_err(|e| BundleError::Read(e, self.path.clone(), "Failed to seek to data")));
let mut data = Vec::with_capacity(max(self.info.encoded_size, self.info.raw_size)+1024); let mut data = Vec::with_capacity(max(self.info.encoded_size, self.info.raw_size)+1024);
try!(file.read_to_end(&mut data).map_err(|_| "Failed to read data")); try!(file.read_to_end(&mut data).map_err(|e| BundleError::Read(e, self.path.clone())));
Ok(data) Ok(data)
} }
@ -185,7 +232,7 @@ impl Bundle {
#[inline] #[inline]
pub fn get_chunk_position(&self, id: usize) -> Result<(usize, usize), BundleError> { pub fn get_chunk_position(&self, id: usize) -> Result<(usize, usize), BundleError> {
if id >= self.info.chunk_count { if id >= self.info.chunk_count {
return Err("Invalid chunk id".into()) return Err(BundleError::NoSuchChunk(self.id(), id))
} }
Ok((self.chunk_positions[id], self.info.chunk_sizes[id])) Ok((self.chunk_positions[id], self.info.chunk_sizes[id]))
} }
@ -200,8 +247,7 @@ impl Bundle {
"Individual chunk sizes do not add up to total size")) "Individual chunk sizes do not add up to total size"))
} }
if !full { if !full {
let size = try!(fs::metadata(&self.path) let size = try!(fs::metadata(&self.path).map_err(|e| BundleError::Read(e, self.path.clone()))
.map_err(|e| BundleError::Read(e, self.path.clone(), "Failed to get size of file"))
).len(); ).len();
if size as usize != self.info.encoded_size + self.content_start { if size as usize != self.info.encoded_size + self.content_start {
return Err(BundleError::Integrity(self.id(), return Err(BundleError::Integrity(self.id(),
@ -290,14 +336,10 @@ impl BundleWriter {
let id = BundleId(checksum.1.to_vec()); let id = BundleId(checksum.1.to_vec());
let (folder, file) = db.bundle_path(&id); let (folder, file) = db.bundle_path(&id);
let path = folder.join(file); let path = folder.join(file);
try!(fs::create_dir_all(&folder) try!(fs::create_dir_all(&folder).map_err(|e| BundleError::Write(e, path.clone())));
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to create folder"))); let mut file = BufWriter::new(try!(File::create(&path).map_err(|e| BundleError::Write(e, path.clone()))));
let mut file = BufWriter::new(try!(File::create(&path) try!(file.write_all(&HEADER_STRING).map_err(|e| BundleError::Write(e, path.clone())));
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to create bundle file")))); try!(file.write_all(&[HEADER_VERSION]).map_err(|e| BundleError::Write(e, path.clone())));
try!(file.write_all(&HEADER_STRING)
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to write bundle header")));
try!(file.write_all(&[HEADER_VERSION])
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to write bundle header")));
let header = BundleInfo { let header = BundleInfo {
checksum: checksum, checksum: checksum,
compression: self.compression, compression: self.compression,
@ -308,14 +350,10 @@ impl BundleWriter {
encoded_size: encoded_size, encoded_size: encoded_size,
chunk_sizes: self.chunk_sizes chunk_sizes: self.chunk_sizes
}; };
{ try!(msgpack::encode_to_stream(&header, &mut file)
let mut writer = rmp_serde::Serializer::new(&mut file); .map_err(|e| BundleError::Encode(e, path.clone())));
try!(header.serialize(&mut writer)
.map_err(|e| BundleError::Encode(e, path.clone())));
}
let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize; let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize;
try!(file.write_all(&self.data) try!(file.write_all(&self.data).map_err(|e| BundleError::Write(e, path.clone())));
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to write bundle data")));
Ok(Bundle::new(path, HEADER_VERSION, content_start, self.crypto, header)) Ok(Bundle::new(path, HEADER_VERSION, content_start, self.crypto, header))
} }
@ -402,7 +440,7 @@ impl BundleDb {
pub fn create<P: AsRef<Path>>(path: P, compression: Option<Compression>, encryption: Option<Encryption>, checksum: ChecksumType) -> Result<Self, BundleError> { pub fn create<P: AsRef<Path>>(path: P, compression: Option<Compression>, encryption: Option<Encryption>, checksum: ChecksumType) -> Result<Self, BundleError> {
let path = path.as_ref().to_owned(); let path = path.as_ref().to_owned();
try!(fs::create_dir_all(&path) try!(fs::create_dir_all(&path)
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to create folder"))); .map_err(|e| BundleError::Write(e, path.clone())));
Ok(Self::new(path, compression, encryption, checksum)) Ok(Self::new(path, compression, encryption, checksum))
} }
@ -421,7 +459,7 @@ impl BundleDb {
} }
pub fn get_chunk(&mut self, bundle_id: &BundleId, id: usize) -> Result<Vec<u8>, BundleError> { pub fn get_chunk(&mut self, bundle_id: &BundleId, id: usize) -> Result<Vec<u8>, BundleError> {
let bundle = try!(self.bundles.get(bundle_id).ok_or("Bundle not found")); let bundle = try!(self.bundles.get(bundle_id).ok_or(BundleError::NoSuchBundle(bundle_id.clone())));
let (pos, len) = try!(bundle.get_chunk_position(id)); let (pos, len) = try!(bundle.get_chunk_position(id));
let mut chunk = Vec::with_capacity(len); let mut chunk = Vec::with_capacity(len);
if let Some(data) = self.bundle_cache.get(bundle_id) { if let Some(data) = self.bundle_cache.get(bundle_id) {
@ -457,7 +495,7 @@ impl BundleDb {
if let Some(bundle) = self.bundles.remove(bundle) { if let Some(bundle) = self.bundles.remove(bundle) {
fs::remove_file(&bundle.path).map_err(|e| BundleError::Remove(e, bundle.id())) fs::remove_file(&bundle.path).map_err(|e| BundleError::Remove(e, bundle.id()))
} else { } else {
Err("No such bundle".into()) Err(BundleError::NoSuchBundle(bundle.clone()))
} }
} }

View File

@ -1,6 +1,4 @@
use std::io::{Write, Read}; use std::io::{self, Write, Read};
use super::errors::ChunkerError;
mod ae; mod ae;
mod rabin; mod rabin;
@ -18,6 +16,27 @@ pub use self::fastcdc::FastCdcChunker;
// https://borgbackup.readthedocs.io/en/stable/internals.html#chunks // https://borgbackup.readthedocs.io/en/stable/internals.html#chunks
// https://github.com/bup/bup/blob/master/lib/bup/bupsplit.c // https://github.com/bup/bup/blob/master/lib/bup/bupsplit.c
quick_error!{
#[derive(Debug)]
pub enum ChunkerError {
Read(err: io::Error) {
from(err)
cause(err)
description("Failed to read")
}
Write(err: io::Error) {
from(err)
cause(err)
description("Failed to write")
}
Custom {
from(&'static str)
description("Custom error")
}
}
}
#[derive(Debug, Eq, PartialEq)] #[derive(Debug, Eq, PartialEq)]
pub enum ChunkerStatus { pub enum ChunkerStatus {
Continue, Continue,
@ -35,6 +54,7 @@ pub enum Chunker {
FastCdc(Box<FastCdcChunker>) FastCdc(Box<FastCdcChunker>)
} }
impl IChunker for Chunker { impl IChunker for Chunker {
#[inline] #[inline]
fn get_type(&self) -> ChunkerType { fn get_type(&self) -> ChunkerType {

View File

@ -1,72 +0,0 @@
use std::io;
use std::path::PathBuf;
use rmp_serde::decode::Error as MsgpackDecode;
use rmp_serde::encode::Error as MsgpackEncode;
use super::bundle::BundleId;
quick_error!{
#[derive(Debug)]
pub enum BundleError {
List(err: io::Error) {
cause(err)
description("Failed to list bundles")
}
Read(err: io::Error, path: PathBuf, reason: &'static str) {
cause(err)
description("Failed to read bundle")
display("Failed to read bundle {:?}: {}", path, reason)
}
Decode(err: MsgpackDecode, path: PathBuf) {
cause(err)
description("Failed to decode bundle header")
}
Write(err: io::Error, path: PathBuf, reason: &'static str) {
cause(err)
description("Failed to write bundle")
display("Failed to write bundle {:?}: {}", path, reason)
}
Encode(err: MsgpackEncode, path: PathBuf) {
cause(err)
description("Failed to encode bundle header")
}
Format(path: PathBuf, reason: &'static str) {
description("Failed to decode bundle")
display("Failed to decode bundle {:?}: {}", path, reason)
}
Integrity(bundle: BundleId, reason: &'static str) {
description("Bundle has an integrity error")
display("Bundle {:?} has an integrity error: {}", bundle, reason)
}
Remove(err: io::Error, bundle: BundleId) {
cause(err)
description("Failed to remove bundle")
display("Failed to remove bundle {}", bundle)
}
Custom {
from(&'static str)
description("Custom error")
}
}
}
quick_error!{
#[derive(Debug)]
pub enum ChunkerError {
Read(err: io::Error) {
from(err)
cause(err)
description("Failed to read")
}
Write(err: io::Error) {
from(err)
cause(err)
description("Failed to write")
}
Custom {
from(&'static str)
description("Custom error")
}
}
}

View File

@ -65,31 +65,56 @@ pub struct Index {
data: &'static mut [Entry] data: &'static mut [Entry]
} }
#[derive(Debug)] quick_error!{
pub enum Error { #[derive(Debug)]
IOError(io::Error), pub enum IndexError {
MapError(MapError), Io(err: io::Error) {
NoHeader, from()
MagicError, cause(err)
VersionError, description("Failed to open index file")
}
Mmap(err: MapError) {
from()
cause(err)
description("Failed to write bundle map")
}
NoHeader {
description("Index file does not contain a header")
}
WrongHeader {
description("Wrong header")
}
WrongVersion(version: u8) {
description("Wrong version")
display("Wrong version: {}", version)
}
WrongPosition(key: Hash, should: usize, is: LocateResult) {
description("Key at wrong position")
display("Key {} has wrong position, expected at: {}, but is at: {:?}", key, should, is)
}
WrongEntryCount(header: usize, actual: usize) {
description("Wrong entry count")
display("Wrong entry count, expected {}, but is {}", header, actual)
}
}
} }
#[derive(Debug)] #[derive(Debug)]
enum LocateResult { pub enum LocateResult {
Found(usize), // Found the key at this position Found(usize), // Found the key at this position
Hole(usize), // Found a hole at this position while searching for a key Hole(usize), // Found a hole at this position while searching for a key
Steal(usize) // Found a spot to steal at this position while searching for a key Steal(usize) // Found a spot to steal at this position while searching for a key
} }
impl Index { impl Index {
pub fn new(path: &Path, create: bool) -> Result<Index, Error> { pub fn new(path: &Path, create: bool) -> Result<Index, IndexError> {
let fd = try!(OpenOptions::new().read(true).write(true).create(create).open(path).map_err(|e| { Error::IOError(e) })); let fd = try!(OpenOptions::new().read(true).write(true).create(create).open(path));
if create { if create {
try!(Index::resize_fd(&fd, INITIAL_SIZE)); try!(Index::resize_fd(&fd, INITIAL_SIZE));
} }
let mmap = try!(Index::map_fd(&fd)); let mmap = try!(Index::map_fd(&fd));
if mmap.len() < mem::size_of::<Header>() { if mmap.len() < mem::size_of::<Header>() {
return Err(Error::NoHeader); return Err(IndexError::NoHeader);
} }
let data = Index::mmap_as_slice(&mmap, INITIAL_SIZE as usize); let data = Index::mmap_as_slice(&mmap, INITIAL_SIZE as usize);
let mut index = Index{capacity: 0, max_entries: 0, min_entries: 0, entries: 0, fd: fd, mmap: mmap, data: data}; let mut index = Index{capacity: 0, max_entries: 0, min_entries: 0, entries: 0, fd: fd, mmap: mmap, data: data};
@ -105,10 +130,10 @@ impl Index {
header.capacity = INITIAL_SIZE as u64; header.capacity = INITIAL_SIZE as u64;
} else { } else {
if header.magic != MAGIC { if header.magic != MAGIC {
return Err(Error::MagicError); return Err(IndexError::WrongHeader);
} }
if header.version != VERSION { if header.version != VERSION {
return Err(Error::VersionError); return Err(IndexError::WrongVersion(header.version));
} }
} }
capacity = header.capacity; capacity = header.capacity;
@ -118,34 +143,34 @@ impl Index {
index.set_capacity(capacity as usize); index.set_capacity(capacity as usize);
index.entries = entries as usize; index.entries = entries as usize;
} }
debug_assert!(index.is_consistent(), "Inconsistent after creation"); debug_assert!(index.check().is_ok(), "Inconsistent after creation");
Ok(index) Ok(index)
} }
#[inline] #[inline]
pub fn open(path: &Path) -> Result<Index, Error> { pub fn open(path: &Path) -> Result<Index, IndexError> {
Index::new(path, false) Index::new(path, false)
} }
#[inline] #[inline]
pub fn create(path: &Path) -> Result<Index, Error> { pub fn create(path: &Path) -> Result<Index, IndexError> {
Index::new(path, true) Index::new(path, true)
} }
#[inline] #[inline]
fn map_fd(fd: &File) -> Result<MemoryMap, Error> { fn map_fd(fd: &File) -> Result<MemoryMap, IndexError> {
MemoryMap::new( MemoryMap::new(
try!(fd.metadata().map_err(Error::IOError)).len() as usize, try!(fd.metadata().map_err(IndexError::Io)).len() as usize,
&[MapOption::MapReadable, &[MapOption::MapReadable,
MapOption::MapWritable, MapOption::MapWritable,
MapOption::MapFd(fd.as_raw_fd()), MapOption::MapFd(fd.as_raw_fd()),
MapOption::MapNonStandardFlags(0x0001) //libc::consts::os::posix88::MAP_SHARED MapOption::MapNonStandardFlags(0x0001) //libc::consts::os::posix88::MAP_SHARED
]).map_err(|e| { Error::MapError(e) }) ]).map_err(IndexError::Mmap)
} }
#[inline] #[inline]
fn resize_fd(fd: &File, capacity: usize) -> Result<(), Error> { fn resize_fd(fd: &File, capacity: usize) -> Result<(), IndexError> {
fd.set_len((mem::size_of::<Header>() + capacity * mem::size_of::<Entry>()) as u64).map_err( Error::IOError) fd.set_len((mem::size_of::<Header>() + capacity * mem::size_of::<Entry>()) as u64).map_err(IndexError::Io)
} }
#[inline] #[inline]
@ -172,7 +197,7 @@ impl Index {
self.max_entries = (capacity as f64 * MAX_USAGE) as usize; self.max_entries = (capacity as f64 * MAX_USAGE) as usize;
} }
fn reinsert(&mut self, start: usize, end: usize) -> Result<(), Error> { fn reinsert(&mut self, start: usize, end: usize) -> Result<(), IndexError> {
for pos in start..end { for pos in start..end {
let key; let key;
let data; let data;
@ -191,7 +216,7 @@ impl Index {
Ok(()) Ok(())
} }
fn shrink(&mut self) -> Result<bool, Error> { fn shrink(&mut self) -> Result<bool, IndexError> {
if self.entries >= self.min_entries || self.capacity <= INITIAL_SIZE { if self.entries >= self.min_entries || self.capacity <= INITIAL_SIZE {
return Ok(false) return Ok(false)
} }
@ -206,7 +231,7 @@ impl Index {
Ok(true) Ok(true)
} }
fn extend(&mut self) -> Result<bool, Error> { fn extend(&mut self) -> Result<bool, IndexError> {
if self.entries <= self.max_entries { if self.entries <= self.max_entries {
return Ok(false) return Ok(false)
} }
@ -220,8 +245,7 @@ impl Index {
Ok(true) Ok(true)
} }
#[allow(dead_code)] pub fn check(&self) -> Result<(), IndexError> {
pub fn is_consistent(&self) -> bool {
let mut entries = 0; let mut entries = 0;
for pos in 0..self.capacity { for pos in 0..self.capacity {
let entry = &self.data[pos]; let entry = &self.data[pos];
@ -231,30 +255,17 @@ impl Index {
entries += 1; entries += 1;
match self.locate(&entry.key) { match self.locate(&entry.key) {
LocateResult::Found(p) if p == pos => true, LocateResult::Found(p) if p == pos => true,
found => { found => return Err(IndexError::WrongPosition(entry.key, pos, found))
println!("Inconsistency found: Key {:?} should be at {} but is at {:?}", entry.key, pos, found);
return false
}
}; };
} }
if entries != self.entries { if entries != self.entries {
println!("Inconsistency found: Index contains {} entries, should contain {}", entries, self.entries); return Err(IndexError::WrongEntryCount(self.entries, entries));
return false
}
true
}
pub fn check(&self) -> Result<(), &'static str> {
//TODO: proper errors instead of string
if self.is_consistent() {
Ok(())
} else {
Err("Inconsistent")
} }
Ok(())
} }
#[inline] #[inline]
fn increase_count(&mut self) -> Result<(), Error> { fn increase_count(&mut self) -> Result<(), IndexError> {
self.entries += 1; self.entries += 1;
try!(self.extend()); try!(self.extend());
self.write_header(); self.write_header();
@ -262,7 +273,7 @@ impl Index {
} }
#[inline] #[inline]
fn decrease_count(&mut self) -> Result<(), Error> { fn decrease_count(&mut self) -> Result<(), IndexError> {
self.entries -= 1; self.entries -= 1;
try!(self.shrink()); try!(self.shrink());
self.write_header(); self.write_header();
@ -328,7 +339,7 @@ impl Index {
/// Adds the key, data pair into the table. /// Adds the key, data pair into the table.
/// If the key existed in the table before, it is overwritten and false is returned. /// If the key existed in the table before, it is overwritten and false is returned.
/// Otherwise it will be added to the table and true is returned. /// Otherwise it will be added to the table and true is returned.
pub fn set(&mut self, key: &Hash, data: &Location) -> Result<bool, Error> { pub fn set(&mut self, key: &Hash, data: &Location) -> Result<bool, IndexError> {
match self.locate(key) { match self.locate(key) {
LocateResult::Found(pos) => { LocateResult::Found(pos) => {
self.data[pos].data = *data; self.data[pos].data = *data;
@ -374,7 +385,7 @@ impl Index {
#[inline] #[inline]
pub fn contains(&self, key: &Hash) -> bool { pub fn contains(&self, key: &Hash) -> bool {
debug_assert!(self.is_consistent(), "Inconsistent before get"); debug_assert!(self.check().is_ok(), "Inconsistent before get");
match self.locate(key) { match self.locate(key) {
LocateResult::Found(_) => true, LocateResult::Found(_) => true,
_ => false _ => false
@ -383,7 +394,7 @@ impl Index {
#[inline] #[inline]
pub fn get(&self, key: &Hash) -> Option<Location> { pub fn get(&self, key: &Hash) -> Option<Location> {
debug_assert!(self.is_consistent(), "Inconsistent before get"); debug_assert!(self.check().is_ok(), "Inconsistent before get");
match self.locate(key) { match self.locate(key) {
LocateResult::Found(pos) => Some(self.data[pos].data), LocateResult::Found(pos) => Some(self.data[pos].data),
_ => None _ => None
@ -392,7 +403,7 @@ impl Index {
#[inline] #[inline]
pub fn modify<F>(&mut self, key: &Hash, mut f: F) -> bool where F: FnMut(&mut Location) { pub fn modify<F>(&mut self, key: &Hash, mut f: F) -> bool where F: FnMut(&mut Location) {
debug_assert!(self.is_consistent(), "Inconsistent before get"); debug_assert!(self.check().is_ok(), "Inconsistent before get");
match self.locate(key) { match self.locate(key) {
LocateResult::Found(pos) => { LocateResult::Found(pos) => {
f(&mut self.data[pos].data); f(&mut self.data[pos].data);
@ -403,7 +414,7 @@ impl Index {
} }
#[inline] #[inline]
pub fn delete(&mut self, key: &Hash) -> Result<bool, Error> { pub fn delete(&mut self, key: &Hash) -> Result<bool, IndexError> {
match self.locate(key) { match self.locate(key) {
LocateResult::Found(pos) => { LocateResult::Found(pos) => {
self.backshift(pos); self.backshift(pos);
@ -414,7 +425,7 @@ impl Index {
} }
} }
pub fn filter<F>(&mut self, mut f: F) -> Result<usize, Error> where F: FnMut(&Hash, &Location) -> bool { pub fn filter<F>(&mut self, mut f: F) -> Result<usize, IndexError> where F: FnMut(&Hash, &Location) -> bool {
//TODO: is it faster to walk in reverse direction? //TODO: is it faster to walk in reverse direction?
let mut deleted = 0; let mut deleted = 0;
let mut pos = 0; let mut pos = 0;
@ -485,7 +496,7 @@ impl Index {
#[inline] #[inline]
pub fn size(&self) -> usize { pub fn size(&self) -> usize {
self.mmap.len() self.mmap.len()
} }
#[inline] #[inline]

View File

@ -10,7 +10,6 @@ extern crate serde_yaml;
extern crate docopt; extern crate docopt;
extern crate rustc_serialize; extern crate rustc_serialize;
mod errors;
pub mod util; pub mod util;
pub mod bundle; pub mod bundle;
pub mod index; pub mod index;

View File

@ -1,7 +1,6 @@
use super::{Repository, Chunk}; use super::{Repository, Chunk, RepositoryError};
use rmp_serde; use ::util::*;
use serde::{Deserialize, Serialize};
use std::fs::{self, File}; use std::fs::{self, File};
use std::path::Path; use std::path::Path;
@ -39,19 +38,19 @@ serde_impl!(Backup(u8) {
impl Repository { impl Repository {
pub fn list_backups(&self) -> Result<Vec<String>, &'static str> { pub fn list_backups(&self) -> Result<Vec<String>, RepositoryError> {
let mut backups = Vec::new(); let mut backups = Vec::new();
let mut paths = Vec::new(); let mut paths = Vec::new();
let base_path = self.path.join("backups"); let base_path = self.path.join("backups");
paths.push(base_path.clone()); paths.push(base_path.clone());
while let Some(path) = paths.pop() { while let Some(path) = paths.pop() {
for entry in try!(fs::read_dir(path).map_err(|_| "Failed to list files")) { for entry in try!(fs::read_dir(path)) {
let entry = try!(entry.map_err(|_| "Failed to list files")); let entry = try!(entry);
let path = entry.path(); let path = entry.path();
if path.is_dir() { if path.is_dir() {
paths.push(path); paths.push(path);
} else { } else {
let relpath = try!(path.strip_prefix(&base_path).map_err(|_| "Failed to obtain relative path")); let relpath = path.strip_prefix(&base_path).unwrap();
backups.push(relpath.to_string_lossy().to_string()); backups.push(relpath.to_string_lossy().to_string());
} }
} }
@ -59,25 +58,23 @@ impl Repository {
Ok(backups) Ok(backups)
} }
pub fn get_backup(&self, name: &str) -> Result<Backup, &'static str> { pub fn get_backup(&self, name: &str) -> Result<Backup, RepositoryError> {
let file = try!(File::open(self.path.join("backups").join(name)).map_err(|_| "Failed to load backup")); let mut file = try!(File::open(self.path.join("backups").join(name)));
let mut reader = rmp_serde::Deserializer::new(file); Ok(try!(msgpack::decode_from_stream(&mut file)))
Backup::deserialize(&mut reader).map_err(|_| "Failed to read backup data")
} }
pub fn save_backup(&mut self, backup: &Backup, name: &str) -> Result<(), &'static str> { pub fn save_backup(&mut self, backup: &Backup, name: &str) -> Result<(), RepositoryError> {
let mut file = try!(File::create(self.path.join("backups").join(name)).map_err(|_| "Failed to save backup")); let mut file = try!(File::create(self.path.join("backups").join(name)));
let mut writer = rmp_serde::Serializer::new(&mut file); Ok(try!(msgpack::encode_to_stream(backup, &mut file)))
backup.serialize(&mut writer).map_err(|_| "Failed to write backup data")
} }
pub fn restore_backup<P: AsRef<Path>>(&mut self, backup: &Backup, path: P) -> Result<(), &'static str> { pub fn restore_backup<P: AsRef<Path>>(&mut self, backup: &Backup, path: P) -> Result<(), RepositoryError> {
let inode = try!(self.get_inode(&backup.root)); let inode = try!(self.get_inode(&backup.root));
try!(self.save_inode_at(&inode, path)); try!(self.save_inode_at(&inode, path));
Ok(()) Ok(())
} }
pub fn create_full_backup<P: AsRef<Path>>(&mut self, path: P) -> Result<Backup, &'static str> { pub fn create_full_backup<P: AsRef<Path>>(&mut self, path: P) -> Result<Backup, RepositoryError> {
// Maintain a stack of folders still todo // Maintain a stack of folders still todo
// Maintain a map of path->inode entries // Maintain a map of path->inode entries
// Work on topmost stack entry // Work on topmost stack entry

View File

@ -1,8 +1,10 @@
use std::mem; use std::mem;
use std::io::{Read, Write, Cursor}; use std::io::{Read, Write, Cursor};
use super::{Repository, Mode}; use super::{Repository, Mode, RepositoryError};
use ::index::Location; use ::index::Location;
use ::bundle::BundleId;
use super::integrity::RepositoryIntegrityError;
use ::util::Hash; use ::util::Hash;
use ::chunker::{IChunker, ChunkerStatus}; use ::chunker::{IChunker, ChunkerStatus};
@ -12,7 +14,15 @@ pub type Chunk = (Hash, usize);
impl Repository { impl Repository {
pub fn get_chunk(&mut self, hash: Hash) -> Result<Option<Vec<u8>>, &'static str> { pub fn get_bundle_id(&self, id: u32) -> Result<BundleId, RepositoryError> {
if let Some(bundle_info) = self.bundle_map.get(id) {
Ok(bundle_info.id())
} else {
Err(RepositoryIntegrityError::MissingBundleId(id).into())
}
}
pub fn get_chunk(&mut self, hash: Hash) -> Result<Option<Vec<u8>>, RepositoryError> {
// Find bundle and chunk id in index // Find bundle and chunk id in index
let found = if let Some(found) = self.index.get(&hash) { let found = if let Some(found) = self.index.get(&hash) {
found found
@ -20,20 +30,12 @@ impl Repository {
return Ok(None) return Ok(None)
}; };
// Lookup bundle id from map // Lookup bundle id from map
let bundle_id = if let Some(bundle_info) = self.bundle_map.get(found.bundle) { let bundle_id = try!(self.get_bundle_id(found.bundle));
bundle_info.id()
} else {
return Err("Bundle id not found in map")
};
// Get chunk from bundle // Get chunk from bundle
if let Ok(chunk) = self.bundles.get_chunk(&bundle_id, found.chunk as usize) { Ok(Some(try!(self.bundles.get_chunk(&bundle_id, found.chunk as usize))))
Ok(Some(chunk))
} else {
Err("Failed to load chunk from bundle")
}
} }
pub fn put_chunk(&mut self, mode: Mode, hash: Hash, data: &[u8]) -> Result<(), &'static str> { pub fn put_chunk(&mut self, mode: Mode, hash: Hash, data: &[u8]) -> Result<(), RepositoryError> {
// If this chunk is in the index, ignore it // If this chunk is in the index, ignore it
if self.index.contains(&hash) { if self.index.contains(&hash) {
return Ok(()) return Ok(())
@ -47,7 +49,7 @@ impl Repository {
}; };
// ...alocate one if needed // ...alocate one if needed
if writer.is_none() { if writer.is_none() {
*writer = Some(try!(self.bundles.create_bundle().map_err(|_| "Failed to create new bundle"))); *writer = Some(try!(self.bundles.create_bundle()));
} }
debug_assert!(writer.is_some()); debug_assert!(writer.is_some());
let chunk_id; let chunk_id;
@ -56,7 +58,7 @@ impl Repository {
{ {
// Add chunk to bundle writer and determine the size of the bundle // Add chunk to bundle writer and determine the size of the bundle
let writer_obj = writer.as_mut().unwrap(); let writer_obj = writer.as_mut().unwrap();
chunk_id = try!(writer_obj.add(data).map_err(|_| "Failed to write chunk")); chunk_id = try!(writer_obj.add(data));
size = writer_obj.size(); size = writer_obj.size();
raw_size = writer_obj.raw_size(); raw_size = writer_obj.raw_size();
} }
@ -68,7 +70,7 @@ impl Repository {
if size >= self.config.bundle_size || raw_size >= 4 * self.config.bundle_size { if size >= self.config.bundle_size || raw_size >= 4 * self.config.bundle_size {
let mut finished = None; let mut finished = None;
mem::swap(writer, &mut finished); mem::swap(writer, &mut finished);
let bundle = try!(self.bundles.add_bundle(finished.unwrap()).map_err(|_| "Failed to write finished bundle")); let bundle = try!(self.bundles.add_bundle(finished.unwrap()));
self.bundle_map.set(bundle_id, bundle); self.bundle_map.set(bundle_id, bundle);
if self.next_meta_bundle == bundle_id { if self.next_meta_bundle == bundle_id {
self.next_meta_bundle = next_free_bundle_id self.next_meta_bundle = next_free_bundle_id
@ -79,27 +81,27 @@ impl Repository {
// Not saving the bundle map, this will be done by flush // Not saving the bundle map, this will be done by flush
} }
// Add location to the index // Add location to the index
try!(self.index.set(&hash, &Location::new(bundle_id, chunk_id as u32)).map_err(|_| "Failed to add chunk location to index")); try!(self.index.set(&hash, &Location::new(bundle_id, chunk_id as u32)));
Ok(()) Ok(())
} }
#[inline] #[inline]
pub fn put_data(&mut self, mode: Mode, data: &[u8]) -> Result<Vec<Chunk>, &'static str> { pub fn put_data(&mut self, mode: Mode, data: &[u8]) -> Result<Vec<Chunk>, RepositoryError> {
let mut input = Cursor::new(data); let mut input = Cursor::new(data);
self.put_stream(mode, &mut input) self.put_stream(mode, &mut input)
} }
pub fn put_stream<R: Read>(&mut self, mode: Mode, data: &mut R) -> Result<Vec<Chunk>, &'static str> { pub fn put_stream<R: Read>(&mut self, mode: Mode, data: &mut R) -> Result<Vec<Chunk>, RepositoryError> {
let avg_size = self.config.chunker.avg_size(); let avg_size = self.config.chunker.avg_size();
let mut chunks = Vec::new(); let mut chunks = Vec::new();
let mut chunk = Vec::with_capacity(avg_size * 2); let mut chunk = Vec::with_capacity(avg_size * 2);
loop { loop {
chunk.clear(); chunk.clear();
let mut output = Cursor::new(chunk); let mut output = Cursor::new(chunk);
let res = try!(self.chunker.chunk(data, &mut output).map_err(|_| "Failed to chunk")); let res = try!(self.chunker.chunk(data, &mut output));
chunk = output.into_inner(); chunk = output.into_inner();
let hash = self.config.hash.hash(&chunk); let hash = self.config.hash.hash(&chunk);
try!(self.put_chunk(mode, hash, &chunk).map_err(|_| "Failed to store chunk")); try!(self.put_chunk(mode, hash, &chunk));
chunks.push((hash, chunk.len())); chunks.push((hash, chunk.len()));
if res == ChunkerStatus::Finished { if res == ChunkerStatus::Finished {
break break
@ -109,18 +111,18 @@ impl Repository {
} }
#[inline] #[inline]
pub fn get_data(&mut self, chunks: &[Chunk]) -> Result<Vec<u8>, &'static str> { pub fn get_data(&mut self, chunks: &[Chunk]) -> Result<Vec<u8>, RepositoryError> {
let mut data = Vec::with_capacity(chunks.iter().map(|&(_, size)| size).sum()); let mut data = Vec::with_capacity(chunks.iter().map(|&(_, size)| size).sum());
try!(self.get_stream(chunks, &mut data)); try!(self.get_stream(chunks, &mut data));
Ok(data) Ok(data)
} }
#[inline] #[inline]
pub fn get_stream<W: Write>(&mut self, chunks: &[Chunk], w: &mut W) -> Result<(), &'static str> { pub fn get_stream<W: Write>(&mut self, chunks: &[Chunk], w: &mut W) -> Result<(), RepositoryError> {
for &(ref hash, len) in chunks { for &(ref hash, len) in chunks {
let data = try!(try!(self.get_chunk(*hash).map_err(|_| "Failed to load chunk")).ok_or("Chunk missing")); let data = try!(try!(self.get_chunk(*hash)).ok_or_else(|| RepositoryIntegrityError::MissingChunk(hash.clone())));
debug_assert_eq!(data.len(), len); debug_assert_eq!(data.len(), len);
try!(w.write_all(&data).map_err(|_| "Failed to write to sink")); try!(w.write_all(&data));
} }
Ok(()) Ok(())
} }

View File

@ -1,19 +1,45 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::io::{BufReader, Read, Write, BufWriter}; use std::io::{self, BufReader, Read, Write, BufWriter};
use std::fs::File; use std::fs::File;
use rmp_serde;
use serde::Deserialize;
use serde::Serialize;
use ::bundle::{Bundle, BundleId, BundleInfo}; use ::bundle::{Bundle, BundleId, BundleInfo};
use ::util::*;
static HEADER_STRING: [u8; 7] = *b"zbunmap"; static HEADER_STRING: [u8; 7] = *b"zbunmap";
static HEADER_VERSION: u8 = 1; static HEADER_VERSION: u8 = 1;
quick_error!{
#[derive(Debug)]
pub enum BundleMapError {
Io(err: io::Error) {
from()
cause(err)
description("Failed to read/write bundle map")
}
Decode(err: msgpack::DecodeError) {
from()
cause(err)
description("Failed to decode bundle map")
}
Encode(err: msgpack::EncodeError) {
from()
cause(err)
description("Failed to encode bundle map")
}
WrongHeader {
description("Wrong header")
}
WrongVersion(version: u8) {
description("Wrong version")
display("Wrong version: {}", version)
}
}
}
#[derive(Default)] #[derive(Default)]
pub struct BundleData { pub struct BundleData {
pub info: BundleInfo pub info: BundleInfo
@ -37,36 +63,26 @@ impl BundleMap {
BundleMap(Default::default()) BundleMap(Default::default())
} }
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, &'static str> { pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, BundleMapError> {
let mut file = BufReader::new(try!(File::open(path.as_ref()) let mut file = BufReader::new(try!(File::open(path.as_ref())));
.map_err(|_| "Failed to open bundle map file")));
let mut header = [0u8; 8]; let mut header = [0u8; 8];
try!(file.read_exact(&mut header) try!(file.read_exact(&mut header));
.map_err(|_| "Failed to read bundle map header"));
if header[..HEADER_STRING.len()] != HEADER_STRING { if header[..HEADER_STRING.len()] != HEADER_STRING {
return Err("Wrong header string") return Err(BundleMapError::WrongHeader)
} }
let version = header[HEADER_STRING.len()]; let version = header[HEADER_STRING.len()];
if version != HEADER_VERSION { if version != HEADER_VERSION {
return Err("Unsupported bundle map file version") return Err(BundleMapError::WrongVersion(version))
} }
let mut reader = rmp_serde::Deserializer::new(file); Ok(BundleMap(try!(msgpack::decode_from_stream(&mut file))))
let map = try!(HashMap::deserialize(&mut reader)
.map_err(|_| "Failed to read bundle map data"));
Ok(BundleMap(map))
} }
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), &'static str> { pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), BundleMapError> {
let mut file = BufWriter::new(try!(File::create(path) let mut file = BufWriter::new(try!(File::create(path)));
.map_err(|_| "Failed to create bundle file"))); try!(file.write_all(&HEADER_STRING));
try!(file.write_all(&HEADER_STRING) try!(file.write_all(&[HEADER_VERSION]));
.map_err(|_| "Failed to write bundle header")); msgpack::encode_to_stream(&self.0, &mut file).map_err(BundleMapError::Encode)
try!(file.write_all(&[HEADER_VERSION])
.map_err(|_| "Failed to write bundle header"));
let mut writer = rmp_serde::Serializer::new(&mut file);
self.0.serialize(&mut writer)
.map_err(|_| "Failed to write bundle map data")
} }
#[inline] #[inline]

View File

@ -2,14 +2,36 @@ use serde_yaml;
use std::fs::File; use std::fs::File;
use std::path::Path; use std::path::Path;
use std::io;
use ::util::*; use ::util::*;
use ::chunker::ChunkerType; use ::chunker::ChunkerType;
quick_error!{
#[derive(Debug)]
pub enum ConfigError {
Io(err: io::Error) {
from()
cause(err)
}
Parse(reason: &'static str) {
from()
description("Failed to parse config")
display("Failed to parse config: {}", reason)
}
Yaml(err: serde_yaml::Error) {
from()
cause(err)
description("Yaml format error")
}
}
}
impl HashMethod { impl HashMethod {
fn from_yaml(yaml: String) -> Result<Self, &'static str> { fn from_yaml(yaml: String) -> Result<Self, ConfigError> {
HashMethod::from(&yaml) HashMethod::from(&yaml).map_err(ConfigError::Parse)
} }
fn to_yaml(&self) -> String { fn to_yaml(&self) -> String {
@ -20,8 +42,8 @@ impl HashMethod {
impl ChecksumType { impl ChecksumType {
fn from_yaml(yaml: String) -> Result<Self, &'static str> { fn from_yaml(yaml: String) -> Result<Self, ConfigError> {
ChecksumType::from(&yaml) ChecksumType::from(&yaml).map_err(ConfigError::Parse)
} }
fn to_yaml(&self) -> String { fn to_yaml(&self) -> String {
@ -52,8 +74,8 @@ serde_impl!(ChunkerYaml(String) {
}); });
impl ChunkerType { impl ChunkerType {
fn from_yaml(yaml: ChunkerYaml) -> Result<Self, &'static str> { fn from_yaml(yaml: ChunkerYaml) -> Result<Self, ConfigError> {
ChunkerType::from(&yaml.method, yaml.avg_size, yaml.seed) ChunkerType::from(&yaml.method, yaml.avg_size, yaml.seed).map_err(ConfigError::Parse)
} }
fn to_yaml(&self) -> ChunkerYaml { fn to_yaml(&self) -> ChunkerYaml {
@ -69,8 +91,8 @@ impl ChunkerType {
impl Compression { impl Compression {
#[inline] #[inline]
fn from_yaml(yaml: String) -> Result<Self, &'static str> { fn from_yaml(yaml: String) -> Result<Self, ConfigError> {
Compression::from_string(&yaml) Compression::from_string(&yaml).map_err(|_| ConfigError::Parse("Invalid codec"))
} }
#[inline] #[inline]
@ -118,7 +140,7 @@ pub struct Config {
pub hash: HashMethod pub hash: HashMethod
} }
impl Config { impl Config {
fn from_yaml(yaml: ConfigYaml) -> Result<Self, &'static str> { fn from_yaml(yaml: ConfigYaml) -> Result<Self, ConfigError> {
let compression = if let Some(c) = yaml.compression { let compression = if let Some(c) = yaml.compression {
Some(try!(Compression::from_yaml(c))) Some(try!(Compression::from_yaml(c)))
} else { } else {
@ -143,15 +165,15 @@ impl Config {
} }
} }
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, &'static str> { pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, ConfigError> {
let f = try!(File::open(path).map_err(|_| "Failed to open config")); let f = try!(File::open(path));
let config = try!(serde_yaml::from_reader(f).map_err(|_| "Failed to parse config")); let config = try!(serde_yaml::from_reader(f));
Config::from_yaml(config) Config::from_yaml(config)
} }
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), &'static str> { pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), ConfigError> {
let mut f = try!(File::create(path).map_err(|_| "Failed to open config")); let mut f = try!(File::create(path));
try!(serde_yaml::to_writer(&mut f, &self.to_yaml()).map_err(|_| "Failed to wrtie config")); try!(serde_yaml::to_writer(&mut f, &self.to_yaml()));
Ok(()) Ok(())
} }
} }

66
src/repository/error.rs Normal file
View File

@ -0,0 +1,66 @@
use std::io;
use std::path::PathBuf;
use super::bundle_map::BundleMapError;
use super::config::ConfigError;
use super::integrity::RepositoryIntegrityError;
use ::index::IndexError;
use ::bundle::BundleError;
use ::chunker::ChunkerError;
use ::util::*;
quick_error!{
#[derive(Debug)]
pub enum RepositoryError {
Io(err: io::Error) {
from()
cause(err)
description("IO Error")
}
Config(err: ConfigError) {
from()
cause(err)
description("Configuration error")
}
BundleMap(err: BundleMapError) {
from()
cause(err)
description("Bundle map error")
}
Index(err: IndexError) {
from()
cause(err)
description("Index error")
}
Bundle(err: BundleError) {
from()
cause(err)
description("Bundle error")
}
Chunker(err: ChunkerError) {
from()
cause(err)
description("Chunker error")
}
Decode(err: msgpack::DecodeError) {
from()
cause(err)
description("Failed to decode metadata")
}
Encode(err: msgpack::EncodeError) {
from()
cause(err)
description("Failed to encode metadata")
}
Integrity(err: RepositoryIntegrityError) {
from()
cause(err)
description("Integrity error")
}
InvalidFileType(path: PathBuf) {
description("Invalid file type")
display("{:?} has an invalid file type", path)
}
}
}

View File

@ -1,41 +1,66 @@
use super::Repository; use super::{Repository, RepositoryError};
use ::bundle::BundleId;
use ::util::Hash; use ::util::Hash;
quick_error!{
#[derive(Debug)]
pub enum RepositoryIntegrityError {
MissingChunk(hash: Hash) {
description("Missing chunk")
display("Missing chunk: {}", hash)
}
MissingBundleId(id: u32) {
description("Missing bundle")
display("Missing bundle: {}", id)
}
MissingBundle(id: BundleId) {
description("Missing bundle")
display("Missing bundle: {}", id)
}
NoSuchChunk(bundle: BundleId, chunk: u32) {
description("No such chunk")
display("Bundle {} does not conain the chunk {}", bundle, chunk)
}
InvalidNextBundleId {
description("Invalid next bundle id")
}
SymlinkWithoutTarget {
description("Symlink without target")
}
}
}
impl Repository { impl Repository {
fn check_chunk(&self, hash: Hash) -> Result<(), &'static str> { fn check_chunk(&self, hash: Hash) -> Result<(), RepositoryError> {
// Find bundle and chunk id in index // Find bundle and chunk id in index
let found = if let Some(found) = self.index.get(&hash) { let found = if let Some(found) = self.index.get(&hash) {
found found
} else { } else {
return Err("Chunk not in index"); return Err(RepositoryIntegrityError::MissingChunk(hash).into());
}; };
// Lookup bundle id from map // Lookup bundle id from map
let bundle_id = if let Some(bundle_info) = self.bundle_map.get(found.bundle) { let bundle_id = try!(self.get_bundle_id(found.bundle));
bundle_info.id()
} else {
return Err("Bundle id not found in map")
};
// Get bundle object from bundledb // Get bundle object from bundledb
let bundle = if let Some(bundle) = self.bundles.get_bundle(&bundle_id) { let bundle = if let Some(bundle) = self.bundles.get_bundle(&bundle_id) {
bundle bundle
} else { } else {
return Err("Bundle not found in bundledb") return Err(RepositoryIntegrityError::MissingBundle(bundle_id.clone()).into())
}; };
// Get chunk from bundle // Get chunk from bundle
if bundle.info.chunk_count > found.chunk as usize { if bundle.info.chunk_count > found.chunk as usize {
Ok(()) Ok(())
} else { } else {
Err("Bundle does not contain that chunk") Err(RepositoryIntegrityError::NoSuchChunk(bundle_id.clone(), found.chunk).into())
} }
//TODO: check that contents match their hash //TODO: check that contents match their hash
} }
pub fn check(&mut self, full: bool) -> Result<(), &'static str> { pub fn check(&mut self, full: bool) -> Result<(), RepositoryError> {
try!(self.flush()); try!(self.flush());
try!(self.bundles.check(full).map_err(|_| "Bundles inconsistent")); try!(self.bundles.check(full));
try!(self.index.check().map_err(|_| "Index inconsistent")); try!(self.index.check());
let mut pos = 0; let mut pos = 0;
loop { loop {
pos = if let Some(pos) = self.index.next_entry(pos) { pos = if let Some(pos) = self.index.next_entry(pos) {
@ -48,13 +73,13 @@ impl Repository {
pos += 1; pos += 1;
} }
if self.next_content_bundle == self.next_meta_bundle { if self.next_content_bundle == self.next_meta_bundle {
return Err("Next bundle ids for meta and content as the same") return Err(RepositoryIntegrityError::InvalidNextBundleId.into())
} }
if self.bundle_map.get(self.next_content_bundle).is_some() { if self.bundle_map.get(self.next_content_bundle).is_some() {
return Err("Bundle map already contains next bundle bundle id") return Err(RepositoryIntegrityError::InvalidNextBundleId.into())
} }
if self.bundle_map.get(self.next_meta_bundle).is_some() { if self.bundle_map.get(self.next_meta_bundle).is_some() {
return Err("Bundle map already contains next meta bundle id") return Err(RepositoryIntegrityError::InvalidNextBundleId.into())
} }
Ok(()) Ok(())
} }

View File

@ -1,15 +1,13 @@
use serde::bytes::ByteBuf;
use serde::{Serialize, Deserialize};
use rmp_serde;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::fs::{self, Metadata, File, Permissions}; use std::fs::{self, Metadata, File, Permissions};
use std::os::linux::fs::MetadataExt; use std::os::linux::fs::MetadataExt;
use std::os::unix::fs::{PermissionsExt, symlink}; use std::os::unix::fs::{PermissionsExt, symlink};
use std::io::{Cursor, Read, Write}; use std::io::{Read, Write};
use super::{Repository, Mode, Chunk}; use ::util::*;
use super::{Repository, RepositoryError, Mode, Chunk};
use super::integrity::RepositoryIntegrityError;
#[derive(Debug, Eq, PartialEq)] #[derive(Debug, Eq, PartialEq)]
@ -27,13 +25,14 @@ serde_impl!(FileType(u8) {
#[derive(Debug)] #[derive(Debug)]
pub enum FileContents { pub enum FileContents {
Inline(ByteBuf), Inline(msgpack::Bytes),
Chunked(Vec<Chunk>) ChunkedDirect(Vec<Chunk>),
//TODO: ChunkedIndirect ChunkedIndirect(Vec<Chunk>)
} }
serde_impl!(FileContents(u8) { serde_impl!(FileContents(u8) {
Inline(ByteBuf) => 0, Inline(ByteBuf) => 0,
Chunked(Vec<Chunk>) => 1 ChunkedDirect(Vec<Chunk>) => 1,
ChunkedIndirect(Vec<Chunk>) => 2
}); });
@ -86,7 +85,7 @@ serde_impl!(Inode(u8) {
}); });
impl Inode { impl Inode {
fn get_extended_attrs_from(&mut self, meta: &Metadata) -> Result<(), &'static str> { fn get_extended_attrs_from(&mut self, meta: &Metadata) -> Result<(), RepositoryError> {
self.mode = meta.st_mode(); self.mode = meta.st_mode();
self.user = meta.st_uid(); self.user = meta.st_uid();
self.group = meta.st_gid(); self.group = meta.st_gid();
@ -96,9 +95,11 @@ impl Inode {
Ok(()) Ok(())
} }
pub fn get_from<P: AsRef<Path>>(path: P) -> Result<Self, &'static str> { pub fn get_from<P: AsRef<Path>>(path: P) -> Result<Self, RepositoryError> {
let name = try!(path.as_ref().file_name().ok_or("Not a file")).to_string_lossy().to_string(); let name = try!(path.as_ref().file_name()
let meta = try!(fs::symlink_metadata(path.as_ref()).map_err(|_| "Failed to get metadata")); .ok_or_else(|| RepositoryError::InvalidFileType(path.as_ref().to_owned())))
.to_string_lossy().to_string();
let meta = try!(fs::symlink_metadata(path.as_ref()));
let mut inode = Inode::default(); let mut inode = Inode::default();
inode.name = name; inode.name = name;
inode.size = meta.len(); inode.size = meta.len();
@ -109,36 +110,35 @@ impl Inode {
} else if meta.file_type().is_symlink() { } else if meta.file_type().is_symlink() {
FileType::Symlink FileType::Symlink
} else { } else {
return Err("Unsupported file type"); return Err(RepositoryError::InvalidFileType(path.as_ref().to_owned()));
}; };
if meta.file_type().is_symlink() { if meta.file_type().is_symlink() {
inode.symlink_target = Some(try!(fs::read_link(path).map_err(|_| "Failed to read symlink")).to_string_lossy().to_string()); inode.symlink_target = Some(try!(fs::read_link(path)).to_string_lossy().to_string());
} }
try!(inode.get_extended_attrs_from(&meta)); try!(inode.get_extended_attrs_from(&meta));
Ok(inode) Ok(inode)
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn create_at<P: AsRef<Path>>(&self, path: P) -> Result<Option<File>, &'static str> { pub fn create_at<P: AsRef<Path>>(&self, path: P) -> Result<Option<File>, RepositoryError> {
let full_path = path.as_ref().join(&self.name); let full_path = path.as_ref().join(&self.name);
let mut file = None; let mut file = None;
match self.file_type { match self.file_type {
FileType::File => { FileType::File => {
file = Some(try!(File::create(&full_path).map_err(|_| "Failed to create file"))); file = Some(try!(File::create(&full_path)));
}, },
FileType::Directory => { FileType::Directory => {
try!(fs::create_dir(&full_path).map_err(|_| "Failed to create directory")); try!(fs::create_dir(&full_path));
}, },
FileType::Symlink => { FileType::Symlink => {
if let Some(ref src) = self.symlink_target { if let Some(ref src) = self.symlink_target {
try!(symlink(src, &full_path));
try!(symlink(src, &full_path).map_err(|_| "Failed to create symlink"));
} else { } else {
return Err("Symlink without destination") return Err(RepositoryIntegrityError::SymlinkWithoutTarget.into())
} }
} }
} }
try!(fs::set_permissions(&full_path, Permissions::from_mode(self.mode)).map_err(|_| "Failed to set permissions")); try!(fs::set_permissions(&full_path, Permissions::from_mode(self.mode)));
//FIXME: set times and gid/uid //FIXME: set times and gid/uid
// https://crates.io/crates/filetime // https://crates.io/crates/filetime
Ok(file) Ok(file)
@ -147,44 +147,48 @@ impl Inode {
impl Repository { impl Repository {
pub fn put_inode<P: AsRef<Path>>(&mut self, path: P) -> Result<Vec<Chunk>, &'static str> { pub fn put_inode<P: AsRef<Path>>(&mut self, path: P) -> Result<Vec<Chunk>, RepositoryError> {
let mut inode = try!(Inode::get_from(path.as_ref())); let mut inode = try!(Inode::get_from(path.as_ref()));
if inode.file_type == FileType::File && inode.size > 0 { if inode.file_type == FileType::File && inode.size > 0 {
let mut file = try!(File::open(path).map_err(|_| "Failed to open file")); let mut file = try!(File::open(path));
if inode.size < 100 { if inode.size < 100 {
let mut data = Vec::with_capacity(inode.size as usize); let mut data = Vec::with_capacity(inode.size as usize);
try!(file.read_to_end(&mut data).map_err(|_| "Failed to read file contents")); try!(file.read_to_end(&mut data));
inode.contents = Some(FileContents::Inline(data.into())); inode.contents = Some(FileContents::Inline(data.into()));
} else { } else {
let chunks = try!(self.put_stream(Mode::Content, &mut file)); let mut chunks = try!(self.put_stream(Mode::Content, &mut file));
inode.contents = Some(FileContents::Chunked(chunks)); if chunks.len() < 10 {
inode.contents = Some(FileContents::ChunkedDirect(chunks));
} else {
let chunks_data = try!(msgpack::encode(&chunks));
chunks = try!(self.put_data(Mode::Meta, &chunks_data));
inode.contents = Some(FileContents::ChunkedIndirect(chunks));
}
} }
} }
let mut inode_data = Vec::new(); self.put_data(Mode::Meta, &try!(msgpack::encode(&inode)))
{
let mut writer = rmp_serde::Serializer::new(&mut inode_data);
try!(inode.serialize(&mut writer).map_err(|_| "Failed to write inode data"));
}
self.put_data(Mode::Meta, &inode_data)
} }
#[inline] #[inline]
pub fn get_inode(&mut self, chunks: &[Chunk]) -> Result<Inode, &'static str> { pub fn get_inode(&mut self, chunks: &[Chunk]) -> Result<Inode, RepositoryError> {
let data = Cursor::new(try!(self.get_data(chunks))); Ok(try!(msgpack::decode(&try!(self.get_data(chunks)))))
let mut reader = rmp_serde::Deserializer::new(data);
Inode::deserialize(&mut reader).map_err(|_| "Failed to read inode data")
} }
#[inline] #[inline]
pub fn save_inode_at<P: AsRef<Path>>(&mut self, inode: &Inode, path: P) -> Result<(), &'static str> { pub fn save_inode_at<P: AsRef<Path>>(&mut self, inode: &Inode, path: P) -> Result<(), RepositoryError> {
if let Some(mut file) = try!(inode.create_at(path.as_ref())) { if let Some(mut file) = try!(inode.create_at(path.as_ref())) {
if let Some(ref contents) = inode.contents { if let Some(ref contents) = inode.contents {
match *contents { match *contents {
FileContents::Inline(ref data) => { FileContents::Inline(ref data) => {
try!(file.write_all(&data).map_err(|_| "Failed to write data to file")); try!(file.write_all(&data));
}, },
FileContents::Chunked(ref chunks) => { FileContents::ChunkedDirect(ref chunks) => {
try!(self.get_stream(chunks, &mut file)); try!(self.get_stream(chunks, &mut file));
},
FileContents::ChunkedIndirect(ref chunks) => {
let chunk_data = try!(self.get_data(chunks));
let chunks: Vec<Chunk> = try!(msgpack::decode(&chunk_data));
try!(self.get_stream(&chunks, &mut file));
} }
} }
} }

View File

@ -5,6 +5,7 @@ mod basic_io;
mod info; mod info;
mod metadata; mod metadata;
mod backup; mod backup;
mod error;
use std::mem; use std::mem;
use std::cmp::max; use std::cmp::max;
@ -15,6 +16,7 @@ use super::index::Index;
use super::bundle::{BundleDb, BundleWriter}; use super::bundle::{BundleDb, BundleWriter};
use super::chunker::Chunker; use super::chunker::Chunker;
pub use self::error::RepositoryError;
pub use self::config::Config; pub use self::config::Config;
pub use self::metadata::Inode; pub use self::metadata::Inode;
pub use self::basic_io::Chunk; pub use self::basic_io::Chunk;
@ -42,20 +44,20 @@ pub struct Repository {
impl Repository { impl Repository {
pub fn create<P: AsRef<Path>>(path: P, config: Config) -> Result<Self, &'static str> { pub fn create<P: AsRef<Path>>(path: P, config: Config) -> Result<Self, RepositoryError> {
let path = path.as_ref().to_owned(); let path = path.as_ref().to_owned();
try!(fs::create_dir(&path).map_err(|_| "Failed to create repository directory")); try!(fs::create_dir(&path));
let bundles = try!(BundleDb::create( let bundles = try!(BundleDb::create(
path.join("bundles"), path.join("bundles"),
config.compression.clone(), config.compression.clone(),
None, //FIXME: store encryption in config None, //FIXME: store encryption in config
config.checksum config.checksum
).map_err(|_| "Failed to create bundle db")); ));
let index = try!(Index::create(&path.join("index")).map_err(|_| "Failed to create index")); let index = try!(Index::create(&path.join("index")));
try!(config.save(path.join("config.yaml")).map_err(|_| "Failed to save config")); try!(config.save(path.join("config.yaml")));
let bundle_map = BundleMap::create(); let bundle_map = BundleMap::create();
try!(bundle_map.save(path.join("bundles.map")).map_err(|_| "Failed to save bundle map")); try!(bundle_map.save(path.join("bundles.map")));
try!(fs::create_dir(&path.join("backups")).map_err(|_| "Failed to create backup directory")); try!(fs::create_dir(&path.join("backups")));
Ok(Repository{ Ok(Repository{
path: path, path: path,
chunker: config.chunker.create(), chunker: config.chunker.create(),
@ -70,17 +72,17 @@ impl Repository {
}) })
} }
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self, &'static str> { pub fn open<P: AsRef<Path>>(path: P) -> Result<Self, RepositoryError> {
let path = path.as_ref().to_owned(); let path = path.as_ref().to_owned();
let config = try!(Config::load(path.join("config.yaml")).map_err(|_| "Failed to load config")); let config = try!(Config::load(path.join("config.yaml")));
let bundles = try!(BundleDb::open( let bundles = try!(BundleDb::open(
path.join("bundles"), path.join("bundles"),
config.compression.clone(), config.compression.clone(),
None, //FIXME: load encryption from config None, //FIXME: load encryption from config
config.checksum config.checksum
).map_err(|_| "Failed to open bundle db")); ));
let index = try!(Index::open(&path.join("index")).map_err(|_| "Failed to open index")); let index = try!(Index::open(&path.join("index")));
let bundle_map = try!(BundleMap::load(path.join("bundles.map")).map_err(|_| "Failed to load bundle map")); let bundle_map = try!(BundleMap::load(path.join("bundles.map")));
let mut repo = Repository { let mut repo = Repository {
path: path, path: path,
chunker: config.chunker.create(), chunker: config.chunker.create(),
@ -99,8 +101,9 @@ impl Repository {
} }
#[inline] #[inline]
fn save_bundle_map(&self) -> Result<(), &'static str> { fn save_bundle_map(&self) -> Result<(), RepositoryError> {
self.bundle_map.save(self.path.join("bundles.map")) try!(self.bundle_map.save(self.path.join("bundles.map")));
Ok(())
} }
#[inline] #[inline]
@ -112,12 +115,12 @@ impl Repository {
id id
} }
pub fn flush(&mut self) -> Result<(), &'static str> { pub fn flush(&mut self) -> Result<(), RepositoryError> {
if self.content_bundle.is_some() { if self.content_bundle.is_some() {
let mut finished = None; let mut finished = None;
mem::swap(&mut self.content_bundle, &mut finished); mem::swap(&mut self.content_bundle, &mut finished);
{ {
let bundle = try!(self.bundles.add_bundle(finished.unwrap()).map_err(|_| "Failed to write finished bundle")); let bundle = try!(self.bundles.add_bundle(finished.unwrap()));
self.bundle_map.set(self.next_content_bundle, bundle); self.bundle_map.set(self.next_content_bundle, bundle);
} }
self.next_content_bundle = self.next_free_bundle_id() self.next_content_bundle = self.next_free_bundle_id()
@ -126,12 +129,12 @@ impl Repository {
let mut finished = None; let mut finished = None;
mem::swap(&mut self.meta_bundle, &mut finished); mem::swap(&mut self.meta_bundle, &mut finished);
{ {
let bundle = try!(self.bundles.add_bundle(finished.unwrap()).map_err(|_| "Failed to write finished bundle")); let bundle = try!(self.bundles.add_bundle(finished.unwrap()));
self.bundle_map.set(self.next_meta_bundle, bundle); self.bundle_map.set(self.next_meta_bundle, bundle);
} }
self.next_meta_bundle = self.next_free_bundle_id() self.next_meta_bundle = self.next_free_bundle_id()
} }
try!(self.save_bundle_map().map_err(|_| "Failed to save bundle map")); try!(self.save_bundle_map());
Ok(()) Ok(())
} }
} }

View File

@ -1,11 +1,40 @@
use std::ptr; use std::ptr;
use std::ffi::{CStr, CString}; use std::ffi::{CStr, CString};
use std::io::Write; use std::io::{self, Write};
use std::str::FromStr; use std::str::FromStr;
use squash::*; use squash::*;
quick_error!{
#[derive(Debug)]
pub enum CompressionError {
UnsupportedCodec(name: String) {
description("Unsupported codec")
display("Unsupported codec: {}", name)
}
InitializeCodec {
description("Failed to initialize codec")
}
InitializeOptions {
description("Failed to set codec options")
}
InitializeStream {
description("Failed to create stream")
}
Operation(reason: &'static str) {
description("Operation failed")
display("Operation failed: {}", reason)
}
Output(err: io::Error) {
from()
cause(err)
description("Failed to write to output")
}
}
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum Compression { pub enum Compression {
Snappy(()), Snappy(()),
@ -34,9 +63,9 @@ impl Compression {
} }
#[inline] #[inline]
pub fn from_string(name: &str) -> Result<Self, &'static str> { pub fn from_string(name: &str) -> Result<Self, CompressionError> {
let (name, level) = if let Some(pos) = name.find('/') { let (name, level) = if let Some(pos) = name.find('/') {
let level = try!(u8::from_str(&name[pos+1..]).map_err(|_| "Level must be a number")); let level = try!(u8::from_str(&name[pos+1..]).map_err(|_| CompressionError::UnsupportedCodec(name.to_string())));
let name = &name[..pos]; let name = &name[..pos];
(name, level) (name, level)
} else { } else {
@ -48,7 +77,7 @@ impl Compression {
"deflate" | "zlib" | "gzip" => Ok(Compression::Deflate(level)), "deflate" | "zlib" | "gzip" => Ok(Compression::Deflate(level)),
"brotli" => Ok(Compression::Brotli(level)), "brotli" => Ok(Compression::Brotli(level)),
"lzma2" => Ok(Compression::Lzma2(level)), "lzma2" => Ok(Compression::Lzma2(level)),
_ => Err("Unsupported codec") _ => Err(CompressionError::UnsupportedCodec(name.to_string()))
} }
} }
@ -64,11 +93,11 @@ impl Compression {
} }
#[inline] #[inline]
fn codec(&self) -> Result<*mut SquashCodec, &'static str> { fn codec(&self) -> Result<*mut SquashCodec, CompressionError> {
let name = CString::new(self.name().as_bytes()).unwrap(); let name = CString::new(self.name().as_bytes()).unwrap();
let codec = unsafe { squash_get_codec(name.as_ptr()) }; let codec = unsafe { squash_get_codec(name.as_ptr()) };
if codec.is_null() { if codec.is_null() {
return Err("Unsupported algorithm") return Err(CompressionError::InitializeCodec)
} }
Ok(codec) Ok(codec)
} }
@ -84,12 +113,12 @@ impl Compression {
} }
} }
fn options(&self) -> Result<*mut SquashOptions, &'static str> { fn options(&self) -> Result<*mut SquashOptions, CompressionError> {
let codec = try!(self.codec()); let codec = try!(self.codec());
let options = unsafe { squash_options_new(codec, ptr::null::<()>()) }; let options = unsafe { squash_options_new(codec, ptr::null::<()>()) };
if let Some(level) = self.level() { if let Some(level) = self.level() {
if options.is_null() { if options.is_null() {
return Err("Algorithm does not support a level") return Err(CompressionError::InitializeOptions)
} }
let option = CString::new("level"); let option = CString::new("level");
let value = CString::new(format!("{}", level)); let value = CString::new(format!("{}", level));
@ -100,18 +129,18 @@ impl Compression {
)}; )};
if res != SQUASH_OK { if res != SQUASH_OK {
//panic!(unsafe { CStr::from_ptr(squash_status_to_string(res)).to_str().unwrap() }); //panic!(unsafe { CStr::from_ptr(squash_status_to_string(res)).to_str().unwrap() });
return Err("Failed to set compression level") return Err(CompressionError::InitializeOptions)
} }
} }
Ok(options) Ok(options)
} }
#[inline] #[inline]
fn error(code: SquashStatus) -> &'static str { fn error(code: SquashStatus) -> CompressionError {
unsafe { CStr::from_ptr(squash_status_to_string(code)).to_str().unwrap() } CompressionError::Operation(unsafe { CStr::from_ptr(squash_status_to_string(code)).to_str().unwrap() })
} }
pub fn compress(&self, data: &[u8]) -> Result<Vec<u8>, &'static str> { pub fn compress(&self, data: &[u8]) -> Result<Vec<u8>, CompressionError> {
let codec = try!(self.codec()); let codec = try!(self.codec());
let options = try!(self.options()); let options = try!(self.options());
let mut size = data.len() * 2 + 500; let mut size = data.len() * 2 + 500;
@ -138,7 +167,7 @@ impl Compression {
Ok(buf) Ok(buf)
} }
pub fn decompress(&self, data: &[u8]) -> Result<Vec<u8>, &'static str> { pub fn decompress(&self, data: &[u8]) -> Result<Vec<u8>, CompressionError> {
let codec = try!(self.codec()); let codec = try!(self.codec());
let mut size = unsafe { squash_codec_get_uncompressed_size( let mut size = unsafe { squash_codec_get_uncompressed_size(
codec, codec,
@ -165,26 +194,26 @@ impl Compression {
} }
#[inline] #[inline]
pub fn compress_stream(&self) -> Result<CompressionStream, &'static str> { pub fn compress_stream(&self) -> Result<CompressionStream, CompressionError> {
let codec = try!(self.codec()); let codec = try!(self.codec());
let options = try!(self.options()); let options = try!(self.options());
let stream = unsafe { squash_stream_new_with_options( let stream = unsafe { squash_stream_new_with_options(
codec, SQUASH_STREAM_COMPRESS, options codec, SQUASH_STREAM_COMPRESS, options
) }; ) };
if stream.is_null() { if stream.is_null() {
return Err("Failed to create stream"); return Err(CompressionError::InitializeStream);
} }
Ok(CompressionStream::new(unsafe { Box::from_raw(stream) })) Ok(CompressionStream::new(unsafe { Box::from_raw(stream) }))
} }
#[inline] #[inline]
pub fn decompress_stream(&self) -> Result<CompressionStream, &'static str> { pub fn decompress_stream(&self) -> Result<CompressionStream, CompressionError> {
let codec = try!(self.codec()); let codec = try!(self.codec());
let stream = unsafe { squash_stream_new( let stream = unsafe { squash_stream_new(
codec, SQUASH_STREAM_DECOMPRESS, ptr::null::<()>() codec, SQUASH_STREAM_DECOMPRESS, ptr::null::<()>()
) }; ) };
if stream.is_null() { if stream.is_null() {
return Err("Failed to create stream"); return Err(CompressionError::InitializeStream);
} }
Ok(CompressionStream::new(unsafe { Box::from_raw(stream) })) Ok(CompressionStream::new(unsafe { Box::from_raw(stream) }))
} }
@ -205,7 +234,7 @@ impl CompressionStream {
} }
} }
pub fn process<W: Write>(&mut self, input: &[u8], output: &mut W) -> Result<(), &'static str> { pub fn process<W: Write>(&mut self, input: &[u8], output: &mut W) -> Result<(), CompressionError> {
let mut stream = &mut *self.stream; let mut stream = &mut *self.stream;
stream.next_in = input.as_ptr(); stream.next_in = input.as_ptr();
stream.avail_in = input.len(); stream.avail_in = input.len();
@ -217,7 +246,7 @@ impl CompressionStream {
return Err(Compression::error(res)) return Err(Compression::error(res))
} }
let output_size = self.buffer.len() - stream.avail_out; let output_size = self.buffer.len() - stream.avail_out;
try!(output.write_all(&self.buffer[..output_size]).map_err(|_| "Failed to write to output")); try!(output.write_all(&self.buffer[..output_size]));
if res != SQUASH_PROCESSING { if res != SQUASH_PROCESSING {
break break
} }
@ -225,7 +254,7 @@ impl CompressionStream {
Ok(()) Ok(())
} }
pub fn finish<W: Write>(mut self, output: &mut W) -> Result<(), &'static str> { pub fn finish<W: Write>(mut self, output: &mut W) -> Result<(), CompressionError> {
let mut stream = &mut *self.stream; let mut stream = &mut *self.stream;
loop { loop {
stream.next_out = self.buffer.as_mut_ptr(); stream.next_out = self.buffer.as_mut_ptr();
@ -235,7 +264,7 @@ impl CompressionStream {
return Err(Compression::error(res)) return Err(Compression::error(res))
} }
let output_size = self.buffer.len() - stream.avail_out; let output_size = self.buffer.len() - stream.avail_out;
try!(output.write_all(&self.buffer[..output_size]).map_err(|_| "Failed to write to output")); try!(output.write_all(&self.buffer[..output_size]));
if res != SQUASH_PROCESSING { if res != SQUASH_PROCESSING {
break break
} }

View File

@ -1,5 +1,16 @@
use std::collections::HashMap; use std::collections::HashMap;
quick_error!{
    // Error type for the encryption/decryption layer.
    // quick_error! generates the Display, description() and Error impls
    // from the clauses inside each variant block.
    #[derive(Debug)]
    pub enum EncryptionError {
        // Catch-all failure; `reason` is a static human-readable message
        // interpolated into the generated Display output.
        Operation(reason: &'static str) {
            description("Operation failed")
            display("Operation failed: {}", reason)
        }
    }
}
#[derive(Clone)] #[derive(Clone)]
pub enum EncryptionMethod { pub enum EncryptionMethod {
Dummy Dummy
@ -36,12 +47,12 @@ impl Crypto {
} }
#[inline] #[inline]
pub fn encrypt(&self, _enc: Encryption, _data: &[u8]) -> Result<Vec<u8>, &'static str> { pub fn encrypt(&self, _enc: Encryption, _data: &[u8]) -> Result<Vec<u8>, EncryptionError> {
unimplemented!() unimplemented!()
} }
#[inline] #[inline]
pub fn decrypt(&self, _enc: Encryption, _data: &[u8]) -> Result<Vec<u8>, &'static str> { pub fn decrypt(&self, _enc: Encryption, _data: &[u8]) -> Result<Vec<u8>, EncryptionError> {
unimplemented!() unimplemented!()
} }
} }

View File

@ -3,6 +3,7 @@ mod compression;
mod encryption; mod encryption;
mod hash; mod hash;
mod lru_cache; mod lru_cache;
pub mod msgpack;
pub use self::checksum::*; pub use self::checksum::*;
pub use self::compression::*; pub use self::compression::*;
@ -10,6 +11,7 @@ pub use self::encryption::*;
pub use self::hash::*; pub use self::hash::*;
pub use self::lru_cache::*; pub use self::lru_cache::*;
pub fn to_file_size(size: u64) -> String { pub fn to_file_size(size: u64) -> String {
let mut size = size as f32; let mut size = size as f32;
if size >= 512.0 { if size >= 512.0 {

34
src/util/msgpack.rs Normal file
View File

@ -0,0 +1,34 @@
use rmp_serde;
use serde::{Serialize, Deserialize};
use std::io::{Write, Read, Cursor};
pub use serde::bytes::ByteBuf as Bytes;
pub use rmp_serde::decode::Error as DecodeError;
pub use rmp_serde::encode::Error as EncodeError;
/// Serializes `t` into a freshly allocated msgpack-encoded byte vector.
///
/// Returns an `EncodeError` if serialization fails.
pub fn encode<T: Serialize>(t: &T) -> Result<Vec<u8>, EncodeError> {
    let mut buf = Vec::new();
    // Serializer borrows `buf` only for the duration of this call,
    // so no explicit scope is needed before returning the buffer.
    try!(t.serialize(&mut rmp_serde::Serializer::new(&mut buf)));
    Ok(buf)
}
/// Serializes `t` as msgpack directly into the writer `w`.
///
/// Returns an `EncodeError` if serialization or the underlying write fails.
pub fn encode_to_stream<T: Serialize>(t: T, w: &mut Write) -> Result<(), EncodeError> {
    t.serialize(&mut rmp_serde::Serializer::new(w))
}
pub fn decode<T: Deserialize>(data: &[u8]) -> Result<T, DecodeError> {
let data = Cursor::new(data);
let mut reader = rmp_serde::Deserializer::new(data);
T::deserialize(&mut reader)
}
pub fn decode_from_stream<T: Deserialize>(r: &mut Read) -> Result<T, DecodeError> {
let mut reader = rmp_serde::Deserializer::new(r);
T::deserialize(&mut reader)
}