pull/10/head
Dennis Schwerdel 2017-03-16 09:42:30 +01:00
parent 717fc7472d
commit 0b673d145f
17 changed files with 558 additions and 351 deletions

View File

@ -1,24 +1,77 @@
use std::path::{Path, PathBuf};
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::{Read, Write, Seek, SeekFrom, BufWriter, BufReader};
use std::io::{self, Read, Write, Seek, SeekFrom, BufWriter, BufReader};
use std::cmp::max;
use std::fmt::{self, Debug, Write as FmtWrite};
use std::sync::{Arc, Mutex};
use serde::{self, Serialize, Deserialize};
use serde::bytes::ByteBuf;
use rmp_serde;
use errors::BundleError;
use util::*;
static HEADER_STRING: [u8; 7] = *b"zbundle";
static HEADER_VERSION: u8 = 1;
// TODO: Test cases
// TODO: Benchmarks
quick_error!{
#[derive(Debug)]
pub enum BundleError {
List(err: io::Error) {
cause(err)
description("Failed to list bundles")
}
Read(err: io::Error, path: PathBuf) {
cause(err)
description("Failed to read bundle")
}
Decode(err: msgpack::DecodeError, path: PathBuf) {
cause(err)
description("Failed to decode bundle header")
}
Write(err: io::Error, path: PathBuf) {
cause(err)
description("Failed to write bundle")
}
Encode(err: msgpack::EncodeError, path: PathBuf) {
cause(err)
description("Failed to encode bundle header")
}
WrongHeader(path: PathBuf) {
description("Wrong header")
display("Wrong header on bundle {:?}", path)
}
WrongVersion(path: PathBuf, version: u8) {
description("Wrong version")
display("Wrong version on bundle {:?}: {}", path, version)
}
Integrity(bundle: BundleId, reason: &'static str) {
description("Bundle has an integrity error")
display("Bundle {:?} has an integrity error: {}", bundle, reason)
}
NoSuchBundle(bundle: BundleId) {
description("No such bundle")
display("No such bundle: {:?}", bundle)
}
NoSuchChunk(bundle: BundleId, id: usize) {
description("Bundle has no such chunk")
display("Bundle {:?} has no chunk with that id: {}", bundle, id)
}
Compression(err: CompressionError) {
from()
cause(err)
}
Encryption(err: EncryptionError) {
from()
cause(err)
}
Remove(err: io::Error, bundle: BundleId) {
cause(err)
description("Failed to remove bundle")
display("Failed to remove bundle {}", bundle)
}
}
}
#[derive(Hash, PartialEq, Eq, Clone, Default)]
@ -32,7 +85,7 @@ impl Serialize for BundleId {
impl Deserialize for BundleId {
fn deserialize<D: serde::Deserializer>(de: D) -> Result<Self, D::Error> {
let bytes = try!(ByteBuf::deserialize(de));
let bytes = try!(msgpack::Bytes::deserialize(de));
Ok(BundleId(bytes.into()))
}
}
@ -92,7 +145,7 @@ impl Default for BundleInfo {
id: BundleId(vec![]),
compression: None,
encryption: None,
checksum: (ChecksumType::Blake2_256, ByteBuf::new()),
checksum: (ChecksumType::Blake2_256, msgpack::Bytes::new()),
raw_size: 0,
encoded_size: 0,
chunk_count: 0,
@ -135,34 +188,28 @@ impl Bundle {
}
pub fn load(path: PathBuf, crypto: Arc<Mutex<Crypto>>) -> Result<Self, BundleError> {
let mut file = BufReader::new(try!(File::open(&path)
.map_err(|e| BundleError::Read(e, path.clone(), "Failed to open bundle file"))));
let mut file = BufReader::new(try!(File::open(&path).map_err(|e| BundleError::Read(e, path.clone()))));
let mut header = [0u8; 8];
try!(file.read_exact(&mut header)
.map_err(|e| BundleError::Read(e, path.clone(), "Failed to read bundle header")));
try!(file.read_exact(&mut header).map_err(|e| BundleError::Read(e, path.clone())));
if header[..HEADER_STRING.len()] != HEADER_STRING {
return Err(BundleError::Format(path.clone(), "Wrong header string"))
return Err(BundleError::WrongHeader(path.clone()))
}
let version = header[HEADER_STRING.len()];
if version != HEADER_VERSION {
return Err(BundleError::Format(path.clone(), "Unsupported bundle file version"))
return Err(BundleError::WrongVersion(path.clone(), version))
}
let mut reader = rmp_serde::Deserializer::new(file);
let header = try!(BundleInfo::deserialize(&mut reader)
let header = try!(msgpack::decode_from_stream(&mut file)
.map_err(|e| BundleError::Decode(e, path.clone())));
file = reader.into_inner();
let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize;
Ok(Bundle::new(path, version, content_start, crypto, header))
}
#[inline]
fn load_encoded_contents(&self) -> Result<Vec<u8>, BundleError> {
let mut file = BufReader::new(try!(File::open(&self.path)
.map_err(|e| BundleError::Read(e, self.path.clone(), "Failed to open bundle file"))));
try!(file.seek(SeekFrom::Start(self.content_start as u64))
.map_err(|e| BundleError::Read(e, self.path.clone(), "Failed to seek to data")));
let mut file = BufReader::new(try!(File::open(&self.path).map_err(|e| BundleError::Read(e, self.path.clone()))));
try!(file.seek(SeekFrom::Start(self.content_start as u64)).map_err(|e| BundleError::Read(e, self.path.clone())));
let mut data = Vec::with_capacity(max(self.info.encoded_size, self.info.raw_size)+1024);
try!(file.read_to_end(&mut data).map_err(|_| "Failed to read data"));
try!(file.read_to_end(&mut data).map_err(|e| BundleError::Read(e, self.path.clone())));
Ok(data)
}
@ -185,7 +232,7 @@ impl Bundle {
#[inline]
pub fn get_chunk_position(&self, id: usize) -> Result<(usize, usize), BundleError> {
if id >= self.info.chunk_count {
return Err("Invalid chunk id".into())
return Err(BundleError::NoSuchChunk(self.id(), id))
}
Ok((self.chunk_positions[id], self.info.chunk_sizes[id]))
}
@ -200,8 +247,7 @@ impl Bundle {
"Individual chunk sizes do not add up to total size"))
}
if !full {
let size = try!(fs::metadata(&self.path)
.map_err(|e| BundleError::Read(e, self.path.clone(), "Failed to get size of file"))
let size = try!(fs::metadata(&self.path).map_err(|e| BundleError::Read(e, self.path.clone()))
).len();
if size as usize != self.info.encoded_size + self.content_start {
return Err(BundleError::Integrity(self.id(),
@ -290,14 +336,10 @@ impl BundleWriter {
let id = BundleId(checksum.1.to_vec());
let (folder, file) = db.bundle_path(&id);
let path = folder.join(file);
try!(fs::create_dir_all(&folder)
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to create folder")));
let mut file = BufWriter::new(try!(File::create(&path)
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to create bundle file"))));
try!(file.write_all(&HEADER_STRING)
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to write bundle header")));
try!(file.write_all(&[HEADER_VERSION])
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to write bundle header")));
try!(fs::create_dir_all(&folder).map_err(|e| BundleError::Write(e, path.clone())));
let mut file = BufWriter::new(try!(File::create(&path).map_err(|e| BundleError::Write(e, path.clone()))));
try!(file.write_all(&HEADER_STRING).map_err(|e| BundleError::Write(e, path.clone())));
try!(file.write_all(&[HEADER_VERSION]).map_err(|e| BundleError::Write(e, path.clone())));
let header = BundleInfo {
checksum: checksum,
compression: self.compression,
@ -308,14 +350,10 @@ impl BundleWriter {
encoded_size: encoded_size,
chunk_sizes: self.chunk_sizes
};
{
let mut writer = rmp_serde::Serializer::new(&mut file);
try!(header.serialize(&mut writer)
.map_err(|e| BundleError::Encode(e, path.clone())));
}
try!(msgpack::encode_to_stream(&header, &mut file)
.map_err(|e| BundleError::Encode(e, path.clone())));
let content_start = file.seek(SeekFrom::Current(0)).unwrap() as usize;
try!(file.write_all(&self.data)
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to write bundle data")));
try!(file.write_all(&self.data).map_err(|e| BundleError::Write(e, path.clone())));
Ok(Bundle::new(path, HEADER_VERSION, content_start, self.crypto, header))
}
@ -402,7 +440,7 @@ impl BundleDb {
pub fn create<P: AsRef<Path>>(path: P, compression: Option<Compression>, encryption: Option<Encryption>, checksum: ChecksumType) -> Result<Self, BundleError> {
let path = path.as_ref().to_owned();
try!(fs::create_dir_all(&path)
.map_err(|e| BundleError::Write(e, path.clone(), "Failed to create folder")));
.map_err(|e| BundleError::Write(e, path.clone())));
Ok(Self::new(path, compression, encryption, checksum))
}
@ -421,7 +459,7 @@ impl BundleDb {
}
pub fn get_chunk(&mut self, bundle_id: &BundleId, id: usize) -> Result<Vec<u8>, BundleError> {
let bundle = try!(self.bundles.get(bundle_id).ok_or("Bundle not found"));
let bundle = try!(self.bundles.get(bundle_id).ok_or(BundleError::NoSuchBundle(bundle_id.clone())));
let (pos, len) = try!(bundle.get_chunk_position(id));
let mut chunk = Vec::with_capacity(len);
if let Some(data) = self.bundle_cache.get(bundle_id) {
@ -457,7 +495,7 @@ impl BundleDb {
if let Some(bundle) = self.bundles.remove(bundle) {
fs::remove_file(&bundle.path).map_err(|e| BundleError::Remove(e, bundle.id()))
} else {
Err("No such bundle".into())
Err(BundleError::NoSuchBundle(bundle.clone()))
}
}

View File

@ -1,6 +1,4 @@
use std::io::{Write, Read};
use super::errors::ChunkerError;
use std::io::{self, Write, Read};
mod ae;
mod rabin;
@ -18,6 +16,27 @@ pub use self::fastcdc::FastCdcChunker;
// https://borgbackup.readthedocs.io/en/stable/internals.html#chunks
// https://github.com/bup/bup/blob/master/lib/bup/bupsplit.c
quick_error!{
#[derive(Debug)]
pub enum ChunkerError {
Read(err: io::Error) {
from(err)
cause(err)
description("Failed to read")
}
Write(err: io::Error) {
from(err)
cause(err)
description("Failed to write")
}
Custom {
from(&'static str)
description("Custom error")
}
}
}
#[derive(Debug, Eq, PartialEq)]
pub enum ChunkerStatus {
Continue,
@ -35,6 +54,7 @@ pub enum Chunker {
FastCdc(Box<FastCdcChunker>)
}
impl IChunker for Chunker {
#[inline]
fn get_type(&self) -> ChunkerType {

View File

@ -1,72 +0,0 @@
use std::io;
use std::path::PathBuf;
use rmp_serde::decode::Error as MsgpackDecode;
use rmp_serde::encode::Error as MsgpackEncode;
use super::bundle::BundleId;
quick_error!{
#[derive(Debug)]
pub enum BundleError {
List(err: io::Error) {
cause(err)
description("Failed to list bundles")
}
Read(err: io::Error, path: PathBuf, reason: &'static str) {
cause(err)
description("Failed to read bundle")
display("Failed to read bundle {:?}: {}", path, reason)
}
Decode(err: MsgpackDecode, path: PathBuf) {
cause(err)
description("Failed to decode bundle header")
}
Write(err: io::Error, path: PathBuf, reason: &'static str) {
cause(err)
description("Failed to write bundle")
display("Failed to write bundle {:?}: {}", path, reason)
}
Encode(err: MsgpackEncode, path: PathBuf) {
cause(err)
description("Failed to encode bundle header")
}
Format(path: PathBuf, reason: &'static str) {
description("Failed to decode bundle")
display("Failed to decode bundle {:?}: {}", path, reason)
}
Integrity(bundle: BundleId, reason: &'static str) {
description("Bundle has an integrity error")
display("Bundle {:?} has an integrity error: {}", bundle, reason)
}
Remove(err: io::Error, bundle: BundleId) {
cause(err)
description("Failed to remove bundle")
display("Failed to remove bundle {}", bundle)
}
Custom {
from(&'static str)
description("Custom error")
}
}
}
quick_error!{
#[derive(Debug)]
pub enum ChunkerError {
Read(err: io::Error) {
from(err)
cause(err)
description("Failed to read")
}
Write(err: io::Error) {
from(err)
cause(err)
description("Failed to write")
}
Custom {
from(&'static str)
description("Custom error")
}
}
}

View File

@ -65,31 +65,56 @@ pub struct Index {
data: &'static mut [Entry]
}
#[derive(Debug)]
pub enum Error {
IOError(io::Error),
MapError(MapError),
NoHeader,
MagicError,
VersionError,
quick_error!{
#[derive(Debug)]
pub enum IndexError {
Io(err: io::Error) {
from()
cause(err)
description("Failed to open index file")
}
Mmap(err: MapError) {
from()
cause(err)
description("Failed to write bundle map")
}
NoHeader {
description("Index file does not contain a header")
}
WrongHeader {
description("Wrong header")
}
WrongVersion(version: u8) {
description("Wrong version")
display("Wrong version: {}", version)
}
WrongPosition(key: Hash, should: usize, is: LocateResult) {
description("Key at wrong position")
display("Key {} has wrong position, expected at: {}, but is at: {:?}", key, should, is)
}
WrongEntryCount(header: usize, actual: usize) {
description("Wrong entry count")
display("Wrong entry count, expected {}, but is {}", header, actual)
}
}
}
#[derive(Debug)]
enum LocateResult {
pub enum LocateResult {
Found(usize), // Found the key at this position
Hole(usize), // Found a hole at this position while searching for a key
Steal(usize) // Found a spot to steal at this position while searching for a key
}
impl Index {
pub fn new(path: &Path, create: bool) -> Result<Index, Error> {
let fd = try!(OpenOptions::new().read(true).write(true).create(create).open(path).map_err(|e| { Error::IOError(e) }));
pub fn new(path: &Path, create: bool) -> Result<Index, IndexError> {
let fd = try!(OpenOptions::new().read(true).write(true).create(create).open(path));
if create {
try!(Index::resize_fd(&fd, INITIAL_SIZE));
}
let mmap = try!(Index::map_fd(&fd));
if mmap.len() < mem::size_of::<Header>() {
return Err(Error::NoHeader);
return Err(IndexError::NoHeader);
}
let data = Index::mmap_as_slice(&mmap, INITIAL_SIZE as usize);
let mut index = Index{capacity: 0, max_entries: 0, min_entries: 0, entries: 0, fd: fd, mmap: mmap, data: data};
@ -105,10 +130,10 @@ impl Index {
header.capacity = INITIAL_SIZE as u64;
} else {
if header.magic != MAGIC {
return Err(Error::MagicError);
return Err(IndexError::WrongHeader);
}
if header.version != VERSION {
return Err(Error::VersionError);
return Err(IndexError::WrongVersion(header.version));
}
}
capacity = header.capacity;
@ -118,34 +143,34 @@ impl Index {
index.set_capacity(capacity as usize);
index.entries = entries as usize;
}
debug_assert!(index.is_consistent(), "Inconsistent after creation");
debug_assert!(index.check().is_ok(), "Inconsistent after creation");
Ok(index)
}
#[inline]
pub fn open(path: &Path) -> Result<Index, Error> {
pub fn open(path: &Path) -> Result<Index, IndexError> {
Index::new(path, false)
}
#[inline]
pub fn create(path: &Path) -> Result<Index, Error> {
pub fn create(path: &Path) -> Result<Index, IndexError> {
Index::new(path, true)
}
#[inline]
fn map_fd(fd: &File) -> Result<MemoryMap, Error> {
fn map_fd(fd: &File) -> Result<MemoryMap, IndexError> {
MemoryMap::new(
try!(fd.metadata().map_err(Error::IOError)).len() as usize,
try!(fd.metadata().map_err(IndexError::Io)).len() as usize,
&[MapOption::MapReadable,
MapOption::MapWritable,
MapOption::MapFd(fd.as_raw_fd()),
MapOption::MapNonStandardFlags(0x0001) //libc::consts::os::posix88::MAP_SHARED
]).map_err(|e| { Error::MapError(e) })
]).map_err(IndexError::Mmap)
}
#[inline]
fn resize_fd(fd: &File, capacity: usize) -> Result<(), Error> {
fd.set_len((mem::size_of::<Header>() + capacity * mem::size_of::<Entry>()) as u64).map_err( Error::IOError)
fn resize_fd(fd: &File, capacity: usize) -> Result<(), IndexError> {
fd.set_len((mem::size_of::<Header>() + capacity * mem::size_of::<Entry>()) as u64).map_err(IndexError::Io)
}
#[inline]
@ -172,7 +197,7 @@ impl Index {
self.max_entries = (capacity as f64 * MAX_USAGE) as usize;
}
fn reinsert(&mut self, start: usize, end: usize) -> Result<(), Error> {
fn reinsert(&mut self, start: usize, end: usize) -> Result<(), IndexError> {
for pos in start..end {
let key;
let data;
@ -191,7 +216,7 @@ impl Index {
Ok(())
}
fn shrink(&mut self) -> Result<bool, Error> {
fn shrink(&mut self) -> Result<bool, IndexError> {
if self.entries >= self.min_entries || self.capacity <= INITIAL_SIZE {
return Ok(false)
}
@ -206,7 +231,7 @@ impl Index {
Ok(true)
}
fn extend(&mut self) -> Result<bool, Error> {
fn extend(&mut self) -> Result<bool, IndexError> {
if self.entries <= self.max_entries {
return Ok(false)
}
@ -220,8 +245,7 @@ impl Index {
Ok(true)
}
#[allow(dead_code)]
pub fn is_consistent(&self) -> bool {
pub fn check(&self) -> Result<(), IndexError> {
let mut entries = 0;
for pos in 0..self.capacity {
let entry = &self.data[pos];
@ -231,30 +255,17 @@ impl Index {
entries += 1;
match self.locate(&entry.key) {
LocateResult::Found(p) if p == pos => true,
found => {
println!("Inconsistency found: Key {:?} should be at {} but is at {:?}", entry.key, pos, found);
return false
}
found => return Err(IndexError::WrongPosition(entry.key, pos, found))
};
}
if entries != self.entries {
println!("Inconsistency found: Index contains {} entries, should contain {}", entries, self.entries);
return false
}
true
}
pub fn check(&self) -> Result<(), &'static str> {
//TODO: proper errors instead of string
if self.is_consistent() {
Ok(())
} else {
Err("Inconsistent")
return Err(IndexError::WrongEntryCount(self.entries, entries));
}
Ok(())
}
#[inline]
fn increase_count(&mut self) -> Result<(), Error> {
fn increase_count(&mut self) -> Result<(), IndexError> {
self.entries += 1;
try!(self.extend());
self.write_header();
@ -262,7 +273,7 @@ impl Index {
}
#[inline]
fn decrease_count(&mut self) -> Result<(), Error> {
fn decrease_count(&mut self) -> Result<(), IndexError> {
self.entries -= 1;
try!(self.shrink());
self.write_header();
@ -328,7 +339,7 @@ impl Index {
/// Adds the key, data pair into the table.
/// If the key existed in the table before, it is overwritten and false is returned.
/// Otherwise it will be added to the table and true is returned.
pub fn set(&mut self, key: &Hash, data: &Location) -> Result<bool, Error> {
pub fn set(&mut self, key: &Hash, data: &Location) -> Result<bool, IndexError> {
match self.locate(key) {
LocateResult::Found(pos) => {
self.data[pos].data = *data;
@ -374,7 +385,7 @@ impl Index {
#[inline]
pub fn contains(&self, key: &Hash) -> bool {
debug_assert!(self.is_consistent(), "Inconsistent before get");
debug_assert!(self.check().is_ok(), "Inconsistent before get");
match self.locate(key) {
LocateResult::Found(_) => true,
_ => false
@ -383,7 +394,7 @@ impl Index {
#[inline]
pub fn get(&self, key: &Hash) -> Option<Location> {
debug_assert!(self.is_consistent(), "Inconsistent before get");
debug_assert!(self.check().is_ok(), "Inconsistent before get");
match self.locate(key) {
LocateResult::Found(pos) => Some(self.data[pos].data),
_ => None
@ -392,7 +403,7 @@ impl Index {
#[inline]
pub fn modify<F>(&mut self, key: &Hash, mut f: F) -> bool where F: FnMut(&mut Location) {
debug_assert!(self.is_consistent(), "Inconsistent before get");
debug_assert!(self.check().is_ok(), "Inconsistent before get");
match self.locate(key) {
LocateResult::Found(pos) => {
f(&mut self.data[pos].data);
@ -403,7 +414,7 @@ impl Index {
}
#[inline]
pub fn delete(&mut self, key: &Hash) -> Result<bool, Error> {
pub fn delete(&mut self, key: &Hash) -> Result<bool, IndexError> {
match self.locate(key) {
LocateResult::Found(pos) => {
self.backshift(pos);
@ -414,7 +425,7 @@ impl Index {
}
}
pub fn filter<F>(&mut self, mut f: F) -> Result<usize, Error> where F: FnMut(&Hash, &Location) -> bool {
pub fn filter<F>(&mut self, mut f: F) -> Result<usize, IndexError> where F: FnMut(&Hash, &Location) -> bool {
//TODO: is it faster to walk in reverse direction?
let mut deleted = 0;
let mut pos = 0;
@ -485,7 +496,7 @@ impl Index {
#[inline]
pub fn size(&self) -> usize {
self.mmap.len()
self.mmap.len()
}
#[inline]

View File

@ -10,7 +10,6 @@ extern crate serde_yaml;
extern crate docopt;
extern crate rustc_serialize;
mod errors;
pub mod util;
pub mod bundle;
pub mod index;

View File

@ -1,7 +1,6 @@
use super::{Repository, Chunk};
use super::{Repository, Chunk, RepositoryError};
use rmp_serde;
use serde::{Deserialize, Serialize};
use ::util::*;
use std::fs::{self, File};
use std::path::Path;
@ -39,19 +38,19 @@ serde_impl!(Backup(u8) {
impl Repository {
pub fn list_backups(&self) -> Result<Vec<String>, &'static str> {
pub fn list_backups(&self) -> Result<Vec<String>, RepositoryError> {
let mut backups = Vec::new();
let mut paths = Vec::new();
let base_path = self.path.join("backups");
paths.push(base_path.clone());
while let Some(path) = paths.pop() {
for entry in try!(fs::read_dir(path).map_err(|_| "Failed to list files")) {
let entry = try!(entry.map_err(|_| "Failed to list files"));
for entry in try!(fs::read_dir(path)) {
let entry = try!(entry);
let path = entry.path();
if path.is_dir() {
paths.push(path);
} else {
let relpath = try!(path.strip_prefix(&base_path).map_err(|_| "Failed to obtain relative path"));
let relpath = path.strip_prefix(&base_path).unwrap();
backups.push(relpath.to_string_lossy().to_string());
}
}
@ -59,25 +58,23 @@ impl Repository {
Ok(backups)
}
pub fn get_backup(&self, name: &str) -> Result<Backup, &'static str> {
let file = try!(File::open(self.path.join("backups").join(name)).map_err(|_| "Failed to load backup"));
let mut reader = rmp_serde::Deserializer::new(file);
Backup::deserialize(&mut reader).map_err(|_| "Failed to read backup data")
pub fn get_backup(&self, name: &str) -> Result<Backup, RepositoryError> {
let mut file = try!(File::open(self.path.join("backups").join(name)));
Ok(try!(msgpack::decode_from_stream(&mut file)))
}
pub fn save_backup(&mut self, backup: &Backup, name: &str) -> Result<(), &'static str> {
let mut file = try!(File::create(self.path.join("backups").join(name)).map_err(|_| "Failed to save backup"));
let mut writer = rmp_serde::Serializer::new(&mut file);
backup.serialize(&mut writer).map_err(|_| "Failed to write backup data")
pub fn save_backup(&mut self, backup: &Backup, name: &str) -> Result<(), RepositoryError> {
let mut file = try!(File::create(self.path.join("backups").join(name)));
Ok(try!(msgpack::encode_to_stream(backup, &mut file)))
}
pub fn restore_backup<P: AsRef<Path>>(&mut self, backup: &Backup, path: P) -> Result<(), &'static str> {
pub fn restore_backup<P: AsRef<Path>>(&mut self, backup: &Backup, path: P) -> Result<(), RepositoryError> {
let inode = try!(self.get_inode(&backup.root));
try!(self.save_inode_at(&inode, path));
Ok(())
}
pub fn create_full_backup<P: AsRef<Path>>(&mut self, path: P) -> Result<Backup, &'static str> {
pub fn create_full_backup<P: AsRef<Path>>(&mut self, path: P) -> Result<Backup, RepositoryError> {
// Maintain a stack of folders still todo
// Maintain a map of path->inode entries
// Work on topmost stack entry

View File

@ -1,8 +1,10 @@
use std::mem;
use std::io::{Read, Write, Cursor};
use super::{Repository, Mode};
use super::{Repository, Mode, RepositoryError};
use ::index::Location;
use ::bundle::BundleId;
use super::integrity::RepositoryIntegrityError;
use ::util::Hash;
use ::chunker::{IChunker, ChunkerStatus};
@ -12,7 +14,15 @@ pub type Chunk = (Hash, usize);
impl Repository {
pub fn get_chunk(&mut self, hash: Hash) -> Result<Option<Vec<u8>>, &'static str> {
pub fn get_bundle_id(&self, id: u32) -> Result<BundleId, RepositoryError> {
if let Some(bundle_info) = self.bundle_map.get(id) {
Ok(bundle_info.id())
} else {
Err(RepositoryIntegrityError::MissingBundleId(id).into())
}
}
pub fn get_chunk(&mut self, hash: Hash) -> Result<Option<Vec<u8>>, RepositoryError> {
// Find bundle and chunk id in index
let found = if let Some(found) = self.index.get(&hash) {
found
@ -20,20 +30,12 @@ impl Repository {
return Ok(None)
};
// Lookup bundle id from map
let bundle_id = if let Some(bundle_info) = self.bundle_map.get(found.bundle) {
bundle_info.id()
} else {
return Err("Bundle id not found in map")
};
let bundle_id = try!(self.get_bundle_id(found.bundle));
// Get chunk from bundle
if let Ok(chunk) = self.bundles.get_chunk(&bundle_id, found.chunk as usize) {
Ok(Some(chunk))
} else {
Err("Failed to load chunk from bundle")
}
Ok(Some(try!(self.bundles.get_chunk(&bundle_id, found.chunk as usize))))
}
pub fn put_chunk(&mut self, mode: Mode, hash: Hash, data: &[u8]) -> Result<(), &'static str> {
pub fn put_chunk(&mut self, mode: Mode, hash: Hash, data: &[u8]) -> Result<(), RepositoryError> {
// If this chunk is in the index, ignore it
if self.index.contains(&hash) {
return Ok(())
@ -47,7 +49,7 @@ impl Repository {
};
// ...alocate one if needed
if writer.is_none() {
*writer = Some(try!(self.bundles.create_bundle().map_err(|_| "Failed to create new bundle")));
*writer = Some(try!(self.bundles.create_bundle()));
}
debug_assert!(writer.is_some());
let chunk_id;
@ -56,7 +58,7 @@ impl Repository {
{
// Add chunk to bundle writer and determine the size of the bundle
let writer_obj = writer.as_mut().unwrap();
chunk_id = try!(writer_obj.add(data).map_err(|_| "Failed to write chunk"));
chunk_id = try!(writer_obj.add(data));
size = writer_obj.size();
raw_size = writer_obj.raw_size();
}
@ -68,7 +70,7 @@ impl Repository {
if size >= self.config.bundle_size || raw_size >= 4 * self.config.bundle_size {
let mut finished = None;
mem::swap(writer, &mut finished);
let bundle = try!(self.bundles.add_bundle(finished.unwrap()).map_err(|_| "Failed to write finished bundle"));
let bundle = try!(self.bundles.add_bundle(finished.unwrap()));
self.bundle_map.set(bundle_id, bundle);
if self.next_meta_bundle == bundle_id {
self.next_meta_bundle = next_free_bundle_id
@ -79,27 +81,27 @@ impl Repository {
// Not saving the bundle map, this will be done by flush
}
// Add location to the index
try!(self.index.set(&hash, &Location::new(bundle_id, chunk_id as u32)).map_err(|_| "Failed to add chunk location to index"));
try!(self.index.set(&hash, &Location::new(bundle_id, chunk_id as u32)));
Ok(())
}
#[inline]
pub fn put_data(&mut self, mode: Mode, data: &[u8]) -> Result<Vec<Chunk>, &'static str> {
pub fn put_data(&mut self, mode: Mode, data: &[u8]) -> Result<Vec<Chunk>, RepositoryError> {
let mut input = Cursor::new(data);
self.put_stream(mode, &mut input)
}
pub fn put_stream<R: Read>(&mut self, mode: Mode, data: &mut R) -> Result<Vec<Chunk>, &'static str> {
pub fn put_stream<R: Read>(&mut self, mode: Mode, data: &mut R) -> Result<Vec<Chunk>, RepositoryError> {
let avg_size = self.config.chunker.avg_size();
let mut chunks = Vec::new();
let mut chunk = Vec::with_capacity(avg_size * 2);
loop {
chunk.clear();
let mut output = Cursor::new(chunk);
let res = try!(self.chunker.chunk(data, &mut output).map_err(|_| "Failed to chunk"));
let res = try!(self.chunker.chunk(data, &mut output));
chunk = output.into_inner();
let hash = self.config.hash.hash(&chunk);
try!(self.put_chunk(mode, hash, &chunk).map_err(|_| "Failed to store chunk"));
try!(self.put_chunk(mode, hash, &chunk));
chunks.push((hash, chunk.len()));
if res == ChunkerStatus::Finished {
break
@ -109,18 +111,18 @@ impl Repository {
}
#[inline]
pub fn get_data(&mut self, chunks: &[Chunk]) -> Result<Vec<u8>, &'static str> {
pub fn get_data(&mut self, chunks: &[Chunk]) -> Result<Vec<u8>, RepositoryError> {
let mut data = Vec::with_capacity(chunks.iter().map(|&(_, size)| size).sum());
try!(self.get_stream(chunks, &mut data));
Ok(data)
}
#[inline]
pub fn get_stream<W: Write>(&mut self, chunks: &[Chunk], w: &mut W) -> Result<(), &'static str> {
pub fn get_stream<W: Write>(&mut self, chunks: &[Chunk], w: &mut W) -> Result<(), RepositoryError> {
for &(ref hash, len) in chunks {
let data = try!(try!(self.get_chunk(*hash).map_err(|_| "Failed to load chunk")).ok_or("Chunk missing"));
let data = try!(try!(self.get_chunk(*hash)).ok_or_else(|| RepositoryIntegrityError::MissingChunk(hash.clone())));
debug_assert_eq!(data.len(), len);
try!(w.write_all(&data).map_err(|_| "Failed to write to sink"));
try!(w.write_all(&data));
}
Ok(())
}

View File

@ -1,19 +1,45 @@
use std::collections::HashMap;
use std::path::Path;
use std::io::{BufReader, Read, Write, BufWriter};
use std::io::{self, BufReader, Read, Write, BufWriter};
use std::fs::File;
use rmp_serde;
use serde::Deserialize;
use serde::Serialize;
use ::bundle::{Bundle, BundleId, BundleInfo};
use ::util::*;
static HEADER_STRING: [u8; 7] = *b"zbunmap";
static HEADER_VERSION: u8 = 1;
quick_error!{
#[derive(Debug)]
pub enum BundleMapError {
Io(err: io::Error) {
from()
cause(err)
description("Failed to read/write bundle map")
}
Decode(err: msgpack::DecodeError) {
from()
cause(err)
description("Failed to decode bundle map")
}
Encode(err: msgpack::EncodeError) {
from()
cause(err)
description("Failed to encode bundle map")
}
WrongHeader {
description("Wrong header")
}
WrongVersion(version: u8) {
description("Wrong version")
display("Wrong version: {}", version)
}
}
}
#[derive(Default)]
pub struct BundleData {
pub info: BundleInfo
@ -37,36 +63,26 @@ impl BundleMap {
BundleMap(Default::default())
}
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, &'static str> {
let mut file = BufReader::new(try!(File::open(path.as_ref())
.map_err(|_| "Failed to open bundle map file")));
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, BundleMapError> {
let mut file = BufReader::new(try!(File::open(path.as_ref())));
let mut header = [0u8; 8];
try!(file.read_exact(&mut header)
.map_err(|_| "Failed to read bundle map header"));
try!(file.read_exact(&mut header));
if header[..HEADER_STRING.len()] != HEADER_STRING {
return Err("Wrong header string")
return Err(BundleMapError::WrongHeader)
}
let version = header[HEADER_STRING.len()];
if version != HEADER_VERSION {
return Err("Unsupported bundle map file version")
return Err(BundleMapError::WrongVersion(version))
}
let mut reader = rmp_serde::Deserializer::new(file);
let map = try!(HashMap::deserialize(&mut reader)
.map_err(|_| "Failed to read bundle map data"));
Ok(BundleMap(map))
Ok(BundleMap(try!(msgpack::decode_from_stream(&mut file))))
}
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), &'static str> {
let mut file = BufWriter::new(try!(File::create(path)
.map_err(|_| "Failed to create bundle file")));
try!(file.write_all(&HEADER_STRING)
.map_err(|_| "Failed to write bundle header"));
try!(file.write_all(&[HEADER_VERSION])
.map_err(|_| "Failed to write bundle header"));
let mut writer = rmp_serde::Serializer::new(&mut file);
self.0.serialize(&mut writer)
.map_err(|_| "Failed to write bundle map data")
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), BundleMapError> {
let mut file = BufWriter::new(try!(File::create(path)));
try!(file.write_all(&HEADER_STRING));
try!(file.write_all(&[HEADER_VERSION]));
msgpack::encode_to_stream(&self.0, &mut file).map_err(BundleMapError::Encode)
}
#[inline]

View File

@ -2,14 +2,36 @@ use serde_yaml;
use std::fs::File;
use std::path::Path;
use std::io;
use ::util::*;
use ::chunker::ChunkerType;
quick_error!{
#[derive(Debug)]
pub enum ConfigError {
Io(err: io::Error) {
from()
cause(err)
}
Parse(reason: &'static str) {
from()
description("Failed to parse config")
display("Failed to parse config: {}", reason)
}
Yaml(err: serde_yaml::Error) {
from()
cause(err)
description("Yaml format error")
}
}
}
impl HashMethod {
fn from_yaml(yaml: String) -> Result<Self, &'static str> {
HashMethod::from(&yaml)
fn from_yaml(yaml: String) -> Result<Self, ConfigError> {
HashMethod::from(&yaml).map_err(ConfigError::Parse)
}
fn to_yaml(&self) -> String {
@ -20,8 +42,8 @@ impl HashMethod {
impl ChecksumType {
fn from_yaml(yaml: String) -> Result<Self, &'static str> {
ChecksumType::from(&yaml)
fn from_yaml(yaml: String) -> Result<Self, ConfigError> {
ChecksumType::from(&yaml).map_err(ConfigError::Parse)
}
fn to_yaml(&self) -> String {
@ -52,8 +74,8 @@ serde_impl!(ChunkerYaml(String) {
});
impl ChunkerType {
fn from_yaml(yaml: ChunkerYaml) -> Result<Self, &'static str> {
ChunkerType::from(&yaml.method, yaml.avg_size, yaml.seed)
fn from_yaml(yaml: ChunkerYaml) -> Result<Self, ConfigError> {
ChunkerType::from(&yaml.method, yaml.avg_size, yaml.seed).map_err(ConfigError::Parse)
}
fn to_yaml(&self) -> ChunkerYaml {
@ -69,8 +91,8 @@ impl ChunkerType {
impl Compression {
#[inline]
fn from_yaml(yaml: String) -> Result<Self, &'static str> {
Compression::from_string(&yaml)
fn from_yaml(yaml: String) -> Result<Self, ConfigError> {
Compression::from_string(&yaml).map_err(|_| ConfigError::Parse("Invalid codec"))
}
#[inline]
@ -118,7 +140,7 @@ pub struct Config {
pub hash: HashMethod
}
impl Config {
fn from_yaml(yaml: ConfigYaml) -> Result<Self, &'static str> {
fn from_yaml(yaml: ConfigYaml) -> Result<Self, ConfigError> {
let compression = if let Some(c) = yaml.compression {
Some(try!(Compression::from_yaml(c)))
} else {
@ -143,15 +165,15 @@ impl Config {
}
}
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, &'static str> {
let f = try!(File::open(path).map_err(|_| "Failed to open config"));
let config = try!(serde_yaml::from_reader(f).map_err(|_| "Failed to parse config"));
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, ConfigError> {
let f = try!(File::open(path));
let config = try!(serde_yaml::from_reader(f));
Config::from_yaml(config)
}
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), &'static str> {
let mut f = try!(File::create(path).map_err(|_| "Failed to open config"));
try!(serde_yaml::to_writer(&mut f, &self.to_yaml()).map_err(|_| "Failed to wrtie config"));
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), ConfigError> {
let mut f = try!(File::create(path));
try!(serde_yaml::to_writer(&mut f, &self.to_yaml()));
Ok(())
}
}

66
src/repository/error.rs Normal file
View File

@ -0,0 +1,66 @@
use std::io;
use std::path::PathBuf;
use super::bundle_map::BundleMapError;
use super::config::ConfigError;
use super::integrity::RepositoryIntegrityError;
use ::index::IndexError;
use ::bundle::BundleError;
use ::chunker::ChunkerError;
use ::util::*;
// Top-level error type for repository operations. Wraps the error types of all
// subsystems (config, bundles, index, chunker, msgpack metadata, integrity
// checks) so that repository methods can return a single error type and use
// `try!` with automatic conversion via `from()`.
quick_error!{
    #[derive(Debug)]
    pub enum RepositoryError {
        // Plain I/O failure (file creation, directory handling, etc.).
        Io(err: io::Error) {
            from()
            cause(err)
        }
        // Failure while loading/saving/parsing config.yaml.
        Config(err: ConfigError) {
            from()
            cause(err)
            description("Configuration error")
        }
        // Failure in the bundle-id map (bundles.map).
        BundleMap(err: BundleMapError) {
            from()
            cause(err)
            description("Bundle map error")
        }
        // Failure in the chunk index.
        Index(err: IndexError) {
            from()
            cause(err)
            description("Index error")
        }
        // Failure in the bundle database.
        Bundle(err: BundleError) {
            from()
            cause(err)
            description("Bundle error")
        }
        // Failure while chunking data streams.
        Chunker(err: ChunkerError) {
            from()
            cause(err)
            description("Chunker error")
        }
        // Msgpack deserialization of stored metadata failed.
        Decode(err: msgpack::DecodeError) {
            from()
            cause(err)
            description("Failed to decode metadata")
        }
        // Msgpack serialization of metadata failed.
        Encode(err: msgpack::EncodeError) {
            from()
            cause(err)
            description("Failed to encode metadata")
        }
        // An inconsistency detected by the repository integrity check.
        Integrity(err: RepositoryIntegrityError) {
            from()
            cause(err)
            description("Integrity error")
        }
        // Path refers to a file type the repository cannot store
        // (not a regular file, directory or symlink).
        InvalidFileType(path: PathBuf) {
            description("Invalid file type")
            display("{:?} has an invalid file type", path)
        }
    }
}

View File

@ -1,41 +1,66 @@
use super::Repository;
use super::{Repository, RepositoryError};
use ::bundle::BundleId;
use ::util::Hash;
// Inconsistencies that the repository integrity check (`Repository::check`)
// can detect between the index, the bundle map and the bundle database.
quick_error!{
    #[derive(Debug)]
    pub enum RepositoryIntegrityError {
        // A chunk hash is referenced but missing from the index.
        MissingChunk(hash: Hash) {
            description("Missing chunk")
            display("Missing chunk: {}", hash)
        }
        // The bundle map has no entry for this internal (numeric) bundle id.
        MissingBundleId(id: u32) {
            description("Missing bundle")
            display("Missing bundle: {}", id)
        }
        // The bundle database has no bundle with this id.
        MissingBundle(id: BundleId) {
            description("Missing bundle")
            display("Missing bundle: {}", id)
        }
        // The index points to a chunk number beyond the bundle's chunk count.
        NoSuchChunk(bundle: BundleId, chunk: u32) {
            description("No such chunk")
            // Fixed typo in user-facing message: "conain" -> "contain".
            display("Bundle {} does not contain the chunk {}", bundle, chunk)
        }
        // The ids reserved for the next meta/content bundles collide or are
        // already present in the bundle map.
        InvalidNextBundleId {
            description("Invalid next bundle id")
        }
        // A stored symlink inode has no target recorded.
        SymlinkWithoutTarget {
            description("Symlink without target")
        }
    }
}
impl Repository {
fn check_chunk(&self, hash: Hash) -> Result<(), &'static str> {
fn check_chunk(&self, hash: Hash) -> Result<(), RepositoryError> {
// Find bundle and chunk id in index
let found = if let Some(found) = self.index.get(&hash) {
found
} else {
return Err("Chunk not in index");
return Err(RepositoryIntegrityError::MissingChunk(hash).into());
};
// Lookup bundle id from map
let bundle_id = if let Some(bundle_info) = self.bundle_map.get(found.bundle) {
bundle_info.id()
} else {
return Err("Bundle id not found in map")
};
let bundle_id = try!(self.get_bundle_id(found.bundle));
// Get bundle object from bundledb
let bundle = if let Some(bundle) = self.bundles.get_bundle(&bundle_id) {
bundle
} else {
return Err("Bundle not found in bundledb")
return Err(RepositoryIntegrityError::MissingBundle(bundle_id.clone()).into())
};
// Get chunk from bundle
if bundle.info.chunk_count > found.chunk as usize {
Ok(())
} else {
Err("Bundle does not contain that chunk")
Err(RepositoryIntegrityError::NoSuchChunk(bundle_id.clone(), found.chunk).into())
}
//TODO: check that contents match their hash
}
pub fn check(&mut self, full: bool) -> Result<(), &'static str> {
pub fn check(&mut self, full: bool) -> Result<(), RepositoryError> {
try!(self.flush());
try!(self.bundles.check(full).map_err(|_| "Bundles inconsistent"));
try!(self.index.check().map_err(|_| "Index inconsistent"));
try!(self.bundles.check(full));
try!(self.index.check());
let mut pos = 0;
loop {
pos = if let Some(pos) = self.index.next_entry(pos) {
@ -48,13 +73,13 @@ impl Repository {
pos += 1;
}
if self.next_content_bundle == self.next_meta_bundle {
return Err("Next bundle ids for meta and content as the same")
return Err(RepositoryIntegrityError::InvalidNextBundleId.into())
}
if self.bundle_map.get(self.next_content_bundle).is_some() {
return Err("Bundle map already contains next bundle bundle id")
return Err(RepositoryIntegrityError::InvalidNextBundleId.into())
}
if self.bundle_map.get(self.next_meta_bundle).is_some() {
return Err("Bundle map already contains next meta bundle id")
return Err(RepositoryIntegrityError::InvalidNextBundleId.into())
}
Ok(())
}

View File

@ -1,15 +1,13 @@
use serde::bytes::ByteBuf;
use serde::{Serialize, Deserialize};
use rmp_serde;
use std::collections::HashMap;
use std::path::Path;
use std::fs::{self, Metadata, File, Permissions};
use std::os::linux::fs::MetadataExt;
use std::os::unix::fs::{PermissionsExt, symlink};
use std::io::{Cursor, Read, Write};
use std::io::{Read, Write};
use super::{Repository, Mode, Chunk};
use ::util::*;
use super::{Repository, RepositoryError, Mode, Chunk};
use super::integrity::RepositoryIntegrityError;
#[derive(Debug, Eq, PartialEq)]
@ -27,13 +25,14 @@ serde_impl!(FileType(u8) {
#[derive(Debug)]
pub enum FileContents {
Inline(ByteBuf),
Chunked(Vec<Chunk>)
//TODO: ChunkedIndirect
Inline(msgpack::Bytes),
ChunkedDirect(Vec<Chunk>),
ChunkedIndirect(Vec<Chunk>)
}
serde_impl!(FileContents(u8) {
Inline(ByteBuf) => 0,
Chunked(Vec<Chunk>) => 1
ChunkedDirect(Vec<Chunk>) => 1,
ChunkedIndirect(Vec<Chunk>) => 2
});
@ -86,7 +85,7 @@ serde_impl!(Inode(u8) {
});
impl Inode {
fn get_extended_attrs_from(&mut self, meta: &Metadata) -> Result<(), &'static str> {
fn get_extended_attrs_from(&mut self, meta: &Metadata) -> Result<(), RepositoryError> {
self.mode = meta.st_mode();
self.user = meta.st_uid();
self.group = meta.st_gid();
@ -96,9 +95,11 @@ impl Inode {
Ok(())
}
pub fn get_from<P: AsRef<Path>>(path: P) -> Result<Self, &'static str> {
let name = try!(path.as_ref().file_name().ok_or("Not a file")).to_string_lossy().to_string();
let meta = try!(fs::symlink_metadata(path.as_ref()).map_err(|_| "Failed to get metadata"));
pub fn get_from<P: AsRef<Path>>(path: P) -> Result<Self, RepositoryError> {
let name = try!(path.as_ref().file_name()
.ok_or_else(|| RepositoryError::InvalidFileType(path.as_ref().to_owned())))
.to_string_lossy().to_string();
let meta = try!(fs::symlink_metadata(path.as_ref()));
let mut inode = Inode::default();
inode.name = name;
inode.size = meta.len();
@ -109,36 +110,35 @@ impl Inode {
} else if meta.file_type().is_symlink() {
FileType::Symlink
} else {
return Err("Unsupported file type");
return Err(RepositoryError::InvalidFileType(path.as_ref().to_owned()));
};
if meta.file_type().is_symlink() {
inode.symlink_target = Some(try!(fs::read_link(path).map_err(|_| "Failed to read symlink")).to_string_lossy().to_string());
inode.symlink_target = Some(try!(fs::read_link(path)).to_string_lossy().to_string());
}
try!(inode.get_extended_attrs_from(&meta));
Ok(inode)
}
#[allow(dead_code)]
pub fn create_at<P: AsRef<Path>>(&self, path: P) -> Result<Option<File>, &'static str> {
pub fn create_at<P: AsRef<Path>>(&self, path: P) -> Result<Option<File>, RepositoryError> {
let full_path = path.as_ref().join(&self.name);
let mut file = None;
match self.file_type {
FileType::File => {
file = Some(try!(File::create(&full_path).map_err(|_| "Failed to create file")));
file = Some(try!(File::create(&full_path)));
},
FileType::Directory => {
try!(fs::create_dir(&full_path).map_err(|_| "Failed to create directory"));
try!(fs::create_dir(&full_path));
},
FileType::Symlink => {
if let Some(ref src) = self.symlink_target {
try!(symlink(src, &full_path).map_err(|_| "Failed to create symlink"));
try!(symlink(src, &full_path));
} else {
return Err("Symlink without destination")
return Err(RepositoryIntegrityError::SymlinkWithoutTarget.into())
}
}
}
try!(fs::set_permissions(&full_path, Permissions::from_mode(self.mode)).map_err(|_| "Failed to set permissions"));
try!(fs::set_permissions(&full_path, Permissions::from_mode(self.mode)));
//FIXME: set times and gid/uid
// https://crates.io/crates/filetime
Ok(file)
@ -147,44 +147,48 @@ impl Inode {
impl Repository {
pub fn put_inode<P: AsRef<Path>>(&mut self, path: P) -> Result<Vec<Chunk>, &'static str> {
pub fn put_inode<P: AsRef<Path>>(&mut self, path: P) -> Result<Vec<Chunk>, RepositoryError> {
let mut inode = try!(Inode::get_from(path.as_ref()));
if inode.file_type == FileType::File && inode.size > 0 {
let mut file = try!(File::open(path).map_err(|_| "Failed to open file"));
let mut file = try!(File::open(path));
if inode.size < 100 {
let mut data = Vec::with_capacity(inode.size as usize);
try!(file.read_to_end(&mut data).map_err(|_| "Failed to read file contents"));
try!(file.read_to_end(&mut data));
inode.contents = Some(FileContents::Inline(data.into()));
} else {
let chunks = try!(self.put_stream(Mode::Content, &mut file));
inode.contents = Some(FileContents::Chunked(chunks));
let mut chunks = try!(self.put_stream(Mode::Content, &mut file));
if chunks.len() < 10 {
inode.contents = Some(FileContents::ChunkedDirect(chunks));
} else {
let chunks_data = try!(msgpack::encode(&chunks));
chunks = try!(self.put_data(Mode::Meta, &chunks_data));
inode.contents = Some(FileContents::ChunkedIndirect(chunks));
}
}
}
let mut inode_data = Vec::new();
{
let mut writer = rmp_serde::Serializer::new(&mut inode_data);
try!(inode.serialize(&mut writer).map_err(|_| "Failed to write inode data"));
}
self.put_data(Mode::Meta, &inode_data)
self.put_data(Mode::Meta, &try!(msgpack::encode(&inode)))
}
#[inline]
pub fn get_inode(&mut self, chunks: &[Chunk]) -> Result<Inode, &'static str> {
let data = Cursor::new(try!(self.get_data(chunks)));
let mut reader = rmp_serde::Deserializer::new(data);
Inode::deserialize(&mut reader).map_err(|_| "Failed to read inode data")
pub fn get_inode(&mut self, chunks: &[Chunk]) -> Result<Inode, RepositoryError> {
Ok(try!(msgpack::decode(&try!(self.get_data(chunks)))))
}
#[inline]
pub fn save_inode_at<P: AsRef<Path>>(&mut self, inode: &Inode, path: P) -> Result<(), &'static str> {
pub fn save_inode_at<P: AsRef<Path>>(&mut self, inode: &Inode, path: P) -> Result<(), RepositoryError> {
if let Some(mut file) = try!(inode.create_at(path.as_ref())) {
if let Some(ref contents) = inode.contents {
match *contents {
FileContents::Inline(ref data) => {
try!(file.write_all(&data).map_err(|_| "Failed to write data to file"));
try!(file.write_all(&data));
},
FileContents::Chunked(ref chunks) => {
FileContents::ChunkedDirect(ref chunks) => {
try!(self.get_stream(chunks, &mut file));
},
FileContents::ChunkedIndirect(ref chunks) => {
let chunk_data = try!(self.get_data(chunks));
let chunks: Vec<Chunk> = try!(msgpack::decode(&chunk_data));
try!(self.get_stream(&chunks, &mut file));
}
}
}

View File

@ -5,6 +5,7 @@ mod basic_io;
mod info;
mod metadata;
mod backup;
mod error;
use std::mem;
use std::cmp::max;
@ -15,6 +16,7 @@ use super::index::Index;
use super::bundle::{BundleDb, BundleWriter};
use super::chunker::Chunker;
pub use self::error::RepositoryError;
pub use self::config::Config;
pub use self::metadata::Inode;
pub use self::basic_io::Chunk;
@ -42,20 +44,20 @@ pub struct Repository {
impl Repository {
pub fn create<P: AsRef<Path>>(path: P, config: Config) -> Result<Self, &'static str> {
pub fn create<P: AsRef<Path>>(path: P, config: Config) -> Result<Self, RepositoryError> {
let path = path.as_ref().to_owned();
try!(fs::create_dir(&path).map_err(|_| "Failed to create repository directory"));
try!(fs::create_dir(&path));
let bundles = try!(BundleDb::create(
path.join("bundles"),
config.compression.clone(),
None, //FIXME: store encryption in config
config.checksum
).map_err(|_| "Failed to create bundle db"));
let index = try!(Index::create(&path.join("index")).map_err(|_| "Failed to create index"));
try!(config.save(path.join("config.yaml")).map_err(|_| "Failed to save config"));
));
let index = try!(Index::create(&path.join("index")));
try!(config.save(path.join("config.yaml")));
let bundle_map = BundleMap::create();
try!(bundle_map.save(path.join("bundles.map")).map_err(|_| "Failed to save bundle map"));
try!(fs::create_dir(&path.join("backups")).map_err(|_| "Failed to create backup directory"));
try!(bundle_map.save(path.join("bundles.map")));
try!(fs::create_dir(&path.join("backups")));
Ok(Repository{
path: path,
chunker: config.chunker.create(),
@ -70,17 +72,17 @@ impl Repository {
})
}
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self, &'static str> {
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self, RepositoryError> {
let path = path.as_ref().to_owned();
let config = try!(Config::load(path.join("config.yaml")).map_err(|_| "Failed to load config"));
let config = try!(Config::load(path.join("config.yaml")));
let bundles = try!(BundleDb::open(
path.join("bundles"),
config.compression.clone(),
None, //FIXME: load encryption from config
config.checksum
).map_err(|_| "Failed to open bundle db"));
let index = try!(Index::open(&path.join("index")).map_err(|_| "Failed to open index"));
let bundle_map = try!(BundleMap::load(path.join("bundles.map")).map_err(|_| "Failed to load bundle map"));
));
let index = try!(Index::open(&path.join("index")));
let bundle_map = try!(BundleMap::load(path.join("bundles.map")));
let mut repo = Repository {
path: path,
chunker: config.chunker.create(),
@ -99,8 +101,9 @@ impl Repository {
}
#[inline]
fn save_bundle_map(&self) -> Result<(), &'static str> {
self.bundle_map.save(self.path.join("bundles.map"))
fn save_bundle_map(&self) -> Result<(), RepositoryError> {
try!(self.bundle_map.save(self.path.join("bundles.map")));
Ok(())
}
#[inline]
@ -112,12 +115,12 @@ impl Repository {
id
}
pub fn flush(&mut self) -> Result<(), &'static str> {
pub fn flush(&mut self) -> Result<(), RepositoryError> {
if self.content_bundle.is_some() {
let mut finished = None;
mem::swap(&mut self.content_bundle, &mut finished);
{
let bundle = try!(self.bundles.add_bundle(finished.unwrap()).map_err(|_| "Failed to write finished bundle"));
let bundle = try!(self.bundles.add_bundle(finished.unwrap()));
self.bundle_map.set(self.next_content_bundle, bundle);
}
self.next_content_bundle = self.next_free_bundle_id()
@ -126,12 +129,12 @@ impl Repository {
let mut finished = None;
mem::swap(&mut self.meta_bundle, &mut finished);
{
let bundle = try!(self.bundles.add_bundle(finished.unwrap()).map_err(|_| "Failed to write finished bundle"));
let bundle = try!(self.bundles.add_bundle(finished.unwrap()));
self.bundle_map.set(self.next_meta_bundle, bundle);
}
self.next_meta_bundle = self.next_free_bundle_id()
}
try!(self.save_bundle_map().map_err(|_| "Failed to save bundle map"));
try!(self.save_bundle_map());
Ok(())
}
}

View File

@ -1,11 +1,40 @@
use std::ptr;
use std::ffi::{CStr, CString};
use std::io::Write;
use std::io::{self, Write};
use std::str::FromStr;
use squash::*;
// Errors produced by the squash-based compression layer: codec lookup/setup,
// option handling, stream creation, (de)compression operations and writing the
// produced data to the output sink.
quick_error!{
    #[derive(Debug)]
    pub enum CompressionError {
        // The requested codec name (or its level suffix) is not recognized.
        UnsupportedCodec(name: String) {
            description("Unsupported codec")
            display("Unsupported codec: {}", name)
        }
        // squash_get_codec returned null for the codec name.
        InitializeCodec {
            description("Failed to initialize codec")
        }
        // Creating the options object or setting the compression level failed.
        InitializeOptions {
            description("Failed to set codec options")
        }
        // squash_stream_new(_with_options) returned a null stream.
        InitializeStream {
            description("Failed to create stream")
        }
        // A squash (de)compression call returned a non-OK status; `reason` is
        // the status string reported by the library.
        Operation(reason: &'static str) {
            description("Operation failed")
            display("Operation failed: {}", reason)
        }
        // Writing (de)compressed bytes to the output writer failed.
        Output(err: io::Error) {
            from()
            cause(err)
            description("Failed to write to output")
        }
    }
}
#[derive(Clone, Debug)]
pub enum Compression {
Snappy(()),
@ -34,9 +63,9 @@ impl Compression {
}
#[inline]
pub fn from_string(name: &str) -> Result<Self, &'static str> {
pub fn from_string(name: &str) -> Result<Self, CompressionError> {
let (name, level) = if let Some(pos) = name.find('/') {
let level = try!(u8::from_str(&name[pos+1..]).map_err(|_| "Level must be a number"));
let level = try!(u8::from_str(&name[pos+1..]).map_err(|_| CompressionError::UnsupportedCodec(name.to_string())));
let name = &name[..pos];
(name, level)
} else {
@ -48,7 +77,7 @@ impl Compression {
"deflate" | "zlib" | "gzip" => Ok(Compression::Deflate(level)),
"brotli" => Ok(Compression::Brotli(level)),
"lzma2" => Ok(Compression::Lzma2(level)),
_ => Err("Unsupported codec")
_ => Err(CompressionError::UnsupportedCodec(name.to_string()))
}
}
@ -64,11 +93,11 @@ impl Compression {
}
#[inline]
fn codec(&self) -> Result<*mut SquashCodec, &'static str> {
fn codec(&self) -> Result<*mut SquashCodec, CompressionError> {
let name = CString::new(self.name().as_bytes()).unwrap();
let codec = unsafe { squash_get_codec(name.as_ptr()) };
if codec.is_null() {
return Err("Unsupported algorithm")
return Err(CompressionError::InitializeCodec)
}
Ok(codec)
}
@ -84,12 +113,12 @@ impl Compression {
}
}
fn options(&self) -> Result<*mut SquashOptions, &'static str> {
fn options(&self) -> Result<*mut SquashOptions, CompressionError> {
let codec = try!(self.codec());
let options = unsafe { squash_options_new(codec, ptr::null::<()>()) };
if let Some(level) = self.level() {
if options.is_null() {
return Err("Algorithm does not support a level")
return Err(CompressionError::InitializeOptions)
}
let option = CString::new("level");
let value = CString::new(format!("{}", level));
@ -100,18 +129,18 @@ impl Compression {
)};
if res != SQUASH_OK {
//panic!(unsafe { CStr::from_ptr(squash_status_to_string(res)).to_str().unwrap() });
return Err("Failed to set compression level")
return Err(CompressionError::InitializeOptions)
}
}
Ok(options)
}
#[inline]
fn error(code: SquashStatus) -> &'static str {
unsafe { CStr::from_ptr(squash_status_to_string(code)).to_str().unwrap() }
fn error(code: SquashStatus) -> CompressionError {
CompressionError::Operation(unsafe { CStr::from_ptr(squash_status_to_string(code)).to_str().unwrap() })
}
pub fn compress(&self, data: &[u8]) -> Result<Vec<u8>, &'static str> {
pub fn compress(&self, data: &[u8]) -> Result<Vec<u8>, CompressionError> {
let codec = try!(self.codec());
let options = try!(self.options());
let mut size = data.len() * 2 + 500;
@ -138,7 +167,7 @@ impl Compression {
Ok(buf)
}
pub fn decompress(&self, data: &[u8]) -> Result<Vec<u8>, &'static str> {
pub fn decompress(&self, data: &[u8]) -> Result<Vec<u8>, CompressionError> {
let codec = try!(self.codec());
let mut size = unsafe { squash_codec_get_uncompressed_size(
codec,
@ -165,26 +194,26 @@ impl Compression {
}
#[inline]
pub fn compress_stream(&self) -> Result<CompressionStream, &'static str> {
pub fn compress_stream(&self) -> Result<CompressionStream, CompressionError> {
let codec = try!(self.codec());
let options = try!(self.options());
let stream = unsafe { squash_stream_new_with_options(
codec, SQUASH_STREAM_COMPRESS, options
) };
if stream.is_null() {
return Err("Failed to create stream");
return Err(CompressionError::InitializeStream);
}
Ok(CompressionStream::new(unsafe { Box::from_raw(stream) }))
}
#[inline]
pub fn decompress_stream(&self) -> Result<CompressionStream, &'static str> {
pub fn decompress_stream(&self) -> Result<CompressionStream, CompressionError> {
let codec = try!(self.codec());
let stream = unsafe { squash_stream_new(
codec, SQUASH_STREAM_DECOMPRESS, ptr::null::<()>()
) };
if stream.is_null() {
return Err("Failed to create stream");
return Err(CompressionError::InitializeStream);
}
Ok(CompressionStream::new(unsafe { Box::from_raw(stream) }))
}
@ -205,7 +234,7 @@ impl CompressionStream {
}
}
pub fn process<W: Write>(&mut self, input: &[u8], output: &mut W) -> Result<(), &'static str> {
pub fn process<W: Write>(&mut self, input: &[u8], output: &mut W) -> Result<(), CompressionError> {
let mut stream = &mut *self.stream;
stream.next_in = input.as_ptr();
stream.avail_in = input.len();
@ -217,7 +246,7 @@ impl CompressionStream {
return Err(Compression::error(res))
}
let output_size = self.buffer.len() - stream.avail_out;
try!(output.write_all(&self.buffer[..output_size]).map_err(|_| "Failed to write to output"));
try!(output.write_all(&self.buffer[..output_size]));
if res != SQUASH_PROCESSING {
break
}
@ -225,7 +254,7 @@ impl CompressionStream {
Ok(())
}
pub fn finish<W: Write>(mut self, output: &mut W) -> Result<(), &'static str> {
pub fn finish<W: Write>(mut self, output: &mut W) -> Result<(), CompressionError> {
let mut stream = &mut *self.stream;
loop {
stream.next_out = self.buffer.as_mut_ptr();
@ -235,7 +264,7 @@ impl CompressionStream {
return Err(Compression::error(res))
}
let output_size = self.buffer.len() - stream.avail_out;
try!(output.write_all(&self.buffer[..output_size]).map_err(|_| "Failed to write to output"));
try!(output.write_all(&self.buffer[..output_size]));
if res != SQUASH_PROCESSING {
break
}

View File

@ -1,5 +1,16 @@
use std::collections::HashMap;
// Errors from the encryption layer. Currently a single catch-all variant;
// encrypt/decrypt themselves are still unimplemented (see Crypto below).
quick_error!{
    #[derive(Debug)]
    pub enum EncryptionError {
        // A cryptographic operation failed; `reason` describes the failure.
        Operation(reason: &'static str) {
            description("Operation failed")
            display("Operation failed: {}", reason)
        }
    }
}
#[derive(Clone)]
pub enum EncryptionMethod {
Dummy
@ -36,12 +47,12 @@ impl Crypto {
}
#[inline]
pub fn encrypt(&self, _enc: Encryption, _data: &[u8]) -> Result<Vec<u8>, &'static str> {
pub fn encrypt(&self, _enc: Encryption, _data: &[u8]) -> Result<Vec<u8>, EncryptionError> {
unimplemented!()
}
#[inline]
pub fn decrypt(&self, _enc: Encryption, _data: &[u8]) -> Result<Vec<u8>, &'static str> {
pub fn decrypt(&self, _enc: Encryption, _data: &[u8]) -> Result<Vec<u8>, EncryptionError> {
unimplemented!()
}
}

View File

@ -3,6 +3,7 @@ mod compression;
mod encryption;
mod hash;
mod lru_cache;
pub mod msgpack;
pub use self::checksum::*;
pub use self::compression::*;
@ -10,6 +11,7 @@ pub use self::encryption::*;
pub use self::hash::*;
pub use self::lru_cache::*;
pub fn to_file_size(size: u64) -> String {
let mut size = size as f32;
if size >= 512.0 {

34
src/util/msgpack.rs Normal file
View File

@ -0,0 +1,34 @@
use rmp_serde;
use serde::{Serialize, Deserialize};
use std::io::{Write, Read, Cursor};
pub use serde::bytes::ByteBuf as Bytes;
pub use rmp_serde::decode::Error as DecodeError;
pub use rmp_serde::encode::Error as EncodeError;
/// Serializes `t` into a freshly allocated msgpack byte buffer.
pub fn encode<T: Serialize>(t: &T) -> Result<Vec<u8>, EncodeError> {
    let mut buf = Vec::new();
    // Serializer borrows the buffer only for the duration of this call.
    try!(t.serialize(&mut rmp_serde::Serializer::new(&mut buf)));
    Ok(buf)
}
/// Serializes `t` as msgpack directly into the given writer,
/// without buffering the encoded bytes in memory first.
pub fn encode_to_stream<T: Serialize>(t: T, w: &mut Write) -> Result<(), EncodeError> {
    t.serialize(&mut rmp_serde::Serializer::new(w))
}
/// Deserializes a value of type `T` from an in-memory msgpack byte slice.
pub fn decode<T: Deserialize>(data: &[u8]) -> Result<T, DecodeError> {
    // Wrap the slice in a Cursor so it can be consumed as a reader.
    let mut reader = rmp_serde::Deserializer::new(Cursor::new(data));
    T::deserialize(&mut reader)
}
/// Deserializes a value of type `T` from msgpack data read off the stream.
pub fn decode_from_stream<T: Deserialize>(r: &mut Read) -> Result<T, DecodeError> {
    T::deserialize(&mut rmp_serde::Deserializer::new(r))
}