// zvault/src/bundledb/writer.rs

use ::prelude::*;
use super::*;

use std::path::{Path, PathBuf};
use std::fs::File;
use std::io::{self, Write, BufWriter};
use std::sync::{Arc, Mutex};

use chrono::prelude::*;

quick_error!{
    // Error type covering every failure mode of BundleWriter:
    // compression setup, compression, encryption, header encoding and file I/O.
    #[derive(Debug)]
    pub enum BundleWriterError {
        // Creating the streaming compressor in `new` failed.
        CompressionSetup(err: CompressionError) {
            cause(err)
            description("Failed to setup compression")
            display("Bundle writer error: failed to setup compression\n\tcaused by: {}", err)
        }
        // Feeding data through / finishing the compression stream failed.
        Compression(err: CompressionError) {
            cause(err)
            description("Failed to compress data")
            display("Bundle writer error: failed to compress data\n\tcaused by: {}", err)
        }
        // Encrypting data, chunk list or bundle info failed.
        // `from()` allows `try!`/`?` conversion from EncryptionError.
        Encryption(err: EncryptionError) {
            from()
            cause(err)
            description("Encryption failed")
            display("Bundle writer error: failed to encrypt data\n\tcaused by: {}", err)
        }
        // Msgpack-encoding the bundle info/header to the target file failed;
        // the `context` clause attaches the file path to the error.
        Encode(err: msgpack::EncodeError, path: PathBuf) {
            cause(err)
            context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf())
            description("Failed to encode bundle header to file")
            display("Bundle writer error: failed to encode bundle header to file {:?}\n\tcaused by: {}", path, err)
        }
        // Plain I/O failure while writing the bundle file;
        // the `context` clause attaches the file path to the error.
        Write(err: io::Error, path: PathBuf) {
            cause(err)
            context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf())
            description("Failed to write data to file")
            display("Bundle writer error: failed to write data to file {:?}\n\tcaused by: {}", path, err)
        }
    }
}
// In-memory builder for a single bundle: chunks are accumulated via `add`
// and the finished bundle is written to disk by `finish`.
pub struct BundleWriter {
    // Bundle mode, stored verbatim into the resulting BundleInfo.
    mode: BundleMode,
    // Hash method used to derive the bundle id from the encoded chunk list.
    hash_method: HashMethod,
    // Accumulated chunk contents: compressed output if a compression stream
    // is configured, otherwise the raw chunk bytes.
    data: Vec<u8>,
    // Compression settings (recorded in BundleInfo), if any.
    compression: Option<Compression>,
    // Streaming compressor created in `new` when compression is enabled.
    compression_stream: Option<CompressionStream>,
    // Encryption settings; applied to data/chunk list/info in `finish`.
    encryption: Option<Encryption>,
    // Shared crypto context used to perform the actual encryption.
    crypto: Arc<Mutex<Crypto>>,
    // Total uncompressed size of all chunks added so far.
    raw_size: usize,
    // Number of chunks added so far.
    chunk_count: usize,
    // (hash, size) pairs of the added chunks, in insertion order.
    chunks: ChunkList,
}
impl BundleWriter {
2017-03-22 08:19:16 +00:00
pub fn new(mode: BundleMode, hash_method: HashMethod, compression: Option<Compression>, encryption: Option<Encryption>, crypto: Arc<Mutex<Crypto>>) -> Result<Self, BundleWriterError> {
2017-03-21 10:08:01 +00:00
let compression_stream = match compression {
2017-03-22 08:19:16 +00:00
Some(ref compression) => Some(try!(compression.compress_stream().map_err(BundleWriterError::CompressionSetup))),
2017-03-21 10:08:01 +00:00
None => None
};
Ok(BundleWriter {
mode: mode,
hash_method: hash_method,
data: vec![],
compression: compression,
compression_stream: compression_stream,
encryption: encryption,
crypto: crypto,
raw_size: 0,
chunk_count: 0,
chunks: ChunkList::new()
})
}
2017-03-22 08:19:16 +00:00
pub fn add(&mut self, chunk: &[u8], hash: Hash) -> Result<usize, BundleWriterError> {
2017-03-21 10:08:01 +00:00
if let Some(ref mut stream) = self.compression_stream {
2017-03-22 08:19:16 +00:00
try!(stream.process(chunk, &mut self.data).map_err(BundleWriterError::Compression))
2017-03-21 10:08:01 +00:00
} else {
self.data.extend_from_slice(chunk)
}
self.raw_size += chunk.len();
self.chunk_count += 1;
self.chunks.push((hash, chunk.len() as u32));
Ok(self.chunk_count-1)
}
2017-03-22 08:19:16 +00:00
pub fn finish(mut self, db: &BundleDb) -> Result<StoredBundle, BundleWriterError> {
2017-03-21 10:08:01 +00:00
if let Some(stream) = self.compression_stream {
2017-03-22 08:19:16 +00:00
try!(stream.finish(&mut self.data).map_err(BundleWriterError::Compression))
2017-03-21 10:08:01 +00:00
}
if let Some(ref encryption) = self.encryption {
self.data = try!(self.crypto.lock().unwrap().encrypt(&encryption, &self.data));
}
let encoded_size = self.data.len();
let mut chunk_data = Vec::with_capacity(self.chunks.encoded_size());
self.chunks.write_to(&mut chunk_data).unwrap();
let id = BundleId(self.hash_method.hash(&chunk_data));
if let Some(ref encryption) = self.encryption {
chunk_data = try!(self.crypto.lock().unwrap().encrypt(&encryption, &chunk_data));
}
let mut path = db.layout.temp_bundle_path();
2017-03-21 10:08:01 +00:00
let mut file = BufWriter::new(try!(File::create(&path).context(&path as &Path)));
try!(file.write_all(&HEADER_STRING).context(&path as &Path));
try!(file.write_all(&[HEADER_VERSION]).context(&path as &Path));
2017-04-03 05:57:58 +00:00
let info = BundleInfo {
2017-03-21 10:08:01 +00:00
mode: self.mode,
hash_method: self.hash_method,
compression: self.compression,
2017-04-03 05:57:58 +00:00
encryption: self.encryption.clone(),
2017-03-21 10:08:01 +00:00
chunk_count: self.chunk_count,
id: id.clone(),
raw_size: self.raw_size,
encoded_size: encoded_size,
2017-04-14 20:44:40 +00:00
chunk_list_size: chunk_data.len(),
timestamp: Local::now().timestamp()
2017-03-21 10:08:01 +00:00
};
2017-04-03 05:57:58 +00:00
let mut info_data = try!(msgpack::encode(&info).context(&path as &Path));
if let Some(ref encryption) = self.encryption {
info_data = try!(self.crypto.lock().unwrap().encrypt(&encryption, &info_data));
}
let header = BundleHeader {
encryption: self.encryption,
info_size: info_data.len()
};
2017-03-21 10:08:01 +00:00
try!(msgpack::encode_to_stream(&header, &mut file).context(&path as &Path));
2017-04-03 05:57:58 +00:00
try!(file.write_all(&info_data).context(&path as &Path));
2017-03-21 10:08:01 +00:00
try!(file.write_all(&chunk_data).context(&path as &Path));
try!(file.write_all(&self.data).context(&path as &Path));
path = path.strip_prefix(db.layout.base_path()).unwrap().to_path_buf();
2017-04-03 05:57:58 +00:00
Ok(StoredBundle { path: path, info: info })
2017-03-21 10:08:01 +00:00
}
#[inline]
2017-04-16 19:39:50 +00:00
pub fn raw_size(&self) -> usize {
self.raw_size
2017-03-21 10:08:01 +00:00
}
#[inline]
2017-04-16 19:39:50 +00:00
pub fn estimate_final_size(&self) -> usize {
self.data.len() + self.chunk_count * 20 + 500
2017-03-21 10:08:01 +00:00
}
}