2017-07-21 09:21:59 +00:00
|
|
|
use prelude::*;
|
2017-03-21 10:28:11 +00:00
|
|
|
use super::*;
|
|
|
|
|
2017-03-22 08:19:16 +00:00
|
|
|
use std::path::{Path, PathBuf};
|
2017-03-21 14:38:42 +00:00
|
|
|
use std::fs::File;
|
2017-03-22 08:19:16 +00:00
|
|
|
use std::io::{self, Write, BufWriter};
|
2017-03-21 10:08:01 +00:00
|
|
|
use std::sync::{Arc, Mutex};
|
|
|
|
|
2017-04-14 20:44:40 +00:00
|
|
|
use chrono::prelude::*;
|
|
|
|
|
2017-03-21 10:08:01 +00:00
|
|
|
|
2017-03-22 08:19:16 +00:00
|
|
|
quick_error!{
    // Errors that can occur while building or flushing a bundle.
    // Generated via the quick_error macro: each variant carries its cause,
    // a translated description and a translated display message.
    #[derive(Debug)]
    pub enum BundleWriterError {
        // Setting up the compression stream failed (bundle creation time).
        CompressionSetup(err: CompressionError) {
            cause(err)
            description(tr!("Failed to setup compression"))
            display("{}", tr_format!("Bundle writer error: failed to setup compression\n\tcaused by: {}", err))
        }
        // Compressing chunk data failed (while adding chunks or finishing).
        Compression(err: CompressionError) {
            cause(err)
            description(tr!("Failed to compress data"))
            display("{}", tr_format!("Bundle writer error: failed to compress data\n\tcaused by: {}", err))
        }
        // Encrypting data, chunk list or bundle info failed.
        // `from()` lets EncryptionError convert implicitly (used by `try!`/`?`).
        Encryption(err: EncryptionError) {
            from()
            cause(err)
            description(tr!("Encryption failed"))
            display("{}", tr_format!("Bundle writer error: failed to encrypt data\n\tcaused by: {}", err))
        }
        // Msgpack-encoding the bundle header/info failed; the context clause
        // attaches the destination path to the error.
        Encode(err: msgpack::EncodeError, path: PathBuf) {
            cause(err)
            context(path: &'a Path, err: msgpack::EncodeError) -> (err, path.to_path_buf())
            description(tr!("Failed to encode bundle header to file"))
            display("{}", tr_format!("Bundle writer error: failed to encode bundle header to file {:?}\n\tcaused by: {}", path, err))
        }
        // An I/O write to the bundle file failed; the context clause attaches
        // the destination path to the error.
        Write(err: io::Error, path: PathBuf) {
            cause(err)
            context(path: &'a Path, err: io::Error) -> (err, path.to_path_buf())
            description(tr!("Failed to write data to file"))
            display("{}", tr_format!("Bundle writer error: failed to write data to file {:?}\n\tcaused by: {}", path, err))
        }
    }
}
|
|
|
|
|
|
|
|
|
2017-03-21 10:08:01 +00:00
|
|
|
/// Incrementally builds a single bundle: chunks are appended (optionally
/// compressed on the fly) and the finished bundle is written out as one file.
pub struct BundleWriter {
    // Bundle mode (project-defined enum; e.g. what kind of chunks it holds).
    mode: BundleMode,
    // Hash method used for the bundle id (hash of the chunk list).
    hash_method: HashMethod,
    // Accumulated (possibly compressed) chunk payload bytes.
    data: Vec<u8>,
    // Compression algorithm recorded in the bundle info, if any.
    compression: Option<Compression>,
    // Live streaming-compression state; Some iff `compression` is Some.
    compression_stream: Option<CompressionStream>,
    // Encryption settings recorded in the bundle header/info, if any.
    encryption: Option<Encryption>,
    // Shared crypto context used to encrypt data, chunk list and info.
    crypto: Arc<Mutex<Crypto>>,
    // Total uncompressed size of all chunks added so far.
    raw_size: usize,
    // Number of chunks added so far.
    chunk_count: usize,
    // (hash, size) entries for every chunk, in insertion order.
    chunks: ChunkList
}
|
|
|
|
|
|
|
|
impl BundleWriter {
|
2017-07-21 09:21:59 +00:00
|
|
|
pub fn new(
|
|
|
|
mode: BundleMode,
|
|
|
|
hash_method: HashMethod,
|
|
|
|
compression: Option<Compression>,
|
|
|
|
encryption: Option<Encryption>,
|
|
|
|
crypto: Arc<Mutex<Crypto>>,
|
|
|
|
) -> Result<Self, BundleWriterError> {
|
2017-03-21 10:08:01 +00:00
|
|
|
let compression_stream = match compression {
|
2017-07-21 09:21:59 +00:00
|
|
|
Some(ref compression) => Some(try!(compression.compress_stream().map_err(
|
|
|
|
BundleWriterError::CompressionSetup
|
|
|
|
))),
|
|
|
|
None => None,
|
2017-03-21 10:08:01 +00:00
|
|
|
};
|
|
|
|
Ok(BundleWriter {
|
2018-03-03 16:25:05 +00:00
|
|
|
mode,
|
|
|
|
hash_method,
|
2017-03-21 10:08:01 +00:00
|
|
|
data: vec![],
|
2018-03-03 16:25:05 +00:00
|
|
|
compression,
|
|
|
|
compression_stream,
|
|
|
|
encryption,
|
|
|
|
crypto,
|
2017-03-21 10:08:01 +00:00
|
|
|
raw_size: 0,
|
|
|
|
chunk_count: 0,
|
|
|
|
chunks: ChunkList::new()
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2017-03-22 08:19:16 +00:00
|
|
|
pub fn add(&mut self, chunk: &[u8], hash: Hash) -> Result<usize, BundleWriterError> {
|
2017-03-21 10:08:01 +00:00
|
|
|
if let Some(ref mut stream) = self.compression_stream {
|
2017-07-21 09:21:59 +00:00
|
|
|
try!(stream.process(chunk, &mut self.data).map_err(
|
|
|
|
BundleWriterError::Compression
|
|
|
|
))
|
2017-03-21 10:08:01 +00:00
|
|
|
} else {
|
|
|
|
self.data.extend_from_slice(chunk)
|
|
|
|
}
|
|
|
|
self.raw_size += chunk.len();
|
|
|
|
self.chunk_count += 1;
|
|
|
|
self.chunks.push((hash, chunk.len() as u32));
|
2017-07-21 09:21:59 +00:00
|
|
|
Ok(self.chunk_count - 1)
|
2017-03-21 10:08:01 +00:00
|
|
|
}
|
|
|
|
|
2017-03-22 08:19:16 +00:00
|
|
|
pub fn finish(mut self, db: &BundleDb) -> Result<StoredBundle, BundleWriterError> {
|
2017-03-21 10:08:01 +00:00
|
|
|
if let Some(stream) = self.compression_stream {
|
2017-07-21 09:21:59 +00:00
|
|
|
try!(stream.finish(&mut self.data).map_err(
|
|
|
|
BundleWriterError::Compression
|
|
|
|
))
|
2017-03-21 10:08:01 +00:00
|
|
|
}
|
|
|
|
if let Some(ref encryption) = self.encryption {
|
2017-04-19 20:43:08 +00:00
|
|
|
self.data = try!(self.crypto.lock().unwrap().encrypt(encryption, &self.data));
|
2017-03-21 10:08:01 +00:00
|
|
|
}
|
|
|
|
let encoded_size = self.data.len();
|
|
|
|
let mut chunk_data = Vec::with_capacity(self.chunks.encoded_size());
|
|
|
|
self.chunks.write_to(&mut chunk_data).unwrap();
|
|
|
|
let id = BundleId(self.hash_method.hash(&chunk_data));
|
|
|
|
if let Some(ref encryption) = self.encryption {
|
2017-04-19 20:43:08 +00:00
|
|
|
chunk_data = try!(self.crypto.lock().unwrap().encrypt(encryption, &chunk_data));
|
2017-03-21 10:08:01 +00:00
|
|
|
}
|
2017-04-12 11:57:28 +00:00
|
|
|
let mut path = db.layout.temp_bundle_path();
|
2017-03-21 10:08:01 +00:00
|
|
|
let mut file = BufWriter::new(try!(File::create(&path).context(&path as &Path)));
|
|
|
|
try!(file.write_all(&HEADER_STRING).context(&path as &Path));
|
|
|
|
try!(file.write_all(&[HEADER_VERSION]).context(&path as &Path));
|
2017-04-03 05:57:58 +00:00
|
|
|
let info = BundleInfo {
|
2017-03-21 10:08:01 +00:00
|
|
|
mode: self.mode,
|
|
|
|
hash_method: self.hash_method,
|
|
|
|
compression: self.compression,
|
2017-04-03 05:57:58 +00:00
|
|
|
encryption: self.encryption.clone(),
|
2017-03-21 10:08:01 +00:00
|
|
|
chunk_count: self.chunk_count,
|
|
|
|
id: id.clone(),
|
|
|
|
raw_size: self.raw_size,
|
2018-03-03 16:25:05 +00:00
|
|
|
encoded_size,
|
2017-04-14 20:44:40 +00:00
|
|
|
chunk_list_size: chunk_data.len(),
|
|
|
|
timestamp: Local::now().timestamp()
|
2017-03-21 10:08:01 +00:00
|
|
|
};
|
2017-04-03 05:57:58 +00:00
|
|
|
let mut info_data = try!(msgpack::encode(&info).context(&path as &Path));
|
|
|
|
if let Some(ref encryption) = self.encryption {
|
2017-04-19 20:43:08 +00:00
|
|
|
info_data = try!(self.crypto.lock().unwrap().encrypt(encryption, &info_data));
|
2017-04-03 05:57:58 +00:00
|
|
|
}
|
|
|
|
let header = BundleHeader {
|
|
|
|
encryption: self.encryption,
|
|
|
|
info_size: info_data.len()
|
|
|
|
};
|
2017-07-21 09:21:59 +00:00
|
|
|
try!(msgpack::encode_to_stream(&header, &mut file).context(
|
|
|
|
&path as &Path
|
|
|
|
));
|
2017-04-03 05:57:58 +00:00
|
|
|
try!(file.write_all(&info_data).context(&path as &Path));
|
2017-03-21 10:08:01 +00:00
|
|
|
try!(file.write_all(&chunk_data).context(&path as &Path));
|
|
|
|
try!(file.write_all(&self.data).context(&path as &Path));
|
2017-07-21 09:21:59 +00:00
|
|
|
path = path.strip_prefix(db.layout.base_path())
|
|
|
|
.unwrap()
|
|
|
|
.to_path_buf();
|
|
|
|
Ok(StoredBundle {
|
2018-03-03 16:25:05 +00:00
|
|
|
path,
|
|
|
|
info
|
2017-07-21 09:21:59 +00:00
|
|
|
})
|
2017-03-21 10:08:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
#[inline]
|
2017-04-16 19:39:50 +00:00
|
|
|
pub fn raw_size(&self) -> usize {
|
|
|
|
self.raw_size
|
2017-03-21 10:08:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
#[inline]
|
2017-04-16 19:39:50 +00:00
|
|
|
pub fn estimate_final_size(&self) -> usize {
|
|
|
|
self.data.len() + self.chunk_count * 20 + 500
|
2017-03-21 10:08:01 +00:00
|
|
|
}
|
|
|
|
}
|