mirror of https://github.com/dswd/zvault
Make clippy happy
This commit is contained in:
parent fb73e29a20
commit 618a858506
@@ -1,4 +1,4 @@
-[root]
+[[package]]
 name = "chunking"
 version = "0.1.0"
 dependencies = [
@@ -27,7 +27,7 @@ impl AeChunker {
 
 impl Chunker for AeChunker {
     #[allow(unknown_lints,explicit_counter_loop)]
-    fn chunk(&mut self, r: &mut Read, mut w: &mut Write) -> Result<ChunkerStatus, ChunkerError> {
+    fn chunk(&mut self, r: &mut Read, w: &mut Write) -> Result<ChunkerStatus, ChunkerError> {
        let mut max;
        let mut pos = 0;
        let mut max_pos = 0;
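Note: the only change in this hunk (and in the other chunker implementations below) is dropping the redundant `mut` binding on the `w` parameter. Since `w` is already a mutable reference, the writer can still be written through without rebinding the parameter mutably. A minimal sketch, not taken from the repository:

    use std::io::Write;

    // Illustrative only: `w` needs no `mut` binding because mutation happens
    // through the mutable reference, not by reassigning `w` itself.
    fn emit(w: &mut dyn Write, chunk: &[u8]) -> std::io::Result<()> {
        w.write_all(chunk)
    }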
@@ -88,7 +88,7 @@ impl FastCdcChunker {
 
 impl Chunker for FastCdcChunker {
     #[allow(unknown_lints,explicit_counter_loop,needless_range_loop)]
-    fn chunk(&mut self, r: &mut Read, mut w: &mut Write) -> Result<ChunkerStatus, ChunkerError> {
+    fn chunk(&mut self, r: &mut Read, w: &mut Write) -> Result<ChunkerStatus, ChunkerError> {
        let mut max;
        let mut hash = 0u64;
        let mut pos = 0;
@@ -107,7 +107,7 @@ impl Chunker for FastCdcChunker {
            for i in min_size_p..avg_size_p {
                hash = (hash << 1).wrapping_add(self.gear[self.buffer[i] as usize]);
                if hash & self.mask_short == 0 {
-                    return self.write_output(w, i+1, max);
+                    return self.write_output(w, i + 1, max);
                }
            }
        }
@@ -19,7 +19,7 @@ impl FixedChunker {
 
 impl Chunker for FixedChunker {
     #[allow(unknown_lints,explicit_counter_loop)]
-    fn chunk(&mut self, r: &mut Read, mut w: &mut Write) -> Result<ChunkerStatus, ChunkerError> {
+    fn chunk(&mut self, r: &mut Read, w: &mut Write) -> Result<ChunkerStatus, ChunkerError> {
        let mut todo = self.size;
        loop {
            // Fill the buffer, there might be some bytes still in there from last chunk
@@ -67,7 +67,7 @@ impl RabinChunker {
 
 impl Chunker for RabinChunker {
     #[allow(unknown_lints,explicit_counter_loop)]
-    fn chunk(&mut self, r: &mut Read, mut w: &mut Write) -> Result<ChunkerStatus, ChunkerError> {
+    fn chunk(&mut self, r: &mut Read, w: &mut Write) -> Result<ChunkerStatus, ChunkerError> {
        let mut max;
        let mut hash = 0u32;
        let mut pos = 0;
@@ -1,4 +1,4 @@
-[root]
+[[package]]
 name = "index"
 version = "0.1.0"
 dependencies = [
@@ -57,6 +57,7 @@ quick_error!{
 }
 
 
+#[allow(needless_pass_by_value)]
 fn load_bundles(
     path: &Path,
     base: &Path,
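The `#[allow(needless_pass_by_value)]` attributes added here and in later hunks silence clippy's `needless_pass_by_value` lint, which fires when a function takes ownership of a value it only reads. Where the commit does not allow the lint, it switches the parameter to a reference instead (see `repair_bundle`, `BundleDb::create`, `add_new_remote_bundle`, `remove_gone_remote_bundle`, and `Repository::create` below). A hypothetical sketch of the pattern, not code from zvault:

    // Taking `String` by value forces callers to clone or give up ownership;
    // clippy flags this when the function only reads the value.
    fn name_len_owned(name: String) -> usize {
        name.len()
    }

    // Borrowing is enough here, so this is the preferred signature.
    fn name_len(name: &str) -> usize {
        name.len()
    }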
@@ -195,10 +196,11 @@ impl BundleDb {
            &self.layout.local_bundle_cache_path()
        ));
        let bundles: Vec<_> = self.remote_bundles.values().cloned().collect();
-        Ok(try!(StoredBundle::save_list_to(
+        try!(StoredBundle::save_list_to(
            &bundles,
            &self.layout.remote_bundle_cache_path()
-        )))
+        ));
+        Ok(())
    }
 
    fn update_cache(&mut self) -> Result<(), BundleDbError> {
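This hunk, like several later ones (`mount`, the backup save path, `register_secret_key`, `write_mode`, `append_pax_extensions`, and the keyfile and lock `save` methods), rewrites `Ok(try!(expr))` as `try!(expr); Ok(())` when `expr` already yields `Result<(), E>`: the error is still propagated, and the unit success value is returned explicitly instead of re-wrapping whatever `try!` produced. A small illustrative sketch, written with `?`, the modern spelling of `try!`:

    use std::io::{self, Write};

    fn save(mut out: impl Write, data: &[u8]) -> io::Result<()> {
        out.write_all(data)?;   // propagate the error, as try!(...) did
        Ok(())                  // return unit explicitly instead of Ok(try!(...))
    }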
@@ -244,7 +246,7 @@ impl BundleDb {
        Ok((self_, new, gone))
    }
 
-    pub fn create(layout: RepositoryLayout) -> Result<(), BundleDbError> {
+    pub fn create(layout: &RepositoryLayout) -> Result<(), BundleDbError> {
        try!(fs::create_dir_all(layout.remote_bundles_path()).context(
            &layout.remote_bundles_path() as
                &Path
@@ -432,7 +434,7 @@ impl BundleDb {
        }
        if !to_repair.is_empty() {
            for id in ProgressIter::new("repairing bundles", to_repair.len(), to_repair.iter()) {
-                try!(self.repair_bundle(id.clone()));
+                try!(self.repair_bundle(id));
            }
            try!(self.flush());
        }
@@ -453,8 +455,8 @@ impl BundleDb {
        Ok(())
    }
 
-    fn repair_bundle(&mut self, id: BundleId) -> Result<(), BundleDbError> {
-        let stored = self.remote_bundles[&id].clone();
+    fn repair_bundle(&mut self, id: &BundleId) -> Result<(), BundleDbError> {
+        let stored = self.remote_bundles[id].clone();
        let mut bundle = match self.get_bundle(&stored) {
            Ok(bundle) => bundle,
            Err(err) => {
@@ -90,6 +90,7 @@ impl BundleReader {
        self.info.id.clone()
    }
 
+    #[allow(needless_pass_by_value)]
    fn load_header<P: AsRef<Path>>(
        path: P,
        crypto: Arc<Mutex<Crypto>>,
@@ -79,7 +79,7 @@ impl ChunkerType {
        match *self {
            ChunkerType::Ae(_size) |
            ChunkerType::Fixed(_size) => 0,
-            ChunkerType::Rabin((_size, seed)) => seed as u64,
+            ChunkerType::Rabin((_size, seed)) => u64::from(seed),
            ChunkerType::FastCdc((_size, seed)) => seed,
        }
    }
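The `seed as u64` to `u64::from(seed)` rewrite here, like the `len as u64` rewrites in later hunks, follows clippy's `cast_lossless` suggestion: for a widening conversion the `From` impl makes it explicit that no bits can be lost, whereas `as` would silently truncate if the types ever changed. An illustrative sketch only:

    // Widening u32 -> u64 is lossless, so clippy prefers From over `as`.
    fn widen(seed: u32) -> u64 {
        u64::from(seed)   // equivalent to `seed as u64`, but cannot truncate
    }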
@@ -5,6 +5,7 @@ use std::path::{Path, PathBuf};
 use log::LogLevel;
 use clap::{App, AppSettings, Arg, SubCommand};
 
+#[allow(option_option)]
 pub enum Arguments {
     Init {
         repo_path: PathBuf,
@@ -89,11 +89,11 @@ impl ErrorCode {
 }
 
 
-pub const DEFAULT_CHUNKER: &'static str = "fastcdc/16";
-pub const DEFAULT_HASH: &'static str = "blake2";
-pub const DEFAULT_COMPRESSION: &'static str = "brotli/3";
-pub const DEFAULT_BUNDLE_SIZE_STR: &'static str = "25";
-pub const DEFAULT_VACUUM_RATIO_STR: &'static str = "0";
+pub const DEFAULT_CHUNKER: &str = "fastcdc/16";
+pub const DEFAULT_HASH: &str = "blake2";
+pub const DEFAULT_COMPRESSION: &str = "brotli/3";
+pub const DEFAULT_BUNDLE_SIZE_STR: &str = "25";
+pub const DEFAULT_VACUUM_RATIO_STR: &str = "0";
 lazy_static! {
     pub static ref ZVAULT_FOLDER: PathBuf = {
         env::home_dir().unwrap().join(".zvault")
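Dropping `'static` from these constants (and from the `&[u8]` constants `REPOSITORY_README` and `DEFAULT_EXCLUDES` in a later hunk) follows clippy's lint against redundant static lifetimes on `const` and `static` items: the `'static` lifetime is already implied there, so both spellings denote the same type. A minimal sketch:

    // Both constants have type &'static str; the annotation on OLD is redundant
    // and is what the lint flags.
    const OLD: &'static str = "fastcdc/16";
    const NEW: &str = "fastcdc/16";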
@@ -435,7 +435,7 @@ pub fn run() -> Result<(), ErrorCode> {
            let mut repo = checked!(
                Repository::create(
                    repo_path,
-                    Config {
+                    &Config {
                        bundle_size: bundle_size,
                        chunker: chunker,
                        compression: compression,
@@ -260,7 +260,7 @@ impl<'a> FuseFilesystem<'a> {
    }
 
    pub fn mount<P: AsRef<Path>>(self, mountpoint: P) -> Result<(), RepositoryError> {
-        Ok(try!(fuse::mount(
+        try!(fuse::mount(
            self,
            &mountpoint,
            &[
@@ -269,7 +269,8 @@ impl<'a> FuseFilesystem<'a> {
                OsStr::new("auto_cache"),
                OsStr::new("readonly"),
            ]
-        )))
+        ));
+        Ok(())
    }
 
    pub fn get_inode(&mut self, num: u64) -> Option<FuseInodeRef> {
@@ -551,8 +552,8 @@ impl<'a> fuse::Filesystem for FuseFilesystem<'a> {
        if let Some(ref chunks) = inode.chunks {
            let mut data = Vec::with_capacity(size as usize);
            for &(hash, len) in chunks.iter() {
-                if len as u64 <= offset {
-                    offset -= len as u64;
+                if u64::from(len) <= offset {
+                    offset -= u64::from(len);
                    continue;
                }
                let chunk = match fuse_try!(self.repository.get_chunk(hash), reply) {
@@ -73,11 +73,12 @@ impl Repository {
        try!(self.write_mode());
        let path = self.layout.backup_path(name);
        try!(fs::create_dir_all(path.parent().unwrap()));
-        Ok(try!(backup.save_to(
+        try!(backup.save_to(
            &self.crypto.lock().unwrap(),
            self.config.encryption.clone(),
            path
-        )))
+        ));
+        Ok(())
    }
 
    pub fn delete_backup(&mut self, name: &str) -> Result<(), RepositoryError> {
@@ -298,7 +299,7 @@ impl Repository {
            let chunks = try!(self.put_inode(&child_inode));
            inode.cum_size += child_inode.cum_size;
            for &(_, len) in chunks.iter() {
-                meta_size += len as u64;
+                meta_size += u64::from(len);
            }
            inode.cum_dirs += child_inode.cum_dirs;
            inode.cum_files += child_inode.cum_files;
@@ -309,7 +310,7 @@ impl Repository {
            inode.cum_files = 1;
            if let Some(FileData::ChunkedIndirect(ref chunks)) = inode.data {
                for &(_, len) in chunks.iter() {
-                    meta_size += len as u64;
+                    meta_size += u64::from(len);
                }
            }
        }
@@ -357,7 +358,7 @@ impl Repository {
        backup.timestamp = start.timestamp();
        backup.total_data_size = root_inode.cum_size;
        for &(_, len) in backup.root.iter() {
-            backup.total_data_size += len as u64;
+            backup.total_data_size += u64::from(len);
        }
        backup.file_count = root_inode.cum_files;
        backup.dir_count = root_inode.cum_dirs;
@@ -474,6 +475,7 @@ impl Repository {
        Ok(versions)
    }
 
+    #[allow(needless_pass_by_value)]
    fn find_differences_recurse(
        &mut self,
        inode1: &Inode,
@@ -25,7 +25,7 @@ impl<'a> ChunkReader<'a> {
 }
 
 impl<'a> Read for ChunkReader<'a> {
-    fn read(&mut self, mut buf: &mut [u8]) -> Result<usize, io::Error> {
+    fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> {
        let mut bpos = 0;
        loop {
            if buf.len() == bpos {
@@ -30,8 +30,8 @@ quick_error!{
 
 
 impl HashMethod {
-    fn from_yaml(yaml: String) -> Result<Self, ConfigError> {
-        HashMethod::from(&yaml).map_err(ConfigError::Parse)
+    fn from_yaml(yaml: &str) -> Result<Self, ConfigError> {
+        HashMethod::from(yaml).map_err(ConfigError::Parse)
    }
 
    fn to_yaml(&self) -> String {
@@ -61,7 +61,7 @@ serde_impl!(ChunkerYaml(String) {
 });
 
 impl ChunkerType {
-    fn from_yaml(yaml: ChunkerYaml) -> Result<Self, ConfigError> {
+    fn from_yaml(yaml: &ChunkerYaml) -> Result<Self, ConfigError> {
        ChunkerType::from(&yaml.method, yaml.avg_size, yaml.seed).map_err(ConfigError::Parse)
    }
 
@@ -78,8 +78,8 @@ impl ChunkerType {
 
 impl Compression {
    #[inline]
-    fn from_yaml(yaml: String) -> Result<Self, ConfigError> {
-        Compression::from_string(&yaml).map_err(|_| ConfigError::Parse("Invalid codec"))
+    fn from_yaml(yaml: &str) -> Result<Self, ConfigError> {
+        Compression::from_string(yaml).map_err(|_| ConfigError::Parse("Invalid codec"))
    }
 
    #[inline]
@@ -91,8 +91,8 @@ impl Compression {
 
 impl EncryptionMethod {
    #[inline]
-    fn from_yaml(yaml: String) -> Result<Self, ConfigError> {
-        EncryptionMethod::from_string(&yaml).map_err(|_| ConfigError::Parse("Invalid codec"))
+    fn from_yaml(yaml: &str) -> Result<Self, ConfigError> {
+        EncryptionMethod::from_string(yaml).map_err(|_| ConfigError::Parse("Invalid codec"))
    }
 
    #[inline]
@@ -179,12 +179,12 @@ serde_impl!(Config(u64) {
 impl Config {
    fn from_yaml(yaml: ConfigYaml) -> Result<Self, ConfigError> {
        let compression = if let Some(c) = yaml.compression {
-            Some(try!(Compression::from_yaml(c)))
+            Some(try!(Compression::from_yaml(&c)))
        } else {
            None
        };
        let encryption = if let Some(e) = yaml.encryption {
-            let method = try!(EncryptionMethod::from_yaml(e.method));
+            let method = try!(EncryptionMethod::from_yaml(&e.method));
            let key = try!(parse_hex(&e.key).map_err(|_| {
                ConfigError::Parse("Invalid public key")
            }));
@@ -196,8 +196,8 @@ impl Config {
            compression: compression,
            encryption: encryption,
            bundle_size: yaml.bundle_size,
-            chunker: try!(ChunkerType::from_yaml(yaml.chunker)),
-            hash: try!(HashMethod::from_yaml(yaml.hash))
+            chunker: try!(ChunkerType::from_yaml(&yaml.chunker)),
+            hash: try!(HashMethod::from_yaml(&yaml.hash))
        })
    }
 
@@ -48,7 +48,8 @@ impl Repository {
        let mut new = false;
        for &(hash, len) in chunks {
            if let Some(pos) = self.index.get(&hash) {
-                if let Some(bundle) = bundles.get_mut(&pos.bundle) {
+                let bundle = pos.bundle;
+                if let Some(bundle) = bundles.get_mut(&bundle) {
                    if !bundle.chunk_usage.get(pos.chunk as usize) {
                        new = true;
                        bundle.chunk_usage.set(pos.chunk as usize);
@@ -488,7 +488,7 @@ impl Repository {
                );
                return self.rebuild_index();
            } else {
-                return Err(err.into());
+                return Err(err);
            }
        }
        Ok(())
@@ -32,8 +32,8 @@ pub use self::layout::RepositoryLayout;
 use self::bundle_map::BundleMap;
 
 
-const REPOSITORY_README: &'static [u8] = include_bytes!("../../docs/repository_readme.md");
-const DEFAULT_EXCLUDES: &'static [u8] = include_bytes!("../../docs/excludes.default");
+const REPOSITORY_README: &[u8] = include_bytes!("../../docs/repository_readme.md");
+const DEFAULT_EXCLUDES: &[u8] = include_bytes!("../../docs/excludes.default");
 
 const INDEX_MAGIC: [u8; 7] = *b"zvault\x02";
 const INDEX_VERSION: u8 = 1;
@@ -93,7 +93,7 @@ pub struct Repository {
 impl Repository {
    pub fn create<P: AsRef<Path>, R: AsRef<Path>>(
        path: P,
-        config: Config,
+        config: &Config,
        remote: R,
    ) -> Result<Self, RepositoryError> {
        let layout = RepositoryLayout::new(path.as_ref().to_path_buf());
@@ -111,7 +111,7 @@ impl Repository {
        ));
        try!(fs::create_dir_all(layout.remote_locks_path()));
        try!(config.save(layout.config_path()));
-        try!(BundleDb::create(layout.clone()));
+        try!(BundleDb::create(&layout));
        try!(Index::<Hash, Location>::create(
            layout.index_path(),
            &INDEX_MAGIC,
@@ -181,7 +181,7 @@ impl Repository {
            info!("Removig {} old bundles from index", gone.len());
            try!(repo.write_mode());
            for bundle in gone {
-                try!(repo.remove_gone_remote_bundle(bundle))
+                try!(repo.remove_gone_remote_bundle(&bundle))
            }
            save_bundle_map = true;
        }
@@ -194,7 +194,7 @@ impl Repository {
                new.into_iter()
            )
            {
-                try!(repo.add_new_remote_bundle(bundle))
+                try!(repo.add_new_remote_bundle(&bundle))
            }
            save_bundle_map = true;
        }
@@ -224,7 +224,7 @@ impl Repository {
        key_files: Vec<String>,
    ) -> Result<Self, RepositoryError> {
        let path = path.as_ref();
-        let mut repo = try!(Repository::create(path, Config::default(), remote));
+        let mut repo = try!(Repository::create(path, &Config::default(), remote));
        for file in key_files {
            try!(repo.crypto.lock().unwrap().register_keyfile(file));
        }
@@ -250,10 +250,11 @@ impl Repository {
        secret: SecretKey,
    ) -> Result<(), RepositoryError> {
        try!(self.write_mode());
-        Ok(try!(self.crypto.lock().unwrap().register_secret_key(
+        try!(self.crypto.lock().unwrap().register_secret_key(
            public,
            secret
-        )))
+        ));
+        Ok(())
    }
 
    #[inline]
@@ -338,7 +339,7 @@ impl Repository {
        Ok(())
    }
 
-    fn add_new_remote_bundle(&mut self, bundle: BundleInfo) -> Result<(), RepositoryError> {
+    fn add_new_remote_bundle(&mut self, bundle: &BundleInfo) -> Result<(), RepositoryError> {
        if self.bundle_map.find(&bundle.id).is_some() {
            return Ok(());
        }
@@ -374,7 +375,7 @@ impl Repository {
        Ok(())
    }
 
-    fn remove_gone_remote_bundle(&mut self, bundle: BundleInfo) -> Result<(), RepositoryError> {
+    fn remove_gone_remote_bundle(&mut self, bundle: &BundleInfo) -> Result<(), RepositoryError> {
        if let Some(id) = self.bundle_map.find(&bundle.id) {
            debug!("Removing bundle from index: {}", bundle.id);
            try!(self.bundles.delete_local_bundle(&bundle.id));
@@ -386,7 +387,8 @@ impl Repository {
 
    #[inline]
    fn write_mode(&mut self) -> Result<(), RepositoryError> {
-        Ok(try!(self.local_locks.upgrade(&mut self.lock)))
+        try!(self.local_locks.upgrade(&mut self.lock));
+        Ok(())
    }
 
    #[inline]
@@ -177,7 +177,7 @@ impl Repository {
        } else {
            if let Some(FileData::ChunkedIndirect(ref chunks)) = inode.data {
                for &(_, len) in chunks.iter() {
-                    inode.cum_size += len as u64;
+                    inode.cum_size += u64::from(len);
                }
            }
            inode.cum_files = 1;
@@ -226,7 +226,7 @@ impl Repository {
            children.remove(&inode.name);
            parent_inode.cum_size += inode.cum_size;
            for &(_, len) in chunks.iter() {
-                parent_inode.cum_size += len as u64;
+                parent_inode.cum_size += u64::from(len);
            }
            parent_inode.cum_files += inode.cum_files;
            parent_inode.cum_dirs += inode.cum_dirs;
@@ -257,7 +257,7 @@ impl Repository {
        for (inode, chunks) in roots {
            root_inode.cum_size += inode.cum_size;
            for &(_, len) in chunks.iter() {
-                root_inode.cum_size += len as u64;
+                root_inode.cum_size += u64::from(len);
            }
            root_inode.cum_files += inode.cum_files;
            root_inode.cum_dirs += inode.cum_dirs;
@@ -334,7 +334,8 @@ impl Repository {
                str::from_utf8(value).unwrap()
            );
        }
-        Ok(try!(tarfile.append_pax_extensions(&pax)))
+        try!(tarfile.append_pax_extensions(&pax));
+        Ok(())
    }
 
    fn export_tarfile_recurse<W: Write>(
@@ -102,12 +102,14 @@ impl Repository {
        try!(self.flush());
        info!("Checking index");
        for (hash, location) in self.index.iter() {
-            if rewrite_bundles.contains(&location.bundle) {
+            let loc_bundle = location.bundle;
+            let loc_chunk = location.chunk;
+            if rewrite_bundles.contains(&loc_bundle) {
                panic!(
                    "Removed bundle is still referenced in index: hash:{}, bundle:{}, chunk:{}",
                    hash,
-                    location.bundle,
-                    location.chunk
+                    loc_bundle,
+                    loc_chunk
                );
            }
        }
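Copying `location.bundle` and `location.chunk` into locals before they are borrowed (here and in the `bundles.get_mut` hunk above) avoids taking references directly to fields of the index's `Location` entries; the same concern presumably motivates removing `#[repr(packed)]` from `Hash` in a later hunk, since references into packed structs may be misaligned. A hedged sketch with an assumed field layout, not the actual zvault definition:

    // Assumed layout for illustration only; the real Location may differ.
    #[repr(packed)]
    struct Location {
        bundle: u32,
        chunk: u32,
    }

    fn bundle_of(loc: &Location) -> u32 {
        // Copy the field out first; `&loc.bundle` would be a (possibly unaligned)
        // reference into a packed struct.
        let bundle = loc.bundle;
        bundle
    }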
@@ -108,7 +108,8 @@ impl KeyfileYaml {
 
    pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), EncryptionError> {
        let mut f = try!(File::create(path));
-        Ok(try!(serde_yaml::to_writer(&mut f, &self)))
+        try!(serde_yaml::to_writer(&mut f, &self));
+        Ok(())
    }
 }
 
@@ -12,7 +12,6 @@ use std::u64;
 use std::io::{self, Read, Write};
 
 
-#[repr(packed)]
 #[derive(Clone, Copy, PartialEq, Hash, Eq, Default, Ord, PartialOrd)]
 pub struct Hash {
     pub high: u64,
@@ -58,7 +58,8 @@ impl LockFile {
 
    pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), LockError> {
        let mut f = try!(File::create(path));
-        Ok(try!(serde_yaml::to_writer(&mut f, &self)))
+        try!(serde_yaml::to_writer(&mut f, &self));
+        Ok(())
    }
 }
 