From 0b5a485a260180444f8d38948e83cedd9a1501d1 Mon Sep 17 00:00:00 2001
From: Dennis Schwerdel
Date: Thu, 8 Mar 2018 23:41:56 +0100
Subject: [PATCH] Refactor part 1

---
 src/backups/mod.rs                         | 197 ++++++++++++++++++
 src/{ => backups}/mount.rs                 |  10 +-
 src/chunking/mod.rs                        |  53 -----
 src/cli/mod.rs                             |  36 ++--
 src/main.rs                                |   6 +-
 src/prelude.rs                             |   9 +-
 src/{ => repository}/bundledb/cache.rs     |   0
 src/{ => repository}/bundledb/db.rs        |   2 +-
 src/{ => repository}/bundledb/mod.rs       |   2 +-
 src/{ => repository}/bundledb/reader.rs    |   0
 src/{ => repository}/bundledb/uploader.rs  |   0
 src/{ => repository}/bundledb/writer.rs    |   0
 src/{ => repository}/chunking/ae.rs        |   0
 src/{ => repository}/chunking/benches.rs   |   0
 src/{ => repository}/chunking/fastcdc.rs   |   0
 src/{ => repository}/chunking/fixed.rs     |   0
 .../chunking/mod.rs}                       |  55 ++++-
 src/{ => repository}/chunking/rabin.rs     |   0
 src/{ => repository}/chunking/test.rs      |   0
 src/{index.rs => repository/index/mod.rs}  |   0
 src/repository/info.rs                     |   2 +-
 src/repository/integrity.rs                |   2 +-
 src/repository/mod.rs                      |   7 +-
 23 files changed, 289 insertions(+), 92 deletions(-)
 create mode 100644 src/backups/mod.rs
 rename src/{ => backups}/mount.rs (98%)
 delete mode 100644 src/chunking/mod.rs
 rename src/{ => repository}/bundledb/cache.rs (100%)
 rename src/{ => repository}/bundledb/db.rs (99%)
 rename src/{ => repository}/bundledb/mod.rs (99%)
 rename src/{ => repository}/bundledb/reader.rs (100%)
 rename src/{ => repository}/bundledb/uploader.rs (100%)
 rename src/{ => repository}/bundledb/writer.rs (100%)
 rename src/{ => repository}/chunking/ae.rs (100%)
 rename src/{ => repository}/chunking/benches.rs (100%)
 rename src/{ => repository}/chunking/fastcdc.rs (100%)
 rename src/{ => repository}/chunking/fixed.rs (100%)
 rename src/{chunker.rs => repository/chunking/mod.rs} (62%)
 rename src/{ => repository}/chunking/rabin.rs (100%)
 rename src/{ => repository}/chunking/test.rs (100%)
 rename src/{index.rs => repository/index/mod.rs} (100%)

diff --git a/src/backups/mod.rs b/src/backups/mod.rs
new file mode 100644
index 0000000..33d1fd2
--- /dev/null
+++ b/src/backups/mod.rs
@@ -0,0 +1,197 @@
+pub mod mount;
+
+use ::prelude::*;
+
+use std::path::{Path, PathBuf};
+use std::collections::HashMap;
+
+
+pub struct BackupRepository {
+    repo: Repository
+}
+
+impl BackupRepository {
+    pub fn create<P: AsRef<Path>, R: AsRef<Path>>(path: P, config: &Config, remote: R) -> Result<Self, RepositoryError> {
+        Ok(BackupRepository {
+            repo: try!(Repository::create(path, config, remote))
+        })
+    }
+
+    #[allow(unknown_lints, useless_let_if_seq)]
+    pub fn open<P: AsRef<Path>>(path: P, online: bool) -> Result<Self, RepositoryError> {
+        Ok(BackupRepository {
+            repo: try!(Repository::open(path, online))
+        })
+    }
+
+    pub fn import<P: AsRef<Path>, R: AsRef<Path>>(path: P, remote: R, key_files: Vec<String>) -> Result<Self, RepositoryError> {
+        Ok(BackupRepository {
+            repo: try!(Repository::import(path, remote, key_files))
+        })
+    }
+
+    #[inline]
+    pub fn register_key(&mut self, public: PublicKey, secret: SecretKey) -> Result<(), RepositoryError> {
+        self.repo.register_key(public, secret)
+    }
+
+    #[inline]
+    pub fn save_config(&mut self) -> Result<(), RepositoryError> {
+        self.repo.save_config()
+    }
+
+    #[inline]
+    pub fn set_encryption(&mut self, public: Option<&PublicKey>) {
+        self.repo.set_encryption(public)
+    }
+
+    #[inline]
+    pub fn has_backup(&self, name: &str) -> bool {
+        self.repo.has_backup(name)
+    }
+
+    pub fn get_backup(&self, name: &str) -> Result<Backup, RepositoryError> {
+        self.repo.get_backup(name)
+    }
+
+    #[inline]
+    pub fn get_backup_inode<P: AsRef<Path>>(&mut self, backup: &Backup, path: P) -> Result<Inode, RepositoryError> {
+        self.repo.get_backup_inode(backup, path)
+    }
+
+    #[inline]
+    pub fn get_inode(&mut self, chunks: &[Chunk]) -> Result<Inode, RepositoryError> {
+        self.repo.get_inode(chunks)
+    }
+
+    pub fn get_all_backups(&self) -> Result<HashMap<String, Backup>, RepositoryError> {
+        self.repo.get_all_backups()
+    }
+
+    pub fn get_config(&self) -> &Config {
+        &self.repo.config
+    }
+
+    pub fn set_config(&mut self, config: Config) {
+        self.repo.config = config;
+    }
+
+    pub fn get_layout(&self) -> &RepositoryLayout {
+        &self.repo.layout
+    }
+
+    pub fn create_backup_recursively<P: AsRef<Path>>(&mut self, path: P, reference: Option<&Backup>, options: &BackupOptions) -> Result<Backup, RepositoryError> {
+        self.repo.create_backup_recursively(path, reference, options)
+    }
+
+    pub fn import_tarfile<P: AsRef<Path>>(&mut self, tarfile: P) -> Result<Backup, RepositoryError> {
+        self.repo.import_tarfile(tarfile)
+    }
+
+    pub fn save_backup(&mut self, backup: &Backup, name: &str) -> Result<(), RepositoryError> {
+        self.repo.save_backup(backup, name)
+    }
+
+    pub fn export_tarfile<P: AsRef<Path>>(&mut self, backup: &Backup, inode: Inode, tarfile: P) -> Result<(), RepositoryError> {
+        self.repo.export_tarfile(backup, inode, tarfile)
+    }
+
+    pub fn restore_inode_tree<P: AsRef<Path>>(&mut self, backup: &Backup, inode: Inode, path: P) -> Result<(), RepositoryError> {
+        self.repo.restore_inode_tree(backup, inode, path)
+    }
+
+    pub fn remove_backup_path<P: AsRef<Path>>(&mut self, backup: &mut Backup, path: P) -> Result<(), RepositoryError> {
+        self.repo.remove_backup_path(backup, path)
+    }
+
+    pub fn get_backups<P: AsRef<Path>>(&self, path: P) -> Result<HashMap<String, Backup>, RepositoryError> {
+        self.repo.get_backups(path)
+    }
+
+    pub fn delete_backup(&mut self, name: &str) -> Result<(), RepositoryError> {
+        self.repo.delete_backup(name)
+    }
+
+    pub fn prune_backups(&mut self, prefix: &str, daily: usize, weekly: usize, monthly: usize, yearly: usize, force: bool) -> Result<(), RepositoryError> {
+        self.repo.prune_backups(prefix, daily, weekly, monthly, yearly, force)
+    }
+
+    pub fn info(&self) -> RepositoryInfo {
+        self.repo.info()
+    }
+
+    pub fn vacuum(&mut self, ratio: f32, combine: bool, force: bool) -> Result<(), RepositoryError> {
+        self.repo.vacuum(ratio, combine, force)
+    }
+
+    pub fn check_repository(&mut self, repair: bool) -> Result<(), RepositoryError> {
+        self.repo.check_repository(repair)
+    }
+
+    #[inline]
+    pub fn check_bundles(&mut self, full: bool, repair: bool) -> Result<(), RepositoryError> {
+        self.repo.check_bundles(full, repair)
+    }
+
+    #[inline]
+    pub fn check_index(&mut self, repair: bool) -> Result<(), RepositoryError> {
+        self.repo.check_index(repair)
+    }
+
+    pub fn check_backup_inode(&mut self, name: &str, backup: &mut Backup, path: &Path, repair: bool) -> Result<(), RepositoryError> {
+        self.repo.check_backup_inode(name, backup, path, repair)
+    }
+
+    #[inline]
+    pub fn check_backup(&mut self, name: &str, backup: &mut Backup, repair: bool) -> Result<(), RepositoryError> {
+        self.repo.check_backup(name, backup, repair)
+    }
+
+    pub fn check_backups(&mut self, repair: bool) -> Result<(), RepositoryError> {
+        self.repo.check_backups(repair)
+    }
+
+    #[inline]
+    pub fn set_clean(&mut self) {
+        self.repo.set_clean()
+    }
+
+    pub fn statistics(&self) -> RepositoryStatistics {
+        self.repo.statistics()
+    }
+
+    pub fn find_duplicates(&mut self, inode: &Inode, min_size: u64) -> Result<Vec<(Vec<PathBuf>, u64)>, RepositoryError> {
+        self.repo.find_duplicates(inode, min_size)
+    }
+
+    pub fn analyze_usage(&mut self) -> Result<HashMap<u32, BundleAnalysis>, RepositoryError> {
+        self.repo.analyze_usage()
+    }
+
+    #[inline]
+    pub fn list_bundles(&self) -> Vec<&BundleInfo> {
+        self.repo.list_bundles()
+    }
+
+    #[inline]
+    pub fn get_bundle(&self, bundle: &BundleId) -> Option<&StoredBundle> {
+        self.repo.get_bundle(bundle)
+    }
+
+    pub fn find_versions<P: AsRef<Path>>(&mut self, path: P) -> Result<Vec<(String, Inode)>, RepositoryError> {
+        self.repo.find_versions(path)
+    }
+
+    #[inline]
+    pub fn find_differences(&mut self, inode1: &Inode, inode2: &Inode) -> Result<Vec<(PathBuf, DiffType)>, RepositoryError> {
+        self.repo.find_differences(inode1, inode2)
+    }
+
+    pub fn get_chunk(&mut self, hash: Hash) -> Result<Option<Vec<u8>>, RepositoryError> {
+        self.repo.get_chunk(hash)
+    }
+
+    pub fn get_data(&mut self, chunks: &[Chunk]) -> Result<Vec<u8>, RepositoryError> {
+        self.repo.get_data(chunks)
+    }
+}
diff --git a/src/mount.rs b/src/backups/mount.rs
similarity index 98%
rename from src/mount.rs
rename to src/backups/mount.rs
index 86aa100..b71b39c 100644
--- a/src/mount.rs
+++ b/src/backups/mount.rs
@@ -150,12 +150,12 @@ impl FuseInode {
 
 pub struct FuseFilesystem<'a> {
     next_id: u64,
-    repository: &'a mut Repository,
+    repository: &'a mut BackupRepository,
     inodes: HashMap<u64, FuseInode>
 }
 
 impl<'a> FuseFilesystem<'a> {
-    pub fn new(repository: &'a mut Repository) -> Result<Self, RepositoryError> {
+    pub fn new(repository: &'a mut BackupRepository) -> Result<Self, RepositoryError> {
         Ok(FuseFilesystem {
             next_id: 1,
             repository,
@@ -164,7 +164,7 @@ impl<'a> FuseFilesystem<'a> {
     }
 
     pub fn from_repository(
-        repository: &'a mut Repository,
+        repository: &'a mut BackupRepository,
         path: Option<&str>,
     ) -> Result<Self, RepositoryError> {
         let mut backups = vec![];
@@ -196,7 +196,7 @@ impl<'a> FuseFilesystem<'a> {
     }
 
     pub fn from_backup(
-        repository: &'a mut Repository,
+        repository: &'a mut BackupRepository,
         backup: Backup,
     ) -> Result<Self, RepositoryError> {
         let inode = try!(repository.get_inode(&backup.root));
@@ -206,7 +206,7 @@ impl<'a> FuseFilesystem<'a> {
     }
 
     pub fn from_inode(
-        repository: &'a mut Repository,
+        repository: &'a mut BackupRepository,
         backup: Backup,
         inode: Inode,
     ) -> Result<Self, RepositoryError> {
diff --git a/src/chunking/mod.rs b/src/chunking/mod.rs
deleted file mode 100644
index bee1e29..0000000
--- a/src/chunking/mod.rs
+++ /dev/null
@@ -1,53 +0,0 @@
-use std::io::{self, Write, Read};
-
-mod fixed;
-mod ae;
-mod rabin;
-mod fastcdc;
-#[cfg(test)] mod test;
-#[cfg(feature = "bench")] mod benches;
-
-pub use self::fixed::FixedChunker;
-pub use self::ae::AeChunker;
-pub use self::rabin::RabinChunker;
-pub use self::fastcdc::FastCdcChunker;
-
-// https://moinakg.wordpress.com/2013/06/22/high-performance-content-defined-chunking/
-
-// Paper: "A Comprehensive Study of the Past, Present, and Future of Data Deduplication"
-// Paper-URL: http://wxia.hustbackup.cn/IEEE-Survey-final.pdf
-
-// https://borgbackup.readthedocs.io/en/stable/internals.html#chunks
-// https://github.com/bup/bup/blob/master/lib/bup/bupsplit.c
-
-quick_error!{
-    #[derive(Debug)]
-    pub enum ChunkerError {
-        Read(err: io::Error) {
-            cause(err)
-            description(tr!("Failed to read input"))
-            display("{}", tr_format!("Chunker error: failed to read input\n\tcaused by: {}", err))
-        }
-        Write(err: io::Error) {
-            cause(err)
-            description(tr!("Failed to write to output"))
-            display("{}", tr_format!("Chunker error: failed to write to output\n\tcaused by: {}", err))
-        }
-        Custom(reason: &'static str) {
-            from()
-            description(tr!("Custom error"))
-            display("{}", tr_format!("Chunker error: {}", reason))
-        }
-    }
-}
-
-
-#[derive(Debug, Eq, PartialEq)]
-pub enum ChunkerStatus {
-    Continue,
-    Finished
-}
-
-pub trait Chunker {
-    fn chunk(&mut self, r: &mut Read, w: &mut Write) -> Result<ChunkerStatus, ChunkerError>;
-}
diff --git a/src/cli/mod.rs b/src/cli/mod.rs
index 5c34f8f..b5f6bbf 100644
--- a/src/cli/mod.rs
+++ b/src/cli/mod.rs
@@ -115,15 +115,15 @@ macro_rules! checked {
     };
 }
 
-fn open_repository(path: &Path, online: bool) -> Result<Repository, ErrorCode> {
+fn open_repository(path: &Path, online: bool) -> Result<BackupRepository, ErrorCode> {
     Ok(checked!(
-        Repository::open(path, online),
+        BackupRepository::open(path, online),
         "load repository",
         ErrorCode::LoadRepository
     ))
 }
 
-fn get_backup(repo: &Repository, backup_name: &str) -> Result<Backup, ErrorCode> {
+fn get_backup(repo: &BackupRepository, backup_name: &str) -> Result<Backup, ErrorCode> {
     if !repo.has_backup(backup_name) {
         tr_error!("A backup with that name does not exist");
         return Err(ErrorCode::NoSuchBackup);
@@ -135,7 +135,7 @@ fn get_backup(repo: &Repository, backup_name: &str) -> Result
     ))
 }
 
-fn get_inode(repo: &mut Repository, backup: &Backup, inode: Option<&String>) -> Result<Inode, ErrorCode> {
+fn get_inode(repo: &mut BackupRepository, backup: &Backup, inode: Option<&String>) -> Result<Inode, ErrorCode> {
     Ok(if let Some(inode) = inode {
         checked!(
             repo.get_backup_inode(backup, &inode),
@@ -152,7 +152,7 @@ fn get_inode(repo: &mut Repository, backup: &Backup, inode: Option<&String>) ->
 }
 
 fn find_reference_backup(
-    repo: &Repository,
+    repo: &BackupRepository,
     path: &str,
 ) -> Result<Option<(String, Backup)>, ErrorCode> {
     let mut matching = Vec::new();
@@ -523,7 +523,7 @@ pub fn run() -> Result<(), ErrorCode> {
                 return Err(ErrorCode::InvalidArgs);
             }
             let mut repo = checked!(
-                Repository::create(
+                BackupRepository::create(
                     repo_path,
                     &Config {
                         bundle_size,
@@ -554,7 +554,7 @@ pub fn run() -> Result<(), ErrorCode> {
                );
                println!();
            }
-            print_config(&repo.config);
+            print_config(repo.get_config());
        }
        Arguments::Backup {
            repo_path,
@@ -598,7 +598,7 @@ pub fn run() -> Result<(), ErrorCode> {
            let reference_backup = reference_backup.map(|(_, backup)| backup);
            if !no_default_excludes && !tar {
                for line in BufReader::new(checked!(
-                    File::open(&repo.layout.excludes_path()),
+                    File::open(&repo.get_layout().excludes_path()),
                    "open default excludes file",
                    ErrorCode::LoadExcludes
                )).lines()
@@ -745,7 +745,7 @@ pub fn run() -> Result<(), ErrorCode> {
                    ErrorCode::SaveBackup
                );
                tr_info!("The backup subpath has been deleted, run vacuum to reclaim space");
-            } else if repo.layout.backups_path().join(&backup_name).is_dir() {
+            } else if repo.get_layout().backups_path().join(&backup_name).is_dir() {
                let backups = checked!(
                    repo.get_backups(&backup_name),
                    "retrieve backups",
@@ -877,7 +877,7 @@ pub fn run() -> Result<(), ErrorCode> {
        } => {
            let mut repo = try!(open_repository(&repo_path, false));
            let backup_map = if let Some(backup_name) = backup_name {
-                if repo.layout.backups_path().join(&backup_name).is_dir() {
+                if repo.get_layout().backups_path().join(&backup_name).is_dir() {
                    repo.get_backups(&backup_name)
                } else {
                    let backup = try!(get_backup(&repo, &backup_name));
@@ -970,7 +970,7 @@ pub fn run() -> Result<(), ErrorCode> {
        } => {
            let mut repo = try!(open_repository(&repo_path, true));
            let fs = if let Some(backup_name) = backup_name {
-                if repo.layout.backups_path().join(&backup_name).is_dir() {
+                if repo.get_layout().backups_path().join(&backup_name).is_dir() {
                    checked!(
                        FuseFilesystem::from_repository(&mut repo, Some(&backup_name)),
                        "create fuse filesystem",
@@ -1047,7 +1047,7 @@ pub fn run() -> Result<(), ErrorCode> {
            key_files
        } => {
            checked!(
-                Repository::import(repo_path, remote_path, key_files),
+                BackupRepository::import(repo_path, remote_path, key_files),
                "import repository",
                ErrorCode::ImportRun
            );
@@ -1128,19 +1128,20 @@ pub fn run() -> Result<(), ErrorCode> {
        } => {
            let mut repo = try!(open_repository(&repo_path, false));
            let mut changed = false;
+            let mut config = repo.get_config().clone();
            if let Some(bundle_size) = bundle_size {
-                repo.config.bundle_size = bundle_size;
+                config.bundle_size = bundle_size;
                changed = true;
            }
            if let Some(chunker) = chunker {
                tr_warn!(
                    "Changing the chunker makes it impossible to use existing data for deduplication"
                );
-                repo.config.chunker = chunker;
+                config.chunker = chunker;
                changed = true;
            }
            if let Some(compression) = compression {
-                repo.config.compression = compression;
+                config.compression = compression;
                changed = true;
            }
            if let Some(encryption) = encryption {
@@ -1151,14 +1152,15 @@ pub fn run() -> Result<(), ErrorCode> {
                tr_warn!(
                    "Changing the hash makes it impossible to use existing data for deduplication"
                );
-                repo.config.hash = hash;
+                config.hash = hash;
                changed = true;
            }
            if changed {
+                repo.set_config(config);
                checked!(repo.save_config(), "save config", ErrorCode::SaveConfig);
                tr_info!("The configuration has been updated.");
            } else {
-                print_config(&repo.config);
+                print_config(repo.get_config());
            }
        }
        Arguments::GenKey { file, password } => {
diff --git a/src/main.rs b/src/main.rs
index 304bb68..64e1998 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -43,14 +43,10 @@ extern crate mmap;
 
 #[macro_use] mod translation;
 pub mod util;
-mod bundledb;
 mod repository;
 mod cli;
 mod prelude;
-mod mount;
-mod chunker;
-mod chunking;
-mod index;
+mod backups;
 
 
 use std::process::exit;
diff --git a/src/prelude.rs b/src/prelude.rs
index 7f08c1d..26772cf 100644
--- a/src/prelude.rs
+++ b/src/prelude.rs
@@ -1,14 +1,15 @@
 pub use util::*;
 
-pub use bundledb::{BundleReader, BundleMode, BundleWriter, BundleInfo, BundleId, BundleDbError,
+pub use repository::bundledb::{BundleReader, BundleMode, BundleWriter, BundleInfo, BundleId, BundleDbError,
                    BundleDb, BundleWriterError, StoredBundle, BundleStatistics};
-pub use chunker::{ChunkerType, Chunker, ChunkerStatus, ChunkerError};
+pub use repository::chunking::{ChunkerType, Chunker, ChunkerStatus, ChunkerError};
 pub use repository::{Repository, Backup, Config, RepositoryError, RepositoryInfo, Inode, FileType,
                      IntegrityError, BackupFileError, BackupError, BackupOptions, BundleAnalysis,
                      FileData, DiffType, InodeError, RepositoryLayout, Location,
                      RepositoryStatistics};
-pub use index::{Index, IndexError, IndexStatistics};
-pub use mount::FuseFilesystem;
+pub use repository::index::{Index, IndexError, IndexStatistics};
+pub use backups::mount::FuseFilesystem;
 pub use translation::CowStr;
+pub use backups::BackupRepository;
 
 pub use serde::{Serialize, Deserialize};
diff --git a/src/bundledb/cache.rs b/src/repository/bundledb/cache.rs
similarity index 100%
rename from src/bundledb/cache.rs
rename to src/repository/bundledb/cache.rs
diff --git a/src/bundledb/db.rs b/src/repository/bundledb/db.rs
similarity index 99%
rename from src/bundledb/db.rs
rename to src/repository/bundledb/db.rs
index 09e26c3..9879ff9 100644
--- a/src/bundledb/db.rs
+++ b/src/repository/bundledb/db.rs
@@ -1,4 +1,4 @@
-use prelude::*;
+use ::prelude::*;
 use super::*;
 
 use std::path::{Path, PathBuf};
diff --git a/src/bundledb/mod.rs b/src/repository/bundledb/mod.rs
similarity index 99%
rename from src/bundledb/mod.rs
rename to src/repository/bundledb/mod.rs
index d53ef02..44a764f 100644
--- a/src/bundledb/mod.rs
+++ b/src/repository/bundledb/mod.rs
@@ -10,7 +10,7 @@ pub use self::reader::{BundleReader, BundleReaderError};
 pub use self::db::*;
 pub use self::uploader::BundleUploader;
 
-use prelude::*;
+use ::prelude::*;
 
 use std::fmt;
 use std::collections::HashMap;
diff --git a/src/bundledb/reader.rs b/src/repository/bundledb/reader.rs
similarity index 100%
rename from src/bundledb/reader.rs
rename to src/repository/bundledb/reader.rs
diff --git a/src/bundledb/uploader.rs b/src/repository/bundledb/uploader.rs
similarity index 100%
rename from src/bundledb/uploader.rs
rename to src/repository/bundledb/uploader.rs
diff --git a/src/bundledb/writer.rs b/src/repository/bundledb/writer.rs
similarity index 100%
rename from src/bundledb/writer.rs
rename to src/repository/bundledb/writer.rs
diff --git a/src/chunking/ae.rs b/src/repository/chunking/ae.rs
similarity index 100%
rename from src/chunking/ae.rs
rename to src/repository/chunking/ae.rs
diff --git a/src/chunking/benches.rs b/src/repository/chunking/benches.rs
similarity index 100%
rename from src/chunking/benches.rs
rename to src/repository/chunking/benches.rs
diff --git a/src/chunking/fastcdc.rs b/src/repository/chunking/fastcdc.rs
similarity index 100%
rename from src/chunking/fastcdc.rs
rename to src/repository/chunking/fastcdc.rs
diff --git a/src/chunking/fixed.rs b/src/repository/chunking/fixed.rs
similarity index 100%
rename from src/chunking/fixed.rs
rename to src/repository/chunking/fixed.rs
diff --git a/src/chunker.rs b/src/repository/chunking/mod.rs
similarity index 62%
rename from src/chunker.rs
rename to src/repository/chunking/mod.rs
index 6102aa6..e1f5792 100644
--- a/src/chunker.rs
+++ b/src/repository/chunking/mod.rs
@@ -1,7 +1,58 @@
-pub use chunking::*;
-
+use std::io::{self, Write, Read};
 use std::str::FromStr;
 
+mod fixed;
+mod ae;
+mod rabin;
+mod fastcdc;
+#[cfg(test)] mod test;
+#[cfg(feature = "bench")] mod benches;
+
+pub use self::fixed::FixedChunker;
+pub use self::ae::AeChunker;
+pub use self::rabin::RabinChunker;
+pub use self::fastcdc::FastCdcChunker;
+
+// https://moinakg.wordpress.com/2013/06/22/high-performance-content-defined-chunking/
+
+// Paper: "A Comprehensive Study of the Past, Present, and Future of Data Deduplication"
+// Paper-URL: http://wxia.hustbackup.cn/IEEE-Survey-final.pdf
+
+// https://borgbackup.readthedocs.io/en/stable/internals.html#chunks
+// https://github.com/bup/bup/blob/master/lib/bup/bupsplit.c
+
+quick_error!{
+    #[derive(Debug)]
+    pub enum ChunkerError {
+        Read(err: io::Error) {
+            cause(err)
+            description(tr!("Failed to read input"))
+            display("{}", tr_format!("Chunker error: failed to read input\n\tcaused by: {}", err))
+        }
+        Write(err: io::Error) {
+            cause(err)
+            description(tr!("Failed to write to output"))
+            display("{}", tr_format!("Chunker error: failed to write to output\n\tcaused by: {}", err))
+        }
+        Custom(reason: &'static str) {
+            from()
+            description(tr!("Custom error"))
+            display("{}", tr_format!("Chunker error: {}", reason))
+        }
+    }
+}
+
+
+#[derive(Debug, Eq, PartialEq)]
+pub enum ChunkerStatus {
+    Continue,
+    Finished
+}
+
+pub trait Chunker {
+    fn chunk(&mut self, r: &mut Read, w: &mut Write) -> Result<ChunkerStatus, ChunkerError>;
+}
+
 
 #[derive(Debug, Clone, Copy, Eq, PartialEq)]
 pub enum ChunkerType {
diff --git a/src/chunking/rabin.rs b/src/repository/chunking/rabin.rs
similarity index 100%
rename from src/chunking/rabin.rs
rename to src/repository/chunking/rabin.rs
diff --git a/src/chunking/test.rs b/src/repository/chunking/test.rs
similarity index 100%
rename from src/chunking/test.rs
rename to src/repository/chunking/test.rs
diff --git a/src/index.rs b/src/repository/index/mod.rs
similarity index 100%
rename from src/index.rs
rename to src/repository/index/mod.rs
diff --git a/src/repository/info.rs b/src/repository/info.rs
index 083aa48..6b3e582 100644
--- a/src/repository/info.rs
+++ b/src/repository/info.rs
@@ -116,7 +116,7 @@ impl Repository {
                 }
             }
         }
-        // Put children in todo
+        // Put children in to do
         if let Some(children) = inode.children {
             for (_name, chunks) in children {
                 todo.push_back(chunks);
diff --git a/src/repository/integrity.rs b/src/repository/integrity.rs
index c0bb31c..003d8d9 100644
--- a/src/repository/integrity.rs
+++ b/src/repository/integrity.rs
@@ -149,7 +149,7 @@ impl Repository {
                 return Err(IntegrityError::MissingInodeData(path, Box::new(err)).into());
             }
         }
-        // Put children in todo
+        // Put children in to do
         if let Some(ref mut children) = inode.children {
             let mut removed = vec![];
             for (name, chunks) in children.iter_mut() {
diff --git a/src/repository/mod.rs b/src/repository/mod.rs
index e90a716..e385f9d 100644
--- a/src/repository/mod.rs
+++ b/src/repository/mod.rs
@@ -10,6 +10,9 @@ mod vacuum;
 mod backup_file;
 mod tarfile;
 mod layout;
+pub mod bundledb;
+pub mod index;
+pub mod chunking;
 
 use prelude::*;
 
@@ -53,9 +56,9 @@ impl Location {
     }
 }
 
-impl ::index::Value for Location {}
+impl index::Value for Location {}
 
-impl ::index::Key for Hash {
+impl index::Key for Hash {
     fn hash(&self) -> u64 {
         self.low
     }
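
Usage sketch (not part of the patch): the snippet below illustrates how a call site works against the BackupRepository facade added in src/backups/mod.rs, mirroring the changes applied to src/cli/mod.rs above. The repository is opened through the wrapper, and configuration goes through get_config()/set_config() instead of the repo.config field. The repository path and backup name are hypothetical, and the snippet assumes the delegating signatures shown in the new module.

    use ::prelude::*;
    use std::path::Path;

    // Hypothetical example: reopen a repository and verify one backup through
    // the BackupRepository facade instead of using Repository directly.
    fn recheck_backup(repo_path: &Path) -> Result<(), RepositoryError> {
        let mut repo = try!(BackupRepository::open(repo_path, true));
        if repo.has_backup("daily/2018-03-08") {
            let mut backup = try!(repo.get_backup("daily/2018-03-08"));
            try!(repo.check_backup("daily/2018-03-08", &mut backup, false));
        }
        // Configuration is read and written through accessors now.
        let config = repo.get_config().clone();
        repo.set_config(config);
        repo.save_config()
    }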