Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 6 additions & 5 deletions crates/core/src/archiver.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,16 +40,16 @@ pub struct TreeStackEmptyError;
#[allow(clippy::struct_field_names)]
pub struct Archiver<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> {
/// The `FileArchiver` is responsible for archiving files.
file_archiver: FileArchiver<'a, BE, I>,
file_archiver: FileArchiver<'a, I>,

/// The `TreeArchiver` is responsible for archiving trees.
tree_archiver: TreeArchiver<'a, BE, I>,
tree_archiver: TreeArchiver<'a, I>,

/// The parent snapshot to use.
parent: Parent,

/// The `SharedIndexer` is used to index the data.
indexer: SharedIndexer<BE>,
indexer: SharedIndexer,

/// The backend to write to.
be: BE,
Expand Down Expand Up @@ -83,7 +83,7 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Archiver<'a, BE, I> {
parent: Parent,
mut snap: SnapshotFile,
) -> RusticResult<Self> {
let indexer = Indexer::new(be.clone()).into_shared();
let indexer = Indexer::new().into_shared();
let mut summary = snap.summary.take().unwrap_or_default();
summary.backup_start = Local::now();

Expand Down Expand Up @@ -217,7 +217,8 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Archiver<'a, BE, I> {
stats.apply(&mut summary, BlobType::Data);
self.snap.tree = id;

self.indexer.write().unwrap().finalize()?;
self.indexer
.finalize_and_check_save(|file| self.be.save_file_no_id(file))?;

summary.finalize(self.snap.time).map_err(|err| {
RusticError::with_source(
Expand Down
23 changes: 7 additions & 16 deletions crates/core/src/archiver/file_archiver.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,7 @@ use crate::{
decrypt::DecryptWriteBackend,
node::{Node, NodeType},
},
blob::{
BlobId, BlobType, DataId,
packer::{Packer, PackerStats},
},
blob::{BlobId, BlobType, DataId, packer::PackerStats, repopacker::RepositoryPacker},
chunker::ChunkIter,
crypto::hasher::hash,
error::{ErrorKind, RusticError, RusticResult},
Expand All @@ -30,16 +27,15 @@ use crate::{
///
/// # Type Parameters
///
/// * `BE` - The backend type.
/// * `I` - The index to read from.
#[derive(Clone)]
pub(crate) struct FileArchiver<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> {
pub(crate) struct FileArchiver<'a, I: ReadGlobalIndex> {
index: &'a I,
data_packer: Packer<BE>,
data_packer: RepositoryPacker,
rabin: Rabin64,
}

impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> {
impl<'a, I: ReadGlobalIndex> FileArchiver<'a, I> {
/// Creates a new `FileArchiver`.
///
/// # Type Parameters
Expand All @@ -53,20 +49,15 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> {
/// * `index` - The index to read from.
/// * `indexer` - The indexer to write to.
/// * `config` - The config file.
///
/// # Errors
///
/// * If sending the message to the raw packer fails.
/// * If converting the data length to u64 fails
pub(crate) fn new(
pub(crate) fn new<BE: DecryptWriteBackend>(
be: BE,
index: &'a I,
indexer: SharedIndexer<BE>,
indexer: SharedIndexer,
config: &ConfigFile,
) -> RusticResult<Self> {
let poly = config.poly()?;

let data_packer = Packer::new(
let data_packer = RepositoryPacker::new_with_default_sizer(
be,
BlobType::Data,
indexer,
Expand Down
22 changes: 7 additions & 15 deletions crates/core/src/archiver/tree_archiver.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use crate::{
backend::{decrypt::DecryptWriteBackend, node::Node},
blob::{
BlobType,
packer::Packer,
repopacker::RepositoryPacker,
tree::{Tree, TreeId},
},
error::{ErrorKind, RusticError, RusticResult},
Expand All @@ -22,24 +22,21 @@ pub(crate) type TreeItem = TreeType<(ParentResult<()>, u64), ParentResult<TreeId
///
/// # Type Parameters
///
/// * `BE` - The backend type.
/// * `I` - The index to read from.
///
// TODO: Add documentation
pub(crate) struct TreeArchiver<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> {
pub(crate) struct TreeArchiver<'a, I: ReadGlobalIndex> {
/// The current tree.
tree: Tree,
/// The stack of trees.
stack: Vec<(PathBuf, Node, ParentResult<TreeId>, Tree)>,
/// The index to read from.
index: &'a I,
/// The packer to write to.
tree_packer: Packer<BE>,
tree_packer: RepositoryPacker,
/// The summary of the snapshot.
summary: SnapshotSummary,
}

impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> {
impl<'a, I: ReadGlobalIndex> TreeArchiver<'a, I> {
/// Creates a new `TreeArchiver`.
///
/// # Type Parameters
Expand All @@ -54,19 +51,14 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> {
/// * `indexer` - The indexer to write to.
/// * `config` - The config file.
/// * `summary` - The summary of the snapshot.
///
/// # Errors
///
/// * If sending the message to the raw packer fails.
/// * If converting the data length to u64 fails
pub(crate) fn new(
pub(crate) fn new<BE: DecryptWriteBackend>(
be: BE,
index: &'a I,
indexer: SharedIndexer<BE>,
indexer: SharedIndexer,
config: &ConfigFile,
summary: SnapshotSummary,
) -> RusticResult<Self> {
let tree_packer = Packer::new(
let tree_packer = RepositoryPacker::new_with_default_sizer(
be,
BlobType::Tree,
indexer,
Expand Down
10 changes: 10 additions & 0 deletions crates/core/src/backend/decrypt.rs
Original file line number Diff line number Diff line change
Expand Up @@ -281,6 +281,16 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static {
}
}

/// Saves the given file without returning the id.
///
/// Convenience wrapper around `save_file` for callers that only care
/// about success or failure, not the id of the stored file.
///
/// # Arguments
///
/// * `file` - The file to save.
///
/// # Errors
///
/// * If saving the file via `save_file` fails.
fn save_file_no_id<F: RepoFile>(&self, file: &F) -> RusticResult<()> {
    // Discard the id returned by `save_file`; only propagate errors.
    let _ = self.save_file(file)?;
    Ok(())
}

/// Saves the given file uncompressed.
///
/// # Arguments
Expand Down
2 changes: 2 additions & 0 deletions crates/core/src/blob.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
pub(crate) mod pack_sizer;
pub(crate) mod packer;
pub(crate) mod repopacker;
pub(crate) mod tree;

use derive_more::Constructor;
Expand Down
131 changes: 131 additions & 0 deletions crates/core/src/blob/pack_sizer.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
use crate::{blob::BlobType, repofile::ConfigFile};

use integer_sqrt::IntegerSquareRoot;

/// The pack sizer is responsible for computing the size of the pack file.
pub trait PackSizer {
    /// Computes the target size of the pack file.
    #[must_use]
    fn pack_size(&self) -> u32;

    /// Returns `true` when the given size is neither too small nor too large.
    ///
    /// # Arguments
    ///
    /// * `size` - The size to check
    #[must_use]
    fn size_ok(&self, size: u32) -> bool {
        !(self.is_too_small(size) || self.is_too_large(size))
    }

    /// Evaluates whether the given size is too small.
    ///
    /// The default implementation considers no size too small.
    ///
    /// # Arguments
    ///
    /// * `size` - The size to check
    #[must_use]
    fn is_too_small(&self, _size: u32) -> bool {
        false
    }

    /// Evaluates whether the given size is too large.
    ///
    /// The default implementation considers no size too large.
    ///
    /// # Arguments
    ///
    /// * `size` - The size to check
    #[must_use]
    fn is_too_large(&self, _size: u32) -> bool {
        false
    }

    /// Registers additional packed bytes with the sizer.
    ///
    /// The default implementation ignores the added size.
    ///
    /// # Arguments
    ///
    /// * `added` - The size to add
    fn add_size(&mut self, _added: u32) {}
}

/// The default pack sizer computes packs depending on a default size, a grow factor and a size limit.
#[derive(Debug, Clone, Copy)]
pub struct DefaultPackSizer {
    /// The default size of a pack file.
    default_size: u32,
    /// The grow factor of a pack file.
    grow_factor: u32,
    /// The size limit of a pack file.
    size_limit: u32,
    /// The accumulated size fed in via `add_size`; the target pack size
    /// scales with its square root.
    /// NOTE(review): this looks like the total size packed so far, not the
    /// size of a single pack file — confirm against callers.
    current_size: u64,
    /// The minimum pack size tolerance in percent before a repack is triggered.
    min_packsize_tolerate_percent: u32,
    /// The maximum pack size tolerance in percent before a repack is triggered.
    max_packsize_tolerate_percent: u32,
}

impl DefaultPackSizer {
    /// Creates a new `DefaultPackSizer` from a config file.
    ///
    /// # Arguments
    ///
    /// * `config` - The config file to read the pack-size parameters from.
    /// * `blob_type` - The blob type this sizer is used for.
    /// * `current_size` - The current size to start from.
    ///
    /// # Returns
    ///
    /// A new `DefaultPackSizer` configured from `config`.
    #[must_use]
    pub fn from_config(config: &ConfigFile, blob_type: BlobType, current_size: u64) -> Self {
        // Tolerance percents are global; size parameters depend on the blob type.
        let (min_packsize_tolerate_percent, max_packsize_tolerate_percent) =
            config.packsize_ok_percents();
        let (default_size, grow_factor, size_limit) = config.packsize(blob_type);

        Self {
            current_size,
            default_size,
            grow_factor,
            size_limit,
            min_packsize_tolerate_percent,
            max_packsize_tolerate_percent,
        }
    }
}

impl PackSizer for DefaultPackSizer {
    /// Computes the current target pack size.
    ///
    /// The target starts at `default_size`, grows with the square root of
    /// `current_size` scaled by `grow_factor`, and is capped at `size_limit`.
    #[allow(clippy::cast_possible_truncation)] // result is clamped to `size_limit: u32` below
    fn pack_size(&self) -> u32 {
        // Do the arithmetic in u64: the previous u32 form
        // `sqrt as u32 * grow_factor + default_size` could overflow (panic in
        // debug, wrap in release) for very large `current_size` before the
        // `.min(size_limit)` clamp was applied. In u64 this cannot overflow:
        // sqrt(u64::MAX) == u32::MAX, and u32::MAX * u32::MAX + u32::MAX < u64::MAX.
        let target = self.current_size.integer_sqrt() * u64::from(self.grow_factor)
            + u64::from(self.default_size);
        target.min(u64::from(self.size_limit)) as u32
    }

    /// A size is too small when it is below `min_packsize_tolerate_percent`
    /// of the current target pack size.
    fn is_too_small(&self, size: u32) -> bool {
        let target_size = self.pack_size();
        // Note: we cast to u64 so that no overflow can occur in the multiplications
        u64::from(size) * 100
            < u64::from(target_size) * u64::from(self.min_packsize_tolerate_percent)
    }

    /// A size is too large when it exceeds `max_packsize_tolerate_percent`
    /// of the current target pack size.
    fn is_too_large(&self, size: u32) -> bool {
        let target_size = self.pack_size();
        // Note: we cast to u64 so that no overflow can occur in the multiplications
        u64::from(size) * 100
            > u64::from(target_size) * u64::from(self.max_packsize_tolerate_percent)
    }

    /// Accumulates `added` into `current_size`, which in turn grows the
    /// target pack size.
    fn add_size(&mut self, added: u32) {
        self.current_size += u64::from(added);
    }
}

/// A pack sizer which uses a fixed pack size
#[derive(Debug, Clone, Copy)]
pub struct FixedPackSizer(pub u32);

impl PackSizer for FixedPackSizer {
    /// Always returns the fixed size this sizer was constructed with.
    fn pack_size(&self) -> u32 {
        let Self(size) = *self;
        size
    }

    /// A pack is too large exactly when it exceeds the fixed size.
    fn is_too_large(&self, size: u32) -> bool {
        let Self(limit) = *self;
        size > limit
    }
}
Loading
Loading