Merged
5 changes: 5 additions & 0 deletions deny.toml
@@ -39,3 +39,8 @@ name = "ring"
[[licenses.clarify.license-files]]
hash = 3171872035
path = "LICENSE"

[sources]
allow-git = [
"https://github.com/n0-computer/iroh",
]
2 changes: 0 additions & 2 deletions src/api/blobs.rs
@@ -144,15 +144,13 @@ impl Blobs
/// clears the protections before.
///
/// Users should rely only on garbage collection for blob deletion.
#[cfg(feature = "fs-store")]
pub(crate) async fn delete_with_opts(&self, options: DeleteOptions) -> RequestResult<()> {
trace!("{options:?}");
self.client.rpc(options).await??;
Ok(())
}

/// See [`Self::delete_with_opts`].
#[cfg(feature = "fs-store")]
pub(crate) async fn delete(
&self,
hashes: impl IntoIterator<Item = impl Into<Hash>>,
3 changes: 2 additions & 1 deletion src/api/blobs/reader.rs
@@ -225,10 +225,11 @@ mod tests {
protocol::ChunkRangesExt,
store::{
fs::{
tests::{create_n0_bao, test_data, INTERESTING_SIZES},
tests::{test_data, INTERESTING_SIZES},
FsStore,
},
mem::MemStore,
util::tests::create_n0_bao,
},
};

3 changes: 2 additions & 1 deletion src/api/remote.rs
@@ -1073,10 +1073,11 @@ mod tests {
protocol::{ChunkRangesExt, ChunkRangesSeq, GetRequest},
store::{
fs::{
tests::{create_n0_bao, test_data, INTERESTING_SIZES},
tests::{test_data, INTERESTING_SIZES},
FsStore,
},
mem::MemStore,
util::tests::create_n0_bao,
},
tests::{add_test_hash_seq, add_test_hash_seq_incomplete},
};
21 changes: 3 additions & 18 deletions src/store/fs.rs
@@ -91,7 +91,6 @@ use bytes::Bytes;
use delete_set::{BaoFilePart, ProtectHandle};
use entity_manager::{EntityManagerState, SpawnArg};
use entry_state::{DataLocation, OutboardLocation};
use gc::run_gc;
use import::{ImportEntry, ImportSource};
use irpc::{channel::mpsc, RpcMessage};
use meta::list_blobs;
@@ -120,6 +119,7 @@ use crate::{
},
util::entity_manager::{self, ActiveEntityState},
},
gc::run_gc,
util::{BaoTreeSender, FixedSize, MemOrFile, ValueOrPoisioned},
IROH_BLOCK_SIZE,
},
@@ -141,7 +141,6 @@ use entry_state::EntryState;
use import::{import_byte_stream, import_bytes, import_path, ImportEntryMsg};
use options::Options;
use tracing::Instrument;
mod gc;

use crate::{
api::{
@@ -1498,10 +1497,7 @@ pub mod tests {
use core::panic;
use std::collections::{HashMap, HashSet};

use bao_tree::{
io::{outboard::PreOrderMemOutboard, round_up_to_chunks_groups},
ChunkRanges,
};
use bao_tree::{io::round_up_to_chunks_groups, ChunkRanges};
use n0_future::{stream, Stream, StreamExt};
use testresult::TestResult;
use walkdir::WalkDir;
@@ -1510,7 +1506,7 @@ pub mod tests {
use crate::{
api::blobs::Bitfield,
store::{
util::{read_checksummed, SliceInfoExt, Tag},
util::{read_checksummed, tests::create_n0_bao, SliceInfoExt, Tag},
IROH_BLOCK_SIZE,
},
};
@@ -1527,17 +1523,6 @@ pub mod tests {
1024 * 1024 * 8, // data file, outboard file
];

/// Create n0 flavoured bao. Note that this can be used to request ranges below a chunk group size,
/// which can not be exported via bao because we don't store hashes below the chunk group level.
pub fn create_n0_bao(data: &[u8], ranges: &ChunkRanges) -> anyhow::Result<(Hash, Vec<u8>)> {
let outboard = PreOrderMemOutboard::create(data, IROH_BLOCK_SIZE);
let mut encoded = Vec::new();
let size = data.len() as u64;
encoded.extend_from_slice(&size.to_le_bytes());
bao_tree::io::sync::encode_ranges_validated(data, &outboard, ranges, &mut encoded)?;
Ok((outboard.root.into(), encoded))
}

pub fn round_up_request(size: u64, ranges: &ChunkRanges) -> ChunkRanges {
let last_chunk = ChunkNum::chunks(size);
let data_range = ChunkRanges::from(..last_chunk);
2 changes: 1 addition & 1 deletion src/store/fs/options.rs
@@ -4,8 +4,8 @@ use std::{
time::Duration,
};

pub use super::gc::{GcConfig, ProtectCb, ProtectOutcome};
use super::{meta::raw_outboard_size, temp_name};
pub use crate::store::gc::{GcConfig, ProtectCb, ProtectOutcome};
use crate::Hash;

/// Options for directories used by the file store.
22 changes: 15 additions & 7 deletions src/store/fs/gc.rs → src/store/gc.rs
@@ -240,20 +240,16 @@ pub async fn run_gc(store: Store, config: GcConfig) {

#[cfg(test)]
mod tests {
use std::{
io::{self},
path::Path,
};
use std::io::{self};

use bao_tree::{io::EncodeError, ChunkNum};
use bao_tree::io::EncodeError;
use range_collections::RangeSet2;
use testresult::TestResult;

use super::*;
use crate::{
api::{blobs::AddBytesOptions, ExportBaoError, RequestError, Store},
hashseq::HashSeq,
store::fs::{options::PathOptions, tests::create_n0_bao},
BlobFormat,
};

@@ -266,13 +262,15 @@
let et = blobs.add_slice("e").temp_tag().await?;
let ft = blobs.add_slice("f").temp_tag().await?;
let gt = blobs.add_slice("g").temp_tag().await?;
let ht = blobs.add_slice("h").with_named_tag("h").await?;
let a = *at.hash();
let b = *bt.hash();
let c = *ct.hash();
let d = *dt.hash();
let e = *et.hash();
let f = *ft.hash();
let g = *gt.hash();
let h = ht.hash;
store.tags().set("c", *ct.hash_and_format()).await?;
let dehs = [d, e].into_iter().collect::<HashSeq>();
let hehs = blobs
@@ -292,6 +290,7 @@
store.tags().set("fg", *fghs.hash_and_format()).await?;
drop(fghs);
drop(bt);
store.tags().delete("h").await?;
let mut live = HashSet::new();
gc_run_once(store, &mut live).await?;
// a is protected because we keep the temp tag
@@ -313,12 +312,19 @@
assert!(store.has(f).await?);
assert!(live.contains(&g));
assert!(store.has(g).await?);
// h is not protected because we deleted the tag before gc ran
assert!(!live.contains(&h));
assert!(!store.has(h).await?);
drop(at);
drop(hehs);
Ok(())
}

async fn gc_file_delete(path: &Path, store: &Store) -> TestResult<()> {
#[cfg(feature = "fs-store")]
async fn gc_file_delete(path: &std::path::Path, store: &Store) -> TestResult<()> {
use bao_tree::ChunkNum;

use crate::store::{fs::options::PathOptions, util::tests::create_n0_bao};
let mut live = HashSet::new();
let options = PathOptions::new(&path.join("db"));
// create a large complete file and check that the data and outboard files are deleted by gc
@@ -366,6 +372,7 @@
}

#[tokio::test]
#[cfg(feature = "fs-store")]
async fn gc_smoke_fs() -> TestResult {
tracing_subscriber::fmt::try_init().ok();
let testdir = tempfile::tempdir()?;
Expand All @@ -385,6 +392,7 @@ mod tests {
}

#[tokio::test]
#[cfg(feature = "fs-store")]
async fn gc_check_deletion_fs() -> TestResult {
tracing_subscriber::fmt::try_init().ok();
let testdir = tempfile::tempdir()?;
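The new `h` blob in the smoke test covers the case where a named tag is deleted before GC runs: with its only protection gone, the blob is collected. Roughly, the flow the test exercises looks like the sketch below, reusing only calls that appear in the test (it assumes the `store` and `blobs` bindings from the test's setup are in scope):

// Sketch of the tag lifecycle exercised by the new `h` case above.
let ht = blobs.add_slice("h").with_named_tag("h").await?;
let h = ht.hash;
// Deleting the only tag before GC runs removes the blob's protection...
store.tags().delete("h").await?;
let mut live = HashSet::new();
gc_run_once(store, &mut live).await?;
// ...so after one GC pass the blob is neither live nor present.
assert!(!live.contains(&h));
assert!(!store.has(h).await?);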
17 changes: 15 additions & 2 deletions src/store/mem.rs
@@ -58,6 +58,7 @@ use crate::{
},
protocol::ChunkRangesExt,
store::{
gc::{run_gc, GcConfig},
util::{SizeInfo, SparseMemFile, Tag},
IROH_BLOCK_SIZE,
},
Expand All @@ -66,7 +67,9 @@ use crate::{
};

#[derive(Debug, Default)]
pub struct Options {}
pub struct Options {
pub gc_config: Option<GcConfig>,
}

#[derive(Debug, Clone)]
#[repr(transparent)]
@@ -113,6 +116,10 @@ impl MemStore {
}

pub fn new() -> Self {
Self::new_with_opts(Options::default())
}

pub fn new_with_opts(opts: Options) -> Self {
let (sender, receiver) = tokio::sync::mpsc::channel(32);
tokio::spawn(
Actor {
@@ -130,7 +137,13 @@
}
.run(),
);
Self::from_sender(sender.into())

let store = Self::from_sender(sender.into());
if let Some(gc_config) = opts.gc_config {
tokio::spawn(run_gc(store.deref().clone(), gc_config));
}

store
}
}

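With the new `gc_config` field, an in-memory store can opt into the same GC loop the fs store uses; `new_with_opts` spawns `run_gc` with a clone of the store when a config is given. A minimal sketch of enabling it follows; the `GcConfig` field names (`interval`, `add_protected`) and the import via the `fs::options` re-export are assumptions, since `store::gc` itself is crate-private:

use std::time::Duration;

use iroh_blobs::store::{
    fs::options::GcConfig, // assumed public re-export path
    mem::{MemStore, Options},
};

// Hypothetical: build a MemStore that runs a GC pass every 60 seconds.
fn mem_store_with_gc() -> MemStore {
    MemStore::new_with_opts(Options {
        gc_config: Some(GcConfig {
            interval: Duration::from_secs(60), // assumed field name
            add_protected: None,               // assumed field: optional protect callback
        }),
    })
}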
1 change: 1 addition & 0 deletions src/store/mod.rs
@@ -7,6 +7,7 @@
use bao_tree::BlockSize;
#[cfg(feature = "fs-store")]
pub mod fs;
mod gc;
pub mod mem;
pub mod readonly_mem;
mod test;
19 changes: 19 additions & 0 deletions src/store/util.rs
@@ -404,3 +404,22 @@ impl bao_tree::io::mixed::Sender for BaoTreeSender {
self.0.send(item).await
}
}

#[cfg(test)]
#[cfg(feature = "fs-store")]
pub mod tests {
use bao_tree::{io::outboard::PreOrderMemOutboard, ChunkRanges};

use crate::{hash::Hash, store::IROH_BLOCK_SIZE};

/// Create n0 flavoured bao. Note that this can be used to request ranges below a chunk group size,
/// which can not be exported via bao because we don't store hashes below the chunk group level.
pub fn create_n0_bao(data: &[u8], ranges: &ChunkRanges) -> anyhow::Result<(Hash, Vec<u8>)> {
let outboard = PreOrderMemOutboard::create(data, IROH_BLOCK_SIZE);
let mut encoded = Vec::new();
let size = data.len() as u64;
encoded.extend_from_slice(&size.to_le_bytes());
bao_tree::io::sync::encode_ranges_validated(data, &outboard, ranges, &mut encoded)?;
Ok((outboard.root.into(), encoded))
}
}
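As before, the helper writes an 8-byte little-endian size prefix followed by the validated bao encoding of the requested ranges. A small usage sketch of the relocated helper (test-only, crate-internal; the range constructor mirrors the `ChunkRanges::from(..)` pattern used in the fs tests):

use bao_tree::{ChunkNum, ChunkRanges};

use crate::store::util::tests::create_n0_bao; // the test-only helper defined above

// Encode the first chunk group of some data and check the size prefix.
fn create_n0_bao_demo() -> anyhow::Result<()> {
    let data = vec![0xab_u8; 64 * 1024];
    let ranges = ChunkRanges::from(..ChunkNum(16)); // first 16 chunks, i.e. one chunk group
    let (_hash, encoded) = create_n0_bao(&data, &ranges)?;
    // The encoding starts with the content size as a little-endian u64.
    assert_eq!(&encoded[..8], &(data.len() as u64).to_le_bytes());
    Ok(())
}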
4 changes: 2 additions & 2 deletions src/tests.rs
@@ -19,11 +19,11 @@ use crate::{
provider::events::{AbortReason, EventMask, EventSender, ProviderMessage, RequestUpdate},
store::{
fs::{
tests::{create_n0_bao, test_data, INTERESTING_SIZES},
tests::{test_data, INTERESTING_SIZES},
FsStore,
},
mem::MemStore,
util::observer::Combine,
util::{observer::Combine, tests::create_n0_bao},
},
util::sink::Drain,
BlobFormat, Hash, HashAndFormat,