2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -13,7 +13,7 @@ env:
RUSTFLAGS: -Dwarnings
RUSTDOCFLAGS: -Dwarnings
MSRV: "1.75"
RS_EXAMPLES_LIST: "content-discovery,iroh-ipfs,dumbpipe-web,iroh-pkarr-node-discovery"
RS_EXAMPLES_LIST: "content-discovery,doc-photos,dumbpipe-web,extism/host,extism/iroh-extism-host-functions,extism/plugin,iroh-automerge,iroh-gateway,iroh-ipfs,iroh-pkarr-naming-system,iroh-pkarr-node-discovery,iroh-s3-bao-store"
GO_EXAMPLES_LIST: "dall_e_worker"

jobs:
2 changes: 1 addition & 1 deletion doc-photos/src/node.rs
@@ -191,7 +191,7 @@ fn make_rpc_endpoint(
)?;
server_config.concurrent_connections(MAX_RPC_CONNECTIONS);

-let rpc_quinn_endpoint = quinn::Endpoint::server(server_config.clone(), rpc_addr.into())?;
+let rpc_quinn_endpoint = quinn::Endpoint::server(server_config.clone(), rpc_addr)?;
let rpc_endpoint =
QuinnServerEndpoint::<ProviderRequest, ProviderResponse>::new(rpc_quinn_endpoint)?;
Ok(rpc_endpoint)
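
Aside (not part of the diff): `quinn::Endpoint::server` takes a `SocketAddr` directly, so when `rpc_addr` is already a `SocketAddr` the `.into()` conversion is redundant. A minimal sketch under that assumption, with a hypothetical `bind_rpc` helper:

```rust
use std::net::SocketAddr;

// Hypothetical helper: bind a QUIC server endpoint for RPC.
// Endpoint::server(ServerConfig, SocketAddr) -> io::Result<Endpoint>,
// so a SocketAddr value can be passed without .into().
fn bind_rpc(
    server_config: quinn::ServerConfig,
    rpc_addr: SocketAddr,
) -> std::io::Result<quinn::Endpoint> {
    quinn::Endpoint::server(server_config, rpc_addr)
}
```
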
2 changes: 1 addition & 1 deletion doc-photos/src/routes.rs
@@ -49,7 +49,7 @@ impl AppState {
let provider_details = ProviderInfo {
author_id: None,
peer_id: provider_peer_id.to_string(),
-port: config.provider_port.clone(),
+port: config.provider_port,
// Use the empty string as a sentinel value, real value is
// set in the "provider" api handler
auth_token: "".to_string(),
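
Aside (not part of the diff), assuming `provider_port` is a `Copy` type such as `u16`: clippy's `clone_on_copy` lint flags `.clone()` on such fields because plain field access already copies the value. A small sketch with a hypothetical config struct:

```rust
// Hypothetical config: if provider_port is u16 (a Copy type),
// reading the field copies it and .clone() is redundant.
struct Config {
    provider_port: u16,
}

fn port_of(config: &Config) -> u16 {
    config.provider_port // no .clone() needed for Copy types
}
```
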
2 changes: 1 addition & 1 deletion extism/plugin/src/lib.rs
@@ -8,6 +8,6 @@ extern "ExtismHost" {
#[plugin_fn]
pub fn print_hai_and_get_ticket(ticket: String) -> FnResult<Vec<u8>> {
println!("Hai from a wasm plugin!");
-let v = unsafe { iroh_blob_get_ticket(ticket.into()) }?;
+let v = unsafe { iroh_blob_get_ticket(ticket) }?;
Ok(v)
}
1 change: 1 addition & 0 deletions iroh-automerge/src/main.rs
@@ -46,6 +46,7 @@ impl Peer {
self.storage = self.auto_commit.save();
}

+#[allow(unused)]
async fn load(data: &[u8]) -> Result<Self> {
let ep = inet::MagicEndpoint::builder()
.alpns(vec![ALPN.to_vec()])
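
Aside (not part of the diff): the ci.yml hunk above sets `RUSTFLAGS: -Dwarnings`, so if these examples are built in CI, an item that is never referenced would fail the build with a denied `dead_code`/`unused` warning unless it is allowed explicitly. A minimal sketch with a hypothetical unused function:

```rust
// With RUSTFLAGS="-Dwarnings", an unreferenced function turns the
// unused/dead_code warning into a hard error; the attribute opts this
// one item out while it is not called yet.
#[allow(unused)]
fn not_called_yet() {}

fn main() {}
```
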
8 changes: 5 additions & 3 deletions iroh-gateway/src/main.rs
@@ -85,6 +85,8 @@ impl Deref for Gateway {
}
}

+type MimeCache = LruCache<(Hash, Option<String>), (u64, Mime)>;

#[derive(derive_more::Debug)]
struct Inner {
/// Endpoint to connect to nodes
@@ -95,7 +97,7 @@ struct Inner {
#[debug("MimeClassifier")]
mime_classifier: MimeClassifier,
/// Cache of hashes to mime types
-mime_cache: Mutex<LruCache<(Hash, Option<String>), (u64, Mime)>>,
+mime_cache: Mutex<MimeCache>,
/// Cache of hashes to collections
collection_cache: Mutex<LruCache<Hash, Collection>>,
}
@@ -183,7 +185,7 @@ async fn get_collection(
tracing::debug!("hash {hash:?} for name {name:?} not found in headers");
continue;
};
-let mime = get_mime_from_ext_and_data(ext.as_deref(), &data, &gateway.mime_classifier);
+let mime = get_mime_from_ext_and_data(ext.as_deref(), data, &gateway.mime_classifier);
let key = (*hash, ext);
cache.put(key, (*size, mime));
}
@@ -256,7 +258,7 @@ async fn get_mime_type(
name: Option<&str>,
connection: &quinn::Connection,
) -> anyhow::Result<(u64, Mime)> {
-let ext = name.map(|n| get_extension(n)).flatten();
+let ext = name.and_then(get_extension);
let key = (*hash, ext.clone());
if let Some(sm) = gateway.mime_cache.lock().unwrap().get(&key) {
return Ok(sm.clone());
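
Aside (not part of the diff): the `get_mime_type` change is the standard clippy `map_flatten` rewrite; for an `Option`, `opt.map(f).flatten()` and `opt.and_then(f)` are equivalent when `f` returns an `Option`. A self-contained sketch with a hypothetical `get_extension`:

```rust
// Hypothetical stand-in for the gateway's get_extension helper.
fn get_extension(name: &str) -> Option<String> {
    name.rsplit_once('.').map(|(_, ext)| ext.to_string())
}

fn main() {
    let name: Option<&str> = Some("photo.jpeg");
    // Both expressions produce the same value:
    let via_map_flatten = name.map(get_extension).flatten();
    let via_and_then = name.and_then(get_extension);
    assert_eq!(via_map_flatten, via_and_then);
    assert_eq!(via_and_then, Some("jpeg".to_string()));
}
```
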
4 changes: 2 additions & 2 deletions iroh-s3-bao-store/Cargo.toml
@@ -12,7 +12,7 @@ rust-version = "1.75"

[dependencies]
anyhow = "1.0.75"
-bao-tree = "0.13" # needs to be kept in sync with iroh-bytes's bao-tree
+bao-tree = "0.11"
base32 = "0.4.0"
bytes = "1.5.0"
clap = { version = "4.4.10", features = ["derive"] }
@@ -21,7 +21,7 @@ flume = "0.11.0"
futures = "0.3.29"
hex = "0.4.3"
indicatif = "0.17.7"
-iroh = "0.14"
+iroh = "0.13"
iroh-io = { version = "0.4.0", features = ["x-http"] }
num_cpus = "1.16.0"
rand = "0.8.5"
22 changes: 7 additions & 15 deletions iroh-s3-bao-store/src/lib.rs
@@ -4,7 +4,7 @@ use std::sync::{Arc, Mutex};

use bao_tree::io::fsm::Outboard;
use bao_tree::io::outboard::PreOrderMemOutboard;
-use bao_tree::{BaoTree, ChunkNum};
+use bao_tree::{BaoTree, ByteNum};
use bytes::Bytes;
use iroh::bytes::store::bao_tree::blake3;
use iroh::bytes::store::{BaoBlobSize, MapEntry};
@@ -26,13 +26,9 @@ impl S3Store {
let size = data.as_ref().len() as u64;
let (mut outboard, hash) = bao_tree::io::outboard(&data, IROH_BLOCK_SIZE);
outboard.splice(0..8, []);
-let tree = BaoTree::new(size, IROH_BLOCK_SIZE);
-let outboard = PreOrderMemOutboard {
-root: hash,
-tree,
-data: outboard.into(),
-}
-.map_err(|e| anyhow::anyhow!("outboard creation fail {}", e))?;
+let tree = BaoTree::new(ByteNum(size), IROH_BLOCK_SIZE);
+let outboard = PreOrderMemOutboard::new(hash, tree, outboard.into())
+.map_err(|e| anyhow::anyhow!("outboard creation fail {}", e))?;
let mut state = self.0.entries.lock().unwrap();
state.insert(
hash,
@@ -47,13 +43,9 @@
let size = data.len() as u64;
let (mut outboard, hash) = bao_tree::io::outboard(data, IROH_BLOCK_SIZE);
outboard.splice(0..8, []);
-let tree = BaoTree::new(size, IROH_BLOCK_SIZE);
-let outboard = PreOrderMemOutboard {
-root: hash,
-tree,
-data: outboard.into(),
-}
-.map_err(|e| anyhow::anyhow!("outboard creation fail {}", e))?;
+let tree = BaoTree::new(ByteNum(size), IROH_BLOCK_SIZE);
+let outboard = PreOrderMemOutboard::new(hash, tree, outboard.into())
+.map_err(|e| anyhow::anyhow!("outboard creation fail {}", e))?;
let mut state = self.0.entries.lock().unwrap();
state.insert(
hash,
6 changes: 2 additions & 4 deletions iroh-s3-bao-store/src/main.rs
@@ -264,7 +264,6 @@ async fn serve_s3(args: ServeS3Args) -> anyhow::Result<()> {
for path in bucket.contents.iter().map(|c| c.key.clone()) {
let url = root.join(&path)?;
let hash = db.import_url(url).await?;
-let hash = iroh::bytes::Hash::from(hash);
let name = format!("{prefix}/{path}");
hashes.push((name, hash));
}
@@ -280,7 +279,7 @@ async fn serve_s3(args: ServeS3Args) -> anyhow::Result<()> {

serve_db(db, args.common.magic_port, |addr| {
if let Some(hash) = last_hash {
-let ticket = BlobTicket::new(addr.clone(), hash.into(), BlobFormat::HashSeq)?;
+let ticket = BlobTicket::new(addr.clone(), hash, BlobFormat::HashSeq)?;
println!("collection: {}", ticket);
}
Ok(())
@@ -295,7 +294,6 @@ async fn serve_urls(args: ImportS3Args) -> anyhow::Result<()> {
for url in args.url {
let hash = db.import_url(url.clone()).await?;
println!("added {}, {}", url, print_hash(&hash, args.common.format));
-let hash = iroh::bytes::Hash::from(hash);
let name = url.to_string().replace('/', "_");
hashes.push((name, hash));
}
@@ -315,7 +313,7 @@
println!("{} {}", name, ticket);
}
if let Some(hash) = last_hash {
-let ticket = BlobTicket::new(addr.clone(), hash.into(), BlobFormat::HashSeq)?;
+let ticket = BlobTicket::new(addr.clone(), hash, BlobFormat::HashSeq)?;
println!("collection: {}", ticket);
}
Ok(())