diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index dd26294c..c633a323 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -13,7 +13,7 @@ env:
   RUSTFLAGS: -Dwarnings
   RUSTDOCFLAGS: -Dwarnings
   MSRV: "1.75"
-  RS_EXAMPLES_LIST: "content-discovery,iroh-ipfs,dumbpipe-web,iroh-pkarr-node-discovery"
+  RS_EXAMPLES_LIST: "content-discovery,doc-photos,dumbpipe-web,extism/host,extism/iroh-extism-host-functions,extism/plugin,iroh-automerge,iroh-gateway,iroh-ipfs,iroh-pkarr-naming-system,iroh-pkarr-node-discovery,iroh-s3-bao-store"
   GO_EXAMPLES_LIST: "dall_e_worker"
 
 jobs:
diff --git a/doc-photos/src/node.rs b/doc-photos/src/node.rs
index b716a8e6..ca4ea3d4 100644
--- a/doc-photos/src/node.rs
+++ b/doc-photos/src/node.rs
@@ -191,7 +191,7 @@ fn make_rpc_endpoint(
     )?;
     server_config.concurrent_connections(MAX_RPC_CONNECTIONS);
 
-    let rpc_quinn_endpoint = quinn::Endpoint::server(server_config.clone(), rpc_addr.into())?;
+    let rpc_quinn_endpoint = quinn::Endpoint::server(server_config.clone(), rpc_addr)?;
     let rpc_endpoint = QuinnServerEndpoint::<ProviderRequest, ProviderResponse>::new(rpc_quinn_endpoint)?;
 
     Ok(rpc_endpoint)
diff --git a/doc-photos/src/routes.rs b/doc-photos/src/routes.rs
index e41df687..e36c5c5a 100644
--- a/doc-photos/src/routes.rs
+++ b/doc-photos/src/routes.rs
@@ -49,7 +49,7 @@ impl AppState {
         let provider_details = ProviderInfo {
             author_id: None,
             peer_id: provider_peer_id.to_string(),
-            port: config.provider_port.clone(),
+            port: config.provider_port,
             // Use the empty string as a sentinel value, real value is
             // set in the "provider" api handler
             auth_token: "".to_string(),
diff --git a/extism/plugin/src/lib.rs b/extism/plugin/src/lib.rs
index 249feb8e..e68223e2 100644
--- a/extism/plugin/src/lib.rs
+++ b/extism/plugin/src/lib.rs
@@ -8,6 +8,6 @@ extern "ExtismHost" {
 #[plugin_fn]
 pub fn print_hai_and_get_ticket(ticket: String) -> FnResult<Vec<u8>> {
     println!("Hai from a wasm plugin!");
-    let v = unsafe { iroh_blob_get_ticket(ticket.into()) }?;
+    let v = unsafe { iroh_blob_get_ticket(ticket) }?;
     Ok(v)
 }
diff --git a/iroh-automerge/src/main.rs b/iroh-automerge/src/main.rs
index d35c2298..3815a1af 100644
--- a/iroh-automerge/src/main.rs
+++ b/iroh-automerge/src/main.rs
@@ -46,6 +46,7 @@ impl Peer {
         self.storage = self.auto_commit.save();
     }
 
+    #[allow(unused)]
     async fn load(data: &[u8]) -> Result<Self> {
         let ep = inet::MagicEndpoint::builder()
             .alpns(vec![ALPN.to_vec()])
diff --git a/iroh-gateway/src/main.rs b/iroh-gateway/src/main.rs
index eb18b706..9b2440d0 100644
--- a/iroh-gateway/src/main.rs
+++ b/iroh-gateway/src/main.rs
@@ -85,6 +85,8 @@ impl Deref for Gateway {
     }
 }
 
+type MimeCache = LruCache<(Hash, Option<String>), (u64, Mime)>;
+
 #[derive(derive_more::Debug)]
 struct Inner {
     /// Endpoint to connect to nodes
@@ -95,7 +97,7 @@ struct Inner {
     #[debug("MimeClassifier")]
     mime_classifier: MimeClassifier,
     /// Cache of hashes to mime types
-    mime_cache: Mutex<LruCache<(Hash, Option<String>), (u64, Mime)>>,
+    mime_cache: Mutex<MimeCache>,
     /// Cache of hashes to collections
     collection_cache: Mutex<LruCache<Hash, Collection>>,
 }
@@ -183,7 +185,7 @@ async fn get_collection(
             tracing::debug!("hash {hash:?} for name {name:?} not found in headers");
             continue;
         };
-        let mime = get_mime_from_ext_and_data(ext.as_deref(), &data, &gateway.mime_classifier);
+        let mime = get_mime_from_ext_and_data(ext.as_deref(), data, &gateway.mime_classifier);
         let key = (*hash, ext);
         cache.put(key, (*size, mime));
     }
@@ -256,7 +258,7 @@ async fn get_mime_type(
     name: Option<&str>,
     connection: &quinn::Connection,
 ) -> anyhow::Result<(u64, Mime)> {
-    let ext = name.map(|n| get_extension(n)).flatten();
+    let ext = name.and_then(get_extension);
     let key = (*hash, ext.clone());
     if let Some(sm) = gateway.mime_cache.lock().unwrap().get(&key) {
         return Ok(sm.clone());
diff --git a/iroh-s3-bao-store/Cargo.toml b/iroh-s3-bao-store/Cargo.toml
index 1bce71ca..257baaa7 100644
--- a/iroh-s3-bao-store/Cargo.toml
+++ b/iroh-s3-bao-store/Cargo.toml
@@ -12,7 +12,7 @@ rust-version = "1.75"
 
 [dependencies]
 anyhow = "1.0.75"
-bao-tree = "0.13" # needs to be kept in sync with iroh-bytes's bao-tree
+bao-tree = "0.11"
 base32 = "0.4.0"
 bytes = "1.5.0"
 clap = { version = "4.4.10", features = ["derive"] }
@@ -21,7 +21,7 @@ flume = "0.11.0"
 futures = "0.3.29"
 hex = "0.4.3"
 indicatif = "0.17.7"
-iroh = "0.14"
+iroh = "0.13"
 iroh-io = { version = "0.4.0", features = ["x-http"] }
 num_cpus = "1.16.0"
 rand = "0.8.5"
diff --git a/iroh-s3-bao-store/src/lib.rs b/iroh-s3-bao-store/src/lib.rs
index 28220d99..3b20dcf5 100644
--- a/iroh-s3-bao-store/src/lib.rs
+++ b/iroh-s3-bao-store/src/lib.rs
@@ -4,7 +4,7 @@ use std::sync::{Arc, Mutex};
 
 use bao_tree::io::fsm::Outboard;
 use bao_tree::io::outboard::PreOrderMemOutboard;
-use bao_tree::{BaoTree, ChunkNum};
+use bao_tree::{BaoTree, ByteNum};
 use bytes::Bytes;
 use iroh::bytes::store::bao_tree::blake3;
 use iroh::bytes::store::{BaoBlobSize, MapEntry};
@@ -26,13 +26,9 @@ impl S3Store {
         let size = data.as_ref().len() as u64;
         let (mut outboard, hash) = bao_tree::io::outboard(&data, IROH_BLOCK_SIZE);
         outboard.splice(0..8, []);
-        let tree = BaoTree::new(size, IROH_BLOCK_SIZE);
-        let outboard = PreOrderMemOutboard {
-            root: hash,
-            tree,
-            data: outboard.into(),
-        }
-        .map_err(|e| anyhow::anyhow!("outboard creation fail {}", e))?;
+        let tree = BaoTree::new(ByteNum(size), IROH_BLOCK_SIZE);
+        let outboard = PreOrderMemOutboard::new(hash, tree, outboard.into())
+            .map_err(|e| anyhow::anyhow!("outboard creation fail {}", e))?;
         let mut state = self.0.entries.lock().unwrap();
         state.insert(
             hash,
@@ -47,13 +43,9 @@
         let size = data.len() as u64;
         let (mut outboard, hash) = bao_tree::io::outboard(data, IROH_BLOCK_SIZE);
         outboard.splice(0..8, []);
-        let tree = BaoTree::new(size, IROH_BLOCK_SIZE);
-        let outboard = PreOrderMemOutboard {
-            root: hash,
-            tree,
-            data: outboard.into(),
-        }
-        .map_err(|e| anyhow::anyhow!("outboard creation fail {}", e))?;
+        let tree = BaoTree::new(ByteNum(size), IROH_BLOCK_SIZE);
+        let outboard = PreOrderMemOutboard::new(hash, tree, outboard.into())
+            .map_err(|e| anyhow::anyhow!("outboard creation fail {}", e))?;
         let mut state = self.0.entries.lock().unwrap();
         state.insert(
             hash,
diff --git a/iroh-s3-bao-store/src/main.rs b/iroh-s3-bao-store/src/main.rs
index 4bf494e9..e9ee7553 100644
--- a/iroh-s3-bao-store/src/main.rs
+++ b/iroh-s3-bao-store/src/main.rs
@@ -264,7 +264,6 @@ async fn serve_s3(args: ServeS3Args) -> anyhow::Result<()> {
     for path in bucket.contents.iter().map(|c| c.key.clone()) {
         let url = root.join(&path)?;
         let hash = db.import_url(url).await?;
-        let hash = iroh::bytes::Hash::from(hash);
         let name = format!("{prefix}/{path}");
         hashes.push((name, hash));
     }
@@ -280,7 +279,7 @@
 
     serve_db(db, args.common.magic_port, |addr| {
         if let Some(hash) = last_hash {
-            let ticket = BlobTicket::new(addr.clone(), hash.into(), BlobFormat::HashSeq)?;
+            let ticket = BlobTicket::new(addr.clone(), hash, BlobFormat::HashSeq)?;
             println!("collection: {}", ticket);
         }
         Ok(())
@@ -295,7 +294,6 @@ async fn serve_urls(args: ImportS3Args) -> anyhow::Result<()> {
     for url in args.url {
        let hash = db.import_url(url.clone()).await?;
         println!("added {}, {}", url, print_hash(&hash, args.common.format));
-        let hash = iroh::bytes::Hash::from(hash);
         let name = url.to_string().replace('/', "_");
         hashes.push((name, hash));
     }
@@ -315,7 +313,7 @@
             println!("{} {}", name, ticket);
         }
         if let Some(hash) = last_hash {
-            let ticket = BlobTicket::new(addr.clone(), hash.into(), BlobFormat::HashSeq)?;
+            let ticket = BlobTicket::new(addr.clone(), hash, BlobFormat::HashSeq)?;
             println!("collection: {}", ticket);
         }
         Ok(())