Merged
Changes from all commits
28 commits
612c796
kns_indexer: make newHeads subscription more robust
barraguda Aug 21, 2024
8226b0a
kns_indexer: unsubscribe newHeads after pending cleared
barraguda Aug 21, 2024
311842f
kns_indexer: do not unsubscribe if sub not open
barraguda Aug 21, 2024
2697f0a
kns: wip
barraguda Aug 21, 2024
f8a7e8f
kns: sub for notes
nick1udwig Aug 21, 2024
fe50f31
add wip
nick1udwig Aug 21, 2024
9c20cf5
Format Rust code using rustfmt
github-actions[bot] Aug 21, 2024
e1106b4
eth: borrow fixes unsubscribe
barraguda Aug 21, 2024
db9eda4
add some prints (and unsub in another case)
nick1udwig Aug 21, 2024
79c8abf
Format Rust code using rustfmt
github-actions[bot] Aug 21, 2024
f0a66d0
kns_indexer: replace newHeads sub with ticker
barraguda Aug 22, 2024
defe765
app_store UI: hotfix unpublish
barraguda Aug 22, 2024
d72462d
app_store: delay kns queries by 5s to allow kns time to process block
nick1udwig Aug 22, 2024
bb1906d
Merge pull request #498 from kinode-dao/bp/unpublishfix
nick1udwig Aug 22, 2024
fd3564c
Merge branch 'develop' into bp/kns-timer
nick1udwig Aug 22, 2024
bb21e55
Merge pull request #497 from kinode-dao/bp/kns-timer
nick1udwig Aug 22, 2024
881d4a6
bump to version 0.9.1
nick1udwig Aug 22, 2024
ac078d7
remove some prints & fix a bug
nick1udwig Aug 22, 2024
02c2dfe
Merge pull request #494 from kinode-dao/develop
nick1udwig Aug 22, 2024
290ae3d
eth: remove dead code; add error print
nick1udwig Aug 22, 2024
e93eaf3
kns/app_store: change delay to 1s
nick1udwig Aug 22, 2024
b2479fa
kns: change print verbosity
nick1udwig Aug 22, 2024
5a63d1a
use forked alloy with working unsubscribe
nick1udwig Aug 22, 2024
7678e1e
fix some bugs
nick1udwig Aug 22, 2024
aa56bad
Merge pull request #500 from kinode-dao/hf/final-0.9.1-rc-tweaks
nick1udwig Aug 22, 2024
5f635c7
app_store: retry once on RPCError
nick1udwig Aug 22, 2024
b5f1759
app_store: clean up compiler warnings
nick1udwig Aug 22, 2024
6563147
Merge pull request #501 from kinode-dao/hf/app_store-retry-once-on-rp…
nick1udwig Aug 22, 2024
342 changes: 293 additions & 49 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "kinode_lib"
authors = ["KinodeDAO"]
version = "0.9.0"
version = "0.9.1"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"
4 changes: 2 additions & 2 deletions kinode/Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "kinode"
authors = ["KinodeDAO"]
version = "0.9.0"
version = "0.9.1"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"
@@ -26,7 +26,7 @@ simulation-mode = []

[dependencies]
aes-gcm = "0.10.3"
alloy = { version = "0.1.3", features = [
alloy = { git = "https://github.com/kinode-dao/alloy.git", rev = "e672f3e", features = [
"consensus",
"contract",
"json-rpc",
9 changes: 1 addition & 8 deletions kinode/packages/app_store/app_store/src/http_api.rs
@@ -8,7 +8,7 @@ use crate::{

use kinode_process_lib::{
http::{self, server, Method, StatusCode},
println, Address, LazyLoadBlob, PackageId, Request,
Address, LazyLoadBlob, PackageId, Request,
};
use kinode_process_lib::{SendError, SendErrorKind};
use serde_json::json;
@@ -226,13 +226,6 @@ fn get_package_id(url_params: &HashMap<String, String>) -> anyhow::Result<Packag
Ok(id)
}

fn get_version_hash(url_params: &HashMap<String, String>) -> anyhow::Result<String> {
let Some(version_hash) = url_params.get("version_hash") else {
return Err(anyhow::anyhow!("Missing version_hash"));
};
Ok(version_hash.to_string())
}

fn gen_package_info(id: &PackageId, state: &PackageState) -> serde_json::Value {
// installed package info
json!({
2 changes: 1 addition & 1 deletion kinode/packages/app_store/app_store/src/state.rs
@@ -1,5 +1,5 @@
use crate::{utils, VFS_TIMEOUT};
use kinode_process_lib::{kimap, println, vfs, PackageId};
use kinode_process_lib::{kimap, vfs, PackageId};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};

5 changes: 1 addition & 4 deletions kinode/packages/app_store/app_store/src/utils.rs
@@ -11,10 +11,7 @@ use {
get_blob, kernel_types as kt, println, vfs, Address, LazyLoadBlob, PackageId, ProcessId,
Request,
},
std::{
collections::{HashMap, HashSet},
str::FromStr,
},
std::collections::{HashMap, HashSet},
};

// quite annoyingly, we must convert from our gen'd version of PackageId
58 changes: 37 additions & 21 deletions kinode/packages/app_store/chain/src/lib.rs
@@ -12,7 +12,7 @@ use alloy_sol_types::SolEvent;
use kinode::process::chain::ChainResponses;
use kinode_process_lib::{
await_message, call_init, eth, get_blob, get_state, http, kernel_types as kt, kimap,
print_to_terminal, println, Address, Message, PackageId, Request, Response,
print_to_terminal, println, timer, Address, Message, PackageId, Request, Response,
};
use std::{
collections::{HashMap, HashSet},
@@ -40,10 +40,7 @@ const KIMAP_ADDRESS: &'static str = kimap::KIMAP_ADDRESS; // optimism
#[cfg(feature = "simulation-mode")]
const KIMAP_ADDRESS: &str = "0xcA92476B2483aBD5D82AEBF0b56701Bb2e9be658";

#[cfg(not(feature = "simulation-mode"))]
const KIMAP_FIRST_BLOCK: u64 = kimap::KIMAP_FIRST_BLOCK;
#[cfg(feature = "simulation-mode")]
const KIMAP_FIRST_BLOCK: u64 = 1;
const DELAY_MS: u64 = 1_000; // 1s

#[derive(Debug, Serialize, Deserialize)]
pub struct State {
@@ -106,7 +103,18 @@ fn init(our: Address) {
}

fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow::Result<()> {
if message.is_request() {
if !message.is_request() {
if message.is_local(&our) && message.source().process == "timer:distro:sys" {
// handling of ETH RPC subscriptions delayed by DELAY_MS
// to allow kns to have a chance to process block: handle now
let Some(context) = message.context() else {
return Err(anyhow::anyhow!("timer response missing context"));
};
let log = serde_json::from_slice(context)?;
handle_eth_log(our, state, log)?;
return Ok(());
}
} else {
let req: Req = serde_json::from_slice(message.body())?;
match req {
Req::Eth(eth_result) => {
@@ -118,8 +126,10 @@ fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow
}

if let Ok(eth::EthSub { result, .. }) = eth_result {
if let eth::SubscriptionResult::Log(log) = result {
handle_eth_log(our, state, *log)?;
if let eth::SubscriptionResult::Log(ref log) = result {
// delay handling of ETH RPC subscriptions by DELAY_MS
// to allow kns to have a chance to process block
timer::set_timer(DELAY_MS, Some(serde_json::to_vec(log)?));
}
} else {
// attempt to resubscribe
@@ -130,21 +140,15 @@ fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow
}
}
Req::Request(chains) => {
handle_local_request(our, state, chains)?;
handle_local_request(state, chains)?;
}
}
} else {
return Err(anyhow::anyhow!("not a request"));
}

Ok(())
}

fn handle_local_request(
our: &Address,
state: &mut State,
req: ChainRequests,
) -> anyhow::Result<()> {
fn handle_local_request(state: &mut State, req: ChainRequests) -> anyhow::Result<()> {
match req {
ChainRequests::GetApp(package_id) => {
let onchain_app = state
@@ -244,7 +248,6 @@ fn handle_eth_log(our: &Address, state: &mut State, log: eth::Log) -> anyhow::Re
// the app store exclusively looks for ~metadata-uri postings: if one is
// observed, we then *query* for ~metadata-hash to verify the content
// at the URI.
//

let metadata_uri = String::from_utf8_lossy(&note.data).to_string();
let is_our_package = &package_id.publisher() == &our.node();
@@ -254,7 +257,21 @@ fn handle_eth_log(our: &Address, state: &mut State, log: eth::Log) -> anyhow::Re
let hash_note = format!("~metadata-hash.{}", note.parent_path);

// owner can change which we don't track (yet?) so don't save, need to get when desired
let (tba, _owner, data) = state.kimap.get(&hash_note).map_err(|e| {
let (tba, _owner, data) = match state.kimap.get(&hash_note) {
Ok(gr) => Ok(gr),
Err(e) => match e {
eth::EthError::RpcError(_) => {
// retry on RpcError after DELAY_MS sleep
// sleep here rather than with, e.g., a message to
// `timer:distro:sys` so that events are processed in
// order of receipt
std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
state.kimap.get(&hash_note)
}
_ => Err(e),
},
}
.map_err(|e| {
println!("Couldn't find {hash_note}: {e:?}");
anyhow::anyhow!("metadata hash mismatch")
})?;
@@ -264,9 +281,7 @@ fn handle_eth_log(our: &Address, state: &mut State, log: eth::Log) -> anyhow::Re
// if ~metadata-uri is also empty, this is an unpublish action!
if metadata_uri.is_empty() {
state.published.remove(&package_id);
if is_our_package {
state.listings.remove(&package_id);
}
state.listings.remove(&package_id);
return Ok(());
}
return Err(anyhow::anyhow!("metadata hash not found"));
@@ -341,6 +356,7 @@ pub fn fetch_and_subscribe_logs(our: &Address, state: &mut State) {
let filter = app_store_filter(state);
// get past logs, subscribe to new ones.
// subscribe first so we don't miss any logs
println!("subscribing...");
state.kimap.provider.subscribe_loop(1, filter.clone());
for log in fetch_logs(
&state.kimap.provider,
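
Taken together, the chain changes above add two small patterns: subscription logs are deferred through timer:distro:sys so kns_indexer has a chance to index the block first, and the ~metadata-hash lookup is retried once when the RPC provider errors. Below is a minimal sketch of both, assuming the kinode_process_lib APIs already used in the diff (timer::set_timer, eth::Log, eth::EthError::RpcError); the helper names defer_log, log_from_timer_context, and retry_once_on_rpc_error are illustrative only and do not appear in the PR.

```rust
use kinode_process_lib::{eth, timer, Message};

const DELAY_MS: u64 = 1_000; // 1s, matching the constant added in the diff

/// Defer handling of a subscription log: serialize it into the timer context
/// so it comes back after DELAY_MS, giving kns_indexer time to process the
/// same block first.
fn defer_log(log: &eth::Log) -> anyhow::Result<()> {
    timer::set_timer(DELAY_MS, Some(serde_json::to_vec(log)?));
    Ok(())
}

/// When timer:distro:sys responds, recover the deferred log from the message
/// context and hand it back to the normal log handler.
fn log_from_timer_context(message: &Message) -> anyhow::Result<eth::Log> {
    let Some(context) = message.context() else {
        return Err(anyhow::anyhow!("timer response missing context"));
    };
    Ok(serde_json::from_slice::<eth::Log>(context)?)
}

/// Retry an RPC-backed call once on EthError::RpcError, sleeping in-process
/// (rather than via another timer message) so later events are still handled
/// in order of receipt.
fn retry_once_on_rpc_error<T>(
    mut call: impl FnMut() -> Result<T, eth::EthError>,
) -> Result<T, eth::EthError> {
    match call() {
        Err(eth::EthError::RpcError(_)) => {
            std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
            call()
        }
        other => other,
    }
}
```

In the diff the retry is written inline as a nested match around state.kimap.get(&hash_note); with a helper like the one sketched here it would read retry_once_on_rpc_error(|| state.kimap.get(&hash_note)).
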
2 changes: 1 addition & 1 deletion kinode/packages/app_store/download/src/lib.rs
Expand Up @@ -62,7 +62,7 @@ fn init(our: Address) {
};

match response {
DownloadResponses::Error(e) => {
DownloadResponses::Error(_e) => {
println!("download: error");
}
DownloadResponses::Success => {
10 changes: 3 additions & 7 deletions kinode/packages/app_store/downloads/src/lib.rs
@@ -7,17 +7,13 @@ use crate::kinode::process::downloads::{
DownloadResponses, Entry, FileEntry, HashMismatch, LocalDownloadRequest, RemoteDownloadRequest,
RemoveFileRequest,
};
use std::{
collections::{HashMap, HashSet},
io::Read,
str::FromStr,
};
use std::{collections::HashSet, io::Read, str::FromStr};

use ft_worker_lib::{spawn_receive_transfer, spawn_send_transfer};
use kinode_process_lib::{
await_message, call_init, get_blob, get_state,
http::client,
kernel_types as kt, print_to_terminal, println, set_state,
print_to_terminal, println, set_state,
vfs::{self, Directory, File},
Address, Message, PackageId, ProcessId, Request, Response,
};
@@ -113,7 +109,7 @@ fn handle_message(
state: &mut State,
message: &Message,
downloads: &mut Directory,
tmp: &mut Directory,
_tmp: &mut Directory,
auto_updates: &mut HashSet<(PackageId, String)>,
) -> anyhow::Result<()> {
if message.is_request() {
5 changes: 3 additions & 2 deletions kinode/packages/app_store/pkg/manifest.json
@@ -49,7 +49,8 @@
"kns_indexer:kns_indexer:sys",
"vfs:distro:sys",
"http_client:distro:sys",
"eth:distro:sys"
"eth:distro:sys",
"timer:distro:sys"
],
"public": false
},
@@ -98,4 +99,4 @@
],
"public": false
}
]
]
2 changes: 2 additions & 0 deletions kinode/packages/app_store/ui/package-lock.json

Some generated files are not rendered by default.

5 changes: 3 additions & 2 deletions kinode/packages/app_store/ui/src/pages/PublishPage.tsx
@@ -150,11 +150,12 @@ export default function PublishPage() {
address: tba as `0x${string}`,
functionName: 'execute',
args: [
KIMAP,
MULTICALL,
BigInt(0),
multicall,
1
]
],
gas: BigInt(1000000),
});

} catch (error) {