diff --git a/.gitignore b/.gitignore index 8d43a2a..17c34a5 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,5 @@ Cargo.lock .patcher/ .DS_Store .env + +owner_key* \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index 92cd33c..93499b8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,14 +3,4 @@ resolver = "2" members = [ "crates/rustpatcher-macros", "crates/rustpatcher", -] - -default-members = [ - "crates/rustpatcher-macros", - "crates/rustpatcher", -] - -[profile.release] -panic = "abort" -debug = true -lto = "thin" \ No newline at end of file +] \ No newline at end of file diff --git a/README.md b/README.md index b9f95a4..6ca6fac 100644 --- a/README.md +++ b/README.md @@ -1,126 +1,231 @@ -# Rust Patcher -*Secure Decentralized Software Updates* - Working work in progress - [![Crates.io](https://img.shields.io/crates/v/rustpatcher.svg)](https://crates.io/crates/rustpatcher) [![Docs.rs](https://docs.rs/rustpatcher/badge.svg)](https://docs.rs/rustpatcher) ![License](https://img.shields.io/badge/License-MIT-green) -## Implementation Flow +# Rust Patcher +Secure fully decentralized software updates. + -### 1. Add Dependency (Crates.io) +## Implementation Flow + +### 1. Add dependency ```toml # Cargo.toml [dependencies] -rustpatcher = "0.1" +rustpatcher = "0.2" +tokio = { version = "1", features = ["rt-multi-thread","macros"] } ``` - -### 2. Initialize Patcher +### 2. Embed owner public key and start the updater ```rust -// main.rs -use rustpatcher::Patcher; -#[rustpatcher::main] #[tokio::main] +#[rustpatcher::public_key("axegnqus3miex47g1kxf1j7j8spczbc57go7jgpeixq8nxjfz7gy")] async fn main() -> anyhow::Result<()> { - let patcher = Patcher::new() - .build() - .await?; + + // Only in --release builds, not intended for debug builds + rustpatcher::spawn(rustpatcher::UpdaterMode::At(13, 40)).await?; + + println!("my version is {:?}", rustpatcher::Version::current()?); + + // your app code after this + loop { + tokio::select! { + _ = tokio::signal::ctrl_c() => { + println!("Exiting on Ctrl-C"); + break; + } + } + } + Ok(()) } ``` -### 3. Initialize Cryptographic Identity +### 3. Generate signing key (one-time) ```bash -cargo run -- rustpatcher init -``` -**Output:** -```text -New keys generated: - Trusted-Key = mw6iuq1iu7qd5gcz59qpjnu6tw9yn7pn4gxxkdbqwwwxfzyziuro - Shared-Secret = 8656fg8j6s43a4jndkzdysjuof588zezsn6s8sd6wwcpwf6b3r9y +cargo install rustpatcher +rustpatcher gen ./owner_key ``` +Output includes: +- Owner signing key saved to ./owner_key (z-base-32 encoded) +- Owner public key (z-base-32) +- Attribute snippet to paste into main: #[rustpatcher::public_key("")] -### 4. Extend main with keys -```rust -// main.rs -use rustpatcher::Patcher; +### 4. Build and sign releases +```bash +# build your binary +cargo build --release -#[rustpatcher::main] -#[tokio::main] -async fn main() -> anyhow::Result<()> { - let patcher = Patcher::new() - .trusted_key_from_z32_str("mw6iuq1iu7qd5gcz59qpjnu6tw9yn7pn4gxxkdbqwwwxfzyziuro") - .shared_secret_key_from_z32_str("mw6iuq1iu7qd5gcz59qpjnu6tw9yn7pn4gxxkdbqwwwxfzyziuro")) - .build() - .await?; -} +# sign the compiled binary in-place +rustpatcher sign target/release/ --key-file=./owner_key ``` -### 5. Publish Updates (Master Node) -```bash -# Increment version in Cargo.toml first -cargo run -- rustpatcher publish +### 5. Publish updates +- Run the newly signed binary on at least one node until a couple of peers have updated themselfs. +- The running process periodically publishes the latest PatchInfo to the DHT. 
+- Clients discover new PatchInfo, fetch the patch from peers, verify, and self-replace. + + +--- + +## Run Example: simple +```sh +git clone https://github.com/rustonbsd/rustpatcher +cd rustpatcher +cargo build --release --example simple +cargo run --bin rustpatcher sign target/release/examples/simple --key-file ./owner_key_example + +# Run signed app: +./target/release/examples/simple + + +# if you increase the version in /crates/rustpatcher/Cargo.toml +# and build+sign+start another node, then the first +# node will update via the second node. ``` -Creates signed package with: -- SHA-256 executable hash -- Version metadata (major.minor.patch) -- Ed25519 publisher signature -- PKARR DHT record --- -## Network Architecture +## Network Architecture -### Master Node Flow ```mermaid sequenceDiagram - Master->>+PKARR: Publish signed package - Master->>+Iroh: Announce version topic - Master-->>Network: Propagate via DHT + participant Owner as Owner Node (new version) + participant DTT as DHT Topic Tracker + participant Peer as Peer Node (old) + + Owner->>DTT: Publish PatchInfo(version, size, hash, sig) + Peer->>DTT: Query latest PatchInfo (minute slots) + DTT-->>Peer: Return newest records + Peer->>Owner: Iroh connect (ALPN /rustpatcher//v0) + Peer->>Owner: Auth = sha512(pubkey || unix_minute) + Owner-->>Peer: OK + Patch (postcard) + Peer->>Peer: Verify(hash, size, ed25519(pubkey)) + Peer->>Peer: Atomic replace + optional execv restart ``` -### Client Node Flow -```mermaid -sequenceDiagram - Client->>+PKARR: Check version records - PKARR-->>-Client: Return latest signed package - Client->>+Iroh: Discover peers via topic - Iroh-->>-Client: Return node list - Client->>Peer: Establish P2P connection - Peer-->>Client: Stream verified update - Client->>Self: Safe replace via self_replace +- Discovery: [distributed-topic-tracker](https://github.com/rustonbsd/distributed-topic-tracker) minute-slotted records over the DHT +- Transport: [iroh](https://github.com/n0-computer/iroh) QUIC, ALPN namespaced per owner key +- Authentication: rotating hash auth per minute bucket + +--- + +## Key Processes + +1. Version propagation +- Running a node publishes a PatchInfo record roughly every minute. +- Records are minute scoped with short TTL to avoid staleness. +- Peers scan current and previous minute for latest version. + +2. Patch fetch + verification +- Peer connects to other peers with newer version via iroh using an ALPN derived from the owner pubkey. +- Auth: sha512(owner_pub_key || unix_minute(t)) for t ∈ {-1..1}. +- Owner sends the signed patch (postcard-encoded). + +3. Self-update mechanism +- Write to temp file +- Atomic [self-replace](https://crates.io/crates/self-replace) +- Optional immediate restart via execv (UpdaterMode::Now) or deferred (OnRestart / At(hh, mm)) + +--- + +## Data Embedded in the Binary + +- Fixed-size embedded region in a dedicated link section (.embedded_signature) +- Layout: + - 28 bytes: bounds start marker + - 32 bytes: binary hash (sha512 truncated to 32) + - 8 bytes: binary size (LE) + - 64 bytes: ed25519 signature + - 16 bytes: ASCII version (padded) + - 28 bytes: bounds end marker + +At runtime, the library: +- Locates the embedded region +- Parses version/hash/size/signature +- Verifies the binary contents against the signed metadata + +--- + +## CLI Reference (rustpatcher) + +- gen + - Generates a new ed25519 signing key in z-base-32; prints the public key and attribute snippet. 
+- sign --key-file + - Reads the compiled binary, computes PatchInfo, and writes it into the embedded region. + +DO NOT COMMIT YOUR PRIVATE KEY! + +```sh +# add this to your .gitignore +owner_key* ``` -## Key Processes +Notes: +- Keys are z-base-32 encoded on disk, the public key is embedded in code via #[rustpatcher::public_key("...")]. +- Signing must be re-run after each new build that is intendet to self update. +- For every build target a seperate keypair is required (we don't want the arm users patching in x86 binaries). + +--- + +## Library API (overview) + +- #[rustpatcher::public_key("")] + - Embeds the owner public key and the package version for verification +- rustpatcher::spawn(mode: UpdaterMode) -> Future> + - Starts discovery, publishing, distribution server, and updater +- UpdaterMode::{Now, OnRestart, At(h, m)} + +--- + +## How It Changed (vs previous rustpatcher) + +- Single embedded region with explicit bounds, constant size, and zero-allocation compile-time construction +- Signature scheme clarified and minimal: + - sha512(data_no_embed) -> first 32 bytes as hash + - sign sha512(version || hash || size_le) with ed25519 +- Owner key embedding via attribute macro, version captured from CARGO_PKG_VERSION and embedded as fixed-length ASCII +- Minute-slotted record publishing and discovery via distributed-topic-tracker +- iroh-based distributor with rotating minute auth derived from owner public key +- Simple updater modes: Now, OnRestart, At(hh:mm) +- CLI split: cargo install rustpatcher to manage keys and sign releases + +--- + +## Example + +```rust +use rustpatcher::UpdaterMode; + +#[tokio::main] +#[rustpatcher::public_key("axegnqus3miex47g1kxf1j7j8spczbc57go7jgpeixq8nxjfz7gy")] +async fn main() -> anyhow::Result<()> { -1. **Version Propagation** - - Master nodes sign packages with secret key - - PKARR DHT stores version records with TTL - - Iroh topic tracker maintains peer list per version + rustpatcher::spawn(UpdaterMode::At(02, 30)).await?; + + // app code... + Ok(()) +} +``` -2. **Update Verification** - ```rust - // Verification chain - if pub_key.verify(&data, &sig).is_ok() - && compute_hash(data) == stored_hash - && version > current_version { - apply_update() - } - ``` +## Release Workflow -3. **Self-Update Mechanism** - - Hash and Signature verification after data download - - Temp file write with atomic replacement - - Execv syscall for instant reload +1) Generate key (once): +- rustpatcher gen ./owner_key -## CLI Reference +2) Build + sign each release: +- cargo build --release +- rustpatcher sign target/release/ --key-file=./owner_key -| Command | Function | -|-----------------|--------------------------------------| -| `init` | Generate cryptographic identity | -| `publish` | Create/distribute signed package | +3) Deploy and run the signed binary on at least one node: +- It will publish PatchInfo and serve patches to peers. +- No need to have any exposed ports. 
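+The same release workflow as a quick shell sketch (one-time key generation, then build, sign, and run for every release; `<your-binary>` is a placeholder for your actual target name):
+
+```sh
+# one-time: generate the owner signing key (never commit it)
+rustpatcher gen ./owner_key
+
+# every release: build, then sign the compiled binary in-place
+cargo build --release
+rustpatcher sign target/release/<your-binary> --key-file=./owner_key
+
+# deploy: run the signed binary on at least one node; it publishes
+# PatchInfo records and serves the patch to peers over iroh
+./target/release/<your-binary>
+```
+
+The make snippet below captures the same build-and-sign steps.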
-*Zero configuration needed for peer discovery - automatic via Iroh Topic Tracker* +```make +build: + cargo build --release + rustpatcher sign target/release/ --key-file ./owner_key -## Old Architecture Diagram -![Rough outline of how everythong works](media/patcher_diagram.svg "Patcher diagram") +publish: + target/release/ +``` \ No newline at end of file diff --git a/crates/rustpatcher-macros/Cargo.toml b/crates/rustpatcher-macros/Cargo.toml index 240aae1..8ffc06c 100644 --- a/crates/rustpatcher-macros/Cargo.toml +++ b/crates/rustpatcher-macros/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "rustpatcher-macros" -version = "0.1.0" -edition = "2021" +version = "0.2.0" +edition = "2024" authors = ["Zacharias Boehler "] description = "p2p patching system" license = "MIT" @@ -14,7 +14,7 @@ categories = ["network-programming"] proc-macro = true [dependencies] -syn = { version = "2.0", features = ["full"] } +syn = { version = "2.0", default-features=false, features = ["full"] } quote = "1.0" proc-macro2 = "1.0" -ctor = "0.4.2" +ctor = "0.5.0" diff --git a/crates/rustpatcher-macros/src/lib.rs b/crates/rustpatcher-macros/src/lib.rs index bbcc4c5..7e8cb27 100644 --- a/crates/rustpatcher-macros/src/lib.rs +++ b/crates/rustpatcher-macros/src/lib.rs @@ -1,16 +1,27 @@ use proc_macro::TokenStream; use quote::quote; -use syn::{parse_macro_input, ItemFn}; +use syn::{parse_macro_input, ItemFn, Expr}; #[proc_macro_attribute] -pub fn main(_args: TokenStream, input: TokenStream) -> TokenStream { +pub fn public_key(args: TokenStream, input: TokenStream) -> TokenStream { let input_fn = parse_macro_input!(input as ItemFn); + let public_key_expr = parse_macro_input!(args as Expr); + let expanded = quote! { - // Create a static initializer that runs before main - #[ctor::ctor] - fn __init_version() { - rustpatcher::version_embed::__set_version(env!("CARGO_PKG_VERSION")); - } + const _: () = { + #[::ctor::ctor] + fn __rustpatcher_init_version() { + let __rustpatcher_public_key: &'static str = { + let __cow: ::std::borrow::Cow<'static, str> = + ::std::convert::Into::<::std::borrow::Cow<'static, str>>::into(#public_key_expr); + match __cow { + ::std::borrow::Cow::Borrowed(s) => s, + ::std::borrow::Cow::Owned(s) => ::std::boxed::Box::leak(s.into_boxed_str()), + } + }; + ::rustpatcher::embed::embed(env!("CARGO_PKG_VERSION"), __rustpatcher_public_key); + } + }; #input_fn }; diff --git a/crates/rustpatcher/Cargo.toml b/crates/rustpatcher/Cargo.toml index eed2828..3478c7a 100644 --- a/crates/rustpatcher/Cargo.toml +++ b/crates/rustpatcher/Cargo.toml @@ -1,54 +1,43 @@ [package] name = "rustpatcher" -version = "0.1.13" -edition = "2021" -authors = ["Zacharias Boehler "] -description = "p2p patching system" -license = "MIT" -repository = "https://github.com/rustonbsd/rustpatcher" -readme = "README.md" -keywords = ["networking"] -categories = ["network-programming"] - +version = "0.2.0" +edition = "2024" [dependencies] -iroh = "0.34.1" -iroh-topic-tracker = {version = "0.1.12" } -tokio ={ version="1",features = ["full"] } +rustpatcher_macros = { path = "../rustpatcher-macros", package = "rustpatcher-macros", version = "0.2.0" } + +ctor = "0.5" +actor-helper = "0.1" +tokio ={ version = "1", features = ["rt-multi-thread","macros","sync"] } anyhow = "1" -rand = "0.8.0" -serde = {version = "1", features = ["derive"]} +serde = { version = "1", default-features = false, features = ["derive"] } serde_json = "1" -serde_bytes = "0.11" -bytes = { version = "1.10.0", features = ["serde"] } -ed25519-dalek = { version = "2.0.0", features = 
["serde", "rand_core"] } -postcard = { version = "1", default-features = false, features = ["alloc", "use-std", "experimental-derive"] } -futures-lite = "2.6.0" -z32 = "1" +ed25519-dalek = { version = "2", features = ["serde", "rand_core"] } sha2 = "0.10" -pkarr = "3.7.2" -clap = { version = "4.4.8", features = ["derive"] } +rand = "0.8" +z32 = "1" +clap = { version = "4", features = ["derive"] } +once_cell = "1" tempfile = "3" +nix = { version = "0.30", features = ["process"] } self-replace = "1" -nix = { version = "0.30.1", features = ["process"] } -reqwest = "0.12" -dotenv = "0.15" -once_cell = "1" -log = "0.4.26" -env_logger = "0.11.8" +chrono = { version = "0.4", default-features = false, features = ["std"] } +tracing = "0.1" +tracing-subscriber = { version = "0.3", default-features=false, features = ["std"] } -rustpatcher_macros = { path = "../rustpatcher-macros", package = "rustpatcher-macros", default-features = true, version = "0.1.0" } -ctor = "0.4.2" +iroh = { version = "0.92", default-features = false } +postcard = { version = "1" } -[dev-dependencies] -tokio = { version = "1", features = ["full", "test-util"] } +distributed-topic-tracker = { git = "https://github.com/rustonbsd/distributed-topic-tracker", rev="f4a567dad5cfde5eab6dccb3a8784f4bf3197bb9", default-features = false } +[[example]] +name = "platforms" +path = "examples/platforms.rs" -[profile.dev] -opt-level = 0 +[[example]] +name = "simple" +path = "examples/simple.rs" -[profile.release] -opt-level = 3 -lto = true -panic = "abort" -features = [] \ No newline at end of file +[[bin]] +name = "rustpatcher" +path = "xtask/sign.rs" diff --git a/crates/rustpatcher/Makefile b/crates/rustpatcher/Makefile new file mode 100644 index 0000000..c26d6c7 --- /dev/null +++ b/crates/rustpatcher/Makefile @@ -0,0 +1,18 @@ +.PHONY: build-simple +build-simple: + cargo build --release --example simple + cargo run --bin rustpatcher sign ../../target/release/examples/simple --key-file ./owner_key + +.PHONY: run-simple +run-simple: + ../../target/release/examples/simple + +.PHONY: build-simple-copy +build-simple-copy: + cargo build --release --example simple + cargo run --bin rustpatcher sign ../../target/release/examples/simple --key-file ./owner_key + cp ../../target/release/examples/simple ../../target/release/examples/simple_copy + +.PHONY: run-simple-copy +run-simple-copy: + ../../target/release/examples/simple_copy \ No newline at end of file diff --git a/crates/rustpatcher/README.md b/crates/rustpatcher/README.md deleted file mode 100644 index b9f95a4..0000000 --- a/crates/rustpatcher/README.md +++ /dev/null @@ -1,126 +0,0 @@ -# Rust Patcher -*Secure Decentralized Software Updates* - Working work in progress - -[![Crates.io](https://img.shields.io/crates/v/rustpatcher.svg)](https://crates.io/crates/rustpatcher) -[![Docs.rs](https://docs.rs/rustpatcher/badge.svg)](https://docs.rs/rustpatcher) -![License](https://img.shields.io/badge/License-MIT-green) - -## Implementation Flow - -### 1. Add Dependency (Crates.io) -```toml -# Cargo.toml -[dependencies] -rustpatcher = "0.1" -``` - - -### 2. Initialize Patcher -```rust -// main.rs -use rustpatcher::Patcher; - -#[rustpatcher::main] -#[tokio::main] -async fn main() -> anyhow::Result<()> { - let patcher = Patcher::new() - .build() - .await?; -} -``` - -### 3. 
Initialize Cryptographic Identity -```bash -cargo run -- rustpatcher init -``` -**Output:** -```text -New keys generated: - Trusted-Key = mw6iuq1iu7qd5gcz59qpjnu6tw9yn7pn4gxxkdbqwwwxfzyziuro - Shared-Secret = 8656fg8j6s43a4jndkzdysjuof588zezsn6s8sd6wwcpwf6b3r9y -``` - -### 4. Extend main with keys -```rust -// main.rs -use rustpatcher::Patcher; - -#[rustpatcher::main] -#[tokio::main] -async fn main() -> anyhow::Result<()> { - let patcher = Patcher::new() - .trusted_key_from_z32_str("mw6iuq1iu7qd5gcz59qpjnu6tw9yn7pn4gxxkdbqwwwxfzyziuro") - .shared_secret_key_from_z32_str("mw6iuq1iu7qd5gcz59qpjnu6tw9yn7pn4gxxkdbqwwwxfzyziuro")) - .build() - .await?; -} -``` - -### 5. Publish Updates (Master Node) -```bash -# Increment version in Cargo.toml first -cargo run -- rustpatcher publish -``` -Creates signed package with: -- SHA-256 executable hash -- Version metadata (major.minor.patch) -- Ed25519 publisher signature -- PKARR DHT record - ---- - -## Network Architecture - -### Master Node Flow -```mermaid -sequenceDiagram - Master->>+PKARR: Publish signed package - Master->>+Iroh: Announce version topic - Master-->>Network: Propagate via DHT -``` - -### Client Node Flow -```mermaid -sequenceDiagram - Client->>+PKARR: Check version records - PKARR-->>-Client: Return latest signed package - Client->>+Iroh: Discover peers via topic - Iroh-->>-Client: Return node list - Client->>Peer: Establish P2P connection - Peer-->>Client: Stream verified update - Client->>Self: Safe replace via self_replace -``` - -## Key Processes - -1. **Version Propagation** - - Master nodes sign packages with secret key - - PKARR DHT stores version records with TTL - - Iroh topic tracker maintains peer list per version - -2. **Update Verification** - ```rust - // Verification chain - if pub_key.verify(&data, &sig).is_ok() - && compute_hash(data) == stored_hash - && version > current_version { - apply_update() - } - ``` - -3. 
**Self-Update Mechanism** - - Hash and Signature verification after data download - - Temp file write with atomic replacement - - Execv syscall for instant reload - -## CLI Reference - -| Command | Function | -|-----------------|--------------------------------------| -| `init` | Generate cryptographic identity | -| `publish` | Create/distribute signed package | - -*Zero configuration needed for peer discovery - automatic via Iroh Topic Tracker* - -## Old Architecture Diagram -![Rough outline of how everythong works](media/patcher_diagram.svg "Patcher diagram") diff --git a/crates/rustpatcher/build.rs b/crates/rustpatcher/build.rs deleted file mode 100644 index 2c9bb5d..0000000 --- a/crates/rustpatcher/build.rs +++ /dev/null @@ -1,4 +0,0 @@ -fn main() { - println!("cargo:rustc-env=TRUSTED_KEY=36nqmiugqobr5uw4j7mm8xfbfpc8pggpxnmw6k9sj7x7mtgbdr9o"); - println!("cargo:rustc-env=SHARED_SECRET_KEY=3j86j9r7zn1r71xj4ky4nakwhpu1syywrwn9m6ahe5iqp897up1o"); -} \ No newline at end of file diff --git a/crates/rustpatcher/examples/platforms.rs b/crates/rustpatcher/examples/platforms.rs new file mode 100644 index 0000000..ea2dff1 --- /dev/null +++ b/crates/rustpatcher/examples/platforms.rs @@ -0,0 +1,30 @@ +#[cfg(target_os = "windows")] +const PUBLIC_KEY: &'static str = "...windows-key..."; +#[cfg(target_os = "linux")] +const PUBLIC_KEY: &'static str = "...linux-key..."; +#[cfg(target_os = "macos")] +const PUBLIC_KEY: &'static str = "...macos-key..."; + +#[rustpatcher::public_key(PUBLIC_KEY)] +#[tokio::main] +async fn main() -> anyhow::Result<()> { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::INFO) + .with_thread_ids(true) + .init(); + + rustpatcher::spawn(rustpatcher::UpdaterMode::At(13, 40)).await?; + + let self_patch = rustpatcher::Patch::from_self()?; + println!("my version {:?} running", self_patch.info().version); + + loop { + tokio::select! { + _ = tokio::signal::ctrl_c() => { + println!("Exiting on Ctrl-C"); + break; + } + } + } + Ok(()) +} diff --git a/crates/rustpatcher/examples/simple.rs b/crates/rustpatcher/examples/simple.rs index fb45304..0e39097 100644 --- a/crates/rustpatcher/examples/simple.rs +++ b/crates/rustpatcher/examples/simple.rs @@ -1,37 +1,25 @@ -use std::{env, time::Duration}; -use rustpatcher::data::Patcher; -use tokio::time::sleep; - -//#[rustpatcher::main] #[tokio::main] +#[rustpatcher::public_key("axegnqus3miex47g1kxf1j7j8spczbc57go7jgpeixq8nxjfz7gy")] async fn main() -> anyhow::Result<()> { - env_logger::init(); - - let v_string = env!("CARGO_PKG_VERSION").to_string().clone(); - // Needed since this is an example of the same crate and not loading as a package - // Normal: - // - // #[rustpatcher::main] - // fn main() -> Result<()> { - // // .. 
- // } - // - rustpatcher::version_embed::__set_version(Box::leak(v_string.into_boxed_str())); + // Only in --release builds, not intended for debug builds + rustpatcher::spawn(rustpatcher::UpdaterMode::At(13, 40)).await?; - let patcher = Patcher::new() - .trusted_key_from_z32_str("36nqmiugqobr5uw4j7mm8xfbfpc8pggpxnmw6k9sj7x7mtgbdr9o") - .shared_secret_key_from_z32_str("3j86j9r7zn1r71xj4ky4nakwhpu1syywrwn9m6ahe5iqp897up1o") - .update_interval(Duration::from_secs(10)) - .build() - .await?; + println!("my version is {:?}", rustpatcher::Version::current()?); + + #[cfg(not(debug_assertions))] + println!("{:?}", rustpatcher::Patch::from_self()?.info()); + #[cfg(debug_assertions)] + println!("Debug build, skipping Patch::from_self()"); loop { - sleep(Duration::from_secs(10)).await; - if patcher.clone().update_available().await? { - println!("Update available"); - println!("Updating: {:?}", patcher.clone().try_update().await?); + tokio::select! { + _ = tokio::signal::ctrl_c() => { + println!("Exiting on Ctrl-C"); + break; + } } } -} + Ok(()) +} \ No newline at end of file diff --git a/crates/rustpatcher/src/data.rs b/crates/rustpatcher/src/data.rs deleted file mode 100644 index 56d1d65..0000000 --- a/crates/rustpatcher/src/data.rs +++ /dev/null @@ -1,443 +0,0 @@ -use std::{str::FromStr, sync::Arc, time::Duration}; - -use anyhow::bail; -use bytes::Bytes; -use ed25519_dalek::{ed25519::signature::SignerMut, Signature, SigningKey, PUBLIC_KEY_LENGTH, SECRET_KEY_LENGTH, SIGNATURE_LENGTH}; -use iroh::{Endpoint, PublicKey}; -use iroh_topic_tracker::topic_tracker::{Topic, TopicTracker}; -use pkarr::{Keypair, SignedPacket}; -use serde::{Deserialize, Serialize}; -use tokio::sync::Mutex; - - -use crate::{utils::{compute_hash, Storage, LAST_REPLY_ID_NAME}, LastReplyId}; - -#[derive(Debug, Clone, Serialize,Deserialize,PartialOrd, PartialEq, Eq)] -pub struct Version(pub i32, pub i32, pub i32); - -impl ToString for Version { - fn to_string(&self) -> String { - format!("{}.{}.{}",self.0,self.1,self.2) - } -} - -impl FromStr for Version { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - let parts: Vec<&str> = s.split('.').collect(); - - if parts.len() != 3 { - bail!("wrong version format") - } - - // Parse each component individually - let parse_component = |s: &str| -> anyhow::Result { - Ok(s.parse::()?) 
- }; - - let major = parse_component(parts[0])?; - let minor = parse_component(parts[1])?; - let patch = parse_component(parts[2])?; - - Ok(Version(major, minor, patch)) - } -} - -impl Ord for Version { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - let self_sum: i128 = ((i32::MAX as i128).pow(2) * self.0 as i128) - + ((i32::MAX as i128).pow(1) * self.1 as i128) - + self.2 as i128; - let other_sum: i128 = ((i32::MAX as i128).pow(2) * other.0 as i128) - + ((i32::MAX as i128).pow(1) * other.1 as i128) - + other.2 as i128; - self_sum.cmp(&other_sum) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct VersionInfo { - #[serde(with = "serde_version")] - pub version: Version, - #[serde(with = "serde_z32_array")] - pub hash: [u8; 32], - #[serde(with = "serde_z32_signature")] - pub signature: Signature, -} - -impl VersionInfo { - pub fn to_topic_hash(&self,shared_secret: [u8;SECRET_KEY_LENGTH]) -> anyhow::Result { - let mut signing_key = SigningKey::from_bytes(&shared_secret); - let data = serde_json::to_string(self)?; - let signature = signing_key.sign(data.as_bytes()); - Ok(Topic::from_passphrase(&z32::encode(&signature.to_bytes()))) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct VersionTracker { - #[serde(with = "serde_z32_array")] - trusted_key: [u8; PUBLIC_KEY_LENGTH], - version_info: Option, - #[serde(with = "serde_z32_vec_array")] - node_ids: Vec<[u8; PUBLIC_KEY_LENGTH]>, - #[serde(with = "serde_z32_bytes_option")] - data: Option, -} - -impl VersionTracker { - pub fn new(trusted_key: &[u8; PUBLIC_KEY_LENGTH]) -> Self { - Self { - trusted_key: trusted_key.clone(), - version_info: None, - node_ids: vec![], - data: None, - } - } - - pub fn version_info(&self) -> Option { - self.version_info.clone() - } - - pub fn data(&self) -> Option { - self.data.clone() - } - - pub fn node_ids(&self) -> Vec<[u8; PUBLIC_KEY_LENGTH]> { - self.node_ids.clone() - } - - pub fn load( - trusted_key: &[u8; PUBLIC_KEY_LENGTH], - version_info: &VersionInfo, - data: &Bytes, - node_ids: Vec<[u8; PUBLIC_KEY_LENGTH]>, - ) -> anyhow::Result { - Self::verify_data(trusted_key,version_info, data)?; - - Ok(Self { - trusted_key: trusted_key.clone(), - version_info: Some(version_info.clone()), - node_ids: node_ids, - data: Some(data.clone()), - }) - } - - // Verify data signature, update version - pub fn update_version( - self: &mut Self, - version_info: &VersionInfo, - data: &Bytes, - node_ids: Option>, - ) -> anyhow::Result<()> { - Self::verify_data(&self.trusted_key,version_info, data)?; - - self.version_info = Some(version_info.clone()); - self.node_ids = node_ids.unwrap_or(vec![]); - self.data = Some(data.clone()); - - Ok(()) - } - - pub fn add_node_id(self: &mut Self, node_id: &[u8; PUBLIC_KEY_LENGTH]) { - if !self.node_ids.contains(node_id) { - self.node_ids.push(node_id.clone()); - } - } - - pub fn rm_node_id(self: &mut Self, node_id: &[u8; PUBLIC_KEY_LENGTH]) { - if self.node_ids.contains(node_id) { - let len = self.node_ids.len(); - for i in 1..len + 1 { - if self.node_ids[len - i].eq(node_id) { - self.node_ids.remove(len - i); - } - } - } - } - - pub fn verify_data(trusted_key: &[u8; PUBLIC_KEY_LENGTH], version_info: &VersionInfo, data: &Bytes) -> anyhow::Result<()> { - let pub_key = PublicKey::from_bytes(&trusted_key)?; - let sig = version_info.signature; - - log::warn!("Sig: {}",z32::encode(&sig.to_bytes())); - log::warn!("hash: {}",z32::encode(&compute_hash(&data))); - log::warn!("trusted: {}",z32::encode(trusted_key)); - - match pub_key.verify(&data, &sig) { - 
Ok(_) => Ok(()), - Err(_) => anyhow::bail!("signature doesn't match data"), - } - } - - pub async fn as_signed_packet(&self,secret_key: &[u8;SECRET_KEY_LENGTH]) -> anyhow::Result { - if self.version_info().is_none() { - bail!("uninitialized version info") - } - if self.data().is_none() { - bail!("uninitialized data") - } - - // Set reply id to unix time - let vi = self.version_info.clone().unwrap(); - - let mut last_reply_id: LastReplyId = LastReplyId::from_file(LAST_REPLY_ID_NAME) - .await - .unwrap_or(LastReplyId(0)); - - // Not sure if rap around will cause an error so to be safe - last_reply_id.0 = if last_reply_id.0 >= u16::MAX - 1 { - 0 - } else { - last_reply_id.0 + 1 - }; - let _ = last_reply_id.to_file("last_reply_id").await; - let mut signed_packet = SignedPacket::builder(); - - // Version - let version = serde_json::to_string(&vi.version)?; - signed_packet = signed_packet.txt("_version".try_into()?, version.as_str().try_into()?, 30); - - // Signature - let signature = serde_json::to_string(&vi.signature)?; - signed_packet = signed_packet.txt("_signature".try_into()?, signature.as_str().try_into()?, 30); - - // Hash - let hash = serde_json::to_string(&vi.hash)?; - signed_packet = signed_packet.txt("_hash".try_into()?, hash.as_str().try_into()?, 30); - - let key_pair = Keypair::from_secret_key(secret_key); - - Ok(signed_packet.sign(&key_pair, )?) - } -} - -#[derive(Debug,Clone,Serialize,Deserialize)] -pub struct AuthRequest { - pub sign_request: Bytes, -} - -#[derive(Debug,Clone,Serialize,Deserialize)] -pub struct Auth { - pub signature: Signature, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub enum Protocol { - Ready, - AuthRequest(AuthRequest), - Request(Auth), - Data(VersionInfo, Bytes), - DataUnavailable, - Done, -} - -#[derive(Debug, Clone)] -pub struct Patcher { - pub trusted_key: [u8; PUBLIC_KEY_LENGTH], - pub(crate) secret_key: [u8; SECRET_KEY_LENGTH], - pub(crate) public_key: [u8; PUBLIC_KEY_LENGTH], - pub(crate) shared_secret_key: [u8; SECRET_KEY_LENGTH], - pub(crate) inner: Inner, -} - -#[derive(Debug, Clone)] -pub(crate) struct Inner { - pub endpoint: Endpoint, - pub topic_tracker: TopicTracker, - pub latest_version: Arc>, - pub latest_trusted_package: Arc>>, - pub pkarr_publishing_interval: Duration, -} - -pub mod serde_version { - - use serde::de::Error; - use serde::{Deserializer, Serializer}; - - use super::*; - - pub fn serialize(version: &Version, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&version.to_string()) - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - match Version::from_str(&s) { - Ok(version) => Ok(version), - Err(err) => Err(D::Error::custom(err)), - } - } -} - -pub mod serde_z32_array { - use serde::de::Error; - use serde::{Deserializer, Serializer}; - - use super::*; - - pub fn serialize(bytes: &[u8; N], serializer: S) -> Result - where - S: Serializer, - { - let encoded = z32::encode(bytes); - serializer.serialize_str(&encoded) - } - - pub fn deserialize<'de, D, const N: usize>(deserializer: D) -> Result<[u8; N], D::Error> - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - let bytes = z32::decode(s.as_bytes()).map_err(|e| D::Error::custom(e.to_string()))?; - - if bytes.len() != N { - return Err(D::Error::custom("invalid length")); - } - - let mut arr = [0u8; N]; - arr.copy_from_slice(&bytes); - Ok(arr) - } -} - -pub mod serde_z32_bytes { - use serde::de::Error; - use 
serde::{Deserializer, Serializer}; - - use super::*; - - pub fn serialize(bytes: &Bytes, serializer: S) -> Result - where - S: Serializer, - { - let encoded = z32::encode(bytes); - serializer.serialize_str(&encoded) - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - let bytes = z32::decode(s.as_bytes()).map_err(|e| D::Error::custom(e.to_string()))?; - - Ok(bytes.into()) - } -} - -pub mod serde_z32_bytes_option { - use serde::de::Error; - use serde::{Deserializer, Serializer}; - - use super::*; - - pub fn serialize(bytes: &Option, serializer: S) -> Result - where - S: Serializer, - { - let encoded = if let Some(bytes) = bytes { - z32::encode(bytes) - } else { - "".to_string() - }; - serializer.serialize_str(&encoded) - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - if s.len() == 0 { - return Ok(None) - } - let bytes = z32::decode(s.as_bytes()).map_err(|e| D::Error::custom(e.to_string()))?; - - Ok(Some(bytes.into())) - } -} - -pub mod serde_z32_signature { - use serde::de::Error; - use serde::{Deserializer, Serializer}; - - use super::*; - - pub fn serialize(signature: &Signature, serializer: S) -> Result - where - S: Serializer, - { - let encoded = z32::encode(&signature.to_bytes()); - serializer.serialize_str(&encoded) - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - let bytes = z32::decode(s.as_bytes()).map_err(|e| D::Error::custom(e.to_string()))?; - - if bytes.len() != SIGNATURE_LENGTH { - return Err(D::Error::custom("invalid length")); - } - - let mut arr = [0u8; SIGNATURE_LENGTH]; - arr.copy_from_slice(&bytes); - - Ok(Signature::from_bytes(&arr)) - } -} - -pub mod serde_z32_vec_array { - use serde::{Deserializer, Serializer}; - - use super::*; - - pub fn serialize( - vec: &Vec<[u8; N]>, - serializer: S, - ) -> Result - where - S: Serializer, - { - let encoded = vec - .iter() - .map(|bytes| z32::encode(bytes)) - .collect::>() - .join(","); - serializer.serialize_str(&encoded) - } - - pub fn deserialize<'de, D, const N: usize>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - let vec = s - .split(",") - .filter_map(|s| match z32::decode(s.as_bytes()) { - Ok(bytes) => { - if !bytes.len().eq(&N) { - return None; - } - - let mut arr = [0u8; N]; - arr.copy_from_slice(&bytes); - Some(arr) - } - Err(_) => None, - }) - .collect::>(); - - Ok(vec) - } -} diff --git a/crates/rustpatcher/src/distributor.rs b/crates/rustpatcher/src/distributor.rs new file mode 100644 index 0000000..555b750 --- /dev/null +++ b/crates/rustpatcher/src/distributor.rs @@ -0,0 +1,176 @@ +use actor_helper::{Action, Actor, Handle, act_ok}; +use distributed_topic_tracker::unix_minute; +use iroh::{ + Endpoint, NodeId, + endpoint::VarInt, + protocol::{AcceptError, ProtocolHandler}, +}; +use sha2::Digest; +use tokio::io::{AsyncReadExt, AsyncWriteExt}; +use tracing::error; + +use crate::{Patch, PatchInfo}; + +#[derive(Debug, Clone)] +pub struct Distributor { + api: Handle, +} + +#[derive(Debug)] +struct DistributorActor { + rx: tokio::sync::mpsc::Receiver>, + + self_patch_bytes: Vec, + endpoint: Endpoint, +} + +impl Distributor { + pub fn new(endpoint: Endpoint) -> anyhow::Result { + let self_patch_bytes = postcard::to_allocvec(&Patch::from_self()?)?; + let (api, 
rx) = Handle::channel(32); + tokio::spawn(async move { + let mut actor = DistributorActor { + rx, + endpoint, + self_patch_bytes, + }; + if let Err(e) = actor.run().await { + error!("Distributor actor error: {:?}", e); + } + }); + Ok(Self { api }) + } + + #[allow(non_snake_case)] + pub fn ALPN() -> Vec { + format!( + "/rustpatcher/{}/v0", + z32::encode(crate::embed::get_owner_pub_key().as_bytes()) + ) + .into_bytes() + } + + pub async fn get_patch(&self, node_id: NodeId, patch_info: PatchInfo) -> anyhow::Result { + let endpoint = self + .api + .call(act_ok!(actor => async move { + actor.endpoint.clone() + })) + .await?; + + let conn = endpoint.connect(node_id, &Distributor::ALPN()).await?; + let (mut tx, mut rx) = conn.open_bi().await?; + + // auth: hash(owner_pub_key + unix_minute) + let mut auth_hasher = sha2::Sha512::new(); + auth_hasher.update(crate::embed::get_owner_pub_key().as_bytes()); + auth_hasher.update(unix_minute(0).to_le_bytes()); + let auth_hash = auth_hasher.finalize(); + tx.write_all(&auth_hash).await?; + + if let Ok(0) = rx.read_u8().await { + anyhow::bail!("auth failed"); + } + + // read data + let buf_len = rx.read_u64().await?; + let mut buf = vec![0u8; buf_len as usize]; + rx.read_exact(&mut buf).await?; + + // verify and parse + let patch = postcard::from_bytes::(buf.as_slice())?; + patch.verify()?; + if !patch.info().eq(&patch_info) { + anyhow::bail!("patch info mismatch"); + } + + Ok(patch) + } +} + +impl Actor for DistributorActor { + async fn run(&mut self) -> anyhow::Result<()> { + loop { + tokio::select! { + Some(action) = self.rx.recv() => { + action(self).await + } + } + } + } +} + +type IrohError = Box; + +fn to_iroh_error(e: E) -> AcceptError +where + E: Into, +{ + AcceptError::User { + source: e.into(), + } +} + +impl ProtocolHandler for Distributor { + async fn accept( + &self, + connection: iroh::endpoint::Connection, + ) -> Result<(), iroh::protocol::AcceptError> { + let (mut tx, mut rx) = connection.accept_bi().await.map_err(to_iroh_error)?; + + // auth: hash(owner_pub_key + unix_minute) + let mut auth_buf = [0u8; 64]; + rx.read_exact(&mut auth_buf).await.map_err(to_iroh_error)?; + + let owner_pub_key = crate::embed::get_owner_pub_key(); + + fn auth_hash(t: i64, owner_pub_key: &ed25519_dalek::VerifyingKey) -> Vec { + let mut auth_hasher = sha2::Sha512::new(); + auth_hasher.update(owner_pub_key.as_bytes()); + auth_hasher.update(unix_minute(t).to_le_bytes()); + let auth_hash = auth_hasher.finalize(); + auth_hash.to_vec() + } + + let mut accept_auth = false; + for t in -1..2 { + if auth_buf == auth_hash(t, &owner_pub_key)[..] 
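+            // t in -1..=1: accept the auth hash for the previous, current and next
+            // unix minute, tolerating small clock skew between peers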
{ + accept_auth = true; + break; + } + } + + if !accept_auth { + tx.write_u8(0).await.map_err(to_iroh_error)?; + connection.close(VarInt::default(), b"auth failed"); + return Err(to_iroh_error(std::io::Error::new( + std::io::ErrorKind::Other, + "auth failed", + ))); + } else { + tx.write_u8(1).await.map_err(to_iroh_error)?; + } + + // send data + let self_patch_bytes = self + .api + .call(act_ok!(actor => async move { + actor.self_patch_bytes.clone() + })) + .await + .map_err(to_iroh_error)?; + + tx.write_u64(self_patch_bytes.len() as u64) + .await + .map_err(to_iroh_error) + .map_err(to_iroh_error)?; + tx.write_all(&self_patch_bytes) + .await + .map_err(to_iroh_error) + .map_err(to_iroh_error)?; + + let _ = tx.stopped().await; + + Ok(()) + } +} diff --git a/crates/rustpatcher/src/embed.rs b/crates/rustpatcher/src/embed.rs new file mode 100644 index 0000000..93f0f82 --- /dev/null +++ b/crates/rustpatcher/src/embed.rs @@ -0,0 +1,241 @@ +use std::str::FromStr; + +use once_cell::sync::OnceCell; + +use crate::{PatchInfo, Version}; + +#[doc(hidden)] +static APP_VERSION: OnceCell<&'static str> = OnceCell::new(); +#[doc(hidden)] +static OWNER_PUB_KEY: OnceCell = OnceCell::new(); + +#[doc(hidden)] +pub fn __set_version(version: &'static str) { + let _ = APP_VERSION.set(version); +} + +#[doc(hidden)] +pub fn __set_owner_pub_key(pub_key: ed25519_dalek::VerifyingKey) { + let _ = OWNER_PUB_KEY.set(pub_key); +} + +pub fn get_owner_pub_key() -> &'static ed25519_dalek::VerifyingKey { + OWNER_PUB_KEY.get().expect("Owner public key not initialized") +} + +pub fn get_app_version() -> &'static str { + APP_VERSION.get().expect("Version not initialized") +} + +// 28_bytes +// hex: 0x1742525553545041544348455242454d42454442424f554e44534217 +#[doc(hidden)] +pub static EMBED_BOUNDS: &[u8] = b"\x17\x42RUSTPATCHER\x42EMBED\x42BOUNDS\x42\x17"; + +#[doc(hidden)] +const VERSION_FIELD_LEN: usize = 16; +#[doc(hidden)] +const VERSION_ASCII: &str = env!("CARGO_PKG_VERSION"); + +#[doc(hidden)] +const fn version_field_ascii_padded(s: &str) -> [u8; VERSION_FIELD_LEN] { + let bytes = s.as_bytes(); + let mut out = [0u8; VERSION_FIELD_LEN]; + let mut i = 0; + while i < bytes.len() && i < VERSION_FIELD_LEN { + out[i] = bytes[i]; + i += 1; + } + out +} + +#[doc(hidden)] +const VERSION_BYTES: [u8; VERSION_FIELD_LEN] = version_field_ascii_padded(VERSION_ASCII); + +#[doc(hidden)] +const BIN_HASH: [u8; 32] = [0; 32]; +#[doc(hidden)] +const BIN_SIZE: [u8; 8] = [0; 8]; +#[doc(hidden)] +const BIN_SIG: [u8; 64] = [0; 64]; +#[doc(hidden)] +pub const EMBED_REGION_LEN: usize = + 28 + VERSION_BYTES.len() + BIN_HASH.len() + BIN_SIZE.len() + BIN_SIG.len() + 28; + +// Assert sizes at compile time +#[doc(hidden)] +const _: () = { + assert!(EMBED_BOUNDS.len() == 28); + assert!(VERSION_BYTES.len() == 16); + assert!(EMBED_REGION_LEN == 176); +}; + +// Build const array without any runtime code or allocation +#[doc(hidden)] +#[unsafe(link_section = ".embedded_signature")] +#[used] +#[unsafe(no_mangle)] +pub static EMBED_REGION: [u8; EMBED_REGION_LEN] = { + let mut buf = [0u8; EMBED_REGION_LEN]; + let mut off = 0; + + // bounds start + { + let b = EMBED_BOUNDS; + let mut i = 0; + while i < b.len() { + buf[off + i] = b[i]; + i += 1; + } + off += b.len(); + } + + // bin_hash placeholder + { + let b = BIN_HASH; + let mut i = 0; + while i < b.len() { + buf[off + i] = b[i]; + i += 1; + } + off += b.len(); + } + + // bin_size placeholder + { + let b = BIN_SIZE; + let mut i = 0; + while i < b.len() { + buf[off + i] = b[i]; + i += 1; + } + off += b.len(); + 
} + + // bin_sig placeholder + { + let b = BIN_SIG; + let mut i = 0; + while i < b.len() { + buf[off + i] = b[i]; + i += 1; + } + off += b.len(); + } + + // padded-str-version + { + let b = VERSION_BYTES; + let mut i = 0; + while i < b.len() { + buf[off + i] = b[i]; + i += 1; + } + off += b.len(); + } + + // bounds end + { + let b = EMBED_BOUNDS; + let mut i = 0; + while i < b.len() { + buf[off + i] = b[i]; + i += 1; + } + } + buf +}; + +#[doc(hidden)] +pub fn embed(version: &'static str, pub_key: &'static str) { + __set_version(version); + __set_owner_pub_key(z32::decode(pub_key.as_bytes()).ok().and_then(|k_bytes| { + let key_array: [u8; 32] = k_bytes.try_into().ok()?; + ed25519_dalek::VerifyingKey::from_bytes(&key_array).ok() + }).expect("failed to decode public key")); + #[cfg(not(debug_assertions))] + unsafe { + core::ptr::read_volatile(&EMBED_REGION as *const _); + } +} + +#[doc(hidden)] +pub struct EmbeddedRegion { + pub start: usize, + pub end: usize, +} + +#[doc(hidden)] +pub fn cut_embed_section(bin_bytes: Vec) -> anyhow::Result<(Vec, Vec, EmbeddedRegion)> { + let start = bin_bytes + .windows(EMBED_BOUNDS.len()) + .position(|window| window == EMBED_BOUNDS) + .ok_or_else(|| anyhow::anyhow!("failed to find embed bounds start"))?; + let end = bin_bytes + .windows(EMBED_BOUNDS.len()) + .rposition(|window| window == EMBED_BOUNDS) + .ok_or_else(|| anyhow::anyhow!("failed to find embed bounds end"))? + + EMBED_BOUNDS.len(); + if end as i128 - start as i128 != EMBED_REGION.len() as i128 { + return Err(anyhow::anyhow!("invalid embed section size")); + } + let mut out = bin_bytes; + let embed_region = out.drain(start..end).into_iter().collect::>(); + Ok((out, embed_region, EmbeddedRegion { start, end })) +} + +#[doc(hidden)] +pub fn get_embedded_version(embed_region_bytes: &Vec) -> anyhow::Result { + let version_offset = EMBED_BOUNDS.len() + BIN_HASH.len() + BIN_SIZE.len() + BIN_SIG.len(); + let version_bytes = + embed_region_bytes[version_offset..version_offset + VERSION_FIELD_LEN].to_vec(); + let version_str = std::str::from_utf8(&version_bytes)?; + Version::from_str(version_str.trim_end_matches(char::from(0)).trim()) +} + +#[doc(hidden)] +pub fn get_embedded_patch_info(bin_data: &Vec) -> anyhow::Result { + let (_, embed_region_bytes, _) = cut_embed_section(bin_data.clone())?; + + let (_, buf) = embed_region_bytes.split_at(EMBED_BOUNDS.len()); + let (hash_buf, buf) = buf.split_at(BIN_HASH.len()); + let (size_buf, buf) = buf.split_at(BIN_SIZE.len()); + let (sig_buf, _) = buf.split_at(BIN_SIG.len()); + + let version = get_embedded_version(&embed_region_bytes)?; + let size = u64::from_le_bytes(size_buf.try_into().map_err(|_| anyhow::anyhow!("invalid size bytes"))?); + let hash: [u8; 32] = hash_buf.try_into().map_err(|_| anyhow::anyhow!("invalid hash bytes"))?; + let signature: [u8; 64] = sig_buf.try_into().map_err(|_| anyhow::anyhow!("invalid signature bytes"))?; + + Ok(crate::PatchInfo { + version, + size, + hash, + signature: signature.into(), + }) +} + +#[doc(hidden)] +pub fn set_embedded_patch_info(bin_data: &mut Vec, patch_info: PatchInfo,embed_region_bytes: EmbeddedRegion) -> anyhow::Result<()> { + + let (start, end) = (embed_region_bytes.start, embed_region_bytes.end); + if end - start != EMBED_REGION_LEN { + return Err(anyhow::anyhow!("invalid embed region length")); + } + + let mut region_buf = Vec::with_capacity(EMBED_REGION_LEN); + region_buf.extend_from_slice(EMBED_BOUNDS); + region_buf.extend_from_slice(&patch_info.hash); + 
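+    // field order is fixed and must match EMBED_REGION / get_embedded_patch_info:
+    // bounds (28) | hash (32) | size (8, LE) | signature (64) | version (16, ASCII) | bounds (28)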
region_buf.extend_from_slice(&patch_info.size.to_le_bytes()); + region_buf.extend_from_slice(&patch_info.signature.to_bytes()); + region_buf.extend_from_slice(&VERSION_BYTES); + region_buf.extend_from_slice(EMBED_BOUNDS); + + if region_buf.len() != EMBED_REGION_LEN { + return Err(anyhow::anyhow!("internal error: invalid embed region length")); + } + + bin_data.splice(start..end, region_buf.iter().cloned()); + + Ok(()) +} \ No newline at end of file diff --git a/crates/rustpatcher/src/lib.rs b/crates/rustpatcher/src/lib.rs index 8ae9a60..46c9b97 100644 --- a/crates/rustpatcher/src/lib.rs +++ b/crates/rustpatcher/src/lib.rs @@ -1,1154 +1,25 @@ -pub mod data; -pub mod utils; -pub mod version_embed; +mod version; +mod patch; +mod patcher; +mod publisher; +mod updater; +mod distributor; -pub use rustpatcher_macros::main; +#[doc(hidden)] +pub mod embed; -use std::{ - cmp::min, env, ffi::CString, future::Future, io::Write, pin::Pin, ptr, sync::Arc, - time::Duration, -}; +#[doc(hidden)] +pub use version::Version; -use anyhow::{bail, Result}; -use data::{Auth, AuthRequest, Inner, Patcher, Protocol, Version, VersionInfo, VersionTracker}; -use ed25519_dalek::{ - ed25519::signature::SignerMut, Signature, SigningKey, PUBLIC_KEY_LENGTH, SECRET_KEY_LENGTH, -}; -use iroh::{ - endpoint::{Connecting, Connection, Endpoint, RecvStream, SendStream}, - protocol::ProtocolHandler, - NodeAddr, NodeId, SecretKey, -}; -use iroh_topic_tracker::topic_tracker::TopicTracker; -use nix::libc::{self}; -use pkarr::{dns, Client, Keypair, PublicKey, SignedPacket}; -use rand::{rngs::OsRng, Rng}; -use reqwest::{Method, StatusCode}; -use serde::{Deserialize, Serialize}; -use tokio::{ - fs::File, - io::{AsyncReadExt, AsyncWriteExt}, - sync::{ - mpsc::{self, Receiver}, - Mutex, - }, - time::sleep, -}; -use utils::{ - compute_hash, get_app_version, Storage, LAST_REPLY_ID_NAME, LAST_TRUSTED_PACKAGE, - LATEST_VERSION_NAME, PKARR_PUBLISHING_INTERVAL, PUBLISHER_SIGNING_KEY_NAME, - PUBLISHER_TRUSTED_KEY_NAME, SECRET_KEY_NAME, SHARED_SECRET_KEY_NAME, -}; +#[doc(hidden)] +pub use patch::{Patch,PatchInfo}; -use crate::utils::wait_for_relay; +use publisher::Publisher; +use updater::Updater; +use distributor::Distributor; -#[derive(Debug, Clone)] -pub struct Builder { - secret_key: [u8; SECRET_KEY_LENGTH], - trusted_key: Option<[u8; PUBLIC_KEY_LENGTH]>, - shared_secret_key: Option<[u8; SECRET_KEY_LENGTH]>, - load_latest_version_from_file: bool, - load_secret_key_from_file: bool, - master_node: bool, - trusted_packet: Option, - update_interval: Duration, -} +use embed::get_owner_pub_key; -impl Builder { - pub fn new() -> Self { - Self { - secret_key: SecretKey::generate(rand::rngs::OsRng).to_bytes(), - shared_secret_key: None, - trusted_key: None, - load_latest_version_from_file: true, - load_secret_key_from_file: true, - master_node: false, - trusted_packet: None, - update_interval: PKARR_PUBLISHING_INTERVAL, - } - } - - pub fn load_latest_version_from_file(mut self, val: bool) -> Self { - self.load_latest_version_from_file = val; - self - } - - pub fn update_interval(mut self, update_interval: Duration) -> Self { - self.update_interval = update_interval; - self - } - - pub fn load_secret_key_from_file(mut self, val: bool) -> Self { - self.load_secret_key_from_file = val; - self - } - - pub fn trusted_key(mut self, trusted_key: &[u8; PUBLIC_KEY_LENGTH]) -> Self { - self.trusted_key = Some(*trusted_key); - self - } - - pub fn shared_secret_key(mut self, shared_secret_key: &[u8; SECRET_KEY_LENGTH]) -> Self { - self.shared_secret_key = 
Some(*shared_secret_key); - self - } - - pub fn trusted_key_from_z32_str(mut self, trusted_key: &str) -> Self { - let tk = z32::decode(trusted_key.as_bytes()); - if tk.is_err() { - return self; - } - - let mut trusted_key_buf = [0u8; PUBLIC_KEY_LENGTH]; - trusted_key_buf.copy_from_slice(tk.unwrap().as_slice()); - self.trusted_key = Some(trusted_key_buf); - self - } - - pub fn shared_secret_key_from_z32_str(mut self, shared_secret_key: &str) -> Self { - let sk = z32::decode(shared_secret_key.as_bytes()); - if sk.is_err() { - return self; - } - - let mut shared_secret_key_buf = [0u8; PUBLIC_KEY_LENGTH]; - shared_secret_key_buf.copy_from_slice(sk.unwrap().as_slice()); - self.shared_secret_key = Some(shared_secret_key_buf); - self - } - - async fn check_cli(self) -> anyhow::Result<()> { - let args: Vec = env::args().collect(); - - if args.len() == 3 && "rustpatcher".eq(&args[1]) { - match args[2].as_str() { - "init" => { - self.init().await?; - std::process::exit(0); - } - "publish" => { - self.publish().await?; - } - _ => {} - } - } - Ok(()) - } - - async fn init(self) -> anyhow::Result<()> { - let publisher_signing_key = { - if let Ok(secret_key) = SecretKey::from_file(PUBLISHER_SIGNING_KEY_NAME).await { - SigningKey::from_bytes(&secret_key.to_bytes()) - } else { - let mut csprng = OsRng; - let signing_key = SigningKey::generate(&mut csprng); - - // persist generated keys - signing_key - .clone() - .to_file(PUBLISHER_SIGNING_KEY_NAME) - .await?; - signing_key - .clone() - .verifying_key() - .to_file(PUBLISHER_TRUSTED_KEY_NAME) - .await?; - signing_key - } - }; - - let shared_key = { - if let Ok(secret_key) = SecretKey::from_file(SHARED_SECRET_KEY_NAME).await { - SigningKey::from_bytes(&secret_key.to_bytes()) - } else { - let mut csprng = OsRng; - let signing_key = SigningKey::generate(&mut csprng); - - // persist generated keys - signing_key.clone().to_file(SHARED_SECRET_KEY_NAME).await?; - signing_key - } - }; - - println!(""); - println!(""); - println!("New Signing key generated in ./patcher/publisher_signing_key!"); - println!(""); - println!( - " Trusted-Key = {}", - z32::encode(publisher_signing_key.verifying_key().as_bytes()) - ); - println!(" Shared-Secret = {}", z32::encode(shared_key.as_bytes())); - println!(""); - println!("Insert the new trusted key into the patcher builder:"); - println!(""); - println!( - r#"let patcher = Patcher::new() - .trusted_key_from_z32_str("{}") - .shared_secret_key_from_z32_str("{}") - .build() - .await?;"#, - z32::encode(publisher_signing_key.verifying_key().as_bytes()), - z32::encode(shared_key.as_bytes()) - ); - println!(""); - println!(""); - - Ok(()) - } - - async fn publish(self) -> anyhow::Result<()> { - let version = get_app_version()?; - let file_path = std::env::current_exe()?; - let mut file = File::open(file_path).await?; - let mut buf = vec![]; - file.read_to_end(&mut buf).await?; - - log::warn!("Version: {version:?}"); - - let mut publisher_signing_key = { - if let Ok(secret_key) = SecretKey::from_file(PUBLISHER_SIGNING_KEY_NAME).await { - SigningKey::from_bytes(&secret_key.to_bytes()) - } else { - let mut csprng = OsRng; - let signing_key = SigningKey::generate(&mut csprng); - - // persist generated keys - signing_key - .clone() - .to_file(PUBLISHER_SIGNING_KEY_NAME) - .await?; - signing_key - .clone() - .verifying_key() - .to_file(PUBLISHER_TRUSTED_KEY_NAME) - .await?; - signing_key - } - }; - - let node_secret_key = { - if let Ok(secret_key) = ed25519_dalek::SecretKey::from_file(SECRET_KEY_NAME).await { - secret_key - } else { - let 
signing_key = *publisher_signing_key.as_bytes(); - signing_key.clone().to_file(SECRET_KEY_NAME).await?; - signing_key - } - }; - - if !publisher_signing_key.as_bytes().eq(&node_secret_key) { - anyhow::bail!( - "secret key and publisher signing key don't match. not allowed for trusted node" - ) - } - - let signature = publisher_signing_key.sign(&buf.as_slice()); - let hash = compute_hash(&buf); - let trusted_key = publisher_signing_key.verifying_key().as_bytes().clone(); - - let version_info = VersionInfo { - version, - hash, - signature, - }; - let version_tracker = VersionTracker::load( - &trusted_key, - &version_info, - &buf.clone().into(), - vec![node_secret_key], - )?; - version_tracker.to_file(LATEST_VERSION_NAME).await?; - - log::warn!( - "Signature validation check: {:?}", - VersionTracker::verify_data(&trusted_key, &version_info, &buf.clone().into()) - ); - - log::warn!("Sig: {}", z32::encode(&signature.to_bytes())); - log::warn!("hash: {}", z32::encode(&hash)); - log::warn!("trusted: {}", z32::encode(&trusted_key)); - - log::warn!("Publish successfull!"); - - Ok(()) - } - - pub async fn build(self: &mut Self) -> anyhow::Result { - self.clone().check_cli().await?; - - if self.trusted_key.is_none() { - bail!("trusted key required") - } - if self.shared_secret_key.is_none() { - bail!("shared secret key required") - } - - if self.load_secret_key_from_file { - if let Ok(secret_key) = SecretKey::from_file(SECRET_KEY_NAME).await { - self.secret_key = secret_key.to_bytes(); - if self.trusted_key.is_some() - && SigningKey::from_bytes(&self.secret_key) - .verifying_key() - .as_bytes() - .eq(&self.trusted_key.unwrap()) - { - // Master node here - self.master_node = true; - log::warn!("Master node"); - } - } - } - - if let Ok(signed_packet) = SignedPacket::from_file(LAST_TRUSTED_PACKAGE).await { - self.trusted_packet = Some(signed_packet); - } - - // Iroh setup - let secret_key = SecretKey::from_bytes(&self.secret_key); - let endpoint = Endpoint::builder() - .secret_key(secret_key) - .discovery_n0() - .bind() - .await?; - - let topic_tracker = TopicTracker::new(&endpoint); - let patcher = if self.load_latest_version_from_file { - let latest_version = VersionTracker::from_file(LATEST_VERSION_NAME).await; - - if latest_version.is_ok() { - let latest_version = latest_version.unwrap(); - if self.master_node { - self.trusted_packet = - Some(latest_version.as_signed_packet(&self.secret_key).await?); - log::warn!("master mode"); - } - Patcher::with_latest_version( - &self.trusted_key.unwrap(), - &self.shared_secret_key.unwrap(), - &endpoint, - &topic_tracker, - self.trusted_packet.clone(), - latest_version, - self.update_interval, - ) - } else { - let me = Patcher::with_latest_version( - &self.trusted_key.unwrap(), - &self.shared_secret_key.unwrap(), - &endpoint, - &topic_tracker, - self.trusted_packet.clone(), - VersionTracker::new(&self.trusted_key.unwrap()), - self.update_interval, - ); - me - } - } else { - Patcher::with_latest_version( - &self.trusted_key.unwrap(), - &self.shared_secret_key.unwrap(), - &endpoint, - &topic_tracker, - self.trusted_packet.clone(), - VersionTracker::new(&self.trusted_key.unwrap()), - self.update_interval, - ) - }; - - let _router = iroh::protocol::Router::builder(endpoint.clone()) - .accept(&patcher.ALPN(), patcher.clone()) - .spawn() - .await?; - - Ok(patcher._spawn().await?) 
- } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -struct SigPackage(pub Vec); - -impl Patcher { - const MAX_MSG_SIZE_BYTES: u64 = 1024 * 1024 * 1024; - pub fn new() -> Builder { - Builder::new() - } - - #[allow(non_snake_case)] - pub fn ALPN(&self) -> Vec { - let shared_key = SigningKey::from_bytes(&self.shared_secret_key); - let verifying_key = shared_key.verifying_key(); - format!("{}/0", z32::encode(verifying_key.as_bytes())).into_bytes() - } - - pub async fn persist(&self) -> anyhow::Result<()> { - let inner = self.inner.clone(); - - let secret_key = self.secret_key.clone(); - secret_key.to_file(SECRET_KEY_NAME).await?; - - if let Some(lp) = inner.latest_trusted_package.lock().await.clone() { - lp.as_bytes() - .clone() - .to_vec() - .to_file(LAST_TRUSTED_PACKAGE) - .await?; - } - - Ok(()) - } - - pub async fn update_available(self) -> Result { - let lv = self.inner.latest_version.lock().await.clone(); - let version = get_app_version()?; - let patcher_version = lv.version_info(); - - Ok(patcher_version.is_some() && version < patcher_version.unwrap().version) - } - - pub async fn try_update(self) -> Result<()> { - if self.clone().update_available().await? == false { - bail!("no update available") - } - let lv = self.clone().inner.latest_version.lock().await.clone(); - if lv.data().is_none() { - bail!("no new version found in version tracker") - } - let data = lv.data().unwrap(); - let mut temp_file = tempfile::NamedTempFile::new()?; - temp_file.write_all(&data)?; - let path = temp_file.path(); - - // Get exe path before self_replace. - // after: it will add "[..] (deleted)" - // to the end of the filename on ubuntu (maybe all linux). - let exe_raw = std::env::current_exe()?; - let exe = CString::new(exe_raw.to_str().unwrap())?; - - self_replace::self_replace(path)?; - - // The array must be null-terminated. 
- let args: [*const libc::c_char; 1] = [ptr::null()]; - - unsafe { - libc::execv(exe.as_ptr(), args.as_ptr()); - } - Ok(()) - } -} -trait TPatcher: Sized { - fn with_latest_version( - trusted_key: &[u8; PUBLIC_KEY_LENGTH], - shared_secret_key: &[u8; SECRET_KEY_LENGTH], - endpoint: &Endpoint, - topic_tracker: &TopicTracker, - signed_packet: Option, - latest_version: VersionTracker, - pkarr_publishing_interval: Duration, - ) -> Self; - async fn _spawn(self) -> Result; - async fn _spawn_pkarr_publish(self) -> Result<()>; - async fn _spawn_pkarr_trusted_publish(self) -> Result>; - async fn _spawn_updater( - self, - trusted_update_notifier: Receiver, - tracker_update_notifier: Receiver, - ) -> Result<()>; - async fn _spawn_topic_tracker_update(self) -> Result>; - async fn topic_tracker_update(self) -> Result>; - async fn update(self: &mut Self, new_version_info: VersionInfo) -> Result<()>; -} - -impl TPatcher for Patcher { - fn with_latest_version( - trusted_key: &[u8; PUBLIC_KEY_LENGTH], - shared_secret_key: &[u8; SECRET_KEY_LENGTH], - endpoint: &Endpoint, - topic_tracker: &TopicTracker, - signed_packet: Option, - latest_version: VersionTracker, - pkarr_publishing_interval: Duration, - ) -> Self { - let me = Self { - trusted_key: trusted_key.clone(), - shared_secret_key: shared_secret_key.clone(), - inner: Inner { - endpoint: endpoint.clone(), - topic_tracker: topic_tracker.clone(), - latest_version: Arc::new(Mutex::new(latest_version)), - latest_trusted_package: Arc::new(Mutex::new(signed_packet)), - pkarr_publishing_interval: pkarr_publishing_interval, - }, - secret_key: endpoint.secret_key().to_bytes(), - public_key: endpoint.node_id().as_bytes().clone(), - }; - me - } - - // Publish the latest version_tracker own file under own key - async fn _spawn_pkarr_publish(self) -> Result<()> { - tokio::spawn({ - let me = self.clone(); - async move { - loop { - let version_tracker = { me.inner.latest_version.lock().await.clone() }; - if version_tracker.version_info().is_some() && version_tracker.data().is_some() - { - let _ = me.publish_pkarr().await; - } - sleep(self.inner.pkarr_publishing_interval).await; - } - } - }); - Ok(()) - } - - async fn _spawn_pkarr_trusted_publish(self) -> Result> { - let (tx, rx) = mpsc::channel(1); - - tokio::spawn({ - let me = self.clone(); - async move { - loop { - if let Ok((trusted_version_info, trusted_signed_packet)) = - me.resolve_pkarr(&me.trusted_key).await - { - if trusted_signed_packet - .public_key() - .as_bytes() - .eq(&me.trusted_key) - { - // Check if latest_trusted_packet is the same - { - let l_trusted_packet = - self.inner.latest_trusted_package.lock().await.clone(); - if l_trusted_packet.clone().is_some() - && l_trusted_packet - .unwrap() - .as_bytes() - .eq(trusted_signed_packet.as_bytes()) - { - // Same packet as last time (no update) - log::warn!("no update"); //, {:?}",me.inner.latest_version.lock().await.version_info()); - let _ = self.publish_trusted_pkarr().await; - sleep(self.inner.pkarr_publishing_interval).await; - continue; - } - } - - // Different package candidate - - // check if newer version update notifier - let lt = me.inner.latest_version.lock().await.clone(); - - // Check for newer version signed packet even exists at all - { - let me_signed_package = - me.inner.latest_trusted_package.lock().await.clone(); - if me_signed_package.is_none() - || (lt.version_info().is_some() - && trusted_version_info.version - > lt.version_info().unwrap().version) - { - let mut signed_packet = - self.inner.latest_trusted_package.lock().await; - 
*signed_packet = Some(trusted_signed_packet); - drop(signed_packet); - let _ = self.persist().await; - log::warn!("Signed packet replaced"); - } - } - - // Update notifier - if lt.version_info().is_none() - || trusted_version_info.version > lt.version_info().unwrap().version - { - let vtc = self.inner.latest_version.lock().await.clone(); - if vtc.version_info().is_none() - || vtc.version_info().unwrap().version - < trusted_version_info.version - { - log::warn!( - "Send update notification: {:?}", - trusted_version_info.version - ); - let _ = tx.send(trusted_version_info).await; - } - } - } - } else { - log::warn!("Failed to resolve trusted key!"); - } - - let _ = self.publish_trusted_pkarr().await; - sleep(self.inner.pkarr_publishing_interval).await; - } - } - }); - Ok(rx) - } - - async fn _spawn_updater( - self, - mut trusted_update_notifier: Receiver, - mut tracker_update_notifier: Receiver, - ) -> Result<()> { - let my_version = get_app_version()?; - tokio::spawn({ - let me = self.clone(); - async move { - loop { - tokio::select! { - Some(trusted_potential_update) = trusted_update_notifier.recv() => { - log::warn!( - "RCs: {}", - trusted_potential_update.version.to_string() - ); - if my_version < trusted_potential_update.version || me.clone().inner.latest_version.lock().await.clone().data().is_none() { - log::warn!("Starting to update!"); - match me.clone().update(trusted_potential_update).await { - Ok(_) => { - log::warn!("Update attempt successfull"); - } - Err(_) => log::warn!("Update attempt failed: "), - } - } - } - Some(tracker_potential_update) = tracker_update_notifier.recv() => { - log::warn!( - "RCs: {}", - tracker_potential_update.version.to_string() - ); - if my_version < tracker_potential_update.version || me.clone().inner.latest_version.lock().await.clone().data().is_none() { - log::warn!("Starting to update!"); - match me.clone().update(tracker_potential_update).await { - Ok(_) => { - log::warn!("Update successfull") - } - Err(_) => log::warn!("Update attempt failed: "), - } - } - } - //todo - } - } - } - }); - Ok(()) - } - - async fn update(self: &mut Self, new_version_info: VersionInfo) -> Result<()> { - // Update - // 1. Find node ids - // 2. Try update from node ids - log::warn!("update: version {}", new_version_info.version.to_string()); - - // 1. Find node ids via topic tracker - let topic_tracker = self.inner.topic_tracker.clone(); - let node_ids = topic_tracker - .get_topic_nodes(&new_version_info.to_topic_hash(self.shared_secret_key)?) - .await?; - log::warn!("update: found node_ids: {:?}", node_ids); - for node_id in node_ids.clone() { - // 2. 
try and update - match self - .try_update(iroh::PublicKey::from_bytes(&node_id.as_bytes()).unwrap()) - .await - { - Ok(_) => { - log::warn!("New version downloaded"); - return Ok(()); - } - Err(err) => { - log::warn!("New version download failed: {err:?}"); - let mut lt = self.inner.latest_version.lock().await; - lt.rm_node_id(&node_id.as_bytes()); - } - } - } - bail!("no node ids found") - } - - async fn _spawn(self) -> Result { - // Iroh - tokio::spawn({ - let me2 = self.clone(); - async move { - while let Some(connecting) = me2.inner.endpoint.accept().await { - match connecting.accept() { - Ok(conn) => { - tokio::spawn({ - let me3 = me2.clone(); - async move { - let res = me3.accept_handler(conn).await; - log::warn!("accept - {res:?}"); - } - }); - } - Err(_) => { - log::warn!("Failed to connect"); - } - } - } - } - }); - - let tracker_notifier = self.clone()._spawn_topic_tracker_update().await?; - self.clone()._spawn_pkarr_publish().await?; - let notifier = self.clone()._spawn_pkarr_trusted_publish().await?; - self.clone() - ._spawn_updater(notifier, tracker_notifier) - .await?; - - Ok(self) - } - - async fn _spawn_topic_tracker_update(self) -> Result> { - let (tx, rx) = mpsc::channel(1024); - - tokio::spawn({ - let me = self.clone(); - async move { - loop { - let node_ids = me.clone().topic_tracker_update().await; - if let Ok(node_ids) = node_ids { - for node_id in node_ids { - if let Ok((vi, _)) = me.resolve_pkarr(&node_id.as_bytes()).await { - let _ = tx.send(vi).await; - } - } - } - sleep(self.inner.pkarr_publishing_interval).await - } - } - }); - Ok(rx) - } - - async fn topic_tracker_update(self) -> Result> { - let lv = self.inner.latest_version.lock().await.clone(); - if let Some(vi) = lv.version_info() { - if let Ok(topic_hash) = vi.to_topic_hash(self.shared_secret_key) { - return self.inner.topic_tracker.get_topic_nodes(&topic_hash).await; - } - } - Ok(vec![]) - } -} - -trait TPatcherIroh: Sized { - async fn send_msg(msg: Protocol, send: &mut SendStream) -> Result<()>; - async fn recv_msg(recv: &mut RecvStream) -> Result; - async fn accept_handler(&self, conn: Connecting) -> Result<()>; - async fn try_update(self: &mut Self, node_id: NodeId) -> Result<()>; -} - -impl TPatcherIroh for Patcher { - async fn send_msg(msg: Protocol, send: &mut SendStream) -> Result<()> { - let encoded = postcard::to_stdvec(&msg)?; - assert!(encoded.len() <= Self::MAX_MSG_SIZE_BYTES as usize); - - send.write_u64_le(encoded.len() as u64).await?; - let chunk_size = 1024; - let chunks = encoded - .chunks(chunk_size) - .into_iter() - .collect::>(); - for mut chunk in chunks { - send.write_all(&mut chunk).await?; - } - - Ok(()) - } - - async fn recv_msg(recv: &mut RecvStream) -> Result { - log::warn!("starting to recv msg"); - let len = recv.read_u64_le().await? as usize; - log::warn!("Recv: len: {len}"); - - assert!(len <= Self::MAX_MSG_SIZE_BYTES as usize); - - let mut buffer = [0u8; 1024]; - let mut data = Vec::with_capacity(len); - - while let Some(size) = recv.read(&mut buffer).await? 
{ - data.extend_from_slice(&buffer[..min(size, len - data.len())]); - - if data.len() == len { - break; - } - } - - let msg: Protocol = postcard::from_bytes(&data)?; - Ok(msg) - } - - async fn try_update(self: &mut Self, node_id: NodeId) -> Result<()> { - log::warn!("try update"); - let (node_version_info, _) = self.resolve_pkarr(node_id.as_bytes()).await?; - { - let me_version_info = self.inner.latest_version.lock().await.version_info(); - // if we dont have an inner version update - log::warn!( - "try_update: me version info: is_some == {}", - me_version_info.is_some() - ); - if me_version_info.is_some() - && node_version_info.version <= me_version_info.unwrap().version - { - bail!( - "node version not newer {}", - node_version_info.version.to_string() - ) - } - } - - wait_for_relay(&self.inner.endpoint).await?; - log::warn!("update: got record: {:?}", z32::encode(node_id.as_bytes())); - - let conn = self - .inner - .endpoint - .connect(NodeAddr::new(node_id), &self.ALPN()) - .await?; - - let (mut send, mut recv) = conn.open_bi().await?; - log::warn!("update: connected to {}", z32::encode(node_id.as_bytes())); - - Self::send_msg(Protocol::Ready, &mut send).await?; - - // 1 . Receive auth token to sign with shared key - match Self::recv_msg(&mut recv).await? { - Protocol::AuthRequest(auth_request) => { - let mut key = SigningKey::from_bytes(&self.shared_secret_key); - let request = Protocol::Request(Auth { - signature: key.sign(&auth_request.sign_request), - }); - Self::send_msg(request, &mut send).await? - } - _ => bail!("expected auth request got something different"), - } - - // Await data request after successfull auth - match Self::recv_msg(&mut recv).await? { - Protocol::Data(version_info, data) => { - log::warn!("update: data received: {}", data.len()); - let mut latest_vt = self.inner.latest_version.lock().await; - let latest_version_info = latest_vt.version_info(); - if latest_version_info.is_none() - || latest_version_info.unwrap().version < version_info.version - { - // Signature checked in update_version - // as low as possible - latest_vt.update_version( - &version_info, - &data, - Some(vec![*node_id.as_bytes()]), - )?; - } - drop(latest_vt); - self.persist().await?; - - self.publish_pkarr().await?; - log::warn!("After data received and lv overwrite pkarr published"); - } - Protocol::DataUnavailable => {} - _ => { - log::warn!("update - illegal msg or auth failure"); - bail!("illegal message received") - } - }; - - Self::send_msg(Protocol::Done, &mut send).await?; - - //Self::recv_msg(&mut recv).await?; - Ok(()) - } - - async fn accept_handler(&self, conn: Connecting) -> Result<()> { - let connection = conn.await?; - let remote_node_id = connection.remote_node_id()?; - - log::warn!("accept - splitting streams"); - let (mut send, mut recv) = connection.accept_bi().await?; - log::warn!( - "accept - new connection accepted: {}", - z32::encode(remote_node_id.as_bytes()) - ); - - match Self::recv_msg(&mut recv).await? { - Protocol::Ready => { - log::warn!("accept - starting auth") - } - _ => bail!("illegal command"), - }; - - // 1 . Send auth request - let token = rand::thread_rng().gen::<[u8; 32]>(); - let auth_req = Protocol::AuthRequest(AuthRequest { - sign_request: token.to_vec().into(), - }); - Self::send_msg(auth_req, &mut send).await?; - - // 2 . 
recv auth token signed with shared key - let request = Self::recv_msg(&mut recv).await?; - match request { - Protocol::Request(auth) => { - let key = SigningKey::from_bytes(&self.shared_secret_key); - if key.verify(&token, &auth.signature).is_err() { - log::warn!("auth failure"); - bail!("authentication failed") - } - } - _ => bail!("Illegal request"), - }; - - // 3 . after auth confirmation send data - log::warn!("accepted - auth successfull"); - log::warn!("accept - sending data..."); - let latest_vt = { self.inner.latest_version.lock().await.clone() }; - - if latest_vt.version_info().is_none() { - log::warn!("accept - Data unavailable"); - Self::send_msg(Protocol::DataUnavailable, &mut send).await?; - return Ok(()); - } - let resp = Protocol::Data(latest_vt.version_info().unwrap(), latest_vt.data().unwrap()); - - Self::send_msg(resp, &mut send).await?; - Self::recv_msg(&mut recv).await?; - - self.inner - .latest_version - .lock() - .await - .add_node_id(remote_node_id.as_bytes()); - - send.finish()?; - Ok(()) - } -} - -trait TPatcherPkarr: Sized { - fn resolve_pkarr( - &self, - public_key: &[u8; PUBLIC_KEY_LENGTH], - ) -> impl std::future::Future> + Send; - fn publish_pkarr(&self) -> impl std::future::Future> + Send; - fn publish_trusted_pkarr(&self) - -> impl std::future::Future> + Send; - fn pkarr_dht_relay_switch_get( - &self, - public_key: &[u8; PUBLIC_KEY_LENGTH], - ) -> impl std::future::Future>> + Send; - fn pkarr_dht_relay_switch_put( - &self, - public_key: &[u8; PUBLIC_KEY_LENGTH], - signed_packet: SignedPacket, - ) -> impl std::future::Future> + Send; -} - -impl TPatcherPkarr for Patcher { - async fn resolve_pkarr( - &self, - public_key: &[u8; PUBLIC_KEY_LENGTH], - ) -> anyhow::Result<(VersionInfo, SignedPacket)> { - match self.pkarr_dht_relay_switch_get(public_key).await { - Ok(Some(pkg)) => { - let enc_packet = pkg.encoded_packet().to_vec(); - let packet = &dns::Packet::parse(enc_packet.as_slice())?; - - let version = utils::decode_rdata::(packet, "_version")?; - let hash = utils::decode_rdata::<[u8; PUBLIC_KEY_LENGTH]>(packet, "_hash")?; - let signature = utils::decode_rdata::(packet, "_signature")?; - - Ok(( - VersionInfo { - version, - hash, - signature, - }, - pkg, - )) - } - _ => bail!("failed to resolve package: {}", z32::encode(public_key)), - } - } - - async fn publish_trusted_pkarr(&self) -> anyhow::Result<()> { - let signed_packet = { self.inner.latest_trusted_package.lock().await.clone() }; - if let Some(signed_packet) = signed_packet { - return match self - .pkarr_dht_relay_switch_put(&self.trusted_key, signed_packet) - .await - { - Ok(_) => Ok(()), - Err(err) => bail!("bail {}", err), - }; - } - bail!("nomb") - } - - async fn publish_pkarr(&self) -> anyhow::Result<()> { - let lt = { self.inner.latest_version.lock().await.clone() }; - - if lt.version_info().is_none() { - log::warn!("no version available locally"); - bail!("no version available locally") - } - - // Set reply id to unix time - let vi = lt.version_info().unwrap(); - let mut last_reply_id: LastReplyId = LastReplyId::from_file(LAST_REPLY_ID_NAME) - .await - .unwrap_or(LastReplyId(0)); - let mut signed_packet = SignedPacket::builder(); - - // Not sure if rap around will cause an error so to be safe - last_reply_id.0 = if last_reply_id.0 >= u16::MAX - 1 { - 0 - } else { - last_reply_id.0 + 1 - }; - let _ = last_reply_id.to_file("last_reply_id").await; - - // Version - let version = serde_json::to_string(&vi.version)?; - signed_packet = signed_packet.txt("_version".try_into()?, 
version.as_str().try_into()?, 30); - - // Signature - let signature = serde_json::to_string(&vi.signature)?; - signed_packet = signed_packet.txt("_signature".try_into()?, signature.as_str().try_into()?, 30); - - // Hash - let hash = serde_json::to_string(&vi.hash)?; - signed_packet = signed_packet.txt("_hash".try_into()?, hash.as_str().try_into()?, 30); - - let key_pair = Keypair::from_secret_key(&self.secret_key); - - log::warn!("publishing from {}", z32::encode(&self.public_key)); - match self - .pkarr_dht_relay_switch_put(&self.public_key, signed_packet.sign(&key_pair, )?) - .await - { - Ok(_) => {} - Err(err) => { - log::warn!("pkarr pub: {err}"); - } - }; - - Ok(()) - } - - async fn pkarr_dht_relay_switch_get( - &self, - public_key: &[u8; PUBLIC_KEY_LENGTH], - ) -> Result> { - let mut signed_package = None; - - // Pkarr Relay server - let req_client = reqwest::ClientBuilder::new().build()?; - if let Ok(resp) = req_client - .request( - Method::GET, - format!("https://pkarr.pubky.org/{}", z32::encode(public_key)), - ) - .send() - .await - { - if resp.status() == StatusCode::OK { - if let Ok(content) = resp.bytes().await { - let pub_key = PublicKey::try_from(public_key)?; - if let Ok(_signed_package) = - SignedPacket::from_relay_payload(&pub_key, &content) - { - log::warn!("PKARR GET RELAY"); - return Ok(Some(_signed_package)); - } - } - } - } - - // Pkarr dht - if let Ok(client) = Client::builder() - .cache_size(1) - .build() - { - let pkarr_pk = PublicKey::try_from(public_key)?; - if let Some(_signed_package) = client.resolve(&pkarr_pk).await { - signed_package = Some(_signed_package); - - log::warn!("PKARR GET DHT"); - } - - Ok(signed_package) - } else { - bail!( - "failed to get package by public_key: {}", - z32::encode(public_key) - ) - } - } - - async fn pkarr_dht_relay_switch_put( - &self, - public_key: &[u8; PUBLIC_KEY_LENGTH], - signed_packet: SignedPacket, - ) -> Result<()> { - // Pkarr Relay server - let req_client = reqwest::ClientBuilder::new().build()?; - let packet_bytes: Vec = signed_packet.as_bytes()[32..].to_vec(); - - if let Ok(resp) = req_client - .request( - Method::PUT, - format!("https://pkarr.pubky.org/{}", z32::encode(public_key)), - ) - .body(packet_bytes) - .send() - .await - { - if resp.status() == StatusCode::OK || resp.status() == StatusCode::CONFLICT || resp.status() == StatusCode::NO_CONTENT { - log::warn!("PKARR PUT RELAY"); - return Ok(()); - } - } - - // Pkarr dht - if let Ok(client) = Client::builder().build() { - match client.publish(&signed_packet,None).await { - Ok(_) => { - log::warn!("PKARR PUT DHT"); - Ok(()) - } - Err(_) => bail!( - "dht and relay failed to publish pkarr record for nodeid: {}", - z32::encode(public_key) - ), - } - } else { - log::warn!("PKARR PUT DHT FAILED"); - bail!( - "dht and relay failed to publish pkarr record for nodeid: {}", - z32::encode(public_key) - ) - } - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -struct LastReplyId(u16); - -impl ProtocolHandler for Patcher { - fn accept( - &self, - conn: Connection, - ) -> Pin> + Send + 'static>> { - let patcher = self.clone(); - - Box::pin(async move { - patcher.accept(conn).await?; - Ok(()) - }) - } -} +pub use rustpatcher_macros::*; +pub use updater::UpdaterMode; +pub use patcher::spawn; diff --git a/crates/rustpatcher/src/patch.rs b/crates/rustpatcher/src/patch.rs new file mode 100644 index 0000000..2333bbe --- /dev/null +++ b/crates/rustpatcher/src/patch.rs @@ -0,0 +1,89 @@ +use ed25519_dalek::{Signature, SigningKey, ed25519::signature::SignerMut}; +use 
serde::{Deserialize, Serialize}; +use sha2::Digest; +use crate::Version; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct PatchInfo { + pub version: Version, + pub size: u64, + pub hash: [u8; 32], + pub signature: Signature, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Patch { + info: PatchInfo, + data: Vec, +} + +impl Patch { + pub fn info(&self) -> &PatchInfo { + &self.info + } + + pub fn data(&self) -> &Vec { + &self.data + } + + pub fn verify(&self) -> anyhow::Result<()> { + let (data_no_embed, _, _) = crate::embed::cut_embed_section(self.data.clone())?; + + let mut data_hasher = sha2::Sha512::new(); + data_hasher.update(&data_no_embed); + let data_hash: [u8; 32] = data_hasher.finalize()[..32].try_into()?; + + let mut sign_hash = sha2::Sha512::new(); + sign_hash.update(self.info.version.to_string()); + sign_hash.update(data_hash); + sign_hash.update((data_no_embed.len() as u64).to_le_bytes()); + let sign_hash = sign_hash.finalize(); + + crate::get_owner_pub_key().verify_strict(&sign_hash, &self.info.signature)?; + + if data_hash != self.info.hash { + anyhow::bail!("data hash mismatch"); + } + + if data_no_embed.len() as u64 != self.info.size { + anyhow::bail!("data size mismatch"); + } + + Ok(()) + } + + pub fn sign(owner_signing_key: SigningKey, data_no_embed: Vec, version: Version, ) -> anyhow::Result { + + let mut owner_siging_key = owner_signing_key; + let mut data_hasher = sha2::Sha512::new(); + data_hasher.update(data_no_embed.as_slice()); + let data_hash = data_hasher.finalize()[..32].try_into()?; + + let mut sign_hash = sha2::Sha512::new(); + sign_hash.update(version.to_string()); + sign_hash.update(&data_hash); + sign_hash.update((data_no_embed.len() as u64).to_le_bytes()); + let sign_hash = sign_hash.finalize(); + let signature = owner_siging_key.sign(&sign_hash); + + Ok(PatchInfo { + version, + size: data_no_embed.len() as u64, + hash: data_hash, + signature, + }) + } + + pub fn from_self() -> anyhow::Result { + + let data = std::fs::read(std::env::current_exe()?)?; + let patch_info = crate::embed::get_embedded_patch_info(&data)?; + + let patch = Self { + info: patch_info, + data, + }; + patch.verify()?; + Ok(patch) + } +} diff --git a/crates/rustpatcher/src/patcher.rs b/crates/rustpatcher/src/patcher.rs new file mode 100644 index 0000000..d40590c --- /dev/null +++ b/crates/rustpatcher/src/patcher.rs @@ -0,0 +1,172 @@ +use std::{str::FromStr, sync::Mutex}; + +use actor_helper::{Action, Actor, Handle}; +use distributed_topic_tracker::{RecordPublisher,RecordTopic}; +use iroh::{protocol::Router, Endpoint}; +use once_cell::sync::OnceCell; +use sha2::Digest; +use tracing::{error, warn}; + +use crate::{Distributor, Publisher, Updater, UpdaterMode}; + +static PATCHER: OnceCell>> = OnceCell::new(); + +pub async fn spawn(update_mode: UpdaterMode) -> anyhow::Result<()> { + + #[cfg(not(debug_assertions))] + if PATCHER.get().is_none() { + let patcher = Patcher::builder().updater_mode(update_mode).build().await?; + let _ = PATCHER.set(Mutex::new(Some(patcher))); + } + + #[cfg(debug_assertions)] + if PATCHER.get().is_none() { + let _ = update_mode; + warn!("Skipping rustpatcher initialization in debug build"); + let _ = PATCHER.set(Mutex::new(None)); + } + Ok(()) +} + +#[derive(Debug, Clone)] +pub struct Builder { + updater_mode: UpdaterMode, +} + +impl Default for Builder { + fn default() -> Self { + Self { + updater_mode: UpdaterMode::Now, + } + } +} + +impl Builder { + + #[cfg_attr(debug_assertions, allow(dead_code))] + pub fn 
updater_mode(mut self, mode: UpdaterMode) -> Self { + self.updater_mode = mode; + self + } + + #[cfg_attr(debug_assertions, allow(dead_code))] + pub async fn build(self) -> anyhow::Result { + let secret_key = iroh::SecretKey::generate(rand::rngs::OsRng); + let topic_id = RecordTopic::from_str(format!( + "rustpatcher:{}", + z32::encode(crate::embed::get_owner_pub_key().as_bytes()) + ).as_str())?; + let mut hash = sha2::Sha512::new(); + hash.update(topic_id.hash()); + hash.update("v1"); + let initial_secret = hash.finalize().to_vec(); + + let record_publisher = RecordPublisher::new( + topic_id, + secret_key.public().public(), + secret_key.secret().clone(), + None, + initial_secret, + ); + + let (update_starter, update_receiver) = tokio::sync::mpsc::channel(1); + let publisher = Publisher::new(record_publisher, update_starter)?; + + let endpoint = Endpoint::builder() + .secret_key(secret_key.clone()) + //.add_discovery(DnsDiscovery::n0_dns()) + .discovery_n0() + .bind() + .await?; + + let distributor = Distributor::new(endpoint.clone())?; + + let _router = iroh::protocol::Router::builder(endpoint.clone()) + .accept(Distributor::ALPN(), distributor.clone()) + .spawn(); + + Ok(Patcher::new( + publisher, + self.updater_mode, + update_receiver, + distributor, + endpoint, + _router, + )) + } +} + +#[derive(Debug, Clone)] +pub struct Patcher { + _api: Handle, +} + +#[derive(Debug)] +struct PatcherActor { + rx: tokio::sync::mpsc::Receiver>, + + publisher: Publisher, + updater: Option, + updater_mode: UpdaterMode, + distributor: Distributor, + + _endpoint: Endpoint, + _router: Router, + + update_receiver: tokio::sync::mpsc::Receiver<()>, +} + +impl Patcher { + #[cfg_attr(debug_assertions, allow(dead_code))] + pub fn builder() -> Builder { + Builder::default() + } + + fn new( + publisher: Publisher, + updater_mode: UpdaterMode, + update_receiver: tokio::sync::mpsc::Receiver<()>, + distributor: Distributor, + endpoint: Endpoint, + router: Router, + ) -> Self { + let (api, rx) = Handle::channel(32); + tokio::spawn(async move { + let mut actor = PatcherActor { + rx, + publisher, + updater: None, + _endpoint: endpoint, + _router: router, + updater_mode, + update_receiver, + distributor, + }; + if let Err(e) = actor.run().await { + error!("Patcher actor error: {:?}", e); + } + }); + + Self { _api: api } + } +} + +impl Actor for PatcherActor { + async fn run(&mut self) -> anyhow::Result<()> { + loop { + tokio::select! 
{ + Some(action) = self.rx.recv() => { + action(self).await + } + Some(_) = self.update_receiver.recv(), if self.updater.is_none() => { + warn!("update notification received from Publisher, starting Updater"); + if let Ok(record_publisher) = self.publisher.get_record_publisher().await { + self.updater = Some(Updater::new(self.updater_mode.clone(),self.distributor.clone(),record_publisher)); + } else { + anyhow::bail!("Failed to get RecordPublisher for Updater"); + } + } + } + } + } +} diff --git a/crates/rustpatcher/src/publisher.rs b/crates/rustpatcher/src/publisher.rs new file mode 100644 index 0000000..c504ba7 --- /dev/null +++ b/crates/rustpatcher/src/publisher.rs @@ -0,0 +1,116 @@ +use actor_helper::{Action, Actor, Handle, act_ok}; +use distributed_topic_tracker::{RecordPublisher, unix_minute}; +use iroh::NodeId; +use tracing::{debug, error, warn}; + +use crate::{Patch, PatchInfo, Version}; + +#[derive(Debug, Clone)] +pub struct Publisher { + api: Handle, +} + +#[derive(Debug, Clone)] +pub enum PublisherState { + Publishing, + NewerAvailable, +} + +#[derive(Debug)] +struct PublisherActor { + rx: tokio::sync::mpsc::Receiver>, + state: PublisherState, + + interval: tokio::time::Interval, + self_patch: Patch, + record_publisher: RecordPublisher, + + update_starter: tokio::sync::mpsc::Sender<()>, +} + +impl Publisher { + pub fn new( + record_publisher: RecordPublisher, + update_starter: tokio::sync::mpsc::Sender<()>, + ) -> anyhow::Result { + let self_patch = Patch::from_self()?; + let (api, rx) = Handle::channel(32); + tokio::spawn(async move { + let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(55)); + interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); + + let mut actor = PublisherActor { + rx, + state: PublisherState::Publishing, + interval, + self_patch, + record_publisher, + update_starter, + }; + if let Err(e) = actor.run().await { + error!("VersionPublisher actor error: {:?}", e); + } + }); + Ok(Self { api }) + } + + pub async fn get_record_publisher(&self) -> anyhow::Result { + self.api + .call(act_ok!(actor => async move { + actor.record_publisher.clone() + })) + .await + } +} + +impl Actor for PublisherActor { + async fn run(&mut self) -> anyhow::Result<()> { + loop { + tokio::select! 
{ + Some(action) = self.rx.recv() => { + action(self).await + } + _ = self.interval.tick(), if matches!(self.state, PublisherState::Publishing) => { + let now = unix_minute(0); + let mut records = self.record_publisher.get_records(now).await; + records.extend(self.record_publisher.get_records(now-1).await); + + warn!("Checked for records, found {} records", records.len()); + let c_version = Version::current()?; + let newer_patch_infos = records + .iter() + .filter_map(|r| if let Ok(patch_info) = r.content::(){ + if let Ok(node_id) = NodeId::from_bytes(&r.node_id()) { + warn!("Found patch info: {:?}{:?}", node_id,patch_info); + Some((node_id,patch_info.clone())) + } else { + None + } + } else { + None + }) + .filter(|(_,p)| p.version > c_version) + .collect::>(); + + warn!("Checked for updates, found {} newer versions", newer_patch_infos.len()); + if newer_patch_infos.is_empty() { + let res = self.publish_self(now).await; + debug!("Published self: {:?}", res); + continue; + } + self.state = PublisherState::NewerAvailable; + let _ = self.update_starter.send(()).await; + } + } + } + } +} + +impl PublisherActor { + async fn publish_self(&mut self, unix_minute: u64) -> anyhow::Result<()> { + let record = self + .record_publisher + .new_record(unix_minute, self.self_patch.info().clone())?; + self.record_publisher.publish_record(record).await + } +} diff --git a/crates/rustpatcher/src/updater.rs b/crates/rustpatcher/src/updater.rs new file mode 100644 index 0000000..1230818 --- /dev/null +++ b/crates/rustpatcher/src/updater.rs @@ -0,0 +1,182 @@ +use std::{ + env, ffi::{CString, OsString}, io::Write, process, ptr +}; + +use actor_helper::{Action, Actor, Handle}; +use chrono::Timelike; +use distributed_topic_tracker::{RecordPublisher, unix_minute}; +use iroh::NodeId; +use nix::libc; +use tracing::{error, info}; + +use crate::{Patch, PatchInfo, Version, distributor::Distributor}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum UpdaterMode { + Now, + OnRestart, + At(u8, u8), // hour, minute +} + +#[derive(Debug, Clone)] +pub struct Updater { + _api: Handle, +} + +#[derive(Debug)] +struct UpdaterActor { + rx: tokio::sync::mpsc::Receiver>, + distributor: Distributor, + + mode: UpdaterMode, + newer_patch: Option, + record_publisher: RecordPublisher, + try_update_interval: tokio::time::Interval, + + self_path_before_replace: Option, +} + +impl Updater { + pub fn new( + mode: UpdaterMode, + distributor: Distributor, + record_publisher: RecordPublisher, + ) -> Self { + let (api, rx) = Handle::channel(32); + tokio::spawn(async move { + let mut try_update_interval = + tokio::time::interval(tokio::time::Duration::from_secs(56)); + try_update_interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); + + let mut actor = UpdaterActor { + rx, + mode, + distributor, + newer_patch: None, + record_publisher, + try_update_interval, + self_path_before_replace: None, + }; + if let Err(e) = actor.run().await { + error!("Updater actor error: {:?}", e); + } + }); + Self { _api: api } + } +} + +impl Actor for UpdaterActor { + async fn run(&mut self) -> anyhow::Result<()> { + loop { + tokio::select! 
{ + Some(action) = self.rx.recv() => { + action(self).await + } + _ = self.try_update_interval.tick() => { + if self.newer_patch.is_none() { + let patches = self.check_for_updates().await?; + for (node_id, patch_info) in patches { + if self.try_download_patch(node_id, patch_info).await.is_ok() { + break; + } + } + } else { + match self.mode { + UpdaterMode::Now => { + self.restart_after_update().await?; + }, + UpdaterMode::OnRestart => { + // do nothing, wait for next restart + }, + UpdaterMode::At(hour, minute) => { + let now = chrono::Local::now(); + // prob midnight rollover bug here, fine for now [!] todo + let t_offset = (now.hour() as i32 * 60 + now.minute() as i32) - (hour as i32 * 60 + minute as i32); + if matches!(t_offset, 0..2) { + let _ = self.restart_after_update().await; + } + } + } + } + } + } + } + } +} + +impl UpdaterActor { + async fn check_for_updates(&mut self) -> anyhow::Result> { + let now = unix_minute(0); + let mut records = self.record_publisher.get_records(now).await; + records.extend(self.record_publisher.get_records(now-1).await); + let c_version = Version::current()?; + let mut newer_patch_infos = records + .iter() + .filter_map(|r| { + if let Ok(patch_info) = r.content::() { + if let Ok(node_id) = NodeId::from_bytes(&r.node_id()) { + Some((node_id, patch_info.clone())) + } else { + None + } + } else { + None + } + }) + .filter(|(_, p)| p.version > c_version) + .collect::>(); + + if newer_patch_infos.is_empty() { + return Ok(vec![]); + } + newer_patch_infos.sort_by_key(|(_, p)| p.version.clone()); + newer_patch_infos.reverse(); + + let newest = newer_patch_infos[0].clone(); + Ok(newer_patch_infos + .iter() + .filter(|(_, p)| p.version == newest.1.version) + .cloned() + .collect::>()) + } + + async fn try_download_patch( + &mut self, + node_id: NodeId, + patch_info: PatchInfo, + ) -> anyhow::Result<()> { + info!("Downloading patch {:?} from {:?}", patch_info, node_id); + let res = self.distributor.get_patch(node_id, patch_info).await; + info!("Downloaded patch: {:?}", res.is_ok()); + let patch = res?; + self.newer_patch = Some(patch.clone()); + + + self.self_path_before_replace = Some(env::current_exe()?.into()); + + let mut temp_file = tempfile::NamedTempFile::new()?; + temp_file.write_all(&patch.data())?; + let path = temp_file.path(); + + self_replace::self_replace(path)?; + info!("Updated successfully to version {:?}", patch.info().version); + + if self.mode == UpdaterMode::Now { + self.restart_after_update().await?; + } + Ok(()) + } + + async fn restart_after_update(&mut self) -> anyhow::Result<()> { + let exe_raw = self.self_path_before_replace.clone().ok_or(anyhow::anyhow!("no self path stored"))?; + let exe = CString::new(exe_raw.to_str().unwrap())?; + + // The array must be null-terminated. 
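+        // Descriptive note on the restart path below: execv replaces the current
+        // process image with the freshly written binary and only returns on failure,
+        // in which case the error return value is logged and the process still exits.
+        // argv is passed as a single NULL entry here, so the restarted process
+        // receives an empty argument list (no argv[0]).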
+ let args: [*const libc::c_char; 1] = [ptr::null()]; + + unsafe { + info!("execv: {:?}", nix::libc::execv(exe.as_ptr(), args.as_ptr())); + } + process::exit(0); + } +} diff --git a/crates/rustpatcher/src/utils.rs b/crates/rustpatcher/src/utils.rs deleted file mode 100644 index 4a84259..0000000 --- a/crates/rustpatcher/src/utils.rs +++ /dev/null @@ -1,133 +0,0 @@ -use std::{io::SeekFrom, str::FromStr, time::Duration}; - -use anyhow::bail; -use iroh::Endpoint; -use pkarr::dns::{self, Packet}; -use serde::{de::DeserializeOwned, Serialize}; -use sha2::{Digest, Sha256}; -use tokio::{ - fs::{create_dir, File, OpenOptions}, - io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt}, -}; - -use crate::data::Version; - -pub const PUBLISHER_TRUSTED_KEY_NAME: &str = "trusted_key"; -pub const PUBLISHER_SIGNING_KEY_NAME: &str = "publisher_signing_key"; - -pub const PATCHER_DIR: &str = ".patcher"; -pub const SECRET_KEY_NAME: &str = "secret_key"; -pub const SHARED_SECRET_KEY_NAME: &str = "shared_secret_key"; -pub const LATEST_VERSION_NAME: &str = "latest_version"; -pub const LAST_REPLY_ID_NAME: &str = "last_reply_id"; -pub const LAST_TRUSTED_PACKAGE: &str = "last_trusted_package"; - -pub const PKARR_PUBLISHING_INTERVAL: Duration = Duration::from_secs(60*60); - -pub async fn wait_for_relay(endpoint: &Endpoint) -> anyhow::Result<()> { - while endpoint.home_relay().get().is_err() { - tokio::time::sleep(std::time::Duration::from_millis(50)).await; - } - Ok(()) -} - -pub fn decode_rdata( - packet: &Packet<'_>, - query: &str, -) -> anyhow::Result { - let record = packet - .answers - .iter() - .find(|&record| record.name.to_string().starts_with(&query)) - .ok_or_else(|| anyhow::anyhow!("record not found"))?; - - match &record.rdata { - dns::rdata::RData::TXT(txt) => { - let attrbs_raw = txt.attributes(); - let attrbs = attrbs_raw - .keys() - .map(|a| a.clone()) - .collect::>() - .clone(); - - let val = attrbs - .first() - .ok_or_else(|| anyhow::anyhow!("no attributes"))?; - - Ok(serde_json::from_str::(&val.clone())?.clone()) - } - _ => { - bail!("rdata record not txt: {:?}", record.rdata.type_code()) - } - } -} - -pub trait Storage { - fn from_file(file_name: &str) -> impl std::future::Future> + Send; - fn to_file(self, file_name: &str) -> impl std::future::Future> + Send; -} - -impl Storage for S { - async fn from_file(file_name: &str) -> anyhow::Result { - create_check_patcher_dir().await; - - let mut file = File::open(format!("{PATCHER_DIR}/{file_name}")).await?; - let mut buf = vec![]; - file.read_to_end(&mut buf).await?; - - let t: S = serde_json::from_slice(&buf.as_slice())?; - - Ok(t) - } - - async fn to_file(self: S, file_name: &str) -> anyhow::Result<()> { - create_check_patcher_dir().await; - - let mut file = OpenOptions::new() - .create(true) - .write(true) - .open(format!("{PATCHER_DIR}/{file_name}")) - .await?; - - file.seek(SeekFrom::Start(0)).await?; - file.set_len(0).await?; - - let buf = serde_json::to_vec(&self)?; - file.write_all(buf.as_slice()).await?; - file.flush().await?; - - Ok(()) - } -} - -pub async fn create_check_patcher_dir() { - let mut create = false; - if let Ok(dir) = File::open(PATCHER_DIR).await { - if let Ok(meta) = dir.metadata().await { - if !meta.is_dir() { - create = true; - } - } else { - create = true; - } - } else { - create = true; - } - - if create { - let _ = create_dir(PATCHER_DIR).await; - } -} - - -pub fn compute_hash(data: &[u8]) -> [u8; 32] { - let mut hasher = Sha256::new(); - hasher.update(data); - let mut buf = [0u8; 32]; - buf.copy_from_slice(&hasher.finalize()); - 
buf -} - -pub fn get_app_version()->anyhow::Result { - Version::from_str(super::version_embed::get_app_version()) -} \ No newline at end of file diff --git a/crates/rustpatcher/src/version.rs b/crates/rustpatcher/src/version.rs new file mode 100644 index 0000000..6d2bb66 --- /dev/null +++ b/crates/rustpatcher/src/version.rs @@ -0,0 +1,56 @@ +use std::str::FromStr; + +use anyhow::bail; +use serde::{Deserialize, Serialize}; +use tracing::warn; + +#[derive(Debug, Clone, Serialize, Deserialize,PartialOrd, PartialEq, Eq)] +pub struct Version(pub i32, pub i32, pub i32); + +impl ToString for Version { + fn to_string(&self) -> String { + format!("{}.{}.{}",self.0,self.1,self.2) + } +} + +impl FromStr for Version { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let parts: Vec<&str> = s.split('.').collect(); + + if parts.len() != 3 { + bail!("wrong version format") + } + + let major = parts[0].parse::()?; + let minor = parts[1].parse::()?; + let patch = parts[2].parse::()?; + + Ok(Version(major, minor, patch)) + } +} + +impl Ord for Version { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + let self_sum: i128 = ((i32::MAX as i128).pow(2) * self.0 as i128) + + ((i32::MAX as i128).pow(1) * self.1 as i128) + + self.2 as i128; + let other_sum: i128 = ((i32::MAX as i128).pow(2) * other.0 as i128) + + ((i32::MAX as i128).pow(1) * other.1 as i128) + + other.2 as i128; + self_sum.cmp(&other_sum) + } +} + +impl Version { + pub fn new(major: i32, minor: i32, patch: i32) -> Self { + Version(major, minor, patch) + } + + pub fn current() -> anyhow::Result { + let v = Version::from_str(&crate::embed::get_app_version()); + warn!("Current version: {:?}", v); + v + } +} \ No newline at end of file diff --git a/crates/rustpatcher/src/version_embed.rs b/crates/rustpatcher/src/version_embed.rs deleted file mode 100644 index 10949f8..0000000 --- a/crates/rustpatcher/src/version_embed.rs +++ /dev/null @@ -1,12 +0,0 @@ -use once_cell::sync::OnceCell; - -pub static APP_VERSION: OnceCell<&'static str> = OnceCell::new(); - -#[doc(hidden)] -pub fn __set_version(version: &'static str) { - let _ = APP_VERSION.set(version); -} - -pub fn get_app_version() -> &'static str { - APP_VERSION.get().expect("Version not initialized") -} \ No newline at end of file diff --git a/crates/rustpatcher/test.file b/crates/rustpatcher/test.file deleted file mode 100644 index 2a5d9c4..0000000 --- a/crates/rustpatcher/test.file +++ /dev/null @@ -1,4 +0,0 @@ -This is a real test file with a bit of text but not too much! 34 -This is a real test file with a bit of text but not too much! 35 -This is a real test file with a bit of text but not too much! 36 -This is a real test file with a bit of text but not too much! 
37 \ No newline at end of file diff --git a/crates/rustpatcher/xtask/sign.rs b/crates/rustpatcher/xtask/sign.rs new file mode 100644 index 0000000..89befba --- /dev/null +++ b/crates/rustpatcher/xtask/sign.rs @@ -0,0 +1,141 @@ +use std::{fs::{self, OpenOptions}, io::{Seek, SeekFrom, Write}, path::PathBuf}; + +use clap::{Parser, Subcommand}; +use ed25519_dalek::SigningKey; + +#[derive(Parser, Debug)] +#[command(name = "rustpatcher", version, about)] +struct RootCli { + #[command(subcommand)] + cmd: Commands, +} +enum KeySource { + File(PathBuf), + Inline(String), +} + +#[derive(Subcommand, Debug)] +enum Commands { + /// Sign and embed a patch into a binary + Sign(SignArgs), + /// generates new signing key and saves to file it prints pubkey to std out + Gen { + #[arg(value_name = "PATH", required = true)] + key_file: std::path::PathBuf, + }, +} + +#[derive(Parser, Debug)] +struct SignArgs { + #[arg(value_name = "BIN")] + binary: std::path::PathBuf, + #[arg(long = "key-file", value_name = "PATH")] + key_file: Option, + #[arg(long = "key", value_name = "Z32")] + key: Option, +} + +fn main() -> anyhow::Result<()> { + let root = RootCli::parse(); + match root.cmd { + Commands::Sign(args) => sign_cmd(args), + Commands::Gen { key_file } => generate_key_cmd(key_file), + } +} + +fn generate_key_cmd(key_file: std::path::PathBuf) -> anyhow::Result<()> { + let signing_key = SigningKey::generate(&mut rand::thread_rng()); + let signing_key_z32 = z32::encode(signing_key.as_bytes()); + let signing_key_bytes = signing_key_z32.as_bytes(); + + if key_file.exists() { + println!("Key file {} already exists", key_file.display()); + return Ok(()); + } + + fs::write(&key_file, signing_key_bytes)?; + println!("Wrote signing key to {}", key_file.display()); + println!("Public key (z-base-32): {}", z32::encode(signing_key.verifying_key().as_bytes())); + println!("\n"); + println!("// Add the following to your main function:\n"); + println!("[rustpatcher::public_key(\"{}\")]",z32::encode(signing_key.verifying_key().as_bytes())); + println!("fn main() {{\n // your code here\n}}"); + + Ok(()) +} + +fn sign_cmd(args: SignArgs) -> anyhow::Result<()> { + let key_src = if let Some(k) = args.key { + KeySource::Inline(k) + } else { + KeySource::File( + args.key_file + .unwrap_or_else(|| PathBuf::from("./owner_signing_key")), + ) + }; + + let signing_key = load_signing_key(key_src)?; + + let mut file = OpenOptions::new() + .read(true) + .write(true) + .create(false) + .open(&args.binary)?; + + let mut data = fs::read(&args.binary) + .map_err(|e| anyhow::anyhow!("failed to read binary {}: {}", args.binary.display(), e))?; + + let (data_no_embed, data_embed, embed_region) = + rustpatcher::embed::cut_embed_section(data.clone())?; + let version = rustpatcher::embed::get_embedded_version(&data_embed)?; + + let patch_info = rustpatcher::Patch::sign(signing_key, data_no_embed, version)?; + rustpatcher::embed::set_embedded_patch_info(&mut data, patch_info, embed_region)?; + + file.seek(SeekFrom::Start(0))?; + file.write_all(&data)?; + file.set_len(data.len() as u64)?; + + Ok(()) +} + +fn load_signing_key(source: KeySource) -> anyhow::Result { + match source { + KeySource::File(path) => { + let data = if let Ok(data) = fs::read(&path) { + data + } else { + let signing_key = SigningKey::generate(&mut rand::thread_rng()); + let signing_key_z32 = z32::encode(signing_key.as_bytes()); + let signing_key_bytes = signing_key_z32.as_bytes(); + fs::write(&path, signing_key_bytes)?; + signing_key_bytes.to_vec() + }; + + let sing_key_bytes = 
z32::decode(&data) + .map_err(|_| anyhow::anyhow!("failed to decode signing key from z-base-32"))?; + let sign_key_bytes = sing_key_bytes.as_slice(); + Ok(SigningKey::from_bytes(sign_key_bytes.try_into().map_err( + |_| { + anyhow::anyhow!( + "signing key must be 32 bytes (got {})", + sign_key_bytes.len() + ) + }, + )?)) + } + KeySource::Inline(key_str) => { + let sing_key_bytes = z32::decode(key_str.as_bytes()) + .map_err(|_| anyhow::anyhow!("failed to decode signing key from z-base-32"))?; + let sign_key_bytes = sing_key_bytes.as_slice(); + Ok(SigningKey::from_bytes(sign_key_bytes.try_into().map_err( + |_| { + anyhow::anyhow!( + "signing key must be 32 bytes (got {})", + sign_key_bytes.len() + ) + }, + )?)) + } + } +} diff --git a/media/patcher_diagram.svg b/media/patcher_diagram.svg deleted file mode 100644 index c7ddbe6..0000000 --- a/media/patcher_diagram.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - -
-[drawio diagram removed: app nodes publish "Trusted Pkg" and "Own Pkg" records to the PKARR DHT, announce node ids via IROH-TOPIC-TRACKER for the highest known version, and download and verify patches peer-to-peer before running the patch routine]
\ No newline at end of file diff --git a/owner_key_example b/owner_key_example new file mode 100644 index 0000000..383991e --- /dev/null +++ b/owner_key_example @@ -0,0 +1 @@ +5wrjzq7h54t61sr8izgqdkrjx9tcqey5n6apff5ju5yadoobuj3o \ No newline at end of file